Framework integration
Next.js + Vercel AI SDK
Use Nexevo as a model provider with the Vercel AI SDK (`@ai-sdk/openai`) to build streaming UIs.
Node.js/TypeScript
typescript
// Vercel AI SDK works with any OpenAI-compatible endpoint.
// Use the openai provider with a custom base URL.
import {
const { messages } = await req.json();
const result = await streamText({
model: nexevo("deepseek-chat"),
messages,
});
return result.toDataStreamResponse();
} from "@ai-sdk/openai";
import {
const { messages } = await req.json();
const result = await streamText({
model: nexevo("deepseek-chat"),
messages,
});
return result.toDataStreamResponse();
} from "ai";
const nexevo = createOpenAI({
const { messages } = await req.json();
const result = await streamText({
model: nexevo("deepseek-chat"),
messages,
});
return result.toDataStreamResponse();
});
const result = await streamText({
const { messages } = await req.json();
const result = await streamText({
model: nexevo("deepseek-chat"),
messages,
});
return result.toDataStreamResponse();
});
// Use in a Next.js Route Handler:
export async function POST(req: Request) {
const { messages } = await req.json();
const result = await streamText({
model: nexevo("deepseek-chat"),
messages,
});
return result.toDataStreamResponse();
}