import { InferenceClient } from "@huggingface/inference";

// Client for the Hugging Face Inference API; reads the access token from the
// HF_TOKEN environment variable.
const client = new InferenceClient(process.env.HF_TOKEN);

// Accumulates the full assistant reply as deltas arrive.
let out = "";

// Open a streaming chat completion; chunks arrive as an async iterable.
const stream = client.chatCompletionStream({
  model: "deepseek-ai/DeepSeek-V3.2:novita",
  messages: [
    {
      role: "user",
      content: "What is the capital of France?",
    },
  ],
});

for await (const chunk of stream) {
  // delta.content is undefined on role-only and final chunks; guard with a
  // nullish check so we never append the literal string "undefined" to `out`.
  const newContent = chunk.choices?.[0]?.delta?.content;
  if (newContent != null) {
    out += newContent;
    console.log(newContent);
  }
}

Downloads last month

-

Downloads are not tracked for this model. See "How to track" for details.
Inference Providers NEW
This model isn't deployed by any Inference Provider yet. 🙋 Ask for provider support.