Skip to content

Commit

Permalink
Update last chunk handling in examples
Browse files Browse the repository at this point in the history
  • Loading branch information
CharlieFRuan committed Jun 4, 2024
1 parent 3f9e529 commit 5dc369a
Show file tree
Hide file tree
Showing 5 changed files with 15 additions and 15 deletions.
8 changes: 4 additions & 4 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -184,16 +184,16 @@ const chunks = await engine.chat.completions.create({
});

let reply = "";
let lastChunk: webllm.ChatCompletionChunk | undefined = undefined;
for await (const chunk of chunks) {
reply += chunk.choices[0]?.delta.content || "";
console.log(reply);
lastChunk = chunk;
if (chunk.usage) {
console.log(chunk.usage); // only last chunk has usage
}
}

const fullReply = await engine.getMessage()
const fullReply = await engine.getMessage();
console.log(fullReply);
console.log(lastChunk!.usage);
```

## Advanced Usage
Expand Down
6 changes: 3 additions & 3 deletions examples/get-started-web-worker/src/main.ts
Original file line number Diff line number Diff line change
Expand Up @@ -85,16 +85,16 @@ async function mainStreaming() {

const asyncChunkGenerator = await engine.chat.completions.create(request);
let message = "";
let lastChunk: webllm.ChatCompletionChunk | undefined = undefined;
for await (const chunk of asyncChunkGenerator) {
console.log(chunk);
message += chunk.choices[0]?.delta?.content || "";
setLabel("generate-label", message);
lastChunk = chunk;
if (chunk.usage) {
console.log(chunk.usage); // only last chunk has usage
}
// engine.interruptGenerate(); // works with interrupt as well
}
console.log("Final message:\n", await engine.getMessage()); // the concatenated message
console.log(lastChunk!.usage);
}

// Run one of the functions below
Expand Down
6 changes: 3 additions & 3 deletions examples/service-worker/src/main.ts
Original file line number Diff line number Diff line change
Expand Up @@ -102,16 +102,16 @@ async function mainStreaming() {

const asyncChunkGenerator = await engine.chat.completions.create(request);
let message = "";
let lastChunk: webllm.ChatCompletionChunk | undefined = undefined;
for await (const chunk of asyncChunkGenerator) {
console.log(chunk);
message += chunk.choices[0]?.delta?.content || "";
setLabel("generate-label", message);
lastChunk = chunk;
if (chunk.usage) {
console.log(chunk.usage); // only last chunk has usage
}
// engine.interruptGenerate(); // works with interrupt as well
}
console.log("Final message:\n", await engine.getMessage()); // the concatenated message
console.log(lastChunk!.usage);
}

registerServiceWorker();
Expand Down
8 changes: 4 additions & 4 deletions examples/streaming/src/streaming.ts
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@ async function main() {
const initProgressCallback = (report: webllm.InitProgressReport) => {
setLabel("init-label", report.text);
};
const selectedModel = "Llama-3-8B-Instruct-q4f32_1-MLC";
const selectedModel = "TinyLlama-1.1B-Chat-v0.4-q4f16_1-MLC";
const engine: webllm.MLCEngineInterface = await webllm.CreateMLCEngine(
selectedModel,
{ initProgressCallback: initProgressCallback },
Expand All @@ -38,16 +38,16 @@ async function main() {

const asyncChunkGenerator = await engine.chat.completions.create(request);
let message = "";
let lastChunk: webllm.ChatCompletionChunk | undefined = undefined;
for await (const chunk of asyncChunkGenerator) {
console.log(chunk);
message += chunk.choices[0]?.delta?.content || "";
setLabel("generate-label", message);
lastChunk = chunk;
if (chunk.usage) {
console.log(chunk.usage); // only last chunk has usage
}
// engine.interruptGenerate(); // works with interrupt as well
}
console.log("Final message:\n", await engine.getMessage()); // the concatenated message
console.log(lastChunk!.usage);
}

main();
2 changes: 1 addition & 1 deletion src/openai_api_protocols/chat_completion.ts
Original file line number Diff line number Diff line change
Expand Up @@ -469,7 +469,7 @@ export function postInitAndCheckFields(
}

// 8. Only set stream_options when streaming
if (request.stream_options !== undefined && request.tools !== null) {
if (request.stream_options !== undefined && request.stream_options !== null) {
if (!request.stream) {
throw new Error("Only specify stream_options when stream=True.");
}
Expand Down

0 comments on commit 5dc369a

Please sign in to comment.