@developit
Created February 12, 2026 15:07
// index.ts: repro script showing ko-ai yielding no chunks in non-streaming mode
import ai from "ko-ai";

const BASE_URL = "https://proxy-shopify-ai.local.shop.dev/v1";
const MODEL = "openai:gpt-4o-mini";
const PROMPT = "In one sentence, what is the capital of Japan?";

// Send the same prompt through one client configuration and report how many
// text chunks came back, how long the request took, and the text itself.
async function run(label: string, mode: "completions" | "responses", stream: boolean) {
  const chat = ai({
    baseURL: BASE_URL,
    apiKey: "",
    model: MODEL,
    temperature: 0,
    max_output_tokens: 100,
    mode,
    stream,
  });

  const t = performance.now();
  let text = "";
  let chunks = 0;
  for await (const chunk of chat.send(PROMPT)) {
    if (chunk.type === "text") {
      text += chunk.text;
      chunks++;
    }
  }
  const ms = (performance.now() - t) | 0;

  const ok = text.length > 0;
  console.log(`${ok ? "\x1b[32m✓\x1b[0m" : "\x1b[31m✗\x1b[0m"} ${label} (${chunks} chunks, ${ms}ms)`);
  if (ok) console.log(` "${text}"`);
  else console.log(` \x1b[2m(no text received: 0 chunks yielded)\x1b[0m`);
}

console.log("--- ko-ai: 4 modes ---\n");
await run("streaming + completions", "completions", true);
await run("streaming + responses", "responses", true);
await run("non-streaming + completions", "completions", false);
await run("non-streaming + responses", "responses", false);

console.log(`
\x1b[33mBug:\x1b[0m non-streaming yields 0 chunks.
ko-ai splits the response body by newlines and parses each line as JSON.
This works for streaming (each SSE line is a complete JSON object), but
non-streaming responses from this proxy are pretty-printed across multiple
lines, so no individual line is valid JSON; they all silently fail to parse.`);
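
For reference, here is a minimal standalone sketch of the failure mode described above. None of this is ko-ai's source; the helper names and response shape are assumptions for illustration. Splitting a pretty-printed body by newlines and parsing each line as JSON yields nothing, while a whole-body fallback parse recovers the object.

// parse-sketch.ts (hypothetical, not part of ko-ai): per-line parsing vs. a whole-body fallback.
type Parsed = Record<string, unknown>;

// Per-line parsing, as described above: parse each line as JSON, silently skipping failures.
function parseByLine(body: string): Parsed[] {
  const out: Parsed[] = [];
  for (const line of body.split("\n")) {
    if (!line.trim()) continue;
    try {
      out.push(JSON.parse(line));
    } catch {
      // pretty-printed fragments like `  "object": "chat.completion",` land here
    }
  }
  return out;
}

// One possible fix: if per-line parsing finds nothing, try the whole body as a
// single JSON document before giving up.
function parseBody(body: string): Parsed[] {
  const perLine = parseByLine(body);
  if (perLine.length > 0) return perLine;
  try {
    return [JSON.parse(body)];
  } catch {
    return [];
  }
}

// A pretty-printed non-streaming body (the shape is illustrative only):
const prettyPrinted = JSON.stringify({ choices: [{ message: { content: "Tokyo." } }] }, null, 2);
console.log(parseByLine(prettyPrinted).length); // 0  (every line fails to parse)
console.log(parseBody(prettyPrinted).length);   // 1  (whole-body fallback succeeds)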
package.json
{
  "name": "2026-02-12-ko-ai-non-streaming",
  "version": "1.0.0",
  "description": "",
  "main": "index.ts",
  "keywords": [],
  "author": "",
  "license": "ISC",
  "packageManager": "pnpm@10.7.1",
  "dependencies": {
    "ko-ai": "^0.1.1"
  },
  "type": "module"
}
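
To reproduce locally: install the dependency with the pinned package manager (`pnpm install`), then run index.ts with any TypeScript-capable runtime that supports ESM and top-level await. The gist does not record which runner was used, so something like `pnpm dlx tsx index.ts` is an assumption, not the author's setup.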