dexaai/openai-fetch

Chat Completion Readable Stream

Opened this issue · 1 comment

Is this function already implemented?

kyr0 commented

Example code:

// https://github.com/dexaai/openai-fetch
import { OpenAIClient, type ChatParams, type ChatStreamResponse } from 'openai-fetch';

// Client for the openai-fetch wrapper; replace 'YOUR_KEY' with a real API key.
const client = new OpenAIClient({ apiKey: 'YOUR_KEY' });

// Accumulates the streamed delta chunks into the full response text.
// NOTE(review): module-level mutable state — readStreamChunks appends to it.
let partialResponseText = ''

/**
 * Drains a chat-completion chunk stream, appending each delta's text to the
 * module-level `partialResponseText`, then invokes `cb` exactly once when the
 * stream ends.
 *
 * Fixes over the original example:
 * - The recursive `read()` calls were never awaited, so the returned promise
 *   resolved before the stream was consumed; an awaited loop fixes that.
 * - `cb` is typed `() => void` instead of the unsafe `Function`.
 * - The final delta of a stream typically carries no `content`; `?? ''`
 *   prevents appending the literal string "undefined".
 * - The reader lock is released even if reading throws.
 *
 * @param stream - ReadableStream of chat-completion chunk objects.
 * @param cb - Completion callback, called once after the stream is exhausted.
 * @returns Promise that resolves after `cb` has run.
 */
async function readStreamChunks(stream: ReadableStream, cb: () => void): Promise<void> {
    const reader = stream.getReader();
    try {
      for (;;) {
        const { done, value } = await reader.read();
        if (done) break;

        const delta = value.choices[0]?.delta?.content ?? '';
        console.log('delta chunk response', delta)
        partialResponseText += delta
        console.log('concat partial response', partialResponseText)
      }
      cb();
    } finally {
      // Release the lock so the stream can be read or cancelled elsewhere.
      reader.releaseLock();
    }
  }
  
// Build the request as a declared ChatParams value instead of forcing it with
// an `as ChatParams` assertion — the assertion would silence the compiler's
// missing/excess-property checks, defeating the point of the type.
const chatParams: ChatParams = {
    model: "gpt-4-turbo-preview", // make sure on platform.openai.com that you have access to that model or choose another
    messages: [
        {
            role: "system",
            content: "Tell me a story about openai-fetch, a library that can stream OpenAI responses."
        },
    ],
    temperature: 0.7,
    n: 1,
};

// Kick off the streaming completion request.
const readableStream: ChatStreamResponse = await client.streamChatCompletion(chatParams);

// Start reading the stream; await so any read failure surfaces here instead
// of becoming an unhandled rejection.
await readStreamChunks(readableStream, () => {
    console.log('full response ready', partialResponseText)
})