rakyll/openai-go

Fatal JSON decode error when using Stream: true

teslapatrick opened this issue · 1 comments

I added Stream: true to the request parameters, and the API returned "Server-Sent Events" data that the client could not parse.

  • Request:
client := chat.NewClient(s, "gpt-3.5-turbo")
resp, err := client.CreateCompletion(ctx, &chat.CreateCompletionParams{
    Messages: []*chat.Message{
	    {Role: "user", Content: "hello"},
    },
    Stream: true,
})
  • Response:

data: {"id":"chatcmpl-6s9Fmuz4z...jxmcQKQeiGr","object":"chat.completion.chunk","created":1678363302,"model":"gpt-3.5-turbo-0301","choices":[{"delta":{"role":"assistant"},"index":0,"finish_reason":null}]}
data: {"id":"chatcmpl-6s9Fmuz4z...jxmcQKQeiGr","object":"chat.completion.chunk","created":1678363302,"model":"gpt-3.5-turbo-0301","choices":[{"delta":{"content":"\n\n"},"index":0,"finish_reason":null}]}
data: {"id":"chatcmpl-6s9Fmuz4z...jxmcQKQeiGr","object":"chat.completion.chunk","created":1678363302,"model":"gpt-3.5-turbo-0301","choices":[{"delta":{"content":"Hello"},"index":0,"finish_reason":null}]}
data: {"id":"chatcmpl-6s9Fmuz4z...jxmcQKQeiGr","object":"chat.completion.chunk","created":1678363302,"model":"gpt-3.5-turbo-0301","choices":[{"delta":{"content":"!"},"index":0,"finish_reason":null}]}
data: {"id":"chatcmpl-6s9Fmuz4z...jxmcQKQeiGr","object":"chat.completion.chunk","created":1678363302,"model":"gpt-3.5-turbo-0301","choices":[{"delta":{"content":" How"},"index":0,"finish_reason":null}]}
data: {"id":"chatcmpl-6s9Fmuz4z...jxmcQKQeiGr","object":"chat.completion.chunk","created":1678363302,"model":"gpt-3.5-turbo-0301","choices":[{"delta":{"content":" can"},"index":0,"finish_reason":null}]}
data: {"id":"chatcmpl-6s9Fmuz4z...jxmcQKQeiGr","object":"chat.completion.chunk","created":1678363302,"model":"gpt-3.5-turbo-0301","choices":[{"delta":{"content":" I"},"index":0,"finish_reason":null}]}
data: {"id":"chatcmpl-6s9Fmuz4z...jxmcQKQeiGr","object":"chat.completion.chunk","created":1678363302,"model":"gpt-3.5-turbo-0301","choices":[{"delta":{"content":" assist"},"index":0,"finish_reason":null}]}
data: {"id":"chatcmpl-6s9Fmuz4z...jxmcQKQeiGr","object":"chat.completion.chunk","created":1678363302,"model":"gpt-3.5-turbo-0301","choices":[{"delta":{"content":" you"},"index":0,"finish_reason":null}]}
data: {"id":"chatcmpl-6s9Fmuz4z...jxmcQKQeiGr","object":"chat.completion.chunk","created":1678363302,"model":"gpt-3.5-turbo-0301","choices":[{"delta":{"content":" today"},"index":0,"finish_reason":null}]}
data: {"id":"chatcmpl-6s9Fmuz4z...jxmcQKQeiGr","object":"chat.completion.chunk","created":1678363302,"model":"gpt-3.5-turbo-0301","choices":[{"delta":{"content":"?"},"index":0,"finish_reason":null}]}
data: {"id":"chatcmpl-6s9Fmuz4z...jxmcQKQeiGr","object":"chat.completion.chunk","created":1678363302,"model":"gpt-3.5-turbo-0301","choices":[{"delta":{},"index":0,"finish_reason":"stop"}]}
data: [DONE]

Failed to complete: EOF
panic: runtime error: invalid memory address or nil pointer dereference
[signal SIGSEGV: segmentation violation code=0x1 addr=0x28 pc=0x631bb6]

I added a new StreamingClient for streaming purposes. Here is an example usage:

package main

import (
	"context"
	"fmt"
	"log"
	"os"

	"github.com/rakyll/openai-go"
	"github.com/rakyll/openai-go/chat"
)

func main() {
	ctx := context.Background()

	// Authenticate using the API key from the environment.
	session := openai.NewSession(os.Getenv("OPENAI_API_KEY"))

	// The streaming client delivers the response incrementally,
	// invoking the callback once per server-sent chunk.
	streamingClient := chat.NewStreamingClient(session, "gpt-3.5-turbo")

	params := &chat.CreateCompletionParams{
		Messages: []*chat.Message{
			{Role: "user", Content: "hello"},
		},
	}
	onChunk := func(r *chat.CreateCompletionStreamingResponse) {
		fmt.Println(r.Choices[0].Delta)
	}

	if err := streamingClient.CreateCompletion(ctx, params, onChunk); err != nil {
		log.Fatal(err)
	}
}