From c6675ee4e6bd55104fd6fb93950a308657873aba Mon Sep 17 00:00:00 2001 From: 1307 <407700026@qq.com> Date: Sat, 13 May 2023 11:31:20 +0800 Subject: [PATCH] feat: AI responses support streaming (#1826) This change makes AI responses support streaming return. --- ai/ai.go | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/ai/ai.go b/ai/ai.go index e9c6d126..f1fb902e 100644 --- a/ai/ai.go +++ b/ai/ai.go @@ -18,6 +18,7 @@ import ( "context" "fmt" "io" + "net/http" "strings" "time" @@ -78,7 +79,10 @@ func QueryAnswerStream(authToken string, question string, writer io.Writer, buil client := getProxyClientFromToken(authToken) ctx := context.Background() - + flusher, ok := writer.(http.Flusher) + if !ok { + return fmt.Errorf("writer does not implement http.Flusher") + } // https://platform.openai.com/tokenizer // https://github.com/pkoukk/tiktoken-go#available-encodings promptTokens, err := getTokenSize(openai.GPT3TextDavinci003, question) @@ -128,7 +132,7 @@ func QueryAnswerStream(authToken string, question string, writer io.Writer, buil if _, err = fmt.Fprintf(writer, "data: %s\n\n", data); err != nil { return err } - + flusher.Flush() // Append the response to the strings.Builder builder.WriteString(data) }