feat: AI responses support streaming (#1826)

Make the AI response support streaming return.
1307 2023-05-13 11:31:20 +08:00 committed by GitHub
parent 6f0b7f3f24
commit c6675ee4e6


@@ -18,6 +18,7 @@ import (
 	"context"
 	"fmt"
 	"io"
+	"net/http"
 	"strings"
 	"time"
@@ -78,7 +79,10 @@ func QueryAnswerStream(authToken string, question string, writer io.Writer, buil
 	client := getProxyClientFromToken(authToken)
 	ctx := context.Background()
+	flusher, ok := writer.(http.Flusher)
+	if !ok {
+		return fmt.Errorf("writer does not implement http.Flusher")
+	}
 	// https://platform.openai.com/tokenizer
 	// https://github.com/pkoukk/tiktoken-go#available-encodings
 	promptTokens, err := getTokenSize(openai.GPT3TextDavinci003, question)
@@ -128,7 +132,7 @@ func QueryAnswerStream(authToken string, question string, writer io.Writer, buil
 		if _, err = fmt.Fprintf(writer, "data: %s\n\n", data); err != nil {
 			return err
 		}
+		flusher.Flush()
 		// Append the response to the strings.Builder
 		builder.WriteString(data)
 	}
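The change type-asserts the incoming io.Writer to http.Flusher and calls Flush after every "data: ...\n\n" frame, so each chunk is pushed to the client as soon as it arrives from the model instead of sitting in the response buffer until the handler returns. The sketch below shows that same pattern in isolation, assuming a plain net/http server; the handler name, route, and payload are hypothetical and are not taken from the Casdoor code.

// Minimal sketch of the Flusher-based streaming pattern this commit applies.
// streamHandler, /stream, and the "chunk N" payload are illustrative only.
package main

import (
	"fmt"
	"net/http"
	"time"
)

func streamHandler(w http.ResponseWriter, r *http.Request) {
	// Server-Sent Events headers so the client keeps the connection open.
	w.Header().Set("Content-Type", "text/event-stream")
	w.Header().Set("Cache-Control", "no-cache")
	w.Header().Set("Connection", "keep-alive")

	// Same check the commit adds: without http.Flusher the frames would only
	// reach the client when the handler finishes.
	flusher, ok := w.(http.Flusher)
	if !ok {
		http.Error(w, "streaming unsupported", http.StatusInternalServerError)
		return
	}

	for i := 0; i < 5; i++ {
		// Each SSE frame is "data: <payload>\n\n", matching the fmt.Fprintf in the diff.
		if _, err := fmt.Fprintf(w, "data: chunk %d\n\n", i); err != nil {
			return
		}
		flusher.Flush() // push the frame to the client immediately
		time.Sleep(200 * time.Millisecond)
	}
}

func main() {
	http.HandleFunc("/stream", streamHandler)
	http.ListenAndServe(":8080", nil)
}

In the commit itself, QueryAnswerStream receives the controller's http.ResponseWriter as an io.Writer, which is why the added type assertion (and its error when the writer cannot flush) is enough to enable streaming on a standard net/http server.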