Mirror of https://github.com/casdoor/casdoor.git
feat: AI responses support streaming (#1826)
AI responses can now be returned as a stream.
This commit is contained in:
parent 6f0b7f3f24
commit c6675ee4e6

ai/ai.go: 8 lines changed
@@ -18,6 +18,7 @@ import (
	"context"
	"fmt"
	"io"
	"net/http"
	"strings"
	"time"
@@ -78,7 +79,10 @@ func QueryAnswerStream(authToken string, question string, writer io.Writer, buil
	client := getProxyClientFromToken(authToken)

	ctx := context.Background()

	flusher, ok := writer.(http.Flusher)
	if !ok {
		return fmt.Errorf("writer does not implement http.Flusher")
	}
	// https://platform.openai.com/tokenizer
	// https://github.com/pkoukk/tiktoken-go#available-encodings
	promptTokens, err := getTokenSize(openai.GPT3TextDavinci003, question)
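The comments in this hunk point at tiktoken-go, which the getTokenSize helper presumably wraps to estimate how many tokens the prompt will consume. Below is a minimal sketch of that idea, assuming the github.com/pkoukk/tiktoken-go API; countTokens is a hypothetical stand-in, not the repository's actual helper:

```go
package main

import (
	"fmt"

	tiktoken "github.com/pkoukk/tiktoken-go"
)

// countTokens is a hypothetical stand-in for getTokenSize: it looks up the
// tokenizer that matches the model name and returns the number of tokens the
// prompt encodes to.
func countTokens(model string, prompt string) (int, error) {
	enc, err := tiktoken.EncodingForModel(model)
	if err != nil {
		return 0, err
	}
	// Encode returns the token IDs; their count is the prompt's token size.
	return len(enc.Encode(prompt, nil, nil)), nil
}

func main() {
	n, err := countTokens("text-davinci-003", "How do I enable streaming responses?")
	if err != nil {
		panic(err)
	}
	fmt.Println("prompt tokens:", n)
}
```

Counting the prompt up front lets the caller budget the completion's token limit against the model's context window before opening the stream.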
@@ -128,7 +132,7 @@ func QueryAnswerStream(authToken string, question string, writer io.Writer, buil
		if _, err = fmt.Fprintf(writer, "data: %s\n\n", data); err != nil {
			return err
		}

		flusher.Flush()
		// Append the response to the strings.Builder
		builder.WriteString(data)
	}
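Taken together, the loop writes each model chunk as a Server-Sent Events data frame, flushes it so the client sees output immediately, and mirrors the text into a strings.Builder so the full answer is still available afterwards. The following is a self-contained sketch of that pattern as a plain net/http handler; the chunk slice is invented for illustration, whereas in the real code the chunks come from the OpenAI completion stream:

```go
package main

import (
	"fmt"
	"net/http"
	"strings"
	"time"
)

// handler streams a few chunks to the client as Server-Sent Events, the same
// pattern QueryAnswerStream relies on: assert http.Flusher, write "data:"
// frames, flush after each one, and keep a copy in a strings.Builder.
func handler(w http.ResponseWriter, r *http.Request) {
	flusher, ok := w.(http.Flusher)
	if !ok {
		http.Error(w, "streaming unsupported", http.StatusInternalServerError)
		return
	}

	w.Header().Set("Content-Type", "text/event-stream")
	w.Header().Set("Cache-Control", "no-cache")

	var builder strings.Builder
	for _, chunk := range []string{"Hello", ", ", "world"} { // invented chunk source
		if _, err := fmt.Fprintf(w, "data: %s\n\n", chunk); err != nil {
			return
		}
		flusher.Flush()            // push the frame to the client immediately
		builder.WriteString(chunk) // accumulate the full answer server-side
		time.Sleep(100 * time.Millisecond)
	}
	_ = builder.String() // complete response, available once the stream ends
}

func main() {
	http.HandleFunc("/stream", handler)
	http.ListenAndServe(":8080", nil)
}
```

Without the http.Flusher assertion the frames would sit in the ResponseWriter's buffer until the handler returns, which is why the earlier hunk rejects writers that cannot flush.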