// Command main is an interactive terminal chat client for a local
// OpenAI-compatible model server, keeping a summarized conversation
// history between turns.
package main
|
||
|
|
||
|
import (
|
||
|
"bufio"
|
||
|
"context"
|
||
|
"fmt"
|
||
|
"os"
|
||
|
|
||
|
"github.com/openai/openai-go"
|
||
|
"github.com/openai/openai-go/option"
|
||
|
)
|
||
|
|
||
|
var (
	// sys is the system prompt sent as the first message of every chat request.
	sys = `You are an expert golang programmer.`

	// model is the model identifier passed to the OpenAI-compatible endpoint.
	model = "Qwen/Qwen2.5-Coder-7B-Instruct"

	// reader wraps stdin for the rune-at-a-time reads done by readKey.
	reader = bufio.NewReader(os.Stdin)

	// history holds one summarized entry per completed chat turn; it is
	// replayed into every new request to give the model context.
	history = []History{}

	// client talks to a local OpenAI-compatible server; the API key is a
	// placeholder since that server does not authenticate.
	// NOTE(review): the base URL is a hard-coded LAN address — confirm it
	// should not come from configuration or an environment variable.
	client = openai.NewClient(
		option.WithAPIKey("EMPTY"), // defaults to os.LookupEnv("OPENAI_API_KEY")
		option.WithBaseURL("http://192.168.55.14:8001/v1/"),
	)

	// ctx is the shared background context used for all API calls.
	ctx = context.Background()
)
|
||
|
|
||
|
// History stores one conversation turn: the raw text the user typed and
// a summarized (see resume) version of the assistant's reply.
type History struct {
	Assistant string
	User      string
}
|
||
|
|
||
|
func readKey(input chan rune) {
|
||
|
char, _, err := reader.ReadRune()
|
||
|
if err != nil {
|
||
|
fmt.Println("Error reading key: ", err)
|
||
|
}
|
||
|
input <- char
|
||
|
}
|
||
|
|
||
|
func push(h History) int {
|
||
|
history = append(history, h)
|
||
|
return len(history)
|
||
|
}
|
||
|
|
||
|
func resume(s string) string {
|
||
|
|
||
|
resumer := "you are a golang expert; very able to resume the conversation; Limited to responding to the question without adding anything else."
|
||
|
question := "resume this conversation in max 100 tokens: " + s
|
||
|
|
||
|
completion, err := client.Chat.Completions.New(ctx, openai.ChatCompletionNewParams{
|
||
|
Messages: openai.F([]openai.ChatCompletionMessageParamUnion{
|
||
|
openai.SystemMessage(resumer),
|
||
|
openai.UserMessage(question),
|
||
|
}),
|
||
|
Model: openai.F(model),
|
||
|
})
|
||
|
if err != nil {
|
||
|
fmt.Printf("Error: %v\n", err)
|
||
|
}
|
||
|
|
||
|
return completion.Choices[0].Message.Content
|
||
|
}
|
||
|
|
||
|
func main() {
|
||
|
|
||
|
input := make(chan rune, 1)
|
||
|
|
||
|
for {
|
||
|
|
||
|
current_history := History{
|
||
|
Assistant: "",
|
||
|
User: "",
|
||
|
}
|
||
|
|
||
|
print("> ")
|
||
|
loop:
|
||
|
for {
|
||
|
go readKey(input)
|
||
|
|
||
|
select {
|
||
|
case i := <-input:
|
||
|
current_history.User += string(i)
|
||
|
switch i {
|
||
|
case '\n':
|
||
|
break loop
|
||
|
}
|
||
|
}
|
||
|
}
|
||
|
|
||
|
println()
|
||
|
|
||
|
// stream := client.Chat.Completions.NewStreaming(ctx, openai.ChatCompletionNewParams{
|
||
|
// Messages: openai.F([]openai.ChatCompletionMessageParamUnion{
|
||
|
// openai.SystemMessage(sys), openai.UserMessage(user),
|
||
|
// }),
|
||
|
// Model: openai.F(model),
|
||
|
// })
|
||
|
|
||
|
var messages_hist []openai.ChatCompletionMessageParamUnion
|
||
|
for _, h := range history {
|
||
|
messages_hist = append(messages_hist, openai.UserMessage(h.User), openai.AssistantMessage(h.Assistant))
|
||
|
}
|
||
|
|
||
|
// fmt.Printf("%+v\n", messages_hist)
|
||
|
|
||
|
msg := []openai.ChatCompletionMessageParamUnion{
|
||
|
openai.SystemMessage(sys),
|
||
|
}
|
||
|
msg = append(msg, messages_hist...)
|
||
|
msg = append(msg, openai.UserMessage(current_history.User))
|
||
|
|
||
|
stream := client.Chat.Completions.NewStreaming(ctx, openai.ChatCompletionNewParams{
|
||
|
Messages: openai.F(msg),
|
||
|
Model: openai.F(model),
|
||
|
})
|
||
|
|
||
|
// fmt.Printf("%+v\n", msg)
|
||
|
|
||
|
fmt.Println()
|
||
|
|
||
|
// history = append(history, openai.UserMessage(user))
|
||
|
|
||
|
for stream.Next() {
|
||
|
evt := stream.Current()
|
||
|
if len(evt.Choices) > 0 {
|
||
|
print(evt.Choices[0].Delta.Content)
|
||
|
current_history.Assistant += evt.Choices[0].Delta.Content
|
||
|
}
|
||
|
}
|
||
|
println()
|
||
|
|
||
|
if err := stream.Err(); err != nil {
|
||
|
panic(err)
|
||
|
}
|
||
|
|
||
|
current_history.Assistant = resume(current_history.Assistant)
|
||
|
|
||
|
// println()
|
||
|
// fmt.Printf("%+v\n", current_history)
|
||
|
|
||
|
history = append(history, current_history)
|
||
|
|
||
|
println()
|
||
|
println()
|
||
|
}
|
||
|
}
|