diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..4c49bd7
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1 @@
+.env
diff --git a/clients/openai.go b/clients/openai.go
new file mode 100644
index 0000000..9640c38
--- /dev/null
+++ b/clients/openai.go
@@ -0,0 +1,132 @@
+package openai
+
+import (
+	"bufio"
+	"bytes"
+	"encoding/json"
+	"errors"
+	"io"
+	"net/http"
+	"os"
+	"strings"
+	"sync"
+
+	"github.com/joho/godotenv"
+)
+
+const OpenAIEndpoint = "https://api.openai.com/v1/chat/completions"
+
+type OpenAIClient struct {
+	APIKey string
+	Model  string
+}
+
+type ChatMessage struct {
+	Role    string `json:"role"`
+	Content string `json:"content"`
+}
+
+type ChatRequest struct {
+	Model       string        `json:"model"`
+	Messages    []ChatMessage `json:"messages"`
+	Stream      bool          `json:"stream,omitempty"`
+	Temperature float32       `json:"temperature,omitempty"`
+}
+
+type ChatResponse struct {
+	Choices []struct {
+		Delta struct {
+			Content string `json:"content"`
+		} `json:"delta"`
+		Message ChatMessage `json:"message"`
+	} `json:"choices"`
+}
+
+// loadEnvOnce ensures .env is only loaded once.
+var loadEnvOnce sync.Once
+
+// NewClient loads env and returns a configured client.
+func NewClient(model string) *OpenAIClient {
+	loadEnvOnce.Do(func() {
+		_ = godotenv.Load()
+	})
+
+	return &OpenAIClient{
+		APIKey: os.Getenv("OPENAI_API_KEY"),
+		Model:  model,
+	}
+}
+
+// Chat sends the full message history and returns the assistant's reply.
+func (c *OpenAIClient) Chat(messages []ChatMessage) (string, error) {
+	reqBody, _ := json.Marshal(ChatRequest{
+		Model:    c.Model,
+		Messages: messages,
+	})
+	req, _ := http.NewRequest("POST", OpenAIEndpoint, bytes.NewBuffer(reqBody))
+	req.Header.Set("Authorization", "Bearer "+c.APIKey)
+	req.Header.Set("Content-Type", "application/json")
+
+	resp, err := http.DefaultClient.Do(req)
+	if err != nil {
+		return "", err
+	}
+	defer resp.Body.Close()
+
+	if resp.StatusCode != http.StatusOK {
+		body, _ := io.ReadAll(resp.Body)
+		return "", errors.New(string(body))
+	}
+
+	var result ChatResponse
+	if err := json.NewDecoder(resp.Body).Decode(&result); err != nil {
+		return "", err
+	}
+	if len(result.Choices) == 0 {
+		return "", errors.New("openai: response contained no choices")
+	}
+	return result.Choices[0].Message.Content, nil
+}
+
+// StreamChat sends the history with streaming enabled and calls onDelta
+// for each content fragment in the server-sent event stream.
+func (c *OpenAIClient) StreamChat(messages []ChatMessage, onDelta func(string)) error {
+	reqBody, _ := json.Marshal(ChatRequest{
+		Model:    c.Model,
+		Messages: messages,
+		Stream:   true,
+	})
+	req, _ := http.NewRequest("POST", OpenAIEndpoint, bytes.NewBuffer(reqBody))
+	req.Header.Set("Authorization", "Bearer "+c.APIKey)
+	req.Header.Set("Content-Type", "application/json")
+
+	resp, err := http.DefaultClient.Do(req)
+	if err != nil {
+		return err
+	}
+	defer resp.Body.Close()
+
+	if resp.StatusCode != http.StatusOK {
+		body, _ := io.ReadAll(resp.Body)
+		return errors.New(string(body))
+	}
+
+	scanner := bufio.NewScanner(resp.Body)
+	for scanner.Scan() {
+		line := scanner.Text()
+		// Payload lines look like `data: {...}`; skip blanks, keep-alives,
+		// and the terminal `data: [DONE]` marker before slicing off the prefix.
+		if !strings.HasPrefix(line, "data: ") || line == "data: [DONE]" {
+			continue
+		}
+		var chunk ChatResponse
+		if err := json.Unmarshal([]byte(line[6:]), &chunk); err != nil {
+			continue
+		}
+		if len(chunk.Choices) > 0 {
+			delta := chunk.Choices[0].Delta.Content
+			onDelta(delta)
+		}
+	}
+	return scanner.Err()
+}
diff --git a/go.mod b/go.mod
new file mode 100644
index 0000000..3ddeb6f
--- /dev/null
+++ b/go.mod
@@ -0,0 +1,8 @@
+module bdsc
+
+go 1.22.2
+
+require (
+	github.com/gorilla/websocket v1.5.3
+	github.com/joho/godotenv v1.5.1
+)
diff --git a/go.sum b/go.sum
new file mode 100644
index 0000000..09f4ebf
--- /dev/null
+++ b/go.sum
@@ -0,0 +1,4 @@
+github.com/gorilla/websocket v1.5.3 h1:saDtZ6Pbx/0u+bgYQ3q96pZgCzfhKXGPqt7kZ72aNNg=
+github.com/gorilla/websocket v1.5.3/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE=
+github.com/joho/godotenv v1.5.1 h1:7eLL/+HRGLY0ldzfGMeQkb7vMd0as4CfYvUVzLqw0N0=
+github.com/joho/godotenv v1.5.1/go.mod h1:f4LDr5Voq0i2e/R5DDNOoa2zzDfwtkZa6DnEwAbqwq4=
diff --git a/main.go b/main.go
new file mode 100644
index 0000000..84f32a1
--- /dev/null
+++ b/main.go
@@ -0,0 +1,140 @@
+package main
+
+import (
+	openai "bdsc/clients"
+	"fmt"
+	"html/template"
+	"log"
+	"net/http"
+	"sync"
+
+	"github.com/gorilla/websocket"
+)
+
+var (
+	tmpl        = template.Must(template.ParseFiles("templates/chat.html"))
+	upgrader    = websocket.Upgrader{}
+	clients     = make(map[*websocket.Conn]bool)
+	broadcast   = make(chan string)
+	clientMutex sync.Mutex
+
+	openAIClient = openai.NewClient("gpt-3.5-turbo")
+	chatHistory  []openai.ChatMessage
+	historyMutex sync.Mutex
+)
+
+type PageData struct {
+	Greeting template.HTML
+}
+
+func main() {
+	http.HandleFunc("/", chatHandler)
+	http.HandleFunc("/send", sendHandler)
+	http.HandleFunc("/ws", wsHandler)
+	http.Handle("/static/", http.StripPrefix("/static/", http.FileServer(http.Dir("static"))))
+
+	go handleMessages()
+
+	fmt.Println("Listening on :8080")
+	log.Fatal(http.ListenAndServe(":8080", nil))
+}
+
+func chatHandler(w http.ResponseWriter, r *http.Request) {
+	historyMutex.Lock()
+	defer historyMutex.Unlock()
+
+	// Seed the system prompt only once; appending it on every request
+	// would duplicate it throughout the history.
+	if len(chatHistory) == 0 {
+		chatHistory = append(chatHistory,
+			openai.ChatMessage{Role: "system", Content: "You are a friendly and helpful assistant."},
+		)
+	}
+	resp, err := openAIClient.Chat(chatHistory)
+	if err != nil {
+		fmt.Println("GPT error:", err)
+		http.Error(w, "upstream model error", http.StatusBadGateway)
+		return
+	}
+
+	chatHistory = append(chatHistory, openai.ChatMessage{Role: "assistant", Content: resp})
+	html := fmt.Sprintf(`