Browse Source

initial gpt hook up with htmx

main
Stephanie Gredell 8 months ago
parent
commit
eb4b35497e
  1. 1
      .gitignore
  2. 123
      clients/openai.go
  3. 8
      go.mod
  4. 4
      go.sum
  5. 135
      main.go
  6. 61
      templates/chat.html

1
.gitignore vendored

@ -0,0 +1 @@ @@ -0,0 +1 @@
.env

123
clients/openai.go

@ -0,0 +1,123 @@ @@ -0,0 +1,123 @@
package openai
import (
"bufio"
"bytes"
"encoding/json"
"errors"
"io"
"net/http"
"os"
"sync"
"github.com/joho/godotenv"
)
// OpenAIEndpoint is the chat-completions REST endpoint that both Chat and
// StreamChat POST to.
const OpenAIEndpoint = "https://api.openai.com/v1/chat/completions"

// OpenAIClient carries the credentials and model name used for API requests.
type OpenAIClient struct {
	APIKey string // bearer token; populated from OPENAI_API_KEY by NewClient
	Model  string // model identifier sent with every request, e.g. "gpt-3.5-turbo"
}

// ChatMessage is a single conversation turn in OpenAI's wire format.
type ChatMessage struct {
	Role    string `json:"role"`    // "system", "user", or "assistant"
	Content string `json:"content"` // the message text
}

// ChatRequest is the JSON body POSTed to the chat-completions endpoint.
type ChatRequest struct {
	Model       string        `json:"model"`
	Messages    []ChatMessage `json:"messages"`
	Stream      bool          `json:"stream,omitempty"`      // true selects server-sent-event streaming
	Temperature float32       `json:"temperature,omitempty"` // sampling temperature; the zero value is omitted
}

// ChatResponse covers both response shapes the endpoint produces:
// streaming chunks populate Delta, blocking completions populate Message.
type ChatResponse struct {
	Choices []struct {
		Delta struct {
			Content string `json:"content"`
		} `json:"delta"` // incremental content fragment (streaming responses only)
		Message ChatMessage `json:"message"` // full reply (non-streaming responses only)
	} `json:"choices"`
}

// loadEnvOnce ensures .env is only loaded once per process, no matter how
// many clients are constructed.
var loadEnvOnce sync.Once
// NewClient returns a client configured for the given model.
//
// The first call (process-wide) also attempts to load a local .env file so
// that OPENAI_API_KEY can be supplied from it; later calls skip the load.
func NewClient(model string) *OpenAIClient {
	// A missing .env file is fine — the key may already be in the real
	// environment — so the load error is deliberately discarded.
	loadEnvOnce.Do(func() { _ = godotenv.Load() })

	client := &OpenAIClient{
		Model:  model,
		APIKey: os.Getenv("OPENAI_API_KEY"),
	}
	return client
}
// Chat sends the conversation to the OpenAI chat-completions endpoint and
// returns the assistant's reply as plain text.
//
// It is a blocking, non-streaming call; use StreamChat for incremental
// delivery. On a non-200 response the returned error includes the HTTP
// status and the API's error body.
func (c *OpenAIClient) Chat(messages []ChatMessage) (string, error) {
	reqBody, err := json.Marshal(ChatRequest{
		Model:    c.Model,
		Messages: messages,
	})
	if err != nil {
		return "", err
	}
	req, err := http.NewRequest("POST", OpenAIEndpoint, bytes.NewBuffer(reqBody))
	if err != nil {
		return "", err
	}
	req.Header.Set("Authorization", "Bearer "+c.APIKey)
	req.Header.Set("Content-Type", "application/json")
	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		return "", err
	}
	defer resp.Body.Close()
	if resp.StatusCode != http.StatusOK {
		// Best-effort read: include the API's error detail in the message.
		body, _ := io.ReadAll(resp.Body)
		return "", errors.New("openai: " + resp.Status + ": " + string(body))
	}
	var result ChatResponse
	if err := json.NewDecoder(resp.Body).Decode(&result); err != nil {
		return "", err
	}
	// Guard against an empty choices array — indexing blindly would panic.
	if len(result.Choices) == 0 {
		return "", errors.New("openai: response contained no choices")
	}
	return result.Choices[0].Message.Content, nil
}
// StreamChat sends the conversation with streaming enabled and invokes
// onDelta once for each content fragment as server-sent-event chunks
// arrive. It returns after the stream ends, propagating any transport or
// scan error.
func (c *OpenAIClient) StreamChat(messages []ChatMessage, onDelta func(string)) error {
	reqBody, err := json.Marshal(ChatRequest{
		Model:    c.Model,
		Messages: messages,
		Stream:   true,
	})
	if err != nil {
		return err
	}
	req, err := http.NewRequest("POST", OpenAIEndpoint, bytes.NewBuffer(reqBody))
	if err != nil {
		return err
	}
	req.Header.Set("Authorization", "Bearer "+c.APIKey)
	req.Header.Set("Content-Type", "application/json")
	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		return err
	}
	defer resp.Body.Close()
	if resp.StatusCode != http.StatusOK {
		// Best-effort read: include the API's error detail in the message.
		body, _ := io.ReadAll(resp.Body)
		return errors.New("openai: " + resp.Status + ": " + string(body))
	}

	// SSE data lines look like `data: {json}`; skip blanks, comments, and
	// anything else without the prefix instead of slicing blindly (the
	// previous line[6:] would panic on lines shorter than the prefix).
	const dataPrefix = "data: "
	scanner := bufio.NewScanner(resp.Body)
	// Raise the token limit above bufio's 64 KiB default so a long chunk
	// line cannot abort the stream.
	scanner.Buffer(make([]byte, 0, 64*1024), 1024*1024)
	for scanner.Scan() {
		line := scanner.Text()
		if len(line) <= len(dataPrefix) || line[:len(dataPrefix)] != dataPrefix {
			continue
		}
		payload := line[len(dataPrefix):]
		// "[DONE]" is OpenAI's end-of-stream sentinel.
		if payload == "[DONE]" {
			break
		}
		var chunk ChatResponse
		if err := json.Unmarshal([]byte(payload), &chunk); err != nil {
			continue // tolerate malformed interleaved lines
		}
		if len(chunk.Choices) > 0 {
			onDelta(chunk.Choices[0].Delta.Content)
		}
	}
	// Surface read errors (previously swallowed) — e.g. a connection drop
	// mid-stream.
	return scanner.Err()
}

8
go.mod

@ -0,0 +1,8 @@ @@ -0,0 +1,8 @@
module bdsc
go 1.22.2
require (
github.com/gorilla/websocket v1.5.3
github.com/joho/godotenv v1.5.1
)

4
go.sum

@ -0,0 +1,4 @@ @@ -0,0 +1,4 @@
github.com/gorilla/websocket v1.5.3 h1:saDtZ6Pbx/0u+bgYQ3q96pZgCzfhKXGPqt7kZ72aNNg=
github.com/gorilla/websocket v1.5.3/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE=
github.com/joho/godotenv v1.5.1 h1:7eLL/+HRGLY0ldzfGMeQkb7vMd0as4CfYvUVzLqw0N0=
github.com/joho/godotenv v1.5.1/go.mod h1:f4LDr5Voq0i2e/R5DDNOoa2zzDfwtkZa6DnEwAbqwq4=

135
main.go

@ -0,0 +1,135 @@ @@ -0,0 +1,135 @@
package main
import (
openai "bdsc/clients"
"fmt"
"html/template"
"net/http"
"sync"
"github.com/gorilla/websocket"
)
var (
	// tmpl is the chat page template, parsed once at startup;
	// template.Must panics if the file is missing or invalid.
	tmpl = template.Must(template.ParseFiles("templates/chat.html"))

	// upgrader promotes plain HTTP requests on /ws to websockets
	// (gorilla defaults).
	upgrader = websocket.Upgrader{}

	// clients is the set of live websocket connections; guarded by clientMutex.
	clients = make(map[*websocket.Conn]bool)

	// broadcast carries rendered HTML fragments from the HTTP handlers to
	// the handleMessages fan-out goroutine. Unbuffered, so sends block
	// until handleMessages receives.
	broadcast = make(chan string)

	clientMutex sync.Mutex

	// openAIClient is the single shared API client with a fixed model.
	openAIClient = openai.NewClient("gpt-3.5-turbo")

	// chatHistory is the one global conversation shared by all visitors;
	// guarded by historyMutex.
	chatHistory  []openai.ChatMessage
	historyMutex sync.Mutex
)
// PageData is the view model passed to templates/chat.html.
type PageData struct {
	// Greeting is a pre-rendered, already-escaped HTML fragment injected
	// verbatim into the #messages div on first load.
	Greeting template.HTML
}
// main wires up the HTTP routes, starts the websocket fan-out goroutine,
// and serves the chat application on :8080.
func main() {
	http.HandleFunc("/", chatHandler)
	http.HandleFunc("/send", sendHandler)
	http.HandleFunc("/ws", wsHandler)
	http.Handle("/static/", http.StripPrefix("/static/", http.FileServer(http.Dir("static"))))

	// Fan chat fragments out from the broadcast channel to every
	// connected websocket client.
	go handleMessages()

	fmt.Println("Listening on :8080")
	// ListenAndServe only returns on failure; report the error instead of
	// exiting silently (previously the return value was dropped).
	if err := http.ListenAndServe(":8080", nil); err != nil {
		fmt.Println("server error:", err)
	}
}
// chatHandler renders the chat page. On each load it asks the model for a
// fresh greeting and records it in the shared history.
func chatHandler(w http.ResponseWriter, r *http.Request) {
	historyMutex.Lock()
	// Seed the system prompt exactly once; previously it was appended on
	// every page load, bloating the history with duplicate system messages.
	if len(chatHistory) == 0 {
		chatHistory = append(chatHistory,
			openai.ChatMessage{Role: "system", Content: "You are a friendly and helpful assistant."},
		)
	}
	// Snapshot the history so the network round-trip below runs without
	// holding the mutex (the API call can take seconds).
	snapshot := make([]openai.ChatMessage, len(chatHistory))
	copy(snapshot, chatHistory)
	historyMutex.Unlock()

	resp, err := openAIClient.Chat(snapshot)
	if err != nil {
		fmt.Println("GPT error:", err)
		// Tell the browser something went wrong instead of silently
		// returning an empty 200 response.
		http.Error(w, "GPT request failed", http.StatusInternalServerError)
		return
	}

	historyMutex.Lock()
	chatHistory = append(chatHistory, openai.ChatMessage{Role: "assistant", Content: resp})
	historyMutex.Unlock()

	html := fmt.Sprintf(`<div><b>Assistant:</b> %s</div>`, template.HTMLEscapeString(resp))
	// Execute streams directly to w; an error here usually means the
	// client disconnected, so logging is all that is left to do.
	if err := tmpl.Execute(w, PageData{Greeting: template.HTML(html)}); err != nil {
		fmt.Println("template error:", err)
	}
}
// sendHandler accepts a chat message from the htmx form, forwards the
// conversation to the model, broadcasts both sides of the exchange to all
// websocket clients, and echoes the fragments back to the poster.
func sendHandler(w http.ResponseWriter, r *http.Request) {
	if err := r.ParseForm(); err != nil {
		http.Error(w, "bad form data", http.StatusBadRequest)
		return
	}
	message := r.FormValue("message")

	historyMutex.Lock()
	chatHistory = append(chatHistory, openai.ChatMessage{
		Role:    "user",
		Content: message,
	})
	// Snapshot so the OpenAI round-trip happens outside the lock;
	// previously the mutex was held across the network call, stalling
	// every other request for its duration.
	snapshot := make([]openai.ChatMessage, len(chatHistory))
	copy(snapshot, chatHistory)
	historyMutex.Unlock()

	userHTML := fmt.Sprintf(`<div><b>You:</b> %s</div>`, template.HTMLEscapeString(message))
	broadcast <- userHTML

	resp, err := openAIClient.Chat(snapshot)
	if err != nil {
		http.Error(w, "Gpt failed", http.StatusInternalServerError)
		return
	}

	historyMutex.Lock()
	chatHistory = append(chatHistory, openai.ChatMessage{
		Role:    "assistant",
		Content: resp,
	})
	historyMutex.Unlock()

	gptHTML := fmt.Sprintf(`<div><b>Assistant:</b> %s</div>`, template.HTMLEscapeString(resp))
	broadcast <- gptHTML

	// NOTE(review): the fragments go out both over the websocket and in
	// this response body; a sender with a live ws connection will see each
	// message appended twice — confirm which channel should win.
	w.Header().Set("Content-Type", "text/html")
	fmt.Fprint(w, userHTML+gptHTML)
}
// wsHandler upgrades the request to a websocket, registers the connection
// for broadcasts, and blocks until the peer disconnects.
func wsHandler(w http.ResponseWriter, r *http.Request) {
	ws, err := upgrader.Upgrade(w, r, nil)
	if err != nil {
		// Upgrade already wrote an HTTP error to the client.
		return
	}

	clientMutex.Lock()
	clients[ws] = true
	clientMutex.Unlock()

	// Deregister and close once the read loop below exits.
	defer func() {
		clientMutex.Lock()
		delete(clients, ws)
		clientMutex.Unlock()
		ws.Close()
	}()

	// Drain (and discard) incoming frames; the first read error signals
	// that the client has gone away.
	for {
		if _, _, err := ws.ReadMessage(); err != nil {
			return
		}
	}
}
// handleMessages fans every fragment placed on the broadcast channel out
// to all registered websocket clients, pruning connections whose writes
// fail. It runs for the life of the process in its own goroutine.
func handleMessages() {
	// Idiomatic receive loop; also exits cleanly if broadcast is ever closed.
	for msg := range broadcast {
		fmt.Println("broadcasting message:", msg)
		clientMutex.Lock()
		for client := range clients {
			if err := client.WriteMessage(websocket.TextMessage, []byte(msg)); err != nil {
				// Include the error and a newline (previously missing)
				// in the log, then drop the dead connection. Deleting
				// during range is safe in Go.
				fmt.Println("error sending message to client:", err)
				client.Close()
				delete(clients, client)
			}
		}
		clientMutex.Unlock()
	}
}

61
templates/chat.html

@ -0,0 +1,61 @@ @@ -0,0 +1,61 @@
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<title>GPT Chat</title>
<!-- htmx core plus its websocket extension, pinned with SRI hashes. -->
<script src="https://unpkg.com/htmx.org@2.0.4" integrity="sha384-HGfztofotfshcF7+8n44JQL2oJmowVChPTg48S+jvZoztPfvwD79OC/LTtG6dMp+" crossorigin="anonymous"></script>
<script src="https://unpkg.com/htmx-ext-ws@2.0.2" integrity="sha384-vuKxTKv5TX/b3lLzDKP2U363sOAoRo5wSvzzc3LJsbaQRSBSS+3rKKHcOx5J8doU" crossorigin="anonymous"></script>
<style>
body {
font-family: sans-serif;
margin: 2rem;
}
#messages {
border: 1px solid #ccc;
padding: 1rem;
height: 300px;
overflow-y: auto;
margin-bottom: 1rem;
}
form {
display: flex;
gap: 0.5rem;
}
input[type="text"] {
flex: 1;
padding: 0.5rem;
}
</style>
</head>
<body>
<!-- NOTE(review): debug shim — wraps the global WebSocket constructor so
     every raw incoming frame is logged to the console. Looks like temporary
     instrumentation; confirm and remove before release. -->
<script>
const realWS = WebSocket;
WebSocket = function (...args) {
const ws = new realWS(...args);
ws.addEventListener("message", (e) => {
console.log("🔥 Raw WS message:", e.data);
});
return ws;
};
</script>
<h1>Chat with GPT</h1>
<!-- WebSocket-connected chat window -->
<!-- Fragments broadcast by the server over /ws are appended here
     (hx-swap="beforeend"); {{ .Greeting }} is the server-rendered opener. -->
<div id="messages"
hx-ext="ws"
ws-connect="/ws"
hx-swap="beforeend">
{{ .Greeting }}
</div>
<!-- Form to send messages to GPT -->
<!-- NOTE(review): /send also returns the same fragments in its response and
     this form appends them to #messages too — with a live ws connection the
     sender sees each message twice; confirm the intended swap behavior. -->
<form hx-post="/send" hx-target="#messages" hx-swap="beforeend" hx-on::after-request="if(event.detail.successful) this.reset()">
<input type="text" name="message" placeholder="Say something..." required />
<button type="submit">Send</button>
</form>
</body>
</html>
Loading…
Cancel
Save