// Jade/RequestOpenai.go — OpenAI chat-completion client and Fiber handler.
package main
import (
	"bytes"
	"context"
	"encoding/json"
	"fmt"
	"io"
	"net/http"
	"os"
	"text/template"
	"time"

	"github.com/gofiber/fiber/v2"
	"go.mongodb.org/mongo-driver/bson"
	"go.mongodb.org/mongo-driver/bson/primitive"
)
// dataForTemplate wraps a single OpenaiMessage for template rendering.
// NOTE(review): not referenced anywhere in this file — confirm it is used
// elsewhere before removing.
type dataForTemplate struct {
	Message OpenaiMessage
}
// ChatCompletionRequest is the JSON request body sent to the OpenAI
// /v1/chat/completions endpoint.
type ChatCompletionRequest struct {
	Model       string          `json:"model"`       // e.g. "gpt-3.5-turbo"
	Messages    []OpenaiMessage `json:"messages"`    // conversation history, oldest first
	Temperature float64         `json:"temperature"` // sampling temperature
}
// OpenaiMessage is a single chat message in the OpenAI wire format.
type OpenaiMessage struct {
	Role    string `json:"role"`    // "user", "assistant", or "system"
	Content string `json:"content"` // message text
}
// ChatCompletionResponse is the JSON response body returned by the OpenAI
// /v1/chat/completions endpoint.
type ChatCompletionResponse struct {
	ID      string         `json:"id"`
	Object  string         `json:"object"`
	Created int64          `json:"created"` // Unix timestamp
	Model   string         `json:"model"`
	Usage   OpenaiUsage    `json:"usage"`
	Choices []OpenaiChoice `json:"choices"` // candidate completions; may be empty
}
// OpenaiUsage reports token accounting for a completion request.
type OpenaiUsage struct {
	PromptTokens     int `json:"prompt_tokens"`
	CompletionTokens int `json:"completion_tokens"`
	TotalTokens      int `json:"total_tokens"`
}
// OpenaiChoice is one candidate completion returned by the API.
type OpenaiChoice struct {
	Message      OpenaiMessage `json:"message"`
	FinishReason string        `json:"finish_reason"` // e.g. "stop", "length"
	Index        int           `json:"index"`
}
var lastMessageAsked string // TODO Remove this package-level state; pass the message with the request instead

// addOpenaiMessage sends the last asked message to OpenAI, persists both the
// user message and the bot reply in the chat.messages MongoDB collection, and
// responds with the bot reply rendered through the bot-message partial.
//
// NOTE(review): the partial is parsed with text/template, so the model output
// is NOT HTML-escaped; switch to html/template if the reply can contain markup.
func addOpenaiMessage(c *fiber.Ctx) error {
	message := lastMessageAsked // TODO Remove this
	chatCompletion, err := RequestOpenai("gpt-3.5-turbo", []Message{{Content: message, Role: "user", Date: time.Now(), ID: primitive.NilObjectID}}, 0.7)
	if err != nil {
		fmt.Println("Error:", err)
		return err
	}
	// FIX: the original returned err here, which is nil at this point, so an
	// empty Choices slice was silently reported as success.
	if len(chatCompletion.Choices) == 0 {
		fmt.Println("No response from OpenAI")
		return fmt.Errorf("openai returned no choices")
	}
	collection := mongoClient.Database("chat").Collection("messages")
	// FIX: InsertOne errors were previously ignored.
	if _, err := collection.InsertOne(context.Background(), bson.M{"message": message, "role": "user", "date": time.Now()}); err != nil {
		fmt.Println("Error inserting user message:", err)
		return err
	}
	// Parsing per request is simple; hoist to package scope if this becomes hot.
	tmpl, err := template.ParseFiles("views/partials/bot-message.gohtml")
	if err != nil {
		fmt.Println("Error parsing template:", err)
		return err
	}
	var renderedMessage bytes.Buffer
	botMessage := chatCompletion.Choices[0].Message
	if err := tmpl.Execute(&renderedMessage, botMessage); err != nil {
		fmt.Println("Error rendering template:", err)
		return err
	}
	if _, err := collection.InsertOne(context.Background(), bson.M{"message": botMessage.Content, "role": "bot", "date": time.Now()}); err != nil {
		fmt.Println("Error inserting bot message:", err)
		return err
	}
	return c.SendString(renderedMessage.String())
}
// Message2OpenaiMessage converts an internal Message into the wire-format
// OpenaiMessage expected by the OpenAI API. Only role and content carry over;
// the date and ID have no OpenAI equivalent.
func Message2OpenaiMessage(message Message) OpenaiMessage {
	var converted OpenaiMessage
	converted.Role = message.Role
	converted.Content = message.Content
	return converted
}
// Messages2OpenaiMessages converts a slice of internal Messages into the
// OpenAI wire format, preserving order.
//
// The result is always non-nil so that the request's "messages" field
// JSON-encodes as "[]" rather than "null" when the input is empty (the
// original returned nil in that case); the capacity is pre-sized to avoid
// repeated growth copies.
func Messages2OpenaiMessages(messages []Message) []OpenaiMessage {
	openaiMessages := make([]OpenaiMessage, 0, len(messages))
	for _, message := range messages {
		openaiMessages = append(openaiMessages, Message2OpenaiMessage(message))
	}
	return openaiMessages
}
// RequestOpenai performs a chat completion request against the OpenAI API and
// returns the decoded response.
//
// The API key is read from the OPENAI_API_KEY environment variable (SECURITY
// FIX: the key was previously hard-coded in source — that key must be revoked).
// Returns an error when the key is unset, the request fails, the API responds
// with a non-200 status, or the response body cannot be decoded.
func RequestOpenai(model string, messages []Message, temperature float64) (ChatCompletionResponse, error) {
	apiKey := os.Getenv("OPENAI_API_KEY")
	if apiKey == "" {
		return ChatCompletionResponse{}, fmt.Errorf("OPENAI_API_KEY environment variable is not set")
	}
	const url = "https://api.openai.com/v1/chat/completions"
	// Convert messages to OpenAI format.
	requestBody := ChatCompletionRequest{
		Model:       model,
		Messages:    Messages2OpenaiMessages(messages),
		Temperature: temperature,
	}
	jsonBody, err := json.Marshal(requestBody)
	if err != nil {
		return ChatCompletionResponse{}, fmt.Errorf("error marshaling JSON: %w", err)
	}
	req, err := http.NewRequest("POST", url, bytes.NewBuffer(jsonBody))
	if err != nil {
		return ChatCompletionResponse{}, fmt.Errorf("error creating request: %w", err)
	}
	req.Header.Set("Content-Type", "application/json")
	req.Header.Set("Authorization", "Bearer "+apiKey)
	// FIX: a client without a timeout can hang forever on a stalled connection.
	client := &http.Client{Timeout: 60 * time.Second}
	resp, err := client.Do(req)
	if err != nil {
		return ChatCompletionResponse{}, fmt.Errorf("error sending request: %w", err)
	}
	defer resp.Body.Close()
	body, err := io.ReadAll(resp.Body)
	if err != nil {
		return ChatCompletionResponse{}, fmt.Errorf("error reading response body: %w", err)
	}
	// FIX: the status code was never checked; API errors (401, 429, ...) would
	// previously decode into an empty response and be treated as success.
	if resp.StatusCode != http.StatusOK {
		return ChatCompletionResponse{}, fmt.Errorf("openai returned status %d: %s", resp.StatusCode, body)
	}
	var chatCompletionResponse ChatCompletionResponse
	if err := json.Unmarshal(body, &chatCompletionResponse); err != nil {
		return ChatCompletionResponse{}, fmt.Errorf("error unmarshaling JSON: %w", err)
	}
	return chatCompletionResponse, nil
}