package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"io"
	"net/http"

	"github.com/edgedb/edgedb-go"
	"github.com/gofiber/fiber/v2"
)

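// MistralChatCompletionRequest is the JSON request body sent to Mistral's
// chat completions endpoint.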
type MistralChatCompletionRequest struct {
	Model       string           `json:"model"`
	Messages    []RequestMessage `json:"messages"`
	Temperature float64          `json:"temperature"`
}

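// MistralChatCompletionResponse mirrors the JSON response returned by
// Mistral's chat completions endpoint.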
type MistralChatCompletionResponse struct {
	ID      string          `json:"id"`
	Object  string          `json:"object"`
	Created int64           `json:"created"`
	Model   string          `json:"model"`
	Usage   MistralUsage    `json:"usage"`
	Choices []MistralChoice `json:"choices"`
}

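// MistralUsage reports the token counts consumed by a completion.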
type MistralUsage struct {
	PromptTokens     int32 `json:"prompt_tokens"`
	CompletionTokens int32 `json:"completion_tokens"`
	TotalTokens      int32 `json:"total_tokens"`
}

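// MistralChoice is a single completion candidate returned by Mistral.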
type MistralChoice struct {
	Message      Message `json:"message"`
	FinishReason string  `json:"finish_reason"`
	Index        int     `json:"index"`
}

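// addMistralMessage sends the currently selected conversation to Mistral,
// stores the reply as a bot message, and returns the new message's ID.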
func addMistralMessage(c *fiber.Ctx, llm LLM, selected bool) edgedb.UUID {
	Messages := getAllSelectedMessages(c)

	chatCompletion, err := RequestMistral(c, llm.Model.ModelID, Messages, float64(llm.Temperature), llm.Context)
	if err != nil {
		fmt.Println("Error requesting chat completion from Mistral")
		panic(err)
	} else if len(chatCompletion.Choices) == 0 {
		id := insertBotMessage(c, "No response from Mistral", selected, llm.ID)
		return id
	} else {
		Content := chatCompletion.Choices[0].Message.Content
		id := insertBotMessage(c, Content, selected, llm.ID)
		return id
	}
}

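// TestMistralKey verifies an API key by sending a minimal chat completion
// request to Mistral and checking that a non-empty response comes back.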
func TestMistralKey(apiKey string) bool {
	url := "https://api.mistral.ai/v1/chat/completions"

	// Minimal single-message payload used only to validate the key
	mistralMessages := []RequestMessage{
		{
			Role:    "user",
			Content: "Hello",
		},
	}

	requestBody := MistralChatCompletionRequest{
		Model:       "open-mistral-7b",
		Messages:    mistralMessages,
		Temperature: 0,
	}

	jsonBody, err := json.Marshal(requestBody)
	if err != nil {
		fmt.Println("Error marshalling request to Mistral")
		return false
	}

	req, err := http.NewRequest("POST", url, bytes.NewBuffer(jsonBody))
	if err != nil {
		fmt.Println("Error creating request to Mistral")
		return false
	}

	req.Header.Set("Content-Type", "application/json")
	req.Header.Set("Accept", "application/json")
	req.Header.Set("Authorization", "Bearer "+apiKey)

	client := &http.Client{}
	resp, err := client.Do(req)
	if err != nil {
		fmt.Println("Error sending request to Mistral")
		return false
	}
	defer resp.Body.Close()

	body, err := io.ReadAll(resp.Body)
	if err != nil {
		fmt.Println("Error reading response from Mistral")
		return false
	}

	var chatCompletionResponse MistralChatCompletionResponse
	err = json.Unmarshal(body, &chatCompletionResponse)
	if err != nil {
		fmt.Println("Error unmarshalling response from Mistral")
		return false
	}

	if chatCompletionResponse.Usage.CompletionTokens == 0 {
		fmt.Println("No response from Mistral")
		return false
	}

	return true
}

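// RequestMistral looks up the caller's stored Mistral API key in EdgeDB,
// forwards the conversation to the Mistral chat completions API, records
// token usage and cost, and returns the parsed response.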
func RequestMistral(c *fiber.Ctx, model string, messages []Message, temperature float64, context string) (MistralChatCompletionResponse, error) {
	var apiKey string
	err := edgeGlobalClient.WithGlobals(map[string]interface{}{"ext::auth::client_token": c.Cookies("jade-edgedb-auth-token", "")}).QuerySingle(edgeCtx, `
		with
			filtered_keys := (
				select Key {
					key
				} filter .company.name = <str>$0 AND .<keys[is Setting].<setting[is User] = global currentUser
			)
		select filtered_keys.key limit 1
	`, &apiKey, "mistral")
	if err != nil {
		return MistralChatCompletionResponse{}, fmt.Errorf("error getting Mistral API key: %w", err)
	}

	url := "https://api.mistral.ai/v1/chat/completions"

	requestBody := MistralChatCompletionRequest{
		Model:       model,
		Messages:    Message2RequestMessage(messages, context),
		Temperature: temperature,
	}

	jsonBody, err := json.Marshal(requestBody)
	if err != nil {
		return MistralChatCompletionResponse{}, fmt.Errorf("error marshaling JSON: %w", err)
	}

	req, err := http.NewRequest("POST", url, bytes.NewBuffer(jsonBody))
	if err != nil {
		return MistralChatCompletionResponse{}, fmt.Errorf("error creating request: %w", err)
	}

	req.Header.Set("Content-Type", "application/json")
	req.Header.Set("Accept", "application/json")
	req.Header.Set("Authorization", "Bearer "+apiKey)

	client := &http.Client{}
	resp, err := client.Do(req)
	if err != nil {
		return MistralChatCompletionResponse{}, fmt.Errorf("error sending request: %w", err)
	}
	defer resp.Body.Close()

	body, err := io.ReadAll(resp.Body)
	if err != nil {
		return MistralChatCompletionResponse{}, fmt.Errorf("error reading response body: %w", err)
	}

	var chatCompletionResponse MistralChatCompletionResponse
	err = json.Unmarshal(body, &chatCompletionResponse)
	if err != nil {
		return MistralChatCompletionResponse{}, fmt.Errorf("error unmarshaling JSON: %w", err)
	}

	var usedModelInfo ModelInfo
	err = edgeGlobalClient.WithGlobals(map[string]interface{}{"ext::auth::client_token": c.Cookies("jade-edgedb-auth-token", "")}).QuerySingle(edgeCtx, `
		SELECT ModelInfo {
			inputPrice,
			outputPrice
		}
		FILTER .modelID = <str>$0
		LIMIT 1
	`, &usedModelInfo, model)
	if err != nil {
		return MistralChatCompletionResponse{}, fmt.Errorf("error getting model info: %w", err)
	}

	if usedModelInfo.InputPrice == 0 || usedModelInfo.OutputPrice == 0 {
		return MistralChatCompletionResponse{}, fmt.Errorf("model %s not found in Mistral", model)
	}

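	// Cost is computed as token counts multiplied by the per-token prices stored
	// in ModelInfo; whether those prices are per token or per 1K/1M tokens is an
	// assumption that depends on how ModelInfo is populated.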
	var inputCost float32 = float32(chatCompletionResponse.Usage.PromptTokens) * usedModelInfo.InputPrice
	var outputCost float32 = float32(chatCompletionResponse.Usage.CompletionTokens) * usedModelInfo.OutputPrice
	addUsage(c, inputCost, outputCost, chatCompletionResponse.Usage.PromptTokens, chatCompletionResponse.Usage.CompletionTokens, model)

	return chatCompletionResponse, nil
}