package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"io"
	"net/http"

	"github.com/edgedb/edgedb-go"
)

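// MistralChatCompletionRequest is the JSON request body sent to the Mistral chat completions API.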
type MistralChatCompletionRequest struct {
	Model       string    `json:"model"`
	Messages    []Message `json:"messages"`
	Temperature float64   `json:"temperature"`
}

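// MistralChatCompletionResponse is the JSON response returned by the Mistral chat completions API.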
type MistralChatCompletionResponse struct {
	ID      string          `json:"id"`
	Object  string          `json:"object"`
	Created int64           `json:"created"`
	Model   string          `json:"model"`
	Usage   MistralUsage    `json:"usage"`
	Choices []MistralChoice `json:"choices"`
}

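// MistralUsage holds the token counts reported for a completion.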
type MistralUsage struct {
	PromptTokens     int32 `json:"prompt_tokens"`
	CompletionTokens int32 `json:"completion_tokens"`
	TotalTokens      int32 `json:"total_tokens"`
}

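// MistralChoice is a single completion choice in a Mistral response.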
type MistralChoice struct {
	Message      Message `json:"message"`
	FinishReason string  `json:"finish_reason"`
	Index        int     `json:"index"`
}

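// addMistralMessage sends the currently selected messages to Mistral and stores
// the reply as a bot message, returning its ID (a zero UUID if the request fails).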
func addMistralMessage(llm LLM, selected bool) edgedb.UUID {
	messages := getAllSelectedMessages()

	chatCompletion, err := RequestMistral(llm.Model.ModelID, messages, float64(llm.Temperature))
	if err != nil {
		fmt.Println("Error:", err)
		return edgedb.UUID{}
	}
	if len(chatCompletion.Choices) == 0 {
		fmt.Println(chatCompletion)
		fmt.Println("No response from Mistral")
		return insertBotMessage("No response from Mistral", selected, llm.ID)
	}

	content := chatCompletion.Choices[0].Message.Content
	return insertBotMessage(content, selected, llm.ID)
}

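// TestMistralKey sends a minimal chat completion request to the Mistral API and
// reports whether the given API key produced a usable response.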
func TestMistralKey(apiKey string) bool {
	url := "https://api.mistral.ai/v1/chat/completions"

	// Minimal single-message conversation used only to check that the key is accepted.
	mistralMessages := []Message{
		{
			Role:    "user",
			Content: "Hello",
		},
	}

	requestBody := MistralChatCompletionRequest{
		Model:       "open-mistral-7b",
		Messages:    mistralMessages,
		Temperature: 0,
	}

	jsonBody, err := json.Marshal(requestBody)
	if err != nil {
		fmt.Println("Error:", err)
		return false
	}

	req, err := http.NewRequest("POST", url, bytes.NewBuffer(jsonBody))
	if err != nil {
		fmt.Println("Error:", err)
		return false
	}

	req.Header.Set("Content-Type", "application/json")
	req.Header.Set("Accept", "application/json")
	req.Header.Set("Authorization", "Bearer "+apiKey)

	client := &http.Client{}
	resp, err := client.Do(req)
	if err != nil {
		fmt.Println("Error:", err)
		return false
	}
	defer resp.Body.Close()

	body, err := io.ReadAll(resp.Body)
	if err != nil {
		fmt.Println("Error:", err)
		return false
	}

	var chatCompletionResponse MistralChatCompletionResponse
	err = json.Unmarshal(body, &chatCompletionResponse)
	fmt.Println(chatCompletionResponse)
	if err != nil {
		fmt.Println("Error:", err)
		return false
	}
	if chatCompletionResponse.Usage.CompletionTokens == 0 {
		fmt.Println("Error: No response from Mistral")
		return false
	}
	return true
}

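// RequestMistral loads the stored Mistral API key, sends a chat completion request
// for the given model and messages, records token usage and cost, and returns the
// parsed response.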
func RequestMistral(model string, messages []Message, temperature float64) (MistralChatCompletionResponse, error) {
	// Fetch the stored Mistral API key from EdgeDB.
	var apiKey string
	err := edgeClient.QuerySingle(edgeCtx, `
		with
			filtered_keys := (
				select Key {
					key
				} filter .company.name = <str>$0
			)
		select filtered_keys.key limit 1
	`, &apiKey, "mistral")
	if err != nil {
		return MistralChatCompletionResponse{}, fmt.Errorf("error getting Mistral API key: %w", err)
	}

url := "https://api.mistral.ai/v1/chat/completions"
|
|
|
|
requestBody := MistralChatCompletionRequest{
|
|
Model: model,
|
|
Messages: ChangeRoleBot2Assistant(messages),
|
|
Temperature: temperature,
|
|
}
|
|
|
|
jsonBody, err := json.Marshal(requestBody)
|
|
if err != nil {
|
|
return MistralChatCompletionResponse{}, fmt.Errorf("error marshaling JSON: %w", err)
|
|
}
|
|
|
|
req, err := http.NewRequest("POST", url, bytes.NewBuffer(jsonBody))
|
|
if err != nil {
|
|
return MistralChatCompletionResponse{}, fmt.Errorf("error creating request: %w", err)
|
|
}
|
|
|
|
req.Header.Set("Content-Type", "application/json")
|
|
req.Header.Set("Accept", "application/json")
|
|
req.Header.Set("Authorization", "Bearer "+apiKey)
|
|
|
|
client := &http.Client{}
|
|
resp, err := client.Do(req)
|
|
if err != nil {
|
|
return MistralChatCompletionResponse{}, fmt.Errorf("error sending request: %w", err)
|
|
}
|
|
defer resp.Body.Close()
|
|
|
|
body, err := io.ReadAll(resp.Body)
|
|
if err != nil {
|
|
return MistralChatCompletionResponse{}, fmt.Errorf("error reading response body: %w", err)
|
|
}
|
|
|
|
var chatCompletionResponse MistralChatCompletionResponse
|
|
err = json.Unmarshal(body, &chatCompletionResponse)
|
|
if err != nil {
|
|
return MistralChatCompletionResponse{}, fmt.Errorf("error unmarshaling JSON: %w", err)
|
|
}
|
|
|
|
	// Look up per-token pricing for the model and record token usage and cost.
	var usedModelInfo ModelInfo
	err = edgeClient.QuerySingle(edgeCtx, `
		select ModelInfo {
			inputPrice,
			outputPrice
		}
		filter ModelInfo.model = <str>$0
	`, &usedModelInfo, model)
	if err != nil {
		fmt.Println("Error getting model pricing:", err)
	}

	inputCost := float32(chatCompletionResponse.Usage.PromptTokens) * usedModelInfo.InputPrice
	outputCost := float32(chatCompletionResponse.Usage.CompletionTokens) * usedModelInfo.OutputPrice
	addUsage(inputCost, outputCost, chatCompletionResponse.Usage.PromptTokens, chatCompletionResponse.Usage.CompletionTokens, model)

	return chatCompletionResponse, nil
}