Jade/RequestMistral.go

package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"io"
	"net/http"

	"github.com/edgedb/edgedb-go"
)
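
// MistralChatCompletionRequest is the JSON body sent to Mistral's chat completions endpoint.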
type MistralChatCompletionRequest struct {
	Model       string    `json:"model"`
	Messages    []Message `json:"messages"`
	Temperature float64   `json:"temperature"`
}
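
// MistralChatCompletionResponse mirrors the JSON returned by the chat completions endpoint.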
type MistralChatCompletionResponse struct {
	ID      string          `json:"id"`
	Object  string          `json:"object"`
	Created int64           `json:"created"`
	Model   string          `json:"model"`
	Usage   MistralUsage    `json:"usage"`
	Choices []MistralChoice `json:"choices"`
}
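
// MistralUsage reports prompt and completion token counts, used below for cost tracking.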
type MistralUsage struct {
	PromptTokens     int32 `json:"prompt_tokens"`
	CompletionTokens int32 `json:"completion_tokens"`
	TotalTokens      int32 `json:"total_tokens"`
}
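
// MistralChoice is a single candidate completion in the response.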
type MistralChoice struct {
	Message      Message `json:"message"`
	FinishReason string  `json:"finish_reason"`
	Index        int     `json:"index"`
}
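
// init registers the Mistral model entries and the Mistral company entry in the
// package-level ModelsInfos and CompanyInfos lists.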
func init() {
	ModelInfosList := []ModelInfo{}
	modelInfo := ModelInfo{
		ID:          "open-mistral-7b",
		Name:        "Mistral 7b",
		Icon:        "mistral",
		MaxToken:    32000,
		InputPrice:  0.25 / 1000000,
		OutputPrice: 1.25 / 1000000,
	}
	ModelInfosList = append(ModelInfosList, modelInfo)
	ModelsInfos = append(ModelsInfos, modelInfo)
	modelInfo = ModelInfo{
		ID:          "open-mixtral-8x7b",
		Name:        "Mistral 8x7b",
		Icon:        "mistral",
		MaxToken:    32000,
		InputPrice:  0.7 / 1000000,
		OutputPrice: 0.7 / 1000000,
	}
	ModelInfosList = append(ModelInfosList, modelInfo)
	ModelsInfos = append(ModelsInfos, modelInfo)
	modelInfo = ModelInfo{
		ID:          "open-mixtral-8x22b",
		Name:        "Mistral 8x22b",
		Icon:        "mistral",
		MaxToken:    64000,
		InputPrice:  2.0 / 1000000,
		OutputPrice: 6.0 / 1000000,
	}
	ModelInfosList = append(ModelInfosList, modelInfo)
	ModelsInfos = append(ModelsInfos, modelInfo)
	modelInfo = ModelInfo{
		ID:          "mistral-small-latest",
		Name:        "Mistral Small",
		Icon:        "mistral",
		MaxToken:    32000,
		InputPrice:  1.0 / 1000000,
		OutputPrice: 3.0 / 1000000,
	}
	ModelInfosList = append(ModelInfosList, modelInfo)
	ModelsInfos = append(ModelsInfos, modelInfo)
	modelInfo = ModelInfo{
		ID:          "mistral-medium-latest",
		Name:        "Mistral Medium",
		Icon:        "mistral",
		MaxToken:    32000,
		InputPrice:  2.7 / 1000000,
		OutputPrice: 8.1 / 1000000,
	}
	ModelInfosList = append(ModelInfosList, modelInfo)
	ModelsInfos = append(ModelsInfos, modelInfo)
	modelInfo = ModelInfo{
		ID:          "mistral-large-latest",
		Name:        "Mistral Large",
		Icon:        "mistral",
		MaxToken:    32000,
		InputPrice:  4.0 / 1000000,
		OutputPrice: 12.0 / 1000000,
	}
	ModelInfosList = append(ModelInfosList, modelInfo)
	ModelsInfos = append(ModelsInfos, modelInfo)
	companyInfo := CompanyInfo{
		ID:         "mistral",
		Name:       "Mistral",
		Icon:       "icons/mistral.png",
		ModelInfos: ModelInfosList,
	}
	CompanyInfos = append(CompanyInfos, companyInfo)
}
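
// addMistralMessage sends the currently selected messages to the given Mistral model and
// stores the reply as a bot message, returning the new message's UUID (zero UUID on error).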
func addMistralMessage(modelID string, selected bool) edgedb.UUID {
	messages := getAllSelectedMessages()
	chatCompletion, err := RequestMistral(modelID, messages, 0.7)
	if err != nil {
		fmt.Println("Error:", err)
	} else if len(chatCompletion.Choices) == 0 {
		fmt.Println(chatCompletion)
		fmt.Println("No response from Mistral")
		id := insertBotMessage("No response from Mistral", selected, modelID)
		return id
	} else {
		content := chatCompletion.Choices[0].Message.Content
		id := insertBotMessage(content, selected, modelID)
		return id
	}
	return edgedb.UUID{}
}
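
// TestMistralKey checks whether an API key is valid by sending a minimal chat completion
// request and verifying that the response contains completion tokens.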
func TestMistralKey(apiKey string) bool {
	url := "https://api.mistral.ai/v1/chat/completions"
	// Minimal single-message request, used only to validate the key.
	mistralMessages := []Message{
		{
			Role:    "user",
			Content: "Hello",
		},
	}
	requestBody := MistralChatCompletionRequest{
		Model:       "open-mistral-7b",
		Messages:    mistralMessages,
		Temperature: 0,
	}
	jsonBody, err := json.Marshal(requestBody)
	if err != nil {
		fmt.Println("Error:", err)
		return false
	}
	req, err := http.NewRequest("POST", url, bytes.NewBuffer(jsonBody))
	if err != nil {
		fmt.Println("Error:", err)
		return false
	}
	req.Header.Set("Content-Type", "application/json")
	req.Header.Set("Accept", "application/json")
	req.Header.Set("Authorization", "Bearer "+apiKey)
	client := &http.Client{}
	resp, err := client.Do(req)
	if err != nil {
		fmt.Println("Error:", err)
		return false
	}
	defer resp.Body.Close()
	body, err := io.ReadAll(resp.Body)
	if err != nil {
		fmt.Println("Error:", err)
		return false
	}
	var chatCompletionResponse MistralChatCompletionResponse
	err = json.Unmarshal(body, &chatCompletionResponse)
	fmt.Println(chatCompletionResponse)
	if err != nil {
		fmt.Println("Error:", err)
		return false
	}
	if chatCompletionResponse.Usage.CompletionTokens == 0 {
		fmt.Println("Error: No response from Mistral")
		return false
	}
	return true
}
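
// RequestMistral looks up the stored Mistral API key in EdgeDB, sends the chat completion
// request, records token usage and cost, and returns the parsed response.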
func RequestMistral(model string, messages []Message, temperature float64) (MistralChatCompletionResponse, error) {
	var apiKey string
	err := edgeClient.QuerySingle(edgeCtx, `
		with
			filtered_keys := (
				select Key {
					key
				} filter .company = <str>$0
			)
		select filtered_keys.key limit 1
	`, &apiKey, "mistral")
	if err != nil {
		return MistralChatCompletionResponse{}, fmt.Errorf("error getting Mistral API key: %w", err)
	}
	url := "https://api.mistral.ai/v1/chat/completions"
	requestBody := MistralChatCompletionRequest{
		Model:       model,
		Messages:    ChangeRoleBot2Assistant(messages),
		Temperature: temperature,
	}
	jsonBody, err := json.Marshal(requestBody)
	if err != nil {
		return MistralChatCompletionResponse{}, fmt.Errorf("error marshaling JSON: %w", err)
	}
	req, err := http.NewRequest("POST", url, bytes.NewBuffer(jsonBody))
	if err != nil {
		return MistralChatCompletionResponse{}, fmt.Errorf("error creating request: %w", err)
	}
	req.Header.Set("Content-Type", "application/json")
	req.Header.Set("Accept", "application/json")
	req.Header.Set("Authorization", "Bearer "+apiKey)
	client := &http.Client{}
	resp, err := client.Do(req)
	if err != nil {
		return MistralChatCompletionResponse{}, fmt.Errorf("error sending request: %w", err)
	}
	defer resp.Body.Close()
	body, err := io.ReadAll(resp.Body)
	if err != nil {
		return MistralChatCompletionResponse{}, fmt.Errorf("error reading response body: %w", err)
	}
	var chatCompletionResponse MistralChatCompletionResponse
	err = json.Unmarshal(body, &chatCompletionResponse)
	if err != nil {
		return MistralChatCompletionResponse{}, fmt.Errorf("error unmarshaling JSON: %w", err)
	}
	// Look up per-token pricing for the model that was used, then record token usage and cost.
	var usedModelInfo ModelInfo
	for mi := range ModelsInfos {
		if ModelsInfos[mi].ID == model {
			usedModelInfo = ModelsInfos[mi]
		}
	}
	inputCost := float32(chatCompletionResponse.Usage.PromptTokens) * usedModelInfo.InputPrice
	outputCost := float32(chatCompletionResponse.Usage.CompletionTokens) * usedModelInfo.OutputPrice
	addUsage(inputCost, outputCost, chatCompletionResponse.Usage.PromptTokens, chatCompletionResponse.Usage.CompletionTokens, model)
	return chatCompletionResponse, nil
}