package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"io"
	"net/http"

	"github.com/edgedb/edgedb-go"
)

// MistralChatCompletionRequest is the request body sent to the Mistral chat
// completions endpoint.
type MistralChatCompletionRequest struct {
	Model       string           `json:"model"`
	Messages    []RequestMessage `json:"messages"`
	Temperature float64          `json:"temperature"`
}

// MistralChatCompletionResponse is the response body returned by the Mistral
// chat completions endpoint.
type MistralChatCompletionResponse struct {
	ID      string          `json:"id"`
	Object  string          `json:"object"`
	Created int64           `json:"created"`
	Model   string          `json:"model"`
	Usage   MistralUsage    `json:"usage"`
	Choices []MistralChoice `json:"choices"`
}

// MistralUsage reports token consumption for a single completion.
type MistralUsage struct {
	PromptTokens     int32 `json:"prompt_tokens"`
	CompletionTokens int32 `json:"completion_tokens"`
	TotalTokens      int32 `json:"total_tokens"`
}

// MistralChoice is one candidate completion returned by the API.
type MistralChoice struct {
	Message      Message `json:"message"`
	FinishReason string  `json:"finish_reason"`
	Index        int     `json:"index"`
}

// addMistralMessage sends the currently selected messages to Mistral and
// stores the bot's reply, returning the ID of the inserted message.
func addMistralMessage(llm LLM, selected bool) edgedb.UUID {
	messages := getAllSelectedMessages()

	chatCompletion, err := RequestMistral(llm.Model.ModelID, messages, float64(llm.Temperature), llm.Context)
	if err != nil {
		panic(err)
	}

	if len(chatCompletion.Choices) == 0 {
		fmt.Println("No response from Mistral")
		return insertBotMessage("No response from Mistral", selected, llm.ID)
	}

	content := chatCompletion.Choices[0].Message.Content
	return insertBotMessage(content, selected, llm.ID)
}

// TestMistralKey sends a minimal chat completion request to verify that the
// given API key is valid. It returns true only if Mistral produced a completion.
func TestMistralKey(apiKey string) bool {
	url := "https://api.mistral.ai/v1/chat/completions"

	// A single short prompt is enough to validate the key.
	mistralMessages := []RequestMessage{
		{
			Role:    "user",
			Content: "Hello",
		},
	}

	requestBody := MistralChatCompletionRequest{
		Model:       "open-mistral-7b",
		Messages:    mistralMessages,
		Temperature: 0,
	}

	jsonBody, err := json.Marshal(requestBody)
	if err != nil {
		fmt.Println("Error:", err)
		return false
	}

	req, err := http.NewRequest("POST", url, bytes.NewBuffer(jsonBody))
	if err != nil {
		fmt.Println("Error:", err)
		return false
	}

	req.Header.Set("Content-Type", "application/json")
	req.Header.Set("Accept", "application/json")
	req.Header.Set("Authorization", "Bearer "+apiKey)

	client := &http.Client{}
	resp, err := client.Do(req)
	if err != nil {
		fmt.Println("Error:", err)
		return false
	}
	defer resp.Body.Close()

	body, err := io.ReadAll(resp.Body)
	if err != nil {
		fmt.Println("Error:", err)
		return false
	}

	var chatCompletionResponse MistralChatCompletionResponse
	err = json.Unmarshal(body, &chatCompletionResponse)
	if err != nil {
		fmt.Println("Error:", err)
		return false
	}

	// An invalid key yields an error payload with no usage, so zero
	// completion tokens means the key did not work.
	if chatCompletionResponse.Usage.CompletionTokens == 0 {
		fmt.Println("Error: No response from Mistral")
		return false
	}

	return true
}

// RequestMistral sends a chat completion request to Mistral and records the
// token usage and cost. NOTE: the EdgeDB type and property names used below
// (Key with a company link and key property; Model with model_id,
// input_price, and output_price), the handling of context as a leading
// system message, and the Role/Content fields on Message are assumptions;
// adjust them to match the actual schema.
func RequestMistral(model string, messages []Message, temperature float64, context string) (MistralChatCompletionResponse, error) {
	// Fetch the stored Mistral API key.
	var apiKey string
	err := edgeClient.QuerySingle(edgeCtx, `
		with filtered_keys := (
			select Key
			filter .company.name = "Mistral"
		)
		select filtered_keys.key
		limit 1
	`, &apiKey)
	if err != nil {
		return MistralChatCompletionResponse{}, fmt.Errorf("error getting Mistral API key: %w", err)
	}

	// Convert stored messages to the Mistral request format, prepending the
	// conversation context as a system message (assumed Role/Content fields).
	mistralMessages := make([]RequestMessage, 0, len(messages)+1)
	if context != "" {
		mistralMessages = append(mistralMessages, RequestMessage{Role: "system", Content: context})
	}
	for _, message := range messages {
		mistralMessages = append(mistralMessages, RequestMessage{Role: message.Role, Content: message.Content})
	}

	requestBody := MistralChatCompletionRequest{
		Model:       model,
		Messages:    mistralMessages,
		Temperature: temperature,
	}

	jsonBody, err := json.Marshal(requestBody)
	if err != nil {
		return MistralChatCompletionResponse{}, fmt.Errorf("error marshaling request: %w", err)
	}

	req, err := http.NewRequest("POST", "https://api.mistral.ai/v1/chat/completions", bytes.NewBuffer(jsonBody))
	if err != nil {
		return MistralChatCompletionResponse{}, fmt.Errorf("error creating request: %w", err)
	}
	req.Header.Set("Content-Type", "application/json")
	req.Header.Set("Accept", "application/json")
	req.Header.Set("Authorization", "Bearer "+apiKey)

	client := &http.Client{}
	resp, err := client.Do(req)
	if err != nil {
		return MistralChatCompletionResponse{}, fmt.Errorf("error sending request: %w", err)
	}
	defer resp.Body.Close()

	body, err := io.ReadAll(resp.Body)
	if err != nil {
		return MistralChatCompletionResponse{}, fmt.Errorf("error reading response: %w", err)
	}

	var chatCompletionResponse MistralChatCompletionResponse
	err = json.Unmarshal(body, &chatCompletionResponse)
	if err != nil {
		return MistralChatCompletionResponse{}, fmt.Errorf("error unmarshaling response: %w", err)
	}

	// Look up per-token pricing for the requested model (schema names assumed).
	var usedModelInfo struct {
		ID          edgedb.UUID `edgedb:"id"`
		InputPrice  float32     `edgedb:"input_price"`
		OutputPrice float32     `edgedb:"output_price"`
	}
	err = edgeClient.QuerySingle(edgeCtx, `
		select Model { input_price, output_price }
		filter .model_id = <str>$0
		limit 1
	`, &usedModelInfo, model)
	if err != nil {
		return MistralChatCompletionResponse{}, fmt.Errorf("error getting model info: %w", err)
	}
	if usedModelInfo.InputPrice == 0 || usedModelInfo.OutputPrice == 0 {
		return MistralChatCompletionResponse{}, fmt.Errorf("model %s not found in Mistral", model)
	}

	// Record cost and token usage for this request.
	var inputCost float32 = float32(chatCompletionResponse.Usage.PromptTokens) * usedModelInfo.InputPrice
	var outputCost float32 = float32(chatCompletionResponse.Usage.CompletionTokens) * usedModelInfo.OutputPrice
	addUsage(inputCost, outputCost, chatCompletionResponse.Usage.PromptTokens, chatCompletionResponse.Usage.CompletionTokens, model)

	return chatCompletionResponse, nil
}