package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"io"
	"net/http"
	"time"

	"github.com/edgedb/edgedb-go"
)
// GoogleRequestMessage is a single conversation turn in a Google
// generateContent request (one entry of the "contents" array): a role
// (e.g. "user") plus its text parts.
type GoogleRequestMessage struct {
	Role  string       `json:"role"`
	Parts []GooglePart `json:"parts"`
}
// GooglePart is one text fragment inside a request message's "parts" array.
type GooglePart struct {
	Text string `json:"text"`
}
// GoogleChatCompletionRequest is the JSON body sent to the generateContent
// endpoint; the conversation turns are serialized under "contents".
type GoogleChatCompletionRequest struct {
	Messages []GoogleRequestMessage `json:"contents"`
}
// GoogleChatCompletionResponse models the completion response body.
//
// NOTE(review): the field tags here ("id", "usage", "choices", ...) mirror
// the OpenAI chat-completions response, not Gemini's generateContent
// response (which uses "candidates"/"usageMetadata") — verify against a
// live reply before relying on these fields being populated.
type GoogleChatCompletionResponse struct {
	ID      string         `json:"id"`
	Object  string         `json:"object"`
	Created int64          `json:"created"`
	Model   string         `json:"model"`
	Usage   GoogleUsage    `json:"usage"`
	Choices []GoogleChoice `json:"choices"`
}
// GoogleUsage carries the token counts reported for a completion
// (OpenAI-style keys; see the NOTE on GoogleChatCompletionResponse).
type GoogleUsage struct {
	PromptTokens     int32 `json:"prompt_tokens"`
	CompletionTokens int32 `json:"completion_tokens"`
	TotalTokens      int32 `json:"total_tokens"`
}
// GoogleChoice is one candidate completion: the message content, why
// generation stopped, and the candidate's position in the list.
type GoogleChoice struct {
	Message      Message `json:"message"`
	FinishReason string  `json:"finish_reason"`
	Index        int     `json:"index"`
}
func addGoogleMessage(llm LLM, selected bool) edgedb.UUID {
|
|
Messages := getAllSelectedMessages()
|
|
|
|
chatCompletion, err := RequestGoogle(llm.Model.ModelID, Messages, float64(llm.Temperature))
|
|
if err != nil {
|
|
panic(err)
|
|
} else if len(chatCompletion.Choices) == 0 {
|
|
fmt.Println("No response from OpenAI")
|
|
id := insertBotMessage("No response from OpenAI", selected, llm.ID)
|
|
return id
|
|
} else {
|
|
Content := chatCompletion.Choices[0].Message.Content
|
|
id := insertBotMessage(Content, selected, llm.ID)
|
|
return id
|
|
}
|
|
}
|
|
|
|
func TestGoogleKey(apiKey string) bool {
|
|
url := "https://generativelanguage.googleapis.com/v1beta/models/gemini-pro:generateContent?key=" + apiKey
|
|
|
|
googlePart := GooglePart{
|
|
Text: "Hello",
|
|
}
|
|
// Convert messages to OpenAI format
|
|
googleMessages := []GoogleRequestMessage{
|
|
{
|
|
Role: "user",
|
|
Parts: []GooglePart{googlePart},
|
|
},
|
|
}
|
|
|
|
requestBody := GoogleChatCompletionRequest{
|
|
Messages: googleMessages,
|
|
}
|
|
|
|
jsonBody, err := json.Marshal(requestBody)
|
|
if err != nil {
|
|
return false
|
|
}
|
|
|
|
req, err := http.NewRequest("POST", url, bytes.NewBuffer(jsonBody))
|
|
if err != nil {
|
|
return false
|
|
}
|
|
|
|
req.Header.Set("Content-Type", "application/json")
|
|
|
|
client := &http.Client{}
|
|
resp, err := client.Do(req)
|
|
if err != nil {
|
|
return false
|
|
}
|
|
defer resp.Body.Close()
|
|
|
|
body, err := io.ReadAll(resp.Body)
|
|
if err != nil {
|
|
return false
|
|
}
|
|
|
|
fmt.Println(string(body))
|
|
|
|
var chatCompletionResponse GoogleChatCompletionResponse
|
|
err = json.Unmarshal(body, &chatCompletionResponse)
|
|
if err != nil {
|
|
return false
|
|
}
|
|
if chatCompletionResponse.Usage.CompletionTokens == 0 {
|
|
return false
|
|
}
|
|
return true
|
|
}
|
|
|
|
func RequestGoogle(model string, messages []Message, temperature float64) (OpenaiChatCompletionResponse, error) {
|
|
var apiKey string
|
|
err := edgeClient.QuerySingle(edgeCtx, `
|
|
with
|
|
filtered_keys := (
|
|
select Key {
|
|
key
|
|
} filter .company.name = <str>$0 AND .<keys[is Setting].<setting[is User] = global currentUser
|
|
)
|
|
select filtered_keys.key limit 1
|
|
`, &apiKey, "openai")
|
|
if err != nil {
|
|
return OpenaiChatCompletionResponse{}, fmt.Errorf("error getting OpenAI API key: %w", err)
|
|
}
|
|
|
|
url := "https://api.openai.com/v1/chat/completions"
|
|
|
|
requestBody := OpenaiChatCompletionRequest{
|
|
Model: model,
|
|
Messages: Message2RequestMessage(messages),
|
|
Temperature: temperature,
|
|
}
|
|
|
|
jsonBody, err := json.Marshal(requestBody)
|
|
if err != nil {
|
|
return OpenaiChatCompletionResponse{}, fmt.Errorf("error marshaling JSON: %w", err)
|
|
}
|
|
|
|
req, err := http.NewRequest("POST", url, bytes.NewBuffer(jsonBody))
|
|
if err != nil {
|
|
return OpenaiChatCompletionResponse{}, fmt.Errorf("error creating request: %w", err)
|
|
}
|
|
|
|
req.Header.Set("Content-Type", "application/json")
|
|
req.Header.Set("Authorization", "Bearer "+apiKey)
|
|
|
|
client := &http.Client{}
|
|
resp, err := client.Do(req)
|
|
if err != nil {
|
|
return OpenaiChatCompletionResponse{}, fmt.Errorf("error sending request: %w", err)
|
|
}
|
|
defer resp.Body.Close()
|
|
|
|
body, err := io.ReadAll(resp.Body)
|
|
if err != nil {
|
|
return OpenaiChatCompletionResponse{}, fmt.Errorf("error reading response body: %w", err)
|
|
}
|
|
|
|
var chatCompletionResponse OpenaiChatCompletionResponse
|
|
err = json.Unmarshal(body, &chatCompletionResponse)
|
|
if err != nil {
|
|
return OpenaiChatCompletionResponse{}, fmt.Errorf("error unmarshaling JSON: %w", err)
|
|
}
|
|
|
|
var usedModelInfo ModelInfo
|
|
edgeClient.QuerySingle(edgeCtx, `
|
|
Select ModelInfo {
|
|
inputPrice,
|
|
outputPrice
|
|
}
|
|
Filter ModelInfo.model = <str>$0
|
|
`, &usedModelInfo, model)
|
|
|
|
var inputCost float32 = float32(chatCompletionResponse.Usage.PromptTokens) * usedModelInfo.InputPrice
|
|
var outputCost float32 = float32(chatCompletionResponse.Usage.CompletionTokens) * usedModelInfo.OutputPrice
|
|
addUsage(inputCost, outputCost, chatCompletionResponse.Usage.PromptTokens, chatCompletionResponse.Usage.CompletionTokens, model)
|
|
|
|
return chatCompletionResponse, nil
|
|
}
|