package main

import (
	"bytes"
	"encoding/json"
	"io"
	"net/http"
	"strings"

	"github.com/gofiber/fiber/v2"
)

// OpenaiChatCompletionRequest is the request body sent to the OpenAI chat completions endpoint.
type OpenaiChatCompletionRequest struct {
	Model       string           `json:"model"`
	Messages    []RequestMessage `json:"messages"`
	MaxTokens   int              `json:"max_tokens"`
	Temperature float64          `json:"temperature"`
}

// OpenaiChatCompletionResponse is the response body returned by the OpenAI chat completions endpoint.
type OpenaiChatCompletionResponse struct {
	ID      string         `json:"id"`
	Object  string         `json:"object"`
	Created int64          `json:"created"`
	Model   string         `json:"model"`
	Usage   OpenaiUsage    `json:"usage"`
	Choices []OpenaiChoice `json:"choices"`
}

// OpenaiUsage reports token consumption for a single completion.
type OpenaiUsage struct {
	PromptTokens     int32 `json:"prompt_tokens"`
	CompletionTokens int32 `json:"completion_tokens"`
	TotalTokens      int32 `json:"total_tokens"`
}

// OpenaiChoice is a single completion choice returned by OpenAI.
type OpenaiChoice struct {
	Message      Message `json:"message"`
	FinishReason string  `json:"finish_reason"`
	Index        int     `json:"index"`
}

// OpenaiErrorCodes maps OpenAI HTTP status codes to user-facing error messages.
var OpenaiErrorCodes = map[string]string{
	"400": "Invalid Request - Please contact the support.",
	"401": "Invalid Authentication - Ensure that the API key is still valid.",
	"403": "Accessing the API from an unsupported country, region, or territory.",
	"404": "Model not found.",
	"429": "Rate limit reached for requests - You are sending requests too quickly OR you have run out of credits or hit your maximum monthly spend - Buy more credits or learn how to increase your limits.",
	"500": "Issue on Provider servers - Retry your request after a brief wait and contact the provider if the issue persists.",
	"503": "Servers are experiencing high traffic - Please retry your requests after a brief wait.",
}

// TestOpenaiKey sends a minimal chat completion request to OpenAI and reports
// whether the given API key produced a usable completion.
func TestOpenaiKey(apiKey string) bool {
	url := "https://api.openai.com/v1/chat/completions"

	// A single short user message is enough to validate the key.
	openaiMessages := []RequestMessage{
		{
			Role:    "user",
			Content: "Hello",
		},
	}

	requestBody := OpenaiChatCompletionRequest{
		Model:       "gpt-3.5-turbo",
		Messages:    openaiMessages,
		Temperature: 0,
		MaxTokens:   10,
	}

	jsonBody, err := json.Marshal(requestBody)
	if err != nil {
		return false
	}

	req, err := http.NewRequest("POST", url, bytes.NewBuffer(jsonBody))
	if err != nil {
		return false
	}
	req.Header.Set("Content-Type", "application/json")
	req.Header.Set("Authorization", "Bearer "+apiKey)

	client := &http.Client{}
	resp, err := client.Do(req)
	if err != nil {
		return false
	}
	defer resp.Body.Close()

	body, err := io.ReadAll(resp.Body)
	if err != nil {
		return false
	}

	var chatCompletionResponse OpenaiChatCompletionResponse
	err = json.Unmarshal(body, &chatCompletionResponse)
	if err != nil {
		return false
	}

	// A valid key yields a non-empty completion; error responses report zero completion tokens.
	if chatCompletionResponse.Usage.CompletionTokens == 0 {
		return false
	}
	return true
}

// RequestOpenai sends the conversation to OpenAI on behalf of the authenticated
// user, records token usage, and returns the assistant's reply or a JADE error message.
func RequestOpenai(c *fiber.Ctx, llm LLM, messages []Message) string {
	model := llm.Model.ModelID
	temperature := float64(llm.Temperature)
	context := llm.Context
	maxTokens := int(llm.MaxToken)

	var apiKey string

	// usedModelInfo receives the company API key and the per-token pricing for
	// the selected model. The field names and edgedb tags here are assumptions
	// based on how the result is used below.
	var usedModelInfo struct {
		Key         string  `edgedb:"key"`
		InputPrice  float32 `edgedb:"input_price"`
		OutputPrice float32 `edgedb:"output_price"`
	}

	// Look up the OpenAI key and the model pricing for the authenticated user.
	// The EdgeQL below is a sketch: the type and property names (AIModel,
	// model_id, input_price, output_price) are assumptions about the JADE schema.
	err := edgeGlobalClient.WithGlobals(map[string]interface{}{
		"ext::auth::client_token": c.Cookies("jade-edgedb-auth-token"),
	}).QuerySingle(edgeCtx, `
		with filtered_keys := (
			select Key { key }
			filter .company.name = "Openai"
			limit 1
		)
		select AIModel {
			input_price,
			output_price,
			key := filtered_keys.key,
		}
		filter .model_id = <str>$0
		limit 1
	`, &usedModelInfo, model)
	if err != nil {
		return "JADE internal error: 00-00-0006. Please contact the support."
	}
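	apiKey = usedModelInfo.Key

	// The request/response handling below is a sketch that mirrors TestOpenaiKey:
	// it prepends llm.Context as a system message (assuming Context is a string
	// and Message carries Role/Content), forwards the conversation with the
	// configured temperature and token limit, and maps known OpenAI error
	// statuses through OpenaiErrorCodes. The specific messages returned on local
	// failures (marshalling, transport) are assumptions that reuse entries from that map.
	openaiMessages := make([]RequestMessage, 0, len(messages)+1)
	if context != "" {
		openaiMessages = append(openaiMessages, RequestMessage{Role: "system", Content: context})
	}
	for _, message := range messages {
		openaiMessages = append(openaiMessages, RequestMessage{Role: message.Role, Content: message.Content})
	}

	requestBody := OpenaiChatCompletionRequest{
		Model:       model,
		Messages:    openaiMessages,
		Temperature: temperature,
		MaxTokens:   maxTokens,
	}

	jsonBody, err := json.Marshal(requestBody)
	if err != nil {
		return OpenaiErrorCodes["400"]
	}

	req, err := http.NewRequest("POST", "https://api.openai.com/v1/chat/completions", bytes.NewBuffer(jsonBody))
	if err != nil {
		return OpenaiErrorCodes["400"]
	}
	req.Header.Set("Content-Type", "application/json")
	req.Header.Set("Authorization", "Bearer "+apiKey)

	client := &http.Client{}
	resp, err := client.Do(req)
	if err != nil {
		return OpenaiErrorCodes["503"]
	}
	defer resp.Body.Close()

	body, err := io.ReadAll(resp.Body)
	if err != nil {
		return OpenaiErrorCodes["500"]
	}

	// Map known OpenAI HTTP error statuses (e.g. "429 Too Many Requests") to the
	// user-facing messages above; unknown statuses fall through to the
	// missing-error-code check below.
	if resp.StatusCode != http.StatusOK {
		if msg, ok := OpenaiErrorCodes[strings.Split(resp.Status, " ")[0]]; ok {
			return msg
		}
	}

	var chatCompletionResponse OpenaiChatCompletionResponse
	if err := json.Unmarshal(body, &chatCompletionResponse); err != nil {
		logMissingErrorCode.Println("Openai -", resp.Status, "-", string(body))
		return "JADE internal error: 00-03-0007. Please contact the support."
	}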

	// Bill the request: token counts priced with the per-model input/output rates.
	var inputCost float32 = float32(chatCompletionResponse.Usage.PromptTokens) * usedModelInfo.InputPrice
	var outputCost float32 = float32(chatCompletionResponse.Usage.CompletionTokens) * usedModelInfo.OutputPrice
	addUsage(c, inputCost, outputCost, chatCompletionResponse.Usage.PromptTokens, chatCompletionResponse.Usage.CompletionTokens, model)

	// No choices means OpenAI returned an error without a mapped message;
	// log the raw status and body so the code can be added to OpenaiErrorCodes.
	if len(chatCompletionResponse.Choices) == 0 {
		logMissingErrorCode.Println("Openai -", resp.Status, "-", string(body))
		return "JADE internal error: 00-03-0007. Please contact the support."
	}

	return chatCompletionResponse.Choices[0].Message.Content
}