package main

import (
	"bytes"
	"encoding/json"
	"io"
	"net/http"

	"github.com/gofiber/fiber/v2"
)

// AnthropicChatCompletionRequest is the request body for the Anthropic
// Messages API. Context is sent as the top-level `system` prompt.
type AnthropicChatCompletionRequest struct {
	Model       string           `json:"model"`
	Messages    []RequestMessage `json:"messages"`
	MaxTokens   int              `json:"max_tokens"`
	Temperature float64          `json:"temperature"`
	Context     string           `json:"system"`
}

// AnthropicChatCompletionResponse is the response body returned by the
// Anthropic Messages API.
type AnthropicChatCompletionResponse struct {
	ID           string                 `json:"id"`
	Content      []AnthropicContentItem `json:"content"`
	Model        string                 `json:"model"`
	StopReason   string                 `json:"stop_reason"`
	StopSequence string                 `json:"stop_sequence"`
	Usage        AnthropicUsage         `json:"usage"`
}

type AnthropicContentItem struct {
	Text string `json:"text"`
}

type AnthropicUsage struct {
	InputTokens  int32 `json:"input_tokens"`
	OutputTokens int32 `json:"output_tokens"`
}

// AnthropicErrorCodes maps HTTP status codes returned by the Anthropic API to
// user-facing explanations.
var AnthropicErrorCodes map[string]string

// TODO: update to cover the full set of Anthropic API error codes.
func init() {
	AnthropicErrorCodes = make(map[string]string)
	AnthropicErrorCodes["401"] = "Invalid Authentication - Ensure that the API key is still valid."
	AnthropicErrorCodes["403"] = "Accessing the API from an unsupported country, region, or territory."
	AnthropicErrorCodes["429"] = "Rate limit reached for requests, or you have run out of credits or hit your maximum monthly spend - Slow down your requests, buy more credits, or learn how to increase your limits."
	AnthropicErrorCodes["500"] = "Issue on Anthropic servers - Retry your request after a brief wait and contact Anthropic if the issue persists. Check the status page https://status.anthropic.com/."
	AnthropicErrorCodes["503"] = "Anthropic servers are experiencing high traffic - Please retry your request after a brief wait."
}

// TestAnthropicKey sends a minimal request to the Anthropic Messages API and
// reports whether the supplied API key is accepted.
func TestAnthropicKey(apiKey string) bool {
	url := "https://api.anthropic.com/v1/messages"

	anthropicMessages := []RequestMessage{
		{
			Role:    "user",
			Content: "Hello",
		},
	}

	requestBody := AnthropicChatCompletionRequest{
		Model:       "claude-3-haiku-20240307",
		Messages:    anthropicMessages,
		MaxTokens:   10,
		Temperature: 0,
		Context:     "",
	}

	jsonBody, err := json.Marshal(requestBody)
	if err != nil {
		return false
	}

	req, err := http.NewRequest("POST", url, bytes.NewBuffer(jsonBody))
	if err != nil {
		return false
	}
	req.Header.Set("Content-Type", "application/json")
	req.Header.Set("anthropic-version", "2023-06-01")
	req.Header.Set("x-api-key", apiKey)

	client := &http.Client{}
	resp, err := client.Do(req)
	if err != nil {
		return false
	}
	defer resp.Body.Close()

	body, err := io.ReadAll(resp.Body)
	if err != nil {
		return false
	}

	var chatCompletionResponse AnthropicChatCompletionResponse
	if err := json.Unmarshal(body, &chatCompletionResponse); err != nil {
		return false
	}

	// A valid key yields a response with a content array.
	return chatCompletionResponse.Content != nil
}

// RequestAnthropic forwards a conversation to the Anthropic Messages API using
// the key stored for the authenticated user and returns the model's reply or a
// user-facing error message.
func RequestAnthropic(c *fiber.Ctx, llm LLM, messages []Message) string {
	model := llm.Model.ModelID
	temperature := float64(llm.Temperature)
	context := llm.Context
	maxTokens := int(llm.MaxToken)
	if maxTokens == 0 {
		maxTokens = 4096
	}

	// Fetch the stored Anthropic API key for the authenticated user. The
	// filtered property (.model) is an assumption about the Key schema.
	var apiKey struct {
		Key string `edgedb:"key"`
	}
	err := edgeGlobalClient.WithGlobals(map[string]interface{}{
		"ext::auth::client_token": c.Cookies("jade-edgedb-auth-token"),
	}).QuerySingle(edgeCtx, `
		SELECT Key { key }
		FILTER .model = <str>$0
		LIMIT 1
	`, &apiKey, model)
	if err != nil {
		return "JADE internal error: 01-00-0006. Please contact the support."
	}
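	// NOTE: the pricing lookup and the request/response handling below are a
	// sketch. It assumes the flow mirrors TestAnthropicKey above, that Message
	// exposes Role and Content fields, and that per-model pricing lives in an
	// AIModel object with input_price/output_price properties; none of these
	// names are confirmed by this file, and specific internal error codes are
	// omitted from the generic error strings.
	var usedModelInfo struct {
		InputPrice  float32 `edgedb:"input_price"`
		OutputPrice float32 `edgedb:"output_price"`
	}
	err = edgeGlobalClient.QuerySingle(edgeCtx, `
		SELECT AIModel { input_price, output_price }
		FILTER .model_id = <str>$0
		LIMIT 1
	`, &usedModelInfo, model)
	if err != nil {
		return "JADE internal error. Please contact the support."
	}

	// Convert the stored conversation into the Anthropic message format.
	anthropicMessages := make([]RequestMessage, 0, len(messages))
	for _, message := range messages {
		anthropicMessages = append(anthropicMessages, RequestMessage{
			Role:    message.Role,
			Content: message.Content,
		})
	}

	requestBody := AnthropicChatCompletionRequest{
		Model:       model,
		Messages:    anthropicMessages,
		MaxTokens:   maxTokens,
		Temperature: temperature,
		Context:     context,
	}

	jsonBody, err := json.Marshal(requestBody)
	if err != nil {
		return "JADE internal error. Please contact the support."
	}

	req, err := http.NewRequest("POST", "https://api.anthropic.com/v1/messages", bytes.NewBuffer(jsonBody))
	if err != nil {
		return "JADE internal error. Please contact the support."
	}
	req.Header.Set("Content-Type", "application/json")
	req.Header.Set("anthropic-version", "2023-06-01")
	req.Header.Set("x-api-key", apiKey.Key)

	client := &http.Client{}
	resp, err := client.Do(req)
	if err != nil {
		return "JADE internal error. Please contact the support."
	}
	defer resp.Body.Close()

	body, err := io.ReadAll(resp.Body)
	if err != nil {
		return "JADE internal error. Please contact the support."
	}

	var chatCompletionResponse AnthropicChatCompletionResponse
	if err := json.Unmarshal(body, &chatCompletionResponse); err != nil {
		return "JADE internal error. Please contact the support."
	}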

	// Record token usage and cost for this request.
	inputCost := float32(chatCompletionResponse.Usage.InputTokens) * usedModelInfo.InputPrice
	outputCost := float32(chatCompletionResponse.Usage.OutputTokens) * usedModelInfo.OutputPrice
	addUsage(c, inputCost, outputCost, chatCompletionResponse.Usage.InputTokens, chatCompletionResponse.Usage.OutputTokens, model)

	if len(chatCompletionResponse.Content) == 0 {
		return "JADE internal error: 01-03-0007. Please contact the support."
	}
	return chatCompletionResponse.Content[0].Text
}
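
// Example (illustrative only): wiring RequestAnthropic into a Fiber route.
// The route path and the payload shape are assumptions, not part of this
// package.
//
//	app.Post("/chat/anthropic", func(c *fiber.Ctx) error {
//		var payload struct {
//			LLM      LLM       `json:"llm"`
//			Messages []Message `json:"messages"`
//		}
//		if err := c.BodyParser(&payload); err != nil {
//			return c.Status(fiber.StatusBadRequest).SendString("invalid request body")
//		}
//		return c.SendString(RequestAnthropic(c, payload.LLM, payload.Messages))
//	})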