package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"io"
	"net/http"
	"strings"

	"github.com/edgedb/edgedb-go"
	"github.com/gofiber/fiber/v2"
)

type PerplexityChatCompletionRequest struct {
	Model       string           `json:"model"`
	Messages    []RequestMessage `json:"messages"`
	Temperature float64          `json:"temperature"`
}

type PerplexityChatCompletionResponse struct {
	ID      string             `json:"id"`
	Object  string             `json:"object"`
	Created int64              `json:"created"`
	Model   string             `json:"model"`
	Usage   PerplexityUsage    `json:"usage"`
	Choices []PerplexityChoice `json:"choices"`
}

type PerplexityUsage struct {
	PromptTokens     int32 `json:"prompt_tokens"`
	CompletionTokens int32 `json:"completion_tokens"`
	TotalTokens      int32 `json:"total_tokens"`
}

type PerplexityChoice struct {
	Message      Message `json:"message"`
	FinishReason string  `json:"finish_reason"`
	Index        int     `json:"index"`
}

func addPerplexityMessage(c *fiber.Ctx, llm LLM, selected bool) edgedb.UUID {
	Messages := getAllSelectedMessages(c)

	chatCompletion, err := RequestPerplexity(c, llm.Model.ModelID, Messages, float64(llm.Temperature), llm.Context)
	if err != nil {
		fmt.Println("Error requesting Perplexity: ", err)
		id := insertBotMessage(c, "Error requesting Perplexity, model may not be available anymore. Better error message in development.", selected, llm.ID)
		return id
	} else if len(chatCompletion.Choices) == 0 {
		fmt.Println("No response from Perplexity")
		id := insertBotMessage(c, "No response from Perplexity", selected, llm.ID)
		return id
	} else {
		Content := chatCompletion.Choices[0].Message.Content
		id := insertBotMessage(c, Content, selected, llm.ID)
		return id
	}
}

func TestPerplexityKey(apiKey string) bool {
	url := "https://api.perplexity.ai/chat/completions"

	// Minimal test message in the OpenAI-style chat format
	perplexityMessages := []RequestMessage{
		{
			Role:    "user",
			Content: "Hello",
		},
	}

	requestBody := PerplexityChatCompletionRequest{
		Model:       "llama-3-8b-instruct",
		Messages:    perplexityMessages,
		Temperature: 0,
	}

	jsonBody, err := json.Marshal(requestBody)
	if err != nil {
		return false
	}

	req, err := http.NewRequest("POST", url, bytes.NewBuffer(jsonBody))
	if err != nil {
		return false
	}
	req.Header.Set("Content-Type", "application/json")
	req.Header.Set("Authorization", "Bearer "+apiKey)

	client := &http.Client{}
	resp, err := client.Do(req)
	if err != nil {
		return false
	}
	defer resp.Body.Close()

	body, err := io.ReadAll(resp.Body)
	if err != nil {
		return false
	}

	var chatCompletionResponse PerplexityChatCompletionResponse
	err = json.Unmarshal(body, &chatCompletionResponse)
	if err != nil {
		return false
	}

	if chatCompletionResponse.Usage.CompletionTokens == 0 {
		return false
	}

	return true
}

func RequestPerplexity(c *fiber.Ctx, model string, messages []Message, temperature float64, context string) (PerplexityChatCompletionResponse, error) {
	var apiKey string
	err := edgeGlobalClient.WithGlobals(map[string]interface{}{
		"ext::auth::client_token": c.Cookies("jade-edgedb-auth-token"),
	}).QuerySingle(edgeCtx, `
		with filtered_keys := (
			select Key { key }
			filter .company.name = $0 AND .$0
		LIMIT 1
	`, &usedModelInfo, model)
	if err != nil {
		return PerplexityChatCompletionResponse{}, fmt.Errorf("error getting model info: %w", err)
	}

	var inputCost float32 = float32(chatCompletionResponse.Usage.PromptTokens) * usedModelInfo.InputPrice
	var outputCost float32 = float32(chatCompletionResponse.Usage.CompletionTokens) * usedModelInfo.OutputPrice

	// Online models (names ending in "-online") incur a small extra cost.
	if strings.HasSuffix(model, "-online") {
		inputCost += 0.005
		outputCost += 0.005
	}

	addUsage(c, inputCost, outputCost,
		chatCompletionResponse.Usage.PromptTokens, chatCompletionResponse.Usage.CompletionTokens, model)

	return chatCompletionResponse, nil
}
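
// requestPerplexityCompletion is a minimal, illustrative sketch of the
// request/response round trip against the Perplexity API, following the same
// pattern already used in TestPerplexityKey above: marshal a
// PerplexityChatCompletionRequest, POST it with a bearer token, and unmarshal
// the PerplexityChatCompletionResponse. The helper name and signature are
// assumptions for illustration only and are not referenced by the handlers above.
func requestPerplexityCompletion(apiKey, model string, messages []RequestMessage, temperature float64) (PerplexityChatCompletionResponse, error) {
	// Build the chat completion request body.
	requestBody := PerplexityChatCompletionRequest{
		Model:       model,
		Messages:    messages,
		Temperature: temperature,
	}

	jsonBody, err := json.Marshal(requestBody)
	if err != nil {
		return PerplexityChatCompletionResponse{}, err
	}

	// POST to the same endpoint used by TestPerplexityKey, authenticated with the API key.
	req, err := http.NewRequest("POST", "https://api.perplexity.ai/chat/completions", bytes.NewBuffer(jsonBody))
	if err != nil {
		return PerplexityChatCompletionResponse{}, err
	}
	req.Header.Set("Content-Type", "application/json")
	req.Header.Set("Authorization", "Bearer "+apiKey)

	resp, err := (&http.Client{}).Do(req)
	if err != nil {
		return PerplexityChatCompletionResponse{}, err
	}
	defer resp.Body.Close()

	body, err := io.ReadAll(resp.Body)
	if err != nil {
		return PerplexityChatCompletionResponse{}, err
	}

	// Decode the response into the shared response struct.
	var chatCompletionResponse PerplexityChatCompletionResponse
	if err := json.Unmarshal(body, &chatCompletionResponse); err != nil {
		return PerplexityChatCompletionResponse{}, err
	}
	return chatCompletionResponse, nil
}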