Change how messages are requested to make it simpler

This commit is contained in:
Adrien Bouvais 2024-08-07 15:07:41 +02:00
parent 8a962f510e
commit 3e8be1400b
18 changed files with 444 additions and 860 deletions

View File

@ -195,7 +195,7 @@ func insertUserMessage(c *fiber.Ctx, content string) edgedb.UUID {
return inserted.id return inserted.id
} }
func insertBotMessage(c *fiber.Ctx, content string, selected bool, llmUUID edgedb.UUID) edgedb.UUID { func insertBotMessage(c *fiber.Ctx, content string, llmUUID edgedb.UUID) edgedb.UUID {
var lastArea Area var lastArea Area
err := edgeGlobalClient.WithGlobals(map[string]interface{}{"ext::auth::client_token": c.Cookies("jade-edgedb-auth-token")}).QuerySingle(edgeCtx, ` err := edgeGlobalClient.WithGlobals(map[string]interface{}{"ext::auth::client_token": c.Cookies("jade-edgedb-auth-token")}).QuerySingle(edgeCtx, `
SELECT Area { SELECT Area {
@ -215,15 +215,15 @@ func insertBotMessage(c *fiber.Ctx, content string, selected bool, llmUUID edged
INSERT Message { INSERT Message {
role := <str>$0, role := <str>$0,
content := <str>$2, content := <str>$2,
selected := <bool>$3, selected := false,
conversation := ( conversation := (
SELECT Area SELECT Area
FILTER .id = <uuid>$4 FILTER .id = <uuid>$3
LIMIT 1 LIMIT 1
).conversation, ).conversation,
area := ( area := (
SELECT Area SELECT Area
FILTER .id = <uuid>$4 FILTER .id = <uuid>$3
LIMIT 1 LIMIT 1
), ),
llm := ( llm := (
@ -232,7 +232,7 @@ func insertBotMessage(c *fiber.Ctx, content string, selected bool, llmUUID edged
LIMIT 1 LIMIT 1
) )
} }
`, &inserted, "bot", llmUUID, content, selected, lastArea.ID) `, &inserted, "bot", llmUUID, content, lastArea.ID)
if err != nil { if err != nil {
fmt.Println("Error inserting bot message") fmt.Println("Error inserting bot message")
panic(err) panic(err)

33
JadeInternalError.md Normal file
View File

@ -0,0 +1,33 @@
# Request errors
The format of a request error is: aa-bb-cccc,
where aa is the provider number, bb the error type, and cccc the error code.
## Provider number
00: OpenAI
01: Anthropic
02: Mistral
03: Google
04: Groq
05: Nim
06: Perplexity
07: TogetherAI
08: DeepSeek
09: Firework
10: Custom
99: JADE
## Error type
00: Database error
01: Golang error
02: HTTP error
03: Missing status code
## Error code
0000: Can't find API key in database
0001: Error using `jsonBody, err := json.Marshal(requestBody)` in RequestProvider function
0002: Error using `req, err := http.NewRequest("POST", url, bytes.NewBuffer(jsonBody))` in RequestProvider function
0003: Error using `resp, err := client.Do(req)` in RequestProvider function
0004: Error using `body, err := io.ReadAll(resp.Body)` in RequestProvider function
0005: Error using `err = json.Unmarshal(body, &chatCompletionResponse)` in RequestProvider function
0006: Can't find modelInfo in database
0007: No choice returned at the end of the request, most likely because an error case is not being handled.

View File

@ -117,6 +117,8 @@ func GenerateMultipleMessagesHandler(c *fiber.Ctx) error {
authCookie := c.Cookies("jade-edgedb-auth-token") authCookie := c.Cookies("jade-edgedb-auth-token")
var messages []Message = getAllSelectedMessages(c)
// Create a wait group to synchronize the goroutines // Create a wait group to synchronize the goroutines
var wg sync.WaitGroup var wg sync.WaitGroup
@ -136,38 +138,34 @@ func GenerateMultipleMessagesHandler(c *fiber.Ctx) error {
defer cancel() // Ensure the context is cancelled to free resources defer cancel() // Ensure the context is cancelled to free resources
// Determine which message function to call based on the model // Determine which message function to call based on the model
var addMessageFunc func(c *fiber.Ctx, selectedLLM LLM, selected bool) edgedb.UUID var addMessageFunc func(c *fiber.Ctx, llm LLM, messages []Message) string
switch selectedLLMs[idx].Model.Company.Name { switch selectedLLMs[idx].Model.Company.Name {
case "openai": case "openai":
addMessageFunc = addOpenaiMessage addMessageFunc = RequestOpenai
case "anthropic": case "anthropic":
addMessageFunc = addAnthropicMessage addMessageFunc = RequestAnthropic
case "mistral": case "mistral":
addMessageFunc = addMistralMessage addMessageFunc = RequestMistral
case "groq": case "groq":
addMessageFunc = addGroqMessage addMessageFunc = RequestGroq
case "gooseai":
addMessageFunc = addGooseaiMessage
case "huggingface": case "huggingface":
addMessageFunc = addHuggingfaceMessage addMessageFunc = RequestHuggingface
case "google": case "google":
addMessageFunc = addGoogleMessage addMessageFunc = RequestGoogle
case "perplexity": case "perplexity":
addMessageFunc = addPerplexityMessage addMessageFunc = RequestPerplexity
case "fireworks": case "fireworks":
addMessageFunc = addFireworkMessage addMessageFunc = RequestFirework
case "nim": case "nim":
addMessageFunc = addNimMessage addMessageFunc = RequestNim
case "together": case "together":
addMessageFunc = addTogetherMessage addMessageFunc = RequestTogether
case "deepseek": case "deepseek":
addMessageFunc = addDeepseekMessage addMessageFunc = RequestDeepseek
} }
var messageID edgedb.UUID var content string = addMessageFunc(c, selectedLLMs[idx], messages)
if addMessageFunc != nil { var messageUUID edgedb.UUID = insertBotMessage(c, content, selectedLLMs[idx].ID)
messageID = addMessageFunc(c, selectedLLMs[idx], false)
}
var message Message var message Message
err := edgeGlobalClient.WithGlobals(map[string]interface{}{"ext::auth::client_token": c.Cookies("jade-edgedb-auth-token")}).QuerySingle(edgeCtx, ` err := edgeGlobalClient.WithGlobals(map[string]interface{}{"ext::auth::client_token": c.Cookies("jade-edgedb-auth-token")}).QuerySingle(edgeCtx, `
@ -189,7 +187,7 @@ func GenerateMultipleMessagesHandler(c *fiber.Ctx) error {
} }
} }
FILTER .id = <uuid>$0; FILTER .id = <uuid>$0;
`, &message, messageID) `, &message, messageUUID)
if err != nil { if err != nil {
fmt.Println("Error getting message for the placeholder. The function addProviderMessage seem to not return any message ID.") fmt.Println("Error getting message for the placeholder. The function addProviderMessage seem to not return any message ID.")
panic(err) panic(err)
@ -201,6 +199,12 @@ func GenerateMultipleMessagesHandler(c *fiber.Ctx) error {
default: default:
select { select {
case firstDone <- idx: case firstDone <- idx:
_ = edgeGlobalClient.WithGlobals(map[string]interface{}{"ext::auth::client_token": authCookie}).Execute(edgeCtx, `
UPDATE Message
FILTER .id = <uuid>$0
SET {selected := true};
`, messageUUID)
outIcon := `<img src="` + selectedLLMs[idx].Model.Company.Icon + `" alt="User Image" id="selectedIcon-` + fmt.Sprintf("%d", message.Area.Position) + `">` outIcon := `<img src="` + selectedLLMs[idx].Model.Company.Icon + `" alt="User Image" id="selectedIcon-` + fmt.Sprintf("%d", message.Area.Position) + `">`
sendEvent( sendEvent(

View File

@ -3,11 +3,10 @@ package main
import ( import (
"bytes" "bytes"
"encoding/json" "encoding/json"
"fmt"
"io" "io"
"net/http" "net/http"
"strings"
"github.com/edgedb/edgedb-go"
"github.com/gofiber/fiber/v2" "github.com/gofiber/fiber/v2"
) )
@ -37,22 +36,17 @@ type AnthropicUsage struct {
OutputTokens int32 `json:"output_tokens"` OutputTokens int32 `json:"output_tokens"`
} }
func addAnthropicMessage(c *fiber.Ctx, llm LLM, selected bool) edgedb.UUID { var AnthropicErrorCodes map[string]string
Messages := getAllSelectedMessages(c)
chatCompletion, err := RequestAnthropic(c, llm.Model.ModelID, Messages, float64(llm.Temperature), llm.Context, int(llm.MaxToken)) // TODO Update
if err != nil { func init() {
fmt.Println("Error requesting Anthropic: ", err) AnthropicErrorCodes = make(map[string]string)
id := insertBotMessage(c, "Error requesting Anthropic, model may not be available anymore. Better error message in development.", selected, llm.ID) AnthropicErrorCodes["401"] = "Invalid Authentication - Ensure that the API key is still valid."
return id AnthropicErrorCodes["403"] = "Accessing the API from an unsupported country, region, or territory."
} else if len(chatCompletion.Content) == 0 { AnthropicErrorCodes["429"] = "Rate limit reached for requests - You are sending requests too quickly."
fmt.Println("No response from Anthropic") AnthropicErrorCodes["429"] = "You have run out of credits or hit your maximum monthly spend - Buy more credits or learn how to increase your limits."
id := insertBotMessage(c, "No response from Anthropic", selected, llm.ID) AnthropicErrorCodes["500"] = "Issue on OpenAI servers - Retry your request after a brief wait and contact OpenAI if the issue persists. Check the status page https://status.openai.com/."
return id AnthropicErrorCodes["503"] = "OpenAI servers are experiencing high traffic - Please retry your requests after a brief wait."
} else {
id := insertBotMessage(c, chatCompletion.Content[0].Text, selected, llm.ID)
return id
}
} }
func TestAnthropicKey(apiKey string) bool { func TestAnthropicKey(apiKey string) bool {
@ -110,13 +104,16 @@ func TestAnthropicKey(apiKey string) bool {
return true return true
} }
func RequestAnthropic(c *fiber.Ctx, model string, messages []Message, temperature float64, context string, maxTokens int) (AnthropicChatCompletionResponse, error) { func RequestAnthropic(c *fiber.Ctx, llm LLM, messages []Message) string {
model := llm.Model.ModelID
temperature := float64(llm.Temperature)
context := llm.Context
maxTokens := int(llm.MaxToken)
if maxTokens == 0 { if maxTokens == 0 {
maxTokens = 4096 maxTokens = 4096
} }
fmt.Println("Requesting anthropic using max token:", maxTokens)
var apiKey struct { var apiKey struct {
Key string `edgedb:"key"` Key string `edgedb:"key"`
} }
@ -128,7 +125,7 @@ func RequestAnthropic(c *fiber.Ctx, model string, messages []Message, temperatur
LIMIT 1 LIMIT 1
`, &apiKey, "anthropic") `, &apiKey, "anthropic")
if err != nil { if err != nil {
return AnthropicChatCompletionResponse{}, fmt.Errorf("error getting Anthropic API key: %w", err) return "JADE internal error: 01-00-0000. Please contact the support."
} }
url := "https://api.anthropic.com/v1/messages" url := "https://api.anthropic.com/v1/messages"
@ -143,12 +140,13 @@ func RequestAnthropic(c *fiber.Ctx, model string, messages []Message, temperatur
jsonBody, err := json.Marshal(requestBody) jsonBody, err := json.Marshal(requestBody)
if err != nil { if err != nil {
return AnthropicChatCompletionResponse{}, fmt.Errorf("error marshaling JSON: %w", err) return "JADE internal error: 01-01-0001. Please contact the support."
} }
req, err := http.NewRequest("POST", url, bytes.NewBuffer(jsonBody)) req, err := http.NewRequest("POST", url, bytes.NewBuffer(jsonBody))
if err != nil { if err != nil {
return AnthropicChatCompletionResponse{}, fmt.Errorf("error creating request: %w", err) return "JADE internal error: 01-02-0002. Please contact the support."
} }
req.Header.Set("x-api-key", apiKey.Key) req.Header.Set("x-api-key", apiKey.Key)
@ -158,19 +156,25 @@ func RequestAnthropic(c *fiber.Ctx, model string, messages []Message, temperatur
client := &http.Client{} client := &http.Client{}
resp, err := client.Do(req) resp, err := client.Do(req)
if err != nil { if err != nil {
return AnthropicChatCompletionResponse{}, fmt.Errorf("error sending request: %w", err) return "JADE internal error: 01-02-0003. Please contact the support."
} }
defer resp.Body.Close() defer resp.Body.Close()
body, err := io.ReadAll(resp.Body) body, err := io.ReadAll(resp.Body)
if err != nil { if err != nil {
return AnthropicChatCompletionResponse{}, fmt.Errorf("error reading response body: %w", err) return "JADE internal error: 01-01-0004. Please contact the support."
}
for key, value := range AnthropicErrorCodes {
if strings.Contains(resp.Status, key) {
return value
}
} }
var chatCompletionResponse AnthropicChatCompletionResponse var chatCompletionResponse AnthropicChatCompletionResponse
err = json.Unmarshal(body, &chatCompletionResponse) err = json.Unmarshal(body, &chatCompletionResponse)
if err != nil { if err != nil {
return AnthropicChatCompletionResponse{}, fmt.Errorf("error unmarshaling JSON: %w", err) return "JADE internal error: 01-01-0005. Please contact the support."
} }
var usedModelInfo ModelInfo var usedModelInfo ModelInfo
@ -183,12 +187,16 @@ func RequestAnthropic(c *fiber.Ctx, model string, messages []Message, temperatur
LIMIT 1 LIMIT 1
`, &usedModelInfo, model) `, &usedModelInfo, model)
if err != nil { if err != nil {
return AnthropicChatCompletionResponse{}, fmt.Errorf("error getting model info: %w", err) return "JADE internal error: 01-00-0006. Please contact the support."
} }
var inputCost float32 = float32(chatCompletionResponse.Usage.InputTokens) * usedModelInfo.InputPrice var inputCost float32 = float32(chatCompletionResponse.Usage.InputTokens) * usedModelInfo.InputPrice
var outputCost float32 = float32(chatCompletionResponse.Usage.OutputTokens) * usedModelInfo.OutputPrice var outputCost float32 = float32(chatCompletionResponse.Usage.OutputTokens) * usedModelInfo.OutputPrice
addUsage(c, inputCost, outputCost, chatCompletionResponse.Usage.InputTokens, chatCompletionResponse.Usage.OutputTokens, model) addUsage(c, inputCost, outputCost, chatCompletionResponse.Usage.InputTokens, chatCompletionResponse.Usage.OutputTokens, model)
return chatCompletionResponse, nil if len(chatCompletionResponse.Content) == 0 {
return "JADE internal error: 01-03-0007. Please contact the support."
}
return chatCompletionResponse.Content[0].Text
} }

66
RequestCustomEndpoint.go Normal file
View File

@ -0,0 +1,66 @@
package main
import (
"bytes"
"encoding/json"
"io"
"net/http"
"strings"
"github.com/gofiber/fiber/v2"
)
// RequestHuggingface sends the selected conversation messages to a
// user-configured custom endpoint (OpenAI/TGI-compatible chat completion API)
// and returns the assistant's reply text.
//
// On failure it returns a user-facing "JADE internal error: 10-bb-cccc"
// string (provider 10 = Custom, per JadeInternalError.md) instead of an
// error value, matching the other Request* provider functions.
func RequestHuggingface(c *fiber.Ctx, llm LLM, messages []Message) string {
	url := llm.Endpoint.Endpoint
	temperature := float64(llm.Temperature)
	context := llm.Context
	maxTokens := int(llm.MaxToken)

	requestBody := OpenaiChatCompletionRequest{
		// TGI-style endpoints ignore the model field; "tgi" is the
		// conventional placeholder value.
		Model:       "tgi",
		Messages:    Message2RequestMessage(messages, context),
		Temperature: temperature,
		MaxTokens:   maxTokens,
	}

	jsonBody, err := json.Marshal(requestBody)
	if err != nil {
		return "JADE internal error: 10-01-0001. Please contact the support."
	}

	req, err := http.NewRequest("POST", url, bytes.NewBuffer(jsonBody))
	if err != nil {
		return "JADE internal error: 10-02-0002. Please contact the support."
	}
	req.Header.Set("Authorization", "Bearer "+llm.Endpoint.Key)
	req.Header.Set("Content-Type", "application/json")

	client := &http.Client{}
	resp, err := client.Do(req)
	if err != nil {
		return "JADE internal error: 10-02-0003. Please contact the support."
	}
	defer resp.Body.Close()

	body, err := io.ReadAll(resp.Body)
	if err != nil {
		return "JADE internal error: 10-01-0004. Please contact the support."
	}

	// Map known HTTP error statuses to their user-facing explanations.
	// The custom endpoint speaks the OpenAI protocol, so reuse its table.
	for key, value := range OpenaiErrorCodes {
		if strings.Contains(resp.Status, key) {
			return value
		}
	}

	var chatCompletionResponse OpenaiChatCompletionResponse
	err = json.Unmarshal(body, &chatCompletionResponse)
	if err != nil {
		return "JADE internal error: 10-01-0005. Please contact the support."
	}

	// Custom endpoints are self-hosted, so no cost is tracked; record zeros
	// so the request is still logged against the model.
	addUsage(c, 0, 0, 0, 0, llm.Model.ModelID)

	// BUG FIX: guard against an empty Choices slice. The original indexed
	// Choices[0] unconditionally, panicking with index-out-of-range when the
	// endpoint returned no completion. Every sibling Request* function
	// returns the *-03-0007 error string in this case.
	if len(chatCompletionResponse.Choices) == 0 {
		return "JADE internal error: 10-03-0007. Please contact the support."
	}
	return chatCompletionResponse.Choices[0].Message.Content
}

View File

@ -3,61 +3,13 @@ package main
import ( import (
"bytes" "bytes"
"encoding/json" "encoding/json"
"fmt"
"io" "io"
"net/http" "net/http"
"strings"
"github.com/edgedb/edgedb-go"
"github.com/gofiber/fiber/v2" "github.com/gofiber/fiber/v2"
) )
type DeepseekChatCompletionRequest struct {
Model string `json:"model"`
Messages []RequestMessage `json:"messages"`
MaxTokens int `json:"max_tokens"`
Temperature float64 `json:"temperature"`
}
type DeepseekChatCompletionResponse struct {
ID string `json:"id"`
Object string `json:"object"`
Created int64 `json:"created"`
Model string `json:"model"`
Usage DeepseekUsage `json:"usage"`
Choices []DeepseekChoice `json:"choices"`
}
type DeepseekUsage struct {
PromptTokens int32 `json:"prompt_tokens"`
CompletionTokens int32 `json:"completion_tokens"`
TotalTokens int32 `json:"total_tokens"`
}
type DeepseekChoice struct {
Message Message `json:"message"`
FinishReason string `json:"finish_reason"`
Index int `json:"index"`
}
func addDeepseekMessage(c *fiber.Ctx, llm LLM, selected bool) edgedb.UUID {
Messages := getAllSelectedMessages(c)
chatCompletion, err := RequestDeepseek(c, llm.Model.ModelID, Messages, float64(llm.Temperature), llm.Context, int(llm.MaxToken))
if err != nil {
fmt.Println("Error requesting Deepseek: ", err)
id := insertBotMessage(c, "Error requesting DeepSeek, model may not be available anymore. Better error message in development.", selected, llm.ID)
return id
} else if len(chatCompletion.Choices) == 0 {
fmt.Println("No response from DeepSeek")
id := insertBotMessage(c, "No response from DeepSeek", selected, llm.ID)
return id
} else {
Content := chatCompletion.Choices[0].Message.Content
id := insertBotMessage(c, Content, selected, llm.ID)
return id
}
}
func TestDeepseekKey(apiKey string) bool { func TestDeepseekKey(apiKey string) bool {
url := "https://api.deepseek.com/chat/completions" url := "https://api.deepseek.com/chat/completions"
@ -69,7 +21,7 @@ func TestDeepseekKey(apiKey string) bool {
}, },
} }
requestBody := DeepseekChatCompletionRequest{ requestBody := OpenaiChatCompletionRequest{
Model: "deepseek-chat", Model: "deepseek-chat",
Messages: deepseekMessages, Messages: deepseekMessages,
Temperature: 0, Temperature: 0,
@ -78,13 +30,11 @@ func TestDeepseekKey(apiKey string) bool {
jsonBody, err := json.Marshal(requestBody) jsonBody, err := json.Marshal(requestBody)
if err != nil { if err != nil {
fmt.Println("Failed to test Deepseek API key - json.Marshal :", err)
return false return false
} }
req, err := http.NewRequest("POST", url, bytes.NewBuffer(jsonBody)) req, err := http.NewRequest("POST", url, bytes.NewBuffer(jsonBody))
if err != nil { if err != nil {
fmt.Println("Failed to test Deepseek API key - http.NewRequest :", err)
return false return false
} }
@ -94,35 +44,36 @@ func TestDeepseekKey(apiKey string) bool {
client := &http.Client{} client := &http.Client{}
resp, err := client.Do(req) resp, err := client.Do(req)
if err != nil { if err != nil {
fmt.Println("Failed to test Deepseek API key - client.Do :", err)
return false return false
} }
defer resp.Body.Close() defer resp.Body.Close()
fmt.Println(resp.Status)
body, err := io.ReadAll(resp.Body) body, err := io.ReadAll(resp.Body)
if err != nil { if err != nil {
fmt.Println("Failed to test Deepseek API key - io.ReadAll :", err)
return false return false
} }
var chatCompletionResponse DeepseekChatCompletionResponse var chatCompletionResponse OpenaiChatCompletionResponse
err = json.Unmarshal(body, &chatCompletionResponse) err = json.Unmarshal(body, &chatCompletionResponse)
if err != nil { if err != nil {
fmt.Println("Failed to test Deepseek API key - json.Marshal :", err)
return false return false
} }
if chatCompletionResponse.Usage.CompletionTokens == 0 { if chatCompletionResponse.Usage.CompletionTokens == 0 {
fmt.Println("Failed to test Deepseek API key - No completion tokens :", err)
return false return false
} }
return true return true
} }
func RequestDeepseek(c *fiber.Ctx, model string, messages []Message, temperature float64, context string, maxTokens int) (DeepseekChatCompletionResponse, error) { func RequestDeepseek(c *fiber.Ctx, llm LLM, messages []Message) string {
model := llm.Model.ModelID
temperature := float64(llm.Temperature)
context := llm.Context
maxTokens := int(llm.MaxToken)
url := "https://api.deepseek.com/chat/completions"
var apiKey string var apiKey string
err := edgeGlobalClient.WithGlobals(map[string]interface{}{"ext::auth::client_token": c.Cookies("jade-edgedb-auth-token")}).QuerySingle(edgeCtx, ` err := edgeGlobalClient.WithGlobals(map[string]interface{}{"ext::auth::client_token": c.Cookies("jade-edgedb-auth-token")}).QuerySingle(edgeCtx, `
with with
@ -134,12 +85,10 @@ func RequestDeepseek(c *fiber.Ctx, model string, messages []Message, temperature
select filtered_keys.key limit 1 select filtered_keys.key limit 1
`, &apiKey, "deepseek") `, &apiKey, "deepseek")
if err != nil { if err != nil {
return DeepseekChatCompletionResponse{}, fmt.Errorf("error getting DeepSeek API key: %w", err) return "JADE internal error: 08-00-0000. Please contact the support."
} }
url := "https://api.deepseek.com/chat/completions" requestBody := OpenaiChatCompletionRequest{
requestBody := DeepseekChatCompletionRequest{
Model: model, Model: model,
Messages: Message2RequestMessage(messages, context), Messages: Message2RequestMessage(messages, context),
MaxTokens: maxTokens, MaxTokens: maxTokens,
@ -148,12 +97,12 @@ func RequestDeepseek(c *fiber.Ctx, model string, messages []Message, temperature
jsonBody, err := json.Marshal(requestBody) jsonBody, err := json.Marshal(requestBody)
if err != nil { if err != nil {
return DeepseekChatCompletionResponse{}, fmt.Errorf("error marshaling JSON: %w", err) return "JADE internal error: 08-01-0001. Please contact the support."
} }
req, err := http.NewRequest("POST", url, bytes.NewBuffer(jsonBody)) req, err := http.NewRequest("POST", url, bytes.NewBuffer(jsonBody))
if err != nil { if err != nil {
return DeepseekChatCompletionResponse{}, fmt.Errorf("error creating request: %w", err) return "JADE internal error: 08-02-0002. Please contact the support."
} }
req.Header.Set("Content-Type", "application/json") req.Header.Set("Content-Type", "application/json")
@ -162,24 +111,25 @@ func RequestDeepseek(c *fiber.Ctx, model string, messages []Message, temperature
client := &http.Client{} client := &http.Client{}
resp, err := client.Do(req) resp, err := client.Do(req)
if err != nil { if err != nil {
return DeepseekChatCompletionResponse{}, fmt.Errorf("error sending request: %w", err) return "JADE internal error: 08-02-0003. Please contact the support."
} }
defer resp.Body.Close() defer resp.Body.Close()
// TODO: Add a message to the user and do it for all 400 things
if resp.Status == "402 Payment Required" {
return DeepseekChatCompletionResponse{}, fmt.Errorf("error reading response body: %w", err)
}
body, err := io.ReadAll(resp.Body) body, err := io.ReadAll(resp.Body)
if err != nil { if err != nil {
return DeepseekChatCompletionResponse{}, fmt.Errorf("error reading response body: %w", err) return "JADE internal error: 08-01-0004. Please contact the support."
} }
var chatCompletionResponse DeepseekChatCompletionResponse for key, value := range OpenaiErrorCodes {
if strings.Contains(resp.Status, key) {
return value
}
}
var chatCompletionResponse OpenaiChatCompletionResponse
err = json.Unmarshal(body, &chatCompletionResponse) err = json.Unmarshal(body, &chatCompletionResponse)
if err != nil { if err != nil {
return DeepseekChatCompletionResponse{}, fmt.Errorf("error unmarshaling JSON: %w", err) return "JADE internal error: 08-01-0005. Please contact the support."
} }
var usedModelInfo ModelInfo var usedModelInfo ModelInfo
@ -192,12 +142,16 @@ func RequestDeepseek(c *fiber.Ctx, model string, messages []Message, temperature
LIMIT 1 LIMIT 1
`, &usedModelInfo, model) `, &usedModelInfo, model)
if err != nil { if err != nil {
return DeepseekChatCompletionResponse{}, fmt.Errorf("error getting model info: %w", err) return "JADE internal error: 08-00-0006. Please contact the support."
} }
var inputCost float32 = float32(chatCompletionResponse.Usage.PromptTokens) * usedModelInfo.InputPrice var inputCost float32 = float32(chatCompletionResponse.Usage.PromptTokens) * usedModelInfo.InputPrice
var outputCost float32 = float32(chatCompletionResponse.Usage.CompletionTokens) * usedModelInfo.OutputPrice var outputCost float32 = float32(chatCompletionResponse.Usage.CompletionTokens) * usedModelInfo.OutputPrice
addUsage(c, inputCost, outputCost, chatCompletionResponse.Usage.PromptTokens, chatCompletionResponse.Usage.CompletionTokens, model) addUsage(c, inputCost, outputCost, chatCompletionResponse.Usage.PromptTokens, chatCompletionResponse.Usage.CompletionTokens, model)
return chatCompletionResponse, nil if len(chatCompletionResponse.Choices) == 0 {
return "JADE internal error: 08-03-0007. Please contact the support."
}
return chatCompletionResponse.Choices[0].Message.Content
} }

View File

@ -3,61 +3,13 @@ package main
import ( import (
"bytes" "bytes"
"encoding/json" "encoding/json"
"fmt"
"io" "io"
"net/http" "net/http"
"strings"
"github.com/edgedb/edgedb-go"
"github.com/gofiber/fiber/v2" "github.com/gofiber/fiber/v2"
) )
type FireworkChatCompletionRequest struct {
Model string `json:"model"`
Messages []RequestMessage `json:"messages"`
MaxTokens int `json:"max_tokens"`
Temperature float64 `json:"temperature"`
}
type FireworkChatCompletionResponse struct {
ID string `json:"id"`
Object string `json:"object"`
Created int64 `json:"created"`
Model string `json:"model"`
Usage FireworkUsage `json:"usage"`
Choices []FireworkChoice `json:"choices"`
}
type FireworkUsage struct {
PromptTokens int32 `json:"prompt_tokens"`
CompletionTokens int32 `json:"completion_tokens"`
TotalTokens int32 `json:"total_tokens"`
}
type FireworkChoice struct {
Message Message `json:"message"`
FinishReason string `json:"finish_reason"`
Index int `json:"index"`
}
func addFireworkMessage(c *fiber.Ctx, llm LLM, selected bool) edgedb.UUID {
Messages := getAllSelectedMessages(c)
chatCompletion, err := RequestFirework(c, llm.Model.ModelID, Messages, float64(llm.Temperature), llm.Context, int(llm.MaxToken))
if err != nil {
fmt.Println("Error requesting Firework: ", err)
id := insertBotMessage(c, "Error requesting Firework, model may not be available anymore. Better error message in development.", selected, llm.ID)
return id
} else if len(chatCompletion.Choices) == 0 {
fmt.Println("No response from Firework")
id := insertBotMessage(c, "No response from Firework", selected, llm.ID)
return id
} else {
Content := chatCompletion.Choices[0].Message.Content
id := insertBotMessage(c, Content, selected, llm.ID)
return id
}
}
func TestFireworkKey(apiKey string) bool { func TestFireworkKey(apiKey string) bool {
url := "https://api.fireworks.ai/inference/v1/chat/completions" url := "https://api.fireworks.ai/inference/v1/chat/completions"
@ -101,7 +53,7 @@ func TestFireworkKey(apiKey string) bool {
return false return false
} }
var chatCompletionResponse FireworkChatCompletionResponse var chatCompletionResponse OpenaiChatCompletionResponse
err = json.Unmarshal(body, &chatCompletionResponse) err = json.Unmarshal(body, &chatCompletionResponse)
if err != nil { if err != nil {
return false return false
@ -112,7 +64,12 @@ func TestFireworkKey(apiKey string) bool {
return true return true
} }
func RequestFirework(c *fiber.Ctx, model string, messages []Message, temperature float64, context string, maxTokens int) (FireworkChatCompletionResponse, error) { func RequestFirework(c *fiber.Ctx, llm LLM, messages []Message) string {
model := llm.Model.ModelID
temperature := float64(llm.Temperature)
context := llm.Context
maxTokens := int(llm.MaxToken)
var apiKey string var apiKey string
err := edgeGlobalClient.WithGlobals(map[string]interface{}{"ext::auth::client_token": c.Cookies("jade-edgedb-auth-token")}).QuerySingle(edgeCtx, ` err := edgeGlobalClient.WithGlobals(map[string]interface{}{"ext::auth::client_token": c.Cookies("jade-edgedb-auth-token")}).QuerySingle(edgeCtx, `
with with
@ -122,15 +79,15 @@ func RequestFirework(c *fiber.Ctx, model string, messages []Message, temperature
} filter .company.name = <str>$0 AND .<keys[is Setting].<setting[is User] = global currentUser } filter .company.name = <str>$0 AND .<keys[is Setting].<setting[is User] = global currentUser
) )
select filtered_keys.key limit 1 select filtered_keys.key limit 1
`, &apiKey, "fireworks") `, &apiKey, "firework")
if err != nil { if err != nil {
return FireworkChatCompletionResponse{}, fmt.Errorf("error getting Firework API key: %w", err) return "JADE internal error: 09-00-0000. Please contact the support."
} }
url := "https://api.fireworks.ai/inference/v1/chat/completions" url := "https://api.fireworks.ai/inference/v1/chat/completions"
requestBody := FireworkChatCompletionRequest{ requestBody := OpenaiChatCompletionRequest{
Model: "accounts/fireworks/models/" + model, Model: model,
Messages: Message2RequestMessage(messages, context), Messages: Message2RequestMessage(messages, context),
MaxTokens: maxTokens, MaxTokens: maxTokens,
Temperature: temperature, Temperature: temperature,
@ -138,12 +95,12 @@ func RequestFirework(c *fiber.Ctx, model string, messages []Message, temperature
jsonBody, err := json.Marshal(requestBody) jsonBody, err := json.Marshal(requestBody)
if err != nil { if err != nil {
return FireworkChatCompletionResponse{}, fmt.Errorf("error marshaling JSON: %w", err) return "JADE internal error: 09-01-0001. Please contact the support."
} }
req, err := http.NewRequest("POST", url, bytes.NewBuffer(jsonBody)) req, err := http.NewRequest("POST", url, bytes.NewBuffer(jsonBody))
if err != nil { if err != nil {
return FireworkChatCompletionResponse{}, fmt.Errorf("error creating request: %w", err) return "JADE internal error: 09-02-0002. Please contact the support."
} }
req.Header.Set("Content-Type", "application/json") req.Header.Set("Content-Type", "application/json")
@ -152,19 +109,25 @@ func RequestFirework(c *fiber.Ctx, model string, messages []Message, temperature
client := &http.Client{} client := &http.Client{}
resp, err := client.Do(req) resp, err := client.Do(req)
if err != nil { if err != nil {
return FireworkChatCompletionResponse{}, fmt.Errorf("error sending request: %w", err) return "JADE internal error: 09-02-0003. Please contact the support."
} }
defer resp.Body.Close() defer resp.Body.Close()
body, err := io.ReadAll(resp.Body) body, err := io.ReadAll(resp.Body)
if err != nil { if err != nil {
return FireworkChatCompletionResponse{}, fmt.Errorf("error reading response body: %w", err) return "JADE internal error: 09-01-0004. Please contact the support."
} }
var chatCompletionResponse FireworkChatCompletionResponse for key, value := range OpenaiErrorCodes {
if strings.Contains(resp.Status, key) {
return value
}
}
var chatCompletionResponse OpenaiChatCompletionResponse
err = json.Unmarshal(body, &chatCompletionResponse) err = json.Unmarshal(body, &chatCompletionResponse)
if err != nil { if err != nil {
return FireworkChatCompletionResponse{}, fmt.Errorf("error unmarshaling JSON: %w", err) return "JADE internal error: 09-01-0005. Please contact the support."
} }
var usedModelInfo ModelInfo var usedModelInfo ModelInfo
@ -177,18 +140,16 @@ func RequestFirework(c *fiber.Ctx, model string, messages []Message, temperature
LIMIT 1 LIMIT 1
`, &usedModelInfo, model) `, &usedModelInfo, model)
if err != nil { if err != nil {
return FireworkChatCompletionResponse{}, fmt.Errorf("error getting model info: %w", err) return "JADE internal error: 09-00-0006. Please contact the support."
}
if len(chatCompletionResponse.Choices) == 0 {
// Print the response as a JSON string
fmt.Println(string(body))
return FireworkChatCompletionResponse{}, fmt.Errorf("no response from Firework")
} }
var inputCost float32 = float32(chatCompletionResponse.Usage.PromptTokens) * usedModelInfo.InputPrice var inputCost float32 = float32(chatCompletionResponse.Usage.PromptTokens) * usedModelInfo.InputPrice
var outputCost float32 = float32(chatCompletionResponse.Usage.CompletionTokens) * usedModelInfo.OutputPrice var outputCost float32 = float32(chatCompletionResponse.Usage.CompletionTokens) * usedModelInfo.OutputPrice
addUsage(c, inputCost, outputCost, chatCompletionResponse.Usage.PromptTokens, chatCompletionResponse.Usage.CompletionTokens, model) addUsage(c, inputCost, outputCost, chatCompletionResponse.Usage.PromptTokens, chatCompletionResponse.Usage.CompletionTokens, model)
return chatCompletionResponse, nil if len(chatCompletionResponse.Choices) == 0 {
return "JADE internal error: 09-03-0007. Please contact the support."
}
return chatCompletionResponse.Choices[0].Message.Content
} }

View File

@ -5,11 +5,10 @@ package main
import ( import (
"bytes" "bytes"
"encoding/json" "encoding/json"
"fmt"
"io" "io"
"net/http" "net/http"
"strings"
"github.com/edgedb/edgedb-go"
"github.com/gofiber/fiber/v2" "github.com/gofiber/fiber/v2"
) )
@ -52,23 +51,17 @@ type GoogleUsageMetadata struct {
TotalTokenCount int32 `json:"totalTokenCount"` TotalTokenCount int32 `json:"totalTokenCount"`
} }
func addGoogleMessage(c *fiber.Ctx, llm LLM, selected bool) edgedb.UUID { var GoogleErrorCodes map[string]string
Messages := getAllSelectedMessages(c)
chatCompletion, err := RequestGoogle(c, llm.Model.ModelID, Messages, float64(llm.Temperature), llm.Context) // TODO: Update
if err != nil { func init() {
fmt.Println("Error requesting Google: ", err) GoogleErrorCodes = make(map[string]string)
id := insertBotMessage(c, "Error requesting Google.", selected, llm.ID) GoogleErrorCodes["401"] = "Invalid Authentication - Ensure that the API key is still valid."
return id GoogleErrorCodes["403"] = "Accessing the API from an unsupported country, region, or territory."
} else if len(chatCompletion.Candidates) == 0 { GoogleErrorCodes["429"] = "Rate limit reached for requests - You are sending requests too quickly."
fmt.Println("No response from Google") GoogleErrorCodes["429"] = "You have run out of credits or hit your maximum monthly spend - Buy more credits or learn how to increase your limits."
id := insertBotMessage(c, "No response from Google", selected, llm.ID) GoogleErrorCodes["500"] = "Issue on Provider servers - Retry your request after a brief wait and contact the provider if the issue persists."
return id GoogleErrorCodes["503"] = "Servers are experiencing high traffic - Please retry your requests after a brief wait."
} else {
Content := chatCompletion.Candidates[0].Content.Parts[0].Text
id := insertBotMessage(c, Content, selected, llm.ID)
return id
}
} }
func TestGoogleKey(apiKey string) bool { func TestGoogleKey(apiKey string) bool {
@ -91,13 +84,11 @@ func TestGoogleKey(apiKey string) bool {
jsonBody, err := json.Marshal(requestBody) jsonBody, err := json.Marshal(requestBody)
if err != nil { if err != nil {
fmt.Println("Error marshalling JSON: ", err)
return false return false
} }
req, err := http.NewRequest("POST", url, bytes.NewBuffer(jsonBody)) req, err := http.NewRequest("POST", url, bytes.NewBuffer(jsonBody))
if err != nil { if err != nil {
fmt.Println("Error creating request: ", err)
return false return false
} }
@ -106,33 +97,35 @@ func TestGoogleKey(apiKey string) bool {
client := &http.Client{} client := &http.Client{}
resp, err := client.Do(req) resp, err := client.Do(req)
if err != nil { if err != nil {
fmt.Println("Error sending request: ", err)
return false return false
} }
defer resp.Body.Close() defer resp.Body.Close()
body, err := io.ReadAll(resp.Body) body, err := io.ReadAll(resp.Body)
if err != nil { if err != nil {
fmt.Println("Error reading response body: ", err)
return false return false
} }
var chatCompletionResponse GoogleChatCompletionResponse var chatCompletionResponse GoogleChatCompletionResponse
err = json.Unmarshal(body, &chatCompletionResponse) err = json.Unmarshal(body, &chatCompletionResponse)
if err != nil { if err != nil {
fmt.Println("Error unmarshaling JSON: ", err)
return false return false
} }
if chatCompletionResponse.UsageMetadata.CandidatesTokenCount == 0 { if chatCompletionResponse.UsageMetadata.CandidatesTokenCount == 0 {
fmt.Println("No response from Google")
return false return false
} }
fmt.Println("Response from Google: ", chatCompletionResponse)
return true return true
} }
func RequestGoogle(c *fiber.Ctx, model string, messages []Message, temperature float64, context string) (GoogleChatCompletionResponse, error) { func RequestGoogle(c *fiber.Ctx, llm LLM, messages []Message) string {
model := llm.Model.ModelID
// TODO: Use those parameters
// temperature := float64(llm.Temperature)
// context := llm.Context
//maxTokens := int(llm.MaxToken)
var apiKey string var apiKey string
err := edgeGlobalClient.WithGlobals(map[string]interface{}{"ext::auth::client_token": c.Cookies("jade-edgedb-auth-token")}).QuerySingle(edgeCtx, ` err := edgeGlobalClient.WithGlobals(map[string]interface{}{"ext::auth::client_token": c.Cookies("jade-edgedb-auth-token")}).QuerySingle(edgeCtx, `
with with
@ -144,7 +137,7 @@ func RequestGoogle(c *fiber.Ctx, model string, messages []Message, temperature f
select filtered_keys.key limit 1 select filtered_keys.key limit 1
`, &apiKey, "google") `, &apiKey, "google")
if err != nil { if err != nil {
return GoogleChatCompletionResponse{}, fmt.Errorf("error getting Google API key: %w", err) return "JADE internal error: 03-00-0000. Please contact the support."
} }
url := "https://generativelanguage.googleapis.com/v1beta/models/" + model + ":generateContent?key=" + apiKey url := "https://generativelanguage.googleapis.com/v1beta/models/" + model + ":generateContent?key=" + apiKey
@ -171,12 +164,12 @@ func RequestGoogle(c *fiber.Ctx, model string, messages []Message, temperature f
jsonBody, err := json.Marshal(requestBody) jsonBody, err := json.Marshal(requestBody)
if err != nil { if err != nil {
return GoogleChatCompletionResponse{}, fmt.Errorf("error marshaling JSON: %w", err) return "JADE internal error: 03-01-0001. Please contact the support."
} }
req, err := http.NewRequest("POST", url, bytes.NewBuffer(jsonBody)) req, err := http.NewRequest("POST", url, bytes.NewBuffer(jsonBody))
if err != nil { if err != nil {
return GoogleChatCompletionResponse{}, fmt.Errorf("error creating request: %w", err) return "JADE internal error: 03-02-0002. Please contact the support."
} }
req.Header.Set("Content-Type", "application/json") req.Header.Set("Content-Type", "application/json")
@ -184,19 +177,25 @@ func RequestGoogle(c *fiber.Ctx, model string, messages []Message, temperature f
client := &http.Client{} client := &http.Client{}
resp, err := client.Do(req) resp, err := client.Do(req)
if err != nil { if err != nil {
return GoogleChatCompletionResponse{}, fmt.Errorf("error sending request: %w", err) return "JADE internal error: 03-02-0003. Please contact the support."
} }
defer resp.Body.Close() defer resp.Body.Close()
body, err := io.ReadAll(resp.Body) body, err := io.ReadAll(resp.Body)
if err != nil { if err != nil {
return GoogleChatCompletionResponse{}, fmt.Errorf("error reading response body: %w", err) return "JADE internal error: 03-01-0004. Please contact the support."
}
for key, value := range GoogleErrorCodes {
if strings.Contains(resp.Status, key) {
return value
}
} }
var chatCompletionResponse GoogleChatCompletionResponse var chatCompletionResponse GoogleChatCompletionResponse
err = json.Unmarshal(body, &chatCompletionResponse) err = json.Unmarshal(body, &chatCompletionResponse)
if err != nil { if err != nil {
return GoogleChatCompletionResponse{}, fmt.Errorf("error unmarshaling JSON: %w", err) return "JADE internal error: 03-01-0005. Please contact the support."
} }
var usedModelInfo ModelInfo var usedModelInfo ModelInfo
@ -209,12 +208,16 @@ func RequestGoogle(c *fiber.Ctx, model string, messages []Message, temperature f
LIMIT 1 LIMIT 1
`, &usedModelInfo, model) `, &usedModelInfo, model)
if err != nil { if err != nil {
return GoogleChatCompletionResponse{}, fmt.Errorf("error getting model info: %w", err) return "JADE internal error: 03-00-0006. Please contact the support."
} }
var inputCost float32 = float32(chatCompletionResponse.UsageMetadata.PromptTokenCount) * usedModelInfo.InputPrice var inputCost float32 = float32(chatCompletionResponse.UsageMetadata.PromptTokenCount) * usedModelInfo.InputPrice
var outputCost float32 = float32(chatCompletionResponse.UsageMetadata.CandidatesTokenCount) * usedModelInfo.OutputPrice var outputCost float32 = float32(chatCompletionResponse.UsageMetadata.CandidatesTokenCount) * usedModelInfo.OutputPrice
addUsage(c, inputCost, outputCost, chatCompletionResponse.UsageMetadata.PromptTokenCount, chatCompletionResponse.UsageMetadata.CandidatesTokenCount, model) addUsage(c, inputCost, outputCost, chatCompletionResponse.UsageMetadata.PromptTokenCount, chatCompletionResponse.UsageMetadata.CandidatesTokenCount, model)
return chatCompletionResponse, nil if len(chatCompletionResponse.Candidates) == 0 {
return "JADE internal error: 03-03-0007. Please contact the support."
}
return chatCompletionResponse.Candidates[0].Content.Parts[0].Text
} }

View File

@ -1,164 +0,0 @@
// It work but I disable it because it is not chat API
// It is text completion, not chat completion. But they will soon release API for chat
// So I leave it here for now
package main
import (
"bytes"
"encoding/json"
"fmt"
"io"
"net/http"
"github.com/edgedb/edgedb-go"
"github.com/gofiber/fiber/v2"
)
// GooseaiCompletionRequest is the JSON body sent to the GooseAI
// text-completion endpoint (/v1/engines/{model}/completions).
type GooseaiCompletionRequest struct {
	Model       string   `json:"model"`       // engine/model ID, e.g. "gpt-j-6b"
	Prompt      []string `json:"prompt"`      // prompt strings; this client always sends a single element
	Temperature float64  `json:"temperature"` // sampling temperature
	MaxToken    int32    `json:"max_tokens"`  // maximum number of tokens to generate
}
// GooseaiCompletionResponse is the JSON body returned by the GooseAI
// completion endpoint. Note: no usage/token-count field is decoded here,
// which is why callers record zero usage for GooseAI requests.
type GooseaiCompletionResponse struct {
	ID      string          `json:"id"`      // provider-assigned response ID
	Created int64           `json:"created"` // Unix timestamp of creation
	Model   string          `json:"model"`   // model that produced the completion
	Choices []GooseaiChoice `json:"choices"` // generated completions; may be empty on error
}
// GooseaiChoice is one generated completion within a GooseaiCompletionResponse.
type GooseaiChoice struct {
	Text         string `json:"text"`          // the generated text
	FinishReason string `json:"finish_reason"` // why generation stopped (e.g. length, stop token)
	Index        int    `json:"index"`         // position of this choice in the response
}
// addGooseaiMessage requests a completion from GooseAI for the currently
// selected conversation messages and stores the reply as a bot message.
// It returns the UUID of the inserted Message row.
func addGooseaiMessage(c *fiber.Ctx, llm LLM, selected bool) edgedb.UUID {
	Messages := getAllSelectedMessages(c)
	chatCompletion, err := RequestGooseai(c, llm.Model.ModelID, Messages, float64(llm.Temperature))
	if err != nil {
		// Was: fmt.Println("Error fetching user profile"); panic(err).
		// The log text was a copy-paste from an unrelated function, and the
		// panic crashed the whole request. Match the other provider handlers
		// (Google, Groq, Mistral): log the error and store a readable error
		// message for the user instead.
		fmt.Println("Error requesting GooseAI: ", err)
		return insertBotMessage(c, "Error requesting GooseAI.", selected, llm.ID)
	}
	if len(chatCompletion.Choices) == 0 {
		// Provider answered but produced no completion.
		fmt.Println("No response from GooseAI")
		return insertBotMessage(c, "No response from GooseAI", selected, llm.ID)
	}
	Content := chatCompletion.Choices[0].Text
	return insertBotMessage(c, Content, selected, llm.ID)
}
// TestGooseaiKey checks whether a GooseAI API key is valid by sending a tiny
// completion request against the gpt-j-6b engine. It returns true only when
// the provider answers with a non-empty completion; any transport, encoding,
// or provider error yields false.
func TestGooseaiKey(apiKey string) bool {
	url := "https://api.goose.ai/v1/engines/gpt-j-6b/completions"
	requestBody := GooseaiCompletionRequest{
		Model:       "gpt-j-6b",
		Prompt:      []string{"Hello, how are you?"},
		Temperature: 0,
		MaxToken:    10, // keep the probe request cheap
	}
	jsonBody, err := json.Marshal(requestBody)
	if err != nil {
		return false
	}
	req, err := http.NewRequest("POST", url, bytes.NewBuffer(jsonBody))
	if err != nil {
		return false
	}
	req.Header.Set("Content-Type", "application/json")
	req.Header.Set("Authorization", "Bearer "+apiKey)
	client := &http.Client{}
	resp, err := client.Do(req)
	if err != nil {
		return false
	}
	defer resp.Body.Close()
	body, err := io.ReadAll(resp.Body)
	if err != nil {
		return false
	}
	var chatCompletionResponse GooseaiCompletionResponse
	err = json.Unmarshal(body, &chatCompletionResponse)
	if err != nil {
		return false
	}
	// Bug fix: the original indexed Choices[0] unconditionally, which
	// panicked with an index-out-of-range when the provider returned an
	// error payload (e.g. for an invalid key) with no choices.
	if len(chatCompletionResponse.Choices) == 0 || chatCompletionResponse.Choices[0].Text == "" {
		return false
	}
	return true
}
// RequestGooseai sends a text-completion request to the GooseAI API for the
// given model and returns the decoded response.
//
// The caller's API key is looked up in EdgeDB using the session token from
// the request cookies. Because GooseAI only offers text completion (no chat
// endpoint), the whole message history is flattened into a single prompt
// string. Usage is recorded as zero since the response carries no token
// counts.
func RequestGooseai(c *fiber.Ctx, model string, messages []Message, temperature float64) (GooseaiCompletionResponse, error) {
	// Resolve the user's stored GooseAI key.
	var apiKey string
	err := edgeGlobalClient.WithGlobals(map[string]interface{}{"ext::auth::client_token": c.Cookies("jade-edgedb-auth-token")}).QuerySingle(edgeCtx, `
		with
			filtered_keys := (
				select Key {
					key
				} filter .company.name = "gooseai" AND .<keys[is Setting].<setting[is User] = global currentUser
			)
		select filtered_keys.key limit 1
	`, &apiKey)
	if err != nil {
		return GooseaiCompletionResponse{}, fmt.Errorf("error getting GooseAI API key: %w", err)
	}

	endpoint := "https://api.goose.ai/v1/engines/" + model + "/completions"

	// Flatten the conversation into one prompt string.
	var promptBuf bytes.Buffer
	for _, msg := range messages {
		promptBuf.WriteString(msg.Content)
	}

	payload, err := json.Marshal(GooseaiCompletionRequest{
		Model:       model,
		Prompt:      []string{promptBuf.String()},
		Temperature: temperature,
		MaxToken:    300,
	})
	if err != nil {
		return GooseaiCompletionResponse{}, fmt.Errorf("error marshaling JSON: %w", err)
	}

	req, err := http.NewRequest("POST", endpoint, bytes.NewBuffer(payload))
	if err != nil {
		return GooseaiCompletionResponse{}, fmt.Errorf("error creating request: %w", err)
	}
	req.Header.Set("Content-Type", "application/json")
	req.Header.Set("Authorization", "Bearer "+apiKey)

	resp, err := (&http.Client{}).Do(req)
	if err != nil {
		return GooseaiCompletionResponse{}, fmt.Errorf("error sending request: %w", err)
	}
	defer resp.Body.Close()

	respBody, err := io.ReadAll(resp.Body)
	if err != nil {
		return GooseaiCompletionResponse{}, fmt.Errorf("error reading response body: %w", err)
	}

	var completion GooseaiCompletionResponse
	if err := json.Unmarshal(respBody, &completion); err != nil {
		return GooseaiCompletionResponse{}, fmt.Errorf("error unmarshaling JSON: %w", err)
	}

	// GooseAI pricing/token counts are not tracked; record zero usage so the
	// request still shows up in the usage log.
	addUsage(c, 0, 0, 0, 0, model)
	return completion, nil
}

View File

@ -3,61 +3,13 @@ package main
import ( import (
"bytes" "bytes"
"encoding/json" "encoding/json"
"fmt"
"io" "io"
"net/http" "net/http"
"strings"
"github.com/edgedb/edgedb-go"
"github.com/gofiber/fiber/v2" "github.com/gofiber/fiber/v2"
) )
type GroqChatCompletionRequest struct {
Model string `json:"model"`
Messages []RequestMessage `json:"messages"`
MaxTokens int `json:"max_tokens"`
Temperature float64 `json:"temperature"`
}
type GroqChatCompletionResponse struct {
ID string `json:"id"`
Object string `json:"object"`
Created int64 `json:"created"`
Model string `json:"model"`
Usage GroqUsage `json:"usage"`
Choices []GroqChoice `json:"choices"`
}
type GroqUsage struct {
PromptTokens int32 `json:"prompt_tokens"`
CompletionTokens int32 `json:"completion_tokens"`
TotalTokens int32 `json:"total_tokens"`
}
type GroqChoice struct {
Message Message `json:"message"`
FinishReason string `json:"finish_reason"`
Index int `json:"index"`
}
func addGroqMessage(c *fiber.Ctx, llm LLM, selected bool) edgedb.UUID {
Messages := getAllSelectedMessages(c)
chatCompletion, err := RequestGroq(c, llm.Model.ModelID, Messages, float64(llm.Temperature), llm.Context, int(llm.MaxToken))
if err != nil {
fmt.Println("Error requesting Groq: ", err)
id := insertBotMessage(c, "Error requesting Groq, model may not be available anymore. Better error message in development.", selected, llm.ID)
return id
} else if len(chatCompletion.Choices) == 0 {
fmt.Println("No response from Groq")
id := insertBotMessage(c, "No response from Groq", selected, llm.ID)
return id
} else {
Content := chatCompletion.Choices[0].Message.Content
id := insertBotMessage(c, Content, selected, llm.ID)
return id
}
}
func TestGroqKey(apiKey string) bool { func TestGroqKey(apiKey string) bool {
url := "https://api.groq.com/openai/v1/chat/completions" url := "https://api.groq.com/openai/v1/chat/completions"
@ -69,7 +21,7 @@ func TestGroqKey(apiKey string) bool {
}, },
} }
requestBody := GroqChatCompletionRequest{ requestBody := OpenaiChatCompletionRequest{
Model: "llama3-8b-8192", Model: "llama3-8b-8192",
Messages: Message2RequestMessage(groqMessages, ""), Messages: Message2RequestMessage(groqMessages, ""),
Temperature: 0, Temperature: 0,
@ -101,7 +53,7 @@ func TestGroqKey(apiKey string) bool {
return false return false
} }
var chatCompletionResponse GroqChatCompletionResponse var chatCompletionResponse OpenaiChatCompletionResponse
err = json.Unmarshal(body, &chatCompletionResponse) err = json.Unmarshal(body, &chatCompletionResponse)
if err != nil { if err != nil {
return false return false
@ -112,7 +64,12 @@ func TestGroqKey(apiKey string) bool {
return true return true
} }
func RequestGroq(c *fiber.Ctx, model string, messages []Message, temperature float64, context string, maxTokens int) (GroqChatCompletionResponse, error) { func RequestGroq(c *fiber.Ctx, llm LLM, messages []Message) string {
model := llm.Model.ModelID
temperature := float64(llm.Temperature)
context := llm.Context
maxTokens := int(llm.MaxToken)
var apiKey string var apiKey string
err := edgeGlobalClient.WithGlobals(map[string]interface{}{"ext::auth::client_token": c.Cookies("jade-edgedb-auth-token")}).QuerySingle(edgeCtx, ` err := edgeGlobalClient.WithGlobals(map[string]interface{}{"ext::auth::client_token": c.Cookies("jade-edgedb-auth-token")}).QuerySingle(edgeCtx, `
with with
@ -124,12 +81,12 @@ func RequestGroq(c *fiber.Ctx, model string, messages []Message, temperature flo
select filtered_keys.key limit 1 select filtered_keys.key limit 1
`, &apiKey, "groq") `, &apiKey, "groq")
if err != nil { if err != nil {
return GroqChatCompletionResponse{}, fmt.Errorf("error getting Groq API key: %w", err) return "JADE internal error: 04-00-0000. Please contact the support."
} }
url := "https://api.groq.com/openai/v1/chat/completions" url := "https://api.groq.com/openai/v1/chat/completions"
requestBody := GroqChatCompletionRequest{ requestBody := OpenaiChatCompletionRequest{
Model: model, Model: model,
Messages: Message2RequestMessage(messages, context), Messages: Message2RequestMessage(messages, context),
MaxTokens: maxTokens, MaxTokens: maxTokens,
@ -138,12 +95,12 @@ func RequestGroq(c *fiber.Ctx, model string, messages []Message, temperature flo
jsonBody, err := json.Marshal(requestBody) jsonBody, err := json.Marshal(requestBody)
if err != nil { if err != nil {
return GroqChatCompletionResponse{}, fmt.Errorf("error marshaling JSON: %w", err) return "JADE internal error: 04-01-0001. Please contact the support."
} }
req, err := http.NewRequest("POST", url, bytes.NewBuffer(jsonBody)) req, err := http.NewRequest("POST", url, bytes.NewBuffer(jsonBody))
if err != nil { if err != nil {
return GroqChatCompletionResponse{}, fmt.Errorf("error creating request: %w", err) return "JADE internal error: 04-02-0002. Please contact the support."
} }
req.Header.Set("Content-Type", "application/json") req.Header.Set("Content-Type", "application/json")
@ -152,23 +109,29 @@ func RequestGroq(c *fiber.Ctx, model string, messages []Message, temperature flo
client := &http.Client{} client := &http.Client{}
resp, err := client.Do(req) resp, err := client.Do(req)
if err != nil { if err != nil {
return GroqChatCompletionResponse{}, fmt.Errorf("error sending request: %w", err) return "JADE internal error: 04-02-0003. Please contact the support."
} }
defer resp.Body.Close() defer resp.Body.Close()
body, err := io.ReadAll(resp.Body) body, err := io.ReadAll(resp.Body)
if err != nil { if err != nil {
return GroqChatCompletionResponse{}, fmt.Errorf("error reading response body: %w", err) return "JADE internal error: 04-01-0004. Please contact the support."
} }
var chatCompletionResponse GroqChatCompletionResponse for key, value := range OpenaiErrorCodes {
if strings.Contains(resp.Status, key) {
return value
}
}
var chatCompletionResponse OpenaiChatCompletionResponse
err = json.Unmarshal(body, &chatCompletionResponse) err = json.Unmarshal(body, &chatCompletionResponse)
if err != nil { if err != nil {
return GroqChatCompletionResponse{}, fmt.Errorf("error unmarshaling JSON: %w", err) return "JADE internal error: 04-01-0005. Please contact the support."
} }
var usedModelInfo ModelInfo var usedModelInfo ModelInfo
edgeGlobalClient.WithGlobals(map[string]interface{}{"ext::auth::client_token": c.Cookies("jade-edgedb-auth-token")}).QuerySingle(edgeCtx, ` err = edgeGlobalClient.WithGlobals(map[string]interface{}{"ext::auth::client_token": c.Cookies("jade-edgedb-auth-token")}).QuerySingle(edgeCtx, `
SELECT ModelInfo { SELECT ModelInfo {
inputPrice, inputPrice,
outputPrice outputPrice
@ -176,10 +139,17 @@ func RequestGroq(c *fiber.Ctx, model string, messages []Message, temperature flo
FILTER .modelID = <str>$0 FILTER .modelID = <str>$0
LIMIT 1 LIMIT 1
`, &usedModelInfo, model) `, &usedModelInfo, model)
if err != nil {
return "JADE internal error: 04-00-0006. Please contact the support."
}
var inputCost float32 = float32(chatCompletionResponse.Usage.PromptTokens) * usedModelInfo.InputPrice var inputCost float32 = float32(chatCompletionResponse.Usage.PromptTokens) * usedModelInfo.InputPrice
var outputCost float32 = float32(chatCompletionResponse.Usage.CompletionTokens) * usedModelInfo.OutputPrice var outputCost float32 = float32(chatCompletionResponse.Usage.CompletionTokens) * usedModelInfo.OutputPrice
addUsage(c, inputCost, outputCost, chatCompletionResponse.Usage.PromptTokens, chatCompletionResponse.Usage.CompletionTokens, model) addUsage(c, inputCost, outputCost, chatCompletionResponse.Usage.PromptTokens, chatCompletionResponse.Usage.CompletionTokens, model)
return chatCompletionResponse, nil if len(chatCompletionResponse.Choices) == 0 {
return "JADE internal error: 04-03-0007. Please contact the support."
}
return chatCompletionResponse.Choices[0].Message.Content
} }

View File

@ -1,106 +0,0 @@
package main
import (
"bytes"
"encoding/json"
"fmt"
"io"
"net/http"
"github.com/edgedb/edgedb-go"
"github.com/gofiber/fiber/v2"
)
// HuggingfaceChatCompletionRequest is the JSON body sent to a Hugging Face
// TGI (text-generation-inference) endpoint's OpenAI-compatible chat route.
type HuggingfaceChatCompletionRequest struct {
	Model       string           `json:"model"`       // always "tgi" for dedicated endpoints
	Messages    []RequestMessage `json:"messages"`    // chat history in OpenAI message format
	Temperature float64          `json:"temperature"` // sampling temperature
	MaxTokens   int              `json:"max_tokens"`  // maximum number of tokens to generate
	Stream      bool             `json:"stream"`      // false: this client reads a single JSON response
}
// HuggingfaceChatCompletionResponse is the decoded chat-completion reply from
// a TGI endpoint. NOTE(review): no usage field is decoded here even though
// HuggingfaceUsage is defined below — callers record zero usage.
type HuggingfaceChatCompletionResponse struct {
	ID      string              `json:"id"`      // provider-assigned response ID
	Object  string              `json:"object"`  // object type tag from the API
	Created int64               `json:"created"` // Unix timestamp of creation
	Model   string              `json:"model"`   // model that produced the completion
	Choices []HuggingfaceChoice `json:"choices"` // generated replies; may be empty on error
}
// HuggingfaceUsage mirrors the OpenAI-style usage block (token counts).
// NOTE(review): currently unreferenced by HuggingfaceChatCompletionResponse,
// so token counts are never decoded for Hugging Face requests.
type HuggingfaceUsage struct {
	PromptTokens     int32 `json:"prompt_tokens"`     // tokens consumed by the prompt
	CompletionTokens int32 `json:"completion_tokens"` // tokens generated in the reply
	TotalTokens      int32 `json:"total_tokens"`      // prompt + completion
}
// HuggingfaceChoice is one generated reply within a chat-completion response.
type HuggingfaceChoice struct {
	Message      Message `json:"message"`       // the assistant message produced
	FinishReason string  `json:"finish_reason"` // why generation stopped
	Index        int     `json:"index"`         // position of this choice in the response
}
// addHuggingfaceMessage requests a completion from the user's Hugging Face
// endpoint for the currently selected messages and stores the reply as a bot
// message. It returns the UUID of the inserted Message row; request failures
// and empty responses are stored as readable error messages for the user.
func addHuggingfaceMessage(c *fiber.Ctx, llm LLM, selected bool) edgedb.UUID {
	history := getAllSelectedMessages(c)
	completion, err := RequestHuggingface(c, llm, history, float64(llm.Temperature), int(llm.MaxToken))
	if err != nil {
		fmt.Println("Error requesting Huggingface: ", err)
		return insertBotMessage(c, "Error requesting Huggingface.", selected, llm.ID)
	}
	if len(completion.Choices) == 0 {
		fmt.Println("No response from Endpoint")
		return insertBotMessage(c, "No response from Endpoint", selected, llm.ID)
	}
	return insertBotMessage(c, completion.Choices[0].Message.Content, selected, llm.ID)
}
// RequestHuggingface sends a chat-completion request to the user's dedicated
// Hugging Face TGI endpoint (llm.Endpoint.Endpoint) and returns the decoded
// response. Authentication uses the endpoint's own key, not a provider key
// from EdgeDB. Usage is recorded as zero because no token counts are decoded.
func RequestHuggingface(c *fiber.Ctx, llm LLM, messages []Message, temperature float64, maxTokens int) (HuggingfaceChatCompletionResponse, error) {
	url := llm.Endpoint.Endpoint
	requestBody := HuggingfaceChatCompletionRequest{
		// TGI dedicated endpoints expect the literal model name "tgi".
		Model:       "tgi",
		Messages:    Message2RequestMessage(messages, llm.Context),
		Temperature: temperature,
		MaxTokens:   maxTokens,
		Stream:      false, // single JSON response, not server-sent events
	}
	jsonBody, err := json.Marshal(requestBody)
	if err != nil {
		return HuggingfaceChatCompletionResponse{}, fmt.Errorf("error marshaling JSON: %w", err)
	}
	req, err := http.NewRequest("POST", url, bytes.NewBuffer(jsonBody))
	if err != nil {
		return HuggingfaceChatCompletionResponse{}, fmt.Errorf("error creating request: %w", err)
	}
	// The endpoint carries its own bearer token.
	req.Header.Set("Authorization", "Bearer "+llm.Endpoint.Key)
	req.Header.Set("Content-Type", "application/json")
	client := &http.Client{}
	resp, err := client.Do(req)
	if err != nil {
		return HuggingfaceChatCompletionResponse{}, fmt.Errorf("error sending request: %w", err)
	}
	defer resp.Body.Close()
	body, err := io.ReadAll(resp.Body)
	if err != nil {
		return HuggingfaceChatCompletionResponse{}, fmt.Errorf("error reading response body: %w", err)
	}
	var chatCompletionResponse HuggingfaceChatCompletionResponse
	err = json.Unmarshal(body, &chatCompletionResponse)
	if err != nil {
		return HuggingfaceChatCompletionResponse{}, fmt.Errorf("error unmarshaling JSON: %w", err)
	}
	// No usage data is available from the endpoint; record zeros so the
	// request still appears in the usage log.
	addUsage(c, 0, 0, 0, 0, llm.Model.ModelID)
	return chatCompletionResponse, nil
}

View File

@ -3,59 +3,13 @@ package main
import ( import (
"bytes" "bytes"
"encoding/json" "encoding/json"
"fmt"
"io" "io"
"net/http" "net/http"
"strings"
"github.com/edgedb/edgedb-go"
"github.com/gofiber/fiber/v2" "github.com/gofiber/fiber/v2"
) )
type MistralChatCompletionRequest struct {
Model string `json:"model"`
Messages []RequestMessage `json:"messages"`
MaxTokens int `json:"max_tokens"`
Temperature float64 `json:"temperature"`
}
type MistralChatCompletionResponse struct {
ID string `json:"id"`
Object string `json:"object"`
Created int64 `json:"created"`
Model string `json:"model"`
Usage MistralUsage `json:"usage"`
Choices []MistralChoice `json:"choices"`
}
type MistralUsage struct {
PromptTokens int32 `json:"prompt_tokens"`
CompletionTokens int32 `json:"completion_tokens"`
TotalTokens int32 `json:"total_tokens"`
}
type MistralChoice struct {
Message Message `json:"message"`
FinishReason string `json:"finish_reason"`
Index int `json:"index"`
}
func addMistralMessage(c *fiber.Ctx, llm LLM, selected bool) edgedb.UUID {
Messages := getAllSelectedMessages(c)
chatCompletion, err := RequestMistral(c, llm.Model.ModelID, Messages, float64(llm.Temperature), llm.Context, int(llm.MaxToken))
if err != nil {
fmt.Println("Error requesting Mistral: ", err)
id := insertBotMessage(c, "Error requesting Mistral, model may not be available anymore. Better error message in development.", selected, llm.ID)
return id
} else if len(chatCompletion.Choices) == 0 {
id := insertBotMessage(c, "No response from Mistral", selected, llm.ID)
return id
} else {
Content := chatCompletion.Choices[0].Message.Content
id := insertBotMessage(c, Content, selected, llm.ID)
return id
}
}
func TestMistralKey(apiKey string) bool { func TestMistralKey(apiKey string) bool {
url := "https://api.mistral.ai/v1/chat/completions" url := "https://api.mistral.ai/v1/chat/completions"
@ -67,7 +21,7 @@ func TestMistralKey(apiKey string) bool {
}, },
} }
requestBody := MistralChatCompletionRequest{ requestBody := OpenaiChatCompletionRequest{
Model: "open-mistral-7b", Model: "open-mistral-7b",
Messages: mistralMessages, Messages: mistralMessages,
Temperature: 0, Temperature: 0,
@ -76,13 +30,11 @@ func TestMistralKey(apiKey string) bool {
jsonBody, err := json.Marshal(requestBody) jsonBody, err := json.Marshal(requestBody)
if err != nil { if err != nil {
fmt.Println("Error marshalling request to Mistral")
return false return false
} }
req, err := http.NewRequest("POST", url, bytes.NewBuffer(jsonBody)) req, err := http.NewRequest("POST", url, bytes.NewBuffer(jsonBody))
if err != nil { if err != nil {
fmt.Println("Error creating request to Mistral")
return false return false
} }
@ -93,31 +45,32 @@ func TestMistralKey(apiKey string) bool {
client := &http.Client{} client := &http.Client{}
resp, err := client.Do(req) resp, err := client.Do(req)
if err != nil { if err != nil {
fmt.Println("Error sending request to Mistral")
return false return false
} }
defer resp.Body.Close() defer resp.Body.Close()
body, err := io.ReadAll(resp.Body) body, err := io.ReadAll(resp.Body)
if err != nil { if err != nil {
fmt.Println("Error reading response from Mistral")
return false return false
} }
var chatCompletionResponse MistralChatCompletionResponse var chatCompletionResponse OpenaiChatCompletionResponse
err = json.Unmarshal(body, &chatCompletionResponse) err = json.Unmarshal(body, &chatCompletionResponse)
if err != nil { if err != nil {
fmt.Println("Error unmarshalling response from Mistral")
return false return false
} }
if chatCompletionResponse.Usage.CompletionTokens == 0 { if chatCompletionResponse.Usage.CompletionTokens == 0 {
fmt.Println("No response from Mistral")
return false return false
} }
return true return true
} }
func RequestMistral(c *fiber.Ctx, model string, messages []Message, temperature float64, context string, maxTokens int) (MistralChatCompletionResponse, error) { func RequestMistral(c *fiber.Ctx, llm LLM, messages []Message) string {
model := llm.Model.ModelID
temperature := float64(llm.Temperature)
context := llm.Context
maxTokens := int(llm.MaxToken)
var apiKey string var apiKey string
err := edgeGlobalClient.WithGlobals(map[string]interface{}{"ext::auth::client_token": c.Cookies("jade-edgedb-auth-token")}).QuerySingle(edgeCtx, ` err := edgeGlobalClient.WithGlobals(map[string]interface{}{"ext::auth::client_token": c.Cookies("jade-edgedb-auth-token")}).QuerySingle(edgeCtx, `
with with
@ -129,12 +82,12 @@ func RequestMistral(c *fiber.Ctx, model string, messages []Message, temperature
select filtered_keys.key limit 1 select filtered_keys.key limit 1
`, &apiKey, "mistral") `, &apiKey, "mistral")
if err != nil { if err != nil {
return MistralChatCompletionResponse{}, fmt.Errorf("error getting OpenAI API key: %w", err) return "JADE internal error: 02-00-0000. Please contact the support."
} }
url := "https://api.mistral.ai/v1/chat/completions" url := "https://api.mistral.ai/v1/chat/completions"
requestBody := MistralChatCompletionRequest{ requestBody := OpenaiChatCompletionRequest{
Model: model, Model: model,
Messages: Message2RequestMessage(messages, context), Messages: Message2RequestMessage(messages, context),
MaxTokens: maxTokens, MaxTokens: maxTokens,
@ -143,34 +96,39 @@ func RequestMistral(c *fiber.Ctx, model string, messages []Message, temperature
jsonBody, err := json.Marshal(requestBody) jsonBody, err := json.Marshal(requestBody)
if err != nil { if err != nil {
return MistralChatCompletionResponse{}, fmt.Errorf("error marshaling JSON: %w", err) return "JADE internal error: 02-01-0001. Please contact the support."
} }
req, err := http.NewRequest("POST", url, bytes.NewBuffer(jsonBody)) req, err := http.NewRequest("POST", url, bytes.NewBuffer(jsonBody))
if err != nil { if err != nil {
return MistralChatCompletionResponse{}, fmt.Errorf("error creating request: %w", err) return "JADE internal error: 02-02-0002. Please contact the support."
} }
req.Header.Set("Content-Type", "application/json") req.Header.Set("Content-Type", "application/json")
req.Header.Set("Accept", "application/json")
req.Header.Set("Authorization", "Bearer "+apiKey) req.Header.Set("Authorization", "Bearer "+apiKey)
client := &http.Client{} client := &http.Client{}
resp, err := client.Do(req) resp, err := client.Do(req)
if err != nil { if err != nil {
return MistralChatCompletionResponse{}, fmt.Errorf("error sending request: %w", err) return "JADE internal error: 02-02-0003. Please contact the support."
} }
defer resp.Body.Close() defer resp.Body.Close()
body, err := io.ReadAll(resp.Body) body, err := io.ReadAll(resp.Body)
if err != nil { if err != nil {
return MistralChatCompletionResponse{}, fmt.Errorf("error reading response body: %w", err) return "JADE internal error: 02-01-0004. Please contact the support."
} }
var chatCompletionResponse MistralChatCompletionResponse for key, value := range OpenaiErrorCodes {
if strings.Contains(resp.Status, key) {
return value
}
}
var chatCompletionResponse OpenaiChatCompletionResponse
err = json.Unmarshal(body, &chatCompletionResponse) err = json.Unmarshal(body, &chatCompletionResponse)
if err != nil { if err != nil {
return MistralChatCompletionResponse{}, fmt.Errorf("error unmarshaling JSON: %w", err) return "JADE internal error: 02-01-0005. Please contact the support."
} }
var usedModelInfo ModelInfo var usedModelInfo ModelInfo
@ -183,16 +141,16 @@ func RequestMistral(c *fiber.Ctx, model string, messages []Message, temperature
LIMIT 1 LIMIT 1
`, &usedModelInfo, model) `, &usedModelInfo, model)
if err != nil { if err != nil {
return MistralChatCompletionResponse{}, fmt.Errorf("error getting model info: %w", err) return "JADE internal error: 02-00-0006. Please contact the support."
}
if usedModelInfo.InputPrice == 0 || usedModelInfo.OutputPrice == 0 {
return MistralChatCompletionResponse{}, fmt.Errorf("model %s not found in Mistral", model)
} }
var inputCost float32 = float32(chatCompletionResponse.Usage.PromptTokens) * usedModelInfo.InputPrice var inputCost float32 = float32(chatCompletionResponse.Usage.PromptTokens) * usedModelInfo.InputPrice
var outputCost float32 = float32(chatCompletionResponse.Usage.CompletionTokens) * usedModelInfo.OutputPrice var outputCost float32 = float32(chatCompletionResponse.Usage.CompletionTokens) * usedModelInfo.OutputPrice
addUsage(c, inputCost, outputCost, chatCompletionResponse.Usage.PromptTokens, chatCompletionResponse.Usage.CompletionTokens, model) addUsage(c, inputCost, outputCost, chatCompletionResponse.Usage.PromptTokens, chatCompletionResponse.Usage.CompletionTokens, model)
return chatCompletionResponse, nil if len(chatCompletionResponse.Choices) == 0 {
return "JADE internal error: 02-03-0007. Please contact the support."
}
return chatCompletionResponse.Choices[0].Message.Content
} }

View File

@ -3,66 +3,15 @@ package main
import ( import (
"bytes" "bytes"
"encoding/json" "encoding/json"
"fmt"
"io" "io"
"net/http" "net/http"
"strings"
"github.com/edgedb/edgedb-go"
"github.com/gofiber/fiber/v2" "github.com/gofiber/fiber/v2"
) )
type NimChatCompletionRequest struct {
Model string `json:"model"`
Messages []RequestMessage `json:"messages"`
MaxTokens int `json:"max_tokens"`
Temperature float64 `json:"temperature"`
}
type NimChatCompletionResponse struct {
ID string `json:"id"`
Object string `json:"object"`
Created int64 `json:"created"`
Model string `json:"model"`
Usage NimUsage `json:"usage"`
Choices []NimChoice `json:"choices"`
}
type NimUsage struct {
PromptTokens int32 `json:"prompt_tokens"`
CompletionTokens int32 `json:"completion_tokens"`
TotalTokens int32 `json:"total_tokens"`
}
type NimChoice struct {
Message RequestMessage `json:"message"`
FinishReason string `json:"finish_reason"`
Index int `json:"index"`
}
func addNimMessage(c *fiber.Ctx, llm LLM, selected bool) edgedb.UUID {
Messages := getAllSelectedMessages(c)
chatCompletion, err := RequestNim(c, llm.Model.ModelID, Messages, float64(llm.Temperature), llm.Context, int(llm.MaxToken))
if err != nil {
fmt.Println("Error requesting NIM: ", err)
id := insertBotMessage(c, "Error requesting NIM, model may not be available anymore. Better error message in development.", selected, llm.ID)
return id
} else if len(chatCompletion.Choices) == 0 {
fmt.Println("No response from NIM")
id := insertBotMessage(c, "No response from NIM", selected, llm.ID)
return id
} else {
Content := chatCompletion.Choices[0].Message.Content
id := insertBotMessage(c, Content, selected, llm.ID)
return id
}
}
func TestNimKey(apiKey string) bool { func TestNimKey(apiKey string) bool {
url := "https://integrate.api.nvidia.com/v1/chat/completions" url := "https://integrate.api.nvidia.com/v1/chat/completions"
//apiKey := "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"
fmt.Println("Testing new Nvidia NIM key:", apiKey)
// Convert messages to OpenAI format // Convert messages to OpenAI format
nimMessages := []RequestMessage{ nimMessages := []RequestMessage{
@ -72,7 +21,7 @@ func TestNimKey(apiKey string) bool {
}, },
} }
requestBody := NimChatCompletionRequest{ requestBody := OpenaiChatCompletionRequest{
Model: "meta/llama3-8b-instruct", Model: "meta/llama3-8b-instruct",
Messages: nimMessages, Messages: nimMessages,
Temperature: 0, Temperature: 0,
@ -81,13 +30,11 @@ func TestNimKey(apiKey string) bool {
jsonBody, err := json.Marshal(requestBody) jsonBody, err := json.Marshal(requestBody)
if err != nil { if err != nil {
fmt.Println("Error when testing NIM key. Cant parse JSON request.")
return false return false
} }
req, err := http.NewRequest("POST", url, bytes.NewBuffer(jsonBody)) req, err := http.NewRequest("POST", url, bytes.NewBuffer(jsonBody))
if err != nil { if err != nil {
fmt.Println("Error when testing NIM key. Cant generate new request")
return false return false
} }
@ -97,41 +44,33 @@ func TestNimKey(apiKey string) bool {
client := &http.Client{} client := &http.Client{}
resp, err := client.Do(req) resp, err := client.Do(req)
if err != nil { if err != nil {
fmt.Println("Error when testing NIM key. Cant send request.")
return false return false
} }
defer resp.Body.Close() defer resp.Body.Close()
body, err := io.ReadAll(resp.Body) body, err := io.ReadAll(resp.Body)
if err != nil { if err != nil {
fmt.Println("Error when testing NIM key. Cant read response.")
return false return false
} }
var chatCompletionResponse NimChatCompletionResponse var chatCompletionResponse OpenaiChatCompletionResponse
err = json.Unmarshal(body, &chatCompletionResponse) err = json.Unmarshal(body, &chatCompletionResponse)
if err != nil { if err != nil {
fmt.Println(resp.Status)
fmt.Println(resp.Body)
fmt.Println("Error when testing NIM key. Cant unmarshal response.")
return false return false
} }
if chatCompletionResponse.Usage.CompletionTokens == 0 { if chatCompletionResponse.Usage.CompletionTokens == 0 {
fmt.Println(resp.Status)
fmt.Println(resp.Body)
fmt.Println("Error when testing NIM key. No completion token.")
return false return false
} }
Content := chatCompletionResponse.Choices[0].Message.Content
fmt.Println(Content)
return true return true
} }
func RequestNim(c *fiber.Ctx, model string, messages []Message, temperature float64, context string, maxToken int) (NimChatCompletionResponse, error) { func RequestNim(c *fiber.Ctx, llm LLM, messages []Message) string {
model := llm.Model.ModelID
temperature := float64(llm.Temperature)
context := llm.Context
maxTokens := int(llm.MaxToken)
var apiKey string var apiKey string
err := edgeGlobalClient.WithGlobals(map[string]interface{}{"ext::auth::client_token": c.Cookies("jade-edgedb-auth-token")}).QuerySingle(edgeCtx, ` err := edgeGlobalClient.WithGlobals(map[string]interface{}{"ext::auth::client_token": c.Cookies("jade-edgedb-auth-token")}).QuerySingle(edgeCtx, `
with with
@ -143,26 +82,26 @@ func RequestNim(c *fiber.Ctx, model string, messages []Message, temperature floa
select filtered_keys.key limit 1 select filtered_keys.key limit 1
`, &apiKey, "nim") `, &apiKey, "nim")
if err != nil { if err != nil {
return NimChatCompletionResponse{}, fmt.Errorf("error getting NIM API key: %w", err) return "JADE internal error: 05-00-0000. Please contact the support."
} }
url := "https://integrate.api.nvidia.com/v1/chat/completions" url := "https://integrate.api.nvidia.com/v1/chat/completions"
requestBody := NimChatCompletionRequest{ requestBody := OpenaiChatCompletionRequest{
Model: model, Model: model,
Messages: Message2RequestMessage(messages, context), Messages: Message2RequestMessage(messages, context),
MaxTokens: maxToken, MaxTokens: maxTokens,
Temperature: temperature, Temperature: temperature,
} }
jsonBody, err := json.Marshal(requestBody) jsonBody, err := json.Marshal(requestBody)
if err != nil { if err != nil {
return NimChatCompletionResponse{}, fmt.Errorf("error marshaling JSON: %w", err) return "JADE internal error: 05-01-0001. Please contact the support."
} }
req, err := http.NewRequest("POST", url, bytes.NewBuffer(jsonBody)) req, err := http.NewRequest("POST", url, bytes.NewBuffer(jsonBody))
if err != nil { if err != nil {
return NimChatCompletionResponse{}, fmt.Errorf("error creating request: %w", err) return "JADE internal error: 05-02-0002. Please contact the support."
} }
req.Header.Set("Content-Type", "application/json") req.Header.Set("Content-Type", "application/json")
@ -171,19 +110,25 @@ func RequestNim(c *fiber.Ctx, model string, messages []Message, temperature floa
client := &http.Client{} client := &http.Client{}
resp, err := client.Do(req) resp, err := client.Do(req)
if err != nil { if err != nil {
return NimChatCompletionResponse{}, fmt.Errorf("error sending request: %w", err) return "JADE internal error: 05-02-0003. Please contact the support."
} }
defer resp.Body.Close() defer resp.Body.Close()
body, err := io.ReadAll(resp.Body) body, err := io.ReadAll(resp.Body)
if err != nil { if err != nil {
return NimChatCompletionResponse{}, fmt.Errorf("error reading response body: %w", err) return "JADE internal error: 05-01-0004. Please contact the support."
} }
var chatCompletionResponse NimChatCompletionResponse for key, value := range OpenaiErrorCodes {
if strings.Contains(resp.Status, key) {
return value
}
}
var chatCompletionResponse OpenaiChatCompletionResponse
err = json.Unmarshal(body, &chatCompletionResponse) err = json.Unmarshal(body, &chatCompletionResponse)
if err != nil { if err != nil {
return NimChatCompletionResponse{}, fmt.Errorf("error unmarshaling JSON: %w", err) return "JADE internal error: 05-01-0005. Please contact the support."
} }
var usedModelInfo ModelInfo var usedModelInfo ModelInfo
@ -196,12 +141,16 @@ func RequestNim(c *fiber.Ctx, model string, messages []Message, temperature floa
LIMIT 1 LIMIT 1
`, &usedModelInfo, model) `, &usedModelInfo, model)
if err != nil { if err != nil {
return NimChatCompletionResponse{}, fmt.Errorf("error getting model info: %w", err) return "JADE internal error: 05-00-0006. Please contact the support."
} }
var inputCost float32 = float32(chatCompletionResponse.Usage.PromptTokens) * usedModelInfo.InputPrice var inputCost float32 = float32(chatCompletionResponse.Usage.PromptTokens) * usedModelInfo.InputPrice
var outputCost float32 = float32(chatCompletionResponse.Usage.CompletionTokens) * usedModelInfo.OutputPrice var outputCost float32 = float32(chatCompletionResponse.Usage.CompletionTokens) * usedModelInfo.OutputPrice
addUsage(c, inputCost, outputCost, chatCompletionResponse.Usage.PromptTokens, chatCompletionResponse.Usage.CompletionTokens, model) addUsage(c, inputCost, outputCost, chatCompletionResponse.Usage.PromptTokens, chatCompletionResponse.Usage.CompletionTokens, model)
return chatCompletionResponse, nil if len(chatCompletionResponse.Choices) == 0 {
return "JADE internal error: 05-03-0007. Please contact the support."
}
return chatCompletionResponse.Choices[0].Message.Content
} }

View File

@ -3,11 +3,10 @@ package main
import ( import (
"bytes" "bytes"
"encoding/json" "encoding/json"
"fmt"
"io" "io"
"net/http" "net/http"
"strings"
"github.com/edgedb/edgedb-go"
"github.com/gofiber/fiber/v2" "github.com/gofiber/fiber/v2"
) )
@ -39,23 +38,16 @@ type OpenaiChoice struct {
Index int `json:"index"` Index int `json:"index"`
} }
func addOpenaiMessage(c *fiber.Ctx, llm LLM, selected bool) edgedb.UUID { var OpenaiErrorCodes map[string]string
Messages := getAllSelectedMessages(c)
chatCompletion, err := RequestOpenai(c, llm.Model.ModelID, Messages, float64(llm.Temperature), llm.Context, int(llm.MaxToken)) func init() {
if err != nil { OpenaiErrorCodes = make(map[string]string)
fmt.Println("Error requesting OpenAI: ", err) OpenaiErrorCodes["401"] = "Invalid Authentication - Ensure that the API key is still valid."
id := insertBotMessage(c, "Error requesting OpenAI, model may not be available anymore. Better error message in development.", selected, llm.ID) OpenaiErrorCodes["403"] = "Accessing the API from an unsupported country, region, or territory."
return id OpenaiErrorCodes["429"] = "Rate limit reached for requests - You are sending requests too quickly."
} else if len(chatCompletion.Choices) == 0 { OpenaiErrorCodes["429"] = "You have run out of credits or hit your maximum monthly spend - Buy more credits or learn how to increase your limits."
fmt.Println("No response from OpenAI") OpenaiErrorCodes["500"] = "Issue on Provider servers - Retry your request after a brief wait and contact the provider if the issue persists."
id := insertBotMessage(c, "No response from OpenAI", selected, llm.ID) OpenaiErrorCodes["503"] = "Servers are experiencing high traffic - Please retry your requests after a brief wait."
return id
} else {
Content := chatCompletion.Choices[0].Message.Content
id := insertBotMessage(c, Content, selected, llm.ID)
return id
}
} }
func TestOpenaiKey(apiKey string) bool { func TestOpenaiKey(apiKey string) bool {
@ -112,7 +104,12 @@ func TestOpenaiKey(apiKey string) bool {
return true return true
} }
func RequestOpenai(c *fiber.Ctx, model string, messages []Message, temperature float64, context string, maxTokens int) (OpenaiChatCompletionResponse, error) { func RequestOpenai(c *fiber.Ctx, llm LLM, messages []Message) string {
model := llm.Model.ModelID
temperature := float64(llm.Temperature)
context := llm.Context
maxTokens := int(llm.MaxToken)
var apiKey string var apiKey string
err := edgeGlobalClient.WithGlobals(map[string]interface{}{"ext::auth::client_token": c.Cookies("jade-edgedb-auth-token")}).QuerySingle(edgeCtx, ` err := edgeGlobalClient.WithGlobals(map[string]interface{}{"ext::auth::client_token": c.Cookies("jade-edgedb-auth-token")}).QuerySingle(edgeCtx, `
with with
@ -124,7 +121,7 @@ func RequestOpenai(c *fiber.Ctx, model string, messages []Message, temperature f
select filtered_keys.key limit 1 select filtered_keys.key limit 1
`, &apiKey, "openai") `, &apiKey, "openai")
if err != nil { if err != nil {
return OpenaiChatCompletionResponse{}, fmt.Errorf("error getting OpenAI API key: %w", err) return "JADE internal error: 00-00-0000. Please contact the support."
} }
url := "https://api.openai.com/v1/chat/completions" url := "https://api.openai.com/v1/chat/completions"
@ -138,12 +135,12 @@ func RequestOpenai(c *fiber.Ctx, model string, messages []Message, temperature f
jsonBody, err := json.Marshal(requestBody) jsonBody, err := json.Marshal(requestBody)
if err != nil { if err != nil {
return OpenaiChatCompletionResponse{}, fmt.Errorf("error marshaling JSON: %w", err) return "JADE internal error: 00-01-0001. Please contact the support."
} }
req, err := http.NewRequest("POST", url, bytes.NewBuffer(jsonBody)) req, err := http.NewRequest("POST", url, bytes.NewBuffer(jsonBody))
if err != nil { if err != nil {
return OpenaiChatCompletionResponse{}, fmt.Errorf("error creating request: %w", err) return "JADE internal error: 00-02-0002. Please contact the support."
} }
req.Header.Set("Content-Type", "application/json") req.Header.Set("Content-Type", "application/json")
@ -152,19 +149,25 @@ func RequestOpenai(c *fiber.Ctx, model string, messages []Message, temperature f
client := &http.Client{} client := &http.Client{}
resp, err := client.Do(req) resp, err := client.Do(req)
if err != nil { if err != nil {
return OpenaiChatCompletionResponse{}, fmt.Errorf("error sending request: %w", err) return "JADE internal error: 00-02-0003. Please contact the support."
} }
defer resp.Body.Close() defer resp.Body.Close()
body, err := io.ReadAll(resp.Body) body, err := io.ReadAll(resp.Body)
if err != nil { if err != nil {
return OpenaiChatCompletionResponse{}, fmt.Errorf("error reading response body: %w", err) return "JADE internal error: 00-01-0004. Please contact the support."
}
for key, value := range OpenaiErrorCodes {
if strings.Contains(resp.Status, key) {
return value
}
} }
var chatCompletionResponse OpenaiChatCompletionResponse var chatCompletionResponse OpenaiChatCompletionResponse
err = json.Unmarshal(body, &chatCompletionResponse) err = json.Unmarshal(body, &chatCompletionResponse)
if err != nil { if err != nil {
return OpenaiChatCompletionResponse{}, fmt.Errorf("error unmarshaling JSON: %w", err) return "JADE internal error: 00-01-0005. Please contact the support."
} }
var usedModelInfo ModelInfo var usedModelInfo ModelInfo
@ -177,12 +180,16 @@ func RequestOpenai(c *fiber.Ctx, model string, messages []Message, temperature f
LIMIT 1 LIMIT 1
`, &usedModelInfo, model) `, &usedModelInfo, model)
if err != nil { if err != nil {
return OpenaiChatCompletionResponse{}, fmt.Errorf("error getting model info: %w", err) return "JADE internal error: 00-00-0006. Please contact the support."
} }
var inputCost float32 = float32(chatCompletionResponse.Usage.PromptTokens) * usedModelInfo.InputPrice var inputCost float32 = float32(chatCompletionResponse.Usage.PromptTokens) * usedModelInfo.InputPrice
var outputCost float32 = float32(chatCompletionResponse.Usage.CompletionTokens) * usedModelInfo.OutputPrice var outputCost float32 = float32(chatCompletionResponse.Usage.CompletionTokens) * usedModelInfo.OutputPrice
addUsage(c, inputCost, outputCost, chatCompletionResponse.Usage.PromptTokens, chatCompletionResponse.Usage.CompletionTokens, model) addUsage(c, inputCost, outputCost, chatCompletionResponse.Usage.PromptTokens, chatCompletionResponse.Usage.CompletionTokens, model)
return chatCompletionResponse, nil if len(chatCompletionResponse.Choices) == 0 {
return "JADE internal error: 00-03-0007. Please contact the support."
}
return chatCompletionResponse.Choices[0].Message.Content
} }

View File

@ -3,62 +3,13 @@ package main
import ( import (
"bytes" "bytes"
"encoding/json" "encoding/json"
"fmt"
"io" "io"
"net/http" "net/http"
"strings" "strings"
"github.com/edgedb/edgedb-go"
"github.com/gofiber/fiber/v2" "github.com/gofiber/fiber/v2"
) )
type PerplexityChatCompletionRequest struct {
Model string `json:"model"`
Messages []RequestMessage `json:"messages"`
MaxTokens int `json:"max_tokens"`
Temperature float64 `json:"temperature"`
}
type PerplexityChatCompletionResponse struct {
ID string `json:"id"`
Object string `json:"object"`
Created int64 `json:"created"`
Model string `json:"model"`
Usage PerplexityUsage `json:"usage"`
Choices []PerplexityChoice `json:"choices"`
}
type PerplexityUsage struct {
PromptTokens int32 `json:"prompt_tokens"`
CompletionTokens int32 `json:"completion_tokens"`
TotalTokens int32 `json:"total_tokens"`
}
type PerplexityChoice struct {
Message Message `json:"message"`
FinishReason string `json:"finish_reason"`
Index int `json:"index"`
}
func addPerplexityMessage(c *fiber.Ctx, llm LLM, selected bool) edgedb.UUID {
Messages := getAllSelectedMessages(c)
chatCompletion, err := RequestPerplexity(c, llm.Model.ModelID, Messages, float64(llm.Temperature), llm.Context, int(llm.MaxToken))
if err != nil {
fmt.Println("Error requesting Perplexity: ", err)
id := insertBotMessage(c, "Error requesting Perplexity, model may not be available anymore. Better error message in development.", selected, llm.ID)
return id
} else if len(chatCompletion.Choices) == 0 {
fmt.Println("No response from Perplexity")
id := insertBotMessage(c, "No response from Perplexity", selected, llm.ID)
return id
} else {
Content := chatCompletion.Choices[0].Message.Content
id := insertBotMessage(c, Content, selected, llm.ID)
return id
}
}
func TestPerplexityKey(apiKey string) bool { func TestPerplexityKey(apiKey string) bool {
url := "https://api.perplexity.ai/chat/completions" url := "https://api.perplexity.ai/chat/completions"
@ -70,7 +21,7 @@ func TestPerplexityKey(apiKey string) bool {
}, },
} }
requestBody := PerplexityChatCompletionRequest{ requestBody := OpenaiChatCompletionRequest{
Model: "llama-3-8b-instruct", Model: "llama-3-8b-instruct",
Messages: perplexityMessages, Messages: perplexityMessages,
Temperature: 0, Temperature: 0,
@ -102,7 +53,7 @@ func TestPerplexityKey(apiKey string) bool {
return false return false
} }
var chatCompletionResponse PerplexityChatCompletionResponse var chatCompletionResponse OpenaiChatCompletionResponse
err = json.Unmarshal(body, &chatCompletionResponse) err = json.Unmarshal(body, &chatCompletionResponse)
if err != nil { if err != nil {
return false return false
@ -113,7 +64,12 @@ func TestPerplexityKey(apiKey string) bool {
return true return true
} }
func RequestPerplexity(c *fiber.Ctx, model string, messages []Message, temperature float64, context string, maxTokens int) (PerplexityChatCompletionResponse, error) { func RequestPerplexity(c *fiber.Ctx, llm LLM, messages []Message) string {
model := llm.Model.ModelID
temperature := float64(llm.Temperature)
context := llm.Context
maxTokens := int(llm.MaxToken)
var apiKey string var apiKey string
err := edgeGlobalClient.WithGlobals(map[string]interface{}{"ext::auth::client_token": c.Cookies("jade-edgedb-auth-token")}).QuerySingle(edgeCtx, ` err := edgeGlobalClient.WithGlobals(map[string]interface{}{"ext::auth::client_token": c.Cookies("jade-edgedb-auth-token")}).QuerySingle(edgeCtx, `
with with
@ -125,12 +81,12 @@ func RequestPerplexity(c *fiber.Ctx, model string, messages []Message, temperatu
select filtered_keys.key limit 1 select filtered_keys.key limit 1
`, &apiKey, "perplexity") `, &apiKey, "perplexity")
if err != nil { if err != nil {
return PerplexityChatCompletionResponse{}, fmt.Errorf("error getting Perplexity API key: %w", err) return "JADE internal error: 06-00-0000. Please contact the support."
} }
url := "https://api.perplexity.ai/chat/completions" url := "https://api.perplexity.ai/chat/completions"
requestBody := PerplexityChatCompletionRequest{ requestBody := OpenaiChatCompletionRequest{
Model: model, Model: model,
Messages: Message2RequestMessage(messages, context), Messages: Message2RequestMessage(messages, context),
MaxTokens: maxTokens, MaxTokens: maxTokens,
@ -139,12 +95,12 @@ func RequestPerplexity(c *fiber.Ctx, model string, messages []Message, temperatu
jsonBody, err := json.Marshal(requestBody) jsonBody, err := json.Marshal(requestBody)
if err != nil { if err != nil {
return PerplexityChatCompletionResponse{}, fmt.Errorf("error marshaling JSON: %w", err) return "JADE internal error: 06-01-0001. Please contact the support."
} }
req, err := http.NewRequest("POST", url, bytes.NewBuffer(jsonBody)) req, err := http.NewRequest("POST", url, bytes.NewBuffer(jsonBody))
if err != nil { if err != nil {
return PerplexityChatCompletionResponse{}, fmt.Errorf("error creating request: %w", err) return "JADE internal error: 06-02-0002. Please contact the support."
} }
req.Header.Set("Content-Type", "application/json") req.Header.Set("Content-Type", "application/json")
@ -153,19 +109,25 @@ func RequestPerplexity(c *fiber.Ctx, model string, messages []Message, temperatu
client := &http.Client{} client := &http.Client{}
resp, err := client.Do(req) resp, err := client.Do(req)
if err != nil { if err != nil {
return PerplexityChatCompletionResponse{}, fmt.Errorf("error sending request: %w", err) return "JADE internal error: 06-02-0003. Please contact the support."
} }
defer resp.Body.Close() defer resp.Body.Close()
body, err := io.ReadAll(resp.Body) body, err := io.ReadAll(resp.Body)
if err != nil { if err != nil {
return PerplexityChatCompletionResponse{}, fmt.Errorf("error reading response body: %w", err) return "JADE internal error: 06-01-0004. Please contact the support."
} }
var chatCompletionResponse PerplexityChatCompletionResponse for key, value := range OpenaiErrorCodes {
if strings.Contains(resp.Status, key) {
return value
}
}
var chatCompletionResponse OpenaiChatCompletionResponse
err = json.Unmarshal(body, &chatCompletionResponse) err = json.Unmarshal(body, &chatCompletionResponse)
if err != nil { if err != nil {
return PerplexityChatCompletionResponse{}, fmt.Errorf("error unmarshaling JSON: %w", err) return "JADE internal error: 06-01-0005. Please contact the support."
} }
var usedModelInfo ModelInfo var usedModelInfo ModelInfo
@ -178,19 +140,16 @@ func RequestPerplexity(c *fiber.Ctx, model string, messages []Message, temperatu
LIMIT 1 LIMIT 1
`, &usedModelInfo, model) `, &usedModelInfo, model)
if err != nil { if err != nil {
return PerplexityChatCompletionResponse{}, fmt.Errorf("error getting model info: %w", err) return "JADE internal error: 06-00-0006. Please contact the support."
} }
var inputCost float32 = float32(chatCompletionResponse.Usage.PromptTokens) * usedModelInfo.InputPrice var inputCost float32 = float32(chatCompletionResponse.Usage.PromptTokens) * usedModelInfo.InputPrice
var outputCost float32 = float32(chatCompletionResponse.Usage.CompletionTokens) * usedModelInfo.OutputPrice var outputCost float32 = float32(chatCompletionResponse.Usage.CompletionTokens) * usedModelInfo.OutputPrice
// If online model end with -online add a small cost
if strings.HasSuffix(model, "-online") {
inputCost += 0.005
outputCost += 0.005
}
addUsage(c, inputCost, outputCost, chatCompletionResponse.Usage.PromptTokens, chatCompletionResponse.Usage.CompletionTokens, model) addUsage(c, inputCost, outputCost, chatCompletionResponse.Usage.PromptTokens, chatCompletionResponse.Usage.CompletionTokens, model)
return chatCompletionResponse, nil if len(chatCompletionResponse.Choices) == 0 {
return "JADE internal error: 06-03-0007. Please contact the support."
}
return chatCompletionResponse.Choices[0].Message.Content
} }

View File

@ -3,61 +3,28 @@ package main
import ( import (
"bytes" "bytes"
"encoding/json" "encoding/json"
"fmt"
"io" "io"
"net/http" "net/http"
"strings"
"github.com/edgedb/edgedb-go"
"github.com/gofiber/fiber/v2" "github.com/gofiber/fiber/v2"
) )
type TogetherChatCompletionRequest struct {
Model string `json:"model"`
Messages []RequestMessage `json:"messages"`
MaxTokens int `json:"max_tokens"`
Temperature float64 `json:"temperature"`
}
type TogetherChatCompletionResponse struct { type TogetherChatCompletionResponse struct {
ID string `json:"id"` ID string `json:"id"`
Object string `json:"object"` Object string `json:"object"`
Created int64 `json:"created"` Created int64 `json:"created"`
Model string `json:"model"` Model string `json:"model"`
Usage TogetherUsage `json:"usage"` Usage OpenaiUsage `json:"usage"`
Choices []TogetherChoice `json:"choices"` Choices []TogetherChoice `json:"choices"`
} }
type TogetherUsage struct {
PromptTokens int32 `json:"prompt_tokens"`
CompletionTokens int32 `json:"completion_tokens"`
TotalTokens int32 `json:"total_tokens"`
}
type TogetherChoice struct { type TogetherChoice struct {
Text string `json:"text"` Text string `json:"text"`
FinishReason string `json:"finish_reason"` FinishReason string `json:"finish_reason"`
Index int `json:"index"` Index int `json:"index"`
} }
func addTogetherMessage(c *fiber.Ctx, llm LLM, selected bool) edgedb.UUID {
Messages := getAllSelectedMessages(c)
chatCompletion, err := RequestTogether(c, llm.Model.ModelID, Messages, float64(llm.Temperature), llm.Context, int(llm.MaxToken))
if err != nil {
fmt.Println("Error requesting Together: ", err)
id := insertBotMessage(c, "Error requesting Together, model may not be available anymore. Better error message in development.", selected, llm.ID)
return id
} else if len(chatCompletion.Choices) == 0 {
fmt.Println("No response from Together")
id := insertBotMessage(c, "No response from Together", selected, llm.ID)
return id
} else {
Content := chatCompletion.Choices[0].Text
id := insertBotMessage(c, Content, selected, llm.ID)
return id
}
}
func TestTogetherKey(apiKey string) bool { func TestTogetherKey(apiKey string) bool {
url := "https://api.together.xyz/v1/completions" url := "https://api.together.xyz/v1/completions"
@ -69,7 +36,7 @@ func TestTogetherKey(apiKey string) bool {
}, },
} }
requestBody := TogetherChatCompletionRequest{ requestBody := OpenaiChatCompletionRequest{
Model: "meta-llama/Meta-Llama-3.1-8B-Instruct-Turbo", Model: "meta-llama/Meta-Llama-3.1-8B-Instruct-Turbo",
Messages: togetherMessages, Messages: togetherMessages,
Temperature: 0, Temperature: 0,
@ -101,7 +68,7 @@ func TestTogetherKey(apiKey string) bool {
return false return false
} }
var chatCompletionResponse TogetherChatCompletionResponse var chatCompletionResponse OpenaiChatCompletionResponse
err = json.Unmarshal(body, &chatCompletionResponse) err = json.Unmarshal(body, &chatCompletionResponse)
if err != nil { if err != nil {
return false return false
@ -112,7 +79,12 @@ func TestTogetherKey(apiKey string) bool {
return true return true
} }
func RequestTogether(c *fiber.Ctx, model string, messages []Message, temperature float64, context string, maxTokens int) (TogetherChatCompletionResponse, error) { func RequestTogether(c *fiber.Ctx, llm LLM, messages []Message) string {
model := llm.Model.ModelID
temperature := float64(llm.Temperature)
context := llm.Context
maxTokens := int(llm.MaxToken)
var apiKey string var apiKey string
err := edgeGlobalClient.WithGlobals(map[string]interface{}{"ext::auth::client_token": c.Cookies("jade-edgedb-auth-token")}).QuerySingle(edgeCtx, ` err := edgeGlobalClient.WithGlobals(map[string]interface{}{"ext::auth::client_token": c.Cookies("jade-edgedb-auth-token")}).QuerySingle(edgeCtx, `
with with
@ -124,12 +96,12 @@ func RequestTogether(c *fiber.Ctx, model string, messages []Message, temperature
select filtered_keys.key limit 1 select filtered_keys.key limit 1
`, &apiKey, "together") `, &apiKey, "together")
if err != nil { if err != nil {
return TogetherChatCompletionResponse{}, fmt.Errorf("error getting Together AI API key: %w", err) return "JADE internal error: 07-00-0000. Please contact the support."
} }
url := "https://api.together.xyz/v1/completions" url := "https://api.together.xyz/v1/completions"
requestBody := TogetherChatCompletionRequest{ requestBody := OpenaiChatCompletionRequest{
Model: model, Model: model,
Messages: Message2RequestMessage(messages, context), Messages: Message2RequestMessage(messages, context),
MaxTokens: maxTokens, MaxTokens: maxTokens,
@ -138,12 +110,12 @@ func RequestTogether(c *fiber.Ctx, model string, messages []Message, temperature
jsonBody, err := json.Marshal(requestBody) jsonBody, err := json.Marshal(requestBody)
if err != nil { if err != nil {
return TogetherChatCompletionResponse{}, fmt.Errorf("error marshaling JSON: %w", err) return "JADE internal error: 07-01-0001. Please contact the support."
} }
req, err := http.NewRequest("POST", url, bytes.NewBuffer(jsonBody)) req, err := http.NewRequest("POST", url, bytes.NewBuffer(jsonBody))
if err != nil { if err != nil {
return TogetherChatCompletionResponse{}, fmt.Errorf("error creating request: %w", err) return "JADE internal error: 07-02-0002. Please contact the support."
} }
req.Header.Set("Content-Type", "application/json") req.Header.Set("Content-Type", "application/json")
@ -152,19 +124,25 @@ func RequestTogether(c *fiber.Ctx, model string, messages []Message, temperature
client := &http.Client{} client := &http.Client{}
resp, err := client.Do(req) resp, err := client.Do(req)
if err != nil { if err != nil {
return TogetherChatCompletionResponse{}, fmt.Errorf("error sending request: %w", err) return "JADE internal error: 07-02-0003. Please contact the support."
} }
defer resp.Body.Close() defer resp.Body.Close()
body, err := io.ReadAll(resp.Body) body, err := io.ReadAll(resp.Body)
if err != nil { if err != nil {
return TogetherChatCompletionResponse{}, fmt.Errorf("error reading response body: %w", err) return "JADE internal error: 07-01-0004. Please contact the support."
}
for key, value := range OpenaiErrorCodes {
if strings.Contains(resp.Status, key) {
return value
}
} }
var chatCompletionResponse TogetherChatCompletionResponse var chatCompletionResponse TogetherChatCompletionResponse
err = json.Unmarshal(body, &chatCompletionResponse) err = json.Unmarshal(body, &chatCompletionResponse)
if err != nil { if err != nil {
return TogetherChatCompletionResponse{}, fmt.Errorf("error unmarshaling JSON: %w", err) return "JADE internal error: 07-01-0005. Please contact the support."
} }
var usedModelInfo ModelInfo var usedModelInfo ModelInfo
@ -177,12 +155,16 @@ func RequestTogether(c *fiber.Ctx, model string, messages []Message, temperature
LIMIT 1 LIMIT 1
`, &usedModelInfo, model) `, &usedModelInfo, model)
if err != nil { if err != nil {
return TogetherChatCompletionResponse{}, fmt.Errorf("error getting model info: %w", err) return "JADE internal error: 07-00-0006. Please contact the support."
} }
var inputCost float32 = float32(chatCompletionResponse.Usage.PromptTokens) * usedModelInfo.InputPrice var inputCost float32 = float32(chatCompletionResponse.Usage.PromptTokens) * usedModelInfo.InputPrice
var outputCost float32 = float32(chatCompletionResponse.Usage.CompletionTokens) * usedModelInfo.OutputPrice var outputCost float32 = float32(chatCompletionResponse.Usage.CompletionTokens) * usedModelInfo.OutputPrice
addUsage(c, inputCost, outputCost, chatCompletionResponse.Usage.PromptTokens, chatCompletionResponse.Usage.CompletionTokens, model) addUsage(c, inputCost, outputCost, chatCompletionResponse.Usage.PromptTokens, chatCompletionResponse.Usage.CompletionTokens, model)
return chatCompletionResponse, nil if len(chatCompletionResponse.Choices) == 0 {
return "JADE internal error: 07-03-0007. Please contact the support."
}
return chatCompletionResponse.Choices[0].Text
} }

View File

@ -25,3 +25,4 @@
[ ] Change the terms of service and enter keys page to an HTML [ ] Change the terms of service and enter keys page to an HTML
[ ] Split Chat.go into smaller files [ ] Split Chat.go into smaller files
[ ] Create a Request package [ ] Create a Request package
[ ] Use the normal RequestProvider function instead of TestProvider to remove TestProvider

View File

@ -147,7 +147,6 @@ func addKeys(c *fiber.Ctx) error {
"anthropic": TestAnthropicKey, "anthropic": TestAnthropicKey,
"mistral": TestMistralKey, "mistral": TestMistralKey,
"groq": TestGroqKey, "groq": TestGroqKey,
"gooseai": TestGooseaiKey,
"google": TestGoogleKey, "google": TestGoogleKey,
"nim": TestNimKey, "nim": TestNimKey,
"perplexity": TestPerplexityKey, "perplexity": TestPerplexityKey,