Change how messages are requested to make it simpler

This commit is contained in:
Adrien Bouvais 2024-08-07 15:07:41 +02:00
parent 8a962f510e
commit 3e8be1400b
18 changed files with 444 additions and 860 deletions

View File

@ -195,7 +195,7 @@ func insertUserMessage(c *fiber.Ctx, content string) edgedb.UUID {
return inserted.id
}
func insertBotMessage(c *fiber.Ctx, content string, selected bool, llmUUID edgedb.UUID) edgedb.UUID {
func insertBotMessage(c *fiber.Ctx, content string, llmUUID edgedb.UUID) edgedb.UUID {
var lastArea Area
err := edgeGlobalClient.WithGlobals(map[string]interface{}{"ext::auth::client_token": c.Cookies("jade-edgedb-auth-token")}).QuerySingle(edgeCtx, `
SELECT Area {
@ -215,15 +215,15 @@ func insertBotMessage(c *fiber.Ctx, content string, selected bool, llmUUID edged
INSERT Message {
role := <str>$0,
content := <str>$2,
selected := <bool>$3,
selected := false,
conversation := (
SELECT Area
FILTER .id = <uuid>$4
FILTER .id = <uuid>$3
LIMIT 1
).conversation,
area := (
SELECT Area
FILTER .id = <uuid>$4
FILTER .id = <uuid>$3
LIMIT 1
),
llm := (
@ -232,7 +232,7 @@ func insertBotMessage(c *fiber.Ctx, content string, selected bool, llmUUID edged
LIMIT 1
)
}
`, &inserted, "bot", llmUUID, content, selected, lastArea.ID)
`, &inserted, "bot", llmUUID, content, lastArea.ID)
if err != nil {
fmt.Println("Error inserting bot message")
panic(err)

33
JadeInternalError.md Normal file
View File

@ -0,0 +1,33 @@
# Request errors
The format of a request error is: aa-bb-cccc,
where aa is the provider number, bb is the error type, and cccc is the error code.
## Provider number
00: OpenAI
01: Anthropic
02: Mistral
03: Google
04: Groq
05: Nim
06: Perplexity
07: TogetherAI
08: DeepSeek
09: Firework
10: Custom
99: JADE
## Error type
00: Database error
01: Golang error
02: HTTP error
03: Missing status code
## Error code
0000: Can't find API key in database
0001: Error using `jsonBody, err := json.Marshal(requestBody)` in RequestProvider function
0002: Error using `req, err := http.NewRequest("POST", url, bytes.NewBuffer(jsonBody))` in RequestProvider function
0003: Error using `resp, err := client.Do(req)` in RequestProvider function
0004: Error using `body, err := io.ReadAll(resp.Body)` in RequestProvider function
0005: Error using `err = json.Unmarshal(body, &chatCompletionResponse)` in RequestProvider function
0006: Can't find modelInfo in database
0007: No choice returned at the end of the request. Most likely an error case that is not yet handled.

View File

@ -117,6 +117,8 @@ func GenerateMultipleMessagesHandler(c *fiber.Ctx) error {
authCookie := c.Cookies("jade-edgedb-auth-token")
var messages []Message = getAllSelectedMessages(c)
// Create a wait group to synchronize the goroutines
var wg sync.WaitGroup
@ -136,38 +138,34 @@ func GenerateMultipleMessagesHandler(c *fiber.Ctx) error {
defer cancel() // Ensure the context is cancelled to free resources
// Determine which message function to call based on the model
var addMessageFunc func(c *fiber.Ctx, selectedLLM LLM, selected bool) edgedb.UUID
var addMessageFunc func(c *fiber.Ctx, llm LLM, messages []Message) string
switch selectedLLMs[idx].Model.Company.Name {
case "openai":
addMessageFunc = addOpenaiMessage
addMessageFunc = RequestOpenai
case "anthropic":
addMessageFunc = addAnthropicMessage
addMessageFunc = RequestAnthropic
case "mistral":
addMessageFunc = addMistralMessage
addMessageFunc = RequestMistral
case "groq":
addMessageFunc = addGroqMessage
case "gooseai":
addMessageFunc = addGooseaiMessage
addMessageFunc = RequestGroq
case "huggingface":
addMessageFunc = addHuggingfaceMessage
addMessageFunc = RequestHuggingface
case "google":
addMessageFunc = addGoogleMessage
addMessageFunc = RequestGoogle
case "perplexity":
addMessageFunc = addPerplexityMessage
addMessageFunc = RequestPerplexity
case "fireworks":
addMessageFunc = addFireworkMessage
addMessageFunc = RequestFirework
case "nim":
addMessageFunc = addNimMessage
addMessageFunc = RequestNim
case "together":
addMessageFunc = addTogetherMessage
addMessageFunc = RequestTogether
case "deepseek":
addMessageFunc = addDeepseekMessage
addMessageFunc = RequestDeepseek
}
var messageID edgedb.UUID
if addMessageFunc != nil {
messageID = addMessageFunc(c, selectedLLMs[idx], false)
}
var content string = addMessageFunc(c, selectedLLMs[idx], messages)
var messageUUID edgedb.UUID = insertBotMessage(c, content, selectedLLMs[idx].ID)
var message Message
err := edgeGlobalClient.WithGlobals(map[string]interface{}{"ext::auth::client_token": c.Cookies("jade-edgedb-auth-token")}).QuerySingle(edgeCtx, `
@ -189,7 +187,7 @@ func GenerateMultipleMessagesHandler(c *fiber.Ctx) error {
}
}
FILTER .id = <uuid>$0;
`, &message, messageID)
`, &message, messageUUID)
if err != nil {
fmt.Println("Error getting message for the placeholder. The function addProviderMessage seem to not return any message ID.")
panic(err)
@ -201,6 +199,12 @@ func GenerateMultipleMessagesHandler(c *fiber.Ctx) error {
default:
select {
case firstDone <- idx:
_ = edgeGlobalClient.WithGlobals(map[string]interface{}{"ext::auth::client_token": authCookie}).Execute(edgeCtx, `
UPDATE Message
FILTER .id = <uuid>$0
SET {selected := true};
`, messageUUID)
outIcon := `<img src="` + selectedLLMs[idx].Model.Company.Icon + `" alt="User Image" id="selectedIcon-` + fmt.Sprintf("%d", message.Area.Position) + `">`
sendEvent(

View File

@ -3,11 +3,10 @@ package main
import (
"bytes"
"encoding/json"
"fmt"
"io"
"net/http"
"strings"
"github.com/edgedb/edgedb-go"
"github.com/gofiber/fiber/v2"
)
@ -37,22 +36,17 @@ type AnthropicUsage struct {
OutputTokens int32 `json:"output_tokens"`
}
func addAnthropicMessage(c *fiber.Ctx, llm LLM, selected bool) edgedb.UUID {
Messages := getAllSelectedMessages(c)
var AnthropicErrorCodes map[string]string
chatCompletion, err := RequestAnthropic(c, llm.Model.ModelID, Messages, float64(llm.Temperature), llm.Context, int(llm.MaxToken))
if err != nil {
fmt.Println("Error requesting Anthropic: ", err)
id := insertBotMessage(c, "Error requesting Anthropic, model may not be available anymore. Better error message in development.", selected, llm.ID)
return id
} else if len(chatCompletion.Content) == 0 {
fmt.Println("No response from Anthropic")
id := insertBotMessage(c, "No response from Anthropic", selected, llm.ID)
return id
} else {
id := insertBotMessage(c, chatCompletion.Content[0].Text, selected, llm.ID)
return id
}
// TODO Update
// init populates AnthropicErrorCodes, mapping HTTP status codes returned by
// the Anthropic API to user-facing explanations.
func init() {
	AnthropicErrorCodes = make(map[string]string)
	AnthropicErrorCodes["401"] = "Invalid Authentication - Ensure that the API key is still valid."
	AnthropicErrorCodes["403"] = "Accessing the API from an unsupported country, region, or territory."
	// A 429 can mean either rate limiting or exhausted credits. The previous
	// code assigned the "429" key twice, so only the second message survived;
	// both causes are now covered by a single message.
	AnthropicErrorCodes["429"] = "Rate limit reached, or you have run out of credits / hit your maximum monthly spend - Slow down your requests or buy more credits."
	// The 500/503 messages previously referenced OpenAI and its status page,
	// which is wrong for the Anthropic error map.
	AnthropicErrorCodes["500"] = "Issue on Anthropic servers - Retry your request after a brief wait and contact Anthropic if the issue persists. Check the status page https://status.anthropic.com/."
	AnthropicErrorCodes["503"] = "Anthropic servers are experiencing high traffic - Please retry your requests after a brief wait."
}
func TestAnthropicKey(apiKey string) bool {
@ -110,13 +104,16 @@ func TestAnthropicKey(apiKey string) bool {
return true
}
func RequestAnthropic(c *fiber.Ctx, model string, messages []Message, temperature float64, context string, maxTokens int) (AnthropicChatCompletionResponse, error) {
func RequestAnthropic(c *fiber.Ctx, llm LLM, messages []Message) string {
model := llm.Model.ModelID
temperature := float64(llm.Temperature)
context := llm.Context
maxTokens := int(llm.MaxToken)
if maxTokens == 0 {
maxTokens = 4096
}
fmt.Println("Requesting anthropic using max token:", maxTokens)
var apiKey struct {
Key string `edgedb:"key"`
}
@ -128,7 +125,7 @@ func RequestAnthropic(c *fiber.Ctx, model string, messages []Message, temperatur
LIMIT 1
`, &apiKey, "anthropic")
if err != nil {
return AnthropicChatCompletionResponse{}, fmt.Errorf("error getting Anthropic API key: %w", err)
return "JADE internal error: 01-00-0000. Please contact the support."
}
url := "https://api.anthropic.com/v1/messages"
@ -143,12 +140,13 @@ func RequestAnthropic(c *fiber.Ctx, model string, messages []Message, temperatur
jsonBody, err := json.Marshal(requestBody)
if err != nil {
return AnthropicChatCompletionResponse{}, fmt.Errorf("error marshaling JSON: %w", err)
return "JADE internal error: 01-01-0001. Please contact the support."
}
req, err := http.NewRequest("POST", url, bytes.NewBuffer(jsonBody))
if err != nil {
return AnthropicChatCompletionResponse{}, fmt.Errorf("error creating request: %w", err)
return "JADE internal error: 01-02-0002. Please contact the support."
}
req.Header.Set("x-api-key", apiKey.Key)
@ -158,19 +156,25 @@ func RequestAnthropic(c *fiber.Ctx, model string, messages []Message, temperatur
client := &http.Client{}
resp, err := client.Do(req)
if err != nil {
return AnthropicChatCompletionResponse{}, fmt.Errorf("error sending request: %w", err)
return "JADE internal error: 01-02-0003. Please contact the support."
}
defer resp.Body.Close()
body, err := io.ReadAll(resp.Body)
if err != nil {
return AnthropicChatCompletionResponse{}, fmt.Errorf("error reading response body: %w", err)
return "JADE internal error: 01-01-0004. Please contact the support."
}
for key, value := range AnthropicErrorCodes {
if strings.Contains(resp.Status, key) {
return value
}
}
var chatCompletionResponse AnthropicChatCompletionResponse
err = json.Unmarshal(body, &chatCompletionResponse)
if err != nil {
return AnthropicChatCompletionResponse{}, fmt.Errorf("error unmarshaling JSON: %w", err)
return "JADE internal error: 01-01-0005. Please contact the support."
}
var usedModelInfo ModelInfo
@ -183,12 +187,16 @@ func RequestAnthropic(c *fiber.Ctx, model string, messages []Message, temperatur
LIMIT 1
`, &usedModelInfo, model)
if err != nil {
return AnthropicChatCompletionResponse{}, fmt.Errorf("error getting model info: %w", err)
return "JADE internal error: 01-00-0006. Please contact the support."
}
var inputCost float32 = float32(chatCompletionResponse.Usage.InputTokens) * usedModelInfo.InputPrice
var outputCost float32 = float32(chatCompletionResponse.Usage.OutputTokens) * usedModelInfo.OutputPrice
addUsage(c, inputCost, outputCost, chatCompletionResponse.Usage.InputTokens, chatCompletionResponse.Usage.OutputTokens, model)
return chatCompletionResponse, nil
if len(chatCompletionResponse.Content) == 0 {
return "JADE internal error: 01-03-0007. Please contact the support."
}
return chatCompletionResponse.Content[0].Text
}

66
RequestCustomEndpoint.go Normal file
View File

@ -0,0 +1,66 @@
package main
import (
"bytes"
"encoding/json"
"io"
"net/http"
"strings"
"github.com/gofiber/fiber/v2"
)
// RequestHuggingface sends the conversation to the custom OpenAI-compatible
// endpoint configured on the LLM (e.g. a Hugging Face TGI server) and returns
// the generated text, or a "JADE internal error" string (provider 10, see
// JadeInternalError.md) describing what went wrong.
func RequestHuggingface(c *fiber.Ctx, llm LLM, messages []Message) string {
	url := llm.Endpoint.Endpoint
	temperature := float64(llm.Temperature)
	context := llm.Context
	maxTokens := int(llm.MaxToken)

	// "tgi" is the model name expected by text-generation-inference servers.
	requestBody := OpenaiChatCompletionRequest{
		Model:       "tgi",
		Messages:    Message2RequestMessage(messages, context),
		Temperature: temperature,
		MaxTokens:   maxTokens,
	}

	jsonBody, err := json.Marshal(requestBody)
	if err != nil {
		return "JADE internal error: 10-01-0001. Please contact the support."
	}

	req, err := http.NewRequest("POST", url, bytes.NewBuffer(jsonBody))
	if err != nil {
		return "JADE internal error: 10-02-0002. Please contact the support."
	}
	req.Header.Set("Authorization", "Bearer "+llm.Endpoint.Key)
	req.Header.Set("Content-Type", "application/json")

	client := &http.Client{}
	resp, err := client.Do(req)
	if err != nil {
		return "JADE internal error: 10-02-0003. Please contact the support."
	}
	defer resp.Body.Close()

	body, err := io.ReadAll(resp.Body)
	if err != nil {
		return "JADE internal error: 10-01-0004. Please contact the support."
	}

	// Map known HTTP error statuses to user-facing messages.
	for key, value := range OpenaiErrorCodes {
		if strings.Contains(resp.Status, key) {
			return value
		}
	}

	var chatCompletionResponse OpenaiChatCompletionResponse
	err = json.Unmarshal(body, &chatCompletionResponse)
	if err != nil {
		return "JADE internal error: 10-01-0005. Please contact the support."
	}

	// Custom endpoints have no per-token pricing; record zero-cost usage.
	addUsage(c, 0, 0, 0, 0, llm.Model.ModelID)

	// Guard against an empty choice list: indexing Choices[0] directly
	// panicked when the endpoint returned no choices (error code 0007).
	if len(chatCompletionResponse.Choices) == 0 {
		return "JADE internal error: 10-03-0007. Please contact the support."
	}
	return chatCompletionResponse.Choices[0].Message.Content
}

View File

@ -3,61 +3,13 @@ package main
import (
"bytes"
"encoding/json"
"fmt"
"io"
"net/http"
"strings"
"github.com/edgedb/edgedb-go"
"github.com/gofiber/fiber/v2"
)
// DeepseekChatCompletionRequest is the JSON request body sent to the
// DeepSeek chat-completions endpoint.
type DeepseekChatCompletionRequest struct {
Model string `json:"model"`
Messages []RequestMessage `json:"messages"`
MaxTokens int `json:"max_tokens"`
Temperature float64 `json:"temperature"`
}
// DeepseekChatCompletionResponse is the JSON response returned by the
// DeepSeek chat-completions endpoint.
type DeepseekChatCompletionResponse struct {
ID string `json:"id"`
Object string `json:"object"`
Created int64 `json:"created"`
Model string `json:"model"`
Usage DeepseekUsage `json:"usage"`
Choices []DeepseekChoice `json:"choices"`
}
// DeepseekUsage reports the token counts consumed by a DeepSeek request.
type DeepseekUsage struct {
PromptTokens int32 `json:"prompt_tokens"`
CompletionTokens int32 `json:"completion_tokens"`
TotalTokens int32 `json:"total_tokens"`
}
// DeepseekChoice is a single generated completion within a response.
type DeepseekChoice struct {
Message Message `json:"message"`
FinishReason string `json:"finish_reason"`
Index int `json:"index"`
}
// addDeepseekMessage requests a chat completion from DeepSeek for the given
// LLM, stores the result (or an error placeholder) as a bot message, and
// returns the inserted message's UUID.
func addDeepseekMessage(c *fiber.Ctx, llm LLM, selected bool) edgedb.UUID {
Messages := getAllSelectedMessages(c)
chatCompletion, err := RequestDeepseek(c, llm.Model.ModelID, Messages, float64(llm.Temperature), llm.Context, int(llm.MaxToken))
// Failures are stored as messages so the conversation stays usable.
if err != nil {
fmt.Println("Error requesting Deepseek: ", err)
id := insertBotMessage(c, "Error requesting DeepSeek, model may not be available anymore. Better error message in development.", selected, llm.ID)
return id
} else if len(chatCompletion.Choices) == 0 {
fmt.Println("No response from DeepSeek")
id := insertBotMessage(c, "No response from DeepSeek", selected, llm.ID)
return id
} else {
Content := chatCompletion.Choices[0].Message.Content
id := insertBotMessage(c, Content, selected, llm.ID)
return id
}
}
func TestDeepseekKey(apiKey string) bool {
url := "https://api.deepseek.com/chat/completions"
@ -69,7 +21,7 @@ func TestDeepseekKey(apiKey string) bool {
},
}
requestBody := DeepseekChatCompletionRequest{
requestBody := OpenaiChatCompletionRequest{
Model: "deepseek-chat",
Messages: deepseekMessages,
Temperature: 0,
@ -78,13 +30,11 @@ func TestDeepseekKey(apiKey string) bool {
jsonBody, err := json.Marshal(requestBody)
if err != nil {
fmt.Println("Failed to test Deepseek API key - json.Marshal :", err)
return false
}
req, err := http.NewRequest("POST", url, bytes.NewBuffer(jsonBody))
if err != nil {
fmt.Println("Failed to test Deepseek API key - http.NewRequest :", err)
return false
}
@ -94,35 +44,36 @@ func TestDeepseekKey(apiKey string) bool {
client := &http.Client{}
resp, err := client.Do(req)
if err != nil {
fmt.Println("Failed to test Deepseek API key - client.Do :", err)
return false
}
defer resp.Body.Close()
fmt.Println(resp.Status)
body, err := io.ReadAll(resp.Body)
if err != nil {
fmt.Println("Failed to test Deepseek API key - io.ReadAll :", err)
return false
}
var chatCompletionResponse DeepseekChatCompletionResponse
var chatCompletionResponse OpenaiChatCompletionResponse
err = json.Unmarshal(body, &chatCompletionResponse)
if err != nil {
fmt.Println("Failed to test Deepseek API key - json.Marshal :", err)
return false
}
if chatCompletionResponse.Usage.CompletionTokens == 0 {
fmt.Println("Failed to test Deepseek API key - No completion tokens :", err)
return false
}
return true
}
func RequestDeepseek(c *fiber.Ctx, model string, messages []Message, temperature float64, context string, maxTokens int) (DeepseekChatCompletionResponse, error) {
func RequestDeepseek(c *fiber.Ctx, llm LLM, messages []Message) string {
model := llm.Model.ModelID
temperature := float64(llm.Temperature)
context := llm.Context
maxTokens := int(llm.MaxToken)
url := "https://api.deepseek.com/chat/completions"
var apiKey string
err := edgeGlobalClient.WithGlobals(map[string]interface{}{"ext::auth::client_token": c.Cookies("jade-edgedb-auth-token")}).QuerySingle(edgeCtx, `
with
@ -134,12 +85,10 @@ func RequestDeepseek(c *fiber.Ctx, model string, messages []Message, temperature
select filtered_keys.key limit 1
`, &apiKey, "deepseek")
if err != nil {
return DeepseekChatCompletionResponse{}, fmt.Errorf("error getting DeepSeek API key: %w", err)
return "JADE internal error: 08-00-0000. Please contact the support."
}
url := "https://api.deepseek.com/chat/completions"
requestBody := DeepseekChatCompletionRequest{
requestBody := OpenaiChatCompletionRequest{
Model: model,
Messages: Message2RequestMessage(messages, context),
MaxTokens: maxTokens,
@ -148,12 +97,12 @@ func RequestDeepseek(c *fiber.Ctx, model string, messages []Message, temperature
jsonBody, err := json.Marshal(requestBody)
if err != nil {
return DeepseekChatCompletionResponse{}, fmt.Errorf("error marshaling JSON: %w", err)
return "JADE internal error: 08-01-0001. Please contact the support."
}
req, err := http.NewRequest("POST", url, bytes.NewBuffer(jsonBody))
if err != nil {
return DeepseekChatCompletionResponse{}, fmt.Errorf("error creating request: %w", err)
return "JADE internal error: 08-02-0002. Please contact the support."
}
req.Header.Set("Content-Type", "application/json")
@ -162,24 +111,25 @@ func RequestDeepseek(c *fiber.Ctx, model string, messages []Message, temperature
client := &http.Client{}
resp, err := client.Do(req)
if err != nil {
return DeepseekChatCompletionResponse{}, fmt.Errorf("error sending request: %w", err)
return "JADE internal error: 08-02-0003. Please contact the support."
}
defer resp.Body.Close()
// TODO: Add a message to the user and do it for all 400 things
if resp.Status == "402 Payment Required" {
return DeepseekChatCompletionResponse{}, fmt.Errorf("error reading response body: %w", err)
}
body, err := io.ReadAll(resp.Body)
if err != nil {
return DeepseekChatCompletionResponse{}, fmt.Errorf("error reading response body: %w", err)
return "JADE internal error: 08-01-0004. Please contact the support."
}
var chatCompletionResponse DeepseekChatCompletionResponse
for key, value := range OpenaiErrorCodes {
if strings.Contains(resp.Status, key) {
return value
}
}
var chatCompletionResponse OpenaiChatCompletionResponse
err = json.Unmarshal(body, &chatCompletionResponse)
if err != nil {
return DeepseekChatCompletionResponse{}, fmt.Errorf("error unmarshaling JSON: %w", err)
return "JADE internal error: 08-01-0005. Please contact the support."
}
var usedModelInfo ModelInfo
@ -192,12 +142,16 @@ func RequestDeepseek(c *fiber.Ctx, model string, messages []Message, temperature
LIMIT 1
`, &usedModelInfo, model)
if err != nil {
return DeepseekChatCompletionResponse{}, fmt.Errorf("error getting model info: %w", err)
return "JADE internal error: 08-00-0006. Please contact the support."
}
var inputCost float32 = float32(chatCompletionResponse.Usage.PromptTokens) * usedModelInfo.InputPrice
var outputCost float32 = float32(chatCompletionResponse.Usage.CompletionTokens) * usedModelInfo.OutputPrice
addUsage(c, inputCost, outputCost, chatCompletionResponse.Usage.PromptTokens, chatCompletionResponse.Usage.CompletionTokens, model)
return chatCompletionResponse, nil
if len(chatCompletionResponse.Choices) == 0 {
return "JADE internal error: 08-03-0007. Please contact the support."
}
return chatCompletionResponse.Choices[0].Message.Content
}

View File

@ -3,61 +3,13 @@ package main
import (
"bytes"
"encoding/json"
"fmt"
"io"
"net/http"
"strings"
"github.com/edgedb/edgedb-go"
"github.com/gofiber/fiber/v2"
)
// FireworkChatCompletionRequest is the JSON request body sent to the
// Fireworks chat-completions endpoint.
type FireworkChatCompletionRequest struct {
Model string `json:"model"`
Messages []RequestMessage `json:"messages"`
MaxTokens int `json:"max_tokens"`
Temperature float64 `json:"temperature"`
}
// FireworkChatCompletionResponse is the JSON response returned by the
// Fireworks chat-completions endpoint.
type FireworkChatCompletionResponse struct {
ID string `json:"id"`
Object string `json:"object"`
Created int64 `json:"created"`
Model string `json:"model"`
Usage FireworkUsage `json:"usage"`
Choices []FireworkChoice `json:"choices"`
}
// FireworkUsage reports the token counts consumed by a Fireworks request.
type FireworkUsage struct {
PromptTokens int32 `json:"prompt_tokens"`
CompletionTokens int32 `json:"completion_tokens"`
TotalTokens int32 `json:"total_tokens"`
}
// FireworkChoice is a single generated completion within a response.
type FireworkChoice struct {
Message Message `json:"message"`
FinishReason string `json:"finish_reason"`
Index int `json:"index"`
}
// addFireworkMessage requests a chat completion from Fireworks for the given
// LLM, stores the result (or an error placeholder) as a bot message, and
// returns the inserted message's UUID.
func addFireworkMessage(c *fiber.Ctx, llm LLM, selected bool) edgedb.UUID {
Messages := getAllSelectedMessages(c)
chatCompletion, err := RequestFirework(c, llm.Model.ModelID, Messages, float64(llm.Temperature), llm.Context, int(llm.MaxToken))
// Failures are stored as messages so the conversation stays usable.
if err != nil {
fmt.Println("Error requesting Firework: ", err)
id := insertBotMessage(c, "Error requesting Firework, model may not be available anymore. Better error message in development.", selected, llm.ID)
return id
} else if len(chatCompletion.Choices) == 0 {
fmt.Println("No response from Firework")
id := insertBotMessage(c, "No response from Firework", selected, llm.ID)
return id
} else {
Content := chatCompletion.Choices[0].Message.Content
id := insertBotMessage(c, Content, selected, llm.ID)
return id
}
}
func TestFireworkKey(apiKey string) bool {
url := "https://api.fireworks.ai/inference/v1/chat/completions"
@ -101,7 +53,7 @@ func TestFireworkKey(apiKey string) bool {
return false
}
var chatCompletionResponse FireworkChatCompletionResponse
var chatCompletionResponse OpenaiChatCompletionResponse
err = json.Unmarshal(body, &chatCompletionResponse)
if err != nil {
return false
@ -112,7 +64,12 @@ func TestFireworkKey(apiKey string) bool {
return true
}
func RequestFirework(c *fiber.Ctx, model string, messages []Message, temperature float64, context string, maxTokens int) (FireworkChatCompletionResponse, error) {
func RequestFirework(c *fiber.Ctx, llm LLM, messages []Message) string {
model := llm.Model.ModelID
temperature := float64(llm.Temperature)
context := llm.Context
maxTokens := int(llm.MaxToken)
var apiKey string
err := edgeGlobalClient.WithGlobals(map[string]interface{}{"ext::auth::client_token": c.Cookies("jade-edgedb-auth-token")}).QuerySingle(edgeCtx, `
with
@ -122,15 +79,15 @@ func RequestFirework(c *fiber.Ctx, model string, messages []Message, temperature
} filter .company.name = <str>$0 AND .<keys[is Setting].<setting[is User] = global currentUser
)
select filtered_keys.key limit 1
`, &apiKey, "fireworks")
`, &apiKey, "firework")
if err != nil {
return FireworkChatCompletionResponse{}, fmt.Errorf("error getting Firework API key: %w", err)
return "JADE internal error: 09-00-0000. Please contact the support."
}
url := "https://api.fireworks.ai/inference/v1/chat/completions"
requestBody := FireworkChatCompletionRequest{
Model: "accounts/fireworks/models/" + model,
requestBody := OpenaiChatCompletionRequest{
Model: model,
Messages: Message2RequestMessage(messages, context),
MaxTokens: maxTokens,
Temperature: temperature,
@ -138,12 +95,12 @@ func RequestFirework(c *fiber.Ctx, model string, messages []Message, temperature
jsonBody, err := json.Marshal(requestBody)
if err != nil {
return FireworkChatCompletionResponse{}, fmt.Errorf("error marshaling JSON: %w", err)
return "JADE internal error: 09-01-0001. Please contact the support."
}
req, err := http.NewRequest("POST", url, bytes.NewBuffer(jsonBody))
if err != nil {
return FireworkChatCompletionResponse{}, fmt.Errorf("error creating request: %w", err)
return "JADE internal error: 09-02-0002. Please contact the support."
}
req.Header.Set("Content-Type", "application/json")
@ -152,19 +109,25 @@ func RequestFirework(c *fiber.Ctx, model string, messages []Message, temperature
client := &http.Client{}
resp, err := client.Do(req)
if err != nil {
return FireworkChatCompletionResponse{}, fmt.Errorf("error sending request: %w", err)
return "JADE internal error: 09-02-0003. Please contact the support."
}
defer resp.Body.Close()
body, err := io.ReadAll(resp.Body)
if err != nil {
return FireworkChatCompletionResponse{}, fmt.Errorf("error reading response body: %w", err)
return "JADE internal error: 09-01-0004. Please contact the support."
}
var chatCompletionResponse FireworkChatCompletionResponse
for key, value := range OpenaiErrorCodes {
if strings.Contains(resp.Status, key) {
return value
}
}
var chatCompletionResponse OpenaiChatCompletionResponse
err = json.Unmarshal(body, &chatCompletionResponse)
if err != nil {
return FireworkChatCompletionResponse{}, fmt.Errorf("error unmarshaling JSON: %w", err)
return "JADE internal error: 09-01-0005. Please contact the support."
}
var usedModelInfo ModelInfo
@ -177,18 +140,16 @@ func RequestFirework(c *fiber.Ctx, model string, messages []Message, temperature
LIMIT 1
`, &usedModelInfo, model)
if err != nil {
return FireworkChatCompletionResponse{}, fmt.Errorf("error getting model info: %w", err)
}
if len(chatCompletionResponse.Choices) == 0 {
// Print the response as a JSON string
fmt.Println(string(body))
return FireworkChatCompletionResponse{}, fmt.Errorf("no response from Firework")
return "JADE internal error: 09-00-0006. Please contact the support."
}
var inputCost float32 = float32(chatCompletionResponse.Usage.PromptTokens) * usedModelInfo.InputPrice
var outputCost float32 = float32(chatCompletionResponse.Usage.CompletionTokens) * usedModelInfo.OutputPrice
addUsage(c, inputCost, outputCost, chatCompletionResponse.Usage.PromptTokens, chatCompletionResponse.Usage.CompletionTokens, model)
return chatCompletionResponse, nil
if len(chatCompletionResponse.Choices) == 0 {
return "JADE internal error: 09-03-0007. Please contact the support."
}
return chatCompletionResponse.Choices[0].Message.Content
}

View File

@ -5,11 +5,10 @@ package main
import (
"bytes"
"encoding/json"
"fmt"
"io"
"net/http"
"strings"
"github.com/edgedb/edgedb-go"
"github.com/gofiber/fiber/v2"
)
@ -52,23 +51,17 @@ type GoogleUsageMetadata struct {
TotalTokenCount int32 `json:"totalTokenCount"`
}
func addGoogleMessage(c *fiber.Ctx, llm LLM, selected bool) edgedb.UUID {
Messages := getAllSelectedMessages(c)
var GoogleErrorCodes map[string]string
chatCompletion, err := RequestGoogle(c, llm.Model.ModelID, Messages, float64(llm.Temperature), llm.Context)
if err != nil {
fmt.Println("Error requesting Google: ", err)
id := insertBotMessage(c, "Error requesting Google.", selected, llm.ID)
return id
} else if len(chatCompletion.Candidates) == 0 {
fmt.Println("No response from Google")
id := insertBotMessage(c, "No response from Google", selected, llm.ID)
return id
} else {
Content := chatCompletion.Candidates[0].Content.Parts[0].Text
id := insertBotMessage(c, Content, selected, llm.ID)
return id
}
// TODO: Update
// init populates GoogleErrorCodes, mapping HTTP status codes returned by the
// Google API to user-facing explanations.
func init() {
	GoogleErrorCodes = make(map[string]string)
	GoogleErrorCodes["401"] = "Invalid Authentication - Ensure that the API key is still valid."
	GoogleErrorCodes["403"] = "Accessing the API from an unsupported country, region, or territory."
	// A 429 can mean either rate limiting or exhausted credits. The previous
	// code assigned the "429" key twice, so only the second message survived;
	// both causes are now covered by a single message.
	GoogleErrorCodes["429"] = "Rate limit reached, or you have run out of credits / hit your maximum monthly spend - Slow down your requests or buy more credits."
	GoogleErrorCodes["500"] = "Issue on Provider servers - Retry your request after a brief wait and contact the provider if the issue persists."
	GoogleErrorCodes["503"] = "Servers are experiencing high traffic - Please retry your requests after a brief wait."
}
func TestGoogleKey(apiKey string) bool {
@ -91,13 +84,11 @@ func TestGoogleKey(apiKey string) bool {
jsonBody, err := json.Marshal(requestBody)
if err != nil {
fmt.Println("Error marshalling JSON: ", err)
return false
}
req, err := http.NewRequest("POST", url, bytes.NewBuffer(jsonBody))
if err != nil {
fmt.Println("Error creating request: ", err)
return false
}
@ -106,33 +97,35 @@ func TestGoogleKey(apiKey string) bool {
client := &http.Client{}
resp, err := client.Do(req)
if err != nil {
fmt.Println("Error sending request: ", err)
return false
}
defer resp.Body.Close()
body, err := io.ReadAll(resp.Body)
if err != nil {
fmt.Println("Error reading response body: ", err)
return false
}
var chatCompletionResponse GoogleChatCompletionResponse
err = json.Unmarshal(body, &chatCompletionResponse)
if err != nil {
fmt.Println("Error unmarshaling JSON: ", err)
return false
}
if chatCompletionResponse.UsageMetadata.CandidatesTokenCount == 0 {
fmt.Println("No response from Google")
return false
}
fmt.Println("Response from Google: ", chatCompletionResponse)
return true
}
func RequestGoogle(c *fiber.Ctx, model string, messages []Message, temperature float64, context string) (GoogleChatCompletionResponse, error) {
func RequestGoogle(c *fiber.Ctx, llm LLM, messages []Message) string {
model := llm.Model.ModelID
// TODO: Use those parameters
// temperature := float64(llm.Temperature)
// context := llm.Context
//maxTokens := int(llm.MaxToken)
var apiKey string
err := edgeGlobalClient.WithGlobals(map[string]interface{}{"ext::auth::client_token": c.Cookies("jade-edgedb-auth-token")}).QuerySingle(edgeCtx, `
with
@ -144,7 +137,7 @@ func RequestGoogle(c *fiber.Ctx, model string, messages []Message, temperature f
select filtered_keys.key limit 1
`, &apiKey, "google")
if err != nil {
return GoogleChatCompletionResponse{}, fmt.Errorf("error getting Google API key: %w", err)
return "JADE internal error: 03-00-0000. Please contact the support."
}
url := "https://generativelanguage.googleapis.com/v1beta/models/" + model + ":generateContent?key=" + apiKey
@ -171,12 +164,12 @@ func RequestGoogle(c *fiber.Ctx, model string, messages []Message, temperature f
jsonBody, err := json.Marshal(requestBody)
if err != nil {
return GoogleChatCompletionResponse{}, fmt.Errorf("error marshaling JSON: %w", err)
return "JADE internal error: 03-01-0001. Please contact the support."
}
req, err := http.NewRequest("POST", url, bytes.NewBuffer(jsonBody))
if err != nil {
return GoogleChatCompletionResponse{}, fmt.Errorf("error creating request: %w", err)
return "JADE internal error: 03-02-0002. Please contact the support."
}
req.Header.Set("Content-Type", "application/json")
@ -184,19 +177,25 @@ func RequestGoogle(c *fiber.Ctx, model string, messages []Message, temperature f
client := &http.Client{}
resp, err := client.Do(req)
if err != nil {
return GoogleChatCompletionResponse{}, fmt.Errorf("error sending request: %w", err)
return "JADE internal error: 03-02-0003. Please contact the support."
}
defer resp.Body.Close()
body, err := io.ReadAll(resp.Body)
if err != nil {
return GoogleChatCompletionResponse{}, fmt.Errorf("error reading response body: %w", err)
return "JADE internal error: 03-01-0004. Please contact the support."
}
for key, value := range GoogleErrorCodes {
if strings.Contains(resp.Status, key) {
return value
}
}
var chatCompletionResponse GoogleChatCompletionResponse
err = json.Unmarshal(body, &chatCompletionResponse)
if err != nil {
return GoogleChatCompletionResponse{}, fmt.Errorf("error unmarshaling JSON: %w", err)
return "JADE internal error: 03-01-0005. Please contact the support."
}
var usedModelInfo ModelInfo
@ -209,12 +208,16 @@ func RequestGoogle(c *fiber.Ctx, model string, messages []Message, temperature f
LIMIT 1
`, &usedModelInfo, model)
if err != nil {
return GoogleChatCompletionResponse{}, fmt.Errorf("error getting model info: %w", err)
return "JADE internal error: 03-00-0006. Please contact the support."
}
var inputCost float32 = float32(chatCompletionResponse.UsageMetadata.PromptTokenCount) * usedModelInfo.InputPrice
var outputCost float32 = float32(chatCompletionResponse.UsageMetadata.CandidatesTokenCount) * usedModelInfo.OutputPrice
addUsage(c, inputCost, outputCost, chatCompletionResponse.UsageMetadata.PromptTokenCount, chatCompletionResponse.UsageMetadata.CandidatesTokenCount, model)
return chatCompletionResponse, nil
if len(chatCompletionResponse.Candidates) == 0 {
return "JADE internal error: 03-03-0007. Please contact the support."
}
return chatCompletionResponse.Candidates[0].Content.Parts[0].Text
}

View File

@ -1,164 +0,0 @@
// It works, but it is disabled because GooseAI does not offer a chat API.
// It provides text completion, not chat completion; a chat API is expected soon,
// so the code is kept here for now.
package main
import (
"bytes"
"encoding/json"
"fmt"
"io"
"net/http"
"github.com/edgedb/edgedb-go"
"github.com/gofiber/fiber/v2"
)
// GooseaiCompletionRequest is the JSON payload sent to the GooseAI
// text-completion endpoint (engines/<model>/completions).
type GooseaiCompletionRequest struct {
	Model       string   `json:"model"`       // engine/model identifier, e.g. "gpt-j-6b"
	Prompt      []string `json:"prompt"`      // prompt text(s) to complete
	Temperature float64  `json:"temperature"` // sampling temperature
	MaxToken    int32    `json:"max_tokens"`  // cap on generated tokens
}

// GooseaiCompletionResponse is the JSON body returned by the GooseAI
// completion endpoint.
type GooseaiCompletionResponse struct {
	ID      string          `json:"id"`
	Created int64           `json:"created"` // Unix timestamp of creation
	Model   string          `json:"model"`
	Choices []GooseaiChoice `json:"choices"` // may be empty on API errors
}

// GooseaiChoice is one generated completion inside a
// GooseaiCompletionResponse.
type GooseaiChoice struct {
	Text         string `json:"text"`          // the generated completion text
	FinishReason string `json:"finish_reason"` // why generation stopped
	Index        int    `json:"index"`
}
// addGooseaiMessage requests a GooseAI completion over the currently
// selected messages and stores the reply (or a placeholder when the API
// returns nothing) as a bot message, returning the new message's UUID.
// A request error is treated as fatal and panics after logging.
func addGooseaiMessage(c *fiber.Ctx, llm LLM, selected bool) edgedb.UUID {
	history := getAllSelectedMessages(c)
	completion, err := RequestGooseai(c, llm.Model.ModelID, history, float64(llm.Temperature))
	switch {
	case err != nil:
		fmt.Println("Error fetching user profile")
		panic(err)
	case len(completion.Choices) == 0:
		fmt.Println("No response from GooseAI")
		return insertBotMessage(c, "No response from GooseAI", selected, llm.ID)
	default:
		return insertBotMessage(c, completion.Choices[0].Text, selected, llm.ID)
	}
}
// TestGooseaiKey checks whether a GooseAI API key is usable by issuing a
// tiny completion request against the gpt-j-6b engine.
// It returns true only when the request round-trips successfully and a
// non-empty completion comes back; any transport, decoding, or API error
// yields false.
func TestGooseaiKey(apiKey string) bool {
	url := "https://api.goose.ai/v1/engines/gpt-j-6b/completions"
	requestBody := GooseaiCompletionRequest{
		Model:       "gpt-j-6b",
		Prompt:      []string{"Hello, how are you?"},
		Temperature: 0,
		MaxToken:    10,
	}
	jsonBody, err := json.Marshal(requestBody)
	if err != nil {
		return false
	}
	req, err := http.NewRequest("POST", url, bytes.NewBuffer(jsonBody))
	if err != nil {
		return false
	}
	req.Header.Set("Content-Type", "application/json")
	req.Header.Set("Authorization", "Bearer "+apiKey)
	client := &http.Client{}
	resp, err := client.Do(req)
	if err != nil {
		return false
	}
	defer resp.Body.Close()
	body, err := io.ReadAll(resp.Body)
	if err != nil {
		return false
	}
	var chatCompletionResponse GooseaiCompletionResponse
	err = json.Unmarshal(body, &chatCompletionResponse)
	if err != nil {
		return false
	}
	// Bug fix: an invalid key makes the API return an error payload with no
	// choices; indexing Choices[0] unconditionally panicked in exactly the
	// case this function exists to detect.
	if len(chatCompletionResponse.Choices) == 0 {
		return false
	}
	if chatCompletionResponse.Choices[0].Text == "" {
		return false
	}
	return true
}
// RequestGooseai sends the flattened conversation as a single text-completion
// prompt to GooseAI and decodes the response. The user's GooseAI key is read
// from the database via the caller's auth cookie. Usage is recorded with zero
// token counts because the endpoint does not report them.
func RequestGooseai(c *fiber.Ctx, model string, messages []Message, temperature float64) (GooseaiCompletionResponse, error) {
	var empty GooseaiCompletionResponse
	var apiKey string
	err := edgeGlobalClient.WithGlobals(map[string]interface{}{"ext::auth::client_token": c.Cookies("jade-edgedb-auth-token")}).QuerySingle(edgeCtx, `
		with
			filtered_keys := (
				select Key {
					key
				} filter .company.name = "gooseai" AND .<keys[is Setting].<setting[is User] = global currentUser
			)
		select filtered_keys.key limit 1
	`, &apiKey)
	if err != nil {
		return empty, fmt.Errorf("error getting GooseAI API key: %w", err)
	}
	url := "https://api.goose.ai/v1/engines/" + model + "/completions"
	// GooseAI is a plain text-completion API: concatenate the whole
	// conversation into one prompt string.
	var promptBuf bytes.Buffer
	for _, m := range messages {
		promptBuf.WriteString(m.Content)
	}
	payload := GooseaiCompletionRequest{
		Model:       model,
		Prompt:      []string{promptBuf.String()},
		Temperature: temperature,
		MaxToken:    300,
	}
	encoded, err := json.Marshal(payload)
	if err != nil {
		return empty, fmt.Errorf("error marshaling JSON: %w", err)
	}
	req, err := http.NewRequest("POST", url, bytes.NewBuffer(encoded))
	if err != nil {
		return empty, fmt.Errorf("error creating request: %w", err)
	}
	req.Header.Set("Content-Type", "application/json")
	req.Header.Set("Authorization", "Bearer "+apiKey)
	resp, err := (&http.Client{}).Do(req)
	if err != nil {
		return empty, fmt.Errorf("error sending request: %w", err)
	}
	defer resp.Body.Close()
	raw, err := io.ReadAll(resp.Body)
	if err != nil {
		return empty, fmt.Errorf("error reading response body: %w", err)
	}
	var completion GooseaiCompletionResponse
	if err := json.Unmarshal(raw, &completion); err != nil {
		return empty, fmt.Errorf("error unmarshaling JSON: %w", err)
	}
	addUsage(c, 0, 0, 0, 0, model)
	return completion, nil
}

View File

@ -3,61 +3,13 @@ package main
import (
"bytes"
"encoding/json"
"fmt"
"io"
"net/http"
"strings"
"github.com/edgedb/edgedb-go"
"github.com/gofiber/fiber/v2"
)
// GroqChatCompletionRequest is the JSON payload for Groq's OpenAI-compatible
// chat-completions endpoint.
type GroqChatCompletionRequest struct {
	Model       string           `json:"model"`
	Messages    []RequestMessage `json:"messages"`
	MaxTokens   int              `json:"max_tokens"`
	Temperature float64          `json:"temperature"`
}

// GroqChatCompletionResponse is the JSON body returned by Groq's
// chat-completions endpoint.
type GroqChatCompletionResponse struct {
	ID      string       `json:"id"`
	Object  string       `json:"object"`
	Created int64        `json:"created"` // Unix timestamp of creation
	Model   string       `json:"model"`
	Usage   GroqUsage    `json:"usage"`   // token accounting used for billing
	Choices []GroqChoice `json:"choices"` // may be empty on API errors
}

// GroqUsage reports token counts for one completion.
type GroqUsage struct {
	PromptTokens     int32 `json:"prompt_tokens"`
	CompletionTokens int32 `json:"completion_tokens"`
	TotalTokens      int32 `json:"total_tokens"`
}

// GroqChoice is one generated reply inside a GroqChatCompletionResponse.
type GroqChoice struct {
	Message      Message `json:"message"`
	FinishReason string  `json:"finish_reason"` // why generation stopped
	Index        int     `json:"index"`
}
// addGroqMessage asks Groq for a chat completion over the currently selected
// messages and stores the reply (or an error placeholder) as a bot message,
// returning the new message's UUID.
func addGroqMessage(c *fiber.Ctx, llm LLM, selected bool) edgedb.UUID {
	history := getAllSelectedMessages(c)
	completion, err := RequestGroq(c, llm.Model.ModelID, history, float64(llm.Temperature), llm.Context, int(llm.MaxToken))
	switch {
	case err != nil:
		fmt.Println("Error requesting Groq: ", err)
		return insertBotMessage(c, "Error requesting Groq, model may not be available anymore. Better error message in development.", selected, llm.ID)
	case len(completion.Choices) == 0:
		fmt.Println("No response from Groq")
		return insertBotMessage(c, "No response from Groq", selected, llm.ID)
	default:
		return insertBotMessage(c, completion.Choices[0].Message.Content, selected, llm.ID)
	}
}
func TestGroqKey(apiKey string) bool {
url := "https://api.groq.com/openai/v1/chat/completions"
@ -69,7 +21,7 @@ func TestGroqKey(apiKey string) bool {
},
}
requestBody := GroqChatCompletionRequest{
requestBody := OpenaiChatCompletionRequest{
Model: "llama3-8b-8192",
Messages: Message2RequestMessage(groqMessages, ""),
Temperature: 0,
@ -101,7 +53,7 @@ func TestGroqKey(apiKey string) bool {
return false
}
var chatCompletionResponse GroqChatCompletionResponse
var chatCompletionResponse OpenaiChatCompletionResponse
err = json.Unmarshal(body, &chatCompletionResponse)
if err != nil {
return false
@ -112,7 +64,12 @@ func TestGroqKey(apiKey string) bool {
return true
}
func RequestGroq(c *fiber.Ctx, model string, messages []Message, temperature float64, context string, maxTokens int) (GroqChatCompletionResponse, error) {
func RequestGroq(c *fiber.Ctx, llm LLM, messages []Message) string {
model := llm.Model.ModelID
temperature := float64(llm.Temperature)
context := llm.Context
maxTokens := int(llm.MaxToken)
var apiKey string
err := edgeGlobalClient.WithGlobals(map[string]interface{}{"ext::auth::client_token": c.Cookies("jade-edgedb-auth-token")}).QuerySingle(edgeCtx, `
with
@ -124,12 +81,12 @@ func RequestGroq(c *fiber.Ctx, model string, messages []Message, temperature flo
select filtered_keys.key limit 1
`, &apiKey, "groq")
if err != nil {
return GroqChatCompletionResponse{}, fmt.Errorf("error getting Groq API key: %w", err)
return "JADE internal error: 04-00-0000. Please contact the support."
}
url := "https://api.groq.com/openai/v1/chat/completions"
requestBody := GroqChatCompletionRequest{
requestBody := OpenaiChatCompletionRequest{
Model: model,
Messages: Message2RequestMessage(messages, context),
MaxTokens: maxTokens,
@ -138,12 +95,12 @@ func RequestGroq(c *fiber.Ctx, model string, messages []Message, temperature flo
jsonBody, err := json.Marshal(requestBody)
if err != nil {
return GroqChatCompletionResponse{}, fmt.Errorf("error marshaling JSON: %w", err)
return "JADE internal error: 04-01-0001. Please contact the support."
}
req, err := http.NewRequest("POST", url, bytes.NewBuffer(jsonBody))
if err != nil {
return GroqChatCompletionResponse{}, fmt.Errorf("error creating request: %w", err)
return "JADE internal error: 04-02-0002. Please contact the support."
}
req.Header.Set("Content-Type", "application/json")
@ -152,23 +109,29 @@ func RequestGroq(c *fiber.Ctx, model string, messages []Message, temperature flo
client := &http.Client{}
resp, err := client.Do(req)
if err != nil {
return GroqChatCompletionResponse{}, fmt.Errorf("error sending request: %w", err)
return "JADE internal error: 04-02-0003. Please contact the support."
}
defer resp.Body.Close()
body, err := io.ReadAll(resp.Body)
if err != nil {
return GroqChatCompletionResponse{}, fmt.Errorf("error reading response body: %w", err)
return "JADE internal error: 04-01-0004. Please contact the support."
}
var chatCompletionResponse GroqChatCompletionResponse
for key, value := range OpenaiErrorCodes {
if strings.Contains(resp.Status, key) {
return value
}
}
var chatCompletionResponse OpenaiChatCompletionResponse
err = json.Unmarshal(body, &chatCompletionResponse)
if err != nil {
return GroqChatCompletionResponse{}, fmt.Errorf("error unmarshaling JSON: %w", err)
return "JADE internal error: 04-01-0005. Please contact the support."
}
var usedModelInfo ModelInfo
edgeGlobalClient.WithGlobals(map[string]interface{}{"ext::auth::client_token": c.Cookies("jade-edgedb-auth-token")}).QuerySingle(edgeCtx, `
err = edgeGlobalClient.WithGlobals(map[string]interface{}{"ext::auth::client_token": c.Cookies("jade-edgedb-auth-token")}).QuerySingle(edgeCtx, `
SELECT ModelInfo {
inputPrice,
outputPrice
@ -176,10 +139,17 @@ func RequestGroq(c *fiber.Ctx, model string, messages []Message, temperature flo
FILTER .modelID = <str>$0
LIMIT 1
`, &usedModelInfo, model)
if err != nil {
return "JADE internal error: 04-00-0006. Please contact the support."
}
var inputCost float32 = float32(chatCompletionResponse.Usage.PromptTokens) * usedModelInfo.InputPrice
var outputCost float32 = float32(chatCompletionResponse.Usage.CompletionTokens) * usedModelInfo.OutputPrice
addUsage(c, inputCost, outputCost, chatCompletionResponse.Usage.PromptTokens, chatCompletionResponse.Usage.CompletionTokens, model)
return chatCompletionResponse, nil
if len(chatCompletionResponse.Choices) == 0 {
return "JADE internal error: 04-03-0007. Please contact the support."
}
return chatCompletionResponse.Choices[0].Message.Content
}

View File

@ -1,106 +0,0 @@
package main
import (
"bytes"
"encoding/json"
"fmt"
"io"
"net/http"
"github.com/edgedb/edgedb-go"
"github.com/gofiber/fiber/v2"
)
// HuggingfaceChatCompletionRequest is the JSON payload for a
// Huggingface/TGI-style chat-completions endpoint.
type HuggingfaceChatCompletionRequest struct {
	Model       string           `json:"model"` // TGI expects the literal "tgi"
	Messages    []RequestMessage `json:"messages"`
	Temperature float64          `json:"temperature"`
	MaxTokens   int              `json:"max_tokens"`
	Stream      bool             `json:"stream"` // always false here; streaming unsupported
}

// HuggingfaceChatCompletionResponse is the JSON body returned by the
// endpoint. Note there is no usage field: the endpoint does not report
// token counts.
type HuggingfaceChatCompletionResponse struct {
	ID      string              `json:"id"`
	Object  string              `json:"object"`
	Created int64               `json:"created"` // Unix timestamp of creation
	Model   string              `json:"model"`
	Choices []HuggingfaceChoice `json:"choices"` // may be empty on API errors
}

// HuggingfaceUsage mirrors the OpenAI usage shape.
// NOTE(review): it is not referenced by HuggingfaceChatCompletionResponse —
// presumably kept for future use; confirm before deleting.
type HuggingfaceUsage struct {
	PromptTokens     int32 `json:"prompt_tokens"`
	CompletionTokens int32 `json:"completion_tokens"`
	TotalTokens      int32 `json:"total_tokens"`
}

// HuggingfaceChoice is one generated reply inside a
// HuggingfaceChatCompletionResponse.
type HuggingfaceChoice struct {
	Message      Message `json:"message"`
	FinishReason string  `json:"finish_reason"` // why generation stopped
	Index        int     `json:"index"`
}
// addHuggingfaceMessage asks the LLM's custom Huggingface endpoint for a chat
// completion over the currently selected messages and stores the reply (or an
// error placeholder) as a bot message, returning the new message's UUID.
func addHuggingfaceMessage(c *fiber.Ctx, llm LLM, selected bool) edgedb.UUID {
	history := getAllSelectedMessages(c)
	completion, err := RequestHuggingface(c, llm, history, float64(llm.Temperature), int(llm.MaxToken))
	switch {
	case err != nil:
		fmt.Println("Error requesting Huggingface: ", err)
		return insertBotMessage(c, "Error requesting Huggingface.", selected, llm.ID)
	case len(completion.Choices) == 0:
		fmt.Println("No response from Endpoint")
		return insertBotMessage(c, "No response from Endpoint", selected, llm.ID)
	default:
		return insertBotMessage(c, completion.Choices[0].Message.Content, selected, llm.ID)
	}
}
// RequestHuggingface posts the conversation to the LLM's user-configured
// inference endpoint (TGI-style API) and decodes the chat completion.
// Authentication uses the endpoint's own key, not a provider key from the
// database. Usage is recorded with zero token counts because the endpoint
// does not report them.
func RequestHuggingface(c *fiber.Ctx, llm LLM, messages []Message, temperature float64, maxTokens int) (HuggingfaceChatCompletionResponse, error) {
	var empty HuggingfaceChatCompletionResponse
	payload := HuggingfaceChatCompletionRequest{
		Model:       "tgi",
		Messages:    Message2RequestMessage(messages, llm.Context),
		Temperature: temperature,
		MaxTokens:   maxTokens,
		Stream:      false,
	}
	encoded, err := json.Marshal(payload)
	if err != nil {
		return empty, fmt.Errorf("error marshaling JSON: %w", err)
	}
	req, err := http.NewRequest("POST", llm.Endpoint.Endpoint, bytes.NewBuffer(encoded))
	if err != nil {
		return empty, fmt.Errorf("error creating request: %w", err)
	}
	req.Header.Set("Authorization", "Bearer "+llm.Endpoint.Key)
	req.Header.Set("Content-Type", "application/json")
	resp, err := (&http.Client{}).Do(req)
	if err != nil {
		return empty, fmt.Errorf("error sending request: %w", err)
	}
	defer resp.Body.Close()
	raw, err := io.ReadAll(resp.Body)
	if err != nil {
		return empty, fmt.Errorf("error reading response body: %w", err)
	}
	var completion HuggingfaceChatCompletionResponse
	if err := json.Unmarshal(raw, &completion); err != nil {
		return empty, fmt.Errorf("error unmarshaling JSON: %w", err)
	}
	addUsage(c, 0, 0, 0, 0, llm.Model.ModelID)
	return completion, nil
}

View File

@ -3,59 +3,13 @@ package main
import (
"bytes"
"encoding/json"
"fmt"
"io"
"net/http"
"strings"
"github.com/edgedb/edgedb-go"
"github.com/gofiber/fiber/v2"
)
// MistralChatCompletionRequest is the JSON payload for Mistral's
// OpenAI-compatible chat-completions endpoint.
type MistralChatCompletionRequest struct {
	Model       string           `json:"model"`
	Messages    []RequestMessage `json:"messages"`
	MaxTokens   int              `json:"max_tokens"`
	Temperature float64          `json:"temperature"`
}

// MistralChatCompletionResponse is the JSON body returned by Mistral's
// chat-completions endpoint.
type MistralChatCompletionResponse struct {
	ID      string          `json:"id"`
	Object  string          `json:"object"`
	Created int64           `json:"created"` // Unix timestamp of creation
	Model   string          `json:"model"`
	Usage   MistralUsage    `json:"usage"`   // token accounting used for billing
	Choices []MistralChoice `json:"choices"` // may be empty on API errors
}

// MistralUsage reports token counts for one completion.
type MistralUsage struct {
	PromptTokens     int32 `json:"prompt_tokens"`
	CompletionTokens int32 `json:"completion_tokens"`
	TotalTokens      int32 `json:"total_tokens"`
}

// MistralChoice is one generated reply inside a
// MistralChatCompletionResponse.
type MistralChoice struct {
	Message      Message `json:"message"`
	FinishReason string  `json:"finish_reason"` // why generation stopped
	Index        int     `json:"index"`
}
// addMistralMessage asks Mistral for a chat completion over the currently
// selected messages and stores the reply (or an error placeholder) as a bot
// message, returning the new message's UUID.
func addMistralMessage(c *fiber.Ctx, llm LLM, selected bool) edgedb.UUID {
	history := getAllSelectedMessages(c)
	completion, err := RequestMistral(c, llm.Model.ModelID, history, float64(llm.Temperature), llm.Context, int(llm.MaxToken))
	switch {
	case err != nil:
		fmt.Println("Error requesting Mistral: ", err)
		return insertBotMessage(c, "Error requesting Mistral, model may not be available anymore. Better error message in development.", selected, llm.ID)
	case len(completion.Choices) == 0:
		return insertBotMessage(c, "No response from Mistral", selected, llm.ID)
	default:
		return insertBotMessage(c, completion.Choices[0].Message.Content, selected, llm.ID)
	}
}
func TestMistralKey(apiKey string) bool {
url := "https://api.mistral.ai/v1/chat/completions"
@ -67,7 +21,7 @@ func TestMistralKey(apiKey string) bool {
},
}
requestBody := MistralChatCompletionRequest{
requestBody := OpenaiChatCompletionRequest{
Model: "open-mistral-7b",
Messages: mistralMessages,
Temperature: 0,
@ -76,13 +30,11 @@ func TestMistralKey(apiKey string) bool {
jsonBody, err := json.Marshal(requestBody)
if err != nil {
fmt.Println("Error marshalling request to Mistral")
return false
}
req, err := http.NewRequest("POST", url, bytes.NewBuffer(jsonBody))
if err != nil {
fmt.Println("Error creating request to Mistral")
return false
}
@ -93,31 +45,32 @@ func TestMistralKey(apiKey string) bool {
client := &http.Client{}
resp, err := client.Do(req)
if err != nil {
fmt.Println("Error sending request to Mistral")
return false
}
defer resp.Body.Close()
body, err := io.ReadAll(resp.Body)
if err != nil {
fmt.Println("Error reading response from Mistral")
return false
}
var chatCompletionResponse MistralChatCompletionResponse
var chatCompletionResponse OpenaiChatCompletionResponse
err = json.Unmarshal(body, &chatCompletionResponse)
if err != nil {
fmt.Println("Error unmarshalling response from Mistral")
return false
}
if chatCompletionResponse.Usage.CompletionTokens == 0 {
fmt.Println("No response from Mistral")
return false
}
return true
}
func RequestMistral(c *fiber.Ctx, model string, messages []Message, temperature float64, context string, maxTokens int) (MistralChatCompletionResponse, error) {
func RequestMistral(c *fiber.Ctx, llm LLM, messages []Message) string {
model := llm.Model.ModelID
temperature := float64(llm.Temperature)
context := llm.Context
maxTokens := int(llm.MaxToken)
var apiKey string
err := edgeGlobalClient.WithGlobals(map[string]interface{}{"ext::auth::client_token": c.Cookies("jade-edgedb-auth-token")}).QuerySingle(edgeCtx, `
with
@ -129,12 +82,12 @@ func RequestMistral(c *fiber.Ctx, model string, messages []Message, temperature
select filtered_keys.key limit 1
`, &apiKey, "mistral")
if err != nil {
return MistralChatCompletionResponse{}, fmt.Errorf("error getting OpenAI API key: %w", err)
return "JADE internal error: 02-00-0000. Please contact the support."
}
url := "https://api.mistral.ai/v1/chat/completions"
requestBody := MistralChatCompletionRequest{
requestBody := OpenaiChatCompletionRequest{
Model: model,
Messages: Message2RequestMessage(messages, context),
MaxTokens: maxTokens,
@ -143,34 +96,39 @@ func RequestMistral(c *fiber.Ctx, model string, messages []Message, temperature
jsonBody, err := json.Marshal(requestBody)
if err != nil {
return MistralChatCompletionResponse{}, fmt.Errorf("error marshaling JSON: %w", err)
return "JADE internal error: 02-01-0001. Please contact the support."
}
req, err := http.NewRequest("POST", url, bytes.NewBuffer(jsonBody))
if err != nil {
return MistralChatCompletionResponse{}, fmt.Errorf("error creating request: %w", err)
return "JADE internal error: 02-02-0002. Please contact the support."
}
req.Header.Set("Content-Type", "application/json")
req.Header.Set("Accept", "application/json")
req.Header.Set("Authorization", "Bearer "+apiKey)
client := &http.Client{}
resp, err := client.Do(req)
if err != nil {
return MistralChatCompletionResponse{}, fmt.Errorf("error sending request: %w", err)
return "JADE internal error: 02-02-0003. Please contact the support."
}
defer resp.Body.Close()
body, err := io.ReadAll(resp.Body)
if err != nil {
return MistralChatCompletionResponse{}, fmt.Errorf("error reading response body: %w", err)
return "JADE internal error: 02-01-0004. Please contact the support."
}
var chatCompletionResponse MistralChatCompletionResponse
for key, value := range OpenaiErrorCodes {
if strings.Contains(resp.Status, key) {
return value
}
}
var chatCompletionResponse OpenaiChatCompletionResponse
err = json.Unmarshal(body, &chatCompletionResponse)
if err != nil {
return MistralChatCompletionResponse{}, fmt.Errorf("error unmarshaling JSON: %w", err)
return "JADE internal error: 02-01-0005. Please contact the support."
}
var usedModelInfo ModelInfo
@ -183,16 +141,16 @@ func RequestMistral(c *fiber.Ctx, model string, messages []Message, temperature
LIMIT 1
`, &usedModelInfo, model)
if err != nil {
return MistralChatCompletionResponse{}, fmt.Errorf("error getting model info: %w", err)
}
if usedModelInfo.InputPrice == 0 || usedModelInfo.OutputPrice == 0 {
return MistralChatCompletionResponse{}, fmt.Errorf("model %s not found in Mistral", model)
return "JADE internal error: 02-00-0006. Please contact the support."
}
var inputCost float32 = float32(chatCompletionResponse.Usage.PromptTokens) * usedModelInfo.InputPrice
var outputCost float32 = float32(chatCompletionResponse.Usage.CompletionTokens) * usedModelInfo.OutputPrice
addUsage(c, inputCost, outputCost, chatCompletionResponse.Usage.PromptTokens, chatCompletionResponse.Usage.CompletionTokens, model)
return chatCompletionResponse, nil
if len(chatCompletionResponse.Choices) == 0 {
return "JADE internal error: 02-03-0007. Please contact the support."
}
return chatCompletionResponse.Choices[0].Message.Content
}

View File

@ -3,66 +3,15 @@ package main
import (
"bytes"
"encoding/json"
"fmt"
"io"
"net/http"
"strings"
"github.com/edgedb/edgedb-go"
"github.com/gofiber/fiber/v2"
)
// NimChatCompletionRequest is the JSON payload for NVIDIA NIM's
// OpenAI-compatible chat-completions endpoint.
type NimChatCompletionRequest struct {
	Model       string           `json:"model"`
	Messages    []RequestMessage `json:"messages"`
	MaxTokens   int              `json:"max_tokens"`
	Temperature float64          `json:"temperature"`
}

// NimChatCompletionResponse is the JSON body returned by NIM's
// chat-completions endpoint.
type NimChatCompletionResponse struct {
	ID      string      `json:"id"`
	Object  string      `json:"object"`
	Created int64       `json:"created"` // Unix timestamp of creation
	Model   string      `json:"model"`
	Usage   NimUsage    `json:"usage"`   // token accounting used for billing
	Choices []NimChoice `json:"choices"` // may be empty on API errors
}

// NimUsage reports token counts for one completion.
type NimUsage struct {
	PromptTokens     int32 `json:"prompt_tokens"`
	CompletionTokens int32 `json:"completion_tokens"`
	TotalTokens      int32 `json:"total_tokens"`
}

// NimChoice is one generated reply inside a NimChatCompletionResponse.
// Unlike the other providers' Choice types, the message decodes into
// RequestMessage rather than Message.
type NimChoice struct {
	Message      RequestMessage `json:"message"`
	FinishReason string         `json:"finish_reason"` // why generation stopped
	Index        int            `json:"index"`
}
// addNimMessage asks NVIDIA NIM for a chat completion over the currently
// selected messages and stores the reply (or an error placeholder) as a bot
// message, returning the new message's UUID.
func addNimMessage(c *fiber.Ctx, llm LLM, selected bool) edgedb.UUID {
	history := getAllSelectedMessages(c)
	completion, err := RequestNim(c, llm.Model.ModelID, history, float64(llm.Temperature), llm.Context, int(llm.MaxToken))
	switch {
	case err != nil:
		fmt.Println("Error requesting NIM: ", err)
		return insertBotMessage(c, "Error requesting NIM, model may not be available anymore. Better error message in development.", selected, llm.ID)
	case len(completion.Choices) == 0:
		fmt.Println("No response from NIM")
		return insertBotMessage(c, "No response from NIM", selected, llm.ID)
	default:
		return insertBotMessage(c, completion.Choices[0].Message.Content, selected, llm.ID)
	}
}
func TestNimKey(apiKey string) bool {
url := "https://integrate.api.nvidia.com/v1/chat/completions"
//apiKey := "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"
fmt.Println("Testing new Nvidia NIM key:", apiKey)
// Convert messages to OpenAI format
nimMessages := []RequestMessage{
@ -72,7 +21,7 @@ func TestNimKey(apiKey string) bool {
},
}
requestBody := NimChatCompletionRequest{
requestBody := OpenaiChatCompletionRequest{
Model: "meta/llama3-8b-instruct",
Messages: nimMessages,
Temperature: 0,
@ -81,13 +30,11 @@ func TestNimKey(apiKey string) bool {
jsonBody, err := json.Marshal(requestBody)
if err != nil {
fmt.Println("Error when testing NIM key. Cant parse JSON request.")
return false
}
req, err := http.NewRequest("POST", url, bytes.NewBuffer(jsonBody))
if err != nil {
fmt.Println("Error when testing NIM key. Cant generate new request")
return false
}
@ -97,41 +44,33 @@ func TestNimKey(apiKey string) bool {
client := &http.Client{}
resp, err := client.Do(req)
if err != nil {
fmt.Println("Error when testing NIM key. Cant send request.")
return false
}
defer resp.Body.Close()
body, err := io.ReadAll(resp.Body)
if err != nil {
fmt.Println("Error when testing NIM key. Cant read response.")
return false
}
var chatCompletionResponse NimChatCompletionResponse
var chatCompletionResponse OpenaiChatCompletionResponse
err = json.Unmarshal(body, &chatCompletionResponse)
if err != nil {
fmt.Println(resp.Status)
fmt.Println(resp.Body)
fmt.Println("Error when testing NIM key. Cant unmarshal response.")
return false
}
if chatCompletionResponse.Usage.CompletionTokens == 0 {
fmt.Println(resp.Status)
fmt.Println(resp.Body)
fmt.Println("Error when testing NIM key. No completion token.")
return false
}
Content := chatCompletionResponse.Choices[0].Message.Content
fmt.Println(Content)
return true
}
func RequestNim(c *fiber.Ctx, model string, messages []Message, temperature float64, context string, maxToken int) (NimChatCompletionResponse, error) {
func RequestNim(c *fiber.Ctx, llm LLM, messages []Message) string {
model := llm.Model.ModelID
temperature := float64(llm.Temperature)
context := llm.Context
maxTokens := int(llm.MaxToken)
var apiKey string
err := edgeGlobalClient.WithGlobals(map[string]interface{}{"ext::auth::client_token": c.Cookies("jade-edgedb-auth-token")}).QuerySingle(edgeCtx, `
with
@ -143,26 +82,26 @@ func RequestNim(c *fiber.Ctx, model string, messages []Message, temperature floa
select filtered_keys.key limit 1
`, &apiKey, "nim")
if err != nil {
return NimChatCompletionResponse{}, fmt.Errorf("error getting NIM API key: %w", err)
return "JADE internal error: 05-00-0000. Please contact the support."
}
url := "https://integrate.api.nvidia.com/v1/chat/completions"
requestBody := NimChatCompletionRequest{
requestBody := OpenaiChatCompletionRequest{
Model: model,
Messages: Message2RequestMessage(messages, context),
MaxTokens: maxToken,
MaxTokens: maxTokens,
Temperature: temperature,
}
jsonBody, err := json.Marshal(requestBody)
if err != nil {
return NimChatCompletionResponse{}, fmt.Errorf("error marshaling JSON: %w", err)
return "JADE internal error: 05-01-0001. Please contact the support."
}
req, err := http.NewRequest("POST", url, bytes.NewBuffer(jsonBody))
if err != nil {
return NimChatCompletionResponse{}, fmt.Errorf("error creating request: %w", err)
return "JADE internal error: 05-02-0002. Please contact the support."
}
req.Header.Set("Content-Type", "application/json")
@ -171,19 +110,25 @@ func RequestNim(c *fiber.Ctx, model string, messages []Message, temperature floa
client := &http.Client{}
resp, err := client.Do(req)
if err != nil {
return NimChatCompletionResponse{}, fmt.Errorf("error sending request: %w", err)
return "JADE internal error: 05-02-0003. Please contact the support."
}
defer resp.Body.Close()
body, err := io.ReadAll(resp.Body)
if err != nil {
return NimChatCompletionResponse{}, fmt.Errorf("error reading response body: %w", err)
return "JADE internal error: 05-01-0004. Please contact the support."
}
var chatCompletionResponse NimChatCompletionResponse
for key, value := range OpenaiErrorCodes {
if strings.Contains(resp.Status, key) {
return value
}
}
var chatCompletionResponse OpenaiChatCompletionResponse
err = json.Unmarshal(body, &chatCompletionResponse)
if err != nil {
return NimChatCompletionResponse{}, fmt.Errorf("error unmarshaling JSON: %w", err)
return "JADE internal error: 05-01-0005. Please contact the support."
}
var usedModelInfo ModelInfo
@ -196,12 +141,16 @@ func RequestNim(c *fiber.Ctx, model string, messages []Message, temperature floa
LIMIT 1
`, &usedModelInfo, model)
if err != nil {
return NimChatCompletionResponse{}, fmt.Errorf("error getting model info: %w", err)
return "JADE internal error: 05-00-0006. Please contact the support."
}
var inputCost float32 = float32(chatCompletionResponse.Usage.PromptTokens) * usedModelInfo.InputPrice
var outputCost float32 = float32(chatCompletionResponse.Usage.CompletionTokens) * usedModelInfo.OutputPrice
addUsage(c, inputCost, outputCost, chatCompletionResponse.Usage.PromptTokens, chatCompletionResponse.Usage.CompletionTokens, model)
return chatCompletionResponse, nil
if len(chatCompletionResponse.Choices) == 0 {
return "JADE internal error: 05-03-0007. Please contact the support."
}
return chatCompletionResponse.Choices[0].Message.Content
}

View File

@ -3,11 +3,10 @@ package main
import (
"bytes"
"encoding/json"
"fmt"
"io"
"net/http"
"strings"
"github.com/edgedb/edgedb-go"
"github.com/gofiber/fiber/v2"
)
@ -39,23 +38,16 @@ type OpenaiChoice struct {
Index int `json:"index"`
}
func addOpenaiMessage(c *fiber.Ctx, llm LLM, selected bool) edgedb.UUID {
Messages := getAllSelectedMessages(c)
var OpenaiErrorCodes map[string]string
chatCompletion, err := RequestOpenai(c, llm.Model.ModelID, Messages, float64(llm.Temperature), llm.Context, int(llm.MaxToken))
if err != nil {
fmt.Println("Error requesting OpenAI: ", err)
id := insertBotMessage(c, "Error requesting OpenAI, model may not be available anymore. Better error message in development.", selected, llm.ID)
return id
} else if len(chatCompletion.Choices) == 0 {
fmt.Println("No response from OpenAI")
id := insertBotMessage(c, "No response from OpenAI", selected, llm.ID)
return id
} else {
Content := chatCompletion.Choices[0].Message.Content
id := insertBotMessage(c, Content, selected, llm.ID)
return id
}
// init populates OpenaiErrorCodes, mapping HTTP status codes (matched as
// substrings of the response status line) to user-facing explanations shared
// by the OpenAI-compatible providers.
func init() {
	OpenaiErrorCodes = make(map[string]string)
	OpenaiErrorCodes["401"] = "Invalid Authentication - Ensure that the API key is still valid."
	OpenaiErrorCodes["403"] = "Accessing the API from an unsupported country, region, or territory."
	// Bug fix: "429" was assigned twice, so the rate-limit explanation was
	// silently overwritten by the quota one. Both causes share the same
	// status code, so present them together in a single message.
	OpenaiErrorCodes["429"] = "Rate limit reached or quota exhausted - You are sending requests too quickly, or you have run out of credits / hit your maximum monthly spend."
	OpenaiErrorCodes["500"] = "Issue on Provider servers - Retry your request after a brief wait and contact the provider if the issue persists."
	OpenaiErrorCodes["503"] = "Servers are experiencing high traffic - Please retry your requests after a brief wait."
}
func TestOpenaiKey(apiKey string) bool {
@ -112,7 +104,12 @@ func TestOpenaiKey(apiKey string) bool {
return true
}
func RequestOpenai(c *fiber.Ctx, model string, messages []Message, temperature float64, context string, maxTokens int) (OpenaiChatCompletionResponse, error) {
func RequestOpenai(c *fiber.Ctx, llm LLM, messages []Message) string {
model := llm.Model.ModelID
temperature := float64(llm.Temperature)
context := llm.Context
maxTokens := int(llm.MaxToken)
var apiKey string
err := edgeGlobalClient.WithGlobals(map[string]interface{}{"ext::auth::client_token": c.Cookies("jade-edgedb-auth-token")}).QuerySingle(edgeCtx, `
with
@ -124,7 +121,7 @@ func RequestOpenai(c *fiber.Ctx, model string, messages []Message, temperature f
select filtered_keys.key limit 1
`, &apiKey, "openai")
if err != nil {
return OpenaiChatCompletionResponse{}, fmt.Errorf("error getting OpenAI API key: %w", err)
return "JADE internal error: 00-00-0000. Please contact the support."
}
url := "https://api.openai.com/v1/chat/completions"
@ -138,12 +135,12 @@ func RequestOpenai(c *fiber.Ctx, model string, messages []Message, temperature f
jsonBody, err := json.Marshal(requestBody)
if err != nil {
return OpenaiChatCompletionResponse{}, fmt.Errorf("error marshaling JSON: %w", err)
return "JADE internal error: 00-01-0001. Please contact the support."
}
req, err := http.NewRequest("POST", url, bytes.NewBuffer(jsonBody))
if err != nil {
return OpenaiChatCompletionResponse{}, fmt.Errorf("error creating request: %w", err)
return "JADE internal error: 00-02-0002. Please contact the support."
}
req.Header.Set("Content-Type", "application/json")
@ -152,19 +149,25 @@ func RequestOpenai(c *fiber.Ctx, model string, messages []Message, temperature f
client := &http.Client{}
resp, err := client.Do(req)
if err != nil {
return OpenaiChatCompletionResponse{}, fmt.Errorf("error sending request: %w", err)
return "JADE internal error: 00-02-0003. Please contact the support."
}
defer resp.Body.Close()
body, err := io.ReadAll(resp.Body)
if err != nil {
return OpenaiChatCompletionResponse{}, fmt.Errorf("error reading response body: %w", err)
return "JADE internal error: 00-01-0004. Please contact the support."
}
for key, value := range OpenaiErrorCodes {
if strings.Contains(resp.Status, key) {
return value
}
}
var chatCompletionResponse OpenaiChatCompletionResponse
err = json.Unmarshal(body, &chatCompletionResponse)
if err != nil {
return OpenaiChatCompletionResponse{}, fmt.Errorf("error unmarshaling JSON: %w", err)
return "JADE internal error: 00-01-0005. Please contact the support."
}
var usedModelInfo ModelInfo
@ -177,12 +180,16 @@ func RequestOpenai(c *fiber.Ctx, model string, messages []Message, temperature f
LIMIT 1
`, &usedModelInfo, model)
if err != nil {
return OpenaiChatCompletionResponse{}, fmt.Errorf("error getting model info: %w", err)
return "JADE internal error: 00-00-0006. Please contact the support."
}
var inputCost float32 = float32(chatCompletionResponse.Usage.PromptTokens) * usedModelInfo.InputPrice
var outputCost float32 = float32(chatCompletionResponse.Usage.CompletionTokens) * usedModelInfo.OutputPrice
addUsage(c, inputCost, outputCost, chatCompletionResponse.Usage.PromptTokens, chatCompletionResponse.Usage.CompletionTokens, model)
return chatCompletionResponse, nil
if len(chatCompletionResponse.Choices) == 0 {
return "JADE internal error: 00-03-0007. Please contact the support."
}
return chatCompletionResponse.Choices[0].Message.Content
}

View File

@ -3,62 +3,13 @@ package main
import (
"bytes"
"encoding/json"
"fmt"
"io"
"net/http"
"strings"
"github.com/edgedb/edgedb-go"
"github.com/gofiber/fiber/v2"
)
// PerplexityChatCompletionRequest is the JSON request body sent to the
// Perplexity chat-completions endpoint (OpenAI-compatible schema).
type PerplexityChatCompletionRequest struct {
	Model       string           `json:"model"`       // provider model ID, e.g. "llama-3-8b-instruct"
	Messages    []RequestMessage `json:"messages"`    // conversation history plus system context
	MaxTokens   int              `json:"max_tokens"`  // completion length cap
	Temperature float64          `json:"temperature"` // sampling temperature
}
// PerplexityChatCompletionResponse mirrors the JSON returned by the
// Perplexity chat-completions endpoint.
type PerplexityChatCompletionResponse struct {
	ID      string             `json:"id"`
	Object  string             `json:"object"`
	Created int64              `json:"created"` // creation timestamp — presumably Unix seconds; confirm against API docs
	Model   string             `json:"model"`
	Usage   PerplexityUsage    `json:"usage"`   // token counts used for billing (see addUsage callers)
	Choices []PerplexityChoice `json:"choices"` // generated answers; callers read Choices[0]
}
// PerplexityUsage reports token consumption for one completion request.
type PerplexityUsage struct {
	PromptTokens     int32 `json:"prompt_tokens"`     // tokens in the submitted messages
	CompletionTokens int32 `json:"completion_tokens"` // tokens in the generated reply
	TotalTokens      int32 `json:"total_tokens"`      // prompt + completion
}
// PerplexityChoice is a single generated answer within a completion response.
type PerplexityChoice struct {
	Message      Message `json:"message"`       // the assistant reply; callers read Message.Content
	FinishReason string  `json:"finish_reason"` // why generation stopped (e.g. length, stop)
	Index        int     `json:"index"`
}
// addPerplexityMessage sends the currently selected conversation messages to
// Perplexity and persists the reply (or an error placeholder) as a new bot
// message via insertBotMessage, returning the inserted message's UUID.
func addPerplexityMessage(c *fiber.Ctx, llm LLM, selected bool) edgedb.UUID {
	Messages := getAllSelectedMessages(c)
	chatCompletion, err := RequestPerplexity(c, llm.Model.ModelID, Messages, float64(llm.Temperature), llm.Context, int(llm.MaxToken))
	if err != nil {
		// Request failed: store a user-visible placeholder instead of a reply.
		fmt.Println("Error requesting Perplexity: ", err)
		id := insertBotMessage(c, "Error requesting Perplexity, model may not be available anymore. Better error message in development.", selected, llm.ID)
		return id
	} else if len(chatCompletion.Choices) == 0 {
		// The API answered but returned no choices.
		fmt.Println("No response from Perplexity")
		id := insertBotMessage(c, "No response from Perplexity", selected, llm.ID)
		return id
	} else {
		// Normal path: persist the first choice's content.
		Content := chatCompletion.Choices[0].Message.Content
		id := insertBotMessage(c, Content, selected, llm.ID)
		return id
	}
}
func TestPerplexityKey(apiKey string) bool {
url := "https://api.perplexity.ai/chat/completions"
@ -70,7 +21,7 @@ func TestPerplexityKey(apiKey string) bool {
},
}
requestBody := PerplexityChatCompletionRequest{
requestBody := OpenaiChatCompletionRequest{
Model: "llama-3-8b-instruct",
Messages: perplexityMessages,
Temperature: 0,
@ -102,7 +53,7 @@ func TestPerplexityKey(apiKey string) bool {
return false
}
var chatCompletionResponse PerplexityChatCompletionResponse
var chatCompletionResponse OpenaiChatCompletionResponse
err = json.Unmarshal(body, &chatCompletionResponse)
if err != nil {
return false
@ -113,7 +64,12 @@ func TestPerplexityKey(apiKey string) bool {
return true
}
func RequestPerplexity(c *fiber.Ctx, model string, messages []Message, temperature float64, context string, maxTokens int) (PerplexityChatCompletionResponse, error) {
func RequestPerplexity(c *fiber.Ctx, llm LLM, messages []Message) string {
model := llm.Model.ModelID
temperature := float64(llm.Temperature)
context := llm.Context
maxTokens := int(llm.MaxToken)
var apiKey string
err := edgeGlobalClient.WithGlobals(map[string]interface{}{"ext::auth::client_token": c.Cookies("jade-edgedb-auth-token")}).QuerySingle(edgeCtx, `
with
@ -125,12 +81,12 @@ func RequestPerplexity(c *fiber.Ctx, model string, messages []Message, temperatu
select filtered_keys.key limit 1
`, &apiKey, "perplexity")
if err != nil {
return PerplexityChatCompletionResponse{}, fmt.Errorf("error getting Perplexity API key: %w", err)
return "JADE internal error: 06-00-0000. Please contact the support."
}
url := "https://api.perplexity.ai/chat/completions"
requestBody := PerplexityChatCompletionRequest{
requestBody := OpenaiChatCompletionRequest{
Model: model,
Messages: Message2RequestMessage(messages, context),
MaxTokens: maxTokens,
@ -139,12 +95,12 @@ func RequestPerplexity(c *fiber.Ctx, model string, messages []Message, temperatu
jsonBody, err := json.Marshal(requestBody)
if err != nil {
return PerplexityChatCompletionResponse{}, fmt.Errorf("error marshaling JSON: %w", err)
return "JADE internal error: 06-01-0001. Please contact the support."
}
req, err := http.NewRequest("POST", url, bytes.NewBuffer(jsonBody))
if err != nil {
return PerplexityChatCompletionResponse{}, fmt.Errorf("error creating request: %w", err)
return "JADE internal error: 06-02-0002. Please contact the support."
}
req.Header.Set("Content-Type", "application/json")
@ -153,19 +109,25 @@ func RequestPerplexity(c *fiber.Ctx, model string, messages []Message, temperatu
client := &http.Client{}
resp, err := client.Do(req)
if err != nil {
return PerplexityChatCompletionResponse{}, fmt.Errorf("error sending request: %w", err)
return "JADE internal error: 06-02-0003. Please contact the support."
}
defer resp.Body.Close()
body, err := io.ReadAll(resp.Body)
if err != nil {
return PerplexityChatCompletionResponse{}, fmt.Errorf("error reading response body: %w", err)
return "JADE internal error: 06-01-0004. Please contact the support."
}
var chatCompletionResponse PerplexityChatCompletionResponse
for key, value := range OpenaiErrorCodes {
if strings.Contains(resp.Status, key) {
return value
}
}
var chatCompletionResponse OpenaiChatCompletionResponse
err = json.Unmarshal(body, &chatCompletionResponse)
if err != nil {
return PerplexityChatCompletionResponse{}, fmt.Errorf("error unmarshaling JSON: %w", err)
return "JADE internal error: 06-01-0005. Please contact the support."
}
var usedModelInfo ModelInfo
@ -178,19 +140,16 @@ func RequestPerplexity(c *fiber.Ctx, model string, messages []Message, temperatu
LIMIT 1
`, &usedModelInfo, model)
if err != nil {
return PerplexityChatCompletionResponse{}, fmt.Errorf("error getting model info: %w", err)
return "JADE internal error: 06-00-0006. Please contact the support."
}
var inputCost float32 = float32(chatCompletionResponse.Usage.PromptTokens) * usedModelInfo.InputPrice
var outputCost float32 = float32(chatCompletionResponse.Usage.CompletionTokens) * usedModelInfo.OutputPrice
// Online models (model IDs ending in "-online") incur a small extra cost.
if strings.HasSuffix(model, "-online") {
inputCost += 0.005
outputCost += 0.005
}
addUsage(c, inputCost, outputCost, chatCompletionResponse.Usage.PromptTokens, chatCompletionResponse.Usage.CompletionTokens, model)
return chatCompletionResponse, nil
if len(chatCompletionResponse.Choices) == 0 {
return "JADE internal error: 06-03-0007. Please contact the support."
}
return chatCompletionResponse.Choices[0].Message.Content
}

View File

@ -3,61 +3,28 @@ package main
import (
"bytes"
"encoding/json"
"fmt"
"io"
"net/http"
"strings"
"github.com/edgedb/edgedb-go"
"github.com/gofiber/fiber/v2"
)
// TogetherChatCompletionRequest is the JSON request body sent to the
// Together AI completions endpoint.
type TogetherChatCompletionRequest struct {
	Model       string           `json:"model"`       // provider model ID, e.g. "meta-llama/Meta-Llama-3.1-8B-Instruct-Turbo"
	Messages    []RequestMessage `json:"messages"`    // conversation history plus system context
	MaxTokens   int              `json:"max_tokens"`  // completion length cap
	Temperature float64          `json:"temperature"` // sampling temperature
}
type TogetherChatCompletionResponse struct {
ID string `json:"id"`
Object string `json:"object"`
Created int64 `json:"created"`
Model string `json:"model"`
Usage TogetherUsage `json:"usage"`
Usage OpenaiUsage `json:"usage"`
Choices []TogetherChoice `json:"choices"`
}
// TogetherUsage reports token consumption for one completion request.
type TogetherUsage struct {
	PromptTokens     int32 `json:"prompt_tokens"`     // tokens in the submitted messages
	CompletionTokens int32 `json:"completion_tokens"` // tokens in the generated reply
	TotalTokens      int32 `json:"total_tokens"`      // prompt + completion
}
// TogetherChoice is a single generated answer within a completion response.
// Note: unlike the chat-style providers, Together's completions endpoint
// returns plain text rather than a message object.
type TogetherChoice struct {
	Text         string `json:"text"`          // generated completion text; callers read Choices[0].Text
	FinishReason string `json:"finish_reason"` // why generation stopped (e.g. length, stop)
	Index        int    `json:"index"`
}
// addTogetherMessage sends the currently selected conversation messages to
// Together AI and persists the reply (or an error placeholder) as a new bot
// message via insertBotMessage, returning the inserted message's UUID.
func addTogetherMessage(c *fiber.Ctx, llm LLM, selected bool) edgedb.UUID {
	Messages := getAllSelectedMessages(c)
	chatCompletion, err := RequestTogether(c, llm.Model.ModelID, Messages, float64(llm.Temperature), llm.Context, int(llm.MaxToken))
	if err != nil {
		// Request failed: store a user-visible placeholder instead of a reply.
		fmt.Println("Error requesting Together: ", err)
		id := insertBotMessage(c, "Error requesting Together, model may not be available anymore. Better error message in development.", selected, llm.ID)
		return id
	} else if len(chatCompletion.Choices) == 0 {
		// The API answered but returned no choices.
		fmt.Println("No response from Together")
		id := insertBotMessage(c, "No response from Together", selected, llm.ID)
		return id
	} else {
		// Normal path: persist the first choice's text (completions-style API).
		Content := chatCompletion.Choices[0].Text
		id := insertBotMessage(c, Content, selected, llm.ID)
		return id
	}
}
func TestTogetherKey(apiKey string) bool {
url := "https://api.together.xyz/v1/completions"
@ -69,7 +36,7 @@ func TestTogetherKey(apiKey string) bool {
},
}
requestBody := TogetherChatCompletionRequest{
requestBody := OpenaiChatCompletionRequest{
Model: "meta-llama/Meta-Llama-3.1-8B-Instruct-Turbo",
Messages: togetherMessages,
Temperature: 0,
@ -101,7 +68,7 @@ func TestTogetherKey(apiKey string) bool {
return false
}
var chatCompletionResponse TogetherChatCompletionResponse
var chatCompletionResponse OpenaiChatCompletionResponse
err = json.Unmarshal(body, &chatCompletionResponse)
if err != nil {
return false
@ -112,7 +79,12 @@ func TestTogetherKey(apiKey string) bool {
return true
}
func RequestTogether(c *fiber.Ctx, model string, messages []Message, temperature float64, context string, maxTokens int) (TogetherChatCompletionResponse, error) {
func RequestTogether(c *fiber.Ctx, llm LLM, messages []Message) string {
model := llm.Model.ModelID
temperature := float64(llm.Temperature)
context := llm.Context
maxTokens := int(llm.MaxToken)
var apiKey string
err := edgeGlobalClient.WithGlobals(map[string]interface{}{"ext::auth::client_token": c.Cookies("jade-edgedb-auth-token")}).QuerySingle(edgeCtx, `
with
@ -124,12 +96,12 @@ func RequestTogether(c *fiber.Ctx, model string, messages []Message, temperature
select filtered_keys.key limit 1
`, &apiKey, "together")
if err != nil {
return TogetherChatCompletionResponse{}, fmt.Errorf("error getting Together AI API key: %w", err)
return "JADE internal error: 07-00-0000. Please contact the support."
}
url := "https://api.together.xyz/v1/completions"
requestBody := TogetherChatCompletionRequest{
requestBody := OpenaiChatCompletionRequest{
Model: model,
Messages: Message2RequestMessage(messages, context),
MaxTokens: maxTokens,
@ -138,12 +110,12 @@ func RequestTogether(c *fiber.Ctx, model string, messages []Message, temperature
jsonBody, err := json.Marshal(requestBody)
if err != nil {
return TogetherChatCompletionResponse{}, fmt.Errorf("error marshaling JSON: %w", err)
return "JADE internal error: 07-01-0001. Please contact the support."
}
req, err := http.NewRequest("POST", url, bytes.NewBuffer(jsonBody))
if err != nil {
return TogetherChatCompletionResponse{}, fmt.Errorf("error creating request: %w", err)
return "JADE internal error: 07-02-0002. Please contact the support."
}
req.Header.Set("Content-Type", "application/json")
@ -152,19 +124,25 @@ func RequestTogether(c *fiber.Ctx, model string, messages []Message, temperature
client := &http.Client{}
resp, err := client.Do(req)
if err != nil {
return TogetherChatCompletionResponse{}, fmt.Errorf("error sending request: %w", err)
return "JADE internal error: 07-02-0003. Please contact the support."
}
defer resp.Body.Close()
body, err := io.ReadAll(resp.Body)
if err != nil {
return TogetherChatCompletionResponse{}, fmt.Errorf("error reading response body: %w", err)
return "JADE internal error: 07-01-0004. Please contact the support."
}
for key, value := range OpenaiErrorCodes {
if strings.Contains(resp.Status, key) {
return value
}
}
var chatCompletionResponse TogetherChatCompletionResponse
err = json.Unmarshal(body, &chatCompletionResponse)
if err != nil {
return TogetherChatCompletionResponse{}, fmt.Errorf("error unmarshaling JSON: %w", err)
return "JADE internal error: 07-01-0005. Please contact the support."
}
var usedModelInfo ModelInfo
@ -177,12 +155,16 @@ func RequestTogether(c *fiber.Ctx, model string, messages []Message, temperature
LIMIT 1
`, &usedModelInfo, model)
if err != nil {
return TogetherChatCompletionResponse{}, fmt.Errorf("error getting model info: %w", err)
return "JADE internal error: 07-00-0006. Please contact the support."
}
var inputCost float32 = float32(chatCompletionResponse.Usage.PromptTokens) * usedModelInfo.InputPrice
var outputCost float32 = float32(chatCompletionResponse.Usage.CompletionTokens) * usedModelInfo.OutputPrice
addUsage(c, inputCost, outputCost, chatCompletionResponse.Usage.PromptTokens, chatCompletionResponse.Usage.CompletionTokens, model)
return chatCompletionResponse, nil
if len(chatCompletionResponse.Choices) == 0 {
return "JADE internal error: 07-03-0007. Please contact the support."
}
return chatCompletionResponse.Choices[0].Text
}

View File

@ -25,3 +25,4 @@
[ ] Change the terms of service and enter keys page to an HTML
[ ] Split Chat.go into smaller files
[ ] Create a Request package
[ ] Replace each TestProvider key check with a call to the corresponding RequestProvider function, then delete the TestProvider functions

View File

@ -147,7 +147,6 @@ func addKeys(c *fiber.Ctx) error {
"anthropic": TestAnthropicKey,
"mistral": TestMistralKey,
"groq": TestGroqKey,
"gooseai": TestGooseaiKey,
"google": TestGoogleKey,
"nim": TestNimKey,
"perplexity": TestPerplexityKey,