Lots of fixes

Can now add a new API key again, and fixes some minor bugs
parent 4dca94b1ee
commit edd5c58a7b
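The change that repeats across the provider request functions in this diff is a new trailing `testApiKey` parameter: when it is empty the function looks the user's stored key up in EdgeDB as before, and when it is non-empty that key is used directly and the provider's raw reply is returned early, skipping the usage/pricing bookkeeping. A minimal, self-contained sketch of that pattern (the `LLM`/`Message` types and `fetchStoredKey` below are simplified stand-ins, not the project's actual definitions):

```go
package main

import (
	"errors"
	"fmt"
)

// Simplified stand-ins for the project's types and its EdgeDB key lookup.
type LLM struct{ Company string }
type Message struct{ Role, Content string }

func fetchStoredKey(company string) (string, error) {
	// Placeholder for the EdgeDB query the real RequestXxx functions run.
	return "", errors.New("no stored key for " + company)
}

// requestProvider mirrors the shape each RequestXxx function takes in this commit:
// testApiKey == "" means "normal chat request with the stored key",
// testApiKey != "" means "validate this key" and return the reply without bookkeeping.
func requestProvider(llm LLM, messages []Message, testApiKey string) string {
	apiKey := testApiKey
	if testApiKey == "" {
		stored, err := fetchStoredKey(llm.Company)
		if err != nil {
			return "JADE internal error: please contact the support."
		}
		apiKey = stored
	}

	// ... call the provider's HTTP API with apiKey here ...
	content := fmt.Sprintf("reply from %s using key %q to %d message(s)", llm.Company, apiKey, len(messages))

	if testApiKey != "" {
		// Key-validation path: skip the token-usage / pricing bookkeeping.
		return content
	}
	// Normal path: record usage via the in-memory ModelInfo cache (omitted).
	return content
}

func main() {
	llm := LLM{Company: "openai"}
	msgs := []Message{{Role: "user", Content: "Say OK"}}
	fmt.Println(requestProvider(llm, msgs, "sk-candidate")) // key-validation path
	fmt.Println(requestProvider(llm, msgs, ""))             // falls back to stored key
}
```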
@@ -261,7 +261,8 @@ func handleCallbackSignup(c *fiber.Ctx) error {
         INSERT Conversation {
             name := 'Default',
             user := global currentUser,
-            position := 1
+            position := 1,
+            selected := true,
         }`)
     if err != nil {
         fmt.Println("Error creating default conversation")
@@ -105,8 +105,30 @@ type CompanyInfo struct {
     Icon string `edgedb:"icon"`
 }
 
-var edgeCtx context.Context
-var edgeGlobalClient *edgedb.Client
+var (
+    edgeCtx context.Context
+    edgeGlobalClient *edgedb.Client
+    allModelInfo []ModelInfo
+    allCompany []CompanyInfo
+)
+
+func getModelInfoByID(id edgedb.UUID) (ModelInfo, bool) {
+    for _, model := range allModelInfo {
+        if model.ID == id {
+            return model, true
+        }
+    }
+    return ModelInfo{}, false
+}
+
+func getCompanyInfoByID(id edgedb.UUID) (CompanyInfo, bool) {
+    for _, company := range allCompany {
+        if company.ID == id {
+            return company, true
+        }
+    }
+    return CompanyInfo{}, false
+}
 
 func init() {
     var ctx = context.Background()

@@ -118,6 +140,16 @@ func init() {
     edgeCtx = ctx
     edgeGlobalClient = client
 
+    err = edgeGlobalClient.Query(edgeCtx, `SELECT ModelInfo { id, inputPrice, outputPrice, modelID, name, company}`, &allModelInfo)
+    if err != nil {
+        panic("Can't get all ModelInfo")
+    }
+
+    err = edgeGlobalClient.Query(edgeCtx, `SELECT Company { id, icon, name}`, &allCompany)
+    if err != nil {
+        panic("Can't get all Company")
+    }
 }
 
 func checkIfLogin(c *fiber.Ctx) bool {
@@ -53,6 +53,7 @@ func GeneratePlaceholderHTML(c *fiber.Ctx, message string, selectedLLMIds []stri
                 key
             },
             modelInfo : {
+                id,
                 modelID,
                 company : {
                     icon,

@@ -138,7 +139,7 @@ func GenerateMultipleMessagesHandler(c *fiber.Ctx) error {
     defer cancel() // Ensure the context is cancelled to free resources
 
     // Determine which message function to call based on the model
-    var addMessageFunc func(c *fiber.Ctx, llm LLM, messages []Message) string
+    var addMessageFunc func(c *fiber.Ctx, llm LLM, messages []Message, testApiKey string) string
     switch selectedLLMs[idx].Model.Company.Name {
     case "openai":
         addMessageFunc = RequestOpenai

@@ -149,7 +150,7 @@ func GenerateMultipleMessagesHandler(c *fiber.Ctx) error {
     case "groq":
         addMessageFunc = RequestGroq
     case "huggingface":
-        addMessageFunc = RequestHuggingface
+        addMessageFunc = RequestCustomEndpoint
     case "google":
         addMessageFunc = RequestGoogle
     case "perplexity":

@@ -164,7 +165,7 @@ func GenerateMultipleMessagesHandler(c *fiber.Ctx) error {
         addMessageFunc = RequestDeepseek
     }
 
-    var content string = addMessageFunc(c, selectedLLMs[idx], messages)
+    var content string = addMessageFunc(c, selectedLLMs[idx], messages, "")
     var messageUUID edgedb.UUID = insertBotMessage(c, content, selectedLLMs[idx].ID)
 
     var message Message
@@ -50,7 +50,7 @@ func init() {
     AnthropicErrorCodes["529"] = "Provider error: Anthropic’s server is temporarily overloaded."
 }
 
-func RequestAnthropic(c *fiber.Ctx, llm LLM, messages []Message) string {
+func RequestAnthropic(c *fiber.Ctx, llm LLM, messages []Message, testApiKey string) string {
     model := llm.Model.ModelID
     temperature := float64(llm.Temperature)
     context := llm.Context

@@ -63,15 +63,20 @@ func RequestAnthropic(c *fiber.Ctx, llm LLM, messages []Message) string {
     var apiKey struct {
         Key string `edgedb:"key"`
     }
-    err := edgeGlobalClient.WithGlobals(map[string]interface{}{"ext::auth::client_token": c.Cookies("jade-edgedb-auth-token")}).QuerySingle(edgeCtx, `
+
+    if testApiKey == "" {
+        err := edgeGlobalClient.WithGlobals(map[string]interface{}{"ext::auth::client_token": c.Cookies("jade-edgedb-auth-token")}).QuerySingle(edgeCtx, `
         SELECT Key {
             key
         }
         FILTER .<keys[is Setting].<setting[is User] = global currentUser and .company.name = "anthropic"
         LIMIT 1
     `, &apiKey, "anthropic")
-    if err != nil {
-        return "JADE internal error: 01-00-0000. Please contact the support."
+        if err != nil {
+            return "JADE internal error: 01-00-0000. Please contact the support."
+        }
+    } else {
+        apiKey.Key = testApiKey
     }
 
     url := "https://api.anthropic.com/v1/messages"

@@ -127,16 +132,13 @@ func RequestAnthropic(c *fiber.Ctx, llm LLM, messages []Message) string {
         return "JADE internal error: 01-01-0005. Please contact the support."
     }
 
+    if testApiKey != "" {
+        return chatCompletionResponse.Content[0].Text
+    }
+
-    var usedModelInfo ModelInfo
-    err = edgeGlobalClient.WithGlobals(map[string]interface{}{"ext::auth::client_token": c.Cookies("jade-edgedb-auth-token")}).QuerySingle(edgeCtx, `
-        SELECT ModelInfo {
-            inputPrice,
-            outputPrice
-        }
-        FILTER .modelID = <str>$0
-        LIMIT 1
-    `, &usedModelInfo, model)
-    if err != nil {
+    usedModelInfo, found := getModelInfoByID(llm.Model.ID)
+    if !found {
         logErrorCode.Println("01-00-0006")
         return "JADE internal error: 01-00-0006. Please contact the support."
     }
@@ -10,7 +10,7 @@ import (
     "github.com/gofiber/fiber/v2"
 )
 
-func RequestHuggingface(c *fiber.Ctx, llm LLM, messages []Message) string {
+func RequestCustomEndpoint(c *fiber.Ctx, llm LLM, messages []Message, testApiKey string) string {
     url := llm.Endpoint.Endpoint
     temperature := float64(llm.Temperature)
     context := llm.Context
@@ -23,7 +23,7 @@ func init() {
     DeepseekErrorCodes["503"] = "Provider error: Deepseek’s server is temporarily overloaded."
 }
 
-func RequestDeepseek(c *fiber.Ctx, llm LLM, messages []Message) string {
+func RequestDeepseek(c *fiber.Ctx, llm LLM, messages []Message, testApiKey string) string {
     model := llm.Model.ModelID
     temperature := float64(llm.Temperature)
     context := llm.Context

@@ -32,7 +32,8 @@ func RequestDeepseek(c *fiber.Ctx, llm LLM, messages []Message) string {
     url := "https://api.deepseek.com/chat/completions"
 
     var apiKey string
-    err := edgeGlobalClient.WithGlobals(map[string]interface{}{"ext::auth::client_token": c.Cookies("jade-edgedb-auth-token")}).QuerySingle(edgeCtx, `
+    if testApiKey == "" {
+        err := edgeGlobalClient.WithGlobals(map[string]interface{}{"ext::auth::client_token": c.Cookies("jade-edgedb-auth-token")}).QuerySingle(edgeCtx, `
         with
             filtered_keys := (
                 select Key {

@@ -41,8 +42,12 @@ func RequestDeepseek(c *fiber.Ctx, llm LLM, messages []Message) string {
             )
         select filtered_keys.key limit 1
     `, &apiKey, "deepseek")
-    if err != nil {
-        return "JADE internal error: 08-00-0000. Please contact the support."
+        if err != nil {
+            logErrorCode.Println("08-00-0000")
+            return "JADE internal error: 08-00-0000. Please contact the support."
+        }
+    } else {
+        apiKey = testApiKey
     }
 
     requestBody := OpenaiChatCompletionRequest{

@@ -94,16 +99,13 @@ func RequestDeepseek(c *fiber.Ctx, llm LLM, messages []Message) string {
         return "JADE internal error: 08-01-0005. Please contact the support."
     }
 
+    if testApiKey != "" {
+        return chatCompletionResponse.Choices[0].Message.Content
+    }
+
-    var usedModelInfo ModelInfo
-    err = edgeGlobalClient.WithGlobals(map[string]interface{}{"ext::auth::client_token": c.Cookies("jade-edgedb-auth-token")}).QuerySingle(edgeCtx, `
-        SELECT ModelInfo {
-            inputPrice,
-            outputPrice
-        }
-        FILTER .modelID = <str>$0
-        LIMIT 1
-    `, &usedModelInfo, model)
-    if err != nil {
+    usedModelInfo, found := getModelInfoByID(llm.Model.ID)
+    if !found {
         logErrorCode.Println("08-00-0006")
         return "JADE internal error: 08-00-0006. Please contact the support."
     }
@@ -10,14 +10,15 @@ import (
     "github.com/gofiber/fiber/v2"
 )
 
-func RequestFirework(c *fiber.Ctx, llm LLM, messages []Message) string {
+func RequestFirework(c *fiber.Ctx, llm LLM, messages []Message, testApiKey string) string {
     model := llm.Model.ModelID
     temperature := float64(llm.Temperature)
     context := llm.Context
     maxTokens := int(llm.MaxToken)
 
     var apiKey string
-    err := edgeGlobalClient.WithGlobals(map[string]interface{}{"ext::auth::client_token": c.Cookies("jade-edgedb-auth-token")}).QuerySingle(edgeCtx, `
+    if testApiKey == "" {
+        err := edgeGlobalClient.WithGlobals(map[string]interface{}{"ext::auth::client_token": c.Cookies("jade-edgedb-auth-token")}).QuerySingle(edgeCtx, `
         with
             filtered_keys := (
                 select Key {

@@ -26,9 +27,12 @@ func RequestFirework(c *fiber.Ctx, llm LLM, messages []Message) string {
             )
         select filtered_keys.key limit 1
     `, &apiKey, "fireworks")
-    if err != nil {
-        logErrorCode.Println("09-00-0000")
-        return "JADE internal error: 09-00-0000. Please contact the support."
+        if err != nil {
+            logErrorCode.Println("09-00-0000")
+            return "JADE internal error: 09-00-0000. Please contact the support."
+        }
+    } else {
+        apiKey = testApiKey
     }
 
     url := "https://api.fireworks.ai/inference/v1/chat/completions"

@@ -82,16 +86,13 @@ func RequestFirework(c *fiber.Ctx, llm LLM, messages []Message) string {
         return "JADE internal error: 09-01-0005. Please contact the support."
     }
 
+    if testApiKey != "" {
+        return chatCompletionResponse.Choices[0].Message.Content
+    }
+
-    var usedModelInfo ModelInfo
-    err = edgeGlobalClient.WithGlobals(map[string]interface{}{"ext::auth::client_token": c.Cookies("jade-edgedb-auth-token")}).QuerySingle(edgeCtx, `
-        SELECT ModelInfo {
-            inputPrice,
-            outputPrice
-        }
-        FILTER .modelID = <str>$0
-        LIMIT 1
-    `, &usedModelInfo, model)
-    if err != nil {
+    usedModelInfo, found := getModelInfoByID(llm.Model.ID)
+    if !found {
         logErrorCode.Println("09-00-0006")
         return "JADE internal error: 09-00-0006. Please contact the support."
     }
@@ -62,7 +62,7 @@ func init() {
     GoogleErrorCodes["503"] = "Provider error: Servers are experiencing high traffic - Please retry your requests after a brief wait."
 }
 
-func RequestGoogle(c *fiber.Ctx, llm LLM, messages []Message) string {
+func RequestGoogle(c *fiber.Ctx, llm LLM, messages []Message, testApiKey string) string {
     model := llm.Model.ModelID
 
     // TODO: Use those parameters

@@ -71,7 +71,8 @@ func RequestGoogle(c *fiber.Ctx, llm LLM, messages []Message) string {
     //maxTokens := int(llm.MaxToken)
 
     var apiKey string
-    err := edgeGlobalClient.WithGlobals(map[string]interface{}{"ext::auth::client_token": c.Cookies("jade-edgedb-auth-token")}).QuerySingle(edgeCtx, `
+    if testApiKey == "" {
+        err := edgeGlobalClient.WithGlobals(map[string]interface{}{"ext::auth::client_token": c.Cookies("jade-edgedb-auth-token")}).QuerySingle(edgeCtx, `
         with
             filtered_keys := (
                 select Key {

@@ -80,8 +81,12 @@ func RequestGoogle(c *fiber.Ctx, llm LLM, messages []Message) string {
             )
         select filtered_keys.key limit 1
     `, &apiKey, "google")
-    if err != nil {
-        return "JADE internal error: 03-00-0000. Please contact the support."
+        if err != nil {
+            logErrorCode.Println("03-00-0000")
+            return "JADE internal error: 03-00-0000. Please contact the support."
+        }
+    } else {
+        apiKey = testApiKey
     }
 
     url := "https://generativelanguage.googleapis.com/v1beta/models/" + model + ":generateContent?key=" + apiKey

@@ -147,16 +152,13 @@ func RequestGoogle(c *fiber.Ctx, llm LLM, messages []Message) string {
         return "JADE internal error: 03-01-0005. Please contact the support."
    }
 
+    if testApiKey != "" {
+        return chatCompletionResponse.Candidates[0].Content.Parts[0].Text
+    }
+
-    var usedModelInfo ModelInfo
-    err = edgeGlobalClient.WithGlobals(map[string]interface{}{"ext::auth::client_token": c.Cookies("jade-edgedb-auth-token")}).QuerySingle(edgeCtx, `
-        SELECT ModelInfo {
-            inputPrice,
-            outputPrice
-        }
-        FILTER .modelID = <str>$0
-        LIMIT 1
-    `, &usedModelInfo, model)
-    if err != nil {
+    usedModelInfo, found := getModelInfoByID(llm.Model.ID)
+    if !found {
         logErrorCode.Println("03-00-0006")
         return "JADE internal error: 03-00-0006. Please contact the support."
     }
@@ -23,14 +23,15 @@ func init() {
     GroqErrorCodes["503"] = "Provider error: The server is not ready to handle the request, often due to maintenance or overload. Wait before retrying the request."
 }
 
-func RequestGroq(c *fiber.Ctx, llm LLM, messages []Message) string {
+func RequestGroq(c *fiber.Ctx, llm LLM, messages []Message, testApiKey string) string {
     model := llm.Model.ModelID
     temperature := float64(llm.Temperature)
     context := llm.Context
     maxTokens := int(llm.MaxToken)
 
     var apiKey string
-    err := edgeGlobalClient.WithGlobals(map[string]interface{}{"ext::auth::client_token": c.Cookies("jade-edgedb-auth-token")}).QuerySingle(edgeCtx, `
+    if testApiKey == "" {
+        err := edgeGlobalClient.WithGlobals(map[string]interface{}{"ext::auth::client_token": c.Cookies("jade-edgedb-auth-token")}).QuerySingle(edgeCtx, `
         with
             filtered_keys := (
                 select Key {

@@ -39,9 +40,12 @@ func RequestGroq(c *fiber.Ctx, llm LLM, messages []Message) string {
             )
         select filtered_keys.key limit 1
     `, &apiKey, "groq")
-    if err != nil {
-        logErrorCode.Println("04-00-0000")
-        return "JADE internal error: 04-00-0000. Please contact the support."
+        if err != nil {
+            logErrorCode.Println("04-00-0000")
+            return "JADE internal error: 04-00-0000. Please contact the support."
+        }
+    } else {
+        apiKey = testApiKey
     }
 
     url := "https://api.groq.com/openai/v1/chat/completions"

@@ -95,16 +99,13 @@ func RequestGroq(c *fiber.Ctx, llm LLM, messages []Message) string {
         return "JADE internal error: 04-01-0005. Please contact the support."
     }
 
+    if testApiKey != "" {
+        return chatCompletionResponse.Choices[0].Message.Content
+    }
+
-    var usedModelInfo ModelInfo
-    err = edgeGlobalClient.WithGlobals(map[string]interface{}{"ext::auth::client_token": c.Cookies("jade-edgedb-auth-token")}).QuerySingle(edgeCtx, `
-        SELECT ModelInfo {
-            inputPrice,
-            outputPrice
-        }
-        FILTER .modelID = <str>$0
-        LIMIT 1
-    `, &usedModelInfo, model)
-    if err != nil {
+    usedModelInfo, found := getModelInfoByID(llm.Model.ID)
+    if !found {
         logErrorCode.Println("04-00-0006")
         return "JADE internal error: 04-00-0006. Please contact the support."
     }
@@ -10,14 +10,15 @@ import (
     "github.com/gofiber/fiber/v2"
 )
 
-func RequestMistral(c *fiber.Ctx, llm LLM, messages []Message) string {
+func RequestMistral(c *fiber.Ctx, llm LLM, messages []Message, testApiKey string) string {
     model := llm.Model.ModelID
     temperature := float64(llm.Temperature)
     context := llm.Context
     maxTokens := int(llm.MaxToken)
 
     var apiKey string
-    err := edgeGlobalClient.WithGlobals(map[string]interface{}{"ext::auth::client_token": c.Cookies("jade-edgedb-auth-token")}).QuerySingle(edgeCtx, `
+    if testApiKey == "" {
+        err := edgeGlobalClient.WithGlobals(map[string]interface{}{"ext::auth::client_token": c.Cookies("jade-edgedb-auth-token")}).QuerySingle(edgeCtx, `
         with
             filtered_keys := (
                 select Key {

@@ -26,9 +27,12 @@ func RequestMistral(c *fiber.Ctx, llm LLM, messages []Message) string {
             )
         select filtered_keys.key limit 1
     `, &apiKey, "mistral")
-    if err != nil {
-        logErrorCode.Println("02-00-0000")
-        return "JADE internal error: 02-00-0000. Please contact the support."
+        if err != nil {
+            logErrorCode.Println("02-00-0000")
+            return "JADE internal error: 02-00-0000. Please contact the support."
+        }
+    } else {
+        apiKey = testApiKey
     }
 
     url := "https://api.mistral.ai/v1/chat/completions"

@@ -82,16 +86,13 @@ func RequestMistral(c *fiber.Ctx, llm LLM, messages []Message) string {
         return "JADE internal error: 02-01-0005. Please contact the support."
     }
 
+    if testApiKey != "" {
+        return chatCompletionResponse.Choices[0].Message.Content
+    }
+
-    var usedModelInfo ModelInfo
-    err = edgeGlobalClient.WithGlobals(map[string]interface{}{"ext::auth::client_token": c.Cookies("jade-edgedb-auth-token")}).QuerySingle(edgeCtx, `
-        SELECT ModelInfo {
-            inputPrice,
-            outputPrice
-        }
-        FILTER .modelID = <str>$0
-        LIMIT 1
-    `, &usedModelInfo, model)
-    if err != nil {
+    usedModelInfo, found := getModelInfoByID(llm.Model.ID)
+    if !found {
         logErrorCode.Println("02-00-0006")
         return "JADE internal error: 02-00-0006. Please contact the support."
     }
@@ -10,14 +10,15 @@ import (
     "github.com/gofiber/fiber/v2"
 )
 
-func RequestNim(c *fiber.Ctx, llm LLM, messages []Message) string {
+func RequestNim(c *fiber.Ctx, llm LLM, messages []Message, testApiKey string) string {
     model := llm.Model.ModelID
     temperature := float64(llm.Temperature)
     context := llm.Context
     maxTokens := int(llm.MaxToken)
 
     var apiKey string
-    err := edgeGlobalClient.WithGlobals(map[string]interface{}{"ext::auth::client_token": c.Cookies("jade-edgedb-auth-token")}).QuerySingle(edgeCtx, `
+    if testApiKey == "" {
+        err := edgeGlobalClient.WithGlobals(map[string]interface{}{"ext::auth::client_token": c.Cookies("jade-edgedb-auth-token")}).QuerySingle(edgeCtx, `
         with
             filtered_keys := (
                 select Key {

@@ -26,9 +27,12 @@ func RequestNim(c *fiber.Ctx, llm LLM, messages []Message) string {
             )
         select filtered_keys.key limit 1
     `, &apiKey, "nim")
-    if err != nil {
-        logErrorCode.Println("05-00-0000")
-        return "JADE internal error: 05-00-0000. Please contact the support."
+        if err != nil {
+            logErrorCode.Println("05-00-0000")
+            return "JADE internal error: 05-00-0000. Please contact the support."
+        }
+    } else {
+        apiKey = testApiKey
     }
 
     url := "https://integrate.api.nvidia.com/v1/chat/completions"

@@ -82,16 +86,13 @@ func RequestNim(c *fiber.Ctx, llm LLM, messages []Message) string {
         return "JADE internal error: 05-01-0005. Please contact the support."
     }
 
+    if testApiKey != "" {
+        return chatCompletionResponse.Choices[0].Message.Content
+    }
+
-    var usedModelInfo ModelInfo
-    err = edgeGlobalClient.WithGlobals(map[string]interface{}{"ext::auth::client_token": c.Cookies("jade-edgedb-auth-token")}).QuerySingle(edgeCtx, `
-        SELECT ModelInfo {
-            inputPrice,
-            outputPrice
-        }
-        FILTER .modelID = <str>$0
-        LIMIT 1
-    `, &usedModelInfo, model)
-    if err != nil {
+    usedModelInfo, found := getModelInfoByID(llm.Model.ID)
+    if !found {
         logErrorCode.Println("05-00-0006")
         return "JADE internal error: 05-00-0006. Please contact the support."
     }
@@ -51,14 +51,15 @@ func init() {
     OpenaiErrorCodes["503"] = "Provider error: Servers are experiencing high traffic - Please retry your requests after a brief wait."
 }
 
-func RequestOpenai(c *fiber.Ctx, llm LLM, messages []Message) string {
+func RequestOpenai(c *fiber.Ctx, llm LLM, messages []Message, testApiKey string) string {
     model := llm.Model.ModelID
     temperature := float64(llm.Temperature)
     context := llm.Context
     maxTokens := int(llm.MaxToken)
 
     var apiKey string
-    err := edgeGlobalClient.WithGlobals(map[string]interface{}{"ext::auth::client_token": c.Cookies("jade-edgedb-auth-token")}).QuerySingle(edgeCtx, `
+    if testApiKey == "" {
+        err := edgeGlobalClient.WithGlobals(map[string]interface{}{"ext::auth::client_token": c.Cookies("jade-edgedb-auth-token")}).QuerySingle(edgeCtx, `
         with
             filtered_keys := (
                 select Key {

@@ -67,9 +68,12 @@ func RequestOpenai(c *fiber.Ctx, llm LLM, messages []Message) string {
             )
         select filtered_keys.key limit 1
     `, &apiKey, "openai")
-    if err != nil {
-        logErrorCode.Println("00-00-0000")
-        return "JADE internal error: 00-00-0000. Please contact the support."
+        if err != nil {
+            logErrorCode.Println("00-00-0000")
+            return "JADE internal error: 00-00-0000. Please contact the support."
+        }
+    } else {
+        apiKey = testApiKey
     }
 
     url := "https://api.openai.com/v1/chat/completions"

@@ -123,16 +127,13 @@ func RequestOpenai(c *fiber.Ctx, llm LLM, messages []Message) string {
         return "JADE internal error: 00-01-0005. Please contact the support."
     }
 
+    if testApiKey != "" {
+        return chatCompletionResponse.Choices[0].Message.Content
+    }
+
-    var usedModelInfo ModelInfo
-    err = edgeGlobalClient.WithGlobals(map[string]interface{}{"ext::auth::client_token": c.Cookies("jade-edgedb-auth-token")}).QuerySingle(edgeCtx, `
-        SELECT ModelInfo {
-            inputPrice,
-            outputPrice
-        }
-        FILTER .modelID = <str>$0
-        LIMIT 1
-    `, &usedModelInfo, model)
-    if err != nil {
+    usedModelInfo, found := getModelInfoByID(llm.Model.ID)
+    if !found {
         logErrorCode.Println("00-00-0006")
         return "JADE internal error: 00-00-0006. Please contact the support."
     }
@@ -10,14 +10,15 @@ import (
     "github.com/gofiber/fiber/v2"
 )
 
-func RequestPerplexity(c *fiber.Ctx, llm LLM, messages []Message) string {
+func RequestPerplexity(c *fiber.Ctx, llm LLM, messages []Message, testApiKey string) string {
     model := llm.Model.ModelID
     temperature := float64(llm.Temperature)
     context := llm.Context
     maxTokens := int(llm.MaxToken)
 
     var apiKey string
-    err := edgeGlobalClient.WithGlobals(map[string]interface{}{"ext::auth::client_token": c.Cookies("jade-edgedb-auth-token")}).QuerySingle(edgeCtx, `
+    if testApiKey == "" {
+        err := edgeGlobalClient.WithGlobals(map[string]interface{}{"ext::auth::client_token": c.Cookies("jade-edgedb-auth-token")}).QuerySingle(edgeCtx, `
         with
             filtered_keys := (
                 select Key {

@@ -26,9 +27,12 @@ func RequestPerplexity(c *fiber.Ctx, llm LLM, messages []Message) string {
             )
         select filtered_keys.key limit 1
     `, &apiKey, "perplexity")
-    if err != nil {
-        logErrorCode.Println("06-00-0000")
-        return "JADE internal error: 06-00-0000. Please contact the support."
+        if err != nil {
+            logErrorCode.Println("06-00-0000")
+            return "JADE internal error: 06-00-0000. Please contact the support."
+        }
+    } else {
+        apiKey = testApiKey
     }
 
     url := "https://api.perplexity.ai/chat/completions"

@@ -82,16 +86,13 @@ func RequestPerplexity(c *fiber.Ctx, llm LLM, messages []Message) string {
         return "JADE internal error: 06-01-0005. Please contact the support."
     }
 
+    if testApiKey != "" {
+        return chatCompletionResponse.Choices[0].Message.Content
+    }
+
-    var usedModelInfo ModelInfo
-    err = edgeGlobalClient.WithGlobals(map[string]interface{}{"ext::auth::client_token": c.Cookies("jade-edgedb-auth-token")}).QuerySingle(edgeCtx, `
-        SELECT ModelInfo {
-            inputPrice,
-            outputPrice
-        }
-        FILTER .modelID = <str>$0
-        LIMIT 1
-    `, &usedModelInfo, model)
-    if err != nil {
+    usedModelInfo, found := getModelInfoByID(llm.Model.ID)
+    if !found {
         logErrorCode.Println("06-00-0006")
         return "JADE internal error: 06-00-0006. Please contact the support."
     }
@@ -45,14 +45,15 @@ func init() {
     TogetherErrorCodes["529"] = "Provider error: n unexpected error has occurred internal to Together’s systems."
 }
 
-func RequestTogether(c *fiber.Ctx, llm LLM, messages []Message) string {
+func RequestTogether(c *fiber.Ctx, llm LLM, messages []Message, testApiKey string) string {
     model := llm.Model.ModelID
     temperature := float64(llm.Temperature)
     context := llm.Context
     maxTokens := int(llm.MaxToken)
 
     var apiKey string
-    err := edgeGlobalClient.WithGlobals(map[string]interface{}{"ext::auth::client_token": c.Cookies("jade-edgedb-auth-token")}).QuerySingle(edgeCtx, `
+    if testApiKey == "" {
+        err := edgeGlobalClient.WithGlobals(map[string]interface{}{"ext::auth::client_token": c.Cookies("jade-edgedb-auth-token")}).QuerySingle(edgeCtx, `
         with
             filtered_keys := (
                 select Key {

@@ -61,9 +62,12 @@ func RequestTogether(c *fiber.Ctx, llm LLM, messages []Message) string {
             )
         select filtered_keys.key limit 1
     `, &apiKey, "together")
-    if err != nil {
-        logErrorCode.Println("07-00-0000")
-        return "JADE internal error: 07-00-0000. Please contact the support."
+        if err != nil {
+            logErrorCode.Println("07-00-0000")
+            return "JADE internal error: 07-00-0000. Please contact the support."
+        }
+    } else {
+        apiKey = testApiKey
     }
 
     url := "https://api.together.xyz/v1/completions"

@@ -117,16 +121,13 @@ func RequestTogether(c *fiber.Ctx, llm LLM, messages []Message) string {
         return "JADE internal error: 07-01-0005. Please contact the support."
     }
 
+    if testApiKey != "" {
+        return chatCompletionResponse.Choices[0].Text
+    }
+
-    var usedModelInfo ModelInfo
-    err = edgeGlobalClient.WithGlobals(map[string]interface{}{"ext::auth::client_token": c.Cookies("jade-edgedb-auth-token")}).QuerySingle(edgeCtx, `
-        SELECT ModelInfo {
-            inputPrice,
-            outputPrice
-        }
-        FILTER .modelID = <str>$0
-        LIMIT 1
-    `, &usedModelInfo, model)
-    if err != nil {
+    usedModelInfo, found := getModelInfoByID(llm.Model.ID)
+    if !found {
         logErrorCode.Println("07-00-0006")
         return "JADE internal error: 07-00-0006. Please contact the support."
     }
main.go

@@ -147,7 +147,7 @@ func addKeys(c *fiber.Ctx) error {
         "deepseek": c.FormValue("deepseek_key"),
     }
 
-    requestFunctions := map[string]func(*fiber.Ctx, LLM, []Message) string{
+    requestFunctions := map[string]func(*fiber.Ctx, LLM, []Message, string) string{
         "openai": RequestOpenai,
         "anthropic": RequestAnthropic,
         "mistral": RequestMistral,

@@ -208,7 +208,7 @@ func addKeys(c *fiber.Ctx) error {
         Context: "",
     }
 
-    var responseText string = requestFunctions[company](c, llm, messages)
+    var responseText string = requestFunctions[company](c, llm, messages, key)
 
     if responseText == "" || strings.Contains(responseText, "JADE internal error") || strings.Contains(responseText, "Provider error") {
         return c.SendString(fmt.Sprintf("Invalid %s API Key\n", company))
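For context on the hunks above: `addKeys` reuses the provider request functions to validate a freshly submitted key by passing it as `testApiKey` and rejecting it when the reply is empty or carries one of the error markers. A simplified, self-contained sketch of that check (the `*fiber.Ctx` argument and the project's real `LLM`/`Message` types are left out; `callProvider` is a hypothetical stand-in for the `requestFunctions[company]` lookup):

```go
package main

import (
	"fmt"
	"strings"
)

// callProvider is a hypothetical stand-in for requestFunctions[company] in main.go:
// it sends one throwaway request to the provider, using the candidate key as testApiKey.
type callProvider func(prompt string, testApiKey string) string

// keyIsValid mirrors the check addKeys performs on the provider's reply: an empty
// response, a "JADE internal error", or a "Provider error" all mean the key is bad.
func keyIsValid(call callProvider, key string) bool {
	responseText := call("Say OK", key)
	return responseText != "" &&
		!strings.Contains(responseText, "JADE internal error") &&
		!strings.Contains(responseText, "Provider error")
}

func main() {
	fake := func(prompt, testApiKey string) string {
		if testApiKey == "sk-good" {
			return "OK"
		}
		return "Provider error: invalid authentication"
	}
	fmt.Println(keyIsValid(fake, "sk-good")) // true
	fmt.Println(keyIsValid(fake, "sk-bad"))  // false
}
```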
@@ -23,7 +23,7 @@
     <script defer src="dependencies/marked.js"></script>
 
     <!--script defer src="wasm_exec.js"></script>
-    <script>
+    <script>
         window.addEventListener('load', function() {
             hljs.highlightAll();

@@ -40,6 +40,19 @@
     {{ embed }}
 
     <script>
+        const companyTempMinMaxValues = {
+            'openai': { min: 0, max: 2 },
+            'anthropic': { min: 0, max: 1 },
+            'deepseek': { min: 0, max: 2 },
+            'fireworks': { min: 0, max: 2 },
+            'google': { min: 0, max: 2 },
+            'groq': { min: 0, max: 2 },
+            'mistral': { min: 0, max: 2 },
+            'nim': { min: 0, max: 2 },
+            'perplexity': { min: 0, max: 2 },
+            'together': { min: 0, max: 2 },
+        };
+
         function copyToClipboardCode(button) {
             // Get the code element next to the button
             var codeElement = button.parentElement.nextElementSibling;
@@ -38,6 +38,7 @@
 
     <script>
         var textareaControl = document.getElementById('textarea-control');
+        const textarea = document.getElementById('chat-input-textarea');
 
         // Every 0.01s check if the text area have htmx-request class, if yes, add the class is-loading
         setInterval(function () {

@@ -49,8 +50,10 @@
             } else {
                 textareaControl.classList.remove('is-loading');
             }
-            toggleSendButton();
-        }, 10);
+            if (document.getElementById('chat-input-textarea') != null ) {
+                toggleSendButton();
+            }
+        }, 100);
 
         function updateIcons() {
             if (window.innerWidth < 450) {

@@ -90,7 +93,6 @@
             }
         });
 
-        const textarea = document.querySelector('#chat-input-textarea');
         textarea.addEventListener('keydown', handleTextareaKeydown);
 
         document.addEventListener('htmx:afterSwap', toggleSendButton)
@@ -44,7 +44,7 @@
                 </div>
             {% endif %}
             <div class="message-body">
-                <div class="content" style="overflow-x: auto; width: 100%;">{{ message.Content | safe }}</div>
+                <div class="content" style="overflow: hidden; width: 100%;">{{ message.Content | safe }}</div>
             </div>
         {% endif %}
     {% endfor %}
@@ -14,7 +14,7 @@
     </div>
     <div class="message-body">
         <div class="content"
-             style="overflow-x: auto;
+             style="overflow: hidden;
              width: 100%"
              id="content-{{ ID }}">{{ Content | safe }}</div>
     </div>
@@ -143,19 +143,6 @@
         </div>
     </div>
     <script>
-        const companyTempMinMaxValues = {
-            'openai': { min: 0, max: 2 },
-            'anthropic': { min: 0, max: 1 },
-            'deepseek': { min: 0, max: 2 },
-            'fireworks': { min: 0, max: 2 },
-            'google': { min: 0, max: 2 },
-            'groq': { min: 0, max: 2 },
-            'mistral': { min: 0, max: 2 },
-            'nim': { min: 0, max: 2 },
-            'perplexity': { min: 0, max: 2 },
-            'together': { min: 0, max: 2 },
-        };
-
         var sortable = new Sortable(document.getElementById('llm-list'), {
             animation: 150,
             onEnd: function (evt) {