Added max_tokens support for the remaining providers

Adrien Bouvais 2024-08-03 16:29:05 +02:00
parent 183bec7203
commit 306dc1c0d8
6 changed files with 24 additions and 12 deletions
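
The pattern is identical in all six provider files: the chat-completion request struct gains a MaxTokens field serialized as "max_tokens", and the provider's Request* function takes the limit as a new trailing parameter, which call sites fill with int(llm.MaxToken). Below is a minimal, self-contained sketch of that pattern; the type and function names are illustrative stand-ins, not the repository's actual identifiers.

```go
package main

import (
	"encoding/json"
	"fmt"
)

// requestMessage mirrors the RequestMessage shape the diff relies on
// (role/content fields are an assumption, not shown in this commit).
type requestMessage struct {
	Role    string `json:"role"`
	Content string `json:"content"`
}

// chatCompletionRequest stands in for the per-provider request structs:
// the commit adds the MaxTokens field with the "max_tokens" JSON tag.
type chatCompletionRequest struct {
	Model       string           `json:"model"`
	Messages    []requestMessage `json:"messages"`
	MaxTokens   int              `json:"max_tokens"`
	Temperature float64          `json:"temperature"`
}

// buildRequestBody mirrors the new Request* signatures: maxTokens is now an
// explicit parameter instead of being absent from the payload.
func buildRequestBody(model string, messages []requestMessage, temperature float64, maxTokens int) chatCompletionRequest {
	return chatCompletionRequest{
		Model:       model,
		Messages:    messages,
		MaxTokens:   maxTokens,
		Temperature: temperature,
	}
}

func main() {
	body := buildRequestBody(
		"example-model",
		[]requestMessage{{Role: "user", Content: "Hello"}},
		0.7,
		512, // the call sites in this commit pass int(llm.MaxToken)
	)
	out, _ := json.MarshalIndent(body, "", "  ")
	fmt.Println(string(out))
}
```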

View File

@@ -14,6 +14,7 @@ import (
 type FireworkChatCompletionRequest struct {
 	Model       string           `json:"model"`
 	Messages    []RequestMessage `json:"messages"`
+	MaxTokens   int              `json:"max_tokens"`
 	Temperature float64          `json:"temperature"`
 }
@@ -41,7 +42,7 @@ type FireworkChoice struct {
 func addFireworkMessage(c *fiber.Ctx, llm LLM, selected bool) edgedb.UUID {
 	Messages := getAllSelectedMessages(c)
-	chatCompletion, err := RequestFirework(c, llm.Model.ModelID, Messages, float64(llm.Temperature), llm.Context)
+	chatCompletion, err := RequestFirework(c, llm.Model.ModelID, Messages, float64(llm.Temperature), llm.Context, int(llm.MaxToken))
 	if err != nil {
 		fmt.Println("Error requesting Firework: ", err)
 		id := insertBotMessage(c, "Error requesting Firework, model may not be available anymore. Better error message in development.", selected, llm.ID)
@@ -110,7 +111,7 @@ func TestFireworkKey(apiKey string) bool {
 	return true
 }
-func RequestFirework(c *fiber.Ctx, model string, messages []Message, temperature float64, context string) (FireworkChatCompletionResponse, error) {
+func RequestFirework(c *fiber.Ctx, model string, messages []Message, temperature float64, context string, maxTokens int) (FireworkChatCompletionResponse, error) {
 	var apiKey string
 	err := edgeGlobalClient.WithGlobals(map[string]interface{}{"ext::auth::client_token": c.Cookies("jade-edgedb-auth-token")}).QuerySingle(edgeCtx, `
 		with
@@ -130,6 +131,7 @@ func RequestFirework(c *fiber.Ctx, model string, messages []Message, temperature
 	requestBody := FireworkChatCompletionRequest{
 		Model:       "accounts/fireworks/models/" + model,
 		Messages:    Message2RequestMessage(messages, context),
+		MaxTokens:   maxTokens,
 		Temperature: temperature,
 	}

View File

@@ -14,6 +14,7 @@ import (
 type GroqChatCompletionRequest struct {
 	Model       string           `json:"model"`
 	Messages    []RequestMessage `json:"messages"`
+	MaxTokens   int              `json:"max_tokens"`
 	Temperature float64          `json:"temperature"`
 }
@@ -41,7 +42,7 @@ type GroqChoice struct {
 func addGroqMessage(c *fiber.Ctx, llm LLM, selected bool) edgedb.UUID {
 	Messages := getAllSelectedMessages(c)
-	chatCompletion, err := RequestGroq(c, llm.Model.ModelID, Messages, float64(llm.Temperature), llm.Context)
+	chatCompletion, err := RequestGroq(c, llm.Model.ModelID, Messages, float64(llm.Temperature), llm.Context, int(llm.MaxToken))
 	if err != nil {
 		fmt.Println("Error requesting Groq: ", err)
 		id := insertBotMessage(c, "Error requesting Groq, model may not be available anymore. Better error message in development.", selected, llm.ID)
@@ -110,7 +111,7 @@ func TestGroqKey(apiKey string) bool {
 	return true
 }
-func RequestGroq(c *fiber.Ctx, model string, messages []Message, temperature float64, context string) (GroqChatCompletionResponse, error) {
+func RequestGroq(c *fiber.Ctx, model string, messages []Message, temperature float64, context string, maxTokens int) (GroqChatCompletionResponse, error) {
 	var apiKey string
 	err := edgeGlobalClient.WithGlobals(map[string]interface{}{"ext::auth::client_token": c.Cookies("jade-edgedb-auth-token")}).QuerySingle(edgeCtx, `
 		with
@@ -130,6 +131,7 @@ func RequestGroq(c *fiber.Ctx, model string, messages []Message, temperature flo
 	requestBody := GroqChatCompletionRequest{
 		Model:       model,
 		Messages:    Message2RequestMessage(messages, context),
+		MaxTokens:   maxTokens,
 		Temperature: temperature,
 	}

View File

@@ -15,6 +15,7 @@ type HuggingfaceChatCompletionRequest struct {
 	Model       string           `json:"model"`
 	Messages    []RequestMessage `json:"messages"`
 	Temperature float64          `json:"temperature"`
+	MaxTokens   int              `json:"max_tokens"`
 	Stream      bool             `json:"stream"`
 }
@@ -41,7 +42,7 @@ type HuggingfaceChoice struct {
 func addHuggingfaceMessage(c *fiber.Ctx, llm LLM, selected bool) edgedb.UUID {
 	Messages := getAllSelectedMessages(c)
-	chatCompletion, err := RequestHuggingface(c, llm, Messages, float64(llm.Temperature))
+	chatCompletion, err := RequestHuggingface(c, llm, Messages, float64(llm.Temperature), int(llm.MaxToken))
 	if err != nil {
 		fmt.Println("Error requesting Huggingface: ", err)
 		id := insertBotMessage(c, "Error requesting Huggingface.", selected, llm.ID)
@@ -57,13 +58,14 @@ func addHuggingfaceMessage(c *fiber.Ctx, llm LLM, selected bool) edgedb.UUID {
 	}
 }
-func RequestHuggingface(c *fiber.Ctx, llm LLM, messages []Message, temperature float64) (HuggingfaceChatCompletionResponse, error) {
+func RequestHuggingface(c *fiber.Ctx, llm LLM, messages []Message, temperature float64, maxTokens int) (HuggingfaceChatCompletionResponse, error) {
 	url := llm.Endpoint.Endpoint
 	requestBody := HuggingfaceChatCompletionRequest{
 		Model:       "tgi",
 		Messages:    Message2RequestMessage(messages, llm.Context),
 		Temperature: temperature,
+		MaxTokens:   maxTokens,
 		Stream:      false,
 	}

View File

@@ -14,6 +14,7 @@ import (
 type MistralChatCompletionRequest struct {
 	Model       string           `json:"model"`
 	Messages    []RequestMessage `json:"messages"`
+	MaxTokens   int              `json:"max_tokens"`
 	Temperature float64          `json:"temperature"`
 }
 type MistralChatCompletionResponse struct {
@@ -40,7 +41,7 @@ type MistralChoice struct {
 func addMistralMessage(c *fiber.Ctx, llm LLM, selected bool) edgedb.UUID {
 	Messages := getAllSelectedMessages(c)
-	chatCompletion, err := RequestMistral(c, llm.Model.ModelID, Messages, float64(llm.Temperature), llm.Context)
+	chatCompletion, err := RequestMistral(c, llm.Model.ModelID, Messages, float64(llm.Temperature), llm.Context, int(llm.MaxToken))
 	if err != nil {
 		fmt.Println("Error requesting Mistral: ", err)
 		id := insertBotMessage(c, "Error requesting Mistral, model may not be available anymore. Better error message in development.", selected, llm.ID)
@@ -115,7 +116,7 @@ func TestMistralKey(apiKey string) bool {
 	return true
 }
-func RequestMistral(c *fiber.Ctx, model string, messages []Message, temperature float64, context string) (MistralChatCompletionResponse, error) {
+func RequestMistral(c *fiber.Ctx, model string, messages []Message, temperature float64, context string, maxTokens int) (MistralChatCompletionResponse, error) {
 	var apiKey string
 	err := edgeGlobalClient.WithGlobals(map[string]interface{}{"ext::auth::client_token": c.Cookies("jade-edgedb-auth-token")}).QuerySingle(edgeCtx, `
 		with
@@ -135,6 +136,7 @@ func RequestMistral(c *fiber.Ctx, model string, messages []Message, temperature
 	requestBody := MistralChatCompletionRequest{
 		Model:       model,
 		Messages:    Message2RequestMessage(messages, context),
+		MaxTokens:   maxTokens,
 		Temperature: temperature,
 	}

View File

@@ -14,6 +14,7 @@ import (
 type OpenaiChatCompletionRequest struct {
 	Model       string           `json:"model"`
 	Messages    []RequestMessage `json:"messages"`
+	MaxTokens   int              `json:"max_tokens"`
 	Temperature float64          `json:"temperature"`
 }
@@ -41,7 +42,7 @@ type OpenaiChoice struct {
 func addOpenaiMessage(c *fiber.Ctx, llm LLM, selected bool) edgedb.UUID {
 	Messages := getAllSelectedMessages(c)
-	chatCompletion, err := RequestOpenai(c, llm.Model.ModelID, Messages, float64(llm.Temperature), llm.Context)
+	chatCompletion, err := RequestOpenai(c, llm.Model.ModelID, Messages, float64(llm.Temperature), llm.Context, int(llm.MaxToken))
 	if err != nil {
 		fmt.Println("Error requesting OpenAI: ", err)
 		id := insertBotMessage(c, "Error requesting OpenAI, model may not be available anymore. Better error message in development.", selected, llm.ID)
@@ -110,7 +111,7 @@ func TestOpenaiKey(apiKey string) bool {
 	return true
 }
-func RequestOpenai(c *fiber.Ctx, model string, messages []Message, temperature float64, context string) (OpenaiChatCompletionResponse, error) {
+func RequestOpenai(c *fiber.Ctx, model string, messages []Message, temperature float64, context string, maxTokens int) (OpenaiChatCompletionResponse, error) {
 	var apiKey string
 	err := edgeGlobalClient.WithGlobals(map[string]interface{}{"ext::auth::client_token": c.Cookies("jade-edgedb-auth-token")}).QuerySingle(edgeCtx, `
 		with
@@ -130,6 +131,7 @@ func RequestOpenai(c *fiber.Ctx, model string, messages []Message, temperature f
 	requestBody := OpenaiChatCompletionRequest{
 		Model:       model,
 		Messages:    Message2RequestMessage(messages, context),
+		MaxTokens:   maxTokens,
 		Temperature: temperature,
 	}

View File

@@ -15,6 +15,7 @@ import (
 type PerplexityChatCompletionRequest struct {
 	Model       string           `json:"model"`
 	Messages    []RequestMessage `json:"messages"`
+	MaxTokens   int              `json:"max_tokens"`
 	Temperature float64          `json:"temperature"`
 }
@@ -42,7 +43,7 @@ type PerplexityChoice struct {
 func addPerplexityMessage(c *fiber.Ctx, llm LLM, selected bool) edgedb.UUID {
 	Messages := getAllSelectedMessages(c)
-	chatCompletion, err := RequestPerplexity(c, llm.Model.ModelID, Messages, float64(llm.Temperature), llm.Context)
+	chatCompletion, err := RequestPerplexity(c, llm.Model.ModelID, Messages, float64(llm.Temperature), llm.Context, int(llm.MaxToken))
 	if err != nil {
 		fmt.Println("Error requesting Perplexity: ", err)
 		id := insertBotMessage(c, "Error requesting Perplexity, model may not be available anymore. Better error message in development.", selected, llm.ID)
@@ -111,7 +112,7 @@ func TestPerplexityKey(apiKey string) bool {
 	return true
 }
-func RequestPerplexity(c *fiber.Ctx, model string, messages []Message, temperature float64, context string) (PerplexityChatCompletionResponse, error) {
+func RequestPerplexity(c *fiber.Ctx, model string, messages []Message, temperature float64, context string, maxTokens int) (PerplexityChatCompletionResponse, error) {
 	var apiKey string
 	err := edgeGlobalClient.WithGlobals(map[string]interface{}{"ext::auth::client_token": c.Cookies("jade-edgedb-auth-token")}).QuerySingle(edgeCtx, `
 		with
@@ -131,6 +132,7 @@ func RequestPerplexity(c *fiber.Ctx, model string, messages []Message, temperatu
 	requestBody := PerplexityChatCompletionRequest{
 		Model:       model,
 		Messages:    Message2RequestMessage(messages, context),
+		MaxTokens:   maxTokens,
 		Temperature: temperature,
 	}
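
One design note on the serialization above, offered as an observation rather than a verified issue: because MaxTokens is a plain int, an unset limit is sent as "max_tokens": 0 instead of being omitted, and providers may treat zero as a hard cap or reject it. A hedged alternative, not part of this commit, is Go's omitempty tag:

```go
package main

import (
	"encoding/json"
	"fmt"
)

// Hypothetical variant, not part of this commit: with ",omitempty", a zero
// MaxTokens is dropped from the payload so the provider falls back to its
// own default limit. The field set mirrors the structs in the diff, trimmed.
type chatCompletionRequest struct {
	Model       string  `json:"model"`
	MaxTokens   int     `json:"max_tokens,omitempty"`
	Temperature float64 `json:"temperature"`
}

func main() {
	unset, _ := json.Marshal(chatCompletionRequest{Model: "example-model", Temperature: 0.7})
	fmt.Println(string(unset)) // {"model":"example-model","temperature":0.7}

	set, _ := json.Marshal(chatCompletionRequest{Model: "example-model", MaxTokens: 512, Temperature: 0.7})
	fmt.Println(string(set)) // {"model":"example-model","max_tokens":512,"temperature":0.7}
}
```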