fix request bug

This commit is contained in:
Adrien Bouvais 2024-05-17 12:18:17 +02:00
parent e9d47faa1d
commit 17880f1d0f
7 changed files with 43 additions and 29 deletions

View File

@ -13,11 +13,18 @@ import (
"github.com/gofiber/fiber/v2"
)
// RequestMessage is the wire-format message sent to the LLM provider
// APIs (OpenAI, Anthropic, Mistral, Groq): a role plus text content,
// serialized with lowercase JSON keys.
type RequestMessage struct {
Role string `json:"role"`
Content string `json:"content"`
}
var lastSelectedLLMs []LLM
func GeneratePlaceholderHandler(c *fiber.Ctx) error {
// Step 1: create a user message and send it as output with a placeholder
// that will make a request to GenerateMultipleMessagesHandler when loading
message := c.FormValue("message", "")
selectedLLMIds := []string{"1e5a07c4-12fe-11ef-8da6-67d29b408c53"} // TODO Handle in the UI
selectedLLMIds := []string{"3cd15ca8-1433-11ef-9f22-93f2b78c78de"} // TODO Handle in the UI
var selectedLLMs []LLM
var selectedLLM LLM
@ -32,6 +39,7 @@ func GeneratePlaceholderHandler(c *fiber.Ctx) error {
temperature,
modelInfo : {
modelID,
maxToken,
company : {
icon,
name
@ -63,6 +71,8 @@ func GeneratePlaceholderHandler(c *fiber.Ctx) error {
}
func GenerateMultipleMessagesHandler(c *fiber.Ctx) error {
// Step 2 generate multiple messages
// And send them one by one using events
insertArea()
selectedLLMs := lastSelectedLLMs

View File

@ -12,7 +12,7 @@ import (
type AnthropicChatCompletionRequest struct {
Model string `json:"model"`
Messages []Message `json:"messages"`
Messages []RequestMessage `json:"messages"`
MaxTokens int `json:"max_tokens"`
Temperature float64 `json:"temperature"`
}
@ -38,7 +38,7 @@ type AnthropicUsage struct {
func addAnthropicMessage(llm LLM, selected bool) edgedb.UUID {
Messages := getAllSelectedMessages()
chatCompletion, err := RequestAnthropic(llm.Model.ModelID, Messages, 2048, float64(llm.Temperature))
chatCompletion, err := RequestAnthropic(llm.Model.ModelID, Messages, int(llm.Model.MaxToken), float64(llm.Temperature))
if err != nil {
fmt.Println("Error:", err)
} else if len(chatCompletion.Content) == 0 {
@ -56,7 +56,7 @@ func addAnthropicMessage(llm LLM, selected bool) edgedb.UUID {
func TestAnthropicKey(apiKey string) bool {
url := "https://api.anthropic.com/v1/messages"
AnthropicMessages := []Message{
AnthropicMessages := []RequestMessage{
{
Role: "user",
Content: "Hello",
@ -119,18 +119,20 @@ func RequestAnthropic(model string, messages []Message, maxTokens int, temperatu
select filtered_keys.key limit 1
`, &apiKey, "anthropic")
if err != nil {
return AnthropicChatCompletionResponse{}, fmt.Errorf("error getting OpenAI API key: %w", err)
return AnthropicChatCompletionResponse{}, fmt.Errorf("error getting Anthropic API key: %w", err)
}
url := "https://api.anthropic.com/v1/messages"
requestBody := AnthropicChatCompletionRequest{
Model: model,
Messages: ChangeRoleBot2Assistant(messages),
Messages: Message2RequestMessage(messages),
MaxTokens: maxTokens,
Temperature: temperature,
}
fmt.Println(maxTokens)
jsonBody, err := json.Marshal(requestBody)
if err != nil {
return AnthropicChatCompletionResponse{}, fmt.Errorf("error marshaling JSON: %w", err)

View File

@ -12,7 +12,7 @@ import (
type GroqChatCompletionRequest struct {
Model string `json:"model"`
Messages []Message `json:"messages"`
Messages []RequestMessage `json:"messages"`
Temperature float64 `json:"temperature"`
}
@ -68,7 +68,7 @@ func TestGroqKey(apiKey string) bool {
requestBody := GroqChatCompletionRequest{
Model: "llama3-8b-8192",
Messages: groqMessages,
Messages: Message2RequestMessage(groqMessages),
Temperature: 0,
}
@ -127,7 +127,7 @@ func RequestGroq(model string, messages []Message, temperature float64) (GroqCha
requestBody := GroqChatCompletionRequest{
Model: model,
Messages: ChangeRoleBot2Assistant(messages),
Messages: Message2RequestMessage(messages),
Temperature: temperature,
}

View File

@ -12,7 +12,7 @@ import (
type MistralChatCompletionRequest struct {
Model string `json:"model"`
Messages []Message `json:"messages"`
Messages []RequestMessage `json:"messages"`
Temperature float64 `json:"temperature"`
}
type MistralChatCompletionResponse struct {
@ -58,7 +58,7 @@ func TestMistralKey(apiKey string) bool {
url := "https://api.mistral.ai/v1/chat/completions"
// Convert messages to Mistral format
mistralMessages := []Message{
mistralMessages := []RequestMessage{
{
Role: "user",
Content: "Hello",
@ -133,7 +133,7 @@ func RequestMistral(model string, messages []Message, temperature float64) (Mist
requestBody := MistralChatCompletionRequest{
Model: model,
Messages: ChangeRoleBot2Assistant(messages),
Messages: Message2RequestMessage(messages),
Temperature: temperature,
}

View File

@ -12,7 +12,7 @@ import (
type OpenaiChatCompletionRequest struct {
Model string `json:"model"`
Messages []Message `json:"messages"`
Messages []RequestMessage `json:"messages"`
Temperature float64 `json:"temperature"`
}
@ -60,7 +60,7 @@ func TestOpenaiKey(apiKey string) bool {
url := "https://api.openai.com/v1/chat/completions"
// Convert messages to OpenAI format
openaiMessages := []Message{
openaiMessages := []RequestMessage{
{
Role: "user",
Content: "Hello",
@ -124,11 +124,13 @@ func RequestOpenai(model string, messages []Message, temperature float64) (Opena
return OpenaiChatCompletionResponse{}, fmt.Errorf("error getting OpenAI API key: %w", err)
}
fmt.Println("Messages:", messages)
url := "https://api.openai.com/v1/chat/completions"
requestBody := OpenaiChatCompletionRequest{
Model: model,
Messages: ChangeRoleBot2Assistant(messages),
Messages: Message2RequestMessage(messages),
Temperature: temperature,
}

View File

@ -76,7 +76,7 @@ type LLM struct {
type ModelInfo struct {
ID edgedb.UUID `edgedb:"id"`
Name string `edgedb:"name"`
MaxToken int32 `edgedb:"max_token"`
MaxToken int32 `edgedb:"maxToken"`
InputPrice float32 `edgedb:"inputPrice"`
OutputPrice float32 `edgedb:"outputPrice"`
ModelID string `edgedb:"modelID"`

View File

@ -96,8 +96,8 @@ func getExistingKeys() (bool, bool, bool, bool) {
return openaiExists, anthropicExists, mistralExists, groqExists
}
func ChangeRoleBot2Assistant(messages []Message) []Message {
openaiMessages := make([]Message, len(messages))
func Message2RequestMessage(messages []Message) []RequestMessage {
m := make([]RequestMessage, len(messages))
for i, msg := range messages {
var role string
switch msg.Role {
@ -108,10 +108,10 @@ func ChangeRoleBot2Assistant(messages []Message) []Message {
default:
role = "system"
}
openaiMessages[i] = Message{
m[i] = RequestMessage{
Role: role,
Content: msg.Content,
}
}
return openaiMessages
return m
}