Added GPT-4 Omni and some fixes

This commit is contained in:
parent 07180d5176
commit 947ac9c713
Chat.go (2 changes)

@@ -305,7 +305,7 @@ func RedoMessageHandler(c *fiber.Ctx) error {
             selectedModelIds = append(selectedModelIds, ModelsInfos[ModelInfo].ID)
         }
     }
-    lastSelectedModelIds = selectedModelIds
+    lastSelectedModelIds = removeDuplicate(selectedModelIds)
 
     return c.SendString(messageOut)
 }
Request.go (67 changes)
@@ -1,9 +1,11 @@
 package main
 
 import (
+    "context"
     "fmt"
     "log"
     "sync"
+    "time"
 
     "github.com/edgedb/edgedb-go"
     "github.com/flosch/pongo2"
@@ -61,45 +63,62 @@ func addUsage(inputCost float32, outputCost float32, inputToken int32, outputTok
 }
 
 func GenerateMultipleMessages(c *fiber.Ctx) error {
+    insertArea()
 
     // Create a wait group to synchronize the goroutines
     var wg sync.WaitGroup
-    var InsertedIDs []edgedb.UUID
 
     // Add the length of lastSelectedModelIds goroutines to the wait group
     wg.Add(len(lastSelectedModelIds))
 
     for i := range lastSelectedModelIds {
         idx := i
-        if model2Icon(lastSelectedModelIds[i]) == "openai" {
-            go func() {
-                defer wg.Done()
-                response := addOpenaiMessage(lastSelectedModelIds[idx], idx == 0)
-                InsertedIDs = append(InsertedIDs, response)
-            }()
-        } else if model2Icon(lastSelectedModelIds[i]) == "anthropic" {
-            go func() {
-                defer wg.Done()
-                response := addAnthropicMessage(lastSelectedModelIds[idx], idx == 0)
-                InsertedIDs = append(InsertedIDs, response)
-            }()
-        } else if model2Icon(lastSelectedModelIds[i]) == "mistral" {
-            go func() {
-                defer wg.Done()
-                response := addMistralMessage(lastSelectedModelIds[idx], idx == 0)
-                InsertedIDs = append(InsertedIDs, response)
-            }()
-        } else if model2Icon(lastSelectedModelIds[i]) == "groq" {
-            go func() {
-                defer wg.Done()
-                response := addGroqMessage(lastSelectedModelIds[idx], idx == 0)
-                InsertedIDs = append(InsertedIDs, response)
-            }()
-        }
+        go func() {
+            // Create a context with a 1-minute timeout
+            ctx, cancel := context.WithTimeout(context.Background(), time.Minute)
+            defer cancel() // Ensure the context is cancelled to free resources
+
+            // Use a channel to signal the completion of addxxxMessage
+            done := make(chan struct{}, 1)
+
+            // Determine which message function to call based on the model
+            var addMessageFunc func(modelID string, selected bool) edgedb.UUID
+            switch model2Icon(lastSelectedModelIds[idx]) {
+            case "openai":
+                addMessageFunc = addOpenaiMessage
+            case "anthropic":
+                addMessageFunc = addAnthropicMessage
+            case "mistral":
+                addMessageFunc = addMistralMessage
+            case "groq":
+                addMessageFunc = addGroqMessage
+            }
+
+            // Call the selected addMessageFunc in a goroutine
+            go func() {
+                defer wg.Done()
+                if addMessageFunc != nil {
+                    addMessageFunc(lastSelectedModelIds[idx], idx == 0)
+                }
+                done <- struct{}{}
+            }()
+
+            // Use select to wait on multiple channel operations
+            select {
+            case <-ctx.Done(): // Context's deadline is exceeded
+                // Insert a bot message indicating a timeout
+                insertBotMessage(lastSelectedModelIds[idx]+" too long to answer", idx == 0, lastSelectedModelIds[idx])
+            case <-done: // addMessageFunc completed within the deadline
+                // No action needed, the function completed successfully
+            }
+        }()
     }
 
-    // Wait for both goroutines to finish
+    // Wait for all goroutines to finish
    wg.Wait()
 
+    fmt.Println("Done!")
+
     return c.SendString(generateChatHTML())
 }
 
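Note: the rewritten GenerateMultipleMessages races each provider call against a one-minute context: the call signals a buffered done channel, and a select either accepts that signal or, on ctx.Done(), falls back to a timeout message. The following is a minimal, self-contained sketch of that same context/select pattern; slowCall and the durations are placeholders, not code from this project.

    package main

    import (
        "context"
        "fmt"
        "sync"
        "time"
    )

    // slowCall stands in for a provider call such as addOpenaiMessage.
    func slowCall(d time.Duration) string {
        time.Sleep(d)
        return "answer"
    }

    func main() {
        var wg sync.WaitGroup
        durations := []time.Duration{10 * time.Millisecond, 2 * time.Second}

        for _, d := range durations {
            d := d
            wg.Add(1)
            go func() {
                defer wg.Done()

                // Give each call at most one second (one minute in the real handler).
                ctx, cancel := context.WithTimeout(context.Background(), time.Second)
                defer cancel()

                // Buffered, so the worker can signal even after the deadline has passed.
                done := make(chan struct{}, 1)
                go func() {
                    _ = slowCall(d)
                    done <- struct{}{}
                }()

                select {
                case <-ctx.Done():
                    fmt.Println(d, "-> timed out")
                case <-done:
                    fmt.Println(d, "-> finished in time")
                }
            }()
        }
        wg.Wait()
    }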
@@ -47,9 +47,9 @@ func init() {
 
     modelInfo := ModelInfo{
         ID:          "gpt-3.5-turbo",
-        Name:        "GPT-3.5",
+        Name:        "GPT-3.5 Turbo",
         Icon:        "openai",
-        MaxToken:    4096,
+        MaxToken:    16385,
         InputPrice:  0.50 / 1000000,
         OutputPrice: 1.50 / 1000000,
     }
@@ -57,16 +57,38 @@ func init() {
     ModelsInfos = append(ModelsInfos, modelInfo)
 
     modelInfo = ModelInfo{
-        ID:          "gpt-4-turbo",
+        ID:          "gpt-4",
         Name:        "GPT-4",
         Icon:        "openai",
         MaxToken:    8192,
+        InputPrice:  30.00 / 1000000,
+        OutputPrice: 60.00 / 1000000,
+    }
+    ModelInfosList = append(ModelInfosList, modelInfo)
+    ModelsInfos = append(ModelsInfos, modelInfo)
+
+    modelInfo = ModelInfo{
+        ID:          "gpt-4-turbo",
+        Name:        "GPT-4 Turbo",
+        Icon:        "openai",
+        MaxToken:    128000,
         InputPrice:  10.00 / 1000000,
         OutputPrice: 30.00 / 1000000,
     }
     ModelInfosList = append(ModelInfosList, modelInfo)
     ModelsInfos = append(ModelsInfos, modelInfo)
 
+    modelInfo = ModelInfo{
+        ID:          "gpt-4o",
+        Name:        "GPT-4 Omni",
+        Icon:        "openai",
+        MaxToken:    128000,
+        InputPrice:  5.00 / 1000000,
+        OutputPrice: 15.00 / 1000000,
+    }
+    ModelInfosList = append(ModelInfosList, modelInfo)
+    ModelsInfos = append(ModelsInfos, modelInfo)
+
     companyInfo := CompanyInfo{
         ID:   "openai",
         Name: "OpenAI",
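Note: the InputPrice and OutputPrice fields hold dollars per token, so GPT-4 Omni's 5.00 / 1000000 reads as $5 per million input tokens and 15.00 / 1000000 as $15 per million output tokens. A minimal sketch of the resulting cost arithmetic follows; estimateCost and the token counts are illustrative only, not the project's addUsage function.

    package main

    import "fmt"

    // Prices in dollars per token, matching the GPT-4 Omni entry above.
    const (
        gpt4oInputPrice  float32 = 5.00 / 1000000
        gpt4oOutputPrice float32 = 15.00 / 1000000
    )

    // estimateCost is a hypothetical helper: token counts times per-token prices.
    func estimateCost(inputTokens, outputTokens int32) float32 {
        return float32(inputTokens)*gpt4oInputPrice + float32(outputTokens)*gpt4oOutputPrice
    }

    func main() {
        // 1,200 prompt tokens and 350 completion tokens come to about $0.01125.
        fmt.Printf("$%.5f\n", estimateCost(1200, 350))
    }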
@@ -100,8 +100,6 @@ func getCurrentUser() User {
     var result User
     err := edgeClient.QuerySingle(edgeCtx, "SELECT global currentUser LIMIT 1;", &result)
     if err != nil {
-        fmt.Println("Error in edgedb.QuerySingle: in getCurrentUser")
-        fmt.Println(err)
         return User{}
     }
     return result
static/.DS_Store (BIN, vendored)
Binary file not shown.
utils.go (32 changes)
@@ -4,7 +4,6 @@ import (
     "bytes"
     "fmt"
     "regexp"
-    "strings"
 
     "github.com/yuin/goldmark"
     highlighting "github.com/yuin/goldmark-highlighting"
@@ -40,21 +39,10 @@ func addCopyButtonsToCode(htmlContent string) string {
 }
 
 func model2Icon(model string) string {
-    if model == "gpt-3.5-turbo" {
-        return "openai"
-    }
-    if model == "gpt-4" {
-        return "openai"
-    }
-    // If model name contain claude-3 retrun anthropic
-    if strings.Contains(model, "claude-3") {
-        return "anthropic"
-    }
-    if strings.Contains(model, "mistral") || strings.Contains(model, "mixtral") {
-        return "mistral"
-    }
-    if strings.Contains(model, "llama3") || strings.Contains(model, "gemma") {
-        return "groq"
-    }
+    for i := range ModelsInfos {
+        if ModelsInfos[i].ID == model {
+            return ModelsInfos[i].Icon
+        }
+    }
     return "bouvai2"
 }
@@ -124,3 +112,15 @@ func getExistingKeys() (bool, bool, bool, bool) {
 
     return openaiExists, anthropicExists, mistralExists, groqExists
 }
+
+func removeDuplicate(s []string) []string {
+    m := make(map[string]bool)
+    var result []string
+    for _, str := range s {
+        if !m[str] {
+            m[str] = true
+            result = append(result, str)
+        }
+    }
+    return result
+}
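Note: the removeDuplicate helper added here keeps only the first occurrence of each ID and preserves the original order, which is what the Chat.go change (lastSelectedModelIds = removeDuplicate(selectedModelIds)) relies on. A small, self-contained usage sketch follows; the model IDs in main are examples only.

    package main

    import "fmt"

    // Same helper as added in utils.go: order-preserving de-duplication.
    func removeDuplicate(s []string) []string {
        m := make(map[string]bool)
        var result []string
        for _, str := range s {
            if !m[str] {
                m[str] = true
                result = append(result, str)
            }
        }
        return result
    }

    func main() {
        ids := []string{"gpt-4o", "claude-3-opus", "gpt-4o", "mistral-large"}
        fmt.Println(removeDuplicate(ids))
        // Output: [gpt-4o claude-3-opus mistral-large]
    }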