Working huggingface endpoint
This commit is contained in:
parent
d139f36efd
commit
1603532150
2
Chat.go
2
Chat.go
@ -156,7 +156,7 @@ func GetMessageContentHandler(c *fiber.Ctx) error {
|
||||
|
||||
out := "<div class='message-header'>"
|
||||
out += "<p>"
|
||||
out += "<strong>" + selectedMessage.LLM.Name + "</strong> <small>" + selectedMessage.LLM.Model.ModelID + "</small>"
|
||||
out += "<strong>" + selectedMessage.LLM.Name + "</strong> <small>" + selectedMessage.LLM.Model.Name + "</small>"
|
||||
out += " </p>"
|
||||
out += "</div>"
|
||||
out += "<div class='message-body'>"
|
||||
|
@ -43,6 +43,11 @@ func GeneratePlaceholderHandler(c *fiber.Ctx) error {
|
||||
name,
|
||||
context,
|
||||
temperature,
|
||||
custom_endpoint : {
|
||||
id,
|
||||
endpoint,
|
||||
key
|
||||
},
|
||||
modelInfo : {
|
||||
modelID,
|
||||
maxToken,
|
||||
@ -112,6 +117,8 @@ func GenerateMultipleMessagesHandler(c *fiber.Ctx) error {
|
||||
addMessageFunc = addGroqMessage
|
||||
case "gooseai":
|
||||
addMessageFunc = addGooseaiMessage
|
||||
case "huggingface":
|
||||
addMessageFunc = addHuggingfaceMessage
|
||||
}
|
||||
|
||||
var messageID edgedb.UUID
|
||||
@ -141,6 +148,7 @@ func GenerateMultipleMessagesHandler(c *fiber.Ctx) error {
|
||||
FILTER .id = <uuid>$0;
|
||||
`, &message, messageID)
|
||||
if err != nil {
|
||||
fmt.Println("Is it here ?")
|
||||
panic(err)
|
||||
}
|
||||
|
||||
|
101
RequestHuggingface.go
Normal file
101
RequestHuggingface.go
Normal file
@ -0,0 +1,101 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"io"
|
||||
"net/http"
|
||||
|
||||
"github.com/edgedb/edgedb-go"
|
||||
)
|
||||
|
||||
// HuggingfaceChatCompletionRequest is the JSON body POSTed to a Hugging Face
// TGI (text-generation-inference) OpenAI-compatible chat-completion endpoint.
type HuggingfaceChatCompletionRequest struct {
	Model       string           `json:"model"`       // model identifier; callers pass "tgi" (see RequestHuggingface)
	Messages    []RequestMessage `json:"messages"`    // conversation history, in request order
	Temperature float64          `json:"temperature"` // sampling temperature
	Stream      bool             `json:"stream"`      // always false here: one JSON response, no SSE streaming
}
|
||||
|
||||
type HuggingfaceChatCompletionResponse struct {
|
||||
ID string `json:"id"`
|
||||
Object string `json:"object"`
|
||||
Created int64 `json:"created"`
|
||||
Model string `json:"model"`
|
||||
Choices []HuggingfaceChoice `json:"choices"`
|
||||
}
|
||||
|
||||
// HuggingfaceUsage is the token-accounting block ("usage") of an OpenAI-style
// chat-completion response from a Hugging Face endpoint.
type HuggingfaceUsage struct {
	PromptTokens     int32 `json:"prompt_tokens"`     // tokens consumed by the input messages
	CompletionTokens int32 `json:"completion_tokens"` // tokens generated in the reply
	TotalTokens      int32 `json:"total_tokens"`      // prompt + completion
}
|
||||
|
||||
// HuggingfaceChoice is one generated alternative within a chat-completion
// response; only Choices[0] is consumed by addHuggingfaceMessage.
type HuggingfaceChoice struct {
	Message      Message `json:"message"`       // the reply; .Content holds the generated text
	FinishReason string  `json:"finish_reason"` // why generation stopped (e.g. "stop", "length") — per the OpenAI-compatible API
	Index        int     `json:"index"`         // position within the choices array
}
|
||||
|
||||
func addHuggingfaceMessage(llm LLM, selected bool) edgedb.UUID {
|
||||
Messages := getAllSelectedMessages()
|
||||
|
||||
chatCompletion, err := RequestHuggingface(llm, Messages, float64(llm.Temperature))
|
||||
if err != nil {
|
||||
panic(err)
|
||||
} else if len(chatCompletion.Choices) == 0 {
|
||||
fmt.Println("No response from Endpoint")
|
||||
id := insertBotMessage("No response from Endpoint", selected, llm.ID)
|
||||
return id
|
||||
} else {
|
||||
Content := chatCompletion.Choices[0].Message.Content
|
||||
id := insertBotMessage(Content, selected, llm.ID)
|
||||
return id
|
||||
}
|
||||
}
|
||||
|
||||
func RequestHuggingface(llm LLM, messages []Message, temperature float64) (HuggingfaceChatCompletionResponse, error) {
|
||||
url := llm.Endpoint.Endpoint
|
||||
|
||||
requestBody := HuggingfaceChatCompletionRequest{
|
||||
Model: "tgi",
|
||||
Messages: Message2RequestMessage(messages),
|
||||
Temperature: temperature,
|
||||
Stream: false,
|
||||
}
|
||||
|
||||
jsonBody, err := json.Marshal(requestBody)
|
||||
if err != nil {
|
||||
return HuggingfaceChatCompletionResponse{}, fmt.Errorf("error marshaling JSON: %w", err)
|
||||
}
|
||||
|
||||
req, err := http.NewRequest("POST", url, bytes.NewBuffer(jsonBody))
|
||||
if err != nil {
|
||||
return HuggingfaceChatCompletionResponse{}, fmt.Errorf("error creating request: %w", err)
|
||||
}
|
||||
|
||||
req.Header.Set("Authorization", "Bearer "+llm.Endpoint.Key)
|
||||
req.Header.Set("Content-Type", "application/json")
|
||||
|
||||
client := &http.Client{}
|
||||
resp, err := client.Do(req)
|
||||
if err != nil {
|
||||
return HuggingfaceChatCompletionResponse{}, fmt.Errorf("error sending request: %w", err)
|
||||
}
|
||||
defer resp.Body.Close()
|
||||
|
||||
body, err := io.ReadAll(resp.Body)
|
||||
if err != nil {
|
||||
return HuggingfaceChatCompletionResponse{}, fmt.Errorf("error reading response body: %w", err)
|
||||
}
|
||||
|
||||
var chatCompletionResponse HuggingfaceChatCompletionResponse
|
||||
err = json.Unmarshal(body, &chatCompletionResponse)
|
||||
if err != nil {
|
||||
return HuggingfaceChatCompletionResponse{}, fmt.Errorf("error unmarshaling JSON: %w", err)
|
||||
}
|
||||
|
||||
addUsage(0, 0, 0, 0, llm.Model.ModelID)
|
||||
|
||||
return chatCompletionResponse, nil
|
||||
}
|
18
database.go
18
database.go
@ -64,11 +64,19 @@ type Usage struct {
|
||||
}
|
||||
|
||||
type LLM struct {
|
||||
ID edgedb.UUID `edgedb:"id"`
|
||||
Name string `edgedb:"name"`
|
||||
Context string `edgedb:"context"`
|
||||
Temperature float32 `edgedb:"temperature"`
|
||||
Model ModelInfo `edgedb:"modelInfo"`
|
||||
ID edgedb.UUID `edgedb:"id"`
|
||||
Name string `edgedb:"name"`
|
||||
Context string `edgedb:"context"`
|
||||
Temperature float32 `edgedb:"temperature"`
|
||||
Model ModelInfo `edgedb:"modelInfo"`
|
||||
Endpoint CustomEndpoint `edgedb:"custom_endpoint"`
|
||||
}
|
||||
|
||||
// CustomEndpoint is a user-supplied inference endpoint (URL plus API key)
// linked from an LLM via the `custom_endpoint` link. Embedding
// edgedb.Optional lets a fetched LLM leave this whole link absent when no
// custom endpoint is configured.
type CustomEndpoint struct {
	edgedb.Optional
	ID       edgedb.UUID `edgedb:"id"`
	Endpoint string      `edgedb:"endpoint"` // base URL requests are POSTed to (see RequestHuggingface)
	Key      string      `edgedb:"key"`      // bearer token sent in the Authorization header
}
|
||||
|
||||
type ModelInfo struct {
|
||||
|
@ -87,6 +87,14 @@ module default {
|
||||
required user: User {
|
||||
on target delete allow;
|
||||
};
|
||||
custom_endpoint: CustomEndpoint {
|
||||
on source delete delete target;
|
||||
};
|
||||
}
|
||||
|
||||
type CustomEndpoint {
|
||||
required endpoint: str;
|
||||
required key: str;
|
||||
}
|
||||
|
||||
type Company {
|
||||
|
8
dbschema/migrations/00033-m1eiric.edgeql
Normal file
8
dbschema/migrations/00033-m1eiric.edgeql
Normal file
@ -0,0 +1,8 @@
|
||||
CREATE MIGRATION m1eiric4fqayh7eieleesdm2s66f3sk7j4incugnyk2xzncp2t4rxa
|
||||
ONTO m1nonmddagbu3p7dcqmy3bvxkwinjfosg7iuna5xxwruig4rcnr4yq
|
||||
{
|
||||
CREATE TYPE default::customEndpoint {
|
||||
CREATE REQUIRED PROPERTY endpoint: std::str;
|
||||
CREATE REQUIRED PROPERTY key: std::str;
|
||||
};
|
||||
};
|
10
dbschema/migrations/00034-m1x75hd.edgeql
Normal file
10
dbschema/migrations/00034-m1x75hd.edgeql
Normal file
@ -0,0 +1,10 @@
|
||||
CREATE MIGRATION m1x75hdgm27pmshypxbzfrhje6xru5ypx65efdiu6zuwnute2xschq
|
||||
ONTO m1eiric4fqayh7eieleesdm2s66f3sk7j4incugnyk2xzncp2t4rxa
|
||||
{
|
||||
ALTER TYPE default::customEndpoint RENAME TO default::CustomEndpoint;
|
||||
ALTER TYPE default::LLM {
|
||||
CREATE LINK custom_endpoint: default::CustomEndpoint {
|
||||
ON SOURCE DELETE DELETE TARGET;
|
||||
};
|
||||
};
|
||||
};
|
@ -46,6 +46,7 @@
|
||||
placeholder="Model name">
|
||||
<div class="select is-fullwidth is-small mb-3" id="model-id-input">
|
||||
<select name="selectedLLMId">
|
||||
<option value="custom">Custom endpoint</option>
|
||||
{% for modelInfo in ModelInfos %}
|
||||
<option value="{{ modelInfo.ModelID }}">{{ modelInfo.ModelID }}</option>
|
||||
{% endfor %}
|
||||
|
Loading…
x
Reference in New Issue
Block a user