Added max tokens = 10 when testing the API key
This commit is contained in:
parent
6c3e4d4b8a
commit
c25a5772ce
@ -73,6 +73,7 @@ func TestFireworkKey(apiKey string) bool {
|
|||||||
Model: "accounts/fireworks/models/llama-v2-7b-chat",
|
Model: "accounts/fireworks/models/llama-v2-7b-chat",
|
||||||
Messages: fireworkMessages,
|
Messages: fireworkMessages,
|
||||||
Temperature: 0,
|
Temperature: 0,
|
||||||
|
MaxTokens: 10,
|
||||||
}
|
}
|
||||||
|
|
||||||
jsonBody, err := json.Marshal(requestBody)
|
jsonBody, err := json.Marshal(requestBody)
|
||||||
|
@ -73,6 +73,7 @@ func TestGroqKey(apiKey string) bool {
|
|||||||
Model: "llama3-8b-8192",
|
Model: "llama3-8b-8192",
|
||||||
Messages: Message2RequestMessage(groqMessages, ""),
|
Messages: Message2RequestMessage(groqMessages, ""),
|
||||||
Temperature: 0,
|
Temperature: 0,
|
||||||
|
MaxTokens: 10,
|
||||||
}
|
}
|
||||||
|
|
||||||
jsonBody, err := json.Marshal(requestBody)
|
jsonBody, err := json.Marshal(requestBody)
|
||||||
|
@ -71,6 +71,7 @@ func TestMistralKey(apiKey string) bool {
|
|||||||
Model: "open-mistral-7b",
|
Model: "open-mistral-7b",
|
||||||
Messages: mistralMessages,
|
Messages: mistralMessages,
|
||||||
Temperature: 0,
|
Temperature: 0,
|
||||||
|
MaxTokens: 10,
|
||||||
}
|
}
|
||||||
|
|
||||||
jsonBody, err := json.Marshal(requestBody)
|
jsonBody, err := json.Marshal(requestBody)
|
||||||
|
@ -76,6 +76,7 @@ func TestNimKey(apiKey string) bool {
|
|||||||
Model: "meta/llama3-8b-instruct",
|
Model: "meta/llama3-8b-instruct",
|
||||||
Messages: nimMessages,
|
Messages: nimMessages,
|
||||||
Temperature: 0,
|
Temperature: 0,
|
||||||
|
MaxTokens: 10,
|
||||||
}
|
}
|
||||||
|
|
||||||
jsonBody, err := json.Marshal(requestBody)
|
jsonBody, err := json.Marshal(requestBody)
|
||||||
|
@ -73,6 +73,7 @@ func TestOpenaiKey(apiKey string) bool {
|
|||||||
Model: "gpt-3.5-turbo",
|
Model: "gpt-3.5-turbo",
|
||||||
Messages: openaiMessages,
|
Messages: openaiMessages,
|
||||||
Temperature: 0,
|
Temperature: 0,
|
||||||
|
MaxTokens: 10,
|
||||||
}
|
}
|
||||||
|
|
||||||
jsonBody, err := json.Marshal(requestBody)
|
jsonBody, err := json.Marshal(requestBody)
|
||||||
|
@ -74,6 +74,7 @@ func TestPerplexityKey(apiKey string) bool {
|
|||||||
Model: "llama-3-8b-instruct",
|
Model: "llama-3-8b-instruct",
|
||||||
Messages: perplexityMessages,
|
Messages: perplexityMessages,
|
||||||
Temperature: 0,
|
Temperature: 0,
|
||||||
|
MaxTokens: 10,
|
||||||
}
|
}
|
||||||
|
|
||||||
jsonBody, err := json.Marshal(requestBody)
|
jsonBody, err := json.Marshal(requestBody)
|
||||||
|
Loading…
x
Reference in New Issue
Block a user