mirror of https://github.com/ollama/ollama.git
Merge f429a5fa23 into bc71278670
commit 0177a3dbd5
@@ -68,7 +68,7 @@ func TestContextExhaustion(t *testing.T) {
 
 // Send multiple generate requests with prior context and ensure the response is coherant and expected
 func TestParallelGenerateWithHistory(t *testing.T) {
-	modelOverride := ollamaEngineChatModels[0] // Most recent ollama engine model
+	modelOverride := "gpt-oss:20b"
 	req, resp := GenerateRequests()
 	numParallel := 2
 	iterLimit := 2
@@ -155,7 +155,7 @@ func TestGenerateWithHistory(t *testing.T) {
 
 // Send multiple chat requests with prior context and ensure the response is coherant and expected
 func TestParallelChatWithHistory(t *testing.T) {
-	modelOverride := ollamaEngineChatModels[0] // Most recent ollama engine model
+	modelOverride := "gpt-oss:20b"
 	req, resp := ChatRequests()
 	numParallel := 2
 	iterLimit := 2
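Note: the two hunks above only show the setup of the parallel-history tests (modelOverride, numParallel, iterLimit); the loop bodies are outside the diff. As a rough sketch of what such a test drives, not the repository's actual helper code, the following program fans out numParallel goroutines and has each one replay a few prompts for iterLimit rounds, threading the returned context tokens back into the next request. The api client calls are the public github.com/ollama/ollama/api Go client; the prompt strings and error handling here are illustrative assumptions.

// Sketch only: a parallel generate-with-history loop, assuming the public
// ollama Go API client. Prompts and helper structure are placeholders; the
// real tests build their inputs from GenerateRequests().
package main

import (
	"context"
	"fmt"
	"sync"

	"github.com/ollama/ollama/api"
)

func main() {
	client, err := api.ClientFromEnvironment()
	if err != nil {
		panic(err)
	}

	model := "gpt-oss:20b" // matches the pinned modelOverride in the diff
	prompts := []string{   // placeholder prompts for illustration
		"Why is the sky blue?",
		"Summarize your previous answer in one sentence.",
	}
	numParallel, iterLimit := 2, 2

	var wg sync.WaitGroup
	for w := 0; w < numParallel; w++ {
		wg.Add(1)
		go func(worker int) {
			defer wg.Done()
			var history []int // the generate API carries history via context tokens
			for iter := 0; iter < iterLimit; iter++ {
				for _, p := range prompts {
					req := &api.GenerateRequest{Model: model, Prompt: p, Context: history}
					err := client.Generate(context.Background(), req, func(r api.GenerateResponse) error {
						if r.Done {
							history = r.Context // feed the accumulated context into the next request
						}
						return nil
					})
					if err != nil {
						fmt.Printf("worker %d iter %d: %v\n", worker, iter, err)
						return
					}
				}
			}
		}(w)
	}
	wg.Wait()
}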
@@ -38,6 +38,7 @@ var (
 
 	// Note: add newer models at the top of the list to test them first
 	ollamaEngineChatModels = []string{
+		"qwen3-coder:30b",
 		"gpt-oss:20b",
 		"gemma3n:e2b",
 		"mistral-small3.2:latest",
@@ -46,6 +47,7 @@
 		"qwen2.5-coder:latest",
 		"qwen2.5vl:3b",
 		"qwen3:0.6b", // dense
+		"qwen3:1.7b", // dense
 		"qwen3:30b",  // MOE
 		"gemma3:1b",
 		"llama3.1:latest",
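Because the list is kept newest-first, any test that selects ollamaEngineChatModels[0] (as the removed modelOverride lines above did) silently switches to whatever was added last; after this change that would be "qwen3-coder:30b" rather than "gpt-oss:20b", which is presumably why the two parallel-history tests pin their model explicitly. A minimal sketch of that interaction, with the list shortened for illustration:

// Minimal sketch: newest-first ordering means index 0 tracks the latest addition.
// The slice below is a shortened stand-in for ollamaEngineChatModels.
package main

import "fmt"

func main() {
	ollamaEngineChatModels := []string{
		"qwen3-coder:30b", // newly added at the top
		"gpt-oss:20b",
		"gemma3n:e2b",
	}

	// Before this change, index 0 was "gpt-oss:20b"; now it is "qwen3-coder:30b".
	fmt.Println("most recent engine model:", ollamaEngineChatModels[0])
}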
@@ -270,7 +272,7 @@
 		"Can they form if there are no clouds? Be breif in your reply",
 		"Do they happen on other planets? Be brief in your reply",
 	}
-	rainbowExpected = []string{"water", "droplet", "mist", "glow", "refract", "reflect", "scatter", "wave", "color", "spectrum", "raindrop", "atmosphere", "frequency", "end", "gold", "fortune", "blessing", "prosperity", "magic", "shower", "sky", "shimmer", "light", "storm", "sunny"}
+	rainbowExpected = []string{"water", "droplet", "mist", "glow", "refract", "reflect", "scatter", "particles", "wave", "color", "spectrum", "raindrop", "atmosphere", "frequency", "end", "gold", "fortune", "blessing", "prosperity", "hope", "bridge", "magic", "shower", "sky", "shimmer", "light", "storm", "sunny", "beauty", "sunburst", "phenomenon", "mars", "venus", "jupiter"}
 )
 
 func init() {
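The widened rainbowExpected slice (adding words such as "particles", "hope", "bridge", "mars", "venus", "jupiter") suggests these tests accept a response when it mentions at least one expected topic word, so broadening the vocabulary keeps newer models from failing on valid answers. A sketch of that kind of case-insensitive any-keyword check follows; the function name and exact matching rule are assumptions for illustration, not the repository's actual validation code.

// Sketch of an any-keyword check like the one rainbowExpected appears to feed.
// containsAny and its matching rule are assumptions for illustration only.
package main

import (
	"fmt"
	"strings"
)

// containsAny reports whether the response mentions at least one expected word,
// ignoring case.
func containsAny(response string, expected []string) bool {
	lower := strings.ToLower(response)
	for _, word := range expected {
		if strings.Contains(lower, strings.ToLower(word)) {
			return true
		}
	}
	return false
}

func main() {
	rainbowExpected := []string{"water", "droplet", "refract", "mars", "venus", "jupiter"}
	resp := "Yes - Mars has icy clouds that can refract sunlight into faint arcs."
	fmt.Println(containsAny(resp, rainbowExpected)) // true: matches "mars" and "refract"
}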