Mirror of https://github.com/ollama/ollama.git, synced 2026-04-19 20:54:25 +02:00.
mlx: get parameters from modelfile during model creation (#14747)
This commit is contained in:
@@ -1,7 +1,13 @@
|
||||
package client
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"os"
|
||||
"strings"
|
||||
"testing"
|
||||
|
||||
"github.com/ollama/ollama/manifest"
|
||||
"github.com/ollama/ollama/parser"
|
||||
)
|
||||
|
||||
func TestModelfileConfig(t *testing.T) {
|
||||
@@ -31,6 +37,40 @@ func TestModelfileConfig(t *testing.T) {
|
||||
}
|
||||
}
|
||||
|
||||
func TestConfigFromModelfile(t *testing.T) {
|
||||
modelfile, err := parser.ParseFile(strings.NewReader(`
|
||||
FROM ./model
|
||||
TEMPLATE {{ .Prompt }}
|
||||
PARAMETER temperature 0.7
|
||||
PARAMETER stop USER:
|
||||
PARAMETER stop ASSISTANT:
|
||||
`))
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
modelDir, mfConfig, err := ConfigFromModelfile(modelfile)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
if modelDir != "./model" {
|
||||
t.Fatalf("modelDir = %q, want %q", modelDir, "./model")
|
||||
}
|
||||
|
||||
if mfConfig.Template != "{{ .Prompt }}" {
|
||||
t.Fatalf("Template = %q, want %q", mfConfig.Template, "{{ .Prompt }}")
|
||||
}
|
||||
|
||||
if got := mfConfig.Parameters["temperature"]; got != float32(0.7) {
|
||||
t.Fatalf("temperature = %#v, want %v", got, float32(0.7))
|
||||
}
|
||||
|
||||
if got := mfConfig.Parameters["stop"]; got == nil || len(got.([]string)) != 2 {
|
||||
t.Fatalf("unexpected stop params: %#v", got)
|
||||
}
|
||||
}
|
||||
|
||||
func TestModelfileConfig_Empty(t *testing.T) {
|
||||
config := &ModelfileConfig{}
|
||||
|
||||
@@ -120,6 +160,9 @@ func TestCreateOptions(t *testing.T) {
|
||||
License: "MIT",
|
||||
Parser: "qwen3-thinking",
|
||||
Renderer: "qwen3",
|
||||
Parameters: map[string]any{
|
||||
"temperature": float32(0.7),
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
@@ -144,6 +187,9 @@ func TestCreateOptions(t *testing.T) {
|
||||
if opts.Modelfile.Renderer != "qwen3" {
|
||||
t.Errorf("Modelfile.Renderer = %q, want %q", opts.Modelfile.Renderer, "qwen3")
|
||||
}
|
||||
if opts.Modelfile.Parameters["temperature"] != float32(0.7) {
|
||||
t.Errorf("Modelfile.Parameters[temperature] = %v, want %v", opts.Modelfile.Parameters["temperature"], float32(0.7))
|
||||
}
|
||||
}
|
||||
|
||||
func TestResolveParserName(t *testing.T) {
|
||||
@@ -252,3 +298,44 @@ func TestQuantizeSupported(t *testing.T) {
|
||||
// We can't easily test both cases, so just verify it returns something
|
||||
_ = supported
|
||||
}
|
||||
|
||||
func TestCreateModelfileLayersIncludesParameters(t *testing.T) {
|
||||
t.Setenv("OLLAMA_MODELS", t.TempDir())
|
||||
|
||||
layers, err := createModelfileLayers(&ModelfileConfig{
|
||||
Parameters: map[string]any{
|
||||
"temperature": float32(0.7),
|
||||
"stop": []string{"USER:", "ASSISTANT:"},
|
||||
},
|
||||
})
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
if len(layers) != 1 {
|
||||
t.Fatalf("len(layers) = %d, want 1", len(layers))
|
||||
}
|
||||
|
||||
if layers[0].MediaType != "application/vnd.ollama.image.params" {
|
||||
t.Fatalf("MediaType = %q, want %q", layers[0].MediaType, "application/vnd.ollama.image.params")
|
||||
}
|
||||
|
||||
blobPath, err := manifest.BlobsPath(layers[0].Digest)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
data, err := os.ReadFile(blobPath)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
var got map[string]any
|
||||
if err := json.Unmarshal(data, &got); err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
if got["temperature"] != float64(0.7) {
|
||||
t.Fatalf("temperature = %v, want %v", got["temperature"], float64(0.7))
|
||||
}
|
||||
}
|
||||
|
||||
Reference in New Issue
Block a user