2023-07-17 08:02:22 +08:00
|
|
|
package server
|
|
|
|
|
|
|
|
import (
|
|
|
|
"bytes"
|
2023-07-26 05:08:51 +08:00
|
|
|
"context"
|
2023-07-17 08:02:22 +08:00
|
|
|
"crypto/sha256"
|
2023-08-29 11:50:24 +08:00
|
|
|
"encoding/hex"
|
2023-07-17 08:02:22 +08:00
|
|
|
"encoding/json"
|
|
|
|
"errors"
|
|
|
|
"fmt"
|
|
|
|
"io"
|
|
|
|
"log"
|
2024-01-19 02:52:01 +08:00
|
|
|
"log/slog"
|
server: allow mixed-case model names on push, pull, cp, and create (#7676)
This change allows for mixed-case model names to be pushed, pulled,
copied, and created, which was previously disallowed because the Ollama
registry was backed by a Docker registry that enforced a naming
convention that disallowed mixed-case names, which is no longer the
case.
This does not break existing, intended, behaviors.
Also, make TestCase test a story of creating, updating, pulling, and
copying a model with case variations, ensuring the model's manifest is
updated correctly, and not duplicated across different files with
different case variations.
2024-11-20 07:05:57 +08:00
|
|
|
"net"
|
2023-07-17 08:02:22 +08:00
|
|
|
"net/http"
|
2023-08-22 09:38:31 +08:00
|
|
|
"net/url"
|
2023-07-17 08:02:22 +08:00
|
|
|
"os"
|
|
|
|
"path/filepath"
|
2023-08-22 09:24:42 +08:00
|
|
|
"runtime"
|
2024-05-22 12:30:52 +08:00
|
|
|
"slices"
|
2024-02-15 03:29:49 +08:00
|
|
|
"strconv"
|
2023-07-17 08:02:22 +08:00
|
|
|
"strings"
|
|
|
|
|
2024-03-27 04:04:17 +08:00
|
|
|
"github.com/ollama/ollama/api"
|
2024-06-05 02:53:23 +08:00
|
|
|
"github.com/ollama/ollama/envconfig"
|
2025-06-21 02:11:40 +08:00
|
|
|
"github.com/ollama/ollama/fs/gguf"
|
add qwen3-coder tool support
The format qwen3-coder uses is relatively unique, both in rendering and
in parsing. To implement parsing, I wrote a custom parser in similar
style to harmony. For the rendering, I found that the logic would be
much more difficult to follow in a template, so I introduced the concept
of a built-in renderer that uses go code, rather than a template to
generate prompts.
I set us up for future built-in parsers and renderers by making it so
they can be specified in a Modelfile like so:
```
RENDERER "qwen3-coder"
PARSER "qwen3-coder"
```
These need to be provided explicitly because the architecture alone is
not enough to understand what format the model expects to receive, and
what format we expect it to output (e.g., qwen3-coder is `qwen3moe`,
which includes other qwen3-family models as well)
I haven't converted harmony to be one of these "built-ins" yet, since
some of it is in flux with the changes @ParthSareen has been making to
move harmony to the runner. It is likely that many other built-ins will
need to move to the runner as well, but I'm able to slightly defer that
decision since qwen3-coder doesn't have thinking (and therefore doesn't
need to be in the runner to make structured outputs work). I expect to
unify harmony with this approach very soon.
Whether a particular model supports tools or thinking was previously
inferred from templates, but without a template we now also use the
parser itself to declare what it supports. If we have future models that
re-use the same parsing format, but have different capabilities, we'll
want to parameterize them and give them different names to be specified
as a `PARSER`.
Misc changes:
- I worked on the renderer by diffing outputs from the reference
implementation and ours. To make it easier to do this, I extended
<https://github.com/ollama/ollama/pull/11875> to also support
returning the prompt via the openai compat layer
2025-09-12 04:40:35 +08:00
|
|
|
"github.com/ollama/ollama/model/parsers"
|
2024-05-21 02:26:45 +08:00
|
|
|
"github.com/ollama/ollama/parser"
|
2024-06-11 05:54:42 +08:00
|
|
|
"github.com/ollama/ollama/template"
|
2025-06-07 03:02:20 +08:00
|
|
|
"github.com/ollama/ollama/thinking"
|
2024-04-17 07:22:38 +08:00
|
|
|
"github.com/ollama/ollama/types/model"
|
2024-03-27 04:04:17 +08:00
|
|
|
"github.com/ollama/ollama/version"
|
2023-07-17 08:02:22 +08:00
|
|
|
)
|
|
|
|
|
2024-06-21 10:13:36 +08:00
|
|
|
// Sentinel errors for model capability checks and registry access.
// Capability errors are composed as "<errCapabilities> <capability...>",
// e.g. "does not support tools" (see CheckCapabilities).
var (
	// errCapabilities is the prefix wrapped around missing-capability errors.
	errCapabilities         = errors.New("does not support")
	errCapabilityCompletion = errors.New("completion")
	errCapabilityTools      = errors.New("tools")
	errCapabilityInsert     = errors.New("insert")
	errCapabilityVision     = errors.New("vision")
	errCapabilityEmbedding  = errors.New("embedding")
	errCapabilityThinking   = errors.New("thinking")
	// errInsecureProtocol signals a registry reached over plain HTTP.
	errInsecureProtocol = errors.New("insecure protocol http")
)
|
2024-06-18 01:38:55 +08:00
|
|
|
|
2024-02-15 03:29:49 +08:00
|
|
|
// registryOptions configures how requests to a model registry are made:
// credentials, TLS strictness, and redirect handling.
type registryOptions struct {
	Insecure bool   // allow insecure registry access
	Username string // basic-auth username
	Password string // basic-auth password
	Token    string // bearer token, if already obtained

	// CheckRedirect controls redirect handling; it has the same signature
	// as http.Client.CheckRedirect.
	CheckRedirect func(req *http.Request, via []*http.Request) error
}
|
|
|
|
|
2023-07-17 08:02:22 +08:00
|
|
|
// Model is a locally resolved model: the manifest's layers mapped onto
// concrete blob file paths, plus the decoded config, options, and prompt
// data (see GetModel for how each field is populated).
type Model struct {
	Name      string `json:"name"` // full tag name (registry/namespace/model:tag)
	Config    ConfigV2
	ShortName string // abbreviated tag name
	ModelPath string // path to the model weights blob

	// ParentModel is the model this one was derived from (layer "From").
	ParentModel string

	AdapterPaths   []string // LoRA/adapter blob paths
	ProjectorPaths []string // multimodal projector blob paths
	System         string   // system prompt
	License        []string // license texts, one per license layer
	Digest         string   // sha256 digest of the manifest
	Options        map[string]any // model parameters from the params layer
	Messages       []api.Message  // conversation seed messages

	// Template is the prompt template; defaults to template.DefaultTemplate
	// when the manifest carries no template layer.
	Template *template.Template
}
|
|
|
|
|
2025-04-02 06:21:46 +08:00
|
|
|
// Capabilities returns the capabilities that the model supports
|
|
|
|
func (m *Model) Capabilities() []model.Capability {
|
|
|
|
capabilities := []model.Capability{}
|
2024-06-15 05:57:49 +08:00
|
|
|
|
2025-04-02 06:21:46 +08:00
|
|
|
// Check for completion capability
|
2025-09-18 05:40:53 +08:00
|
|
|
if m.ModelPath != "" {
|
|
|
|
f, err := gguf.Open(m.ModelPath)
|
|
|
|
if err == nil {
|
|
|
|
defer f.Close()
|
|
|
|
|
|
|
|
if f.KeyValue("pooling_type").Valid() {
|
|
|
|
capabilities = append(capabilities, model.CapabilityEmbedding)
|
|
|
|
} else {
|
|
|
|
// If no embedding is specified, we assume the model supports completion
|
|
|
|
capabilities = append(capabilities, model.CapabilityCompletion)
|
|
|
|
}
|
|
|
|
if f.KeyValue("vision.block_count").Valid() {
|
|
|
|
capabilities = append(capabilities, model.CapabilityVision)
|
|
|
|
}
|
2025-04-02 06:21:46 +08:00
|
|
|
} else {
|
2025-09-18 05:40:53 +08:00
|
|
|
slog.Error("couldn't open model file", "error", err)
|
2025-06-21 02:11:40 +08:00
|
|
|
}
|
2025-09-18 05:40:53 +08:00
|
|
|
} else if len(m.Config.Capabilities) > 0 {
|
|
|
|
for _, c := range m.Config.Capabilities {
|
|
|
|
capabilities = append(capabilities, model.Capability(c))
|
2025-04-02 06:21:46 +08:00
|
|
|
}
|
|
|
|
} else {
|
2025-09-18 05:40:53 +08:00
|
|
|
slog.Warn("unknown capabilities for model", "model", m.Name)
|
2025-04-02 06:21:46 +08:00
|
|
|
}
|
|
|
|
|
|
|
|
if m.Template == nil {
|
|
|
|
return capabilities
|
|
|
|
}
|
|
|
|
|
add qwen3-coder tool support
The format qwen3-coder uses is relatively unique, both in rendering and
in parsing. To implement parsing, I wrote a custom parser in similar
style to harmony. For the rendering, I found that the logic would be
much more difficult to follow in a template, so I introduced the concept
of a built-in renderer that uses go code, rather than a template to
generate prompts.
I set us up for future built-in parsers and renderers by making it so
they can be specified in a Modelfile like so:
```
RENDERER "qwen3-coder"
PARSER "qwen3-coder"
```
These need to be provided explicitly because the architecture alone is
not enough to understand what format the model expects to receive, and
what format we expect it to output (e.g., qwen3-coder is `qwen3moe`,
which includes other qwen3-family models as well)
I haven't converted harmony to be one of these "built-ins" yet, since
some of it is in flux with the changes @ParthSareen has been making to
move harmony to the runner. It is likely that many other built-ins will
need to move to the runner as well, but I'm able to slightly defer that
decision since qwen3-coder doesn't have thinking (and therefore doesn't
need to be in the runner to make structured outputs work). I expect to
unify harmony with this approach very soon.
Whether a particular model supports tools or thinking was previously
inferred from templates, but without a template we now also use the
parser itself to declare what it supports. If we have future models that
re-use the same parsing format, but have different capabilities, we'll
want to parameterize them and give them different names to be specified
as a `PARSER`.
Misc changes:
- I worked on the renderer by diffing outputs from the reference
implementation and ours. To make it easier to do this, I extended
<https://github.com/ollama/ollama/pull/11875> to also support
returning the prompt via the openai compat layer
2025-09-12 04:40:35 +08:00
|
|
|
builtinParser := parsers.ParserForName(m.Config.Parser)
|
2025-04-02 06:21:46 +08:00
|
|
|
// Check for tools capability
|
2025-10-03 08:25:55 +08:00
|
|
|
v, err := m.Template.Vars()
|
|
|
|
if err != nil {
|
|
|
|
slog.Warn("model template contains errors", "error", err)
|
|
|
|
}
|
|
|
|
if slices.Contains(v, "tools") || (builtinParser != nil && builtinParser.HasToolSupport()) {
|
2025-04-02 06:21:46 +08:00
|
|
|
capabilities = append(capabilities, model.CapabilityTools)
|
|
|
|
}
|
|
|
|
|
|
|
|
// Check for insert capability
|
2025-10-03 08:25:55 +08:00
|
|
|
if slices.Contains(v, "suffix") {
|
2025-04-02 06:21:46 +08:00
|
|
|
capabilities = append(capabilities, model.CapabilityInsert)
|
|
|
|
}
|
|
|
|
|
2025-05-02 07:50:20 +08:00
|
|
|
// Check for vision capability in projector-based models
|
|
|
|
if len(m.ProjectorPaths) > 0 {
|
|
|
|
capabilities = append(capabilities, model.CapabilityVision)
|
|
|
|
}
|
|
|
|
|
2025-09-18 05:40:53 +08:00
|
|
|
// Skip the thinking check if it's already set
|
|
|
|
if slices.Contains(capabilities, "thinking") {
|
|
|
|
return capabilities
|
|
|
|
}
|
|
|
|
|
2025-05-29 10:38:52 +08:00
|
|
|
// Check for thinking capability
|
2025-06-07 03:02:20 +08:00
|
|
|
openingTag, closingTag := thinking.InferTags(m.Template.Template)
|
2025-08-06 03:21:16 +08:00
|
|
|
hasTags := openingTag != "" && closingTag != ""
|
add qwen3-coder tool support
The format qwen3-coder uses is relatively unique, both in rendering and
in parsing. To implement parsing, I wrote a custom parser in similar
style to harmony. For the rendering, I found that the logic would be
much more difficult to follow in a template, so I introduced the concept
of a built-in renderer that uses go code, rather than a template to
generate prompts.
I set us up for future built-in parsers and renderers by making it so
they can be specified in a Modelfile like so:
```
RENDERER "qwen3-coder"
PARSER "qwen3-coder"
```
These need to be provided explicitly because the architecture alone is
not enough to understand what format the model expects to receive, and
what format we expect it to output (e.g., qwen3-coder is `qwen3moe`,
which includes other qwen3-family models as well)
I haven't converted harmony to be one of these "built-ins" yet, since
some of it is in flux with the changes @ParthSareen has been making to
move harmony to the runner. It is likely that many other built-ins will
need to move to the runner as well, but I'm able to slightly defer that
decision since qwen3-coder doesn't have thinking (and therefore doesn't
need to be in the runner to make structured outputs work). I expect to
unify harmony with this approach very soon.
Whether a particular model supports tools or thinking was previously
inferred from templates, but without a template we now also use the
parser itself to declare what it supports. If we have future models that
re-use the same parsing format, but have different capabilities, we'll
want to parameterize them and give them different names to be specified
as a `PARSER`.
Misc changes:
- I worked on the renderer by diffing outputs from the reference
implementation and ours. To make it easier to do this, I extended
<https://github.com/ollama/ollama/pull/11875> to also support
returning the prompt via the openai compat layer
2025-09-12 04:40:35 +08:00
|
|
|
isGptoss := slices.Contains([]string{"gptoss", "gpt-oss"}, m.Config.ModelFamily)
|
|
|
|
if hasTags || isGptoss || (builtinParser != nil && builtinParser.HasThinkingSupport()) {
|
2025-05-29 10:38:52 +08:00
|
|
|
capabilities = append(capabilities, model.CapabilityThinking)
|
|
|
|
}
|
|
|
|
|
2025-04-02 06:21:46 +08:00
|
|
|
return capabilities
|
|
|
|
}
|
|
|
|
|
|
|
|
// CheckCapabilities checks if the model has the specified capabilities returning an error describing
|
|
|
|
// any missing or unknown capabilities
|
|
|
|
func (m *Model) CheckCapabilities(want ...model.Capability) error {
|
|
|
|
available := m.Capabilities()
|
|
|
|
var errs []error
|
|
|
|
|
|
|
|
// Map capabilities to their corresponding error
|
|
|
|
capToErr := map[model.Capability]error{
|
|
|
|
model.CapabilityCompletion: errCapabilityCompletion,
|
|
|
|
model.CapabilityTools: errCapabilityTools,
|
|
|
|
model.CapabilityInsert: errCapabilityInsert,
|
|
|
|
model.CapabilityVision: errCapabilityVision,
|
|
|
|
model.CapabilityEmbedding: errCapabilityEmbedding,
|
2025-05-29 10:38:52 +08:00
|
|
|
model.CapabilityThinking: errCapabilityThinking,
|
2025-04-02 06:21:46 +08:00
|
|
|
}
|
|
|
|
|
|
|
|
for _, cap := range want {
|
|
|
|
err, ok := capToErr[cap]
|
|
|
|
if !ok {
|
2024-06-12 05:03:42 +08:00
|
|
|
slog.Error("unknown capability", "capability", cap)
|
2024-06-18 01:38:55 +08:00
|
|
|
return fmt.Errorf("unknown capability: %s", cap)
|
2024-06-12 05:03:42 +08:00
|
|
|
}
|
2025-04-02 06:21:46 +08:00
|
|
|
|
|
|
|
if !slices.Contains(available, cap) {
|
|
|
|
errs = append(errs, err)
|
|
|
|
}
|
2024-06-12 05:03:42 +08:00
|
|
|
}
|
|
|
|
|
2025-05-29 10:38:52 +08:00
|
|
|
var err error
|
2025-04-02 06:21:46 +08:00
|
|
|
if len(errs) > 0 {
|
2025-05-29 10:38:52 +08:00
|
|
|
err = fmt.Errorf("%w %w", errCapabilities, errors.Join(errs...))
|
|
|
|
}
|
|
|
|
|
|
|
|
if slices.Contains(errs, errCapabilityThinking) {
|
|
|
|
if m.Config.ModelFamily == "qwen3" || model.ParseName(m.Name).Model == "deepseek-r1" {
|
|
|
|
// append a message to the existing error
|
|
|
|
return fmt.Errorf("%w. Pull the model again to get the latest version with full thinking support", err)
|
|
|
|
}
|
2024-06-18 01:38:55 +08:00
|
|
|
}
|
|
|
|
|
2025-05-29 10:38:52 +08:00
|
|
|
return err
|
2024-02-21 10:37:29 +08:00
|
|
|
}
|
|
|
|
|
2024-05-01 01:55:19 +08:00
|
|
|
func (m *Model) String() string {
|
2025-01-01 10:02:30 +08:00
|
|
|
var modelfile parser.Modelfile
|
2024-05-01 01:55:19 +08:00
|
|
|
|
2024-05-21 02:26:45 +08:00
|
|
|
modelfile.Commands = append(modelfile.Commands, parser.Command{
|
2024-05-01 01:55:19 +08:00
|
|
|
Name: "model",
|
|
|
|
Args: m.ModelPath,
|
|
|
|
})
|
2024-04-27 07:59:31 +08:00
|
|
|
|
2024-05-09 03:42:48 +08:00
|
|
|
for _, adapter := range m.AdapterPaths {
|
2024-05-21 02:26:45 +08:00
|
|
|
modelfile.Commands = append(modelfile.Commands, parser.Command{
|
2024-05-09 03:42:48 +08:00
|
|
|
Name: "adapter",
|
|
|
|
Args: adapter,
|
2024-05-01 01:55:19 +08:00
|
|
|
})
|
2024-04-27 07:59:31 +08:00
|
|
|
}
|
|
|
|
|
2024-05-09 03:42:48 +08:00
|
|
|
for _, projector := range m.ProjectorPaths {
|
2024-05-21 02:26:45 +08:00
|
|
|
modelfile.Commands = append(modelfile.Commands, parser.Command{
|
2024-05-09 03:42:48 +08:00
|
|
|
Name: "model",
|
|
|
|
Args: projector,
|
2024-05-01 01:55:19 +08:00
|
|
|
})
|
2024-04-27 07:59:31 +08:00
|
|
|
}
|
|
|
|
|
2024-06-11 05:54:42 +08:00
|
|
|
if m.Template != nil {
|
2024-05-21 02:26:45 +08:00
|
|
|
modelfile.Commands = append(modelfile.Commands, parser.Command{
|
2024-05-09 03:42:48 +08:00
|
|
|
Name: "template",
|
2024-06-11 05:54:42 +08:00
|
|
|
Args: m.Template.String(),
|
2024-05-01 01:55:19 +08:00
|
|
|
})
|
2024-04-27 07:59:31 +08:00
|
|
|
}
|
|
|
|
|
2024-05-09 03:42:48 +08:00
|
|
|
if m.System != "" {
|
2024-05-21 02:26:45 +08:00
|
|
|
modelfile.Commands = append(modelfile.Commands, parser.Command{
|
2024-05-09 03:42:48 +08:00
|
|
|
Name: "system",
|
|
|
|
Args: m.System,
|
2024-05-01 01:55:19 +08:00
|
|
|
})
|
2024-04-27 07:59:31 +08:00
|
|
|
}
|
|
|
|
|
add qwen3-coder tool support
The format qwen3-coder uses is relatively unique, both in rendering and
in parsing. To implement parsing, I wrote a custom parser in similar
style to harmony. For the rendering, I found that the logic would be
much more difficult to follow in a template, so I introduced the concept
of a built-in renderer that uses go code, rather than a template to
generate prompts.
I set us up for future built-in parsers and renderers by making it so
they can be specified in a Modelfile like so:
```
RENDERER "qwen3-coder"
PARSER "qwen3-coder"
```
These need to be provided explicitly because the architecture alone is
not enough to understand what format the model expects to receive, and
what format we expect it to output (e.g., qwen3-coder is `qwen3moe`,
which includes other qwen3-family models as well)
I haven't converted harmony to be one of these "built-ins" yet, since
some of it is in flux with the changes @ParthSareen has been making to
move harmony to the runner. It is likely that many other built-ins will
need to move to the runner as well, but I'm able to slightly defer that
decision since qwen3-coder doesn't have thinking (and therefore doesn't
need to be in the runner to make structured outputs work). I expect to
unify harmony with this approach very soon.
Whether a particular model supports tools or thinking was previously
inferred from templates, but without a template we now also use the
parser itself to declare what it supports. If we have future models that
re-use the same parsing format, but have different capabilities, we'll
want to parameterize them and give them different names to be specified
as a `PARSER`.
Misc changes:
- I worked on the renderer by diffing outputs from the reference
implementation and ours. To make it easier to do this, I extended
<https://github.com/ollama/ollama/pull/11875> to also support
returning the prompt via the openai compat layer
2025-09-12 04:40:35 +08:00
|
|
|
if m.Config.Renderer != "" {
|
|
|
|
modelfile.Commands = append(modelfile.Commands, parser.Command{
|
|
|
|
Name: "renderer",
|
|
|
|
Args: m.Config.Renderer,
|
|
|
|
})
|
|
|
|
}
|
|
|
|
|
|
|
|
if m.Config.Parser != "" {
|
|
|
|
modelfile.Commands = append(modelfile.Commands, parser.Command{
|
|
|
|
Name: "parser",
|
|
|
|
Args: m.Config.Parser,
|
|
|
|
})
|
|
|
|
}
|
|
|
|
|
2024-04-27 07:59:31 +08:00
|
|
|
for k, v := range m.Options {
|
|
|
|
switch v := v.(type) {
|
|
|
|
case []any:
|
|
|
|
for _, s := range v {
|
2024-05-21 02:26:45 +08:00
|
|
|
modelfile.Commands = append(modelfile.Commands, parser.Command{
|
2024-05-01 01:55:19 +08:00
|
|
|
Name: k,
|
|
|
|
Args: fmt.Sprintf("%v", s),
|
|
|
|
})
|
2024-04-27 07:59:31 +08:00
|
|
|
}
|
|
|
|
default:
|
2024-05-21 02:26:45 +08:00
|
|
|
modelfile.Commands = append(modelfile.Commands, parser.Command{
|
2024-05-01 01:55:19 +08:00
|
|
|
Name: k,
|
|
|
|
Args: fmt.Sprintf("%v", v),
|
|
|
|
})
|
2024-04-27 07:59:31 +08:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
for _, license := range m.License {
|
2024-05-21 02:26:45 +08:00
|
|
|
modelfile.Commands = append(modelfile.Commands, parser.Command{
|
2024-05-01 01:55:19 +08:00
|
|
|
Name: "license",
|
|
|
|
Args: license,
|
|
|
|
})
|
2024-04-27 07:59:31 +08:00
|
|
|
}
|
|
|
|
|
|
|
|
for _, msg := range m.Messages {
|
2024-05-21 02:26:45 +08:00
|
|
|
modelfile.Commands = append(modelfile.Commands, parser.Command{
|
2024-05-01 01:55:19 +08:00
|
|
|
Name: "message",
|
2024-08-01 07:52:09 +08:00
|
|
|
Args: fmt.Sprintf("%s: %s", msg.Role, msg.Content),
|
2024-05-01 01:55:19 +08:00
|
|
|
})
|
2024-04-27 07:59:31 +08:00
|
|
|
}
|
|
|
|
|
2024-05-01 01:55:19 +08:00
|
|
|
return modelfile.String()
|
2024-04-27 07:59:31 +08:00
|
|
|
}
|
|
|
|
|
2023-07-17 08:02:22 +08:00
|
|
|
// ConfigV2 is the model's config blob, stored as JSON and referenced by the
// manifest. It combines Ollama-specific model metadata with the fields
// required by the image config spec.
type ConfigV2 struct {
	ModelFormat   string   `json:"model_format"`
	ModelFamily   string   `json:"model_family"`
	ModelFamilies []string `json:"model_families"`
	ModelType     string   `json:"model_type"` // shown as Parameter Size
	FileType      string   `json:"file_type"`  // shown as Quantization Level

	// Built-in renderer/parser names (set via RENDERER/PARSER in a Modelfile).
	Renderer string `json:"renderer,omitempty"`
	Parser   string `json:"parser,omitempty"`

	RemoteHost  string `json:"remote_host,omitempty"`
	RemoteModel string `json:"remote_model,omitempty"`

	// used for remotes
	Capabilities []string `json:"capabilities,omitempty"`
	ContextLen   int      `json:"context_length,omitempty"`
	EmbedLen     int      `json:"embedding_length,omitempty"`
	BaseName     string   `json:"base_name,omitempty"`

	// required by spec
	Architecture string `json:"architecture"`
	OS           string `json:"os"`
	RootFS       RootFS `json:"rootfs"`
}
|
|
|
|
|
|
|
|
// RootFS describes the image root filesystem, required by the image config
// spec (see ConfigV2.RootFS).
type RootFS struct {
	Type    string   `json:"type"`
	DiffIDs []string `json:"diff_ids"`
}
|
|
|
|
|
2024-06-10 23:47:13 +08:00
|
|
|
func GetManifest(mp ModelPath) (*Manifest, string, error) {
|
2023-10-27 22:19:59 +08:00
|
|
|
fp, err := mp.GetManifestPath()
|
2023-07-18 02:03:55 +08:00
|
|
|
if err != nil {
|
2023-08-29 11:50:24 +08:00
|
|
|
return nil, "", err
|
2023-07-18 02:03:55 +08:00
|
|
|
}
|
2023-07-18 05:21:27 +08:00
|
|
|
|
2024-08-15 05:37:51 +08:00
|
|
|
f, err := os.Open(fp)
|
2023-07-17 08:02:22 +08:00
|
|
|
if err != nil {
|
2024-08-15 05:37:51 +08:00
|
|
|
return nil, "", err
|
2023-07-17 08:02:22 +08:00
|
|
|
}
|
2024-08-15 05:37:51 +08:00
|
|
|
defer f.Close()
|
2023-07-17 08:02:22 +08:00
|
|
|
|
2024-08-15 05:37:51 +08:00
|
|
|
sha256sum := sha256.New()
|
2023-08-29 11:50:24 +08:00
|
|
|
|
2024-08-15 05:37:51 +08:00
|
|
|
var manifest Manifest
|
|
|
|
if err := json.NewDecoder(io.TeeReader(f, sha256sum)).Decode(&manifest); err != nil {
|
2023-08-29 11:50:24 +08:00
|
|
|
return nil, "", err
|
2023-07-17 08:02:22 +08:00
|
|
|
}
|
|
|
|
|
2024-08-15 05:37:51 +08:00
|
|
|
return &manifest, hex.EncodeToString(sha256sum.Sum(nil)), nil
|
2023-07-17 08:02:22 +08:00
|
|
|
}
|
|
|
|
|
|
|
|
func GetModel(name string) (*Model, error) {
|
2023-08-23 00:39:42 +08:00
|
|
|
mp := ParseModelPath(name)
|
2023-08-29 11:50:24 +08:00
|
|
|
manifest, digest, err := GetManifest(mp)
|
2023-07-17 08:02:22 +08:00
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
|
|
|
|
model := &Model{
|
2023-10-19 22:39:58 +08:00
|
|
|
Name: mp.GetFullTagname(),
|
|
|
|
ShortName: mp.GetShortTagname(),
|
|
|
|
Digest: digest,
|
2024-06-11 05:54:42 +08:00
|
|
|
Template: template.DefaultTemplate,
|
2023-07-17 08:02:22 +08:00
|
|
|
}
|
|
|
|
|
2024-08-06 08:13:52 +08:00
|
|
|
if manifest.Config.Digest != "" {
|
|
|
|
filename, err := GetBlobsPath(manifest.Config.Digest)
|
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
2023-12-02 03:37:17 +08:00
|
|
|
|
2024-08-06 08:13:52 +08:00
|
|
|
configFile, err := os.Open(filename)
|
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
defer configFile.Close()
|
2023-12-02 03:37:17 +08:00
|
|
|
|
2024-08-06 08:13:52 +08:00
|
|
|
if err := json.NewDecoder(configFile).Decode(&model.Config); err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
2023-12-02 03:37:17 +08:00
|
|
|
}
|
|
|
|
|
2023-07-17 08:02:22 +08:00
|
|
|
for _, layer := range manifest.Layers {
|
2023-07-18 13:44:21 +08:00
|
|
|
filename, err := GetBlobsPath(layer.Digest)
|
2023-07-18 02:03:55 +08:00
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
|
2023-07-17 08:02:22 +08:00
|
|
|
switch layer.MediaType {
|
|
|
|
case "application/vnd.ollama.image.model":
|
|
|
|
model.ModelPath = filename
|
2024-01-26 04:12:36 +08:00
|
|
|
model.ParentModel = layer.From
|
2023-08-05 06:56:40 +08:00
|
|
|
case "application/vnd.ollama.image.embed":
|
2023-10-16 23:07:37 +08:00
|
|
|
// Deprecated in versions > 0.1.2
|
|
|
|
// TODO: remove this warning in a future version
|
2024-01-19 02:52:01 +08:00
|
|
|
slog.Info("WARNING: model contains embeddings, but embeddings in modelfiles have been deprecated and will be ignored.")
|
2023-08-04 08:16:05 +08:00
|
|
|
case "application/vnd.ollama.image.adapter":
|
|
|
|
model.AdapterPaths = append(model.AdapterPaths, filename)
|
2023-12-01 02:30:23 +08:00
|
|
|
case "application/vnd.ollama.image.projector":
|
|
|
|
model.ProjectorPaths = append(model.ProjectorPaths, filename)
|
2024-06-11 05:54:42 +08:00
|
|
|
case "application/vnd.ollama.image.prompt",
|
|
|
|
"application/vnd.ollama.image.template":
|
2023-07-18 05:21:27 +08:00
|
|
|
bts, err := os.ReadFile(filename)
|
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
|
2024-06-11 05:54:42 +08:00
|
|
|
model.Template, err = template.Parse(string(bts))
|
2023-07-17 08:02:22 +08:00
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
2024-06-11 05:54:42 +08:00
|
|
|
case "application/vnd.ollama.image.system":
|
2023-07-20 10:43:00 +08:00
|
|
|
bts, err := os.ReadFile(filename)
|
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
|
2024-06-11 05:54:42 +08:00
|
|
|
model.System = string(bts)
|
2023-07-17 08:02:22 +08:00
|
|
|
case "application/vnd.ollama.image.params":
|
2023-07-18 03:08:10 +08:00
|
|
|
params, err := os.Open(filename)
|
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
defer params.Close()
|
2023-07-17 08:02:22 +08:00
|
|
|
|
2023-08-01 03:07:04 +08:00
|
|
|
// parse model options parameters into a map so that we can see which fields have been specified explicitly
|
2023-08-02 01:36:31 +08:00
|
|
|
if err = json.NewDecoder(params).Decode(&model.Options); err != nil {
|
2023-08-01 03:07:04 +08:00
|
|
|
return nil, err
|
|
|
|
}
|
2024-01-26 04:12:36 +08:00
|
|
|
case "application/vnd.ollama.image.messages":
|
|
|
|
msgs, err := os.Open(filename)
|
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
defer msgs.Close()
|
|
|
|
|
|
|
|
if err = json.NewDecoder(msgs).Decode(&model.Messages); err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
2023-09-07 02:04:17 +08:00
|
|
|
case "application/vnd.ollama.image.license":
|
|
|
|
bts, err := os.ReadFile(filename)
|
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
model.License = append(model.License, string(bts))
|
2023-07-17 08:02:22 +08:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
return model, nil
|
|
|
|
}
|
|
|
|
|
2024-04-17 07:22:38 +08:00
|
|
|
func CopyModel(src, dst model.Name) error {
|
2024-04-27 04:08:32 +08:00
|
|
|
if !dst.IsFullyQualified() {
|
|
|
|
return model.Unqualified(dst)
|
|
|
|
}
|
|
|
|
if !src.IsFullyQualified() {
|
|
|
|
return model.Unqualified(src)
|
|
|
|
}
|
|
|
|
|
2024-04-29 11:47:49 +08:00
|
|
|
if src.Filepath() == dst.Filepath() {
|
|
|
|
return nil
|
|
|
|
}
|
|
|
|
|
2024-04-17 07:22:38 +08:00
|
|
|
manifests, err := GetManifestPath()
|
2023-08-22 12:56:56 +08:00
|
|
|
if err != nil {
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
|
2024-04-27 04:08:32 +08:00
|
|
|
dstpath := filepath.Join(manifests, dst.Filepath())
|
2024-04-17 07:22:38 +08:00
|
|
|
if err := os.MkdirAll(filepath.Dir(dstpath), 0o755); err != nil {
|
2023-10-27 22:19:59 +08:00
|
|
|
return err
|
|
|
|
}
|
2023-07-24 23:27:28 +08:00
|
|
|
|
2024-04-27 04:08:32 +08:00
|
|
|
srcpath := filepath.Join(manifests, src.Filepath())
|
2024-04-17 07:22:38 +08:00
|
|
|
srcfile, err := os.Open(srcpath)
|
2023-07-24 23:27:28 +08:00
|
|
|
if err != nil {
|
|
|
|
return err
|
|
|
|
}
|
2024-04-17 07:22:38 +08:00
|
|
|
defer srcfile.Close()
|
2023-07-24 23:27:28 +08:00
|
|
|
|
2024-04-17 07:22:38 +08:00
|
|
|
dstfile, err := os.Create(dstpath)
|
2023-07-24 23:27:28 +08:00
|
|
|
if err != nil {
|
|
|
|
return err
|
|
|
|
}
|
2024-04-17 07:22:38 +08:00
|
|
|
defer dstfile.Close()
|
2023-07-24 23:27:28 +08:00
|
|
|
|
2024-04-17 07:22:38 +08:00
|
|
|
_, err = io.Copy(dstfile, srcfile)
|
|
|
|
return err
|
2023-07-24 23:27:28 +08:00
|
|
|
}
|
|
|
|
|
2024-08-15 07:36:07 +08:00
|
|
|
func deleteUnusedLayers(deleteMap map[string]struct{}) error {
|
2024-11-06 06:21:45 +08:00
|
|
|
// Ignore corrupt manifests to avoid blocking deletion of layers that are freshly orphaned
|
|
|
|
manifests, err := Manifests(true)
|
2023-07-21 07:09:23 +08:00
|
|
|
if err != nil {
|
|
|
|
return err
|
|
|
|
}
|
2023-08-31 02:31:12 +08:00
|
|
|
|
2024-08-15 07:36:07 +08:00
|
|
|
for _, manifest := range manifests {
|
2023-08-31 02:31:12 +08:00
|
|
|
for _, layer := range manifest.Layers {
|
|
|
|
delete(deleteMap, layer.Digest)
|
|
|
|
}
|
|
|
|
|
|
|
|
delete(deleteMap, manifest.Config.Digest)
|
2023-08-01 06:26:18 +08:00
|
|
|
}
|
2023-07-21 07:09:23 +08:00
|
|
|
|
|
|
|
// only delete the files which are still in the deleteMap
|
2023-11-15 04:30:34 +08:00
|
|
|
for k := range deleteMap {
|
|
|
|
fp, err := GetBlobsPath(k)
|
|
|
|
if err != nil {
|
2024-01-19 02:52:01 +08:00
|
|
|
slog.Info(fmt.Sprintf("couldn't get file path for '%s': %v", k, err))
|
2023-11-15 04:30:34 +08:00
|
|
|
continue
|
|
|
|
}
|
2024-05-10 07:35:20 +08:00
|
|
|
if err := os.Remove(fp); err != nil {
|
|
|
|
slog.Info(fmt.Sprintf("couldn't remove file '%s': %v", fp, err))
|
|
|
|
continue
|
2023-07-21 07:09:23 +08:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2023-09-12 02:46:35 +08:00
|
|
|
return nil
|
|
|
|
}
|
|
|
|
|
|
|
|
func PruneLayers() error {
|
2023-11-15 04:30:34 +08:00
|
|
|
deleteMap := make(map[string]struct{})
|
2023-09-12 02:46:35 +08:00
|
|
|
p, err := GetBlobsPath("")
|
|
|
|
if err != nil {
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
|
|
|
|
blobs, err := os.ReadDir(p)
|
|
|
|
if err != nil {
|
2024-01-19 02:52:01 +08:00
|
|
|
slog.Info(fmt.Sprintf("couldn't read dir '%s': %v", p, err))
|
2023-09-12 02:46:35 +08:00
|
|
|
return err
|
|
|
|
}
|
|
|
|
|
|
|
|
for _, blob := range blobs {
|
|
|
|
name := blob.Name()
|
2024-03-15 11:18:06 +08:00
|
|
|
name = strings.ReplaceAll(name, "-", ":")
|
2024-05-10 07:35:20 +08:00
|
|
|
|
|
|
|
_, err := GetBlobsPath(name)
|
|
|
|
if err != nil {
|
|
|
|
if errors.Is(err, ErrInvalidDigestFormat) {
|
|
|
|
// remove invalid blobs (e.g. partial downloads)
|
|
|
|
if err := os.Remove(filepath.Join(p, blob.Name())); err != nil {
|
|
|
|
slog.Error("couldn't remove blob", "blob", blob.Name(), "error", err)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
continue
|
2023-11-15 06:27:51 +08:00
|
|
|
}
|
2024-05-10 07:35:20 +08:00
|
|
|
|
|
|
|
deleteMap[name] = struct{}{}
|
2023-09-12 02:46:35 +08:00
|
|
|
}
|
|
|
|
|
2024-01-19 02:52:01 +08:00
|
|
|
slog.Info(fmt.Sprintf("total blobs: %d", len(deleteMap)))
|
2023-09-12 02:46:35 +08:00
|
|
|
|
2024-08-15 07:36:07 +08:00
|
|
|
if err := deleteUnusedLayers(deleteMap); err != nil {
|
2024-08-06 08:13:52 +08:00
|
|
|
slog.Error(fmt.Sprintf("couldn't remove unused layers: %v", err))
|
2024-08-02 06:05:16 +08:00
|
|
|
return nil
|
2023-09-12 02:46:35 +08:00
|
|
|
}
|
|
|
|
|
2024-01-19 02:52:01 +08:00
|
|
|
slog.Info(fmt.Sprintf("total unused blobs removed: %d", len(deleteMap)))
|
2023-09-12 02:46:35 +08:00
|
|
|
|
|
|
|
return nil
|
|
|
|
}
|
|
|
|
|
2023-09-27 08:28:14 +08:00
|
|
|
func PruneDirectory(path string) error {
|
|
|
|
info, err := os.Lstat(path)
|
|
|
|
if err != nil {
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
|
|
|
|
if info.IsDir() && info.Mode()&os.ModeSymlink == 0 {
|
|
|
|
entries, err := os.ReadDir(path)
|
|
|
|
if err != nil {
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
|
|
|
|
for _, entry := range entries {
|
|
|
|
if err := PruneDirectory(filepath.Join(path, entry.Name())); err != nil {
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
entries, err = os.ReadDir(path)
|
|
|
|
if err != nil {
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
|
|
|
|
if len(entries) > 0 {
|
|
|
|
return nil
|
|
|
|
}
|
|
|
|
|
|
|
|
return os.Remove(path)
|
|
|
|
}
|
|
|
|
|
|
|
|
return nil
|
|
|
|
}
|
|
|
|
|
2024-02-15 03:29:49 +08:00
|
|
|
func PushModel(ctx context.Context, name string, regOpts *registryOptions, fn func(api.ProgressResponse)) error {
|
2023-08-23 00:39:42 +08:00
|
|
|
mp := ParseModelPath(name)
|
2023-07-19 09:51:30 +08:00
|
|
|
fn(api.ProgressResponse{Status: "retrieving manifest"})
|
|
|
|
|
2023-08-23 00:39:42 +08:00
|
|
|
if mp.ProtocolScheme == "http" && !regOpts.Insecure {
|
2025-03-29 02:50:22 +08:00
|
|
|
return errInsecureProtocol
|
2023-08-23 00:39:42 +08:00
|
|
|
}
|
|
|
|
|
2023-08-29 11:50:24 +08:00
|
|
|
manifest, _, err := GetManifest(mp)
|
2023-07-17 08:02:22 +08:00
|
|
|
if err != nil {
|
2023-07-19 09:51:30 +08:00
|
|
|
fn(api.ProgressResponse{Status: "couldn't retrieve manifest"})
|
2023-07-17 08:02:22 +08:00
|
|
|
return err
|
|
|
|
}
|
|
|
|
|
2024-08-08 05:22:17 +08:00
|
|
|
var layers []Layer
|
2023-08-01 09:37:40 +08:00
|
|
|
layers = append(layers, manifest.Layers...)
|
2024-08-06 08:13:52 +08:00
|
|
|
if manifest.Config.Digest != "" {
|
2024-08-08 05:22:17 +08:00
|
|
|
layers = append(layers, manifest.Config)
|
2024-08-06 08:13:52 +08:00
|
|
|
}
|
2023-07-17 08:02:22 +08:00
|
|
|
|
|
|
|
for _, layer := range layers {
|
2023-10-10 01:24:27 +08:00
|
|
|
if err := uploadBlob(ctx, mp, layer, regOpts, fn); err != nil {
|
2024-01-19 02:52:01 +08:00
|
|
|
slog.Info(fmt.Sprintf("error uploading blob: %v", err))
|
2023-07-17 08:02:22 +08:00
|
|
|
return err
|
|
|
|
}
|
2023-07-19 09:51:30 +08:00
|
|
|
}
|
|
|
|
|
2023-07-23 08:31:26 +08:00
|
|
|
fn(api.ProgressResponse{Status: "pushing manifest"})
|
2023-08-22 09:38:31 +08:00
|
|
|
requestURL := mp.BaseURL()
|
|
|
|
requestURL = requestURL.JoinPath("v2", mp.GetNamespaceRepository(), "manifests", mp.Tag)
|
2023-07-17 08:02:22 +08:00
|
|
|
|
|
|
|
manifestJSON, err := json.Marshal(manifest)
|
|
|
|
if err != nil {
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
|
2023-08-22 09:24:42 +08:00
|
|
|
headers := make(http.Header)
|
|
|
|
headers.Set("Content-Type", "application/vnd.docker.distribution.manifest.v2+json")
|
2023-11-03 04:10:58 +08:00
|
|
|
resp, err := makeRequestWithRetry(ctx, http.MethodPut, requestURL, headers, bytes.NewReader(manifestJSON), regOpts)
|
2023-07-17 08:02:22 +08:00
|
|
|
if err != nil {
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
defer resp.Body.Close()
|
|
|
|
|
2023-07-23 08:31:26 +08:00
|
|
|
fn(api.ProgressResponse{Status: "success"})
|
2023-07-17 08:02:22 +08:00
|
|
|
|
|
|
|
return nil
|
|
|
|
}
|
|
|
|
|
2024-02-15 03:29:49 +08:00
|
|
|
// PullModel downloads the named model's manifest and blobs from its registry,
// verifies blob digests, writes the manifest to disk, and then prunes any
// blobs that only the previously-installed version of the model referenced.
// Progress is reported through fn.
func PullModel(ctx context.Context, name string, regOpts *registryOptions, fn func(api.ProgressResponse)) error {
	mp := ParseModelPath(name)

	// build deleteMap to prune unused layers
	// Seed deleteMap from the existing (pre-pull) manifest, if any; digests
	// still used by the new manifest are removed from the map as they are
	// downloaded below, leaving only the orphans.
	deleteMap := make(map[string]struct{})
	manifest, _, err := GetManifest(mp)
	if errors.Is(err, os.ErrNotExist) {
		// noop
	} else if err != nil {
		slog.Warn("pulling model with bad existing manifest", "name", name, "error", err)
	} else {
		for _, l := range manifest.Layers {
			deleteMap[l.Digest] = struct{}{}
		}
		if manifest.Config.Digest != "" {
			deleteMap[manifest.Config.Digest] = struct{}{}
		}
	}

	// Refuse plain HTTP unless the caller explicitly opted in.
	if mp.ProtocolScheme == "http" && !regOpts.Insecure {
		return errInsecureProtocol
	}

	fn(api.ProgressResponse{Status: "pulling manifest"})

	// Replace the local manifest variable with the registry's copy.
	manifest, err = pullModelManifest(ctx, mp, regOpts)
	if err != nil {
		return fmt.Errorf("pull model manifest: %s", err)
	}

	// The config blob is downloaded and verified like any other layer.
	var layers []Layer
	layers = append(layers, manifest.Layers...)
	if manifest.Config.Digest != "" {
		layers = append(layers, manifest.Config)
	}

	// skipVerify records cache hits: blobs that downloadBlob found already
	// on disk are not re-hashed in the verification pass below.
	skipVerify := make(map[string]bool)
	for _, layer := range layers {
		cacheHit, err := downloadBlob(ctx, downloadOpts{
			mp:      mp,
			digest:  layer.Digest,
			regOpts: regOpts,
			fn:      fn,
		})
		if err != nil {
			return err
		}
		skipVerify[layer.Digest] = cacheHit
		// Still referenced by the new manifest, so it must not be pruned.
		delete(deleteMap, layer.Digest)
	}
	delete(deleteMap, manifest.Config.Digest)

	fn(api.ProgressResponse{Status: "verifying sha256 digest"})
	for _, layer := range layers {
		if skipVerify[layer.Digest] {
			continue
		}
		if err := verifyBlob(layer.Digest); err != nil {
			if errors.Is(err, errDigestMismatch) {
				// something went wrong, delete the blob
				fp, err := GetBlobsPath(layer.Digest)
				if err != nil {
					return err
				}
				if err := os.Remove(fp); err != nil {
					// log this, but return the original error
					slog.Info(fmt.Sprintf("couldn't remove file with digest mismatch '%s': %v", fp, err))
				}
			}
			return err
		}
	}

	fn(api.ProgressResponse{Status: "writing manifest"})

	manifestJSON, err := json.Marshal(manifest)
	if err != nil {
		return err
	}

	fp, err := mp.GetManifestPath()
	if err != nil {
		return err
	}
	if err := os.MkdirAll(filepath.Dir(fp), 0o755); err != nil {
		return err
	}

	err = os.WriteFile(fp, manifestJSON, 0o644)
	if err != nil {
		slog.Info(fmt.Sprintf("couldn't write to %s", fp))
		return err
	}

	// Anything left in deleteMap belonged only to the old manifest; prune it
	// unless pruning is disabled via envconfig.NoPrune().
	if !envconfig.NoPrune() && len(deleteMap) > 0 {
		fn(api.ProgressResponse{Status: "removing unused layers"})
		if err := deleteUnusedLayers(deleteMap); err != nil {
			// Best-effort cleanup: report the failure but don't fail the pull.
			fn(api.ProgressResponse{Status: fmt.Sprintf("couldn't remove unused layers: %v", err)})
		}
	}

	fn(api.ProgressResponse{Status: "success"})

	return nil
}
|
|
|
|
|
2024-06-10 23:47:13 +08:00
|
|
|
func pullModelManifest(ctx context.Context, mp ModelPath, regOpts *registryOptions) (*Manifest, error) {
|
2023-08-22 09:38:31 +08:00
|
|
|
requestURL := mp.BaseURL().JoinPath("v2", mp.GetNamespaceRepository(), "manifests", mp.Tag)
|
2023-07-17 08:02:22 +08:00
|
|
|
|
2023-08-22 09:24:42 +08:00
|
|
|
headers := make(http.Header)
|
|
|
|
headers.Set("Accept", "application/vnd.docker.distribution.manifest.v2+json")
|
2023-11-03 04:13:32 +08:00
|
|
|
resp, err := makeRequestWithRetry(ctx, http.MethodGet, requestURL, headers, nil, regOpts)
|
2023-07-17 08:02:22 +08:00
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
defer resp.Body.Close()
|
|
|
|
|
2024-08-15 05:37:51 +08:00
|
|
|
var m Manifest
|
2023-07-17 08:02:22 +08:00
|
|
|
if err := json.NewDecoder(resp.Body).Decode(&m); err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
|
2024-08-15 05:37:51 +08:00
|
|
|
return &m, err
|
2023-07-17 08:02:22 +08:00
|
|
|
}
|
|
|
|
|
|
|
|
// GetSHA256Digest returns the SHA256 hash of a given buffer and returns it, and the size of buffer
|
2023-09-29 01:00:34 +08:00
|
|
|
func GetSHA256Digest(r io.Reader) (string, int64) {
|
2023-07-19 08:14:12 +08:00
|
|
|
h := sha256.New()
|
|
|
|
n, err := io.Copy(h, r)
|
|
|
|
if err != nil {
|
|
|
|
log.Fatal(err)
|
|
|
|
}
|
|
|
|
|
2023-09-29 01:00:34 +08:00
|
|
|
return fmt.Sprintf("sha256:%x", h.Sum(nil)), n
|
2023-07-17 08:02:22 +08:00
|
|
|
}
|
|
|
|
|
2024-08-02 05:52:15 +08:00
|
|
|
// errUnauthorized is returned when the registry still rejects the request
// after the single token-refresh retry below.
var errUnauthorized = errors.New("unauthorized: access denied")

// makeRequestWithRetry performs a registry request and transparently handles
// one 401 challenge: it parses the WWW-Authenticate header, fetches a fresh
// token, rewinds body, and retries once. A 404 is mapped to os.ErrNotExist;
// any other status >= 400 is returned as an error containing the response
// body. On success the caller owns resp.Body and must close it.
func makeRequestWithRetry(ctx context.Context, method string, requestURL *url.URL, headers http.Header, body io.ReadSeeker, regOpts *registryOptions) (*http.Response, error) {
	// At most two attempts: the original request plus one retry after
	// refreshing the authorization token.
	for range 2 {
		resp, err := makeRequest(ctx, method, requestURL, headers, body, regOpts)
		if err != nil {
			if !errors.Is(err, context.Canceled) {
				slog.Info(fmt.Sprintf("request failed: %v", err))
			}

			return nil, err
		}

		switch {
		case resp.StatusCode == http.StatusUnauthorized:
			// The body is unused on this path; close it promptly so the
			// connection can be reused for the retry. The response headers
			// remain readable after Close.
			resp.Body.Close()

			// Handle authentication error with one retry
			challenge := parseRegistryChallenge(resp.Header.Get("www-authenticate"))
			token, err := getAuthorizationToken(ctx, challenge)
			if err != nil {
				return nil, err
			}
			regOpts.Token = token
			if body != nil {
				// The first attempt may have consumed the body; rewind it
				// before retrying.
				_, err = body.Seek(0, io.SeekStart)
				if err != nil {
					return nil, err
				}
			}
		case resp.StatusCode == http.StatusNotFound:
			resp.Body.Close()
			return nil, os.ErrNotExist
		case resp.StatusCode >= http.StatusBadRequest:
			defer resp.Body.Close()
			responseBody, err := io.ReadAll(resp.Body)
			if err != nil {
				return nil, fmt.Errorf("%d: %s", resp.StatusCode, err)
			}
			return nil, fmt.Errorf("%d: %s", resp.StatusCode, responseBody)
		default:
			return resp, nil
		}
	}

	// Both attempts came back 401.
	return nil, errUnauthorized
}
|
|
|
|
|
server: allow mixed-case model names on push, pull, cp, and create (#7676)
This change allows for mixed-case model names to be pushed, pulled,
copied, and created, which was previously disallowed because the Ollama
registry was backed by a Docker registry that enforced a naming
convention that disallowed mixed-case names, which is no longer the
case.
This does not break existing, intended, behaviors.
Also, make TestCase test a story of creating, updating, pulling, and
copying a model with case variations, ensuring the model's manifest is
updated correctly, and not duplicated across different files with
different case variations.
2024-11-20 07:05:57 +08:00
|
|
|
// testMakeRequestDialContext specifies the dial function for the http client in
// makeRequest. It can be used to resolve hosts in model names to local
// addresses for testing. For example, the model name ("example.com/my/model")
// can be directed to push/pull from "127.0.0.1:1234".
//
// This is not safe to set across goroutines. It should be set in
// the main test goroutine, and not by tests marked to run in parallel with
// t.Parallel().
//
// It should be cleared after use, otherwise it will affect other tests.
//
// Ideally we would have some way to set this up the stack, but the code is not
// structured in a way that makes this easy, so this will have to do for now.
var testMakeRequestDialContext func(ctx context.Context, network, addr string) (net.Conn, error)
|
|
|
|
|
2024-02-15 03:29:49 +08:00
|
|
|
func makeRequest(ctx context.Context, method string, requestURL *url.URL, headers http.Header, body io.Reader, regOpts *registryOptions) (*http.Response, error) {
|
|
|
|
if requestURL.Scheme != "http" && regOpts != nil && regOpts.Insecure {
|
|
|
|
requestURL.Scheme = "http"
|
|
|
|
}
|
|
|
|
|
|
|
|
req, err := http.NewRequestWithContext(ctx, method, requestURL.String(), body)
|
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
|
|
|
|
if headers != nil {
|
|
|
|
req.Header = headers
|
|
|
|
}
|
|
|
|
|
|
|
|
if regOpts != nil {
|
|
|
|
if regOpts.Token != "" {
|
|
|
|
req.Header.Set("Authorization", "Bearer "+regOpts.Token)
|
|
|
|
} else if regOpts.Username != "" && regOpts.Password != "" {
|
|
|
|
req.SetBasicAuth(regOpts.Username, regOpts.Password)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2024-09-06 05:26:00 +08:00
|
|
|
req.Header.Set("User-Agent", fmt.Sprintf("ollama/%s (%s %s) Go/%s", version.Version, runtime.GOARCH, runtime.GOOS, runtime.Version()))
|
2024-02-15 03:29:49 +08:00
|
|
|
|
|
|
|
if s := req.Header.Get("Content-Length"); s != "" {
|
|
|
|
contentLength, err := strconv.ParseInt(s, 10, 64)
|
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
|
|
|
|
req.ContentLength = contentLength
|
|
|
|
}
|
|
|
|
|
2024-11-26 07:08:34 +08:00
|
|
|
c := &http.Client{
|
2024-07-26 06:58:30 +08:00
|
|
|
CheckRedirect: regOpts.CheckRedirect,
|
2024-02-15 03:29:49 +08:00
|
|
|
}
|
2024-11-26 07:08:34 +08:00
|
|
|
if testMakeRequestDialContext != nil {
|
|
|
|
tr := http.DefaultTransport.(*http.Transport).Clone()
|
|
|
|
tr.DialContext = testMakeRequestDialContext
|
|
|
|
c.Transport = tr
|
|
|
|
}
|
|
|
|
return c.Do(req)
|
2024-02-15 03:29:49 +08:00
|
|
|
}
|
|
|
|
|
2023-08-11 02:34:25 +08:00
|
|
|
// getValue extracts the quoted value for key from a comma-separated
// challenge header of the form `key="value",key2="value2"`. A quote inside
// a value only terminates it when followed by a comma (or end of string),
// matching how registry challenge headers are formatted. Returns "" when
// key is absent.
func getValue(header, key string) string {
	start := strings.Index(header, key+"=")
	if start == -1 {
		return ""
	}

	// Skip past `key="` to the first character of the value.
	start += len(key) + 2

	end := start
	for end < len(header) {
		if header[end] != '"' {
			end++
			continue
		}
		// A quote ends the value only at end-of-string or before a comma.
		if end+1 >= len(header) || header[end+1] == ',' {
			break
		}
		end++
	}

	return header[start:end]
}
|
|
|
|
|
2024-02-15 03:29:49 +08:00
|
|
|
func parseRegistryChallenge(authStr string) registryChallenge {
|
2023-08-11 02:34:25 +08:00
|
|
|
authStr = strings.TrimPrefix(authStr, "Bearer ")
|
|
|
|
|
2024-02-15 03:29:49 +08:00
|
|
|
return registryChallenge{
|
2023-08-11 02:34:25 +08:00
|
|
|
Realm: getValue(authStr, "realm"),
|
|
|
|
Service: getValue(authStr, "service"),
|
|
|
|
Scope: getValue(authStr, "scope"),
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2024-04-21 10:11:06 +08:00
|
|
|
var errDigestMismatch = errors.New("digest mismatch, file must be downloaded again")
|
2023-07-25 02:53:01 +08:00
|
|
|
|
2023-07-21 02:44:05 +08:00
|
|
|
func verifyBlob(digest string) error {
|
|
|
|
fp, err := GetBlobsPath(digest)
|
|
|
|
if err != nil {
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
|
|
|
|
f, err := os.Open(fp)
|
|
|
|
if err != nil {
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
defer f.Close()
|
|
|
|
|
|
|
|
fileDigest, _ := GetSHA256Digest(f)
|
|
|
|
if digest != fileDigest {
|
2023-07-25 02:53:01 +08:00
|
|
|
return fmt.Errorf("%w: want %s, got %s", errDigestMismatch, digest, fileDigest)
|
2023-07-21 02:44:05 +08:00
|
|
|
}
|
|
|
|
|
|
|
|
return nil
|
|
|
|
}
|