mirror of
https://github.com/ollama/ollama.git
synced 2026-04-27 19:25:55 +02:00
Compare commits
9 Commits
v0.13.4-rc
...
grace/deep
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
e1878e6e33 | ||
|
|
f0733c13b5 | ||
|
|
07162c509f | ||
|
|
5be8277683 | ||
|
|
ec65cc3690 | ||
|
|
e3731fb160 | ||
|
|
8dbc9e7b68 | ||
|
|
abe67acf8a | ||
|
|
4ff8a691bc |
64
app/ui/ui.go
64
app/ui/ui.go
@@ -12,13 +12,13 @@ import (
|
|||||||
"log/slog"
|
"log/slog"
|
||||||
"net/http"
|
"net/http"
|
||||||
"net/http/httputil"
|
"net/http/httputil"
|
||||||
"net/url"
|
|
||||||
"os"
|
"os"
|
||||||
"runtime"
|
"runtime"
|
||||||
"runtime/debug"
|
"runtime/debug"
|
||||||
"slices"
|
"slices"
|
||||||
"strconv"
|
"strconv"
|
||||||
"strings"
|
"strings"
|
||||||
|
"sync"
|
||||||
"time"
|
"time"
|
||||||
|
|
||||||
"github.com/google/uuid"
|
"github.com/google/uuid"
|
||||||
@@ -117,40 +117,66 @@ func (s *Server) log() *slog.Logger {
|
|||||||
|
|
||||||
// ollamaProxy creates a reverse proxy handler to the Ollama server
|
// ollamaProxy creates a reverse proxy handler to the Ollama server
|
||||||
func (s *Server) ollamaProxy() http.Handler {
|
func (s *Server) ollamaProxy() http.Handler {
|
||||||
ollamaHost := os.Getenv("OLLAMA_HOST")
|
var (
|
||||||
if ollamaHost == "" {
|
proxy http.Handler
|
||||||
ollamaHost = "http://127.0.0.1:11434"
|
proxyMu sync.Mutex
|
||||||
}
|
)
|
||||||
|
|
||||||
if !strings.HasPrefix(ollamaHost, "http://") && !strings.HasPrefix(ollamaHost, "https://") {
|
|
||||||
ollamaHost = "http://" + ollamaHost
|
|
||||||
}
|
|
||||||
|
|
||||||
target, err := url.Parse(ollamaHost)
|
|
||||||
if err != nil {
|
|
||||||
s.log().Error("failed to parse OLLAMA_HOST", "error", err, "host", ollamaHost)
|
|
||||||
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||||
http.Error(w, "failed to configure proxy", http.StatusInternalServerError)
|
proxyMu.Lock()
|
||||||
})
|
p := proxy
|
||||||
|
proxyMu.Unlock()
|
||||||
|
|
||||||
|
if p == nil {
|
||||||
|
proxyMu.Lock()
|
||||||
|
if proxy == nil {
|
||||||
|
var err error
|
||||||
|
for i := range 2 {
|
||||||
|
if i > 0 {
|
||||||
|
s.log().Warn("ollama server not ready, retrying", "attempt", i+1)
|
||||||
|
time.Sleep(1 * time.Second)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
err = WaitForServer(context.Background(), 10*time.Second)
|
||||||
|
if err == nil {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if err != nil {
|
||||||
|
proxyMu.Unlock()
|
||||||
|
s.log().Error("ollama server not ready after retries", "error", err)
|
||||||
|
http.Error(w, "Ollama server is not ready", http.StatusServiceUnavailable)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
target := envconfig.Host()
|
||||||
s.log().Info("configuring ollama proxy", "target", target.String())
|
s.log().Info("configuring ollama proxy", "target", target.String())
|
||||||
|
|
||||||
proxy := httputil.NewSingleHostReverseProxy(target)
|
newProxy := httputil.NewSingleHostReverseProxy(target)
|
||||||
|
|
||||||
originalDirector := proxy.Director
|
originalDirector := newProxy.Director
|
||||||
proxy.Director = func(req *http.Request) {
|
newProxy.Director = func(req *http.Request) {
|
||||||
originalDirector(req)
|
originalDirector(req)
|
||||||
req.Host = target.Host
|
req.Host = target.Host
|
||||||
s.log().Debug("proxying request", "method", req.Method, "path", req.URL.Path, "target", target.Host)
|
s.log().Debug("proxying request", "method", req.Method, "path", req.URL.Path, "target", target.Host)
|
||||||
}
|
}
|
||||||
|
|
||||||
proxy.ErrorHandler = func(w http.ResponseWriter, r *http.Request, err error) {
|
newProxy.ErrorHandler = func(w http.ResponseWriter, r *http.Request, err error) {
|
||||||
s.log().Error("proxy error", "error", err, "path", r.URL.Path, "target", target.String())
|
s.log().Error("proxy error", "error", err, "path", r.URL.Path, "target", target.String())
|
||||||
http.Error(w, "proxy error: "+err.Error(), http.StatusBadGateway)
|
http.Error(w, "proxy error: "+err.Error(), http.StatusBadGateway)
|
||||||
}
|
}
|
||||||
|
|
||||||
return proxy
|
proxy = newProxy
|
||||||
|
p = newProxy
|
||||||
|
} else {
|
||||||
|
p = proxy
|
||||||
|
}
|
||||||
|
proxyMu.Unlock()
|
||||||
|
}
|
||||||
|
|
||||||
|
p.ServeHTTP(w, r)
|
||||||
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
type errHandlerFunc func(http.ResponseWriter, *http.Request) error
|
type errHandlerFunc func(http.ResponseWriter, *http.Request) error
|
||||||
|
|||||||
@@ -199,7 +199,7 @@ var (
|
|||||||
// MultiUserCache optimizes prompt caching for multi-user scenarios
|
// MultiUserCache optimizes prompt caching for multi-user scenarios
|
||||||
MultiUserCache = Bool("OLLAMA_MULTIUSER_CACHE")
|
MultiUserCache = Bool("OLLAMA_MULTIUSER_CACHE")
|
||||||
// Enable the new Ollama engine
|
// Enable the new Ollama engine
|
||||||
NewEngine = BoolWithDefault("OLLAMA_NEW_ENGINE")
|
NewEngine = Bool("OLLAMA_NEW_ENGINE")
|
||||||
// ContextLength sets the default context length
|
// ContextLength sets the default context length
|
||||||
ContextLength = Uint("OLLAMA_CONTEXT_LENGTH", 4096)
|
ContextLength = Uint("OLLAMA_CONTEXT_LENGTH", 4096)
|
||||||
// Auth enables authentication between the Ollama client and server
|
// Auth enables authentication between the Ollama client and server
|
||||||
@@ -291,7 +291,7 @@ func AsMap() map[string]EnvVar {
|
|||||||
"OLLAMA_SCHED_SPREAD": {"OLLAMA_SCHED_SPREAD", SchedSpread(), "Always schedule model across all GPUs"},
|
"OLLAMA_SCHED_SPREAD": {"OLLAMA_SCHED_SPREAD", SchedSpread(), "Always schedule model across all GPUs"},
|
||||||
"OLLAMA_MULTIUSER_CACHE": {"OLLAMA_MULTIUSER_CACHE", MultiUserCache(), "Optimize prompt caching for multi-user scenarios"},
|
"OLLAMA_MULTIUSER_CACHE": {"OLLAMA_MULTIUSER_CACHE", MultiUserCache(), "Optimize prompt caching for multi-user scenarios"},
|
||||||
"OLLAMA_CONTEXT_LENGTH": {"OLLAMA_CONTEXT_LENGTH", ContextLength(), "Context length to use unless otherwise specified (default: 4096)"},
|
"OLLAMA_CONTEXT_LENGTH": {"OLLAMA_CONTEXT_LENGTH", ContextLength(), "Context length to use unless otherwise specified (default: 4096)"},
|
||||||
"OLLAMA_NEW_ENGINE": {"OLLAMA_NEW_ENGINE", NewEngine(true), "Enable the new Ollama engine"},
|
"OLLAMA_NEW_ENGINE": {"OLLAMA_NEW_ENGINE", NewEngine(), "Enable the new Ollama engine"},
|
||||||
"OLLAMA_REMOTES": {"OLLAMA_REMOTES", Remotes(), "Allowed hosts for remote models (default \"ollama.com\")"},
|
"OLLAMA_REMOTES": {"OLLAMA_REMOTES", Remotes(), "Allowed hosts for remote models (default \"ollama.com\")"},
|
||||||
|
|
||||||
// Informational
|
// Informational
|
||||||
|
|||||||
@@ -143,7 +143,7 @@ func NewLlamaServer(systemInfo ml.SystemInfo, gpus []ml.DeviceInfo, modelPath st
|
|||||||
var llamaModel *llama.Model
|
var llamaModel *llama.Model
|
||||||
var textProcessor model.TextProcessor
|
var textProcessor model.TextProcessor
|
||||||
var err error
|
var err error
|
||||||
if envconfig.NewEngine(true) || f.KV().OllamaEngineRequired() {
|
if envconfig.NewEngine() || f.KV().OllamaEngineRequired() {
|
||||||
if len(projectors) == 0 {
|
if len(projectors) == 0 {
|
||||||
textProcessor, err = model.NewTextProcessor(modelPath)
|
textProcessor, err = model.NewTextProcessor(modelPath)
|
||||||
} else {
|
} else {
|
||||||
|
|||||||
@@ -2,7 +2,6 @@ package gemma3
|
|||||||
|
|
||||||
import (
|
import (
|
||||||
"math"
|
"math"
|
||||||
"slices"
|
|
||||||
|
|
||||||
"github.com/ollama/ollama/fs"
|
"github.com/ollama/ollama/fs"
|
||||||
"github.com/ollama/ollama/kvcache"
|
"github.com/ollama/ollama/kvcache"
|
||||||
@@ -13,11 +12,12 @@ import (
|
|||||||
)
|
)
|
||||||
|
|
||||||
type TextConfig struct {
|
type TextConfig struct {
|
||||||
hiddenSize, numHeads, numKVHeads int
|
hiddenSize, contextLength, numHeads, numKVHeads int
|
||||||
attnKeyLen, attnValLen int
|
attnKeyLen, attnValLen int
|
||||||
eps, ropeScale float32
|
eps, ropeScale float32
|
||||||
ropeLocalBase float32
|
ropeLocalBase float32
|
||||||
largeModelScaling bool
|
largeModelScaling bool
|
||||||
|
slidingWindow uint32
|
||||||
slidingWindowPattern []bool
|
slidingWindowPattern []bool
|
||||||
ropeBase float32
|
ropeBase float32
|
||||||
ropeType string
|
ropeType string
|
||||||
@@ -55,6 +55,9 @@ type TextModel struct {
|
|||||||
|
|
||||||
const (
|
const (
|
||||||
gemmaGlobalCacheCount = 6
|
gemmaGlobalCacheCount = 6
|
||||||
|
gemma1BLayerCount = 26
|
||||||
|
gemma4BLayerCount = 34
|
||||||
|
gemma12BLayerCount = 48
|
||||||
gemma27BLayerCount = 62
|
gemma27BLayerCount = 62
|
||||||
)
|
)
|
||||||
|
|
||||||
@@ -70,6 +73,7 @@ func newTextModel(c fs.Config) *TextModel {
|
|||||||
Layers: make([]TextLayer, numBlocks),
|
Layers: make([]TextLayer, numBlocks),
|
||||||
TextConfig: &TextConfig{
|
TextConfig: &TextConfig{
|
||||||
hiddenSize: int(c.Uint("embedding_length")),
|
hiddenSize: int(c.Uint("embedding_length")),
|
||||||
|
contextLength: int(c.Uint("context_length")),
|
||||||
numHeads: int(c.Uint("attention.head_count")),
|
numHeads: int(c.Uint("attention.head_count")),
|
||||||
numKVHeads: int(c.Uint("attention.head_count_kv")),
|
numKVHeads: int(c.Uint("attention.head_count_kv")),
|
||||||
attnKeyLen: int(c.Uint("attention.key_length", 256)),
|
attnKeyLen: int(c.Uint("attention.key_length", 256)),
|
||||||
@@ -77,28 +81,32 @@ func newTextModel(c fs.Config) *TextModel {
|
|||||||
eps: c.Float("attention.layer_norm_rms_epsilon", 1e-06),
|
eps: c.Float("attention.layer_norm_rms_epsilon", 1e-06),
|
||||||
ropeLocalBase: c.Float("rope.local.freq_base", 10000.0),
|
ropeLocalBase: c.Float("rope.local.freq_base", 10000.0),
|
||||||
ropeBase: c.Float("rope.freq_base", 1000000.0),
|
ropeBase: c.Float("rope.freq_base", 1000000.0),
|
||||||
|
slidingWindow: c.Uint("attention.sliding_window"),
|
||||||
slidingWindowPattern: c.Bools("attention.sliding_window_pattern"),
|
slidingWindowPattern: c.Bools("attention.sliding_window_pattern"),
|
||||||
ropeType: c.String("rope.scaling.type"),
|
ropeType: c.String("rope.scaling.type"),
|
||||||
ropeOriginalContext: int(c.Uint("rope.scaling.original_context_length")),
|
ropeOriginalContext: int(c.Uint("rope.scaling.original_context_length")),
|
||||||
ropeExtrapolation: c.Float("rope.scaling.extrapolation_factor", 1.0),
|
ropeExtrapolation: c.Float("rope.scaling.extrapolation_factor", 1.0),
|
||||||
ropeBetaFast: c.Float("rope.scaling.beta_fast", 64.0),
|
ropeBetaFast: c.Float("rope.scaling.beta_fast", 64.0),
|
||||||
ropeBetaSlow: c.Float("rope.scaling.beta_slow", 1.0),
|
ropeBetaSlow: c.Float("rope.scaling.beta_slow", 1.0),
|
||||||
ropeScale: c.Float("rope.scaling.factor", 8.0),
|
ropeScale: c.Float("rope.scaling.factor", 1.0),
|
||||||
finalLogitSoftcap: c.Float("final_logit_softcapping", 0.0),
|
finalLogitSoftcap: c.Float("final_logit_softcapping", 0.0),
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
|
|
||||||
// Google's Gemma 3 release with sliding window attention does
|
// Apply corrections for older versions of the Gemma 3 models
|
||||||
// not use final logit softcapping, and so force it to 0.0
|
// by looking at whether they use sliding window attention and
|
||||||
// The QAT weights for Gemma 3 also included an incorrect
|
// based on their layer counts.
|
||||||
// value for the rope scale, so we need to set it to 1.0 here.
|
if m.TextConfig.slidingWindow < uint32(m.TextConfig.contextLength) {
|
||||||
// TODO (jmorganca): this should ideally be set to 0.0 in the
|
switch numBlocks {
|
||||||
// model configuration instead of here, as future versions of
|
case gemma1BLayerCount:
|
||||||
// models may include both sliding window attention and final
|
// The 1B model has final logit softcapping set to 30.0
|
||||||
// logit softcapping.
|
// but it should be 0.0
|
||||||
if slices.Contains(m.TextConfig.slidingWindowPattern, true) {
|
|
||||||
m.TextConfig.finalLogitSoftcap = 0.0
|
m.TextConfig.finalLogitSoftcap = 0.0
|
||||||
m.TextConfig.ropeScale = 1.0
|
case gemma4BLayerCount, gemma12BLayerCount, gemma27BLayerCount:
|
||||||
|
// The 4B, 12B, and 27B models have rope scale unset
|
||||||
|
// but it shuold be set to 8.0
|
||||||
|
m.TextConfig.ropeScale = 8.0
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
if numBlocks == gemma27BLayerCount {
|
if numBlocks == gemma27BLayerCount {
|
||||||
|
|||||||
292
model/parsers/deepseek.go
Normal file
292
model/parsers/deepseek.go
Normal file
@@ -0,0 +1,292 @@
|
|||||||
|
package parsers
|
||||||
|
|
||||||
|
import (
|
||||||
|
"encoding/json"
|
||||||
|
"errors"
|
||||||
|
"log/slog"
|
||||||
|
"strings"
|
||||||
|
"unicode"
|
||||||
|
|
||||||
|
"github.com/ollama/ollama/api"
|
||||||
|
)
|
||||||
|
|
||||||
|
type DeepSeekParserState int
|
||||||
|
|
||||||
|
const (
|
||||||
|
DeepSeekCollectingThinking DeepSeekParserState = iota
|
||||||
|
DeepSeekCollectingContent
|
||||||
|
DeepSeekCollectingToolCalls
|
||||||
|
DeepSeekCollectingToolOutput
|
||||||
|
)
|
||||||
|
|
||||||
|
const (
|
||||||
|
deepseekThinkingCloseTag = "</think>"
|
||||||
|
deepseekToolCallsBeginTag = "<|tool▁calls▁begin|>"
|
||||||
|
deepseekToolCallsEndTag = "<|tool▁calls▁end|>"
|
||||||
|
deepseekToolCallBeginTag = "<|tool▁call▁begin|>"
|
||||||
|
deepseekToolCallEndTag = "<|tool▁call▁end|>"
|
||||||
|
deepseekToolSepTag = "<|tool▁sep|>"
|
||||||
|
deepseekToolOutputBeginTag = "<|tool▁output▁begin|>"
|
||||||
|
deepseekToolOutputEndTag = "<|tool▁output▁end|>"
|
||||||
|
)
|
||||||
|
|
||||||
|
type DeepSeekParser struct {
|
||||||
|
state DeepSeekParserState
|
||||||
|
buffer strings.Builder
|
||||||
|
hasThinkingSupport bool
|
||||||
|
}
|
||||||
|
|
||||||
|
func (p *DeepSeekParser) HasToolSupport() bool {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
|
||||||
|
func (p *DeepSeekParser) HasThinkingSupport() bool {
|
||||||
|
return p.hasThinkingSupport
|
||||||
|
}
|
||||||
|
|
||||||
|
func (p *DeepSeekParser) setInitialState(lastMessage *api.Message, tools []api.Tool, thinkValue *api.ThinkValue) {
|
||||||
|
prefill := lastMessage != nil && lastMessage.Role == "assistant"
|
||||||
|
|
||||||
|
// Check both model capability AND request preference
|
||||||
|
thinkingEnabled := p.HasThinkingSupport() && (thinkValue == nil || thinkValue.Bool())
|
||||||
|
|
||||||
|
if !thinkingEnabled {
|
||||||
|
p.state = DeepSeekCollectingContent
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
if prefill && lastMessage.Content != "" {
|
||||||
|
p.state = DeepSeekCollectingContent
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
p.state = DeepSeekCollectingThinking
|
||||||
|
}
|
||||||
|
|
||||||
|
func (p *DeepSeekParser) Init(tools []api.Tool, lastMessage *api.Message, thinkValue *api.ThinkValue) []api.Tool {
|
||||||
|
p.setInitialState(lastMessage, tools, thinkValue)
|
||||||
|
return tools
|
||||||
|
}
|
||||||
|
|
||||||
|
type deepseekEvent interface {
|
||||||
|
isDeepSeekEvent()
|
||||||
|
}
|
||||||
|
|
||||||
|
type deepseekEventThinkingContent struct {
|
||||||
|
content string
|
||||||
|
}
|
||||||
|
|
||||||
|
type deepseekEventContent struct {
|
||||||
|
content string
|
||||||
|
}
|
||||||
|
|
||||||
|
type deepseekEventToolCall struct {
|
||||||
|
toolCall api.ToolCall
|
||||||
|
}
|
||||||
|
|
||||||
|
func (deepseekEventThinkingContent) isDeepSeekEvent() {}
|
||||||
|
func (deepseekEventContent) isDeepSeekEvent() {}
|
||||||
|
func (deepseekEventToolCall) isDeepSeekEvent() {}
|
||||||
|
|
||||||
|
func (p *DeepSeekParser) Add(s string, done bool) (content string, thinking string, calls []api.ToolCall, err error) {
|
||||||
|
p.buffer.WriteString(s)
|
||||||
|
events := p.parseEvents()
|
||||||
|
|
||||||
|
var toolCalls []api.ToolCall
|
||||||
|
var contentSb strings.Builder
|
||||||
|
var thinkingSb strings.Builder
|
||||||
|
for _, event := range events {
|
||||||
|
switch event := event.(type) {
|
||||||
|
case deepseekEventToolCall:
|
||||||
|
toolCalls = append(toolCalls, event.toolCall)
|
||||||
|
case deepseekEventThinkingContent:
|
||||||
|
thinkingSb.WriteString(event.content)
|
||||||
|
case deepseekEventContent:
|
||||||
|
contentSb.WriteString(event.content)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return contentSb.String(), thinkingSb.String(), toolCalls, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (p *DeepSeekParser) parseEvents() []deepseekEvent {
|
||||||
|
var all []deepseekEvent
|
||||||
|
|
||||||
|
keepLooping := true
|
||||||
|
for keepLooping {
|
||||||
|
var events []deepseekEvent
|
||||||
|
events, keepLooping = p.eat()
|
||||||
|
if len(events) > 0 {
|
||||||
|
all = append(all, events...)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return all
|
||||||
|
}
|
||||||
|
|
||||||
|
func (p *DeepSeekParser) eat() ([]deepseekEvent, bool) {
|
||||||
|
var events []deepseekEvent
|
||||||
|
bufStr := p.buffer.String()
|
||||||
|
if bufStr == "" {
|
||||||
|
return events, false
|
||||||
|
}
|
||||||
|
|
||||||
|
switch p.state {
|
||||||
|
case DeepSeekCollectingThinking:
|
||||||
|
if strings.Contains(bufStr, deepseekThinkingCloseTag) { // thinking[</think>] -> content
|
||||||
|
split := strings.SplitN(bufStr, deepseekThinkingCloseTag, 2)
|
||||||
|
thinking := split[0]
|
||||||
|
thinking = strings.TrimRightFunc(thinking, unicode.IsSpace)
|
||||||
|
|
||||||
|
remaining := split[1]
|
||||||
|
remaining = strings.TrimLeftFunc(remaining, unicode.IsSpace)
|
||||||
|
|
||||||
|
p.buffer.Reset()
|
||||||
|
p.buffer.WriteString(remaining)
|
||||||
|
p.state = DeepSeekCollectingContent
|
||||||
|
|
||||||
|
if len(thinking) > 0 {
|
||||||
|
events = append(events, deepseekEventThinkingContent{content: thinking})
|
||||||
|
}
|
||||||
|
return events, true
|
||||||
|
} else if overlapLen := overlap(bufStr, deepseekThinkingCloseTag); overlapLen > 0 { // partial </think>
|
||||||
|
beforePartialTag := bufStr[:len(bufStr)-overlapLen]
|
||||||
|
trailingLen := trailingWhitespaceLen(beforePartialTag)
|
||||||
|
ambiguousStart := len(beforePartialTag) - trailingLen
|
||||||
|
|
||||||
|
unambiguous := bufStr[:ambiguousStart]
|
||||||
|
ambiguous := bufStr[ambiguousStart:]
|
||||||
|
p.buffer.Reset()
|
||||||
|
p.buffer.WriteString(ambiguous)
|
||||||
|
if len(unambiguous) > 0 {
|
||||||
|
events = append(events, deepseekEventThinkingContent{content: unambiguous})
|
||||||
|
}
|
||||||
|
return events, false
|
||||||
|
} else { // otherwise its thinking content
|
||||||
|
whitespaceLen := trailingWhitespaceLen(bufStr)
|
||||||
|
ambiguousStart := len(bufStr) - whitespaceLen
|
||||||
|
|
||||||
|
unambiguous := bufStr[:ambiguousStart]
|
||||||
|
ambiguous := bufStr[ambiguousStart:]
|
||||||
|
p.buffer.Reset()
|
||||||
|
p.buffer.WriteString(ambiguous)
|
||||||
|
if len(unambiguous) > 0 {
|
||||||
|
events = append(events, deepseekEventThinkingContent{content: unambiguous})
|
||||||
|
}
|
||||||
|
return events, false
|
||||||
|
}
|
||||||
|
|
||||||
|
case DeepSeekCollectingContent:
|
||||||
|
switch {
|
||||||
|
case strings.Contains(bufStr, deepseekToolCallsBeginTag): // content[<|tool▁calls▁begin|>] -> tool calls
|
||||||
|
split := strings.SplitN(bufStr, deepseekToolCallsBeginTag, 2)
|
||||||
|
contentBefore := strings.TrimRightFunc(split[0], unicode.IsSpace)
|
||||||
|
remaining := split[1]
|
||||||
|
|
||||||
|
p.buffer.Reset()
|
||||||
|
p.buffer.WriteString(remaining)
|
||||||
|
p.state = DeepSeekCollectingToolCalls
|
||||||
|
|
||||||
|
if len(contentBefore) > 0 {
|
||||||
|
events = append(events, deepseekEventContent{content: contentBefore})
|
||||||
|
}
|
||||||
|
return events, true
|
||||||
|
case strings.Contains(bufStr, deepseekToolOutputBeginTag): // content[<|tool▁output▁begin|>] -> tool output
|
||||||
|
split := strings.SplitN(bufStr, deepseekToolOutputBeginTag, 2)
|
||||||
|
contentBefore := split[0] // Don't trim whitespace - preserve spaces
|
||||||
|
remaining := split[1]
|
||||||
|
|
||||||
|
p.buffer.Reset()
|
||||||
|
p.buffer.WriteString(remaining)
|
||||||
|
p.state = DeepSeekCollectingToolOutput
|
||||||
|
|
||||||
|
if len(contentBefore) > 0 {
|
||||||
|
events = append(events, deepseekEventContent{content: contentBefore})
|
||||||
|
}
|
||||||
|
return events, true
|
||||||
|
default: // otherwise its content
|
||||||
|
p.buffer.Reset()
|
||||||
|
if len(bufStr) > 0 {
|
||||||
|
events = append(events, deepseekEventContent{content: bufStr})
|
||||||
|
}
|
||||||
|
return events, false
|
||||||
|
}
|
||||||
|
|
||||||
|
case DeepSeekCollectingToolCalls:
|
||||||
|
if idx := strings.Index(bufStr, deepseekToolCallBeginTag); idx != -1 {
|
||||||
|
startIdx := idx + len(deepseekToolCallBeginTag)
|
||||||
|
if endIdx := strings.Index(bufStr[startIdx:], deepseekToolCallEndTag); endIdx != -1 {
|
||||||
|
toolCallContent := bufStr[startIdx : startIdx+endIdx]
|
||||||
|
|
||||||
|
if toolCall, err := p.parseToolCallContent(toolCallContent); err == nil {
|
||||||
|
remaining := bufStr[startIdx+endIdx+len(deepseekToolCallEndTag):]
|
||||||
|
remaining = strings.TrimLeftFunc(remaining, unicode.IsSpace)
|
||||||
|
|
||||||
|
p.buffer.Reset()
|
||||||
|
p.buffer.WriteString(remaining)
|
||||||
|
|
||||||
|
events = append(events, deepseekEventToolCall{toolCall: toolCall})
|
||||||
|
return events, true
|
||||||
|
} else {
|
||||||
|
slog.Warn("deepseek tool call parsing failed", "error", err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if idx := strings.Index(bufStr, deepseekToolCallsEndTag); idx != -1 {
|
||||||
|
remaining := bufStr[idx+len(deepseekToolCallsEndTag):]
|
||||||
|
remaining = strings.TrimLeftFunc(remaining, unicode.IsSpace)
|
||||||
|
|
||||||
|
p.buffer.Reset()
|
||||||
|
p.buffer.WriteString(remaining)
|
||||||
|
p.state = DeepSeekCollectingContent
|
||||||
|
|
||||||
|
return events, true
|
||||||
|
}
|
||||||
|
|
||||||
|
return events, false
|
||||||
|
|
||||||
|
case DeepSeekCollectingToolOutput:
|
||||||
|
if idx := strings.Index(bufStr, deepseekToolOutputEndTag); idx != -1 {
|
||||||
|
toolOutputContent := bufStr[:idx]
|
||||||
|
remaining := bufStr[idx+len(deepseekToolOutputEndTag):]
|
||||||
|
// Don't trim whitespace - preserve spaces after tool output tags
|
||||||
|
|
||||||
|
p.buffer.Reset()
|
||||||
|
p.buffer.WriteString(remaining)
|
||||||
|
p.state = DeepSeekCollectingContent
|
||||||
|
|
||||||
|
if len(toolOutputContent) > 0 {
|
||||||
|
events = append(events, deepseekEventContent{content: toolOutputContent})
|
||||||
|
}
|
||||||
|
return events, true
|
||||||
|
}
|
||||||
|
|
||||||
|
return events, false
|
||||||
|
}
|
||||||
|
|
||||||
|
return events, false
|
||||||
|
}
|
||||||
|
|
||||||
|
func (p *DeepSeekParser) parseToolCallContent(content string) (api.ToolCall, error) {
|
||||||
|
// Expected format: tool_name<|tool▁sep|>{args}
|
||||||
|
parts := strings.SplitN(content, deepseekToolSepTag, 2)
|
||||||
|
if len(parts) < 2 {
|
||||||
|
return api.ToolCall{}, errors.New("invalid format")
|
||||||
|
}
|
||||||
|
|
||||||
|
toolName := strings.TrimSpace(parts[0])
|
||||||
|
argsJSON := strings.TrimSpace(parts[1])
|
||||||
|
|
||||||
|
var args api.ToolCallFunctionArguments
|
||||||
|
if err := json.Unmarshal([]byte(argsJSON), &args); err != nil {
|
||||||
|
return api.ToolCall{}, err
|
||||||
|
}
|
||||||
|
|
||||||
|
return api.ToolCall{
|
||||||
|
Function: api.ToolCallFunction{
|
||||||
|
Name: toolName,
|
||||||
|
Arguments: args,
|
||||||
|
},
|
||||||
|
}, nil
|
||||||
|
}
|
||||||
721
model/parsers/deepseek_test.go
Normal file
721
model/parsers/deepseek_test.go
Normal file
@@ -0,0 +1,721 @@
|
|||||||
|
package parsers
|
||||||
|
|
||||||
|
import (
|
||||||
|
"testing"
|
||||||
|
|
||||||
|
"github.com/google/go-cmp/cmp"
|
||||||
|
|
||||||
|
"github.com/ollama/ollama/api"
|
||||||
|
)
|
||||||
|
|
||||||
|
func TestDeepSeekParser(t *testing.T) {
|
||||||
|
tests := []struct {
|
||||||
|
name string
|
||||||
|
input string
|
||||||
|
expectedContent string
|
||||||
|
expectedThinking string
|
||||||
|
expectedCalls []api.ToolCall
|
||||||
|
hasThinking bool
|
||||||
|
}{
|
||||||
|
{
|
||||||
|
name: "simple_content",
|
||||||
|
input: "Hello, how are you?",
|
||||||
|
expectedContent: "Hello, how are you?",
|
||||||
|
hasThinking: false,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "thinking_content",
|
||||||
|
input: "I need to think about this...</think>The answer is 42.",
|
||||||
|
expectedThinking: "I need to think about this...",
|
||||||
|
expectedContent: "The answer is 42.",
|
||||||
|
hasThinking: true,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "no_thinking_simple",
|
||||||
|
input: "Just a regular response.",
|
||||||
|
expectedContent: "Just a regular response.",
|
||||||
|
hasThinking: false,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "thinking_with_newlines",
|
||||||
|
input: "Let me think:\n- Point 1\n- Point 2</think>\n\nHere's my answer.",
|
||||||
|
expectedThinking: "Let me think:\n- Point 1\n- Point 2",
|
||||||
|
expectedContent: "Here's my answer.",
|
||||||
|
hasThinking: true,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "tool_call_simple",
|
||||||
|
input: "I'll check the weather.<|tool▁calls▁begin|><|tool▁call▁begin|>get_weather<|tool▁sep|>{\"location\":\"Paris\"}<|tool▁call▁end|><|tool▁calls▁end|>",
|
||||||
|
expectedContent: "I'll check the weather.",
|
||||||
|
expectedCalls: []api.ToolCall{
|
||||||
|
{
|
||||||
|
Function: api.ToolCallFunction{
|
||||||
|
Name: "get_weather",
|
||||||
|
Arguments: api.ToolCallFunctionArguments{
|
||||||
|
"location": "Paris",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
hasThinking: false,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "multiple_tool_calls",
|
||||||
|
input: "Getting weather for both cities.<|tool▁calls▁begin|><|tool▁call▁begin|>get_weather<|tool▁sep|>{\"location\":\"Paris\"}<|tool▁call▁end|><|tool▁call▁begin|>get_weather<|tool▁sep|>{\"location\":\"London\"}<|tool▁call▁end|><|tool▁calls▁end|>",
|
||||||
|
expectedContent: "Getting weather for both cities.",
|
||||||
|
expectedCalls: []api.ToolCall{
|
||||||
|
{
|
||||||
|
Function: api.ToolCallFunction{
|
||||||
|
Name: "get_weather",
|
||||||
|
Arguments: api.ToolCallFunctionArguments{
|
||||||
|
"location": "Paris",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Function: api.ToolCallFunction{
|
||||||
|
Name: "get_weather",
|
||||||
|
Arguments: api.ToolCallFunctionArguments{
|
||||||
|
"location": "London",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
hasThinking: false,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "tool_output",
|
||||||
|
input: "Here's the weather: <|tool▁output▁begin|>Temperature: 22°C, Sunny<|tool▁output▁end|> Hope that helps!",
|
||||||
|
expectedContent: "Here's the weather: Temperature: 22°C, Sunny Hope that helps!",
|
||||||
|
hasThinking: false,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "complex_tool_arguments",
|
||||||
|
input: "Processing data.<|tool▁calls▁begin|><|tool▁call▁begin|>process_data<|tool▁sep|>{\"items\":[\"item1\",\"item2\"],\"config\":{\"enabled\":true,\"threshold\":0.95}}<|tool▁call▁end|><|tool▁calls▁end|>",
|
||||||
|
expectedContent: "Processing data.",
|
||||||
|
expectedCalls: []api.ToolCall{
|
||||||
|
{
|
||||||
|
Function: api.ToolCallFunction{
|
||||||
|
Name: "process_data",
|
||||||
|
Arguments: api.ToolCallFunctionArguments{
|
||||||
|
"items": []interface{}{"item1", "item2"},
|
||||||
|
"config": map[string]interface{}{"enabled": true, "threshold": 0.95},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
hasThinking: false,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "thinking_with_tool_call", // technically this can't happen, but the parser can handle it
|
||||||
|
input: "Let me check the weather...</think>I'll get that for you.<|tool▁calls▁begin|><|tool▁call▁begin|>get_weather<|tool▁sep|>{\"location\":\"Paris\"}<|tool▁call▁end|><|tool▁calls▁end|>",
|
||||||
|
expectedThinking: "Let me check the weather...",
|
||||||
|
expectedContent: "I'll get that for you.",
|
||||||
|
expectedCalls: []api.ToolCall{
|
||||||
|
{
|
||||||
|
Function: api.ToolCallFunction{
|
||||||
|
Name: "get_weather",
|
||||||
|
Arguments: api.ToolCallFunctionArguments{
|
||||||
|
"location": "Paris",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
hasThinking: true,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "empty_content",
|
||||||
|
input: "",
|
||||||
|
expectedContent: "",
|
||||||
|
hasThinking: false,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "only_thinking",
|
||||||
|
input: "Just thinking content</think>",
|
||||||
|
expectedThinking: "Just thinking content",
|
||||||
|
expectedContent: "",
|
||||||
|
hasThinking: true,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "multiple_tool_outputs",
|
||||||
|
input: "Results: <|tool▁output▁begin|>Paris: 22°C<|tool▁output▁end|> and <|tool▁output▁begin|>London: 18°C<|tool▁output▁end|>",
|
||||||
|
expectedContent: "Results: Paris: 22°C and London: 18°C",
|
||||||
|
hasThinking: false,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "unicode_content",
|
||||||
|
input: "مرحبا بالعالم! 你好世界! 🌍",
|
||||||
|
expectedContent: "مرحبا بالعالم! 你好世界! 🌍",
|
||||||
|
hasThinking: false,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "emoji_passthrough",
|
||||||
|
input: "Task completed ✅ 🎉",
|
||||||
|
expectedContent: "Task completed ✅ 🎉",
|
||||||
|
hasThinking: false,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "emoji_after_tool_call",
|
||||||
|
input: "I'll help you.<|tool▁calls▁begin|><|tool▁call▁begin|>get_weather<|tool▁sep|>{\"location\":\"Tokyo\"}<|tool▁call▁end|><|tool▁calls▁end|>完成 ✅",
|
||||||
|
expectedContent: "I'll help you.完成 ✅",
|
||||||
|
expectedCalls: []api.ToolCall{
|
||||||
|
{
|
||||||
|
Function: api.ToolCallFunction{
|
||||||
|
Name: "get_weather",
|
||||||
|
Arguments: api.ToolCallFunctionArguments{
|
||||||
|
"location": "Tokyo",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
hasThinking: false,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "newlines_and_whitespace",
|
||||||
|
input: "Line 1\n\nLine 3\t\tTabbed content",
|
||||||
|
expectedContent: "Line 1\n\nLine 3\t\tTabbed content",
|
||||||
|
hasThinking: false,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "thinking_with_unicode",
|
||||||
|
input: "我在思考这个问题...</think>答案是42。",
|
||||||
|
expectedThinking: "我在思考这个问题...",
|
||||||
|
expectedContent: "答案是42。",
|
||||||
|
hasThinking: true,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "tool_call_with_unicode_args",
|
||||||
|
input: "Searching for information.<|tool▁calls▁begin|><|tool▁call▁begin|>search<|tool▁sep|>{\"query\":\"北京天气\",\"language\":\"中文\"}<|tool▁call▁end|><|tool▁calls▁end|>",
|
||||||
|
expectedContent: "Searching for information.",
|
||||||
|
expectedCalls: []api.ToolCall{
|
||||||
|
{
|
||||||
|
Function: api.ToolCallFunction{
|
||||||
|
Name: "search",
|
||||||
|
Arguments: api.ToolCallFunctionArguments{
|
||||||
|
"query": "北京天气",
|
||||||
|
"language": "中文",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
hasThinking: false,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "tool_output_with_unicode",
|
||||||
|
input: "天气信息: <|tool▁output▁begin|>北京: 25°C, 晴天<|tool▁output▁end|> 希望对您有帮助!",
|
||||||
|
expectedContent: "天气信息: 北京: 25°C, 晴天 希望对您有帮助!",
|
||||||
|
hasThinking: false,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "mixed_content_with_special_chars",
|
||||||
|
input: "Price: $100 & tax @ 10% = $110 <|tool▁output▁begin|>Total: $110<|tool▁output▁end|> (final)",
|
||||||
|
expectedContent: "Price: $100 & tax @ 10% = $110 Total: $110 (final)",
|
||||||
|
hasThinking: false,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "tool_call_with_special_chars",
|
||||||
|
input: "Processing data.<|tool▁calls▁begin|><|tool▁call▁begin|>execute_command<|tool▁sep|>{\"command\":\"ls && echo \\\"done\\\"\",\"path\":\"/home/user\"}<|tool▁call▁end|><|tool▁calls▁end|>",
|
||||||
|
expectedContent: "Processing data.",
|
||||||
|
expectedCalls: []api.ToolCall{
|
||||||
|
{
|
||||||
|
Function: api.ToolCallFunction{
|
||||||
|
Name: "execute_command",
|
||||||
|
Arguments: api.ToolCallFunctionArguments{
|
||||||
|
"command": "ls && echo \"done\"",
|
||||||
|
"path": "/home/user",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
hasThinking: false,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "thinking_with_special_chars",
|
||||||
|
input: "Let me calculate: 2+2=4 & 3*3=9...</think>The results are correct!",
|
||||||
|
expectedThinking: "Let me calculate: 2+2=4 & 3*3=9...",
|
||||||
|
expectedContent: "The results are correct!",
|
||||||
|
hasThinking: true,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "empty_tool_call_args",
|
||||||
|
input: "Pinging server.<|tool▁calls▁begin|><|tool▁call▁begin|>ping<|tool▁sep|>{}<|tool▁call▁end|><|tool▁calls▁end|>",
|
||||||
|
expectedContent: "Pinging server.",
|
||||||
|
expectedCalls: []api.ToolCall{
|
||||||
|
{
|
||||||
|
Function: api.ToolCallFunction{
|
||||||
|
Name: "ping",
|
||||||
|
Arguments: api.ToolCallFunctionArguments{},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
hasThinking: false,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "empty_tool_output",
|
||||||
|
input: "Checking status: <|tool▁output▁begin|><|tool▁output▁end|> No output received.",
|
||||||
|
expectedContent: "Checking status: No output received.",
|
||||||
|
hasThinking: false,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, tt := range tests {
|
||||||
|
t.Run(tt.name, func(t *testing.T) {
|
||||||
|
parser := &DeepSeekParser{hasThinkingSupport: tt.hasThinking}
|
||||||
|
parser.Init([]api.Tool{}, nil, &api.ThinkValue{Value: tt.hasThinking})
|
||||||
|
|
||||||
|
content, thinking, calls, err := parser.Add(tt.input, true)
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("Add() error = %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
if diff := cmp.Diff(tt.expectedContent, content); diff != "" {
|
||||||
|
t.Errorf("Content mismatch (-want +got):\n%s", diff)
|
||||||
|
}
|
||||||
|
|
||||||
|
if diff := cmp.Diff(tt.expectedThinking, thinking); diff != "" {
|
||||||
|
t.Errorf("Thinking mismatch (-want +got):\n%s", diff)
|
||||||
|
}
|
||||||
|
|
||||||
|
if diff := cmp.Diff(tt.expectedCalls, calls); diff != "" {
|
||||||
|
t.Errorf("Tool calls mismatch (-want +got):\n%s", diff)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// TestDeepSeekParser_Streaming verifies that the parser produces the same
// aggregate content, thinking, and tool calls when input arrives split into
// multiple chunks, including cases where special tags are split across
// chunk boundaries.
func TestDeepSeekParser_Streaming(t *testing.T) {
	tests := []struct {
		name             string
		chunks           []string
		expectedContent  string
		expectedThinking string
		expectedCalls    []api.ToolCall
		hasThinking      bool
	}{
		{
			name:            "streaming_simple_content",
			chunks:          []string{"Hello, ", "how are ", "you?"},
			expectedContent: "Hello, how are you?",
			hasThinking:     false,
		},
		{
			name:             "streaming_thinking",
			chunks:           []string{"I need to ", "think about this", "...</think>", "The answer is 42."},
			expectedThinking: "I need to think about this...",
			expectedContent:  "The answer is 42.",
			hasThinking:      true,
		},
		{
			name:            "streaming_tool_call",
			chunks:          []string{"I'll check weather.", "<|tool▁calls▁begin|>", "<|tool▁call▁begin|>get_weather", "<|tool▁sep|>{\"location\":\"Paris\"}", "<|tool▁call▁end|><|tool▁calls▁end|>"},
			expectedContent: "I'll check weather.",
			expectedCalls: []api.ToolCall{
				{
					Function: api.ToolCallFunction{
						Name: "get_weather",
						Arguments: api.ToolCallFunctionArguments{
							"location": "Paris",
						},
					},
				},
			},
			hasThinking: false,
		},
		{
			// End-of-thinking tag arrives split as "...</" + "think>".
			name:             "streaming_thinking_with_partial_tag",
			chunks:           []string{"Thinking about this", "...</", "think>", "Done thinking."},
			expectedThinking: "Thinking about this...",
			expectedContent:  "Done thinking.",
			hasThinking:      true,
		},
		{
			name:            "streaming_tool_output",
			chunks:          []string{"Weather info: ", "<|tool▁output▁begin|>", "25°C, Sunny", "<|tool▁output▁end|>", " Enjoy!"},
			expectedContent: "Weather info: 25°C, Sunny Enjoy!",
			hasThinking:     false,
		},
		{
			name:            "streaming_with_split_tags",
			chunks:          []string{"Content before ", "<|tool▁calls▁begin|><|tool▁call▁begin|>test", "<|tool▁sep|>{}", "<|tool▁call▁end|><|tool▁calls▁end|>", " after"},
			expectedContent: "Content before after",
			expectedCalls: []api.ToolCall{
				{
					Function: api.ToolCallFunction{
						Name:      "test",
						Arguments: api.ToolCallFunctionArguments{},
					},
				},
			},
			hasThinking: false,
		},
		{
			// "</think>" split mid-tag as "</th" + "ink>".
			name:             "streaming_thinking_with_split_end_tag",
			chunks:           []string{"Thinking content", "</th", "ink>", "Regular content"},
			expectedThinking: "Thinking content",
			expectedContent:  "Regular content",
			hasThinking:      true,
		},
		{
			name:            "streaming_unicode_content",
			chunks:          []string{"مرحبا ", "بالعالم! ", "你好", "世界!"},
			expectedContent: "مرحبا بالعالم! 你好世界!",
			hasThinking:     false,
		},
		{
			name:            "streaming_multiple_tool_outputs",
			chunks:          []string{"Results: ", "<|tool▁output▁begin|>", "Paris: 22°C", "<|tool▁output▁end|>", " and ", "<|tool▁output▁begin|>", "London: 18°C", "<|tool▁output▁end|>"},
			expectedContent: "Results: Paris: 22°C and London: 18°C",
			hasThinking:     false,
		},
		{
			// Tool-call argument JSON split across three chunks.
			name:            "streaming_tool_call_with_split_json",
			chunks:          []string{"Processing.", "<|tool▁calls▁begin|><|tool▁call▁begin|>calc<|tool▁sep|>{\"x\":", "42,\"y\":", "24}<|tool▁call▁end|><|tool▁calls▁end|>"},
			expectedContent: "Processing.",
			expectedCalls: []api.ToolCall{
				{
					Function: api.ToolCallFunction{
						Name: "calc",
						Arguments: api.ToolCallFunctionArguments{
							"x": float64(42),
							"y": float64(24),
						},
					},
				},
			},
			hasThinking: false,
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			parser := &DeepSeekParser{hasThinkingSupport: tt.hasThinking}
			parser.Init([]api.Tool{}, nil, &api.ThinkValue{Value: tt.hasThinking})

			var allContent, allThinking string
			var allCalls []api.ToolCall

			// Feed chunks one at a time; done is true only on the final chunk.
			for i, chunk := range tt.chunks {
				done := i == len(tt.chunks)-1
				content, thinking, calls, err := parser.Add(chunk, done)
				if err != nil {
					t.Fatalf("Add() error = %v", err)
				}

				allContent += content
				allThinking += thinking
				allCalls = append(allCalls, calls...)
			}

			if diff := cmp.Diff(tt.expectedContent, allContent); diff != "" {
				t.Errorf("Content mismatch (-want +got):\n%s", diff)
			}

			if diff := cmp.Diff(tt.expectedThinking, allThinking); diff != "" {
				t.Errorf("Thinking mismatch (-want +got):\n%s", diff)
			}

			if diff := cmp.Diff(tt.expectedCalls, allCalls); diff != "" {
				t.Errorf("Tool calls mismatch (-want +got):\n%s", diff)
			}
		})
	}
}
|
||||||
|
|
||||||
|
func TestDeepSeekParser_HasThinkingSupport(t *testing.T) {
|
||||||
|
tests := []struct {
|
||||||
|
name string
|
||||||
|
hasThinking bool
|
||||||
|
expectedSupport bool
|
||||||
|
}{
|
||||||
|
{
|
||||||
|
name: "thinking_enabled",
|
||||||
|
hasThinking: true,
|
||||||
|
expectedSupport: true,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "thinking_disabled",
|
||||||
|
hasThinking: false,
|
||||||
|
expectedSupport: false,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, tt := range tests {
|
||||||
|
t.Run(tt.name, func(t *testing.T) {
|
||||||
|
parser := &DeepSeekParser{hasThinkingSupport: tt.hasThinking}
|
||||||
|
if got := parser.HasThinkingSupport(); got != tt.expectedSupport {
|
||||||
|
t.Errorf("HasThinkingSupport() = %v, want %v", got, tt.expectedSupport)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestDeepSeekParser_HasToolSupport(t *testing.T) {
|
||||||
|
parser := &DeepSeekParser{}
|
||||||
|
if !parser.HasToolSupport() {
|
||||||
|
t.Error("HasToolSupport() should return true")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestDeepSeekParser_Init(t *testing.T) {
|
||||||
|
parser := &DeepSeekParser{hasThinkingSupport: true}
|
||||||
|
tools := []api.Tool{
|
||||||
|
{
|
||||||
|
Type: "function",
|
||||||
|
Function: api.ToolFunction{
|
||||||
|
Name: "test_tool",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
returnedTools := parser.Init(tools, nil, &api.ThinkValue{Value: true})
|
||||||
|
|
||||||
|
if diff := cmp.Diff(tools, returnedTools); diff != "" {
|
||||||
|
t.Errorf("Init() returned tools mismatch (-want +got):\n%s", diff)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Test initial state is set to thinking when enabled
|
||||||
|
if parser.state != DeepSeekCollectingThinking {
|
||||||
|
t.Errorf("Expected initial state to be DeepSeekCollectingThinking, got %v", parser.state)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// TestDeepSeekParser_parseToolCallContent exercises parsing of a single raw
// tool-call payload of the form "<name><|tool▁sep|><json-arguments>",
// covering valid payloads, unicode names, escaped strings, and malformed
// inputs that must produce an error.
func TestDeepSeekParser_parseToolCallContent(t *testing.T) {
	tests := []struct {
		name        string
		content     string
		expected    api.ToolCall
		expectError bool
	}{
		{
			name:    "valid_tool_call",
			content: "get_weather<|tool▁sep|>{\"location\":\"Paris\"}",
			expected: api.ToolCall{
				Function: api.ToolCallFunction{
					Name: "get_weather",
					Arguments: api.ToolCallFunctionArguments{
						"location": "Paris",
					},
				},
			},
		},
		{
			name:    "complex_arguments",
			content: "process_data<|tool▁sep|>{\"items\":[\"a\",\"b\"],\"config\":{\"enabled\":true}}",
			expected: api.ToolCall{
				Function: api.ToolCallFunction{
					Name: "process_data",
					Arguments: api.ToolCallFunctionArguments{
						"items":  []interface{}{"a", "b"},
						"config": map[string]interface{}{"enabled": true},
					},
				},
			},
		},
		{
			name:    "empty_arguments",
			content: "ping<|tool▁sep|>{}",
			expected: api.ToolCall{
				Function: api.ToolCallFunction{
					Name:      "ping",
					Arguments: api.ToolCallFunctionArguments{},
				},
			},
		},
		{
			name:    "unicode_in_tool_name",
			content: "获取天气<|tool▁sep|>{\"城市\":\"北京\"}",
			expected: api.ToolCall{
				Function: api.ToolCallFunction{
					Name: "获取天气",
					Arguments: api.ToolCallFunctionArguments{
						"城市": "北京",
					},
				},
			},
		},
		{
			name:    "special_chars_in_arguments",
			content: "execute<|tool▁sep|>{\"command\":\"ls && echo \\\"done\\\"\",\"path\":\"/home/user\"}",
			expected: api.ToolCall{
				Function: api.ToolCallFunction{
					Name: "execute",
					Arguments: api.ToolCallFunctionArguments{
						"command": "ls && echo \"done\"",
						"path":    "/home/user",
					},
				},
			},
		},
		{
			name:    "numeric_arguments",
			content: "calculate<|tool▁sep|>{\"x\":3.14,\"y\":42,\"enabled\":true}",
			expected: api.ToolCall{
				Function: api.ToolCallFunction{
					Name: "calculate",
					// JSON numbers decode as float64.
					Arguments: api.ToolCallFunctionArguments{
						"x":       3.14,
						"y":       float64(42),
						"enabled": true,
					},
				},
			},
		},
		{
			name:        "invalid_format_no_separator",
			content:     "get_weather{\"location\":\"Paris\"}",
			expectError: true,
		},
		{
			name:        "invalid_json",
			content:     "get_weather<|tool▁sep|>{invalid json}",
			expectError: true,
		},
		{
			name:        "empty_tool_name",
			content:     "<|tool▁sep|>{\"arg\":\"value\"}",
			expectError: false, // This should work, just empty name
			expected: api.ToolCall{
				Function: api.ToolCallFunction{
					Name: "",
					Arguments: api.ToolCallFunctionArguments{
						"arg": "value",
					},
				},
			},
		},
		{
			name:        "missing_json_part",
			content:     "tool_name<|tool▁sep|>",
			expectError: true,
		},
	}

	parser := &DeepSeekParser{}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			result, err := parser.parseToolCallContent(tt.content)

			if tt.expectError {
				if err == nil {
					t.Error("Expected error but got none")
				}
				return
			}

			if err != nil {
				t.Fatalf("Unexpected error: %v", err)
			}

			if diff := cmp.Diff(tt.expected, result); diff != "" {
				t.Errorf("parseToolCallContent() mismatch (-want +got):\n%s", diff)
			}
		})
	}
}
|
||||||
|
|
||||||
|
// TestDeepSeekParser_EdgeCases covers awkward inputs: nested or repeated
// </think> tags, malformed tool calls (which are dropped), partial tags at
// end-of-input, and whitespace/newline handling. Tool calls returned by
// Add are intentionally ignored here; only content and thinking are checked.
func TestDeepSeekParser_EdgeCases(t *testing.T) {
	tests := []struct {
		name             string
		input            string
		expectedContent  string
		expectedThinking string
		hasThinking      bool
	}{
		{
			// Only the first </think> ends thinking; later ones are literal.
			name:             "nested_think_tags_in_thinking",
			input:            "Outer thinking <think>inner</think> content</think>Final content",
			expectedThinking: "Outer thinking <think>inner",
			expectedContent:  "content</think>Final content",
			hasThinking:      true,
		},
		{
			name:             "multiple_think_close_tags",
			input:            "First thought</think>Second thought</think>Final content",
			expectedThinking: "First thought",
			expectedContent:  "Second thought</think>Final content",
			hasThinking:      true,
		},
		{
			name:             "empty_thinking_content",
			input:            "</think>Just content",
			expectedThinking: "",
			expectedContent:  "Just content",
			hasThinking:      true,
		},
		{
			// With thinking disabled, </think> passes through as content.
			name:            "thinking_disabled_with_think_tags",
			input:           "Some content</think>More content",
			expectedContent: "Some content</think>More content",
			hasThinking:     false,
		},
		{
			// Malformed tool calls are dropped rather than surfaced as content.
			name:            "malformed_tool_call_missing_sep",
			input:           "Testing.<|tool▁calls▁begin|><|tool▁call▁begin|>bad_tool{\"arg\":\"value\"}<|tool▁call▁end|><|tool▁calls▁end|>",
			expectedContent: "Testing.",
			hasThinking:     false,
		},
		{
			name:            "malformed_tool_call_invalid_json",
			input:           "Testing.<|tool▁calls▁begin|><|tool▁call▁begin|>bad_tool<|tool▁sep|>{invalid json}<|tool▁call▁end|><|tool▁calls▁end|>",
			expectedContent: "Testing.",
			hasThinking:     false,
		},
		{
			// A dangling partial tag at end-of-input is emitted as-is on done.
			name:            "partial_tool_tag_at_end",
			input:           "Content with partial <|tool▁calls▁",
			expectedContent: "Content with partial <|tool▁calls▁",
			hasThinking:     false,
		},
		{
			name:            "partial_think_tag_at_end",
			input:           "Thinking content</th",
			expectedContent: "Thinking content</th",
			hasThinking:     false,
		},
		{
			// With thinking enabled, a trailing partial </think> is discarded.
			name:             "partial_think_tag_at_end_with_thinking",
			input:            "Thinking content</th",
			expectedThinking: "Thinking content",
			expectedContent:  "",
			hasThinking:      true,
		},
		{
			name:            "whitespace_only_content",
			input:           " \n\t ",
			expectedContent: " \n\t ",
			hasThinking:     false,
		},
		{
			name:            "tool_output_with_newlines",
			input:           "Output:\n<|tool▁output▁begin|>Line 1\nLine 2\nLine 3<|tool▁output▁end|>\nDone.",
			expectedContent: "Output:\nLine 1\nLine 2\nLine 3\nDone.",
			hasThinking:     false,
		},
		{
			// Content after a completed tool-calls section is not emitted.
			name:            "consecutive_tool_calls",
			input:           "First.<|tool▁calls▁begin|><|tool▁call▁begin|>tool1<|tool▁sep|>{}<|tool▁call▁end|><|tool▁calls▁end|>Second.<|tool▁calls▁begin|><|tool▁call▁begin|>tool2<|tool▁sep|>{}<|tool▁call▁end|><|tool▁calls▁end|>",
			expectedContent: "First.",
			hasThinking:     false,
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			parser := &DeepSeekParser{hasThinkingSupport: tt.hasThinking}
			parser.Init([]api.Tool{}, nil, &api.ThinkValue{Value: tt.hasThinking})

			content, thinking, _, err := parser.Add(tt.input, true)
			if err != nil {
				t.Fatalf("Add() error = %v", err)
			}

			if diff := cmp.Diff(tt.expectedContent, content); diff != "" {
				t.Errorf("Content mismatch (-want +got):\n%s", diff)
			}

			if diff := cmp.Diff(tt.expectedThinking, thinking); diff != "" {
				t.Errorf("Thinking mismatch (-want +got):\n%s", diff)
			}
		})
	}
}
|
||||||
@@ -58,6 +58,8 @@ func ParserForName(name string) Parser {
|
|||||||
return harmony.NewHarmonyMessageHandler()
|
return harmony.NewHarmonyMessageHandler()
|
||||||
case "cogito":
|
case "cogito":
|
||||||
return &CogitoParser{}
|
return &CogitoParser{}
|
||||||
|
case "deepseek":
|
||||||
|
return &DeepSeekParser{hasThinkingSupport: true}
|
||||||
case "olmo3":
|
case "olmo3":
|
||||||
return &Olmo3Parser{}
|
return &Olmo3Parser{}
|
||||||
case "olmo3-think":
|
case "olmo3-think":
|
||||||
|
|||||||
@@ -11,11 +11,14 @@ import (
|
|||||||
|
|
||||||
const (
|
const (
|
||||||
olmo3DefaultSystemMessage = "You are a helpful function-calling AI assistant. "
|
olmo3DefaultSystemMessage = "You are a helpful function-calling AI assistant. "
|
||||||
|
olmo31DefaultSystemMessage = "You are Olmo, a helpful AI assistant built by Ai2. Your date cutoff is December 2024, and your model weights are available at https://huggingface.co/allenai. "
|
||||||
olmo3NoFunctionsMessage = "You do not currently have access to any functions. "
|
olmo3NoFunctionsMessage = "You do not currently have access to any functions. "
|
||||||
olmo3WithFunctionsMessage = "You are provided with function signatures within <functions></functions> XML tags. You may call one or more functions to assist with the user query. Output any function calls within <function_calls></function_calls> XML tags. Do not make assumptions about what values to plug into functions."
|
olmo3WithFunctionsMessage = "You are provided with function signatures within <functions></functions> XML tags. You may call one or more functions to assist with the user query. Output any function calls within <function_calls></function_calls> XML tags. Do not make assumptions about what values to plug into functions."
|
||||||
)
|
)
|
||||||
|
|
||||||
type Olmo3Renderer struct{}
|
type Olmo3Renderer struct {
|
||||||
|
UseExtendedSystemMessage bool
|
||||||
|
}
|
||||||
|
|
||||||
func (r *Olmo3Renderer) Render(messages []api.Message, tools []api.Tool, _ *api.ThinkValue) (string, error) {
|
func (r *Olmo3Renderer) Render(messages []api.Message, tools []api.Tool, _ *api.ThinkValue) (string, error) {
|
||||||
var sb strings.Builder
|
var sb strings.Builder
|
||||||
@@ -51,7 +54,11 @@ func (r *Olmo3Renderer) Render(messages []api.Message, tools []api.Tool, _ *api.
|
|||||||
} else {
|
} else {
|
||||||
// Default system message - single newline after "system"
|
// Default system message - single newline after "system"
|
||||||
sb.WriteString("<|im_start|>system\n")
|
sb.WriteString("<|im_start|>system\n")
|
||||||
|
if r.UseExtendedSystemMessage {
|
||||||
|
sb.WriteString(olmo31DefaultSystemMessage)
|
||||||
|
} else {
|
||||||
sb.WriteString(olmo3DefaultSystemMessage)
|
sb.WriteString(olmo3DefaultSystemMessage)
|
||||||
|
}
|
||||||
|
|
||||||
if len(tools) > 0 {
|
if len(tools) > 0 {
|
||||||
functionsJSON, err := marshalWithSpaces(tools)
|
functionsJSON, err := marshalWithSpaces(tools)
|
||||||
@@ -140,7 +147,7 @@ func (r *Olmo3Renderer) Render(messages []api.Message, tools []api.Tool, _ *api.
|
|||||||
}
|
}
|
||||||
|
|
||||||
if needsGenerationPrompt {
|
if needsGenerationPrompt {
|
||||||
sb.WriteString("<|im_start|>assistant\n\n")
|
sb.WriteString("<|im_start|>assistant\n")
|
||||||
}
|
}
|
||||||
|
|
||||||
return sb.String(), nil
|
return sb.String(), nil
|
||||||
|
|||||||
@@ -24,7 +24,7 @@ func TestOlmo3Renderer(t *testing.T) {
|
|||||||
"You are a helpful function-calling AI assistant. You do not currently have access to any functions. <functions></functions><|im_end|>\n" +
|
"You are a helpful function-calling AI assistant. You do not currently have access to any functions. <functions></functions><|im_end|>\n" +
|
||||||
"<|im_start|>user\n" +
|
"<|im_start|>user\n" +
|
||||||
"Hello!<|im_end|>\n" +
|
"Hello!<|im_end|>\n" +
|
||||||
"<|im_start|>assistant\n\n",
|
"<|im_start|>assistant\n",
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
name: "with system message no tools",
|
name: "with system message no tools",
|
||||||
@@ -36,7 +36,7 @@ func TestOlmo3Renderer(t *testing.T) {
|
|||||||
"You are a helpful assistant.<|im_end|>\n" +
|
"You are a helpful assistant.<|im_end|>\n" +
|
||||||
"<|im_start|>user\n" +
|
"<|im_start|>user\n" +
|
||||||
"Hello!<|im_end|>\n" +
|
"Hello!<|im_end|>\n" +
|
||||||
"<|im_start|>assistant\n\n",
|
"<|im_start|>assistant\n",
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
name: "with system message and tools",
|
name: "with system message and tools",
|
||||||
@@ -64,7 +64,7 @@ func TestOlmo3Renderer(t *testing.T) {
|
|||||||
`You are a helpful assistant.<functions>[{"type": "function", "function": {"name": "get_weather", "description": "Get the current weather", "parameters": {"type": "object", "required": ["location"], "properties": {"location": {"type": "string", "description": "The city"}}}}}]</functions><|im_end|>` + "\n" +
|
`You are a helpful assistant.<functions>[{"type": "function", "function": {"name": "get_weather", "description": "Get the current weather", "parameters": {"type": "object", "required": ["location"], "properties": {"location": {"type": "string", "description": "The city"}}}}}]</functions><|im_end|>` + "\n" +
|
||||||
"<|im_start|>user\n" +
|
"<|im_start|>user\n" +
|
||||||
"What is the weather?<|im_end|>\n" +
|
"What is the weather?<|im_end|>\n" +
|
||||||
"<|im_start|>assistant\n\n",
|
"<|im_start|>assistant\n",
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
name: "default system with tools - includes function instruction",
|
name: "default system with tools - includes function instruction",
|
||||||
@@ -93,7 +93,7 @@ func TestOlmo3Renderer(t *testing.T) {
|
|||||||
`<functions>[{"type": "function", "function": {"name": "get_weather", "description": "Get the current weather", "parameters": {"type": "object", "required": ["location"], "properties": {"location": {"type": "string", "description": "The city"}}}}}]</functions><|im_end|>` + "\n" +
|
`<functions>[{"type": "function", "function": {"name": "get_weather", "description": "Get the current weather", "parameters": {"type": "object", "required": ["location"], "properties": {"location": {"type": "string", "description": "The city"}}}}}]</functions><|im_end|>` + "\n" +
|
||||||
"<|im_start|>user\n" +
|
"<|im_start|>user\n" +
|
||||||
"What is the weather?<|im_end|>\n" +
|
"What is the weather?<|im_end|>\n" +
|
||||||
"<|im_start|>assistant\n\n",
|
"<|im_start|>assistant\n",
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
name: "assistant with tool calls - function call syntax",
|
name: "assistant with tool calls - function call syntax",
|
||||||
@@ -141,7 +141,7 @@ func TestOlmo3Renderer(t *testing.T) {
|
|||||||
`Let me check the weather.<function_calls>get_weather(location="San Francisco")</function_calls><|im_end|>` + "\n" +
|
`Let me check the weather.<function_calls>get_weather(location="San Francisco")</function_calls><|im_end|>` + "\n" +
|
||||||
"<|im_start|>environment\n" +
|
"<|im_start|>environment\n" +
|
||||||
`{"temperature": 68}<|im_end|>` + "\n" +
|
`{"temperature": 68}<|im_end|>` + "\n" +
|
||||||
"<|im_start|>assistant\n\n",
|
"<|im_start|>assistant\n",
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
name: "multi-turn conversation",
|
name: "multi-turn conversation",
|
||||||
@@ -159,7 +159,7 @@ func TestOlmo3Renderer(t *testing.T) {
|
|||||||
"Hi there!<|im_end|>\n" +
|
"Hi there!<|im_end|>\n" +
|
||||||
"<|im_start|>user\n" +
|
"<|im_start|>user\n" +
|
||||||
"How are you?<|im_end|>\n" +
|
"How are you?<|im_end|>\n" +
|
||||||
"<|im_start|>assistant\n\n",
|
"<|im_start|>assistant\n",
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
name: "parallel tool calls - newline separated",
|
name: "parallel tool calls - newline separated",
|
||||||
@@ -214,7 +214,7 @@ func TestOlmo3Renderer(t *testing.T) {
|
|||||||
`{"temperature": 68}<|im_end|>` + "\n" +
|
`{"temperature": 68}<|im_end|>` + "\n" +
|
||||||
"<|im_start|>environment\n" +
|
"<|im_start|>environment\n" +
|
||||||
`{"temperature": 55}<|im_end|>` + "\n" +
|
`{"temperature": 55}<|im_end|>` + "\n" +
|
||||||
"<|im_start|>assistant\n\n",
|
"<|im_start|>assistant\n",
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
name: "tool call with multiple arguments",
|
name: "tool call with multiple arguments",
|
||||||
@@ -259,7 +259,7 @@ func TestOlmo3Renderer(t *testing.T) {
|
|||||||
"Book a flight<|im_end|>\n" +
|
"Book a flight<|im_end|>\n" +
|
||||||
"<|im_start|>assistant\n" +
|
"<|im_start|>assistant\n" +
|
||||||
`<function_calls>book_flight(from="SFO", to="NYC")</function_calls><|im_end|>` + "\n" +
|
`<function_calls>book_flight(from="SFO", to="NYC")</function_calls><|im_end|>` + "\n" +
|
||||||
"<|im_start|>assistant\n\n",
|
"<|im_start|>assistant\n",
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
name: "assistant prefill - no generation prompt",
|
name: "assistant prefill - no generation prompt",
|
||||||
|
|||||||
@@ -1,31 +1,31 @@
|
|||||||
package renderers
|
package renderers
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"encoding/json"
|
|
||||||
"strings"
|
"strings"
|
||||||
|
|
||||||
"github.com/ollama/ollama/api"
|
"github.com/ollama/ollama/api"
|
||||||
)
|
)
|
||||||
|
|
||||||
|
type Olmo3ThinkVariant int
|
||||||
|
|
||||||
const (
|
const (
|
||||||
olmo3ThinkDefaultSystemMessage = "You are OLMo, a helpful function-calling AI assistant built by Ai2. Your date cutoff is November 2024, and your model weights are available at https://huggingface.co/allenai."
|
// Olmo3Think32B is for allenai/Olmo-3-32B-Think
|
||||||
olmo3ThinkNoFunctionsMessage = " You do not currently have access to any functions."
|
Olmo3Think32B Olmo3ThinkVariant = iota
|
||||||
|
// Olmo31Think is for allenai/Olmo-3-7B-Think and allenai/Olmo-3.1-32B-Think (includes model info)
|
||||||
|
Olmo31Think
|
||||||
)
|
)
|
||||||
|
|
||||||
type Olmo3ThinkRenderer struct{}
|
const (
|
||||||
|
olmo3ThinkFunctionsSuffix = " You do not currently have access to any functions. <functions></functions>"
|
||||||
|
olmo3Think32BSystemMessage = "You are a helpful AI assistant."
|
||||||
|
olmo31ThinkSystemMessage = "You are Olmo, a helpful AI assistant built by Ai2. Your date cutoff is December 2024, and your model weights are available at https://huggingface.co/allenai."
|
||||||
|
)
|
||||||
|
|
||||||
type olmo3ThinkToolCall struct {
|
type Olmo3ThinkRenderer struct {
|
||||||
ID string `json:"id,omitempty"`
|
Variant Olmo3ThinkVariant
|
||||||
Type string `json:"type,omitempty"`
|
|
||||||
Function olmo3ThinkToolCallFunc `json:"function"`
|
|
||||||
}
|
}
|
||||||
|
|
||||||
type olmo3ThinkToolCallFunc struct {
|
func (r *Olmo3ThinkRenderer) Render(messages []api.Message, _ []api.Tool, _ *api.ThinkValue) (string, error) {
|
||||||
Name string `json:"name"`
|
|
||||||
Arguments string `json:"arguments"`
|
|
||||||
}
|
|
||||||
|
|
||||||
func (r *Olmo3ThinkRenderer) Render(messages []api.Message, tools []api.Tool, _ *api.ThinkValue) (string, error) {
|
|
||||||
var sb strings.Builder
|
var sb strings.Builder
|
||||||
|
|
||||||
var systemMessage *api.Message
|
var systemMessage *api.Message
|
||||||
@@ -37,34 +37,31 @@ func (r *Olmo3ThinkRenderer) Render(messages []api.Message, tools []api.Tool, _
|
|||||||
}
|
}
|
||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
|
// Skip tool messages - Think models don't support tools
|
||||||
|
if message.Role == "tool" {
|
||||||
|
continue
|
||||||
|
}
|
||||||
filteredMessages = append(filteredMessages, message)
|
filteredMessages = append(filteredMessages, message)
|
||||||
}
|
}
|
||||||
|
|
||||||
systemContent := olmo3ThinkDefaultSystemMessage
|
|
||||||
if systemMessage != nil {
|
|
||||||
systemContent = systemMessage.Content
|
|
||||||
}
|
|
||||||
|
|
||||||
sb.WriteString("<|im_start|>system\n")
|
sb.WriteString("<|im_start|>system\n")
|
||||||
sb.WriteString(systemContent)
|
|
||||||
|
|
||||||
if len(tools) > 0 {
|
if systemMessage != nil {
|
||||||
functionsJSON, err := marshalWithSpaces(tools)
|
sb.WriteString(systemMessage.Content)
|
||||||
if err != nil {
|
sb.WriteString(olmo3ThinkFunctionsSuffix)
|
||||||
return "", err
|
|
||||||
}
|
|
||||||
sb.WriteString(" <functions>")
|
|
||||||
sb.WriteString(string(functionsJSON))
|
|
||||||
sb.WriteString("</functions>")
|
|
||||||
} else {
|
} else {
|
||||||
sb.WriteString(olmo3ThinkNoFunctionsMessage)
|
// Default system message varies by variant
|
||||||
sb.WriteString(" <functions></functions>")
|
switch r.Variant {
|
||||||
|
case Olmo3Think32B:
|
||||||
|
sb.WriteString(olmo3Think32BSystemMessage)
|
||||||
|
default: // Olmo3Think7B, Olmo31Think use same template - diverges from HF but confirmed difference from team
|
||||||
|
sb.WriteString(olmo31ThinkSystemMessage)
|
||||||
}
|
}
|
||||||
|
}
|
||||||
|
|
||||||
sb.WriteString("<|im_end|>\n")
|
sb.WriteString("<|im_end|>\n")
|
||||||
|
|
||||||
for i, message := range filteredMessages {
|
for _, message := range filteredMessages {
|
||||||
lastMessage := i == len(filteredMessages)-1
|
|
||||||
|
|
||||||
switch message.Role {
|
switch message.Role {
|
||||||
case "user":
|
case "user":
|
||||||
sb.WriteString("<|im_start|>user\n")
|
sb.WriteString("<|im_start|>user\n")
|
||||||
@@ -73,58 +70,15 @@ func (r *Olmo3ThinkRenderer) Render(messages []api.Message, tools []api.Tool, _
|
|||||||
|
|
||||||
case "assistant":
|
case "assistant":
|
||||||
sb.WriteString("<|im_start|>assistant\n")
|
sb.WriteString("<|im_start|>assistant\n")
|
||||||
|
|
||||||
if message.Content != "" {
|
if message.Content != "" {
|
||||||
sb.WriteString(message.Content)
|
sb.WriteString(message.Content)
|
||||||
}
|
}
|
||||||
|
|
||||||
if len(message.ToolCalls) > 0 {
|
|
||||||
toolCalls := make([]olmo3ThinkToolCall, len(message.ToolCalls))
|
|
||||||
for j, tc := range message.ToolCalls {
|
|
||||||
argsJSON, err := json.Marshal(tc.Function.Arguments)
|
|
||||||
if err != nil {
|
|
||||||
return "", err
|
|
||||||
}
|
|
||||||
toolCalls[j] = olmo3ThinkToolCall{
|
|
||||||
ID: tc.ID,
|
|
||||||
Type: "function",
|
|
||||||
Function: olmo3ThinkToolCallFunc{
|
|
||||||
Name: tc.Function.Name,
|
|
||||||
Arguments: string(argsJSON),
|
|
||||||
},
|
|
||||||
}
|
|
||||||
}
|
|
||||||
toolCallsJSON, err := marshalWithSpaces(toolCalls)
|
|
||||||
if err != nil {
|
|
||||||
return "", err
|
|
||||||
}
|
|
||||||
sb.WriteString("<function_calls>")
|
|
||||||
sb.WriteString(string(toolCallsJSON))
|
|
||||||
sb.WriteString("</function_calls>")
|
|
||||||
}
|
|
||||||
|
|
||||||
if !lastMessage {
|
|
||||||
sb.WriteString("<|im_end|>\n")
|
|
||||||
}
|
|
||||||
|
|
||||||
case "tool":
|
|
||||||
sb.WriteString("<|im_start|>environment\n")
|
|
||||||
sb.WriteString(message.Content)
|
|
||||||
sb.WriteString("<|im_end|>\n")
|
sb.WriteString("<|im_end|>\n")
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
needsGenerationPrompt := true
|
// Always add generation prompt with <think> tag for thinking models
|
||||||
if len(filteredMessages) > 0 {
|
|
||||||
lastMsg := filteredMessages[len(filteredMessages)-1]
|
|
||||||
if lastMsg.Role == "assistant" && len(lastMsg.ToolCalls) == 0 && lastMsg.Content != "" {
|
|
||||||
needsGenerationPrompt = false
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if needsGenerationPrompt {
|
|
||||||
sb.WriteString("<|im_start|>assistant\n<think>")
|
sb.WriteString("<|im_start|>assistant\n<think>")
|
||||||
}
|
|
||||||
|
|
||||||
return sb.String(), nil
|
return sb.String(), nil
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -11,24 +11,27 @@ import (
|
|||||||
func TestOlmo3ThinkRenderer(t *testing.T) {
|
func TestOlmo3ThinkRenderer(t *testing.T) {
|
||||||
tests := []struct {
|
tests := []struct {
|
||||||
name string
|
name string
|
||||||
|
variant Olmo3ThinkVariant
|
||||||
msgs []api.Message
|
msgs []api.Message
|
||||||
tools []api.Tool
|
tools []api.Tool
|
||||||
expected string
|
expected string
|
||||||
}{
|
}{
|
||||||
{
|
{
|
||||||
name: "basic without system - adds default system",
|
name: "7b_basic_without_system",
|
||||||
|
variant: Olmo31Think,
|
||||||
msgs: []api.Message{
|
msgs: []api.Message{
|
||||||
{Role: "user", Content: "Hello!"},
|
{Role: "user", Content: "Hello!"},
|
||||||
},
|
},
|
||||||
expected: "<|im_start|>system\n" +
|
expected: "<|im_start|>system\n" +
|
||||||
"You are OLMo, a helpful function-calling AI assistant built by Ai2. Your date cutoff is November 2024, and your model weights are available at https://huggingface.co/allenai. You do not currently have access to any functions. <functions></functions><|im_end|>\n" +
|
"You are Olmo, a helpful AI assistant built by Ai2. Your date cutoff is December 2024, and your model weights are available at https://huggingface.co/allenai.<|im_end|>\n" +
|
||||||
"<|im_start|>user\n" +
|
"<|im_start|>user\n" +
|
||||||
"Hello!<|im_end|>\n" +
|
"Hello!<|im_end|>\n" +
|
||||||
"<|im_start|>assistant\n" +
|
"<|im_start|>assistant\n" +
|
||||||
"<think>",
|
"<think>",
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
name: "with system message no tools",
|
name: "7b_with_custom_system",
|
||||||
|
variant: Olmo31Think,
|
||||||
msgs: []api.Message{
|
msgs: []api.Message{
|
||||||
{Role: "system", Content: "You are a helpful assistant."},
|
{Role: "system", Content: "You are a helpful assistant."},
|
||||||
{Role: "user", Content: "Hello!"},
|
{Role: "user", Content: "Hello!"},
|
||||||
@@ -41,9 +44,9 @@ func TestOlmo3ThinkRenderer(t *testing.T) {
|
|||||||
"<think>",
|
"<think>",
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
name: "with system message and tools",
|
name: "7b_tools_ignored",
|
||||||
|
variant: Olmo31Think,
|
||||||
msgs: []api.Message{
|
msgs: []api.Message{
|
||||||
{Role: "system", Content: "You are a helpful assistant."},
|
|
||||||
{Role: "user", Content: "What is the weather?"},
|
{Role: "user", Content: "What is the weather?"},
|
||||||
},
|
},
|
||||||
tools: []api.Tool{
|
tools: []api.Tool{
|
||||||
@@ -52,27 +55,20 @@ func TestOlmo3ThinkRenderer(t *testing.T) {
|
|||||||
Function: api.ToolFunction{
|
Function: api.ToolFunction{
|
||||||
Name: "get_weather",
|
Name: "get_weather",
|
||||||
Description: "Get the current weather",
|
Description: "Get the current weather",
|
||||||
Parameters: api.ToolFunctionParameters{
|
|
||||||
Type: "object",
|
|
||||||
Required: []string{"location"},
|
|
||||||
Properties: map[string]api.ToolProperty{
|
|
||||||
"location": {Type: api.PropertyType{"string"}, Description: "The city"},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
expected: "<|im_start|>system\n" +
|
expected: "<|im_start|>system\n" +
|
||||||
`You are a helpful assistant. <functions>[{"type": "function", "function": {"name": "get_weather", "description": "Get the current weather", "parameters": {"type": "object", "required": ["location"], "properties": {"location": {"type": "string", "description": "The city"}}}}}]</functions><|im_end|>` + "\n" +
|
"You are Olmo, a helpful AI assistant built by Ai2. Your date cutoff is December 2024, and your model weights are available at https://huggingface.co/allenai.<|im_end|>\n" +
|
||||||
"<|im_start|>user\n" +
|
"<|im_start|>user\n" +
|
||||||
"What is the weather?<|im_end|>\n" +
|
"What is the weather?<|im_end|>\n" +
|
||||||
"<|im_start|>assistant\n" +
|
"<|im_start|>assistant\n" +
|
||||||
"<think>",
|
"<think>",
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
name: "assistant with tool calls",
|
name: "7b_tool_calls_and_tool_messages_ignored",
|
||||||
|
variant: Olmo31Think,
|
||||||
msgs: []api.Message{
|
msgs: []api.Message{
|
||||||
{Role: "system", Content: "You are a helpful assistant."},
|
|
||||||
{Role: "user", Content: "What is the weather in SF?"},
|
{Role: "user", Content: "What is the weather in SF?"},
|
||||||
{
|
{
|
||||||
Role: "assistant",
|
Role: "assistant",
|
||||||
@@ -82,52 +78,32 @@ func TestOlmo3ThinkRenderer(t *testing.T) {
|
|||||||
ID: "call_1",
|
ID: "call_1",
|
||||||
Function: api.ToolCallFunction{
|
Function: api.ToolCallFunction{
|
||||||
Name: "get_weather",
|
Name: "get_weather",
|
||||||
Arguments: map[string]any{
|
Arguments: map[string]any{"location": "San Francisco"},
|
||||||
"location": "San Francisco",
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{Role: "tool", Content: `{"temperature": 68}`, ToolName: "get_weather"},
|
|
||||||
},
|
|
||||||
tools: []api.Tool{
|
|
||||||
{
|
|
||||||
Type: "function",
|
|
||||||
Function: api.ToolFunction{
|
|
||||||
Name: "get_weather",
|
|
||||||
Description: "Get the current weather",
|
|
||||||
Parameters: api.ToolFunctionParameters{
|
|
||||||
Type: "object",
|
|
||||||
Required: []string{"location"},
|
|
||||||
Properties: map[string]api.ToolProperty{
|
|
||||||
"location": {Type: api.PropertyType{"string"}, Description: "The city"},
|
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
|
{Role: "tool", Content: `{"temperature": 68}`},
|
||||||
},
|
},
|
||||||
expected: "<|im_start|>system\n" +
|
expected: "<|im_start|>system\n" +
|
||||||
`You are a helpful assistant. <functions>[{"type": "function", "function": {"name": "get_weather", "description": "Get the current weather", "parameters": {"type": "object", "required": ["location"], "properties": {"location": {"type": "string", "description": "The city"}}}}}]</functions><|im_end|>` + "\n" +
|
"You are Olmo, a helpful AI assistant built by Ai2. Your date cutoff is December 2024, and your model weights are available at https://huggingface.co/allenai.<|im_end|>\n" +
|
||||||
"<|im_start|>user\n" +
|
"<|im_start|>user\n" +
|
||||||
"What is the weather in SF?<|im_end|>\n" +
|
"What is the weather in SF?<|im_end|>\n" +
|
||||||
"<|im_start|>assistant\n" +
|
"<|im_start|>assistant\n" +
|
||||||
`Let me check the weather.<function_calls>[{"id": "call_1", "type": "function", "function": {"name": "get_weather", "arguments": "{\"location\":\"San Francisco\"}"}}]</function_calls><|im_end|>` + "\n" +
|
"Let me check the weather.<|im_end|>\n" +
|
||||||
"<|im_start|>environment\n" +
|
|
||||||
`{"temperature": 68}<|im_end|>` + "\n" +
|
|
||||||
"<|im_start|>assistant\n" +
|
"<|im_start|>assistant\n" +
|
||||||
"<think>",
|
"<think>",
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
name: "multi-turn conversation",
|
name: "7b_multi_turn_conversation",
|
||||||
|
variant: Olmo31Think,
|
||||||
msgs: []api.Message{
|
msgs: []api.Message{
|
||||||
{Role: "system", Content: "You are a helpful assistant."},
|
|
||||||
{Role: "user", Content: "Hello"},
|
{Role: "user", Content: "Hello"},
|
||||||
{Role: "assistant", Content: "Hi there!"},
|
{Role: "assistant", Content: "Hi there!"},
|
||||||
{Role: "user", Content: "How are you?"},
|
{Role: "user", Content: "How are you?"},
|
||||||
},
|
},
|
||||||
expected: "<|im_start|>system\n" +
|
expected: "<|im_start|>system\n" +
|
||||||
"You are a helpful assistant. You do not currently have access to any functions. <functions></functions><|im_end|>\n" +
|
"You are Olmo, a helpful AI assistant built by Ai2. Your date cutoff is December 2024, and your model weights are available at https://huggingface.co/allenai.<|im_end|>\n" +
|
||||||
"<|im_start|>user\n" +
|
"<|im_start|>user\n" +
|
||||||
"Hello<|im_end|>\n" +
|
"Hello<|im_end|>\n" +
|
||||||
"<|im_start|>assistant\n" +
|
"<|im_start|>assistant\n" +
|
||||||
@@ -138,73 +114,56 @@ func TestOlmo3ThinkRenderer(t *testing.T) {
|
|||||||
"<think>",
|
"<think>",
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
name: "parallel tool calls",
|
name: "32b_basic_without_system",
|
||||||
|
variant: Olmo3Think32B,
|
||||||
msgs: []api.Message{
|
msgs: []api.Message{
|
||||||
{Role: "user", Content: "Get weather in SF and NYC"},
|
{Role: "user", Content: "Hello!"},
|
||||||
{
|
|
||||||
Role: "assistant",
|
|
||||||
ToolCalls: []api.ToolCall{
|
|
||||||
{
|
|
||||||
ID: "call_1",
|
|
||||||
Function: api.ToolCallFunction{
|
|
||||||
Name: "get_weather",
|
|
||||||
Arguments: map[string]any{"location": "San Francisco"},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
ID: "call_2",
|
|
||||||
Function: api.ToolCallFunction{
|
|
||||||
Name: "get_weather",
|
|
||||||
Arguments: map[string]any{"location": "New York"},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{Role: "tool", Content: `{"temperature": 68}`, ToolName: "get_weather"},
|
|
||||||
{Role: "tool", Content: `{"temperature": 55}`, ToolName: "get_weather"},
|
|
||||||
},
|
|
||||||
tools: []api.Tool{
|
|
||||||
{
|
|
||||||
Type: "function",
|
|
||||||
Function: api.ToolFunction{
|
|
||||||
Name: "get_weather",
|
|
||||||
Parameters: api.ToolFunctionParameters{
|
|
||||||
Type: "object",
|
|
||||||
Properties: map[string]api.ToolProperty{
|
|
||||||
"location": {Type: api.PropertyType{"string"}},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
},
|
||||||
expected: "<|im_start|>system\n" +
|
expected: "<|im_start|>system\n" +
|
||||||
`You are OLMo, a helpful function-calling AI assistant built by Ai2. Your date cutoff is November 2024, and your model weights are available at https://huggingface.co/allenai. <functions>[{"type": "function", "function": {"name": "get_weather", "parameters": {"type": "object", "properties": {"location": {"type": "string"}}}}}]</functions><|im_end|>` + "\n" +
|
"You are a helpful AI assistant.<|im_end|>\n" +
|
||||||
"<|im_start|>user\n" +
|
"<|im_start|>user\n" +
|
||||||
"Get weather in SF and NYC<|im_end|>\n" +
|
"Hello!<|im_end|>\n" +
|
||||||
"<|im_start|>assistant\n" +
|
|
||||||
`<function_calls>[{"id": "call_1", "type": "function", "function": {"name": "get_weather", "arguments": "{\"location\":\"San Francisco\"}"}}, {"id": "call_2", "type": "function", "function": {"name": "get_weather", "arguments": "{\"location\":\"New York\"}"}}]</function_calls><|im_end|>` + "\n" +
|
|
||||||
"<|im_start|>environment\n" +
|
|
||||||
`{"temperature": 68}<|im_end|>` + "\n" +
|
|
||||||
"<|im_start|>environment\n" +
|
|
||||||
`{"temperature": 55}<|im_end|>` + "\n" +
|
|
||||||
"<|im_start|>assistant\n" +
|
"<|im_start|>assistant\n" +
|
||||||
"<think>",
|
"<think>",
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
name: "assistant message only content no tool calls",
|
name: "32b_with_custom_system_gets_suffix",
|
||||||
|
variant: Olmo3Think32B,
|
||||||
msgs: []api.Message{
|
msgs: []api.Message{
|
||||||
{Role: "user", Content: "Tell me a joke"},
|
{Role: "system", Content: "You are a helpful assistant."},
|
||||||
{Role: "assistant", Content: "Why did the chicken cross the road?"},
|
{Role: "user", Content: "Hello!"},
|
||||||
{Role: "user", Content: "I don't know, why?"},
|
|
||||||
},
|
},
|
||||||
expected: "<|im_start|>system\n" +
|
expected: "<|im_start|>system\n" +
|
||||||
"You are OLMo, a helpful function-calling AI assistant built by Ai2. Your date cutoff is November 2024, and your model weights are available at https://huggingface.co/allenai. You do not currently have access to any functions. <functions></functions><|im_end|>\n" +
|
"You are a helpful assistant. You do not currently have access to any functions. <functions></functions><|im_end|>\n" +
|
||||||
"<|im_start|>user\n" +
|
"<|im_start|>user\n" +
|
||||||
"Tell me a joke<|im_end|>\n" +
|
"Hello!<|im_end|>\n" +
|
||||||
"<|im_start|>assistant\n" +
|
"<|im_start|>assistant\n" +
|
||||||
"Why did the chicken cross the road?<|im_end|>\n" +
|
"<think>",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "31_basic_without_system",
|
||||||
|
variant: Olmo31Think,
|
||||||
|
msgs: []api.Message{
|
||||||
|
{Role: "user", Content: "Hello!"},
|
||||||
|
},
|
||||||
|
expected: "<|im_start|>system\n" +
|
||||||
|
"You are Olmo, a helpful AI assistant built by Ai2. Your date cutoff is December 2024, and your model weights are available at https://huggingface.co/allenai.<|im_end|>\n" +
|
||||||
"<|im_start|>user\n" +
|
"<|im_start|>user\n" +
|
||||||
"I don't know, why?<|im_end|>\n" +
|
"Hello!<|im_end|>\n" +
|
||||||
|
"<|im_start|>assistant\n" +
|
||||||
|
"<think>",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "31_with_custom_system_gets_suffix",
|
||||||
|
variant: Olmo31Think,
|
||||||
|
msgs: []api.Message{
|
||||||
|
{Role: "system", Content: "You are a helpful assistant."},
|
||||||
|
{Role: "user", Content: "Hello!"},
|
||||||
|
},
|
||||||
|
expected: "<|im_start|>system\n" +
|
||||||
|
"You are a helpful assistant. You do not currently have access to any functions. <functions></functions><|im_end|>\n" +
|
||||||
|
"<|im_start|>user\n" +
|
||||||
|
"Hello!<|im_end|>\n" +
|
||||||
"<|im_start|>assistant\n" +
|
"<|im_start|>assistant\n" +
|
||||||
"<think>",
|
"<think>",
|
||||||
},
|
},
|
||||||
@@ -212,7 +171,7 @@ func TestOlmo3ThinkRenderer(t *testing.T) {
|
|||||||
|
|
||||||
for _, tt := range tests {
|
for _, tt := range tests {
|
||||||
t.Run(tt.name, func(t *testing.T) {
|
t.Run(tt.name, func(t *testing.T) {
|
||||||
rendered, err := (&Olmo3ThinkRenderer{}).Render(tt.msgs, tt.tools, nil)
|
rendered, err := (&Olmo3ThinkRenderer{Variant: tt.variant}).Render(tt.msgs, tt.tools, nil)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
t.Fatal(err)
|
t.Fatal(err)
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -60,10 +60,18 @@ func rendererForName(name string) Renderer {
|
|||||||
renderer := &CogitoRenderer{isThinking: true}
|
renderer := &CogitoRenderer{isThinking: true}
|
||||||
return renderer
|
return renderer
|
||||||
case "olmo3":
|
case "olmo3":
|
||||||
renderer := &Olmo3Renderer{}
|
renderer := &Olmo3Renderer{UseExtendedSystemMessage: false}
|
||||||
|
return renderer
|
||||||
|
case "olmo3.1":
|
||||||
|
renderer := &Olmo3Renderer{UseExtendedSystemMessage: true}
|
||||||
return renderer
|
return renderer
|
||||||
case "olmo3-think":
|
case "olmo3-think":
|
||||||
renderer := &Olmo3ThinkRenderer{}
|
// Used for Olmo-3-7B-Think and Olmo-3.1-32B-Think (same template)
|
||||||
|
renderer := &Olmo3ThinkRenderer{Variant: Olmo31Think}
|
||||||
|
return renderer
|
||||||
|
case "olmo3-32b-think":
|
||||||
|
// Used for Olmo-3-32B-Think
|
||||||
|
renderer := &Olmo3ThinkRenderer{Variant: Olmo3Think32B}
|
||||||
return renderer
|
return renderer
|
||||||
default:
|
default:
|
||||||
return nil
|
return nil
|
||||||
|
|||||||
Reference in New Issue
Block a user