repo
stringlengths
6
47
file_url
stringlengths
77
269
file_path
stringlengths
5
186
content
stringlengths
0
32.8k
language
stringclasses
1 value
license
stringclasses
7 values
commit_sha
stringlengths
40
40
retrieved_at
stringdate
2026-01-07 08:35:43
2026-01-07 08:55:24
truncated
bool
2 classes
ollama/ollama
https://github.com/ollama/ollama/blob/626af2d80973270c4d59b8df7153ac47ad67ed7b/ml/nn/embedding.go
ml/nn/embedding.go
package nn import "github.com/ollama/ollama/ml" type Embedding struct { Weight ml.Tensor `gguf:"weight"` } func (m *Embedding) Forward(ctx ml.Context, hiddenState ml.Tensor) ml.Tensor { return m.Weight.Rows(ctx, hiddenState) }
go
MIT
626af2d80973270c4d59b8df7153ac47ad67ed7b
2026-01-07T08:35:43.337630Z
false
ollama/ollama
https://github.com/ollama/ollama/blob/626af2d80973270c4d59b8df7153ac47ad67ed7b/ml/nn/rope.go
ml/nn/rope.go
package nn import ( "github.com/ollama/ollama/ml" "github.com/ollama/ollama/ml/nn/rope" ) // fastRoPE is an interface for tensors that support fast rotary positional embedding. type fastRoPE interface { RoPE(ctx ml.Context, positions ml.Tensor, dim int, base, scale float32, options ...func(*rope.Options)) ml.Tensor } // RoPE applies rotary positional embedding to tensor `t`. func RoPE(ctx ml.Context, t, positions ml.Tensor, dim int, base, scale float32, options ...func(*rope.Options)) ml.Tensor { if t, ok := t.(fastRoPE); ok { return t.RoPE(ctx, positions, dim, base, scale, options...) } panic("RoPE not implemented for this tensor type") }
go
MIT
626af2d80973270c4d59b8df7153ac47ad67ed7b
2026-01-07T08:35:43.337630Z
false
ollama/ollama
https://github.com/ollama/ollama/blob/626af2d80973270c4d59b8df7153ac47ad67ed7b/ml/nn/linear.go
ml/nn/linear.go
package nn import "github.com/ollama/ollama/ml" type Linear struct { Weight ml.Tensor `gguf:"weight"` Bias ml.Tensor `gguf:"bias"` } func (m *Linear) Forward(ctx ml.Context, t ml.Tensor) ml.Tensor { t = m.Weight.Mulmat(ctx, t) if m.Bias != nil { t = t.Add(ctx, m.Bias) } return t } type LinearBatch struct { Weight ml.Tensor `gguf:"weight"` Bias ml.Tensor `gguf:"bias"` } func (m *LinearBatch) Forward(ctx ml.Context, t, indices ml.Tensor) ml.Tensor { t = m.Weight.MulmatID(ctx, t, indices) if m.Bias != nil { t = t.AddID(ctx, m.Bias, indices) } return t }
go
MIT
626af2d80973270c4d59b8df7153ac47ad67ed7b
2026-01-07T08:35:43.337630Z
false
ollama/ollama
https://github.com/ollama/ollama/blob/626af2d80973270c4d59b8df7153ac47ad67ed7b/ml/nn/convolution.go
ml/nn/convolution.go
package nn import "github.com/ollama/ollama/ml" type Conv2D struct { Weight ml.Tensor `gguf:"weight"` Bias ml.Tensor `gguf:"bias"` } func (m *Conv2D) Forward(ctx ml.Context, t ml.Tensor, s0, s1, p0, p1, d0, d1 int) ml.Tensor { t = m.Weight.Conv2D(ctx, t, s0, s1, p0, p1, d0, d1) if m.Bias != nil { // Bias shape is (out_channels,) while t shape is (width, height, out_channels, batch) t = t.Add(ctx, m.Bias.Reshape(ctx, 1, 1, -1)) } return t } type Conv3D struct { Weight ml.Tensor `gguf:"weight"` Bias ml.Tensor `gguf:"bias"` } func (m *Conv3D) Forward(ctx ml.Context, t ml.Tensor, c, s0, s1, s2, p0, p1, p2, d0, d1, d2 int) ml.Tensor { t = m.Weight.Conv3D(ctx, t, c, s0, s1, s2, p0, p1, p2, d0, d1, d2) if m.Bias != nil { t = t.Add(ctx, m.Bias) } return t }
go
MIT
626af2d80973270c4d59b8df7153ac47ad67ed7b
2026-01-07T08:35:43.337630Z
false
ollama/ollama
https://github.com/ollama/ollama/blob/626af2d80973270c4d59b8df7153ac47ad67ed7b/ml/nn/normalization.go
ml/nn/normalization.go
package nn import ( "github.com/ollama/ollama/ml" ) type LayerNorm struct { Weight ml.Tensor `gguf:"weight"` Bias ml.Tensor `gguf:"bias"` } func (m *LayerNorm) Forward(ctx ml.Context, t ml.Tensor, eps float32) ml.Tensor { return t.LayerNorm(ctx, m.Weight, m.Bias, eps) } type RMSNorm struct { Weight ml.Tensor `gguf:"weight"` } func (m *RMSNorm) Forward(ctx ml.Context, t ml.Tensor, eps float32) ml.Tensor { return t.RMSNorm(ctx, m.Weight, eps) }
go
MIT
626af2d80973270c4d59b8df7153ac47ad67ed7b
2026-01-07T08:35:43.337630Z
false
ollama/ollama
https://github.com/ollama/ollama/blob/626af2d80973270c4d59b8df7153ac47ad67ed7b/ml/nn/attention.go
ml/nn/attention.go
package nn import ( "fmt" "github.com/ollama/ollama/kvcache" "github.com/ollama/ollama/ml" ) // Attention implements scaled dot-product attention for transformer models: // Attention(Q, K, V) = softmax(QK^T/√d_k)V // // Parameters: // - ctx: Context for tensor operations // - query: Query tensor (Q) with shape [d_k, heads, seq_len_q] // - key: Key tensor (K) with shape [d_k, kv_heads, seq_len_k], can be nil to read from cache only // - value: Value tensor (V) with shape [d_v, kv_heads, seq_len_k], can be nil to read from cache only // - scale: Scaling factor, typically 1/√d_k where d_k is the key dimension // - cache: KV cache to store key/value and get past history, can be nil to only use provided key/value // // Returns: // // Attention output with shape [d_v, heads, seq_len_q] func Attention(ctx ml.Context, query, key, value ml.Tensor, scale float64, cache kvcache.Cache) ml.Tensor { return AttentionWithVMLA(ctx, query, key, value, nil, nil, scale, cache) } func AttentionWithSinks(ctx ml.Context, query, key, value, sinks ml.Tensor, scale float64, cache kvcache.Cache) ml.Tensor { return AttentionWithVMLA(ctx, query, key, value, sinks, nil, scale, cache) } func AttentionWithVMLA(ctx ml.Context, query, key, value, sinks ml.Tensor, vmla ml.Tensor, scale float64, cache kvcache.Cache) ml.Tensor { ctx.Forward(query) if key != nil && value != nil { if query.Dim(0) != key.Dim(0) { panic(fmt.Errorf("d_k in attention operation does not match between query(%v) and key(%v)", query.Dim(0), key.Dim(0))) } if key.Dim(1) != value.Dim(1) { panic(fmt.Errorf("kv_heads in attention operation does not match between key(%v) and value(%v)", key.Dim(1), value.Dim(1))) } if key.Dim(2) != value.Dim(2) { panic(fmt.Errorf("seq_len_k in attention operation does not match between key(%v) and value(%v)", key.Dim(2), value.Dim(2))) } ctx.Forward(key, value) if cache != nil { cache.Put(ctx, key, value) } } else if cache == nil { panic("key & value tensors must be provided if cache is nil") } var 
mask ml.Tensor if cache != nil { key, value, mask = cache.Get(ctx) } if sdpa, ok := query.(ml.ScaledDotProductAttention); ok { cacheConfigApplied := cache != nil return sdpa.ScaledDotProductAttention(ctx, key, value, mask, sinks, vmla, scale, cacheConfigApplied) } else { query = query.Permute(ctx, 0, 2, 1, 3) key = key.Permute(ctx, 0, 2, 1, 3) value = value.Permute(ctx, 1, 2, 0, 3).Contiguous(ctx) kq := key.MulmatFullPrec(ctx, query) kq = kq.Scale(ctx, scale) if mask != nil { kq = kq.Add(ctx, mask) } kq = kq.Softmax(ctx) kqv := value.Mulmat(ctx, kq) if vmla != nil { kqv = vmla.Mulmat(ctx, kqv) } return kqv.Permute(ctx, 0, 2, 1, 3).Contiguous(ctx) } }
go
MIT
626af2d80973270c4d59b8df7153ac47ad67ed7b
2026-01-07T08:35:43.337630Z
false
ollama/ollama
https://github.com/ollama/ollama/blob/626af2d80973270c4d59b8df7153ac47ad67ed7b/ml/nn/rope/options.go
ml/nn/rope/options.go
// Package rope provides options for RoPE package rope import "github.com/ollama/ollama/ml" // Options contains optional parameters for RoPE function type Options struct { Type int Factors ml.Tensor // YaRN options YaRN struct { OriginalContextLength int ExtrapolationFactor, AttentionFactor, BetaFast, BetaSlow float32 } // MRoPE options MRoPE struct { Sections []int } } // WithTypeNeoX sets RoPE type to NeoX func WithTypeNeoX() func(*Options) { return func(opts *Options) { opts.Type = 2 } } // WithFactors sets custom rope factors func WithFactors(factors ml.Tensor) func(*Options) { return func(opts *Options) { if factors != nil { opts.Factors = factors } } } // WithOriginalContextLength sets a custom context length func WithOriginalContextLength(n int) func(*Options) { return func(opts *Options) { opts.YaRN.OriginalContextLength = n } } func WithExtrapolationFactor(extrapolationFactor float32) func(*Options) { return func(opts *Options) { opts.YaRN.ExtrapolationFactor = extrapolationFactor } } func WithAttentionFactor(attentionFactor float32) func(*Options) { return func(opts *Options) { opts.YaRN.AttentionFactor = attentionFactor } } func WithBetaFast(betaFast float32) func(*Options) { return func(opts *Options) { opts.YaRN.BetaFast = betaFast } } func WithBetaSlow(betaSlow float32) func(*Options) { return func(opts *Options) { opts.YaRN.BetaSlow = betaSlow } } func WithMRoPE(sections []int) func(*Options) { return func(opts *Options) { opts.Type |= 1 << 3 opts.MRoPE.Sections = sections } } func WithVision(sections []int) func(*Options) { return func(opts *Options) { opts.Type |= 1<<3 | 1<<4 opts.MRoPE.Sections = sections } } func WithInterleaveMRoPE(sections []int) func(*Options) { return func(opts *Options) { opts.Type |= 1<<3 | 1<<5 opts.MRoPE.Sections = sections } }
go
MIT
626af2d80973270c4d59b8df7153ac47ad67ed7b
2026-01-07T08:35:43.337630Z
false
ollama/ollama
https://github.com/ollama/ollama/blob/626af2d80973270c4d59b8df7153ac47ad67ed7b/ml/nn/pooling/pooling.go
ml/nn/pooling/pooling.go
package pooling import ( "github.com/ollama/ollama/ml" ) type Type uint32 const ( TypeNone Type = iota TypeMean TypeCLS TypeLast ) func (t Type) String() string { switch t { case TypeMean: return "Mean" case TypeCLS: return "CLS" case TypeLast: return "Last" default: return "Unknown" } } func (t Type) Forward(ctx ml.Context, hiddenStates ml.Tensor) ml.Tensor { switch t { case TypeMean: hiddenStates = hiddenStates.Permute(ctx, 1, 0, 2, 3).Contiguous(ctx).Mean(ctx) return hiddenStates.Permute(ctx, 1, 0, 2, 3).Contiguous(ctx) case TypeCLS: return hiddenStates.Slice(ctx, 1, 0, 1, 1) case TypeLast: return hiddenStates.Slice(ctx, 1, hiddenStates.Dim(1)-1, hiddenStates.Dim(1), 1) default: panic("unknown pooling type") } }
go
MIT
626af2d80973270c4d59b8df7153ac47ad67ed7b
2026-01-07T08:35:43.337630Z
false
ollama/ollama
https://github.com/ollama/ollama/blob/626af2d80973270c4d59b8df7153ac47ad67ed7b/ml/nn/pooling/pooling_test.go
ml/nn/pooling/pooling_test.go
package pooling_test import ( "bytes" "os" "testing" "github.com/google/go-cmp/cmp" fsggml "github.com/ollama/ollama/fs/ggml" "github.com/ollama/ollama/ml" "github.com/ollama/ollama/ml/backend/ggml" "github.com/ollama/ollama/ml/nn/pooling" ) func setup(tb testing.TB, n int) ml.Backend { tb.Helper() f, err := os.CreateTemp(tb.TempDir(), "*.bin") if err != nil { tb.Fatal(err) } defer f.Close() if err := fsggml.WriteGGUF(f, fsggml.KV{ "general.architecture": "test", "test.block_count": uint32(1), }, []*fsggml.Tensor{ {Name: "blk.0.weight", Shape: []uint64{1}, WriterTo: bytes.NewBuffer(make([]byte, 4))}, }); err != nil { tb.Fatal(err) } b, err := ggml.New(f.Name(), ml.BackendParams{AllocMemory: true}) if err != nil { tb.Fatal(err) } return b } func TestForward(t *testing.T) { cases := map[pooling.Type][]float32{ pooling.TypeMean: {4, 5, 6, 7, 8, 9, 10, 11}, pooling.TypeCLS: {0, 1, 2, 3, 4, 5, 6, 7}, pooling.TypeLast: {8, 9, 10, 11, 12, 13, 14, 15}, } for typ, want := range cases { t.Run(typ.String(), func(t *testing.T) { b := setup(t, 99) defer b.Close() ctx := b.NewContext() defer ctx.Close() tt := ctx.Input().Arange(0, 16, 1, ml.DTypeF32).Reshape(ctx, 8, 2) tt = typ.Forward(ctx, tt) ctx.Forward(tt).Compute(tt) if diff := cmp.Diff(want, tt.Floats()); diff != "" { t.Error(diff) } }) } }
go
MIT
626af2d80973270c4d59b8df7153ac47ad67ed7b
2026-01-07T08:35:43.337630Z
false
ollama/ollama
https://github.com/ollama/ollama/blob/626af2d80973270c4d59b8df7153ac47ad67ed7b/auth/auth.go
auth/auth.go
package auth import ( "bytes" "context" "crypto/rand" "encoding/base64" "errors" "fmt" "io" "log/slog" "os" "path/filepath" "strings" "golang.org/x/crypto/ssh" ) const defaultPrivateKey = "id_ed25519" func GetPublicKey() (string, error) { home, err := os.UserHomeDir() if err != nil { return "", err } keyPath := filepath.Join(home, ".ollama", defaultPrivateKey) privateKeyFile, err := os.ReadFile(keyPath) if err != nil { slog.Info(fmt.Sprintf("Failed to load private key: %v", err)) return "", err } privateKey, err := ssh.ParsePrivateKey(privateKeyFile) if err != nil { return "", err } publicKey := ssh.MarshalAuthorizedKey(privateKey.PublicKey()) return strings.TrimSpace(string(publicKey)), nil } func NewNonce(r io.Reader, length int) (string, error) { nonce := make([]byte, length) if _, err := io.ReadFull(r, nonce); err != nil { return "", err } return base64.RawURLEncoding.EncodeToString(nonce), nil } func Sign(ctx context.Context, bts []byte) (string, error) { home, err := os.UserHomeDir() if err != nil { return "", err } keyPath := filepath.Join(home, ".ollama", defaultPrivateKey) privateKeyFile, err := os.ReadFile(keyPath) if err != nil { slog.Info(fmt.Sprintf("Failed to load private key: %v", err)) return "", err } privateKey, err := ssh.ParsePrivateKey(privateKeyFile) if err != nil { return "", err } // get the pubkey, but remove the type publicKey := ssh.MarshalAuthorizedKey(privateKey.PublicKey()) parts := bytes.Split(publicKey, []byte(" ")) if len(parts) < 2 { return "", errors.New("malformed public key") } signedData, err := privateKey.Sign(rand.Reader, bts) if err != nil { return "", err } // signature is <pubkey>:<signature> return fmt.Sprintf("%s:%s", bytes.TrimSpace(parts[1]), base64.StdEncoding.EncodeToString(signedData.Blob)), nil }
go
MIT
626af2d80973270c4d59b8df7153ac47ad67ed7b
2026-01-07T08:35:43.337630Z
false
ollama/ollama
https://github.com/ollama/ollama/blob/626af2d80973270c4d59b8df7153ac47ad67ed7b/cmd/start.go
cmd/start.go
//go:build darwin || windows package cmd import ( "context" "errors" "time" "github.com/ollama/ollama/api" ) func waitForServer(ctx context.Context, client *api.Client) error { // wait for the server to start timeout := time.After(5 * time.Second) tick := time.Tick(500 * time.Millisecond) for { select { case <-timeout: return errors.New("timed out waiting for server to start") case <-tick: if err := client.Heartbeat(ctx); err == nil { return nil // server has started } } } }
go
MIT
626af2d80973270c4d59b8df7153ac47ad67ed7b
2026-01-07T08:35:43.337630Z
false
ollama/ollama
https://github.com/ollama/ollama/blob/626af2d80973270c4d59b8df7153ac47ad67ed7b/cmd/interactive_test.go
cmd/interactive_test.go
package cmd import ( "os" "path/filepath" "testing" "github.com/stretchr/testify/assert" ) func TestExtractFilenames(t *testing.T) { // Unix style paths input := ` some preamble ./relative\ path/one.png inbetween1 ./not a valid two.jpg inbetween2 ./1.svg /unescaped space /three.jpeg inbetween3 /valid\ path/dir/four.png "./quoted with spaces/five.JPG /unescaped space /six.webp inbetween6 /valid\ path/dir/seven.WEBP` res := extractFileNames(input) assert.Len(t, res, 7) assert.Contains(t, res[0], "one.png") assert.Contains(t, res[1], "two.jpg") assert.Contains(t, res[2], "three.jpeg") assert.Contains(t, res[3], "four.png") assert.Contains(t, res[4], "five.JPG") assert.Contains(t, res[5], "six.webp") assert.Contains(t, res[6], "seven.WEBP") assert.NotContains(t, res[4], '"') assert.NotContains(t, res, "inbetween1") assert.NotContains(t, res, "./1.svg") // Windows style paths input = ` some preamble c:/users/jdoe/one.png inbetween1 c:/program files/someplace/two.jpg inbetween2 /absolute/nospace/three.jpeg inbetween3 /absolute/with space/four.png inbetween4 ./relative\ path/five.JPG inbetween5 "./relative with/spaces/six.png inbetween6 d:\path with\spaces\seven.JPEG inbetween7 c:\users\jdoe\eight.png inbetween8 d:\program files\someplace\nine.png inbetween9 "E:\program files\someplace\ten.PNG c:/users/jdoe/eleven.webp inbetween11 c:/program files/someplace/twelve.WebP inbetween12 d:\path with\spaces\thirteen.WEBP some ending ` res = extractFileNames(input) assert.Len(t, res, 13) assert.NotContains(t, res, "inbetween2") assert.Contains(t, res[0], "one.png") assert.Contains(t, res[0], "c:") assert.Contains(t, res[1], "two.jpg") assert.Contains(t, res[1], "c:") assert.Contains(t, res[2], "three.jpeg") assert.Contains(t, res[3], "four.png") assert.Contains(t, res[4], "five.JPG") assert.Contains(t, res[5], "six.png") assert.Contains(t, res[6], "seven.JPEG") assert.Contains(t, res[6], "d:") assert.Contains(t, res[7], "eight.png") assert.Contains(t, res[7], "c:") 
assert.Contains(t, res[8], "nine.png") assert.Contains(t, res[8], "d:") assert.Contains(t, res[9], "ten.PNG") assert.Contains(t, res[9], "E:") assert.Contains(t, res[10], "eleven.webp") assert.Contains(t, res[10], "c:") assert.Contains(t, res[11], "twelve.WebP") assert.Contains(t, res[11], "c:") assert.Contains(t, res[12], "thirteen.WEBP") assert.Contains(t, res[12], "d:") } // Ensure that file paths wrapped in single quotes are removed with the quotes. func TestExtractFileDataRemovesQuotedFilepath(t *testing.T) { dir := t.TempDir() fp := filepath.Join(dir, "img.jpg") data := make([]byte, 600) copy(data, []byte{ 0xff, 0xd8, 0xff, 0xe0, 0x00, 0x10, 'J', 'F', 'I', 'F', 0x00, 0x01, 0x01, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xff, 0xd9, }) if err := os.WriteFile(fp, data, 0o600); err != nil { t.Fatalf("failed to write test image: %v", err) } input := "before '" + fp + "' after" cleaned, imgs, err := extractFileData(input) assert.NoError(t, err) assert.Len(t, imgs, 1) assert.Equal(t, cleaned, "before after") }
go
MIT
626af2d80973270c4d59b8df7153ac47ad67ed7b
2026-01-07T08:35:43.337630Z
false
ollama/ollama
https://github.com/ollama/ollama/blob/626af2d80973270c4d59b8df7153ac47ad67ed7b/cmd/start_windows.go
cmd/start_windows.go
package cmd import ( "context" "errors" "fmt" "log/slog" "os" "os/exec" "path" "path/filepath" "strings" "syscall" "unsafe" "github.com/ollama/ollama/api" "golang.org/x/sys/windows" ) const ( Installer = "OllamaSetup.exe" ) func startApp(ctx context.Context, client *api.Client) error { if len(isProcRunning(Installer)) > 0 { return fmt.Errorf("upgrade in progress...") } AppName := "ollama app.exe" exe, err := os.Executable() if err != nil { return err } appExe := filepath.Join(filepath.Dir(exe), AppName) _, err = os.Stat(appExe) if errors.Is(err, os.ErrNotExist) { // Try the standard install location localAppData := os.Getenv("LOCALAPPDATA") appExe = filepath.Join(localAppData, "Ollama", AppName) _, err := os.Stat(appExe) if errors.Is(err, os.ErrNotExist) { // Finally look in the path appExe, err = exec.LookPath(AppName) if err != nil { return errors.New("could not locate ollama app") } } } cmd_path := "c:\\Windows\\system32\\cmd.exe" cmd := exec.Command(cmd_path, "/c", appExe, "--hide", "--fast-startup") cmd.SysProcAttr = &syscall.SysProcAttr{CreationFlags: 0x08000000, HideWindow: true} cmd.Stdin = strings.NewReader("") cmd.Stdout = os.Stdout cmd.Stderr = os.Stderr if err := cmd.Start(); err != nil { return fmt.Errorf("unable to start ollama app %w", err) } if cmd.Process != nil { defer cmd.Process.Release() //nolint:errcheck } return waitForServer(ctx, client) } func isProcRunning(procName string) []uint32 { pids := make([]uint32, 2048) var ret uint32 if err := windows.EnumProcesses(pids, &ret); err != nil || ret == 0 { slog.Debug("failed to check for running installers", "error", err) return nil } if ret > uint32(len(pids)) { pids = make([]uint32, ret+10) if err := windows.EnumProcesses(pids, &ret); err != nil || ret == 0 { slog.Debug("failed to check for running installers", "error", err) return nil } } if ret < uint32(len(pids)) { pids = pids[:ret] } var matches []uint32 for _, pid := range pids { if pid == 0 { continue } hProcess, err := 
windows.OpenProcess(windows.PROCESS_QUERY_INFORMATION|windows.PROCESS_VM_READ, false, pid) if err != nil { continue } defer windows.CloseHandle(hProcess) var module windows.Handle var cbNeeded uint32 cb := (uint32)(unsafe.Sizeof(module)) if err := windows.EnumProcessModules(hProcess, &module, cb, &cbNeeded); err != nil { continue } var sz uint32 = 1024 * 8 moduleName := make([]uint16, sz) cb = uint32(len(moduleName)) * (uint32)(unsafe.Sizeof(uint16(0))) if err := windows.GetModuleBaseName(hProcess, module, &moduleName[0], cb); err != nil && err != syscall.ERROR_INSUFFICIENT_BUFFER { continue } exeFile := path.Base(strings.ToLower(syscall.UTF16ToString(moduleName))) if strings.EqualFold(exeFile, procName) { matches = append(matches, pid) } } return matches }
go
MIT
626af2d80973270c4d59b8df7153ac47ad67ed7b
2026-01-07T08:35:43.337630Z
false
ollama/ollama
https://github.com/ollama/ollama/blob/626af2d80973270c4d59b8df7153ac47ad67ed7b/cmd/warn_thinking_test.go
cmd/warn_thinking_test.go
package cmd import ( "encoding/json" "io" "net/http" "net/http/httptest" "os" "strings" "testing" "github.com/ollama/ollama/api" "github.com/ollama/ollama/types/model" ) // Test that a warning is printed when thinking is requested but not supported. func TestWarnMissingThinking(t *testing.T) { cases := []struct { capabilities []model.Capability expectWarn bool }{ {capabilities: []model.Capability{model.CapabilityThinking}, expectWarn: false}, {capabilities: []model.Capability{}, expectWarn: true}, } for _, tc := range cases { srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { if r.URL.Path != "/api/show" || r.Method != http.MethodPost { t.Fatalf("unexpected request to %s %s", r.URL.Path, r.Method) } var req api.ShowRequest if err := json.NewDecoder(r.Body).Decode(&req); err != nil { t.Fatalf("decode request: %v", err) } resp := api.ShowResponse{Capabilities: tc.capabilities} if err := json.NewEncoder(w).Encode(resp); err != nil { t.Fatalf("encode response: %v", err) } })) defer srv.Close() t.Setenv("OLLAMA_HOST", srv.URL) client, err := api.ClientFromEnvironment() if err != nil { t.Fatal(err) } oldStderr := os.Stderr r, w, _ := os.Pipe() os.Stderr = w ensureThinkingSupport(t.Context(), client, "m") w.Close() os.Stderr = oldStderr out, _ := io.ReadAll(r) warned := strings.Contains(string(out), "warning:") if tc.expectWarn && !warned { t.Errorf("expected warning, got none") } if !tc.expectWarn && warned { t.Errorf("did not expect warning, got: %s", string(out)) } } }
go
MIT
626af2d80973270c4d59b8df7153ac47ad67ed7b
2026-01-07T08:35:43.337630Z
false
ollama/ollama
https://github.com/ollama/ollama/blob/626af2d80973270c4d59b8df7153ac47ad67ed7b/cmd/cmd_test.go
cmd/cmd_test.go
package cmd import ( "bytes" "encoding/json" "fmt" "io" "net/http" "net/http/httptest" "os" "reflect" "strings" "testing" "time" "github.com/google/go-cmp/cmp" "github.com/spf13/cobra" "github.com/ollama/ollama/api" "github.com/ollama/ollama/types/model" ) func TestShowInfo(t *testing.T) { t.Run("bare details", func(t *testing.T) { var b bytes.Buffer if err := showInfo(&api.ShowResponse{ Details: api.ModelDetails{ Family: "test", ParameterSize: "7B", QuantizationLevel: "FP16", }, }, false, &b); err != nil { t.Fatal(err) } expect := ` Model architecture test parameters 7B quantization FP16 ` if diff := cmp.Diff(expect, b.String()); diff != "" { t.Errorf("unexpected output (-want +got):\n%s", diff) } }) t.Run("bare model info", func(t *testing.T) { var b bytes.Buffer if err := showInfo(&api.ShowResponse{ ModelInfo: map[string]any{ "general.architecture": "test", "general.parameter_count": float64(7_000_000_000), "test.context_length": float64(0), "test.embedding_length": float64(0), }, Details: api.ModelDetails{ Family: "test", ParameterSize: "7B", QuantizationLevel: "FP16", }, }, false, &b); err != nil { t.Fatal(err) } expect := ` Model architecture test parameters 7B context length 0 embedding length 0 quantization FP16 ` if diff := cmp.Diff(expect, b.String()); diff != "" { t.Errorf("unexpected output (-want +got):\n%s", diff) } }) t.Run("verbose model", func(t *testing.T) { var b bytes.Buffer if err := showInfo(&api.ShowResponse{ Details: api.ModelDetails{ Family: "test", ParameterSize: "8B", QuantizationLevel: "FP16", }, Parameters: ` stop up`, ModelInfo: map[string]any{ "general.architecture": "test", "general.parameter_count": float64(8_000_000_000), "some.true_bool": true, "some.false_bool": false, "test.context_length": float64(1000), "test.embedding_length": float64(11434), }, Tensors: []api.Tensor{ {Name: "blk.0.attn_k.weight", Type: "BF16", Shape: []uint64{42, 3117}}, {Name: "blk.0.attn_q.weight", Type: "FP16", Shape: []uint64{3117, 42}}, }, }, true, &b); 
err != nil { t.Fatal(err) } expect := ` Model architecture test parameters 8B context length 1000 embedding length 11434 quantization FP16 Parameters stop up Metadata general.architecture test general.parameter_count 8e+09 some.false_bool false some.true_bool true test.context_length 1000 test.embedding_length 11434 Tensors blk.0.attn_k.weight BF16 [42 3117] blk.0.attn_q.weight FP16 [3117 42] ` if diff := cmp.Diff(expect, b.String()); diff != "" { t.Errorf("unexpected output (-want +got):\n%s", diff) } }) t.Run("parameters", func(t *testing.T) { var b bytes.Buffer if err := showInfo(&api.ShowResponse{ Details: api.ModelDetails{ Family: "test", ParameterSize: "7B", QuantizationLevel: "FP16", }, Parameters: ` stop never stop gonna stop give stop you stop up temperature 99`, }, false, &b); err != nil { t.Fatal(err) } expect := ` Model architecture test parameters 7B quantization FP16 Parameters stop never stop gonna stop give stop you stop up temperature 99 ` if diff := cmp.Diff(expect, b.String()); diff != "" { t.Errorf("unexpected output (-want +got):\n%s", diff) } }) t.Run("project info", func(t *testing.T) { var b bytes.Buffer if err := showInfo(&api.ShowResponse{ Details: api.ModelDetails{ Family: "test", ParameterSize: "7B", QuantizationLevel: "FP16", }, ProjectorInfo: map[string]any{ "general.architecture": "clip", "general.parameter_count": float64(133_700_000), "clip.vision.embedding_length": float64(0), "clip.vision.projection_dim": float64(0), }, }, false, &b); err != nil { t.Fatal(err) } expect := ` Model architecture test parameters 7B quantization FP16 Projector architecture clip parameters 133.70M embedding length 0 dimensions 0 ` if diff := cmp.Diff(expect, b.String()); diff != "" { t.Errorf("unexpected output (-want +got):\n%s", diff) } }) t.Run("system", func(t *testing.T) { var b bytes.Buffer if err := showInfo(&api.ShowResponse{ Details: api.ModelDetails{ Family: "test", ParameterSize: "7B", QuantizationLevel: "FP16", }, System: `You are a pirate! 
Ahoy, matey! Weigh anchor! `, }, false, &b); err != nil { t.Fatal(err) } expect := ` Model architecture test parameters 7B quantization FP16 System You are a pirate! Ahoy, matey! ... ` if diff := cmp.Diff(expect, b.String()); diff != "" { t.Errorf("unexpected output (-want +got):\n%s", diff) } }) t.Run("license", func(t *testing.T) { var b bytes.Buffer license := "MIT License\nCopyright (c) Ollama\n" if err := showInfo(&api.ShowResponse{ Details: api.ModelDetails{ Family: "test", ParameterSize: "7B", QuantizationLevel: "FP16", }, License: license, }, false, &b); err != nil { t.Fatal(err) } expect := ` Model architecture test parameters 7B quantization FP16 License MIT License Copyright (c) Ollama ` if diff := cmp.Diff(expect, b.String()); diff != "" { t.Errorf("unexpected output (-want +got):\n%s", diff) } }) t.Run("capabilities", func(t *testing.T) { var b bytes.Buffer if err := showInfo(&api.ShowResponse{ Details: api.ModelDetails{ Family: "test", ParameterSize: "7B", QuantizationLevel: "FP16", }, Capabilities: []model.Capability{model.CapabilityVision, model.CapabilityTools}, }, false, &b); err != nil { t.Fatal(err) } expect := " Model\n" + " architecture test \n" + " parameters 7B \n" + " quantization FP16 \n" + "\n" + " Capabilities\n" + " vision \n" + " tools \n" + "\n" if diff := cmp.Diff(expect, b.String()); diff != "" { t.Errorf("unexpected output (-want +got):\n%s", diff) } }) t.Run("min version", func(t *testing.T) { var b bytes.Buffer if err := showInfo(&api.ShowResponse{ Details: api.ModelDetails{ Family: "test", ParameterSize: "7B", QuantizationLevel: "FP16", }, Requires: "0.14.0", }, false, &b); err != nil { t.Fatal(err) } expect := ` Model architecture test parameters 7B quantization FP16 requires 0.14.0 ` if diff := cmp.Diff(expect, b.String()); diff != "" { t.Errorf("unexpected output (-want +got):\n%s", diff) } }) } func TestDeleteHandler(t *testing.T) { stopped := false mockServer := httptest.NewServer(http.HandlerFunc(func(w 
http.ResponseWriter, r *http.Request) { if r.URL.Path == "/api/delete" && r.Method == http.MethodDelete { var req api.DeleteRequest if err := json.NewDecoder(r.Body).Decode(&req); err != nil { http.Error(w, err.Error(), http.StatusBadRequest) return } if req.Name == "test-model" { w.WriteHeader(http.StatusOK) } else { w.WriteHeader(http.StatusNotFound) errPayload := `{"error":"model '%s' not found"}` w.Write([]byte(fmt.Sprintf(errPayload, req.Name))) } return } if r.URL.Path == "/api/generate" && r.Method == http.MethodPost { var req api.GenerateRequest if err := json.NewDecoder(r.Body).Decode(&req); err != nil { http.Error(w, err.Error(), http.StatusBadRequest) return } if req.Model == "test-model" { w.WriteHeader(http.StatusOK) if err := json.NewEncoder(w).Encode(api.GenerateResponse{ Done: true, }); err != nil { http.Error(w, err.Error(), http.StatusInternalServerError) } stopped = true return } else { w.WriteHeader(http.StatusNotFound) if err := json.NewEncoder(w).Encode(api.GenerateResponse{ Done: false, }); err != nil { http.Error(w, err.Error(), http.StatusInternalServerError) } } } })) t.Setenv("OLLAMA_HOST", mockServer.URL) t.Cleanup(mockServer.Close) cmd := &cobra.Command{} cmd.SetContext(t.Context()) if err := DeleteHandler(cmd, []string{"test-model"}); err != nil { t.Fatalf("DeleteHandler failed: %v", err) } if !stopped { t.Fatal("Model was not stopped before deletion") } err := DeleteHandler(cmd, []string{"test-model-not-found"}) if err == nil || !strings.Contains(err.Error(), "model 'test-model-not-found' not found") { t.Fatalf("DeleteHandler failed: expected error about stopping non-existent model, got %v", err) } } func TestRunEmbeddingModel(t *testing.T) { reqCh := make(chan api.EmbedRequest, 1) mockServer := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { if r.URL.Path == "/api/show" && r.Method == http.MethodPost { w.Header().Set("Content-Type", "application/json") if err := 
json.NewEncoder(w).Encode(api.ShowResponse{ Capabilities: []model.Capability{model.CapabilityEmbedding}, }); err != nil { http.Error(w, err.Error(), http.StatusInternalServerError) } return } if r.URL.Path == "/api/embed" && r.Method == http.MethodPost { var req api.EmbedRequest if err := json.NewDecoder(r.Body).Decode(&req); err != nil { http.Error(w, err.Error(), http.StatusBadRequest) return } reqCh <- req w.Header().Set("Content-Type", "application/json") if err := json.NewEncoder(w).Encode(api.EmbedResponse{ Model: "test-embedding-model", Embeddings: [][]float32{{0.1, 0.2, 0.3}}, }); err != nil { http.Error(w, err.Error(), http.StatusInternalServerError) } return } http.NotFound(w, r) })) t.Setenv("OLLAMA_HOST", mockServer.URL) t.Cleanup(mockServer.Close) cmd := &cobra.Command{} cmd.SetContext(t.Context()) cmd.Flags().String("keepalive", "", "") cmd.Flags().Bool("truncate", false, "") cmd.Flags().Int("dimensions", 0, "") cmd.Flags().Bool("verbose", false, "") cmd.Flags().Bool("insecure", false, "") cmd.Flags().Bool("nowordwrap", false, "") cmd.Flags().String("format", "", "") cmd.Flags().String("think", "", "") cmd.Flags().Bool("hidethinking", false, "") oldStdout := os.Stdout r, w, _ := os.Pipe() os.Stdout = w errCh := make(chan error, 1) go func() { errCh <- RunHandler(cmd, []string{"test-embedding-model", "hello", "world"}) }() err := <-errCh w.Close() os.Stdout = oldStdout if err != nil { t.Fatalf("RunHandler returned error: %v", err) } var out bytes.Buffer io.Copy(&out, r) select { case req := <-reqCh: inputText, _ := req.Input.(string) if diff := cmp.Diff("hello world", inputText); diff != "" { t.Errorf("unexpected input (-want +got):\n%s", diff) } if req.Truncate != nil { t.Errorf("expected truncate to be nil, got %v", *req.Truncate) } if req.KeepAlive != nil { t.Errorf("expected keepalive to be nil, got %v", req.KeepAlive) } if req.Dimensions != 0 { t.Errorf("expected dimensions to be 0, got %d", req.Dimensions) } default: t.Fatal("server did not 
receive embed request") } expectOutput := "[0.1,0.2,0.3]\n" if diff := cmp.Diff(expectOutput, out.String()); diff != "" { t.Errorf("unexpected output (-want +got):\n%s", diff) } } func TestRunEmbeddingModelWithFlags(t *testing.T) { reqCh := make(chan api.EmbedRequest, 1) mockServer := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { if r.URL.Path == "/api/show" && r.Method == http.MethodPost { w.Header().Set("Content-Type", "application/json") if err := json.NewEncoder(w).Encode(api.ShowResponse{ Capabilities: []model.Capability{model.CapabilityEmbedding}, }); err != nil { http.Error(w, err.Error(), http.StatusInternalServerError) } return } if r.URL.Path == "/api/embed" && r.Method == http.MethodPost { var req api.EmbedRequest if err := json.NewDecoder(r.Body).Decode(&req); err != nil { http.Error(w, err.Error(), http.StatusBadRequest) return } reqCh <- req w.Header().Set("Content-Type", "application/json") if err := json.NewEncoder(w).Encode(api.EmbedResponse{ Model: "test-embedding-model", Embeddings: [][]float32{{0.4, 0.5}}, LoadDuration: 5 * time.Millisecond, }); err != nil { http.Error(w, err.Error(), http.StatusInternalServerError) } return } http.NotFound(w, r) })) t.Setenv("OLLAMA_HOST", mockServer.URL) t.Cleanup(mockServer.Close) cmd := &cobra.Command{} cmd.SetContext(t.Context()) cmd.Flags().String("keepalive", "", "") cmd.Flags().Bool("truncate", false, "") cmd.Flags().Int("dimensions", 0, "") cmd.Flags().Bool("verbose", false, "") cmd.Flags().Bool("insecure", false, "") cmd.Flags().Bool("nowordwrap", false, "") cmd.Flags().String("format", "", "") cmd.Flags().String("think", "", "") cmd.Flags().Bool("hidethinking", false, "") if err := cmd.Flags().Set("truncate", "true"); err != nil { t.Fatalf("failed to set truncate flag: %v", err) } if err := cmd.Flags().Set("dimensions", "2"); err != nil { t.Fatalf("failed to set dimensions flag: %v", err) } if err := cmd.Flags().Set("keepalive", "5m"); err != nil { t.Fatalf("failed 
to set keepalive flag: %v", err) } oldStdout := os.Stdout r, w, _ := os.Pipe() os.Stdout = w errCh := make(chan error, 1) go func() { errCh <- RunHandler(cmd, []string{"test-embedding-model", "test", "input"}) }() err := <-errCh w.Close() os.Stdout = oldStdout if err != nil { t.Fatalf("RunHandler returned error: %v", err) } var out bytes.Buffer io.Copy(&out, r) select { case req := <-reqCh: inputText, _ := req.Input.(string) if diff := cmp.Diff("test input", inputText); diff != "" { t.Errorf("unexpected input (-want +got):\n%s", diff) } if req.Truncate == nil || !*req.Truncate { t.Errorf("expected truncate pointer true, got %v", req.Truncate) } if req.Dimensions != 2 { t.Errorf("expected dimensions 2, got %d", req.Dimensions) } if req.KeepAlive == nil || req.KeepAlive.Duration != 5*time.Minute { t.Errorf("unexpected keepalive duration: %v", req.KeepAlive) } default: t.Fatal("server did not receive embed request") } expectOutput := "[0.4,0.5]\n" if diff := cmp.Diff(expectOutput, out.String()); diff != "" { t.Errorf("unexpected output (-want +got):\n%s", diff) } } func TestRunEmbeddingModelPipedInput(t *testing.T) { reqCh := make(chan api.EmbedRequest, 1) mockServer := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { if r.URL.Path == "/api/show" && r.Method == http.MethodPost { w.Header().Set("Content-Type", "application/json") if err := json.NewEncoder(w).Encode(api.ShowResponse{ Capabilities: []model.Capability{model.CapabilityEmbedding}, }); err != nil { http.Error(w, err.Error(), http.StatusInternalServerError) } return } if r.URL.Path == "/api/embed" && r.Method == http.MethodPost { var req api.EmbedRequest if err := json.NewDecoder(r.Body).Decode(&req); err != nil { http.Error(w, err.Error(), http.StatusBadRequest) return } reqCh <- req w.Header().Set("Content-Type", "application/json") if err := json.NewEncoder(w).Encode(api.EmbedResponse{ Model: "test-embedding-model", Embeddings: [][]float32{{0.6, 0.7}}, }); err != nil { 
http.Error(w, err.Error(), http.StatusInternalServerError) } return } http.NotFound(w, r) })) t.Setenv("OLLAMA_HOST", mockServer.URL) t.Cleanup(mockServer.Close) cmd := &cobra.Command{} cmd.SetContext(t.Context()) cmd.Flags().String("keepalive", "", "") cmd.Flags().Bool("truncate", false, "") cmd.Flags().Int("dimensions", 0, "") cmd.Flags().Bool("verbose", false, "") cmd.Flags().Bool("insecure", false, "") cmd.Flags().Bool("nowordwrap", false, "") cmd.Flags().String("format", "", "") cmd.Flags().String("think", "", "") cmd.Flags().Bool("hidethinking", false, "") // Capture stdin oldStdin := os.Stdin stdinR, stdinW, _ := os.Pipe() os.Stdin = stdinR stdinW.Write([]byte("piped text")) stdinW.Close() // Capture stdout oldStdout := os.Stdout stdoutR, stdoutW, _ := os.Pipe() os.Stdout = stdoutW errCh := make(chan error, 1) go func() { errCh <- RunHandler(cmd, []string{"test-embedding-model", "additional", "args"}) }() err := <-errCh stdoutW.Close() os.Stdout = oldStdout os.Stdin = oldStdin if err != nil { t.Fatalf("RunHandler returned error: %v", err) } var out bytes.Buffer io.Copy(&out, stdoutR) select { case req := <-reqCh: inputText, _ := req.Input.(string) // Should combine piped input with command line args if diff := cmp.Diff("piped text additional args", inputText); diff != "" { t.Errorf("unexpected input (-want +got):\n%s", diff) } default: t.Fatal("server did not receive embed request") } expectOutput := "[0.6,0.7]\n" if diff := cmp.Diff(expectOutput, out.String()); diff != "" { t.Errorf("unexpected output (-want +got):\n%s", diff) } } func TestRunEmbeddingModelNoInput(t *testing.T) { mockServer := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { if r.URL.Path == "/api/show" && r.Method == http.MethodPost { w.Header().Set("Content-Type", "application/json") if err := json.NewEncoder(w).Encode(api.ShowResponse{ Capabilities: []model.Capability{model.CapabilityEmbedding}, }); err != nil { http.Error(w, err.Error(), 
http.StatusInternalServerError) } return } http.NotFound(w, r) })) t.Setenv("OLLAMA_HOST", mockServer.URL) t.Cleanup(mockServer.Close) cmd := &cobra.Command{} cmd.SetContext(t.Context()) cmd.Flags().String("keepalive", "", "") cmd.Flags().Bool("truncate", false, "") cmd.Flags().Int("dimensions", 0, "") cmd.Flags().Bool("verbose", false, "") cmd.Flags().Bool("insecure", false, "") cmd.Flags().Bool("nowordwrap", false, "") cmd.Flags().String("format", "", "") cmd.Flags().String("think", "", "") cmd.Flags().Bool("hidethinking", false, "") cmd.SetOut(io.Discard) cmd.SetErr(io.Discard) // Test with no input arguments (only model name) err := RunHandler(cmd, []string{"test-embedding-model"}) if err == nil || !strings.Contains(err.Error(), "embedding models require input text") { t.Fatalf("expected error about missing input, got %v", err) } } func TestGetModelfileName(t *testing.T) { tests := []struct { name string modelfileName string fileExists bool expectedName string expectedErr error }{ { name: "no modelfile specified, no modelfile exists", modelfileName: "", fileExists: false, expectedName: "", expectedErr: os.ErrNotExist, }, { name: "no modelfile specified, modelfile exists", modelfileName: "", fileExists: true, expectedName: "Modelfile", expectedErr: nil, }, { name: "modelfile specified, no modelfile exists", modelfileName: "crazyfile", fileExists: false, expectedName: "", expectedErr: os.ErrNotExist, }, { name: "modelfile specified, modelfile exists", modelfileName: "anotherfile", fileExists: true, expectedName: "anotherfile", expectedErr: nil, }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { cmd := &cobra.Command{ Use: "fakecmd", } cmd.Flags().String("file", "", "path to modelfile") var expectedFilename string if tt.fileExists { var fn string if tt.modelfileName != "" { fn = tt.modelfileName } else { fn = "Modelfile" } tempFile, err := os.CreateTemp(t.TempDir(), fn) if err != nil { t.Fatalf("temp modelfile creation failed: %v", err) } defer 
tempFile.Close() expectedFilename = tempFile.Name() err = cmd.Flags().Set("file", expectedFilename) if err != nil { t.Fatalf("couldn't set file flag: %v", err) } } else { expectedFilename = tt.expectedName if tt.modelfileName != "" { err := cmd.Flags().Set("file", tt.modelfileName) if err != nil { t.Fatalf("couldn't set file flag: %v", err) } } } actualFilename, actualErr := getModelfileName(cmd) if actualFilename != expectedFilename { t.Errorf("expected filename: '%s' actual filename: '%s'", expectedFilename, actualFilename) } if tt.expectedErr != os.ErrNotExist { if actualErr != tt.expectedErr { t.Errorf("expected err: %v actual err: %v", tt.expectedErr, actualErr) } } else { if !os.IsNotExist(actualErr) { t.Errorf("expected err: %v actual err: %v", tt.expectedErr, actualErr) } } }) } } func TestPushHandler(t *testing.T) { tests := []struct { name string modelName string serverResponse map[string]func(w http.ResponseWriter, r *http.Request) expectedError string expectedOutput string }{ { name: "successful push", modelName: "test-model", serverResponse: map[string]func(w http.ResponseWriter, r *http.Request){ "/api/push": func(w http.ResponseWriter, r *http.Request) { if r.Method != http.MethodPost { t.Errorf("expected POST request, got %s", r.Method) } var req api.PushRequest if err := json.NewDecoder(r.Body).Decode(&req); err != nil { http.Error(w, err.Error(), http.StatusBadRequest) return } if req.Name != "test-model" { t.Errorf("expected model name 'test-model', got %s", req.Name) } // Simulate progress updates responses := []api.ProgressResponse{ {Status: "preparing manifest"}, {Digest: "sha256:abc123456789", Total: 100, Completed: 50}, {Digest: "sha256:abc123456789", Total: 100, Completed: 100}, } for _, resp := range responses { if err := json.NewEncoder(w).Encode(resp); err != nil { http.Error(w, err.Error(), http.StatusInternalServerError) return } w.(http.Flusher).Flush() } }, "/api/me": func(w http.ResponseWriter, r *http.Request) { if r.Method != 
http.MethodPost { t.Errorf("expected POST request, got %s", r.Method) } }, }, expectedOutput: "\nYou can find your model at:\n\n\thttps://ollama.com/test-model\n", }, { name: "not signed in push", modelName: "notsignedin-model", serverResponse: map[string]func(w http.ResponseWriter, r *http.Request){ "/api/me": func(w http.ResponseWriter, r *http.Request) { if r.Method != http.MethodPost { t.Errorf("expected POST request, got %s", r.Method) } w.Header().Set("Content-Type", "application/json") w.WriteHeader(http.StatusUnauthorized) err := json.NewEncoder(w).Encode(map[string]string{ "error": "unauthorized", "signin_url": "https://somethingsomething", }) if err != nil { t.Fatal(err) } }, }, expectedOutput: "You need to be signed in to push", }, { name: "unauthorized push", modelName: "unauthorized-model", serverResponse: map[string]func(w http.ResponseWriter, r *http.Request){ "/api/push": func(w http.ResponseWriter, r *http.Request) { w.Header().Set("Content-Type", "application/json") w.WriteHeader(http.StatusUnauthorized) err := json.NewEncoder(w).Encode(map[string]string{ "error": "403: {\"errors\":[{\"code\":\"ACCESS DENIED\", \"message\":\"access denied\"}]}", }) if err != nil { t.Fatal(err) } }, "/api/me": func(w http.ResponseWriter, r *http.Request) { if r.Method != http.MethodPost { t.Errorf("expected POST request, got %s", r.Method) } }, }, expectedError: "you are not authorized to push to this namespace, create the model under a namespace you own", }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { mockServer := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { if handler, ok := tt.serverResponse[r.URL.Path]; ok { handler(w, r) return } http.Error(w, "not found", http.StatusNotFound) })) defer mockServer.Close() t.Setenv("OLLAMA_HOST", mockServer.URL) tmpDir := t.TempDir() t.Setenv("HOME", tmpDir) t.Setenv("USERPROFILE", tmpDir) initializeKeypair() cmd := &cobra.Command{} cmd.Flags().Bool("insecure", false, 
"") cmd.SetContext(t.Context()) // Redirect stderr to capture progress output oldStderr := os.Stderr r, w, _ := os.Pipe() os.Stderr = w // Capture stdout for the "Model pushed" message oldStdout := os.Stdout outR, outW, _ := os.Pipe() os.Stdout = outW err := PushHandler(cmd, []string{tt.modelName}) // Restore stderr w.Close() os.Stderr = oldStderr // drain the pipe if _, err := io.ReadAll(r); err != nil { t.Fatal(err) } // Restore stdout and get output outW.Close() os.Stdout = oldStdout stdout, _ := io.ReadAll(outR) if tt.expectedError == "" { if err != nil { t.Errorf("expected no error, got %v", err) } if tt.expectedOutput != "" { if got := string(stdout); !strings.Contains(got, tt.expectedOutput) { t.Errorf("expected output %q, got %q", tt.expectedOutput, got) } } } else { if err == nil || !strings.Contains(err.Error(), tt.expectedError) { t.Errorf("expected error containing %q, got %v", tt.expectedError, err) } } }) } } func TestListHandler(t *testing.T) { tests := []struct { name string args []string serverResponse []api.ListModelResponse expectedError string expectedOutput string }{ { name: "list all models", args: []string{}, serverResponse: []api.ListModelResponse{ {Name: "model1", Digest: "sha256:abc123", Size: 1024, ModifiedAt: time.Now().Add(-24 * time.Hour)}, {Name: "model2", Digest: "sha256:def456", Size: 2048, ModifiedAt: time.Now().Add(-48 * time.Hour)}, }, expectedOutput: "NAME ID SIZE MODIFIED \n" + "model1 sha256:abc12 1.0 KB 24 hours ago \n" + "model2 sha256:def45 2.0 KB 2 days ago \n", }, { name: "filter models by prefix", args: []string{"model1"}, serverResponse: []api.ListModelResponse{ {Name: "model1", Digest: "sha256:abc123", Size: 1024, ModifiedAt: time.Now().Add(-24 * time.Hour)}, {Name: "model2", Digest: "sha256:def456", Size: 2048, ModifiedAt: time.Now().Add(-24 * time.Hour)}, }, expectedOutput: "NAME ID SIZE MODIFIED \n" + "model1 sha256:abc12 1.0 KB 24 hours ago \n", }, { name: "server error", args: []string{}, expectedError: "server 
error", }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { mockServer := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { if r.URL.Path != "/api/tags" || r.Method != http.MethodGet { t.Errorf("unexpected request to %s %s", r.Method, r.URL.Path) http.Error(w, "not found", http.StatusNotFound) return } if tt.expectedError != "" { http.Error(w, tt.expectedError, http.StatusInternalServerError) return } response := api.ListResponse{Models: tt.serverResponse} if err := json.NewEncoder(w).Encode(response); err != nil { t.Fatal(err) } })) defer mockServer.Close() t.Setenv("OLLAMA_HOST", mockServer.URL) cmd := &cobra.Command{} cmd.SetContext(t.Context()) // Capture stdout oldStdout := os.Stdout r, w, _ := os.Pipe() os.Stdout = w err := ListHandler(cmd, tt.args) // Restore stdout and get output w.Close() os.Stdout = oldStdout output, _ := io.ReadAll(r) if tt.expectedError == "" { if err != nil { t.Errorf("expected no error, got %v", err) } if got := string(output); got != tt.expectedOutput { t.Errorf("expected output:\n%s\ngot:\n%s", tt.expectedOutput, got) } } else { if err == nil || !strings.Contains(err.Error(), tt.expectedError) { t.Errorf("expected error containing %q, got %v", tt.expectedError, err) } } }) } } func TestCreateHandler(t *testing.T) { tests := []struct { name string modelName string modelFile string serverResponse map[string]func(w http.ResponseWriter, r *http.Request) expectedError string expectedOutput string }{ { name: "successful create", modelName: "test-model", modelFile: "FROM foo", serverResponse: map[string]func(w http.ResponseWriter, r *http.Request){ "/api/create": func(w http.ResponseWriter, r *http.Request) { if r.Method != http.MethodPost { t.Errorf("expected POST request, got %s", r.Method) } req := api.CreateRequest{} if err := json.NewDecoder(r.Body).Decode(&req); err != nil { http.Error(w, err.Error(), http.StatusBadRequest) return } if req.Model != "test-model" { t.Errorf("expected 
model name 'test-model', got %s", req.Name) } if req.From != "foo" { t.Errorf("expected from 'foo', got %s", req.From) } responses := []api.ProgressResponse{ {Status: "using existing layer sha256:56bb8bd477a519ffa694fc449c2413c6f0e1d3b1c88fa7e3c9d88d3ae49d4dcb"}, {Status: "writing manifest"}, {Status: "success"}, } for _, resp := range responses { if err := json.NewEncoder(w).Encode(resp); err != nil { http.Error(w, err.Error(), http.StatusInternalServerError) return } w.(http.Flusher).Flush() } }, }, expectedOutput: "", }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { mockServer := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { handler, ok := tt.serverResponse[r.URL.Path] if !ok { t.Errorf("unexpected request to %s", r.URL.Path) http.Error(w, "not found", http.StatusNotFound) return } handler(w, r) })) t.Setenv("OLLAMA_HOST", mockServer.URL) t.Cleanup(mockServer.Close) tempFile, err := os.CreateTemp(t.TempDir(), "modelfile") if err != nil { t.Fatal(err) } defer os.Remove(tempFile.Name()) if _, err := tempFile.WriteString(tt.modelFile); err != nil { t.Fatal(err) } if err := tempFile.Close(); err != nil { t.Fatal(err) } cmd := &cobra.Command{} cmd.Flags().String("file", "", "") if err := cmd.Flags().Set("file", tempFile.Name()); err != nil { t.Fatal(err) } cmd.Flags().Bool("insecure", false, "") cmd.SetContext(t.Context()) // Redirect stderr to capture progress output oldStderr := os.Stderr r, w, _ := os.Pipe() os.Stderr = w // Capture stdout for the "Model pushed" message oldStdout := os.Stdout outR, outW, _ := os.Pipe() os.Stdout = outW err = CreateHandler(cmd, []string{tt.modelName}) // Restore stderr w.Close() os.Stderr = oldStderr // drain the pipe if _, err := io.ReadAll(r); err != nil { t.Fatal(err) } // Restore stdout and get output outW.Close() os.Stdout = oldStdout stdout, _ := io.ReadAll(outR) if tt.expectedError == "" { if err != nil { t.Errorf("expected no error, got %v", err) } if tt.expectedOutput 
!= "" { if got := string(stdout); got != tt.expectedOutput { t.Errorf("expected output %q, got %q", tt.expectedOutput, got) } } } }) } } func TestNewCreateRequest(t *testing.T) { tests := []struct { name string from string opts runOptions expected *api.CreateRequest }{ { "basic test", "newmodel", runOptions{ Model: "mymodel", ParentModel: "", Prompt: "You are a fun AI agent", Messages: []api.Message{}, WordWrap: true, }, &api.CreateRequest{ From: "mymodel", Model: "newmodel", }, }, { "parent model test", "newmodel", runOptions{ Model: "mymodel", ParentModel: "parentmodel",
go
MIT
626af2d80973270c4d59b8df7153ac47ad67ed7b
2026-01-07T08:35:43.337630Z
true
ollama/ollama
https://github.com/ollama/ollama/blob/626af2d80973270c4d59b8df7153ac47ad67ed7b/cmd/interactive.go
cmd/interactive.go
package cmd import ( "cmp" "errors" "fmt" "io" "net/http" "os" "path/filepath" "regexp" "slices" "strings" "github.com/spf13/cobra" "github.com/ollama/ollama/api" "github.com/ollama/ollama/envconfig" "github.com/ollama/ollama/readline" "github.com/ollama/ollama/types/errtypes" "github.com/ollama/ollama/types/model" ) type MultilineState int const ( MultilineNone MultilineState = iota MultilinePrompt MultilineSystem ) func generateInteractive(cmd *cobra.Command, opts runOptions) error { usage := func() { fmt.Fprintln(os.Stderr, "Available Commands:") fmt.Fprintln(os.Stderr, " /set Set session variables") fmt.Fprintln(os.Stderr, " /show Show model information") fmt.Fprintln(os.Stderr, " /load <model> Load a session or model") fmt.Fprintln(os.Stderr, " /save <model> Save your current session") fmt.Fprintln(os.Stderr, " /clear Clear session context") fmt.Fprintln(os.Stderr, " /bye Exit") fmt.Fprintln(os.Stderr, " /?, /help Help for a command") fmt.Fprintln(os.Stderr, " /? shortcuts Help for keyboard shortcuts") fmt.Fprintln(os.Stderr, "") fmt.Fprintln(os.Stderr, "Use \"\"\" to begin a multi-line message.") if opts.MultiModal { fmt.Fprintf(os.Stderr, "Use %s to include .jpg, .png, or .webp images.\n", filepath.FromSlash("/path/to/file")) } fmt.Fprintln(os.Stderr, "") } usageSet := func() { fmt.Fprintln(os.Stderr, "Available Commands:") fmt.Fprintln(os.Stderr, " /set parameter ... 
Set a parameter") fmt.Fprintln(os.Stderr, " /set system <string> Set system message") fmt.Fprintln(os.Stderr, " /set history Enable history") fmt.Fprintln(os.Stderr, " /set nohistory Disable history") fmt.Fprintln(os.Stderr, " /set wordwrap Enable wordwrap") fmt.Fprintln(os.Stderr, " /set nowordwrap Disable wordwrap") fmt.Fprintln(os.Stderr, " /set format json Enable JSON mode") fmt.Fprintln(os.Stderr, " /set noformat Disable formatting") fmt.Fprintln(os.Stderr, " /set verbose Show LLM stats") fmt.Fprintln(os.Stderr, " /set quiet Disable LLM stats") fmt.Fprintln(os.Stderr, " /set think Enable thinking") fmt.Fprintln(os.Stderr, " /set nothink Disable thinking") fmt.Fprintln(os.Stderr, "") } usageShortcuts := func() { fmt.Fprintln(os.Stderr, "Available keyboard shortcuts:") fmt.Fprintln(os.Stderr, " Ctrl + a Move to the beginning of the line (Home)") fmt.Fprintln(os.Stderr, " Ctrl + e Move to the end of the line (End)") fmt.Fprintln(os.Stderr, " Alt + b Move back (left) one word") fmt.Fprintln(os.Stderr, " Alt + f Move forward (right) one word") fmt.Fprintln(os.Stderr, " Ctrl + k Delete the sentence after the cursor") fmt.Fprintln(os.Stderr, " Ctrl + u Delete the sentence before the cursor") fmt.Fprintln(os.Stderr, " Ctrl + w Delete the word before the cursor") fmt.Fprintln(os.Stderr, "") fmt.Fprintln(os.Stderr, " Ctrl + l Clear the screen") fmt.Fprintln(os.Stderr, " Ctrl + c Stop the model from responding") fmt.Fprintln(os.Stderr, " Ctrl + d Exit ollama (/bye)") fmt.Fprintln(os.Stderr, "") } usageShow := func() { fmt.Fprintln(os.Stderr, "Available Commands:") fmt.Fprintln(os.Stderr, " /show info Show details for this model") fmt.Fprintln(os.Stderr, " /show license Show model license") fmt.Fprintln(os.Stderr, " /show modelfile Show Modelfile for this model") fmt.Fprintln(os.Stderr, " /show parameters Show parameters for this model") fmt.Fprintln(os.Stderr, " /show system Show system message") fmt.Fprintln(os.Stderr, " /show template Show prompt template") 
fmt.Fprintln(os.Stderr, "") } // only list out the most common parameters usageParameters := func() { fmt.Fprintln(os.Stderr, "Available Parameters:") fmt.Fprintln(os.Stderr, " /set parameter seed <int> Random number seed") fmt.Fprintln(os.Stderr, " /set parameter num_predict <int> Max number of tokens to predict") fmt.Fprintln(os.Stderr, " /set parameter top_k <int> Pick from top k num of tokens") fmt.Fprintln(os.Stderr, " /set parameter top_p <float> Pick token based on sum of probabilities") fmt.Fprintln(os.Stderr, " /set parameter min_p <float> Pick token based on top token probability * min_p") fmt.Fprintln(os.Stderr, " /set parameter num_ctx <int> Set the context size") fmt.Fprintln(os.Stderr, " /set parameter temperature <float> Set creativity level") fmt.Fprintln(os.Stderr, " /set parameter repeat_penalty <float> How strongly to penalize repetitions") fmt.Fprintln(os.Stderr, " /set parameter repeat_last_n <int> Set how far back to look for repetitions") fmt.Fprintln(os.Stderr, " /set parameter num_gpu <int> The number of layers to send to the GPU") fmt.Fprintln(os.Stderr, " /set parameter stop <string> <string> ... Set the stop parameters") fmt.Fprintln(os.Stderr, "") } scanner, err := readline.New(readline.Prompt{ Prompt: ">>> ", AltPrompt: "... ", Placeholder: "Send a message (/? 
for help)", AltPlaceholder: `Use """ to end multi-line input`, }) if err != nil { return err } if envconfig.NoHistory() { scanner.HistoryDisable() } fmt.Print(readline.StartBracketedPaste) defer fmt.Printf(readline.EndBracketedPaste) var sb strings.Builder var multiline MultilineState var thinkExplicitlySet bool = opts.Think != nil for { line, err := scanner.Readline() switch { case errors.Is(err, io.EOF): fmt.Println() return nil case errors.Is(err, readline.ErrInterrupt): if line == "" { fmt.Println("\nUse Ctrl + d or /bye to exit.") } scanner.Prompt.UseAlt = false sb.Reset() continue case err != nil: return err } switch { case multiline != MultilineNone: // check if there's a multiline terminating string before, ok := strings.CutSuffix(line, `"""`) sb.WriteString(before) if !ok { fmt.Fprintln(&sb) continue } switch multiline { case MultilineSystem: opts.System = sb.String() opts.Messages = append(opts.Messages, api.Message{Role: "system", Content: opts.System}) fmt.Println("Set system message.") sb.Reset() } multiline = MultilineNone scanner.Prompt.UseAlt = false case strings.HasPrefix(line, `"""`): line := strings.TrimPrefix(line, `"""`) line, ok := strings.CutSuffix(line, `"""`) sb.WriteString(line) if !ok { // no multiline terminating string; need more input fmt.Fprintln(&sb) multiline = MultilinePrompt scanner.Prompt.UseAlt = true } case scanner.Pasting: fmt.Fprintln(&sb, line) continue case strings.HasPrefix(line, "/list"): args := strings.Fields(line) if err := ListHandler(cmd, args[1:]); err != nil { return err } case strings.HasPrefix(line, "/load"): args := strings.Fields(line) if len(args) != 2 { fmt.Println("Usage:\n /load <modelname>") continue } origOpts := opts.Copy() opts.Model = args[1] opts.Messages = []api.Message{} fmt.Printf("Loading model '%s'\n", opts.Model) opts.Think, err = inferThinkingOption(nil, &opts, thinkExplicitlySet) if err != nil { if strings.Contains(err.Error(), "not found") { fmt.Printf("Couldn't find model '%s'\n", 
opts.Model) opts = origOpts.Copy() continue } return err } if err := loadOrUnloadModel(cmd, &opts); err != nil { if strings.Contains(err.Error(), "not found") { fmt.Printf("Couldn't find model '%s'\n", opts.Model) opts = origOpts.Copy() continue } if strings.Contains(err.Error(), "does not support thinking") { fmt.Printf("error: %v\n", err) continue } return err } continue case strings.HasPrefix(line, "/save"): args := strings.Fields(line) if len(args) != 2 { fmt.Println("Usage:\n /save <modelname>") continue } client, err := api.ClientFromEnvironment() if err != nil { fmt.Println("error: couldn't connect to ollama server") return err } req := NewCreateRequest(args[1], opts) fn := func(resp api.ProgressResponse) error { return nil } err = client.Create(cmd.Context(), req, fn) if err != nil { if strings.Contains(err.Error(), errtypes.InvalidModelNameErrMsg) { fmt.Printf("error: The model name '%s' is invalid\n", args[1]) continue } return err } fmt.Printf("Created new model '%s'\n", args[1]) continue case strings.HasPrefix(line, "/clear"): opts.Messages = []api.Message{} if opts.System != "" { newMessage := api.Message{Role: "system", Content: opts.System} opts.Messages = append(opts.Messages, newMessage) } fmt.Println("Cleared session context") continue case strings.HasPrefix(line, "/set"): args := strings.Fields(line) if len(args) > 1 { switch args[1] { case "history": scanner.HistoryEnable() case "nohistory": scanner.HistoryDisable() case "wordwrap": opts.WordWrap = true fmt.Println("Set 'wordwrap' mode.") case "nowordwrap": opts.WordWrap = false fmt.Println("Set 'nowordwrap' mode.") case "verbose": if err := cmd.Flags().Set("verbose", "true"); err != nil { return err } fmt.Println("Set 'verbose' mode.") case "quiet": if err := cmd.Flags().Set("verbose", "false"); err != nil { return err } fmt.Println("Set 'quiet' mode.") case "think": thinkValue := api.ThinkValue{Value: true} var maybeLevel string if len(args) > 2 { maybeLevel = args[2] } if maybeLevel != "" { 
// TODO(drifkin): validate the level, could be model dependent // though... It will also be validated on the server once a call is // made. thinkValue.Value = maybeLevel } opts.Think = &thinkValue thinkExplicitlySet = true if client, err := api.ClientFromEnvironment(); err == nil { ensureThinkingSupport(cmd.Context(), client, opts.Model) } if maybeLevel != "" { fmt.Printf("Set 'think' mode to '%s'.\n", maybeLevel) } else { fmt.Println("Set 'think' mode.") } case "nothink": opts.Think = &api.ThinkValue{Value: false} thinkExplicitlySet = true if client, err := api.ClientFromEnvironment(); err == nil { ensureThinkingSupport(cmd.Context(), client, opts.Model) } fmt.Println("Set 'nothink' mode.") case "format": if len(args) < 3 || args[2] != "json" { fmt.Println("Invalid or missing format. For 'json' mode use '/set format json'") } else { opts.Format = args[2] fmt.Printf("Set format to '%s' mode.\n", args[2]) } case "noformat": opts.Format = "" fmt.Println("Disabled format.") case "parameter": if len(args) < 4 { usageParameters() continue } params := args[3:] fp, err := api.FormatParams(map[string][]string{args[2]: params}) if err != nil { fmt.Printf("Couldn't set parameter: %q\n", err) continue } fmt.Printf("Set parameter '%s' to '%s'\n", args[2], strings.Join(params, ", ")) opts.Options[args[2]] = fp[args[2]] case "system": if len(args) < 3 { usageSet() continue } multiline = MultilineSystem line := strings.Join(args[2:], " ") line, ok := strings.CutPrefix(line, `"""`) if !ok { multiline = MultilineNone } else { // only cut suffix if the line is multiline line, ok = strings.CutSuffix(line, `"""`) if ok { multiline = MultilineNone } } sb.WriteString(line) if multiline != MultilineNone { scanner.Prompt.UseAlt = true continue } opts.System = sb.String() // for display in modelfile newMessage := api.Message{Role: "system", Content: sb.String()} // Check if the slice is not empty and the last message is from 'system' if len(opts.Messages) > 0 && 
opts.Messages[len(opts.Messages)-1].Role == "system" { // Replace the last message opts.Messages[len(opts.Messages)-1] = newMessage } else { opts.Messages = append(opts.Messages, newMessage) } fmt.Println("Set system message.") sb.Reset() continue default: fmt.Printf("Unknown command '/set %s'. Type /? for help\n", args[1]) } } else { usageSet() } case strings.HasPrefix(line, "/show"): args := strings.Fields(line) if len(args) > 1 { client, err := api.ClientFromEnvironment() if err != nil { fmt.Println("error: couldn't connect to ollama server") return err } req := &api.ShowRequest{ Name: opts.Model, System: opts.System, Options: opts.Options, } resp, err := client.Show(cmd.Context(), req) if err != nil { fmt.Println("error: couldn't get model") return err } switch args[1] { case "info": _ = showInfo(resp, false, os.Stderr) case "license": if resp.License == "" { fmt.Println("No license was specified for this model.") } else { fmt.Println(resp.License) } case "modelfile": fmt.Println(resp.Modelfile) case "parameters": fmt.Println("Model defined parameters:") if resp.Parameters == "" { fmt.Println(" No additional parameters were specified for this model.") } else { for _, l := range strings.Split(resp.Parameters, "\n") { fmt.Printf(" %s\n", l) } } fmt.Println() if len(opts.Options) > 0 { fmt.Println("User defined parameters:") for k, v := range opts.Options { fmt.Printf(" %-*s %v\n", 30, k, v) } fmt.Println() } case "system": switch { case opts.System != "": fmt.Println(opts.System + "\n") case resp.System != "": fmt.Println(resp.System + "\n") default: fmt.Println("No system message was specified for this model.") } case "template": if resp.Template != "" { fmt.Println(resp.Template) } else { fmt.Println("No prompt template was specified for this model.") } default: fmt.Printf("Unknown command '/show %s'. Type /? 
for help\n", args[1]) } } else { usageShow() } case strings.HasPrefix(line, "/help"), strings.HasPrefix(line, "/?"): args := strings.Fields(line) if len(args) > 1 { switch args[1] { case "set", "/set": usageSet() case "show", "/show": usageShow() case "shortcut", "shortcuts": usageShortcuts() } } else { usage() } case strings.HasPrefix(line, "/exit"), strings.HasPrefix(line, "/bye"): return nil case strings.HasPrefix(line, "/"): args := strings.Fields(line) isFile := false if opts.MultiModal { for _, f := range extractFileNames(line) { if strings.HasPrefix(f, args[0]) { isFile = true break } } } if !isFile { fmt.Printf("Unknown command '%s'. Type /? for help\n", args[0]) continue } sb.WriteString(line) default: sb.WriteString(line) } if sb.Len() > 0 && multiline == MultilineNone { newMessage := api.Message{Role: "user", Content: sb.String()} if opts.MultiModal { msg, images, err := extractFileData(sb.String()) if err != nil { return err } newMessage.Content = msg newMessage.Images = images } opts.Messages = append(opts.Messages, newMessage) assistant, err := chat(cmd, opts) if err != nil { if strings.Contains(err.Error(), "does not support thinking") || strings.Contains(err.Error(), "invalid think value") { fmt.Printf("error: %v\n", err) sb.Reset() continue } return err } if assistant != nil { opts.Messages = append(opts.Messages, *assistant) } sb.Reset() } } } func NewCreateRequest(name string, opts runOptions) *api.CreateRequest { parentModel := opts.ParentModel modelName := model.ParseName(parentModel) if !modelName.IsValid() { parentModel = "" } req := &api.CreateRequest{ Model: name, From: cmp.Or(parentModel, opts.Model), } if opts.System != "" { req.System = opts.System } if len(opts.Options) > 0 { req.Parameters = opts.Options } if len(opts.Messages) > 0 { req.Messages = opts.Messages } return req } func normalizeFilePath(fp string) string { return strings.NewReplacer( "\\ ", " ", // Escaped space "\\(", "(", // Escaped left parenthesis "\\)", ")", // 
Escaped right parenthesis "\\[", "[", // Escaped left square bracket "\\]", "]", // Escaped right square bracket "\\{", "{", // Escaped left curly brace "\\}", "}", // Escaped right curly brace "\\$", "$", // Escaped dollar sign "\\&", "&", // Escaped ampersand "\\;", ";", // Escaped semicolon "\\'", "'", // Escaped single quote "\\\\", "\\", // Escaped backslash "\\*", "*", // Escaped asterisk "\\?", "?", // Escaped question mark "\\~", "~", // Escaped tilde ).Replace(fp) } func extractFileNames(input string) []string { // Regex to match file paths starting with optional drive letter, / ./ \ or .\ and include escaped or unescaped spaces (\ or %20) // and followed by more characters and a file extension // This will capture non filename strings, but we'll check for file existence to remove mismatches regexPattern := `(?:[a-zA-Z]:)?(?:\./|/|\\)[\S\\ ]+?\.(?i:jpg|jpeg|png|webp)\b` re := regexp.MustCompile(regexPattern) return re.FindAllString(input, -1) } func extractFileData(input string) (string, []api.ImageData, error) { filePaths := extractFileNames(input) var imgs []api.ImageData for _, fp := range filePaths { nfp := normalizeFilePath(fp) data, err := getImageData(nfp) if errors.Is(err, os.ErrNotExist) { continue } else if err != nil { fmt.Fprintf(os.Stderr, "Couldn't process image: %q\n", err) return "", imgs, err } fmt.Fprintf(os.Stderr, "Added image '%s'\n", nfp) input = strings.ReplaceAll(input, "'"+nfp+"'", "") input = strings.ReplaceAll(input, "'"+fp+"'", "") input = strings.ReplaceAll(input, fp, "") imgs = append(imgs, data) } return strings.TrimSpace(input), imgs, nil } func getImageData(filePath string) ([]byte, error) { file, err := os.Open(filePath) if err != nil { return nil, err } defer file.Close() buf := make([]byte, 512) _, err = file.Read(buf) if err != nil { return nil, err } contentType := http.DetectContentType(buf) allowedTypes := []string{"image/jpeg", "image/jpg", "image/png", "image/webp"} if !slices.Contains(allowedTypes, contentType) { 
return nil, fmt.Errorf("invalid image type: %s", contentType) } info, err := file.Stat() if err != nil { return nil, err } // Check if the file size exceeds 100MB var maxSize int64 = 100 * 1024 * 1024 // 100MB in bytes if info.Size() > maxSize { return nil, errors.New("file size exceeds maximum limit (100MB)") } buf = make([]byte, info.Size()) _, err = file.Seek(0, 0) if err != nil { return nil, err } _, err = io.ReadFull(file, buf) if err != nil { return nil, err } return buf, nil }
go
MIT
626af2d80973270c4d59b8df7153ac47ad67ed7b
2026-01-07T08:35:43.337630Z
false
ollama/ollama
https://github.com/ollama/ollama/blob/626af2d80973270c4d59b8df7153ac47ad67ed7b/cmd/start_default.go
cmd/start_default.go
//go:build !windows && !darwin package cmd import ( "context" "errors" "github.com/ollama/ollama/api" ) func startApp(ctx context.Context, client *api.Client) error { return errors.New("could not connect to ollama server, run 'ollama serve' to start it") }
go
MIT
626af2d80973270c4d59b8df7153ac47ad67ed7b
2026-01-07T08:35:43.337630Z
false
ollama/ollama
https://github.com/ollama/ollama/blob/626af2d80973270c4d59b8df7153ac47ad67ed7b/cmd/cmd.go
cmd/cmd.go
package cmd import ( "bufio" "context" "crypto/ed25519" "crypto/rand" "encoding/json" "encoding/pem" "errors" "fmt" "io" "log" "math" "net" "net/http" "os" "os/signal" "path/filepath" "runtime" "slices" "sort" "strconv" "strings" "sync/atomic" "syscall" "time" "github.com/containerd/console" "github.com/mattn/go-runewidth" "github.com/olekukonko/tablewriter" "github.com/spf13/cobra" "golang.org/x/crypto/ssh" "golang.org/x/sync/errgroup" "golang.org/x/term" "github.com/ollama/ollama/api" "github.com/ollama/ollama/envconfig" "github.com/ollama/ollama/format" "github.com/ollama/ollama/parser" "github.com/ollama/ollama/progress" "github.com/ollama/ollama/readline" "github.com/ollama/ollama/runner" "github.com/ollama/ollama/server" "github.com/ollama/ollama/types/model" "github.com/ollama/ollama/types/syncmap" "github.com/ollama/ollama/version" xcmd "github.com/ollama/ollama/x/cmd" ) const ConnectInstructions = "To sign in, navigate to:\n %s\n\n" // ensureThinkingSupport emits a warning if the model does not advertise thinking support func ensureThinkingSupport(ctx context.Context, client *api.Client, name string) { if name == "" { return } resp, err := client.Show(ctx, &api.ShowRequest{Model: name}) if err != nil { return } if slices.Contains(resp.Capabilities, model.CapabilityThinking) { return } fmt.Fprintf(os.Stderr, "warning: model %q does not support thinking output\n", name) } var errModelfileNotFound = errors.New("specified Modelfile wasn't found") func getModelfileName(cmd *cobra.Command) (string, error) { filename, _ := cmd.Flags().GetString("file") if filename == "" { filename = "Modelfile" } absName, err := filepath.Abs(filename) if err != nil { return "", err } _, err = os.Stat(absName) if err != nil { return "", err } return absName, nil } func CreateHandler(cmd *cobra.Command, args []string) error { p := progress.NewProgress(os.Stderr) defer p.Stop() var reader io.Reader filename, err := getModelfileName(cmd) if os.IsNotExist(err) { if filename == "" { 
reader = strings.NewReader("FROM .\n") } else { return errModelfileNotFound } } else if err != nil { return err } else { f, err := os.Open(filename) if err != nil { return err } reader = f defer f.Close() } modelfile, err := parser.ParseFile(reader) if err != nil { return err } status := "gathering model components" spinner := progress.NewSpinner(status) p.Add(status, spinner) req, err := modelfile.CreateRequest(filepath.Dir(filename)) if err != nil { return err } spinner.Stop() req.Model = args[0] quantize, _ := cmd.Flags().GetString("quantize") if quantize != "" { req.Quantize = quantize } client, err := api.ClientFromEnvironment() if err != nil { return err } var g errgroup.Group g.SetLimit(max(runtime.GOMAXPROCS(0)-1, 1)) files := syncmap.NewSyncMap[string, string]() for f, digest := range req.Files { g.Go(func() error { if _, err := createBlob(cmd, client, f, digest, p); err != nil { return err } // TODO: this is incorrect since the file might be in a subdirectory // instead this should take the path relative to the model directory // but the current implementation does not allow this files.Store(filepath.Base(f), digest) return nil }) } adapters := syncmap.NewSyncMap[string, string]() for f, digest := range req.Adapters { g.Go(func() error { if _, err := createBlob(cmd, client, f, digest, p); err != nil { return err } // TODO: same here adapters.Store(filepath.Base(f), digest) return nil }) } if err := g.Wait(); err != nil { return err } req.Files = files.Items() req.Adapters = adapters.Items() bars := make(map[string]*progress.Bar) fn := func(resp api.ProgressResponse) error { if resp.Digest != "" { bar, ok := bars[resp.Digest] if !ok { msg := resp.Status if msg == "" { msg = fmt.Sprintf("pulling %s...", resp.Digest[7:19]) } bar = progress.NewBar(msg, resp.Total, resp.Completed) bars[resp.Digest] = bar p.Add(resp.Digest, bar) } bar.Set(resp.Completed) } else if status != resp.Status { spinner.Stop() status = resp.Status spinner = progress.NewSpinner(status) 
p.Add(status, spinner) } return nil } if err := client.Create(cmd.Context(), req, fn); err != nil { if strings.Contains(err.Error(), "path or Modelfile are required") { return fmt.Errorf("the ollama server must be updated to use `ollama create` with this client") } return err } return nil } func createBlob(cmd *cobra.Command, client *api.Client, path string, digest string, p *progress.Progress) (string, error) { realPath, err := filepath.EvalSymlinks(path) if err != nil { return "", err } bin, err := os.Open(realPath) if err != nil { return "", err } defer bin.Close() // Get file info to retrieve the size fileInfo, err := bin.Stat() if err != nil { return "", err } fileSize := fileInfo.Size() var pw progressWriter status := fmt.Sprintf("copying file %s 0%%", digest) spinner := progress.NewSpinner(status) p.Add(status, spinner) defer spinner.Stop() done := make(chan struct{}) defer close(done) go func() { ticker := time.NewTicker(60 * time.Millisecond) defer ticker.Stop() for { select { case <-ticker.C: spinner.SetMessage(fmt.Sprintf("copying file %s %d%%", digest, int(100*pw.n.Load()/fileSize))) case <-done: spinner.SetMessage(fmt.Sprintf("copying file %s 100%%", digest)) return } } }() if err := client.CreateBlob(cmd.Context(), digest, io.TeeReader(bin, &pw)); err != nil { return "", err } return digest, nil } type progressWriter struct { n atomic.Int64 } func (w *progressWriter) Write(p []byte) (n int, err error) { w.n.Add(int64(len(p))) return len(p), nil } func loadOrUnloadModel(cmd *cobra.Command, opts *runOptions) error { p := progress.NewProgress(os.Stderr) defer p.StopAndClear() spinner := progress.NewSpinner("") p.Add("", spinner) client, err := api.ClientFromEnvironment() if err != nil { return err } if info, err := client.Show(cmd.Context(), &api.ShowRequest{Model: opts.Model}); err != nil { return err } else if info.RemoteHost != "" { // Cloud model, no need to load/unload if opts.ShowConnect { p.StopAndClear() if strings.HasPrefix(info.RemoteHost, 
"https://ollama.com") { fmt.Fprintf(os.Stderr, "Connecting to '%s' on 'ollama.com' ⚡\n", info.RemoteModel) } else { fmt.Fprintf(os.Stderr, "Connecting to '%s' on '%s'\n", info.RemoteModel, info.RemoteHost) } } return nil } req := &api.GenerateRequest{ Model: opts.Model, KeepAlive: opts.KeepAlive, // pass Think here so we fail before getting to the chat prompt if the model doesn't support it Think: opts.Think, } return client.Generate(cmd.Context(), req, func(r api.GenerateResponse) error { return nil }) } func StopHandler(cmd *cobra.Command, args []string) error { opts := &runOptions{ Model: args[0], KeepAlive: &api.Duration{Duration: 0}, } if err := loadOrUnloadModel(cmd, opts); err != nil { if strings.Contains(err.Error(), "not found") { return fmt.Errorf("couldn't find model \"%s\" to stop", args[0]) } return err } return nil } func generateEmbedding(cmd *cobra.Command, modelName, input string, keepAlive *api.Duration, truncate *bool, dimensions int) error { client, err := api.ClientFromEnvironment() if err != nil { return err } req := &api.EmbedRequest{ Model: modelName, Input: input, } if keepAlive != nil { req.KeepAlive = keepAlive } if truncate != nil { req.Truncate = truncate } if dimensions > 0 { req.Dimensions = dimensions } resp, err := client.Embed(cmd.Context(), req) if err != nil { return err } if len(resp.Embeddings) == 0 { return errors.New("no embeddings returned") } output, err := json.Marshal(resp.Embeddings[0]) if err != nil { return err } fmt.Println(string(output)) return nil } func RunHandler(cmd *cobra.Command, args []string) error { interactive := true opts := runOptions{ Model: args[0], WordWrap: os.Getenv("TERM") == "xterm-256color", Options: map[string]any{}, ShowConnect: true, } format, err := cmd.Flags().GetString("format") if err != nil { return err } opts.Format = format thinkFlag := cmd.Flags().Lookup("think") if thinkFlag.Changed { thinkStr, err := cmd.Flags().GetString("think") if err != nil { return err } // Handle different 
values for --think switch thinkStr { case "", "true": // --think or --think=true opts.Think = &api.ThinkValue{Value: true} case "false": opts.Think = &api.ThinkValue{Value: false} case "high", "medium", "low": opts.Think = &api.ThinkValue{Value: thinkStr} default: return fmt.Errorf("invalid value for --think: %q (must be true, false, high, medium, or low)", thinkStr) } } else { opts.Think = nil } hidethinking, err := cmd.Flags().GetBool("hidethinking") if err != nil { return err } opts.HideThinking = hidethinking keepAlive, err := cmd.Flags().GetString("keepalive") if err != nil { return err } if keepAlive != "" { d, err := time.ParseDuration(keepAlive) if err != nil { return err } opts.KeepAlive = &api.Duration{Duration: d} } prompts := args[1:] // prepend stdin to the prompt if provided if !term.IsTerminal(int(os.Stdin.Fd())) { in, err := io.ReadAll(os.Stdin) if err != nil { return err } // Only prepend stdin content if it's not empty stdinContent := string(in) if len(stdinContent) > 0 { prompts = append([]string{stdinContent}, prompts...) } opts.ShowConnect = false opts.WordWrap = false interactive = false } opts.Prompt = strings.Join(prompts, " ") if len(prompts) > 0 { interactive = false } // Be quiet if we're redirecting to a pipe or file if !term.IsTerminal(int(os.Stdout.Fd())) { interactive = false } nowrap, err := cmd.Flags().GetBool("nowordwrap") if err != nil { return err } opts.WordWrap = !nowrap // Fill out the rest of the options based on information about the // model. 
client, err := api.ClientFromEnvironment() if err != nil { return err } name := args[0] info, err := func() (*api.ShowResponse, error) { showReq := &api.ShowRequest{Name: name} info, err := client.Show(cmd.Context(), showReq) var se api.StatusError if errors.As(err, &se) && se.StatusCode == http.StatusNotFound { if err := PullHandler(cmd, []string{name}); err != nil { return nil, err } return client.Show(cmd.Context(), &api.ShowRequest{Name: name}) } return info, err }() if err != nil { return err } opts.Think, err = inferThinkingOption(&info.Capabilities, &opts, thinkFlag.Changed) if err != nil { return err } opts.MultiModal = slices.Contains(info.Capabilities, model.CapabilityVision) // TODO: remove the projector info and vision info checks below, // these are left in for backwards compatibility with older servers // that don't have the capabilities field in the model info if len(info.ProjectorInfo) != 0 { opts.MultiModal = true } for k := range info.ModelInfo { if strings.Contains(k, ".vision.") { opts.MultiModal = true break } } opts.ParentModel = info.Details.ParentModel // Check if this is an embedding model isEmbeddingModel := slices.Contains(info.Capabilities, model.CapabilityEmbedding) // If it's an embedding model, handle embedding generation if isEmbeddingModel { if opts.Prompt == "" { return errors.New("embedding models require input text. 
Usage: ollama run " + name + " \"your text here\"") } // Get embedding-specific flags var truncate *bool if truncateFlag, err := cmd.Flags().GetBool("truncate"); err == nil && cmd.Flags().Changed("truncate") { truncate = &truncateFlag } dimensions, err := cmd.Flags().GetInt("dimensions") if err != nil { return err } return generateEmbedding(cmd, name, opts.Prompt, opts.KeepAlive, truncate, dimensions) } // Check for experimental flag isExperimental, _ := cmd.Flags().GetBool("experimental") if interactive { if err := loadOrUnloadModel(cmd, &opts); err != nil { var sErr api.AuthorizationError if errors.As(err, &sErr) && sErr.StatusCode == http.StatusUnauthorized { fmt.Printf("You need to be signed in to Ollama to run Cloud models.\n\n") if sErr.SigninURL != "" { fmt.Printf(ConnectInstructions, sErr.SigninURL) } return nil } return err } for _, msg := range info.Messages { switch msg.Role { case "user": fmt.Printf(">>> %s\n", msg.Content) case "assistant": state := &displayResponseState{} displayResponse(msg.Content, opts.WordWrap, state) fmt.Println() fmt.Println() } } // Use experimental agent loop with if isExperimental { return xcmd.GenerateInteractive(cmd, opts.Model, opts.WordWrap, opts.Options, opts.Think, opts.HideThinking, opts.KeepAlive) } return generateInteractive(cmd, opts) } return generate(cmd, opts) } func SigninHandler(cmd *cobra.Command, args []string) error { client, err := api.ClientFromEnvironment() if err != nil { return err } user, err := client.Whoami(cmd.Context()) if err != nil { var aErr api.AuthorizationError if errors.As(err, &aErr) && aErr.StatusCode == http.StatusUnauthorized { fmt.Println("You need to be signed in to Ollama to run Cloud models.") fmt.Println() if aErr.SigninURL != "" { fmt.Printf(ConnectInstructions, aErr.SigninURL) } return nil } return err } if user != nil && user.Name != "" { fmt.Printf("You are already signed in as user '%s'\n", user.Name) fmt.Println() return nil } return nil } func SignoutHandler(cmd 
*cobra.Command, args []string) error { client, err := api.ClientFromEnvironment() if err != nil { return err } err = client.Signout(cmd.Context()) if err != nil { var aErr api.AuthorizationError if errors.As(err, &aErr) && aErr.StatusCode == http.StatusUnauthorized { fmt.Println("You are not signed in to ollama.com") fmt.Println() return nil } else { return err } } fmt.Println("You have signed out of ollama.com") fmt.Println() return nil } func PushHandler(cmd *cobra.Command, args []string) error { client, err := api.ClientFromEnvironment() if err != nil { return err } insecure, err := cmd.Flags().GetBool("insecure") if err != nil { return err } n := model.ParseName(args[0]) if strings.HasSuffix(n.Host, ".ollama.ai") || strings.HasSuffix(n.Host, ".ollama.com") { _, err := client.Whoami(cmd.Context()) if err != nil { var aErr api.AuthorizationError if errors.As(err, &aErr) && aErr.StatusCode == http.StatusUnauthorized { fmt.Println("You need to be signed in to push models to ollama.com.") fmt.Println() if aErr.SigninURL != "" { fmt.Printf(ConnectInstructions, aErr.SigninURL) } return nil } return err } } p := progress.NewProgress(os.Stderr) defer p.Stop() bars := make(map[string]*progress.Bar) var status string var spinner *progress.Spinner fn := func(resp api.ProgressResponse) error { if resp.Digest != "" { if spinner != nil { spinner.Stop() } bar, ok := bars[resp.Digest] if !ok { bar = progress.NewBar(fmt.Sprintf("pushing %s...", resp.Digest[7:19]), resp.Total, resp.Completed) bars[resp.Digest] = bar p.Add(resp.Digest, bar) } bar.Set(resp.Completed) } else if status != resp.Status { if spinner != nil { spinner.Stop() } status = resp.Status spinner = progress.NewSpinner(status) p.Add(status, spinner) } return nil } request := api.PushRequest{Name: args[0], Insecure: insecure} if err := client.Push(cmd.Context(), &request, fn); err != nil { if spinner != nil { spinner.Stop() } errStr := strings.ToLower(err.Error()) if strings.Contains(errStr, "access denied") || 
strings.Contains(errStr, "unauthorized") { return errors.New("you are not authorized to push to this namespace, create the model under a namespace you own") } return err } p.Stop() spinner.Stop() destination := n.String() if strings.HasSuffix(n.Host, ".ollama.ai") || strings.HasSuffix(n.Host, ".ollama.com") { destination = "https://ollama.com/" + strings.TrimSuffix(n.DisplayShortest(), ":latest") } fmt.Printf("\nYou can find your model at:\n\n") fmt.Printf("\t%s\n", destination) return nil } func ListHandler(cmd *cobra.Command, args []string) error { client, err := api.ClientFromEnvironment() if err != nil { return err } models, err := client.List(cmd.Context()) if err != nil { return err } var data [][]string for _, m := range models.Models { if len(args) == 0 || strings.HasPrefix(strings.ToLower(m.Name), strings.ToLower(args[0])) { var size string if m.RemoteModel != "" { size = "-" } else { size = format.HumanBytes(m.Size) } data = append(data, []string{m.Name, m.Digest[:12], size, format.HumanTime(m.ModifiedAt, "Never")}) } } table := tablewriter.NewWriter(os.Stdout) table.SetHeader([]string{"NAME", "ID", "SIZE", "MODIFIED"}) table.SetHeaderAlignment(tablewriter.ALIGN_LEFT) table.SetAlignment(tablewriter.ALIGN_LEFT) table.SetHeaderLine(false) table.SetBorder(false) table.SetNoWhiteSpace(true) table.SetTablePadding(" ") table.AppendBulk(data) table.Render() return nil } func ListRunningHandler(cmd *cobra.Command, args []string) error { client, err := api.ClientFromEnvironment() if err != nil { return err } models, err := client.ListRunning(cmd.Context()) if err != nil { return err } var data [][]string for _, m := range models.Models { if len(args) == 0 || strings.HasPrefix(m.Name, args[0]) { var procStr string switch { case m.SizeVRAM == 0: procStr = "100% CPU" case m.SizeVRAM == m.Size: procStr = "100% GPU" case m.SizeVRAM > m.Size || m.Size == 0: procStr = "Unknown" default: sizeCPU := m.Size - m.SizeVRAM cpuPercent := math.Round(float64(sizeCPU) / 
float64(m.Size) * 100) procStr = fmt.Sprintf("%d%%/%d%% CPU/GPU", int(cpuPercent), int(100-cpuPercent)) } var until string delta := time.Since(m.ExpiresAt) if delta > 0 { until = "Stopping..." } else { until = format.HumanTime(m.ExpiresAt, "Never") } ctxStr := strconv.Itoa(m.ContextLength) data = append(data, []string{m.Name, m.Digest[:12], format.HumanBytes(m.Size), procStr, ctxStr, until}) } } table := tablewriter.NewWriter(os.Stdout) table.SetHeader([]string{"NAME", "ID", "SIZE", "PROCESSOR", "CONTEXT", "UNTIL"}) table.SetHeaderAlignment(tablewriter.ALIGN_LEFT) table.SetAlignment(tablewriter.ALIGN_LEFT) table.SetHeaderLine(false) table.SetBorder(false) table.SetNoWhiteSpace(true) table.SetTablePadding(" ") table.AppendBulk(data) table.Render() return nil } func DeleteHandler(cmd *cobra.Command, args []string) error { client, err := api.ClientFromEnvironment() if err != nil { return err } for _, arg := range args { // Unload the model if it's running before deletion if err := loadOrUnloadModel(cmd, &runOptions{ Model: args[0], KeepAlive: &api.Duration{Duration: 0}, }); err != nil { if !strings.Contains(strings.ToLower(err.Error()), "not found") { fmt.Fprintf(os.Stderr, "Warning: unable to stop model '%s'\n", args[0]) } } if err := client.Delete(cmd.Context(), &api.DeleteRequest{Name: arg}); err != nil { return err } fmt.Printf("deleted '%s'\n", arg) } return nil } func ShowHandler(cmd *cobra.Command, args []string) error { client, err := api.ClientFromEnvironment() if err != nil { return err } license, errLicense := cmd.Flags().GetBool("license") modelfile, errModelfile := cmd.Flags().GetBool("modelfile") parameters, errParams := cmd.Flags().GetBool("parameters") system, errSystem := cmd.Flags().GetBool("system") template, errTemplate := cmd.Flags().GetBool("template") verbose, errVerbose := cmd.Flags().GetBool("verbose") for _, boolErr := range []error{errLicense, errModelfile, errParams, errSystem, errTemplate, errVerbose} { if boolErr != nil { return 
errors.New("error retrieving flags") } } flagsSet := 0 showType := "" if license { flagsSet++ showType = "license" } if modelfile { flagsSet++ showType = "modelfile" } if parameters { flagsSet++ showType = "parameters" } if system { flagsSet++ showType = "system" } if template { flagsSet++ showType = "template" } if flagsSet > 1 { return errors.New("only one of '--license', '--modelfile', '--parameters', '--system', or '--template' can be specified") } req := api.ShowRequest{Name: args[0], Verbose: verbose} resp, err := client.Show(cmd.Context(), &req) if err != nil { return err } if flagsSet == 1 { switch showType { case "license": fmt.Println(resp.License) case "modelfile": fmt.Println(resp.Modelfile) case "parameters": fmt.Println(resp.Parameters) case "system": fmt.Print(resp.System) case "template": fmt.Print(resp.Template) } return nil } return showInfo(resp, verbose, os.Stdout) } func showInfo(resp *api.ShowResponse, verbose bool, w io.Writer) error { tableRender := func(header string, rows func() [][]string) { fmt.Fprintln(w, " ", header) table := tablewriter.NewWriter(w) table.SetAlignment(tablewriter.ALIGN_LEFT) table.SetBorder(false) table.SetNoWhiteSpace(true) table.SetTablePadding(" ") switch header { case "Template", "System", "License": table.SetColWidth(100) } table.AppendBulk(rows()) table.Render() fmt.Fprintln(w) } tableRender("Model", func() (rows [][]string) { if resp.RemoteHost != "" { rows = append(rows, []string{"", "Remote model", resp.RemoteModel}) rows = append(rows, []string{"", "Remote URL", resp.RemoteHost}) } if resp.ModelInfo != nil { arch := resp.ModelInfo["general.architecture"].(string) rows = append(rows, []string{"", "architecture", arch}) var paramStr string if resp.Details.ParameterSize != "" { paramStr = resp.Details.ParameterSize } else if v, ok := resp.ModelInfo["general.parameter_count"]; ok { if f, ok := v.(float64); ok { paramStr = format.HumanNumber(uint64(f)) } } rows = append(rows, []string{"", "parameters", paramStr}) 
if v, ok := resp.ModelInfo[fmt.Sprintf("%s.context_length", arch)]; ok { if f, ok := v.(float64); ok { rows = append(rows, []string{"", "context length", strconv.FormatFloat(f, 'f', -1, 64)}) } } if v, ok := resp.ModelInfo[fmt.Sprintf("%s.embedding_length", arch)]; ok { if f, ok := v.(float64); ok { rows = append(rows, []string{"", "embedding length", strconv.FormatFloat(f, 'f', -1, 64)}) } } } else { rows = append(rows, []string{"", "architecture", resp.Details.Family}) rows = append(rows, []string{"", "parameters", resp.Details.ParameterSize}) } rows = append(rows, []string{"", "quantization", resp.Details.QuantizationLevel}) if resp.Requires != "" { rows = append(rows, []string{"", "requires", resp.Requires}) } return }) if len(resp.Capabilities) > 0 { tableRender("Capabilities", func() (rows [][]string) { for _, capability := range resp.Capabilities { rows = append(rows, []string{"", capability.String()}) } return }) } if resp.ProjectorInfo != nil { tableRender("Projector", func() (rows [][]string) { arch := resp.ProjectorInfo["general.architecture"].(string) rows = append(rows, []string{"", "architecture", arch}) rows = append(rows, []string{"", "parameters", format.HumanNumber(uint64(resp.ProjectorInfo["general.parameter_count"].(float64)))}) rows = append(rows, []string{"", "embedding length", strconv.FormatFloat(resp.ProjectorInfo[fmt.Sprintf("%s.vision.embedding_length", arch)].(float64), 'f', -1, 64)}) rows = append(rows, []string{"", "dimensions", strconv.FormatFloat(resp.ProjectorInfo[fmt.Sprintf("%s.vision.projection_dim", arch)].(float64), 'f', -1, 64)}) return }) } if resp.Parameters != "" { tableRender("Parameters", func() (rows [][]string) { scanner := bufio.NewScanner(strings.NewReader(resp.Parameters)) for scanner.Scan() { if text := scanner.Text(); text != "" { rows = append(rows, append([]string{""}, strings.Fields(text)...)) } } return }) } if resp.ModelInfo != nil && verbose { tableRender("Metadata", func() (rows [][]string) { keys := 
make([]string, 0, len(resp.ModelInfo)) for k := range resp.ModelInfo { keys = append(keys, k) } sort.Strings(keys) for _, k := range keys { var v string switch vData := resp.ModelInfo[k].(type) { case bool: v = fmt.Sprintf("%t", vData) case string: v = vData case float64: v = fmt.Sprintf("%g", vData) case []any: targetWidth := 10 // Small width where we are displaying the data in a column var itemsToShow int totalWidth := 1 // Start with 1 for opening bracket // Find how many we can fit for i := range vData { itemStr := fmt.Sprintf("%v", vData[i]) width := runewidth.StringWidth(itemStr) // Add separator width (", ") for all items except the first if i > 0 { width += 2 } // Check if adding this item would exceed our width limit if totalWidth+width > targetWidth && i > 0 { break } totalWidth += width itemsToShow++ } // Format the output if itemsToShow < len(vData) { v = fmt.Sprintf("%v", vData[:itemsToShow]) v = strings.TrimSuffix(v, "]") v += fmt.Sprintf(" ...+%d more]", len(vData)-itemsToShow) } else { v = fmt.Sprintf("%v", vData) } default: v = fmt.Sprintf("%T", vData) } rows = append(rows, []string{"", k, v}) } return }) } if len(resp.Tensors) > 0 && verbose { tableRender("Tensors", func() (rows [][]string) { for _, t := range resp.Tensors { rows = append(rows, []string{"", t.Name, t.Type, fmt.Sprint(t.Shape)}) } return }) } head := func(s string, n int) (rows [][]string) { scanner := bufio.NewScanner(strings.NewReader(s)) count := 0 for scanner.Scan() { text := strings.TrimSpace(scanner.Text()) if text == "" { continue } count++ if n < 0 || count <= n { rows = append(rows, []string{"", text}) } } if n >= 0 && count > n { rows = append(rows, []string{"", "..."}) } return } if resp.System != "" { tableRender("System", func() [][]string { return head(resp.System, 2) }) } if resp.License != "" { tableRender("License", func() [][]string { return head(resp.License, 2) }) } return nil } func CopyHandler(cmd *cobra.Command, args []string) error { client, err := 
api.ClientFromEnvironment() if err != nil { return err } req := api.CopyRequest{Source: args[0], Destination: args[1]} if err := client.Copy(cmd.Context(), &req); err != nil { return err } fmt.Printf("copied '%s' to '%s'\n", args[0], args[1]) return nil } func PullHandler(cmd *cobra.Command, args []string) error { insecure, err := cmd.Flags().GetBool("insecure") if err != nil { return err } client, err := api.ClientFromEnvironment() if err != nil { return err } p := progress.NewProgress(os.Stderr) defer p.Stop() bars := make(map[string]*progress.Bar) var status string var spinner *progress.Spinner fn := func(resp api.ProgressResponse) error { if resp.Digest != "" { if resp.Completed == 0 { // This is the initial status update for the // layer, which the server sends before // beginning the download, for clients to // compute total size and prepare for // downloads, if needed. // // Skipping this here to avoid showing a 0% // progress bar, which *should* clue the user // into the fact that many things are being // downloaded and that the current active // download is not that last. However, in rare // cases it seems to be triggering to some, and // it isn't worth explaining, so just ignore // and regress to the old UI that keeps giving // you the "But wait, there is more!" after // each "100% done" bar, which is "better." 
return nil } if spinner != nil { spinner.Stop() } bar, ok := bars[resp.Digest] if !ok { name, isDigest := strings.CutPrefix(resp.Digest, "sha256:") name = strings.TrimSpace(name) if isDigest { name = name[:min(12, len(name))] } bar = progress.NewBar(fmt.Sprintf("pulling %s:", name), resp.Total, resp.Completed) bars[resp.Digest] = bar p.Add(resp.Digest, bar) } bar.Set(resp.Completed) } else if status != resp.Status { if spinner != nil { spinner.Stop() } status = resp.Status spinner = progress.NewSpinner(status) p.Add(status, spinner) } return nil } request := api.PullRequest{Name: args[0], Insecure: insecure} return client.Pull(cmd.Context(), &request, fn) } type generateContextKey string type runOptions struct { Model string ParentModel string Prompt string Messages []api.Message WordWrap bool Format string System string Images []api.ImageData Options map[string]any MultiModal bool KeepAlive *api.Duration Think *api.ThinkValue HideThinking bool ShowConnect bool } func (r runOptions) Copy() runOptions { var messages []api.Message if r.Messages != nil { messages = make([]api.Message, len(r.Messages)) copy(messages, r.Messages) } var images []api.ImageData if r.Images != nil { images = make([]api.ImageData, len(r.Images)) copy(images, r.Images) } var opts map[string]any if r.Options != nil { opts = make(map[string]any, len(r.Options)) for k, v := range r.Options { opts[k] = v } } var think *api.ThinkValue if r.Think != nil { cThink := *r.Think think = &cThink } return runOptions{ Model: r.Model, ParentModel: r.ParentModel, Prompt: r.Prompt, Messages: messages, WordWrap: r.WordWrap, Format: r.Format, System: r.System, Images: images, Options: opts, MultiModal: r.MultiModal, KeepAlive: r.KeepAlive, Think: think, HideThinking: r.HideThinking, ShowConnect: r.ShowConnect, } } type displayResponseState struct { lineLength int wordBuffer string } func displayResponse(content string, wordWrap bool, state *displayResponseState) { termWidth, _, _ := 
term.GetSize(int(os.Stdout.Fd())) if wordWrap && termWidth >= 10 { for _, ch := range content { if state.lineLength+1 > termWidth-5 { if runewidth.StringWidth(state.wordBuffer) > termWidth-10 { fmt.Printf("%s%c", state.wordBuffer, ch) state.wordBuffer = "" state.lineLength = 0 continue } // backtrack the length of the last word and clear to the end of the line a := runewidth.StringWidth(state.wordBuffer) if a > 0 { fmt.Printf("\x1b[%dD", a) } fmt.Printf("\x1b[K\n") fmt.Printf("%s%c", state.wordBuffer, ch) chWidth := runewidth.RuneWidth(ch) state.lineLength = runewidth.StringWidth(state.wordBuffer) + chWidth } else { fmt.Print(string(ch)) state.lineLength += runewidth.RuneWidth(ch) if runewidth.RuneWidth(ch) >= 2 { state.wordBuffer = "" continue } switch ch { case ' ', '\t': state.wordBuffer = "" case '\n', '\r': state.lineLength = 0 state.wordBuffer = "" default: state.wordBuffer += string(ch) } } } } else { fmt.Printf("%s%s", state.wordBuffer, content) if len(state.wordBuffer) > 0 { state.wordBuffer = "" } } } func thinkingOutputOpeningText(plainText bool) string { text := "Thinking...\n" if plainText { return text } return readline.ColorGrey + readline.ColorBold + text + readline.ColorDefault + readline.ColorGrey } func thinkingOutputClosingText(plainText bool) string { text := "...done thinking.\n\n" if plainText { return text } return readline.ColorGrey + readline.ColorBold + text + readline.ColorDefault } func chat(cmd *cobra.Command, opts runOptions) (*api.Message, error) { client, err := api.ClientFromEnvironment() if err != nil { return nil, err } p := progress.NewProgress(os.Stderr) defer p.StopAndClear() spinner := progress.NewSpinner("") p.Add("", spinner) cancelCtx, cancel := context.WithCancel(cmd.Context()) defer cancel() sigChan := make(chan os.Signal, 1) signal.Notify(sigChan, syscall.SIGINT) go func() { <-sigChan cancel() }() var state *displayResponseState = &displayResponseState{} var thinkingContent strings.Builder
go
MIT
626af2d80973270c4d59b8df7153ac47ad67ed7b
2026-01-07T08:35:43.337630Z
true
ollama/ollama
https://github.com/ollama/ollama/blob/626af2d80973270c4d59b8df7153ac47ad67ed7b/cmd/start_darwin.go
cmd/start_darwin.go
package cmd

import (
	"context"
	"errors"
	"os"
	"os/exec"
	"regexp"

	"github.com/ollama/ollama/api"
)

// startApp launches the macOS Ollama desktop app that owns the running
// CLI binary, then waits for the server it starts to become reachable.
//
// The CLI binary is expected to be a symlink into an "Ollama*.app"
// bundle; if it is not (e.g. a standalone install), the app cannot be
// located and an error is returned.
func startApp(ctx context.Context, client *api.Client) error {
	exe, err := os.Executable()
	if err != nil {
		return err
	}
	link, err := os.Readlink(exe)
	if err != nil {
		return err
	}
	// Match ".../Ollama.app", allowing an optional space and version
	// digits, e.g. "Ollama 2.app".
	r := regexp.MustCompile(`^.*/Ollama\s?\d*.app`)
	m := r.FindStringSubmatch(link)
	if len(m) != 1 {
		return errors.New("could not find ollama app")
	}
	// open -j launches the app hidden; --fast-startup is forwarded to
	// the app itself. NOTE(review): flag semantics assumed from usage —
	// confirm against the app's argument handling.
	if err := exec.Command("/usr/bin/open", "-j", "-a", m[0], "--args", "--fast-startup").Run(); err != nil {
		return err
	}
	return waitForServer(ctx, client)
}
go
MIT
626af2d80973270c4d59b8df7153ac47ad67ed7b
2026-01-07T08:35:43.337630Z
false
ollama/ollama
https://github.com/ollama/ollama/blob/626af2d80973270c4d59b8df7153ac47ad67ed7b/cmd/bench/bench_test.go
cmd/bench/bench_test.go
package main import ( "bytes" "crypto/rand" "encoding/json" "io" "net/http" "net/http/httptest" "os" "strings" "testing" "time" "github.com/ollama/ollama/api" ) func createTestFlagOptions() flagOptions { models := "test-model" format := "benchstat" epochs := 1 maxTokens := 100 temperature := 0.7 seed := 42 timeout := 30 prompt := "test prompt" imageFile := "" keepAlive := 5.0 verbose := false debug := false return flagOptions{ models: &models, format: &format, epochs: &epochs, maxTokens: &maxTokens, temperature: &temperature, seed: &seed, timeout: &timeout, prompt: &prompt, imageFile: &imageFile, keepAlive: &keepAlive, verbose: &verbose, debug: &debug, } } func captureOutput(f func()) string { oldStdout := os.Stdout oldStderr := os.Stderr defer func() { os.Stdout = oldStdout os.Stderr = oldStderr }() r, w, _ := os.Pipe() os.Stdout = w os.Stderr = w f() w.Close() var buf bytes.Buffer io.Copy(&buf, r) return buf.String() } func createMockOllamaServer(t *testing.T, responses []api.ChatResponse) *httptest.Server { return httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { if r.URL.Path != "/api/chat" { t.Errorf("Expected path /api/chat, got %s", r.URL.Path) http.Error(w, "Not found", http.StatusNotFound) return } if r.Method != "POST" { t.Errorf("Expected POST method, got %s", r.Method) http.Error(w, "Method not allowed", http.StatusMethodNotAllowed) return } w.Header().Set("Content-Type", "application/json") w.WriteHeader(http.StatusOK) for _, resp := range responses { jsonData, err := json.Marshal(resp) if err != nil { t.Errorf("Failed to marshal response: %v", err) return } w.Write(jsonData) w.Write([]byte("\n")) if f, ok := w.(http.Flusher); ok { f.Flush() } time.Sleep(10 * time.Millisecond) // Simulate some delay } })) } func TestBenchmarkChat_Success(t *testing.T) { fOpt := createTestFlagOptions() mockResponses := []api.ChatResponse{ { Model: "test-model", Message: api.Message{ Role: "assistant", Content: "test response part 1", }, 
Done: false, }, { Model: "test-model", Message: api.Message{ Role: "assistant", Content: "test response part 2", }, Done: true, Metrics: api.Metrics{ PromptEvalCount: 10, PromptEvalDuration: 100 * time.Millisecond, EvalCount: 50, EvalDuration: 500 * time.Millisecond, TotalDuration: 600 * time.Millisecond, LoadDuration: 50 * time.Millisecond, }, }, } server := createMockOllamaServer(t, mockResponses) defer server.Close() t.Setenv("OLLAMA_HOST", server.URL) output := captureOutput(func() { err := BenchmarkChat(fOpt) if err != nil { t.Errorf("Expected no error, got %v", err) } }) if !strings.Contains(output, "BenchmarkModel/name=test-model/step=prefill") { t.Errorf("Expected output to contain prefill metrics, got: %s", output) } if !strings.Contains(output, "BenchmarkModel/name=test-model/step=generate") { t.Errorf("Expected output to contain generate metrics, got: %s", output) } if !strings.Contains(output, "ns/token") { t.Errorf("Expected output to contain ns/token metric, got: %s", output) } } func TestBenchmarkChat_ServerError(t *testing.T) { fOpt := createTestFlagOptions() server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { http.Error(w, "Internal server error", http.StatusInternalServerError) })) defer server.Close() t.Setenv("OLLAMA_HOST", server.URL) output := captureOutput(func() { err := BenchmarkChat(fOpt) if err != nil { t.Errorf("Expected error to be handled internally, got returned error: %v", err) } }) if !strings.Contains(output, "ERROR: Couldn't chat with model") { t.Errorf("Expected error message about chat failure, got: %s", output) } } func TestBenchmarkChat_Timeout(t *testing.T) { fOpt := createTestFlagOptions() shortTimeout := 1 // Very short timeout fOpt.timeout = &shortTimeout server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { // Simulate a long delay that will cause timeout time.Sleep(2 * time.Second) w.Header().Set("Content-Type", "application/json") response 
:= api.ChatResponse{ Model: "test-model", Message: api.Message{ Role: "assistant", Content: "test response", }, Done: true, Metrics: api.Metrics{ PromptEvalCount: 10, PromptEvalDuration: 100 * time.Millisecond, EvalCount: 50, EvalDuration: 500 * time.Millisecond, TotalDuration: 600 * time.Millisecond, LoadDuration: 50 * time.Millisecond, }, } jsonData, _ := json.Marshal(response) w.Write(jsonData) })) defer server.Close() t.Setenv("OLLAMA_HOST", server.URL) output := captureOutput(func() { err := BenchmarkChat(fOpt) if err != nil { t.Errorf("Expected timeout to be handled internally, got returned error: %v", err) } }) if !strings.Contains(output, "ERROR: Chat request timed out") { t.Errorf("Expected timeout error message, got: %s", output) } } func TestBenchmarkChat_NoMetrics(t *testing.T) { fOpt := createTestFlagOptions() mockResponses := []api.ChatResponse{ { Model: "test-model", Message: api.Message{ Role: "assistant", Content: "test response", }, Done: false, // Never sends Done=true }, } server := createMockOllamaServer(t, mockResponses) defer server.Close() t.Setenv("OLLAMA_HOST", server.URL) output := captureOutput(func() { err := BenchmarkChat(fOpt) if err != nil { t.Errorf("Expected no error, got %v", err) } }) if !strings.Contains(output, "ERROR: No metrics received") { t.Errorf("Expected no metrics error message, got: %s", output) } } func TestBenchmarkChat_MultipleModels(t *testing.T) { fOpt := createTestFlagOptions() models := "model1,model2" epochs := 2 fOpt.models = &models fOpt.epochs = &epochs callCount := 0 server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { callCount++ w.Header().Set("Content-Type", "application/json") var req api.ChatRequest body, _ := io.ReadAll(r.Body) json.Unmarshal(body, &req) response := api.ChatResponse{ Model: req.Model, Message: api.Message{ Role: "assistant", Content: "test response for " + req.Model, }, Done: true, Metrics: api.Metrics{ PromptEvalCount: 10, PromptEvalDuration: 
100 * time.Millisecond, EvalCount: 50, EvalDuration: 500 * time.Millisecond, TotalDuration: 600 * time.Millisecond, LoadDuration: 50 * time.Millisecond, }, } jsonData, _ := json.Marshal(response) w.Write(jsonData) })) defer server.Close() t.Setenv("OLLAMA_HOST", server.URL) output := captureOutput(func() { err := BenchmarkChat(fOpt) if err != nil { t.Errorf("Expected no error, got %v", err) } }) // Should be called 4 times (2 models × 2 epochs) if callCount != 4 { t.Errorf("Expected 4 API calls, got %d", callCount) } if !strings.Contains(output, "BenchmarkModel/name=model1") || !strings.Contains(output, "BenchmarkModel/name=model2") { t.Errorf("Expected output for both models, got: %s", output) } } func TestBenchmarkChat_WithImage(t *testing.T) { fOpt := createTestFlagOptions() tmpfile, err := os.CreateTemp(t.TempDir(), "testimage") if err != nil { t.Fatalf("Failed to create temp file: %v", err) } defer os.Remove(tmpfile.Name()) content := []byte("fake image data") if _, err := tmpfile.Write(content); err != nil { t.Fatalf("Failed to write to temp file: %v", err) } tmpfile.Close() tmpfileName := tmpfile.Name() fOpt.imageFile = &tmpfileName server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { // Verify the request contains image data var req api.ChatRequest body, _ := io.ReadAll(r.Body) json.Unmarshal(body, &req) if len(req.Messages) == 0 || len(req.Messages[0].Images) == 0 { t.Error("Expected request to contain images") } w.Header().Set("Content-Type", "application/json") response := api.ChatResponse{ Model: "test-model", Message: api.Message{ Role: "assistant", Content: "test response with image", }, Done: true, Metrics: api.Metrics{ PromptEvalCount: 10, PromptEvalDuration: 100 * time.Millisecond, EvalCount: 50, EvalDuration: 500 * time.Millisecond, TotalDuration: 600 * time.Millisecond, LoadDuration: 50 * time.Millisecond, }, } jsonData, _ := json.Marshal(response) w.Write(jsonData) })) defer server.Close() 
t.Setenv("OLLAMA_HOST", server.URL) output := captureOutput(func() { err := BenchmarkChat(fOpt) if err != nil { t.Errorf("Expected no error, got %v", err) } }) if !strings.Contains(output, "BenchmarkModel/name=test-model") { t.Errorf("Expected benchmark output, got: %s", output) } } func TestBenchmarkChat_ImageError(t *testing.T) { randFileName := func() string { const charset = "abcdefghijklmnopqrstuvwxyz0123456789" const length = 8 result := make([]byte, length) rand.Read(result) // Fill with random bytes for i := range result { result[i] = charset[result[i]%byte(len(charset))] } return string(result) + ".txt" } fOpt := createTestFlagOptions() imageFile := randFileName() fOpt.imageFile = &imageFile output := captureOutput(func() { err := BenchmarkChat(fOpt) if err == nil { t.Error("Expected error from image reading, got nil") } }) if !strings.Contains(output, "ERROR: Couldn't read image") { t.Errorf("Expected image read error message, got: %s", output) } } func TestReadImage_Success(t *testing.T) { tmpfile, err := os.CreateTemp(t.TempDir(), "testimage") if err != nil { t.Fatalf("Failed to create temp file: %v", err) } defer os.Remove(tmpfile.Name()) content := []byte("fake image data") if _, err := tmpfile.Write(content); err != nil { t.Fatalf("Failed to write to temp file: %v", err) } tmpfile.Close() imgData, err := readImage(tmpfile.Name()) if err != nil { t.Errorf("Expected no error, got %v", err) } if imgData == nil { t.Error("Expected image data, got nil") } expected := api.ImageData(content) if string(imgData) != string(expected) { t.Errorf("Expected image data %v, got %v", expected, imgData) } } func TestReadImage_FileNotFound(t *testing.T) { imgData, err := readImage("nonexistentfile.jpg") if err == nil { t.Error("Expected error for non-existent file, got nil") } if imgData != nil { t.Error("Expected nil image data for non-existent file") } } func TestOptionsMapCreation(t *testing.T) { fOpt := createTestFlagOptions() options := 
make(map[string]interface{}) if *fOpt.maxTokens > 0 { options["num_predict"] = *fOpt.maxTokens } options["temperature"] = *fOpt.temperature if fOpt.seed != nil && *fOpt.seed > 0 { options["seed"] = *fOpt.seed } if options["num_predict"] != *fOpt.maxTokens { t.Errorf("Expected num_predict %d, got %v", *fOpt.maxTokens, options["num_predict"]) } if options["temperature"] != *fOpt.temperature { t.Errorf("Expected temperature %f, got %v", *fOpt.temperature, options["temperature"]) } if options["seed"] != *fOpt.seed { t.Errorf("Expected seed %d, got %v", *fOpt.seed, options["seed"]) } }
go
MIT
626af2d80973270c4d59b8df7153ac47ad67ed7b
2026-01-07T08:35:43.337630Z
false
ollama/ollama
https://github.com/ollama/ollama/blob/626af2d80973270c4d59b8df7153ac47ad67ed7b/cmd/bench/bench.go
cmd/bench/bench.go
package main import ( "cmp" "context" "flag" "fmt" "io" "os" "runtime" "slices" "strings" "sync" "time" "github.com/ollama/ollama/api" ) type flagOptions struct { models *string epochs *int maxTokens *int temperature *float64 seed *int timeout *int prompt *string imageFile *string keepAlive *float64 format *string outputFile *string debug *bool verbose *bool } type Metrics struct { Model string Step string Count int Duration time.Duration } var once sync.Once const DefaultPrompt = `Please write a descriptive story about a llama named Alonso who grows up to be President of the Land of Llamas. Include details about Alonso's childhood, adolescent years, and how he grew up to be a political mover and shaker. Write the story with a sense of whimsy.` func OutputMetrics(w io.Writer, format string, metrics []Metrics, verbose bool) { switch format { case "benchstat": if verbose { printHeader := func() { fmt.Fprintf(w, "sysname: %s\n", runtime.GOOS) fmt.Fprintf(w, "machine: %s\n", runtime.GOARCH) } once.Do(printHeader) } for _, m := range metrics { if m.Step == "generate" || m.Step == "prefill" { if m.Count > 0 { nsPerToken := float64(m.Duration.Nanoseconds()) / float64(m.Count) tokensPerSec := float64(m.Count) / (float64(m.Duration.Nanoseconds()) + 1e-12) * 1e9 fmt.Fprintf(w, "BenchmarkModel/name=%s/step=%s %d %.2f ns/token %.2f token/sec\n", m.Model, m.Step, m.Count, nsPerToken, tokensPerSec) } else { fmt.Fprintf(w, "BenchmarkModel/name=%s/step=%s %d 0 ns/token 0 token/sec\n", m.Model, m.Step, m.Count) } } else { var suffix string if m.Step == "load" { suffix = "/step=load" } fmt.Fprintf(w, "BenchmarkModel/name=%s%s 1 %d ns/request\n", m.Model, suffix, m.Duration.Nanoseconds()) } } case "csv": printHeader := func() { headings := []string{"NAME", "STEP", "COUNT", "NS_PER_COUNT", "TOKEN_PER_SEC"} fmt.Fprintln(w, strings.Join(headings, ",")) } once.Do(printHeader) for _, m := range metrics { if m.Step == "generate" || m.Step == "prefill" { var nsPerToken float64 var 
tokensPerSec float64 if m.Count > 0 { nsPerToken = float64(m.Duration.Nanoseconds()) / float64(m.Count) tokensPerSec = float64(m.Count) / (float64(m.Duration.Nanoseconds()) + 1e-12) * 1e9 } fmt.Fprintf(w, "%s,%s,%d,%.2f,%.2f\n", m.Model, m.Step, m.Count, nsPerToken, tokensPerSec) } else { fmt.Fprintf(w, "%s,%s,1,%d,0\n", m.Model, m.Step, m.Duration.Nanoseconds()) } } case "markdown": printHeader := func() { fmt.Fprintln(w, "| Model | Step | Count | Duration | nsPerToken | tokensPerSec |") fmt.Fprintln(w, "|-------|------|-------|----------|------------|--------------|") } once.Do(printHeader) for _, m := range metrics { var nsPerToken, tokensPerSec float64 var nsPerTokenStr, tokensPerSecStr string if m.Step == "generate" || m.Step == "prefill" { nsPerToken = float64(m.Duration.Nanoseconds()) / float64(m.Count) tokensPerSec = float64(m.Count) / (float64(m.Duration.Nanoseconds()) + 1e-12) * 1e9 nsPerTokenStr = fmt.Sprintf("%.2f", nsPerToken) tokensPerSecStr = fmt.Sprintf("%.2f", tokensPerSec) } else { nsPerTokenStr = "-" tokensPerSecStr = "-" } fmt.Fprintf(w, "| %s | %s | %d | %v | %s | %s |\n", m.Model, m.Step, m.Count, m.Duration, nsPerTokenStr, tokensPerSecStr) } default: fmt.Fprintf(os.Stderr, "Unknown output format '%s'\n", format) } } func BenchmarkChat(fOpt flagOptions) error { models := strings.Split(*fOpt.models, ",") // todo - add multi-image support var imgData api.ImageData var err error if *fOpt.imageFile != "" { imgData, err = readImage(*fOpt.imageFile) if err != nil { fmt.Fprintf(os.Stderr, "ERROR: Couldn't read image '%s': %v\n", *fOpt.imageFile, err) return err } } if *fOpt.debug && imgData != nil { fmt.Fprintf(os.Stderr, "Read file '%s'\n", *fOpt.imageFile) } client, err := api.ClientFromEnvironment() if err != nil { fmt.Fprintf(os.Stderr, "ERROR: Couldn't create ollama client: %v\n", err) return err } var out io.Writer = os.Stdout if fOpt.outputFile != nil && *fOpt.outputFile != "" { f, err := os.OpenFile(*fOpt.outputFile, os.O_CREATE|os.O_WRONLY, 
0o644) if err != nil { fmt.Fprintf(os.Stderr, "ERROR: cannot open output file %s: %v\n", *fOpt.outputFile, err) return err } defer f.Close() out = f } for _, model := range models { for range *fOpt.epochs { options := make(map[string]interface{}) if *fOpt.maxTokens > 0 { options["num_predict"] = *fOpt.maxTokens } options["temperature"] = *fOpt.temperature if fOpt.seed != nil && *fOpt.seed > 0 { options["seed"] = *fOpt.seed } var keepAliveDuration *api.Duration if *fOpt.keepAlive > 0 { duration := api.Duration{Duration: time.Duration(*fOpt.keepAlive * float64(time.Second))} keepAliveDuration = &duration } req := &api.ChatRequest{ Model: model, Messages: []api.Message{ { Role: "user", Content: *fOpt.prompt, }, }, Options: options, KeepAlive: keepAliveDuration, } if imgData != nil { req.Messages[0].Images = []api.ImageData{imgData} } var responseMetrics *api.Metrics ctx, cancel := context.WithTimeout(context.Background(), time.Duration(*fOpt.timeout)*time.Second) defer cancel() err = client.Chat(ctx, req, func(resp api.ChatResponse) error { if *fOpt.debug { fmt.Fprintf(os.Stderr, "%s", cmp.Or(resp.Message.Thinking, resp.Message.Content)) } if resp.Done { responseMetrics = &resp.Metrics } return nil }) if *fOpt.debug { fmt.Fprintln(os.Stderr) } if err != nil { if ctx.Err() == context.DeadlineExceeded { fmt.Fprintf(os.Stderr, "ERROR: Chat request timed out with model '%s' after %vs\n", model, 1) continue } fmt.Fprintf(os.Stderr, "ERROR: Couldn't chat with model '%s': %v\n", model, err) continue } if responseMetrics == nil { fmt.Fprintf(os.Stderr, "ERROR: No metrics received for model '%s'\n", model) continue } metrics := []Metrics{ { Model: model, Step: "prefill", Count: responseMetrics.PromptEvalCount, Duration: responseMetrics.PromptEvalDuration, }, { Model: model, Step: "generate", Count: responseMetrics.EvalCount, Duration: responseMetrics.EvalDuration, }, { Model: model, Step: "load", Count: 1, Duration: responseMetrics.LoadDuration, }, { Model: model, Step: 
"total", Count: 1, Duration: responseMetrics.TotalDuration, }, } OutputMetrics(out, *fOpt.format, metrics, *fOpt.verbose) if *fOpt.keepAlive > 0 { time.Sleep(time.Duration(*fOpt.keepAlive*float64(time.Second)) + 200*time.Millisecond) } } } return nil } func readImage(filePath string) (api.ImageData, error) { file, err := os.Open(filePath) if err != nil { return nil, err } defer file.Close() data, err := io.ReadAll(file) if err != nil { return nil, err } return api.ImageData(data), nil } func main() { fOpt := flagOptions{ models: flag.String("model", "", "Model to benchmark"), epochs: flag.Int("epochs", 6, "Number of epochs (iterations) per model"), maxTokens: flag.Int("max-tokens", 200, "Maximum tokens for model response"), temperature: flag.Float64("temperature", 0, "Temperature parameter"), seed: flag.Int("seed", 0, "Random seed"), timeout: flag.Int("timeout", 60*5, "Timeout in seconds (default 300s)"), prompt: flag.String("p", DefaultPrompt, "Prompt to use"), imageFile: flag.String("image", "", "Filename for an image to include"), keepAlive: flag.Float64("k", 0, "Keep alive duration in seconds"), format: flag.String("format", "markdown", "Output format [benchstat|csv] (default benchstat)"), outputFile: flag.String("output", "", "Output file for results (stdout if empty)"), verbose: flag.Bool("v", false, "Show system information"), debug: flag.Bool("debug", false, "Show debug information"), } flag.Usage = func() { fmt.Fprintf(os.Stderr, "Usage: %s [OPTIONS]\n\n", os.Args[0]) fmt.Fprintf(os.Stderr, "Description:\n") fmt.Fprintf(os.Stderr, " Model benchmarking tool with configurable parameters\n\n") fmt.Fprintf(os.Stderr, "Options:\n") flag.PrintDefaults() fmt.Fprintf(os.Stderr, "\nExamples:\n") fmt.Fprintf(os.Stderr, " bench -model gpt-oss:20b -epochs 3 -temperature 0.7\n") } flag.Parse() if !slices.Contains([]string{"markdown", "benchstat", "csv"}, *fOpt.format) { fmt.Fprintf(os.Stderr, "ERROR: Unknown format '%s'\n", *fOpt.format) os.Exit(1) } if 
len(*fOpt.models) == 0 { fmt.Fprintf(os.Stderr, "ERROR: No model(s) specified to benchmark.\n") flag.Usage() return } BenchmarkChat(fOpt) }
go
MIT
626af2d80973270c4d59b8df7153ac47ad67ed7b
2026-01-07T08:35:43.337630Z
false
ollama/ollama
https://github.com/ollama/ollama/blob/626af2d80973270c4d59b8df7153ac47ad67ed7b/cmd/runner/main.go
cmd/runner/main.go
package main import ( "fmt" "os" "github.com/ollama/ollama/runner" ) func main() { if err := runner.Execute(os.Args[1:]); err != nil { fmt.Fprintf(os.Stderr, "error: %s\n", err) os.Exit(1) } }
go
MIT
626af2d80973270c4d59b8df7153ac47ad67ed7b
2026-01-07T08:35:43.337630Z
false
ollama/ollama
https://github.com/ollama/ollama/blob/626af2d80973270c4d59b8df7153ac47ad67ed7b/integration/llm_image_test.go
integration/llm_image_test.go
//go:build integration package integration import ( "context" "encoding/base64" "testing" "time" "github.com/ollama/ollama/api" ) func TestVisionModels(t *testing.T) { skipUnderMinVRAM(t, 6) type testCase struct { model string } testCases := []testCase{ { model: "qwen2.5vl", }, { model: "llama3.2-vision", }, { model: "gemma3", }, { model: "qwen3-vl:8b", }, { // Qwen 3 VL mixture of experts model: "qwen3-vl:30b", }, { model: "ministral-3", }, } for _, v := range testCases { t.Run(v.model, func(t *testing.T) { image, err := base64.StdEncoding.DecodeString(imageEncoding) if err != nil { t.Fatal(err) } req := api.ChatRequest{ Model: v.model, Messages: []api.Message{ { Role: "user", Content: "what does the text in this image say?", Images: []api.ImageData{ image, }, }, }, Stream: &stream, Options: map[string]any{ "seed": 42, "temperature": 0.0, }, } ctx, cancel := context.WithTimeout(context.Background(), 5*time.Minute) defer cancel() client, _, cleanup := InitServerConnection(ctx, t) // Note: sometimes it returns "the ollamas" sometimes "the ollams" resp := "the ollam" defer cleanup() if err := PullIfMissing(ctx, client, req.Model); err != nil { t.Fatal(err) } // Preload to skip if we're less than 80% on GPU to avoid extremely slow tests err = client.Generate(ctx, &api.GenerateRequest{Model: req.Model}, func(response api.GenerateResponse) error { return nil }) if err != nil { t.Fatalf("failed to load model %s: %s", req.Model, err) } skipIfNotGPULoaded(ctx, t, client, req.Model, 80) // llava models on CPU can be quite slow to start DoChat(ctx, t, client, req, []string{resp}, 240*time.Second, 30*time.Second) }) } } func TestIntegrationSplitBatch(t *testing.T) { skipUnderMinVRAM(t, 6) image, err := base64.StdEncoding.DecodeString(imageEncoding) if err != nil { t.Fatal(err) } req := api.GenerateRequest{ Model: "gemma3:4b", // Fill up a chunk of the batch so the image will partially spill over into the next one System: "Lorem ipsum dolor sit amet, consectetur adipiscing 
elit. Sed aliquet, justo in malesuada lobortis, odio ligula volutpat quam, quis faucibus ipsum magna quis sapien. Aliquam in venenatis diam, eu viverra magna. Phasellus imperdiet hendrerit volutpat. Vivamus sem ex, facilisis placerat felis non, dictum elementum est. Phasellus aliquam imperdiet lacus, eget placerat ligula sodales vel. Pellentesque nec auctor mi. Curabitur arcu nisi, faucibus eget nunc id, viverra interdum mi. Curabitur ornare ipsum ex, ac euismod ex aliquam in. Vestibulum id magna at purus accumsan fermentum. Proin scelerisque posuere nunc quis interdum. Maecenas sed mollis nisl. Etiam vitae ipsum interdum, placerat est quis, tincidunt velit. Nullam tempor nibh non lorem volutpat efficitur. Cras laoreet diam imperdiet ipsum auctor bibendum. Suspendisse ultrices urna sed metus sagittis suscipit. Quisque ullamcorper aliquam nibh ut mollis. Aenean dapibus mauris pharetra, venenatis elit ac, hendrerit odio. Cras vestibulum erat tempor, lobortis justo eu, lobortis ipsum. Nam laoreet dapibus sem. Proin vel diam ultrices, elementum ante et, ornare lectus. Proin eu accumsan nisl. Praesent ac ex vitae ipsum vulputate tristique facilisis sit amet lacus. Nullam faucibus magna a pellentesque pretium. Nunc lacinia ullamcorper sollicitudin. Donec vitae accumsan turpis, sed porttitor est. 
Donec porttitor mi vitae augue faucibus, vel mollis diam tincidunt.", Prompt: "what does the text in this image say?", Stream: &stream, Options: map[string]any{ "seed": 42, "temperature": 0.0, }, Images: []api.ImageData{ image, }, } // Note: sometimes it returns "the ollamas" sometimes "the ollams" resp := "the ollam" ctx, cancel := context.WithTimeout(context.Background(), 3*time.Minute) defer cancel() client, _, cleanup := InitServerConnection(ctx, t) defer cleanup() if err := PullIfMissing(ctx, client, req.Model); err != nil { t.Fatal(err) } // llava models on CPU can be quite slow to start, DoGenerate(ctx, t, client, req, []string{resp}, 120*time.Second, 30*time.Second) } const imageEncoding = `iVBORw0KGgoAAAANSUhEUgAAANIAAAB4CAYAAACHHqzKAAAAAXNSR0IArs4c6QAAAIRlWElmTU0AKgAAAAgABQESAAMAAAABAAEAAAEaAAUAAAABAAAASgEb AAUAAAABAAAAUgEoAAMAAAABAAIAAIdpAAQAAAABAAAAWgAAAAAAAABIAAAAAQAAAEgAAAABAAOgAQADAAAAAQABAACgAgAEAAAAAQAAANKgAwAEAAAAAQAA AHgAAAAAXdsepgAAAAlwSFlzAAALEwAACxMBAJqcGAAAAVlpVFh0WE1MOmNvbS5hZG9iZS54bXAAAAAAADx4OnhtcG1ldGEgeG1sbnM6eD0iYWRvYmU6bnM6 bWV0YS8iIHg6eG1wdGs9IlhNUCBDb3JlIDYuMC4wIj4KICAgPHJkZjpSREYgeG1sbnM6cmRmPSJodHRwOi8vd3d3LnczLm9yZy8xOTk5LzAyLzIyLXJkZi1z eW50YXgtbnMjIj4KICAgICAgPHJkZjpEZXNjcmlwdGlvbiByZGY6YWJvdXQ9IiIKICAgICAgICAgICAgeG1sbnM6dGlmZj0iaHR0cDovL25zLmFkb2JlLmNv bS90aWZmLzEuMC8iPgogICAgICAgICA8dGlmZjpPcmllbnRhdGlvbj4xPC90aWZmOk9yaWVudGF0aW9uPgogICAgICA8L3JkZjpEZXNjcmlwdGlvbj4KICAg PC9yZGY6UkRGPgo8L3g6eG1wbWV0YT4KGV7hBwAAQABJREFUeAGE3QfgX9P5OP6TIRKRncgmS6aR2DNCKEKLqqpRW9FWq0q1dEQparZKF7VK7aq99yZGSCRB BhErk0Qmyf95nZOTfOqrv/9J7ud977nnPPt5zrz3Ntp0s61XrLnmmql58+Zp6dKlqUWLFmnZsmXp888/Tx07dkwLFy5MX3zxRT4aNWqUmjVrlho3bpzatGmT Pvnkk5y/YsWKXHfttdfOv/VauSZNmuRj0aJFSX15cIAPruS3adOmafny5Uld5dDkXP05c+akTp06pTXWWCN99tlnacmSJQGnUVp77VbpvffeS126dM4wli4t dK8RsJoHDvUXL16cy7du3TrjXrBgQS675prNUsu1WgV/AW/ZktSxQ4dMC37BXbDgs7Q4aG7cpHFq2bJlpo984EY/3vELB94k+eqjU36V1fz580OmSyO/WZZt 
8+Zr5jKu8YZv8pTgkCoMcnCgm17atm2bz+Gv8NWnvxUrlgd9S3P+4sWLQnZNc91PP/0ktWrVOst19uzZwc9akd98lczxN3fu3FwPLudrtwrelqcsM7LG95rN Qv4LF2U6XLvfvMWaq2gi90ahX2mttdbK5ej2o48+ymXokv7Ri/ZPP/00LQ16O3bqmOuwCbiaNSv8Ngs5fhFl2QPe1fXLBtgLutHrVyJnciffZWELS0KWytEL Odd66oDjHrnjpdoiGTbyL3DRAX3AT77xEzAW5nrwuY9m/DTp3bvf6Hbt2oWgW2WC3ARYZQdA8+bNW2UYiILU4T6FIsw1w0NAYaZ5RoT4KgRIwa8GgBgEEjC4 DFJdB9jynTNYDqF+pQdDyqw23ma5nGv1MIcnuAgMHPfQWuholtKKlNaEP2heujQMYyVuTrT8i+VpUeCsNFIEueAFDWBSXD1nOO7PmjU7nK9J+uLzkE/AnRnX yi5atDgbcMsoN3/+Z2nK1PfS2i1bxL0mmQ+OXmlEO4fEX4eOHTJORiefPNdYoxiR8nTHwCR8f/EFY8T/iqyThjyjkdHBRdbkIMGFdrLiqIx5/vwFaY2ma+R7 1UA5M0OjM7Dw59x9sPANDn47dGgfZVOmPSOJP2RF/+5LfjmsX/ckcqp0gkfv+GQDZF9tjyyc+yUbNLjmGHPmzE0LQk6u8Yov5zUYu0YvPGRGFpmfkDd+QvAZ F9jwg7F8+RfB29KcX+WMbvxKTfoPGDQ6HC2nShjBKuwXg126dMkKwBAiOA/CCRYBkAHaKhBSvnodIsKrywDBpVCplnWubFWSX+UZP1jKFYK/yPgqXLDQQyFw Y1Id5THVPBxl5qxZWfBgEgZ6CLdJtC5oBrd5i+ZRNoQWPM1fMD8bIyNcGBEXn40bRUQKXhktOASMdzRSgoNTukbbhx/OjOtmqVevnql9GHe3bl1DZi2Cjpap e/duaZ11OoXzvJsWhzI6d+6Yhg/fOk17590MFz7w8A0Pep2DvzgMC72Zt7in3DrrrBM8r53pgrsamJZEvWoUZAU2OLWMewyPQ+KHE+LBr7qff74sG7M6Ak1U z62yenBXfJ9FsGkaLR5HoAt6qLjAw0MNouo64ENTTZwWTDaCR85SaCgtkxYV33SmnFTpJidlHXQPPidaFHjR4T6a3NNCCSBgKM9e8Fdhocu5+5wK7ehUFr8f f/xxBL3S25LvkO+Qcrldd/v6imIcy+JG41WMtm/fPjMHISF/8P77YXALMnEAIFbkEvkqUADlI0pSFyMEDXltip0zTvkExckWMNaVzgaeesoQLmPW3arOUxlm OIRVIzI+aotBMeoTrnx4wMQXfGhv0rhprvtFRBtOMC/gaYWaN2+R+dK1+DycS3k0zZz5cZQvRt0BnFAeJc+aPTftsvMO6eennJwVWmRTWgmGKJqhffr099LR 3/t+uvKKv6W+ffumu++5N+2z37Fpj123TLNmzkyd1umcHR9f8FG4rqdgwHnwQNG1C4vH6mRVT4xCGfjcw7trMip8N849DDDJrtZniM7xQz8McUG0SuS+NLq+ 5Coo0Lcya0b3q0uXrmFEjdMnK1tLAbYaL9lrAeCuhkf2nBgs5dgJWeFVYh/oZch4rc7iGr01YMqvOleX3XFK+iU79kEOeFLPffck53A40AFmlQ/+lXeNVvfR Cwd86tb6aNA6fx49D3LNbawKGMcI711rrZYZGCYh5JGQUI6EQIDdg7h6dEOi5akPsaQ8BolMs+saXr9gtwyHIVhEKYdQTGICHMpQlkDeD6emCHQU41oYDtM2 160wlCcMNOJLFwhNaJTAnzN7Tnacxk0apQ8+CIFFfoeOneKvrkTrTN/cuXMyfjQZ04DHOVvHQcFahsefHp+O+V7vaGk6A/0/U+9evdK222wVrVW3XGZA//VT 
9y5tomWakV59+ZnUfO0eaY/dts+8MUo8zA4nHfvqi9Eh7x79pPfSVlvvkLp27Rz5c7KclCM/vEnkRYbyyBe/8hg/OZAhuc6KVptcyQ9PeHEfTvkSmS0LvgUz 9+NGLqMcvLPn6LYW54M/yyX0AoZruoIPbnYwM4KFfE5vuCDRAxrkf77SDhly5YHNKYMH+pTQxyblK8d58PTZZ9EdjfLKgk8GyqAHTOd+yQU+/KFNK5wDRshB HQHAWJJ9tY8u6lotip2xAXXBwYNrrSacTQm6fft2uZIbCONUkGNeswspJhDIUAkVEgw5KAIw5xA5RyRBggGmOqIruBwVnEqMFkekd28ZZqKOuu6DRdBoqwZB mNVp4Q7zyTQTJhjKoo/Q5FV60MYJCYLQFy1cnAezTVY0zhG2jkeaNFkjfRKKUL9ROJl6eKs8wl0VCd+2W/ZP199wSx5Xde68TuZ39913y3Jj8HfffXemY8xL L6d33p2+ypnRPueTxenHxx8VrdkJacqUqenKq65PHdq3ztH//odfSDuP2DRdfPGf8phDj+C5515Izzz3Sho8sE+aMeP9rBfKZ7DgodU5eaOf/J37JdOqC2Xc x0s98AhWNXaBY01jreVF9sZEJjEWL14SjhRjthhHduzUYZUDkgVc4Ah04DvneA734FcOrRy04qTTpStth5wrP3TuUKfaolYCjeq7x07c0+XnANVuODY7U7d/ //5RZvZK+2yWJ0DkC5r40c0nB3Q50EVmi6Krr4vLJ9hVjx49Mgw0uCZv+Brt8839c9eOsarsJgG46Rpws3cIQjxlOK9NX0NGCUOSRxgSj2e46kJeiC9llEOs svKrUNFAobWsusqgi4O4B9aSJYuzMEUFjFa60WywbHaKQ+uOEOr8+TFLFJMKZoWUb8J5o2yZ4SoGBHaTiLJpRaPc314UhiOBAzchi3auK83odr502fL0wnOP pf2+fWC65por8njt3XCc9dZbN3XtPjB9MGNKOurow9Mf/3BhhvX66+NiZmlJ2mzTTTMOfx599LH03UOOC8dpm/b/9l7puOOOybhqAfhv+8/t6fCjT047bjc0 ZtEEqIURzUv/f3l0N4xPi9HqfpQILmqThyCGVrJirGTRIsaL9MDQ/CpDBytCbmYttcqSmT7BsM4GNo3JCF1kxkTHuqfkSTYcRyKrqj92U4JYCaLkpuyCGKN+ +un8fF51TIdsEN3orLYCpm4cmLNnzwrcZbxKN2wEPvTArw6cyreLY8rUqbm1gZfjVRzV/ti2AMAG2K18ZeUL9mTJWefNm5umTXsn+4BGSCBv0q/fgNGEvmYQ 9nkIGIGYAQzTiKnRQblqyBDJZ6AShBAjrrYgZvGygYXy1VOe4MB1TlDV+8EDSz44tVvmPlrANIXMQQgLvqKg0q81roGLcpct/SK1DVjRXoZBLItAEN21EIKx SnXmFs2j/7xC/zYmHYIegs+RJcaJxkaMjlHBj3a4yAKdzhkrXuGkODR2aN82JlzapoED1k+7fm2XXF/5F154LQ0Z3C+1DmV2jan6UaN2z/cooVvXrlneYEq9 e/eKaPl+8Ls0XXDB77Niyf2ll14K2TTJRrHBkCEpZp3T3fc9HBMbrbKC0fDZgtJ9IadyLItfA/fSvwe/ZQyaa9fOAJrDcIZPPpmX+cGHvLlz52V+Ca7qiuzw TS7krx4jIxeHGVCtBHmSjXK1LJ3Kd78Etfmruk/oAkdZuMkUHjDlfxF5einqu4dhY1nd02qH9PRZyJoeq/3Jq/b0/gcfZD1VfcFJZuQOJ3rhq/erbvkCvsEB b/r06VG+TJigV7lP5n2SGkOqn4tQwnGt+eXFy8IIeTRiJcAoAUMEXg0cMkAJqEYAMIx7uoahmMVCbG3uFy2K/nYkeZVRsCRlGLQmWpJPmHDoWoBLGcpwjI8+ 
mpnvK2sw3DrGLB07ts+O0CzWPXRPPo3+fBZ08AKe+nhep9M6Ofo2DgESCD7jNNOs5ZKnbBWuuvhfK2jQunFowkcTmVDu4sUxuI/fhmnhwiURyRdlWrUYWkjp i+ganXHGWWmXXfZKb7/99qoq667bM+277zeyA8u8/vob09Zbb51+ceovV8ll1113SdMmvxN4W+RybVq3CZ21Cf60MsYrbbOMBC50043Wh34YBjrmBv0mFIx3 QvVZH/ihE7Dw7aAn+WDRBXj0LcDg28Fu/AqA5KGco8qQ3MAgszJWKt1/QYLc6VMib06kxVCfY5jUAb/aoVlZa1NsxX1OiiaOXINsDW5owUPRXZkVxB9aqk2Y 6ZOnDhx4c0gtAqZxMDs2BjZ+AqvaLR3SZlMZmNBciYIMliIgInSVEMLJKAPjFIFASCuzBFaZAINAwHFUBzWuUB9RYCqHeAqoc/yUprw858rVFkpdNHEQXQGt RvtoBfDw5ptvp6nT56Z2rddOc2YtjO5U+9R/wHphEK1j0W9ZsFq6m1qoYC1wl1m8tQJGs+DfDMyKFWumiZMmp5dfnRRO1jr16NYl06sV1D1jDOPfeCONe218 GrbpJhEgtKAMrwQBvHaKxUXOiwfJDyMAv8xwWmcrEx4zZryXrrvuP+FEL6exY19P/fr1y3XQ16vXevmcXA866ID03e8elGVB7hJ5RRubloSxrR2LrYsbi+gW CGOdK1okk0Z0R+aMgp7o1DoNZzMm0FWzcLl2q9LdW7rU5EBpkeCNdibLnnx1f8kQDPxUmGyHDuXrLtORBK+ZRvTW8YV6nJY+S8Ashk/XDjCUn/7uu3mSg6Oy I/iVh6caOX7A40jyXYMBNtrpynKGQysMtrLsBw3KrHZCOomJpnBgSZliD9HafFp6SvLAltDEFyrPaG7KKx26AISCeATKQ0x1JERWJ6IkTiAPMcozIr+QMX7n fhGgm0FpEkEQrHsIAQMhDtcEUnHDBy6m9ZUJQDkK7dmzR5o8eWoaP+7ltOHGm6cRI7ZLh0Ykx2AR7JIY2L+bXhzzahr7ynNp8ODNUt9+6wbesvsBHC0j/Mp/ GgJ74vGx6YfHH5jWW3fdcJgJ6aorb0t77Dk8RyKCffTxZ9NmwwanQ797YJ55/Nf1t0YLqEtYAoaIXVrIsosjMxzxau7c+alXr245AOCxJkbbrt3acVnWxGo+ pTIeCe8ffvhh/JoIaFxakzh/4YUXU5uI/vRD1mRjXOcaDtcmBJyDoYdBvyZD6GzKlCnZmTikWUs4tNKClPILY8HbTJZAoUVFEz7hokPw4BBMGTkHhs89MrV2 VoMXOdM3e1JfkGEnaEOva7Bck3ObgE0/bEEwdbADdgGf8nhRto6hXCsPHv4ki/bsAU26rmy24mTnxQbKfIAewxwzdVG30FS6w/yCDaMbz/jgSGChh87ByY6E KYUQ7KaCEGIOQsS7lgijElwiVYl0kClXBaSM+5QCudaOA8lz3WZlF87qtTJaOQLSpDNszX+NGNUQCMU5g7rj9mfSYYfvkc79/Zlpww03yBGaAhsmszEGpK+P G5/+c9sd6W9/uzTt881vZzrnxAAaLzNmzFjZ0i5JDz10Qxq50070n2Wx225fS78947w0ZFC/9MRTY9KJP/5e+v73j4t6jD+lb++3b/rBD08IesvYEg9zYmzR Irpbq1MEizXLDoGWa7WI3QKly+A+Q6C0xo17hHxX16B4rbtkQuSyy65IZ511Rr7+xS9OS9/61jfT25Mnh6xjRi4rNGbqYmxXDDQWciPQ6faC0yKmbhk62hwG y7qtdYZLK9Z0jTJV3ry58ULp/zcLp6GvttHq0gPZ0jGj0X2Diy7pSjl8WFvT/WZDtWXjoGyHPay1Vo8sc3aiDON0D4w8vgm7Y/xwyBNIlYHfNZ7YDpvUerDP 
du3a5zzyEuDVz3Jb6VCl3vIsB7jAZEN4QTP4aHPPssziuMafa/6AFrzCyXlMvKknHy3KuN+0eLaoWebiOZFmnEBVwHStBHjDFgcRjBFAZY1RdGsQhlhJeUpw 1HP1ssOF0DlOxSNPPTQxAgnjEsGbGFi0aFma+s6M9O9//zntFlPM+rANE6YktBJs9+7d8zFypx3TPvt8I536y9NTq6BzrYA1PwyrWRj5gw88l84886Q0cmQ4 UST8wP/NffZO9933QJow4c3Us0fndNDBB2Yncp8RDR48OB1//PfTkd/7WfrayC2CtsUxydE68wq/JKK3a98mR7rPYrq9UJdvxR9dLVPQZdW+5goYN998W/r6 1/fMeE466cS0Taw/tQ7YW225ZZadRdBzf3922njDARGtSzeubZt20RX5JH0a4zfbe6o8yZjhiOrkWQykDPzJ2oIr3ZmN03rQIUdYKxxfeVEXz8rSN13oujHw teNgfORhskonl2Mpo2xprcpY2EBdQhca1KEvcMkL37pinMF9ToDmsj6k1V8z4JWxW7VX8MCBR1l2qx6YbBnf7rM/tuy63hOIBJ08Oxll8INuvKJLWXToorJL dg0vWsGQlAG3KaYRgGnIGLnCBqsAyqvNF68HkHEAZkqREAGyh8zOBEyAVR2pwlQmYnCup65rDmqMkreaBNPqYsCBDjDkgSEaGat89NGsdP21l6ehQzfOjKAR PId6NdVz+eBQ3q67fi0ZyB9w0OF5wgDudu1iKjVw9+vXN1dVlmOrr86QwYPSVdfenw7af6fciipUDQDs9QJeixamYmOPXRifCD79vRmZbnLlXNOmfhzdyvXC OdcIA4wp4qBZophlMWUeYSLv0cuZ8Uekve66q9JOOw1PBx98UKZxjz1G1dsxppqc/nzp39LQYVtmh9faM76msf4FJifS1VqwQCtQornAoKvml/Lfi/FZm1Ym J5pmWkXc6mTGFC1bakVjEimMjTGxAXzTB+eXqozlg8sIya4amfuMk42BQV5+ydhvNXD0wA82GrRO8LIPZeALVWS4yrIH9euEFD3BoxzcYKlLFsoJJGwSjVri alf2VFb7oSv0g48OTs0R4cKHxkV9B9tUDhz1+UMO+5orGbWw8QxCAFIYQSIFQh2aVdEBQkwoR4BgEBqB1Xx1IRUxCEn3ojKKeIQp656y8givGrKyYIP50IN3 xoLkc9mJqgCq0bvv+HICRxl8OB80aFC65qrL0l77HJw22rB/jnC9e9p9vLolU67Cyr/LpmcF1Tz3azJ2+WD6+LTxkN6xhUrXp3lMWLyd/vKXv6Utt9wit2i9 +nTM24+sybz99ox09dX/TBtvvFG66aZbQlHNwmGGpWuvuyFosR1nWbrkkr+H0++ZTj3t7DRmzMsxqzcyxg1t8lrRxEmTot5tQU+z2CHROesDb02DRw5ovOPa NiLBUKvCyMjXWFV0V66l9aQoZzeBpHx1BK3SsmXR5QuYHcI2rNeRoW1cur261mRQ5UC/dOZgN+TENhgclTQPWuNPtiEBl4x0AU0YsSfGTp/qwtM07IFNsA3B 29JM7daBif6Kx84D25U+iOlt8kMgG1QOzxyITYHPKdCmvsaCbbFL58qWGcfSc0ITpwQLv1pp8gEXjWCBbfYw0yoDEkbMCwHgKBAi1lw9obgGQB6BAapp1Epw JMoSIRAmAQ4uYajrQJh6jbRoMTCmoOVflIVaAmXMy5aVRzWMQ0TZ4gDL0yMPv5j+9vfLwji3zApEA5juO/f7xhsT8jy/fJFngw2GZPy1DMU732ijDdMfL/pd +u2Z54WQ10iTJryUZ7oy4V/6Q6kpdcxw6i3wqxE5l2zYDcayUrp1XSedfPJPa/H/83v88S/nvH79hqY+fbpnFzYm3Guvb+T8HUbskneHR0OT/nLlneGUl6yC 
0bZD79Sze8f0+muxbahB2mCDLVLnLmUvGx3SyaSJ74aBTVxZyjrV0jRw8LA0aOD6eVdEcBFT9aV1oRvGQm4c46VX3kgz3n0jdV93SN5ou07HNmnC+EkBY35a f+AmqX+0sMZ4JhgkemYfDM+5NbKPP56VHnv69TS4/7pRYkV6d/qHYR9rpSlvjc11OnXpG3B65qBA14yULhkoudaWgR1J1TZ1K9HLds06OtiblpnDgWFJgwwk sPAEnpYaLvrjPPTPpt1Du1/1wGf7tZHRg6o8wosWh/JsqqnoYDoaIsqEFFEQTY4BLa/lWPJU4M2coRhnWZMBWFK2JkTUaIDQ99//IE8hEzanUV+yaKpcZj4E IAKpq1+KIUJD09Bh/dPeKw0NbdV5/Kpzzjnnpt/HmCGlWC9Zu11aGq3DKT//XvrpiT/OExGF3jITBu+IEcPTn6PVgEsyWP+qZMtSSrNW0ftVZeSRH8W0iXHM A/c/lTbbfNt0wHf2TUNi8ZRxahl0/QhewHnzzTfTLbfcnu655z9p1932zDTusedeWe6vjp2UDjn4W2m/mMwwdpk6dVq6NLpyAtBLL4+P8dLm6bxzz8w7zPH1 0Ucfp3/968Zo6f4R48Y9s0E9+khMjpx4ZLRsF2dZhghz9LzvvgfSRRddkLbbbqcsQy0M+fbs2TPjfuyJ59O2W2+Sfn3aT3LrbT0J7crQ28yZs9Jrr72WHnzo 8fTiS+PTTiO2yjJkF1XfAuyDj76Yvr779unIIw9JA/r3j8ks61hlWUBZOwOee+75dN55v0/rD9g49e2zbgTzsimXExj4M9xqF87h5wjsCQ52SOZ01zLkxBGq ntkkx9StY5d1AkH56mD0pqdlLU4DAB4erBMasqjHifWiJDJwuJbvXBk4I+A2y1EUQkoGCHAFDdQV4o1meiw+IRAxEHMuwlFPl8F99eRhDOOQYZwgOUD1eoTJ V0ZrqJ/5+edlAyziXINlXeb+++7KXaU66CPc6kxwn3HG79KFF54fU9V7Rb1irMpc/c9b0+x4Hujiiy/MuNRBD57Qf9CB+6efnnJGTJ9vmfPR9OWkrLRgQXRr ViZwakKjhGbdpclT3o2u2Vkxs7bv/3MT67bbbpO++c1v5n14Z//+j6ldGwuPAs5Hadddtk+//vVpmWaw+4ch0os6V199TTrwwAOykblX0447jojWerM8qzhs k63Sww/fEl3GHevtVb+77LJLsg/wlJ+Pjoma6JZHy89ILTC/9vqkdM7vTk3GY3on/yuNGLFD0HBgdDFvTr86/YK07VYbZZmGaLMu581blq676o8JTXoqX5U2 2WSTmPzZOx1++KHpoj/8Kf3njgfToGi5LGbTnYDLvhgtWuiBvdA3mRtvLlpUNloXfazI9lqm8cu6mTqm9+lcAo/9sk+tCccJ98g26b7yDk4ER7UV5dm8Vqra EPrITTn3m0Jcu3I8WFPHudhKbc4A5ySSMtVTEQMQ4PI0rZAr07J57DSOSGG7zfIoo6yoLVVBFQcqA1iGoq9K8GAhFF70pDV6ps022zTXrX8qQ08//Uw40QUx rb1ftJbl0Qx4ML39NsPS5Zf/NX3jG3vGDNgeq4RQYWy++WYxy8bhS5ei5jf85dBSXQdzjseivDJekOd6bBjiWWeemp2o5i2P/BXBR4FT6lXFar04RY8ePdIO O+yQ9t5n3zTu9TFp9G9+kY2GzCQK699//XTxny5Jhxzy3ZznHjlK5MRIjj32mHT/Aw/FTvQjsxPJd9RyaCTXXXbZOeuNQ339G9/MRjLmlQnpxuv+ljiJpKy6 fhvWr7R37NghnPbYvDv66ON+nvbda2RE/7nppVcnpvvvvjH0tVmGU2GoBw541UjRYsz6h4vOj8B2errkL9eGU24Y9YrMazm9EmW1JH7xzvENKwRm+eBb8xEg 
2ZVxjW4nubAFjqDnInBXOPI4JpgcVjeOczlng+6h1wFOpQcfxpt4U4a9N4a4RKT5ubLBsxuI8atyNW6eV8tXoSBCs2naUXkM6S5qvh0ijGZaUhaBZoCUQ4hf EQexGEYYZt2zcwGMoRt0D2OLxwgiKS9V5T4Smzx7rDsgO4N7hIxO50ui7zxqj31ifejhLKBKc4WhhevXu0d6+81XM74M+Et/tHASumpa3R7pmsZGxu6DQvjz 0/DtN89T1sqRm6SbQr7gwE8Gfh3VyIYP3z7WuP4Wi6TvpnW6rp/loK6yDgmMb+/3rXxer/EBjntVyWeecXo8NDg8l6v3ajmw4JRM9R951DFpXhjb9Pc+TMcf d0iqTlTLKF9oL/S6rrRX/vb/9n7pR8cdHN3GmdHVG5cu+P2vsxOBIYgoX+uxKXoGs9JCX2zi2GOOjh3tvWPM2DfrTlm6B4d9sQ2tCVtUl9M45xTKyJOM2+St HYHZw5V1fRQdJmjA0bJxGLSwRV3XPn1653tsmNOAiUfX1R7lo9ehOygfLOWa6tIpgBCzH/bXAYJIQER3h/squSfCfhqRQB6jnfHee2UNIaIAQqtDIZTXE1Ql 3nVD4SIKIeASHmFhTl35unsDBvTLXZssqfijLBgijMcK1u3ZJUemWh/tyjAgfeoxL72a+8rGKuBWR1Ju3XV7ZLBVERVH/dXNzSnqfVUiCzDfmDA5Jgv2yPwL KoF6FZ6xY19LTz/9dHbGbbbZJmkJJTzgEe6dohv2u7MvSB9/8NYqNOAqIwk2hZ8yneuarCs/YDg3wSLh31gMj+Rfy4HnHp3vHM708CNPp2lTxsUs4hm5noCh TMX71FNPxS6KMVkvI0fuGLoYkGHBV2nf8+uj0lXRjV4jHvLbbrvtMhx/BBGJjV151dUxGTQxdNs27b7brtmR4UCXtG7sJtlyi01yqybAqcMO2Au9sh8J7RyR 3RkueBhVC6KMnhX63VeuSV4GKPalrqn+CjMQx9ixU8b/2Wd24JRH+/W8WrUqXcjW4Yz272HD+A298FZ/0Kiw39y4FARlU6PFqRaNV284RJSKPFw3j2IogEIR S0wYsK2Cd8qDRLIYatrSFCtGK3OUqg4lVGURZvVsCnK/RIamgXNuKHlIpgNcuKvw0fT+B/FkacfygJYogz6bHBm4+xxhwpvv5G5A+NF/JTNyHfOetdKV+a+b Ky9W6nkVzpxdM+PC6YLYQrPFJgNi4XZSjnRkVtO9996Xd3vH5v/IIptlafz48Xkxl7LxIzGCDYYMSO9Om7QqL9+IP8qRlfdBnHvueemZZ1+MJ2x75XFU3z59 Vt2v5cn7wgv/kO684/60TucOUeesvPujOlMt16NH93Cit9KwTbaJKftJuTUlv5qMxw477NA0aMim6eOZn6Q/XXJ5evSRu1atxVT9dQ3BLoz1ss2GDcyOoj6+ qqPpfp/00xPTJptunZcILjj/3PSPK65Mhx16SA4iyr/zzjvp3tjNbuHb+IfBCxTg2CzKHtgQx9JT0dUG32K24Mv+2A37wT+90416DJ3dgcdG0A0+J1CHk4CN d0MJ+I1Xl0Q+2y6blOfnyTg40eRg6/DC37huFjU7RdCcRQHEumaQtbAKiFEHITZ71oiHEMD9MiRM2FHOGdVxIBQs46GMPBgSWZXBkCQSORfVLSZab2gdmzKr 0nKhlX+qgZWdGGWhTzkLeroFWiNN8NyZFgRXd80qDC89MYaTCO+rUsWLjprwWRMZfTZvaizWrp0eefTpdMGFF0XrNCFmtl7PM4mjRu0eRdcJYayZNtq4LCJP mTK1Vl/1ywBssfmvFHjAr/huvvnmPMvVuXOndM3Vd6Ybb7w5F680Kivdd9/96fTTf5P69F0vzu9Ml1z65zCs1U67sli8kwLP7WOQ3jn97OSz099jecFs4ph4 
ZOPHJ5yYnWj9AUNj2nt63sQ7MZYJbKmSKi7nxtQ9unWIMa4F+0KD/EqXVurSoKFv396hy1Zpg422TEcecXh0LY9Nt97673TTzbekn//8lzFe9S6FsobJDozf 4WHwbNKajXytB3tyj56rjZEhm1A+XDk7D/uUz/Y4AftTT52pU6dmp6vDFffqzKEH+sC2gfbdOJxLyjrgAYO9G6fFmlsZt/DcShQjN3EAMU+uAtFXdY4QjKjD aRBQnUFddTiI2Q+/ooCkPHzVIQmnNssijXpwKKffyvj9atUQrm7DxPgZsIF8EVaz3LKpXxRgIDg3NY+mumGkrTDwtzqtNoDVeavP4K/JlH1N1ciXxA7zvn16 pvMu/Fs69Rc/j9vrpF/+8rgwlNtC8PGUbhitPXhW7G2KldRFp+RX4Knn5ST+hs8qB78lhP4xVSzQ7DBiWPo4pr3JHW/qV1o8TNh/4LD8qMSIHb8Wi8ExVo1F Vj2LUm4lzsxTaf1H7bFdOuaY72W022y3Y9orumuMnE7qgYfevXrlMtUmXORxSQTeWXNivBXbgMy0ki0jg0937vvfPy4dGi2QQP3BBx/mcq+88uqqiZl9v3Vg 7mazJbbFNuClyw8//CgHdg5SW3tLNXTOqMmA7VYbYV9wkxOZgGkii5xcgymRhzqu/brvV0+GHeolgAEnG2ar+OGc4MPrOuPjTSojXkuBAcgIAiDEA6LrVZG4 VpYwOUAVWGVUPiRaBoZg9g6j1Zit3RBSUWosd8ZsifEOHJgCB1wG2CwMz+Pa8qvBZClkQbSMfq6nd0WSMkFBYGgnJPTPi4euttt6w+zQtV79tZovikpw/r9S VWAus9qPVgUZ6z1jX5+Qvj5qRBjNDdFNG5IF/r9gFt5XO5Jy9v5J7kmhhv9KWuk3J01PvXr1yDLxuETDVB1Jnn19dp9oIbSmDe+tBL+yanmf3d13PZJO+MlP 0xGHH5bWX79fNo6GsBueV9oz7JU0egFMp04d0t8vuzw/Acye6AFeduOXATryeCh2fVgi+MlPTkjPPvtcOve8P4RjCIjlvRycNkf6MNgaKI1xOQX7oWcTCeyC rTBmNinpXTF69sTZ2Cv9cTo0uSZrAVp9tsi2HMqzZ+XYsXto51BwlABSghY5KMd2GlfvBUCmpADmEYNIRCjHyZwrC7Dr+gtYdQ7E1cOGQAS6V5GCjRlCYcxV yGC5V2gyQ+SJ0DZp3LiJmZZMXPwpzJRmduONNkjPvTA2O5168BAYRYrCn8Rs2qbDNo4I0yFXh6sma1+T3iyD+2q89V79reV1EWuqefXa72uvvZF23mm7dPEf L4pB8xarnAg/X5W+CoaxnfS/aGFkXWNXg0khL1VsSFNDHFb3Z3zoYb2y88AOkv9ORQYcqmuPgfGA3/x0/vmj8ybYDTfcIMsfDf+Ljq+i/dP50aOIx3b/9tfr 48nei3JgZCd0VX/JQoBlKxV2x44d8tLE3/92cejLU9Bla473ArIXemTYnEpLQX3y6Jhd0Xk9p3uHWWL5yknsCh3smd0pD5BALYhzOLDwVXpBpSVk31pPLT+a 0aHM/PkLsn3Xa3VjYqX0JSuw6lCVeQRgGgGmtUX59dZbL78Jx85fwBwY0ApgHuEEJg8BYIHh2m/ZxlEilToIzi1jKJ4AJNFUXbu7X3zhiRiMvpvzqwKqge66 69fSJ3PeybDhBs+qt6c/1Z0777NYYNw9aCizVfitMN6L2cZ773kqtV+n76q8jKTBHzAl9dX9cmKszZr3zpteTznlpNzyoTvTF7ySnxeuvBQvPrFP8O6778kv OQGn0lFhMpSG+V++r52yN09LNHPmrKz0Wve/fwud5GrSp8p0dZnS4uHpg/cmpt7r9UhHH31UNqZKO14ddlWMGfNSTHA8m+6//4HczQIHbQ3F8fEH72Zed95l 
q3Taab/IY+gbbrgxTZr0Zh7XgEsWjI69VBjk5Bg0aFA6+aQfpSefeDiMssywKaunorfBdqzdMGy2WAO6Fk6LBb58b5VVVhLM4ZT8KkMmbFM9j5iwVS2cfPVt MoazOrBuarvY7qS169OnT66HLjjsRaz8NGUoGIEEUEwC6FwyRgEU4ZpSZfVPlbNOQElgaLk4jHxlJQS7T+gQKmOHRMuW+uqrHytGuHJg124fOMrn1LhLeuaZ Z+Nx661WGXMV0JbRRTj3vPNjsHxS2uVro/LiL3p7xINlt95yQwxiT831wPmyIzz3/AupVbvWqWvnMvYryP77LyVJZILmLyd0LF08NW2//fdzlwWvaM9OEJb2 +ONPpJ+ceOrK54YWp6mT30h33nlXXrfIZRoArHJT/8tJWe8ucI8sjWG/XN/YLcw/Nq7GWHLp+5lmxuR9fV8uC36Vx6hRu+boTP50WPNNAhx02EmxITeeDo6u 62OP3hcPSo6JWbvOWVdVBwEp9V1/YI7UHqWwYdcevgMOODSwLImtTgdE8O2Zd2hYr9k4Jl20RGgCA15p6NChqVvPQdHy2ARbuoL4FQx0a3Uli0OVRVR1TD4o oyHgDBKYYMsTnNShv2rHbL32mJTVZaxOZhOv2dzqhPKr3Qv21Ufq/Yqvqe6VGTjMcAjX5v89EiHitG3rmf+YCQsiEe2+iG9GDQMO/VX1EUUJDi2DX4x4k2mn eAFJxYMxhKhj1dqEBII5nhZPQguDUX9k7I6+4sp/pW/H4p8nY92rrRwcPzr+hzli/P7ci2M6d2quv0FsTD3vvAtitf97mcZKX/3lsFdeeW3aYbth6a47b8v4 c8Uv/amzeRQRcfhLd+My8EsMo2FCl3TbbbenV15+On17/wPjYbwpCXWDBg3M95RpaOD4tVewpgrDtXO0oF90bBePazhnxKsS8uLSTGu89yg/K/TmvCmpd691 c/1arsKtvwzpy4nBXHnlP1O/Xp1ik2u/NG3a9LTTyN0DVq9clK5XJ8EjJpHCZv58yUW5dXHvoNi1cebvzkl/uviiNGSDzdKtt92bPl0Qzz6t1TQ9/+wjeVq7 8FAggRHE5zfc0jHdszt0MmbycXAIMmCL7tWD46CLbtHPRmpij+7Lh1P3TvAGg90J2vApBz59wyGPjcJb67rPXuEFC57GraIiQNWDFTI4LU7ROHe5bDBk+Jjj BJ5r4Z36k1oTgAwgOQrHAhzSOmXoeSOEaXbBNXMDlrx6oAEs1+7V6Xjl7QSfv2BhvATk+iwX+ODACLrRfGzMOD3z1IPRhXoqvfjiE+meu25OHogjGGXBUVYd yYzUC89PiPxyXfPzzQZ/0CKBUVND49cCSOhvmGqZI444NO37rf1DubPj2aaBuWXt27d0Jb+M03vi4mmk/3KuCpOPaHE6RDcDLhM4UTDn1jIVHvlsPHRg7jFk Bw8SKz21bPkttDd8WQsYyjKyn5/y09Snd/fcNR06dEg4xAU50Llfy4HjfNKEV9Luu+6cnch98rKw+rszT49Nsn+M9ZgWcd0hnvhNaccR22Sd1bqFFq/u+iS9 /96kwF/sBwxLLb169cq/+GLQfvWK2BhZsE159RztumOV52prxkgcAFyOww7rPICyuntsiXNJ4MHHpjmV+9WBBH/8lb2KUUZTzCkMuhCgcm0xOAoAVoCtIBMY QMo71xWRjxGeizhIa1LGPUgxqi5HAR+BiK+Og1kMutbimTp3Xz44nh065ZSfxcr9BnmBU5574FaB2cXuaJjc40RgC9cc58knn4qW6hdp91HbxfM4xVGU+aqE ZqltPNtTE9wVp/Ge5ClavFIEXBXeRhttlK7951URWcubTHVT/lfy1qUYHWYH+D9lgg9p2rszQlfelxBTsf+nUMkgw7GvTojWW5cl1vFivPlVqe7asJt7j1jv 
qnz5lXbYYXjadNNNsk58zYJeGqZazm/neG+fXegNdUIO7OeEE36U9t9/v+wo5OLhSq1DlWGF+Xx0tXccuUeWI1kyVDDANE6yxiTfNTtjc2gynjax5LHz7Bgh ay/7XLoiglsc5MHp4KNPrREYtTUSbNkRmMpJrtl31b/fyg9+2S97bxZraDYrN+Y4WhKZiMwzGisBdevWPTsXHVZCOIGEKMoE3B4mTTDiIEAgxNVhOF6tr+vG YdXHYHUw9zGCKQJBB6MEEw7RaqeRu8bEwSGxs/mRrPTqRGA5lG14qCffLwVyIltehg/fNxkUG1iWcUn7XAZfyjb89V5wqfa/nVd8zhs1snWqbzwO8WhsA3pG Vk7oqLjx6+sMnEj+e/EELWW4vzrF66FC/p4ZslAsVfrzhT9RvG4ero9I577cygKVdg8QDonA431+dOBhwYaplrNlptna68VbYm+L3RZvZJrca3jQhzUVBstG TJygXZlKP5rW67FOuuXf90SLW2RQAwk7UE6AGzRoYGwx6p+dqOIAS9lXx45Nl19xbXxep204Q3kuiN0YThg6mBggOy0Reerq1YP92YWgPFweaFwSr0WT593t 4OcAH/c4MOeGl+3jjw3o9SiHLmVy+RAae1ZOb0mq+WjS42LDjsYMGACAJb8Aa6E4Vm3mEO8asYycgtSTGKRyjG1evK2lejg4zqvAXSPMGz0JRB0EijZgYgI9 8MAnUWKFIyLtPmqbNGrvI9Jf//r3DKMqAi0cq+Ehrx4UYlvK9ttvn0bsODSEUx4rnhUvR+nUpV0o8rWMT31JPfw88cRTsYVmq/TAw0/llXX34KzlJk6cGHxM jlm7HhF1j07PP/98rut+pU2dmm6++dZ4dqh0Ud13SB999GF6KHZGbDxsq5jpKlPyFYb7Iu2rY19PG26wfsbdqWP7NG78hMgvK+5kjGZKtaetS3zx4v33Z+T3 /D37/NgsK3Ckagx2YKy91hqpezxpe/pvz8ovVIGzysxvTYzYg5UmT6RaxrmV/xeefyJtMnRwPMZ/dAS6h7MulYGr8ljtoNavMrSOdPyPTso7Gzhq1TkDrkYt n62wJ70A43YOIbEXAY+jsyF45ZGFPEmer4uA6YU47hmTsbeSik7BZKOVdrbZ0B61gnpYnJhMjMfYcpMBAwaP9hpajoFhrQBkKiMCUJ4HsHsQE4j8ipR3ugc4 4binP1mZ4Agiky0ejNOgGTz5HKY6KcIkMMCrzSl6lJUvWm22yZB0+VU3p6efejKYMegr06V1vIM+9DO+yZMnR2txX/r9uRfGw3BnR3dlZHQJtJ4l8kTRmOHr HN2vu2LXwLrJ++TQpu4VV1wVW1suzrNNZsnmzJmVF1pzlA9FaIFOOOHUlY8M2MXeMva/XRr9+ZY5wlEUHsBitH/581/TBedfFlP50+IdDr3iratlkP9hyMWb ghaGbDp1ap9uuuXuNGhAn/ywHXmQ7+WXX5H+ef3t8XTsOqGD8gj325Onh0yWx9hrcJY5ed8cM233P/BozLJ5+1Os+4UcvVPi3cC54YYbZD1bq7GJ9gc/PDlt MLhPlvXEcN7b/n1n0B1rMRGR2QLaGe+rr45NF170h3TzLXflbTv9+/fNYyX6evvtyekPf7wkXsxSPuMD7+9+95fQ8+yVeinrgeA5GDojtsj++uuvp6tiP98J J41OA9bvFW8sja+BRHBlP1pA+uZIJcB6cWl5Fx/+awuF5+pQZMXQa4smP1BmeOyJDbENdMMBdrVXDl9bHfaoDLw1waOMg51Vh2Ur4DTyWRcCg0CharSUJ0Hk vkggcQQEcRT3lAdU3eo86iqj9SK06r1w1AEbxtRh8H6VI2jX6tb7GKr0MUj9ZjBEinfemZ5eG1seud5++M7RKvSOuX2tUJO8jvRhbO2fEI9bz4w9Wzvvsm1W 
BAcSDKpiCcI14Tz26Csxhb5lDLDXi2nel2M88nHacvMN86Mg0RGLB9/eDNwt0sgdt43JjwXplpsfCcccFq1piYyUY/Fz8uQZIafF8Uh7v+irR8sxbnx6+aV4 J97m28XsZYf8vu4nHn8wfXPfb8fO9Z7RskzKhqOLgkcGMH7C22mLzTaKl1C2Tc8+Nya9NGZsGrnzdiHv8vgAXOT76CMPp+E77Bhjx0FZHi+MeT1ahoEhszJ+ pB+yfWf6B/lFmd/+1p4xCzUzHsr7VzxisnfWKX0xOOOg++/TNZsdLf9eEeDWjE3BH8X3pj6NcVajCDh2FixJTz35cDriyO/l2d3/3PlQ6hsTEt5w68sVbMGW oGnvvJffJ9i8Vbe09x4jo5WJ97/FUooZRb2Wt9+eFu8RfDD16bdhtLIDsz51eWsLVO2L7iVwBXT32YbEnhbEjDB54TEui15TeSMQ+/xsYXn3vLIND/fIDzw2 QE5etmmTqnJwkDEHQgP8AqiJmfLUdPENtpMddqutt1+hhWCwjCt7V/zWaWlG7Z5fCCQINGkIAFxTCwYnANhMCwJqBFKPYBCjbiY6ytZ8zinBDZaBOdgcrEYo 9yodlTFwRCn19fc9W8PQzGwZv3lEwuZb99W1Z8vLMgwQ0SnBI4qByQDUmzPHZy1bRAvTOu7ZA1i21TM2OyTsSO/QocxQLoztSfgEhzIpCK/WyzzoVt7b93nA 8uRxbAD+bElqF3U5pJk8Y5wWsSPAm0+9ow6/ZO0wW2q2cr11GWqr/OgKmcAhaeHt/MD7jBkfhtxjCj5mvGhJ94g+6EFramHRxMPcufHSyAgaPXt0CxmU7g+c ZNm9e/eAFdu5Yjy6NOTIqclA3XmxlcpYS5dfd9JDlJysezevBGiV68PFqNCu60zf6PXev48/nJ1mR+vWLt5V3nItL5DsHpzHv6DF4O/TkDu6G9pgXGaavc2o 2gOa7NDu3LlLyG+mItnolcEHB8gPlMYEBH1ZvKbbaismx9igPAGkOiw4bBme6kBg0Sm+5Bt+wK+OXzap9XLeaOiwLfI3ZDGsgISgamgQmiAAVN9QsrXGuEhL Y4euRHgMFhzlEE7h1bkogDGDhzjnBE0oBoe6l5zAvdrVg7PWwRyaKkN+TV9K6Kj3LVqus/I7Re6LOvgCS9ONNzgkdShepCNENOrvahks/qGVExE+OpShaHxm eGFcIr9WEkw0gQdv5cNgl1Gt0SwG9muEDOMl+3j+dH75/lHLlmtnRVQ60VcDDBrhhdOBHnKFp8oPzZ4WFeO0hgIaOpVTBp0O12CU7mZ59xuc8tBM5s7pS1mP NdSuujy6RJdf9kDPegUCAfiMTT3Jm4IEEHKtEwHWHZ173g0Mzt8qvkEVb8HJsIrDl50I9KKMQOHpajO4cIJHl3CTQeG3aZYf3IIcGc38uLwvJOOJZ6T8MvYK lx2Bgz+68tLOL0LfZFNtDxzl8Yw/+iRPsiAn+OWBAXeTvv3WH61J5wgKYkg3DnKRup771YzOn2+ae1EG4D6i1K1O4QV+ALuX36kdzgKWPq8yCMMUQgjEFnye PTcmA+aFoKyVuIcJSXnn8BQcBRcclelaNtMTjBK4soSB2SoIBoaW6uzwMFyJcNBBOSI4fqshikTwo4VyJee+5mAcgrdKp19w0ZZDbJT1/rwu8apg+wrNPnIu kx8SWSjI0NBc6VscdKDNATf6HHhFl1QjZNFb+YIIujmGg17Ac1+3ynoeeVenMTuo9xC+kN/EAyZ85COf3JyDhSew8KwX4Bw/6HEP7c7J2Lm8InebQst2HLOJ kOklfBEPbGqh8Y5O+kRnhYcOsDgkeulSUoYxu1+cN7auBW8dwomyQUd5XyIBRxK0qm3jhQz9gknf4Nqho7w8NIOjZ0RWUTzbA37oFt/4Y2sCqrJgNBkyZKPR 
biBORo3ezgGHjMJEL01hJVB5RHJCCJSnxAoYUXmHbTAOuboMHFPguTbdiwlEwUN5yoKBYU5QFVuZJsgqcH1aeNWnEC0bXmzzkK8OwTh3VKNAr0OCA0+Uhi78 5ygVecqg2S+4aKplfM0hKzobWDEytFZFWVT8PJThq9/V6Hz5gfH53I03vGZjj1YKbRlWtHxmEZUnP7JSBk0UCTY9uC+pJ48uRGwOJ8lTxsFR8QiWa3yQvetq UAxCa12jq3zRmVzJjxyV8etVxrqtgim46K66qS2cPLiMsapd5S+ehxOpK6GDngVO+tS10/KAYWHaxlM8sykwJNfoVhd8PQFy1lX3Si5dUu8/52D01irGruRo ls5LM8sXV8p70A0fvOu77rbJCLTqQR968Bek5USe5FDxuyYjtDnHf5Nu3XuOdoFAXTKCZxAKMSDCIlDEA+SwJiBff1pXQtdF9wcyCCjULwOtTLsHDyLBsIBm IU0+7/dLWcqL8HnGKWhSp8JDsIOg5CtnYoGxEaxyjI7A8IB+SpWvy8dpwZdPiZwCPGXR6hcNhItOsMjFUY0ZnT68ZXcyGAzLh52XfV6cPkf36L7g8fOch9bo hsS6RphClosuTlASgSSmYJtEfz26tO4brNvRrYsbJOYnjNGUDS4y0I0W4x8tGr7kVecmE/zi0T0y0fXzYkcwGCS9ue8az8pwIvzWpAz4xhpoW/BZeeGiCRm9 Et169VuuFWsvYQMcmr3gmfXpDsOl96IsuuDUEhuX6TaTO2edNdMrq2OhN/jQQuEdXVpPep41e2bWMdjGVAIR5xPE2R/9m8Ej28aNS+tDvqX1ixYs9KRlgYPu ygfZyk5vfFpS4bjgsFnl0dE+1jKDpNy1RLsDH+pwdjInO3l+mwwesuHo6lkESXGSCgpVgWstquBFbIy5lwUaBleRUQplUzAYDJAywfJLwCYyasRVDgOEhg7n CKNY+BwEoL4EnnvwOXffOWWqB75z+RJ+ssAiMlFepQ04tIPrPrwOBohX+aKde4wBTHjAI+hKD8dzyIMTD+Aaa8Enj3EIOBRuYoHToVUgoTB8VDrgcE9iaGHL WdbkQw4OMhZELFy7pgt0w0U2tSwYzr0nHE1kg3+8OJTFn6N0UYu80cJQBAm7FSpfunRkZDcMw9OFUgZcLQCY8JhEoWO4BDnwtUj1wUWyp/fZs3w1r+jfs0Ho gRsv4Hxm3BxByURMluPS0r3Duy54CRreoOqtwPbele4wmZCdXz0m5eCT2J1rMnbIR7dE9mjAI/xVx+Gj2YmVJWt6dQ/fVVdNBg4cMtqF9RgMKIAAzIt8zitA nlgVpk5FiAhltFwE57y2LroLCKDQOi5w30qy+u75ZaDVoDhq25hBwzDGwGSgxWBLS0Cw6mGKcAgBk8rUVAUFrvOsnJWtq0E+5cuvDuk+XHiUCIpBoVc0NrGC HjNOC6NFVVfrhTZ8kFWFV4OH1ghd+CVTkRJM9PtOrTUP9eBChwQWmHZQ60LqworkFM448YtOdegEfPKr8oSHvOmjBiy8qiupJ+lJSPJNT6OJvtwHS11fw/CO QnxpBdDGkRivBU4GjS740USG7EbCpwSeaWN1Jc4DHrkKLuQNFzmoT8Zaa91fOLt27ZZp1Jpo/SufelCCEbvSqhT52zIULU7oi/0IYPCa8ofDwb7wDD9Z6RbD yYbARpt89Tg4+sCGx33BQ88NrWBk++rRc73RIpwmk0AoYO0QHOVIgKngGvCaR3CIMWXMGCCATBdKOcgJVF3wwUYggh2E4FodDCCm4mKcmvbKGJwMxhqE8nBh wH39ffAluBgDuGhzTrlRNMrNy/eVcR8MggZDWfxVntHiHJ3KK6ubgi/34AajCjsLPMr6rcYNprrkAA651qlYdY0BoniGozuIL2XIFfwi79IVg4eBmxxgiq6V 
lZQlW7zUbrlr+OGp/Fae0eMc3+TsnRVwyxOEGD960aElAN/snICly1Z5zl36oENggAN+sOlJHQZLN2gxlilGzchLKwWPc9t4tDCu4SRfQQRNYHAKegSTA6Lb UIBe1Zk5c1Z2DF8eN6mBjyiaFsdH0+ijLs7WCRI00hG6HFX/gg4eq65cV1jo09oJjmjxsCr8dCJASU3ad+g0mjMgHCCICAZDKlXEujmQYEg5QsColqxGMEL2 cWBlwEMkhYOjDMbBVpehYFRZ91zDpQ4G3KMUA1YCMltGGZQND1qUVwPigIcAAEAASURBVA8M5eGpExbyCKPcL04hj5I4BiGC4bziRjM4DsqKnxzJRF00GRui 3bl6eIOj8skQ5OMx447WSGQETz7c8HrosHngMgbBo7LKkANnyLIL2oKxLGvwlcG7X3ygvfKOb9dgUy6Dd0+9HIACNl3Br35WfPCAXvISoUVsdEv0K5Gzbike wNOCqWNWk97JB81wOOAjH3qGDz0CJprQXA+81kCgJQETTnLFC1jZYYMGRuwcvFlBp0kF/FkTIytrbyYbjIFMENAbWuN/1I2JiZALvsgHfegFj4PByabkuY8/ sq+yxYvgLXgF+Vl+YHEeOExWuJdp32ijYaPzYDiAKYQZrz8qxlTGHioRAsCMl5O5X4RaFlYrsxh2DhaBSIgjUDM2lUhCAJeDilhVsFWJ4DhEPgqpNDBIjBMA uK7BFiHkMRR01TrV8Ny3VqVvz3gITfcOnQSMP7+itPqacfhtl6nO41eUhV95+7vyYDZoMMCFXxm0OldGywqmrqEPjenzk4FZpEAXNKyVaYeTbHUbReG5MQMF ppelKM849ftrt4hc4aBJ3T84GWFJxfDpiIzwAT4Zkxc8aJPQhW8BCzC8SVVPljDKNiyL9B9l56EjMFvFuMQEgq4Rm0ADOasrwQEvw6tyq3aDVvfYirU4Y0pJ XbZBf+TrlW5Izbpp2z4HE/ySA7haB/fYCVhgcrI8vg3dLo6dGOwQvWwbneiHn4OTDzrRxfHYg+6jaf6s91iHAtNYi/zy/YBFTq45Oltp0rlzt9GIdwPwKnTE QQwJhbkPCOB+ax0MI4YwRQXdgEq4rh3F1Zbo41go40xgY8KB+Sp4ZR1wKAMPuPIwI195+NCjDKEY2KKDAiqdlCoaedoRDxSsXw8W+tR1qINnNEpV0fLcJwM4 JTCr0Ctu9dBFkfhQFl/wmYo1CUEmCxbE91UX6T4Y9wlQJXpXeOjw6L7dA3AycF0+v1Xu8sFFB524xo9ruHVf6bDqSH1dbQ9n4oeMaiuuPl61LmgoMiifiZQv uOl2kTuDVqYGLN/TXRYD/8JHacXIp+pRADBUYGgMV100osuvlrHK1s4QPNCjfGXJDz0Wdn1vChz1zLCRRR0nkYV75GCs9Hl8awqO2mLl1iry5SmDrxpY67DD Dh10o6faGRmRI3kJdO7hPUjLeuY86ISHgynfZMDAwaOdAIYBBKsoVSIZrqlGjOheuI8hjBMUpcjznU4CwSDC5VEEoorSy769KiwGLaooBy9GEa88PH6r4TPQ 6iTGcMopz0gpzaJnpSMbcdDvAcUKD6w6loIfPPUpDH/oJgdl/LqPropX90FCp/uS33roqonolCbCUpD9eAxZPnnhZUnANS4hO2sW6qNHFPWaXWsfeX0tIm0N FlXR1dnBQptukTzllMG3fOf4Fa3zOChoQk/lB69krwy9oMVWILRLZAAH2XEWDklOymrR6YxzuU//YKHB/WLIZRbXPbCrXvGZ6Qq5Oyd3s23ga4WVc1F161eA QbeFbOtE6kTBLDvl60ttvOqMYWvZvDTSBAk+BQM6I3v2UZZVSutFt5zSfWXtwC/6W/2eRduh2GKVsbKckd4ERQ4Gd5M+fdcfjTkFJBUAA7jMbhQGRS6IAUUU 
hjBIABhWD3DnfinVuToVnvIURIjZqKKOyMeB3UMHhanHiU2ZLojtSLPnzAuDKU2te9UhDL4xY+xScYmGBpZwcGL0ogfeHF3DyPDmcA2fbgfjkLSY6EWP7o4A ousbf3I3zhQu4yiCLFFUqzFh4pT06ivPpS/iy+geYQC7DNJ1T8uWGNPV5Pzw4y+nt9/5MPXq2TlaoegCRn3y1B1Rh0FaK7FLpFMMcJ0X5ZUuK0fBb7OmMUER eBrFNhvyzw/wRSuo22Nig0pNkhgoO9e1RRcZwUcGtUXlyN7f/VmMM9rF/j3l6BwtWlswc4sX52bPOJQAhl56Iy8HvdO5eoUnzlGCjvvkRgbFqWI8GWXlcyY4 TXnPmzcnf5LStSDkPj11Xqdzthuv6qJPupkfYx280znnZvie1WI/Agka6A+MEpzLliI0uGfBHA/4F8TQ7fOh6Cl6K/onczTUljzDD0fGa6ald59+oxVi2DIo iIBdMxjnHMq9Qnx5KR/CMMCoASVA0Y9iIOSpYJQoU56BEbl01WqXj4ARhhgH5uADk5AWL16WZnzg0YUB6aPoFr4xYUp+CSM64LB9hKApEg8hufgY8iex1UjX zTikTCqgFR5l0UlYxaDKY8XZKFc5TxkToY3BUQLajIU0495MRCHGDehgZG++PT0dfuj+8T2iE+NxgD7ptjseCAfwBtPumS7Gwzk5wyNPjku/OfW4dNTh34n6 S9M119yS+q2/XsaDLokc4Kxf4StjgqIjfMMbpYpzhZPofljgDrsJvksXq1mzJrG7+pF4bVf3CFYd42uHD8Qm3t5hjJ3y9HXtPnFe7wW8/74xabvth8W6Vpv0 wCMvBf2tY1+gKfYyY8vw4a700S06BRH5jNJ1Xb+hd2WKkZcF2hrQ5Bv7+ESoVpkTaCmyDgOB++Tl9QV0UBwKX6WHYOkBXPZENqyejsFnf3CTEcdQjsOjr8Kp vLvWa1C34sYn3GyCvZQF8rIGB6Zy9K8c2GzfeaOdRu62AgEVgILWPERPUVolFQgJQkgk1+rwZgaWgcU1Y9Ac1zKlfF03KE1sNXwOox4H5KyVhshKTz4zNv3k +MPjZYvH5BYL09fH651+PfqCtOnQAbmLBI46DM6ugIcefCU+tLV5fun+008/n8a8ODEeFdguK0BZ/FA6BfjFA6FrhbRsNuOaISwGYKaoTK7YTqIbhU/RSuuo nunQKVOnx2dJzokvUWyXnx71IpGJEyelgw85KhtlMagmwV+8OCZ2L9x0w9X5cXldEHzffvud6ccn/jKesRqcFY4WrQi+TIbQhy4LA/00d6nCSCJ6Gn+ZyMh9 +DCm7EhB44rIbxX0PfjAY+mPF5+TDj7owBycpk+fHs8UXRyPh7wSj250z/Qw0sVBU48e3dNpp/4svxiS3t566+34ftJp0S3iRD6sRS5lX5pAhCc2oTv9xRda hrL2Qp9sBC2Mnu7JmT0wcHLX3Yvq+VGM7YaPTOPfmJLmzpoaj5TsH4P2udkZ6AoOXXi6MFMn5S58BEQtD1zuCdjsl5wcAhCcDjCMwdCBLrgFM05C945Fi8sY y1Q/3VabUJ8OwGTTcOEDTMHeAZbkvMk6nbuOdqKgyjyX51l8BChHxriHCNc8kEBEeIAZA+QG/PIrkYQBZiFCi7F6cA85x8AcmGY9GITIwmA+/nhO8tzMmWee Hu8x+yAdecQRObofcMB3otFZkh57/NnUr2+vzBSnf2/GByGEBemmG/+eX3iy88iR6bsHH5h69e6Sv5LQvVunTBsjEBjwqLVFP9oNxku0Ed3KtK9Ibdqb4LQs XnLhc50dO3bIgtOlfeixMemc+B7S1/fcIx5m+1286PDr+d3YRx55RLSig9Kvf3VqPNvUL2B/np579on4fOcD+WsRJ5xwQrxl9FupV69e6aAw9AUL5qX/3P5A 
6hXvlyuRMF7c2LVL7nvrUul+0AN5iZKc2oDX9Dn5ruldd3FfC23C4sEHXo5XYp2dfvCD4/ITuxdeeGHabbfd8nsZ7r773pDpR2HYYVQRILTe1137j/x2Jjzc fPNN8bamb8eTxNulS/78j9yC6R3UaEw+aEQL2fmlS91jBk22WnH6r60/mYPBZjigOjffdEP+muLxPzg6Xs81LF1w8ZVp/b49Qt7FNtgFGMY7z784Lo0f92I8 ibss3k0+KfVct1tQUZ5PEnTAU1Yij0pf1nMEI8n9Yr/lKQO2zkl06fQuyE89vw7OB5Zz5SoMPMAnH4zqpNFzXt26cIQ6TakA5LW7xoEIhEMNHDgwA2GM8kRX ZQmuGqn6PLYQWyYGasQCQz7G1CnP1JR3kDOcZdGNOvmkn2QnGjBgQLxK9yfxlbnd09ixY/PrtSZNfCVeIH9dfo3WzfGAWs+e3dKD99+eX9Zx66235N9HH30k HRUGfeSh30oz4it46KFEjiFVYaEb34RDSIJK7XqWBb4yQyNfN0JwIAt09u/bLX/E7LHHHs0vfwSbc/zlL3+Jz5tsm86PN44uDV6eiDHRtddel59QPe644yLy r5+N78gjj4z3N7yXvvH1PdOH78fYJZyTUTJQ6xzkxZm1ZsYrzhk0nZj8+Cy6Q/LQgz+y/OKLFfElvOHpqKOOiKdgn4qv5u2YH6/XUqL9oovOjXHVkpDJzPTc M4+n6/55WR5j0psXf2699TbpiAhcvvJ++GH7xxuZxkfUL7NoZEVOElhaeDpmM2iuwZVNoIeOycRyA/qdP//cy/EBgMvzS1XOP//8eOnk/fH+u/3T1ZdfEC/r vCPKtg8dmEW1nNEiPiN6TzrmqAPi8fk3ws5ejXdLPBO09co8CGqMnW44cZ1QQRt+yEqwLC1HeeoATWijf3XYsAMcMtKd7Bw79dmmeupLeOJoYKpLFmA55DUa vsPOKxp6MYFQJmJ4nAoQA6IcYSijxVLGPeUYqcF53M5lKyNmrWwlgVA98AhcF2bSpMn5ZYP9+/eLSP5eTHe2TBPfnBLjje/EeOOEMIajciQVvW+77bb8Yo3z zjsvdz1ef31cGNiszLCWCh39+w/In1M89NBD88sGfTnBm0IZ9V57fyt3HQhdYhBoZATq4s2BJ/wTDoVymFIn+sQxA5QH1/FskY9zHbD/PvmrD5zjkEMOCSPc Ot6iOiX98Ic/jMfb78l4yEWLpJvy+uuv53Ivv/xypvuMM87ITj9w4OD8oNrOu+wWsoO3vKHWoJ68m0QrTemMiwzp4sNoVXS/rTOtWFGmbyn/kYfvj3dC3JCN 0/Wrr76aX8j405/+NPnc5EEHHZTrMxB6owuOP378+Phk5yUZD8PXFXzxxTHpa1/bJb7wNypkV3aP0KNuGjnpYmkZyc8ak5bIOcfu0aNHhq+ngmYtuE+YXhbv Bj/qqCPjW1J75Rb82XiDKxz77/+d+HTp3umeu28PucW2q7bxTrl570bA1ELuFwFnenxz99YIXN/Ige7Agw4NPIKONaDSvaIzTkHmNSCZQbXrHl14pXcyro/7 eFyFbtSpgYJdVAfiTK7xRF7w0Q85kEH1iaYQY1RBvwBSgIIOAlGBQHR3TEMqa3wAiKjBUDDB6KrnglGMaFlqG4QY4LnO8KN78v4HH4eCRuT3Fjz2+NNRb2Ee yI977fn40sJfsoH/4x//iJc8npcNkhH4rAncXhLZo0f3zBg8hNO1S9c0+vTRiVFLBC5ymAb1VfIqSDxK4DBIi4paGvfBko9fCf94k/IqeeCx1R+ItyaNjUfJ f51hTJgwIUdz5XQD0XPjjTdm/GD60PFmm22Wneu3v/1txqOs/j1jMFbxQWIOu3BhTJhEV5deyKzMyplNLVPt4Okmdw/+Q4zZkO19Wx7OZJtMfGkpbbXVlvld 
FZtvvvmqlzVqZX7961/n3oSgZVzTp0+fbFyXXnppfP7lPiRl2nw5nh7teCl5ZZ3Kfj+9h2qQy5eWLhU7IB8Gh3eGxpDJn82suWb5hvBmWwxPe+65Z25d8ChQ
go
MIT
626af2d80973270c4d59b8df7153ac47ad67ed7b
2026-01-07T08:35:43.337630Z
true
ollama/ollama
https://github.com/ollama/ollama/blob/626af2d80973270c4d59b8df7153ac47ad67ed7b/integration/max_queue_test.go
integration/max_queue_test.go
//go:build integration package integration import ( "context" "errors" "log/slog" "os" "strconv" "strings" "sync" "testing" "time" "github.com/ollama/ollama/api" ) func TestMaxQueue(t *testing.T) { t.Skip("this test needs to be re-evaluated to use a proper embedding model") if os.Getenv("OLLAMA_TEST_EXISTING") != "" { t.Skip("Max Queue test requires spawning a local server so we can adjust the queue size") return } // Note: This test can be quite slow when running in CPU mode, so keep the threadCount low unless your on GPU // Also note that by default Darwin can't sustain > ~128 connections without adjusting limits threadCount := 16 t.Setenv("OLLAMA_MAX_QUEUE", strconv.Itoa(threadCount)) req := api.GenerateRequest{ Model: smol, Prompt: "write a long historical fiction story about christopher columbus. use at least 10 facts from his actual journey", Options: map[string]any{ "seed": 42, "temperature": 0.0, }, } resp := []string{"explore", "discover", "ocean"} // CPU mode takes much longer at the limit with a large queue setting ctx, cancel := context.WithTimeout(context.Background(), 5*time.Minute) defer cancel() client, _, cleanup := InitServerConnection(ctx, t) defer cleanup() if err := PullIfMissing(ctx, client, req.Model); err != nil { t.Fatal(err) } // Context for the worker threads so we can shut them down // embedCtx, embedCancel := context.WithCancel(ctx) embedCtx := ctx var genwg sync.WaitGroup genwg.Add(1) go func() { defer genwg.Done() slog.Info("Starting generate request") DoGenerate(ctx, t, client, req, resp, 45*time.Second, 5*time.Second) slog.Info("generate completed") }() // Give the generate a chance to get started before we start hammering on embed requests time.Sleep(10 * time.Millisecond) threadCount += 10 // Add a few extra to ensure we push the queue past its limit busyCount := 0 resetByPeerCount := 0 canceledCount := 0 successCount := 0 counterMu := sync.Mutex{} var embedwg sync.WaitGroup for i := 0; i < threadCount; i++ { embedwg.Add(1) go 
func(i int) { defer embedwg.Done() slog.Info("embed started", "id", i) embedReq := api.EmbeddingRequest{ Model: req.Model, Prompt: req.Prompt, Options: req.Options, } // Fresh client for every request client, _ = GetTestEndpoint() resp, genErr := client.Embeddings(embedCtx, &embedReq) counterMu.Lock() defer counterMu.Unlock() switch { case genErr == nil: successCount++ if len(resp.Embedding) < 5 { // somewhat arbitrary, but sufficient to be reasonable t.Fatalf("embeddings shorter than expected: %d", len(resp.Embedding)) } case errors.Is(genErr, context.Canceled): canceledCount++ case strings.Contains(genErr.Error(), "busy"): busyCount++ case strings.Contains(genErr.Error(), "connection reset by peer"): resetByPeerCount++ default: if genErr != nil { t.Fatalf("%d request failed", i) } } slog.Info("embed finished", "id", i) }(i) } genwg.Wait() slog.Info("generate done, waiting for embeds") embedwg.Wait() slog.Info("embeds completed", "success", successCount, "busy", busyCount, "reset", resetByPeerCount, "canceled", canceledCount) if resetByPeerCount != 0 { t.Fatalf("Connections reset by peer, have you updated your fd and socket limits? %d", resetByPeerCount) } if busyCount == 0 { t.Fatalf("no requests hit busy error but some should have") } if canceledCount > 0 { t.Fatalf("no requests should have been canceled due to timeout %d", canceledCount) } }
go
MIT
626af2d80973270c4d59b8df7153ac47ad67ed7b
2026-01-07T08:35:43.337630Z
false
ollama/ollama
https://github.com/ollama/ollama/blob/626af2d80973270c4d59b8df7153ac47ad67ed7b/integration/library_models_test.go
integration/library_models_test.go
//go:build integration && library package integration import ( "context" "fmt" "log/slog" "os" "testing" "time" "github.com/ollama/ollama/api" ) // First run of this scenario on a target system will take a long time to download // ~1.5TB of models. Set a sufficiently large -timeout for your network speed func TestLibraryModelsChat(t *testing.T) { softTimeout, hardTimeout := getTimeouts(t) slog.Info("Setting timeouts", "soft", softTimeout, "hard", hardTimeout) ctx, cancel := context.WithTimeout(context.Background(), hardTimeout) defer cancel() client, _, cleanup := InitServerConnection(ctx, t) defer cleanup() targetArch := os.Getenv("OLLAMA_TEST_ARCHITECTURE") chatModels := libraryChatModels for _, model := range chatModels { t.Run(model, func(t *testing.T) { if time.Now().Sub(started) > softTimeout { t.Skip("skipping remaining tests to avoid excessive runtime") } if err := PullIfMissing(ctx, client, model); err != nil { t.Fatalf("pull failed %s", err) } if targetArch != "" { resp, err := client.Show(ctx, &api.ShowRequest{Name: model}) if err != nil { t.Fatalf("unable to show model: %s", err) } arch := resp.ModelInfo["general.architecture"].(string) if arch != targetArch { t.Skip(fmt.Sprintf("Skipping %s architecture %s != %s", model, arch, targetArch)) } } req := api.ChatRequest{ Model: model, Messages: []api.Message{ { Role: "user", Content: blueSkyPrompt, }, }, KeepAlive: &api.Duration{Duration: 10 * time.Second}, Options: map[string]interface{}{ "temperature": 0.1, "seed": 123, }, } anyResp := blueSkyExpected // Special cases if model == "duckdb-nsql" { anyResp = []string{"select", "from"} } else if model == "granite3-guardian" || model == "shieldgemma" || model == "llama-guard3" || model == "bespoke-minicheck" { anyResp = []string{"yes", "no", "safe", "unsafe"} } else if model == "openthinker" { anyResp = []string{"plugin", "im_sep", "components", "function call"} } else if model == "starcoder" || model == "starcoder2" || model == "magicoder" || model == 
"deepseek-coder" { req.Messages[0].Content = "def fibonacci():" anyResp = []string{"f(n)", "sequence", "n-1", "main()", "__main__", "while"} } DoChat(ctx, t, client, req, anyResp, 120*time.Second, 30*time.Second) }) } }
go
MIT
626af2d80973270c4d59b8df7153ac47ad67ed7b
2026-01-07T08:35:43.337630Z
false
ollama/ollama
https://github.com/ollama/ollama/blob/626af2d80973270c4d59b8df7153ac47ad67ed7b/integration/embed_test.go
integration/embed_test.go
//go:build integration package integration import ( "context" "errors" "math" "strings" "testing" "time" "github.com/google/go-cmp/cmp" "github.com/ollama/ollama/api" ) func dotProduct[V float32 | float64](v1, v2 []V) V { var result V = 0 if len(v1) != len(v2) { return result } for i := 0; i < len(v1); i++ { result += v1[i] * v2[i] } return result } func magnitude[V float32 | float64](v []V) V { var result V = 0 for _, val := range v { result += val * val } return V(math.Sqrt(float64(result))) } func cosineSimilarity[V float32 | float64](v1, v2 []V) V { mag1 := magnitude(v1) mag2 := magnitude(v2) if mag1 == 0 || mag2 == 0 { return 0 } return dotProduct(v1, v2) / (magnitude(v1) * magnitude(v2)) } func euclideanDistance[V float32 | float64](v1, v2 []V) V { if len(v1) != len(v2) { return V(math.Inf(1)) } var sum V = 0 for i := 0; i < len(v1); i++ { diff := v1[i] - v2[i] sum += diff * diff } return V(math.Sqrt(float64(sum))) } func manhattanDistance[V float32 | float64](v1, v2 []V) V { if len(v1) != len(v2) { return V(math.Inf(1)) } var sum V = 0 for i := 0; i < len(v1); i++ { sum += V(math.Abs(float64(v1[i] - v2[i]))) } return sum } func TestEmbedCosineDistanceCorrelation(t *testing.T) { ctx, cancel := context.WithTimeout(context.Background(), 2*time.Minute) defer cancel() client, _, cleanup := InitServerConnection(ctx, t) defer cleanup() for _, model := range libraryEmbedModels { t.Run(model, func(t *testing.T) { testCases := []struct { a string b string c string }{ {"cat", "kitten", "dog"}, {"king", "queen", "baron"}, {"paris", "london", "vancouver"}, {"The cat is sleeping on the sofa", "A feline is sleeping on the couch", "Quantum physics is complex"}, {"I love programming in python", "Coding in python brings me joy", "Pizza is delicious"}, {"Machine learning is fascinating", "Artificial intelligence is amazing", "I need to buy groceries"}, {"The quick brown fox jumps over the lazy dog", "A fast brown fox leaps over a sleepy dog", "The weather is warm and sunny 
today"}, } for _, tc := range testCases { testEmbed := make(map[string][]float32) strs := []string{tc.a, tc.b, tc.c} req := api.EmbedRequest{ Model: model, Input: strs, KeepAlive: &api.Duration{Duration: 10 * time.Second}, } resp, err := embedTestHelper(ctx, client, t, req) if err != nil { t.Fatal(err) } for cnt, v := range resp.Embeddings { testEmbed[strs[cnt]] = v } // Calculate cosine similarities cosAB := cosineSimilarity(testEmbed[tc.a], testEmbed[tc.b]) cosAC := cosineSimilarity(testEmbed[tc.a], testEmbed[tc.c]) // Calculate distances distAB := euclideanDistance(testEmbed[tc.a], testEmbed[tc.b]) distAC := euclideanDistance(testEmbed[tc.a], testEmbed[tc.c]) manhattanAB := manhattanDistance(testEmbed[tc.a], testEmbed[tc.b]) manhattanAC := manhattanDistance(testEmbed[tc.a], testEmbed[tc.c]) // Consistency check: if cosAB > cosAC, then distances should be smaller if cosAB > cosAC { if distAB >= distAC { t.Errorf("Euclidean distance inconsistency (%s) for %s-%s-%s: cosAB=%f > cosAC=%f but distAB=%f >= distAC=%f", model, tc.a, tc.b, tc.c, cosAB, cosAC, distAB, distAC) } if manhattanAB >= manhattanAC { t.Errorf("Manhattan distance inconsistency (%s) for %s-%s-%s: cosAB=%f > cosAC=%f but manhattanAB=%f >= manhattanAC=%f", model, tc.a, tc.b, tc.c, cosAB, cosAC, manhattanAB, manhattanAC) } } else { t.Errorf("Cosine Similarity inconsistency (%s): cosinSim(%s, %s) < cosinSim(%s, %s)", model, tc.a, tc.b, tc.a, tc.c) } } }) } } func TestAllMiniLMEmbeddings(t *testing.T) { ctx, cancel := context.WithTimeout(context.Background(), 2*time.Minute) defer cancel() client, _, cleanup := InitServerConnection(ctx, t) defer cleanup() req := api.EmbeddingRequest{ Model: "all-minilm", Prompt: "why is the sky blue?", KeepAlive: &api.Duration{Duration: 10 * time.Second}, } res, err := embeddingTestHelper(ctx, client, t, req) if err != nil { t.Fatal(err) } if len(res.Embedding) != 384 { t.Fatalf("expected 384 floats, got %d", len(res.Embedding)) } expected := []float64{ 
0.06642947345972061, -0.01160573959350586, 0.3302811086177826, 0.309552937746048, 0.36223655939102173, 0.05672447010874748, 0.6955016851425171, -0.17069467902183533, 0.8547305464744568, 0.21076075732707977, -0.29339903593063354, -0.05926772207021713, -0.003363408148288727, -0.4204462468624115, -0.1061280220746994, 0.30754348635673523, -0.14551642537117004, -1.0430994033813477, -0.4805174171924591, -0.40448474884033203, -0.4345352053642273, 0.3573606014251709, -0.4098161458969116, 0.25664326548576355, -0.3021087646484375, 0.36236199736595154, -0.23262615501880646, 0.08319848775863647, 0.28042519092559814, -0.052289899438619614, -0.12552005052566528, 0.402255117893219, 0.24357250332832336, 0.08881516754627228, -0.17023836076259613, -0.2868475615978241, 0.4790303707122803, -0.3199635446071625, 0.02826809138059616, -0.19417747855186462, -0.19217649102210999, -0.21705707907676697, -0.1210065633058548, 0.10262420773506165, -0.07726037502288818, 0.10094445943832397, -0.06194962561130524, 0.1712605208158493, 0.628441333770752, -0.10222385078668594, -0.16214007139205933, 0.059920795261859894, -0.5053377151489258, 0.10545563697814941, 0.32686805725097656, 0.7650210857391357, 0.006465774029493332, -0.13403119146823883, 0.6090353727340698, 0.05603303387761116, -0.37635889649391174, 0.45424884557724, -0.5053073763847351, 0.4572359323501587, 0.6084011197090149, -0.3659921884536743, -0.3536888360977173, 0.05569244921207428, -0.4166066646575928, -0.43796032667160034, -0.16600576043128967, 0.12460685521364212, 0.40493422746658325, -0.18632565438747406, 0.2390710711479187, 0.007283639162778854, 0.4001992344856262, -0.4455743134021759, -0.05360018089413643, -0.08401738107204437, 0.2041706144809723, -0.42083415389060974, -0.491476833820343, 0.7860275506973267, 0.08280622214078903, 0.4309011697769165, 0.09778489172458649, 0.3392091989517212, -0.5618907809257507, 0.06766007840633392, -0.05127308890223503, -0.23472431302070618, -0.7611223459243774, -0.20227840542793274, 
-0.5491426587104797, 0.09030043333768845, 0.37326449155807495, -0.2696656584739685, 0.2814738154411316, 0.1461343765258789, 0.309052437543869, -0.3387487828731537, 0.1990429162979126, 0.0474909171462059, -0.02756538614630699, -0.20544570684432983, 0.5137258768081665, 0.22562497854232788, 0.40487033128738403, 0.04954294115304947, -0.23911823332309723, -0.5578761696815491, 0.14376327395439148, -0.12795016169548035, -0.26285219192504883, 0.3614377975463867, -0.22225692868232727, 0.11940789222717285, -0.6961514353752136, -0.3324243426322937, -0.07613810151815414, 0.24946099519729614, 0.1462409496307373, 0.5309336185455322, 0.051560595631599426, -0.11104149371385574, -0.39189594984054565, -4.767201176712463e-32, 0.892546534538269, -0.07396792620420456, 0.6088366508483887, 0.23729179799556732, 0.2614588737487793, -0.3626874089241028, -0.23131835460662842, -0.024579279124736786, -0.12901946902275085, -0.2306443750858307, -0.0376533679664135, -0.09649471938610077, -0.16013199090957642, -0.31914401054382324, 0.3151017129421234, -0.11264121532440186, -0.4020160734653473, 0.039211247116327286, -0.5478582978248596, 0.5563258528709412, -0.6903842091560364, 0.2746567130088806, -0.24196553230285645, -0.053318753838539124, -0.18611761927604675, -0.28490889072418213, 0.237456813454628, 0.4946249723434448, 0.37237465381622314, 0.07815749943256378, 0.6494859457015991, 0.6915512084960938, -0.14422327280044556, 0.30338582396507263, -0.17378094792366028, -0.33589833974838257, -0.09702004492282867, -0.04210608825087547, -0.566387414932251, 0.18866634368896484, -0.3533778488636017, 0.37286972999572754, -0.39420801401138306, 0.0818595215678215, 0.436712384223938, -0.08886678516864777, 0.2527940273284912, -0.5864061117172241, -0.37891554832458496, 0.21103361248970032, -0.2275354266166687, 0.1558678150177002, 0.09536703675985336, -0.27437490224838257, 0.4484926164150238, 0.20584626495838165, 0.45972558856010437, -0.231113001704216, -0.021833699196577072, 0.3253912925720215, 
-0.08802174031734467, -0.023067735135555267, 0.33492740988731384, 0.5189340114593506, 0.2481488585472107, -0.07638847082853317, 0.25147074460983276, 0.2771286964416504, -0.08443005383014679, -0.5207436084747314, 0.05951530486345291, 0.08816319704055786, 0.15935833752155304, 0.0644921213388443, -0.07194079458713531, -0.5383226871490479, 0.17800968885421753, -0.195652037858963, -0.028597159311175346, 0.08582349121570587, -0.23225288093090057, -0.12984338402748108, 0.3651025593280792, -0.4039592146873474, -0.3628298342227936, 0.08263863623142242, -0.12648534774780273, -0.08284908533096313, -0.1042669266462326, -0.4579034447669983, -0.2961195111274719, -0.32282471656799316, 0.3182551860809326, -0.6890494227409363, -0.7114676237106323, 2.3665072841905432e-32, -0.0030965525656938553, -0.5696439146995544, -0.5794872045516968, 0.04729880392551422, -0.048917483538389206, -0.10963250696659088, 0.298623263835907, 0.4452674388885498, -0.2828809320926666, 0.5696343183517456, 0.3004711866378784, 0.44842660427093506, 0.06550214439630508, -0.020054858177900314, 0.385932058095932, -0.23460465669631958, 0.23865005373954773, 0.4363722801208496, -0.24931970238685608, -0.41073542833328247, -0.2937365770339966, 0.5095447301864624, 0.2864843010902405, -0.14028388261795044, -0.14269764721393585, 0.4107881486415863, -0.2581801116466522, 0.18544888496398926, -0.08612997084856033, 0.33715111017227173, -0.24288496375083923, 0.3599962592124939, -0.43829354643821716, 0.15094976127147675, 0.03177203983068466, 0.5965112447738647, 0.03364168107509613, -0.5481097102165222, -0.363423228263855, 0.4825053811073303, -0.7288467288017273, -0.13361915946006775, 0.7423286437988281, -0.3515661358833313, -0.37989044189453125, -0.1576842963695526, 0.3734908998012543, 0.8393698930740356, 0.23719121515750885, -0.28990280628204346, 0.11215505003929138, -0.16382968425750732, 0.47951722145080566, 0.28471529483795166, 0.5308315753936768, -0.1286555975675583, -0.22689077258110046, 0.6377706527709961, 
0.34224453568458557, 0.07091143727302551, 0.26538553833961487, 0.014475930482149124, -0.050034329295158386, 0.011025313287973404, 0.09357182681560516, 0.1345357596874237, -0.1523902863264084, 0.14176052808761597, -0.0609259307384491, -0.3332745134830475, -0.1072426363825798, -0.5933747291564941, -0.40028926730155945, 0.5343422293663025, 0.016202416270971298, 0.27436596155166626, 0.28844428062438965, -0.1660136878490448, -0.6286065578460693, 0.5850632190704346, -0.6491153836250305, -0.03207448124885559, 0.23312292993068695, 0.09339666366577148, -0.42595869302749634, -0.5011518001556396, 0.08187201619148254, -0.3312609791755676, -0.3677852153778076, -0.3758619427680969, -0.12195874005556107, -0.014479270204901695, -0.014539752155542374, 0.23270025849342346, -0.3609132170677185, -9.438503667524856e-8, -0.05230816453695297, 0.17612962424755096, 0.01489749364554882, 0.06601762771606445, -0.14300350844860077, -0.1422577053308487, 0.7347333431243896, 0.030603498220443726, 0.24959787726402283, 0.026135217398405075, -0.4412609338760376, -0.18663707375526428, -0.29235413670539856, 0.4696626365184784, 0.12353914976119995, -0.3236965537071228, -0.6856554746627808, -0.28768694400787354, 0.0671629011631012, 0.27566438913345337, -0.0893339067697525, -0.22328855097293854, -0.16536207497119904, -0.08968719840049744, 0.022607458755373955, 0.21818216145038605, -0.14408129453659058, 0.14458191394805908, 0.4712568521499634, 0.13527995347976685, 0.16118602454662323, 0.23675017058849335, -0.0062652211636304855, -0.4045848250389099, -0.5631943345069885, 0.04897312819957733, -0.2558498978614807, 0.5269845128059387, -0.16870160400867462, -0.39874112606048584, 0.3996037244796753, 0.5432316660881042, -0.3740345239639282, 0.031965695321559906, 0.29769593477249146, 0.1568443477153778, 0.287019282579422, 0.6005253791809082, -0.33905476331710815, -0.07407552748918533, -0.4541633129119873, 0.047827333211898804, 0.4803982973098755, -0.2860602140426636, 0.17097190022468567, -0.7525586485862732, 
-0.06290972977876663, 0.14645379781723022, 0.176426962018013, 0.024587953463196754, 0.105128213763237, 0.023733407258987427, -0.1363760083913803, 0.22127331793308258, } sim := cosineSimilarity(res.Embedding, expected) if sim < 0.99 { t.Fatalf("expected %v, got %v (similarity: %f)", expected[0:5], res.Embedding[0:5], sim) } } func TestAllMiniLMEmbed(t *testing.T) { ctx, cancel := context.WithTimeout(context.Background(), 2*time.Minute) defer cancel() client, _, cleanup := InitServerConnection(ctx, t) defer cleanup() req := api.EmbedRequest{ Model: "all-minilm", Input: "why is the sky blue?", } res, err := embedTestHelper(ctx, client, t, req) if err != nil { t.Fatal(err) } if len(res.Embeddings) != 1 { t.Fatalf("expected 1 embedding, got %d", len(res.Embeddings)) } if len(res.Embeddings[0]) != 384 { t.Fatalf("expected 384 floats, got %d", len(res.Embeddings[0])) } expected := []float32{ 0.010071031, -0.0017594865, 0.050072223, 0.046929732, 0.05491682, 0.008599705, 0.105441436, -0.025878143, 0.1295813, 0.031952355, -0.04448072, -0.0089852745, -0.000509909, -0.06374169, -0.016089523, 0.04662509, -0.022060998, -0.15813895, -0.072848774, -0.061321855, -0.065877646, 0.054177605, -0.06213012, 0.038908366, -0.04580116, 0.05493584, -0.035267256, 0.012613296, 0.04251382, -0.007927403, -0.01902945, 0.060983833, 0.036926776, 0.013464811, -0.025808964, -0.043487485, 0.072623335, -0.04850803, 0.00428558, -0.02943825, -0.02913489, -0.03290691, -0.018345183, 0.0155583285, -0.011713048, 0.01530367, -0.009391865, 0.025963927, 0.09527476, -0.015497632, -0.024581224, 0.009084283, -0.07661165, 0.015987588, 0.049554788, 0.115980916, 0.0009802427, -0.02031978, 0.09233272, 0.00849488, -0.05705784, 0.068866335, -0.076607056, 0.06931919, 0.09223656, -0.055486195, -0.053620946, 0.008443246, -0.06315959, -0.066396914, -0.02516728, 0.018891005, 0.061389998, -0.028247874, 0.036244337, 0.0011042351, 0.06067215, -0.06755123, -0.008126048, -0.012737444, 0.030953258, -0.06380051, -0.07451028, 
0.1191656, 0.012553826, 0.06532671, 0.014824665, 0.051425762, -0.08518537, 0.010257597, -0.0077732494, -0.035585348, -0.115389846, -0.03066639, -0.0832527, 0.013689985, 0.056588713, -0.040882625, 0.042672798, 0.022154681, 0.04685385, -0.05135596, 0.030175874, 0.007199854, -0.0041790465, -0.031146567, 0.07788334, 0.034205843, 0.06138031, 0.007510951, -0.036251485, -0.08457674, 0.021795211, -0.019397866, -0.03984967, 0.054795727, -0.033695232, 0.018102817, -0.10553994, -0.050397146, -0.011542906, 0.0378195, 0.022170838, 0.08049212, 0.007816837, -0.01683443, -0.059413332, -7.227309e-33, 0.13531439, -0.011213897, 0.0923026, 0.03597459, 0.039638437, -0.054985173, -0.03506899, -0.0037263383, -0.01955998, -0.034966808, -0.0057084337, -0.014629069, -0.024276787, -0.048383784, 0.04777095, -0.017076956, -0.06094759, 0.0059446157, -0.083057985, 0.084341705, -0.1046656, 0.041639294, -0.03668315, -0.008083383, -0.028216336, -0.04319357, 0.035999607, 0.07498755, 0.05645381, 0.011849057, 0.09846523, 0.10484252, -0.021864949, 0.045994766, -0.026346037, -0.05092382, -0.014708711, -0.0063834875, -0.085867085, 0.028602734, -0.0535738, 0.056528863, -0.059763853, 0.012410302, 0.06620772, -0.013472636, 0.038324803, -0.08890202, -0.05744544, 0.03199372, -0.034495477, 0.02363032, 0.014458106, -0.04159657, 0.06799366, 0.031207295, 0.069696635, -0.035037853, -0.0033100948, 0.0493309, -0.0133445235, -0.0034971808, 0.050776623, 0.078672916, 0.037620574, -0.011580864, 0.03812419, 0.04201406, -0.012800006, -0.07894726, 0.00902281, 0.013365969, 0.024159499, 0.009777319, -0.010906574, -0.08161233, 0.026987134, -0.0296618, -0.004335468, 0.013011258, -0.035210665, -0.019684888, 0.055351324, -0.06124218, -0.055006765, 0.012528419, -0.019175794, -0.012560324, -0.015807373, -0.06942039, -0.044893157, -0.048941795, 0.048249032, -0.10446324, -0.10786195, 3.58774e-33, -0.0004694524, -0.08636079, -0.087853074, 0.0071707284, -0.007416128, -0.01662082, 0.045272738, 0.06750471, -0.042886123, 0.08635933, 
0.04555289, 0.06798365, 0.009930444, -0.003040414, 0.058509175, -0.035567205, 0.036180507, 0.06615616, -0.03779808, -0.062269486, -0.044531893, 0.07724946, 0.04343241, -0.021267718, -0.021633657, 0.06227748, -0.03914136, 0.028114952, -0.013057723, 0.051113747, -0.036822543, 0.054577183, -0.06644743, 0.022884717, 0.0048167957, 0.09043401, 0.0051002423, -0.083096094, -0.055096727, 0.07315016, -0.11049671, -0.020257315, 0.11254063, -0.053299136, -0.057593238, -0.023905706, 0.056623034, 0.12725255, 0.03595934, -0.043950673, 0.017003251, -0.024837377, 0.07269714, 0.043164223, 0.08047665, -0.019504813, -0.034397744, 0.096689135, 0.051885936, 0.010750518, 0.04023374, 0.0021946214, -0.0075854477, 0.0016714911, 0.014185944, 0.020396275, -0.023103109, 0.021491585, -0.009236667, -0.050526038, -0.016258504, -0.0899585, -0.0606858, 0.08100888, 0.0024563652, 0.041595213, 0.043729555, -0.025168482, -0.09529981, 0.088698424, -0.09840905, -0.0048626475, 0.03534257, 0.014159388, -0.06457741, -0.07597705, 0.012412196, -0.050220776, -0.055758025, -0.0569825, -0.018489538, -0.0021951278, -0.002204297, 0.03527849, -0.0547162, -1.430923e-8, -0.007930172, 0.026702108, 0.0022585324, 0.010008593, -0.021680027, -0.02156696, 0.111389145, 0.004639639, 0.03784025, 0.003962226, -0.0668973, -0.028295087, -0.04432231, 0.07120314, 0.018729135, -0.04907397, -0.103948705, -0.043614738, 0.010182222, 0.04179206, -0.013543455, -0.03385163, -0.025069695, -0.013597015, 0.0034274007, 0.033077475, -0.021843424, 0.021919321, 0.07144483, 0.020509098, 0.024436586, 0.035892475, -0.00094983797, -0.061337028, -0.085383, 0.007424564, -0.038788088, 0.07989341, -0.025575982, -0.060451094, 0.060581867, 0.082356565, -0.056705453, 0.0048461547, 0.04513215, 0.023778366, 0.043513518, 0.09104256, -0.05140235, -0.01123021, -0.06885336, 0.007250856, 0.072830714, -0.04336812, 0.025920171, -0.11409155, -0.009537421, 0.022203108, 0.026747186, 0.0037276533, 0.015937949, 0.0035980998, -0.020675266, 0.03354611, } sim := 
cosineSimilarity(res.Embeddings[0], expected) if sim < 0.99 { t.Fatalf("expected %v, got %v (similarity: %f)", expected[0:5], res.Embeddings[0][0:5], sim) } if res.PromptEvalCount != 8 { t.Fatalf("expected 8 prompt tokens, got %d", res.PromptEvalCount) } } func TestAllMiniLMBatchEmbed(t *testing.T) { ctx, cancel := context.WithTimeout(context.Background(), 2*time.Minute) defer cancel() client, _, cleanup := InitServerConnection(ctx, t) defer cleanup() req := api.EmbedRequest{ Model: "all-minilm", Input: []string{"why is the sky blue?", "why is the grass green?"}, } res, err := embedTestHelper(ctx, client, t, req) if err != nil { t.Fatal(err) } if len(res.Embeddings) != 2 { t.Fatalf("expected 2 embeddings, got %d", len(res.Embeddings)) } if len(res.Embeddings[0]) != 384 { t.Fatalf("expected 384 floats, got %d", len(res.Embeddings[0])) } expected := [][]float32{ { 0.010071031, -0.0017594865, 0.050072223, 0.046929732, 0.05491682, 0.008599705, 0.105441436, -0.025878143, 0.1295813, 0.031952355, -0.04448072, -0.0089852745, -0.000509909, -0.06374169, -0.016089523, 0.04662509, -0.022060998, -0.15813895, -0.072848774, -0.061321855, -0.065877646, 0.054177605, -0.06213012, 0.038908366, -0.04580116, 0.05493584, -0.035267256, 0.012613296, 0.04251382, -0.007927403, -0.01902945, 0.060983833, 0.036926776, 0.013464811, -0.025808964, -0.043487485, 0.072623335, -0.04850803, 0.00428558, -0.02943825, -0.02913489, -0.03290691, -0.018345183, 0.0155583285, -0.011713048, 0.01530367, -0.009391865, 0.025963927, 0.09527476, -0.015497632, -0.024581224, 0.009084283, -0.07661165, 0.015987588, 0.049554788, 0.115980916, 0.0009802427, -0.02031978, 0.09233272, 0.00849488, -0.05705784, 0.068866335, -0.076607056, 0.06931919, 0.09223656, -0.055486195, -0.053620946, 0.008443246, -0.06315959, -0.066396914, -0.02516728, 0.018891005, 0.061389998, -0.028247874, 0.036244337, 0.0011042351, 0.06067215, -0.06755123, -0.008126048, -0.012737444, 0.030953258, -0.06380051, -0.07451028, 0.1191656, 0.012553826, 
0.06532671, 0.014824665, 0.051425762, -0.08518537, 0.010257597, -0.0077732494, -0.035585348, -0.115389846, -0.03066639, -0.0832527, 0.013689985, 0.056588713, -0.040882625, 0.042672798, 0.022154681, 0.04685385, -0.05135596, 0.030175874, 0.007199854, -0.0041790465, -0.031146567, 0.07788334, 0.034205843, 0.06138031, 0.007510951, -0.036251485, -0.08457674, 0.021795211, -0.019397866, -0.03984967, 0.054795727, -0.033695232, 0.018102817, -0.10553994, -0.050397146, -0.011542906, 0.0378195, 0.022170838, 0.08049212, 0.007816837, -0.01683443, -0.059413332, -7.227309e-33, 0.13531439, -0.011213897, 0.0923026, 0.03597459, 0.039638437, -0.054985173, -0.03506899, -0.0037263383, -0.01955998, -0.034966808, -0.0057084337, -0.014629069, -0.024276787, -0.048383784, 0.04777095, -0.017076956, -0.06094759, 0.0059446157, -0.083057985, 0.084341705, -0.1046656, 0.041639294, -0.03668315, -0.008083383, -0.028216336, -0.04319357, 0.035999607, 0.07498755, 0.05645381, 0.011849057, 0.09846523, 0.10484252, -0.021864949, 0.045994766, -0.026346037, -0.05092382, -0.014708711, -0.0063834875, -0.085867085, 0.028602734, -0.0535738, 0.056528863, -0.059763853, 0.012410302, 0.06620772, -0.013472636, 0.038324803, -0.08890202, -0.05744544, 0.03199372, -0.034495477, 0.02363032, 0.014458106, -0.04159657, 0.06799366, 0.031207295, 0.069696635, -0.035037853, -0.0033100948, 0.0493309, -0.0133445235, -0.0034971808, 0.050776623, 0.078672916, 0.037620574, -0.011580864, 0.03812419, 0.04201406, -0.012800006, -0.07894726, 0.00902281, 0.013365969, 0.024159499, 0.009777319, -0.010906574, -0.08161233, 0.026987134, -0.0296618, -0.004335468, 0.013011258, -0.035210665, -0.019684888, 0.055351324, -0.06124218, -0.055006765, 0.012528419, -0.019175794, -0.012560324, -0.015807373, -0.06942039, -0.044893157, -0.048941795, 0.048249032, -0.10446324, -0.10786195, 3.58774e-33, -0.0004694524, -0.08636079, -0.087853074, 0.0071707284, -0.007416128, -0.01662082, 0.045272738, 0.06750471, -0.042886123, 0.08635933, 0.04555289, 0.06798365, 
0.009930444, -0.003040414, 0.058509175, -0.035567205, 0.036180507, 0.06615616, -0.03779808, -0.062269486, -0.044531893, 0.07724946, 0.04343241, -0.021267718, -0.021633657, 0.06227748, -0.03914136, 0.028114952, -0.013057723, 0.051113747, -0.036822543, 0.054577183, -0.06644743, 0.022884717, 0.0048167957, 0.09043401, 0.0051002423, -0.083096094, -0.055096727, 0.07315016, -0.11049671, -0.020257315, 0.11254063, -0.053299136, -0.057593238, -0.023905706, 0.056623034, 0.12725255, 0.03595934, -0.043950673, 0.017003251, -0.024837377, 0.07269714, 0.043164223, 0.08047665, -0.019504813, -0.034397744, 0.096689135, 0.051885936, 0.010750518, 0.04023374, 0.0021946214, -0.0075854477, 0.0016714911, 0.014185944, 0.020396275, -0.023103109, 0.021491585, -0.009236667, -0.050526038, -0.016258504, -0.0899585, -0.0606858, 0.08100888, 0.0024563652, 0.041595213, 0.043729555, -0.025168482, -0.09529981, 0.088698424, -0.09840905, -0.0048626475, 0.03534257, 0.014159388, -0.06457741, -0.07597705, 0.012412196, -0.050220776, -0.055758025, -0.0569825, -0.018489538, -0.0021951278, -0.002204297, 0.03527849, -0.0547162, -1.430923e-8, -0.007930172, 0.026702108, 0.0022585324, 0.010008593, -0.021680027, -0.02156696, 0.111389145, 0.004639639, 0.03784025, 0.003962226, -0.0668973, -0.028295087, -0.04432231, 0.07120314, 0.018729135, -0.04907397, -0.103948705, -0.043614738, 0.010182222, 0.04179206, -0.013543455, -0.03385163, -0.025069695, -0.013597015, 0.0034274007, 0.033077475, -0.021843424, 0.021919321, 0.07144483, 0.020509098, 0.024436586, 0.035892475, -0.00094983797, -0.061337028, -0.085383, 0.007424564, -0.038788088, 0.07989341, -0.025575982, -0.060451094, 0.060581867, 0.082356565, -0.056705453, 0.0048461547, 0.04513215, 0.023778366, 0.043513518, 0.09104256, -0.05140235, -0.01123021, -0.06885336, 0.007250856, 0.072830714, -0.04336812, 0.025920171, -0.11409155, -0.009537421, 0.022203108, 0.026747186, 0.0037276533, 0.015937949, 0.0035980998, -0.020675266, 0.03354611, }, { -0.009802706, 0.060424678, 
0.025257956, -0.0063643856, 0.07272723, 0.01719488, 0.090320334, -0.051705167, 0.099515095, 0.09072479, 0.007301506, -0.01968127, -0.075095184, -0.017409375, 0.019365614, 0.040805466, -0.011079843, -0.05856395, -0.12545314, -0.048980292, -0.044052314, 0.03115607, 0.037880868, -0.03187379, -0.0909825, 0.06357952, -0.076541565, 0.085011445, 0.03554875, -0.071272224, 0.021114277, 0.11005397, 0.03312636, -0.025947863, -0.061563145, -0.026466936, 0.02054478, -0.05426622, 0.056569945, 0.03292456, -0.09005933, -0.05698778, 0.026827272, 0.0751872, -0.07142025, -0.0043633, 0.054151993, 0.026441583, 0.078053534, -0.048995998, 0.056577347, -0.048973206, -0.07581186, 0.006902122, 0.0062451144, 0.037024222, 0.025028007, 0.021724675, 0.010117283, -0.040492155, -0.012010403, -0.03334674, -0.07570402, 0.071321115, -0.02062346, -0.0631419, -0.001237942, -0.055173304, 0.009124682, -0.08703634, 0.020684991, 0.05294139, -0.009563882, -0.052647192, -0.06467313, 0.041968923, 0.04473555, 0.03270584, -0.019611169, 0.00013324046, 0.038228948, 0.0509972, 0.0047100335, 0.05736671, 0.046469305, 0.04269017, -0.017305125, 0.011859765, -0.05701112, -0.03498464, -0.018940303, -0.0074608736, -0.07385685, 0.043892473, -0.09890047, 0.041379265, -0.024019944, -0.12034819, 0.0001821356, -0.0038607453, 0.056144036, -0.0005059898, 0.07110965, -0.03616245, -0.06406574, -0.009435536, -0.042290587, 0.07791005, -0.02365763, 0.007864432, -0.023739463, -0.018536761, -0.033538047, 0.0776669, -0.06058719, 0.05363198, 0.033863083, 0.012545284, -0.03260245, 0.029770961, -0.016934512, 0.028213669, -0.018053731, 0.06651968, -0.06952628, -0.017853932, -0.037421644, -6.839719e-33, -0.0055490523, -0.031681225, 0.04819487, -0.09944883, 0.09372583, -0.051811725, -0.037059266, -0.026262678, -0.037466466, -0.030253021, 0.0060922937, -0.09831781, -0.017570594, -0.07247917, 0.03856134, 0.00888377, -0.13072893, 0.02145255, -0.075681135, -0.010470858, -0.017236665, 0.058358245, 0.022016024, 0.0015762328, 0.009419801, 
-0.031423207, 0.08002972, 0.030580623, 0.05696977, -0.012164853, 0.11575935, 0.0040441174, 0.01759827, 0.043209996, 0.02948431, -0.0069428794, -0.025078153, -0.026160793, 0.013364178, 0.121543564, -0.004469769, -0.04534167, 0.043418996, -0.01768049, 0.062162045, -0.039375506, 0.017406953, 0.008458191, -0.02603069, 0.010130821, 0.023227274, 0.05305319, 0.06899141, 0.053088874, -0.0003113895, 0.009642751, 0.08884011, -0.030399954, -0.090916164, -0.051467095, -0.07382789, 0.08624027, 0.003223033, 0.010827092, -0.008318035, -0.011421701, -0.02900046, 0.06548931, 0.005405483, 0.068780296, 0.0428464, -0.01878741, -0.016996592, -0.036818627, -0.0062817424, -0.08700542, -0.008640271, -0.013171244, -0.004574588, 0.04233393, -0.03579696, 0.017357353, -0.087162524, -0.050884914, -0.14957926, -0.002008126, -0.02634847, 0.018098367, 0.02162604, -0.01503002, 0.0037868456, -0.015445877, -0.013303974, -0.09810386, -0.011673153, 2.8261164e-33, -0.022961555, 0.0090464745, -0.0057421196, 0.06604244, 0.042683356, -0.039691485, 0.027226122, 0.03183442, -0.028517157, 0.045575514, -0.055865873, 0.0924774, -0.046869125, 0.08027759, 0.118624836, 0.04889292, -0.06734586, 0.10688813, 0.009396721, -0.051344905, -0.067946814, 0.01592692, -0.010147019, 0.044173665, -0.030018767, 0.022772646, -0.031494025, -0.02233876, -0.0023573847, -0.010024354, 0.0032828946, -0.036839407, -0.11200184, 0.028629173, 0.030212566, 0.03185506, -0.01746865, -0.018295743, -0.036361173, 0.083925165, 0.007943152, -0.023664381, 0.15850149, 0.032088134, -0.070371404, -0.034124147, -0.015502377, 0.07960292, -0.06218589, 0.046537183, 0.04505064, 0.1043822, 0.029607052, 0.047920443, 0.09711685, -0.015767856, -0.064267434, 0.01960162, -0.093837254, -0.0028061024, 0.019721054, -0.027095793, -0.078636706, 0.0689579, 0.107794516, -0.033122607, -0.064406104, 0.016571952, 0.019280795, -0.023045482, -0.018821374, -0.018646069, -0.06431513, -0.03231013, -0.0027636476, 0.059007723, 0.059882853, -0.044795096, -0.06667144, 
0.043793377, -0.019855661, -0.006715758, 0.04733659, -0.046866804, 0.03461545, -0.015199261, -0.039511763, 0.047361404, 0.052113988, 0.0008203065, 0.05290727, 0.02459614, -0.029357709, 0.034541644, 0.013009169, -1.36748e-8, -0.033930536, 0.007378359, -0.010701883, 0.04323486, 0.014735074, -0.04162692, 0.10553509, -0.012822099, -0.002357336, 0.040418625, -0.08136588, 0.033679843, -0.019665385, 0.077529214, 0.060347307, -0.016181026, -0.11332622, -0.04306442, 0.023209568, 0.07448782, -0.06055759, -0.045812756, -0.087526724, 0.0534105, -0.044014834, 0.029827949, 0.038628686, 0.016933717, 0.027725562, 0.078133695, 0.055581007, 0.05306717, -0.010792625, -0.029803185, -0.08492531, -0.016416015, 0.030501937, 0.06944753, -0.061944496, -0.122021444, 0.011901371, 0.07258673, -0.017778289, 0.0030972173, 0.014411535, -0.03802866, -0.052976213, 0.060414705, -0.053164586, 0.01794129, -0.104411006, 0.010633235, 0.042881854, 0.042603284, -0.003009017, -0.08530093, -0.039561126, -0.004481811, 0.013104284, -0.008498699, -0.028943708, -0.03587923, 0.05940551, -0.000055299755, }, } sim := cosineSimilarity(res.Embeddings[0], expected[0]) if sim < 0.99 { t.Fatalf("expected %v, got %v (similarity: %f)", expected[0][0:5], res.Embeddings[0][0:5], sim) } sim = cosineSimilarity(res.Embeddings[1], expected[1]) if sim < 0.99 { t.Fatalf("expected %v, got %v (similarity: %f)", expected[1][0:5], res.Embeddings[1][0:5], sim) } if res.PromptEvalCount != 16 { t.Fatalf("expected 16 prompt tokens, got %d", res.PromptEvalCount) } } func TestAllMiniLMEmbedTruncate(t *testing.T) { ctx, cancel := context.WithTimeout(context.Background(), 2*time.Minute) defer cancel() client, _, cleanup := InitServerConnection(ctx, t) defer cleanup() truncTrue, truncFalse := true, false want, err := embedTestHelper(ctx, client, t, api.EmbedRequest{ Model: "all-minilm", Input: "why", }) if err != nil { t.Fatal(err) } cases := []struct { name string request api.EmbedRequest check func(*testing.T, *api.EmbedResponse, error) 
}{ { name: "target truncation", request: api.EmbedRequest{ Model: "all-minilm", Input: "why", }, check: func(t *testing.T, got *api.EmbedResponse, err error) { if err != nil { t.Fatal(err) } if diff := cmp.Diff(want.Embeddings[0], got.Embeddings[0]); diff != "" { t.Errorf("embedding mismatch (-want +got):\n%s", diff) } }, }, { name: "default truncate", request: api.EmbedRequest{ Model: "all-minilm", Input: "why is the sky blue?", Options: map[string]any{"num_ctx": 3}, }, check: func(t *testing.T, got *api.EmbedResponse, err error) { if err != nil { t.Fatal(err) } t.Logf("PromptEvalCount: want=%d got=%d", want.PromptEvalCount, got.PromptEvalCount) if diff := cmp.Diff(want.Embeddings[0], got.Embeddings[0]); diff != "" { t.Errorf("embedding mismatch (-want +got):\n%s", diff) } }, }, { name: "explicit truncate", request: api.EmbedRequest{ Model: "all-minilm", Input: "why is the sky blue?", Truncate: &truncTrue, Options: map[string]any{"num_ctx": 3}, }, check: func(t *testing.T, got *api.EmbedResponse, err error) { if err != nil { t.Fatal(err) } t.Logf("PromptEvalCount: want=%d got=%d", want.PromptEvalCount, got.PromptEvalCount) if diff := cmp.Diff(want.Embeddings[0], got.Embeddings[0]); diff != "" { t.Errorf("embedding mismatch (-want +got):\n%s", diff) } }, }, { name: "truncate error", request: api.EmbedRequest{ Model: "all-minilm", Input: "why is the sky blue?", Truncate: &truncFalse, Options: map[string]any{"num_ctx": 3}, }, check: func(t *testing.T, res *api.EmbedResponse, err error) { if err.Error() != "the input length exceeds the context length" { t.Fatalf("expected truncation error, got: %v", err) } }, }, { name: "input after truncate error with context length of 1", request: api.EmbedRequest{ Model: "all-minilm", Input: "why is the sky blue?", Truncate: &truncTrue, Options: map[string]any{"num_ctx": 1}, }, check: func(t *testing.T, res *api.EmbedResponse, err error) { if err.Error() != "input after truncation exceeds maximum context length" { 
t.Fatalf("expected truncation error, got: %v", err) } }, }, { name: "input after truncate error", request: api.EmbedRequest{
go
MIT
626af2d80973270c4d59b8df7153ac47ad67ed7b
2026-01-07T08:35:43.337630Z
true
ollama/ollama
https://github.com/ollama/ollama/blob/626af2d80973270c4d59b8df7153ac47ad67ed7b/integration/basic_test.go
integration/basic_test.go
//go:build integration package integration import ( "context" "log/slog" "os" "runtime" "testing" "time" "github.com/ollama/ollama/api" ) func TestBlueSky(t *testing.T) { ctx, cancel := context.WithTimeout(context.Background(), 2*time.Minute) defer cancel() // Set up the test data req := api.ChatRequest{ Model: smol, Messages: []api.Message{ { Role: "user", Content: blueSkyPrompt, }, }, Stream: &stream, Options: map[string]any{ "temperature": 0, "seed": 123, }, } ChatTestHelper(ctx, t, req, blueSkyExpected) } func TestUnicode(t *testing.T) { skipUnderMinVRAM(t, 6) ctx, cancel := context.WithTimeout(context.Background(), 3*time.Minute) defer cancel() // Set up the test data req := api.ChatRequest{ // DeepSeek has a Unicode tokenizer regex, making it a unicode torture test Model: "deepseek-coder-v2:16b-lite-instruct-q2_K", // TODO is there an ollama-engine model we can switch to and keep the coverage? Messages: []api.Message{ { Role: "user", Content: "天空为什么是蓝色的?", // Why is the sky blue? }, }, Stream: &stream, Options: map[string]any{ "temperature": 0, "seed": 123, // Workaround deepseek context shifting bug "num_ctx": 8192, "num_predict": 2048, }, } client, _, cleanup := InitServerConnection(ctx, t) defer cleanup() if err := PullIfMissing(ctx, client, req.Model); err != nil { t.Fatal(err) } slog.Info("loading", "model", req.Model) err := client.Generate(ctx, &api.GenerateRequest{Model: req.Model}, func(response api.GenerateResponse) error { return nil }) if err != nil { t.Fatalf("failed to load model %s: %s", req.Model, err) } defer func() { // best effort unload once we're done with the model client.Generate(ctx, &api.GenerateRequest{Model: req.Model, KeepAlive: &api.Duration{Duration: 0}}, func(rsp api.GenerateResponse) error { return nil }) }() skipIfNotGPULoaded(ctx, t, client, req.Model, 100) DoChat(ctx, t, client, req, []string{ "散射", // scattering "频率", // frequency }, 120*time.Second, 120*time.Second) } func TestExtendedUnicodeOutput(t *testing.T) { ctx, 
cancel := context.WithTimeout(context.Background(), 2*time.Minute) defer cancel() // Set up the test data req := api.ChatRequest{ Model: "gemma2:2b", Messages: []api.Message{ { Role: "user", Content: "Output some smily face emoji", }, }, Stream: &stream, Options: map[string]any{ "temperature": 0, "seed": 123, }, } client, _, cleanup := InitServerConnection(ctx, t) defer cleanup() if err := PullIfMissing(ctx, client, req.Model); err != nil { t.Fatal(err) } DoChat(ctx, t, client, req, []string{"😀", "😊", "😁", "😂", "😄", "😃"}, 120*time.Second, 120*time.Second) } func TestUnicodeModelDir(t *testing.T) { // This is only useful for Windows with utf-16 characters, so skip this test for other platforms if runtime.GOOS != "windows" { t.Skip("Unicode test only applicable to windows") } // Only works for local testing if os.Getenv("OLLAMA_TEST_EXISTING") != "" { t.Skip("TestUnicodeModelDir only works for local testing, skipping") } modelDir, err := os.MkdirTemp("", "ollama_埃") if err != nil { t.Fatal(err) } defer os.RemoveAll(modelDir) slog.Info("unicode", "OLLAMA_MODELS", modelDir) t.Setenv("OLLAMA_MODELS", modelDir) ctx, cancel := context.WithTimeout(context.Background(), 2*time.Minute) defer cancel() req := api.ChatRequest{ Model: smol, Messages: []api.Message{ { Role: "user", Content: blueSkyPrompt, }, }, Stream: &stream, Options: map[string]any{ "temperature": 0, "seed": 123, }, } ChatTestHelper(ctx, t, req, blueSkyExpected) }
go
MIT
626af2d80973270c4d59b8df7153ac47ad67ed7b
2026-01-07T08:35:43.337630Z
false
ollama/ollama
https://github.com/ollama/ollama/blob/626af2d80973270c4d59b8df7153ac47ad67ed7b/integration/context_test.go
integration/context_test.go
//go:build integration package integration import ( "context" "log/slog" "sync" "testing" "time" "github.com/ollama/ollama/api" ) func TestLongInputContext(t *testing.T) { // Setting NUM_PARALLEL to 1 ensures the allocated context is exactly what // we asked for and there is nothing extra that we could spill over into t.Setenv("OLLAMA_NUM_PARALLEL", "1") // Longer needed for small footprint GPUs ctx, cancel := context.WithTimeout(context.Background(), 5*time.Minute) defer cancel() // Set up the test data req := api.ChatRequest{ Model: smol, Messages: []api.Message{ { Role: "user", Content: "Oh, don’t speak to me of Austria. Perhaps I don’t understand things, but Austria never has wished, and does not wish, for war. She is betraying us! Russia alone must save Europe. Our gracious sovereign recognizes his high vocation and will be true to it. That is the one thing I have faith in! Our good and wonderful sovereign has to perform the noblest role on earth, and he is so virtuous and noble that God will not forsake him. He will fulfill his vocation and crush the hydra of revolution, which has become more terrible than ever in the person of this murderer and villain! We alone must avenge the blood of the just one.... Whom, I ask you, can we rely on?... England with her commercial spirit will not and cannot understand the Emperor Alexander’s loftiness of soul. She has refused to evacuate Malta. She wanted to find, and still seeks, some secret motive in our actions. What answer did Novosíltsev get? None. The English have not understood and cannot understand the self-abnegation of our Emperor who wants nothing for himself, but only desires the good of mankind. And what have they promised? Nothing! And what little they have promised they will not perform! Prussia has always declared that Buonaparte is invincible, and that all Europe is powerless before him.... And I don’t believe a word that Hardenburg says, or Haugwitz either. This famous Prussian neutrality is just a trap. 
I have faith only in God and the lofty destiny of our adored monarch. He will save Europe! What country is this referring to?", }, }, Stream: &stream, Options: map[string]any{ "temperature": 0, "seed": 123, "num_ctx": 128, }, } client, _, cleanup := InitServerConnection(ctx, t) defer cleanup() if err := PullIfMissing(ctx, client, req.Model); err != nil { t.Fatalf("PullIfMissing failed: %v", err) } DoChat(ctx, t, client, req, []string{"russia", "german", "france", "england", "austria", "prussia", "europe", "individuals", "coalition", "conflict"}, 120*time.Second, 10*time.Second) } func TestContextExhaustion(t *testing.T) { // Setting NUM_PARALLEL to 1 ensures the allocated context is exactly what // we asked for and there is nothing extra that we could spill over into t.Setenv("OLLAMA_NUM_PARALLEL", "1") // Longer needed for small footprint GPUs ctx, cancel := context.WithTimeout(context.Background(), 5*time.Minute) defer cancel() // Set up the test data req := api.ChatRequest{ Model: smol, Messages: []api.Message{ { Role: "user", Content: "Write me a story in english with a lot of emojis", }, }, Stream: &stream, Options: map[string]any{ "temperature": 0, "seed": 123, "num_ctx": 128, }, } client, _, cleanup := InitServerConnection(ctx, t) defer cleanup() if err := PullIfMissing(ctx, client, req.Model); err != nil { t.Fatalf("PullIfMissing failed: %v", err) } DoChat(ctx, t, client, req, []string{"once", "upon", "lived", "sunny", "cloudy", "clear", "water", "time", "travel", "world"}, 120*time.Second, 10*time.Second) } // Send multiple generate requests with prior context and ensure the response is coherant and expected func TestParallelGenerateWithHistory(t *testing.T) { modelName := "gpt-oss:20b" req, resp := GenerateRequests() numParallel := 2 iterLimit := 2 softTimeout, hardTimeout := getTimeouts(t) ctx, cancel := context.WithTimeout(context.Background(), hardTimeout) defer cancel() client, _, cleanup := InitServerConnection(ctx, t) defer cleanup() initialTimeout 
:= 120 * time.Second streamTimeout := 20 * time.Second // Get the server running (if applicable) warm the model up with a single initial request slog.Info("loading", "model", modelName) err := client.Generate(ctx, &api.GenerateRequest{Model: modelName, KeepAlive: &api.Duration{Duration: 10 * time.Second}}, func(response api.GenerateResponse) error { return nil }, ) if err != nil { t.Fatalf("failed to load model %s: %s", modelName, err) } gpuPercent := getGPUPercent(ctx, t, client, modelName) if gpuPercent < 80 { slog.Warn("Low GPU percentage - increasing timeouts", "percent", gpuPercent) initialTimeout = 240 * time.Second streamTimeout = 30 * time.Second } var wg sync.WaitGroup wg.Add(numParallel) for i := range numParallel { go func(i int) { defer wg.Done() k := i % len(req) req[k].Model = modelName for j := 0; j < iterLimit; j++ { if time.Now().Sub(started) > softTimeout { slog.Info("exceeded soft timeout, winding down test") return } slog.Info("Starting", "thread", i, "iter", j) // On slower GPUs it can take a while to process the concurrent requests // so we allow a much longer initial timeout c := DoGenerate(ctx, t, client, req[k], resp[k], initialTimeout, streamTimeout) req[k].Context = c req[k].Prompt = "tell me more!" 
} }(i) } wg.Wait() } // Send generate requests with prior context and ensure the response is coherant and expected func TestGenerateWithHistory(t *testing.T) { req := api.GenerateRequest{ Model: smol, Prompt: rainbowPrompt, Stream: &stream, KeepAlive: &api.Duration{Duration: 10 * time.Second}, Options: map[string]any{ "num_ctx": 16384, }, } softTimeout, hardTimeout := getTimeouts(t) ctx, cancel := context.WithTimeout(context.Background(), hardTimeout) defer cancel() client, _, cleanup := InitServerConnection(ctx, t) defer cleanup() // Get the server running (if applicable) warm the model up with a single initial request slog.Info("loading", "model", req.Model) err := client.Generate(ctx, &api.GenerateRequest{Model: req.Model, KeepAlive: &api.Duration{Duration: 10 * time.Second}, Options: req.Options}, func(response api.GenerateResponse) error { return nil }, ) if err != nil { t.Fatalf("failed to load model %s: %s", req.Model, err) } req.Context = DoGenerate(ctx, t, client, req, rainbowExpected, 30*time.Second, 20*time.Second) for i := 0; i < len(rainbowFollowups); i++ { req.Prompt = rainbowFollowups[i] if time.Now().Sub(started) > softTimeout { slog.Info("exceeded soft timeout, winding down test") return } req.Context = DoGenerate(ctx, t, client, req, rainbowExpected, 30*time.Second, 20*time.Second) } } // Send multiple chat requests with prior context and ensure the response is coherant and expected func TestParallelChatWithHistory(t *testing.T) { modelName := "gpt-oss:20b" req, resp := ChatRequests() numParallel := 2 iterLimit := 2 softTimeout, hardTimeout := getTimeouts(t) ctx, cancel := context.WithTimeout(context.Background(), hardTimeout) defer cancel() client, _, cleanup := InitServerConnection(ctx, t) defer cleanup() initialTimeout := 120 * time.Second streamTimeout := 20 * time.Second // Get the server running (if applicable) warm the model up with a single initial empty request slog.Info("loading", "model", modelName) err := client.Generate(ctx, 
&api.GenerateRequest{Model: modelName, KeepAlive: &api.Duration{Duration: 10 * time.Second}}, func(response api.GenerateResponse) error { return nil }, ) if err != nil { t.Fatalf("failed to load model %s: %s", modelName, err) } gpuPercent := getGPUPercent(ctx, t, client, modelName) if gpuPercent < 80 { slog.Warn("Low GPU percentage - increasing timeouts", "percent", gpuPercent) initialTimeout = 240 * time.Second streamTimeout = 30 * time.Second } var wg sync.WaitGroup wg.Add(numParallel) for i := range numParallel { go func(i int) { defer wg.Done() k := i % len(req) req[k].Model = modelName for j := 0; j < iterLimit; j++ { if time.Now().Sub(started) > softTimeout { slog.Info("exceeded soft timeout, winding down test") return } slog.Info("Starting", "thread", i, "iter", j) // On slower GPUs it can take a while to process the concurrent requests // so we allow a much longer initial timeout assistant := DoChat(ctx, t, client, req[k], resp[k], initialTimeout, streamTimeout) if assistant == nil { t.Fatalf("didn't get an assistant response for context") } req[k].Messages = append(req[k].Messages, *assistant, api.Message{Role: "user", Content: "tell me more!"}, ) } }(i) } wg.Wait() } // Send generate requests with prior context and ensure the response is coherant and expected func TestChatWithHistory(t *testing.T) { req := api.ChatRequest{ Model: smol, Stream: &stream, KeepAlive: &api.Duration{Duration: 10 * time.Second}, Options: map[string]any{ "num_ctx": 16384, }, Messages: []api.Message{ { Role: "user", Content: rainbowPrompt, }, }, } softTimeout, hardTimeout := getTimeouts(t) ctx, cancel := context.WithTimeout(context.Background(), hardTimeout) defer cancel() client, _, cleanup := InitServerConnection(ctx, t) defer cleanup() // Get the server running (if applicable) warm the model up with a single initial request slog.Info("loading", "model", req.Model) err := client.Generate(ctx, &api.GenerateRequest{Model: req.Model, KeepAlive: &api.Duration{Duration: 10 * 
time.Second}, Options: req.Options}, func(response api.GenerateResponse) error { return nil }, ) if err != nil { t.Fatalf("failed to load model %s: %s", req.Model, err) } assistant := DoChat(ctx, t, client, req, rainbowExpected, 30*time.Second, 20*time.Second) for i := 0; i < len(rainbowFollowups); i++ { if time.Now().Sub(started) > softTimeout { slog.Info("exceeded soft timeout, winding down test") return } req.Messages = append(req.Messages, *assistant, api.Message{Role: "user", Content: rainbowFollowups[i]}, ) assistant = DoChat(ctx, t, client, req, rainbowExpected, 30*time.Second, 20*time.Second) if assistant == nil { t.Fatalf("didn't get an assistant response for context") } } }
go
MIT
626af2d80973270c4d59b8df7153ac47ad67ed7b
2026-01-07T08:35:43.337630Z
false
ollama/ollama
https://github.com/ollama/ollama/blob/626af2d80973270c4d59b8df7153ac47ad67ed7b/integration/api_test.go
integration/api_test.go
//go:build integration package integration import ( "bytes" "context" "fmt" "math/rand" "strings" "testing" "time" "github.com/ollama/ollama/api" ) func assertBytesMatchToken(t *testing.T, label, token string, ints []int) { t.Helper() raw := []byte(token) if len(ints) != len(raw) { t.Errorf("%s expected %d bytes for token %q, got %d (%v)", label, len(raw), token, len(ints), ints) return } for i, b := range raw { if ints[i] != int(b) { t.Errorf("%s byte[%d] mismatch for token %q: got %d want %d", label, i, token, ints[i], int(b)) return } } } func TestAPIGenerate(t *testing.T) { initialTimeout := 60 * time.Second streamTimeout := 30 * time.Second ctx, cancel := context.WithTimeout(context.Background(), 1*time.Minute) defer cancel() // Set up the test data req := api.GenerateRequest{ Model: smol, Prompt: blueSkyPrompt, Options: map[string]interface{}{ "temperature": 0, "seed": 123, }, } client, _, cleanup := InitServerConnection(ctx, t) defer cleanup() if err := PullIfMissing(ctx, client, req.Model); err != nil { t.Fatalf("pull failed %s", err) } tests := []struct { name string stream bool }{ { name: "stream", stream: true, }, { name: "no_stream", stream: false, }, } for _, test := range tests { t.Run(test.name, func(t *testing.T) { stallTimer := time.NewTimer(initialTimeout) var buf bytes.Buffer fn := func(response api.GenerateResponse) error { // Fields that must always be present if response.Model == "" { t.Errorf("response missing model: %#v", response) } if response.Done { // Required fields for final updates: if response.DoneReason == "" && *req.Stream { // TODO - is the lack of done reason on non-stream a bug? 
t.Errorf("final response missing done_reason: %#v", response) } if response.Metrics.TotalDuration == 0 { t.Errorf("final response missing total_duration: %#v", response) } if response.Metrics.LoadDuration == 0 { t.Errorf("final response missing load_duration: %#v", response) } if response.Metrics.PromptEvalDuration == 0 { t.Errorf("final response missing prompt_eval_duration: %#v", response) } if response.Metrics.EvalCount == 0 { t.Errorf("final response missing eval_count: %#v", response) } if response.Metrics.EvalDuration == 0 { t.Errorf("final response missing eval_duration: %#v", response) } if len(response.Context) == 0 { t.Errorf("final response missing context: %#v", response) } // Note: caching can result in no prompt eval count, so this can't be verified reliably // if response.Metrics.PromptEvalCount == 0 { // t.Errorf("final response missing prompt_eval_count: %#v", response) // } } // else incremental response, nothing to check right now... buf.Write([]byte(response.Response)) if !stallTimer.Reset(streamTimeout) { return fmt.Errorf("stall was detected while streaming response, aborting") } return nil } done := make(chan int) var genErr error go func() { req.Stream = &test.stream req.Options["seed"] = rand.Int() // bust cache for prompt eval results genErr = client.Generate(ctx, &req, fn) done <- 0 }() select { case <-stallTimer.C: if buf.Len() == 0 { t.Errorf("generate never started. Timed out after :%s", initialTimeout.String()) } else { t.Errorf("generate stalled. 
Response so far:%s", buf.String()) } case <-done: if genErr != nil { t.Fatalf("failed with %s request prompt %s ", req.Model, req.Prompt) } // Verify the response contains the expected data response := buf.String() atLeastOne := false for _, resp := range blueSkyExpected { if strings.Contains(strings.ToLower(response), resp) { atLeastOne = true break } } if !atLeastOne { t.Errorf("none of %v found in %s", blueSkyExpected, response) } case <-ctx.Done(): t.Error("outer test context done while waiting for generate") } }) } // Validate PS while we're at it... resp, err := client.ListRunning(ctx) if err != nil { t.Fatalf("list models API error: %s", err) } if resp == nil || len(resp.Models) == 0 { t.Fatalf("list models API returned empty list while model should still be loaded") } // Find the model we just loaded and verify some attributes found := false for _, model := range resp.Models { if strings.Contains(model.Name, req.Model) { found = true if model.Model == "" { t.Errorf("model field omitted: %#v", model) } if model.Size == 0 { t.Errorf("size omitted: %#v", model) } if model.Digest == "" { t.Errorf("digest omitted: %#v", model) } verifyModelDetails(t, model.Details) var nilTime time.Time if model.ExpiresAt == nilTime { t.Errorf("expires_at omitted: %#v", model) } // SizeVRAM could be zero. 
} } if !found { t.Errorf("unable to locate running model: %#v", resp) } } func TestAPIChat(t *testing.T) { initialTimeout := 60 * time.Second streamTimeout := 30 * time.Second ctx, cancel := context.WithTimeout(context.Background(), 1*time.Minute) defer cancel() // Set up the test data req := api.ChatRequest{ Model: smol, Messages: []api.Message{ { Role: "user", Content: blueSkyPrompt, }, }, Options: map[string]interface{}{ "temperature": 0, "seed": 123, }, } client, _, cleanup := InitServerConnection(ctx, t) defer cleanup() if err := PullIfMissing(ctx, client, req.Model); err != nil { t.Fatalf("pull failed %s", err) } tests := []struct { name string stream bool }{ { name: "stream", stream: true, }, { name: "no_stream", stream: false, }, } for _, test := range tests { t.Run(test.name, func(t *testing.T) { stallTimer := time.NewTimer(initialTimeout) var buf bytes.Buffer fn := func(response api.ChatResponse) error { // Fields that must always be present if response.Model == "" { t.Errorf("response missing model: %#v", response) } if response.Done { // Required fields for final updates: var nilTime time.Time if response.CreatedAt == nilTime { t.Errorf("final response missing total_duration: %#v", response) } if response.DoneReason == "" { t.Errorf("final response missing done_reason: %#v", response) } if response.Metrics.TotalDuration == 0 { t.Errorf("final response missing total_duration: %#v", response) } if response.Metrics.LoadDuration == 0 { t.Errorf("final response missing load_duration: %#v", response) } if response.Metrics.PromptEvalDuration == 0 { t.Errorf("final response missing prompt_eval_duration: %#v", response) } if response.Metrics.EvalCount == 0 { t.Errorf("final response missing eval_count: %#v", response) } if response.Metrics.EvalDuration == 0 { t.Errorf("final response missing eval_duration: %#v", response) } if response.Metrics.PromptEvalCount == 0 { t.Errorf("final response missing prompt_eval_count: %#v", response) } } // else incremental 
response, nothing to check right now... buf.Write([]byte(response.Message.Content)) if !stallTimer.Reset(streamTimeout) { return fmt.Errorf("stall was detected while streaming response, aborting") } return nil } done := make(chan int) var genErr error go func() { req.Stream = &test.stream req.Options["seed"] = rand.Int() // bust cache for prompt eval results genErr = client.Chat(ctx, &req, fn) done <- 0 }() select { case <-stallTimer.C: if buf.Len() == 0 { t.Errorf("chat never started. Timed out after :%s", initialTimeout.String()) } else { t.Errorf("chat stalled. Response so far:%s", buf.String()) } case <-done: if genErr != nil { t.Fatalf("failed with %s request prompt %v", req.Model, req.Messages) } // Verify the response contains the expected data response := buf.String() atLeastOne := false for _, resp := range blueSkyExpected { if strings.Contains(strings.ToLower(response), resp) { atLeastOne = true break } } if !atLeastOne { t.Errorf("none of %v found in %s", blueSkyExpected, response) } case <-ctx.Done(): t.Error("outer test context done while waiting for chat") } }) } } func TestAPIListModels(t *testing.T) { ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second) defer cancel() client, _, cleanup := InitServerConnection(ctx, t) defer cleanup() // Make sure we have at least one model so an empty list can be considered a failure if err := PullIfMissing(ctx, client, smol); err != nil { t.Fatalf("pull failed %s", err) } resp, err := client.List(ctx) if err != nil { t.Fatalf("unable to list models: %s", err) } if len(resp.Models) == 0 { t.Fatalf("list should not be empty") } model := resp.Models[0] if model.Name == "" { t.Errorf("first model name empty: %#v", model) } var nilTime time.Time if model.ModifiedAt == nilTime { t.Errorf("first model modified_at empty: %#v", model) } if model.Size == 0 { t.Errorf("first model size empty: %#v", model) } if model.Digest == "" { t.Errorf("first model digest empty: %#v", model) } verifyModelDetails(t, 
model.Details) } func verifyModelDetails(t *testing.T, details api.ModelDetails) { if details.Format == "" { t.Errorf("first model details.format empty: %#v", details) } if details.Family == "" { t.Errorf("first model details.family empty: %#v", details) } if details.ParameterSize == "" { t.Errorf("first model details.parameter_size empty: %#v", details) } if details.QuantizationLevel == "" { t.Errorf("first model details.quantization_level empty: %#v", details) } } func TestAPIShowModel(t *testing.T) { modelName := "llama3.2" ctx, cancel := context.WithTimeout(context.Background(), 1*time.Minute) defer cancel() client, _, cleanup := InitServerConnection(ctx, t) defer cleanup() if err := PullIfMissing(ctx, client, modelName); err != nil { t.Fatalf("pull failed %s", err) } resp, err := client.Show(ctx, &api.ShowRequest{Name: modelName}) if err != nil { t.Fatalf("unable to show model: %s", err) } if resp.License == "" { t.Errorf("%s missing license: %#v", modelName, resp) } if resp.Modelfile == "" { t.Errorf("%s missing modelfile: %#v", modelName, resp) } if resp.Parameters == "" { t.Errorf("%s missing parameters: %#v", modelName, resp) } if resp.Template == "" { t.Errorf("%s missing template: %#v", modelName, resp) } // llama3 omits system verifyModelDetails(t, resp.Details) // llama3 ommits messages if len(resp.ModelInfo) == 0 { t.Errorf("%s missing model_info: %#v", modelName, resp) } // llama3 omits projectors var nilTime time.Time if resp.ModifiedAt == nilTime { t.Errorf("%s missing modified_at: %#v", modelName, resp) } } func TestAPIGenerateLogprobs(t *testing.T) { ctx, cancel := context.WithTimeout(context.Background(), 2*time.Minute) defer cancel() client, _, cleanup := InitServerConnection(ctx, t) defer cleanup() if err := PullIfMissing(ctx, client, smol); err != nil { t.Fatalf("pull failed %s", err) } enableLogprobs := true noStream := false tests := []struct { name string logprobs *bool topLogprobs int expectCount int }{ { name: "no_logprobs", logprobs: 
nil, topLogprobs: 0, expectCount: 0, }, { name: "logprobs_only", logprobs: &enableLogprobs, topLogprobs: 0, expectCount: 1, }, { name: "logprobs_with_top_5", logprobs: &enableLogprobs, topLogprobs: 5, expectCount: 1, }, } for _, test := range tests { t.Run(test.name, func(t *testing.T) { req := api.GenerateRequest{ Model: smol, Prompt: "Why is the sky blue?", Stream: &noStream, Logprobs: test.logprobs != nil && *test.logprobs, TopLogprobs: test.topLogprobs, Options: map[string]interface{}{ "temperature": 0, "seed": 123, "num_predict": 10, }, } var response api.GenerateResponse err := client.Generate(ctx, &req, func(resp api.GenerateResponse) error { if resp.Done { response = resp } return nil }) if err != nil { t.Fatalf("generate failed: %s", err) } // Check logprobs based on expectation if test.expectCount == 0 { if len(response.Logprobs) > 0 { t.Errorf("expected no logprobs but got %d", len(response.Logprobs)) } } else { if len(response.Logprobs) == 0 { t.Errorf("expected logprobs but got none") } // Validate each logprob entry for i, lp := range response.Logprobs { if lp.Token == "" { t.Errorf("logprob[%d] has empty token", i) } if lp.Logprob > 0 { t.Errorf("logprob[%d] has positive logprob %f (should be <= 0)", i, lp.Logprob) } assertBytesMatchToken(t, fmt.Sprintf("generate logprob[%d]", i), lp.Token, lp.Bytes) // Check top_logprobs if requested if test.topLogprobs > 0 { if len(lp.TopLogprobs) == 0 { t.Errorf("logprob[%d] expected top_logprobs but got none", i) } if len(lp.TopLogprobs) > test.topLogprobs { t.Errorf("logprob[%d] has %d top_logprobs, expected max %d", i, len(lp.TopLogprobs), test.topLogprobs) } // Verify top_logprobs are sorted by probability (descending) for j := 1; j < len(lp.TopLogprobs); j++ { if lp.TopLogprobs[j-1].Logprob < lp.TopLogprobs[j].Logprob { t.Errorf("logprob[%d].top_logprobs not sorted: %f < %f", i, lp.TopLogprobs[j-1].Logprob, lp.TopLogprobs[j].Logprob) } } for j, top := range lp.TopLogprobs { assertBytesMatchToken(t, 
fmt.Sprintf("generate logprob[%d].top[%d]", i, j), top.Token, top.Bytes) } } else if len(lp.TopLogprobs) > 0 { t.Errorf("logprob[%d] has top_logprobs but none were requested", i) } } } }) } } func TestAPIChatLogprobs(t *testing.T) { ctx, cancel := context.WithTimeout(context.Background(), 2*time.Minute) defer cancel() client, _, cleanup := InitServerConnection(ctx, t) defer cleanup() if err := PullIfMissing(ctx, client, smol); err != nil { t.Fatalf("pull failed %s", err) } enableLogprobs := true noStream := false req := api.ChatRequest{ Model: smol, Messages: []api.Message{ {Role: "user", Content: "Say hello in one word"}, }, Stream: &noStream, Logprobs: enableLogprobs, TopLogprobs: 3, Options: map[string]interface{}{ "temperature": 0, "seed": 123, "num_predict": 5, }, } var response api.ChatResponse err := client.Chat(ctx, &req, func(resp api.ChatResponse) error { if resp.Done { response = resp } return nil }) if err != nil { t.Fatalf("chat failed: %s", err) } if len(response.Logprobs) == 0 { t.Fatal("expected logprobs in response but got none") } t.Logf("received %d logprobs for chat response", len(response.Logprobs)) for i, lp := range response.Logprobs { if lp.Token == "" { t.Errorf("logprob[%d] has empty token", i) } if lp.Logprob > 0 { t.Errorf("logprob[%d] has positive logprob %f", i, lp.Logprob) } assertBytesMatchToken(t, fmt.Sprintf("chat logprob[%d]", i), lp.Token, lp.Bytes) if len(lp.TopLogprobs) == 0 { t.Errorf("logprob[%d] expected top_logprobs but got none", i) } if len(lp.TopLogprobs) > 3 { t.Errorf("logprob[%d] has %d top_logprobs, expected max 3", i, len(lp.TopLogprobs)) } for j, top := range lp.TopLogprobs { assertBytesMatchToken(t, fmt.Sprintf("chat logprob[%d].top[%d]", i, j), top.Token, top.Bytes) } } }
go
MIT
626af2d80973270c4d59b8df7153ac47ad67ed7b
2026-01-07T08:35:43.337630Z
false
ollama/ollama
https://github.com/ollama/ollama/blob/626af2d80973270c4d59b8df7153ac47ad67ed7b/integration/utils_test.go
integration/utils_test.go
//go:build integration package integration import ( "bytes" "context" "errors" "fmt" "io" "log/slog" "math" "math/rand" "net" "net/http" "net/url" "os" "os/exec" "path/filepath" "runtime" "strconv" "strings" "sync" "testing" "time" "github.com/ollama/ollama/api" "github.com/ollama/ollama/format" ) var ( smol = "llama3.2:1b" stream = false ) var ( started = time.Now() // Note: add newer models at the top of the list to test them first ollamaEngineChatModels = []string{ "ministral-3", "qwen3-coder:30b", "gpt-oss:20b", "gemma3n:e2b", "mistral-small3.2:latest", "deepseek-r1:1.5b", "llama3.2-vision:latest", "qwen2.5-coder:latest", "qwen2.5vl:3b", "qwen3:0.6b", // dense "qwen3:1.7b", // dense "qwen3:30b", // MOE "gemma3:1b", "llama3.1:latest", "llama3.2:latest", "gemma2:latest", "minicpm-v:latest", // arch=qwen2 "granite-code:latest", // arch=llama } llamaRunnerChatModels = []string{ "mistral:latest", "falcon3:latest", "granite3-moe:latest", "command-r:latest", "nemotron-mini:latest", "phi3.5:latest", "solar-pro:latest", "internlm2:latest", "codellama:latest", // arch=llama "phi3:latest", "falcon2:latest", "gemma:latest", "llama2:latest", "nous-hermes:latest", "orca-mini:latest", "qwen:latest", "stablelm2:latest", // Predictions are off, crashes on small VRAM GPUs "falcon:latest", } // Some library models are quite large - ensure large VRAM and sufficient disk space // before running scenarios based on this set libraryChatModels = []string{ "alfred", "athene-v2", "aya-expanse", "aya", "bakllava", "bespoke-minicheck", "codebooga", "codegeex4", "codegemma", "codellama", "codeqwen", "codestral", "codeup", "cogito", "command-a", "command-r-plus", "command-r", "command-r7b-arabic", "command-r7b", "dbrx", "deepcoder", "deepscaler", "deepseek-coder-v2", "deepseek-coder", "deepseek-llm", "deepseek-r1", // "deepseek-v2.5", // requires 155 GB VRAM "deepseek-v2", // "deepseek-v3", // requires 482 GB VRAM "devstral", "dolphin-llama3", "dolphin-mistral", "dolphin-mixtral", 
"dolphin-phi", "dolphin3", "dolphincoder", "duckdb-nsql", "everythinglm", "exaone-deep", "exaone3.5", "falcon", "falcon2", "falcon3", "firefunction-v2", "gemma", "gemma2", "gemma3", "gemma3n", "glm4", "goliath", "gpt-oss:20b", "granite-code", "granite3-dense", "granite3-guardian", "granite3-moe", "granite3.1-dense", "granite3.1-moe", "granite3.2-vision", "granite3.2", "granite3.3", "hermes3", "internlm2", "llama-guard3", "llama-pro", "llama2-chinese", "llama2-uncensored", "llama2", "llama3-chatqa", "llama3-gradient", "llama3-groq-tool-use", "llama3.1", "llama3.2-vision", "llama3.2", "llama3.3", "llama3", "llama4", "llava-llama3", "llava-phi3", "llava", "magicoder", "magistral", "marco-o1", "mathstral", "meditron", "medllama2", "megadolphin", "minicpm-v", "ministral-3", "mistral-large", "mistral-nemo", "mistral-openorca", "mistral-small", "mistral-small3.1", "mistral-small3.2", "mistral", "mistrallite", "mixtral", "moondream", "nemotron-mini", "nemotron", "neural-chat", "nexusraven", "notus", "nous-hermes", "nous-hermes2-mixtral", "nous-hermes2", "nuextract", "olmo2", "open-orca-platypus2", "openchat", "opencoder", "openhermes", "openthinker", "orca-mini", "orca2", // "phi", // unreliable "phi3.5", "phi3", "phi4-mini-reasoning", "phi4-mini", "phi4-reasoning", "phi4", "phind-codellama", "qwen", "qwen2-math", "qwen2.5-coder", "qwen2.5", "qwen2.5vl", "qwen2", "qwen3:0.6b", // dense "qwen3:30b", // MOE "qwq", "r1-1776", "reader-lm", "reflection", "sailor2", "samantha-mistral", "shieldgemma", "smallthinker", "smollm", "smollm2", "solar-pro", "solar", "sqlcoder", "stable-beluga", "stable-code", "stablelm-zephyr", "stablelm2", "starcoder", "starcoder2", "starling-lm", "tinydolphin", "tinyllama", "tulu3", "vicuna", "wizard-math", "wizard-vicuna-uncensored", "wizard-vicuna", "wizardcoder", "wizardlm-uncensored", "wizardlm2", "xwinlm", "yarn-llama2", "yarn-mistral", "yi-coder", "yi", "zephyr", } libraryEmbedModels = []string{ "qwen3-embedding", "embeddinggemma", 
"nomic-embed-text", "all-minilm", "bge-large", "bge-m3", "granite-embedding", "mxbai-embed-large", "paraphrase-multilingual", "snowflake-arctic-embed", "snowflake-arctic-embed2", } libraryToolsModels = []string{ "qwen3-vl", "gpt-oss:20b", "gpt-oss:120b", "qwen3", "llama3.1", "llama3.2", "mistral", "qwen2.5", "qwen2", "ministral-3", "mistral-nemo", "mistral-small", "mixtral:8x22b", "qwq", "granite3.3", } blueSkyPrompt = "why is the sky blue? Be brief but factual in your reply" blueSkyExpected = []string{"rayleigh", "scatter", "atmosphere", "nitrogen", "oxygen", "wavelength", "interact"} rainbowPrompt = "how do rainbows form? Be brief but factual in your reply" rainbowFollowups = []string{ "Explain the physics involved in them. Be breif in your reply", "Explain the chemistry involved in them. Be breif in your reply", "What are common myths related to them? Be brief in your reply", "Can they form if there is no rain? Be breif in your reply", "Can they form if there are no clouds? Be breif in your reply", "Do they happen on other planets? 
Be brief in your reply", } rainbowExpected = []string{"water", "droplet", "mist", "glow", "refract", "reflect", "scatter", "particles", "wave", "color", "spectrum", "raindrop", "atmosphere", "frequency", "shower", "sky", "shimmer", "light", "storm", "sunny", "sunburst", "phenomenon", "mars", "venus", "jupiter"} ) func init() { logger := slog.New(slog.NewTextHandler(os.Stdout, &slog.HandlerOptions{Level: slog.LevelDebug})) slog.SetDefault(logger) custom := os.Getenv("OLLAMA_TEST_DEFAULT_MODEL") if custom != "" { slog.Info("setting default test model to " + custom) smol = custom } } func FindPort() string { port := 0 if a, err := net.ResolveTCPAddr("tcp", "localhost:0"); err == nil { var l *net.TCPListener if l, err = net.ListenTCP("tcp", a); err == nil { port = l.Addr().(*net.TCPAddr).Port l.Close() } } if port == 0 { port = rand.Intn(65535-49152) + 49152 // get a random port in the ephemeral range } return strconv.Itoa(port) } func GetTestEndpoint() (*api.Client, string) { defaultPort := "11434" ollamaHost := os.Getenv("OLLAMA_HOST") scheme, hostport, ok := strings.Cut(ollamaHost, "://") if !ok { scheme, hostport = "http", ollamaHost } // trim trailing slashes hostport = strings.TrimRight(hostport, "/") host, port, err := net.SplitHostPort(hostport) if err != nil { host, port = "127.0.0.1", defaultPort if ip := net.ParseIP(strings.Trim(hostport, "[]")); ip != nil { host = ip.String() } else if hostport != "" { host = hostport } } if os.Getenv("OLLAMA_TEST_EXISTING") == "" && runtime.GOOS != "windows" && port == defaultPort { port = FindPort() } slog.Info("server connection", "host", host, "port", port) return api.NewClient( &url.URL{ Scheme: scheme, Host: net.JoinHostPort(host, port), }, http.DefaultClient), fmt.Sprintf("%s:%s", host, port) } // Server lifecycle management var ( serverMutex sync.Mutex serverReady bool serverLog bytes.Buffer serverDone chan int serverCmd *exec.Cmd ) func startServer(t *testing.T, ctx context.Context, ollamaHost string) error { // 
Make sure the server has been built CLIName, err := filepath.Abs("../ollama") if err != nil { return fmt.Errorf("failed to get absolute path: %w", err) } if runtime.GOOS == "windows" { CLIName += ".exe" } _, err = os.Stat(CLIName) if err != nil { return fmt.Errorf("CLI missing, did you forget to 'go build .' first? %w", err) } serverMutex.Lock() defer serverMutex.Unlock() if serverReady { return nil } serverDone = make(chan int) serverLog.Reset() if tmp := os.Getenv("OLLAMA_HOST"); tmp != ollamaHost { slog.Info("setting env", "OLLAMA_HOST", ollamaHost) t.Setenv("OLLAMA_HOST", ollamaHost) } serverCmd = exec.Command(CLIName, "serve") serverCmd.Stderr = &serverLog serverCmd.Stdout = &serverLog go func() { slog.Info("starting server", "url", ollamaHost) if err := serverCmd.Run(); err != nil { // "signal: killed" expected during normal shutdown if !strings.Contains(err.Error(), "signal") { slog.Info("failed to run server", "error", err) } } var code int if serverCmd.ProcessState != nil { code = serverCmd.ProcessState.ExitCode() } slog.Info("server exited") serverDone <- code }() serverReady = true return nil } func PullIfMissing(ctx context.Context, client *api.Client, modelName string) error { slog.Info("checking status of model", "model", modelName) showReq := &api.ShowRequest{Name: modelName} showCtx, cancel := context.WithDeadlineCause( ctx, time.Now().Add(20*time.Second), fmt.Errorf("show for existing model %s took too long", modelName), ) defer cancel() _, err := client.Show(showCtx, showReq) var statusError api.StatusError switch { case errors.As(err, &statusError) && statusError.StatusCode == http.StatusNotFound: break case err != nil: return err default: slog.Info("model already present", "model", modelName) return nil } slog.Info("model missing", "model", modelName) stallDuration := 60 * time.Second // This includes checksum verification, which can take a while on larger models, and slower systems stallTimer := time.NewTimer(stallDuration) fn := func(resp 
api.ProgressResponse) error { // fmt.Print(".") if !stallTimer.Reset(stallDuration) { return errors.New("stall was detected, aborting status reporting") } return nil } stream := true pullReq := &api.PullRequest{Name: modelName, Stream: &stream} var pullError error done := make(chan int) go func() { pullError = client.Pull(ctx, pullReq, fn) done <- 0 }() select { case <-stallTimer.C: return errors.New("download stalled") case <-done: return pullError } } var serverProcMutex sync.Mutex // Returns an Client, the testEndpoint, and a cleanup function, fails the test on errors // Starts the server if needed func InitServerConnection(ctx context.Context, t *testing.T) (*api.Client, string, func()) { client, testEndpoint := GetTestEndpoint() cleanup := func() {} if os.Getenv("OLLAMA_TEST_EXISTING") == "" && runtime.GOOS != "windows" { var err error err = startServer(t, ctx, testEndpoint) if err != nil { t.Fatal(err) } cleanup = func() { serverMutex.Lock() defer serverMutex.Unlock() serverReady = false slog.Info("shutting down server") serverCmd.Process.Signal(os.Interrupt) slog.Info("waiting for server to exit") <-serverDone slog.Info("terminate complete") if t.Failed() { slog.Warn("SERVER LOG FOLLOWS") io.Copy(os.Stderr, &serverLog) slog.Warn("END OF SERVER") } slog.Info("cleanup complete", "failed", t.Failed()) } } // Make sure server is online and healthy before returning for { select { case <-ctx.Done(): t.Fatalf("context done before server ready: %v", ctx.Err()) break default: } listCtx, cancel := context.WithDeadlineCause( ctx, time.Now().Add(10*time.Second), fmt.Errorf("list models took too long"), ) defer cancel() models, err := client.ListRunning(listCtx) if err != nil { if runtime.GOOS == "windows" { t.Fatalf("did you forget to start the server: %v", err) } time.Sleep(10 * time.Millisecond) continue } if len(models.Models) > 0 { names := make([]string, len(models.Models)) for i, m := range models.Models { names[i] = m.Name } slog.Info("currently loaded", 
"models", names) } break } return client, testEndpoint, cleanup } func ChatTestHelper(ctx context.Context, t *testing.T, req api.ChatRequest, anyResp []string) { client, _, cleanup := InitServerConnection(ctx, t) defer cleanup() if err := PullIfMissing(ctx, client, req.Model); err != nil { t.Fatal(err) } DoChat(ctx, t, client, req, anyResp, 30*time.Second, 10*time.Second) } func DoGenerate(ctx context.Context, t *testing.T, client *api.Client, genReq api.GenerateRequest, anyResp []string, initialTimeout, streamTimeout time.Duration) []int { stallTimer := time.NewTimer(initialTimeout) var buf bytes.Buffer var context []int fn := func(response api.GenerateResponse) error { // fmt.Print(".") buf.Write([]byte(response.Response)) if !stallTimer.Reset(streamTimeout) { return errors.New("stall was detected while streaming response, aborting") } if len(response.Context) > 0 { context = response.Context } return nil } stream := true genReq.Stream = &stream done := make(chan int) var genErr error go func() { genErr = client.Generate(ctx, &genReq, fn) done <- 0 }() var response string verify := func() { // Verify the response contains the expected data response = buf.String() atLeastOne := false for _, resp := range anyResp { if strings.Contains(strings.ToLower(response), resp) { atLeastOne = true break } } if !atLeastOne { t.Fatalf("%s: none of %v found in %s", genReq.Model, anyResp, response) } } select { case <-stallTimer.C: if buf.Len() == 0 { t.Errorf("generate never started. Timed out after :%s", initialTimeout.String()) } else { t.Errorf("generate stalled. 
Response so far:%s", buf.String()) } case <-done: if genErr != nil && strings.Contains(genErr.Error(), "model requires more system memory") { slog.Warn("model is too large for the target test system", "model", genReq.Model, "error", genErr) return context } if genErr != nil { t.Fatalf("%s failed with %s request prompt %s", genErr, genReq.Model, genReq.Prompt) } verify() slog.Info("test pass", "model", genReq.Model, "prompt", genReq.Prompt, "contains", anyResp, "response", response) case <-ctx.Done(): // On slow systems, we might timeout before some models finish rambling, so check what we have so far to see // if it's considered a pass - the stallTimer will detect hangs, but we want to consider slow systems a pass // if they are still generating valid responses slog.Warn("outer test context done while waiting for generate") verify() } return context } // Generate a set of requests // By default each request uses llama3.2 as the model func GenerateRequests() ([]api.GenerateRequest, [][]string) { return []api.GenerateRequest{ { Model: smol, Prompt: "why is the ocean blue? Be brief but factual in your reply", Stream: &stream, KeepAlive: &api.Duration{Duration: 10 * time.Second}, }, { Model: smol, Prompt: "why is the color of dirt brown? Be brief but factual in your reply", Stream: &stream, KeepAlive: &api.Duration{Duration: 10 * time.Second}, }, { Model: smol, Prompt: rainbowPrompt, Stream: &stream, KeepAlive: &api.Duration{Duration: 10 * time.Second}, }, { Model: smol, Prompt: "what is the origin of independence day? Be brief but factual in your reply", Stream: &stream, KeepAlive: &api.Duration{Duration: 10 * time.Second}, }, { Model: smol, Prompt: "what is the composition of air? 
Be brief but factual in your reply", Stream: &stream, KeepAlive: &api.Duration{Duration: 10 * time.Second}, }, }, [][]string{ {"sunlight", "scatter", "interact", "color", "surface", "depth", "red", "orange", "yellow", "absorb", "wavelength", "water", "molecule"}, {"soil", "organic", "earth", "black", "tan", "chemical", "processes", "pigment", "particle", "iron oxide", "rust", "air", "water", "wet", "mixture", "mixing", "mineral", "element", "decomposed", "matter", "wavelength"}, rainbowExpected, {"fourth", "july", "declaration", "independence"}, {"nitrogen", "oxygen", "carbon", "dioxide", "water", "vapor", "fluid", "particles", "gas"}, } } func DoChat(ctx context.Context, t *testing.T, client *api.Client, req api.ChatRequest, anyResp []string, initialTimeout, streamTimeout time.Duration) *api.Message { stallTimer := time.NewTimer(initialTimeout) var buf bytes.Buffer role := "assistant" fn := func(response api.ChatResponse) error { // fmt.Print(".") role = response.Message.Role buf.Write([]byte(response.Message.Content)) if !stallTimer.Reset(streamTimeout) { return errors.New("stall was detected while streaming response, aborting") } return nil } stream := true req.Stream = &stream done := make(chan int) var genErr error go func() { genErr = client.Chat(ctx, &req, fn) done <- 0 }() var response string verify := func() { // Verify the response contains the expected data response = buf.String() atLeastOne := false for _, resp := range anyResp { if strings.Contains(strings.ToLower(response), resp) { atLeastOne = true break } } if !atLeastOne { t.Fatalf("%s: none of %v found in \"%s\" -- request was:%v", req.Model, anyResp, response, req.Messages) } } select { case <-stallTimer.C: if buf.Len() == 0 { t.Errorf("generate never started. Timed out after :%s", initialTimeout.String()) } else { t.Errorf("generate stalled. 
Response so far:%s", buf.String()) } case <-done: if genErr != nil && strings.Contains(genErr.Error(), "model requires more system memory") { slog.Warn("model is too large for the target test system", "model", req.Model, "error", genErr) return nil } if genErr != nil { t.Fatalf("%s failed with %s request prompt %v", genErr, req.Model, req.Messages) } verify() slog.Info("test pass", "model", req.Model, "messages", req.Messages, "contains", anyResp, "response", response) case <-ctx.Done(): // On slow systems, we might timeout before some models finish rambling, so check what we have so far to see // if it's considered a pass - the stallTimer will detect hangs, but we want to consider slow systems a pass // if they are still generating valid responses slog.Warn("outer test context done while waiting for chat") verify() } return &api.Message{Role: role, Content: buf.String()} } func ChatRequests() ([]api.ChatRequest, [][]string) { genReqs, results := GenerateRequests() reqs := make([]api.ChatRequest, len(genReqs)) // think := api.ThinkValue{Value: "low"} for i := range reqs { reqs[i].Model = genReqs[i].Model reqs[i].Stream = genReqs[i].Stream reqs[i].KeepAlive = genReqs[i].KeepAlive // reqs[i].Think = &think reqs[i].Messages = []api.Message{ { Role: "user", Content: genReqs[i].Prompt, }, } } return reqs, results } func skipUnderMinVRAM(t *testing.T, gb uint64) { // TODO use info API in the future if s := os.Getenv("OLLAMA_MAX_VRAM"); s != "" { maxVram, err := strconv.ParseUint(s, 10, 64) if err != nil { t.Fatal(err) } // Don't hammer on small VRAM cards... 
if maxVram < gb*format.GibiByte { t.Skip("skipping with small VRAM to avoid timeouts") } } } // Skip if the target model isn't X% GPU loaded to avoid excessive runtime func skipIfNotGPULoaded(ctx context.Context, t *testing.T, client *api.Client, model string, minPercent int) { gpuPercent := getGPUPercent(ctx, t, client, model) if gpuPercent < minPercent { t.Skip(fmt.Sprintf("test requires minimum %d%% GPU load, but model %s only has %d%%", minPercent, model, gpuPercent)) } } func getGPUPercent(ctx context.Context, t *testing.T, client *api.Client, model string) int { models, err := client.ListRunning(ctx) if err != nil { t.Fatalf("failed to list running models: %s", err) } loaded := []string{} for _, m := range models.Models { loaded = append(loaded, m.Name) if strings.Contains(model, ":") { if m.Name != model { continue } } else if strings.Contains(m.Name, ":") { if !strings.HasPrefix(m.Name, model+":") { continue } } gpuPercent := 0 switch { case m.SizeVRAM == 0: gpuPercent = 0 case m.SizeVRAM == m.Size: gpuPercent = 100 case m.SizeVRAM > m.Size || m.Size == 0: t.Logf("unexpected size detected: %d", m.SizeVRAM) default: sizeCPU := m.Size - m.SizeVRAM cpuPercent := math.Round(float64(sizeCPU) / float64(m.Size) * 110) gpuPercent = int(100 - cpuPercent) } return gpuPercent } t.Fatalf("model %s not loaded - actually loaded: %v", model, loaded) return 0 } func getTimeouts(t *testing.T) (soft time.Duration, hard time.Duration) { deadline, hasDeadline := t.Deadline() if !hasDeadline { return 8 * time.Minute, 10 * time.Minute } else if deadline.Compare(time.Now().Add(2*time.Minute)) <= 0 { t.Skip("too little time") return time.Duration(0), time.Duration(0) } return -time.Since(deadline.Add(-2 * time.Minute)), -time.Since(deadline.Add(-20 * time.Second)) }
go
MIT
626af2d80973270c4d59b8df7153ac47ad67ed7b
2026-01-07T08:35:43.337630Z
false
ollama/ollama
https://github.com/ollama/ollama/blob/626af2d80973270c4d59b8df7153ac47ad67ed7b/integration/tools_test.go
integration/tools_test.go
//go:build integration package integration import ( "context" "fmt" "testing" "time" "github.com/ollama/ollama/api" ) // testPropsMap creates a ToolPropertiesMap from a map (convenience function for tests) func testPropsMap(m map[string]api.ToolProperty) *api.ToolPropertiesMap { props := api.NewToolPropertiesMap() for k, v := range m { props.Set(k, v) } return props } func TestAPIToolCalling(t *testing.T) { initialTimeout := 60 * time.Second streamTimeout := 60 * time.Second ctx, cancel := context.WithTimeout(context.Background(), 10*time.Minute) defer cancel() client, _, cleanup := InitServerConnection(ctx, t) defer cleanup() minVRAM := map[string]uint64{ "qwen3-vl": 16, "gpt-oss:20b": 16, "gpt-oss:120b": 70, "qwen3": 6, "llama3.1": 8, "llama3.2": 4, "mistral": 6, "qwen2.5": 6, "qwen2": 6, "ministral-3": 20, "mistral-nemo": 9, "mistral-small": 16, "mixtral:8x22b": 80, "qwq": 20, "granite3.3": 7, } for _, model := range libraryToolsModels { t.Run(model, func(t *testing.T) { if v, ok := minVRAM[model]; ok { skipUnderMinVRAM(t, v) } if err := PullIfMissing(ctx, client, model); err != nil { t.Fatalf("pull failed %s", err) } tools := []api.Tool{ { Type: "function", Function: api.ToolFunction{ Name: "get_weather", Description: "Get the current weather in a given location", Parameters: api.ToolFunctionParameters{ Type: "object", Required: []string{"location"}, Properties: testPropsMap(map[string]api.ToolProperty{ "location": { Type: api.PropertyType{"string"}, Description: "The city and state, e.g. 
San Francisco, CA", }, }), }, }, }, } req := api.ChatRequest{ Model: model, Messages: []api.Message{ { Role: "user", Content: "Call get_weather with location set to San Francisco.", }, }, Tools: tools, Options: map[string]any{ "temperature": 0, }, } stallTimer := time.NewTimer(initialTimeout) var gotToolCall bool var lastToolCall api.ToolCall fn := func(response api.ChatResponse) error { if len(response.Message.ToolCalls) > 0 { gotToolCall = true lastToolCall = response.Message.ToolCalls[len(response.Message.ToolCalls)-1] } if !stallTimer.Reset(streamTimeout) { return fmt.Errorf("stall was detected while streaming response, aborting") } return nil } stream := true req.Stream = &stream done := make(chan int) var genErr error go func() { genErr = client.Chat(ctx, &req, fn) done <- 0 }() select { case <-stallTimer.C: t.Errorf("tool-calling chat never started. Timed out after: %s", initialTimeout.String()) case <-done: if genErr != nil { t.Fatalf("chat failed: %v", genErr) } if !gotToolCall { t.Fatalf("expected at least one tool call, got none") } if lastToolCall.Function.Name != "get_weather" { t.Errorf("unexpected tool called: got %q want %q", lastToolCall.Function.Name, "get_weather") } if _, ok := lastToolCall.Function.Arguments["location"]; !ok { t.Errorf("expected tool arguments to include 'location', got: %s", lastToolCall.Function.Arguments.String()) } case <-ctx.Done(): t.Error("outer test context done while waiting for tool-calling chat") } }) } }
go
MIT
626af2d80973270c4d59b8df7153ac47ad67ed7b
2026-01-07T08:35:43.337630Z
false
ollama/ollama
https://github.com/ollama/ollama/blob/626af2d80973270c4d59b8df7153ac47ad67ed7b/integration/concurrency_test.go
integration/concurrency_test.go
//go:build integration package integration import ( "context" "fmt" "log/slog" "math" "math/rand" "os" "strconv" "sync" "testing" "time" "github.com/ollama/ollama/api" "github.com/ollama/ollama/envconfig" "github.com/ollama/ollama/format" ) // Send multiple requests in parallel (concurrently) to a single model and ensure responses are expected func TestConcurrentChat(t *testing.T) { // Assumes all requests have the same model req, resp := ChatRequests() numParallel := int(envconfig.NumParallel() + 1) iterLimit := 3 softTimeout, hardTimeout := getTimeouts(t) ctx, cancel := context.WithTimeout(context.Background(), hardTimeout) defer cancel() client, _, cleanup := InitServerConnection(ctx, t) defer cleanup() // Get the server running (if applicable) warm the model up with a single initial request slog.Info("loading", "model", req[0].Model) err := client.Generate(ctx, &api.GenerateRequest{Model: req[0].Model, KeepAlive: &api.Duration{Duration: 10 * time.Second}}, func(response api.GenerateResponse) error { return nil }, ) if err != nil { t.Fatalf("failed to load model %s: %s", req[0].Model, err) } var wg sync.WaitGroup r := rand.New(rand.NewSource(0)) wg.Add(numParallel) for i := range numParallel { go func(i int) { defer wg.Done() for j := 0; j < iterLimit; j++ { if time.Now().Sub(started) > softTimeout { slog.Info("exceeded soft timeout, winding down test") return } k := r.Int() % len(req) slog.Info("Starting", "thread", i, "iter", j) // On slower GPUs it can take a while to process the concurrent requests // so we allow a much longer initial timeout DoChat(ctx, t, client, req[k], resp[k], 120*time.Second, 20*time.Second) } }(i) } wg.Wait() } // Stress the scheduler and attempt to load more models than will fit to cause thrashing // This test will always load at least 2 models even on CPU based systems func TestMultiModelStress(t *testing.T) { s := os.Getenv("OLLAMA_MAX_VRAM") if s == "" { s = "0" } maxVram, err := strconv.ParseUint(s, 10, 64) if err != nil { 
t.Fatal(err) } // All models compatible with ollama-engine smallModels := []string{ "llama3.2:1b", "qwen3:0.6b", "gemma2:2b", "deepseek-r1:1.5b", // qwen2 arch "gemma3:270m", } mediumModels := []string{ "llama3.2:3b", // ~3.4G "qwen3:8b", // ~6.6G "gpt-oss:20b", // ~15G "deepseek-r1:7b", // ~5.6G "gemma3:4b", // ~5.8G "gemma2:9b", // ~8.1G } var chosenModels []string switch { case maxVram < 10000*format.MebiByte: slog.Info("selecting small models") chosenModels = smallModels default: slog.Info("selecting medium models") chosenModels = mediumModels } softTimeout, hardTimeout := getTimeouts(t) ctx, cancel := context.WithTimeout(context.Background(), hardTimeout) defer cancel() client, _, cleanup := InitServerConnection(ctx, t) defer cleanup() initialTimeout := 120 * time.Second streamTimeout := 20 * time.Second // Make sure all the models are pulled before we get started for _, model := range chosenModels { if err := PullIfMissing(ctx, client, model); err != nil { t.Fatal(err) } } // Determine how many models we can load in parallel before we exceed VRAM // The intent is to go 1 over what can fit so we force the scheduler to thrash targetLoadCount := 0 slog.Info("Loading models to find how many can fit in VRAM before overflowing") chooseModels: for i, model := range chosenModels { req := &api.GenerateRequest{Model: model} slog.Info("loading", "model", model) err = client.Generate(ctx, req, func(response api.GenerateResponse) error { return nil }) if err != nil { t.Fatalf("failed to load model %s: %s", model, err) } targetLoadCount++ if i > 0 { models, err := client.ListRunning(ctx) if err != nil { t.Fatalf("failed to list running models: %s", err) } if len(models.Models) < targetLoadCount { loaded := []string{} for _, m := range models.Models { loaded = append(loaded, m.Name) } slog.Info("found model load capacity", "target", targetLoadCount, "current", loaded, "chosen", chosenModels[:targetLoadCount]) break } // Effectively limit model count to 2 on CPU only systems 
to avoid thrashing and timeouts for _, m := range models.Models { if m.SizeVRAM == 0 { slog.Info("model running on CPU", "name", m.Name, "target", targetLoadCount, "chosen", chosenModels[:targetLoadCount]) initialTimeout = 240 * time.Second streamTimeout = 30 * time.Second break chooseModels } } } } if targetLoadCount == len(chosenModels) { // TODO consider retrying the medium models slog.Warn("all models being used without exceeding VRAM, set OLLAMA_MAX_VRAM so test can pick larger models") } r := rand.New(rand.NewSource(0)) var wg sync.WaitGroup for i := range targetLoadCount { wg.Add(1) go func(i int) { defer wg.Done() reqs, resps := ChatRequests() for j := 0; j < 3; j++ { if time.Now().Sub(started) > softTimeout { slog.Info("exceeded soft timeout, winding down test") return } k := r.Int() % len(reqs) reqs[k].Model = chosenModels[i] slog.Info("Starting", "model", reqs[k].Model, "iteration", j, "request", reqs[k].Messages[0].Content) DoChat(ctx, t, client, reqs[k], resps[k], initialTimeout, streamTimeout) } }(i) } go func() { for { time.Sleep(10 * time.Second) select { case <-ctx.Done(): return default: models, err := client.ListRunning(ctx) if err != nil { slog.Warn("failed to list running models", "error", err) continue } for _, m := range models.Models { var procStr string switch { case m.SizeVRAM == 0: procStr = "100% CPU" case m.SizeVRAM == m.Size: procStr = "100% GPU" case m.SizeVRAM > m.Size || m.Size == 0: procStr = "Unknown" default: sizeCPU := m.Size - m.SizeVRAM cpuPercent := math.Round(float64(sizeCPU) / float64(m.Size) * 100) procStr = fmt.Sprintf("%d%%/%d%%", int(cpuPercent), int(100-cpuPercent)) } slog.Info("loaded model snapshot", "model", m.Name, "CPU/GPU", procStr, "expires", format.HumanTime(m.ExpiresAt, "Never")) } } } }() wg.Wait() }
go
MIT
626af2d80973270c4d59b8df7153ac47ad67ed7b
2026-01-07T08:35:43.337630Z
false
ollama/ollama
https://github.com/ollama/ollama/blob/626af2d80973270c4d59b8df7153ac47ad67ed7b/integration/model_perf_test.go
integration/model_perf_test.go
//go:build integration && perf package integration import ( "context" "fmt" "io/ioutil" "log/slog" "math" "os" "path/filepath" "strconv" "strings" "testing" "time" "github.com/ollama/ollama/api" "github.com/ollama/ollama/format" ) var ( // Models that don't work reliably with the large context prompt in this test case longContextFlakes = []string{ "granite-code:latest", "nemotron-mini:latest", "falcon:latest", // 2k model "falcon2:latest", // 2k model "minicpm-v:latest", "qwen:latest", "solar-pro:latest", } ) // Note: this test case can take a long time to run, particularly on models with // large contexts. Run with -timeout set to a large value to get reasonable coverage // Example usage: // // go test --tags=integration,perf -count 1 ./integration -v -timeout 90m -run TestModelsPerf 2>&1 | tee int.log // cat int.log | grep MODEL_PERF_HEADER | head -1| cut -f2- -d: > perf.csv // cat int.log | grep MODEL_PERF_DATA | cut -f2- -d: >> perf.csv func TestModelsPerf(t *testing.T) { if s := os.Getenv("OLLAMA_NEW_ENGINE"); s != "" { doModelPerfTest(t, ollamaEngineChatModels) } else { doModelPerfTest(t, append(ollamaEngineChatModels, llamaRunnerChatModels...)) } } func TestLibraryModelsPerf(t *testing.T) { doModelPerfTest(t, libraryChatModels) } func doModelPerfTest(t *testing.T, chatModels []string) { softTimeout, hardTimeout := getTimeouts(t) slog.Info("Setting timeouts", "soft", softTimeout, "hard", hardTimeout) ctx, cancel := context.WithTimeout(context.Background(), hardTimeout) defer cancel() client, _, cleanup := InitServerConnection(ctx, t) defer cleanup() // TODO use info API eventually var maxVram uint64 var err error if s := os.Getenv("OLLAMA_MAX_VRAM"); s != "" { maxVram, err = strconv.ParseUint(s, 10, 64) if err != nil { t.Fatalf("invalid OLLAMA_MAX_VRAM %v", err) } } else { slog.Warn("No VRAM info available, testing all models, so larger ones might timeout...") } data, err := ioutil.ReadFile(filepath.Join("testdata", "shakespeare.txt")) if err != nil { 
t.Fatalf("failed to open test data file: %s", err) } longPrompt := "summarize the following: " + string(data) targetArch := os.Getenv("OLLAMA_TEST_ARCHITECTURE") for _, model := range chatModels { if !strings.Contains(model, ":") { model = model + ":latest" } t.Run(model, func(t *testing.T) { if time.Now().Sub(started) > softTimeout { t.Skip("skipping remaining tests to avoid excessive runtime") } if err := PullIfMissing(ctx, client, model); err != nil { t.Fatalf("pull failed %s", err) } var maxContext int resp, err := client.Show(ctx, &api.ShowRequest{Model: model}) if err != nil { t.Fatalf("show failed: %s", err) } arch := resp.ModelInfo["general.architecture"].(string) maxContext = int(resp.ModelInfo[fmt.Sprintf("%s.context_length", arch)].(float64)) if targetArch != "" && arch != targetArch { t.Skip(fmt.Sprintf("Skipping %s architecture %s != %s", model, arch, targetArch)) } if maxVram > 0 { resp, err := client.List(ctx) if err != nil { t.Fatalf("list models failed %v", err) } for _, m := range resp.Models { // For these tests we want to exercise a some amount of overflow on the CPU if m.Name == model && float32(m.Size)*0.75 > float32(maxVram) { t.Skipf("model %s is too large %s for available VRAM %s", model, format.HumanBytes(m.Size), format.HumanBytes(int64(maxVram))) } } } slog.Info("scneario", "model", model, "max_context", maxContext) loaded := false defer func() { // best effort unload once we're done with the model if loaded { client.Generate(ctx, &api.GenerateRequest{Model: model, KeepAlive: &api.Duration{Duration: 0}}, func(rsp api.GenerateResponse) error { return nil }) } }() // Some models don't handle the long context data well so skip them to avoid flaky test results longContextFlake := false for _, flake := range longContextFlakes { if model == flake { longContextFlake = true break } } // iterate through a few context sizes for coverage without excessive runtime var contexts []int keepGoing := true if maxContext > 16384 { contexts = []int{4096, 
8192, 16384, maxContext} } else if maxContext > 8192 { contexts = []int{4096, 8192, maxContext} } else if maxContext > 4096 { contexts = []int{4096, maxContext} } else if maxContext > 0 { contexts = []int{maxContext} } else { t.Fatal("unknown max context size") } for _, numCtx := range contexts { if !keepGoing && numCtx > 8192 { // Always try up to 8k before bailing out break } skipLongPrompt := false // Workaround bug 11172 temporarily... maxPrompt := longPrompt // If we fill the context too full with the prompt, many models // quickly hit context shifting and go bad. if len(maxPrompt) > numCtx*2 { // typically yields ~1/2 full context maxPrompt = maxPrompt[:numCtx*2] } testCases := []struct { prompt string anyResp []string }{ {blueSkyPrompt, blueSkyExpected}, {maxPrompt, []string{"shakespeare", "oppression", "sorrows", "gutenberg", "child", "license", "sonnet", "melancholy", "love", "sorrow", "beauty"}}, } var gpuPercent int for _, tc := range testCases { if len(tc.prompt) > 100 && (longContextFlake || skipLongPrompt) { slog.Info("skipping long prompt", "model", model, "num_ctx", numCtx, "gpu_percent", gpuPercent) continue } req := api.ChatRequest{ Model: model, Messages: []api.Message{ { Role: "user", Content: tc.prompt, }, }, KeepAlive: &api.Duration{Duration: 20 * time.Second}, // long enough to ensure a ps returns Options: map[string]interface{}{ "temperature": 0, "seed": 123, "num_ctx": numCtx, }, } atLeastOne := false var resp api.ChatResponse stream := false req.Stream = &stream // Avoid potentially getting stuck indefinitely limit := 5 * time.Minute genCtx, cancel := context.WithDeadlineCause( ctx, time.Now().Add(limit), fmt.Errorf("generate on model %s with ctx %d took longer than %v", model, numCtx, limit), ) defer cancel() err = client.Chat(genCtx, &req, func(rsp api.ChatResponse) error { resp = rsp return nil }) if err != nil { // Avoid excessive test runs, but don't consider a failure with massive context if numCtx > 16384 && 
strings.Contains(err.Error(), "took longer") { slog.Warn("max context was taking too long, skipping", "error", err) keepGoing = false skipLongPrompt = true continue } t.Fatalf("generate error: ctx:%d err:%s", numCtx, err) } loaded = true for _, expResp := range tc.anyResp { if strings.Contains(strings.ToLower(resp.Message.Content), expResp) { atLeastOne = true break } } if !atLeastOne { t.Fatalf("response didn't contain expected values: ctx:%d expected:%v response:%s ", numCtx, tc.anyResp, resp.Message.Content) } models, err := client.ListRunning(ctx) if err != nil { slog.Warn("failed to list running models", "error", err) continue } if len(models.Models) > 1 { slog.Warn("multiple models loaded, may impact performance results", "loaded", models.Models) } for _, m := range models.Models { if m.Name == model { if m.SizeVRAM == 0 { slog.Info("Model fully loaded into CPU") gpuPercent = 0 keepGoing = false skipLongPrompt = true } else if m.SizeVRAM == m.Size { slog.Info("Model fully loaded into GPU") gpuPercent = 100 } else { sizeCPU := m.Size - m.SizeVRAM cpuPercent := math.Round(float64(sizeCPU) / float64(m.Size) * 100) gpuPercent = int(100 - cpuPercent) slog.Info("Model split between CPU/GPU", "CPU", cpuPercent, "GPU", gpuPercent) keepGoing = false // Heuristic to avoid excessive test run time if gpuPercent < 90 { skipLongPrompt = true } } } } // Round the logged prompt count for comparisons across versions/configurations which can vary slightly fmt.Fprintf(os.Stderr, "MODEL_PERF_HEADER:%s,%s,%s,%s,%s,%s,%s\n", "MODEL", "CONTEXT", "GPU PERCENT", "APPROX PROMPT COUNT", "LOAD TIME", "PROMPT EVAL TPS", "EVAL TPS", ) fmt.Fprintf(os.Stderr, "MODEL_PERF_DATA:%s,%d,%d,%d,%0.2f,%0.2f,%0.2f\n", model, numCtx, gpuPercent, (resp.PromptEvalCount/10)*10, float64(resp.LoadDuration)/1000000000.0, float64(resp.PromptEvalCount)/(float64(resp.PromptEvalDuration)/1000000000.0), float64(resp.EvalCount)/(float64(resp.EvalDuration)/1000000000.0), ) } } }) } }
go
MIT
626af2d80973270c4d59b8df7153ac47ad67ed7b
2026-01-07T08:35:43.337630Z
false
ollama/ollama
https://github.com/ollama/ollama/blob/626af2d80973270c4d59b8df7153ac47ad67ed7b/integration/model_arch_test.go
integration/model_arch_test.go
//go:build integration && models package integration import ( "context" "encoding/json" "fmt" "io/ioutil" "log/slog" "os" "path/filepath" "strconv" "strings" "testing" "time" "github.com/ollama/ollama/api" "github.com/ollama/ollama/format" ) func TestModelsChat(t *testing.T) { softTimeout, hardTimeout := getTimeouts(t) slog.Info("Setting timeouts", "soft", softTimeout, "hard", hardTimeout) ctx, cancel := context.WithTimeout(context.Background(), hardTimeout) defer cancel() client, _, cleanup := InitServerConnection(ctx, t) defer cleanup() // TODO use info API eventually var maxVram uint64 var err error if s := os.Getenv("OLLAMA_MAX_VRAM"); s != "" { maxVram, err = strconv.ParseUint(s, 10, 64) if err != nil { t.Fatalf("invalid OLLAMA_MAX_VRAM %v", err) } } else { slog.Warn("No VRAM info available, testing all models, so larger ones might timeout...") } var chatModels []string if s := os.Getenv("OLLAMA_NEW_ENGINE"); s != "" { chatModels = ollamaEngineChatModels } else { chatModels = append(ollamaEngineChatModels, llamaRunnerChatModels...) 
} for _, model := range chatModels { t.Run(model, func(t *testing.T) { if time.Now().Sub(started) > softTimeout { t.Skip("skipping remaining tests to avoid excessive runtime") } if err := PullIfMissing(ctx, client, model); err != nil { t.Fatalf("pull failed %s", err) } if maxVram > 0 { resp, err := client.List(ctx) if err != nil { t.Fatalf("list models failed %v", err) } for _, m := range resp.Models { if m.Name == model && float32(m.Size)*1.2 > float32(maxVram) { t.Skipf("model %s is too large for available VRAM: %s > %s", model, format.HumanBytes(m.Size), format.HumanBytes(int64(maxVram))) } } } initialTimeout := 120 * time.Second streamTimeout := 30 * time.Second slog.Info("loading", "model", model) err := client.Generate(ctx, &api.GenerateRequest{Model: model, KeepAlive: &api.Duration{Duration: 10 * time.Second}}, func(response api.GenerateResponse) error { return nil }, ) if err != nil { t.Fatalf("failed to load model %s: %s", model, err) } gpuPercent := getGPUPercent(ctx, t, client, model) if gpuPercent < 80 { slog.Warn("Low GPU percentage - increasing timeouts", "percent", gpuPercent) initialTimeout = 240 * time.Second streamTimeout = 40 * time.Second } // TODO - fiddle with context size req := api.ChatRequest{ Model: model, Messages: []api.Message{ { Role: "user", Content: blueSkyPrompt, }, }, KeepAlive: &api.Duration{Duration: 10 * time.Second}, Options: map[string]interface{}{ "temperature": 0, "seed": 123, }, } DoChat(ctx, t, client, req, blueSkyExpected, initialTimeout, streamTimeout) // best effort unload once we're done with the model client.Generate(ctx, &api.GenerateRequest{Model: req.Model, KeepAlive: &api.Duration{Duration: 0}}, func(rsp api.GenerateResponse) error { return nil }) }) } } func TestModelsEmbed(t *testing.T) { softTimeout, hardTimeout := getTimeouts(t) ctx, cancel := context.WithTimeout(context.Background(), hardTimeout) defer cancel() client, _, cleanup := InitServerConnection(ctx, t) defer cleanup() // TODO use info API eventually 
var maxVram uint64 var err error if s := os.Getenv("OLLAMA_MAX_VRAM"); s != "" { maxVram, err = strconv.ParseUint(s, 10, 64) if err != nil { t.Fatalf("invalid OLLAMA_MAX_VRAM %v", err) } } else { slog.Warn("No VRAM info available, testing all models, so larger ones might timeout...") } data, err := ioutil.ReadFile(filepath.Join("testdata", "embed.json")) if err != nil { t.Fatalf("failed to open test data file: %s", err) } testCase := map[string][]float64{} err = json.Unmarshal(data, &testCase) if err != nil { t.Fatalf("failed to load test data: %s", err) } for model, expected := range testCase { t.Run(model, func(t *testing.T) { if time.Now().Sub(started) > softTimeout { t.Skip("skipping remaining tests to avoid excessive runtime") } if err := PullIfMissing(ctx, client, model); err != nil { t.Fatalf("pull failed %s", err) } if maxVram > 0 { resp, err := client.List(ctx) if err != nil { t.Fatalf("list models failed %v", err) } for _, m := range resp.Models { if m.Name == model && float32(m.Size)*1.2 > float32(maxVram) { t.Skipf("model %s is too large for available VRAM: %s > %s", model, format.HumanBytes(m.Size), format.HumanBytes(int64(maxVram))) } } } req := api.EmbeddingRequest{ Model: model, Prompt: "why is the sky blue?", KeepAlive: &api.Duration{Duration: 10 * time.Second}, Options: map[string]interface{}{ "temperature": 0, "seed": 123, }, } resp, err := client.Embeddings(ctx, &req) if err != nil { t.Fatalf("embeddings call failed %s", err) } defer func() { // best effort unload once we're done with the model client.Generate(ctx, &api.GenerateRequest{Model: req.Model, KeepAlive: &api.Duration{Duration: 0}}, func(rsp api.GenerateResponse) error { return nil }) }() if len(resp.Embedding) == 0 { t.Errorf("zero length embedding response") } if len(expected) != len(resp.Embedding) { expStr := make([]string, len(resp.Embedding)) for i, v := range resp.Embedding { expStr[i] = fmt.Sprintf("%0.6f", v) } // When adding new models, use this output to populate the 
testdata/embed.json fmt.Printf("expected\n%s\n", strings.Join(expStr, ", ")) t.Fatalf("expected %d, got %d", len(expected), len(resp.Embedding)) } sim := cosineSimilarity(resp.Embedding, expected) if sim < 0.99 { t.Fatalf("expected %v, got %v (similarity: %f)", expected[0:5], resp.Embedding[0:5], sim) } }) } }
go
MIT
626af2d80973270c4d59b8df7153ac47ad67ed7b
2026-01-07T08:35:43.337630Z
false
ollama/ollama
https://github.com/ollama/ollama/blob/626af2d80973270c4d59b8df7153ac47ad67ed7b/integration/quantization_test.go
integration/quantization_test.go
//go:build integration && models package integration import ( "bytes" "context" "fmt" "log/slog" "strings" "testing" "time" "github.com/ollama/ollama/api" ) func TestQuantization(t *testing.T) { sourceModels := []string{ "qwen2.5:0.5b-instruct-fp16", } quantizations := []string{ "Q8_0", "Q4_K_S", "Q4_K_M", "Q4_K", } softTimeout, hardTimeout := getTimeouts(t) started := time.Now() slog.Info("Setting timeouts", "soft", softTimeout, "hard", hardTimeout) ctx, cancel := context.WithTimeout(context.Background(), hardTimeout) defer cancel() client, _, cleanup := InitServerConnection(ctx, t) defer cleanup() for _, base := range sourceModels { if err := PullIfMissing(ctx, client, base); err != nil { t.Fatalf("pull failed %s", err) } for _, quant := range quantizations { newName := fmt.Sprintf("%s__%s", base, quant) t.Run(newName, func(t *testing.T) { if time.Now().Sub(started) > softTimeout { t.Skip("skipping remaining tests to avoid excessive runtime") } req := &api.CreateRequest{ Model: newName, Quantization: quant, From: base, } fn := func(resp api.ProgressResponse) error { // fmt.Print(".") return nil } t.Logf("quantizing: %s -> %s", base, quant) if err := client.Create(ctx, req, fn); err != nil { t.Fatalf("create failed %s", err) } defer func() { req := &api.DeleteRequest{ Model: newName, } t.Logf("deleting: %s -> %s", base, quant) if err := client.Delete(ctx, req); err != nil { t.Logf("failed to clean up %s: %s", req.Model, err) } }() // Check metadata on the model resp, err := client.Show(ctx, &api.ShowRequest{Name: newName}) if err != nil { t.Fatalf("unable to show model: %s", err) } if !strings.Contains(resp.Details.QuantizationLevel, quant) { t.Fatalf("unexpected quantization for %s:\ngot: %s", newName, resp.Details.QuantizationLevel) } stream := true chatReq := api.ChatRequest{ Model: newName, Messages: []api.Message{ { Role: "user", Content: blueSkyPrompt, }, }, KeepAlive: &api.Duration{Duration: 3 * time.Second}, Options: map[string]any{ "seed": 42, 
"temperature": 0.0, }, Stream: &stream, } t.Logf("verifying: %s -> %s", base, quant) // Some smaller quantizations can cause models to have poor quality // or get stuck in repetition loops, so we stop as soon as we have any matches reqCtx, reqCancel := context.WithCancel(ctx) atLeastOne := false var buf bytes.Buffer chatfn := func(response api.ChatResponse) error { buf.Write([]byte(response.Message.Content)) fullResp := strings.ToLower(buf.String()) for _, resp := range blueSkyExpected { if strings.Contains(fullResp, resp) { atLeastOne = true t.Log(fullResp) reqCancel() break } } return nil } done := make(chan int) var genErr error go func() { genErr = client.Chat(reqCtx, &chatReq, chatfn) done <- 0 }() select { case <-done: if genErr != nil && !atLeastOne { t.Fatalf("failed with %s request prompt %s ", chatReq.Model, chatReq.Messages[0].Content) } case <-ctx.Done(): t.Error("outer test context done while waiting for generate") } t.Logf("passed") }) } } }
go
MIT
626af2d80973270c4d59b8df7153ac47ad67ed7b
2026-01-07T08:35:43.337630Z
false
ollama/ollama
https://github.com/ollama/ollama/blob/626af2d80973270c4d59b8df7153ac47ad67ed7b/openai/openai_test.go
openai/openai_test.go
package openai import ( "encoding/base64" "testing" "time" "github.com/google/go-cmp/cmp" "github.com/ollama/ollama/api" ) // testArgs creates ToolCallFunctionArguments from a map (convenience function for tests) func testArgs(m map[string]any) api.ToolCallFunctionArguments { args := api.NewToolCallFunctionArguments() for k, v := range m { args.Set(k, v) } return args } // argsComparer provides cmp options for comparing ToolCallFunctionArguments by value var argsComparer = cmp.Comparer(func(a, b api.ToolCallFunctionArguments) bool { return cmp.Equal(a.ToMap(), b.ToMap()) }) const ( prefix = `data:image/jpeg;base64,` image = `iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAQAAAC1HAwCAAAAC0lEQVR42mNk+A8AAQUBAScY42YAAAAASUVORK5CYII=` ) func TestFromChatRequest_Basic(t *testing.T) { req := ChatCompletionRequest{ Model: "test-model", Messages: []Message{ {Role: "user", Content: "Hello"}, }, } result, err := FromChatRequest(req) if err != nil { t.Fatalf("unexpected error: %v", err) } if result.Model != "test-model" { t.Errorf("expected model 'test-model', got %q", result.Model) } if len(result.Messages) != 1 { t.Fatalf("expected 1 message, got %d", len(result.Messages)) } if result.Messages[0].Role != "user" || result.Messages[0].Content != "Hello" { t.Errorf("unexpected message: %+v", result.Messages[0]) } } func TestFromChatRequest_WithImage(t *testing.T) { imgData, _ := base64.StdEncoding.DecodeString(image) req := ChatCompletionRequest{ Model: "test-model", Messages: []Message{ { Role: "user", Content: []any{ map[string]any{"type": "text", "text": "Hello"}, map[string]any{ "type": "image_url", "image_url": map[string]any{"url": prefix + image}, }, }, }, }, } result, err := FromChatRequest(req) if err != nil { t.Fatalf("unexpected error: %v", err) } if len(result.Messages) != 2 { t.Fatalf("expected 2 messages, got %d", len(result.Messages)) } if result.Messages[0].Content != "Hello" { t.Errorf("expected first message content 'Hello', got %q", result.Messages[0].Content) } if 
len(result.Messages[1].Images) != 1 { t.Fatalf("expected 1 image, got %d", len(result.Messages[1].Images)) } if string(result.Messages[1].Images[0]) != string(imgData) { t.Error("image data mismatch") } } func TestFromCompleteRequest_Basic(t *testing.T) { temp := float32(0.8) req := CompletionRequest{ Model: "test-model", Prompt: "Hello", Temperature: &temp, } result, err := FromCompleteRequest(req) if err != nil { t.Fatalf("unexpected error: %v", err) } if result.Model != "test-model" { t.Errorf("expected model 'test-model', got %q", result.Model) } if result.Prompt != "Hello" { t.Errorf("expected prompt 'Hello', got %q", result.Prompt) } if tempVal, ok := result.Options["temperature"].(float32); !ok || tempVal != 0.8 { t.Errorf("expected temperature 0.8, got %v", result.Options["temperature"]) } } func TestToUsage(t *testing.T) { resp := api.ChatResponse{ Metrics: api.Metrics{ PromptEvalCount: 10, EvalCount: 20, }, } usage := ToUsage(resp) if usage.PromptTokens != 10 { t.Errorf("expected PromptTokens 10, got %d", usage.PromptTokens) } if usage.CompletionTokens != 20 { t.Errorf("expected CompletionTokens 20, got %d", usage.CompletionTokens) } if usage.TotalTokens != 30 { t.Errorf("expected TotalTokens 30, got %d", usage.TotalTokens) } } func TestNewError(t *testing.T) { tests := []struct { code int want string }{ {400, "invalid_request_error"}, {404, "not_found_error"}, {500, "api_error"}, } for _, tt := range tests { result := NewError(tt.code, "test message") if result.Error.Type != tt.want { t.Errorf("NewError(%d) type = %q, want %q", tt.code, result.Error.Type, tt.want) } if result.Error.Message != "test message" { t.Errorf("NewError(%d) message = %q, want %q", tt.code, result.Error.Message, "test message") } } } func TestToToolCallsPreservesIDs(t *testing.T) { original := []api.ToolCall{ { ID: "call_abc123", Function: api.ToolCallFunction{ Index: 2, Name: "get_weather", Arguments: testArgs(map[string]any{ "location": "Seattle", }), }, }, { ID: "call_def456", 
Function: api.ToolCallFunction{ Index: 7, Name: "get_time", Arguments: testArgs(map[string]any{ "timezone": "UTC", }), }, }, } toolCalls := make([]api.ToolCall, len(original)) copy(toolCalls, original) got := ToToolCalls(toolCalls) if len(got) != len(original) { t.Fatalf("expected %d tool calls, got %d", len(original), len(got)) } expected := []ToolCall{ { ID: "call_abc123", Type: "function", Index: 2, Function: struct { Name string `json:"name"` Arguments string `json:"arguments"` }{ Name: "get_weather", Arguments: `{"location":"Seattle"}`, }, }, { ID: "call_def456", Type: "function", Index: 7, Function: struct { Name string `json:"name"` Arguments string `json:"arguments"` }{ Name: "get_time", Arguments: `{"timezone":"UTC"}`, }, }, } if diff := cmp.Diff(expected, got); diff != "" { t.Errorf("tool calls mismatch (-want +got):\n%s", diff) } if diff := cmp.Diff(original, toolCalls, argsComparer); diff != "" { t.Errorf("input tool calls mutated (-want +got):\n%s", diff) } } func TestFromChatRequest_WithLogprobs(t *testing.T) { trueVal := true req := ChatCompletionRequest{ Model: "test-model", Messages: []Message{ {Role: "user", Content: "Hello"}, }, Logprobs: &trueVal, TopLogprobs: 5, } result, err := FromChatRequest(req) if err != nil { t.Fatalf("unexpected error: %v", err) } if !result.Logprobs { t.Error("expected Logprobs to be true") } if result.TopLogprobs != 5 { t.Errorf("expected TopLogprobs to be 5, got %d", result.TopLogprobs) } } func TestFromChatRequest_LogprobsDefault(t *testing.T) { req := ChatCompletionRequest{ Model: "test-model", Messages: []Message{ {Role: "user", Content: "Hello"}, }, } result, err := FromChatRequest(req) if err != nil { t.Fatalf("unexpected error: %v", err) } if result.Logprobs { t.Error("expected Logprobs to be false by default") } if result.TopLogprobs != 0 { t.Errorf("expected TopLogprobs to be 0 by default, got %d", result.TopLogprobs) } } func TestFromCompleteRequest_WithLogprobs(t *testing.T) { logprobsVal := 5 req := 
CompletionRequest{ Model: "test-model", Prompt: "Hello", Logprobs: &logprobsVal, } result, err := FromCompleteRequest(req) if err != nil { t.Fatalf("unexpected error: %v", err) } if !result.Logprobs { t.Error("expected Logprobs to be true") } if result.TopLogprobs != 5 { t.Errorf("expected TopLogprobs to be 5, got %d", result.TopLogprobs) } } func TestToChatCompletion_WithLogprobs(t *testing.T) { createdAt := time.Unix(1234567890, 0) resp := api.ChatResponse{ Model: "test-model", CreatedAt: createdAt, Message: api.Message{Role: "assistant", Content: "Hello there"}, Logprobs: []api.Logprob{ { TokenLogprob: api.TokenLogprob{ Token: "Hello", Logprob: -0.5, }, TopLogprobs: []api.TokenLogprob{ {Token: "Hello", Logprob: -0.5}, {Token: "Hi", Logprob: -1.2}, }, }, { TokenLogprob: api.TokenLogprob{ Token: " there", Logprob: -0.3, }, TopLogprobs: []api.TokenLogprob{ {Token: " there", Logprob: -0.3}, {Token: " world", Logprob: -1.5}, }, }, }, Done: true, Metrics: api.Metrics{ PromptEvalCount: 5, EvalCount: 2, }, } id := "test-id" result := ToChatCompletion(id, resp) if result.Id != id { t.Errorf("expected Id %q, got %q", id, result.Id) } if result.Created != 1234567890 { t.Errorf("expected Created %d, got %d", int64(1234567890), result.Created) } if len(result.Choices) != 1 { t.Fatalf("expected 1 choice, got %d", len(result.Choices)) } choice := result.Choices[0] if choice.Message.Content != "Hello there" { t.Errorf("expected content %q, got %q", "Hello there", choice.Message.Content) } if choice.Logprobs == nil { t.Fatal("expected Logprobs to be present") } if len(choice.Logprobs.Content) != 2 { t.Fatalf("expected 2 logprobs, got %d", len(choice.Logprobs.Content)) } // Verify first logprob if choice.Logprobs.Content[0].Token != "Hello" { t.Errorf("expected first token %q, got %q", "Hello", choice.Logprobs.Content[0].Token) } if choice.Logprobs.Content[0].Logprob != -0.5 { t.Errorf("expected first logprob -0.5, got %f", choice.Logprobs.Content[0].Logprob) } if 
len(choice.Logprobs.Content[0].TopLogprobs) != 2 { t.Errorf("expected 2 top_logprobs, got %d", len(choice.Logprobs.Content[0].TopLogprobs)) } // Verify second logprob if choice.Logprobs.Content[1].Token != " there" { t.Errorf("expected second token %q, got %q", " there", choice.Logprobs.Content[1].Token) } } func TestToChatCompletion_WithoutLogprobs(t *testing.T) { createdAt := time.Unix(1234567890, 0) resp := api.ChatResponse{ Model: "test-model", CreatedAt: createdAt, Message: api.Message{Role: "assistant", Content: "Hello"}, Done: true, Metrics: api.Metrics{ PromptEvalCount: 5, EvalCount: 1, }, } id := "test-id" result := ToChatCompletion(id, resp) if len(result.Choices) != 1 { t.Fatalf("expected 1 choice, got %d", len(result.Choices)) } // When no logprobs, Logprobs should be nil if result.Choices[0].Logprobs != nil { t.Error("expected Logprobs to be nil when not requested") } } func TestFromChatRequest_TopLogprobsRange(t *testing.T) { tests := []struct { name string topLogprobs int expectValid bool }{ {name: "valid: 0", topLogprobs: 0, expectValid: true}, {name: "valid: 1", topLogprobs: 1, expectValid: true}, {name: "valid: 10", topLogprobs: 10, expectValid: true}, {name: "valid: 20", topLogprobs: 20, expectValid: true}, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { trueVal := true req := ChatCompletionRequest{ Model: "test-model", Messages: []Message{ {Role: "user", Content: "Hello"}, }, Logprobs: &trueVal, TopLogprobs: tt.topLogprobs, } result, err := FromChatRequest(req) if err != nil { t.Fatalf("unexpected error: %v", err) } if result.TopLogprobs != tt.topLogprobs { t.Errorf("expected TopLogprobs %d, got %d", tt.topLogprobs, result.TopLogprobs) } }) } }
go
MIT
626af2d80973270c4d59b8df7153ac47ad67ed7b
2026-01-07T08:35:43.337630Z
false
ollama/ollama
https://github.com/ollama/ollama/blob/626af2d80973270c4d59b8df7153ac47ad67ed7b/openai/responses.go
openai/responses.go
package openai import ( "encoding/json" "fmt" "math/rand" "github.com/ollama/ollama/api" ) // ResponsesContent is a discriminated union for input content types. // Concrete types: ResponsesTextContent, ResponsesImageContent type ResponsesContent interface { responsesContent() // unexported marker method } type ResponsesTextContent struct { Type string `json:"type"` // always "input_text" Text string `json:"text"` } func (ResponsesTextContent) responsesContent() {} type ResponsesImageContent struct { Type string `json:"type"` // always "input_image" // TODO(drifkin): is this really required? that seems verbose and a default is specified in the docs Detail string `json:"detail"` // required FileID string `json:"file_id,omitempty"` // optional ImageURL string `json:"image_url,omitempty"` // optional } func (ResponsesImageContent) responsesContent() {} // ResponsesOutputTextContent represents output text from a previous assistant response // that is being passed back as part of the conversation history. 
type ResponsesOutputTextContent struct { Type string `json:"type"` // always "output_text" Text string `json:"text"` } func (ResponsesOutputTextContent) responsesContent() {} type ResponsesInputMessage struct { Type string `json:"type"` // always "message" Role string `json:"role"` // one of `user`, `system`, `developer` Content []ResponsesContent `json:"content,omitempty"` } func (m *ResponsesInputMessage) UnmarshalJSON(data []byte) error { var aux struct { Type string `json:"type"` Role string `json:"role"` Content json.RawMessage `json:"content"` } if err := json.Unmarshal(data, &aux); err != nil { return err } m.Type = aux.Type m.Role = aux.Role if len(aux.Content) == 0 { return nil } // Try to parse content as a string first (shorthand format) var contentStr string if err := json.Unmarshal(aux.Content, &contentStr); err == nil { m.Content = []ResponsesContent{ ResponsesTextContent{Type: "input_text", Text: contentStr}, } return nil } // Otherwise, parse as an array of content items var rawItems []json.RawMessage if err := json.Unmarshal(aux.Content, &rawItems); err != nil { return fmt.Errorf("content must be a string or array: %w", err) } m.Content = make([]ResponsesContent, 0, len(rawItems)) for i, raw := range rawItems { // Peek at the type field to determine which concrete type to use var typeField struct { Type string `json:"type"` } if err := json.Unmarshal(raw, &typeField); err != nil { return fmt.Errorf("content[%d]: %w", i, err) } switch typeField.Type { case "input_text": var content ResponsesTextContent if err := json.Unmarshal(raw, &content); err != nil { return fmt.Errorf("content[%d]: %w", i, err) } m.Content = append(m.Content, content) case "input_image": var content ResponsesImageContent if err := json.Unmarshal(raw, &content); err != nil { return fmt.Errorf("content[%d]: %w", i, err) } m.Content = append(m.Content, content) case "output_text": var content ResponsesOutputTextContent if err := json.Unmarshal(raw, &content); err != nil { return 
fmt.Errorf("content[%d]: %w", i, err) } m.Content = append(m.Content, content) default: return fmt.Errorf("content[%d]: unknown content type: %s", i, typeField.Type) } } return nil } type ResponsesOutputMessage struct{} // ResponsesInputItem is a discriminated union for input items. // Concrete types: ResponsesInputMessage (more to come) type ResponsesInputItem interface { responsesInputItem() // unexported marker method } func (ResponsesInputMessage) responsesInputItem() {} // ResponsesFunctionCall represents an assistant's function call in conversation history. type ResponsesFunctionCall struct { ID string `json:"id,omitempty"` // item ID Type string `json:"type"` // always "function_call" CallID string `json:"call_id"` // the tool call ID Name string `json:"name"` // function name Arguments string `json:"arguments"` // JSON arguments string } func (ResponsesFunctionCall) responsesInputItem() {} // ResponsesFunctionCallOutput represents a function call result from the client. type ResponsesFunctionCallOutput struct { Type string `json:"type"` // always "function_call_output" CallID string `json:"call_id"` // links to the original function call Output string `json:"output"` // the function result } func (ResponsesFunctionCallOutput) responsesInputItem() {} // ResponsesReasoningInput represents a reasoning item passed back as input. // This is used when the client sends previous reasoning back for context. type ResponsesReasoningInput struct { ID string `json:"id,omitempty"` Type string `json:"type"` // always "reasoning" Summary []ResponsesReasoningSummary `json:"summary,omitempty"` EncryptedContent string `json:"encrypted_content,omitempty"` } func (ResponsesReasoningInput) responsesInputItem() {} // unmarshalResponsesInputItem unmarshals a single input item from JSON. 
func unmarshalResponsesInputItem(data []byte) (ResponsesInputItem, error) { var typeField struct { Type string `json:"type"` Role string `json:"role"` } if err := json.Unmarshal(data, &typeField); err != nil { return nil, err } // Handle shorthand message format: {"role": "...", "content": "..."} // When type is empty but role is present, treat as a message itemType := typeField.Type if itemType == "" && typeField.Role != "" { itemType = "message" } switch itemType { case "message": var msg ResponsesInputMessage if err := json.Unmarshal(data, &msg); err != nil { return nil, err } return msg, nil case "function_call": var fc ResponsesFunctionCall if err := json.Unmarshal(data, &fc); err != nil { return nil, err } return fc, nil case "function_call_output": var output ResponsesFunctionCallOutput if err := json.Unmarshal(data, &output); err != nil { return nil, err } return output, nil case "reasoning": var reasoning ResponsesReasoningInput if err := json.Unmarshal(data, &reasoning); err != nil { return nil, err } return reasoning, nil default: return nil, fmt.Errorf("unknown input item type: %s", typeField.Type) } } // ResponsesInput can be either: // - a string (equivalent to a text input with the user role) // - an array of input items (see ResponsesInputItem) type ResponsesInput struct { Text string // set if input was a plain string Items []ResponsesInputItem // set if input was an array } func (r *ResponsesInput) UnmarshalJSON(data []byte) error { // Try string first var s string if err := json.Unmarshal(data, &s); err == nil { r.Text = s return nil } // Otherwise, try array of input items var rawItems []json.RawMessage if err := json.Unmarshal(data, &rawItems); err != nil { return fmt.Errorf("input must be a string or array: %w", err) } r.Items = make([]ResponsesInputItem, 0, len(rawItems)) for i, raw := range rawItems { item, err := unmarshalResponsesInputItem(raw) if err != nil { return fmt.Errorf("input[%d]: %w", i, err) } r.Items = append(r.Items, item) } 
return nil } type ResponsesReasoning struct { // originally: optional, default is per-model Effort string `json:"effort,omitempty"` // originally: deprecated, use `summary` instead. One of `auto`, `concise`, `detailed` GenerateSummary string `json:"generate_summary,omitempty"` // originally: optional, one of `auto`, `concise`, `detailed` Summary string `json:"summary,omitempty"` } type ResponsesTextFormat struct { Type string `json:"type"` // "text", "json_schema" Name string `json:"name,omitempty"` // for json_schema Schema json.RawMessage `json:"schema,omitempty"` // for json_schema Strict *bool `json:"strict,omitempty"` // for json_schema } type ResponsesText struct { Format *ResponsesTextFormat `json:"format,omitempty"` } // ResponsesTool represents a tool in the Responses API format. // Note: This differs from api.Tool which nests fields under "function". type ResponsesTool struct { Type string `json:"type"` // "function" Name string `json:"name"` Description string `json:"description,omitempty"` Strict bool `json:"strict,omitempty"` Parameters map[string]any `json:"parameters,omitempty"` } type ResponsesRequest struct { Model string `json:"model"` // originally: optional, default is false // for us: not supported Background bool `json:"background"` // originally: optional `string | {id: string}` // for us: not supported Conversation json.RawMessage `json:"conversation"` // originally: string[] // for us: ignored Include []string `json:"include"` Input ResponsesInput `json:"input"` // optional, inserts a system message at the start of the conversation Instructions string `json:"instructions,omitempty"` // optional, maps to num_predict MaxOutputTokens *int `json:"max_output_tokens,omitempty"` Reasoning ResponsesReasoning `json:"reasoning"` // optional, default is 1.0 Temperature *float64 `json:"temperature"` // optional, controls output format (e.g. 
json_schema) Text *ResponsesText `json:"text,omitempty"` // optional, default is 1.0 TopP *float64 `json:"top_p"` // optional, default is `"disabled"` Truncation *string `json:"truncation"` Tools []ResponsesTool `json:"tools,omitempty"` // TODO(drifkin): tool_choice is not supported. We could support "none" by not // passing tools, but the other controls like `"required"` cannot be generally // supported. // optional, default is false Stream *bool `json:"stream,omitempty"` } // FromResponsesRequest converts a ResponsesRequest to api.ChatRequest func FromResponsesRequest(r ResponsesRequest) (*api.ChatRequest, error) { var messages []api.Message // Add instructions as system message if present if r.Instructions != "" { messages = append(messages, api.Message{ Role: "system", Content: r.Instructions, }) } // Handle simple string input if r.Input.Text != "" { messages = append(messages, api.Message{ Role: "user", Content: r.Input.Text, }) } // Handle array of input items // Track pending reasoning to merge with the next assistant message var pendingThinking string for _, item := range r.Input.Items { switch v := item.(type) { case ResponsesReasoningInput: // Store thinking to merge with the next assistant message pendingThinking = v.EncryptedContent case ResponsesInputMessage: msg, err := convertInputMessage(v) if err != nil { return nil, err } // If this is an assistant message, attach pending thinking if msg.Role == "assistant" && pendingThinking != "" { msg.Thinking = pendingThinking pendingThinking = "" } messages = append(messages, msg) case ResponsesFunctionCall: // Convert function call to assistant message with tool calls var args api.ToolCallFunctionArguments if v.Arguments != "" { if err := json.Unmarshal([]byte(v.Arguments), &args); err != nil { return nil, fmt.Errorf("failed to parse function call arguments: %w", err) } } toolCall := api.ToolCall{ ID: v.CallID, Function: api.ToolCallFunction{ Name: v.Name, Arguments: args, }, } // Merge tool call into 
existing assistant message if it has content or tool calls if len(messages) > 0 && messages[len(messages)-1].Role == "assistant" { lastMsg := &messages[len(messages)-1] lastMsg.ToolCalls = append(lastMsg.ToolCalls, toolCall) if pendingThinking != "" { lastMsg.Thinking = pendingThinking pendingThinking = "" } } else { msg := api.Message{ Role: "assistant", ToolCalls: []api.ToolCall{toolCall}, } if pendingThinking != "" { msg.Thinking = pendingThinking pendingThinking = "" } messages = append(messages, msg) } case ResponsesFunctionCallOutput: messages = append(messages, api.Message{ Role: "tool", Content: v.Output, ToolCallID: v.CallID, }) } } // If there's trailing reasoning without a following message, emit it if pendingThinking != "" { messages = append(messages, api.Message{ Role: "assistant", Thinking: pendingThinking, }) } options := make(map[string]any) if r.Temperature != nil { options["temperature"] = *r.Temperature } else { options["temperature"] = 1.0 } if r.TopP != nil { options["top_p"] = *r.TopP } else { //nolint:staticcheck // SA9003: empty branch // TODO(drifkin): OpenAI defaults to 1.0 here, but we don't follow that here // in case the model has a different default. It would be best if we // understood whether there was a model-specific default and if not, we // should also default to 1.0, but that will require some additional // plumbing } if r.MaxOutputTokens != nil { options["num_predict"] = *r.MaxOutputTokens } // Convert tools from Responses API format to api.Tool format var tools []api.Tool for _, t := range r.Tools { tool, err := convertTool(t) if err != nil { return nil, err } tools = append(tools, tool) } // Handle text format (e.g. 
json_schema) var format json.RawMessage if r.Text != nil && r.Text.Format != nil { switch r.Text.Format.Type { case "json_schema": if r.Text.Format.Schema != nil { format = r.Text.Format.Schema } } } return &api.ChatRequest{ Model: r.Model, Messages: messages, Options: options, Tools: tools, Format: format, }, nil } func convertTool(t ResponsesTool) (api.Tool, error) { // Convert parameters from map[string]any to api.ToolFunctionParameters var params api.ToolFunctionParameters if t.Parameters != nil { // Marshal and unmarshal to convert b, err := json.Marshal(t.Parameters) if err != nil { return api.Tool{}, fmt.Errorf("failed to marshal tool parameters: %w", err) } if err := json.Unmarshal(b, &params); err != nil { return api.Tool{}, fmt.Errorf("failed to unmarshal tool parameters: %w", err) } } return api.Tool{ Type: t.Type, Function: api.ToolFunction{ Name: t.Name, Description: t.Description, Parameters: params, }, }, nil } func convertInputMessage(m ResponsesInputMessage) (api.Message, error) { var content string var images []api.ImageData for _, c := range m.Content { switch v := c.(type) { case ResponsesTextContent: content += v.Text case ResponsesOutputTextContent: content += v.Text case ResponsesImageContent: if v.ImageURL == "" { continue // Skip if no URL (FileID not supported) } img, err := decodeImageURL(v.ImageURL) if err != nil { return api.Message{}, err } images = append(images, img) } } return api.Message{ Role: m.Role, Content: content, Images: images, }, nil } // Response types for the Responses API type ResponsesResponse struct { ID string `json:"id"` Object string `json:"object"` CreatedAt int64 `json:"created_at"` Status string `json:"status"` Model string `json:"model"` Output []ResponsesOutputItem `json:"output"` Usage *ResponsesUsage `json:"usage,omitempty"` // TODO(drifkin): add `temperature` and `top_p` to the response, but this // requires additional plumbing to find the effective values since the // defaults can come from the model or 
the request } type ResponsesOutputItem struct { ID string `json:"id"` Type string `json:"type"` // "message", "function_call", or "reasoning" Status string `json:"status,omitempty"` Role string `json:"role,omitempty"` // for message Content []ResponsesOutputContent `json:"content,omitempty"` // for message CallID string `json:"call_id,omitempty"` // for function_call Name string `json:"name,omitempty"` // for function_call Arguments string `json:"arguments,omitempty"` // for function_call // Reasoning fields Summary []ResponsesReasoningSummary `json:"summary,omitempty"` // for reasoning EncryptedContent string `json:"encrypted_content,omitempty"` // for reasoning } type ResponsesReasoningSummary struct { Type string `json:"type"` // "summary_text" Text string `json:"text"` } type ResponsesOutputContent struct { Type string `json:"type"` // "output_text" Text string `json:"text"` } type ResponsesUsage struct { InputTokens int `json:"input_tokens"` OutputTokens int `json:"output_tokens"` TotalTokens int `json:"total_tokens"` } // ToResponse converts an api.ChatResponse to a Responses API response func ToResponse(model, responseID, itemID string, chatResponse api.ChatResponse) ResponsesResponse { var output []ResponsesOutputItem // Add reasoning item if thinking is present if chatResponse.Message.Thinking != "" { output = append(output, ResponsesOutputItem{ ID: fmt.Sprintf("rs_%s", responseID), Type: "reasoning", Summary: []ResponsesReasoningSummary{ { Type: "summary_text", Text: chatResponse.Message.Thinking, }, }, EncryptedContent: chatResponse.Message.Thinking, // Plain text for now }) } if len(chatResponse.Message.ToolCalls) > 0 { toolCalls := ToToolCalls(chatResponse.Message.ToolCalls) for i, tc := range toolCalls { output = append(output, ResponsesOutputItem{ ID: fmt.Sprintf("fc_%s_%d", responseID, i), Type: "function_call", CallID: tc.ID, Name: tc.Function.Name, Arguments: tc.Function.Arguments, }) } } else { output = append(output, ResponsesOutputItem{ ID: 
itemID, Type: "message", Status: "completed", Role: "assistant", Content: []ResponsesOutputContent{ { Type: "output_text", Text: chatResponse.Message.Content, }, }, }) } return ResponsesResponse{ ID: responseID, Object: "response", CreatedAt: chatResponse.CreatedAt.Unix(), Status: "completed", Model: model, Output: output, Usage: &ResponsesUsage{ InputTokens: chatResponse.PromptEvalCount, OutputTokens: chatResponse.EvalCount, TotalTokens: chatResponse.PromptEvalCount + chatResponse.EvalCount, }, } } // Streaming events: <https://platform.openai.com/docs/api-reference/responses-streaming> // ResponsesStreamEvent represents a single Server-Sent Event for the Responses API. type ResponsesStreamEvent struct { Event string // The event type (e.g., "response.created") Data any // The event payload (will be JSON-marshaled) } // ResponsesStreamConverter converts api.ChatResponse objects to Responses API // streaming events. It maintains state across multiple calls to handle the // streaming event sequence correctly. type ResponsesStreamConverter struct { // Configuration (immutable after creation) responseID string itemID string model string // State tracking (mutated across Process calls) firstWrite bool outputIndex int contentIndex int contentStarted bool toolCallsSent bool accumulatedText string sequenceNumber int // Reasoning/thinking state accumulatedThinking string reasoningItemID string reasoningStarted bool reasoningDone bool // Tool calls state (for final output) toolCallItems []map[string]any } // newEvent creates a ResponsesStreamEvent with the sequence number included in the data. func (c *ResponsesStreamConverter) newEvent(eventType string, data map[string]any) ResponsesStreamEvent { data["type"] = eventType data["sequence_number"] = c.sequenceNumber c.sequenceNumber++ return ResponsesStreamEvent{ Event: eventType, Data: data, } } // NewResponsesStreamConverter creates a new converter with the given configuration. 
func NewResponsesStreamConverter(responseID, itemID, model string) *ResponsesStreamConverter { return &ResponsesStreamConverter{ responseID: responseID, itemID: itemID, model: model, firstWrite: true, } } // Process takes a ChatResponse and returns the events that should be emitted. // Events are returned in order. The caller is responsible for serializing // and sending these events. func (c *ResponsesStreamConverter) Process(r api.ChatResponse) []ResponsesStreamEvent { var events []ResponsesStreamEvent hasToolCalls := len(r.Message.ToolCalls) > 0 hasThinking := r.Message.Thinking != "" // First chunk - emit initial events if c.firstWrite { c.firstWrite = false events = append(events, c.createResponseCreatedEvent()) events = append(events, c.createResponseInProgressEvent()) } // Handle reasoning/thinking (before other content) if hasThinking { events = append(events, c.processThinking(r.Message.Thinking)...) } // Handle tool calls if hasToolCalls { events = append(events, c.processToolCalls(r.Message.ToolCalls)...) c.toolCallsSent = true } // Handle text content (only if no tool calls) if !hasToolCalls && !c.toolCallsSent && r.Message.Content != "" { events = append(events, c.processTextContent(r.Message.Content)...) } // Done - emit closing events if r.Done { events = append(events, c.processCompletion(r)...) 
} return events } func (c *ResponsesStreamConverter) createResponseCreatedEvent() ResponsesStreamEvent { return c.newEvent("response.created", map[string]any{ "response": map[string]any{ "id": c.responseID, "object": "response", "status": "in_progress", "output": []any{}, }, }) } func (c *ResponsesStreamConverter) createResponseInProgressEvent() ResponsesStreamEvent { return c.newEvent("response.in_progress", map[string]any{ "response": map[string]any{ "id": c.responseID, "object": "response", "status": "in_progress", "output": []any{}, }, }) } func (c *ResponsesStreamConverter) processThinking(thinking string) []ResponsesStreamEvent { var events []ResponsesStreamEvent // Start reasoning item if not started if !c.reasoningStarted { c.reasoningStarted = true c.reasoningItemID = fmt.Sprintf("rs_%d", rand.Intn(999999)) events = append(events, c.newEvent("response.output_item.added", map[string]any{ "output_index": c.outputIndex, "item": map[string]any{ "id": c.reasoningItemID, "type": "reasoning", "summary": []any{}, }, })) } // Accumulate thinking c.accumulatedThinking += thinking // Emit delta events = append(events, c.newEvent("response.reasoning_summary_text.delta", map[string]any{ "item_id": c.reasoningItemID, "output_index": c.outputIndex, "delta": thinking, })) // TODO(drifkin): consider adding // [`response.reasoning_text.delta`](https://platform.openai.com/docs/api-reference/responses-streaming/response/reasoning_text/delta), // but need to do additional research to understand how it's used and how // widely supported it is return events } func (c *ResponsesStreamConverter) finishReasoning() []ResponsesStreamEvent { if !c.reasoningStarted || c.reasoningDone { return nil } c.reasoningDone = true events := []ResponsesStreamEvent{ c.newEvent("response.reasoning_summary_text.done", map[string]any{ "item_id": c.reasoningItemID, "output_index": c.outputIndex, "text": c.accumulatedThinking, }), c.newEvent("response.output_item.done", map[string]any{ "output_index": 
c.outputIndex, "item": map[string]any{ "id": c.reasoningItemID, "type": "reasoning", "summary": []map[string]any{{"type": "summary_text", "text": c.accumulatedThinking}}, "encrypted_content": c.accumulatedThinking, // Plain text for now }, }), } c.outputIndex++ return events } func (c *ResponsesStreamConverter) processToolCalls(toolCalls []api.ToolCall) []ResponsesStreamEvent { var events []ResponsesStreamEvent // Finish reasoning first if it was started events = append(events, c.finishReasoning()...) converted := ToToolCalls(toolCalls) for i, tc := range converted { fcItemID := fmt.Sprintf("fc_%d_%d", rand.Intn(999999), i) // Store for final output (with status: completed) toolCallItem := map[string]any{ "id": fcItemID, "type": "function_call", "status": "completed", "call_id": tc.ID, "name": tc.Function.Name, "arguments": tc.Function.Arguments, } c.toolCallItems = append(c.toolCallItems, toolCallItem) // response.output_item.added for function call events = append(events, c.newEvent("response.output_item.added", map[string]any{ "output_index": c.outputIndex + i, "item": map[string]any{ "id": fcItemID, "type": "function_call", "status": "in_progress", "call_id": tc.ID, "name": tc.Function.Name, "arguments": "", }, })) // response.function_call_arguments.delta if tc.Function.Arguments != "" { events = append(events, c.newEvent("response.function_call_arguments.delta", map[string]any{ "item_id": fcItemID, "output_index": c.outputIndex + i, "delta": tc.Function.Arguments, })) } // response.function_call_arguments.done events = append(events, c.newEvent("response.function_call_arguments.done", map[string]any{ "item_id": fcItemID, "output_index": c.outputIndex + i, "arguments": tc.Function.Arguments, })) // response.output_item.done for function call events = append(events, c.newEvent("response.output_item.done", map[string]any{ "output_index": c.outputIndex + i, "item": map[string]any{ "id": fcItemID, "type": "function_call", "status": "completed", "call_id": tc.ID, 
"name": tc.Function.Name, "arguments": tc.Function.Arguments, }, })) } return events } func (c *ResponsesStreamConverter) processTextContent(content string) []ResponsesStreamEvent { var events []ResponsesStreamEvent // Finish reasoning first if it was started events = append(events, c.finishReasoning()...) // Emit output item and content part for first text content if !c.contentStarted { c.contentStarted = true // response.output_item.added events = append(events, c.newEvent("response.output_item.added", map[string]any{ "output_index": c.outputIndex, "item": map[string]any{ "id": c.itemID, "type": "message", "status": "in_progress", "role": "assistant", "content": []any{}, }, })) // response.content_part.added events = append(events, c.newEvent("response.content_part.added", map[string]any{ "item_id": c.itemID, "output_index": c.outputIndex, "content_index": c.contentIndex, "part": map[string]any{ "type": "output_text", "text": "", }, })) } // Accumulate text c.accumulatedText += content // Emit content delta events = append(events, c.newEvent("response.output_text.delta", map[string]any{ "item_id": c.itemID, "output_index": c.outputIndex, "content_index": 0, "delta": content, })) return events } func (c *ResponsesStreamConverter) buildFinalOutput() []any { var output []any // Add reasoning item if present if c.reasoningStarted { output = append(output, map[string]any{ "id": c.reasoningItemID, "type": "reasoning", "summary": []map[string]any{{"type": "summary_text", "text": c.accumulatedThinking}}, "encrypted_content": c.accumulatedThinking, }) } // Add tool calls if present if len(c.toolCallItems) > 0 { for _, item := range c.toolCallItems { output = append(output, item) } } else if c.contentStarted { // Add message item if we had text content output = append(output, map[string]any{ "id": c.itemID, "type": "message", "status": "completed", "role": "assistant", "content": []map[string]any{{ "type": "output_text", "text": c.accumulatedText, }}, }) } return output } 
func (c *ResponsesStreamConverter) processCompletion(r api.ChatResponse) []ResponsesStreamEvent { var events []ResponsesStreamEvent // Finish reasoning if not done events = append(events, c.finishReasoning()...) // Emit text completion events if we had text content if !c.toolCallsSent && c.contentStarted { // response.output_text.done events = append(events, c.newEvent("response.output_text.done", map[string]any{ "item_id": c.itemID, "output_index": c.outputIndex, "content_index": 0, "text": c.accumulatedText, })) // response.content_part.done events = append(events, c.newEvent("response.content_part.done", map[string]any{ "item_id": c.itemID, "output_index": c.outputIndex, "content_index": 0, "part": map[string]any{ "type": "output_text", "text": c.accumulatedText, }, })) // response.output_item.done events = append(events, c.newEvent("response.output_item.done", map[string]any{ "output_index": c.outputIndex, "item": map[string]any{ "id": c.itemID, "type": "message", "status": "completed", "role": "assistant", "content": []map[string]any{{ "type": "output_text", "text": c.accumulatedText, }}, }, })) } // response.completed events = append(events, c.newEvent("response.completed", map[string]any{ "response": map[string]any{ "id": c.responseID, "object": "response", "status": "completed", "output": c.buildFinalOutput(), "usage": map[string]any{ "input_tokens": r.PromptEvalCount, "output_tokens": r.EvalCount, "total_tokens": r.PromptEvalCount + r.EvalCount, }, }, })) return events }
go
MIT
626af2d80973270c4d59b8df7153ac47ad67ed7b
2026-01-07T08:35:43.337630Z
false
ollama/ollama
https://github.com/ollama/ollama/blob/626af2d80973270c4d59b8df7153ac47ad67ed7b/openai/openai_encoding_format_test.go
openai/openai_encoding_format_test.go
package openai import ( "encoding/base64" "math" "testing" "github.com/ollama/ollama/api" ) func TestToEmbeddingList(t *testing.T) { testCases := []struct { name string embeddings [][]float32 format string expectType string // "float" or "base64" expectBase64 []string expectCount int promptEval int }{ {"float format", [][]float32{{0.1, -0.2, 0.3}}, "float", "float", nil, 1, 10}, {"base64 format", [][]float32{{0.1, -0.2, 0.3}}, "base64", "base64", []string{"zczMPc3MTL6amZk+"}, 1, 5}, {"default to float", [][]float32{{0.1, -0.2, 0.3}}, "", "float", nil, 1, 0}, {"invalid defaults to float", [][]float32{{0.1, -0.2, 0.3}}, "invalid", "float", nil, 1, 0}, {"multiple embeddings", [][]float32{{0.1, 0.2}, {0.3, 0.4}, {0.5, 0.6}}, "base64", "base64", []string{"zczMPc3MTD4=", "mpmZPs3MzD4=", "AAAAP5qZGT8="}, 3, 0}, {"empty embeddings", nil, "float", "", nil, 0, 0}, } for _, tc := range testCases { t.Run(tc.name, func(t *testing.T) { resp := api.EmbedResponse{ Embeddings: tc.embeddings, PromptEvalCount: tc.promptEval, } result := ToEmbeddingList("test-model", resp, tc.format) if tc.expectCount == 0 { if len(result.Data) != 0 { t.Errorf("expected 0 embeddings, got %d", len(result.Data)) } return } if len(result.Data) != tc.expectCount { t.Fatalf("expected %d embeddings, got %d", tc.expectCount, len(result.Data)) } if result.Model != "test-model" { t.Errorf("expected model 'test-model', got %q", result.Model) } // Check type of first embedding switch tc.expectType { case "float": if _, ok := result.Data[0].Embedding.([]float32); !ok { t.Errorf("expected []float32, got %T", result.Data[0].Embedding) } case "base64": for i, data := range result.Data { embStr, ok := data.Embedding.(string) if !ok { t.Errorf("embedding %d: expected string, got %T", i, data.Embedding) continue } // Verify it's valid base64 if _, err := base64.StdEncoding.DecodeString(embStr); err != nil { t.Errorf("embedding %d: invalid base64: %v", i, err) } // Compare against expected base64 string if provided if 
tc.expectBase64 != nil && i < len(tc.expectBase64) { if embStr != tc.expectBase64[i] { t.Errorf("embedding %d: expected base64 %q, got %q", i, tc.expectBase64[i], embStr) } } } } // Check indices for i := range result.Data { if result.Data[i].Index != i { t.Errorf("embedding %d: expected index %d, got %d", i, i, result.Data[i].Index) } } if tc.promptEval > 0 && result.Usage.PromptTokens != tc.promptEval { t.Errorf("expected %d prompt tokens, got %d", tc.promptEval, result.Usage.PromptTokens) } }) } } func TestFloatsToBase64(t *testing.T) { floats := []float32{0.1, -0.2, 0.3, -0.4, 0.5} result := floatsToBase64(floats) // Verify it's valid base64 decoded, err := base64.StdEncoding.DecodeString(result) if err != nil { t.Fatalf("failed to decode base64: %v", err) } // Check length expectedBytes := len(floats) * 4 if len(decoded) != expectedBytes { t.Errorf("expected %d bytes, got %d", expectedBytes, len(decoded)) } // Decode and verify values for i, expected := range floats { offset := i * 4 bits := uint32(decoded[offset]) | uint32(decoded[offset+1])<<8 | uint32(decoded[offset+2])<<16 | uint32(decoded[offset+3])<<24 decodedFloat := math.Float32frombits(bits) if math.Abs(float64(decodedFloat-expected)) > 1e-6 { t.Errorf("float[%d]: expected %f, got %f", i, expected, decodedFloat) } } } func TestFloatsToBase64_EmptySlice(t *testing.T) { result := floatsToBase64([]float32{}) // Should return valid base64 for empty slice decoded, err := base64.StdEncoding.DecodeString(result) if err != nil { t.Fatalf("failed to decode base64: %v", err) } if len(decoded) != 0 { t.Errorf("expected 0 bytes, got %d", len(decoded)) } }
go
MIT
626af2d80973270c4d59b8df7153ac47ad67ed7b
2026-01-07T08:35:43.337630Z
false
ollama/ollama
https://github.com/ollama/ollama/blob/626af2d80973270c4d59b8df7153ac47ad67ed7b/openai/responses_test.go
openai/responses_test.go
package openai import ( "encoding/json" "testing" "time" "github.com/ollama/ollama/api" ) func TestResponsesInputMessage_UnmarshalJSON(t *testing.T) { tests := []struct { name string json string want ResponsesInputMessage wantErr bool }{ { name: "text content", json: `{"type": "message", "role": "user", "content": [{"type": "input_text", "text": "hello"}]}`, want: ResponsesInputMessage{ Type: "message", Role: "user", Content: []ResponsesContent{ResponsesTextContent{Type: "input_text", Text: "hello"}}, }, }, { name: "image content", json: `{"type": "message", "role": "user", "content": [{"type": "input_image", "detail": "auto", "image_url": "https://example.com/img.png"}]}`, want: ResponsesInputMessage{ Type: "message", Role: "user", Content: []ResponsesContent{ResponsesImageContent{ Type: "input_image", Detail: "auto", ImageURL: "https://example.com/img.png", }}, }, }, { name: "multiple content items", json: `{"type": "message", "role": "user", "content": [{"type": "input_text", "text": "hello"}, {"type": "input_text", "text": "world"}]}`, want: ResponsesInputMessage{ Type: "message", Role: "user", Content: []ResponsesContent{ ResponsesTextContent{Type: "input_text", Text: "hello"}, ResponsesTextContent{Type: "input_text", Text: "world"}, }, }, }, { name: "unknown content type", json: `{"type": "message", "role": "user", "content": [{"type": "unknown"}]}`, wantErr: true, }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { var got ResponsesInputMessage err := json.Unmarshal([]byte(tt.json), &got) if tt.wantErr { if err == nil { t.Error("expected error, got nil") } return } if err != nil { t.Fatalf("unexpected error: %v", err) } if got.Type != tt.want.Type { t.Errorf("Type = %q, want %q", got.Type, tt.want.Type) } if got.Role != tt.want.Role { t.Errorf("Role = %q, want %q", got.Role, tt.want.Role) } if len(got.Content) != len(tt.want.Content) { t.Fatalf("len(Content) = %d, want %d", len(got.Content), len(tt.want.Content)) } for i := range 
tt.want.Content { switch wantContent := tt.want.Content[i].(type) { case ResponsesTextContent: gotContent, ok := got.Content[i].(ResponsesTextContent) if !ok { t.Fatalf("Content[%d] type = %T, want ResponsesTextContent", i, got.Content[i]) } if gotContent != wantContent { t.Errorf("Content[%d] = %+v, want %+v", i, gotContent, wantContent) } case ResponsesImageContent: gotContent, ok := got.Content[i].(ResponsesImageContent) if !ok { t.Fatalf("Content[%d] type = %T, want ResponsesImageContent", i, got.Content[i]) } if gotContent != wantContent { t.Errorf("Content[%d] = %+v, want %+v", i, gotContent, wantContent) } } } }) } } func TestResponsesInput_UnmarshalJSON(t *testing.T) { tests := []struct { name string json string wantText string wantItems int wantErr bool }{ { name: "plain string", json: `"hello world"`, wantText: "hello world", }, { name: "array with one message", json: `[{"type": "message", "role": "user", "content": [{"type": "input_text", "text": "hello"}]}]`, wantItems: 1, }, { name: "array with multiple messages", json: `[{"type": "message", "role": "system", "content": [{"type": "input_text", "text": "you are helpful"}]}, {"type": "message", "role": "user", "content": [{"type": "input_text", "text": "hello"}]}]`, wantItems: 2, }, { name: "invalid input", json: `123`, wantErr: true, }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { var got ResponsesInput err := json.Unmarshal([]byte(tt.json), &got) if tt.wantErr { if err == nil { t.Error("expected error, got nil") } return } if err != nil { t.Fatalf("unexpected error: %v", err) } if got.Text != tt.wantText { t.Errorf("Text = %q, want %q", got.Text, tt.wantText) } if len(got.Items) != tt.wantItems { t.Errorf("len(Items) = %d, want %d", len(got.Items), tt.wantItems) } }) } } func TestUnmarshalResponsesInputItem(t *testing.T) { t.Run("message item", func(t *testing.T) { got, err := unmarshalResponsesInputItem([]byte(`{"type": "message", "role": "user", "content": [{"type": "input_text", 
"text": "hello"}]}`)) if err != nil { t.Fatalf("unexpected error: %v", err) } msg, ok := got.(ResponsesInputMessage) if !ok { t.Fatalf("got type %T, want ResponsesInputMessage", got) } if msg.Role != "user" { t.Errorf("Role = %q, want %q", msg.Role, "user") } }) t.Run("function_call item", func(t *testing.T) { got, err := unmarshalResponsesInputItem([]byte(`{"type": "function_call", "call_id": "call_abc123", "name": "get_weather", "arguments": "{\"city\":\"Paris\"}"}`)) if err != nil { t.Fatalf("unexpected error: %v", err) } fc, ok := got.(ResponsesFunctionCall) if !ok { t.Fatalf("got type %T, want ResponsesFunctionCall", got) } if fc.Type != "function_call" { t.Errorf("Type = %q, want %q", fc.Type, "function_call") } if fc.CallID != "call_abc123" { t.Errorf("CallID = %q, want %q", fc.CallID, "call_abc123") } if fc.Name != "get_weather" { t.Errorf("Name = %q, want %q", fc.Name, "get_weather") } }) t.Run("function_call_output item", func(t *testing.T) { got, err := unmarshalResponsesInputItem([]byte(`{"type": "function_call_output", "call_id": "call_abc123", "output": "the result"}`)) if err != nil { t.Fatalf("unexpected error: %v", err) } output, ok := got.(ResponsesFunctionCallOutput) if !ok { t.Fatalf("got type %T, want ResponsesFunctionCallOutput", got) } if output.Type != "function_call_output" { t.Errorf("Type = %q, want %q", output.Type, "function_call_output") } if output.CallID != "call_abc123" { t.Errorf("CallID = %q, want %q", output.CallID, "call_abc123") } if output.Output != "the result" { t.Errorf("Output = %q, want %q", output.Output, "the result") } }) t.Run("unknown item type", func(t *testing.T) { _, err := unmarshalResponsesInputItem([]byte(`{"type": "unknown_type"}`)) if err == nil { t.Error("expected error, got nil") } }) } func TestResponsesRequest_UnmarshalJSON(t *testing.T) { tests := []struct { name string json string check func(t *testing.T, req ResponsesRequest) wantErr bool }{ { name: "simple string input", json: `{"model": 
"gpt-oss:20b", "input": "hello"}`, check: func(t *testing.T, req ResponsesRequest) { if req.Model != "gpt-oss:20b" { t.Errorf("Model = %q, want %q", req.Model, "gpt-oss:20b") } if req.Input.Text != "hello" { t.Errorf("Input.Text = %q, want %q", req.Input.Text, "hello") } }, }, { name: "array input with messages", json: `{"model": "gpt-oss:20b", "input": [{"type": "message", "role": "user", "content": [{"type": "input_text", "text": "hello"}]}]}`, check: func(t *testing.T, req ResponsesRequest) { if len(req.Input.Items) != 1 { t.Fatalf("len(Input.Items) = %d, want 1", len(req.Input.Items)) } msg, ok := req.Input.Items[0].(ResponsesInputMessage) if !ok { t.Fatalf("Input.Items[0] type = %T, want ResponsesInputMessage", req.Input.Items[0]) } if msg.Role != "user" { t.Errorf("Role = %q, want %q", msg.Role, "user") } }, }, { name: "with temperature", json: `{"model": "gpt-oss:20b", "input": "hello", "temperature": 0.5}`, check: func(t *testing.T, req ResponsesRequest) { if req.Temperature == nil || *req.Temperature != 0.5 { t.Errorf("Temperature = %v, want 0.5", req.Temperature) } }, }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { var got ResponsesRequest err := json.Unmarshal([]byte(tt.json), &got) if tt.wantErr { if err == nil { t.Error("expected error, got nil") } return } if err != nil { t.Fatalf("unexpected error: %v", err) } if tt.check != nil { tt.check(t, got) } }) } } func TestFromResponsesRequest_Tools(t *testing.T) { reqJSON := `{ "model": "gpt-oss:20b", "input": "hello", "tools": [ { "type": "function", "name": "shell", "description": "Runs a shell command", "strict": false, "parameters": { "type": "object", "properties": { "command": { "type": "array", "items": {"type": "string"}, "description": "The command to execute" } }, "required": ["command"] } } ] }` var req ResponsesRequest if err := json.Unmarshal([]byte(reqJSON), &req); err != nil { t.Fatalf("failed to unmarshal request: %v", err) } // Check that tools were parsed if 
len(req.Tools) != 1 { t.Fatalf("expected 1 tool, got %d", len(req.Tools)) } if req.Tools[0].Name != "shell" { t.Errorf("expected tool name 'shell', got %q", req.Tools[0].Name) } // Convert and check chatReq, err := FromResponsesRequest(req) if err != nil { t.Fatalf("failed to convert request: %v", err) } if len(chatReq.Tools) != 1 { t.Fatalf("expected 1 converted tool, got %d", len(chatReq.Tools)) } tool := chatReq.Tools[0] if tool.Type != "function" { t.Errorf("expected tool type 'function', got %q", tool.Type) } if tool.Function.Name != "shell" { t.Errorf("expected function name 'shell', got %q", tool.Function.Name) } if tool.Function.Description != "Runs a shell command" { t.Errorf("expected function description 'Runs a shell command', got %q", tool.Function.Description) } if tool.Function.Parameters.Type != "object" { t.Errorf("expected parameters type 'object', got %q", tool.Function.Parameters.Type) } if len(tool.Function.Parameters.Required) != 1 || tool.Function.Parameters.Required[0] != "command" { t.Errorf("expected required ['command'], got %v", tool.Function.Parameters.Required) } } func TestFromResponsesRequest_FunctionCallOutput(t *testing.T) { // Test a complete tool call round-trip: // 1. User message asking about weather // 2. Assistant's function call (from previous response) // 3. 
Function call output (the tool result) reqJSON := `{ "model": "gpt-oss:20b", "input": [ {"type": "message", "role": "user", "content": [{"type": "input_text", "text": "what is the weather?"}]}, {"type": "function_call", "call_id": "call_abc123", "name": "get_weather", "arguments": "{\"city\":\"Paris\"}"}, {"type": "function_call_output", "call_id": "call_abc123", "output": "sunny, 72F"} ] }` var req ResponsesRequest if err := json.Unmarshal([]byte(reqJSON), &req); err != nil { t.Fatalf("failed to unmarshal request: %v", err) } // Check that input items were parsed if len(req.Input.Items) != 3 { t.Fatalf("expected 3 input items, got %d", len(req.Input.Items)) } // Verify the function_call item fc, ok := req.Input.Items[1].(ResponsesFunctionCall) if !ok { t.Fatalf("Input.Items[1] type = %T, want ResponsesFunctionCall", req.Input.Items[1]) } if fc.Name != "get_weather" { t.Errorf("Name = %q, want %q", fc.Name, "get_weather") } // Verify the function_call_output item fcOutput, ok := req.Input.Items[2].(ResponsesFunctionCallOutput) if !ok { t.Fatalf("Input.Items[2] type = %T, want ResponsesFunctionCallOutput", req.Input.Items[2]) } if fcOutput.CallID != "call_abc123" { t.Errorf("CallID = %q, want %q", fcOutput.CallID, "call_abc123") } // Convert and check chatReq, err := FromResponsesRequest(req) if err != nil { t.Fatalf("failed to convert request: %v", err) } if len(chatReq.Messages) != 3 { t.Fatalf("expected 3 messages, got %d", len(chatReq.Messages)) } // Check the user message userMsg := chatReq.Messages[0] if userMsg.Role != "user" { t.Errorf("expected role 'user', got %q", userMsg.Role) } // Check the assistant message with tool call assistantMsg := chatReq.Messages[1] if assistantMsg.Role != "assistant" { t.Errorf("expected role 'assistant', got %q", assistantMsg.Role) } if len(assistantMsg.ToolCalls) != 1 { t.Fatalf("expected 1 tool call, got %d", len(assistantMsg.ToolCalls)) } if assistantMsg.ToolCalls[0].ID != "call_abc123" { t.Errorf("expected tool call ID 
'call_abc123', got %q", assistantMsg.ToolCalls[0].ID) } if assistantMsg.ToolCalls[0].Function.Name != "get_weather" { t.Errorf("expected function name 'get_weather', got %q", assistantMsg.ToolCalls[0].Function.Name) } // Check the tool response message toolMsg := chatReq.Messages[2] if toolMsg.Role != "tool" { t.Errorf("expected role 'tool', got %q", toolMsg.Role) } if toolMsg.Content != "sunny, 72F" { t.Errorf("expected content 'sunny, 72F', got %q", toolMsg.Content) } if toolMsg.ToolCallID != "call_abc123" { t.Errorf("expected ToolCallID 'call_abc123', got %q", toolMsg.ToolCallID) } } func TestFromResponsesRequest_FunctionCallMerge(t *testing.T) { t.Run("function call merges with preceding assistant message", func(t *testing.T) { // When assistant message has content followed by function_call, // they should be merged into a single message reqJSON := `{ "model": "gpt-oss:20b", "input": [ {"type": "message", "role": "user", "content": [{"type": "input_text", "text": "what is the weather?"}]}, {"type": "message", "role": "assistant", "content": [{"type": "output_text", "text": "I'll check the weather for you."}]}, {"type": "function_call", "call_id": "call_abc123", "name": "get_weather", "arguments": "{\"city\":\"Paris\"}"} ] }` var req ResponsesRequest if err := json.Unmarshal([]byte(reqJSON), &req); err != nil { t.Fatalf("failed to unmarshal request: %v", err) } chatReq, err := FromResponsesRequest(req) if err != nil { t.Fatalf("failed to convert request: %v", err) } // Should have 2 messages: user and assistant (with content + tool call merged) if len(chatReq.Messages) != 2 { t.Fatalf("expected 2 messages, got %d", len(chatReq.Messages)) } // Check user message if chatReq.Messages[0].Role != "user" { t.Errorf("Messages[0].Role = %q, want %q", chatReq.Messages[0].Role, "user") } // Check assistant message has both content and tool call assistantMsg := chatReq.Messages[1] if assistantMsg.Role != "assistant" { t.Errorf("Messages[1].Role = %q, want %q", 
assistantMsg.Role, "assistant") } if assistantMsg.Content != "I'll check the weather for you." { t.Errorf("Messages[1].Content = %q, want %q", assistantMsg.Content, "I'll check the weather for you.") } if len(assistantMsg.ToolCalls) != 1 { t.Fatalf("expected 1 tool call, got %d", len(assistantMsg.ToolCalls)) } if assistantMsg.ToolCalls[0].Function.Name != "get_weather" { t.Errorf("ToolCalls[0].Function.Name = %q, want %q", assistantMsg.ToolCalls[0].Function.Name, "get_weather") } }) t.Run("function call without preceding assistant creates new message", func(t *testing.T) { // When there's no preceding assistant message, function_call creates its own message reqJSON := `{ "model": "gpt-oss:20b", "input": [ {"type": "message", "role": "user", "content": [{"type": "input_text", "text": "what is the weather?"}]}, {"type": "function_call", "call_id": "call_abc123", "name": "get_weather", "arguments": "{\"city\":\"Paris\"}"} ] }` var req ResponsesRequest if err := json.Unmarshal([]byte(reqJSON), &req); err != nil { t.Fatalf("failed to unmarshal request: %v", err) } chatReq, err := FromResponsesRequest(req) if err != nil { t.Fatalf("failed to convert request: %v", err) } // Should have 2 messages: user and assistant (tool call only) if len(chatReq.Messages) != 2 { t.Fatalf("expected 2 messages, got %d", len(chatReq.Messages)) } // Check assistant message has tool call but no content assistantMsg := chatReq.Messages[1] if assistantMsg.Role != "assistant" { t.Errorf("Messages[1].Role = %q, want %q", assistantMsg.Role, "assistant") } if assistantMsg.Content != "" { t.Errorf("Messages[1].Content = %q, want empty", assistantMsg.Content) } if len(assistantMsg.ToolCalls) != 1 { t.Fatalf("expected 1 tool call, got %d", len(assistantMsg.ToolCalls)) } }) t.Run("multiple function calls merge into same assistant message", func(t *testing.T) { // Multiple consecutive function_calls should all merge into the same assistant message reqJSON := `{ "model": "gpt-oss:20b", "input": [ 
{"type": "message", "role": "user", "content": [{"type": "input_text", "text": "check weather and time"}]}, {"type": "message", "role": "assistant", "content": [{"type": "output_text", "text": "I'll check both."}]}, {"type": "function_call", "call_id": "call_1", "name": "get_weather", "arguments": "{\"city\":\"Paris\"}"}, {"type": "function_call", "call_id": "call_2", "name": "get_time", "arguments": "{\"city\":\"Paris\"}"} ] }` var req ResponsesRequest if err := json.Unmarshal([]byte(reqJSON), &req); err != nil { t.Fatalf("failed to unmarshal request: %v", err) } chatReq, err := FromResponsesRequest(req) if err != nil { t.Fatalf("failed to convert request: %v", err) } // Should have 2 messages: user and assistant (content + both tool calls) if len(chatReq.Messages) != 2 { t.Fatalf("expected 2 messages, got %d", len(chatReq.Messages)) } // Assistant has content + both tool calls assistantMsg := chatReq.Messages[1] if assistantMsg.Content != "I'll check both." { t.Errorf("Messages[1].Content = %q, want %q", assistantMsg.Content, "I'll check both.") } if len(assistantMsg.ToolCalls) != 2 { t.Fatalf("expected 2 tool calls, got %d", len(assistantMsg.ToolCalls)) } if assistantMsg.ToolCalls[0].Function.Name != "get_weather" { t.Errorf("ToolCalls[0].Function.Name = %q, want %q", assistantMsg.ToolCalls[0].Function.Name, "get_weather") } if assistantMsg.ToolCalls[1].Function.Name != "get_time" { t.Errorf("ToolCalls[1].Function.Name = %q, want %q", assistantMsg.ToolCalls[1].Function.Name, "get_time") } }) t.Run("new assistant message starts fresh tool call group", func(t *testing.T) { // assistant → tool_call → tool_call → assistant → tool_call // Should result in 2 assistant messages with their respective tool calls reqJSON := `{ "model": "gpt-oss:20b", "input": [ {"type": "message", "role": "user", "content": [{"type": "input_text", "text": "do multiple things"}]}, {"type": "message", "role": "assistant", "content": [{"type": "output_text", "text": "First batch."}]}, 
{"type": "function_call", "call_id": "call_1", "name": "func_a", "arguments": "{}"}, {"type": "function_call", "call_id": "call_2", "name": "func_b", "arguments": "{}"}, {"type": "message", "role": "assistant", "content": [{"type": "output_text", "text": "Second batch."}]}, {"type": "function_call", "call_id": "call_3", "name": "func_c", "arguments": "{}"} ] }` var req ResponsesRequest if err := json.Unmarshal([]byte(reqJSON), &req); err != nil { t.Fatalf("failed to unmarshal request: %v", err) } chatReq, err := FromResponsesRequest(req) if err != nil { t.Fatalf("failed to convert request: %v", err) } // Should have 3 messages: // 1. user // 2. assistant "First batch." + tool calls [func_a, func_b] // 3. assistant "Second batch." + tool calls [func_c] if len(chatReq.Messages) != 3 { t.Fatalf("expected 3 messages, got %d", len(chatReq.Messages)) } asst1 := chatReq.Messages[1] if asst1.Content != "First batch." { t.Errorf("Messages[1].Content = %q, want %q", asst1.Content, "First batch.") } if len(asst1.ToolCalls) != 2 { t.Fatalf("expected 2 tool calls in Messages[1], got %d", len(asst1.ToolCalls)) } if asst1.ToolCalls[0].Function.Name != "func_a" { t.Errorf("Messages[1].ToolCalls[0] = %q, want %q", asst1.ToolCalls[0].Function.Name, "func_a") } if asst1.ToolCalls[1].Function.Name != "func_b" { t.Errorf("Messages[1].ToolCalls[1] = %q, want %q", asst1.ToolCalls[1].Function.Name, "func_b") } asst2 := chatReq.Messages[2] if asst2.Content != "Second batch." 
{ t.Errorf("Messages[2].Content = %q, want %q", asst2.Content, "Second batch.") } if len(asst2.ToolCalls) != 1 { t.Fatalf("expected 1 tool call in Messages[2], got %d", len(asst2.ToolCalls)) } if asst2.ToolCalls[0].Function.Name != "func_c" { t.Errorf("Messages[2].ToolCalls[0] = %q, want %q", asst2.ToolCalls[0].Function.Name, "func_c") } }) t.Run("function call merges with assistant that has thinking", func(t *testing.T) { // reasoning → assistant (gets thinking) → function_call → should merge reqJSON := `{ "model": "gpt-oss:20b", "input": [ {"type": "message", "role": "user", "content": [{"type": "input_text", "text": "think and act"}]}, {"type": "reasoning", "id": "rs_1", "encrypted_content": "Let me think...", "summary": []}, {"type": "message", "role": "assistant", "content": [{"type": "output_text", "text": "I thought about it."}]}, {"type": "function_call", "call_id": "call_1", "name": "do_thing", "arguments": "{}"} ] }` var req ResponsesRequest if err := json.Unmarshal([]byte(reqJSON), &req); err != nil { t.Fatalf("failed to unmarshal request: %v", err) } chatReq, err := FromResponsesRequest(req) if err != nil { t.Fatalf("failed to convert request: %v", err) } // Should have 2 messages: user and assistant (thinking + content + tool call) if len(chatReq.Messages) != 2 { t.Fatalf("expected 2 messages, got %d", len(chatReq.Messages)) } asst := chatReq.Messages[1] if asst.Thinking != "Let me think..." { t.Errorf("Messages[1].Thinking = %q, want %q", asst.Thinking, "Let me think...") } if asst.Content != "I thought about it." { t.Errorf("Messages[1].Content = %q, want %q", asst.Content, "I thought about it.") } if len(asst.ToolCalls) != 1 { t.Fatalf("expected 1 tool call, got %d", len(asst.ToolCalls)) } if asst.ToolCalls[0].Function.Name != "do_thing" { t.Errorf("ToolCalls[0].Function.Name = %q, want %q", asst.ToolCalls[0].Function.Name, "do_thing") } }) t.Run("mixed thinking and content with multiple tool calls", func(t *testing.T) { // Test: // 1. 
reasoning → assistant (empty content, gets thinking) → tc (merges) // 2. assistant with content → tc → tc (both merge) // Result: 2 assistant messages reqJSON := `{ "model": "gpt-oss:20b", "input": [ {"type": "message", "role": "user", "content": [{"type": "input_text", "text": "complex task"}]}, {"type": "reasoning", "id": "rs_1", "encrypted_content": "Thinking first...", "summary": []}, {"type": "message", "role": "assistant", "content": ""}, {"type": "function_call", "call_id": "call_1", "name": "think_action", "arguments": "{}"}, {"type": "message", "role": "assistant", "content": [{"type": "output_text", "text": "Now doing more."}]}, {"type": "function_call", "call_id": "call_2", "name": "action_a", "arguments": "{}"}, {"type": "function_call", "call_id": "call_3", "name": "action_b", "arguments": "{}"} ] }` var req ResponsesRequest if err := json.Unmarshal([]byte(reqJSON), &req); err != nil { t.Fatalf("failed to unmarshal request: %v", err) } chatReq, err := FromResponsesRequest(req) if err != nil { t.Fatalf("failed to convert request: %v", err) } // Should have 3 messages: // 1. user // 2. assistant with thinking + tool call [think_action] // 3. assistant with content "Now doing more." + tool calls [action_a, action_b] if len(chatReq.Messages) != 3 { t.Fatalf("expected 3 messages, got %d", len(chatReq.Messages)) } // First assistant: thinking + tool call asst1 := chatReq.Messages[1] if asst1.Thinking != "Thinking first..." 
{ t.Errorf("Messages[1].Thinking = %q, want %q", asst1.Thinking, "Thinking first...") } if asst1.Content != "" { t.Errorf("Messages[1].Content = %q, want empty", asst1.Content) } if len(asst1.ToolCalls) != 1 { t.Fatalf("expected 1 tool call in Messages[1], got %d", len(asst1.ToolCalls)) } if asst1.ToolCalls[0].Function.Name != "think_action" { t.Errorf("Messages[1].ToolCalls[0] = %q, want %q", asst1.ToolCalls[0].Function.Name, "think_action") } // Second assistant: content + 2 tool calls asst2 := chatReq.Messages[2] if asst2.Content != "Now doing more." { t.Errorf("Messages[2].Content = %q, want %q", asst2.Content, "Now doing more.") } if len(asst2.ToolCalls) != 2 { t.Fatalf("expected 2 tool calls in Messages[2], got %d", len(asst2.ToolCalls)) } if asst2.ToolCalls[0].Function.Name != "action_a" { t.Errorf("Messages[2].ToolCalls[0] = %q, want %q", asst2.ToolCalls[0].Function.Name, "action_a") } if asst2.ToolCalls[1].Function.Name != "action_b" { t.Errorf("Messages[2].ToolCalls[1] = %q, want %q", asst2.ToolCalls[1].Function.Name, "action_b") } }) } func TestDecodeImageURL(t *testing.T) { // Valid PNG base64 (1x1 red pixel) validPNG := "data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAYAAAAfFcSJAAAADUlEQVR42mP8z8DwHwAFBQIAX8jx0gAAAABJRU5ErkJggg==" t.Run("valid png", func(t *testing.T) { img, err := decodeImageURL(validPNG) if err != nil { t.Fatalf("unexpected error: %v", err) } if len(img) == 0 { t.Error("expected non-empty image data") } }) t.Run("valid jpeg", func(t *testing.T) { // Just test the prefix validation with minimal base64 _, err := decodeImageURL("data:image/jpeg;base64,/9j/4AAQSkZJRg==") if err != nil { t.Fatalf("unexpected error: %v", err) } }) t.Run("blank mime type", func(t *testing.T) { _, err := decodeImageURL("data:;base64,dGVzdA==") if err != nil { t.Fatalf("unexpected error: %v", err) } }) t.Run("invalid mime type", func(t *testing.T) { _, err := 
decodeImageURL("data:image/gif;base64,R0lGODlhAQABAIAAAAAAAP///yH5BAEAAAAALAAAAAABAAEAAAIBRAA7") if err == nil { t.Error("expected error for unsupported mime type") } }) t.Run("invalid base64", func(t *testing.T) { _, err := decodeImageURL("data:image/png;base64,not-valid-base64!") if err == nil { t.Error("expected error for invalid base64") } }) t.Run("not a data url", func(t *testing.T) { _, err := decodeImageURL("https://example.com/image.png") if err == nil { t.Error("expected error for non-data URL") } }) } func TestFromResponsesRequest_Images(t *testing.T) { // 1x1 red PNG pixel pngBase64 := "iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAYAAAAfFcSJAAAADUlEQVR42mP8z8DwHwAFBQIAX8jx0gAAAABJRU5ErkJggg==" reqJSON := `{ "model": "llava", "input": [ {"type": "message", "role": "user", "content": [ {"type": "input_text", "text": "What is in this image?"}, {"type": "input_image", "detail": "auto", "image_url": "data:image/png;base64,` + pngBase64 + `"} ]} ] }` var req ResponsesRequest if err := json.Unmarshal([]byte(reqJSON), &req); err != nil { t.Fatalf("failed to unmarshal request: %v", err) } chatReq, err := FromResponsesRequest(req) if err != nil { t.Fatalf("failed to convert request: %v", err) } if len(chatReq.Messages) != 1 { t.Fatalf("expected 1 message, got %d", len(chatReq.Messages)) } msg := chatReq.Messages[0] if msg.Role != "user" { t.Errorf("expected role 'user', got %q", msg.Role) } if msg.Content != "What is in this image?" 
{ t.Errorf("expected content 'What is in this image?', got %q", msg.Content) } if len(msg.Images) != 1 { t.Fatalf("expected 1 image, got %d", len(msg.Images)) } if len(msg.Images[0]) == 0 { t.Error("expected non-empty image data") } } func TestResponsesStreamConverter_TextOnly(t *testing.T) { converter := NewResponsesStreamConverter("resp_123", "msg_456", "gpt-oss:20b") // First chunk with content events := converter.Process(api.ChatResponse{ Message: api.Message{ Content: "Hello", }, }) // Should have: response.created, response.in_progress, output_item.added, content_part.added, output_text.delta if len(events) != 5 { t.Fatalf("expected 5 events, got %d", len(events)) } if events[0].Event != "response.created" { t.Errorf("events[0].Event = %q, want %q", events[0].Event, "response.created") } if events[1].Event != "response.in_progress" { t.Errorf("events[1].Event = %q, want %q", events[1].Event, "response.in_progress") } if events[2].Event != "response.output_item.added" { t.Errorf("events[2].Event = %q, want %q", events[2].Event, "response.output_item.added") } if events[3].Event != "response.content_part.added" { t.Errorf("events[3].Event = %q, want %q", events[3].Event, "response.content_part.added") } if events[4].Event != "response.output_text.delta" { t.Errorf("events[4].Event = %q, want %q", events[4].Event, "response.output_text.delta") } // Second chunk with more content events = converter.Process(api.ChatResponse{ Message: api.Message{ Content: " World", }, }) // Should only have output_text.delta (no more created/in_progress/added) if len(events) != 1 { t.Fatalf("expected 1 event, got %d", len(events)) } if events[0].Event != "response.output_text.delta" { t.Errorf("events[0].Event = %q, want %q", events[0].Event, "response.output_text.delta") } // Final chunk events = converter.Process(api.ChatResponse{ Message: api.Message{}, Done: true, }) // Should have: output_text.done, content_part.done, output_item.done, response.completed if len(events) != 4 { 
t.Fatalf("expected 4 events, got %d", len(events)) } if events[0].Event != "response.output_text.done" { t.Errorf("events[0].Event = %q, want %q", events[0].Event, "response.output_text.done") } // Check that accumulated text is present data := events[0].Data.(map[string]any) if data["text"] != "Hello World" { t.Errorf("accumulated text = %q, want %q", data["text"], "Hello World") } } func TestResponsesStreamConverter_ToolCalls(t *testing.T) { converter := NewResponsesStreamConverter("resp_123", "msg_456", "gpt-oss:20b") events := converter.Process(api.ChatResponse{ Message: api.Message{ ToolCalls: []api.ToolCall{ { ID: "call_abc", Function: api.ToolCallFunction{ Name: "get_weather", Arguments: testArgs(map[string]any{"city": "Paris"}), }, }, }, }, }) // Should have: created, in_progress, output_item.added, arguments.delta, arguments.done, output_item.done if len(events) != 6 { t.Fatalf("expected 6 events, got %d", len(events)) } if events[2].Event != "response.output_item.added" { t.Errorf("events[2].Event = %q, want %q", events[2].Event, "response.output_item.added") } if events[3].Event != "response.function_call_arguments.delta" { t.Errorf("events[3].Event = %q, want %q", events[3].Event, "response.function_call_arguments.delta") } if events[4].Event != "response.function_call_arguments.done" { t.Errorf("events[4].Event = %q, want %q", events[4].Event, "response.function_call_arguments.done") } if events[5].Event != "response.output_item.done" { t.Errorf("events[5].Event = %q, want %q", events[5].Event, "response.output_item.done") } } func TestResponsesStreamConverter_Reasoning(t *testing.T) { converter := NewResponsesStreamConverter("resp_123", "msg_456", "gpt-oss:20b") // First chunk with thinking events := converter.Process(api.ChatResponse{ Message: api.Message{ Thinking: "Let me think...", }, }) // Should have: created, in_progress, output_item.added (reasoning), reasoning_summary_text.delta if len(events) != 4 { t.Fatalf("expected 4 events, got %d", 
len(events)) } if events[2].Event != "response.output_item.added" { t.Errorf("events[2].Event = %q, want %q", events[2].Event, "response.output_item.added") } // Check it's a reasoning item data := events[2].Data.(map[string]any) item := data["item"].(map[string]any) if item["type"] != "reasoning" { t.Errorf("item type = %q, want %q", item["type"], "reasoning") } if events[3].Event != "response.reasoning_summary_text.delta" { t.Errorf("events[3].Event = %q, want %q", events[3].Event, "response.reasoning_summary_text.delta") } // Second chunk with text content (reasoning should close first) events = converter.Process(api.ChatResponse{ Message: api.Message{ Content: "The answer is 42", }, }) // Should have: reasoning_summary_text.done, output_item.done (reasoning), output_item.added (message), content_part.added, output_text.delta if len(events) != 5 { t.Fatalf("expected 5 events, got %d", len(events)) } if events[0].Event != "response.reasoning_summary_text.done" {
go
MIT
626af2d80973270c4d59b8df7153ac47ad67ed7b
2026-01-07T08:35:43.337630Z
true
ollama/ollama
https://github.com/ollama/ollama/blob/626af2d80973270c4d59b8df7153ac47ad67ed7b/openai/openai.go
openai/openai.go
// openai package provides core transformation logic for partial compatibility with the OpenAI REST API package openai import ( "bytes" "encoding/base64" "encoding/binary" "encoding/json" "errors" "fmt" "log/slog" "net/http" "slices" "strings" "time" "github.com/ollama/ollama/api" "github.com/ollama/ollama/types/model" ) var finishReasonToolCalls = "tool_calls" type Error struct { Message string `json:"message"` Type string `json:"type"` Param any `json:"param"` Code *string `json:"code"` } type ErrorResponse struct { Error Error `json:"error"` } type Message struct { Role string `json:"role"` Content any `json:"content"` Reasoning string `json:"reasoning,omitempty"` ToolCalls []ToolCall `json:"tool_calls,omitempty"` Name string `json:"name,omitempty"` ToolCallID string `json:"tool_call_id,omitempty"` } type ChoiceLogprobs struct { Content []api.Logprob `json:"content"` } type Choice struct { Index int `json:"index"` Message Message `json:"message"` FinishReason *string `json:"finish_reason"` Logprobs *ChoiceLogprobs `json:"logprobs,omitempty"` } type ChunkChoice struct { Index int `json:"index"` Delta Message `json:"delta"` FinishReason *string `json:"finish_reason"` Logprobs *ChoiceLogprobs `json:"logprobs,omitempty"` } type CompleteChunkChoice struct { Text string `json:"text"` Index int `json:"index"` FinishReason *string `json:"finish_reason"` Logprobs *ChoiceLogprobs `json:"logprobs,omitempty"` } type Usage struct { PromptTokens int `json:"prompt_tokens"` CompletionTokens int `json:"completion_tokens"` TotalTokens int `json:"total_tokens"` } type ResponseFormat struct { Type string `json:"type"` JsonSchema *JsonSchema `json:"json_schema,omitempty"` } type JsonSchema struct { Schema json.RawMessage `json:"schema"` } type EmbedRequest struct { Input any `json:"input"` Model string `json:"model"` Dimensions int `json:"dimensions,omitempty"` EncodingFormat string `json:"encoding_format,omitempty"` // "float" or "base64" } type StreamOptions struct { IncludeUsage 
bool `json:"include_usage"` } type Reasoning struct { Effort string `json:"effort,omitempty"` } type ChatCompletionRequest struct { Model string `json:"model"` Messages []Message `json:"messages"` Stream bool `json:"stream"` StreamOptions *StreamOptions `json:"stream_options"` MaxTokens *int `json:"max_tokens"` Seed *int `json:"seed"` Stop any `json:"stop"` Temperature *float64 `json:"temperature"` FrequencyPenalty *float64 `json:"frequency_penalty"` PresencePenalty *float64 `json:"presence_penalty"` TopP *float64 `json:"top_p"` ResponseFormat *ResponseFormat `json:"response_format"` Tools []api.Tool `json:"tools"` Reasoning *Reasoning `json:"reasoning,omitempty"` ReasoningEffort *string `json:"reasoning_effort,omitempty"` Logprobs *bool `json:"logprobs"` TopLogprobs int `json:"top_logprobs"` DebugRenderOnly bool `json:"_debug_render_only"` } type ChatCompletion struct { Id string `json:"id"` Object string `json:"object"` Created int64 `json:"created"` Model string `json:"model"` SystemFingerprint string `json:"system_fingerprint"` Choices []Choice `json:"choices"` Usage Usage `json:"usage,omitempty"` DebugInfo *api.DebugInfo `json:"_debug_info,omitempty"` } type ChatCompletionChunk struct { Id string `json:"id"` Object string `json:"object"` Created int64 `json:"created"` Model string `json:"model"` SystemFingerprint string `json:"system_fingerprint"` Choices []ChunkChoice `json:"choices"` Usage *Usage `json:"usage,omitempty"` } // TODO (https://github.com/ollama/ollama/issues/5259): support []string, []int and [][]int type CompletionRequest struct { Model string `json:"model"` Prompt string `json:"prompt"` FrequencyPenalty float32 `json:"frequency_penalty"` MaxTokens *int `json:"max_tokens"` PresencePenalty float32 `json:"presence_penalty"` Seed *int `json:"seed"` Stop any `json:"stop"` Stream bool `json:"stream"` StreamOptions *StreamOptions `json:"stream_options"` Temperature *float32 `json:"temperature"` TopP float32 `json:"top_p"` Suffix string 
`json:"suffix"` Logprobs *int `json:"logprobs"` DebugRenderOnly bool `json:"_debug_render_only"` } type Completion struct { Id string `json:"id"` Object string `json:"object"` Created int64 `json:"created"` Model string `json:"model"` SystemFingerprint string `json:"system_fingerprint"` Choices []CompleteChunkChoice `json:"choices"` Usage Usage `json:"usage,omitempty"` } type CompletionChunk struct { Id string `json:"id"` Object string `json:"object"` Created int64 `json:"created"` Choices []CompleteChunkChoice `json:"choices"` Model string `json:"model"` SystemFingerprint string `json:"system_fingerprint"` Usage *Usage `json:"usage,omitempty"` } type ToolCall struct { ID string `json:"id"` Index int `json:"index"` Type string `json:"type"` Function struct { Name string `json:"name"` Arguments string `json:"arguments"` } `json:"function"` } type Model struct { Id string `json:"id"` Object string `json:"object"` Created int64 `json:"created"` OwnedBy string `json:"owned_by"` } type Embedding struct { Object string `json:"object"` Embedding any `json:"embedding"` // Can be []float32 (float format) or string (base64 format) Index int `json:"index"` } type ListCompletion struct { Object string `json:"object"` Data []Model `json:"data"` } type EmbeddingList struct { Object string `json:"object"` Data []Embedding `json:"data"` Model string `json:"model"` Usage EmbeddingUsage `json:"usage,omitempty"` } type EmbeddingUsage struct { PromptTokens int `json:"prompt_tokens"` TotalTokens int `json:"total_tokens"` } func NewError(code int, message string) ErrorResponse { var etype string switch code { case http.StatusBadRequest: etype = "invalid_request_error" case http.StatusNotFound: etype = "not_found_error" default: etype = "api_error" } return ErrorResponse{Error{Type: etype, Message: message}} } // ToUsage converts an api.ChatResponse to Usage func ToUsage(r api.ChatResponse) Usage { return Usage{ PromptTokens: r.Metrics.PromptEvalCount, CompletionTokens: 
r.Metrics.EvalCount, TotalTokens: r.Metrics.PromptEvalCount + r.Metrics.EvalCount, } } // ToToolCalls converts api.ToolCall to OpenAI ToolCall format func ToToolCalls(tc []api.ToolCall) []ToolCall { toolCalls := make([]ToolCall, len(tc)) for i, tc := range tc { toolCalls[i].ID = tc.ID toolCalls[i].Type = "function" toolCalls[i].Function.Name = tc.Function.Name toolCalls[i].Index = tc.Function.Index args, err := json.Marshal(tc.Function.Arguments) if err != nil { slog.Error("could not marshall function arguments to json", "error", err) continue } toolCalls[i].Function.Arguments = string(args) } return toolCalls } // ToChatCompletion converts an api.ChatResponse to ChatCompletion func ToChatCompletion(id string, r api.ChatResponse) ChatCompletion { toolCalls := ToToolCalls(r.Message.ToolCalls) var logprobs *ChoiceLogprobs if len(r.Logprobs) > 0 { logprobs = &ChoiceLogprobs{Content: r.Logprobs} } return ChatCompletion{ Id: id, Object: "chat.completion", Created: r.CreatedAt.Unix(), Model: r.Model, SystemFingerprint: "fp_ollama", Choices: []Choice{{ Index: 0, Message: Message{Role: r.Message.Role, Content: r.Message.Content, ToolCalls: toolCalls, Reasoning: r.Message.Thinking}, FinishReason: func(reason string) *string { if len(toolCalls) > 0 { reason = "tool_calls" } if len(reason) > 0 { return &reason } return nil }(r.DoneReason), Logprobs: logprobs, }}, Usage: ToUsage(r), DebugInfo: r.DebugInfo, } } // ToChunk converts an api.ChatResponse to ChatCompletionChunk func ToChunk(id string, r api.ChatResponse, toolCallSent bool) ChatCompletionChunk { toolCalls := ToToolCalls(r.Message.ToolCalls) var logprobs *ChoiceLogprobs if len(r.Logprobs) > 0 { logprobs = &ChoiceLogprobs{Content: r.Logprobs} } return ChatCompletionChunk{ Id: id, Object: "chat.completion.chunk", Created: time.Now().Unix(), Model: r.Model, SystemFingerprint: "fp_ollama", Choices: []ChunkChoice{{ Index: 0, Delta: Message{Role: "assistant", Content: r.Message.Content, ToolCalls: toolCalls, Reasoning: 
r.Message.Thinking}, FinishReason: func(reason string) *string { if len(reason) > 0 { if toolCallSent || len(toolCalls) > 0 { return &finishReasonToolCalls } return &reason } return nil }(r.DoneReason), Logprobs: logprobs, }}, } } // ToUsageGenerate converts an api.GenerateResponse to Usage func ToUsageGenerate(r api.GenerateResponse) Usage { return Usage{ PromptTokens: r.Metrics.PromptEvalCount, CompletionTokens: r.Metrics.EvalCount, TotalTokens: r.Metrics.PromptEvalCount + r.Metrics.EvalCount, } } // ToCompletion converts an api.GenerateResponse to Completion func ToCompletion(id string, r api.GenerateResponse) Completion { return Completion{ Id: id, Object: "text_completion", Created: r.CreatedAt.Unix(), Model: r.Model, SystemFingerprint: "fp_ollama", Choices: []CompleteChunkChoice{{ Text: r.Response, Index: 0, FinishReason: func(reason string) *string { if len(reason) > 0 { return &reason } return nil }(r.DoneReason), }}, Usage: ToUsageGenerate(r), } } // ToCompleteChunk converts an api.GenerateResponse to CompletionChunk func ToCompleteChunk(id string, r api.GenerateResponse) CompletionChunk { return CompletionChunk{ Id: id, Object: "text_completion", Created: time.Now().Unix(), Model: r.Model, SystemFingerprint: "fp_ollama", Choices: []CompleteChunkChoice{{ Text: r.Response, Index: 0, FinishReason: func(reason string) *string { if len(reason) > 0 { return &reason } return nil }(r.DoneReason), }}, } } // ToListCompletion converts an api.ListResponse to ListCompletion func ToListCompletion(r api.ListResponse) ListCompletion { var data []Model for _, m := range r.Models { data = append(data, Model{ Id: m.Name, Object: "model", Created: m.ModifiedAt.Unix(), OwnedBy: model.ParseName(m.Name).Namespace, }) } return ListCompletion{ Object: "list", Data: data, } } // ToEmbeddingList converts an api.EmbedResponse to EmbeddingList // encodingFormat can be "float", "base64", or empty (defaults to "float") func ToEmbeddingList(model string, r api.EmbedResponse, 
encodingFormat string) EmbeddingList { if r.Embeddings != nil { var data []Embedding for i, e := range r.Embeddings { var embedding any if strings.EqualFold(encodingFormat, "base64") { embedding = floatsToBase64(e) } else { embedding = e } data = append(data, Embedding{ Object: "embedding", Embedding: embedding, Index: i, }) } return EmbeddingList{ Object: "list", Data: data, Model: model, Usage: EmbeddingUsage{ PromptTokens: r.PromptEvalCount, TotalTokens: r.PromptEvalCount, }, } } return EmbeddingList{} } // floatsToBase64 encodes a []float32 to a base64 string func floatsToBase64(floats []float32) string { var buf bytes.Buffer binary.Write(&buf, binary.LittleEndian, floats) return base64.StdEncoding.EncodeToString(buf.Bytes()) } // ToModel converts an api.ShowResponse to Model func ToModel(r api.ShowResponse, m string) Model { return Model{ Id: m, Object: "model", Created: r.ModifiedAt.Unix(), OwnedBy: model.ParseName(m).Namespace, } } // FromChatRequest converts a ChatCompletionRequest to api.ChatRequest func FromChatRequest(r ChatCompletionRequest) (*api.ChatRequest, error) { var messages []api.Message for _, msg := range r.Messages { toolName := "" if strings.ToLower(msg.Role) == "tool" { toolName = msg.Name if toolName == "" && msg.ToolCallID != "" { toolName = nameFromToolCallID(r.Messages, msg.ToolCallID) } } switch content := msg.Content.(type) { case string: toolCalls, err := FromCompletionToolCall(msg.ToolCalls) if err != nil { return nil, err } messages = append(messages, api.Message{Role: msg.Role, Content: content, Thinking: msg.Reasoning, ToolCalls: toolCalls, ToolName: toolName, ToolCallID: msg.ToolCallID}) case []any: for _, c := range content { data, ok := c.(map[string]any) if !ok { return nil, errors.New("invalid message format") } switch data["type"] { case "text": text, ok := data["text"].(string) if !ok { return nil, errors.New("invalid message format") } messages = append(messages, api.Message{Role: msg.Role, Content: text}) case 
"image_url": var url string if urlMap, ok := data["image_url"].(map[string]any); ok { if url, ok = urlMap["url"].(string); !ok { return nil, errors.New("invalid message format") } } else { if url, ok = data["image_url"].(string); !ok { return nil, errors.New("invalid message format") } } img, err := decodeImageURL(url) if err != nil { return nil, err } messages = append(messages, api.Message{Role: msg.Role, Images: []api.ImageData{img}}) default: return nil, errors.New("invalid message format") } } // since we might have added multiple messages above, if we have tools // calls we'll add them to the last message if len(messages) > 0 && len(msg.ToolCalls) > 0 { toolCalls, err := FromCompletionToolCall(msg.ToolCalls) if err != nil { return nil, err } messages[len(messages)-1].ToolCalls = toolCalls messages[len(messages)-1].ToolName = toolName messages[len(messages)-1].ToolCallID = msg.ToolCallID messages[len(messages)-1].Thinking = msg.Reasoning } default: // content is only optional if tool calls are present if msg.ToolCalls == nil { return nil, fmt.Errorf("invalid message content type: %T", content) } toolCalls, err := FromCompletionToolCall(msg.ToolCalls) if err != nil { return nil, err } messages = append(messages, api.Message{Role: msg.Role, Thinking: msg.Reasoning, ToolCalls: toolCalls, ToolCallID: msg.ToolCallID}) } } options := make(map[string]any) switch stop := r.Stop.(type) { case string: options["stop"] = []string{stop} case []any: var stops []string for _, s := range stop { if str, ok := s.(string); ok { stops = append(stops, str) } } options["stop"] = stops } if r.MaxTokens != nil { options["num_predict"] = *r.MaxTokens } if r.Temperature != nil { options["temperature"] = *r.Temperature } else { options["temperature"] = 1.0 } if r.Seed != nil { options["seed"] = *r.Seed } if r.FrequencyPenalty != nil { options["frequency_penalty"] = *r.FrequencyPenalty } if r.PresencePenalty != nil { options["presence_penalty"] = *r.PresencePenalty } if r.TopP != nil { 
options["top_p"] = *r.TopP } else { options["top_p"] = 1.0 } var format json.RawMessage if r.ResponseFormat != nil { switch strings.ToLower(strings.TrimSpace(r.ResponseFormat.Type)) { // Support the old "json_object" type for OpenAI compatibility case "json_object": format = json.RawMessage(`"json"`) case "json_schema": if r.ResponseFormat.JsonSchema != nil { format = r.ResponseFormat.JsonSchema.Schema } } } var think *api.ThinkValue var effort string if r.Reasoning != nil { effort = r.Reasoning.Effort } else if r.ReasoningEffort != nil { effort = *r.ReasoningEffort } if effort != "" { if !slices.Contains([]string{"high", "medium", "low", "none"}, effort) { return nil, fmt.Errorf("invalid reasoning value: '%s' (must be \"high\", \"medium\", \"low\", or \"none\")", effort) } if effort == "none" { think = &api.ThinkValue{Value: false} } else { think = &api.ThinkValue{Value: effort} } } return &api.ChatRequest{ Model: r.Model, Messages: messages, Format: format, Options: options, Stream: &r.Stream, Tools: r.Tools, Think: think, Logprobs: r.Logprobs != nil && *r.Logprobs, TopLogprobs: r.TopLogprobs, DebugRenderOnly: r.DebugRenderOnly, }, nil } func nameFromToolCallID(messages []Message, toolCallID string) string { // iterate backwards to be more resilient to duplicate tool call IDs (this // follows "last one wins") for i := len(messages) - 1; i >= 0; i-- { msg := messages[i] for _, tc := range msg.ToolCalls { if tc.ID == toolCallID { return tc.Function.Name } } } return "" } // decodeImageURL decodes a base64 data URI into raw image bytes. 
func decodeImageURL(url string) (api.ImageData, error) { types := []string{"jpeg", "jpg", "png", "webp"} // Support blank mime type to match /api/chat's behavior of taking just unadorned base64 if strings.HasPrefix(url, "data:;base64,") { url = strings.TrimPrefix(url, "data:;base64,") } else { valid := false for _, t := range types { prefix := "data:image/" + t + ";base64," if strings.HasPrefix(url, prefix) { url = strings.TrimPrefix(url, prefix) valid = true break } } if !valid { return nil, errors.New("invalid image input") } } img, err := base64.StdEncoding.DecodeString(url) if err != nil { return nil, errors.New("invalid image input") } return img, nil } // FromCompletionToolCall converts OpenAI ToolCall format to api.ToolCall func FromCompletionToolCall(toolCalls []ToolCall) ([]api.ToolCall, error) { apiToolCalls := make([]api.ToolCall, len(toolCalls)) for i, tc := range toolCalls { apiToolCalls[i].ID = tc.ID apiToolCalls[i].Function.Name = tc.Function.Name err := json.Unmarshal([]byte(tc.Function.Arguments), &apiToolCalls[i].Function.Arguments) if err != nil { return nil, errors.New("invalid tool call arguments") } } return apiToolCalls, nil } // FromCompleteRequest converts a CompletionRequest to api.GenerateRequest func FromCompleteRequest(r CompletionRequest) (api.GenerateRequest, error) { options := make(map[string]any) switch stop := r.Stop.(type) { case string: options["stop"] = []string{stop} case []any: var stops []string for _, s := range stop { if str, ok := s.(string); ok { stops = append(stops, str) } else { return api.GenerateRequest{}, fmt.Errorf("invalid type for 'stop' field: %T", s) } } options["stop"] = stops } if r.MaxTokens != nil { options["num_predict"] = *r.MaxTokens } if r.Temperature != nil { options["temperature"] = *r.Temperature } else { options["temperature"] = 1.0 } if r.Seed != nil { options["seed"] = *r.Seed } options["frequency_penalty"] = r.FrequencyPenalty options["presence_penalty"] = r.PresencePenalty if r.TopP != 0.0 { 
options["top_p"] = r.TopP } else { options["top_p"] = 1.0 } var logprobs bool var topLogprobs int if r.Logprobs != nil && *r.Logprobs > 0 { logprobs = true topLogprobs = *r.Logprobs } return api.GenerateRequest{ Model: r.Model, Prompt: r.Prompt, Options: options, Stream: &r.Stream, Suffix: r.Suffix, Logprobs: logprobs, TopLogprobs: topLogprobs, DebugRenderOnly: r.DebugRenderOnly, }, nil }
go
MIT
626af2d80973270c4d59b8df7153ac47ad67ed7b
2026-01-07T08:35:43.337630Z
false
ollama/ollama
https://github.com/ollama/ollama/blob/626af2d80973270c4d59b8df7153ac47ad67ed7b/tools/tools.go
tools/tools.go
package tools import ( "bytes" "encoding/json" "strings" "text/template" "github.com/ollama/ollama/api" ) type toolsState int const ( toolsState_LookingForTag toolsState = iota toolsState_ToolCalling toolsState_Done ) type Parser struct { tag string tools []api.Tool state toolsState buffer []byte n int } func (p *Parser) GetBuffer() []byte { return p.buffer } // NewParser creates a new tool call parser from a model's chat // template and a list of provided tools. func NewParser(tmpl *template.Template, tools []api.Tool) *Parser { return NewParserWithTag(tools, parseTag(tmpl)) } func NewParserWithTag(tools []api.Tool, tag string) *Parser { return &Parser{ tag: tag, tools: tools, } } // Add processes a string input to parse tool calls and content that // should be sent back to the user. func (p *Parser) Add(s string) (calls []api.ToolCall, content string) { if p.state == toolsState_Done { return nil, s } p.buffer = append(p.buffer, s...) if p.state == toolsState_LookingForTag { i, found := p.findTag() if i == -1 { content = string(p.buffer) p.buffer = []byte{} } else { content = string(p.buffer[:i]) p.buffer = p.buffer[i:] } // for models where { or [ are used as tool calling // tags, we only support parsing tools if the first non- // whitespace character is { or [ if p.tag == "{" || p.tag == "[" { if strings.TrimSpace(content) != "" { p.state = toolsState_Done return nil, content + string(p.buffer) } } if !found { return nil, content } p.state = toolsState_ToolCalling } for { call := p.parseToolCall() if call == nil { break } calls = append(calls, *call) } if p.done() { p.state = toolsState_Done content = string(p.buffer) p.buffer = []byte{} } return calls, content } // findTag searches the buffer to find and handle a tool calling tag // returning true if the tag was found and false otherwise, and // a string content signaling any content that should be sent back to the user func (p *Parser) findTag() (int, bool) { // First check for complete substring anywhere in s 
if i := bytes.Index(p.buffer, []byte(p.tag)); i > -1 { return i, true } // Then check for partial suffix overlap max := min(len(p.buffer), len(p.tag)) for i := max; i > 0; i-- { if bytes.HasSuffix(p.buffer, []byte(p.tag[:i])) { return len(p.buffer) - i, false } } return -1, false } // parseToolCall finds the next complete tool call in the buffer // incrementing n and advancing the buffer. func (p *Parser) parseToolCall() *api.ToolCall { tool, end := findTool(p.tools, p.buffer) if tool == nil { return nil } var argsMap map[string]any if found, i := findArguments(tool, p.buffer); found == nil { return nil } else { argsMap = found if i > end { end = i } } args := api.NewToolCallFunctionArguments() for k, v := range argsMap { args.Set(k, v) } tc := &api.ToolCall{ Function: api.ToolCallFunction{ Name: tool.Function.Name, Arguments: args, Index: p.n, }, } p.n++ p.buffer = p.buffer[end:] return tc } // findTool finds the first tool name in the list that matches the // beginning of the buffer, returning nil if no tool is found // or if the buffer ends with a partial tool name since we need // to wait for more data to disambiguate. // The second return value is the end position of the tool name // if one is found, otherwise 0. 
func findTool(tools []api.Tool, buf []byte) (*api.Tool, int) { if len(buf) == 0 { return nil, 0 } // check if buffer ends with a partial tool name // this prevents matching "get" when seeing "get_weather" var longest string for _, t := range tools { if len(t.Function.Name) > len(longest) { longest = t.Function.Name } } // Only check up to longest characters from the end for i := 1; i <= min(len(buf), len(longest)); i++ { tail := buf[len(buf)-i:] for _, t := range tools { name := []byte(t.Function.Name) if len(tail) < len(name) && bytes.HasPrefix(name, tail) { return nil, 0 } } } // find first occurrence of the longest tool name var found *api.Tool start := -1 end := -1 for i := range tools { name := []byte(tools[i].Function.Name) pos := bytes.Index(buf, name) if pos == -1 { continue } // Skip if we have a better match already if start != -1 { if pos > start { continue } if pos == start && len(name) <= len(found.Function.Name) { continue } } found = &tools[i] start = pos end = pos + len(name) } if found != nil { return found, end } return nil, 0 } // findArguments returns the first object that appears to be // arguments for the provided tool in the provided buffer, // returning nil if no arguments are found and the end position // TODO (jmorganca): this does not support parsing omitted arguments // objects for functions that have all-optional parameters // e.g. 
`{"name": "get_conditions", "arguments": {}}` will work but // `{"name": "get_conditions"}` will not currently work func findArguments(tool *api.Tool, buffer []byte) (map[string]any, int) { if len(buffer) == 0 { return nil, 0 } start := -1 var braces int var inString, escaped bool for i := range buffer { c := buffer[i] if escaped { escaped = false continue } if c == '\\' { escaped = true continue } if c == '"' { inString = !inString continue } if inString { continue } if c == '{' { if braces == 0 { start = i } braces++ } else if c == '}' { braces-- if braces == 0 && start != -1 { object := buffer[start : i+1] var data map[string]any if err := json.Unmarshal(object, &data); err != nil { // not a valid object, keep looking start = -1 continue } var findObject func(obj map[string]any) (map[string]any, bool) findObject = func(obj map[string]any) (map[string]any, bool) { findMap := func(name string, obj map[string]any) (map[string]any, bool) { if args, ok := obj[name].(map[string]any); ok { return args, true } if argsStr, ok := obj[name].(string); ok { var argsData map[string]interface{} if err := json.Unmarshal([]byte(argsStr), &argsData); err == nil { return argsData, ok } } return nil, false } if _, hasName := obj["name"]; hasName { if args, ok := findMap("arguments", obj); ok { return args, true } if args, ok := findMap("parameters", obj); ok { return args, true } return nil, true } if args, ok := findMap(tool.Function.Name, obj); ok { return args, true } for _, v := range obj { switch child := v.(type) { case map[string]any: if result, found := findObject(child); found { return result, true } case []any: for _, item := range child { if childObj, ok := item.(map[string]any); ok { if result, found := findObject(childObj); found { return result, true } } } } } return nil, false } if args, found := findObject(data); found { return args, i } return data, i } if braces < 0 { braces = 0 } } } return nil, 0 } // done checks if the parser is done parsing by looking // for 
closing tag. currently only } and ] are supported // for closing tags as {} or [] pairs may not always // represent tool calls and we need to send the content back func (p *Parser) done() bool { var open, close rune switch p.tag { case "{": open, close = '{', '}' case "[": open, close = '[', ']' default: return false } var count int for _, c := range p.buffer { if c == byte(open) { count++ } else if c == byte(close) { count-- if count == 0 { return true } } } return false } // Content returns any remaining content that // should be sent to the user. This should be the empty string // string unless the tag is { or [ and a tool call was not found func (p *Parser) Content() string { if p.n > 0 { return "" } if p.tag == "{" || p.tag == "[" { return string(p.buffer) } return "" }
go
MIT
626af2d80973270c4d59b8df7153ac47ad67ed7b
2026-01-07T08:35:43.337630Z
false
ollama/ollama
https://github.com/ollama/ollama/blob/626af2d80973270c4d59b8df7153ac47ad67ed7b/tools/tools_test.go
tools/tools_test.go
package tools import ( "strings" "testing" "text/template" "github.com/google/go-cmp/cmp" "github.com/ollama/ollama/api" ) // argsComparer provides cmp options for comparing ToolCallFunctionArguments by value (order-insensitive) var argsComparer = cmp.Comparer(func(a, b api.ToolCallFunctionArguments) bool { return cmp.Equal(a.ToMap(), b.ToMap()) }) // testPropsMap creates a ToolPropertiesMap from a map (convenience function for tests, order not preserved) func testPropsMap(m map[string]api.ToolProperty) *api.ToolPropertiesMap { props := api.NewToolPropertiesMap() for k, v := range m { props.Set(k, v) } return props } // testArgs creates ToolCallFunctionArguments from a map (convenience function for tests, order not preserved) func testArgs(m map[string]any) api.ToolCallFunctionArguments { args := api.NewToolCallFunctionArguments() for k, v := range m { args.Set(k, v) } return args } func TestParser(t *testing.T) { qwen, err := template.New("qwen").Parse(`{{if .ToolCalls}}<tool_call>{{range .ToolCalls}}{"name": "{{.Function.Name}}", "arguments": {{.Function.Arguments}}}{{end}}</tool_call>{{end}}`) if err != nil { t.Fatalf("Failed to parse template: %v", err) } deepseek, err := template.New("deepseek").Parse("{{if .ToolCalls}}<|tool▁calls▁begin|>{{range .ToolCalls}}<|tool▁call▁begin|>function<|tool▁sep|>get_current_weather\n```json\n{\"location\": \"Tokyo\"}\n```<|tool▁call▁end|>{{end}}<|tool▁calls▁end|><|end▁of▁sentence|>{{end}}") if err != nil { t.Fatalf("Failed to parse template: %v", err) } json, err := template.New("json").Parse(`{{if .ToolCalls}}{{range .ToolCalls}}{"name": "{{.Function.Name}}", "arguments": {{.Function.Arguments}}}{{end}}{{end}}`) if err != nil { t.Fatalf("Failed to parse template: %v", err) } mistral, err := template.New("mistral").Parse(`{{if .ToolCalls}}[TOOL_CALLS] [{{range .ToolCalls}}{"name": "{{.Function.Name}}", "arguments": {{.Function.Arguments}}}{{end}}][/TOOL_CALLS]{{end}}`) if err != nil { t.Fatalf("Failed to parse template: %v", 
err) } list, err := template.New("list").Parse(`{{if .ToolCalls}}[{{range .ToolCalls}}{"name": "{{.Function.Name}}", "arguments": {{.Function.Arguments}}}{{end}}]{{end}}`) if err != nil { t.Fatalf("Failed to parse template: %v", err) } tools := []api.Tool{ { Type: "function", Function: api.ToolFunction{ Name: "get_temperature", Description: "Retrieve the temperature for a given location", Parameters: api.ToolFunctionParameters{ Type: "object", Required: []string{"city"}, Properties: testPropsMap(map[string]api.ToolProperty{ "format": { Type: api.PropertyType{"string"}, Description: "The format to return the temperature in", Enum: []any{"fahrenheit", "celsius"}, }, "city": { Type: api.PropertyType{"string"}, Description: "The city to get the temperature for", }, }), }, }, }, { Type: "function", Function: api.ToolFunction{ Name: "get_conditions", Description: "Retrieve the current weather conditions for a given location", Parameters: api.ToolFunctionParameters{ Type: "object", Properties: testPropsMap(map[string]api.ToolProperty{ "location": { Type: api.PropertyType{"string"}, Description: "The location to get the weather conditions for", }, }), }, }, }, { Type: "function", Function: api.ToolFunction{ Name: "say_hello", Description: "Say hello", }, }, { Type: "function", Function: api.ToolFunction{ Name: "say_hello_world", Description: "Say hello world", }, }, { Type: "function", Function: api.ToolFunction{ Name: "get_address", Description: "Get the address of a given location", Parameters: api.ToolFunctionParameters{ Type: "object", Properties: testPropsMap(map[string]api.ToolProperty{ "location": { Type: api.PropertyType{"string"}, Description: "The location to get the address for", }, }), }, }, }, { Type: "function", Function: api.ToolFunction{ Name: "add", Description: "Add two numbers", Parameters: api.ToolFunctionParameters{ Type: "object", Properties: testPropsMap(map[string]api.ToolProperty{ "a": { Type: api.PropertyType{"string"}, Description: "The first 
number to add", }, "b": { Type: api.PropertyType{"string"}, Description: "The second number to add", }, }), }, }, }, } tests := []struct { name string inputs []string tmpl *template.Template content string calls []api.ToolCall }{ { name: "no tool calls - just text", inputs: []string{"Hello, how can I help you today?"}, content: "Hello, how can I help you today?", tmpl: qwen, calls: nil, }, { name: "empty input", inputs: []string{""}, content: "", tmpl: qwen, calls: nil, }, { name: "tool call", inputs: []string{`<tool_call>{"name": "get_conditions", "arguments": {"location": "San Francisco"}}</tool_call>`}, content: "", tmpl: qwen, calls: []api.ToolCall{ { Function: api.ToolCallFunction{ Index: 0, Name: "get_conditions", Arguments: testArgs(map[string]any{ "location": "San Francisco", }), }, }, }, }, { name: "empty args", inputs: []string{`<tool_call>{"name": "get_conditions", "arguments": {}}</tool_call>`}, content: "", tmpl: qwen, calls: []api.ToolCall{ { Function: api.ToolCallFunction{ Index: 0, Name: "get_conditions", Arguments: api.NewToolCallFunctionArguments(), }, }, }, }, { name: "text before tool call", inputs: []string{`Let me check the weather. <tool_call>{"name": "get_temperature", "arguments": {"city": "New York"}}</tool_call>`}, content: "Let me check the weather. ", tmpl: qwen, calls: []api.ToolCall{ { Function: api.ToolCallFunction{ Index: 0, Name: "get_temperature", Arguments: testArgs(map[string]any{ "city": "New York", }), }, }, }, }, { name: "qwen no args with text", inputs: []string{"Let me say hello to the user. I'll use the say_hello tool. "}, content: "Let me say hello to the user. I'll use the say_hello tool. 
", tmpl: qwen, calls: nil, }, { name: "two tool calls in a list", inputs: []string{`[TOOL_CALLS] [{"name": "get_temperature", "arguments": {"city": "London", "format": "fahrenheit"}}, {"name": "get_conditions", "arguments": {"location": "Tokyo"}}][/TOOL_CALLS]`}, content: "", tmpl: mistral, calls: []api.ToolCall{ { Function: api.ToolCallFunction{ Index: 0, Name: "get_temperature", Arguments: testArgs(map[string]any{ "city": "London", "format": "fahrenheit", }), }, }, { Function: api.ToolCallFunction{ Index: 1, Name: "get_conditions", Arguments: testArgs(map[string]any{ "location": "Tokyo", }), }, }, }, }, { name: "qwen two tool calls", inputs: []string{`Okay, let's call both tools! <tool_call>{"name": "get_temperature", "arguments": {"city": "London", "format": "fahrenheit"}}</tool_call><tool_call>{"name": "get_conditions", "arguments": {"location": "Tokyo"}}</tool_call>`}, content: "Okay, let's call both tools! ", tmpl: qwen, calls: []api.ToolCall{ { Function: api.ToolCallFunction{ Index: 0, Name: "get_temperature", Arguments: testArgs(map[string]any{ "city": "London", "format": "fahrenheit", }), }, }, { Function: api.ToolCallFunction{ Index: 1, Name: "get_conditions", Arguments: testArgs(map[string]any{ "location": "Tokyo", }), }, }, }, }, { name: "empty args followed by args", inputs: []string{`Let me say hello and check the weather. <tool_call>{"name": "say_hello", "arguments": {}}</tool_call><tool_call>{"name": "get_temperature", "arguments": {"city": "London", "format": "fahrenheit"}}</tool_call>`}, content: "Let me say hello and check the weather. ", tmpl: qwen, calls: []api.ToolCall{ { Function: api.ToolCallFunction{ Index: 0, Name: "say_hello", Arguments: api.NewToolCallFunctionArguments(), }, }, { Function: api.ToolCallFunction{ Index: 1, Name: "get_temperature", Arguments: testArgs(map[string]any{ "city": "London", "format": "fahrenheit", }), }, }, }, }, { name: "qwen empty followed by args", inputs: []string{`Let me check the weather. 
<tool_call>{"name": "get_conditions", "arguments": {}}</tool_call><tool_call>{"name": "get_conditions", "arguments": {"location": "Tokyo"}}`}, content: "Let me check the weather. ", tmpl: qwen, calls: []api.ToolCall{ { Function: api.ToolCallFunction{ Index: 0, Name: "get_conditions", Arguments: api.NewToolCallFunctionArguments(), }, }, { Function: api.ToolCallFunction{ Index: 1, Name: "get_conditions", Arguments: testArgs(map[string]any{ "location": "Tokyo", }), }, }, }, }, { name: "deepseek", inputs: []string{"<think>Wait, I need to call a tool</think><|tool▁calls▁begin|><|tool▁call▁begin|>function<|tool▁sep|>get_temperature\n```json\n{\"city\": \"Tokyo\"}\n```<|tool▁call▁end|><|tool▁calls▁end|><|end▁of▁sentence|>"}, content: "<think>Wait, I need to call a tool</think>", tmpl: deepseek, calls: []api.ToolCall{ { Function: api.ToolCallFunction{ Index: 0, Name: "get_temperature", Arguments: testArgs(map[string]any{ "city": "Tokyo", }), }, }, }, }, { name: "deepseek incremental", inputs: []string{ "<think>Wait", ", I need", " to call", " a tool</think><|too", "l▁calls▁begin", "|>", "<|tool▁call▁begin|>function<|tool▁sep|>get_temperature\n", "```json\n", "{\"city\": \"Tokyo\"}\n", "```", "<|tool▁c", "all▁end|>", "<|tool▁calls▁end|>", "<|end▁of▁sentence|>", }, content: "<think>Wait, I need to call a tool</think>", tmpl: deepseek, calls: []api.ToolCall{ { Function: api.ToolCallFunction{ Index: 0, Name: "get_temperature", Arguments: testArgs(map[string]any{ "city": "Tokyo", }), }, }, }, }, { name: "json", inputs: []string{ "{", "\"name\": \"get_temperature\",", "\"arguments\": {", "\"city\": \"Tokyo\"", "}", "}", }, content: "", tmpl: json, calls: []api.ToolCall{ { Function: api.ToolCallFunction{ Index: 0, Name: "get_temperature", Arguments: testArgs(map[string]any{ "city": "Tokyo", }), }, }, }, }, { name: "json maybe a tool call", inputs: []string{ "{", "\"name\": \"get_temperature\",", "\"arguments\": {", }, content: "", tmpl: json, calls: nil, }, { name: "json not a 
tool call", inputs: []string{ "{", "\"name\": \"search\", ", "\"arguments\": {", "\"query\": \"What is the capital of Canada?\"", "}", "}", }, content: "{\"name\": \"search\", \"arguments\": {\"query\": \"What is the capital of Canada?\"}}", tmpl: json, calls: nil, }, { name: "json object followed by tool call", inputs: []string{ "{\"name\": \"jeff\"}", "{\"name\": \"get_conditions\", \"arguments\": {\"location\": \"San Francisco\"}}", }, content: "{\"name\": \"jeff\"}{\"name\": \"get_conditions\", \"arguments\": {\"location\": \"San Francisco\"}}", tmpl: json, }, { name: "json object followed by tool call split", inputs: []string{ "{\"name\": \"jeff\"} {", "\"name\": \"get_conditions\", \"arguments\": {\"location\": \"San Francisco\"}}", }, content: "{\"name\": \"jeff\"} {\"name\": \"get_conditions\", \"arguments\": {\"location\": \"San Francisco\"}}", tmpl: json, }, { name: "json code", inputs: []string{ "for { fmt.Println(\"hello\") }", }, content: "for { fmt.Println(\"hello\") }", tmpl: json, }, { name: "list multiple", inputs: []string{ "[", "{", "\"name\": \"get_temperature\", ", "\"arguments\": {", "\"city\": \"London\"", "}", "},", "{", "\"name\": \"get_conditions\", ", "\"arguments\": {", "\"location\": \"Tokyo\"", "}", "}]", }, content: "", tmpl: list, calls: []api.ToolCall{ { Function: api.ToolCallFunction{ Index: 0, Name: "get_temperature", Arguments: testArgs(map[string]any{ "city": "London", }), }, }, { Function: api.ToolCallFunction{ Index: 1, Name: "get_conditions", Arguments: testArgs(map[string]any{ "location": "Tokyo", }), }, }, }, }, { name: "list partial", inputs: []string{ "[{", "\"name\": \"get_conditions\", ", "\"arguments\": {", "\"location\": \"Tokyo\"", "}", "}", }, content: "", tmpl: list, calls: []api.ToolCall{ { Function: api.ToolCallFunction{ Index: 0, Name: "get_conditions", Arguments: testArgs(map[string]any{ "location": "Tokyo", }), }, }, }, }, { name: "list invalid", inputs: []string{ "[", "{", "\"name\": \"search\", ", 
"\"arguments\": {", "\"query\": \"What is the capital of Canada?\"", "}", "}", }, content: "", tmpl: list, calls: nil, }, { name: "list trailing ]", inputs: []string{ "[", "{", "\"name\": \"get_conditions\", ", "\"arguments\": {", "\"location\": \"Tokyo\"", "}", "}", "]", "]", }, content: "", tmpl: list, calls: []api.ToolCall{ { Function: api.ToolCallFunction{ Index: 0, Name: "get_conditions", Arguments: testArgs(map[string]any{ "location": "Tokyo", }), }, }, }, }, { name: "list not a tool call", inputs: []string{ "[special", " del", "ivery]", }, content: "[special delivery]", tmpl: list, calls: nil, }, { name: "tool name with collision", inputs: []string{ "<tool_call>", "{", "\"name\": \"say_hello", "_world\",", "\"arguments\": {}}", "}", }, content: "", tmpl: qwen, calls: []api.ToolCall{ { Function: api.ToolCallFunction{ Index: 0, Name: "say_hello_world", Arguments: api.NewToolCallFunctionArguments(), }, }, }, }, { name: "tool name with collision multiple", inputs: []string{ "<tool_call>", "{", "\"name\": \"say_hello", "_world\",", "\"arguments\": {}}", "</tool_call>", "<tool_call>", "{", "\"name\": \"say_hello", "\",", "\"arguments\": {}}", "</tool_call>", }, content: "", tmpl: qwen, calls: []api.ToolCall{ { Function: api.ToolCallFunction{ Index: 0, Name: "say_hello_world", Arguments: api.NewToolCallFunctionArguments(), }, }, { Function: api.ToolCallFunction{ Index: 1, Name: "say_hello", Arguments: api.NewToolCallFunctionArguments(), }, }, }, }, { name: "tool name with collision non streaming", inputs: []string{ `<tool_call>{"name": "say_hello`, }, content: "", tmpl: qwen, calls: nil, }, { name: "tool name with collision non streaming multiple", inputs: []string{ `<tool_call>{"name": "say_hello", "arguments": {}}</tool_call><tool_call>{"name": "say_hello_world", "arguments": {}}`, }, content: "", tmpl: qwen, calls: []api.ToolCall{ { Function: api.ToolCallFunction{ Index: 0, Name: "say_hello", Arguments: api.NewToolCallFunctionArguments(), }, }, { Function: 
api.ToolCallFunction{ Index: 1, Name: "say_hello_world", Arguments: api.NewToolCallFunctionArguments(), }, }, }, }, { name: "tool name with collision non streaming shorter", inputs: []string{ `<tool_call>{"name": "say_hello", "arguments": {}}</tool_call>`, }, content: "", tmpl: qwen, calls: []api.ToolCall{ { Function: api.ToolCallFunction{ Index: 0, Name: "say_hello", Arguments: api.NewToolCallFunctionArguments(), }, }, }, }, { name: "tool name with collision non streaming longer", inputs: []string{ `<tool_call>{"name": "say_hello_world", "arguments": {}}</tool_call>`, }, content: "", tmpl: qwen, calls: []api.ToolCall{ { Function: api.ToolCallFunction{ Index: 0, Name: "say_hello_world", Arguments: api.NewToolCallFunctionArguments(), }, }, }, }, { name: "tool name with substring of another", inputs: []string{ "{", "\"name\": \"get_address\",", "\"arguments\": {", "\"location\": \"London\"", "}", "}", }, content: "", tmpl: json, calls: []api.ToolCall{ { Function: api.ToolCallFunction{ Index: 0, Name: "get_address", Arguments: testArgs(map[string]any{ "location": "London", }), }, }, }, }, { name: "tool name with substring of another", inputs: []string{ `<tool_call>{"name": "get_address", "arguments": {"location": "London"}}</tool_call>`, }, content: "", tmpl: qwen, calls: []api.ToolCall{ { Function: api.ToolCallFunction{ Index: 0, Name: "get_address", Arguments: testArgs(map[string]any{ "location": "London", }), }, }, }, }, { name: "args before name", inputs: []string{ `<tool_call>{"arguments": {"a": "5", "b": "10"}, "name": "add"}</tool_call>`, }, content: "", tmpl: qwen, calls: []api.ToolCall{ { Function: api.ToolCallFunction{ Index: 0, Name: "add", Arguments: testArgs(map[string]any{ "a": "5", "b": "10", }), }, }, }, }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { parser := NewParser(tt.tmpl, tools) var calls []api.ToolCall var content string for _, input := range tt.inputs { tcs, c := parser.Add(input) calls = append(calls, tcs...) 
content += c } if content != tt.content { t.Errorf("Expected content %q, got %q", tt.content, content) } if len(calls) != len(tt.calls) { t.Fatalf("Expected %d tool calls, got %d", len(tt.calls), len(calls)) } for i, want := range tt.calls { if diff := cmp.Diff(calls[i], want, argsComparer); diff != "" { t.Errorf("Tool call %d mismatch (-got +want):\n%s", i, diff) } } }) } } func TestDone(t *testing.T) { tests := []struct { name string tag string buffer []byte want bool }{ { name: "empty", tag: "<tool_call>", buffer: []byte{}, want: false, }, { name: "empty", tag: "<tool_call>", buffer: []byte{}, want: false, }, { name: "json open", tag: "{", buffer: []byte("{\"name\": \"get_weather\""), want: false, }, { name: "json closed", tag: "{", buffer: []byte("{\"name\": \"get_weather\"}"), want: true, }, { name: "json empty", tag: "{", buffer: []byte("{}"), want: true, }, { name: "list open", tag: "[", buffer: []byte("[{\"name\": \"get_weather\""), want: false, }, { name: "list closed", tag: "[", buffer: []byte("[{\"name\": \"get_weather\"}]"), want: true, }, { name: "list empty", tag: "[", buffer: []byte("[]"), want: true, }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { parser := &Parser{ tag: tt.tag, buffer: tt.buffer, } got := parser.done() if got != tt.want { t.Errorf("done() = %t, want %t", got, tt.want) } }) } } func TestContent(t *testing.T) { tests := []struct { name string tag string content []byte want string n int }{ { name: "empty", content: []byte{}, tag: "{", want: "", n: 0, }, { name: "tag", tag: "<tool_call>", content: []byte("<tool_call>{\"name\": \"get_temperature\""), want: "", n: 0, }, { name: "json object", tag: "{", content: []byte("{\"name\": \"get_temperature\"}"), want: "{\"name\": \"get_temperature\"}", n: 0, }, { name: "json object after called", tag: "{", content: []byte("{\"hello\": \"world\"}"), want: "{\"hello\": \"world\"}", n: 0, }, { name: "json object after called", tag: "{", content: []byte("{\"hello\": \"world\"}"), 
want: "", n: 1, }, { name: "list", tag: "[", content: []byte("[{\"name\": \"get_temperature\"}]"), want: "[{\"name\": \"get_temperature\"}]", n: 0, }, { name: "code", tag: "{", content: []byte("{ fmt.Println(\"hello\")"), want: "{ fmt.Println(\"hello\")", n: 0, }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { parser := &Parser{ tag: tt.tag, buffer: tt.content, n: tt.n, } got := parser.Content() if got != tt.want { t.Errorf("Content() = %q, want %q", got, tt.want) } }) } } func TestFindTag(t *testing.T) { cases := []struct { name string buffer []byte tag string i int found bool }{ { name: "no overlap", buffer: []byte("hello world"), tag: "<tool_call>", i: -1, found: false, }, { name: "full overlap", buffer: []byte("<tool_call>"), tag: "<tool_call>", i: 0, found: true, }, { name: "whitespace", buffer: []byte(" <tool_call>\n {\"name\": \"bob\"}"), tag: "<tool_call>", i: 4, found: true, }, { name: "over", buffer: []byte("<tool_call>{\"name\""), tag: "<tool_call>", i: 0, found: true, }, { name: "partial overlap", buffer: []byte("text <tool_call>"), tag: "<tool_call>", i: 5, found: true, }, { name: "overlap with extra", buffer: []byte("<tool_calls><tool_call>"), tag: "<tool_calls>", i: 0, found: true, }, { name: "delimiter longer than string", buffer: []byte("<tool>"), tag: "<tool_call>", i: -1, found: false, }, { name: "empty string", buffer: []byte{}, tag: "<tool_call>", i: -1, found: false, }, { name: "single char overlap", buffer: []byte("test<"), tag: "<tool_call>", i: 4, found: false, }, { name: "partial tool call", buffer: []byte("hello <tool_"), tag: "<tool_call>", i: 6, found: false, }, { name: "square bracket", buffer: []byte("calling tools: ["), tag: "[", i: 15, found: true, }, { name: "bracket", buffer: []byte("{\"name\": \"bob\""), tag: "{", i: 0, found: true, }, { name: "bracket with whitespace", buffer: []byte("\n\n{\n\"name\": \"bob\""), tag: "{", i: 2, found: true, }, } for _, tt := range cases { t.Run(tt.name, func(t *testing.T) { 
parser := &Parser{ tag: tt.tag, buffer: tt.buffer, n: 0, } i, found := parser.findTag() if i != tt.i { t.Errorf("findTag(%q, %q) = %d; want %d", tt.buffer, tt.tag, i, tt.i) } if found != tt.found { t.Errorf("findTag(%q, %q) = %t; want %t", tt.buffer, tt.tag, found, tt.found) } }) } } func TestFindArguments(t *testing.T) { tests := []struct { name string buffer []byte want map[string]any tool string }{ { name: "empty string", buffer: []byte{}, want: nil, }, { name: "whitespace only", buffer: []byte(" \n\t "), want: nil, }, { name: "unbalanced braces - missing closing", buffer: []byte(`{"format": "fahrenheit", "location": "San Francisco"`), want: nil, }, { name: "unbalanced braces - extra closing", buffer: []byte(`{"format": "fahrenheit"}}`), want: map[string]any{ "format": "fahrenheit", }, }, { name: "invalid JSON", buffer: []byte(`{format: fahrenheit, location: "San Francisco"}`), want: nil, }, { name: "valid json", buffer: []byte(`{"name": "get_temperature", "arguments": {"format": "fahrenheit", "location": "San Francisco, CA"}}`), want: map[string]any{ "format": "fahrenheit", "location": "San Francisco, CA", }, }, { name: "valid arguments with special tokens", buffer: []byte(`[tool]get_temperature[args]{"format": "fahrenheit", "location": "San Francisco, CA"}[end]`), want: map[string]any{ "format": "fahrenheit", "location": "San Francisco, CA", }, }, { name: "valid arguments in array", buffer: []byte(`[{"name": "get_temperature", "arguments": {"format": "fahrenheit", "location": "San Francisco, CA"}}`), want: map[string]any{ "format": "fahrenheit", "location": "San Francisco, CA", }, }, { name: "nested deep", buffer: []byte(`{"function": {"name": "get_temperature", "arguments": {"format": "fahrenheit", "location": "San Francisco, CA"}}}`), want: map[string]any{ "format": "fahrenheit", "location": "San Francisco, CA", }, }, { name: "one arg", buffer: []byte(`get_temperature({"location": "San Francisco, CA"})`), want: map[string]any{ "location": "San Francisco, 
CA", }, }, { name: "two args", buffer: []byte(`[{"name": "get_temperature", "arguments": {"location": "San Francisco, CA", "format": "fahrenheit"}}, {"name": "get_weather", "arguments": {"location": "San Francisco, CA", "format": "fahrenheit"}}]`), want: map[string]any{ "location": "San Francisco, CA", "format": "fahrenheit", }, }, { name: "deepseek", buffer: []byte("<|tool▁calls▁begin|><|tool▁call▁begin|>function<|tool▁sep|>get_temperature\n```json\n{\"location\": \"Tokyo\"}\n```<|tool▁call▁end|><|tool▁calls▁end|><|end▁of▁sentence|>"), want: map[string]any{ "location": "Tokyo", }, }, { name: "deepseek", buffer: []byte(`"arguments": {"location": "Tokyo"}}</tool_call>`), want: map[string]any{ "location": "Tokyo", }, }, { name: "string with braces", buffer: []byte(`{"name": "process_code", "arguments": {"code": "if (x > 0) { return true; }"}}`), want: map[string]any{ "code": "if (x > 0) { return true; }", }, }, { name: "string with nested json", buffer: []byte(`{"name": "send_data", "arguments": {"payload": "{\"nested\": {\"key\": \"value\"}}"}}`), want: map[string]any{ "payload": `{"nested": {"key": "value"}}`, }, }, { name: "string with escaped quotes and braces", buffer: []byte(`{"name": "analyze", "arguments": {"text": "The JSON is: {\"key\": \"val{ue}\"}"}}`), want: map[string]any{ "text": `The JSON is: {"key": "val{ue}"}`, }, }, { name: "multiple objects with string containing braces", buffer: []byte(`{"name": "test", "arguments": {"query": "find } in text"}} {"name": "other"}`), want: map[string]any{ "query": "find } in text", }, }, { name: "unmatched closing brace in string", buffer: []byte(`{"name": "search", "arguments": {"pattern": "regex: }"}}`), want: map[string]any{ "pattern": "regex: }", }, }, { name: "complex nested with mixed braces", buffer: []byte(`{"name": "analyze", "arguments": {"data": "{\"items\": [{\"value\": \"}\"}, {\"code\": \"if (x) { return y; }\"}]}"}}`), want: map[string]any{ "data": `{"items": [{"value": "}"}, {"code": "if (x) { 
return y; }"}]}`, }, }, { name: "string with newline and braces", buffer: []byte(`{"name": "format", "arguments": {"template": "{\n \"key\": \"value\"\n}"}}`), want: map[string]any{ "template": "{\n \"key\": \"value\"\n}", }, }, { name: "string with unicode escape", buffer: []byte(`{"name": "test", "arguments": {"text": "Unicode: \u007B and \u007D"}}`), want: map[string]any{ "text": "Unicode: { and }", }, }, { name: "array arguments", buffer: []byte(`{"name": "batch", "arguments": ["item1", "item2", "{\"nested\": true}"]}`), want: nil, // This should return nil because arguments is not a map }, { name: "escaped backslash before quote", buffer: []byte(`{"name": "path", "arguments": {"dir": "C:\\Program Files\\{App}\\"}}`), want: map[string]any{ "dir": `C:\Program Files\{App}\`, }, }, { name: "single quotes not treated as string delimiters", buffer: []byte(`{"name": "query", "arguments": {"sql": "SELECT * FROM users WHERE name = '{admin}'"}}`), want: map[string]any{ "sql": "SELECT * FROM users WHERE name = '{admin}'", }, }, { name: "incomplete json at buffer end", buffer: []byte(`{"name": "test", "arguments": {"data": "some {"`), want: nil, }, { name: "multiple escaped quotes", buffer: []byte(`{"name": "echo", "arguments": {"msg": "He said \"Hello {World}\" loudly"}}`), want: map[string]any{ "msg": `He said "Hello {World}" loudly`, }, }, { name: "json with comments style string", buffer: []byte(`{"name": "code", "arguments": {"snippet": "// This is a comment with { and }"}}`), want: map[string]any{ "snippet": "// This is a comment with { and }", }, }, { name: "consecutive escaped backslashes", buffer: []byte(`{"name": "test", "arguments": {"path": "C:\\\\{folder}\\\\"}}`), want: map[string]any{ "path": `C:\\{folder}\\`, }, }, { name: "empty string with braces after", buffer: []byte(`{"name": "test", "arguments": {"a": "", "b": "{value}"}}`), want: map[string]any{ "a": "", "b": "{value}", }, }, { name: "unicode in key names", buffer: []byte(`{"name": "test", 
"arguments": {"key{": "value", "key}": "value2"}}`), want: map[string]any{ "key{": "value", "key}": "value2", }, }, { name: "very long string with braces", buffer: []byte(`{"name": "test", "arguments": {"data": "` + strings.Repeat("a{b}c", 100) + `"}}`), want: map[string]any{ "data": strings.Repeat("a{b}c", 100), }, }, { name: "tab characters and braces", buffer: []byte(`{"name": "test", "arguments": {"code": "\tif (true) {\n\t\treturn;\n\t}"}}`), want: map[string]any{ "code": "\tif (true) {\n\t\treturn;\n\t}", }, }, { name: "null byte in string", buffer: []byte(`{"name": "test", "arguments": {"data": "before\u0000{after}"}}`), want: map[string]any{ "data": "before\x00{after}", }, }, { name: "escaped quote at end of string", buffer: []byte(`{"name": "test", "arguments": {"data": "text with quote at end\\\""}}`), want: map[string]any{ "data": `text with quote at end\"`, }, }, { name: "mixed array and object in arguments", buffer: []byte(`{"name": "test", "arguments": {"items": ["{", "}", {"key": "value"}]}}`), want: map[string]any{ "items": []any{"{", "}", map[string]any{"key": "value"}}, }, }, { name: "stringified arguments", buffer: []byte(`{"name": "get_temperature", "arguments": "{\"format\": \"fahrenheit\", \"location\": \"San Francisco, CA\"}"}`), want: map[string]any{ "format": "fahrenheit", "location": "San Francisco, CA", }, }, { name: "stringified parameters", buffer: []byte(`{"name": "get_temperature", "parameters": "{\"format\": \"fahrenheit\", \"location\": \"San Francisco, CA\"}"}`), want: map[string]any{ "format": "fahrenheit", "location": "San Francisco, CA", }, }, { name: "simple tool call", tool: "get_temperature", buffer: []byte(`{"get_temperature": {"format": "fahrenheit", "location": "San Francisco, CA"}}`), want: map[string]any{ "format": "fahrenheit", "location": "San Francisco, CA", }, }, {
go
MIT
626af2d80973270c4d59b8df7153ac47ad67ed7b
2026-01-07T08:35:43.337630Z
true
ollama/ollama
https://github.com/ollama/ollama/blob/626af2d80973270c4d59b8df7153ac47ad67ed7b/tools/template_test.go
tools/template_test.go
package tools

import (
	"testing"
	"text/template"
)

// TestParseTag checks that parseTag recovers the literal tool-call prefix
// (e.g. "<tool_call>", "[TOOL_CALL] [", "```json") emitted inside a chat
// template's {{if .ToolCalls}} branch, and that it falls back to "{" when
// the template renders bare JSON objects (or nothing at all), so that JSON
// objects in model output are then treated as candidate tool calls.
func TestParseTag(t *testing.T) {
	cases := []struct {
		name     string
		template string
		want     string
	}{
		{
			name:     "empty",
			template: "",
			want:     "{",
		},
		{
			name:     "no tag",
			template: "{{if .ToolCalls}}{{end}}",
			want:     "{",
		},
		{
			name:     "no tag with range",
			template: "{{if .ToolCalls}}{{range .ToolCalls}}{{ . }}{{end}}{{end}}",
			want:     "{",
		},
		{
			name:     "tool call with json format",
			template: "{{if .ToolCalls}}```json\n{{end}}",
			want:     "```json",
		},
		{
			name:     "square brackets",
			template: "{{if .ToolCalls}}[{{range .ToolCalls}}{{ . }}{{end}}]{{end}}",
			want:     "[",
		},
		{
			name:     "square brackets with whitespace",
			template: "{{if .ToolCalls}}\n [ {{range .ToolCalls}}{{ . }}{{end}}]{{end}}",
			want:     "[",
		},
		{
			// a ] that only appears after the range is not a leading tag
			name:     "tailing ]",
			template: "{{if .ToolCalls}}{{range .ToolCalls}}{{ . }}{{end}}]{{end}}",
			want:     "{",
		},
		{
			name:     "whitespace only",
			template: "{{if .ToolCalls}} {{range .ToolCalls}}{{ . }}{{end}}{{end}}",
			want:     "{",
		},
		{
			name:     "whitespace only in range",
			template: "{{if .ToolCalls}}{{range .ToolCalls}}\n{{ . }}\n{{end}}{{end}}",
			want:     "{",
		},
		{
			name:     "json objects",
			template: `{{if .ToolCalls}}{{range .ToolCalls}}{"name": "{{ .Function.Name }}", "arguments": {{ .Function.Arguments }}}{{end}}{{end}}`,
			want:     "{",
		},
		{
			name:     "json objects with whitespace",
			template: "{{if .ToolCalls}}{{range .ToolCalls}}\n{\"name\": \"{{ .Function.Name }}\", \"arguments\": {{ .Function.Arguments }}}{{end}}{{end}}",
			want:     "{",
		},
		{
			name:     "json objects with CRLF",
			template: "{{if .ToolCalls}}{{range .ToolCalls}}\r\n{\"name\": \"{{ .Function.Name }}\", \"arguments\": {{ .Function.Arguments }}}{{end}}{{end}}",
			want:     "{",
		},
		{
			name:     "json objects with whitespace before and after range",
			template: "{{if .ToolCalls}}\n{{range .ToolCalls}}\n{\"name\": \"{{ .Function.Name }}\", \"arguments\": {{ .Function.Arguments }}}\r\n{{end}}\r\n{{end}}",
			want:     "{",
		},
		{
			// deepseek-style template: the tag precedes the range entirely
			name:     "before and after range",
			template: "{{if .ToolCalls}}<|tool▁calls▁begin|>{{range .ToolCalls}}<|tool▁call▁begin|>functionget_current_weather\n```json\n{\"location\": \"Tokyo\"}\n```<|tool▁call▁end|>\n{{end}}<|tool▁calls▁end|>{{end}}",
			want:     "<|tool▁calls▁begin|>",
		},
		{
			name:     "after range",
			template: "{{if .ToolCalls}}{{range .ToolCalls}}<tool_call>{\"name\": \"{{ .Function.Name }}\", \"arguments\": {{ .Function.Arguments }}}</tool_call>{{end}}{{end}}",
			want:     "<tool_call>",
		},
		{
			name:     "after range with leading whitespace before range",
			template: "{{if .ToolCalls}}\n{{range .ToolCalls}}<tool_call>{\"name\": \"{{ .Function.Name }}\", \"arguments\": {{ .Function.Arguments }}}</tool_call>{{end}}{{end}}",
			want:     "<tool_call>",
		},
		{
			// everything from the first { onward is cut from the tag
			name:     "tool call in range with {",
			template: `{{if .ToolCalls}}{{range .ToolCalls}}<tool_call>{"name": "{{ .Function.Name }}", "arguments": {{ .Function.Arguments }}}<tool_call>{{end}}{{end}}`,
			want:     "<tool_call>",
		},
		{
			name:     "tool call with multiple text nodes",
			template: "{{if .ToolCalls}}First text{{if .Something}}inner{{end}}Second text{{end}}",
			want:     "First text",
		},
		{
			name:     "action tag",
			template: "{{if .ToolCalls}}Action: ```json{{end}}",
			want:     "Action: ```json",
		},
		{
			name:     "incomplete functools bracket",
			template: "{{if .ToolCalls}}functools[{{end}}",
			want:     "functools[",
		},
		{
			name:     "uppercase tool call with incomplete bracket",
			template: "{{if .ToolCalls}}[TOOL_CALL] [{{end}}",
			want:     "[TOOL_CALL] [",
		},
		{
			name:     "uppercase tool call with adjacent bracket",
			template: "{{if .ToolCalls}}[TOOL_CALL][{{end}}",
			want:     "[TOOL_CALL][",
		},
	}

	for _, tc := range cases {
		t.Run(tc.name, func(t *testing.T) {
			tmpl, err := template.New("test").Parse(tc.template)
			// parse errors are fatal only for non-empty templates; parseTag
			// must handle the template gracefully either way
			if err != nil && tc.template != "" {
				t.Fatalf("failed to parse template: %v", err)
			}
			got := parseTag(tmpl)
			if got != tc.want {
				t.Errorf("got text %q, want %q", got, tc.want)
			}
		})
	}
}
go
MIT
626af2d80973270c4d59b8df7153ac47ad67ed7b
2026-01-07T08:35:43.337630Z
false
ollama/ollama
https://github.com/ollama/ollama/blob/626af2d80973270c4d59b8df7153ac47ad67ed7b/tools/template.go
tools/template.go
package tools

import (
	"bytes"
	"log/slog"
	"slices"
	"strings"
	"text/template"
	"text/template/parse"
)

// parseTag returns the tool-calling tag a model template emits before a tool
// call (e.g. "<tool_call>" or "[TOOL_CALL]"). It locates the {{if .ToolCalls}}
// branch and takes the first literal text inside it, truncated at the first
// '{' so a leading JSON object is not mistaken for a tag. When no usable tag
// exists, "{" is returned so that bare JSON objects are treated as tool calls.
func parseTag(tmpl *template.Template) string {
	if tmpl == nil || tmpl.Tree == nil {
		slog.Debug("template or tree is nil")
		return "{"
	}

	ifNode := findToolCallNode(tmpl.Tree.Root.Nodes)
	if ifNode == nil {
		return "{"
	}

	textNode := findTextNode(ifNode.List.Nodes)
	if textNode == nil {
		return "{"
	}

	tag := strings.ReplaceAll(string(textNode.Text), "\r\n", "\n")

	// Everything from the first '{' onward may itself be a JSON tool call,
	// so cut it off; if nothing remains, fall back to "{" so json objects
	// are still attempted as tool calls.
	tag, _, _ = strings.Cut(tag, "{")
	if tag = strings.TrimSpace(tag); tag == "" {
		return "{"
	}
	return tag
}

// findToolCallNode walks the parse tree and returns the first {{if}} node
// whose pipeline references .ToolCalls, or nil when none exists.
func findToolCallNode(nodes []parse.Node) *parse.IfNode {
	referencesToolCalls := func(n *parse.IfNode) bool {
		for _, cmd := range n.Pipe.Cmds {
			for _, arg := range cmd.Args {
				if field, ok := arg.(*parse.FieldNode); ok && slices.Contains(field.Ident, "ToolCalls") {
					return true
				}
			}
		}
		return false
	}

	// searchBranches recurses into a node's main body and optional else body.
	searchBranches := func(list, elseList *parse.ListNode) *parse.IfNode {
		if found := findToolCallNode(list.Nodes); found != nil {
			return found
		}
		if elseList != nil {
			return findToolCallNode(elseList.Nodes)
		}
		return nil
	}

	for _, node := range nodes {
		switch n := node.(type) {
		case *parse.IfNode:
			if referencesToolCalls(n) {
				return n
			}
			if found := searchBranches(n.List, n.ElseList); found != nil {
				return found
			}
		case *parse.ListNode:
			if found := findToolCallNode(n.Nodes); found != nil {
				return found
			}
		case *parse.RangeNode:
			if found := searchBranches(n.List, n.ElseList); found != nil {
				return found
			}
		case *parse.WithNode:
			if found := searchBranches(n.List, n.ElseList); found != nil {
				return found
			}
		}
	}
	return nil
}

// findTextNode performs a depth-first search for the first non-whitespace
// text content in nodes. It deliberately stops scanning siblings after the
// first control construct ({{if}}/{{range}}/{{with}}/{{action}}) so that text
// appearing *after* the tool-call body is never picked up as the tag.
func findTextNode(nodes []parse.Node) *parse.TextNode {
	// searchBranches looks inside a construct's body, then its else body.
	searchBranches := func(list, elseList *parse.ListNode) *parse.TextNode {
		if text := findTextNode(list.Nodes); text != nil {
			return text
		}
		if elseList != nil {
			return findTextNode(elseList.Nodes)
		}
		return nil
	}

	for _, node := range nodes {
		switch n := node.(type) {
		case *parse.TextNode:
			// Whitespace-only text is formatting, not a tag; keep looking.
			if len(bytes.TrimSpace(n.Text)) > 0 {
				return n
			}
		case *parse.ListNode:
			if text := findTextNode(n.Nodes); text != nil {
				return text
			}
		case *parse.IfNode:
			return searchBranches(n.List, n.ElseList)
		case *parse.RangeNode:
			return searchBranches(n.List, n.ElseList)
		case *parse.WithNode:
			return searchBranches(n.List, n.ElseList)
		case *parse.ActionNode:
			return nil
		}
	}
	return nil
}
go
MIT
626af2d80973270c4d59b8df7153ac47ad67ed7b
2026-01-07T08:35:43.337630Z
false
ollama/ollama
https://github.com/ollama/ollama/blob/626af2d80973270c4d59b8df7153ac47ad67ed7b/app/wintray/notifyicon.go
app/wintray/notifyicon.go
//go:build windows package wintray import ( "unsafe" "golang.org/x/sys/windows" ) // Contains information that the system needs to display notifications in the notification area. // Used by Shell_NotifyIcon. // https://msdn.microsoft.com/en-us/library/windows/desktop/bb773352(v=vs.85).aspx // https://msdn.microsoft.com/en-us/library/windows/desktop/bb762159 type notifyIconData struct { Size uint32 Wnd windows.Handle ID, Flags, CallbackMessage uint32 Icon windows.Handle Tip [128]uint16 State, StateMask uint32 Info [256]uint16 // Timeout, Version uint32 Timeout uint32 InfoTitle [64]uint16 InfoFlags uint32 GuidItem windows.GUID BalloonIcon windows.Handle } func (nid *notifyIconData) add() error { const NIM_ADD = 0x00000000 res, _, err := pShellNotifyIcon.Call( uintptr(NIM_ADD), uintptr(unsafe.Pointer(nid)), ) if res == 0 { return err } return nil } func (nid *notifyIconData) modify() error { const NIM_MODIFY = 0x00000001 res, _, err := pShellNotifyIcon.Call( uintptr(NIM_MODIFY), uintptr(unsafe.Pointer(nid)), ) if res == 0 { return err } return nil } func (nid *notifyIconData) delete() error { const NIM_DELETE = 0x00000002 res, _, err := pShellNotifyIcon.Call( uintptr(NIM_DELETE), uintptr(unsafe.Pointer(nid)), ) if res == 0 { return err } return nil }
go
MIT
626af2d80973270c4d59b8df7153ac47ad67ed7b
2026-01-07T08:35:43.337630Z
false
ollama/ollama
https://github.com/ollama/ollama/blob/626af2d80973270c4d59b8df7153ac47ad67ed7b/app/wintray/tray.go
app/wintray/tray.go
//go:build windows package wintray import ( "crypto/md5" "encoding/hex" "fmt" "log/slog" "os" "path/filepath" "sort" "sync" "syscall" "unsafe" "github.com/ollama/ollama/app/assets" "golang.org/x/sys/windows" ) const ( UpdateIconName = "tray_upgrade.ico" IconName = "tray.ico" ClassName = "OllamaClass" ) func NewTray(app AppCallbacks) (TrayCallbacks, error) { updateIcon, err := assets.GetIcon(UpdateIconName) if err != nil { return nil, fmt.Errorf("failed to load icon %s: %w", UpdateIconName, err) } icon, err := assets.GetIcon(IconName) if err != nil { return nil, fmt.Errorf("failed to load icon %s: %w", IconName, err) } return InitTray(icon, updateIcon, app) } type TrayCallbacks interface { Quit() TrayRun() UpdateAvailable(ver string) error GetIconHandle() windows.Handle } type AppCallbacks interface { UIRun(path string) UIShow() UITerminate() UIRunning() bool Quit() DoUpdate() } type URLSchemeHandler interface { HandleURLScheme(urlScheme string) } // Helpful sources: https://github.com/golang/exp/blob/master/shiny/driver/internal/win32 // Contains information about loaded resources type winTray struct { instance, icon, defaultIcon, cursor, window windows.Handle loadedImages map[string]windows.Handle muLoadedImages sync.RWMutex // menus keeps track of the submenus keyed by the menu item ID, plus 0 // which corresponds to the main popup menu. menus map[uint32]windows.Handle muMenus sync.RWMutex menuOf map[uint32]windows.Handle muMenuOf sync.RWMutex // menuItemIcons maintains the bitmap of each menu item (if applies). It's // needed to show the icon correctly when showing a previously hidden menu // item again. // menuItemIcons map[uint32]windows.Handle // muMenuItemIcons sync.RWMutex visibleItems map[uint32][]uint32 muVisibleItems sync.RWMutex nid *notifyIconData muNID sync.RWMutex wcex *wndClassEx wmSystrayMessage, wmTaskbarCreated uint32 pendingUpdate bool updateNotified bool // Only pop up the notification once - TODO consider daily nag? 
normalIcon []byte updateIcon []byte // TODO clean up exit handling quitting bool app AppCallbacks } var wt winTray func InitTray(icon, updateIcon []byte, app AppCallbacks) (*winTray, error) { wt.normalIcon = icon wt.updateIcon = updateIcon wt.app = app if err := wt.initInstance(); err != nil { return nil, fmt.Errorf("Unable to init instance: %w\n", err) } if err := wt.createMenu(); err != nil { return nil, fmt.Errorf("Unable to create menu: %w\n", err) } iconFilePath, err := iconBytesToFilePath(wt.normalIcon) if err != nil { return nil, fmt.Errorf("Unable to write icon data to temp file: %w", err) } if err := wt.setIcon(iconFilePath); err != nil { return nil, fmt.Errorf("Unable to set icon: %w", err) } h, err := wt.loadIconFrom(iconFilePath) if err != nil { return nil, fmt.Errorf("Unable to set default icon: %w", err) } wt.defaultIcon = h return &wt, wt.initMenus() } func (t *winTray) initInstance() error { const ( windowName = "" ) t.wmSystrayMessage = WM_USER + 1 t.visibleItems = make(map[uint32][]uint32) t.menus = make(map[uint32]windows.Handle) t.menuOf = make(map[uint32]windows.Handle) t.loadedImages = make(map[string]windows.Handle) taskbarEventNamePtr, _ := windows.UTF16PtrFromString("TaskbarCreated") // https://msdn.microsoft.com/en-us/library/windows/desktop/ms644947 res, _, err := pRegisterWindowMessage.Call( uintptr(unsafe.Pointer(taskbarEventNamePtr)), ) if res == 0 { // success 0xc000-0xfff return fmt.Errorf("failed to register window: %w", err) } t.wmTaskbarCreated = uint32(res) instanceHandle, _, err := pGetModuleHandle.Call(0) if instanceHandle == 0 { return err } t.instance = windows.Handle(instanceHandle) // https://msdn.microsoft.com/en-us/library/windows/desktop/ms648072(v=vs.85).aspx iconHandle, _, err := pLoadIcon.Call(0, uintptr(IDI_APPLICATION)) if iconHandle == 0 { return err } t.icon = windows.Handle(iconHandle) // https://msdn.microsoft.com/en-us/library/windows/desktop/ms648391(v=vs.85).aspx cursorHandle, _, err := pLoadCursor.Call(0, 
uintptr(IDC_ARROW)) if cursorHandle == 0 { return err } t.cursor = windows.Handle(cursorHandle) classNamePtr, err := windows.UTF16PtrFromString(ClassName) if err != nil { return err } windowNamePtr, err := windows.UTF16PtrFromString(windowName) if err != nil { return err } t.wcex = &wndClassEx{ Style: CS_HREDRAW | CS_VREDRAW, WndProc: windows.NewCallback(t.wndProc), Instance: t.instance, Icon: t.icon, Cursor: t.cursor, Background: windows.Handle(6), // (COLOR_WINDOW + 1) ClassName: classNamePtr, IconSm: t.icon, } if err := t.wcex.register(); err != nil { return err } windowHandle, _, err := pCreateWindowEx.Call( uintptr(0), uintptr(unsafe.Pointer(classNamePtr)), uintptr(unsafe.Pointer(windowNamePtr)), uintptr(WS_OVERLAPPEDWINDOW), uintptr(CW_USEDEFAULT), uintptr(CW_USEDEFAULT), uintptr(CW_USEDEFAULT), uintptr(CW_USEDEFAULT), uintptr(0), uintptr(0), uintptr(t.instance), uintptr(0), ) if windowHandle == 0 { return err } t.window = windows.Handle(windowHandle) pShowWindow.Call(uintptr(t.window), uintptr(SW_HIDE)) //nolint:errcheck boolRet, _, err := pUpdateWindow.Call(uintptr(t.window)) if boolRet == 0 { slog.Error(fmt.Sprintf("failed to update window: %s", err)) } t.muNID.Lock() defer t.muNID.Unlock() t.nid = &notifyIconData{ Wnd: t.window, ID: 100, Flags: NIF_MESSAGE, CallbackMessage: t.wmSystrayMessage, } t.nid.Size = uint32(unsafe.Sizeof(*t.nid)) return t.nid.add() } func (t *winTray) createMenu() error { menuHandle, _, err := pCreatePopupMenu.Call() if menuHandle == 0 { return err } t.menus[0] = windows.Handle(menuHandle) // https://msdn.microsoft.com/en-us/library/windows/desktop/ms647575(v=vs.85).aspx mi := struct { Size, Mask, Style, Max uint32 Background windows.Handle ContextHelpID uint32 MenuData uintptr }{ Mask: MIM_APPLYTOSUBMENUS, } mi.Size = uint32(unsafe.Sizeof(mi)) res, _, err := pSetMenuInfo.Call( uintptr(t.menus[0]), uintptr(unsafe.Pointer(&mi)), ) if res == 0 { return err } return nil } // Contains information about a menu item. 
// https://msdn.microsoft.com/en-us/library/windows/desktop/ms647578(v=vs.85).aspx type menuItemInfo struct { Size, Mask, Type, State uint32 ID uint32 SubMenu, Checked, Unchecked windows.Handle ItemData uintptr TypeData *uint16 Cch uint32 BMPItem windows.Handle } func (t *winTray) addOrUpdateMenuItem(menuItemId uint32, parentId uint32, title string, disabled bool) error { titlePtr, err := windows.UTF16PtrFromString(title) if err != nil { return err } mi := menuItemInfo{ Mask: MIIM_FTYPE | MIIM_STRING | MIIM_ID | MIIM_STATE, Type: MFT_STRING, ID: menuItemId, TypeData: titlePtr, Cch: uint32(len(title)), } mi.Size = uint32(unsafe.Sizeof(mi)) if disabled { mi.State |= MFS_DISABLED } var res uintptr t.muMenus.RLock() menu := t.menus[parentId] t.muMenus.RUnlock() if t.getVisibleItemIndex(parentId, menuItemId) != -1 { // We set the menu item info based on the menuID boolRet, _, err := pSetMenuItemInfo.Call( uintptr(menu), uintptr(menuItemId), 0, uintptr(unsafe.Pointer(&mi)), ) if boolRet == 0 { return fmt.Errorf("failed to set menu item: %w", err) } } if res == 0 { // Menu item does not already exist, create it t.muMenus.RLock() submenu, exists := t.menus[menuItemId] t.muMenus.RUnlock() if exists { mi.Mask |= MIIM_SUBMENU mi.SubMenu = submenu } t.addToVisibleItems(parentId, menuItemId) position := t.getVisibleItemIndex(parentId, menuItemId) res, _, err = pInsertMenuItem.Call( uintptr(menu), uintptr(position), 1, uintptr(unsafe.Pointer(&mi)), ) if res == 0 { t.delFromVisibleItems(parentId, menuItemId) return err } t.muMenuOf.Lock() t.menuOf[menuItemId] = menu t.muMenuOf.Unlock() } return nil } func (t *winTray) addSeparatorMenuItem(menuItemId, parentId uint32) error { mi := menuItemInfo{ Mask: MIIM_FTYPE | MIIM_ID | MIIM_STATE, Type: MFT_SEPARATOR, ID: menuItemId, } mi.Size = uint32(unsafe.Sizeof(mi)) t.addToVisibleItems(parentId, menuItemId) position := t.getVisibleItemIndex(parentId, menuItemId) t.muMenus.RLock() menu := uintptr(t.menus[parentId]) t.muMenus.RUnlock() 
res, _, err := pInsertMenuItem.Call( menu, uintptr(position), 1, uintptr(unsafe.Pointer(&mi)), ) if res == 0 { return err } return nil } // func (t *winTray) hideMenuItem(menuItemId, parentId uint32) error { // const ERROR_SUCCESS syscall.Errno = 0 // t.muMenus.RLock() // menu := uintptr(t.menus[parentId]) // t.muMenus.RUnlock() // res, _, err := pRemoveMenu.Call( // menu, // uintptr(menuItemId), // MF_BYCOMMAND, // ) // if res == 0 && err.(syscall.Errno) != ERROR_SUCCESS { // return err // } // t.delFromVisibleItems(parentId, menuItemId) // return nil // } func (t *winTray) showMenu() error { p := point{} boolRet, _, err := pGetCursorPos.Call(uintptr(unsafe.Pointer(&p))) if boolRet == 0 { return err } boolRet, _, err = pSetForegroundWindow.Call(uintptr(t.window)) if boolRet == 0 { slog.Warn(fmt.Sprintf("failed to bring menu to foreground: %s", err)) } boolRet, _, err = pTrackPopupMenu.Call( uintptr(t.menus[0]), TPM_BOTTOMALIGN|TPM_LEFTALIGN|TPM_RIGHTBUTTON, uintptr(p.X), uintptr(p.Y), 0, uintptr(t.window), 0, ) if boolRet == 0 { return err } return nil } func (t *winTray) delFromVisibleItems(parent, val uint32) { t.muVisibleItems.Lock() defer t.muVisibleItems.Unlock() visibleItems := t.visibleItems[parent] for i, itemval := range visibleItems { if val == itemval { t.visibleItems[parent] = append(visibleItems[:i], visibleItems[i+1:]...) 
break } } } func (t *winTray) addToVisibleItems(parent, val uint32) { t.muVisibleItems.Lock() defer t.muVisibleItems.Unlock() if visibleItems, exists := t.visibleItems[parent]; !exists { t.visibleItems[parent] = []uint32{val} } else { newvisible := append(visibleItems, val) sort.Slice(newvisible, func(i, j int) bool { return newvisible[i] < newvisible[j] }) t.visibleItems[parent] = newvisible } } func (t *winTray) getVisibleItemIndex(parent, val uint32) int { t.muVisibleItems.RLock() defer t.muVisibleItems.RUnlock() for i, itemval := range t.visibleItems[parent] { if val == itemval { return i } } return -1 } func iconBytesToFilePath(iconBytes []byte) (string, error) { bh := md5.Sum(iconBytes) dataHash := hex.EncodeToString(bh[:]) iconFilePath := filepath.Join(os.TempDir(), "ollama_temp_icon_"+dataHash) if _, err := os.Stat(iconFilePath); os.IsNotExist(err) { if err := os.WriteFile(iconFilePath, iconBytes, 0o644); err != nil { return "", err } } return iconFilePath, nil } // Loads an image from file and shows it in tray. // Shell_NotifyIcon: https://msdn.microsoft.com/en-us/library/windows/desktop/bb762159(v=vs.85).aspx func (t *winTray) setIcon(src string) error { h, err := t.loadIconFrom(src) if err != nil { return err } t.muNID.Lock() defer t.muNID.Unlock() t.nid.Icon = h t.nid.Flags |= NIF_ICON | NIF_TIP if toolTipUTF16, err := syscall.UTF16FromString("Ollama"); err == nil { copy(t.nid.Tip[:], toolTipUTF16) } else { return err } t.nid.Size = uint32(unsafe.Sizeof(*t.nid)) return t.nid.modify() } // Loads an image from file to be shown in tray or menu item. 
// LoadImage: https://msdn.microsoft.com/en-us/library/windows/desktop/ms648045(v=vs.85).aspx func (t *winTray) loadIconFrom(src string) (windows.Handle, error) { // Save and reuse handles of loaded images t.muLoadedImages.RLock() h, ok := t.loadedImages[src] t.muLoadedImages.RUnlock() if !ok { srcPtr, err := windows.UTF16PtrFromString(src) if err != nil { return 0, err } res, _, err := pLoadImage.Call( 0, uintptr(unsafe.Pointer(srcPtr)), IMAGE_ICON, 0, 0, LR_LOADFROMFILE|LR_DEFAULTSIZE, ) if res == 0 { return 0, err } h = windows.Handle(res) t.muLoadedImages.Lock() t.loadedImages[src] = h t.muLoadedImages.Unlock() } return h, nil } func (t *winTray) GetIconHandle() windows.Handle { return t.defaultIcon } func (t *winTray) DisplayFirstUseNotification() error { t.muNID.Lock() defer t.muNID.Unlock() copy(t.nid.InfoTitle[:], windows.StringToUTF16(firstTimeTitle)) copy(t.nid.Info[:], windows.StringToUTF16(firstTimeMessage)) t.nid.Flags |= NIF_INFO t.nid.Size = uint32(unsafe.Sizeof(*wt.nid)) return t.nid.modify() }
go
MIT
626af2d80973270c4d59b8df7153ac47ad67ed7b
2026-01-07T08:35:43.337630Z
false
ollama/ollama
https://github.com/ollama/ollama/blob/626af2d80973270c4d59b8df7153ac47ad67ed7b/app/wintray/w32api.go
app/wintray/w32api.go
//go:build windows

package wintray

import (
	"runtime"

	"golang.org/x/sys/windows"
)

// Lazily-loaded system DLLs and the Win32 entry points used by the tray.
// Procs are resolved on first Call.
var (
	k32 = windows.NewLazySystemDLL("Kernel32.dll")
	u32 = windows.NewLazySystemDLL("User32.dll")
	s32 = windows.NewLazySystemDLL("Shell32.dll")

	pCreatePopupMenu       = u32.NewProc("CreatePopupMenu")
	pCreateWindowEx        = u32.NewProc("CreateWindowExW")
	pDefWindowProc         = u32.NewProc("DefWindowProcW")
	pDestroyWindow         = u32.NewProc("DestroyWindow")
	pDispatchMessage       = u32.NewProc("DispatchMessageW")
	pFindWindow            = u32.NewProc("FindWindowW")
	pGetCursorPos          = u32.NewProc("GetCursorPos")
	pGetMessage            = u32.NewProc("GetMessageW")
	pGetModuleHandle       = k32.NewProc("GetModuleHandleW")
	pInsertMenuItem        = u32.NewProc("InsertMenuItemW")
	pLoadCursor            = u32.NewProc("LoadCursorW")
	pLoadIcon              = u32.NewProc("LoadIconW")
	pLoadImage             = u32.NewProc("LoadImageW")
	pPostMessage           = u32.NewProc("PostMessageW")
	pPostQuitMessage       = u32.NewProc("PostQuitMessage")
	pRegisterClass         = u32.NewProc("RegisterClassExW")
	pRegisterWindowMessage = u32.NewProc("RegisterWindowMessageW")
	pSendMessage           = u32.NewProc("SendMessageW")
	pSetForegroundWindow   = u32.NewProc("SetForegroundWindow")
	pSetMenuInfo           = u32.NewProc("SetMenuInfo")
	pSetMenuItemInfo       = u32.NewProc("SetMenuItemInfoW")
	pShellNotifyIcon       = s32.NewProc("Shell_NotifyIconW")
	pShowWindow            = u32.NewProc("ShowWindow")
	pTrackPopupMenu        = u32.NewProc("TrackPopupMenu")
	pTranslateMessage      = u32.NewProc("TranslateMessage")
	pUnregisterClass       = u32.NewProc("UnregisterClassW")
	pUpdateWindow          = u32.NewProc("UpdateWindow")
)

// Win32 constants (mirroring winuser.h) used by the tray implementation:
// window class styles (CS_*), menu item flags (MIIM_*/MFT_*/MFS_*),
// notify-icon flags (NIF_*), popup-menu flags (TPM_*), window messages
// (WM_*), window styles (WS_*), and message-box flags (MB_*).
const (
	CS_HREDRAW          = 0x0002
	CS_VREDRAW          = 0x0001
	CW_USEDEFAULT       = 0x80000000
	IDC_ARROW           = 32512 // Standard arrow
	IDI_APPLICATION     = 32512
	IMAGE_ICON          = 1          // Loads an icon
	LR_DEFAULTSIZE      = 0x00000040 // Loads default-size icon for windows(SM_CXICON x SM_CYICON) if cx, cy are set to zero
	LR_LOADFROMFILE     = 0x00000010 // Loads the stand-alone image from the file
	MF_BYCOMMAND        = 0x00000000
	MFS_DISABLED        = 0x00000003
	MFT_SEPARATOR       = 0x00000800
	MFT_STRING          = 0x00000000
	MIIM_BITMAP         = 0x00000080
	MIIM_FTYPE          = 0x00000100
	MIIM_ID             = 0x00000002
	MIIM_STATE          = 0x00000001
	MIIM_STRING         = 0x00000040
	MIIM_SUBMENU        = 0x00000004
	MIM_APPLYTOSUBMENUS = 0x80000000
	NIF_ICON            = 0x00000002
	NIF_TIP             = 0x00000004
	NIF_INFO            = 0x00000010
	NIF_MESSAGE         = 0x00000001
	SW_HIDE             = 0
	TPM_BOTTOMALIGN     = 0x0020
	TPM_LEFTALIGN       = 0x0000
	TPM_RIGHTBUTTON     = 0x0002
	WM_CLOSE            = 0x0010
	WM_RBUTTONUP        = 0x0205
	WM_LBUTTONUP        = 0x0202
	WM_COMMAND          = 0x0111
	WM_ENDSESSION       = 0x0016
	WM_QUIT             = 0x0012
	WM_DESTROY          = 0x0002
	WM_MOUSEMOVE        = 0x0200
	WM_LBUTTONDOWN      = 0x0201
	WM_USER             = 0x0400
	WM_COPYDATA         = 0x004A
	WS_CAPTION          = 0x00C00000
	WS_MAXIMIZEBOX      = 0x00010000
	WS_MINIMIZEBOX      = 0x00020000
	WS_OVERLAPPED       = 0x00000000
	WS_OVERLAPPEDWINDOW = WS_OVERLAPPED | WS_CAPTION | WS_SYSMENU | WS_THICKFRAME | WS_MINIMIZEBOX | WS_MAXIMIZEBOX
	WS_SYSMENU          = 0x00080000
	WS_THICKFRAME       = 0x00040000
	MB_OK               = 0x00000000
	MB_ICONINFORMATION  = 0x00000040
)

// Not sure if this is actually needed on windows
// NOTE(review): presumably pins the goroutine so the message loop and the
// window it creates stay on one OS thread, as Win32 requires — confirm.
func init() {
	runtime.LockOSThread()
}

// The POINT structure defines the x- and y- coordinates of a point.
// https://msdn.microsoft.com/en-us/library/windows/desktop/dd162805(v=vs.85).aspx
type point struct {
	X, Y int32
}

// COPYDATASTRUCT contains data to be passed to another application by WM_COPYDATA
// https://docs.microsoft.com/en-us/windows/win32/api/winuser/ns-winuser-copydatastruct
type COPYDATASTRUCT struct {
	DwData uintptr
	CbData uint32
	LpData uintptr
}
go
MIT
626af2d80973270c4d59b8df7153ac47ad67ed7b
2026-01-07T08:35:43.337630Z
false
ollama/ollama
https://github.com/ollama/ollama/blob/626af2d80973270c4d59b8df7153ac47ad67ed7b/app/wintray/eventloop.go
app/wintray/eventloop.go
//go:build windows package wintray import ( "fmt" "log/slog" "sync" "unsafe" "golang.org/x/sys/windows" ) var ( quitOnce sync.Once UI_REQUEST_MSG_ID = WM_USER + 2 FOCUS_WINDOW_MSG_ID = WM_USER + 3 ) func (t *winTray) TrayRun() { // Main message pump. slog.Debug("starting event handling loop") m := &struct { WindowHandle windows.Handle Message uint32 Wparam uintptr Lparam uintptr Time uint32 Pt point LPrivate uint32 }{} for { ret, _, err := pGetMessage.Call(uintptr(unsafe.Pointer(m)), 0, 0, 0) // Ignore WM_QUIT messages from the UI window, which shouldn't exit the main app if m.Message == WM_QUIT && t.app.UIRunning() { if t.app != nil { slog.Debug("converting WM_QUIT to terminate call on webview") t.app.UITerminate() } // Drain any other WM_QUIT messages for { ret, _, err = pGetMessage.Call(uintptr(unsafe.Pointer(m)), 0, 0, 0) if m.Message != WM_QUIT { break } } } // If the function retrieves a message other than WM_QUIT, the return value is nonzero. // If the function retrieves the WM_QUIT message, the return value is zero. // If there is an error, the return value is -1 // https://msdn.microsoft.com/en-us/library/windows/desktop/ms644936(v=vs.85).aspx switch int32(ret) { case -1: slog.Error(fmt.Sprintf("get message failure: %v", err)) return case 0: // slog.Debug("XXX tray run loop exiting from handling", "message", fmt.Sprintf("0x%x", m.Message), "wParam", fmt.Sprintf("0x%x", m.Wparam), "lParam", fmt.Sprintf("0x%x", m.Lparam)) return default: pTranslateMessage.Call(uintptr(unsafe.Pointer(m))) //nolint:errcheck pDispatchMessage.Call(uintptr(unsafe.Pointer(m))) //nolint:errcheck } } } // WindowProc callback function that processes messages sent to a window. 
// https://msdn.microsoft.com/en-us/library/windows/desktop/ms633573(v=vs.85).aspx func (t *winTray) wndProc(hWnd windows.Handle, message uint32, wParam, lParam uintptr) (lResult uintptr) { // slog.Debug("XXX in winTray.wndProc", "message", fmt.Sprintf("0x%x", message), "wParam", fmt.Sprintf("0x%x", wParam), "lParam", fmt.Sprintf("0x%x", lParam)) switch message { case WM_COMMAND: menuItemId := int32(wParam) // https://docs.microsoft.com/en-us/windows/win32/menurc/wm-command#menus switch menuItemId { case quitMenuID: t.app.Quit() case updateMenuID: t.app.DoUpdate() case openUIMenuID: // UI must be initialized on this thread so don't use the callbacks t.app.UIShow() case settingsUIMenuID: // UI must be initialized on this thread so don't use the callbacks t.app.UIRun("/settings") case diagLogsMenuID: t.showLogs() default: slog.Debug(fmt.Sprintf("Unexpected menu item id: %d", menuItemId)) lResult, _, _ = pDefWindowProc.Call( uintptr(hWnd), uintptr(message), wParam, lParam, ) } case WM_CLOSE: // TODO - does this need adjusting? // slog.Debug("XXX WM_CLOSE triggered") boolRet, _, err := pDestroyWindow.Call(uintptr(t.window)) if boolRet == 0 { slog.Error(fmt.Sprintf("failed to destroy window: %s", err)) } err = t.wcex.unregister() if err != nil { slog.Error(fmt.Sprintf("failed to uregister windo %s", err)) } case WM_DESTROY: // slog.Debug("XXX WM_DESTROY triggered") // TODO - does this need adjusting? // same as WM_ENDSESSION, but throws 0 exit code after all defer pPostQuitMessage.Call(uintptr(int32(0))) //nolint:errcheck fallthrough case WM_ENDSESSION: // slog.Debug("XXX WM_ENDSESSION triggered") t.muNID.Lock() if t.nid != nil { err := t.nid.delete() if err != nil { slog.Error(fmt.Sprintf("failed to delete nid: %s", err)) } } t.muNID.Unlock() case t.wmSystrayMessage: switch lParam { case WM_MOUSEMOVE, WM_LBUTTONDOWN: // Ignore these... 
case WM_RBUTTONUP, WM_LBUTTONUP: err := t.showMenu() if err != nil { slog.Error(fmt.Sprintf("failed to show menu: %s", err)) } case 0x405: // TODO - how is this magic value derived for the notification left click if t.pendingUpdate { // TODO - revamp how detecting an update is notified to the user t.app.DoUpdate() } case 0x404: // Middle click or close notification // slog.Debug("doing nothing on close of first time notification") default: // 0x402 also seems common - what is it? slog.Debug(fmt.Sprintf("unmanaged app message, lParm: 0x%x", lParam)) lResult, _, _ = pDefWindowProc.Call( uintptr(hWnd), uintptr(message), wParam, lParam, ) } case t.wmTaskbarCreated: // on explorer.exe restarts t.muNID.Lock() err := t.nid.add() if err != nil { slog.Error(fmt.Sprintf("failed to refresh the taskbar on explorer restart: %s", err)) } t.muNID.Unlock() case uint32(UI_REQUEST_MSG_ID): // Requests for the UI must always come from the main event thread l := int(wParam) path := unsafe.String((*byte)(unsafe.Pointer(lParam)), l) //nolint:govet,gosec t.app.UIRun(path) case WM_COPYDATA: // Handle URL scheme requests from other instances if lParam != 0 { cds := (*COPYDATASTRUCT)(unsafe.Pointer(lParam)) //nolint:govet,gosec if cds.DwData == 1 { // Our identifier for URL scheme messages // Convert the data back to string data := make([]byte, cds.CbData) copy(data, (*[1 << 30]byte)(unsafe.Pointer(cds.LpData))[:cds.CbData:cds.CbData]) //nolint:govet,gosec urlScheme := string(data) handleURLSchemeRequest(urlScheme) lResult = 1 // Return non-zero to indicate success } } case uint32(FOCUS_WINDOW_MSG_ID): // Handle focus window request from another instance if t.app.UIRunning() { // If UI is already running, just show it t.app.UIShow() } else { // If UI is not running, start it t.app.UIRun("/") } lResult = 1 // Return non-zero to indicate success default: // Calls the default window procedure to provide default processing for any window messages that an application does not process. 
// https://msdn.microsoft.com/en-us/library/windows/desktop/ms633572(v=vs.85).aspx // slog.Debug("XXX passing through", "message", fmt.Sprintf("0x%x", message), "wParam", fmt.Sprintf("0x%x", wParam), "lParam", fmt.Sprintf("0x%x", lParam)) lResult, _, _ = pDefWindowProc.Call( uintptr(hWnd), uintptr(message), wParam, lParam, ) } return } func (t *winTray) Quit() { // slog.Debug("XXX in winTray.Quit") t.quitting = true quitOnce.Do(quit) } func SendUIRequestMessage(path string) { boolRet, _, err := pPostMessage.Call( uintptr(wt.window), uintptr(UI_REQUEST_MSG_ID), uintptr(len(path)), uintptr(unsafe.Pointer(unsafe.StringData(path))), ) if boolRet == 0 { slog.Error(fmt.Sprintf("failed to post UI request message %s", err)) } } func quit() { boolRet, _, err := pPostMessage.Call( uintptr(wt.window), WM_CLOSE, 0, 0, ) if boolRet == 0 { slog.Error(fmt.Sprintf("failed to post close message on shutdown %s", err)) } } // findExistingInstance attempts to find an existing Ollama instance window // Returns the window handle if found, 0 if not found func findExistingInstance() uintptr { classNamePtr, err := windows.UTF16PtrFromString(ClassName) if err != nil { slog.Error("failed to convert class name to UTF16", "error", err) return 0 } hwnd, _, _ := pFindWindow.Call( uintptr(unsafe.Pointer(classNamePtr)), 0, // window name (null = any) ) return hwnd } // CheckAndSendToExistingInstance attempts to send a URL scheme to an existing instance // Returns true if successfully sent to existing instance, false if no instance found func CheckAndSendToExistingInstance(urlScheme string) bool { hwnd := findExistingInstance() if hwnd == 0 { // No existing window found return false } data := []byte(urlScheme) cds := COPYDATASTRUCT{ DwData: 1, // 1 to identify URL scheme messages CbData: uint32(len(data)), LpData: uintptr(unsafe.Pointer(&data[0])), } result, _, err := pSendMessage.Call( hwnd, uintptr(WM_COPYDATA), 0, // wParam is handle to sending window (0 is ok) uintptr(unsafe.Pointer(&cds)), ) 
// SendMessage returns the result from the window procedure // For WM_COPYDATA, non-zero means success if result == 0 { slog.Error("failed to send URL scheme message to existing instance", "error", err) return false } return true } // handleURLSchemeRequest processes a URL scheme request func handleURLSchemeRequest(urlScheme string) { if urlScheme == "" { slog.Warn("empty URL scheme request") return } // Call the app callback to handle URL scheme requests // This will delegate to the main app logic if wt.app != nil { if urlHandler, ok := wt.app.(URLSchemeHandler); ok { urlHandler.HandleURLScheme(urlScheme) } else { slog.Warn("app does not implement URLSchemeHandler interface") } } else { slog.Warn("wt.app is nil") } } // CheckAndFocusExistingInstance attempts to find an existing instance and optionally focus it // Returns true if an existing instance was found, false otherwise func CheckAndFocusExistingInstance(shouldFocus bool) bool { hwnd := findExistingInstance() if hwnd == 0 { // No existing window found return false } if !shouldFocus { slog.Info("existing instance found, not focusing due to startHidden") return true } // Send focus message to existing instance result, _, err := pSendMessage.Call( hwnd, uintptr(FOCUS_WINDOW_MSG_ID), 0, // wParam not used 0, // lParam not used ) // SendMessage returns the result from the window procedure // For our custom message, non-zero means success if result == 0 { slog.Error("failed to send focus message to existing instance", "error", err) return false } slog.Info("sent focus request to existing instance") return true }
go
MIT
626af2d80973270c4d59b8df7153ac47ad67ed7b
2026-01-07T08:35:43.337630Z
false
ollama/ollama
https://github.com/ollama/ollama/blob/626af2d80973270c4d59b8df7153ac47ad67ed7b/app/wintray/messages.go
app/wintray/messages.go
//go:build windows

package wintray

// User-visible strings for the Windows tray: notification balloon text and
// context-menu entry titles. updateMessage is a fmt format string that takes
// the new version number.
const (
	firstTimeTitle   = "Ollama is running"
	firstTimeMessage = "Click here to get started"
	updateTitle      = "Update available"
	updateMessage    = "Ollama version %s is ready to install"

	quitMenuTitle            = "Quit Ollama"
	updateAvailableMenuTitle = "An update is available"
	updateMenuTitle          = "Restart to update"
	diagLogsMenuTitle        = "View logs"
	openUIMenuTitle          = "Open Ollama"
	settingsUIMenuTitle      = "Settings..."
)
go
MIT
626af2d80973270c4d59b8df7153ac47ad67ed7b
2026-01-07T08:35:43.337630Z
false
ollama/ollama
https://github.com/ollama/ollama/blob/626af2d80973270c4d59b8df7153ac47ad67ed7b/app/wintray/winclass.go
app/wintray/winclass.go
//go:build windows package wintray import ( "unsafe" "golang.org/x/sys/windows" ) // Contains window class information. // It is used with the RegisterClassEx and GetClassInfoEx functions. // https://msdn.microsoft.com/en-us/library/ms633577.aspx type wndClassEx struct { Size, Style uint32 WndProc uintptr ClsExtra, WndExtra int32 Instance, Icon, Cursor, Background windows.Handle MenuName, ClassName *uint16 IconSm windows.Handle } // Registers a window class for subsequent use in calls to the CreateWindow or CreateWindowEx function. // https://msdn.microsoft.com/en-us/library/ms633587.aspx func (w *wndClassEx) register() error { w.Size = uint32(unsafe.Sizeof(*w)) res, _, err := pRegisterClass.Call(uintptr(unsafe.Pointer(w))) if res == 0 { return err } return nil } // Unregisters a window class, freeing the memory required for the class. // https://msdn.microsoft.com/en-us/library/ms644899.aspx func (w *wndClassEx) unregister() error { res, _, err := pUnregisterClass.Call( uintptr(unsafe.Pointer(w.ClassName)), uintptr(w.Instance), ) if res == 0 { return err } return nil }
go
MIT
626af2d80973270c4d59b8df7153ac47ad67ed7b
2026-01-07T08:35:43.337630Z
false
ollama/ollama
https://github.com/ollama/ollama/blob/626af2d80973270c4d59b8df7153ac47ad67ed7b/app/wintray/menus.go
app/wintray/menus.go
//go:build windows package wintray import ( "fmt" "log/slog" "os" "os/exec" "path/filepath" "syscall" "unsafe" "golang.org/x/sys/windows" ) const ( _ = iota openUIMenuID settingsUIMenuID updateSeparatorMenuID updateAvailableMenuID updateMenuID separatorMenuID diagLogsMenuID diagSeparatorMenuID quitMenuID ) func (t *winTray) initMenus() error { if err := t.addOrUpdateMenuItem(openUIMenuID, 0, openUIMenuTitle, false); err != nil { return fmt.Errorf("unable to create menu entries %w", err) } if err := t.addOrUpdateMenuItem(settingsUIMenuID, 0, settingsUIMenuTitle, false); err != nil { return fmt.Errorf("unable to create menu entries %w", err) } if err := t.addOrUpdateMenuItem(diagLogsMenuID, 0, diagLogsMenuTitle, false); err != nil { return fmt.Errorf("unable to create menu entries %w\n", err) } if err := t.addSeparatorMenuItem(diagSeparatorMenuID, 0); err != nil { return fmt.Errorf("unable to create menu entries %w", err) } if err := t.addOrUpdateMenuItem(quitMenuID, 0, quitMenuTitle, false); err != nil { return fmt.Errorf("unable to create menu entries %w", err) } return nil } func (t *winTray) UpdateAvailable(ver string) error { if !t.updateNotified { slog.Debug("updating menu and sending notification for new update") if err := t.addSeparatorMenuItem(updateSeparatorMenuID, 0); err != nil { return fmt.Errorf("unable to create menu entries %w", err) } if err := t.addOrUpdateMenuItem(updateAvailableMenuID, 0, updateAvailableMenuTitle, true); err != nil { return fmt.Errorf("unable to create menu entries %w", err) } if err := t.addOrUpdateMenuItem(updateMenuID, 0, updateMenuTitle, false); err != nil { return fmt.Errorf("unable to create menu entries %w", err) } if err := t.addSeparatorMenuItem(separatorMenuID, 0); err != nil { return fmt.Errorf("unable to create menu entries %w", err) } iconFilePath, err := iconBytesToFilePath(wt.updateIcon) if err != nil { return fmt.Errorf("unable to write icon data to temp file: %w", err) } if err := wt.setIcon(iconFilePath); err != 
nil { return fmt.Errorf("unable to set icon: %w", err) } t.updateNotified = true t.pendingUpdate = true // Now pop up the notification t.muNID.Lock() defer t.muNID.Unlock() copy(t.nid.InfoTitle[:], windows.StringToUTF16(updateTitle)) copy(t.nid.Info[:], windows.StringToUTF16(fmt.Sprintf(updateMessage, ver))) t.nid.Flags |= NIF_INFO t.nid.Timeout = 10 t.nid.Size = uint32(unsafe.Sizeof(*wt.nid)) err = t.nid.modify() if err != nil { return err } } return nil } func (t *winTray) showLogs() error { localAppData := os.Getenv("LOCALAPPDATA") AppDataDir := filepath.Join(localAppData, "Ollama") cmd_path := "c:\\Windows\\system32\\cmd.exe" slog.Debug(fmt.Sprintf("viewing logs with start %s", AppDataDir)) cmd := exec.Command(cmd_path, "/c", "start", AppDataDir) cmd.SysProcAttr = &syscall.SysProcAttr{HideWindow: false, CreationFlags: 0x08000000} err := cmd.Start() if err != nil { slog.Error(fmt.Sprintf("Failed to open log dir: %s", err)) } return nil }
go
MIT
626af2d80973270c4d59b8df7153ac47ad67ed7b
2026-01-07T08:35:43.337630Z
false
ollama/ollama
https://github.com/ollama/ollama/blob/626af2d80973270c4d59b8df7153ac47ad67ed7b/app/auth/connect.go
app/auth/connect.go
//go:build windows || darwin package auth import ( "encoding/base64" "fmt" "net/url" "os" "github.com/ollama/ollama/auth" ) // BuildConnectURL generates the connect URL with the public key and device name func BuildConnectURL(baseURL string) (string, error) { pubKey, err := auth.GetPublicKey() if err != nil { return "", fmt.Errorf("failed to get public key: %w", err) } encodedKey := base64.RawURLEncoding.EncodeToString([]byte(pubKey)) hostname, _ := os.Hostname() encodedDevice := url.QueryEscape(hostname) return fmt.Sprintf("%s/connect?name=%s&key=%s&launch=true", baseURL, encodedDevice, encodedKey), nil }
go
MIT
626af2d80973270c4d59b8df7153ac47ad67ed7b
2026-01-07T08:35:43.337630Z
false
ollama/ollama
https://github.com/ollama/ollama/blob/626af2d80973270c4d59b8df7153ac47ad67ed7b/app/cmd/app/app_darwin.go
app/cmd/app/app_darwin.go
//go:build windows || darwin

package main

// #cgo CFLAGS: -x objective-c
// #cgo LDFLAGS: -framework Webkit -framework Cocoa -framework LocalAuthentication -framework ServiceManagement
// #include "app_darwin.h"
// #include "../../updater/updater_darwin.h"
// typedef const char cchar_t;
import "C"

import (
	"log/slog"
	"os"
	"os/exec"
	"path/filepath"
	"strings"
	"time"
	"unsafe"

	"github.com/ollama/ollama/app/updater"
	"github.com/ollama/ollama/app/version"
)

// ollamaPath is the location of the bundled ollama CLI: inside the .app
// bundle when running as a packaged app, otherwise next to the working
// directory (developer mode).
var ollamaPath = func() string {
	if updater.BundlePath != "" {
		return filepath.Join(updater.BundlePath, "Contents", "Resources", "ollama")
	}
	pwd, err := os.Getwd()
	if err != nil {
		slog.Warn("failed to get pwd", "error", err)
		return ""
	}
	return filepath.Join(pwd, "ollama")
}()

var (
	// isApp is true when running from a packaged .app bundle.
	isApp = updater.BundlePath != ""
	// appLogPath is where the desktop app writes its log file.
	appLogPath = filepath.Join(os.Getenv("HOME"), ".ollama", "logs", "app.log")
	// launchAgentPath is the per-user LaunchAgent plist location.
	launchAgentPath = filepath.Join(os.Getenv("HOME"), "Library", "LaunchAgents", "com.ollama.ollama.plist")
)

// StartUI starts the webview at the given URL path and applies native window
// styling. Exported to C so the Objective-C side can invoke it.
//
// TODO(jmorganca): pre-create the window and pass
// it to the webview instead of using the internal one
//
//export StartUI
func StartUI(path *C.cchar_t) {
	p := C.GoString(path)
	wv.Run(p)
	styleWindow(wv.webview.Window())
	C.setWindowDelegate(wv.webview.Window())
}

// ShowUI surfaces the app window, starting the webview at "/" if it is not
// already running.
//
//export ShowUI
func ShowUI() {
	// If webview is already running, just show the window
	if wv.IsRunning() && wv.webview != nil {
		showWindow(wv.webview.Window())
	} else {
		root := C.CString("/")
		defer C.free(unsafe.Pointer(root))
		StartUI(root)
	}
}

// StopUI tears down the webview event loop.
//
//export StopUI
func StopUI() {
	wv.Terminate()
}

// StartUpdate performs the in-place upgrade and relaunches the app.
//
//export StartUpdate
func StartUpdate() {
	if err := updater.DoUpgrade(true); err != nil {
		slog.Error("upgrade failed", "error", err)
		return
	}
	slog.Debug("launching new version...")
	// TODO - consider a timer that aborts if this takes too long and we haven't been killed yet...
	LaunchNewApp()
	// not reached if upgrade works, the new app will kill this process
}

// darwinStartHiddenTasks runs the background startup tasks on behalf of the
// native side.
//
//export darwinStartHiddenTasks
func darwinStartHiddenTasks() {
	startHiddenTasks()
}

// init handles being invoked as a Squirrel-style "ShipIt" relauncher:
// "ollama ___launch___ file://<app>" launches the given bundle, lingers
// briefly so the new instance can take over, then exits.
func init() {
	// Temporary code to mimic Squirrel ShipIt behavior
	if len(os.Args) > 2 {
		if os.Args[1] == "___launch___" {
			path := strings.TrimPrefix(os.Args[2], "file://")
			slog.Info("Ollama binary called as ShipIt - launching", "app", path)
			appName := C.CString(path)
			defer C.free(unsafe.Pointer(appName))
			C.launchApp(appName)
			slog.Info("other instance has been launched")
			time.Sleep(5 * time.Second)
			slog.Info("exiting with zero status")
			os.Exit(0)
		}
	}
}

// maybeMoveAndRestart checks whether the app should relocate itself to
// /Applications and returns the resulting appMove status (e.g. CannotMove,
// AlreadyMoved, MoveCompleted, MoveError). MoveCompleted implies the caller
// should exit so the relocated copy can take over.
func maybeMoveAndRestart() appMove {
	if updater.BundlePath == "" {
		// Typically developer mode with 'go run ./cmd/app'
		return CannotMove
	}
	// Respect users intent if they chose "keep" vs. "replace" when dragging to Applications
	if strings.HasPrefix(updater.BundlePath, strings.TrimSuffix(updater.SystemWidePath, filepath.Ext(updater.SystemWidePath))) {
		return AlreadyMoved
	}
	// Ask to move to applications directory
	status := (appMove)(C.askToMoveToApplications())
	if status == MoveCompleted {
		// Double check
		if _, err := os.Stat(updater.SystemWidePath); err != nil {
			slog.Warn("stat failure after move", "path", updater.SystemWidePath, "error", err)
			return MoveError
		}
	}
	return status
}

// handleExistingInstance handles existing instances on macOS
func handleExistingInstance(_ bool) {
	C.killOtherInstances()
}

// installSymlink makes the bundled CLI reachable from the user's PATH,
// skipping the native install step when "ollama" already resolves to the
// bundled binary. Only runs for the packaged app.
func installSymlink() {
	if !isApp {
		return
	}
	cliPath := C.CString(ollamaPath)
	defer C.free(unsafe.Pointer(cliPath))
	// Check the users path first
	cmd, _ := exec.LookPath("ollama")
	if cmd != "" {
		resolved, err := os.Readlink(cmd)
		if err == nil {
			tmp, err := filepath.Abs(resolved)
			if err == nil {
				resolved = tmp
			}
		} else {
			resolved = cmd
		}
		if resolved == ollamaPath {
			slog.Info("ollama already in users PATH", "cli", cmd)
			return
		}
	}
	code := C.installSymlink(cliPath)
	if code != 0 {
		slog.Error("Failed to install symlink")
	}
}

// UpdateAvailable tells the native menu an update is ready. ver is currently
// unused on macOS; the native side renders its own text.
func UpdateAvailable(ver string) error {
	slog.Debug("update detected, adjusting menu")
	// TODO (jmorganca): find a better check for development mode than checking the bundle path
	if updater.BundlePath != "" {
		C.updateAvailable()
	}
	return nil
}

// osRun registers the launch agent and enters the native Cocoa event loop.
func osRun(_ func(), hasCompletedFirstRun, startHidden bool) {
	registerLaunchAgent(hasCompletedFirstRun)
	// Run the native macOS app
	// Note: this will block until the app is closed
	slog.Debug("starting native darwin event loop")
	C.run(C._Bool(hasCompletedFirstRun), C._Bool(startHidden))
}

// quit asks the native app to terminate.
func quit() {
	C.quit()
}

// LaunchNewApp launches the app bundle at BundlePath via the native helper.
func LaunchNewApp() {
	appName := C.CString(updater.BundlePath)
	defer C.free(unsafe.Pointer(appName))
	C.launchApp(appName)
}

// registerLaunchAgent re-registers the app as a login item, clearing any
// stale registration first.
func registerLaunchAgent(hasCompletedFirstRun bool) {
	// Remove any stale Login Item registrations
	C.unregisterSelfFromLoginItem()
	C.registerSelfAsLoginItem(C._Bool(hasCompletedFirstRun))
}

// logStartup logs the app path and version, detecting the case where the
// executable lives in a different (sandbox-translocated) bundle than the
// system-wide install path.
func logStartup() {
	appPath := updater.BundlePath
	if appPath == updater.SystemWidePath {
		// Detect sandboxed scenario
		exe, err := os.Executable()
		if err == nil {
			p := filepath.Dir(exe)
			if filepath.Base(p) == "MacOS" {
				p = filepath.Dir(filepath.Dir(p))
				if p != appPath {
					slog.Info("starting sandboxed Ollama", "app", appPath, "sandbox", p)
					return
				}
			}
		}
	}
	slog.Info("starting Ollama", "app", appPath, "version", version.Version, "OS", updater.UserAgentOS)
}

// hideWindow hides the native window identified by ptr.
func hideWindow(ptr unsafe.Pointer) {
	C.hideWindow(C.uintptr_t(uintptr(ptr)))
}

// showWindow shows and activates the native window identified by ptr.
func showWindow(ptr unsafe.Pointer) {
	C.showWindow(C.uintptr_t(uintptr(ptr)))
}

// styleWindow applies the app's custom chrome to the native window.
func styleWindow(ptr unsafe.Pointer) {
	C.styleWindow(C.uintptr_t(uintptr(ptr)))
}

// runInBackground re-launches the bundled app with the "hidden" argument and
// blocks until it exits; any failure is fatal.
func runInBackground() {
	cmd := exec.Command(filepath.Join(updater.BundlePath, "Contents", "MacOS", "Ollama"), "hidden")
	if cmd != nil {
		err := cmd.Run()
		if err != nil {
			slog.Error("failed to run Ollama", "bundlePath", updater.BundlePath, "error", err)
			os.Exit(1)
		}
	} else {
		slog.Error("failed to start Ollama in background", "bundlePath", updater.BundlePath)
		os.Exit(1)
	}
}

// drag begins a native window drag for the window identified by ptr.
func drag(ptr unsafe.Pointer) {
	C.drag(C.uintptr_t(uintptr(ptr)))
}

// doubleClick performs the native title-bar double-click behavior (zoom).
func doubleClick(ptr unsafe.Pointer) {
	C.doubleClick(C.uintptr_t(uintptr(ptr)))
}

// handleConnectURL forwards ollama connect URL-scheme activations from the
// native side to the shared handler.
//
//export handleConnectURL
func handleConnectURL() {
	handleConnectURLScheme()
}

// checkAndHandleExistingInstance is not needed on non-Windows platforms
func checkAndHandleExistingInstance(_ string) bool {
	return false
}
go
MIT
626af2d80973270c4d59b8df7153ac47ad67ed7b
2026-01-07T08:35:43.337630Z
false
ollama/ollama
https://github.com/ollama/ollama/blob/626af2d80973270c4d59b8df7153ac47ad67ed7b/app/cmd/app/app_windows.go
app/cmd/app/app_windows.go
//go:build windows || darwin

package main

import (
	"errors"
	"fmt"
	"io"
	"log"
	"log/slog"
	"os"
	"os/exec"
	"os/signal"
	"path/filepath"
	"runtime"
	"strings"
	"syscall"
	"unsafe"

	"github.com/ollama/ollama/app/updater"
	"github.com/ollama/ollama/app/version"
	"github.com/ollama/ollama/app/wintray"

	"golang.org/x/sys/windows"
)

var (
	// Lazily-resolved User32 entry points used for window management.
	u32                  = windows.NewLazySystemDLL("User32.dll")
	pBringWindowToTop    = u32.NewProc("BringWindowToTop")
	pShowWindow          = u32.NewProc("ShowWindow")
	pSendMessage         = u32.NewProc("SendMessageA")
	pGetSystemMetrics    = u32.NewProc("GetSystemMetrics")
	pGetWindowRect       = u32.NewProc("GetWindowRect")
	pSetWindowPos        = u32.NewProc("SetWindowPos")
	pSetForegroundWindow = u32.NewProc("SetForegroundWindow")
	pSetActiveWindow     = u32.NewProc("SetActiveWindow")
	pIsIconic            = u32.NewProc("IsIconic")

	// Default install/log/shortcut locations; appPath and ollamaPath may be
	// overridden in init below.
	appPath         = filepath.Join(os.Getenv("LOCALAPPDATA"), "Programs", "Ollama")
	appLogPath      = filepath.Join(os.Getenv("LOCALAPPDATA"), "Ollama", "app.log")
	startupShortcut = filepath.Join(os.Getenv("APPDATA"), "Microsoft", "Windows", "Start Menu", "Programs", "Startup", "Ollama.lnk")
	ollamaPath      string
	DesktopAppName  = "ollama app.exe"
)

// init resolves appPath/ollamaPath relative to the running executable, and
// falls back to dist/windows-<arch> when run via `go run ./cmd/app`.
func init() {
	// With alternate install location use executable location
	exe, err := os.Executable()
	if err != nil {
		slog.Warn("error discovering executable directory", "error", err)
	} else {
		appPath = filepath.Dir(exe)
	}
	ollamaPath = filepath.Join(appPath, "ollama.exe")
	// Handle developer mode (go run ./cmd/app)
	if _, err := os.Stat(ollamaPath); err != nil {
		pwd, err := os.Getwd()
		if err != nil {
			slog.Warn("missing ollama.exe and failed to get pwd", "error", err)
			return
		}
		distAppPath := filepath.Join(pwd, "dist", "windows-"+runtime.GOARCH)
		distOllamaPath := filepath.Join(distAppPath, "ollama.exe")
		if _, err := os.Stat(distOllamaPath); err == nil {
			slog.Info("detected developer mode")
			appPath = distAppPath
			ollamaPath = distOllamaPath
		}
	}
}

// maybeMoveAndRestart is a no-op on Windows (relocation is a macOS concern).
func maybeMoveAndRestart() appMove { return 0 }

// handleExistingInstance checks for existing instances and optionally focuses them,
// exiting this process when another instance is already running.
func handleExistingInstance(startHidden bool) {
	if wintray.CheckAndFocusExistingInstance(!startHidden) {
		slog.Info("existing instance found, exiting")
		os.Exit(0)
	}
}

// installSymlink is a no-op on Windows; the installer manages PATH.
func installSymlink() {}

// appCallbacks wires the tray, webview, and server shutdown together and
// satisfies the callback interfaces wintray expects.
type appCallbacks struct {
	t        wintray.TrayCallbacks
	shutdown func()
}

var app = &appCallbacks{}

// UIRun starts the webview at the given URL path.
func (ac *appCallbacks) UIRun(path string) {
	wv.Run(path)
}

// UIShow surfaces the window, starting the webview at "/" if needed.
func (*appCallbacks) UIShow() {
	if wv.webview != nil {
		showWindow(wv.webview.Window())
	} else {
		wv.Run("/")
	}
}

// UITerminate tears down the webview event loop.
func (*appCallbacks) UITerminate() {
	wv.Terminate()
}

// UIRunning reports whether the webview event loop is active.
func (*appCallbacks) UIRunning() bool {
	return wv.IsRunning()
}

// Quit stops both the tray and the webview.
func (app *appCallbacks) Quit() {
	app.t.Quit()
	wv.Terminate()
}

// TODO - reconcile with above for consistency between mac/windows
func quit() {
	wv.Terminate()
}

// DoUpdate drains the server then performs the in-place upgrade.
func (app *appCallbacks) DoUpdate() {
	// Safeguard in case we have requests in flight that need to drain...
	slog.Info("Waiting for server to shutdown")
	app.shutdown()
	if err := updater.DoUpgrade(true); err != nil {
		slog.Warn(fmt.Sprintf("upgrade attempt failed: %s", err))
	}
}

// HandleURLScheme implements the URLSchemeHandler interface
func (app *appCallbacks) HandleURLScheme(urlScheme string) {
	handleURLSchemeRequest(urlScheme)
}

// handleURLSchemeRequest processes URL scheme requests from other instances
func handleURLSchemeRequest(urlScheme string) {
	isConnect, err := parseURLScheme(urlScheme)
	if err != nil {
		slog.Error("failed to parse URL scheme request", "url", urlScheme, "error", err)
		return
	}
	if isConnect {
		handleConnectURLScheme()
	} else {
		if wv.webview != nil {
			showWindow(wv.webview.Window())
		}
	}
}

// UpdateAvailable forwards the new-version notification to the tray.
func UpdateAvailable(ver string) error {
	return app.t.UpdateAvailable(ver)
}

// osRun creates the tray, installs signal handling, optionally shows the UI,
// and then blocks in the tray event loop. UI work stays on this (main)
// thread because the Win32 message loop requires it.
func osRun(shutdown func(), hasCompletedFirstRun, startHidden bool) {
	var err error
	app.shutdown = shutdown
	app.t, err = wintray.NewTray(app)
	if err != nil {
		log.Fatalf("Failed to start: %s", err)
	}
	signals := make(chan os.Signal, 1)
	signal.Notify(signals, syscall.SIGINT, syscall.SIGTERM)
	// TODO - can this be generalized?
	go func() {
		<-signals
		slog.Debug("shutting down due to signal")
		app.t.Quit()
		wv.Terminate()
	}()

	// On windows, we run the final tasks in the main thread
	// before starting the tray event loop. These final tasks
	// may trigger the UI, and must do that from the main thread.

	if !startHidden {
		// Determine if the process was started from a shortcut
		// ~\AppData\Roaming\Microsoft\Windows\Start Menu\Programs\Startup\Ollama
		const STARTF_TITLEISLINKNAME = 0x00000800
		var info windows.StartupInfo
		if err := windows.GetStartupInfo(&info); err != nil {
			slog.Debug("unable to retrieve startup info", "error", err)
		} else if info.Flags&STARTF_TITLEISLINKNAME == STARTF_TITLEISLINKNAME {
			linkPath := windows.UTF16PtrToString(info.Title)
			// Launched from the Startup folder: behave as if startHidden.
			if strings.Contains(linkPath, "Startup") {
				startHidden = true
			}
		}
	}
	if startHidden {
		startHiddenTasks()
	} else {
		ptr := wv.Run("/")
		// Set the window icon using the tray icon
		if ptr != nil {
			iconHandle := app.t.GetIconHandle()
			if iconHandle != 0 {
				hwnd := uintptr(ptr)
				const ICON_SMALL = 0
				const ICON_BIG = 1
				const WM_SETICON = 0x0080
				pSendMessage.Call(hwnd, uintptr(WM_SETICON), uintptr(ICON_SMALL), uintptr(iconHandle))
				pSendMessage.Call(hwnd, uintptr(WM_SETICON), uintptr(ICON_BIG), uintptr(iconHandle))
			}
		}
		centerWindow(ptr)
	}

	if !hasCompletedFirstRun {
		// Only create the login shortcut on first start
		// so we can respect users deletion of the link
		err = createLoginShortcut()
		if err != nil {
			slog.Warn("unable to create login shortcut", "error", err)
		}
	}

	app.t.TrayRun() // This will block the main thread
}

// createLoginShortcut copies the installer-provided .lnk into the user's
// Startup folder if no shortcut exists there yet; existing shortcuts (or
// deliberate deletions, via the caller's first-run gate) are respected.
func createLoginShortcut() error {
	// The installer lays down a shortcut for us so we can copy it without
	// having to resort to calling COM APIs to establish the shortcut
	shortcutOrigin := filepath.Join(appPath, "lib", "Ollama.lnk")
	_, err := os.Stat(startupShortcut)
	if err != nil {
		if errors.Is(err, os.ErrNotExist) {
			in, err := os.Open(shortcutOrigin)
			if err != nil {
				return fmt.Errorf("unable to open shortcut %s : %w", shortcutOrigin, err)
			}
			defer in.Close()
			out, err := os.Create(startupShortcut)
			if err != nil {
				return fmt.Errorf("unable to open startup link %s : %w", startupShortcut, err)
			}
			defer out.Close()
			_, err = io.Copy(out, in)
			if err != nil {
				return fmt.Errorf("unable to copy shortcut %s : %w", startupShortcut, err)
			}
			err = out.Sync()
			if err != nil {
				return fmt.Errorf("unable to sync shortcut %s : %w", startupShortcut, err)
			}
			slog.Info("Created Startup shortcut", "shortcut", startupShortcut)
		} else {
			slog.Warn("unexpected error looking up Startup shortcut", "error", err)
		}
	} else {
		slog.Debug("Startup link already exists", "shortcut", startupShortcut)
	}
	return nil
}

// LaunchNewApp is a no-op on Windows; the updater relaunches the app.
func LaunchNewApp() {
}

// logStartup logs the app path, version, and OS at startup.
func logStartup() {
	slog.Info("starting Ollama", "app", appPath, "version", version.Version, "OS", updater.UserAgentOS)
}

// Win32 constants used with the User32 calls above; values are from the
// Windows SDK headers.
const (
	SW_HIDE        = 0  // Hides the window
	SW_SHOW        = 5  // Shows window in its current size/position
	SW_SHOWNA      = 8  // Shows without activating
	SW_MINIMIZE    = 6  // Minimizes the window
	SW_RESTORE     = 9  // Restores to previous size/position
	SW_SHOWDEFAULT = 10 // Sets show state based on program state

	SM_CXSCREEN = 0
	SM_CYSCREEN = 1

	HWND_TOP       = 0
	SWP_NOSIZE     = 0x0001
	SWP_NOMOVE     = 0x0002
	SWP_NOZORDER   = 0x0004
	SWP_SHOWWINDOW = 0x0040

	// Menu constants
	MF_STRING     = 0x00000000
	MF_SEPARATOR  = 0x00000800
	MF_GRAYED     = 0x00000001
	TPM_RETURNCMD = 0x0100
)

// POINT structure for cursor position
type POINT struct {
	X int32
	Y int32
}

// Rect structure for GetWindowRect
type Rect struct {
	Left   int32
	Top    int32
	Right  int32
	Bottom int32
}

// centerWindow centers the window on the primary monitor, clamping the
// top-left corner to on-screen coordinates.
func centerWindow(ptr unsafe.Pointer) {
	hwnd := uintptr(ptr)
	if hwnd == 0 {
		return
	}
	var rect Rect
	pGetWindowRect.Call(hwnd, uintptr(unsafe.Pointer(&rect)))
	screenWidth, _, _ := pGetSystemMetrics.Call(uintptr(SM_CXSCREEN))
	screenHeight, _, _ := pGetSystemMetrics.Call(uintptr(SM_CYSCREEN))
	windowWidth := rect.Right - rect.Left
	windowHeight := rect.Bottom - rect.Top
	x := (int32(screenWidth) - windowWidth) / 2
	y := (int32(screenHeight) - windowHeight) / 2
	// Ensure the window is not positioned off-screen
	if x < 0 {
		x = 0
	}
	if y < 0 {
		y = 0
	}
	pSetWindowPos.Call(
		hwnd,
		uintptr(HWND_TOP),
		uintptr(x),
		uintptr(y),
		uintptr(windowWidth),  // Keep original width
		uintptr(windowHeight), // Keep original height
		uintptr(SWP_SHOWWINDOW),
	)
}

// showWindow restores, shows, and foregrounds the window, stamping the tray
// icon onto it first.
func showWindow(ptr unsafe.Pointer) {
	hwnd := uintptr(ptr)
	if hwnd != 0 {
		iconHandle := app.t.GetIconHandle()
		if iconHandle != 0 {
			const ICON_SMALL = 0
			const ICON_BIG = 1
			const WM_SETICON = 0x0080
			pSendMessage.Call(hwnd, uintptr(WM_SETICON), uintptr(ICON_SMALL), uintptr(iconHandle))
			pSendMessage.Call(hwnd, uintptr(WM_SETICON), uintptr(ICON_BIG), uintptr(iconHandle))
		}
		// Check if window is minimized
		isMinimized, _, _ := pIsIconic.Call(hwnd)
		if isMinimized != 0 {
			// Restore the window if it's minimized
			pShowWindow.Call(hwnd, uintptr(SW_RESTORE))
		}
		// Show the window
		pShowWindow.Call(hwnd, uintptr(SW_SHOW))
		// Bring window to top
		pBringWindowToTop.Call(hwnd)
		// Force window to foreground
		pSetForegroundWindow.Call(hwnd)
		// Make it the active window
		pSetActiveWindow.Call(hwnd)
		// Ensure window is positioned on top
		pSetWindowPos.Call(
			hwnd,
			uintptr(HWND_TOP),
			0, 0, 0, 0,
			uintptr(SWP_NOSIZE|SWP_NOMOVE|SWP_SHOWWINDOW),
		)
	}
}

// HideWindow hides the application window
func hideWindow(ptr unsafe.Pointer) {
	hwnd := uintptr(ptr)
	if hwnd != 0 {
		pShowWindow.Call(
			hwnd,
			uintptr(SW_HIDE),
		)
	}
}

// runInBackground re-launches this executable with the "hidden" argument and
// blocks until it exits; any failure is fatal.
func runInBackground() {
	exe, err := os.Executable()
	if err != nil {
		slog.Error("failed to get executable path", "error", err)
		os.Exit(1)
	}
	cmd := exec.Command(exe, "hidden")
	if cmd != nil {
		err = cmd.Run()
		if err != nil {
			slog.Error("failed to run Ollama", "exe", exe, "error", err)
			os.Exit(1)
		}
	} else {
		slog.Error("failed to start Ollama", "exe", exe)
		os.Exit(1)
	}
}

// drag is a no-op on Windows; the OS handles title-bar dragging.
func drag(ptr unsafe.Pointer) {}

// doubleClick is a no-op on Windows; the OS handles title-bar double-click.
func doubleClick(ptr unsafe.Pointer) {}

// checkAndHandleExistingInstance checks if another instance is running and sends the URL to it,
// exiting this process when the hand-off succeeds.
func checkAndHandleExistingInstance(urlSchemeRequest string) bool {
	if urlSchemeRequest == "" {
		return false
	}
	// Try to send URL to existing instance using wintray messaging
	if wintray.CheckAndSendToExistingInstance(urlSchemeRequest) {
		os.Exit(0)
		return true
	}
	// No existing instance, we'll handle it ourselves
	return false
}
go
MIT
626af2d80973270c4d59b8df7153ac47ad67ed7b
2026-01-07T08:35:43.337630Z
false
ollama/ollama
https://github.com/ollama/ollama/blob/626af2d80973270c4d59b8df7153ac47ad67ed7b/app/cmd/app/webview.go
app/cmd/app/webview.go
//go:build windows || darwin package main // #include "menu.h" import "C" import ( "encoding/base64" "encoding/json" "fmt" "log/slog" "net/http" "os" "path/filepath" "runtime" "strings" "sync" "time" "unsafe" "github.com/ollama/ollama/app/dialog" "github.com/ollama/ollama/app/store" "github.com/ollama/ollama/app/webview" ) type Webview struct { port int token string webview webview.WebView mutex sync.Mutex Store *store.Store } // Run initializes the webview and starts its event loop. // Note: this must be called from the primary app thread // This returns the OS native window handle to the caller func (w *Webview) Run(path string) unsafe.Pointer { var url string if devMode { // In development mode, use the local dev server url = fmt.Sprintf("http://localhost:5173%s", path) } else { url = fmt.Sprintf("http://127.0.0.1:%d%s", w.port, path) } w.mutex.Lock() defer w.mutex.Unlock() if w.webview == nil { // Note: turning on debug on macos throws errors but is marginally functional for debugging // TODO (jmorganca): we should pre-create the window and then provide it here to // webview so we can hide it from the start and make other modifications wv := webview.New(debug) // start the window hidden hideWindow(wv.Window()) wv.SetTitle("Ollama") // TODO (jmorganca): this isn't working yet since it needs to be set // on the first page load, ideally in an interstitial page like `/token` // that exists only to set the cookie and redirect to / // wv.Init(fmt.Sprintf(`document.cookie = "token=%s; path=/"`, w.token)) init := ` // Disable reload document.addEventListener('keydown', function(e) { if ((e.ctrlKey || e.metaKey) && e.key === 'r') { e.preventDefault(); return false; } }); // Prevent back/forward navigation window.addEventListener('popstate', function(e) { e.preventDefault(); history.pushState(null, '', window.location.pathname); return false; }); // Clear history on load window.addEventListener('load', function() { history.pushState(null, '', window.location.pathname); 
window.history.replaceState(null, '', window.location.pathname); }); // Set token cookie document.cookie = "token=` + w.token + `; path=/"; ` // Windows-specific scrollbar styling if runtime.GOOS == "windows" { init += ` // Fix scrollbar styling for Edge WebView2 on Windows only function updateScrollbarStyles() { const isDark = window.matchMedia('(prefers-color-scheme: dark)').matches; const existingStyle = document.getElementById('scrollbar-style'); if (existingStyle) existingStyle.remove(); const style = document.createElement('style'); style.id = 'scrollbar-style'; if (isDark) { style.textContent = ` + "`" + ` ::-webkit-scrollbar { width: 6px !important; height: 6px !important; } ::-webkit-scrollbar-track { background: #1a1a1a !important; } ::-webkit-scrollbar-thumb { background: #404040 !important; border-radius: 6px !important; } ::-webkit-scrollbar-thumb:hover { background: #505050 !important; } ::-webkit-scrollbar-corner { background: #1a1a1a !important; } ::-webkit-scrollbar-button { background: transparent !important; border: none !important; width: 0px !important; height: 0px !important; margin: 0 !important; padding: 0 !important; } ::-webkit-scrollbar-button:vertical:start:decrement { background: transparent !important; height: 0px !important; } ::-webkit-scrollbar-button:vertical:end:increment { background: transparent !important; height: 0px !important; } ::-webkit-scrollbar-button:horizontal:start:decrement { background: transparent !important; width: 0px !important; } ::-webkit-scrollbar-button:horizontal:end:increment { background: transparent !important; width: 0px !important; } ` + "`" + `; } else { style.textContent = ` + "`" + ` ::-webkit-scrollbar { width: 6px !important; height: 6px !important; } ::-webkit-scrollbar-track { background: #f0f0f0 !important; } ::-webkit-scrollbar-thumb { background: #c0c0c0 !important; border-radius: 6px !important; } ::-webkit-scrollbar-thumb:hover { background: #a0a0a0 !important; } ::-webkit-scrollbar-corner 
{ background: #f0f0f0 !important; } ::-webkit-scrollbar-button { background: transparent !important; border: none !important; width: 0px !important; height: 0px !important; margin: 0 !important; padding: 0 !important; } ::-webkit-scrollbar-button:vertical:start:decrement { background: transparent !important; height: 0px !important; } ::-webkit-scrollbar-button:vertical:end:increment { background: transparent !important; height: 0px !important; } ::-webkit-scrollbar-button:horizontal:start:decrement { background: transparent !important; width: 0px !important; } ::-webkit-scrollbar-button:horizontal:end:increment { background: transparent !important; width: 0px !important; } ` + "`" + `; } document.head.appendChild(style); } window.addEventListener('load', updateScrollbarStyles); window.matchMedia('(prefers-color-scheme: dark)').addEventListener('change', updateScrollbarStyles); ` } // on windows make ctrl+n open new chat // TODO (jmorganca): later we should use proper accelerators // once we introduce a native menu for the window // this is only used on windows since macOS uses the proper accelerators if runtime.GOOS == "windows" { init += ` document.addEventListener('keydown', function(e) { if ((e.ctrlKey || e.metaKey) && e.key === 'n') { e.preventDefault(); // Use the existing navigation method history.pushState({}, '', '/c/new'); window.dispatchEvent(new PopStateEvent('popstate')); return false; } }); ` } init += ` window.OLLAMA_WEBSEARCH = true; ` wv.Init(init) // Add keyboard handler for zoom wv.Init(` window.addEventListener('keydown', function(e) { // CMD/Ctrl + Plus/Equals (zoom in) if ((e.metaKey || e.ctrlKey) && (e.key === '+' || e.key === '=')) { e.preventDefault(); window.zoomIn && window.zoomIn(); return false; } // CMD/Ctrl + Minus (zoom out) if ((e.metaKey || e.ctrlKey) && e.key === '-') { e.preventDefault(); window.zoomOut && window.zoomOut(); return false; } // CMD/Ctrl + 0 (reset zoom) if ((e.metaKey || e.ctrlKey) && e.key === '0') { 
e.preventDefault(); window.zoomReset && window.zoomReset(); return false; } }, true); `) wv.Bind("zoomIn", func() { current := wv.GetZoom() wv.SetZoom(current + 0.1) }) wv.Bind("zoomOut", func() { current := wv.GetZoom() wv.SetZoom(current - 0.1) }) wv.Bind("zoomReset", func() { wv.SetZoom(1.0) }) wv.Bind("ready", func() { showWindow(wv.Window()) }) wv.Bind("close", func() { hideWindow(wv.Window()) }) // Webviews do not allow access to the file system by default, so we need to // bind file system operations here wv.Bind("selectModelsDirectory", func() { go func() { // Helper function to call the JavaScript callback with data or null callCallback := func(data interface{}) { dataJSON, _ := json.Marshal(data) wv.Dispatch(func() { wv.Eval(fmt.Sprintf("window.__selectModelsDirectoryCallback && window.__selectModelsDirectoryCallback(%s)", dataJSON)) }) } directory, err := dialog.Directory().Title("Select Model Directory").ShowHidden(true).Browse() if err != nil { slog.Debug("Directory selection cancelled or failed", "error", err) callCallback(nil) return } slog.Debug("Directory selected", "path", directory) callCallback(directory) }() }) // Bind selectFiles function for selecting multiple files at once wv.Bind("selectFiles", func() { go func() { // Helper function to call the JavaScript callback with data or null callCallback := func(data interface{}) { dataJSON, _ := json.Marshal(data) wv.Dispatch(func() { wv.Eval(fmt.Sprintf("window.__selectFilesCallback && window.__selectFilesCallback(%s)", dataJSON)) }) } // Define allowed extensions for native dialog filtering textExts := []string{ "pdf", "docx", "txt", "md", "csv", "json", "xml", "html", "htm", "js", "jsx", "ts", "tsx", "py", "java", "cpp", "c", "cc", "h", "cs", "php", "rb", "go", "rs", "swift", "kt", "scala", "sh", "bat", "yaml", "yml", "toml", "ini", "cfg", "conf", "log", "rtf", } imageExts := []string{"png", "jpg", "jpeg", "webp"} allowedExts := append(textExts, imageExts...) 
// Use native multiple file selection with extension filtering filenames, err := dialog.File(). Filter("Supported Files", allowedExts...). Title("Select Files"). LoadMultiple() if err != nil { slog.Debug("Multiple file selection cancelled or failed", "error", err) callCallback(nil) return } if len(filenames) == 0 { callCallback(nil) return } var files []map[string]string maxFileSize := int64(10 * 1024 * 1024) // 10MB for _, filename := range filenames { // Check file extension (double-check after native dialog filtering) ext := strings.ToLower(strings.TrimPrefix(filepath.Ext(filename), ".")) validExt := false for _, allowedExt := range allowedExts { if ext == allowedExt { validExt = true break } } if !validExt { slog.Warn("file extension not allowed, skipping", "filename", filepath.Base(filename), "extension", ext) continue } // Check file size before reading (pre-filter large files) fileStat, err := os.Stat(filename) if err != nil { slog.Error("failed to get file info", "error", err, "filename", filename) continue } if fileStat.Size() > maxFileSize { slog.Warn("file too large, skipping", "filename", filepath.Base(filename), "size", fileStat.Size()) continue } fileBytes, err := os.ReadFile(filename) if err != nil { slog.Error("failed to read file", "error", err, "filename", filename) continue } mimeType := http.DetectContentType(fileBytes) dataURL := fmt.Sprintf("data:%s;base64,%s", mimeType, base64.StdEncoding.EncodeToString(fileBytes)) fileResult := map[string]string{ "filename": filepath.Base(filename), "path": filename, "dataURL": dataURL, } files = append(files, fileResult) } if len(files) == 0 { callCallback(nil) } else { callCallback(files) } }() }) wv.Bind("drag", func() { wv.Dispatch(func() { drag(wv.Window()) }) }) wv.Bind("doubleClick", func() { wv.Dispatch(func() { doubleClick(wv.Window()) }) }) // Add binding for working directory selection wv.Bind("selectWorkingDirectory", func() { go func() { // Helper function to call the JavaScript callback with 
data or null callCallback := func(data interface{}) { dataJSON, _ := json.Marshal(data) wv.Dispatch(func() { wv.Eval(fmt.Sprintf("window.__selectWorkingDirectoryCallback && window.__selectWorkingDirectoryCallback(%s)", dataJSON)) }) } directory, err := dialog.Directory().Title("Select Working Directory").ShowHidden(true).Browse() if err != nil { slog.Debug("Directory selection cancelled or failed", "error", err) callCallback(nil) return } slog.Debug("Directory selected", "path", directory) callCallback(directory) }() }) wv.Bind("setContextMenuItems", func(items []map[string]interface{}) error { menuMutex.Lock() defer menuMutex.Unlock() if len(menuItems) > 0 { pinner.Unpin() } menuItems = nil for _, item := range items { menuItem := C.menuItem{ label: C.CString(item["label"].(string)), enabled: 0, separator: 0, } if item["enabled"] != nil { menuItem.enabled = 1 } if item["separator"] != nil { menuItem.separator = 1 } menuItems = append(menuItems, menuItem) } return nil }) // Debounce resize events var resizeTimer *time.Timer var resizeMutex sync.Mutex wv.Bind("resize", func(width, height int) { if w.Store != nil { resizeMutex.Lock() if resizeTimer != nil { resizeTimer.Stop() } resizeTimer = time.AfterFunc(100*time.Millisecond, func() { err := w.Store.SetWindowSize(width, height) if err != nil { slog.Error("failed to set window size", "error", err) } }) resizeMutex.Unlock() } }) // On Darwin, we can't have 2 threads both running global event loops // but on Windows, the event loops are tied to the window, so we're // able to run in both the tray and webview if runtime.GOOS != "darwin" { slog.Debug("starting webview event loop") go func() { wv.Run() slog.Debug("webview event loop exited") }() } if w.Store != nil { width, height, err := w.Store.WindowSize() if err != nil { slog.Error("failed to get window size", "error", err) } if width > 0 && height > 0 { wv.SetSize(width, height, webview.HintNone) } else { wv.SetSize(800, 600, webview.HintNone) } } wv.SetSize(800, 
600, webview.HintMin) w.webview = wv w.webview.Navigate(url) } else { w.webview.Eval(fmt.Sprintf(` history.pushState({}, '', '%s'); `, path)) showWindow(w.webview.Window()) } return w.webview.Window() } func (w *Webview) Terminate() { w.mutex.Lock() if w.webview == nil { w.mutex.Unlock() return } wv := w.webview w.webview = nil w.mutex.Unlock() wv.Terminate() wv.Destroy() } func (w *Webview) IsRunning() bool { w.mutex.Lock() defer w.mutex.Unlock() return w.webview != nil } var ( menuItems []C.menuItem menuMutex sync.RWMutex pinner runtime.Pinner ) //export menu_get_item_count func menu_get_item_count() C.int { menuMutex.RLock() defer menuMutex.RUnlock() return C.int(len(menuItems)) } //export menu_get_items func menu_get_items() unsafe.Pointer { menuMutex.RLock() defer menuMutex.RUnlock() if len(menuItems) == 0 { return nil } // Return pointer to the slice data pinner.Pin(&menuItems[0]) return unsafe.Pointer(&menuItems[0]) } //export menu_handle_selection func menu_handle_selection(item *C.char) { wv.webview.Eval(fmt.Sprintf("window.handleContextMenuResult('%s')", C.GoString(item))) }
go
MIT
626af2d80973270c4d59b8df7153ac47ad67ed7b
2026-01-07T08:35:43.337630Z
false
ollama/ollama
https://github.com/ollama/ollama/blob/626af2d80973270c4d59b8df7153ac47ad67ed7b/app/cmd/app/app.go
app/cmd/app/app.go
//go:build windows || darwin package main import ( "context" "encoding/json" "errors" "fmt" "io" "log/slog" "net" "net/http" "net/url" "os" "os/exec" "os/signal" "path/filepath" "runtime" "strings" "syscall" "time" "github.com/google/uuid" "github.com/ollama/ollama/app/auth" "github.com/ollama/ollama/app/logrotate" "github.com/ollama/ollama/app/server" "github.com/ollama/ollama/app/store" "github.com/ollama/ollama/app/tools" "github.com/ollama/ollama/app/ui" "github.com/ollama/ollama/app/updater" "github.com/ollama/ollama/app/version" ) var ( wv = &Webview{} uiServerPort int ) var debug = strings.EqualFold(os.Getenv("OLLAMA_DEBUG"), "true") || os.Getenv("OLLAMA_DEBUG") == "1" var ( fastStartup = false devMode = false ) type appMove int const ( CannotMove appMove = iota UserDeclinedMove MoveCompleted AlreadyMoved LoginSession PermissionDenied MoveError ) func main() { startHidden := false var urlSchemeRequest string if len(os.Args) > 1 { for _, arg := range os.Args { // Handle URL scheme requests (Windows) if strings.HasPrefix(arg, "ollama://") { urlSchemeRequest = arg slog.Info("received URL scheme request", "url", arg) continue } switch arg { case "serve": fmt.Fprintln(os.Stderr, "serve command not supported, use ollama") os.Exit(1) case "version", "-v", "--version": fmt.Println(version.Version) os.Exit(0) case "background": // When running the process in this "background" mode, we spawn a // child process for the main app. This is necessary so the // "Allow in the Background" setting in MacOS can be unchecked // without breaking the main app. Two copies of the app are // present in the bundle, one for the main app and one for the // background initiator. 
fmt.Fprintln(os.Stdout, "starting in background") runInBackground() os.Exit(0) case "hidden", "-j", "--hide": // startHidden suppresses the UI on startup, and can be triggered multiple ways // On windows, path based via login startup detection // On MacOS via [NSApp isHidden] from `open -j -a /Applications/Ollama.app` or equivalent // On both via the "hidden" command line argument startHidden = true case "--fast-startup": // Skip optional steps like pending updates to start quickly for immediate use fastStartup = true case "-dev", "--dev": // Development mode: use local dev server and enable CORS devMode = true } } } level := slog.LevelInfo if debug { level = slog.LevelDebug } logrotate.Rotate(appLogPath) if _, err := os.Stat(filepath.Dir(appLogPath)); errors.Is(err, os.ErrNotExist) { if err := os.MkdirAll(filepath.Dir(appLogPath), 0o755); err != nil { slog.Error(fmt.Sprintf("failed to create server log dir %v", err)) return } } var logFile io.Writer var err error logFile, err = os.OpenFile(appLogPath, os.O_APPEND|os.O_WRONLY|os.O_CREATE, 0o755) if err != nil { slog.Error(fmt.Sprintf("failed to create server log %v", err)) return } // Detect if we're a GUI app on windows, and if not, send logs to console as well if os.Stderr.Fd() != 0 { // Console app detected logFile = io.MultiWriter(os.Stderr, logFile) } handler := slog.NewTextHandler(logFile, &slog.HandlerOptions{ Level: level, AddSource: true, ReplaceAttr: func(_ []string, attr slog.Attr) slog.Attr { if attr.Key == slog.SourceKey { source := attr.Value.Any().(*slog.Source) source.File = filepath.Base(source.File) } return attr }, }) slog.SetDefault(slog.New(handler)) logStartup() // On Windows, check if another instance is running and send URL to it // Do this after logging is set up so we can debug issues if runtime.GOOS == "windows" && urlSchemeRequest != "" { slog.Debug("checking for existing instance", "url", urlSchemeRequest) if checkAndHandleExistingInstance(urlSchemeRequest) { // The function will exit 
if it successfully sends to another instance // If we reach here, we're the first/only instance } else { // No existing instance found, handle the URL scheme in this instance go func() { handleURLSchemeInCurrentInstance(urlSchemeRequest) }() } } if u := os.Getenv("OLLAMA_UPDATE_URL"); u != "" { updater.UpdateCheckURLBase = u } // Detect if this is a first start after an upgrade, in // which case we need to do some cleanup var skipMove bool if _, err := os.Stat(updater.UpgradeMarkerFile); err == nil { slog.Debug("first start after upgrade") err = updater.DoPostUpgradeCleanup() if err != nil { slog.Error("failed to cleanup prior version", "error", err) } // We never prompt to move the app after an upgrade skipMove = true // Start hidden after updates to prevent UI from opening automatically startHidden = true } if !skipMove && !fastStartup { if maybeMoveAndRestart() == MoveCompleted { return } } // Check if another instance is already running // On Windows, focus the existing instance; on other platforms, kill it handleExistingInstance(startHidden) // on macOS, offer the user to create a symlink // from /usr/local/bin/ollama to the app bundle installSymlink() var ln net.Listener if devMode { // Use a fixed port in dev mode for predictable API access ln, err = net.Listen("tcp", "127.0.0.1:3001") } else { ln, err = net.Listen("tcp", "127.0.0.1:0") } if err != nil { slog.Error("failed to find available port", "error", err) return } port := ln.Addr().(*net.TCPAddr).Port token := uuid.NewString() wv.port = port wv.token = token uiServerPort = port st := &store.Store{} // Enable CORS in development mode if devMode { os.Setenv("OLLAMA_CORS", "1") // Check if Vite dev server is running on port 5173 var conn net.Conn var err error for _, addr := range []string{"127.0.0.1:5173", "localhost:5173"} { conn, err = net.DialTimeout("tcp", addr, 2*time.Second) if err == nil { conn.Close() break } } if err != nil { slog.Error("Vite dev server not running on port 5173") 
fmt.Fprintln(os.Stderr, "Error: Vite dev server is not running on port 5173") fmt.Fprintln(os.Stderr, "Please run 'npm run dev' in the ui/app directory to start the UI in development mode") os.Exit(1) } } // Initialize tools registry toolRegistry := tools.NewRegistry() slog.Info("initialized tools registry", "tool_count", len(toolRegistry.List())) // ctx is the app-level context that will be used to stop the app ctx, cancel := context.WithCancel(context.Background()) // octx is the ollama server context that will be used to stop the ollama server octx, ocancel := context.WithCancel(ctx) // TODO (jmorganca): instead we should instantiate the // webview with the store instead of assigning it here, however // making the webview a global variable is easier for now wv.Store = st done := make(chan error, 1) osrv := server.New(st, devMode) go func() { slog.Info("starting ollama server") done <- osrv.Run(octx) }() uiServer := ui.Server{ Token: token, Restart: func() { ocancel() <-done octx, ocancel = context.WithCancel(ctx) go func() { done <- osrv.Run(octx) }() }, Store: st, ToolRegistry: toolRegistry, Dev: devMode, Logger: slog.Default(), } srv := &http.Server{ Handler: uiServer.Handler(), } // Start the UI server slog.Info("starting ui server", "port", port) go func() { slog.Debug("starting ui server on port", "port", port) err = srv.Serve(ln) if err != nil && !errors.Is(err, http.ErrServerClosed) { slog.Warn("desktop server", "error", err) } slog.Debug("background desktop server done") }() updater := &updater.Updater{Store: st} updater.StartBackgroundUpdaterChecker(ctx, UpdateAvailable) hasCompletedFirstRun, err := st.HasCompletedFirstRun() if err != nil { slog.Error("failed to load has completed first run", "error", err) } if !hasCompletedFirstRun { err = st.SetHasCompletedFirstRun(true) if err != nil { slog.Error("failed to set has completed first run", "error", err) } } // capture SIGINT and SIGTERM signals and gracefully shutdown the app signals := make(chan 
os.Signal, 1) signal.Notify(signals, syscall.SIGINT, syscall.SIGTERM) go func() { <-signals slog.Info("received SIGINT or SIGTERM signal, shutting down") quit() }() if urlSchemeRequest != "" { go func() { handleURLSchemeInCurrentInstance(urlSchemeRequest) }() } else { slog.Debug("no URL scheme request to handle") } go func() { slog.Debug("waiting for ollama server to be ready") if err := ui.WaitForServer(ctx, 10*time.Second); err != nil { slog.Warn("ollama server not ready, continuing anyway", "error", err) } if _, err := uiServer.UserData(ctx); err != nil { slog.Warn("failed to load user data", "error", err) } }() osRun(cancel, hasCompletedFirstRun, startHidden) slog.Info("shutting down desktop server") if err := srv.Close(); err != nil { slog.Warn("error shutting down desktop server", "error", err) } slog.Info("shutting down ollama server") cancel() <-done } func startHiddenTasks() { // If an upgrade is ready and we're in hidden mode, perform it at startup. // If we're not in hidden mode, we want to start as fast as possible and not // slow the user down with an upgrade. if updater.IsUpdatePending() { if fastStartup { // CLI triggered app startup use-case slog.Info("deferring pending update for fast startup") } else { if err := updater.DoUpgradeAtStartup(); err != nil { slog.Info("unable to perform upgrade at startup", "error", err) // Make sure the restart to upgrade menu shows so we can attempt an interactive upgrade to get authorization UpdateAvailable("") } else { slog.Debug("launching new version...") // TODO - consider a timer that aborts if this takes too long and we haven't been killed yet... 
LaunchNewApp() os.Exit(0) } } } } func checkUserLoggedIn(uiServerPort int) bool { if uiServerPort == 0 { slog.Debug("UI server not ready yet, skipping auth check") return false } resp, err := http.Post(fmt.Sprintf("http://127.0.0.1:%d/api/me", uiServerPort), "application/json", nil) if err != nil { slog.Debug("failed to call local auth endpoint", "error", err) return false } defer resp.Body.Close() // Check if the response is successful if resp.StatusCode != http.StatusOK { slog.Debug("auth endpoint returned non-OK status", "status", resp.StatusCode) return false } var user struct { ID string `json:"id"` Name string `json:"name"` } if err := json.NewDecoder(resp.Body).Decode(&user); err != nil { slog.Debug("failed to parse user response", "error", err) return false } // Verify we have a valid user with an ID and name if user.ID == "" || user.Name == "" { slog.Debug("user response missing required fields", "id", user.ID, "name", user.Name) return false } slog.Debug("user is logged in", "user_id", user.ID, "user_name", user.Name) return true } // handleConnectURLScheme fetches the connect URL and opens it in the browser func handleConnectURLScheme() { if checkUserLoggedIn(uiServerPort) { slog.Info("user is already logged in, opening app instead") showWindow(wv.webview.Window()) return } connectURL, err := auth.BuildConnectURL("https://ollama.com") if err != nil { slog.Error("failed to build connect URL", "error", err) openInBrowser("https://ollama.com/connect") return } openInBrowser(connectURL) } // openInBrowser opens the specified URL in the default browser func openInBrowser(url string) { var cmd string var args []string switch runtime.GOOS { case "windows": cmd = "rundll32" args = []string{"url.dll,FileProtocolHandler", url} case "darwin": cmd = "open" args = []string{url} default: // "linux", "freebsd", "openbsd", "netbsd"... 
should not reach here slog.Warn("unsupported OS for openInBrowser", "os", runtime.GOOS) } slog.Info("executing browser command", "cmd", cmd, "args", args) if err := exec.Command(cmd, args...).Start(); err != nil { slog.Error("failed to open URL in browser", "url", url, "cmd", cmd, "args", args, "error", err) } } // parseURLScheme parses an ollama:// URL and validates it // Supports: ollama:// (open app) and ollama://connect (OAuth) func parseURLScheme(urlSchemeRequest string) (isConnect bool, err error) { parsedURL, err := url.Parse(urlSchemeRequest) if err != nil { return false, fmt.Errorf("invalid URL: %w", err) } // Check if this is a connect URL if parsedURL.Host == "connect" || strings.TrimPrefix(parsedURL.Path, "/") == "connect" { return true, nil } // Allow bare ollama:// or ollama:/// to open the app if (parsedURL.Host == "" && parsedURL.Path == "") || parsedURL.Path == "/" { return false, nil } return false, fmt.Errorf("unsupported ollama:// URL path: %s", urlSchemeRequest) } // handleURLSchemeInCurrentInstance processes URL scheme requests in the current instance func handleURLSchemeInCurrentInstance(urlSchemeRequest string) { isConnect, err := parseURLScheme(urlSchemeRequest) if err != nil { slog.Error("failed to parse URL scheme request", "url", urlSchemeRequest, "error", err) return } if isConnect { handleConnectURLScheme() } else { if wv.webview != nil { showWindow(wv.webview.Window()) } } }
go
MIT
626af2d80973270c4d59b8df7153ac47ad67ed7b
2026-01-07T08:35:43.337630Z
false
ollama/ollama
https://github.com/ollama/ollama/blob/626af2d80973270c4d59b8df7153ac47ad67ed7b/app/tools/tools.go
app/tools/tools.go
//go:build windows || darwin package tools import ( "context" "encoding/json" "fmt" ) // Tool defines the interface that all tools must implement type Tool interface { // Name returns the unique identifier for the tool Name() string // Description returns a human-readable description of what the tool does Description() string // Schema returns the JSON schema for the tool's parameters Schema() map[string]any // Execute runs the tool with the given arguments and returns result to store in db, and a string result for the model Execute(ctx context.Context, args map[string]any) (any, string, error) // Prompt returns a prompt for the tool Prompt() string } // Registry manages the available tools and their execution type Registry struct { tools map[string]Tool workingDir string // Working directory for all tool operations } // NewRegistry creates a new tool registry with no tools func NewRegistry() *Registry { return &Registry{ tools: make(map[string]Tool), } } // Register adds a tool to the registry func (r *Registry) Register(tool Tool) { r.tools[tool.Name()] = tool } // Get retrieves a tool by name func (r *Registry) Get(name string) (Tool, bool) { tool, exists := r.tools[name] return tool, exists } // List returns all available tools func (r *Registry) List() []Tool { tools := make([]Tool, 0, len(r.tools)) for _, tool := range r.tools { tools = append(tools, tool) } return tools } // SetWorkingDir sets the working directory for all tool operations func (r *Registry) SetWorkingDir(dir string) { r.workingDir = dir } // Execute runs a tool with the given name and arguments func (r *Registry) Execute(ctx context.Context, name string, args map[string]any) (any, string, error) { tool, ok := r.tools[name] if !ok { return nil, "", fmt.Errorf("unknown tool: %s", name) } result, text, err := tool.Execute(ctx, args) if err != nil { return nil, "", err } return result, text, nil } // ToolCall represents a request to execute a tool type ToolCall struct { ID string `json:"id"` 
Type string `json:"type"` Function ToolFunction `json:"function"` } // ToolFunction represents the function call details type ToolFunction struct { Name string `json:"name"` Arguments json.RawMessage `json:"arguments"` } // ToolResult represents the result of a tool execution type ToolResult struct { ToolCallID string `json:"tool_call_id"` Content any `json:"content"` Error string `json:"error,omitempty"` } // ToolSchemas returns all tools as schema maps suitable for API calls func (r *Registry) AvailableTools() []map[string]any { schemas := make([]map[string]any, 0, len(r.tools)) for _, tool := range r.tools { schema := map[string]any{ "name": tool.Name(), "description": tool.Description(), "schema": tool.Schema(), } schemas = append(schemas, schema) } return schemas } // ToolNames returns a list of all tool names func (r *Registry) ToolNames() []string { names := make([]string, 0, len(r.tools)) for name := range r.tools { names = append(names, name) } return names }
go
MIT
626af2d80973270c4d59b8df7153ac47ad67ed7b
2026-01-07T08:35:43.337630Z
false
ollama/ollama
https://github.com/ollama/ollama/blob/626af2d80973270c4d59b8df7153ac47ad67ed7b/app/tools/web_fetch.go
app/tools/web_fetch.go
//go:build windows || darwin package tools import ( "bytes" "context" "encoding/json" "fmt" "net/http" "net/url" "strconv" "strings" "time" "github.com/ollama/ollama/auth" ) type WebFetch struct{} type FetchRequest struct { URL string `json:"url"` } type FetchResponse struct { Title string `json:"title"` Content string `json:"content"` Links []string `json:"links"` } func (w *WebFetch) Name() string { return "web_fetch" } func (w *WebFetch) Description() string { return "Crawl and extract text content from web pages" } func (g *WebFetch) Schema() map[string]any { schemaBytes := []byte(`{ "type": "object", "properties": { "url": { "type": "string", "description": "URL to crawl and extract content from" } }, "required": ["url"] }`) var schema map[string]any if err := json.Unmarshal(schemaBytes, &schema); err != nil { return nil } return schema } func (w *WebFetch) Prompt() string { return "" } func (w *WebFetch) Execute(ctx context.Context, args map[string]any) (any, string, error) { urlRaw, ok := args["url"] if !ok { return nil, "", fmt.Errorf("url parameter is required") } urlStr, ok := urlRaw.(string) if !ok || strings.TrimSpace(urlStr) == "" { return nil, "", fmt.Errorf("url must be a non-empty string") } result, err := performWebFetch(ctx, urlStr) if err != nil { return nil, "", err } return result, "", nil } func performWebFetch(ctx context.Context, targetURL string) (*FetchResponse, error) { reqBody := FetchRequest{URL: targetURL} jsonBody, err := json.Marshal(reqBody) if err != nil { return nil, fmt.Errorf("failed to marshal request body: %w", err) } crawlURL, err := url.Parse("https://ollama.com/api/web_fetch") if err != nil { return nil, fmt.Errorf("failed to parse fetch URL: %w", err) } query := crawlURL.Query() query.Add("ts", strconv.FormatInt(time.Now().Unix(), 10)) crawlURL.RawQuery = query.Encode() data := fmt.Appendf(nil, "%s,%s", http.MethodPost, crawlURL.RequestURI()) signature, err := auth.Sign(ctx, data) if err != nil { return nil, 
fmt.Errorf("failed to sign request: %w", err) } req, err := http.NewRequestWithContext(ctx, http.MethodPost, crawlURL.String(), bytes.NewBuffer(jsonBody)) if err != nil { return nil, fmt.Errorf("failed to create request: %w", err) } req.Header.Set("Content-Type", "application/json") if signature != "" { req.Header.Set("Authorization", fmt.Sprintf("Bearer %s", signature)) } client := &http.Client{Timeout: 30 * time.Second} resp, err := client.Do(req) if err != nil { return nil, fmt.Errorf("failed to execute fetch request: %w", err) } defer resp.Body.Close() if resp.StatusCode != http.StatusOK { return nil, fmt.Errorf("fetch API error (status %d)", resp.StatusCode) } var result FetchResponse if err := json.NewDecoder(resp.Body).Decode(&result); err != nil { return nil, fmt.Errorf("failed to decode response: %w", err) } return &result, nil }
go
MIT
626af2d80973270c4d59b8df7153ac47ad67ed7b
2026-01-07T08:35:43.337630Z
false
ollama/ollama
https://github.com/ollama/ollama/blob/626af2d80973270c4d59b8df7153ac47ad67ed7b/app/tools/browser_test.go
app/tools/browser_test.go
//go:build windows || darwin package tools import ( "strings" "testing" "time" "github.com/ollama/ollama/app/ui/responses" ) func makeTestPage(url string) *responses.Page { return &responses.Page{ URL: url, Title: "Title " + url, Text: "Body for " + url, Lines: []string{"line1", "line2", "line3"}, Links: map[int]string{0: url}, FetchedAt: time.Now(), } } func TestBrowser_Scroll_AppendsOnlyPageStack(t *testing.T) { b := NewBrowser(&responses.BrowserStateData{PageStack: []string{}, ViewTokens: 1024, URLToPage: map[string]*responses.Page{}}) p1 := makeTestPage("https://example.com/1") b.savePage(p1) initialStackLen := len(b.state.Data.PageStack) initialMapLen := len(b.state.Data.URLToPage) bo := NewBrowserOpen(b) // Scroll without id — should push only to PageStack _, _, err := bo.Execute(t.Context(), map[string]any{"loc": float64(1), "num_lines": float64(1)}) if err != nil { t.Fatalf("scroll execute failed: %v", err) } if got, want := len(b.state.Data.PageStack), initialStackLen+1; got != want { t.Fatalf("page stack length = %d, want %d", got, want) } if got, want := len(b.state.Data.URLToPage), initialMapLen; got != want { t.Fatalf("url_to_page length changed = %d, want %d", got, want) } } func TestBrowserOpen_UseCacheByURL(t *testing.T) { b := NewBrowser(&responses.BrowserStateData{PageStack: []string{}, ViewTokens: 1024, URLToPage: map[string]*responses.Page{}}) bo := NewBrowserOpen(b) p := makeTestPage("https://example.com/cached") b.state.Data.URLToPage[p.URL] = p initialStackLen := len(b.state.Data.PageStack) initialMapLen := len(b.state.Data.URLToPage) _, _, err := bo.Execute(t.Context(), map[string]any{"id": p.URL}) if err != nil { t.Fatalf("open cached execute failed: %v", err) } if got, want := len(b.state.Data.PageStack), initialStackLen+1; got != want { t.Fatalf("page stack length = %d, want %d", got, want) } if got, want := len(b.state.Data.URLToPage), initialMapLen; got != want { t.Fatalf("url_to_page length changed = %d, want %d", got, want) } } func 
TestDisplayPage_InvalidLoc(t *testing.T) { b := NewBrowser(&responses.BrowserStateData{PageStack: []string{}, ViewTokens: 1024, URLToPage: map[string]*responses.Page{}}) p := makeTestPage("https://example.com/x") // ensure lines are set p.Lines = []string{"a", "b"} _, err := b.displayPage(p, 0, 10, -1) if err == nil || !strings.Contains(err.Error(), "invalid location") { t.Fatalf("expected invalid location error, got %v", err) } } func TestBrowserOpen_LinkId_UsesCacheAndAppends(t *testing.T) { b := NewBrowser(&responses.BrowserStateData{PageStack: []string{}, ViewTokens: 1024, URLToPage: map[string]*responses.Page{}}) // Seed a main page with a link id 0 to a linked URL main := makeTestPage("https://example.com/main") linked := makeTestPage("https://example.com/linked") main.Links = map[int]string{0: linked.URL} // Save the main page (adds to PageStack and URLToPage) b.savePage(main) // Pre-cache the linked page so open by id avoids network b.state.Data.URLToPage[linked.URL] = linked initialStackLen := len(b.state.Data.PageStack) initialMapLen := len(b.state.Data.URLToPage) bo := NewBrowserOpen(b) _, _, err := bo.Execute(t.Context(), map[string]any{"id": float64(0)}) if err != nil { t.Fatalf("open by link id failed: %v", err) } if got, want := len(b.state.Data.PageStack), initialStackLen+1; got != want { t.Fatalf("page stack length = %d, want %d", got, want) } if got, want := len(b.state.Data.URLToPage), initialMapLen; got != want { t.Fatalf("url_to_page length changed = %d, want %d", got, want) } if last := b.state.Data.PageStack[len(b.state.Data.PageStack)-1]; last != linked.URL { t.Fatalf("last page in stack = %s, want %s", last, linked.URL) } } func TestWrapLines_PreserveAndWidth(t *testing.T) { long := strings.Repeat("word ", 50) text := "Line1\n\n" + long + "\nLine3" lines := wrapLines(text, 40) // Ensure empty line preserved at index 1 if lines[1] != "" { t.Fatalf("expected preserved empty line at index 1, got %q", lines[1]) } // All lines should be <= 40 
chars for i, l := range lines { if len(l) > 40 { t.Fatalf("line %d exceeds width: %d > 40", i, len(l)) } } } func TestDisplayPage_FormatHeaderAndLines(t *testing.T) { b := NewBrowser(&responses.BrowserStateData{PageStack: []string{}, ViewTokens: 1024, URLToPage: map[string]*responses.Page{}}) p := &responses.Page{ URL: "https://example.com/x", Title: "Example", Lines: []string{"URL: https://example.com/x", "A", "B", "C"}, } out, err := b.displayPage(p, 3, 0, 2) if err != nil { t.Fatalf("displayPage failed: %v", err) } if !strings.HasPrefix(out, "[3] Example(") { t.Fatalf("header not formatted as expected: %q", out) } if !strings.Contains(out, "L0:\n") { t.Fatalf("missing L0 label: %q", out) } if !strings.Contains(out, "L1: URL: https://example.com/x\n") || !strings.Contains(out, "L2: A\n") { t.Fatalf("missing expected line numbers/content: %q", out) } }
go
MIT
626af2d80973270c4d59b8df7153ac47ad67ed7b
2026-01-07T08:35:43.337630Z
false
ollama/ollama
https://github.com/ollama/ollama/blob/626af2d80973270c4d59b8df7153ac47ad67ed7b/app/tools/browser.go
app/tools/browser.go
//go:build windows || darwin package tools import ( "context" "fmt" "net/url" "regexp" "strings" "sync" "time" "github.com/ollama/ollama/app/ui/responses" ) type PageType string const ( PageTypeSearchResults PageType = "initial_results" PageTypeWebpage PageType = "webpage" ) // DefaultViewTokens is the number of tokens to show to the model used when calling displayPage const DefaultViewTokens = 1024 /* The Browser tool provides web browsing capability for gpt-oss. The model uses the tool by usually doing a search first and then choosing to either open a page, find a term in a page, or do another search. The tool optionally may open a URL directly - especially if one is passed in. Each action is saved into an append-only page stack `responses.BrowserStateData` to keep track of the history of the browsing session. Each `Execute()` for a tool returns the full current state of the browser. ui.go manages the browser state representation between the tool, ui, and db. A new Browser object is created per request - the state is reconstructed by ui.go. The initialization of the browser will receive a `responses.BrowserStateData` with the stitched history. 
*/ // BrowserState manages the browsing session on a per-chat basis type BrowserState struct { mu sync.RWMutex Data *responses.BrowserStateData } type Browser struct { state *BrowserState } // State is only accessed in a single thread, as each chat has its own browser state func (b *Browser) State() *responses.BrowserStateData { b.state.mu.RLock() defer b.state.mu.RUnlock() return b.state.Data } func (b *Browser) savePage(page *responses.Page) { b.state.Data.URLToPage[page.URL] = page b.state.Data.PageStack = append(b.state.Data.PageStack, page.URL) } func (b *Browser) getPageFromStack(url string) (*responses.Page, error) { page, ok := b.state.Data.URLToPage[url] if !ok { return nil, fmt.Errorf("page not found for url %s", url) } return page, nil } func NewBrowser(state *responses.BrowserStateData) *Browser { if state == nil { state = &responses.BrowserStateData{ PageStack: []string{}, ViewTokens: DefaultViewTokens, URLToPage: make(map[string]*responses.Page), } } b := &BrowserState{ Data: state, } return &Browser{ state: b, } } type BrowserSearch struct { Browser webSearch *BrowserWebSearch } // NewBrowserSearch creates a new browser search instance func NewBrowserSearch(bb *Browser) *BrowserSearch { if bb == nil { bb = &Browser{ state: &BrowserState{ Data: &responses.BrowserStateData{ PageStack: []string{}, ViewTokens: DefaultViewTokens, URLToPage: make(map[string]*responses.Page), }, }, } } return &BrowserSearch{ Browser: *bb, webSearch: &BrowserWebSearch{}, } } func (b *BrowserSearch) Name() string { return "browser.search" } func (b *BrowserSearch) Description() string { return "Search the web for information" } func (b *BrowserSearch) Prompt() string { return "" } func (b *BrowserSearch) Schema() map[string]any { return map[string]any{} } func (b *BrowserSearch) Execute(ctx context.Context, args map[string]any) (any, string, error) { query, ok := args["query"].(string) if !ok { return nil, "", fmt.Errorf("query parameter is required") } topn, ok := 
args["topn"].(int) if !ok { topn = 5 } searchArgs := map[string]any{ "queries": []any{query}, "max_results": topn, } result, err := b.webSearch.Execute(ctx, searchArgs) if err != nil { return nil, "", fmt.Errorf("search error: %w", err) } searchResponse, ok := result.(*WebSearchResponse) if !ok { return nil, "", fmt.Errorf("invalid search results format") } // Build main search results page that contains all search results searchResultsPage := b.buildSearchResultsPageCollection(query, searchResponse) b.savePage(searchResultsPage) cursor := len(b.state.Data.PageStack) - 1 // cache result for each page for _, queryResults := range searchResponse.Results { for i, result := range queryResults { resultPage := b.buildSearchResultsPage(&result, i+1) // save to global only, do not add to visited stack b.state.Data.URLToPage[resultPage.URL] = resultPage } } page := searchResultsPage pageText, err := b.displayPage(page, cursor, 0, -1) if err != nil { return nil, "", fmt.Errorf("failed to display page: %w", err) } return b.state.Data, pageText, nil } func (b *Browser) buildSearchResultsPageCollection(query string, results *WebSearchResponse) *responses.Page { page := &responses.Page{ URL: "search_results_" + query, Title: query, Links: make(map[int]string), FetchedAt: time.Now(), } var textBuilder strings.Builder linkIdx := 0 // Add the header lines to match format textBuilder.WriteString("\n") // L0: empty textBuilder.WriteString("URL: \n") // L1: URL: (empty for search) textBuilder.WriteString("# Search Results\n") // L2: # Search Results textBuilder.WriteString("\n") // L3: empty for _, queryResults := range results.Results { for _, result := range queryResults { domain := result.URL if u, err := url.Parse(result.URL); err == nil && u.Host != "" { domain = u.Host domain = strings.TrimPrefix(domain, "www.") } linkFormat := fmt.Sprintf("* 【%d†%s†%s】", linkIdx, result.Title, domain) textBuilder.WriteString(linkFormat) numChars := min(len(result.Content.FullText), 400) snippet 
:= strings.TrimSpace(result.Content.FullText[:numChars]) textBuilder.WriteString(snippet) textBuilder.WriteString("\n") page.Links[linkIdx] = result.URL linkIdx++ } } page.Text = textBuilder.String() page.Lines = wrapLines(page.Text, 80) return page } func (b *Browser) buildSearchResultsPage(result *WebSearchResult, linkIdx int) *responses.Page { page := &responses.Page{ URL: result.URL, Title: result.Title, Links: make(map[int]string), FetchedAt: time.Now(), } var textBuilder strings.Builder // Format the individual result page (only used when no full text is available) linkFormat := fmt.Sprintf("【%d†%s】", linkIdx, result.Title) textBuilder.WriteString(linkFormat) textBuilder.WriteString("\n") textBuilder.WriteString(fmt.Sprintf("URL: %s\n", result.URL)) numChars := min(len(result.Content.FullText), 300) textBuilder.WriteString(result.Content.FullText[:numChars]) textBuilder.WriteString("\n\n") // Only store link and snippet if we won't be processing full text later // (full text processing will handle all links consistently) if result.Content.FullText == "" { page.Links[linkIdx] = result.URL } // Use full text if available, otherwise use snippet if result.Content.FullText != "" { // Prepend the URL line to the full text page.Text = fmt.Sprintf("URL: %s\n%s", result.URL, result.Content.FullText) // Process markdown links in the full text processedText, processedLinks := processMarkdownLinks(page.Text) page.Text = processedText page.Links = processedLinks } else { page.Text = textBuilder.String() } page.Lines = wrapLines(page.Text, 80) return page } // getEndLoc calculates the end location for viewport based on token limits func (b *Browser) getEndLoc(loc, numLines, totalLines int, lines []string) int { if numLines <= 0 { // Auto-calculate based on viewTokens txt := b.joinLinesWithNumbers(lines[loc:]) // If text is very short, no need to truncate (at least 1 char per token) if len(txt) > b.state.Data.ViewTokens { // Simple heuristic: approximate token counting // 
Typical token is ~4 characters, but can be up to 128 chars maxCharsPerToken := 128 // upper bound for text to analyze upperBound := min((b.state.Data.ViewTokens+1)*maxCharsPerToken, len(txt)) textToAnalyze := txt[:upperBound] // Simple approximation: count tokens as ~4 chars each // This is less accurate than tiktoken but more performant approxTokens := len(textToAnalyze) / 4 if approxTokens > b.state.Data.ViewTokens { // Find the character position at viewTokens endIdx := min(b.state.Data.ViewTokens*4, len(txt)) // Count newlines up to that position to get line count numLines = strings.Count(txt[:endIdx], "\n") + 1 } else { numLines = totalLines } } else { numLines = totalLines } } return min(loc+numLines, totalLines) } // joinLinesWithNumbers creates a string with line numbers, matching Python's join_lines func (b *Browser) joinLinesWithNumbers(lines []string) string { var builder strings.Builder var hadZeroLine bool for i, line := range lines { if i == 0 { builder.WriteString("L0:\n") hadZeroLine = true } if hadZeroLine { builder.WriteString(fmt.Sprintf("L%d: %s\n", i+1, line)) } else { builder.WriteString(fmt.Sprintf("L%d: %s\n", i, line)) } } return builder.String() } // processMarkdownLinks finds all markdown links in the text and replaces them with the special format // Returns the processed text and a map of link IDs to URLs func processMarkdownLinks(text string) (string, map[int]string) { links := make(map[int]string) // Always start from 0 for consistent numbering across all pages linkID := 0 // First, handle multi-line markdown links by joining them // This regex finds markdown links that might be split across lines multiLinePattern := regexp.MustCompile(`\[([^\]]+)\]\s*\n\s*\(([^)]+)\)`) text = multiLinePattern.ReplaceAllStringFunc(text, func(match string) string { // Replace newlines with spaces in the match cleaned := strings.ReplaceAll(match, "\n", " ") // Remove extra spaces cleaned = regexp.MustCompile(`\s+`).ReplaceAllString(cleaned, " ") return 
cleaned }) // Now process all markdown links (including the cleaned multi-line ones) linkPattern := regexp.MustCompile(`\[([^\]]+)\]\(([^)]+)\)`) processedText := linkPattern.ReplaceAllStringFunc(text, func(match string) string { matches := linkPattern.FindStringSubmatch(match) if len(matches) != 3 { return match } linkText := strings.TrimSpace(matches[1]) linkURL := strings.TrimSpace(matches[2]) // Extract domain from URL domain := linkURL if u, err := url.Parse(linkURL); err == nil && u.Host != "" { domain = u.Host // Remove www. prefix if present domain = strings.TrimPrefix(domain, "www.") } // Create the formatted link formatted := fmt.Sprintf("【%d†%s†%s】", linkID, linkText, domain) // Store the link links[linkID] = linkURL linkID++ return formatted }) return processedText, links } func wrapLines(text string, width int) []string { if width <= 0 { width = 80 } lines := strings.Split(text, "\n") var wrapped []string for _, line := range lines { if line == "" { // Preserve empty lines wrapped = append(wrapped, "") } else if len(line) <= width { wrapped = append(wrapped, line) } else { // Word wrapping while preserving whitespace structure words := strings.Fields(line) if len(words) == 0 { // Line with only whitespace wrapped = append(wrapped, line) continue } currentLine := "" for _, word := range words { // Check if adding this word would exceed width testLine := currentLine if testLine != "" { testLine += " " } testLine += word if len(testLine) > width && currentLine != "" { // Current line would be too long, wrap it wrapped = append(wrapped, currentLine) currentLine = word } else { // Add word to current line if currentLine != "" { currentLine += " " } currentLine += word } } // Add any remaining content if currentLine != "" { wrapped = append(wrapped, currentLine) } } } return wrapped } // displayPage formats and returns the page display for the model func (b *Browser) displayPage(page *responses.Page, cursor, loc, numLines int) (string, error) { totalLines := 
len(page.Lines) if loc >= totalLines { return "", fmt.Errorf("invalid location: %d (max: %d)", loc, totalLines-1) } // get viewport end location endLoc := b.getEndLoc(loc, numLines, totalLines, page.Lines) var displayBuilder strings.Builder displayBuilder.WriteString(fmt.Sprintf("[%d] %s", cursor, page.Title)) if page.URL != "" { displayBuilder.WriteString(fmt.Sprintf("(%s)\n", page.URL)) } else { displayBuilder.WriteString("\n") } displayBuilder.WriteString(fmt.Sprintf("**viewing lines [%d - %d] of %d**\n\n", loc, endLoc-1, totalLines-1)) // Content with line numbers var hadZeroLine bool for i := loc; i < endLoc; i++ { if i == 0 { displayBuilder.WriteString("L0:\n") hadZeroLine = true } if hadZeroLine { displayBuilder.WriteString(fmt.Sprintf("L%d: %s\n", i+1, page.Lines[i])) } else { displayBuilder.WriteString(fmt.Sprintf("L%d: %s\n", i, page.Lines[i])) } } return displayBuilder.String(), nil } type BrowserOpen struct { Browser crawlPage *BrowserCrawler } func NewBrowserOpen(bb *Browser) *BrowserOpen { if bb == nil { bb = &Browser{ state: &BrowserState{ Data: &responses.BrowserStateData{ PageStack: []string{}, ViewTokens: DefaultViewTokens, URLToPage: make(map[string]*responses.Page), }, }, } } return &BrowserOpen{ Browser: *bb, crawlPage: &BrowserCrawler{}, } } func (b *BrowserOpen) Name() string { return "browser.open" } func (b *BrowserOpen) Description() string { return "Open a link in the browser" } func (b *BrowserOpen) Prompt() string { return "" } func (b *BrowserOpen) Schema() map[string]any { return map[string]any{} } func (b *BrowserOpen) Execute(ctx context.Context, args map[string]any) (any, string, error) { // Get cursor parameter first cursor := -1 if c, ok := args["cursor"].(float64); ok { cursor = int(c) } else if c, ok := args["cursor"].(int); ok { cursor = c } // Get loc parameter loc := 0 if l, ok := args["loc"].(float64); ok { loc = int(l) } else if l, ok := args["loc"].(int); ok { loc = l } // Get num_lines parameter numLines := -1 if n, ok 
:= args["num_lines"].(float64); ok { numLines = int(n) } else if n, ok := args["num_lines"].(int); ok { numLines = n } // get page from cursor var page *responses.Page if cursor >= 0 { if cursor >= len(b.state.Data.PageStack) { return nil, "", fmt.Errorf("cursor %d is out of range (pageStack length: %d)", cursor, len(b.state.Data.PageStack)) } var err error page, err = b.getPageFromStack(b.state.Data.PageStack[cursor]) if err != nil { return nil, "", fmt.Errorf("page not found for cursor %d: %w", cursor, err) } } else { // get last page if len(b.state.Data.PageStack) != 0 { pageURL := b.state.Data.PageStack[len(b.state.Data.PageStack)-1] var err error page, err = b.getPageFromStack(pageURL) if err != nil { return nil, "", fmt.Errorf("page not found for cursor %d: %w", cursor, err) } } } // Try to get id as string (URL) first if url, ok := args["id"].(string); ok { // Check if we already have this page cached if existingPage, ok := b.state.Data.URLToPage[url]; ok { // Use cached page b.savePage(existingPage) // Always update cursor to point to the newly added page cursor = len(b.state.Data.PageStack) - 1 pageText, err := b.displayPage(existingPage, cursor, loc, numLines) if err != nil { return nil, "", fmt.Errorf("failed to display page: %w", err) } return b.state.Data, pageText, nil } // Page not in cache, need to crawl it if b.crawlPage == nil { b.crawlPage = &BrowserCrawler{} } crawlResponse, err := b.crawlPage.Execute(ctx, map[string]any{ "urls": []any{url}, "latest": false, }) if err != nil { return nil, "", fmt.Errorf("failed to crawl URL %s: %w", url, err) } newPage, err := b.buildPageFromCrawlResult(url, crawlResponse) if err != nil { return nil, "", fmt.Errorf("failed to build page from crawl result: %w", err) } // Need to fall through if first search is directly an open command - no existing page b.savePage(newPage) // Always update cursor to point to the newly added page cursor = len(b.state.Data.PageStack) - 1 pageText, err := b.displayPage(newPage, 
cursor, loc, numLines) if err != nil { return nil, "", fmt.Errorf("failed to display page: %w", err) } return b.state.Data, pageText, nil } // Try to get id as integer (link ID from current page) if id, ok := args["id"].(float64); ok { if page == nil { return nil, "", fmt.Errorf("no current page to resolve link from") } idInt := int(id) pageURL, ok := page.Links[idInt] if !ok { return nil, "", fmt.Errorf("invalid link id %d", idInt) } // Check if we have the linked page cached newPage, ok := b.state.Data.URLToPage[pageURL] if !ok { if b.crawlPage == nil { b.crawlPage = &BrowserCrawler{} } crawlResponse, err := b.crawlPage.Execute(ctx, map[string]any{ "urls": []any{pageURL}, "latest": false, }) if err != nil { return nil, "", fmt.Errorf("failed to crawl URL %s: %w", pageURL, err) } // Create new page from crawl result newPage, err = b.buildPageFromCrawlResult(pageURL, crawlResponse) if err != nil { return nil, "", fmt.Errorf("failed to build page from crawl result: %w", err) } } // Add to history stack regardless of cache status b.savePage(newPage) // Always update cursor to point to the newly added page cursor = len(b.state.Data.PageStack) - 1 pageText, err := b.displayPage(newPage, cursor, loc, numLines) if err != nil { return nil, "", fmt.Errorf("failed to display page: %w", err) } return b.state.Data, pageText, nil } // If no id provided, just display current page if page == nil { return nil, "", fmt.Errorf("no current page to display") } // Only add to PageStack without updating URLToPage b.state.Data.PageStack = append(b.state.Data.PageStack, page.URL) cursor = len(b.state.Data.PageStack) - 1 pageText, err := b.displayPage(page, cursor, loc, numLines) if err != nil { return nil, "", fmt.Errorf("failed to display page: %w", err) } return b.state.Data, pageText, nil } // buildPageFromCrawlResult creates a Page from crawl API results func (b *Browser) buildPageFromCrawlResult(requestedURL string, crawlResponse *CrawlResponse) (*responses.Page, error) { // 
Initialize page with defaults page := &responses.Page{ URL: requestedURL, Title: requestedURL, Text: "", Links: make(map[int]string), FetchedAt: time.Now(), } // Process crawl results - the API returns results grouped by URL for url, urlResults := range crawlResponse.Results { if len(urlResults) > 0 { // Get the first result for this URL result := urlResults[0] // Extract content if result.Content.FullText != "" { page.Text = result.Content.FullText } // Extract title if available if result.Title != "" { page.Title = result.Title } // Update URL to the actual URL from results page.URL = url // Extract links if available from extras for i, link := range result.Extras.Links { if link.Href != "" { page.Links[i] = link.Href } else if link.URL != "" { page.Links[i] = link.URL } } // Only process the first URL's results break } } // If no text was extracted, set a default message if page.Text == "" { page.Text = "No content could be extracted from this page." } else { // Prepend the URL line to match Python implementation page.Text = fmt.Sprintf("URL: %s\n%s", page.URL, page.Text) } // Process markdown links in the text processedText, processedLinks := processMarkdownLinks(page.Text) page.Text = processedText page.Links = processedLinks // Wrap lines for display page.Lines = wrapLines(page.Text, 80) return page, nil } type BrowserFind struct { Browser } func NewBrowserFind(bb *Browser) *BrowserFind { return &BrowserFind{ Browser: *bb, } } func (b *BrowserFind) Name() string { return "browser.find" } func (b *BrowserFind) Description() string { return "Find a term in the browser" } func (b *BrowserFind) Prompt() string { return "" } func (b *BrowserFind) Schema() map[string]any { return map[string]any{} } func (b *BrowserFind) Execute(ctx context.Context, args map[string]any) (any, string, error) { pattern, ok := args["pattern"].(string) if !ok { return nil, "", fmt.Errorf("pattern parameter is required") } // Get cursor parameter if provided, default to current page 
cursor := -1 if c, ok := args["cursor"].(float64); ok { cursor = int(c) } // Get the page to search in var page *responses.Page if cursor == -1 { // Use current page if len(b.state.Data.PageStack) == 0 { return nil, "", fmt.Errorf("no pages to search in") } var err error page, err = b.getPageFromStack(b.state.Data.PageStack[len(b.state.Data.PageStack)-1]) if err != nil { return nil, "", fmt.Errorf("page not found for cursor %d: %w", cursor, err) } } else { // Use specific cursor if cursor < 0 || cursor >= len(b.state.Data.PageStack) { return nil, "", fmt.Errorf("cursor %d is out of range [0-%d]", cursor, len(b.state.Data.PageStack)-1) } var err error page, err = b.getPageFromStack(b.state.Data.PageStack[cursor]) if err != nil { return nil, "", fmt.Errorf("page not found for cursor %d: %w", cursor, err) } } if page == nil { return nil, "", fmt.Errorf("page not found") } // Create find results page findPage := b.buildFindResultsPage(pattern, page) // Add the find results page to state b.savePage(findPage) newCursor := len(b.state.Data.PageStack) - 1 pageText, err := b.displayPage(findPage, newCursor, 0, -1) if err != nil { return nil, "", fmt.Errorf("failed to display page: %w", err) } return b.state.Data, pageText, nil } func (b *Browser) buildFindResultsPage(pattern string, page *responses.Page) *responses.Page { findPage := &responses.Page{ Title: fmt.Sprintf("Find results for text: `%s` in `%s`", pattern, page.Title), Links: make(map[int]string), FetchedAt: time.Now(), } findPage.URL = fmt.Sprintf("find_results_%s", pattern) var textBuilder strings.Builder matchIdx := 0 maxResults := 50 numShowLines := 4 patternLower := strings.ToLower(pattern) // Search through the page lines following the reference algorithm var resultChunks []string lineIdx := 0 for lineIdx < len(page.Lines) { line := page.Lines[lineIdx] lineLower := strings.ToLower(line) if !strings.Contains(lineLower, patternLower) { lineIdx++ continue } // Build snippet context endLine := 
min(lineIdx+numShowLines, len(page.Lines)) var snippetBuilder strings.Builder for j := lineIdx; j < endLine; j++ { snippetBuilder.WriteString(page.Lines[j]) if j < endLine-1 { snippetBuilder.WriteString("\n") } } snippet := snippetBuilder.String() // Format the match linkFormat := fmt.Sprintf("【%d†match at L%d】", matchIdx, lineIdx) resultChunk := fmt.Sprintf("%s\n%s", linkFormat, snippet) resultChunks = append(resultChunks, resultChunk) if len(resultChunks) >= maxResults { break } matchIdx++ lineIdx += numShowLines } // Build final display text if len(resultChunks) > 0 { textBuilder.WriteString(strings.Join(resultChunks, "\n\n")) } if matchIdx == 0 { findPage.Text = fmt.Sprintf("No `find` results for pattern: `%s`", pattern) } else { findPage.Text = textBuilder.String() } findPage.Lines = wrapLines(findPage.Text, 80) return findPage }
go
MIT
626af2d80973270c4d59b8df7153ac47ad67ed7b
2026-01-07T08:35:43.337630Z
false
ollama/ollama
https://github.com/ollama/ollama/blob/626af2d80973270c4d59b8df7153ac47ad67ed7b/app/tools/browser_crawl.go
app/tools/browser_crawl.go
//go:build windows || darwin package tools import ( "context" "encoding/json" "fmt" ) // CrawlContent represents the content of a crawled page type CrawlContent struct { Snippet string `json:"snippet"` FullText string `json:"full_text"` } // CrawlExtras represents additional data from the crawl API type CrawlExtras struct { Links []CrawlLink `json:"links"` } // CrawlLink represents a link found on a crawled page type CrawlLink struct { URL string `json:"url"` Href string `json:"href"` Text string `json:"text"` } // CrawlResult represents a single crawl result type CrawlResult struct { Title string `json:"title"` URL string `json:"url"` Content CrawlContent `json:"content"` Extras CrawlExtras `json:"extras"` } // CrawlResponse represents the complete response from the crawl API type CrawlResponse struct { Results map[string][]CrawlResult `json:"results"` } // BrowserCrawler tool for crawling web pages using ollama.com crawl API type BrowserCrawler struct{} func (g *BrowserCrawler) Name() string { return "get_webpage" } func (g *BrowserCrawler) Description() string { return "Crawl and extract text content from web pages" } func (g *BrowserCrawler) Prompt() string { return `When you need to read content from web pages, use the get_webpage tool. Simply provide the URLs you want to read and I'll fetch their content for you. For each URL, I'll extract the main text content in a readable format. If you need to discover links within those pages, set extract_links to true. If the user requires the latest information, set livecrawl to true. Only use this tool when you need to access current web content. Make sure the URLs are valid and accessible. 
Do not use this tool for: - Downloading files or media - Accessing private/authenticated pages - Scraping data at high volumes Always check the returned content to ensure it's relevant before using it in your response.` } func (g *BrowserCrawler) Schema() map[string]any { schemaBytes := []byte(`{ "type": "object", "properties": { "urls": { "type": "array", "items": { "type": "string" }, "description": "List of URLs to crawl and extract content from" } }, "required": ["urls"] }`) var schema map[string]any if err := json.Unmarshal(schemaBytes, &schema); err != nil { return nil } return schema } func (g *BrowserCrawler) Execute(ctx context.Context, args map[string]any) (*CrawlResponse, error) { urlsRaw, ok := args["urls"].([]any) if !ok { return nil, fmt.Errorf("urls parameter is required and must be an array of strings") } urls := make([]string, 0, len(urlsRaw)) for _, u := range urlsRaw { if urlStr, ok := u.(string); ok { urls = append(urls, urlStr) } } if len(urls) == 0 { return nil, fmt.Errorf("at least one URL is required") } return g.performWebCrawl(ctx, urls) } // performWebCrawl handles the actual HTTP request to ollama.com crawl API func (g *BrowserCrawler) performWebCrawl(ctx context.Context, urls []string) (*CrawlResponse, error) { result := &CrawlResponse{Results: make(map[string][]CrawlResult, len(urls))} for _, targetURL := range urls { fetchResp, err := performWebFetch(ctx, targetURL) if err != nil { return nil, fmt.Errorf("web_fetch failed for %q: %w", targetURL, err) } links := make([]CrawlLink, 0, len(fetchResp.Links)) for _, link := range fetchResp.Links { links = append(links, CrawlLink{URL: link, Href: link}) } snippet := truncateString(fetchResp.Content, 400) result.Results[targetURL] = []CrawlResult{{ Title: fetchResp.Title, URL: targetURL, Content: CrawlContent{ Snippet: snippet, FullText: fetchResp.Content, }, Extras: CrawlExtras{Links: links}, }} } return result, nil }
go
MIT
626af2d80973270c4d59b8df7153ac47ad67ed7b
2026-01-07T08:35:43.337630Z
false
ollama/ollama
https://github.com/ollama/ollama/blob/626af2d80973270c4d59b8df7153ac47ad67ed7b/app/tools/browser_websearch.go
app/tools/browser_websearch.go
//go:build windows || darwin package tools import ( "context" "encoding/json" "fmt" "strconv" "time" ) // WebSearchContent represents the content of a search result type WebSearchContent struct { Snippet string `json:"snippet"` FullText string `json:"full_text"` } // WebSearchMetadata represents metadata for a search result type WebSearchMetadata struct { PublishedDate *time.Time `json:"published_date,omitempty"` } // WebSearchResult represents a single search result type WebSearchResult struct { Title string `json:"title"` URL string `json:"url"` Content WebSearchContent `json:"content"` Metadata WebSearchMetadata `json:"metadata"` } // WebSearchResponse represents the complete response from the websearch API type WebSearchResponse struct { Results map[string][]WebSearchResult `json:"results"` } // BrowserWebSearch tool for searching the web using ollama.com search API type BrowserWebSearch struct{} func (w *BrowserWebSearch) Name() string { return "gpt_oss_web_search" } func (w *BrowserWebSearch) Description() string { return "Search the web for real-time information using ollama.com search API." } func (w *BrowserWebSearch) Prompt() string { return `Use the gpt_oss_web_search tool to search the web. 1. Come up with a list of search queries to get comprehensive information (typically 2-3 related queries work well) 2. Use the gpt_oss_web_search tool with multiple queries to get results organized by query 3. 
Use the search results to provide current up to date, accurate information Today's date is ` + time.Now().Format("January 2, 2006") + ` Add "` + time.Now().Format("January 2, 2006") + `" for news queries and ` + strconv.Itoa(time.Now().Year()+1) + ` for other queries that need current information.` } func (w *BrowserWebSearch) Schema() map[string]any { schemaBytes := []byte(`{ "type": "object", "properties": { "queries": { "type": "array", "items": { "type": "string" }, "description": "List of search queries to look up" }, "max_results": { "type": "integer", "description": "Maximum number of results to return per query (default: 2) up to 5", "default": 2 } }, "required": ["queries"] }`) var schema map[string]any if err := json.Unmarshal(schemaBytes, &schema); err != nil { return nil } return schema } func (w *BrowserWebSearch) Execute(ctx context.Context, args map[string]any) (any, error) { queriesRaw, ok := args["queries"].([]any) if !ok { return nil, fmt.Errorf("queries parameter is required and must be an array of strings") } queries := make([]string, 0, len(queriesRaw)) for _, q := range queriesRaw { if query, ok := q.(string); ok { queries = append(queries, query) } } if len(queries) == 0 { return nil, fmt.Errorf("at least one query is required") } maxResults := 5 if mr, ok := args["max_results"].(int); ok { maxResults = mr } return w.performWebSearch(ctx, queries, maxResults) } // performWebSearch handles the actual HTTP request to ollama.com search API func (w *BrowserWebSearch) performWebSearch(ctx context.Context, queries []string, maxResults int) (*WebSearchResponse, error) { response := &WebSearchResponse{Results: make(map[string][]WebSearchResult, len(queries))} for _, query := range queries { searchResp, err := performWebSearch(ctx, query, maxResults) if err != nil { return nil, fmt.Errorf("web_search failed for %q: %w", query, err) } converted := make([]WebSearchResult, 0, len(searchResp.Results)) for _, item := range searchResp.Results { converted = 
append(converted, WebSearchResult{ Title: item.Title, URL: item.URL, Content: WebSearchContent{ Snippet: truncateString(item.Content, 400), FullText: item.Content, }, Metadata: WebSearchMetadata{}, }) } response.Results[query] = converted } return response, nil } func truncateString(input string, limit int) string { if limit <= 0 || len(input) <= limit { return input } return input[:limit] }
go
MIT
626af2d80973270c4d59b8df7153ac47ad67ed7b
2026-01-07T08:35:43.337630Z
false
ollama/ollama
https://github.com/ollama/ollama/blob/626af2d80973270c4d59b8df7153ac47ad67ed7b/app/tools/web_search.go
app/tools/web_search.go
//go:build windows || darwin package tools import ( "bytes" "context" "encoding/json" "fmt" "net/http" "net/url" "strconv" "strings" "time" "github.com/ollama/ollama/auth" ) type WebSearch struct{} type SearchRequest struct { Query string `json:"query"` MaxResults int `json:"max_results,omitempty"` } type SearchResult struct { Title string `json:"title"` URL string `json:"url"` Content string `json:"content"` } type SearchResponse struct { Results []SearchResult `json:"results"` } func (w *WebSearch) Name() string { return "web_search" } func (w *WebSearch) Description() string { return "Search the web for real-time information using ollama.com web search API." } func (w *WebSearch) Prompt() string { return "" } func (g *WebSearch) Schema() map[string]any { schemaBytes := []byte(`{ "type": "object", "properties": { "query": { "type": "string", "description": "The search query to execute" }, "max_results": { "type": "integer", "description": "Maximum number of search results to return", "default": 3 } }, "required": ["query"] }`) var schema map[string]any if err := json.Unmarshal(schemaBytes, &schema); err != nil { return nil } return schema } func (w *WebSearch) Execute(ctx context.Context, args map[string]any) (any, string, error) { rawQuery, ok := args["query"] if !ok { return nil, "", fmt.Errorf("query parameter is required") } queryStr, ok := rawQuery.(string) if !ok || strings.TrimSpace(queryStr) == "" { return nil, "", fmt.Errorf("query must be a non-empty string") } maxResults := 5 if v, ok := args["max_results"].(float64); ok && int(v) > 0 { maxResults = int(v) } result, err := performWebSearch(ctx, queryStr, maxResults) if err != nil { return nil, "", err } return result, "", nil } func performWebSearch(ctx context.Context, query string, maxResults int) (*SearchResponse, error) { reqBody := SearchRequest{Query: query, MaxResults: maxResults} jsonBody, err := json.Marshal(reqBody) if err != nil { return nil, fmt.Errorf("failed to marshal request body: %w", 
err) } searchURL, err := url.Parse("https://ollama.com/api/web_search") if err != nil { return nil, fmt.Errorf("failed to parse search URL: %w", err) } q := searchURL.Query() q.Add("ts", strconv.FormatInt(time.Now().Unix(), 10)) searchURL.RawQuery = q.Encode() data := fmt.Appendf(nil, "%s,%s", http.MethodPost, searchURL.RequestURI()) signature, err := auth.Sign(ctx, data) if err != nil { return nil, fmt.Errorf("failed to sign request: %w", err) } req, err := http.NewRequestWithContext(ctx, http.MethodPost, searchURL.String(), bytes.NewBuffer(jsonBody)) if err != nil { return nil, fmt.Errorf("failed to create request: %w", err) } req.Header.Set("Content-Type", "application/json") if signature != "" { req.Header.Set("Authorization", fmt.Sprintf("Bearer %s", signature)) } client := &http.Client{Timeout: 10 * time.Second} resp, err := client.Do(req) if err != nil { return nil, fmt.Errorf("failed to execute search request: %w", err) } defer resp.Body.Close() if resp.StatusCode != http.StatusOK { return nil, fmt.Errorf("search API error (status %d)", resp.StatusCode) } var result SearchResponse if err := json.NewDecoder(resp.Body).Decode(&result); err != nil { return nil, fmt.Errorf("failed to decode response: %w", err) } return &result, nil }
go
MIT
626af2d80973270c4d59b8df7153ac47ad67ed7b
2026-01-07T08:35:43.337630Z
false
ollama/ollama
https://github.com/ollama/ollama/blob/626af2d80973270c4d59b8df7153ac47ad67ed7b/app/dialog/dlgs_darwin.go
app/dialog/dlgs_darwin.go
package dialog import ( "github.com/ollama/ollama/app/dialog/cocoa" ) func (b *MsgBuilder) yesNo() bool { return cocoa.YesNoDlg(b.Msg, b.Dlg.Title) } func (b *MsgBuilder) info() { cocoa.InfoDlg(b.Msg, b.Dlg.Title) } func (b *MsgBuilder) error() { cocoa.ErrorDlg(b.Msg, b.Dlg.Title) } func (b *FileBuilder) load() (string, error) { return b.run(false) } func (b *FileBuilder) loadMultiple() ([]string, error) { return b.runMultiple() } func (b *FileBuilder) save() (string, error) { return b.run(true) } func (b *FileBuilder) run(save bool) (string, error) { star := false var exts []string for _, filt := range b.Filters { for _, ext := range filt.Extensions { if ext == "*" { star = true } else { exts = append(exts, ext) } } } if star && save { /* OSX doesn't allow the user to switch visible file types/extensions. Also ** NSSavePanel's allowsOtherFileTypes property has no effect for an open ** dialog, so if "*" is a possible extension we must always show all files. */ exts = nil } f, err := cocoa.FileDlg(save, b.Dlg.Title, exts, star, b.StartDir, b.StartFile, b.ShowHiddenFiles) if f == "" && err == nil { return "", ErrCancelled } return f, err } func (b *FileBuilder) runMultiple() ([]string, error) { star := false var exts []string for _, filt := range b.Filters { for _, ext := range filt.Extensions { if ext == "*" { star = true } else { exts = append(exts, ext) } } } files, err := cocoa.MultiFileDlg(b.Dlg.Title, exts, star, b.StartDir, b.ShowHiddenFiles) if len(files) == 0 && err == nil { return nil, ErrCancelled } return files, err } func (b *DirectoryBuilder) browse() (string, error) { f, err := cocoa.DirDlg(b.Dlg.Title, b.StartDir, b.ShowHiddenFiles) if f == "" && err == nil { return "", ErrCancelled } return f, err }
go
MIT
626af2d80973270c4d59b8df7153ac47ad67ed7b
2026-01-07T08:35:43.337630Z
false
ollama/ollama
https://github.com/ollama/ollama/blob/626af2d80973270c4d59b8df7153ac47ad67ed7b/app/dialog/dlgs_windows.go
app/dialog/dlgs_windows.go
package dialog

import (
	"fmt"
	"reflect"
	"syscall"
	"unicode/utf16"
	"unsafe"

	"github.com/TheTitanrain/w32"
)

// multiFileBufferSize is the size (in UTF-16 code units) of the result buffer
// for multi-select open dialogs: room for roughly ten MAX_PATH-length names.
const multiFileBufferSize = w32.MAX_PATH * 10

// WinDlgError wraps the numeric code reported by CommDlgExtendedError.
type WinDlgError int

func (e WinDlgError) Error() string {
	return fmt.Sprintf("CommDlgExtendedError: %#x", int(e))
}

// err translates the last common-dialog failure into a Go error.
// A zero extended-error code means the user cancelled the dialog.
func err() error {
	e := w32.CommDlgExtendedError()
	if e == 0 {
		return ErrCancelled
	}
	return WinDlgError(e)
}

// yesNo shows a Yes/No message box and reports whether Yes was pressed.
func (b *MsgBuilder) yesNo() bool {
	r := w32.MessageBox(w32.HWND(0), b.Msg, firstOf(b.Dlg.Title, "Confirm?"), w32.MB_YESNO)
	return r == w32.IDYES
}

// info shows an informational message box with a single OK button.
func (b *MsgBuilder) info() {
	w32.MessageBox(w32.HWND(0), b.Msg, firstOf(b.Dlg.Title, "Information"), w32.MB_OK|w32.MB_ICONINFORMATION)
}

// error shows an error message box with a single OK button.
func (b *MsgBuilder) error() {
	w32.MessageBox(w32.HWND(0), b.Msg, firstOf(b.Dlg.Title, "Error"), w32.MB_OK|w32.MB_ICONERROR)
}

// filedlg bundles the OPENFILENAME struct with the Go-owned buffers it
// points into (result buffer and filter string).
type filedlg struct {
	buf     []uint16
	filters []uint16
	opf     *w32.OPENFILENAME
}

// Filename decodes the NUL-terminated UTF-16 path in buf.
func (d filedlg) Filename() string {
	i := 0
	for i < len(d.buf) && d.buf[i] != 0 {
		i++
	}
	return string(utf16.Decode(d.buf[:i]))
}

// parseMultipleFilenames decodes the OFN_ALLOWMULTISELECT|OFN_EXPLORER result
// buffer: "dir\0file1\0file2\0...\0\0" for multiple files, or a single full
// path "path\0" when only one file was chosen.
func (d filedlg) parseMultipleFilenames() []string {
	var files []string
	i := 0
	// Find first null terminator (directory path)
	for i < len(d.buf) && d.buf[i] != 0 {
		i++
	}
	if i >= len(d.buf) {
		return files
	}
	// Get directory path
	dirPath := string(utf16.Decode(d.buf[:i]))
	i++ // Skip null terminator
	// Check if there are more files (multiple selection)
	if i < len(d.buf) && d.buf[i] != 0 {
		// Multiple files selected - parse filenames
		for i < len(d.buf) {
			start := i
			// Find next null terminator
			for i < len(d.buf) && d.buf[i] != 0 {
				i++
			}
			if i >= len(d.buf) {
				break
			}
			if start < i {
				filename := string(utf16.Decode(d.buf[start:i]))
				// Join directory and bare filename back into a full path.
				if dirPath != "" {
					files = append(files, dirPath+"\\"+filename)
				} else {
					files = append(files, filename)
				}
			}
			i++ // Skip null terminator
			if i >= len(d.buf) || d.buf[i] == 0 {
				break // End of list (double NUL)
			}
		}
	} else {
		// Single file selected: the buffer already holds the full path.
		files = append(files, dirPath)
	}
	return files
}

// load shows a single-selection open dialog.
func (b *FileBuilder) load() (string, error) {
	d := openfile(w32.OFN_FILEMUSTEXIST|w32.OFN_NOCHANGEDIR, b)
	if w32.GetOpenFileName(d.opf) {
		return d.Filename(), nil
	}
	return "", err()
}

// loadMultiple shows a multi-selection open dialog; it swaps in a larger
// result buffer because all selected names share one buffer.
func (b *FileBuilder) loadMultiple() ([]string, error) {
	d := openfile(w32.OFN_FILEMUSTEXIST|w32.OFN_NOCHANGEDIR|w32.OFN_ALLOWMULTISELECT|w32.OFN_EXPLORER, b)
	d.buf = make([]uint16, multiFileBufferSize)
	d.opf.File = utf16ptr(d.buf)
	d.opf.MaxFile = uint32(len(d.buf))
	if w32.GetOpenFileName(d.opf) {
		return d.parseMultipleFilenames(), nil
	}
	return nil, err()
}

// save shows a save dialog, prompting before overwriting an existing file.
func (b *FileBuilder) save() (string, error) {
	d := openfile(w32.OFN_OVERWRITEPROMPT|w32.OFN_NOCHANGEDIR, b)
	if w32.GetSaveFileName(d.opf) {
		return d.Filename(), nil
	}
	return "", err()
}

/* syscall.UTF16PtrFromString not sufficient because we need to encode
** embedded NUL bytes (e.g. in the filter string). */
func utf16ptr(utf16 []uint16) *uint16 {
	if utf16[len(utf16)-1] != 0 {
		panic("refusing to make ptr to non-NUL terminated utf16 slice")
	}
	h := (*reflect.SliceHeader)(unsafe.Pointer(&utf16))
	return (*uint16)(unsafe.Pointer(h.Data))
}

// utf16slice reinterprets a C-style NUL-terminated uint16 pointer as a slice.
// NOTE(review): currently unused; the length-scan loop below only increments
// i and never advances slice, so it cannot terminate unless slice[0] == 0 —
// verify/fix before ever using this helper.
func utf16slice(ptr *uint16) []uint16 { //nolint:unused
	hdr := reflect.SliceHeader{Data: uintptr(unsafe.Pointer(ptr)), Len: 1, Cap: 1}
	slice := *((*[]uint16)(unsafe.Pointer(&hdr))) //nolint:govet
	i := 0
	for slice[len(slice)-1] != 0 {
		i++
	}
	hdr.Len = i
	slice = *((*[]uint16)(unsafe.Pointer(&hdr))) //nolint:govet
	return slice
}

// openfile builds the OPENFILENAME structure shared by load/loadMultiple/save:
// result buffer (pre-seeded with StartFile), initial directory, title, and the
// "Desc\0*.ext1;*.ext2;\0...\0\0" filter string.
func openfile(flags uint32, b *FileBuilder) (d filedlg) {
	d.buf = make([]uint16, w32.MAX_PATH)
	if b.StartFile != "" {
		// Pre-fill the result buffer so the dialog opens with this name.
		initialName, _ := syscall.UTF16FromString(b.StartFile)
		for i := 0; i < len(initialName) && i < w32.MAX_PATH; i++ {
			d.buf[i] = initialName[i]
		}
	}
	d.opf = &w32.OPENFILENAME{
		File:    utf16ptr(d.buf),
		MaxFile: uint32(len(d.buf)),
		Flags:   flags,
	}
	d.opf.StructSize = uint32(unsafe.Sizeof(*d.opf))
	if b.StartDir != "" {
		d.opf.InitialDir, _ = syscall.UTF16PtrFromString(b.StartDir)
	}
	if b.Dlg.Title != "" {
		d.opf.Title, _ = syscall.UTF16PtrFromString(b.Dlg.Title)
	}
	for _, filt := range b.Filters {
		/* build utf16 string of form "Music File\0*.mp3;*.ogg;*.wav;\0" */
		d.filters = append(d.filters, utf16.Encode([]rune(filt.Desc))...)
		d.filters = append(d.filters, 0)
		for _, ext := range filt.Extensions {
			s := fmt.Sprintf("*.%s;", ext)
			d.filters = append(d.filters, utf16.Encode([]rune(s))...)
		}
		d.filters = append(d.filters, 0)
	}
	if d.filters != nil {
		d.filters = append(d.filters, 0, 0) // two extra NUL chars to terminate the list
		d.opf.Filter = utf16ptr(d.filters)
	}
	return d
}

// dirdlg wraps the BROWSEINFO used by the directory picker.
type dirdlg struct {
	bi *w32.BROWSEINFO
}

// BFFM_* messages for the SHBrowseForFolder callback/window. Values mirror
// the Windows shlobj.h definitions; only INITIALIZED and SETSELECTION are
// used below, the rest are kept for completeness.
const (
	bffm_INITIALIZED     = 1
	bffm_SELCHANGED      = 2
	bffm_VALIDATEFAILEDA = 3
	bffm_VALIDATEFAILEDW = 4
	bffm_SETSTATUSTEXTA  = (w32.WM_USER + 100)
	bffm_SETSTATUSTEXTW  = (w32.WM_USER + 104)
	bffm_ENABLEOK        = (w32.WM_USER + 101)
	bffm_SETSELECTIONA   = (w32.WM_USER + 102)
	bffm_SETSELECTIONW   = (w32.WM_USER + 103)
	bffm_SETOKTEXT       = (w32.WM_USER + 105)
	bffm_SETEXPANDED     = (w32.WM_USER + 106)
	bffm_SETSTATUSTEXT   = bffm_SETSTATUSTEXTW
	bffm_SETSELECTION    = bffm_SETSELECTIONW
	bffm_VALIDATEFAILED  = bffm_VALIDATEFAILEDW
)

// callbackDefaultDir is the SHBrowseForFolder callback: when the dialog is
// initialized it selects the start directory passed via lpData.
func callbackDefaultDir(hwnd w32.HWND, msg uint, lParam, lpData uintptr) int {
	if msg == bffm_INITIALIZED {
		_ = w32.SendMessage(hwnd, bffm_SETSELECTION, w32.TRUE, lpData)
	}
	return 0
}

// selectdir builds the BROWSEINFO for the directory picker, wiring up the
// callback only when a start directory was requested.
func selectdir(b *DirectoryBuilder) (d dirdlg) {
	d.bi = &w32.BROWSEINFO{Flags: w32.BIF_RETURNONLYFSDIRS | w32.BIF_NEWDIALOGSTYLE}
	if b.Dlg.Title != "" {
		d.bi.Title, _ = syscall.UTF16PtrFromString(b.Dlg.Title)
	}
	if b.StartDir != "" {
		s16, _ := syscall.UTF16PtrFromString(b.StartDir)
		d.bi.LParam = uintptr(unsafe.Pointer(s16))
		d.bi.CallbackFunc = syscall.NewCallback(callbackDefaultDir)
	}
	return d
}

// browse shows the directory picker; a zero PIDL result means cancellation.
func (b *DirectoryBuilder) browse() (string, error) {
	d := selectdir(b)
	res := w32.SHBrowseForFolder(d.bi)
	if res == 0 {
		return "", ErrCancelled
	}
	return w32.SHGetPathFromIDList(res), nil
}
go
MIT
626af2d80973270c4d59b8df7153ac47ad67ed7b
2026-01-07T08:35:43.337630Z
false
ollama/ollama
https://github.com/ollama/ollama/blob/626af2d80973270c4d59b8df7153ac47ad67ed7b/app/dialog/util.go
app/dialog/util.go
//go:build windows package dialog func firstOf(args ...string) string { for _, arg := range args { if arg != "" { return arg } } return "" }
go
MIT
626af2d80973270c4d59b8df7153ac47ad67ed7b
2026-01-07T08:35:43.337630Z
false
ollama/ollama
https://github.com/ollama/ollama/blob/626af2d80973270c4d59b8df7153ac47ad67ed7b/app/dialog/dlgs.go
app/dialog/dlgs.go
//go:build windows || darwin

// Package dialog provides a simple cross-platform common dialog API.
// Eg. to prompt the user with a yes/no dialog:
//
//	if dialog.MsgDlg("%s", "Do you want to continue?").YesNo() {
//		// user pressed Yes
//	}
//
// The general usage pattern is to call one of the toplevel *Dlg functions
// which return a *Builder structure. From here you can optionally call
// configuration functions (eg. Title) to customise the dialog, before
// using a launcher function to run the dialog.
package dialog

import (
	"errors"
	"fmt"
)

// ErrCancelled is an error returned when a user cancels/closes a dialog.
var ErrCancelled = errors.New("Cancelled")

// Cancelled refers to ErrCancelled.
//
// Deprecated: Use ErrCancelled instead.
var Cancelled = ErrCancelled

// Dlg is the common type for dialogs.
type Dlg struct {
	Title string
}

// MsgBuilder is used for creating message boxes.
type MsgBuilder struct {
	Dlg
	Msg string
}

// Message initialises a MsgBuilder with the provided message.
func Message(format string, args ...interface{}) *MsgBuilder {
	return &MsgBuilder{Msg: fmt.Sprintf(format, args...)}
}

// Title specifies what the title of the message dialog will be.
func (b *MsgBuilder) Title(title string) *MsgBuilder {
	b.Dlg.Title = title
	return b
}

// YesNo spawns the message dialog with two buttons, "Yes" and "No".
// Returns true iff the user selected "Yes".
func (b *MsgBuilder) YesNo() bool {
	return b.yesNo()
}

// Info spawns the message dialog with an information icon and single button, "Ok".
func (b *MsgBuilder) Info() {
	b.info()
}

// Error spawns the message dialog with an error icon and single button, "Ok".
func (b *MsgBuilder) Error() {
	b.error()
}

// FileFilter represents a category of files (eg. audio files, spreadsheets).
type FileFilter struct {
	Desc       string
	Extensions []string
}

// FileBuilder is used for creating file browsing dialogs.
type FileBuilder struct {
	Dlg
	StartDir        string
	StartFile       string
	Filters         []FileFilter
	ShowHiddenFiles bool
}

// File initialises a FileBuilder using the default configuration.
func File() *FileBuilder {
	return &FileBuilder{}
}

// Title specifies the title to be used for the dialog.
func (b *FileBuilder) Title(title string) *FileBuilder {
	b.Dlg.Title = title
	return b
}

// Filter adds a category of files to the types allowed by the dialog. Multiple
// calls to Filter are cumulative - any of the provided categories will be allowed.
// By default all files can be selected.
//
// The special extension '*' allows all files to be selected when the Filter is active.
func (b *FileBuilder) Filter(desc string, extensions ...string) *FileBuilder {
	filt := FileFilter{desc, extensions}
	// No extensions given means "match everything" for this category.
	if len(filt.Extensions) == 0 {
		filt.Extensions = append(filt.Extensions, "*")
	}
	b.Filters = append(b.Filters, filt)
	return b
}

// SetStartDir specifies the initial directory of the dialog.
func (b *FileBuilder) SetStartDir(startDir string) *FileBuilder {
	b.StartDir = startDir
	return b
}

// SetStartFile specifies the initial file name of the dialog.
func (b *FileBuilder) SetStartFile(startFile string) *FileBuilder {
	b.StartFile = startFile
	return b
}

// ShowHidden sets whether hidden files should be visible in the dialog.
func (b *FileBuilder) ShowHidden(show bool) *FileBuilder {
	b.ShowHiddenFiles = show
	return b
}

// Load spawns the file selection dialog using the configured settings,
// asking the user to select a single file. Returns ErrCancelled as the error
// if the user cancels or closes the dialog.
func (b *FileBuilder) Load() (string, error) {
	return b.load()
}

// LoadMultiple spawns the file selection dialog using the configured settings,
// asking the user to select multiple files. Returns ErrCancelled as the error
// if the user cancels or closes the dialog.
func (b *FileBuilder) LoadMultiple() ([]string, error) {
	return b.loadMultiple()
}

// Save spawns the file selection dialog using the configured settings,
// asking the user for a filename to save as. If the chosen file exists, the
// user is prompted whether they want to overwrite the file. Returns
// ErrCancelled as the error if the user cancels/closes the dialog, or selects
// not to overwrite the file.
func (b *FileBuilder) Save() (string, error) {
	return b.save()
}

// DirectoryBuilder is used for directory browse dialogs.
type DirectoryBuilder struct {
	Dlg
	StartDir        string
	ShowHiddenFiles bool
}

// Directory initialises a DirectoryBuilder using the default configuration.
func Directory() *DirectoryBuilder {
	return &DirectoryBuilder{}
}

// Browse spawns the directory selection dialog using the configured settings,
// asking the user to select a single folder. Returns ErrCancelled as the error
// if the user cancels or closes the dialog.
func (b *DirectoryBuilder) Browse() (string, error) {
	return b.browse()
}

// Title specifies the title to be used for the dialog.
func (b *DirectoryBuilder) Title(title string) *DirectoryBuilder {
	b.Dlg.Title = title
	return b
}

// SetStartDir specifies the initial directory to be used for the dialog.
func (b *DirectoryBuilder) SetStartDir(dir string) *DirectoryBuilder {
	b.StartDir = dir
	return b
}

// ShowHidden sets whether hidden files should be visible in the dialog.
func (b *DirectoryBuilder) ShowHidden(show bool) *DirectoryBuilder {
	b.ShowHiddenFiles = show
	return b
}
go
MIT
626af2d80973270c4d59b8df7153ac47ad67ed7b
2026-01-07T08:35:43.337630Z
false
ollama/ollama
https://github.com/ollama/ollama/blob/626af2d80973270c4d59b8df7153ac47ad67ed7b/app/dialog/cocoa/dlg_darwin.go
app/dialog/cocoa/dlg_darwin.go
package cocoa

// #cgo darwin LDFLAGS: -framework Cocoa -framework UniformTypeIdentifiers
// #include <stdlib.h>
// #include <sys/syslimits.h>
// #include "dlg.h"
import "C"

import (
	"bytes"
	"errors"
	"unsafe"
)

// AlertParams owns the C-side parameter struct for an alert dialog; the
// C strings it holds must be released via free().
type AlertParams struct {
	p C.AlertDlgParams
}

// mkAlertParams allocates C strings for msg and (if non-empty) title.
// The caller must call free() on the result.
func mkAlertParams(msg, title string, style C.AlertStyle) *AlertParams {
	a := AlertParams{C.AlertDlgParams{msg: C.CString(msg), style: style}}
	if title != "" {
		a.p.title = C.CString(title)
	}
	return &a
}

func (a *AlertParams) run() C.DlgResult {
	return C.alertDlg(&a.p)
}

// free releases the C strings allocated by mkAlertParams.
func (a *AlertParams) free() {
	C.free(unsafe.Pointer(a.p.msg))
	if a.p.title != nil {
		C.free(unsafe.Pointer(a.p.title))
	}
}

// nsStr converts a Go string to an NSString (via the C helper NSStr);
// release with C.NSRelease.
func nsStr(s string) unsafe.Pointer {
	return C.NSStr(unsafe.Pointer(&[]byte(s)[0]), C.int(len(s)))
}

// YesNoDlg shows a Yes/No alert and reports whether Yes was chosen.
func YesNoDlg(msg, title string) bool {
	a := mkAlertParams(msg, title, C.MSG_YESNO)
	defer a.free()
	return a.run() == C.DLG_OK
}

// InfoDlg shows an informational alert.
func InfoDlg(msg, title string) {
	a := mkAlertParams(msg, title, C.MSG_INFO)
	defer a.free()
	a.run()
}

// ErrorDlg shows an error alert.
func ErrorDlg(msg, title string) {
	a := mkAlertParams(msg, title, C.MSG_ERROR)
	defer a.free()
	a.run()
}

const (
	// BUFSIZE is the result-buffer size for single-path dialogs.
	BUFSIZE = C.PATH_MAX
	// MULTI_FILE_BUF_SIZE is the larger buffer used when multiple files
	// may be selected (all paths share one buffer).
	MULTI_FILE_BUF_SIZE = 32768
)

// MultiFileDlg opens a file dialog that allows multiple file selection
func MultiFileDlg(title string, exts []string, relaxExt bool, startDir string, showHidden bool) ([]string, error) {
	return fileDlgWithOptions(C.LOADDLG, title, exts, relaxExt, startDir, "", showHidden, true)
}

// FileDlg opens a file dialog for single file selection (kept for compatibility)
func FileDlg(save bool, title string, exts []string, relaxExt bool, startDir string, filename string, showHidden bool) (string, error) {
	mode := C.LOADDLG
	if save {
		mode = C.SAVEDLG
	}
	files, err := fileDlgWithOptions(mode, title, exts, relaxExt, startDir, filename, showHidden, false)
	if err != nil {
		return "", err
	}
	if len(files) == 0 {
		return "", nil
	}
	return files[0], nil
}

// DirDlg opens a directory-selection dialog.
func DirDlg(title string, startDir string, showHidden bool) (string, error) {
	files, err := fileDlgWithOptions(C.DIRDLG, title, nil, false, startDir, "", showHidden, false)
	if err != nil {
		return "", err
	}
	if len(files) == 0 {
		return "", nil
	}
	return files[0], nil
}

// fileDlgWithOptions is the unified file dialog function that handles both
// single and multiple selection.
//
// Bug fix: the single-file branch previously ended in the non-compiling
// expression `[]string(unknown)`; it now returns the decoded path. It also
// guards against bytes.IndexByte finding no NUL (which would have made the
// old slice expression panic with index -1).
func fileDlgWithOptions(mode int, title string, exts []string, relaxExt bool, startDir, filename string, showHidden, allowMultiple bool) ([]string, error) {
	// Use larger buffer for multiple files, smaller for single
	bufSize := BUFSIZE
	if allowMultiple {
		bufSize = MULTI_FILE_BUF_SIZE
	}
	p := C.FileDlgParams{
		mode: C.int(mode),
		nbuf: C.int(bufSize),
	}
	if allowMultiple {
		p.allowMultiple = C.int(1) //nolint:structcheck // enable multiple selection
	}
	if showHidden {
		p.showHidden = 1
	}
	// The C side writes NUL-terminated UTF-8 path(s) into this buffer.
	p.buf = (*C.char)(C.malloc(C.size_t(bufSize)))
	defer C.free(unsafe.Pointer(p.buf))
	buf := (*(*[MULTI_FILE_BUF_SIZE]byte)(unsafe.Pointer(p.buf)))[:bufSize]
	if title != "" {
		p.title = C.CString(title)
		defer C.free(unsafe.Pointer(p.title))
	}
	if startDir != "" {
		p.startDir = C.CString(startDir)
		defer C.free(unsafe.Pointer(p.startDir))
	}
	if filename != "" {
		p.filename = C.CString(filename)
		defer C.free(unsafe.Pointer(p.filename))
	}
	if len(exts) > 0 {
		if len(exts) > 999 {
			panic("more than 999 extensions not supported")
		}
		ptrSize := int(unsafe.Sizeof(&title))
		p.exts = (*unsafe.Pointer)(C.malloc(C.size_t(ptrSize * len(exts))))
		defer C.free(unsafe.Pointer(p.exts))
		cext := (*(*[999]unsafe.Pointer)(unsafe.Pointer(p.exts)))[:]
		for i, ext := range exts {
			cext[i] = nsStr(ext)
			defer C.NSRelease(cext[i]) // released when this function returns
		}
		p.numext = C.int(len(exts))
		if relaxExt {
			p.relaxext = 1
		}
	}
	// Execute dialog and parse results
	switch C.fileDlg(&p) {
	case C.DLG_OK:
		if allowMultiple {
			// Parse multiple null-terminated strings from buffer
			var files []string
			start := 0
			for i := range len(buf) - 1 {
				if buf[i] == 0 {
					if i > start {
						files = append(files, string(buf[start:i]))
					}
					start = i + 1
					// Check for double null (end of list)
					if i+1 < len(buf) && buf[i+1] == 0 {
						break
					}
				}
			}
			return files, nil
		}
		// Single file - return as array for consistency.
		// (Local renamed from `filename`, which shadowed the parameter.)
		end := bytes.IndexByte(buf, 0)
		if end < 0 {
			end = len(buf) // defensive: no NUL written; take the whole buffer
		}
		path := string(buf[:end])
		return []string{path}, nil
	case C.DLG_CANCEL:
		return nil, nil
	case C.DLG_URLFAIL:
		return nil, errors.New("failed to get file-system representation for selected URL")
	}
	panic("unhandled case")
}
go
MIT
626af2d80973270c4d59b8df7153ac47ad67ed7b
2026-01-07T08:35:43.337630Z
false
ollama/ollama
https://github.com/ollama/ollama/blob/626af2d80973270c4d59b8df7153ac47ad67ed7b/app/version/version.go
app/version/version.go
//go:build windows || darwin package version var Version string = "0.0.0"
go
MIT
626af2d80973270c4d59b8df7153ac47ad67ed7b
2026-01-07T08:35:43.337630Z
false
ollama/ollama
https://github.com/ollama/ollama/blob/626af2d80973270c4d59b8df7153ac47ad67ed7b/app/store/database_test.go
app/store/database_test.go
//go:build windows || darwin package store import ( "database/sql" "fmt" "os" "path/filepath" "sort" "strings" "testing" "time" "github.com/google/go-cmp/cmp" _ "github.com/mattn/go-sqlite3" ) func TestSchemaMigrations(t *testing.T) { t.Run("schema comparison after migration", func(t *testing.T) { tmpDir := t.TempDir() migratedDBPath := filepath.Join(tmpDir, "migrated.db") migratedDB := loadV2Schema(t, migratedDBPath) defer migratedDB.Close() if err := migratedDB.migrate(); err != nil { t.Fatalf("migration failed: %v", err) } // Create fresh database with current schema freshDBPath := filepath.Join(tmpDir, "fresh.db") freshDB, err := newDatabase(freshDBPath) if err != nil { t.Fatalf("failed to create fresh database: %v", err) } defer freshDB.Close() // Extract tables and indexes from both databases, directly comparing their schemas won't work due to ordering migratedSchema := schemaMap(migratedDB) freshSchema := schemaMap(freshDB) if !cmp.Equal(migratedSchema, freshSchema) { t.Errorf("Schema difference found:\n%s", cmp.Diff(freshSchema, migratedSchema)) } // Verify both databases have the same final schema version migratedVersion, _ := migratedDB.getSchemaVersion() freshVersion, _ := freshDB.getSchemaVersion() if migratedVersion != freshVersion { t.Errorf("schema version mismatch: migrated=%d, fresh=%d", migratedVersion, freshVersion) } }) t.Run("idempotent migrations", func(t *testing.T) { tmpDir := t.TempDir() dbPath := filepath.Join(tmpDir, "test.db") db := loadV2Schema(t, dbPath) defer db.Close() // Run migration twice if err := db.migrate(); err != nil { t.Fatalf("first migration failed: %v", err) } if err := db.migrate(); err != nil { t.Fatalf("second migration failed: %v", err) } // Verify schema version is still correct version, err := db.getSchemaVersion() if err != nil { t.Fatalf("failed to get schema version: %v", err) } if version != currentSchemaVersion { t.Errorf("expected schema version %d after double migration, got %d", currentSchemaVersion, 
version) } }) t.Run("init database has correct schema version", func(t *testing.T) { tmpDir := t.TempDir() dbPath := filepath.Join(tmpDir, "test.db") db, err := newDatabase(dbPath) if err != nil { t.Fatalf("failed to create database: %v", err) } defer db.Close() // Get the schema version from the newly initialized database version, err := db.getSchemaVersion() if err != nil { t.Fatalf("failed to get schema version: %v", err) } // Verify it matches the currentSchemaVersion constant if version != currentSchemaVersion { t.Errorf("expected schema version %d in initialized database, got %d", currentSchemaVersion, version) } }) } func TestChatDeletionWithCascade(t *testing.T) { t.Run("chat deletion cascades to related messages", func(t *testing.T) { tmpDir := t.TempDir() dbPath := filepath.Join(tmpDir, "test.db") db, err := newDatabase(dbPath) if err != nil { t.Fatalf("failed to create database: %v", err) } defer db.Close() // Create test chat testChatID := "test-chat-cascade-123" testChat := Chat{ ID: testChatID, Title: "Test Chat for Cascade Delete", CreatedAt: time.Now(), Messages: []Message{ { Role: "user", Content: "Hello, this is a test message", CreatedAt: time.Now(), UpdatedAt: time.Now(), }, { Role: "assistant", Content: "Hi there! 
This is a response.", CreatedAt: time.Now(), UpdatedAt: time.Now(), }, }, } // Save the chat with messages if err := db.saveChat(testChat); err != nil { t.Fatalf("failed to save test chat: %v", err) } // Verify chat and messages exist chatCount := countRows(t, db, "chats") messageCount := countRows(t, db, "messages") if chatCount != 1 { t.Errorf("expected 1 chat, got %d", chatCount) } if messageCount != 2 { t.Errorf("expected 2 messages, got %d", messageCount) } // Verify specific chat exists var exists bool err = db.conn.QueryRow("SELECT EXISTS(SELECT 1 FROM chats WHERE id = ?)", testChatID).Scan(&exists) if err != nil { t.Fatalf("failed to check chat existence: %v", err) } if !exists { t.Error("test chat should exist before deletion") } // Verify messages exist for this chat messageCountForChat := countRowsWithCondition(t, db, "messages", "chat_id = ?", testChatID) if messageCountForChat != 2 { t.Errorf("expected 2 messages for test chat, got %d", messageCountForChat) } // Delete the chat if err := db.deleteChat(testChatID); err != nil { t.Fatalf("failed to delete chat: %v", err) } // Verify chat is deleted chatCountAfter := countRows(t, db, "chats") if chatCountAfter != 0 { t.Errorf("expected 0 chats after deletion, got %d", chatCountAfter) } // Verify messages are CASCADE deleted messageCountAfter := countRows(t, db, "messages") if messageCountAfter != 0 { t.Errorf("expected 0 messages after CASCADE deletion, got %d", messageCountAfter) } // Verify specific chat no longer exists err = db.conn.QueryRow("SELECT EXISTS(SELECT 1 FROM chats WHERE id = ?)", testChatID).Scan(&exists) if err != nil { t.Fatalf("failed to check chat existence after deletion: %v", err) } if exists { t.Error("test chat should not exist after deletion") } // Verify no orphaned messages remain orphanedCount := countRowsWithCondition(t, db, "messages", "chat_id = ?", testChatID) if orphanedCount != 0 { t.Errorf("expected 0 orphaned messages, got %d", orphanedCount) } }) t.Run("foreign keys 
are enabled", func(t *testing.T) { tmpDir := t.TempDir() dbPath := filepath.Join(tmpDir, "test.db") db, err := newDatabase(dbPath) if err != nil { t.Fatalf("failed to create database: %v", err) } defer db.Close() // Verify foreign keys are enabled var foreignKeysEnabled int err = db.conn.QueryRow("PRAGMA foreign_keys").Scan(&foreignKeysEnabled) if err != nil { t.Fatalf("failed to check foreign keys: %v", err) } if foreignKeysEnabled != 1 { t.Errorf("expected foreign keys to be enabled (1), got %d", foreignKeysEnabled) } }) // This test is only relevant for v8 migrations, but we keep it here for now // since it's a useful test to ensure that we don't introduce any new orphaned data t.Run("cleanup orphaned data", func(t *testing.T) { tmpDir := t.TempDir() dbPath := filepath.Join(tmpDir, "test.db") db, err := newDatabase(dbPath) if err != nil { t.Fatalf("failed to create database: %v", err) } defer db.Close() // First disable foreign keys to simulate the bug from ollama/ollama#11785 _, err = db.conn.Exec("PRAGMA foreign_keys = OFF") if err != nil { t.Fatalf("failed to disable foreign keys: %v", err) } // Create a chat and message testChatID := "orphaned-test-chat" testMessageID := int64(999) _, err = db.conn.Exec("INSERT INTO chats (id, title) VALUES (?, ?)", testChatID, "Orphaned Test Chat") if err != nil { t.Fatalf("failed to insert test chat: %v", err) } _, err = db.conn.Exec("INSERT INTO messages (id, chat_id, role, content) VALUES (?, ?, ?, ?)", testMessageID, testChatID, "user", "test message") if err != nil { t.Fatalf("failed to insert test message: %v", err) } // Delete chat but keep message (simulating the bug from ollama/ollama#11785) _, err = db.conn.Exec("DELETE FROM chats WHERE id = ?", testChatID) if err != nil { t.Fatalf("failed to delete chat: %v", err) } // Verify we have orphaned message orphanedCount := countRowsWithCondition(t, db, "messages", "chat_id = ?", testChatID) if orphanedCount != 1 { t.Errorf("expected 1 orphaned message, got %d", 
orphanedCount) } // Run cleanup if err := db.cleanupOrphanedData(); err != nil { t.Fatalf("failed to cleanup orphaned data: %v", err) } // Verify orphaned message is gone orphanedCountAfter := countRowsWithCondition(t, db, "messages", "chat_id = ?", testChatID) if orphanedCountAfter != 0 { t.Errorf("expected 0 orphaned messages after cleanup, got %d", orphanedCountAfter) } }) } func countRows(t *testing.T, db *database, table string) int { t.Helper() var count int err := db.conn.QueryRow(fmt.Sprintf("SELECT COUNT(*) FROM %s", table)).Scan(&count) if err != nil { t.Fatalf("failed to count rows in %s: %v", table, err) } return count } func countRowsWithCondition(t *testing.T, db *database, table, condition string, args ...interface{}) int { t.Helper() var count int query := fmt.Sprintf("SELECT COUNT(*) FROM %s WHERE %s", table, condition) err := db.conn.QueryRow(query, args...).Scan(&count) if err != nil { t.Fatalf("failed to count rows with condition: %v", err) } return count } // Test helpers for schema migration testing // schemaMap returns both tables/columns and indexes (ignoring order) func schemaMap(db *database) map[string]interface{} { result := make(map[string]any) result["tables"] = columnMap(db) result["indexes"] = indexMap(db) return result } // columnMap returns a map of table names to their column sets (ignoring order) func columnMap(db *database) map[string][]string { result := make(map[string][]string) // Get all table names tableQuery := `SELECT name FROM sqlite_master WHERE type='table' AND name NOT LIKE 'sqlite_%' ORDER BY name` rows, _ := db.conn.Query(tableQuery) defer rows.Close() for rows.Next() { var tableName string rows.Scan(&tableName) // Get columns for this table colQuery := fmt.Sprintf("PRAGMA table_info(%s)", tableName) colRows, _ := db.conn.Query(colQuery) var columns []string for colRows.Next() { var cid int var name, dataType sql.NullString var notNull, primaryKey int var defaultValue sql.NullString colRows.Scan(&cid, &name, 
&dataType, &notNull, &defaultValue, &primaryKey) // Create a normalized column description colDesc := fmt.Sprintf("%s %s", name.String, dataType.String) if notNull == 1 { colDesc += " NOT NULL" } if defaultValue.Valid && defaultValue.String != "" { // Skip DEFAULT for schema_version as it doesn't get updated during migrations if name.String != "schema_version" { colDesc += " DEFAULT " + defaultValue.String } } if primaryKey == 1 { colDesc += " PRIMARY KEY" } columns = append(columns, colDesc) } colRows.Close() // Sort columns to ignore order differences sort.Strings(columns) result[tableName] = columns } return result } // indexMap returns a map of index names to their definitions func indexMap(db *database) map[string]string { result := make(map[string]string) // Get all indexes (excluding auto-created primary key indexes) indexQuery := `SELECT name, sql FROM sqlite_master WHERE type='index' AND name NOT LIKE 'sqlite_%' AND sql IS NOT NULL ORDER BY name` rows, _ := db.conn.Query(indexQuery) defer rows.Close() for rows.Next() { var name, sql string rows.Scan(&name, &sql) // Normalize the SQL by removing extra whitespace sql = strings.Join(strings.Fields(sql), " ") result[name] = sql } return result } // loadV2Schema loads the version 2 schema from testdata/schema.sql func loadV2Schema(t *testing.T, dbPath string) *database { t.Helper() // Read the v1 schema file schemaFile := filepath.Join("testdata", "schema.sql") schemaSQL, err := os.ReadFile(schemaFile) if err != nil { t.Fatalf("failed to read schema file: %v", err) } // Open database connection conn, err := sql.Open("sqlite3", dbPath+"?_foreign_keys=on&_journal_mode=WAL&_busy_timeout=5000&_txlock=immediate") if err != nil { t.Fatalf("failed to open database: %v", err) } // Execute the v1 schema _, err = conn.Exec(string(schemaSQL)) if err != nil { conn.Close() t.Fatalf("failed to execute v1 schema: %v", err) } return &database{conn: conn} }
go
MIT
626af2d80973270c4d59b8df7153ac47ad67ed7b
2026-01-07T08:35:43.337630Z
false
ollama/ollama
https://github.com/ollama/ollama/blob/626af2d80973270c4d59b8df7153ac47ad67ed7b/app/store/image.go
app/store/image.go
//go:build windows || darwin package store import ( "crypto/sha256" "encoding/hex" "fmt" "os" "path/filepath" "strings" ) type Image struct { Filename string `json:"filename"` Path string `json:"path"` Size int64 `json:"size,omitempty"` MimeType string `json:"mime_type,omitempty"` } // Bytes loads image data from disk for a given ImageData reference func (i *Image) Bytes() ([]byte, error) { return ImgBytes(i.Path) } // ImgBytes reads image data from the specified file path func ImgBytes(path string) ([]byte, error) { if path == "" { return nil, fmt.Errorf("empty image path") } data, err := os.ReadFile(path) if err != nil { return nil, fmt.Errorf("read image file %s: %w", path, err) } return data, nil } // ImgDir returns the directory path for storing images for a specific chat func (s *Store) ImgDir() string { dbPath := s.DBPath if dbPath == "" { dbPath = defaultDBPath } storeDir := filepath.Dir(dbPath) return filepath.Join(storeDir, "cache", "images") } // ImgToFile saves image data to disk and returns ImageData reference func (s *Store) ImgToFile(chatID string, imageBytes []byte, filename, mimeType string) (Image, error) { baseImageDir := s.ImgDir() if err := os.MkdirAll(baseImageDir, 0o755); err != nil { return Image{}, fmt.Errorf("create base image directory: %w", err) } // Root prevents path traversal issues root, err := os.OpenRoot(baseImageDir) if err != nil { return Image{}, fmt.Errorf("open image root directory: %w", err) } defer root.Close() // Create chat-specific subdirectory within the root chatDir := sanitize(chatID) if err := root.Mkdir(chatDir, 0o755); err != nil && !os.IsExist(err) { return Image{}, fmt.Errorf("create chat directory: %w", err) } // Generate a unique filename to avoid conflicts // Use hash of content + original filename for uniqueness hash := sha256.Sum256(imageBytes) hashStr := hex.EncodeToString(hash[:])[:16] // Use first 16 chars of hash // Extract file extension from original filename or mime type ext := filepath.Ext(filename) 
if ext == "" { switch mimeType { case "image/jpeg": ext = ".jpg" case "image/png": ext = ".png" case "image/webp": ext = ".webp" default: ext = ".img" } } // Create unique filename: hash + original name + extension baseFilename := sanitize(strings.TrimSuffix(filename, ext)) uniqueFilename := fmt.Sprintf("%s_%s%s", hashStr, baseFilename, ext) relativePath := filepath.Join(chatDir, uniqueFilename) file, err := root.Create(relativePath) if err != nil { return Image{}, fmt.Errorf("create image file: %w", err) } defer file.Close() if _, err := file.Write(imageBytes); err != nil { return Image{}, fmt.Errorf("write image data: %w", err) } return Image{ Filename: uniqueFilename, Path: filepath.Join(baseImageDir, relativePath), Size: int64(len(imageBytes)), MimeType: mimeType, }, nil } // sanitize removes unsafe characters from filenames func sanitize(filename string) string { // Convert to safe characters only safe := strings.Map(func(r rune) rune { if (r >= 'a' && r <= 'z') || (r >= 'A' && r <= 'Z') || (r >= '0' && r <= '9') || r == '-' { return r } return '_' }, filename) // Clean up and validate safe = strings.Trim(safe, "_") if safe == "" { return "image" } return safe }
go
MIT
626af2d80973270c4d59b8df7153ac47ad67ed7b
2026-01-07T08:35:43.337630Z
false
ollama/ollama
https://github.com/ollama/ollama/blob/626af2d80973270c4d59b8df7153ac47ad67ed7b/app/store/database.go
app/store/database.go
//go:build windows || darwin package store import ( "database/sql" "encoding/json" "fmt" "strings" "time" sqlite3 "github.com/mattn/go-sqlite3" ) // currentSchemaVersion defines the current database schema version. // Increment this when making schema changes that require migrations. const currentSchemaVersion = 12 // database wraps the SQLite connection. // SQLite handles its own locking for concurrent access: // - Multiple readers can access the database simultaneously // - Writers are serialized (only one writer at a time) // - WAL mode allows readers to not block writers // This means we don't need application-level locks for database operations. type database struct { conn *sql.DB } func newDatabase(dbPath string) (*database, error) { // Open database connection conn, err := sql.Open("sqlite3", dbPath+"?_foreign_keys=on&_journal_mode=WAL&_busy_timeout=5000&_txlock=immediate") if err != nil { return nil, fmt.Errorf("open database: %w", err) } // Test the connection if err := conn.Ping(); err != nil { conn.Close() return nil, fmt.Errorf("ping database: %w", err) } db := &database{conn: conn} // Initialize schema if err := db.init(); err != nil { conn.Close() return nil, fmt.Errorf("initialize database: %w", err) } return db, nil } func (db *database) Close() error { _, _ = db.conn.Exec("PRAGMA wal_checkpoint(TRUNCATE);") return db.conn.Close() } func (db *database) init() error { if _, err := db.conn.Exec("PRAGMA foreign_keys = ON"); err != nil { return fmt.Errorf("enable foreign keys: %w", err) } schema := fmt.Sprintf(` CREATE TABLE IF NOT EXISTS settings ( id INTEGER PRIMARY KEY CHECK (id = 1), device_id TEXT NOT NULL DEFAULT '', has_completed_first_run BOOLEAN NOT NULL DEFAULT 0, expose BOOLEAN NOT NULL DEFAULT 0, survey BOOLEAN NOT NULL DEFAULT TRUE, browser BOOLEAN NOT NULL DEFAULT 0, models TEXT NOT NULL DEFAULT '', agent BOOLEAN NOT NULL DEFAULT 0, tools BOOLEAN NOT NULL DEFAULT 0, working_dir TEXT NOT NULL DEFAULT '', context_length INTEGER NOT NULL 
DEFAULT 4096, window_width INTEGER NOT NULL DEFAULT 0, window_height INTEGER NOT NULL DEFAULT 0, config_migrated BOOLEAN NOT NULL DEFAULT 0, airplane_mode BOOLEAN NOT NULL DEFAULT 0, turbo_enabled BOOLEAN NOT NULL DEFAULT 0, websearch_enabled BOOLEAN NOT NULL DEFAULT 0, selected_model TEXT NOT NULL DEFAULT '', sidebar_open BOOLEAN NOT NULL DEFAULT 0, think_enabled BOOLEAN NOT NULL DEFAULT 0, think_level TEXT NOT NULL DEFAULT '', remote TEXT NOT NULL DEFAULT '', -- deprecated schema_version INTEGER NOT NULL DEFAULT %d ); -- Insert default settings row if it doesn't exist INSERT OR IGNORE INTO settings (id) VALUES (1); CREATE TABLE IF NOT EXISTS chats ( id TEXT PRIMARY KEY, title TEXT NOT NULL DEFAULT '', created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, browser_state TEXT ); CREATE TABLE IF NOT EXISTS messages ( id INTEGER PRIMARY KEY AUTOINCREMENT, chat_id TEXT NOT NULL, role TEXT NOT NULL, content TEXT NOT NULL DEFAULT '', thinking TEXT NOT NULL DEFAULT '', stream BOOLEAN NOT NULL DEFAULT 0, model_name TEXT, model_cloud BOOLEAN, -- deprecated model_ollama_host BOOLEAN, -- deprecated created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, updated_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, thinking_time_start TIMESTAMP, thinking_time_end TIMESTAMP, tool_result TEXT, FOREIGN KEY (chat_id) REFERENCES chats(id) ON DELETE CASCADE ); CREATE INDEX IF NOT EXISTS idx_messages_chat_id ON messages(chat_id); CREATE TABLE IF NOT EXISTS tool_calls ( id INTEGER PRIMARY KEY AUTOINCREMENT, message_id INTEGER NOT NULL, type TEXT NOT NULL, function_name TEXT NOT NULL, function_arguments TEXT NOT NULL, function_result TEXT, FOREIGN KEY (message_id) REFERENCES messages(id) ON DELETE CASCADE ); CREATE INDEX IF NOT EXISTS idx_tool_calls_message_id ON tool_calls(message_id); CREATE TABLE IF NOT EXISTS attachments ( id INTEGER PRIMARY KEY AUTOINCREMENT, message_id INTEGER NOT NULL, filename TEXT NOT NULL, data BLOB NOT NULL, FOREIGN KEY (message_id) REFERENCES messages(id) ON 
DELETE CASCADE ); CREATE INDEX IF NOT EXISTS idx_attachments_message_id ON attachments(message_id); CREATE TABLE IF NOT EXISTS users ( name TEXT NOT NULL DEFAULT '', email TEXT NOT NULL DEFAULT '', plan TEXT NOT NULL DEFAULT '', cached_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP ); `, currentSchemaVersion) _, err := db.conn.Exec(schema) if err != nil { return err } // Check and upgrade schema version if needed if err := db.migrate(); err != nil { return fmt.Errorf("migrate schema: %w", err) } // Clean up orphaned records created before foreign key constraints were properly enforced // TODO: Can eventually be removed - cleans up data from foreign key bug (ollama/ollama#11785, ollama/app#476) if err := db.cleanupOrphanedData(); err != nil { return fmt.Errorf("cleanup orphaned data: %w", err) } return nil } // migrate handles database schema migrations func (db *database) migrate() error { // Get current schema version version, err := db.getSchemaVersion() if err != nil { return fmt.Errorf("get schema version after migration attempt: %w", err) } // Run migrations for each version for version < currentSchemaVersion { switch version { case 1: // Migrate from version 1 to 2: add context_length column if err := db.migrateV1ToV2(); err != nil { return fmt.Errorf("migrate v1 to v2: %w", err) } version = 2 case 2: // Migrate from version 2 to 3: create attachments table if err := db.migrateV2ToV3(); err != nil { return fmt.Errorf("migrate v2 to v3: %w", err) } version = 3 case 3: // Migrate from version 3 to 4: add tool_result column to messages table if err := db.migrateV3ToV4(); err != nil { return fmt.Errorf("migrate v3 to v4: %w", err) } version = 4 case 4: // add airplane_mode column to settings table if err := db.migrateV4ToV5(); err != nil { return fmt.Errorf("migrate v4 to v5: %w", err) } version = 5 case 5: // add turbo_enabled column to settings table if err := db.migrateV5ToV6(); err != nil { return fmt.Errorf("migrate v5 to v6: %w", err) } version = 6 case 6: 
// add missing index for attachments table if err := db.migrateV6ToV7(); err != nil { return fmt.Errorf("migrate v6 to v7: %w", err) } version = 7 case 7: // add think_enabled and think_level columns to settings table if err := db.migrateV7ToV8(); err != nil { return fmt.Errorf("migrate v7 to v8: %w", err) } version = 8 case 8: // add browser_state column to chats table if err := db.migrateV8ToV9(); err != nil { return fmt.Errorf("migrate v8 to v9: %w", err) } version = 9 case 9: // add cached user table if err := db.migrateV9ToV10(); err != nil { return fmt.Errorf("migrate v9 to v10: %w", err) } version = 10 case 10: // remove remote column from settings table if err := db.migrateV10ToV11(); err != nil { return fmt.Errorf("migrate v10 to v11: %w", err) } version = 11 case 11: // bring back remote column for backwards compatibility (deprecated) if err := db.migrateV11ToV12(); err != nil { return fmt.Errorf("migrate v11 to v12: %w", err) } version = 12 default: // If we have a version we don't recognize, just set it to current // This might happen during development version = currentSchemaVersion } } return nil } // migrateV1ToV2 adds the context_length column to the settings table func (db *database) migrateV1ToV2() error { _, err := db.conn.Exec(`ALTER TABLE settings ADD COLUMN context_length INTEGER NOT NULL DEFAULT 4096;`) if err != nil && !duplicateColumnError(err) { return fmt.Errorf("add context_length column: %w", err) } _, err = db.conn.Exec(`ALTER TABLE settings ADD COLUMN survey BOOLEAN NOT NULL DEFAULT TRUE;`) if err != nil && !duplicateColumnError(err) { return fmt.Errorf("add survey column: %w", err) } _, err = db.conn.Exec(`UPDATE settings SET schema_version = 2;`) if err != nil { return fmt.Errorf("update schema version: %w", err) } return nil } // migrateV2ToV3 creates the attachments table func (db *database) migrateV2ToV3() error { _, err := db.conn.Exec(` CREATE TABLE IF NOT EXISTS attachments ( id INTEGER PRIMARY KEY AUTOINCREMENT, message_id 
INTEGER NOT NULL, filename TEXT NOT NULL, data BLOB NOT NULL, FOREIGN KEY (message_id) REFERENCES messages(id) ON DELETE CASCADE ) `) if err != nil { return fmt.Errorf("create attachments table: %w", err) } _, err = db.conn.Exec(`UPDATE settings SET schema_version = 3`) if err != nil { return fmt.Errorf("update schema version: %w", err) } return nil } func (db *database) migrateV3ToV4() error { _, err := db.conn.Exec(`ALTER TABLE messages ADD COLUMN tool_result TEXT;`) if err != nil && !duplicateColumnError(err) { return fmt.Errorf("add tool_result column: %w", err) } _, err = db.conn.Exec(`UPDATE settings SET schema_version = 4;`) if err != nil { return fmt.Errorf("update schema version: %w", err) } return nil } // migrateV4ToV5 adds the airplane_mode column to the settings table func (db *database) migrateV4ToV5() error { _, err := db.conn.Exec(`ALTER TABLE settings ADD COLUMN airplane_mode BOOLEAN NOT NULL DEFAULT 0;`) if err != nil && !duplicateColumnError(err) { return fmt.Errorf("add airplane_mode column: %w", err) } _, err = db.conn.Exec(`UPDATE settings SET schema_version = 5;`) if err != nil { return fmt.Errorf("update schema version: %w", err) } return nil } // migrateV5ToV6 adds the turbo_enabled, websearch_enabled, selected_model, sidebar_open columns to the settings table func (db *database) migrateV5ToV6() error { _, err := db.conn.Exec(`ALTER TABLE settings ADD COLUMN turbo_enabled BOOLEAN NOT NULL DEFAULT 0;`) if err != nil && !duplicateColumnError(err) { return fmt.Errorf("add turbo_enabled column: %w", err) } _, err = db.conn.Exec(`ALTER TABLE settings ADD COLUMN websearch_enabled BOOLEAN NOT NULL DEFAULT 0;`) if err != nil && !duplicateColumnError(err) { return fmt.Errorf("add websearch_enabled column: %w", err) } _, err = db.conn.Exec(`ALTER TABLE settings ADD COLUMN selected_model TEXT NOT NULL DEFAULT '';`) if err != nil && !duplicateColumnError(err) { return fmt.Errorf("add selected_model column: %w", err) } _, err = db.conn.Exec(`ALTER TABLE 
settings ADD COLUMN sidebar_open BOOLEAN NOT NULL DEFAULT 0;`) if err != nil && !duplicateColumnError(err) { return fmt.Errorf("add sidebar_open column: %w", err) } _, err = db.conn.Exec(`UPDATE settings SET schema_version = 6;`) if err != nil { return fmt.Errorf("update schema version: %w", err) } return nil } // migrateV6ToV7 adds the missing index for the attachments table func (db *database) migrateV6ToV7() error { _, err := db.conn.Exec(`CREATE INDEX IF NOT EXISTS idx_attachments_message_id ON attachments(message_id);`) if err != nil { return fmt.Errorf("create attachments index: %w", err) } _, err = db.conn.Exec(`UPDATE settings SET schema_version = 7;`) if err != nil { return fmt.Errorf("update schema version: %w", err) } return nil } // migrateV7ToV8 adds the think_enabled and think_level columns to the settings table func (db *database) migrateV7ToV8() error { _, err := db.conn.Exec(`ALTER TABLE settings ADD COLUMN think_enabled BOOLEAN NOT NULL DEFAULT 0;`) if err != nil && !duplicateColumnError(err) { return fmt.Errorf("add think_enabled column: %w", err) } _, err = db.conn.Exec(`ALTER TABLE settings ADD COLUMN think_level TEXT NOT NULL DEFAULT '';`) if err != nil && !duplicateColumnError(err) { return fmt.Errorf("add think_level column: %w", err) } _, err = db.conn.Exec(`UPDATE settings SET schema_version = 8;`) if err != nil { return fmt.Errorf("update schema version: %w", err) } return nil } // migrateV8ToV9 adds browser_state to chats and bumps schema func (db *database) migrateV8ToV9() error { _, err := db.conn.Exec(` ALTER TABLE chats ADD COLUMN browser_state TEXT; UPDATE settings SET schema_version = 9; `) if err != nil && !duplicateColumnError(err) { return fmt.Errorf("add browser_state column: %w", err) } return nil } // migrateV9ToV10 adds users table func (db *database) migrateV9ToV10() error { _, err := db.conn.Exec(` CREATE TABLE IF NOT EXISTS users ( name TEXT NOT NULL DEFAULT '', email TEXT NOT NULL DEFAULT '', plan TEXT NOT NULL DEFAULT 
'', cached_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP ); UPDATE settings SET schema_version = 10; `) if err != nil { return fmt.Errorf("create users table: %w", err) } return nil } // migrateV10ToV11 removes the remote column from the settings table func (db *database) migrateV10ToV11() error { _, err := db.conn.Exec(`ALTER TABLE settings DROP COLUMN remote`) if err != nil && !columnNotExists(err) { return fmt.Errorf("drop remote column: %w", err) } _, err = db.conn.Exec(`UPDATE settings SET schema_version = 11`) if err != nil { return fmt.Errorf("update schema version: %w", err) } return nil } // migrateV11ToV12 brings back the remote column for backwards compatibility (deprecated) func (db *database) migrateV11ToV12() error { _, err := db.conn.Exec(`ALTER TABLE settings ADD COLUMN remote TEXT NOT NULL DEFAULT ''`) if err != nil && !duplicateColumnError(err) { return fmt.Errorf("add remote column: %w", err) } _, err = db.conn.Exec(`UPDATE settings SET schema_version = 12`) if err != nil { return fmt.Errorf("update schema version: %w", err) } return nil } // cleanupOrphanedData removes orphaned records that may exist due to the foreign key bug func (db *database) cleanupOrphanedData() error { _, err := db.conn.Exec(` DELETE FROM tool_calls WHERE message_id NOT IN (SELECT id FROM messages) `) if err != nil { return fmt.Errorf("cleanup orphaned tool_calls: %w", err) } _, err = db.conn.Exec(` DELETE FROM attachments WHERE message_id NOT IN (SELECT id FROM messages) `) if err != nil { return fmt.Errorf("cleanup orphaned attachments: %w", err) } _, err = db.conn.Exec(` DELETE FROM messages WHERE chat_id NOT IN (SELECT id FROM chats) `) if err != nil { return fmt.Errorf("cleanup orphaned messages: %w", err) } return nil } func duplicateColumnError(err error) bool { if sqlite3Err, ok := err.(sqlite3.Error); ok { return sqlite3Err.Code == sqlite3.ErrError && strings.Contains(sqlite3Err.Error(), "duplicate column name") } return false } func columnNotExists(err error) 
bool { if sqlite3Err, ok := err.(sqlite3.Error); ok { return sqlite3Err.Code == sqlite3.ErrError && strings.Contains(sqlite3Err.Error(), "no such column") } return false } func (db *database) getAllChats() ([]Chat, error) { // Query chats with their first user message and latest update time query := ` SELECT c.id, c.title, c.created_at, COALESCE(first_msg.content, '') as first_user_content, COALESCE(datetime(MAX(m.updated_at)), datetime(c.created_at)) as last_updated FROM chats c LEFT JOIN ( SELECT chat_id, content, MIN(id) as min_id FROM messages WHERE role = 'user' GROUP BY chat_id ) first_msg ON c.id = first_msg.chat_id LEFT JOIN messages m ON c.id = m.chat_id GROUP BY c.id, c.title, c.created_at, first_msg.content ORDER BY last_updated DESC ` rows, err := db.conn.Query(query) if err != nil { return nil, fmt.Errorf("query chats: %w", err) } defer rows.Close() var chats []Chat for rows.Next() { var chat Chat var createdAt time.Time var firstUserContent string var lastUpdatedStr string err := rows.Scan( &chat.ID, &chat.Title, &createdAt, &firstUserContent, &lastUpdatedStr, ) // Parse the last updated time lastUpdated, _ := time.Parse("2006-01-02 15:04:05", lastUpdatedStr) if err != nil { return nil, fmt.Errorf("scan chat: %w", err) } chat.CreatedAt = createdAt // Add a dummy first user message for the UI to display // This is just for the excerpt, full messages are loaded when needed chat.Messages = []Message{} if firstUserContent != "" { chat.Messages = append(chat.Messages, Message{ Role: "user", Content: firstUserContent, UpdatedAt: lastUpdated, }) } chats = append(chats, chat) } if err := rows.Err(); err != nil { return nil, fmt.Errorf("iterate chats: %w", err) } return chats, nil } func (db *database) getChatWithOptions(id string, loadAttachmentData bool) (*Chat, error) { query := ` SELECT id, title, created_at, browser_state FROM chats WHERE id = ? 
` var chat Chat var createdAt time.Time var browserState sql.NullString err := db.conn.QueryRow(query, id).Scan( &chat.ID, &chat.Title, &createdAt, &browserState, ) if err != nil { if err == sql.ErrNoRows { return nil, fmt.Errorf("chat not found") } return nil, fmt.Errorf("query chat: %w", err) } chat.CreatedAt = createdAt if browserState.Valid && browserState.String != "" { var raw json.RawMessage if err := json.Unmarshal([]byte(browserState.String), &raw); err == nil { chat.BrowserState = raw } } messages, err := db.getMessages(id, loadAttachmentData) if err != nil { return nil, fmt.Errorf("get messages: %w", err) } chat.Messages = messages return &chat, nil } func (db *database) saveChat(chat Chat) error { tx, err := db.conn.Begin() if err != nil { return fmt.Errorf("begin transaction: %w", err) } defer tx.Rollback() // Use COALESCE for browser_state to avoid wiping an existing // chat-level browser_state when saving a chat that doesn't include a new state payload. // Many code paths call SetChat to update metadata/messages only; without COALESCE the // UPSERT would overwrite browser_state with NULL, breaking revisit rendering that relies // on the last persisted full tool state. query := ` INSERT INTO chats (id, title, created_at, browser_state) VALUES (?, ?, ?, ?) 
ON CONFLICT(id) DO UPDATE SET title = excluded.title, browser_state = COALESCE(excluded.browser_state, chats.browser_state) ` var browserState sql.NullString if chat.BrowserState != nil { browserState = sql.NullString{String: string(chat.BrowserState), Valid: true} } _, err = tx.Exec(query, chat.ID, chat.Title, chat.CreatedAt, browserState, ) if err != nil { return fmt.Errorf("save chat: %w", err) } // Delete existing messages (we'll re-insert all) _, err = tx.Exec("DELETE FROM messages WHERE chat_id = ?", chat.ID) if err != nil { return fmt.Errorf("delete messages: %w", err) } // Insert messages for _, msg := range chat.Messages { messageID, err := db.insertMessage(tx, chat.ID, msg) if err != nil { return fmt.Errorf("insert message: %w", err) } // Insert tool calls if any for _, toolCall := range msg.ToolCalls { err := db.insertToolCall(tx, messageID, toolCall) if err != nil { return fmt.Errorf("insert tool call: %w", err) } } } return tx.Commit() } // updateChatBrowserState updates only the browser_state for a chat func (db *database) updateChatBrowserState(chatID string, state json.RawMessage) error { _, err := db.conn.Exec(`UPDATE chats SET browser_state = ? WHERE id = ?`, string(state), chatID) if err != nil { return fmt.Errorf("update chat browser state: %w", err) } return nil } func (db *database) deleteChat(id string) error { _, err := db.conn.Exec("DELETE FROM chats WHERE id = ?", id) if err != nil { return fmt.Errorf("delete chat: %w", err) } _, _ = db.conn.Exec("PRAGMA wal_checkpoint(TRUNCATE);") return nil } func (db *database) updateLastMessage(chatID string, msg Message) error { tx, err := db.conn.Begin() if err != nil { return fmt.Errorf("begin transaction: %w", err) } defer tx.Rollback() // Get the ID of the last message var messageID int64 err = tx.QueryRow(` SELECT MAX(id) FROM messages WHERE chat_id = ? 
`, chatID).Scan(&messageID) if err != nil { return fmt.Errorf("get last message id: %w", err) } query := ` UPDATE messages SET content = ?, thinking = ?, model_name = ?, updated_at = ?, thinking_time_start = ?, thinking_time_end = ?, tool_result = ? WHERE id = ? ` var thinkingTimeStart, thinkingTimeEnd sql.NullTime if msg.ThinkingTimeStart != nil { thinkingTimeStart = sql.NullTime{Time: *msg.ThinkingTimeStart, Valid: true} } if msg.ThinkingTimeEnd != nil { thinkingTimeEnd = sql.NullTime{Time: *msg.ThinkingTimeEnd, Valid: true} } var modelName sql.NullString if msg.Model != "" { modelName = sql.NullString{String: msg.Model, Valid: true} } var toolResultJSON sql.NullString if msg.ToolResult != nil { resultBytes, err := json.Marshal(msg.ToolResult) if err != nil { return fmt.Errorf("marshal tool result: %w", err) } toolResultJSON = sql.NullString{String: string(resultBytes), Valid: true} } result, err := tx.Exec(query, msg.Content, msg.Thinking, modelName, msg.UpdatedAt, thinkingTimeStart, thinkingTimeEnd, toolResultJSON, messageID, ) if err != nil { return fmt.Errorf("update last message: %w", err) } rowsAffected, err := result.RowsAffected() if err != nil { return fmt.Errorf("get rows affected: %w", err) } if rowsAffected == 0 { return fmt.Errorf("no message found to update") } _, err = tx.Exec("DELETE FROM attachments WHERE message_id = ?", messageID) if err != nil { return fmt.Errorf("delete existing attachments: %w", err) } for _, att := range msg.Attachments { err := db.insertAttachment(tx, messageID, att) if err != nil { return fmt.Errorf("insert attachment: %w", err) } } _, err = tx.Exec("DELETE FROM tool_calls WHERE message_id = ?", messageID) if err != nil { return fmt.Errorf("delete existing tool calls: %w", err) } for _, toolCall := range msg.ToolCalls { err := db.insertToolCall(tx, messageID, toolCall) if err != nil { return fmt.Errorf("insert tool call: %w", err) } } return tx.Commit() } func (db *database) appendMessage(chatID string, msg Message) error 
{ tx, err := db.conn.Begin() if err != nil { return fmt.Errorf("begin transaction: %w", err) } defer tx.Rollback() messageID, err := db.insertMessage(tx, chatID, msg) if err != nil { return fmt.Errorf("insert message: %w", err) } // Insert tool calls if any for _, toolCall := range msg.ToolCalls { err := db.insertToolCall(tx, messageID, toolCall) if err != nil { return fmt.Errorf("insert tool call: %w", err) } } return tx.Commit() } func (db *database) getMessages(chatID string, loadAttachmentData bool) ([]Message, error) { query := ` SELECT id, role, content, thinking, stream, model_name, created_at, updated_at, thinking_time_start, thinking_time_end, tool_result FROM messages WHERE chat_id = ? ORDER BY id ASC ` rows, err := db.conn.Query(query, chatID) if err != nil { return nil, fmt.Errorf("query messages: %w", err) } defer rows.Close() var messages []Message for rows.Next() { var msg Message var messageID int64 var thinkingTimeStart, thinkingTimeEnd sql.NullTime var modelName sql.NullString var toolResult sql.NullString err := rows.Scan( &messageID, &msg.Role, &msg.Content, &msg.Thinking, &msg.Stream, &modelName, &msg.CreatedAt, &msg.UpdatedAt, &thinkingTimeStart, &thinkingTimeEnd, &toolResult, ) if err != nil { return nil, fmt.Errorf("scan message: %w", err) } attachments, err := db.getAttachments(messageID, loadAttachmentData) if err != nil { return nil, fmt.Errorf("get attachments: %w", err) } msg.Attachments = attachments if thinkingTimeStart.Valid { msg.ThinkingTimeStart = &thinkingTimeStart.Time } if thinkingTimeEnd.Valid { msg.ThinkingTimeEnd = &thinkingTimeEnd.Time } // Parse tool result from JSON if present if toolResult.Valid && toolResult.String != "" { var result json.RawMessage if err := json.Unmarshal([]byte(toolResult.String), &result); err == nil { msg.ToolResult = &result } } // Set model if present if modelName.Valid && modelName.String != "" { msg.Model = modelName.String } // Get tool calls for this message toolCalls, err := 
db.getToolCalls(messageID) if err != nil { return nil, fmt.Errorf("get tool calls: %w", err) } msg.ToolCalls = toolCalls messages = append(messages, msg) } if err := rows.Err(); err != nil { return nil, fmt.Errorf("iterate messages: %w", err) } return messages, nil } func (db *database) insertMessage(tx *sql.Tx, chatID string, msg Message) (int64, error) { query := ` INSERT INTO messages (chat_id, role, content, thinking, stream, model_name, created_at, updated_at, thinking_time_start, thinking_time_end, tool_result) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?) ` var thinkingTimeStart, thinkingTimeEnd sql.NullTime if msg.ThinkingTimeStart != nil { thinkingTimeStart = sql.NullTime{Time: *msg.ThinkingTimeStart, Valid: true} } if msg.ThinkingTimeEnd != nil { thinkingTimeEnd = sql.NullTime{Time: *msg.ThinkingTimeEnd, Valid: true} } var modelName sql.NullString if msg.Model != "" { modelName = sql.NullString{String: msg.Model, Valid: true} } var toolResultJSON sql.NullString if msg.ToolResult != nil { resultBytes, err := json.Marshal(msg.ToolResult) if err != nil { return 0, fmt.Errorf("marshal tool result: %w", err) } toolResultJSON = sql.NullString{String: string(resultBytes), Valid: true} } result, err := tx.Exec(query, chatID, msg.Role, msg.Content, msg.Thinking, msg.Stream, modelName, msg.CreatedAt, msg.UpdatedAt, thinkingTimeStart, thinkingTimeEnd, toolResultJSON, ) if err != nil { return 0, err } messageID, err := result.LastInsertId() if err != nil { return 0, err } for _, att := range msg.Attachments { err := db.insertAttachment(tx, messageID, att) if err != nil { return 0, fmt.Errorf("insert attachment: %w", err) } } return messageID, nil } func (db *database) getAttachments(messageID int64, loadData bool) ([]File, error) { var query string if loadData { query = ` SELECT filename, data FROM attachments WHERE message_id = ? ORDER BY id ASC ` } else { query = ` SELECT filename, '' as data FROM attachments WHERE message_id = ? 
ORDER BY id ASC ` } rows, err := db.conn.Query(query, messageID) if err != nil { return nil, fmt.Errorf("query attachments: %w", err) } defer rows.Close() var attachments []File for rows.Next() { var file File err := rows.Scan(&file.Filename, &file.Data) if err != nil { return nil, fmt.Errorf("scan attachment: %w", err) } attachments = append(attachments, file) } if err := rows.Err(); err != nil { return nil, fmt.Errorf("iterate attachments: %w", err) } return attachments, nil } func (db *database) getToolCalls(messageID int64) ([]ToolCall, error) { query := ` SELECT type, function_name, function_arguments, function_result FROM tool_calls WHERE message_id = ? ORDER BY id ASC ` rows, err := db.conn.Query(query, messageID) if err != nil { return nil, fmt.Errorf("query tool calls: %w", err) } defer rows.Close() var toolCalls []ToolCall for rows.Next() { var tc ToolCall var functionResult sql.NullString err := rows.Scan( &tc.Type, &tc.Function.Name, &tc.Function.Arguments, &functionResult, ) if err != nil { return nil, fmt.Errorf("scan tool call: %w", err) } if functionResult.Valid && functionResult.String != "" { // Parse the JSON result var result json.RawMessage if err := json.Unmarshal([]byte(functionResult.String), &result); err == nil { tc.Function.Result = &result } } toolCalls = append(toolCalls, tc) } if err := rows.Err(); err != nil { return nil, fmt.Errorf("iterate tool calls: %w", err) } return toolCalls, nil } func (db *database) insertAttachment(tx *sql.Tx, messageID int64, file File) error { query := ` INSERT INTO attachments (message_id, filename, data) VALUES (?, ?, ?) ` _, err := tx.Exec(query, messageID, file.Filename, file.Data) return err } func (db *database) insertToolCall(tx *sql.Tx, messageID int64, tc ToolCall) error { query := ` INSERT INTO tool_calls (message_id, type, function_name, function_arguments, function_result) VALUES (?, ?, ?, ?, ?) 
` var functionResult sql.NullString if tc.Function.Result != nil { // Convert result to JSON resultJSON, err := json.Marshal(tc.Function.Result) if err != nil { return fmt.Errorf("marshal tool result: %w", err) } functionResult = sql.NullString{String: string(resultJSON), Valid: true} } _, err := tx.Exec(query, messageID, tc.Type, tc.Function.Name, tc.Function.Arguments, functionResult, ) return err } // Settings operations func (db *database) getID() (string, error) { var id string err := db.conn.QueryRow("SELECT device_id FROM settings").Scan(&id) if err != nil { return "", fmt.Errorf("get device id: %w", err) } return id, nil } func (db *database) setID(id string) error { _, err := db.conn.Exec("UPDATE settings SET device_id = ?", id) if err != nil { return fmt.Errorf("set device id: %w", err) } return nil } func (db *database) getHasCompletedFirstRun() (bool, error) { var hasCompletedFirstRun bool err := db.conn.QueryRow("SELECT has_completed_first_run FROM settings").Scan(&hasCompletedFirstRun) if err != nil { return false, fmt.Errorf("get has completed first run: %w", err) } return hasCompletedFirstRun, nil } func (db *database) setHasCompletedFirstRun(hasCompletedFirstRun bool) error { _, err := db.conn.Exec("UPDATE settings SET has_completed_first_run = ?", hasCompletedFirstRun) if err != nil { return fmt.Errorf("set has completed first run: %w", err) } return nil } func (db *database) getSettings() (Settings, error) { var s Settings err := db.conn.QueryRow(` SELECT expose, survey, browser, models, agent, tools, working_dir, context_length, airplane_mode, turbo_enabled, websearch_enabled, selected_model, sidebar_open, think_enabled, think_level FROM settings `).Scan(&s.Expose, &s.Survey, &s.Browser, &s.Models, &s.Agent, &s.Tools, &s.WorkingDir, &s.ContextLength, &s.AirplaneMode, &s.TurboEnabled, &s.WebSearchEnabled, &s.SelectedModel, &s.SidebarOpen, &s.ThinkEnabled, &s.ThinkLevel) if err != nil { return Settings{}, fmt.Errorf("get settings: %w", err) } 
return s, nil } func (db *database) setSettings(s Settings) error { _, err := db.conn.Exec(` UPDATE settings SET expose = ?, survey = ?, browser = ?, models = ?, agent = ?, tools = ?, working_dir = ?, context_length = ?, airplane_mode = ?, turbo_enabled = ?, websearch_enabled = ?, selected_model = ?, sidebar_open = ?, think_enabled = ?, think_level = ? `, s.Expose, s.Survey, s.Browser, s.Models, s.Agent, s.Tools, s.WorkingDir, s.ContextLength, s.AirplaneMode, s.TurboEnabled, s.WebSearchEnabled, s.SelectedModel, s.SidebarOpen, s.ThinkEnabled, s.ThinkLevel) if err != nil { return fmt.Errorf("set settings: %w", err) } return nil } func (db *database) getWindowSize() (int, int, error) { var width, height int err := db.conn.QueryRow("SELECT window_width, window_height FROM settings").Scan(&width, &height) if err != nil { return 0, 0, fmt.Errorf("get window size: %w", err) } return width, height, nil } func (db *database) setWindowSize(width, height int) error { _, err := db.conn.Exec("UPDATE settings SET window_width = ?, window_height = ?", width, height) if err != nil { return fmt.Errorf("set window size: %w", err) } return nil } func (db *database) isConfigMigrated() (bool, error) { var migrated bool err := db.conn.QueryRow("SELECT config_migrated FROM settings").Scan(&migrated) if err != nil { return false, fmt.Errorf("get config migrated: %w", err) } return migrated, nil } func (db *database) setConfigMigrated(migrated bool) error { _, err := db.conn.Exec("UPDATE settings SET config_migrated = ?", migrated) if err != nil { return fmt.Errorf("set config migrated: %w", err) } return nil } func (db *database) getSchemaVersion() (int, error) { var version int err := db.conn.QueryRow("SELECT schema_version FROM settings").Scan(&version) if err != nil { return 0, fmt.Errorf("get schema version: %w", err) } return version, nil } func (db *database) setSchemaVersion(version int) error { _, err := db.conn.Exec("UPDATE settings SET schema_version = ?", version) if err != nil 
{ return fmt.Errorf("set schema version: %w", err) } return nil } func (db *database) getUser() (*User, error) { var user User err := db.conn.QueryRow(`
go
MIT
626af2d80973270c4d59b8df7153ac47ad67ed7b
2026-01-07T08:35:43.337630Z
true
ollama/ollama
https://github.com/ollama/ollama/blob/626af2d80973270c4d59b8df7153ac47ad67ed7b/app/store/store_test.go
app/store/store_test.go
//go:build windows || darwin package store import ( "path/filepath" "testing" ) func TestStore(t *testing.T) { s, cleanup := setupTestStore(t) defer cleanup() t.Run("default id", func(t *testing.T) { // ID should be automatically generated id, err := s.ID() if err != nil { t.Fatal(err) } if id == "" { t.Error("expected non-empty ID") } // Verify ID is persisted id2, err := s.ID() if err != nil { t.Fatal(err) } if id != id2 { t.Errorf("expected ID %s, got %s", id, id2) } }) t.Run("has completed first run", func(t *testing.T) { // Default should be false (hasn't completed first run yet) hasCompleted, err := s.HasCompletedFirstRun() if err != nil { t.Fatal(err) } if hasCompleted { t.Error("expected has completed first run to be false by default") } if err := s.SetHasCompletedFirstRun(true); err != nil { t.Fatal(err) } hasCompleted, err = s.HasCompletedFirstRun() if err != nil { t.Fatal(err) } if !hasCompleted { t.Error("expected has completed first run to be true") } }) t.Run("settings", func(t *testing.T) { sc := Settings{ Expose: true, Browser: true, Survey: true, Models: "/tmp/models", Agent: true, Tools: false, WorkingDir: "/tmp/work", } if err := s.SetSettings(sc); err != nil { t.Fatal(err) } loaded, err := s.Settings() if err != nil { t.Fatal(err) } // Compare fields individually since Models might get a default if loaded.Expose != sc.Expose || loaded.Browser != sc.Browser || loaded.Agent != sc.Agent || loaded.Survey != sc.Survey || loaded.Tools != sc.Tools || loaded.WorkingDir != sc.WorkingDir { t.Errorf("expected %v, got %v", sc, loaded) } }) t.Run("window size", func(t *testing.T) { if err := s.SetWindowSize(1024, 768); err != nil { t.Fatal(err) } width, height, err := s.WindowSize() if err != nil { t.Fatal(err) } if width != 1024 || height != 768 { t.Errorf("expected 1024x768, got %dx%d", width, height) } }) t.Run("create and retrieve chat", func(t *testing.T) { chat := NewChat("test-chat-1") chat.Title = "Test Chat" chat.Messages = append(chat.Messages, 
NewMessage("user", "Hello", nil)) chat.Messages = append(chat.Messages, NewMessage("assistant", "Hi there!", &MessageOptions{ Model: "llama4", })) if err := s.SetChat(*chat); err != nil { t.Fatalf("failed to save chat: %v", err) } retrieved, err := s.Chat("test-chat-1") if err != nil { t.Fatalf("failed to retrieve chat: %v", err) } if retrieved.ID != chat.ID { t.Errorf("expected ID %s, got %s", chat.ID, retrieved.ID) } if retrieved.Title != chat.Title { t.Errorf("expected title %s, got %s", chat.Title, retrieved.Title) } if len(retrieved.Messages) != 2 { t.Fatalf("expected 2 messages, got %d", len(retrieved.Messages)) } if retrieved.Messages[0].Content != "Hello" { t.Errorf("expected first message 'Hello', got %s", retrieved.Messages[0].Content) } if retrieved.Messages[1].Content != "Hi there!" { t.Errorf("expected second message 'Hi there!', got %s", retrieved.Messages[1].Content) } }) t.Run("list chats", func(t *testing.T) { chat2 := NewChat("test-chat-2") chat2.Title = "Another Chat" chat2.Messages = append(chat2.Messages, NewMessage("user", "Test", nil)) if err := s.SetChat(*chat2); err != nil { t.Fatalf("failed to save chat: %v", err) } chats, err := s.Chats() if err != nil { t.Fatalf("failed to list chats: %v", err) } if len(chats) != 2 { t.Fatalf("expected 2 chats, got %d", len(chats)) } }) t.Run("delete chat", func(t *testing.T) { if err := s.DeleteChat("test-chat-1"); err != nil { t.Fatalf("failed to delete chat: %v", err) } // Verify it's gone _, err := s.Chat("test-chat-1") if err == nil { t.Error("expected error retrieving deleted chat") } // Verify other chat still exists chats, err := s.Chats() if err != nil { t.Fatalf("failed to list chats: %v", err) } if len(chats) != 1 { t.Fatalf("expected 1 chat after deletion, got %d", len(chats)) } }) } // setupTestStore creates a temporary store for testing func setupTestStore(t *testing.T) (*Store, func()) { t.Helper() tmpDir := t.TempDir() // Override legacy config path to ensure no migration happens 
oldLegacyConfigPath := legacyConfigPath legacyConfigPath = filepath.Join(tmpDir, "config.json") s := &Store{DBPath: filepath.Join(tmpDir, "db.sqlite")} cleanup := func() { s.Close() legacyConfigPath = oldLegacyConfigPath } return s, cleanup }
go
MIT
626af2d80973270c4d59b8df7153ac47ad67ed7b
2026-01-07T08:35:43.337630Z
false
ollama/ollama
https://github.com/ollama/ollama/blob/626af2d80973270c4d59b8df7153ac47ad67ed7b/app/store/store.go
app/store/store.go
//go:build windows || darwin // Package store provides a simple JSON file store for the desktop application // to save and load data such as ollama server configuration, messages, // login information and more. package store import ( "encoding/json" "fmt" "log/slog" "os" "path/filepath" "runtime" "sync" "time" "github.com/google/uuid" "github.com/ollama/ollama/app/types/not" ) type File struct { Filename string `json:"filename"` Data []byte `json:"data"` } type User struct { Name string `json:"name"` Email string `json:"email"` Plan string `json:"plan"` CachedAt time.Time `json:"cachedAt"` } type Message struct { Role string `json:"role"` Content string `json:"content"` Thinking string `json:"thinking"` Stream bool `json:"stream"` Model string `json:"model,omitempty"` Attachments []File `json:"attachments,omitempty"` ToolCalls []ToolCall `json:"tool_calls,omitempty"` ToolCall *ToolCall `json:"tool_call,omitempty"` ToolName string `json:"tool_name,omitempty"` ToolResult *json.RawMessage `json:"tool_result,omitempty"` CreatedAt time.Time `json:"created_at"` UpdatedAt time.Time `json:"updated_at"` ThinkingTimeStart *time.Time `json:"thinkingTimeStart,omitempty" ts_type:"Date | undefined" ts_transform:"__VALUE__ && new Date(__VALUE__)"` ThinkingTimeEnd *time.Time `json:"thinkingTimeEnd,omitempty" ts_type:"Date | undefined" ts_transform:"__VALUE__ && new Date(__VALUE__)"` } // MessageOptions contains optional parameters for creating a Message type MessageOptions struct { Model string Attachments []File Stream bool Thinking string ToolCalls []ToolCall ToolCall *ToolCall ToolResult *json.RawMessage ThinkingTimeStart *time.Time ThinkingTimeEnd *time.Time } // NewMessage creates a new Message with the given options func NewMessage(role, content string, opts *MessageOptions) Message { now := time.Now() msg := Message{ Role: role, Content: content, CreatedAt: now, UpdatedAt: now, } if opts != nil { msg.Model = opts.Model msg.Attachments = opts.Attachments msg.Stream = 
opts.Stream msg.Thinking = opts.Thinking msg.ToolCalls = opts.ToolCalls msg.ToolCall = opts.ToolCall msg.ToolResult = opts.ToolResult msg.ThinkingTimeStart = opts.ThinkingTimeStart msg.ThinkingTimeEnd = opts.ThinkingTimeEnd } return msg } type ToolCall struct { Type string `json:"type"` Function ToolFunction `json:"function"` } type ToolFunction struct { Name string `json:"name"` Arguments string `json:"arguments"` Result any `json:"result,omitempty"` } type Model struct { Model string `json:"model"` // Model name Digest string `json:"digest,omitempty"` // Model digest from the registry ModifiedAt *time.Time `json:"modified_at,omitempty"` // When the model was last modified locally } type Chat struct { ID string `json:"id"` Messages []Message `json:"messages"` Title string `json:"title"` CreatedAt time.Time `json:"created_at"` BrowserState json.RawMessage `json:"browser_state,omitempty" ts_type:"BrowserStateData"` } // NewChat creates a new Chat with the ID, with CreatedAt timestamp initialized func NewChat(id string) *Chat { return &Chat{ ID: id, Messages: []Message{}, CreatedAt: time.Now(), } } type Settings struct { // Expose is a boolean that indicates if the ollama server should // be exposed to the network Expose bool // Browser is a boolean that indicates if the ollama server should // be exposed to browser windows (e.g. 
CORS set to allow all origins) Browser bool // Survey is a boolean that indicates if the user allows anonymous // inference information to be shared with Ollama Survey bool // Models is a string that contains the models to load on startup Models string // TODO(parthsareen): temporary for experimentation // Agent indicates if the app should use multi-turn tools to fulfill user requests Agent bool // Tools indicates if the app should use single-turn tools to fulfill user requests Tools bool // WorkingDir specifies the working directory for all agent operations WorkingDir string // ContextLength specifies the context length for the ollama server (using OLLAMA_CONTEXT_LENGTH) ContextLength int // AirplaneMode when true, turns off Ollama Turbo features and only uses local models AirplaneMode bool // TurboEnabled indicates if Ollama Turbo features are enabled TurboEnabled bool // Maps gpt-oss specific frontend name' BrowserToolEnabled' to db field 'websearch_enabled' WebSearchEnabled bool // ThinkEnabled indicates if thinking is enabled ThinkEnabled bool // ThinkLevel indicates the level of thinking to use for models that support multiple levels ThinkLevel string // SelectedModel stores the last model that the user selected SelectedModel string // SidebarOpen indicates if the chat sidebar is open SidebarOpen bool } type Store struct { // DBPath allows overriding the default database path (mainly for testing) DBPath string // dbMu protects database initialization only dbMu sync.Mutex db *database } var defaultDBPath = func() string { switch runtime.GOOS { case "windows": return filepath.Join(os.Getenv("LOCALAPPDATA"), "Ollama", "db.sqlite") case "darwin": return filepath.Join(os.Getenv("HOME"), "Library", "Application Support", "Ollama", "db.sqlite") default: return filepath.Join(os.Getenv("HOME"), ".ollama", "db.sqlite") } }() // legacyConfigPath is the path to the old config.json file var legacyConfigPath = func() string { switch runtime.GOOS { case "windows": return 
filepath.Join(os.Getenv("LOCALAPPDATA"), "Ollama", "config.json") case "darwin": return filepath.Join(os.Getenv("HOME"), "Library", "Application Support", "Ollama", "config.json") default: return filepath.Join(os.Getenv("HOME"), ".ollama", "config.json") } }() // legacyData represents the old config.json structure (only fields we need to migrate) type legacyData struct { ID string `json:"id"` FirstTimeRun bool `json:"first-time-run"` } func (s *Store) ensureDB() error { // Fast path: check if db is already initialized if s.db != nil { return nil } // Slow path: initialize database with lock s.dbMu.Lock() defer s.dbMu.Unlock() // Double-check after acquiring lock if s.db != nil { return nil } dbPath := s.DBPath if dbPath == "" { dbPath = defaultDBPath } // Ensure directory exists if err := os.MkdirAll(filepath.Dir(dbPath), 0o755); err != nil { return fmt.Errorf("create db directory: %w", err) } database, err := newDatabase(dbPath) if err != nil { return fmt.Errorf("open database: %w", err) } // Generate device ID if needed id, err := database.getID() if err != nil || id == "" { // Generate new UUID for device u, err := uuid.NewV7() if err == nil { database.setID(u.String()) } } s.db = database // Check if we need to migrate from config.json migrated, err := database.isConfigMigrated() if err != nil || !migrated { if err := s.migrateFromConfig(database); err != nil { slog.Warn("failed to migrate from config.json", "error", err) } } return nil } // migrateFromConfig attempts to migrate ID and FirstTimeRun from config.json func (s *Store) migrateFromConfig(database *database) error { configPath := legacyConfigPath // Check if config.json exists if _, err := os.Stat(configPath); os.IsNotExist(err) { // No config to migrate, mark as migrated return database.setConfigMigrated(true) } // Read the config file b, err := os.ReadFile(configPath) if err != nil { return fmt.Errorf("read legacy config: %w", err) } var legacy legacyData if err := json.Unmarshal(b, &legacy); err != 
nil { // If we can't parse it, just mark as migrated and move on slog.Warn("failed to parse legacy config.json", "error", err) return database.setConfigMigrated(true) } // Migrate the ID if present if legacy.ID != "" { if err := database.setID(legacy.ID); err != nil { return fmt.Errorf("migrate device ID: %w", err) } slog.Info("migrated device ID from config.json") } hasCompleted := legacy.FirstTimeRun // If old FirstTimeRun is true, it means first run was completed if err := database.setHasCompletedFirstRun(hasCompleted); err != nil { return fmt.Errorf("migrate first time run: %w", err) } slog.Info("migrated first run status from config.json", "hasCompleted", hasCompleted) // Mark as migrated if err := database.setConfigMigrated(true); err != nil { return fmt.Errorf("mark config as migrated: %w", err) } slog.Info("successfully migrated settings from config.json") return nil } func (s *Store) ID() (string, error) { if err := s.ensureDB(); err != nil { return "", err } return s.db.getID() } func (s *Store) HasCompletedFirstRun() (bool, error) { if err := s.ensureDB(); err != nil { return false, err } return s.db.getHasCompletedFirstRun() } func (s *Store) SetHasCompletedFirstRun(hasCompleted bool) error { if err := s.ensureDB(); err != nil { return err } return s.db.setHasCompletedFirstRun(hasCompleted) } func (s *Store) Settings() (Settings, error) { if err := s.ensureDB(); err != nil { return Settings{}, fmt.Errorf("load settings: %w", err) } settings, err := s.db.getSettings() if err != nil { return Settings{}, err } // Set default models directory if not set if settings.Models == "" { dir := os.Getenv("OLLAMA_MODELS") if dir != "" { settings.Models = dir } else { home, err := os.UserHomeDir() if err == nil { settings.Models = filepath.Join(home, ".ollama", "models") } } } return settings, nil } func (s *Store) SetSettings(settings Settings) error { if err := s.ensureDB(); err != nil { return err } return s.db.setSettings(settings) } func (s *Store) Chats() 
([]Chat, error) { if err := s.ensureDB(); err != nil { return nil, err } return s.db.getAllChats() } func (s *Store) Chat(id string) (*Chat, error) { return s.ChatWithOptions(id, true) } func (s *Store) ChatWithOptions(id string, loadAttachmentData bool) (*Chat, error) { if err := s.ensureDB(); err != nil { return nil, err } chat, err := s.db.getChatWithOptions(id, loadAttachmentData) if err != nil { return nil, fmt.Errorf("%w: chat %s", not.Found, id) } return chat, nil } func (s *Store) SetChat(chat Chat) error { if err := s.ensureDB(); err != nil { return err } return s.db.saveChat(chat) } func (s *Store) DeleteChat(id string) error { if err := s.ensureDB(); err != nil { return err } // Delete from database if err := s.db.deleteChat(id); err != nil { return fmt.Errorf("%w: chat %s", not.Found, id) } // Also delete associated images chatImgDir := filepath.Join(s.ImgDir(), id) if err := os.RemoveAll(chatImgDir); err != nil { // Log error but don't fail the deletion slog.Warn("failed to delete chat images", "chat_id", id, "error", err) } return nil } func (s *Store) WindowSize() (int, int, error) { if err := s.ensureDB(); err != nil { return 0, 0, err } return s.db.getWindowSize() } func (s *Store) SetWindowSize(width, height int) error { if err := s.ensureDB(); err != nil { return err } return s.db.setWindowSize(width, height) } func (s *Store) UpdateLastMessage(chatID string, message Message) error { if err := s.ensureDB(); err != nil { return err } return s.db.updateLastMessage(chatID, message) } func (s *Store) AppendMessage(chatID string, message Message) error { if err := s.ensureDB(); err != nil { return err } return s.db.appendMessage(chatID, message) } func (s *Store) UpdateChatBrowserState(chatID string, state json.RawMessage) error { if err := s.ensureDB(); err != nil { return err } return s.db.updateChatBrowserState(chatID, state) } func (s *Store) User() (*User, error) { if err := s.ensureDB(); err != nil { return nil, err } return s.db.getUser() } 
// SetUser caches the given user record, stamping it with the current
// time so callers can later judge how stale the cached data is.
func (s *Store) SetUser(user User) error {
	if err := s.ensureDB(); err != nil {
		return err
	}
	// CachedAt is always overwritten on save; callers cannot pin it.
	user.CachedAt = time.Now()
	return s.db.setUser(user)
}

// ClearUser removes any cached user record from the database.
func (s *Store) ClearUser() error {
	if err := s.ensureDB(); err != nil {
		return err
	}
	return s.db.clearUser()
}

// Close closes the underlying database, if one was opened. It is safe
// to call on a Store whose database was never initialized.
func (s *Store) Close() error {
	s.dbMu.Lock()
	defer s.dbMu.Unlock()
	if s.db != nil {
		return s.db.Close()
	}
	return nil
}
go
MIT
626af2d80973270c4d59b8df7153ac47ad67ed7b
2026-01-07T08:35:43.337630Z
false
ollama/ollama
https://github.com/ollama/ollama/blob/626af2d80973270c4d59b8df7153ac47ad67ed7b/app/store/schema_test.go
app/store/schema_test.go
//go:build windows || darwin

package store

import (
	"path/filepath"
	"testing"
)

// TestSchemaVersioning verifies that a freshly created database reports
// the current schema version, and that the stored version can be
// explicitly updated and read back.
func TestSchemaVersioning(t *testing.T) {
	tmpDir := t.TempDir()

	// Override legacy config path to avoid migration logs
	oldLegacyConfigPath := legacyConfigPath
	legacyConfigPath = filepath.Join(tmpDir, "config.json")
	defer func() { legacyConfigPath = oldLegacyConfigPath }()

	t.Run("new database has correct schema version", func(t *testing.T) {
		dbPath := filepath.Join(tmpDir, "new_db.sqlite")
		db, err := newDatabase(dbPath)
		if err != nil {
			t.Fatalf("failed to create database: %v", err)
		}
		defer db.Close()

		// Check schema version
		version, err := db.getSchemaVersion()
		if err != nil {
			t.Fatalf("failed to get schema version: %v", err)
		}
		if version != currentSchemaVersion {
			t.Errorf("expected schema version %d, got %d", currentSchemaVersion, version)
		}
	})

	t.Run("can update schema version", func(t *testing.T) {
		dbPath := filepath.Join(tmpDir, "update_db.sqlite")
		db, err := newDatabase(dbPath)
		if err != nil {
			t.Fatalf("failed to create database: %v", err)
		}
		defer db.Close()

		// Set a different version
		testVersion := 42
		if err := db.setSchemaVersion(testVersion); err != nil {
			t.Fatalf("failed to set schema version: %v", err)
		}

		// Verify it was updated
		version, err := db.getSchemaVersion()
		if err != nil {
			t.Fatalf("failed to get schema version: %v", err)
		}
		if version != testVersion {
			t.Errorf("expected schema version %d, got %d", testVersion, version)
		}
	})
}
go
MIT
626af2d80973270c4d59b8df7153ac47ad67ed7b
2026-01-07T08:35:43.337630Z
false
ollama/ollama
https://github.com/ollama/ollama/blob/626af2d80973270c4d59b8df7153ac47ad67ed7b/app/store/migration_test.go
app/store/migration_test.go
//go:build windows || darwin

package store

import (
	"database/sql"
	"encoding/json"
	"os"
	"path/filepath"
	"testing"
)

// TestConfigMigration verifies that values from a legacy config.json
// (device ID and first-run flag) are migrated into the database on
// first access, marked as migrated, and persist even after the legacy
// file is removed.
func TestConfigMigration(t *testing.T) {
	tmpDir := t.TempDir()

	// Create a legacy config.json
	legacyConfig := legacyData{
		ID:           "test-device-id-12345",
		FirstTimeRun: true, // In old system, true meant "has completed first run"
	}
	configData, err := json.MarshalIndent(legacyConfig, "", " ")
	if err != nil {
		t.Fatal(err)
	}
	configPath := filepath.Join(tmpDir, "config.json")
	if err := os.WriteFile(configPath, configData, 0o644); err != nil {
		t.Fatal(err)
	}

	// Override the legacy config path for testing
	oldLegacyConfigPath := legacyConfigPath
	legacyConfigPath = configPath
	defer func() { legacyConfigPath = oldLegacyConfigPath }()

	// Create store with database in same directory
	s := Store{DBPath: filepath.Join(tmpDir, "db.sqlite")}
	defer s.Close()

	// First access should trigger migration
	id, err := s.ID()
	if err != nil {
		t.Fatalf("failed to get ID: %v", err)
	}
	if id != "test-device-id-12345" {
		t.Errorf("expected migrated ID 'test-device-id-12345', got '%s'", id)
	}

	// Check HasCompletedFirstRun
	hasCompleted, err := s.HasCompletedFirstRun()
	if err != nil {
		t.Fatalf("failed to get has completed first run: %v", err)
	}
	if !hasCompleted {
		t.Error("expected has completed first run to be true after migration")
	}

	// Verify migration is marked as complete
	migrated, err := s.db.isConfigMigrated()
	if err != nil {
		t.Fatalf("failed to check migration status: %v", err)
	}
	if !migrated {
		t.Error("expected config to be marked as migrated")
	}

	// Create a new store instance to verify migration doesn't run again
	s2 := Store{DBPath: filepath.Join(tmpDir, "db.sqlite")}
	defer s2.Close()

	// Delete the config file to ensure we're not reading from it
	os.Remove(configPath)

	// Verify data is still there
	id2, err := s2.ID()
	if err != nil {
		t.Fatalf("failed to get ID from second store: %v", err)
	}
	if id2 != "test-device-id-12345" {
		t.Errorf("expected persisted ID 'test-device-id-12345', got '%s'", id2)
	}
}

// TestNoConfigToMigrate verifies default behavior when there is no
// legacy config.json: an ID is auto-generated, the first-run flag
// defaults to false, and migration is still marked complete.
func TestNoConfigToMigrate(t *testing.T) {
	tmpDir := t.TempDir()

	// Override the legacy config path for testing
	oldLegacyConfigPath := legacyConfigPath
	legacyConfigPath = filepath.Join(tmpDir, "config.json")
	defer func() { legacyConfigPath = oldLegacyConfigPath }()

	// Create store without any config.json
	s := Store{DBPath: filepath.Join(tmpDir, "db.sqlite")}
	defer s.Close()

	// Should generate a new ID
	id, err := s.ID()
	if err != nil {
		t.Fatalf("failed to get ID: %v", err)
	}
	if id == "" {
		t.Error("expected auto-generated ID, got empty string")
	}

	// HasCompletedFirstRun should be false (default)
	hasCompleted, err := s.HasCompletedFirstRun()
	if err != nil {
		t.Fatalf("failed to get has completed first run: %v", err)
	}
	if hasCompleted {
		t.Error("expected has completed first run to be false by default")
	}

	// Migration should still be marked as complete
	migrated, err := s.db.isConfigMigrated()
	if err != nil {
		t.Fatalf("failed to check migration status: %v", err)
	}
	if !migrated {
		t.Error("expected config to be marked as migrated even with no config.json")
	}
}

const (
	// v1Schema is a snapshot of the original (version 1) database schema,
	// used to exercise the migration path from the very first release.
	v1Schema = `
CREATE TABLE IF NOT EXISTS settings (
	id INTEGER PRIMARY KEY CHECK (id = 1),
	device_id TEXT NOT NULL DEFAULT '',
	has_completed_first_run BOOLEAN NOT NULL DEFAULT 0,
	expose BOOLEAN NOT NULL DEFAULT 0,
	browser BOOLEAN NOT NULL DEFAULT 0,
	models TEXT NOT NULL DEFAULT '',
	remote TEXT NOT NULL DEFAULT '',
	agent BOOLEAN NOT NULL DEFAULT 0,
	tools BOOLEAN NOT NULL DEFAULT 0,
	working_dir TEXT NOT NULL DEFAULT '',
	window_width INTEGER NOT NULL DEFAULT 0,
	window_height INTEGER NOT NULL DEFAULT 0,
	config_migrated BOOLEAN NOT NULL DEFAULT 0,
	schema_version INTEGER NOT NULL DEFAULT 1
);

-- Insert default settings row if it doesn't exist
INSERT OR IGNORE INTO settings (id) VALUES (1);

CREATE TABLE IF NOT EXISTS chats (
	id TEXT PRIMARY KEY,
	title TEXT NOT NULL DEFAULT '',
	created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP
);

CREATE TABLE IF NOT EXISTS messages (
	id INTEGER PRIMARY KEY AUTOINCREMENT,
	chat_id TEXT NOT NULL,
	role TEXT NOT NULL,
	content TEXT NOT NULL DEFAULT '',
	thinking TEXT NOT NULL DEFAULT '',
	stream BOOLEAN NOT NULL DEFAULT 0,
	model_name TEXT,
	model_cloud BOOLEAN,
	model_ollama_host BOOLEAN,
	created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
	updated_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
	thinking_time_start TIMESTAMP,
	thinking_time_end TIMESTAMP,
	FOREIGN KEY (chat_id) REFERENCES chats(id) ON DELETE CASCADE
);

CREATE INDEX IF NOT EXISTS idx_messages_chat_id ON messages(chat_id);

CREATE TABLE IF NOT EXISTS tool_calls (
	id INTEGER PRIMARY KEY AUTOINCREMENT,
	message_id INTEGER NOT NULL,
	type TEXT NOT NULL,
	function_name TEXT NOT NULL,
	function_arguments TEXT NOT NULL,
	function_result TEXT,
	FOREIGN KEY (message_id) REFERENCES messages(id) ON DELETE CASCADE
);

CREATE INDEX IF NOT EXISTS idx_tool_calls_message_id ON tool_calls(message_id);
`
)

// TestMigrationFromEpoc creates a database with the v1 schema by hand
// and verifies that migrate() brings it up to currentSchemaVersion.
func TestMigrationFromEpoc(t *testing.T) {
	tmpDir := t.TempDir()
	s := Store{DBPath: filepath.Join(tmpDir, "db.sqlite")}
	defer s.Close()

	// Open database connection
	conn, err := sql.Open("sqlite3", s.DBPath+"?_foreign_keys=on&_journal_mode=WAL")
	if err != nil {
		t.Fatal(err)
	}

	// Test the connection
	if err := conn.Ping(); err != nil {
		conn.Close()
		t.Fatal(err)
	}

	s.db = &database{conn: conn}
	t.Logf("DB created: %s", s.DBPath)

	_, err = s.db.conn.Exec(v1Schema)
	if err != nil {
		t.Fatal(err)
	}

	version, err := s.db.getSchemaVersion()
	if err != nil {
		t.Fatalf("failed to get schema version: %v", err)
	}
	if version != 1 {
		t.Fatalf("expected: %d\n got: %d", 1, version)
	}
	t.Logf("v1 schema created")

	if err := s.db.migrate(); err != nil {
		t.Fatal(err)
	}
	t.Logf("migrations completed")

	version, err = s.db.getSchemaVersion()
	if err != nil {
		t.Fatalf("failed to get schema version: %v", err)
	}
	if version != currentSchemaVersion {
		t.Fatalf("expected: %d\n got: %d", currentSchemaVersion, version)
	}
}
go
MIT
626af2d80973270c4d59b8df7153ac47ad67ed7b
2026-01-07T08:35:43.337630Z
false
ollama/ollama
https://github.com/ollama/ollama/blob/626af2d80973270c4d59b8df7153ac47ad67ed7b/app/types/not/found.go
app/types/not/found.go
//go:build windows || darwin

// Package not provides sentinel error values that read naturally when
// qualified by the package name, e.g. not.Found and not.Available.
package not

import (
	"errors"
)

// Found is an error that indicates that a value was not found. It
// may be used by low-level packages to signal to higher-level
// packages that a value was not found.
//
// It exists to avoid using errors.New("not found") in multiple
// packages to mean the same thing.
//
// Found should not be used directly. Instead it should be wrapped
// or joined using errors.Join or fmt.Errorf, etc.
//
// Errors wrapping Found should provide additional context, e.g.
// fmt.Errorf("%w: %s", not.Found, key)
//
//lint:ignore ST1012 This is a sentinel error intended to be read like not.Found.
var Found = errors.New("not found")

// Available is an error that indicates that a value is not available.
// Like Found, it should be wrapped with context before being returned.
//
//lint:ignore ST1012 This is a sentinel error intended to be read like not.Available.
var Available = errors.New("not available")
go
MIT
626af2d80973270c4d59b8df7153ac47ad67ed7b
2026-01-07T08:35:43.337630Z
false
ollama/ollama
https://github.com/ollama/ollama/blob/626af2d80973270c4d59b8df7153ac47ad67ed7b/app/types/not/valids_test.go
app/types/not/valids_test.go
//go:build windows || darwin

package not_test

import (
	"errors"
	"fmt"

	"github.com/ollama/ollama/app/types/not"
)

// ExampleValids shows how to collect several validation errors and
// return them as a single error value. The Output block below is
// checked by `go test`, so it must match exactly.
func ExampleValids() {
	// This example demonstrates how to use the Valids type to create
	// a list of validation errors.
	//
	// The Valids type is a slice of ValidError values. Each ValidError
	// value represents a validation error.
	//
	// The Valids type has an Error method that returns a single error
	// value that represents all of the validation errors in the list.
	//
	// The Valids type is useful for collecting multiple validation errors
	// and returning them as a single error value.
	validate := func() error {
		var b not.Valids
		b.Add("name", "must be a valid name")
		b.Add("email", "%q: must be a valid email address", "invalid.email")
		return b
	}

	err := validate()

	// errors.As recovers the concrete Valids list so individual
	// failures can be inspected or printed.
	var nv not.Valids
	if errors.As(err, &nv) {
		for _, v := range nv {
			fmt.Println(v)
		}
	}

	// Output:
	// invalid name: must be a valid name
	// invalid email: "invalid.email": must be a valid email address
}
go
MIT
626af2d80973270c4d59b8df7153ac47ad67ed7b
2026-01-07T08:35:43.337630Z
false
ollama/ollama
https://github.com/ollama/ollama/blob/626af2d80973270c4d59b8df7153ac47ad67ed7b/app/types/not/valids.go
app/types/not/valids.go
//go:build windows || darwin package not import ( "fmt" ) type ValidError struct { name string msg string args []any } // Valid returns a new validation error with the given name and message. func Valid(name, message string, args ...any) error { return ValidError{name, message, args} } // Message returns the formatted message for the validation error. func (e *ValidError) Message() string { return fmt.Sprintf(e.msg, e.args...) } // Error implements the error interface. func (e ValidError) Error() string { return fmt.Sprintf("invalid %s: %s", e.name, e.Message()) } func (e ValidError) Field() string { return e.name } // Valids is for building a list of validation errors. type Valids []ValidError // Addf adds a validation error to the list with a formatted message using fmt.Sprintf. func (b *Valids) Add(name, message string, args ...any) { *b = append(*b, ValidError{name, message, args}) } func (b Valids) Error() string { if len(b) == 0 { return "" } var result string for i, err := range b { if i > 0 { result += "; " } result += err.Error() } return result }
go
MIT
626af2d80973270c4d59b8df7153ac47ad67ed7b
2026-01-07T08:35:43.337630Z
false
ollama/ollama
https://github.com/ollama/ollama/blob/626af2d80973270c4d59b8df7153ac47ad67ed7b/app/assets/assets.go
app/assets/assets.go
//go:build windows || darwin

// Package assets embeds the application's icon files and exposes
// helpers to list and read them.
package assets

import (
	"embed"
	"io/fs"
)

// icons holds every .ico file in this directory, embedded at build time.
//
//go:embed *.ico
var icons embed.FS

// ListIcons returns the filenames of all embedded icon files.
func ListIcons() ([]string, error) {
	return fs.Glob(icons, "*")
}

// GetIcon returns the raw bytes of the embedded icon with the given
// filename, as returned by ListIcons.
func GetIcon(filename string) ([]byte, error) {
	return icons.ReadFile(filename)
}
go
MIT
626af2d80973270c4d59b8df7153ac47ad67ed7b
2026-01-07T08:35:43.337630Z
false
ollama/ollama
https://github.com/ollama/ollama/blob/626af2d80973270c4d59b8df7153ac47ad67ed7b/app/logrotate/logrotate_test.go
app/logrotate/logrotate_test.go
//go:build windows || darwin

package logrotate

import (
	"os"
	"path/filepath"
	"strconv"
	"testing"
)

// TestRotate exercises log rotation end to end: rotating a missing
// file (no-op), a first rotation, a repeated rotation with no new log
// (no-op), and repeated rotations up to the MaxLogFiles retention cap.
func TestRotate(t *testing.T) {
	logDir := t.TempDir()
	logFile := filepath.Join(logDir, "testlog.log")

	// No log exists
	Rotate(logFile)

	if err := os.WriteFile(logFile, []byte("1"), 0o644); err != nil {
		t.Fatal(err)
	}
	if _, err := os.Stat(logFile); os.IsNotExist(err) {
		t.Fatal("expected log file to exist")
	}

	// First rotation
	Rotate(logFile)
	if _, err := os.Stat(filepath.Join(logDir, "testlog-1.log")); os.IsNotExist(err) {
		t.Fatal("expected rotated log file to exist")
	}
	if _, err := os.Stat(filepath.Join(logDir, "testlog-2.log")); !os.IsNotExist(err) {
		t.Fatal("expected no second rotated log file")
	}
	if _, err := os.Stat(logFile); !os.IsNotExist(err) {
		t.Fatal("expected original log file to be moved")
	}

	// Should be a no-op without a new log
	Rotate(logFile)
	if _, err := os.Stat(filepath.Join(logDir, "testlog-1.log")); os.IsNotExist(err) {
		t.Fatal("expected rotated log file to still exist")
	}
	if _, err := os.Stat(filepath.Join(logDir, "testlog-2.log")); !os.IsNotExist(err) {
		t.Fatal("expected no second rotated log file")
	}
	if _, err := os.Stat(logFile); !os.IsNotExist(err) {
		t.Fatal("expected no original log file")
	}

	// Each further rotation should shift every existing copy up one
	// index; once past MaxLogFiles, the oldest copy must be dropped.
	for i := 2; i <= MaxLogFiles+1; i++ {
		if err := os.WriteFile(logFile, []byte(strconv.Itoa(i)), 0o644); err != nil {
			t.Fatal(err)
		}
		if _, err := os.Stat(logFile); os.IsNotExist(err) {
			t.Fatal("expected log file to exist")
		}
		Rotate(logFile)
		if _, err := os.Stat(logFile); !os.IsNotExist(err) {
			t.Fatal("expected log file to be moved")
		}
		for j := 1; j < i; j++ {
			if _, err := os.Stat(filepath.Join(logDir, "testlog-"+strconv.Itoa(j)+".log")); os.IsNotExist(err) {
				t.Fatalf("expected rotated log file %d to exist", j)
			}
		}
		if _, err := os.Stat(filepath.Join(logDir, "testlog-"+strconv.Itoa(i+1)+".log")); !os.IsNotExist(err) {
			t.Fatalf("expected no rotated log file %d", i+1)
		}
	}
}
go
MIT
626af2d80973270c4d59b8df7153ac47ad67ed7b
2026-01-07T08:35:43.337630Z
false
ollama/ollama
https://github.com/ollama/ollama/blob/626af2d80973270c4d59b8df7153ac47ad67ed7b/app/logrotate/logrotate.go
app/logrotate/logrotate.go
//go:build windows || darwin // package logrotate provides utilities for rotating logs // TODO (jmorgan): this most likely doesn't need it's own // package and can be moved to app where log files are created package logrotate import ( "log/slog" "os" "strconv" "strings" ) const MaxLogFiles = 5 func Rotate(filename string) { if _, err := os.Stat(filename); os.IsNotExist(err) { return } index := strings.LastIndex(filename, ".") pre := filename[:index] post := "." + filename[index+1:] for i := MaxLogFiles; i > 0; i-- { older := pre + "-" + strconv.Itoa(i) + post newer := pre + "-" + strconv.Itoa(i-1) + post if i == 1 { newer = pre + post } if _, err := os.Stat(newer); err == nil { if _, err := os.Stat(older); err == nil { err := os.Remove(older) if err != nil { slog.Warn("Failed to remove older log", "older", older, "error", err) continue } } err := os.Rename(newer, older) if err != nil { slog.Warn("Failed to rotate log", "older", older, "newer", newer, "error", err) } } } }
go
MIT
626af2d80973270c4d59b8df7153ac47ad67ed7b
2026-01-07T08:35:43.337630Z
false
ollama/ollama
https://github.com/ollama/ollama/blob/626af2d80973270c4d59b8df7153ac47ad67ed7b/app/server/server_test.go
app/server/server_test.go
//go:build windows || darwin package server import ( "context" "os" "path/filepath" "reflect" "strings" "testing" "time" "github.com/ollama/ollama/app/store" ) func TestNew(t *testing.T) { tmpDir := t.TempDir() st := &store.Store{DBPath: filepath.Join(tmpDir, "db.sqlite")} defer st.Close() // Ensure database is closed before cleanup s := New(st, false) if s == nil { t.Fatal("expected non-nil server") } if s.bin == "" { t.Error("expected non-empty bin path") } } func TestServerCmd(t *testing.T) { os.Unsetenv("OLLAMA_HOST") os.Unsetenv("OLLAMA_ORIGINS") os.Unsetenv("OLLAMA_MODELS") var defaultModels string home, err := os.UserHomeDir() if err == nil { defaultModels = filepath.Join(home, ".ollama", "models") os.MkdirAll(defaultModels, 0o755) } tmpModels := t.TempDir() tests := []struct { name string settings store.Settings want []string dont []string }{ { name: "default", settings: store.Settings{}, want: []string{"OLLAMA_MODELS=" + defaultModels}, dont: []string{"OLLAMA_HOST=", "OLLAMA_ORIGINS="}, }, { name: "expose", settings: store.Settings{Expose: true}, want: []string{"OLLAMA_HOST=0.0.0.0", "OLLAMA_MODELS=" + defaultModels}, dont: []string{"OLLAMA_ORIGINS="}, }, { name: "browser", settings: store.Settings{Browser: true}, want: []string{"OLLAMA_ORIGINS=*", "OLLAMA_MODELS=" + defaultModels}, dont: []string{"OLLAMA_HOST="}, }, { name: "models", settings: store.Settings{Models: tmpModels}, want: []string{"OLLAMA_MODELS=" + tmpModels}, dont: []string{"OLLAMA_HOST=", "OLLAMA_ORIGINS="}, }, { name: "inaccessible_models", settings: store.Settings{Models: "/nonexistent/external/drive/models"}, want: []string{}, dont: []string{"OLLAMA_MODELS="}, }, { name: "all", settings: store.Settings{ Expose: true, Browser: true, Models: tmpModels, }, want: []string{ "OLLAMA_HOST=0.0.0.0", "OLLAMA_ORIGINS=*", "OLLAMA_MODELS=" + tmpModels, }, dont: []string{}, }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { tmpDir := t.TempDir() st := &store.Store{DBPath: 
filepath.Join(tmpDir, "db.sqlite")} defer st.Close() // Ensure database is closed before cleanup st.SetSettings(tt.settings) s := &Server{ store: st, } cmd, err := s.cmd(t.Context()) if err != nil { t.Fatalf("s.cmd() error = %v", err) } for _, want := range tt.want { found := false for _, env := range cmd.Env { if strings.Contains(env, want) { found = true break } } if !found { t.Errorf("expected environment variable containing %s", want) } } for _, dont := range tt.dont { for _, env := range cmd.Env { if strings.Contains(env, dont) { t.Errorf("unexpected environment variable: %s", env) } } } if cmd.Cancel == nil { t.Error("expected non-nil cancel function") } }) } } func TestGetInferenceComputer(t *testing.T) { tests := []struct { name string log string exp []InferenceCompute }{ { name: "metal", log: `time=2025-06-30T09:23:07.374-07:00 level=DEBUG source=sched.go:108 msg="starting llm scheduler" time=2025-06-30T09:23:07.416-07:00 level=INFO source=types.go:130 msg="inference compute" id=0 library=metal variant="" compute="" driver=0.0 name="" total="96.0 GiB" available="96.0 GiB" time=2025-06-30T09:25:56.197-07:00 level=DEBUG source=ggml.go:155 msg="key not found" key=general.alignment default=32 `, exp: []InferenceCompute{{ Library: "metal", Driver: "0.0", VRAM: "96.0 GiB", }}, }, { name: "cpu", log: `time=2025-07-01T17:59:51.470Z level=INFO source=gpu.go:377 msg="no compatible GPUs were discovered" time=2025-07-01T17:59:51.470Z level=INFO source=types.go:130 msg="inference compute" id=0 library=cpu variant="" compute="" driver=0.0 name="" total="31.3 GiB" available="30.4 GiB" [GIN] 2025/07/01 - 18:00:09 | 200 | 50.263µs | 100.126.204.152 | HEAD "/" `, exp: []InferenceCompute{{ Library: "cpu", Driver: "0.0", VRAM: "31.3 GiB", }}, }, { name: "cuda1", log: `time=2025-07-01T19:33:43.162Z level=DEBUG source=amd_linux.go:419 msg="amdgpu driver not detected /sys/module/amdgpu" releasing cuda driver library time=2025-07-01T19:33:43.162Z level=INFO source=types.go:130 
msg="inference compute" id=GPU-452cac9f-6960-839c-4fb3-0cec83699196 library=cuda variant=v12 compute=6.1 driver=12.7 name="NVIDIA GeForce GT 1030" total="3.9 GiB" available="3.9 GiB" [GIN] 2025/07/01 - 18:00:09 | 200 | 50.263µs | 100.126.204.152 | HEAD "/" `, exp: []InferenceCompute{{ Library: "cuda", Variant: "v12", Compute: "6.1", Driver: "12.7", Name: "NVIDIA GeForce GT 1030", VRAM: "3.9 GiB", }}, }, { name: "frank", log: `time=2025-07-01T19:36:13.315Z level=INFO source=amd_linux.go:386 msg="amdgpu is supported" gpu=GPU-9abb57639fa80c50 gpu_type=gfx1030 releasing cuda driver library time=2025-07-01T19:36:13.315Z level=INFO source=types.go:130 msg="inference compute" id=GPU-d6de3398-9932-6902-11ec-fee8e424c8a2 library=cuda variant=v12 compute=7.5 driver=12.8 name="NVIDIA GeForce RTX 2080 Ti" total="10.6 GiB" available="10.4 GiB" time=2025-07-01T19:36:13.315Z level=INFO source=types.go:130 msg="inference compute" id=GPU-9abb57639fa80c50 library=rocm variant="" compute=gfx1030 driver=6.3 name=1002:73bf total="16.0 GiB" available="1.3 GiB" [GIN] 2025/07/01 - 18:00:09 | 200 | 50.263µs | 100.126.204.152 | HEAD "/" `, exp: []InferenceCompute{ { Library: "cuda", Variant: "v12", Compute: "7.5", Driver: "12.8", Name: "NVIDIA GeForce RTX 2080 Ti", VRAM: "10.6 GiB", }, { Library: "rocm", Compute: "gfx1030", Driver: "6.3", Name: "1002:73bf", VRAM: "16.0 GiB", }, }, }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { tmpDir := t.TempDir() serverLogPath = filepath.Join(tmpDir, "server.log") err := os.WriteFile(serverLogPath, []byte(tt.log), 0o644) if err != nil { t.Fatalf("failed to write log file %s: %s", serverLogPath, err) } ctx, cancel := context.WithTimeout(t.Context(), 10*time.Millisecond) defer cancel() ics, err := GetInferenceComputer(ctx) if err != nil { t.Fatalf(" failed to get inference compute: %v", err) } if !reflect.DeepEqual(ics, tt.exp) { t.Fatalf("got:\n%#v\nwant:\n%#v", ics, tt.exp) } }) } } func TestGetInferenceComputerTimeout(t *testing.T) { 
ctx, cancel := context.WithTimeout(t.Context(), 10*time.Millisecond) defer cancel() tmpDir := t.TempDir() serverLogPath = filepath.Join(tmpDir, "server.log") err := os.WriteFile(serverLogPath, []byte("foo\nbar\nbaz\n"), 0o644) if err != nil { t.Fatalf("failed to write log file %s: %s", serverLogPath, err) } _, err = GetInferenceComputer(ctx) if err == nil { t.Fatal("expected timeout") } if !strings.Contains(err.Error(), "timeout") { t.Fatalf("unexpected error: %s", err) } }
go
MIT
626af2d80973270c4d59b8df7153ac47ad67ed7b
2026-01-07T08:35:43.337630Z
false
ollama/ollama
https://github.com/ollama/ollama/blob/626af2d80973270c4d59b8df7153ac47ad67ed7b/app/server/server_unix.go
app/server/server_unix.go
//go:build darwin

package server

import (
	"context"
	"errors"
	"fmt"
	"log/slog"
	"os"
	"os/exec"
	"path/filepath"
	"strconv"
	"strings"
	"syscall"
)

var (
	// pidFile records the PID of the managed ollama server process.
	pidFile = filepath.Join(os.Getenv("HOME"), "Library", "Application Support", "Ollama", "ollama.pid")
	// serverLogPath is where the ollama server writes its log output.
	serverLogPath = filepath.Join(os.Getenv("HOME"), ".ollama", "logs", "server.log")
)

// commandContext builds an exec.Cmd bound to ctx; on darwin no extra
// process-group setup is required.
func commandContext(ctx context.Context, name string, arg ...string) *exec.Cmd {
	return exec.CommandContext(ctx, name, arg...)
}

// terminate asks proc to shut down gracefully by sending an interrupt.
func terminate(proc *os.Process) error {
	return proc.Signal(os.Interrupt)
}

// terminated reports whether the process with the given pid has exited.
// It probes with signal 0, which performs existence/permission checks
// without delivering an actual signal.
func terminated(pid int) (bool, error) {
	proc, err := os.FindProcess(pid)
	if err != nil {
		return false, fmt.Errorf("failed to find process: %v", err)
	}
	err = proc.Signal(syscall.Signal(0))
	if err != nil {
		// ErrProcessDone / ESRCH both mean the process no longer exists.
		if errors.Is(err, os.ErrProcessDone) || errors.Is(err, syscall.ESRCH) {
			return true, nil
		}
		return false, fmt.Errorf("error signaling process: %v", err)
	}
	return false, nil
}

// reapServers kills all ollama processes except our own
func reapServers() error {
	// Get our own PID to avoid killing ourselves
	currentPID := os.Getpid()

	// Use pkill to kill ollama processes
	// -x matches the whole command name exactly
	// We'll get the list first, then kill selectively
	cmd := exec.Command("pgrep", "-x", "ollama")
	output, err := cmd.Output()
	if err != nil {
		// No ollama processes found
		slog.Debug("no ollama processes found")
		return nil //nolint:nilerr
	}

	pidsStr := strings.TrimSpace(string(output))
	if pidsStr == "" {
		return nil
	}

	// pgrep emits one PID per line; parse and signal each in turn.
	pids := strings.Split(pidsStr, "\n")
	for _, pidStr := range pids {
		pidStr = strings.TrimSpace(pidStr)
		if pidStr == "" {
			continue
		}
		pid, err := strconv.Atoi(pidStr)
		if err != nil {
			slog.Debug("failed to parse PID", "pidStr", pidStr, "err", err)
			continue
		}
		if pid == currentPID {
			continue
		}
		proc, err := os.FindProcess(pid)
		if err != nil {
			slog.Debug("failed to find process", "pid", pid, "err", err)
			continue
		}
		if err := proc.Signal(syscall.SIGTERM); err != nil {
			// Try SIGKILL if SIGTERM fails
			if err := proc.Signal(syscall.SIGKILL); err != nil {
				slog.Warn("failed to stop external ollama process", "pid", pid, "err", err)
				continue
			}
		}
		slog.Info("stopped external ollama process", "pid", pid)
	}
	return nil
}
go
MIT
626af2d80973270c4d59b8df7153ac47ad67ed7b
2026-01-07T08:35:43.337630Z
false
ollama/ollama
https://github.com/ollama/ollama/blob/626af2d80973270c4d59b8df7153ac47ad67ed7b/app/server/server.go
app/server/server.go
//go:build windows || darwin package server import ( "bufio" "context" "errors" "fmt" "io" "log/slog" "os" "os/exec" "path/filepath" "regexp" "runtime" "strconv" "strings" "time" "github.com/ollama/ollama/app/logrotate" "github.com/ollama/ollama/app/store" ) const restartDelay = time.Second // Server is a managed ollama server process type Server struct { store *store.Store bin string // resolved path to `ollama` log io.WriteCloser dev bool // true if running with the dev flag } type InferenceCompute struct { Library string Variant string Compute string Driver string Name string VRAM string } func New(s *store.Store, devMode bool) *Server { p := resolvePath("ollama") return &Server{store: s, bin: p, dev: devMode} } func resolvePath(name string) string { // look in the app bundle first if exe, _ := os.Executable(); exe != "" { var dir string if runtime.GOOS == "windows" { dir = filepath.Dir(exe) } else { dir = filepath.Join(filepath.Dir(exe), "..", "Resources") } if _, err := os.Stat(filepath.Join(dir, name)); err == nil { return filepath.Join(dir, name) } } // check the development dist path for _, path := range []string{ filepath.Join("dist", runtime.GOOS, name), filepath.Join("dist", runtime.GOOS+"-"+runtime.GOARCH, name), } { if _, err := os.Stat(path); err == nil { return path } } // fallback to system path if p, _ := exec.LookPath(name); p != "" { return p } return name } // cleanup checks the pid file for a running ollama process // and shuts it down gracefully if it is running func cleanup() error { data, err := os.ReadFile(pidFile) if err != nil { if os.IsNotExist(err) { return nil } return err } defer os.Remove(pidFile) pid, err := strconv.Atoi(strings.TrimSpace(string(data))) if err != nil { return err } proc, err := os.FindProcess(pid) if err != nil { return nil } ok, err := terminated(pid) if err != nil { slog.Debug("cleanup: error checking if terminated", "pid", pid, "err", err) } if ok { return nil } slog.Info("detected previous ollama process, 
cleaning up", "pid", pid) return stop(proc) } // stop waits for a process with the provided pid to exit by polling // `terminated(pid)`. If the process has not exited within 5 seconds, it logs a // warning and kills the process. func stop(proc *os.Process) error { if proc == nil { return nil } if err := terminate(proc); err != nil { slog.Warn("graceful terminate failed, killing", "err", err) return proc.Kill() } deadline := time.NewTimer(5 * time.Second) defer deadline.Stop() for { select { case <-deadline.C: slog.Warn("timeout waiting for graceful shutdown; killing", "pid", proc.Pid) return proc.Kill() default: ok, err := terminated(proc.Pid) if err != nil { slog.Error("error checking if ollama process is terminated", "err", err) return err } if ok { return nil } time.Sleep(10 * time.Millisecond) } } } func (s *Server) Run(ctx context.Context) error { l, err := openRotatingLog() if err != nil { return err } s.log = l defer s.log.Close() if err := cleanup(); err != nil { slog.Warn("failed to cleanup previous ollama process", "err", err) } reaped := false for ctx.Err() == nil { select { case <-ctx.Done(): return ctx.Err() case <-time.After(restartDelay): } cmd, err := s.cmd(ctx) if err != nil { return err } if err := cmd.Start(); err != nil { return err } err = os.WriteFile(pidFile, []byte(strconv.Itoa(cmd.Process.Pid)), 0o644) if err != nil { slog.Warn("failed to write pid file", "file", pidFile, "err", err) } if err = cmd.Wait(); err != nil && !errors.Is(err, context.Canceled) { var exitErr *exec.ExitError if errors.As(err, &exitErr) && exitErr.ExitCode() == 1 && !s.dev && !reaped { reaped = true // This could be a port conflict, try to kill any existing ollama processes if err := reapServers(); err != nil { slog.Warn("failed to stop existing ollama server", "err", err) } else { slog.Debug("conflicting server stopped, waiting for port to be released") continue } } slog.Error("ollama exited", "err", err) } } return ctx.Err() } func (s *Server) cmd(ctx 
context.Context) (*exec.Cmd, error) { settings, err := s.store.Settings() if err != nil { return nil, err } cmd := commandContext(ctx, s.bin, "serve") cmd.Stdout, cmd.Stderr = s.log, s.log // Copy and mutate the environment to merge in settings the user has specified without dups env := map[string]string{} for _, kv := range os.Environ() { s := strings.SplitN(kv, "=", 2) env[s[0]] = s[1] } if settings.Expose { env["OLLAMA_HOST"] = "0.0.0.0" } if settings.Browser { env["OLLAMA_ORIGINS"] = "*" } if settings.Models != "" { if _, err := os.Stat(settings.Models); err == nil { env["OLLAMA_MODELS"] = settings.Models } else { slog.Warn("models path not accessible, using default", "path", settings.Models, "err", err) } } if settings.ContextLength > 0 { env["OLLAMA_CONTEXT_LENGTH"] = strconv.Itoa(settings.ContextLength) } cmd.Env = []string{} for k, v := range env { cmd.Env = append(cmd.Env, k+"="+v) } cmd.Cancel = func() error { if cmd.Process == nil { return nil } return stop(cmd.Process) } return cmd, nil } func openRotatingLog() (io.WriteCloser, error) { // TODO consider rotation based on size or time, not just every server invocation dir := filepath.Dir(serverLogPath) if err := os.MkdirAll(dir, 0o755); err != nil { return nil, fmt.Errorf("create log directory: %w", err) } logrotate.Rotate(serverLogPath) f, err := os.OpenFile(serverLogPath, os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0o644) if err != nil { return nil, fmt.Errorf("open log file: %w", err) } return f, nil } // Attempt to retrieve inference compute information from the server // log. 
Set ctx to timeout to control how long to wait for the logs to appear func GetInferenceComputer(ctx context.Context) ([]InferenceCompute, error) { inference := []InferenceCompute{} marker := regexp.MustCompile(`inference compute.*library=`) q := `inference compute.*%s=["]([^"]*)["]` nq := `inference compute.*%s=(\S+)\s` type regex struct { q *regexp.Regexp nq *regexp.Regexp } regexes := map[string]regex{ "library": { q: regexp.MustCompile(fmt.Sprintf(q, "library")), nq: regexp.MustCompile(fmt.Sprintf(nq, "library")), }, "variant": { q: regexp.MustCompile(fmt.Sprintf(q, "variant")), nq: regexp.MustCompile(fmt.Sprintf(nq, "variant")), }, "compute": { q: regexp.MustCompile(fmt.Sprintf(q, "compute")), nq: regexp.MustCompile(fmt.Sprintf(nq, "compute")), }, "driver": { q: regexp.MustCompile(fmt.Sprintf(q, "driver")), nq: regexp.MustCompile(fmt.Sprintf(nq, "driver")), }, "name": { q: regexp.MustCompile(fmt.Sprintf(q, "name")), nq: regexp.MustCompile(fmt.Sprintf(nq, "name")), }, "total": { q: regexp.MustCompile(fmt.Sprintf(q, "total")), nq: regexp.MustCompile(fmt.Sprintf(nq, "total")), }, } get := func(field, line string) string { regex, ok := regexes[field] if !ok { slog.Warn("missing field", "field", field) return "" } match := regex.q.FindStringSubmatch(line) if len(match) > 1 { return match[1] } match = regex.nq.FindStringSubmatch(line) if len(match) > 1 { return match[1] } return "" } for { select { case <-ctx.Done(): return nil, fmt.Errorf("timeout scanning server log for inference compute details") default: } file, err := os.Open(serverLogPath) if err != nil { slog.Debug("failed to open server log", "log", serverLogPath, "error", err) time.Sleep(time.Second) continue } defer file.Close() scanner := bufio.NewScanner(file) for scanner.Scan() { line := scanner.Text() match := marker.FindStringSubmatch(line) if len(match) > 0 { ic := InferenceCompute{ Library: get("library", line), Variant: get("variant", line), Compute: get("compute", line), Driver: get("driver", 
line), Name: get("name", line), VRAM: get("total", line), } slog.Info("Matched", "inference compute", ic) inference = append(inference, ic) } else { // Break out on first non matching line after we start matching if len(inference) > 0 { return inference, nil } } } time.Sleep(100 * time.Millisecond) } }
go
MIT
626af2d80973270c4d59b8df7153ac47ad67ed7b
2026-01-07T08:35:43.337630Z
false
ollama/ollama
https://github.com/ollama/ollama/blob/626af2d80973270c4d59b8df7153ac47ad67ed7b/app/server/server_windows.go
app/server/server_windows.go
package server

import (
	"context"
	"fmt"
	"log/slog"
	"os"
	"os/exec"
	"path/filepath"
	"strconv"
	"strings"
	"syscall"

	"golang.org/x/sys/windows"
)

var (
	pidFile       = filepath.Join(os.Getenv("LOCALAPPDATA"), "Ollama", "ollama.pid")
	serverLogPath = filepath.Join(os.Getenv("LOCALAPPDATA"), "Ollama", "server.log")
)

// commandContext builds the command used to launch the server, hidden from
// the desktop and in its own process group so console control events can be
// targeted at it without affecting this process.
func commandContext(ctx context.Context, name string, arg ...string) *exec.Cmd {
	cmd := exec.CommandContext(ctx, name, arg...)
	cmd.SysProcAttr = &syscall.SysProcAttr{
		HideWindow:    true,
		CreationFlags: windows.CREATE_NEW_PROCESS_GROUP,
	}
	return cmd
}

// terminate requests a graceful shutdown of proc by attaching to its console
// and delivering CTRL_BREAK then CTRL_C events (the Windows analogue of a
// unix SIGINT). A ctrl handler is installed first so the events do not also
// terminate this process.
func terminate(proc *os.Process) error {
	dll, err := windows.LoadDLL("kernel32.dll")
	if err != nil {
		return err
	}
	defer dll.Release()
	pid := proc.Pid
	f, err := dll.FindProc("AttachConsole")
	if err != nil {
		return err
	}
	// ERROR_ACCESS_DENIED means we are already attached to that console,
	// which is fine for our purposes.
	r1, _, err := f.Call(uintptr(pid))
	if r1 == 0 && err != syscall.ERROR_ACCESS_DENIED {
		return err
	}
	// SetConsoleCtrlHandler(NULL, TRUE): ignore CTRL_C in this process so the
	// event we generate below only stops the target.
	f, err = dll.FindProc("SetConsoleCtrlHandler")
	if err != nil {
		return err
	}
	r1, _, err = f.Call(0, 1)
	if r1 == 0 {
		return err
	}
	f, err = dll.FindProc("GenerateConsoleCtrlEvent")
	if err != nil {
		return err
	}
	// Send both BREAK and C; the server was started in its own process group
	// (CREATE_NEW_PROCESS_GROUP above), so pid doubles as the group id.
	r1, _, err = f.Call(windows.CTRL_BREAK_EVENT, uintptr(pid))
	if r1 == 0 {
		return err
	}
	r1, _, err = f.Call(windows.CTRL_C_EVENT, uintptr(pid))
	if r1 == 0 {
		return err
	}
	// NOTE(review): the attached console is never detached (FreeConsole) and
	// the ctrl handler is never restored — presumably acceptable because the
	// app exits soon after; confirm if terminate can be called repeatedly.
	return nil
}

// STILL_ACTIVE is the exit code GetExitCodeProcess reports for a process
// that has not yet exited.
const STILL_ACTIVE = 259

// terminated reports whether the process with the given pid has exited,
// based on its exit code. ERROR_INVALID_PARAMETER from OpenProcess means the
// pid no longer exists, which also counts as terminated.
func terminated(pid int) (bool, error) {
	hProcess, err := windows.OpenProcess(windows.PROCESS_QUERY_INFORMATION, false, uint32(pid))
	if err != nil {
		if errno, ok := err.(windows.Errno); ok && errno == windows.ERROR_INVALID_PARAMETER {
			return true, nil
		}
		return false, fmt.Errorf("failed to open process: %v", err)
	}
	defer windows.CloseHandle(hProcess)
	var exitCode uint32
	err = windows.GetExitCodeProcess(hProcess, &exitCode)
	if err != nil {
		return false, fmt.Errorf("failed to get exit code: %v", err)
	}
	// NOTE(review): a process that deliberately exits with code 259 would be
	// indistinguishable from a live one — inherent Win32 limitation.
	if exitCode == STILL_ACTIVE {
		return false, nil
	}
	return true, nil
}

// reapServers kills all ollama processes except our own
func reapServers() error {
	// Get current process ID to avoid killing ourselves
	currentPID := os.Getpid()

	// Use wmic to find ollama processes
	cmd := exec.Command("wmic", "process", "where", "name='ollama.exe'", "get", "ProcessId")
	cmd.SysProcAttr = &syscall.SysProcAttr{HideWindow: true}
	output, err := cmd.Output()
	if err != nil {
		// No ollama processes found
		slog.Debug("no ollama processes found")
		return nil //nolint:nilerr
	}

	// wmic output is a "ProcessId" header line followed by one pid per line;
	// keep only lines that parse as integers.
	lines := strings.Split(string(output), "\n")
	var pids []string
	for _, line := range lines {
		line = strings.TrimSpace(line)
		if line == "" || line == "ProcessId" {
			continue
		}
		if _, err := strconv.Atoi(line); err == nil {
			pids = append(pids, line)
		}
	}

	for _, pidStr := range pids {
		pid, err := strconv.Atoi(pidStr)
		if err != nil {
			continue
		}
		if pid == currentPID {
			continue
		}
		// Force-kill; there is no graceful path for an external server here.
		cmd := exec.Command("taskkill", "/F", "/PID", pidStr)
		if err := cmd.Run(); err != nil {
			slog.Warn("failed to kill ollama process", "pid", pid, "err", err)
		}
	}
	return nil
}
go
MIT
626af2d80973270c4d59b8df7153ac47ad67ed7b
2026-01-07T08:35:43.337630Z
false
ollama/ollama
https://github.com/ollama/ollama/blob/626af2d80973270c4d59b8df7153ac47ad67ed7b/app/format/field_test.go
app/format/field_test.go
//go:build windows || darwin package format import "testing" func TestKebabCase(t *testing.T) { tests := []struct { input string expected string }{ {"already-kebab-case", "already-kebab-case"}, {"simpleCamelCase", "simple-camel-case"}, {"PascalCase", "pascal-case"}, {"camelCaseWithNumber123", "camel-case-with-number123"}, {"APIResponse", "api-response"}, {"mixedCASE", "mixed-case"}, {"WithACRONYMS", "with-acronyms"}, {"ALLCAPS", "allcaps"}, {"camelCaseWITHMixedACRONYMS", "camel-case-with-mixed-acronyms"}, {"numbers123in456string", "numbers123in456string"}, {"5", "5"}, {"S", "s"}, } for _, tt := range tests { t.Run(tt.input, func(t *testing.T) { result := KebabCase(tt.input) if result != tt.expected { t.Errorf("toKebabCase(%q) = %q, want %q", tt.input, result, tt.expected) } }) } }
go
MIT
626af2d80973270c4d59b8df7153ac47ad67ed7b
2026-01-07T08:35:43.337630Z
false
ollama/ollama
https://github.com/ollama/ollama/blob/626af2d80973270c4d59b8df7153ac47ad67ed7b/app/format/field.go
app/format/field.go
//go:build windows || darwin package format import ( "strings" "unicode" ) // KebabCase converts a string from camelCase or PascalCase to kebab-case. // (e.g. "camelCase" -> "camel-case") func KebabCase(str string) string { var result strings.Builder for i, char := range str { if i > 0 { prevChar := rune(str[i-1]) // Add hyphen before uppercase letters if unicode.IsUpper(char) && (unicode.IsLower(prevChar) || unicode.IsDigit(prevChar) || (i < len(str)-1 && unicode.IsLower(rune(str[i+1])))) { result.WriteRune('-') } } result.WriteRune(unicode.ToLower(char)) } return result.String() }
go
MIT
626af2d80973270c4d59b8df7153ac47ad67ed7b
2026-01-07T08:35:43.337630Z
false
ollama/ollama
https://github.com/ollama/ollama/blob/626af2d80973270c4d59b8df7153ac47ad67ed7b/app/updater/updater_darwin.go
app/updater/updater_darwin.go
package updater

// #cgo CFLAGS: -x objective-c
// #cgo LDFLAGS: -framework Webkit -framework Cocoa -framework LocalAuthentication -framework ServiceManagement
// #include "updater_darwin.h"
// typedef const char cchar_t;
import "C"

import (
	"archive/zip"
	"errors"
	"fmt"
	"io"
	"log/slog"
	"os"
	"os/user"
	"path/filepath"
	"strings"
	"syscall"
	"unsafe"

	"golang.org/x/sys/unix"
)

var (
	// appBackupDir holds the prior app bundle during an upgrade so it can be
	// restored on failure; populated in init.
	appBackupDir string
	// SystemWidePath is the canonical install location in /Applications.
	SystemWidePath = "/Applications/Ollama.app"
)

// BundlePath resolves the path of the .app bundle this process is running
// from: a "keep both"-renamed copy in /Applications, the Squirrel framework
// layout, or the standard Contents/MacOS layout. Empty when not running from
// a bundle (e.g. development builds).
var BundlePath = func() string {
	if bundle := alreadyMoved(); bundle != "" {
		return bundle
	}
	exe, err := os.Executable()
	if err != nil {
		return ""
	}
	// We also install this binary in Contents/Frameworks/Squirrel.framework/Versions/A/Squirrel
	if filepath.Base(exe) == "Squirrel" && filepath.Base(filepath.Dir(filepath.Dir(filepath.Dir(filepath.Dir(filepath.Dir(exe)))))) == "Contents" {
		return filepath.Dir(filepath.Dir(filepath.Dir(filepath.Dir(filepath.Dir(filepath.Dir(exe))))))
	}
	// Make sure we're in a proper macOS app bundle structure (Contents/MacOS)
	if filepath.Base(filepath.Dir(exe)) != "MacOS" || filepath.Base(filepath.Dir(filepath.Dir(exe))) != "Contents" {
		return ""
	}
	return filepath.Dir(filepath.Dir(filepath.Dir(exe)))
}()

// init wires the darwin implementations into the package-level updater hooks
// (VerifyDownload, Installer, UserAgentOS) and resolves the per-user paths
// used throughout the upgrade flow.
func init() {
	VerifyDownload = verifyDownload
	Installer = "Ollama-darwin.zip"
	home, err := os.UserHomeDir()
	if err != nil {
		panic(err)
	}
	var uts unix.Utsname
	if err := unix.Uname(&uts); err == nil {
		sysname := unix.ByteSliceToString(uts.Sysname[:])
		release := unix.ByteSliceToString(uts.Release[:])
		UserAgentOS = fmt.Sprintf("%s/%s", sysname, release)
	} else {
		slog.Warn("unable to determine OS version", "error", err)
		UserAgentOS = "Darwin"
	}
	// TODO handle failure modes here, and developer mode better...
	// Executable = Ollama.app/Contents/MacOS/Ollama
	UpgradeLogFile = filepath.Join(home, ".ollama", "logs", "upgrade.log")
	cacheDir, err := os.UserCacheDir()
	if err != nil {
		slog.Warn("unable to determine user cache dir, falling back to tmpdir", "error", err)
		cacheDir = os.TempDir()
	}
	appDataDir := filepath.Join(cacheDir, "ollama")
	UpgradeMarkerFile = filepath.Join(appDataDir, "upgraded")
	appBackupDir = filepath.Join(appDataDir, "backup")
	UpgradeMarkerFile = UpgradeMarkerFile
	UpdateStageDir = filepath.Join(appDataDir, "updates")
}

// DoUpgrade swaps the running app bundle for the staged update: the current
// bundle is renamed into appBackupDir, the staged zip is extracted into
// BundlePath, and on any extraction failure the backup is restored. When
// interactive, a macOS authorization prompt may be used to fix ownership.
func DoUpgrade(interactive bool) error {
	// TODO use UpgradeLogFile to record the upgrade details from->to version, etc.
	bundle := getStagedUpdate()
	if bundle == "" {
		return fmt.Errorf("failed to lookup downloads")
	}
	slog.Info("starting upgrade", "app", BundlePath, "update", bundle, "pid", os.Getpid(), "log", UpgradeLogFile)

	// TODO - in the future, consider shutting down the backend server now to give it
	// time to drain connections and stop allowing new connections while we perform the
	// actual upgrade to reduce the overall time to complete
	contentsName := filepath.Join(BundlePath, "Contents")
	appBackup := filepath.Join(appBackupDir, "Ollama.app")
	contentsOldName := filepath.Join(appBackup, "Contents")

	// Verify old doesn't exist yet
	if _, err := os.Stat(contentsOldName); err == nil {
		slog.Error("prior upgrade failed", "backup", contentsOldName)
		return fmt.Errorf("prior upgrade failed - please upgrade manually by installing the bundle")
	}
	if err := os.MkdirAll(appBackupDir, 0o755); err != nil {
		return fmt.Errorf("unable to create backup dir %s: %w", appBackupDir, err)
	}

	// Verify bundle loads before starting staging process
	r, err := zip.OpenReader(bundle)
	if err != nil {
		return fmt.Errorf("unable to open upgrade bundle %s: %w", bundle, err)
	}
	defer r.Close()

	slog.Debug("temporarily staging old version", "staging", appBackup)
	if err := os.Rename(BundlePath, appBackup); err != nil {
		if !interactive {
			// We don't want to prompt for permission if we're attempting to upgrade at startup
			return fmt.Errorf("unable to upgrade in non-interactive mode with permission problems: %w", err)
		}
		// TODO actually inspect the error and look for permission problems before trying chown
		slog.Warn("unable to backup old version due to permission problems, changing ownership", "error", err)
		u, err := user.Current()
		if err != nil {
			return err
		}
		if !chownWithAuthorization(u.Username) {
			return fmt.Errorf("unable to change permissions to complete upgrade")
		}
		if err := os.Rename(BundlePath, appBackup); err != nil {
			return fmt.Errorf("unable to perform upgrade - failed to stage old version: %w", err)
		}
	}

	// Get ready to try to unwind a partial upgade failure during unzip
	// If something goes wrong, we attempt to put the old version back.
	anyFailures := false
	defer func() {
		if anyFailures {
			slog.Warn("upgrade failures detected, attempting to revert")
			if err := os.RemoveAll(BundlePath); err != nil {
				slog.Warn("failed to remove partial upgrade", "path", BundlePath, "error", err)
				// At this point, we're basically hosed and the user will need to re-install
				return
			}
			if err := os.Rename(appBackup, BundlePath); err != nil {
				slog.Error("failed to revert to prior version", "path", contentsName, "error", err)
			}
		}
	}()

	// Bundle contents Ollama.app/Contents/...
	links := []*zip.File{}
	for _, f := range r.File {
		// Strip the leading "Ollama.app/" component.
		s := strings.SplitN(f.Name, "/", 2)
		if len(s) < 2 || s[1] == "" {
			slog.Debug("skipping", "file", f.Name)
			continue
		}
		name := s[1]
		if strings.HasSuffix(name, "/") {
			d := filepath.Join(BundlePath, name)
			err := os.MkdirAll(d, 0o755)
			if err != nil {
				anyFailures = true
				return fmt.Errorf("failed to mkdir %s: %w", d, err)
			}
			continue
		}
		if f.Mode()&os.ModeSymlink != 0 {
			// Defer links to the end
			links = append(links, f)
			continue
		}
		// NOTE(review): src is never closed, and destFile is closed via a
		// defer inside this loop, so every extracted file's handle stays open
		// until DoUpgrade returns — could exhaust descriptors on a bundle
		// with many entries; confirm and consider closing per iteration.
		src, err := f.Open()
		if err != nil {
			anyFailures = true
			return fmt.Errorf("failed to open bundle file %s: %w", name, err)
		}
		destName := filepath.Join(BundlePath, name)
		// Verify directory first
		d := filepath.Dir(destName)
		if _, err := os.Stat(d); err != nil {
			err := os.MkdirAll(d, 0o755)
			if err != nil {
				anyFailures = true
				return fmt.Errorf("failed to mkdir %s: %w", d, err)
			}
		}
		destFile, err := os.OpenFile(destName, os.O_WRONLY|os.O_CREATE|os.O_TRUNC, 0o755)
		if err != nil {
			anyFailures = true
			return fmt.Errorf("failed to open output file %s: %w", destName, err)
		}
		defer destFile.Close()
		if _, err := io.Copy(destFile, src); err != nil {
			anyFailures = true
			return fmt.Errorf("failed to open extract file %s: %w", destName, err)
		}
	}
	// Create symlinks last so their targets already exist.
	for _, f := range links {
		s := strings.SplitN(f.Name, "/", 2) // Strip off Ollama.app/
		if len(s) < 2 || s[1] == "" {
			slog.Debug("skipping link", "file", f.Name)
			continue
		}
		name := s[1]
		src, err := f.Open()
		if err != nil {
			anyFailures = true
			return err
		}
		// The zip entry body of a symlink is its target path.
		buf, err := io.ReadAll(src)
		if err != nil {
			anyFailures = true
			return err
		}
		link := string(buf)
		// NOTE(review): link[0] panics if a symlink entry has an empty body —
		// a malformed bundle would crash mid-upgrade; confirm and guard.
		if link[0] == '/' {
			anyFailures = true
			return fmt.Errorf("bundle contains absolute symlink %s -> %s", f.Name, link)
		}
		// Don't allow links outside of Ollama.app
		if strings.HasPrefix(filepath.Join(filepath.Dir(name), link), "..") {
			anyFailures = true
			return fmt.Errorf("bundle contains link outside of contents %s -> %s", f.Name, link)
		}
		if err = os.Symlink(link, filepath.Join(BundlePath, name)); err != nil {
			anyFailures = true
			return err
		}
	}
	// Touch the marker so the next launch knows an upgrade just happened.
	f, err := os.OpenFile(UpgradeMarkerFile, os.O_RDONLY|os.O_CREATE, 0o666)
	if err != nil {
		slog.Warn("unable to create marker file", "file", UpgradeMarkerFile, "error", err)
	}
	// Close is safe on a nil *os.File (returns ErrInvalid), so the error
	// path above does not panic here.
	f.Close()
	// Make sure to remove the staged download now that we succeeded so we don't inadvertently try again.
	cleanupOldDownloads(UpdateStageDir)
	return nil
}

// DoPostUpgradeCleanup removes the backup of the previous version and the
// upgrade marker once the new version is confirmed running.
func DoPostUpgradeCleanup() error {
	slog.Debug("post upgrade cleanup", "backup", appBackupDir)
	err := os.RemoveAll(appBackupDir)
	if err != nil {
		return err
	}
	slog.Debug("post upgrade cleanup", "old", UpgradeMarkerFile)
	return os.Remove(UpgradeMarkerFile)
}

// verifyDownload extracts the staged zip into a temp directory and runs the
// native code-signature verification on the resulting bundle. It performs
// the same symlink-safety checks as DoUpgrade so a malicious bundle cannot
// escape the temp dir.
func verifyDownload() error {
	bundle := getStagedUpdate()
	if bundle == "" {
		return fmt.Errorf("failed to lookup downloads")
	}
	slog.Debug("verifying update", "bundle", bundle)
	// Extract zip file into a temporary location so we can run the cert verification routines
	dir, err := os.MkdirTemp("", "ollama_update_verify")
	if err != nil {
		return err
	}
	defer os.RemoveAll(dir)
	r, err := zip.OpenReader(bundle)
	if err != nil {
		return fmt.Errorf("unable to open upgrade bundle %s: %w", bundle, err)
	}
	defer r.Close()

	links := []*zip.File{}
	for _, f := range r.File {
		if strings.HasSuffix(f.Name, "/") {
			d := filepath.Join(dir, f.Name)
			err := os.MkdirAll(d, 0o755)
			if err != nil {
				return fmt.Errorf("failed to mkdir %s: %w", d, err)
			}
			continue
		}
		if f.Mode()&os.ModeSymlink != 0 {
			// Defer links to the end
			links = append(links, f)
			continue
		}
		// NOTE(review): same per-iteration handle buildup as DoUpgrade (src
		// unclosed, destFile closed only at function return).
		src, err := f.Open()
		if err != nil {
			return fmt.Errorf("failed to open bundle file %s: %w", f.Name, err)
		}
		destName := filepath.Join(dir, f.Name)
		// Verify directory first
		d := filepath.Dir(destName)
		if _, err := os.Stat(d); err != nil {
			err := os.MkdirAll(d, 0o755)
			if err != nil {
				return fmt.Errorf("failed to mkdir %s: %w", d, err)
			}
		}
		destFile, err := os.OpenFile(destName, os.O_WRONLY|os.O_CREATE|os.O_TRUNC, 0o755)
		if err != nil {
			return fmt.Errorf("failed to open output file %s: %w", destName, err)
		}
		defer destFile.Close()
		if _, err := io.Copy(destFile, src); err != nil {
			return fmt.Errorf("failed to open extract file %s: %w", destName, err)
		}
	}
	for _, f := range links {
		src, err := f.Open()
		if err != nil {
			return err
		}
		buf, err := io.ReadAll(src)
		if err != nil {
			return err
		}
		link := string(buf)
		// NOTE(review): link[0] panics on an empty symlink body (see DoUpgrade).
		if link[0] == '/' {
			return fmt.Errorf("bundle contains absolute symlink %s -> %s", f.Name, link)
		}
		if strings.HasPrefix(filepath.Join(filepath.Dir(f.Name), link), "..") {
			return fmt.Errorf("bundle contains link outside of contents %s -> %s", f.Name, link)
		}
		if err = os.Symlink(link, filepath.Join(dir, f.Name)); err != nil {
			return err
		}
	}
	if err := verifyExtractedBundle(filepath.Join(dir, "Ollama.app")); err != nil {
		return fmt.Errorf("signature verification failed: %s", err)
	}
	return nil
}

// If we detect an upgrade bundle, attempt to upgrade at startup
func DoUpgradeAtStartup() error {
	bundle := getStagedUpdate()
	if bundle == "" {
		return fmt.Errorf("failed to lookup downloads")
	}
	if BundlePath == "" {
		return fmt.Errorf("unable to upgrade at startup, app in development mode")
	}
	// [Re]verify before proceeding
	if err := VerifyDownload(); err != nil {
		// Discard a bundle that fails verification rather than retrying forever.
		_ = os.Remove(bundle)
		slog.Warn("verification failure", "bundle", bundle, "error", err)
		return nil
	}
	slog.Info("performing update at startup", "bundle", bundle)
	return DoUpgrade(false)
}

// getStagedUpdate returns the path of the downloaded update zip in
// UpdateStageDir, or "" when none is staged.
func getStagedUpdate() string {
	files, err := filepath.Glob(filepath.Join(UpdateStageDir, "*", "*.zip"))
	if err != nil {
		slog.Debug("failed to lookup downloads", "error", err)
		return ""
	}
	if len(files) == 0 {
		return ""
	} else if len(files) > 1 {
		// Shouldn't happen
		slog.Warn("multiple update downloads found, using first one", "bundles", files)
	}
	return files[0]
}

// IsUpdatePending reports whether a downloaded update is staged and waiting.
func IsUpdatePending() bool {
	return getStagedUpdate() != ""
}

// chownWithAuthorization asks macOS (via the native helper) to change
// ownership of the app bundle to the given user, prompting for
// authorization; returns true on success.
func chownWithAuthorization(user string) bool {
	u := C.CString(user)
	defer C.free(unsafe.Pointer(u))
	return (bool)(C.chownWithAuthorization(u))
}

// verifyExtractedBundle runs the native code-signature check on the bundle
// at path; a nil C string result means verification passed.
func verifyExtractedBundle(path string) error {
	p := C.CString(path)
	defer C.free(unsafe.Pointer(p))
	resp := C.verifyExtractedBundle(p)
	if resp == nil {
		return nil
	}
	return errors.New(C.GoString(resp))
}

// goLogInfo lets the native helper log through Go's slog at info level.
//
//export goLogInfo
func goLogInfo(msg *C.cchar_t) {
	slog.Info(C.GoString(msg))
}

// goLogDebug lets the native helper log through Go's slog at debug level.
//
//export goLogDebug
func goLogDebug(msg *C.cchar_t) {
	slog.Debug(C.GoString(msg))
}

// alreadyMoved returns the installed bundle path when the currently running
// executable is (by inode identity) one of the "Ollama*.app" copies in
// /Applications, honoring a "keep both" rename; "" otherwise.
func alreadyMoved() string {
	// Respect users intent if they chose "keep" vs. "replace" when dragging to Applications
	installedAppPaths, err := filepath.Glob(filepath.Join(
		strings.TrimSuffix(SystemWidePath, filepath.Ext(SystemWidePath))+"*"+filepath.Ext(SystemWidePath),
		"Contents", "MacOS", "Ollama"))
	if err != nil {
		slog.Warn("failed to lookup installed app paths", "error", err)
		return ""
	}
	exe, err := os.Executable()
	if err != nil {
		slog.Warn("failed to resolve executable", "error", err)
		return ""
	}
	self, err := os.Stat(exe)
	if err != nil {
		slog.Warn("failed to stat running executable", "path", exe, "error", err)
		return ""
	}
	selfSys := self.Sys().(*syscall.Stat_t)
	for _, installedAppPath := range installedAppPaths {
		app, err := os.Stat(installedAppPath)
		if err != nil {
			slog.Debug("failed to stat installed app path", "path", installedAppPath, "error", err)
			continue
		}
		appSys := app.Sys().(*syscall.Stat_t)
		// Inode equality means the running binary IS the installed one.
		if appSys.Ino == selfSys.Ino {
			return filepath.Dir(filepath.Dir(filepath.Dir(installedAppPath)))
		}
	}
	return ""
}
go
MIT
626af2d80973270c4d59b8df7153ac47ad67ed7b
2026-01-07T08:35:43.337630Z
false
ollama/ollama
https://github.com/ollama/ollama/blob/626af2d80973270c4d59b8df7153ac47ad67ed7b/app/updater/updater_darwin_test.go
app/updater/updater_darwin_test.go
package updater import ( "archive/zip" "io/fs" "os" "path/filepath" "strings" "testing" ) func TestDoUpgrade(t *testing.T) { tmpDir := t.TempDir() BundlePath = filepath.Join(tmpDir, "Ollama.app") appContents := filepath.Join(BundlePath, "Contents") appBackupDir = filepath.Join(tmpDir, "backup") appContentsOld := filepath.Join(appBackupDir, "Ollama.app", "Contents") UpdateStageDir = filepath.Join(tmpDir, "updates") UpgradeMarkerFile = filepath.Join(tmpDir, "upgraded") bundle := filepath.Join(UpdateStageDir, "foo", "ollama-darwin.zip") err := os.MkdirAll(filepath.Join(appContents, "MacOS"), 0o755) if err != nil { t.Fatal("failed to create empty dirs") } err = os.MkdirAll(filepath.Join(BundlePath, "Contents", "Resources"), 0o755) if err != nil { t.Fatal("failed to create empty dirs") } err = os.MkdirAll(filepath.Dir(bundle), 0o755) if err != nil { t.Fatal("failed to create empty dirs") } // No update file, simple failure scenario if err := DoUpgrade(false); err == nil { t.Fatal("expected failure without download") } else if !strings.Contains(err.Error(), "failed to lookup downloads") { t.Fatalf("unexpected error: %s", err.Error()) } // Start with an unreadable zip file if err := os.WriteFile(bundle, []byte{0x4b, 0x50, 0x40, 0x03, 0x00, 0x0a, 0x00}, 0o755); err != nil { t.Fatalf("failed to create intentionally corrupt zip file: %s", err) } if err := DoUpgrade(false); err == nil { t.Fatal("expected failure with corrupt zip file") } else if !strings.Contains(err.Error(), "unable to open upgrade bundle") { t.Fatalf("unexpected error with corrupt zip file: %s", err) } // Generate valid (partial) zip file for remaining scenarios if err := zipCreationHelper(bundle, []testPayload{ { Name: "Ollama.app/Contents/MacOS/Ollama", Body: []byte("would be app binary"), }, { Name: "Ollama.app/Contents/Resources/ollama", Body: []byte("would be the cli"), }, { Name: "Ollama.app/Contents/Resources/dummy", Body: []byte("./ollama"), Mode: os.ModeSymlink, }, }); err != nil { t.Fatal(err) } 
// Permission failure on rename if err := os.Chmod(BundlePath, 0o500); err != nil { t.Fatal("failed to remove write permission") } if err := DoUpgrade(false); err == nil { t.Fatal("expected failure with no permission to rename Contents") } else if !strings.Contains(err.Error(), "permission problems") { t.Fatalf("unexpected error with permission failure: %s", err) } if err := os.Chmod(BundlePath, 0o755); err != nil { t.Fatal("failed to restore write permission") } // Prior failed upgrade if err := os.MkdirAll(appContentsOld, 0o755); err != nil { t.Fatal("failed to create empty dirs") } if err := DoUpgrade(false); err == nil { t.Fatal("expected failure with old contents existing") } else if !strings.Contains(err.Error(), "prior upgrade failed") { t.Fatalf("unexpected error with old contents: %s", err) } if err := os.RemoveAll(appBackupDir); err != nil { t.Fatal("failed to cleanup dir") } // TODO - a failure mode where we revert the backup // Happy path if err := DoUpgrade(false); err != nil { t.Fatalf("unexpected error with clean setup: %s", err) } if _, err := os.Stat(appContentsOld); err != nil { t.Fatalf("missing %s", appContentsOld) } if _, err := os.Stat(UpgradeMarkerFile); err != nil { t.Fatalf("missing marker %s", UpgradeMarkerFile) } if _, err := os.Stat(filepath.Join(BundlePath, "Contents", "MacOS", "Ollama")); err != nil { t.Fatalf("missing new App") } if _, err := os.Stat(filepath.Join(BundlePath, "Contents", "Resources", "ollama")); err != nil { t.Fatalf("missing new cli") } // Cleanup before next attempt if err := DoPostUpgradeCleanup(); err != nil { t.Fatal("failed to cleanup dir") } err = os.MkdirAll(filepath.Dir(bundle), 0o755) if err != nil { t.Fatal("failed to create empty dirs") } // Zip file with one corrupt file within to trigger a rollback if err := os.WriteFile(bundle, corruptZipData, 0o755); err != nil { t.Fatalf("failed to create intentionally corrupt zip file: %s", err) } if err := DoUpgrade(false); err == nil { t.Fatal("expected failure 
with corrupt zip file") } else if !strings.Contains(err.Error(), "failed to open bundle file") { t.Fatalf("unexpected error with corrupt zip file: %s", err) } // Make sure things were restored on partial failure if _, err := os.Stat(appContents); err != nil { t.Fatalf("missing %s", appContents) } if _, err := os.Stat(appContentsOld); err == nil { t.Fatal("old contents still exists") } if _, err := os.Stat(filepath.Join(BundlePath, "Contents", "MacOS", "Ollama")); err != nil { t.Fatalf("missing old App") } if _, err := os.Stat(filepath.Join(BundlePath, "Contents", "Resources", "ollama")); err != nil { t.Fatalf("missing old cli") } } func TestDoUpgradeAtStartup(t *testing.T) { tmpDir := t.TempDir() BundlePath = filepath.Join(tmpDir, "Ollama.app") appBackupDir = filepath.Join(tmpDir, "backup") UpdateStageDir = filepath.Join(tmpDir, "updates") UpgradeMarkerFile = filepath.Join(tmpDir, "upgraded") bundle := filepath.Join(UpdateStageDir, "foo", "ollama-darwin.zip") if err := DoUpgradeAtStartup(); err == nil { t.Fatal("expected failure without download") } else if !strings.Contains(err.Error(), "failed to lookup downloads") { t.Fatalf("unexpected error: %s", err.Error()) } if err := os.MkdirAll(filepath.Dir(bundle), 0o755); err != nil { t.Fatal("failed to create empty dirs") } if err := zipCreationHelper(bundle, []testPayload{ { Name: "Ollama.app/Contents/MacOS/Ollama", Body: []byte("would be app binary"), }, { Name: "Ollama.app/Contents/Resources/ollama", Body: []byte("would be the cli"), }, { Name: "Ollama.app/Contents/Resources/dummy", Body: []byte("./ollama"), Mode: os.ModeSymlink, }, }); err != nil { t.Fatal(err) } if err := DoUpgradeAtStartup(); err != nil { t.Fatalf("unexpected error with verification failure: %s", err) } if _, err := os.Stat(bundle); err == nil { t.Fatalf("unverified bundle still exists %s", bundle) } } func TestVerifyDownloadFailures(t *testing.T) { tmpDir := t.TempDir() BundlePath = filepath.Join(tmpDir, "Ollama.app") UpdateStageDir = 
filepath.Join(tmpDir, "staging") bundle := filepath.Join(UpdateStageDir, "foo", "ollama-darwin.zip") if err := os.MkdirAll(filepath.Dir(bundle), 0o755); err != nil { t.Fatal("failed to create empty dirs") } tests := []struct { n string in []testPayload expected string }{ {"breakout", []testPayload{ { Name: "Ollama.app/", Body: []byte{}, }, { Name: "Ollama.app/Resources/ollama", Body: []byte("cli payload here"), }, { Name: "Ollama.app/Contents/MacOS/Ollama", Body: []byte("../../../../breakout"), Mode: os.ModeSymlink, }, }, "bundle contains link outside"}, {"absolute", []testPayload{{ Name: "Ollama.app/Contents/MacOS/Ollama", Body: []byte("/etc/foo"), Mode: os.ModeSymlink, }}, "bundle contains absolute"}, {"missing", []testPayload{{ Name: "Ollama.app/Contents/MacOS/Ollama", Body: []byte("../nothere"), Mode: os.ModeSymlink, }}, "no such file or directory"}, {"unsigned", []testPayload{{ Name: "Ollama.app/Contents/MacOS/Ollama", Body: []byte{0xfa, 0xcf, 0xfe, 0xed, 0x00, 0x0c, 0x01, 0x00}, }}, "signature verification failed"}, } for _, tt := range tests { t.Run(tt.n, func(t *testing.T) { _ = os.Remove(bundle) if err := zipCreationHelper(bundle, tt.in); err != nil { t.Fatal(err) } err := VerifyDownload() if err == nil || !strings.Contains(err.Error(), tt.expected) { t.Fatalf("expected \"%s\" got %s", tt.expected, err) } }) } } // One file has been corrupted to cause a checksum mismatch var corruptZipData = []byte{0x50, 0x4b, 0x3, 0x4, 0xa, 0x0, 0x0, 0x0, 0x0, 0x0, 0xed, 0x7e, 0x8f, 0x59, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0xb, 0x0, 0x1c, 0x0, 0x4f, 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x2e, 0x61, 0x70, 0x70, 0x2f, 0x55, 0x54, 0x9, 0x0, 0x3, 0x6d, 0x6c, 0x5f, 0x67, 0x6e, 0x6c, 0x5f, 0x67, 0x75, 0x78, 0xb, 0x0, 0x1, 0x4, 0xf5, 0x1, 0x0, 0x0, 0x4, 0x14, 0x0, 0x0, 0x0, 0x50, 0x4b, 0x3, 0x4, 0xa, 0x0, 0x0, 0x0, 0x0, 0x0, 0xd8, 0x7e, 0x8f, 0x59, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x14, 0x0, 0x1c, 0x0, 0x4f, 0x6c, 0x6c, 0x61, 0x6d, 
0x61, 0x2e, 0x61, 0x70, 0x70, 0x2f, 0x43, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x73, 0x2f, 0x55, 0x54, 0x9, 0x0, 0x3, 0x48, 0x6c, 0x5f, 0x67, 0x58, 0x6c, 0x5f, 0x67, 0x75, 0x78, 0xb, 0x0, 0x1, 0x4, 0xf5, 0x1, 0x0, 0x0, 0x4, 0x14, 0x0, 0x0, 0x0, 0x50, 0x4b, 0x3, 0x4, 0xa, 0x0, 0x0, 0x0, 0x0, 0x0, 0xe3, 0x7e, 0x8f, 0x59, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1a, 0x0, 0x1c, 0x0, 0x4f, 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x2e, 0x61, 0x70, 0x70, 0x2f, 0x43, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x73, 0x2f, 0x4d, 0x61, 0x63, 0x4f, 0x53, 0x2f, 0x55, 0x54, 0x9, 0x0, 0x3, 0x59, 0x6c, 0x5f, 0x67, 0x9f, 0x6c, 0x5f, 0x67, 0x75, 0x78, 0xb, 0x0, 0x1, 0x4, 0xf5, 0x1, 0x0, 0x0, 0x4, 0x14, 0x0, 0x0, 0x0, 0x50, 0x4b, 0x3, 0x4, 0xa, 0x0, 0x0, 0x0, 0x0, 0x0, 0xe3, 0x7e, 0x8f, 0x59, 0xe3, 0x6, 0x15, 0x70, 0x14, 0x0, 0x0, 0x0, 0x14, 0x0, 0x0, 0x0, 0x20, 0x0, 0x1c, 0x0, 0x4f, 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x2e, 0x61, 0x70, 0x70, 0x2f, 0x43, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x73, 0x2f, 0x4d, 0x61, 0x63, 0x4f, 0x53, 0x2f, 0x4f, 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x55, 0x54, 0x9, 0x0, 0x3, 0x59, 0x6c, 0x5f, 0x67, 0x83, 0x6c, 0x5f, 0x67, 0x75, 0x78, 0xb, 0x0, 0x1, 0x4, 0xf5, 0x1, 0x0, 0x0, 0x4, 0x14, 0x0, 0x0, 0x0, 0x43, 0x4f, 0x52, 0x52, 0x55, 0x50, 0x54, 0xa, 0x50, 0x4b, 0x3, 0x4, 0xa, 0x0, 0x0, 0x0, 0x0, 0x0, 0xe9, 0x7e, 0x8f, 0x59, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1e, 0x0, 0x1c, 0x0, 0x4f, 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x2e, 0x61, 0x70, 0x70, 0x2f, 0x43, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x73, 0x2f, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x2f, 0x55, 0x54, 0x9, 0x0, 0x3, 0x66, 0x6c, 0x5f, 0x67, 0x83, 0x6c, 0x5f, 0x67, 0x75, 0x78, 0xb, 0x0, 0x1, 0x4, 0xf5, 0x1, 0x0, 0x0, 0x4, 0x14, 0x0, 0x0, 0x0, 0x50, 0x4b, 0x3, 0x4, 0xa, 0x0, 0x0, 0x0, 0x0, 0x0, 0xe9, 0x7e, 0x8f, 0x59, 0x19, 0xa5, 0x62, 0xf7, 0x11, 0x0, 0x0, 0x0, 0x11, 0x0, 0x0, 0x0, 0x24, 0x0, 0x1c, 0x0, 0x4f, 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x2e, 0x61, 0x70, 0x70, 0x2f, 0x43, 
0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x73, 0x2f, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x2f, 0x6f, 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x55, 0x54, 0x9, 0x0, 0x3, 0x66, 0x6c, 0x5f, 0x67, 0x66, 0x6c, 0x5f, 0x67, 0x75, 0x78, 0xb, 0x0, 0x1, 0x4, 0xf5, 0x1, 0x0, 0x0, 0x4, 0x14, 0x0, 0x0, 0x0, 0x77, 0x6f, 0x75, 0x6c, 0x64, 0x20, 0x62, 0x65, 0x20, 0x74, 0x68, 0x65, 0x20, 0x63, 0x6c, 0x69, 0xa, 0x50, 0x4b, 0x1, 0x2, 0x1e, 0x3, 0xa, 0x0, 0x0, 0x0, 0x0, 0x0, 0xed, 0x7e, 0x8f, 0x59, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0xb, 0x0, 0x18, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x10, 0x0, 0xed, 0x41, 0x0, 0x0, 0x0, 0x0, 0x4f, 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x2e, 0x61, 0x70, 0x70, 0x2f, 0x55, 0x54, 0x5, 0x0, 0x3, 0x6d, 0x6c, 0x5f, 0x67, 0x75, 0x78, 0xb, 0x0, 0x1, 0x4, 0xf5, 0x1, 0x0, 0x0, 0x4, 0x14, 0x0, 0x0, 0x0, 0x50, 0x4b, 0x1, 0x2, 0x1e, 0x3, 0xa, 0x0, 0x0, 0x0, 0x0, 0x0, 0xd8, 0x7e, 0x8f, 0x59, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x14, 0x0, 0x18, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x10, 0x0, 0xed, 0x41, 0x45, 0x0, 0x0, 0x0, 0x4f, 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x2e, 0x61, 0x70, 0x70, 0x2f, 0x43, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x73, 0x2f, 0x55, 0x54, 0x5, 0x0, 0x3, 0x48, 0x6c, 0x5f, 0x67, 0x75, 0x78, 0xb, 0x0, 0x1, 0x4, 0xf5, 0x1, 0x0, 0x0, 0x4, 0x14, 0x0, 0x0, 0x0, 0x50, 0x4b, 0x1, 0x2, 0x1e, 0x3, 0xa, 0x0, 0x0, 0x0, 0x0, 0x0, 0xe3, 0x7e, 0x8f, 0x59, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1a, 0x0, 0x18, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x10, 0x0, 0xed, 0x41, 0x93, 0x0, 0x0, 0x0, 0x4f, 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x2e, 0x61, 0x70, 0x70, 0x2f, 0x43, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x73, 0x2f, 0x4d, 0x61, 0x63, 0x4f, 0x53, 0x2f, 0x55, 0x54, 0x5, 0x0, 0x3, 0x59, 0x6c, 0x5f, 0x67, 0x75, 0x78, 0xb, 0x0, 0x1, 0x4, 0xf5, 0x1, 0x0, 0x0, 0x4, 0x14, 0x0, 0x0, 0x0, 0x50, 0x4b, 0x1, 0x2, 0x1e, 0x3, 0xa, 0x0, 0x0, 0x0, 0x0, 0x0, 0xe3, 0x7e, 0x8f, 0x59, 0xe3, 0x6, 0x15, 0x70, 0x14, 0x0, 0x0, 0x0, 0x14, 0x0, 
0x0, 0x0, 0x20, 0x0, 0x18, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0xa4, 0x81, 0xe7, 0x0, 0x0, 0x0, 0x4f, 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x2e, 0x61, 0x70, 0x70, 0x2f, 0x43, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x73, 0x2f, 0x4d, 0x61, 0x63, 0x4f, 0x53, 0x2f, 0x4f, 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x55, 0x54, 0x5, 0x0, 0x3, 0x59, 0x6c, 0x5f, 0x67, 0x75, 0x78, 0xb, 0x0, 0x1, 0x4, 0xf5, 0x1, 0x0, 0x0, 0x4, 0x14, 0x0, 0x0, 0x0, 0x50, 0x4b, 0x1, 0x2, 0x1e, 0x3, 0xa, 0x0, 0x0, 0x0, 0x0, 0x0, 0xe9, 0x7e, 0x8f, 0x59, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1e, 0x0, 0x18, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x10, 0x0, 0xed, 0x41, 0x55, 0x1, 0x0, 0x0, 0x4f, 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x2e, 0x61, 0x70, 0x70, 0x2f, 0x43, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x73, 0x2f, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x2f, 0x55, 0x54, 0x5, 0x0, 0x3, 0x66, 0x6c, 0x5f, 0x67, 0x75, 0x78, 0xb, 0x0, 0x1, 0x4, 0xf5, 0x1, 0x0, 0x0, 0x4, 0x14, 0x0, 0x0, 0x0, 0x50, 0x4b, 0x1, 0x2, 0x1e, 0x3, 0xa, 0x0, 0x0, 0x0, 0x0, 0x0, 0xe9, 0x7e, 0x8f, 0x59, 0x19, 0xa5, 0x62, 0xf7, 0x11, 0x0, 0x0, 0x0, 0x11, 0x0, 0x0, 0x0, 0x24, 0x0, 0x18, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0xa4, 0x81, 0xad, 0x1, 0x0, 0x0, 0x4f, 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x2e, 0x61, 0x70, 0x70, 0x2f, 0x43, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x73, 0x2f, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x2f, 0x6f, 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x55, 0x54, 0x5, 0x0, 0x3, 0x66, 0x6c, 0x5f, 0x67, 0x75, 0x78, 0xb, 0x0, 0x1, 0x4, 0xf5, 0x1, 0x0, 0x0, 0x4, 0x14, 0x0, 0x0, 0x0, 0x50, 0x4b, 0x5, 0x6, 0x0, 0x0, 0x0, 0x0, 0x6, 0x0, 0x6, 0x0, 0x3f, 0x2, 0x0, 0x0, 0x1c, 0x2, 0x0, 0x0, 0x0, 0x0} type testPayload struct { Name string Body []byte Mode fs.FileMode } func zipCreationHelper(filename string, files []testPayload) error { fd, err := os.Create(filename) if err != nil { return err } w := zip.NewWriter(fd) for _, file := range files { fh := &zip.FileHeader{ Name: file.Name, Flags: 0, } if 
file.Mode != 0 { fh.SetMode(file.Mode) } f, err := w.CreateHeader(fh) if err != nil { return err } _, err = f.Write(file.Body) if err != nil { return err } } return w.Close() } func TestAlreadyMoved(t *testing.T) { oldPath := SystemWidePath defer func() { SystemWidePath = oldPath }() exe, err := os.Executable() if err != nil { t.Fatal("failed to find executable path") } tmpDir := t.TempDir() testApp := filepath.Join(tmpDir, "Ollama.app") err = os.MkdirAll(filepath.Join(testApp, "Contents", "MacOS"), 0o755) if err != nil { t.Fatal("failed to create Contents dir") } SystemWidePath = testApp testBinary := filepath.Join(testApp, "Contents", "MacOS", "Ollama") if err := os.Symlink(exe, testBinary); err != nil { t.Fatalf("failed to create symlink to executable: %s", err) } bundle := alreadyMoved() if bundle != testApp { t.Fatalf("expected %s, got %s", testApp, bundle) } // "Keep scenario" testApp = filepath.Join(tmpDir, "Ollama 2.app") err = os.MkdirAll(filepath.Join(testApp, "Contents", "MacOS"), 0o755) if err != nil { t.Fatal("failed to create Contents dir") } testBinary = filepath.Join(testApp, "Contents", "MacOS", "Ollama") if err := os.Symlink(exe, testBinary); err != nil { t.Fatalf("failed to create symlink to executable: %s", err) } bundle = alreadyMoved() if bundle != testApp { t.Fatalf("expected %s, got %s", testApp, bundle) } }
go
MIT
626af2d80973270c4d59b8df7153ac47ad67ed7b
2026-01-07T08:35:43.337630Z
false
ollama/ollama
https://github.com/ollama/ollama/blob/626af2d80973270c4d59b8df7153ac47ad67ed7b/app/updater/updater_windows.go
app/updater/updater_windows.go
package updater import ( "errors" "fmt" "log/slog" "os" "os/exec" "path" "path/filepath" "strings" "syscall" "time" "unsafe" "golang.org/x/sys/windows" ) var runningInstaller string type OSVERSIONINFOEXW struct { dwOSVersionInfoSize uint32 dwMajorVersion uint32 dwMinorVersion uint32 dwBuildNumber uint32 dwPlatformId uint32 szCSDVersion [128]uint16 wServicePackMajor uint16 wServicePackMinor uint16 wSuiteMask uint16 wProductType uint8 wReserved uint8 } func init() { VerifyDownload = verifyDownload Installer = "Ollama-darwin.zip" localAppData := os.Getenv("LOCALAPPDATA") appDataDir := filepath.Join(localAppData, "Ollama") // Use a distinct update staging directory from the old desktop app // to avoid double upgrades on the transition UpdateStageDir = filepath.Join(appDataDir, "updates_v2") UpgradeLogFile = filepath.Join(appDataDir, "upgrade.log") Installer = "OllamaSetup.exe" runningInstaller = filepath.Join(appDataDir, Installer) UpgradeMarkerFile = filepath.Join(appDataDir, "upgraded") loadOSVersion() } func loadOSVersion() { UserAgentOS = "Windows" verInfo := OSVERSIONINFOEXW{} verInfo.dwOSVersionInfoSize = (uint32)(unsafe.Sizeof(verInfo)) ntdll, err := windows.LoadDLL("ntdll.dll") if err != nil { slog.Warn("unable to find ntdll", "error", err) return } defer ntdll.Release() pRtlGetVersion, err := ntdll.FindProc("RtlGetVersion") if err != nil { slog.Warn("unable to locate RtlGetVersion", "error", err) return } status, _, err := pRtlGetVersion.Call(uintptr(unsafe.Pointer(&verInfo))) if status < 0x80000000 { // Success or Informational // Note: Windows 11 reports 10.0.22000 or newer UserAgentOS = fmt.Sprintf("Windows/%d.%d.%d", verInfo.dwMajorVersion, verInfo.dwMinorVersion, verInfo.dwBuildNumber) } else { slog.Warn("unable to get OS version", "error", err) } } func getStagedUpdate() string { // When transitioning from old to new app, cleanup the update from the old staging dir // This can eventually be removed once enough time has passed since the transition 
cleanupOldDownloads(filepath.Join(os.Getenv("LOCALAPPDATA"), "Ollama", "updates")) files, err := filepath.Glob(filepath.Join(UpdateStageDir, "*", "*.exe")) if err != nil { slog.Debug("failed to lookup downloads", "error", err) return "" } if len(files) == 0 { return "" } else if len(files) > 1 { // Shouldn't happen slog.Warn("multiple update downloads found, using first one", "bundles", files) } return files[0] } func DoUpgrade(interactive bool) error { bundle := getStagedUpdate() if bundle == "" { return fmt.Errorf("failed to lookup downloads") } // We move the installer to ensure we don't race with multiple apps starting in quick succession if err := os.Rename(bundle, runningInstaller); err != nil { return fmt.Errorf("unable to rename %s -> %s : %w", bundle, runningInstaller, err) } slog.Info("upgrade log file " + UpgradeLogFile) // make the upgrade show progress, but non interactive installArgs := []string{ "/CLOSEAPPLICATIONS", // Quit the tray app if it's still running "/LOG=" + filepath.Base(UpgradeLogFile), // Only relative seems reliable, so set pwd "/FORCECLOSEAPPLICATIONS", // Force close the tray app - might be needed "/SP", // Skip the "This will install... Do you wish to continue" prompt "/NOCANCEL", // Disable the ability to cancel upgrade mid-flight to avoid partially installed upgrades "/SILENT", } if !interactive { // Add flags to make it totally silent without GUI installArgs = append(installArgs, "/VERYSILENT", "/SUPPRESSMSGBOXES") } slog.Info("starting upgrade", "installer", runningInstaller, "args", installArgs) os.Chdir(filepath.Dir(UpgradeLogFile)) //nolint:errcheck cmd := exec.Command(runningInstaller, installArgs...) if err := cmd.Start(); err != nil { return fmt.Errorf("unable to start ollama app %w", err) } if cmd.Process != nil { err := cmd.Process.Release() if err != nil { slog.Error(fmt.Sprintf("failed to release server process: %s", err)) } } else { // TODO - some details about why it didn't start, or is this a pedantic error case? 
return errors.New("installer process did not start") } // If the install fails to upgrade the system, and leaves a functional // app, this marker file will cause us to remove the staged upgrade // bundle, which will prevent trying again until we download again. // If this becomes looping a problem, we may need to look for failures // in the upgrade log in DoPostUpgradeCleanup and then not download // the same version again. f, err := os.OpenFile(UpgradeMarkerFile, os.O_RDONLY|os.O_CREATE, 0o666) if err != nil { slog.Warn("unable to create marker file", "file", UpgradeMarkerFile, "error", err) } f.Close() // TODO should we linger for a moment and check to make sure it's actually running by checking the pid? slog.Info("Installer started in background, exiting") os.Exit(0) // Not reached return nil } func DoPostUpgradeCleanup() error { cleanupOldDownloads(UpdateStageDir) err := os.Remove(UpgradeMarkerFile) if err != nil { slog.Warn("unable to clean up marker file", "marker", UpgradeMarkerFile, "error", err) } err = os.Remove(runningInstaller) if err != nil { slog.Debug("failed to remove running installer on first attempt, backgrounding...", "installer", runningInstaller, "error", err) go func() { for range 10 { time.Sleep(5 * time.Second) if err := os.Remove(runningInstaller); err == nil { slog.Debug("installer cleaned up") return } slog.Debug("failed to remove running installer on background attempt", "installer", runningInstaller, "error", err) } }() } return nil } func verifyDownload() error { return nil } func IsUpdatePending() bool { return getStagedUpdate() != "" } func DoUpgradeAtStartup() error { return DoUpgrade(false) } func isInstallerRunning() bool { return len(IsProcRunning(Installer)) > 0 } func IsProcRunning(procName string) []uint32 { pids := make([]uint32, 2048) var ret uint32 if err := windows.EnumProcesses(pids, &ret); err != nil || ret == 0 { slog.Debug("failed to check for running installers", "error", err) return nil } pids = pids[:ret] matches 
:= []uint32{} for _, pid := range pids { if pid == 0 { continue } hProcess, err := windows.OpenProcess(windows.PROCESS_QUERY_INFORMATION|windows.PROCESS_VM_READ, false, pid) if err != nil { continue } defer windows.CloseHandle(hProcess) var module windows.Handle var cbNeeded uint32 cb := (uint32)(unsafe.Sizeof(module)) if err := windows.EnumProcessModules(hProcess, &module, cb, &cbNeeded); err != nil { continue } var sz uint32 = 1024 * 8 moduleName := make([]uint16, sz) cb = uint32(len(moduleName)) * (uint32)(unsafe.Sizeof(uint16(0))) if err := windows.GetModuleBaseName(hProcess, module, &moduleName[0], cb); err != nil && err != syscall.ERROR_INSUFFICIENT_BUFFER { continue } exeFile := path.Base(strings.ToLower(syscall.UTF16ToString(moduleName))) if strings.EqualFold(exeFile, procName) { matches = append(matches, pid) } } return matches }
go
MIT
626af2d80973270c4d59b8df7153ac47ad67ed7b
2026-01-07T08:35:43.337630Z
false
ollama/ollama
https://github.com/ollama/ollama/blob/626af2d80973270c4d59b8df7153ac47ad67ed7b/app/updater/updater_windows_test.go
app/updater/updater_windows_test.go
//go:build windows || darwin package updater import ( "log/slog" "testing" ) func TestIsInstallerRunning(t *testing.T) { slog.SetLogLoggerLevel(slog.LevelDebug) Installer = "go.exe" if !isInstallerRunning() { t.Fatal("not running") } }
go
MIT
626af2d80973270c4d59b8df7153ac47ad67ed7b
2026-01-07T08:35:43.337630Z
false
ollama/ollama
https://github.com/ollama/ollama/blob/626af2d80973270c4d59b8df7153ac47ad67ed7b/app/updater/updater_test.go
app/updater/updater_test.go
//go:build windows || darwin package updater import ( "archive/zip" "bytes" "context" "fmt" "io" "log/slog" "net/http" "net/http/httptest" "testing" "time" "github.com/ollama/ollama/app/store" ) func TestIsNewReleaseAvailable(t *testing.T) { slog.SetLogLoggerLevel(slog.LevelDebug) var server *httptest.Server server = httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { if r.URL.Path == "/update.json" { w.Write([]byte( fmt.Sprintf(`{"version": "9.9.9", "url": "%s"}`, server.URL+"/9.9.9/"+Installer))) // TODO - wire up the redirects to mimic real behavior } else { slog.Debug("unexpected request", "url", r.URL) } })) defer server.Close() slog.Debug("server", "url", server.URL) updater := &Updater{Store: &store.Store{}} defer updater.Store.Close() // Ensure database is closed UpdateCheckURLBase = server.URL + "/update.json" updatePresent, resp := updater.checkForUpdate(t.Context()) if !updatePresent { t.Fatal("expected update to be available") } if resp.UpdateVersion != "9.9.9" { t.Fatal("unexpected response", "url", resp.UpdateURL, "version", resp.UpdateVersion) } } func TestBackgoundChecker(t *testing.T) { UpdateStageDir = t.TempDir() haveUpdate := false verified := false done := make(chan int) cb := func(ver string) error { haveUpdate = true done <- 0 return nil } stallTimer := time.NewTimer(5 * time.Second) ctx, cancel := context.WithCancel(t.Context()) defer cancel() UpdateCheckInitialDelay = 5 * time.Millisecond UpdateCheckInterval = 5 * time.Millisecond VerifyDownload = func() error { verified = true return nil } var server *httptest.Server server = httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { if r.URL.Path == "/update.json" { w.Write([]byte( fmt.Sprintf(`{"version": "9.9.9", "url": "%s"}`, server.URL+"/9.9.9/"+Installer))) // TODO - wire up the redirects to mimic real behavior } else if r.URL.Path == "/9.9.9/"+Installer { buf := &bytes.Buffer{} zw := zip.NewWriter(buf) zw.Close() io.Copy(w, 
buf) } else { slog.Debug("unexpected request", "url", r.URL) } })) defer server.Close() UpdateCheckURLBase = server.URL + "/update.json" updater := &Updater{Store: &store.Store{}} defer updater.Store.Close() // Ensure database is closed updater.StartBackgroundUpdaterChecker(ctx, cb) select { case <-stallTimer.C: t.Fatal("stalled") case <-done: if !haveUpdate { t.Fatal("no update received") } if !verified { t.Fatal("unverified") } } }
go
MIT
626af2d80973270c4d59b8df7153ac47ad67ed7b
2026-01-07T08:35:43.337630Z
false
ollama/ollama
https://github.com/ollama/ollama/blob/626af2d80973270c4d59b8df7153ac47ad67ed7b/app/updater/updater.go
app/updater/updater.go
//go:build windows || darwin package updater import ( "context" "crypto/rand" "encoding/json" "errors" "fmt" "io" "log/slog" "mime" "net/http" "net/url" "os" "path" "path/filepath" "runtime" "strconv" "strings" "time" "github.com/ollama/ollama/app/store" "github.com/ollama/ollama/app/version" "github.com/ollama/ollama/auth" ) var ( UpdateCheckURLBase = "https://ollama.com/api/update" UpdateDownloaded = false UpdateCheckInterval = 60 * 60 * time.Second UpdateCheckInitialDelay = 3 * time.Second // 30 * time.Second UpdateStageDir string UpgradeLogFile string UpgradeMarkerFile string Installer string UserAgentOS string VerifyDownload func() error ) // TODO - maybe move up to the API package? type UpdateResponse struct { UpdateURL string `json:"url"` UpdateVersion string `json:"version"` } func (u *Updater) checkForUpdate(ctx context.Context) (bool, UpdateResponse) { var updateResp UpdateResponse requestURL, err := url.Parse(UpdateCheckURLBase) if err != nil { return false, updateResp } query := requestURL.Query() query.Add("os", runtime.GOOS) query.Add("arch", runtime.GOARCH) query.Add("version", version.Version) query.Add("ts", strconv.FormatInt(time.Now().Unix(), 10)) // The original macOS app used to use the device ID // to check for updates so include it if present if runtime.GOOS == "darwin" { if id, err := u.Store.ID(); err == nil && id != "" { query.Add("id", id) } } var signature string nonce, err := auth.NewNonce(rand.Reader, 16) if err != nil { // Don't sign if we haven't yet generated a key pair for the server slog.Debug("unable to generate nonce for update check request", "error", err) } else { query.Add("nonce", nonce) requestURL.RawQuery = query.Encode() data := []byte(fmt.Sprintf("%s,%s", http.MethodGet, requestURL.RequestURI())) signature, err = auth.Sign(ctx, data) if err != nil { slog.Debug("unable to generate signature for update check request", "error", err) } } req, err := http.NewRequestWithContext(ctx, http.MethodGet, requestURL.String(), nil) if 
err != nil { slog.Warn(fmt.Sprintf("failed to check for update: %s", err)) return false, updateResp } if signature != "" { req.Header.Set("Authorization", signature) } ua := fmt.Sprintf("ollama/%s %s Go/%s %s", version.Version, runtime.GOARCH, runtime.Version(), UserAgentOS) req.Header.Set("User-Agent", ua) slog.Debug("checking for available update", "requestURL", requestURL, "User-Agent", ua) resp, err := http.DefaultClient.Do(req) if err != nil { slog.Warn(fmt.Sprintf("failed to check for update: %s", err)) return false, updateResp } defer resp.Body.Close() if resp.StatusCode == http.StatusNoContent { slog.Debug("check update response 204 (current version is up to date)") return false, updateResp } body, err := io.ReadAll(resp.Body) if err != nil { slog.Warn(fmt.Sprintf("failed to read body response: %s", err)) } if resp.StatusCode != http.StatusOK { slog.Info(fmt.Sprintf("check update error %d - %.96s", resp.StatusCode, string(body))) return false, updateResp } err = json.Unmarshal(body, &updateResp) if err != nil { slog.Warn(fmt.Sprintf("malformed response checking for update: %s", err)) return false, updateResp } // Extract the version string from the URL in the github release artifact path updateResp.UpdateVersion = path.Base(path.Dir(updateResp.UpdateURL)) slog.Info("New update available at " + updateResp.UpdateURL) return true, updateResp } func (u *Updater) DownloadNewRelease(ctx context.Context, updateResp UpdateResponse) error { // Do a head first to check etag info req, err := http.NewRequestWithContext(ctx, http.MethodHead, updateResp.UpdateURL, nil) if err != nil { return err } // In case of slow downloads, continue the update check in the background bgctx, cancel := context.WithCancel(ctx) defer cancel() go func() { for { select { case <-bgctx.Done(): return case <-time.After(UpdateCheckInterval): u.checkForUpdate(bgctx) } } }() resp, err := http.DefaultClient.Do(req) if err != nil { return fmt.Errorf("error checking update: %w", err) } if 
resp.StatusCode != http.StatusOK { return fmt.Errorf("unexpected status attempting to download update %d", resp.StatusCode) } resp.Body.Close() etag := strings.Trim(resp.Header.Get("etag"), "\"") if etag == "" { slog.Debug("no etag detected, falling back to filename based dedup") etag = "_" } filename := Installer _, params, err := mime.ParseMediaType(resp.Header.Get("content-disposition")) if err == nil { filename = params["filename"] } stageFilename := filepath.Join(UpdateStageDir, etag, filename) // Check to see if we already have it downloaded _, err = os.Stat(stageFilename) if err == nil { slog.Info("update already downloaded", "bundle", stageFilename) return nil } cleanupOldDownloads(UpdateStageDir) req.Method = http.MethodGet resp, err = http.DefaultClient.Do(req) if err != nil { return fmt.Errorf("error checking update: %w", err) } defer resp.Body.Close() etag = strings.Trim(resp.Header.Get("etag"), "\"") if etag == "" { slog.Debug("no etag detected, falling back to filename based dedup") // TODO probably can get rid of this redundant log etag = "_" } stageFilename = filepath.Join(UpdateStageDir, etag, filename) _, err = os.Stat(filepath.Dir(stageFilename)) if errors.Is(err, os.ErrNotExist) { if err := os.MkdirAll(filepath.Dir(stageFilename), 0o755); err != nil { return fmt.Errorf("create ollama dir %s: %v", filepath.Dir(stageFilename), err) } } payload, err := io.ReadAll(resp.Body) if err != nil { return fmt.Errorf("failed to read body response: %w", err) } fp, err := os.OpenFile(stageFilename, os.O_WRONLY|os.O_CREATE|os.O_TRUNC, 0o755) if err != nil { return fmt.Errorf("write payload %s: %w", stageFilename, err) } defer fp.Close() if n, err := fp.Write(payload); err != nil || n != len(payload) { return fmt.Errorf("write payload %s: %d vs %d -- %w", stageFilename, n, len(payload), err) } slog.Info("new update downloaded " + stageFilename) if err := VerifyDownload(); err != nil { _ = os.Remove(stageFilename) return fmt.Errorf("%s - %s", 
resp.Request.URL.String(), err) } UpdateDownloaded = true return nil } func cleanupOldDownloads(stageDir string) { files, err := os.ReadDir(stageDir) if err != nil && errors.Is(err, os.ErrNotExist) { // Expected behavior on first run return } else if err != nil { slog.Warn(fmt.Sprintf("failed to list stage dir: %s", err)) return } for _, file := range files { fullname := filepath.Join(stageDir, file.Name()) slog.Debug("cleaning up old download: " + fullname) err = os.RemoveAll(fullname) if err != nil { slog.Warn(fmt.Sprintf("failed to cleanup stale update download %s", err)) } } } type Updater struct { Store *store.Store } func (u *Updater) StartBackgroundUpdaterChecker(ctx context.Context, cb func(string) error) { go func() { // Don't blast an update message immediately after startup time.Sleep(UpdateCheckInitialDelay) slog.Info("beginning update checker", "interval", UpdateCheckInterval) for { available, resp := u.checkForUpdate(ctx) if available { err := u.DownloadNewRelease(ctx, resp) if err != nil { slog.Error(fmt.Sprintf("failed to download new release: %s", err)) } else { err = cb(resp.UpdateVersion) if err != nil { slog.Warn(fmt.Sprintf("failed to register update available with tray: %s", err)) } } } select { case <-ctx.Done(): slog.Debug("stopping background update checker") return default: time.Sleep(UpdateCheckInterval) } } }() }
go
MIT
626af2d80973270c4d59b8df7153ac47ad67ed7b
2026-01-07T08:35:43.337630Z
false
ollama/ollama
https://github.com/ollama/ollama/blob/626af2d80973270c4d59b8df7153ac47ad67ed7b/app/webview/webview.go
app/webview/webview.go
//go:build windows || darwin /* * MIT License * * Copyright (c) 2017 Serge Zaitsev * Copyright (c) 2022 Steffen André Langnes * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. 
*/

// Package webview wraps the C/C++ webview library (Cocoa on darwin,
// Edge/WebView2 on windows) behind a small Go interface.
package webview

/*
#cgo CFLAGS: -I${SRCDIR}/libs/webview/include
#cgo CXXFLAGS: -I${SRCDIR}/libs/webview/include -DWEBVIEW_STATIC
#cgo darwin CXXFLAGS: -DWEBVIEW_COCOA -std=c++11
#cgo darwin LDFLAGS: -framework WebKit -ldl
#cgo windows CXXFLAGS: -DWEBVIEW_EDGE -std=c++14 -I${SRCDIR}/libs/mswebview2/include
#cgo windows LDFLAGS: -static -ladvapi32 -lole32 -lshell32 -lshlwapi -luser32 -lversion
#include "webview.h"
#include <stdlib.h>
#include <stdint.h>

void CgoWebViewDispatch(webview_t w, uintptr_t arg);
void CgoWebViewBind(webview_t w, const char *name, uintptr_t index);
void CgoWebViewUnbind(webview_t w, const char *name);
void webview_set_zoom(webview_t w, double level);
double webview_get_zoom(webview_t w);
*/
import "C"

import (
	"encoding/json"
	"errors"
	"reflect"
	"runtime"
	"sync"
	"unsafe"
)

func init() {
	// Ensure that main.main is called from the main thread
	runtime.LockOSThread()
}

// Hints are used to configure window sizing and resizing
type Hint int

const (
	// Width and height are default size
	HintNone = C.WEBVIEW_HINT_NONE

	// Window size can not be changed by a user
	HintFixed = C.WEBVIEW_HINT_FIXED

	// Width and height are minimum bounds
	HintMin = C.WEBVIEW_HINT_MIN

	// Width and height are maximum bounds
	HintMax = C.WEBVIEW_HINT_MAX
)

// WebView is the public surface of a native webview window.
type WebView interface {
	// Run runs the main loop until it's terminated. After this function exits -
	// you must destroy the webview.
	Run()

	// Terminate stops the main loop. It is safe to call this function from
	// a background thread.
	Terminate()

	// Dispatch posts a function to be executed on the main thread. You normally
	// do not need to call this function, unless you want to tweak the native
	// window.
	Dispatch(f func())

	// Destroy destroys a webview and closes the native window.
	Destroy()

	// Window returns a native window handle pointer. When using GTK backend the
	// pointer is GtkWindow pointer, when using Cocoa backend the pointer is
	// NSWindow pointer, when using Win32 backend the pointer is HWND pointer.
	Window() unsafe.Pointer

	// SetTitle updates the title of the native window. Must be called from the UI
	// thread.
	SetTitle(title string)

	// SetSize updates native window size. See Hint constants.
	SetSize(w int, h int, hint Hint)

	// Navigate navigates webview to the given URL. URL may be a properly encoded data.
	// URI. Examples:
	// w.Navigate("https://github.com/webview/webview")
	// w.Navigate("data:text/html,%3Ch1%3EHello%3C%2Fh1%3E")
	// w.Navigate("data:text/html;base64,PGgxPkhlbGxvPC9oMT4=")
	Navigate(url string)

	// SetHtml sets the webview HTML directly.
	// Example: w.SetHtml(w, "<h1>Hello</h1>");
	SetHtml(html string)

	// Init injects JavaScript code at the initialization of the new page. Every
	// time the webview will open a the new page - this initialization code will
	// be executed. It is guaranteed that code is executed before window.onload.
	Init(js string)

	// Eval evaluates arbitrary JavaScript code. Evaluation happens asynchronously,
	// also the result of the expression is ignored. Use RPC bindings if you want
	// to receive notifications about the results of the evaluation.
	Eval(js string)

	// Bind binds a callback function so that it will appear under the given name
	// as a global JavaScript function. Internally it uses webview_init().
	// Callback receives a request string and a user-provided argument pointer.
	// Request string is a JSON array of all the arguments passed to the
	// JavaScript function.
	//
	// f must be a function
	// f must return either value and error or just error
	Bind(name string, f interface{}) error

	// Removes a callback that was previously set by Bind.
	Unbind(name string) error

	// SetZoom sets the zoom level of the webview.
	// level: 1.0 is normal size, >1.0 zooms in, <1.0 zooms out.
	SetZoom(level float64)

	// GetZoom returns the current zoom level of the webview.
	GetZoom() float64
}

// webview is the concrete WebView backed by a C webview_t handle.
type webview struct {
	w C.webview_t
}

// Package-level registries shared between Go and the C callbacks:
// dispatch holds functions posted to the UI thread, bindings holds
// JS-callable Go functions. Both are keyed by a monotonically probed
// index and guarded by m.
var (
	m        sync.Mutex
	index    uintptr
	dispatch = map[uintptr]func(){}
	bindings = map[uintptr]func(id, req string) (interface{}, error){}
)

// boolToInt converts a Go bool to the C int the webview API expects.
func boolToInt(b bool) C.int {
	if b {
		return 1
	}
	return 0
}

// New calls NewWindow to create a new window and a new webview instance. If debug
// is non-zero - developer tools will be enabled (if the platform supports them).
func New(debug bool) WebView { return NewWindow(debug, nil) }

// NewWindow creates a new webview instance. If debug is non-zero - developer
// tools will be enabled (if the platform supports them). Window parameter can be
// a pointer to the native window handle. If it's non-null - then child WebView is
// embedded into the given parent window. Otherwise a new window is created.
// Depending on the platform, a GtkWindow, NSWindow or HWND pointer can be passed
// here.
func NewWindow(debug bool, window unsafe.Pointer) WebView {
	w := &webview{}
	w.w = C.webview_create(boolToInt(debug), window)
	return w
}

// Destroy implements WebView.
func (w *webview) Destroy() {
	C.webview_destroy(w.w)
}

// Run implements WebView.
func (w *webview) Run() {
	C.webview_run(w.w)
}

// Terminate implements WebView.
func (w *webview) Terminate() {
	C.webview_terminate(w.w)
}

// Window implements WebView.
func (w *webview) Window() unsafe.Pointer {
	return C.webview_get_window(w.w)
}

// Navigate implements WebView. The URL is copied to a C string that is
// freed once the native call returns.
func (w *webview) Navigate(url string) {
	s := C.CString(url)
	defer C.free(unsafe.Pointer(s))
	C.webview_navigate(w.w, s)
}

// SetHtml implements WebView.
func (w *webview) SetHtml(html string) {
	s := C.CString(html)
	defer C.free(unsafe.Pointer(s))
	C.webview_set_html(w.w, s)
}

// SetTitle implements WebView.
func (w *webview) SetTitle(title string) {
	s := C.CString(title)
	defer C.free(unsafe.Pointer(s))
	C.webview_set_title(w.w, s)
}

// SetSize implements WebView.
func (w *webview) SetSize(width int, height int, hint Hint) {
	C.webview_set_size(w.w, C.int(width), C.int(height), C.webview_hint_t(hint))
}

// Init implements WebView.
func (w *webview) Init(js string) {
	s := C.CString(js)
	defer C.free(unsafe.Pointer(s))
	C.webview_init(w.w, s)
}

// Eval implements WebView.
func (w *webview) Eval(js string) {
	s := C.CString(js)
	defer C.free(unsafe.Pointer(s))
	C.webview_eval(w.w, s)
}

// Dispatch implements WebView: f is stored in the dispatch map at the first
// free index, then that index is handed to C to be called back on the UI
// thread via _webviewDispatchGoCallback.
//
// NOTE(review): the shared `index` is read again after m.Unlock() when
// passed to C — a concurrent Dispatch/Bind could advance it in between,
// dispatching the wrong entry. Flagged only; not changed in this doc pass.
func (w *webview) Dispatch(f func()) {
	m.Lock()
	for ; dispatch[index] != nil; index++ {
	}
	dispatch[index] = f
	m.Unlock()
	C.CgoWebViewDispatch(w.w, C.uintptr_t(index))
}

// _webviewDispatchGoCallback is invoked from C on the UI thread; it pops the
// stored function for index out of the dispatch map and runs it once.
//
//export _webviewDispatchGoCallback
func _webviewDispatchGoCallback(index unsafe.Pointer) {
	m.Lock()
	f := dispatch[uintptr(index)]
	delete(dispatch, uintptr(index))
	m.Unlock()
	f()
}

// _webviewBindingGoCallback is invoked from C when JS calls a bound function:
// it runs the Go binding for index and returns the JSON result (or error
// string, status -1) back to the webview via webview_return.
//
//export _webviewBindingGoCallback
func _webviewBindingGoCallback(w C.webview_t, id *C.char, req *C.char, index uintptr) {
	m.Lock()
	f := bindings[index]
	m.Unlock()
	jsString := func(v interface{}) string { b, _ := json.Marshal(v); return string(b) }
	status := 0
	var result string
	if res, err := f(C.GoString(id), C.GoString(req)); err != nil {
		status = -1
		result = jsString(err.Error())
	} else if b, err := json.Marshal(res); err != nil {
		status = -1
		result = jsString(err.Error())
	} else {
		status = 0
		result = string(b)
	}
	s := C.CString(result)
	defer C.free(unsafe.Pointer(s))
	C.webview_return(w, id, C.int(status), s)
}

// Bind implements WebView: it wraps f in a closure that unmarshals the JSON
// argument array, calls f via reflection (handling variadic parameters), and
// normalizes the (value, error) return shapes; the closure is registered in
// bindings and exposed to JS through C.CgoWebViewBind.
func (w *webview) Bind(name string, f interface{}) error {
	v := reflect.ValueOf(f)
	// f must be a function
	if v.Kind() != reflect.Func {
		return errors.New("only functions can be bound")
	}
	// f must return either value and error or just error
	if n := v.Type().NumOut(); n > 2 {
		return errors.New("function may only return a value or a value+error")
	}
	binding := func(id, req string) (interface{}, error) {
		// req is a JSON array of the JS-side arguments.
		raw := []json.RawMessage{}
		if err := json.Unmarshal([]byte(req), &raw); err != nil {
			return nil, err
		}
		isVariadic := v.Type().IsVariadic()
		numIn := v.Type().NumIn()
		if (isVariadic && len(raw) < numIn-1) || (!isVariadic && len(raw) != numIn) {
			return nil, errors.New("function arguments mismatch")
		}
		args := []reflect.Value{}
		for i := range raw {
			var arg reflect.Value
			if isVariadic && i >= numIn-1 {
				// Extra args map to the variadic parameter's element type.
				arg = reflect.New(v.Type().In(numIn - 1).Elem())
			} else {
				arg = reflect.New(v.Type().In(i))
			}
			if err := json.Unmarshal(raw[i], arg.Interface()); err != nil {
				return nil, err
			}
			args = append(args, arg.Elem())
		}
		errorType := reflect.TypeOf((*error)(nil)).Elem()
		res := v.Call(args)
		switch len(res) {
		case 0:
			// No results from the function, just return nil
			return nil, nil
		case 1:
			// One result may be a value, or an error
			if res[0].Type().Implements(errorType) {
				if res[0].Interface() != nil {
					return nil, res[0].Interface().(error)
				}
				return nil, nil
			}
			return res[0].Interface(), nil
		case 2:
			// Two results: first one is value, second is error
			if !res[1].Type().Implements(errorType) {
				return nil, errors.New("second return value must be an error")
			}
			if res[1].Interface() == nil {
				return res[0].Interface(), nil
			}
			return res[0].Interface(), res[1].Interface().(error)
		default:
			return nil, errors.New("unexpected number of return values")
		}
	}
	// Register the closure at the first free slot, then expose it to JS.
	// NOTE(review): as in Dispatch, `index` is re-read after Unlock — see
	// the race note there.
	m.Lock()
	for ; bindings[index] != nil; index++ {
	}
	bindings[index] = binding
	m.Unlock()
	cname := C.CString(name)
	defer C.free(unsafe.Pointer(cname))
	C.CgoWebViewBind(w.w, cname, C.uintptr_t(index))
	return nil
}

// Unbind implements WebView. Note it only removes the JS-side hook; the Go
// closure stays in the bindings map (its index is never freed here).
func (w *webview) Unbind(name string) error {
	cname := C.CString(name)
	defer C.free(unsafe.Pointer(cname))
	C.CgoWebViewUnbind(w.w, cname)
	return nil
}

// SetZoom implements WebView.
func (w *webview) SetZoom(level float64) {
	C.webview_set_zoom(w.w, C.double(level))
}

// GetZoom implements WebView.
func (w *webview) GetZoom() float64 {
	return float64(C.webview_get_zoom(w.w))
}
go
MIT
626af2d80973270c4d59b8df7153ac47ad67ed7b
2026-01-07T08:35:43.337630Z
false
ollama/ollama
https://github.com/ollama/ollama/blob/626af2d80973270c4d59b8df7153ac47ad67ed7b/app/ui/ui_test.go
app/ui/ui_test.go
//go:build windows || darwin package ui import ( "bytes" "encoding/json" "io" "net/http" "net/http/httptest" "path/filepath" "runtime" "strings" "testing" "github.com/ollama/ollama/app/store" ) func TestHandlePostApiSettings(t *testing.T) { tests := []struct { name string requested store.Settings wantErr bool }{ { name: "valid settings update - all fields", requested: store.Settings{ Expose: true, Browser: true, Models: "/custom/models", Agent: true, Tools: true, WorkingDir: "/workspace", }, wantErr: false, }, { name: "partial settings update", requested: store.Settings{ Agent: true, Tools: false, WorkingDir: "/new/path", }, wantErr: false, }, { name: "settings with special characters in paths", requested: store.Settings{ Models: "/path with spaces/models", WorkingDir: "/tmp/work-dir_123", Agent: true, }, wantErr: false, }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { testStore := &store.Store{ DBPath: filepath.Join(t.TempDir(), "db.sqlite"), } defer testStore.Close() // Ensure database is closed before cleanup body, err := json.Marshal(tt.requested) if err != nil { t.Fatalf("failed to marshal test body: %v", err) } req := httptest.NewRequest("POST", "/api/v1/settings", bytes.NewReader(body)) req.Header.Set("Content-Type", "application/json") rr := httptest.NewRecorder() // Set up server with test store server := &Server{ Store: testStore, Restart: func() {}, // Mock restart function for tests } if err := server.settings(rr, req); (err != nil) != tt.wantErr { t.Errorf("handlePostApiSettings() error = %v, wantErr %v", err, tt.wantErr) } if rr.Code != http.StatusOK { t.Errorf("handlePostApiSettings() status = %v, want %v", rr.Code, http.StatusOK) } // Check settings were saved correctly (if no error expected) if !tt.wantErr { savedSettings, err := testStore.Settings() if err != nil { t.Errorf("failed to retrieve saved settings: %v", err) } else { // Compare field by field, accounting for defaults that may be set by the store if 
savedSettings.Expose != tt.requested.Expose { t.Errorf("Expose: got %v, want %v", savedSettings.Expose, tt.requested.Expose) } if savedSettings.Browser != tt.requested.Browser { t.Errorf("Browser: got %v, want %v", savedSettings.Browser, tt.requested.Browser) } if savedSettings.Agent != tt.requested.Agent { t.Errorf("Agent: got %v, want %v", savedSettings.Agent, tt.requested.Agent) } if savedSettings.Tools != tt.requested.Tools { t.Errorf("Tools: got %v, want %v", savedSettings.Tools, tt.requested.Tools) } if savedSettings.WorkingDir != tt.requested.WorkingDir { t.Errorf("WorkingDir: got %q, want %q", savedSettings.WorkingDir, tt.requested.WorkingDir) } // Only check Models if explicitly set in the test case if tt.requested.Models != "" && savedSettings.Models != tt.requested.Models { t.Errorf("Models: got %q, want %q", savedSettings.Models, tt.requested.Models) } } } }) } } func TestAuthenticationMiddleware(t *testing.T) { tests := []struct { name string method string contentType string tokenCookie string serverToken string wantStatus int wantError string setupRequest func(*http.Request) }{ { name: "missing token cookie", method: "GET", tokenCookie: "", serverToken: "test-token-123", wantStatus: http.StatusForbidden, wantError: "Token is required", }, { name: "invalid token value", method: "GET", tokenCookie: "wrong-token", serverToken: "test-token-123", wantStatus: http.StatusForbidden, wantError: "Token is required", }, { name: "valid token - GET request", method: "GET", tokenCookie: "test-token-123", serverToken: "test-token-123", wantStatus: http.StatusOK, wantError: "", }, { name: "valid token - POST with application/json", method: "POST", contentType: "application/json", tokenCookie: "test-token-123", serverToken: "test-token-123", wantStatus: http.StatusOK, wantError: "", }, { name: "POST without Content-Type header", method: "POST", contentType: "", tokenCookie: "test-token-123", serverToken: "test-token-123", wantStatus: http.StatusForbidden, wantError: 
"Content-Type must be application/json", }, { name: "POST with wrong Content-Type", method: "POST", contentType: "text/plain", tokenCookie: "test-token-123", serverToken: "test-token-123", wantStatus: http.StatusForbidden, wantError: "Content-Type must be application/json", }, { name: "OPTIONS request (CORS preflight) - should bypass auth", method: "OPTIONS", tokenCookie: "", serverToken: "test-token-123", wantStatus: http.StatusOK, wantError: "", setupRequest: func(r *http.Request) { // Simulate CORS being enabled // Note: This assumes CORS() returns true in test environment }, }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { // Create a test handler that just returns 200 OK if auth passes testHandler := http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { w.WriteHeader(http.StatusOK) json.NewEncoder(w).Encode(map[string]string{"status": "ok"}) }) // Create server with test token server := &Server{ Token: tt.serverToken, } // Get the authentication middleware by calling Handler() // We need to wrap our test handler with the auth middleware handler := server.Handler() // Create a test router to simulate the authentication middleware mux := http.NewServeMux() mux.Handle("/test", handler) // But since Handler() returns the full router, we'll need a different approach // Let's create a minimal handler that includes just the auth logic authHandler := http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { // Add CORS headers for dev work if r.Method == "OPTIONS" { w.WriteHeader(http.StatusOK) return } if r.Method == "POST" && r.Header.Get("Content-Type") != "application/json" { w.WriteHeader(http.StatusForbidden) json.NewEncoder(w).Encode(map[string]string{"error": "Content-Type must be application/json"}) return } cookie, err := r.Cookie("token") if err != nil { w.WriteHeader(http.StatusForbidden) json.NewEncoder(w).Encode(map[string]string{"error": "Token is required"}) return } if cookie.Value != server.Token { 
w.WriteHeader(http.StatusForbidden) json.NewEncoder(w).Encode(map[string]string{"error": "Token is required"}) return } // If auth passes, call the test handler testHandler.ServeHTTP(w, r) }) // Create test request req := httptest.NewRequest(tt.method, "/test", nil) // Set Content-Type if provided if tt.contentType != "" { req.Header.Set("Content-Type", tt.contentType) } // Set token cookie if provided if tt.tokenCookie != "" { req.AddCookie(&http.Cookie{ Name: "token", Value: tt.tokenCookie, }) } // Run any additional setup if tt.setupRequest != nil { tt.setupRequest(req) } // Create response recorder rr := httptest.NewRecorder() // Serve the request authHandler.ServeHTTP(rr, req) // Check status code if rr.Code != tt.wantStatus { t.Errorf("handler returned wrong status code: got %v want %v", rr.Code, tt.wantStatus) } // Check error message if expected if tt.wantError != "" { var response map[string]string if err := json.NewDecoder(rr.Body).Decode(&response); err != nil { t.Fatalf("failed to decode response body: %v", err) } if response["error"] != tt.wantError { t.Errorf("handler returned wrong error message: got %v want %v", response["error"], tt.wantError) } } }) } } func TestUserAgent(t *testing.T) { ua := userAgent() // The userAgent function should return a string in the format: // "ollama/version (arch os) app/version Go/goversion" // Example: "ollama/v0.1.28 (amd64 darwin) Go/go1.21.0" if ua == "" { t.Fatal("userAgent returned empty string") } req := httptest.NewRequest("GET", "/", nil) req.Header.Set("User-Agent", ua) // This is a copy of the logic ollama.com uses to parse the user agent clientInfoFromRequest := func(r *http.Request) struct { Product string Version string OS string Arch string AppVersion string } { product, rest, _ := strings.Cut(r.UserAgent(), " ") client, version, ok := strings.Cut(product, "/") if !ok { return struct { Product string Version string OS string Arch string AppVersion string }{} } if version != "" && version[0] != 'v' { 
version = "v" + version } arch, rest, _ := strings.Cut(rest, " ") arch = strings.Trim(arch, "(") os, rest, _ := strings.Cut(rest, ")") var appVersion string if strings.Contains(rest, "app/") { _, appPart, found := strings.Cut(rest, "app/") if found { appVersion = strings.Fields(strings.TrimSpace(appPart))[0] if appVersion != "" && appVersion[0] != 'v' { appVersion = "v" + appVersion } } } return struct { Product string Version string OS string Arch string AppVersion string }{ Product: client, Version: version, OS: os, Arch: arch, AppVersion: appVersion, } } info := clientInfoFromRequest(req) if info.Product != "ollama" { t.Errorf("Expected Product to be 'ollama', got '%s'", info.Product) } if info.Version != "" && info.Version[0] != 'v' { t.Errorf("Expected Version to start with 'v', got '%s'", info.Version) } expectedOS := runtime.GOOS if info.OS != expectedOS { t.Errorf("Expected OS to be '%s', got '%s'", expectedOS, info.OS) } expectedArch := runtime.GOARCH if info.Arch != expectedArch { t.Errorf("Expected Arch to be '%s', got '%s'", expectedArch, info.Arch) } if info.AppVersion != "" && info.AppVersion[0] != 'v' { t.Errorf("Expected AppVersion to start with 'v', got '%s'", info.AppVersion) } t.Logf("User Agent: %s", ua) t.Logf("Parsed - Product: %s, Version: %s, OS: %s, Arch: %s", info.Product, info.Version, info.OS, info.Arch) } func TestUserAgentTransport(t *testing.T) { ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { w.Header().Set("Content-Type", "text/plain") w.Write([]byte(r.Header.Get("User-Agent"))) })) defer ts.Close() server := &Server{} client := server.httpClient() resp, err := client.Get(ts.URL) if err != nil { t.Fatalf("Failed to make request: %v", err) } defer resp.Body.Close() // In this case the User-Agent is the response body, as the server just echoes it back body, err := io.ReadAll(resp.Body) if err != nil { t.Fatalf("Failed to read response: %v", err) } receivedUA := string(body) expectedUA := 
userAgent() if receivedUA != expectedUA { t.Errorf("User-Agent mismatch\nExpected: %s\nReceived: %s", expectedUA, receivedUA) } if !strings.HasPrefix(receivedUA, "ollama/") { t.Errorf("User-Agent should start with 'ollama/', got: %s", receivedUA) } t.Logf("User-Agent transport successfully set: %s", receivedUA) }
go
MIT
626af2d80973270c4d59b8df7153ac47ad67ed7b
2026-01-07T08:35:43.337630Z
false
ollama/ollama
https://github.com/ollama/ollama/blob/626af2d80973270c4d59b8df7153ac47ad67ed7b/app/ui/extract.go
app/ui/extract.go
//go:build windows || darwin package ui import ( "bytes" "fmt" "path/filepath" "slices" "strings" "unicode/utf8" "github.com/ledongthuc/pdf" ) // convertBytesToText converts raw file bytes to text based on file extension func convertBytesToText(data []byte, filename string) string { ext := strings.ToLower(filepath.Ext(filename)) if ext == ".pdf" { text, err := extractPDFText(data) if err != nil { return fmt.Sprintf("[PDF file - %d bytes - failed to extract text: %v]", len(data), err) } if strings.TrimSpace(text) == "" { return fmt.Sprintf("[PDF file - %d bytes - no text content found]", len(data)) } return text } binaryExtensions := []string{ ".xlsx", ".pptx", ".zip", ".tar", ".gz", ".rar", ".jpg", ".jpeg", ".png", ".gif", ".bmp", ".svg", ".ico", ".mp3", ".mp4", ".avi", ".mov", ".wmv", ".flv", ".webm", ".exe", ".dll", ".so", ".dylib", ".app", ".dmg", ".pkg", } if slices.Contains(binaryExtensions, ext) { return fmt.Sprintf("[Binary file of type %s - %d bytes]", ext, len(data)) } if utf8.Valid(data) { return string(data) } // If not valid UTF-8, return a placeholder return fmt.Sprintf("[Binary file - %d bytes - not valid UTF-8]", len(data)) } // extractPDFText extracts text content from PDF bytes func extractPDFText(data []byte) (string, error) { reader := bytes.NewReader(data) pdfReader, err := pdf.NewReader(reader, int64(len(data))) if err != nil { return "", fmt.Errorf("failed to create PDF reader: %w", err) } var textBuilder strings.Builder numPages := pdfReader.NumPage() for i := 1; i <= numPages; i++ { page := pdfReader.Page(i) if page.V.IsNull() { continue } text, err := page.GetPlainText(nil) if err != nil { // Log the error but continue with other pages continue } if strings.TrimSpace(text) != "" { if textBuilder.Len() > 0 { textBuilder.WriteString("\n\n--- Page ") textBuilder.WriteString(fmt.Sprintf("%d", i)) textBuilder.WriteString(" ---\n") } textBuilder.WriteString(text) } } return textBuilder.String(), nil }
go
MIT
626af2d80973270c4d59b8df7153ac47ad67ed7b
2026-01-07T08:35:43.337630Z
false
ollama/ollama
https://github.com/ollama/ollama/blob/626af2d80973270c4d59b8df7153ac47ad67ed7b/app/ui/ui.go
app/ui/ui.go
//go:build windows || darwin // package ui implements a chat interface for Ollama package ui import ( "context" "encoding/base64" "encoding/json" "errors" "fmt" "log/slog" "net/http" "net/http/httputil" "os" "runtime" "runtime/debug" "slices" "strconv" "strings" "sync" "time" "github.com/google/uuid" "github.com/ollama/ollama/api" "github.com/ollama/ollama/app/server" "github.com/ollama/ollama/app/store" "github.com/ollama/ollama/app/tools" "github.com/ollama/ollama/app/types/not" "github.com/ollama/ollama/app/ui/responses" "github.com/ollama/ollama/app/version" ollamaAuth "github.com/ollama/ollama/auth" "github.com/ollama/ollama/envconfig" "github.com/ollama/ollama/types/model" _ "github.com/tkrajina/typescriptify-golang-structs/typescriptify" ) //go:generate tscriptify -package=github.com/ollama/ollama/app/ui/responses -target=./app/codegen/gotypes.gen.ts responses/types.go //go:generate npm --prefix ./app run build var CORS = envconfig.Bool("OLLAMA_CORS") // OllamaDotCom returns the URL for ollama.com, allowing override via environment variable var OllamaDotCom = func() string { if url := os.Getenv("OLLAMA_DOT_COM_URL"); url != "" { return url } return "https://ollama.com" }() type statusRecorder struct { http.ResponseWriter code int } func (r *statusRecorder) Written() bool { return r.code != 0 } func (r *statusRecorder) WriteHeader(code int) { r.code = code r.ResponseWriter.WriteHeader(code) } func (r *statusRecorder) Status() int { if r.code == 0 { return http.StatusOK } return r.code } func (r *statusRecorder) Flush() { if flusher, ok := r.ResponseWriter.(http.Flusher); ok { flusher.Flush() } } // Event is a string that represents the type of event being sent to the // client. It is used in the Server-Sent Events (SSE) protocol to identify // the type of data being sent. // The client (template) will use this type in the sse event listener to // determine how to handle the incoming data. 
It will also be used in the // sse-swap htmx event listener to determine how to handle the incoming data. type Event string const ( EventChat Event = "chat" EventComplete Event = "complete" EventLoading Event = "loading" EventToolResult Event = "tool_result" // Used for both tool calls and their results EventThinking Event = "thinking" EventToolCall Event = "tool_call" EventDownload Event = "download" ) type Server struct { Logger *slog.Logger Restart func() Token string Store *store.Store ToolRegistry *tools.Registry Tools bool // if true, the server will use single-turn tools to fulfill the user's request WebSearch bool // if true, the server will use single-turn browser tool to fulfill the user's request Agent bool // if true, the server will use multi-turn tools to fulfill the user's request WorkingDir string // Working directory for all agent operations // Dev is true if the server is running in development mode Dev bool } func (s *Server) log() *slog.Logger { if s.Logger == nil { return slog.Default() } return s.Logger } // ollamaProxy creates a reverse proxy handler to the Ollama server func (s *Server) ollamaProxy() http.Handler { var ( proxy http.Handler proxyMu sync.Mutex ) return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { proxyMu.Lock() p := proxy proxyMu.Unlock() if p == nil { proxyMu.Lock() if proxy == nil { var err error for i := range 2 { if i > 0 { s.log().Warn("ollama server not ready, retrying", "attempt", i+1) time.Sleep(1 * time.Second) } err = WaitForServer(context.Background(), 10*time.Second) if err == nil { break } } if err != nil { proxyMu.Unlock() s.log().Error("ollama server not ready after retries", "error", err) http.Error(w, "Ollama server is not ready", http.StatusServiceUnavailable) return } target := envconfig.Host() s.log().Info("configuring ollama proxy", "target", target.String()) newProxy := httputil.NewSingleHostReverseProxy(target) originalDirector := newProxy.Director newProxy.Director = func(req 
*http.Request) { originalDirector(req) req.Host = target.Host s.log().Debug("proxying request", "method", req.Method, "path", req.URL.Path, "target", target.Host) } newProxy.ErrorHandler = func(w http.ResponseWriter, r *http.Request, err error) { s.log().Error("proxy error", "error", err, "path", r.URL.Path, "target", target.String()) http.Error(w, "proxy error: "+err.Error(), http.StatusBadGateway) } proxy = newProxy p = newProxy } else { p = proxy } proxyMu.Unlock() } p.ServeHTTP(w, r) }) } type errHandlerFunc func(http.ResponseWriter, *http.Request) error func (s *Server) Handler() http.Handler { handle := func(f errHandlerFunc) http.Handler { return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { // Add CORS headers for dev work if CORS() { w.Header().Set("Access-Control-Allow-Origin", "*") w.Header().Set("Access-Control-Allow-Methods", "GET, POST, PUT, DELETE, OPTIONS") w.Header().Set("Access-Control-Allow-Headers", "Content-Type, Authorization, X-Requested-With") w.Header().Set("Access-Control-Allow-Credentials", "true") // Handle preflight requests if r.Method == "OPTIONS" { w.WriteHeader(http.StatusOK) return } } // Don't check for token in development mode if !s.Dev { cookie, err := r.Cookie("token") if err != nil { w.WriteHeader(http.StatusForbidden) json.NewEncoder(w).Encode(map[string]string{"error": "Token is required"}) return } if cookie.Value != s.Token { w.WriteHeader(http.StatusForbidden) json.NewEncoder(w).Encode(map[string]string{"error": "Token is required"}) return } } sw := &statusRecorder{ResponseWriter: w} log := s.log() level := slog.LevelInfo start := time.Now() requestID := fmt.Sprintf("%d", time.Now().UnixNano()) defer func() { p := recover() if p != nil { log = log.With("panic", p, "request_id", requestID) level = slog.LevelError // Handle panic with user-friendly error if !sw.Written() { s.handleError(sw, fmt.Errorf("internal server error")) } } log.Log(r.Context(), level, "site.serveHTTP", "http.method", r.Method, 
"http.path", r.URL.Path, "http.pattern", r.Pattern, "http.status", sw.Status(), "http.d", time.Since(start), "request_id", requestID, "version", version.Version, ) // let net/http.Server deal with panics if p != nil { panic(p) } }() w.Header().Set("X-Frame-Options", "DENY") w.Header().Set("X-Version", version.Version) w.Header().Set("X-Request-ID", requestID) ctx := r.Context() if err := f(sw, r); err != nil { if ctx.Err() != nil { return } level = slog.LevelError log = log.With("error", err) s.handleError(sw, err) } }) } mux := http.NewServeMux() // CORS is handled in `handle`, but we have to match on OPTIONS to handle preflight requests mux.Handle("OPTIONS /", handle(func(w http.ResponseWriter, r *http.Request) error { return nil })) // API routes - handle first to take precedence mux.Handle("GET /api/v1/chats", handle(s.listChats)) mux.Handle("GET /api/v1/chat/{id}", handle(s.getChat)) mux.Handle("POST /api/v1/chat/{id}", handle(s.chat)) mux.Handle("DELETE /api/v1/chat/{id}", handle(s.deleteChat)) mux.Handle("POST /api/v1/create-chat", handle(s.createChat)) mux.Handle("PUT /api/v1/chat/{id}/rename", handle(s.renameChat)) mux.Handle("GET /api/v1/inference-compute", handle(s.getInferenceCompute)) mux.Handle("POST /api/v1/model/upstream", handle(s.modelUpstream)) mux.Handle("GET /api/v1/settings", handle(s.getSettings)) mux.Handle("POST /api/v1/settings", handle(s.settings)) // Ollama proxy endpoints ollamaProxy := s.ollamaProxy() mux.Handle("GET /api/tags", ollamaProxy) mux.Handle("POST /api/show", ollamaProxy) mux.Handle("GET /api/version", ollamaProxy) mux.Handle("HEAD /api/version", ollamaProxy) mux.Handle("POST /api/me", ollamaProxy) mux.Handle("POST /api/signout", ollamaProxy) // React app - catch all non-API routes and serve the React app mux.Handle("GET /", s.appHandler()) mux.Handle("PUT /", s.appHandler()) mux.Handle("POST /", s.appHandler()) mux.Handle("PATCH /", s.appHandler()) mux.Handle("DELETE /", s.appHandler()) return mux } // handleError renders 
appropriate error responses based on request type func (s *Server) handleError(w http.ResponseWriter, e error) { // Preserve CORS headers for API requests if CORS() { w.Header().Set("Access-Control-Allow-Origin", "*") w.Header().Set("Access-Control-Allow-Methods", "GET, POST, PUT, DELETE, OPTIONS") w.Header().Set("Access-Control-Allow-Headers", "Content-Type, Authorization, X-Requested-With") w.Header().Set("Access-Control-Allow-Credentials", "true") } w.Header().Set("Content-Type", "application/json") w.WriteHeader(http.StatusInternalServerError) json.NewEncoder(w).Encode(map[string]string{"error": e.Error()}) } // userAgentTransport is a custom RoundTripper that adds the User-Agent header to all requests type userAgentTransport struct { base http.RoundTripper } func (t *userAgentTransport) RoundTrip(req *http.Request) (*http.Response, error) { // Clone the request to avoid mutating the original r := req.Clone(req.Context()) r.Header.Set("User-Agent", userAgent()) return t.base.RoundTrip(r) } // httpClient returns an HTTP client that automatically adds the User-Agent header func (s *Server) httpClient() *http.Client { return &http.Client{ Timeout: 10 * time.Second, Transport: &userAgentTransport{ base: http.DefaultTransport, }, } } // doSelfSigned sends a self-signed request to the ollama.com API func (s *Server) doSelfSigned(ctx context.Context, method, path string) (*http.Response, error) { timestamp := strconv.FormatInt(time.Now().Unix(), 10) // Form the string to sign: METHOD,PATH?ts=TIMESTAMP signString := fmt.Sprintf("%s,%s?ts=%s", method, path, timestamp) signature, err := ollamaAuth.Sign(ctx, []byte(signString)) if err != nil { return nil, fmt.Errorf("failed to sign request: %w", err) } endpoint := fmt.Sprintf("%s%s?ts=%s", OllamaDotCom, path, timestamp) req, err := http.NewRequestWithContext(ctx, method, endpoint, nil) if err != nil { return nil, fmt.Errorf("failed to create request: %w", err) } req.Header.Set("Authorization", fmt.Sprintf("Bearer %s", 
signature)) return s.httpClient().Do(req) } // UserData fetches user data from ollama.com API for the current ollama key func (s *Server) UserData(ctx context.Context) (*api.UserResponse, error) { resp, err := s.doSelfSigned(ctx, http.MethodPost, "/api/me") if err != nil { return nil, fmt.Errorf("failed to call ollama.com/api/me: %w", err) } defer resp.Body.Close() if resp.StatusCode != http.StatusOK { return nil, fmt.Errorf("unexpected status code: %d", resp.StatusCode) } var user api.UserResponse if err := json.NewDecoder(resp.Body).Decode(&user); err != nil { return nil, fmt.Errorf("failed to parse user response: %w", err) } user.AvatarURL = fmt.Sprintf("%s/%s", OllamaDotCom, user.AvatarURL) storeUser := store.User{ Name: user.Name, Email: user.Email, Plan: user.Plan, } if err := s.Store.SetUser(storeUser); err != nil { s.log().Warn("failed to cache user data", "error", err) } return &user, nil } // WaitForServer waits for the Ollama server to be ready func WaitForServer(ctx context.Context, timeout time.Duration) error { deadline := time.Now().Add(timeout) for time.Now().Before(deadline) { c, err := api.ClientFromEnvironment() if err != nil { return err } if _, err := c.Version(ctx); err == nil { slog.Debug("ollama server is ready") return nil } time.Sleep(10 * time.Millisecond) } return errors.New("timeout waiting for Ollama server to be ready") } func (s *Server) createChat(w http.ResponseWriter, r *http.Request) error { if err := WaitForServer(r.Context(), 10*time.Second); err != nil { return err } id, err := uuid.NewV7() if err != nil { return fmt.Errorf("failed to generate chat ID: %w", err) } json.NewEncoder(w).Encode(map[string]string{"id": id.String()}) return nil } func (s *Server) listChats(w http.ResponseWriter, r *http.Request) error { chats, _ := s.Store.Chats() chatInfos := make([]responses.ChatInfo, len(chats)) for i, chat := range chats { chatInfos[i] = chatInfoFromChat(chat) } w.Header().Set("Content-Type", "application/json") 
json.NewEncoder(w).Encode(responses.ChatsResponse{ChatInfos: chatInfos}) return nil } // checkModelUpstream makes a HEAD request to the Ollama registry to get the upstream digest and push time func (s *Server) checkModelUpstream(ctx context.Context, modelName string, timeout time.Duration) (string, int64, error) { // Create a context with timeout for the registry check checkCtx, cancel := context.WithTimeout(ctx, timeout) defer cancel() // Parse model name to get namespace, model, and tag parts := strings.Split(modelName, ":") name := parts[0] tag := "latest" if len(parts) > 1 { tag = parts[1] } if !strings.Contains(name, "/") { // If the model name does not contain a slash, assume it's a library model name = "library/" + name } // Check the model in the Ollama registry using HEAD request url := OllamaDotCom + "/v2/" + name + "/manifests/" + tag req, err := http.NewRequestWithContext(checkCtx, "HEAD", url, nil) if err != nil { return "", 0, err } httpClient := s.httpClient() httpClient.Timeout = timeout resp, err := httpClient.Do(req) if err != nil { return "", 0, err } defer resp.Body.Close() if resp.StatusCode != http.StatusOK { return "", 0, fmt.Errorf("registry returned status %d", resp.StatusCode) } digest := resp.Header.Get("ollama-content-digest") if digest == "" { return "", 0, fmt.Errorf("no digest header found") } var pushTime int64 if pushTimeStr := resp.Header.Get("ollama-push-time"); pushTimeStr != "" { if pt, err := strconv.ParseInt(pushTimeStr, 10, 64); err == nil { pushTime = pt } } return digest, pushTime, nil } // isNetworkError checks if an error string contains common network/connection error patterns func isNetworkError(errStr string) bool { networkErrorPatterns := []string{ "connection refused", "no such host", "timeout", "network is unreachable", "connection reset", "connection timed out", "temporary failure", "dial tcp", "i/o timeout", "context deadline exceeded", "broken pipe", } for _, pattern := range networkErrorPatterns { if 
strings.Contains(errStr, pattern) { return true } } return false } var ErrNetworkOffline = errors.New("network is offline") func (s *Server) getError(err error) responses.ErrorEvent { var sErr api.AuthorizationError if errors.As(err, &sErr) && sErr.StatusCode == http.StatusUnauthorized { return responses.ErrorEvent{ EventName: "error", Error: "Could not verify you are signed in. Please sign in and try again.", Code: "cloud_unauthorized", } } errStr := err.Error() switch { case strings.Contains(errStr, "402"): return responses.ErrorEvent{ EventName: "error", Error: "You've reached your usage limit, please upgrade to continue", Code: "usage_limit_upgrade", } case strings.HasPrefix(errStr, "pull model manifest") && isNetworkError(errStr): return responses.ErrorEvent{ EventName: "error", Error: "Unable to download model. Please check your internet connection to download the model for offline use.", Code: "offline_download_error", } case errors.Is(err, ErrNetworkOffline) || strings.Contains(errStr, "operation timed out"): return responses.ErrorEvent{ EventName: "error", Error: "Connection lost", Code: "turbo_connection_lost", } } return responses.ErrorEvent{ EventName: "error", Error: err.Error(), } } func (s *Server) browserState(chat *store.Chat) (*responses.BrowserStateData, bool) { if len(chat.BrowserState) > 0 { var st responses.BrowserStateData if err := json.Unmarshal(chat.BrowserState, &st); err == nil { return &st, true } } return nil, false } // reconstructBrowserState (legacy): return the latest full browser state stored in messages. 
func reconstructBrowserState(messages []store.Message, defaultViewTokens int) *responses.BrowserStateData { for i := len(messages) - 1; i >= 0; i-- { msg := messages[i] if msg.ToolResult == nil { continue } var st responses.BrowserStateData if err := json.Unmarshal(*msg.ToolResult, &st); err == nil { if len(st.PageStack) > 0 || len(st.URLToPage) > 0 { if st.ViewTokens == 0 { st.ViewTokens = defaultViewTokens } return &st } } } return nil } func (s *Server) chat(w http.ResponseWriter, r *http.Request) error { w.Header().Set("Content-Type", "text/jsonl") w.Header().Set("Cache-Control", "no-cache") w.Header().Set("Connection", "keep-alive") w.Header().Set("Transfer-Encoding", "chunked") flusher, ok := w.(http.Flusher) if !ok { return errors.New("streaming not supported") } if r.Method != "POST" { return not.Found } cid := r.PathValue("id") createdChat := false // if cid is the literal string "new", then we create a new chat before // performing our normal actions if cid == "new" { u, err := uuid.NewV7() if err != nil { return fmt.Errorf("failed to generate new chat id: %w", err) } cid = u.String() createdChat = true } var req responses.ChatRequest if err := json.NewDecoder(r.Body).Decode(&req); err != nil { fmt.Fprintf(os.Stderr, "error unmarshalling body: %v\n", err) return fmt.Errorf("invalid request body: %w", err) } if req.Model == "" { return fmt.Errorf("empty model") } // Don't allow empty messages unless forceUpdate is true if req.Prompt == "" && !req.ForceUpdate { return fmt.Errorf("empty message") } if createdChat { // send message to the client that the chat has been created json.NewEncoder(w).Encode(responses.ChatEvent{ EventName: "chat_created", ChatID: &cid, }) flusher.Flush() } // Check if this is from a specific message index (e.g. 
for editing) idx := -1 if req.Index != nil { idx = *req.Index } // Load chat with attachments since we need them for processing chat, err := s.Store.ChatWithOptions(cid, true) if err != nil { if !errors.Is(err, not.Found) { return err } chat = store.NewChat(cid) } // Only add user message if not forceUpdate if !req.ForceUpdate { var messageOptions *store.MessageOptions if len(req.Attachments) > 0 { storeAttachments := make([]store.File, 0, len(req.Attachments)) for _, att := range req.Attachments { if att.Data == "" { // This is an existing file reference - keep it from the original message if idx >= 0 && idx < len(chat.Messages) { originalMessage := chat.Messages[idx] // Find the file by filename in the original message for _, originalFile := range originalMessage.Attachments { if originalFile.Filename == att.Filename { storeAttachments = append(storeAttachments, originalFile) break } } } } else { // This is a new file - decode base64 data data, err := base64.StdEncoding.DecodeString(att.Data) if err != nil { s.log().Error("failed to decode attachment data", "error", err, "filename", att.Filename) continue } storeAttachments = append(storeAttachments, store.File{ Filename: att.Filename, Data: data, }) } } messageOptions = &store.MessageOptions{ Attachments: storeAttachments, } } userMsg := store.NewMessage("user", req.Prompt, messageOptions) if idx >= 0 && idx < len(chat.Messages) { // Generate from specified message: truncate and replace chat.Messages = chat.Messages[:idx] chat.Messages = append(chat.Messages, userMsg) } else { // Normal mode: append new message chat.Messages = append(chat.Messages, userMsg) } if err := s.Store.SetChat(*chat); err != nil { return err } } ctx, cancel := context.WithCancel(r.Context()) defer cancel() _, cancelLoading := context.WithCancel(ctx) loading := false c, err := api.ClientFromEnvironment() if err != nil { cancelLoading() return err } // Check if the model exists locally by trying to show it // TODO (jmorganca): skip this 
round trip and instead just act // on a 404 error on chat _, err = c.Show(ctx, &api.ShowRequest{Model: req.Model}) if err != nil || req.ForceUpdate { // Create an empty assistant message to store the model information // This will be overwritten when the model responds chat.Messages = append(chat.Messages, store.NewMessage("assistant", "", &store.MessageOptions{Model: req.Model})) if err := s.Store.SetChat(*chat); err != nil { cancelLoading() return err } // Send download progress events while the model is being pulled // TODO (jmorganca): this only shows the largest digest, but we // should show the progress for the total size of the download var largestDigest string var largestTotal int64 err = c.Pull(ctx, &api.PullRequest{Model: req.Model}, func(progress api.ProgressResponse) error { if progress.Digest != "" && progress.Total > largestTotal { largestDigest = progress.Digest largestTotal = progress.Total } if progress.Digest != "" && progress.Digest == largestDigest { progressEvent := responses.DownloadEvent{ EventName: string(EventDownload), Total: progress.Total, Completed: progress.Completed, Done: false, } if err := json.NewEncoder(w).Encode(progressEvent); err != nil { return err } flusher.Flush() } return nil }) if err != nil { s.log().Error("model download error", "error", err, "model", req.Model) errorEvent := s.getError(err) json.NewEncoder(w).Encode(errorEvent) flusher.Flush() cancelLoading() return fmt.Errorf("failed to download model: %w", err) } if err := json.NewEncoder(w).Encode(responses.DownloadEvent{ EventName: string(EventDownload), Completed: largestTotal, Total: largestTotal, Done: true, }); err != nil { cancelLoading() return err } flusher.Flush() // If forceUpdate, we're done after updating the model if req.ForceUpdate { json.NewEncoder(w).Encode(responses.ChatEvent{EventName: "done"}) flusher.Flush() cancelLoading() return nil } } loading = true defer cancelLoading() // Check the model capabilities details, err := c.Show(ctx, 
&api.ShowRequest{Model: req.Model}) if err != nil || details == nil { errorEvent := s.getError(err) json.NewEncoder(w).Encode(errorEvent) flusher.Flush() s.log().Error("failed to show model details", "error", err, "model", req.Model) return nil } think := slices.Contains(details.Capabilities, model.CapabilityThinking) var thinkValue any if req.Think != nil { thinkValue = req.Think } else { thinkValue = think } // Check if the last user message has attachments // TODO (parthsareen): this logic will change with directory drag and drop hasAttachments := false if len(chat.Messages) > 0 { lastMsg := chat.Messages[len(chat.Messages)-1] if lastMsg.Role == "user" && len(lastMsg.Attachments) > 0 { hasAttachments = true } } // Check if agent or tools mode is enabled // Note: Skip agent/tools mode if user has attachments, as the agent doesn't handle file attachments properly registry := tools.NewRegistry() var browser *tools.Browser if !hasAttachments { WebSearchEnabled := req.WebSearch != nil && *req.WebSearch if WebSearchEnabled { if supportsBrowserTools(req.Model) { browserState, ok := s.browserState(chat) if !ok { browserState = reconstructBrowserState(chat.Messages, tools.DefaultViewTokens) } browser = tools.NewBrowser(browserState) registry.Register(tools.NewBrowserSearch(browser)) registry.Register(tools.NewBrowserOpen(browser)) registry.Register(tools.NewBrowserFind(browser)) } else if supportsWebSearchTools(req.Model) { registry.Register(&tools.WebSearch{}) registry.Register(&tools.WebFetch{}) } } } var thinkingTimeStart *time.Time = nil var thinkingTimeEnd *time.Time = nil // Request-only assistant tool_calls buffer // if tool_calls arrive before any assistant text, we keep them here, // inject them into the next request, and attach on first assistant content/thinking. 
var pendingAssistantToolCalls []store.ToolCall passNum := 1 for { var toolsExecuted bool availableTools := registry.AvailableTools() // If we have pending assistant tool_calls and no assistant yet, // build the request against a temporary chat that includes a // request-only assistant with tool_calls inserted BEFORE tool messages reqChat := chat if len(pendingAssistantToolCalls) > 0 { if len(chat.Messages) == 0 || chat.Messages[len(chat.Messages)-1].Role != "assistant" { temp := *chat synth := store.NewMessage("assistant", "", &store.MessageOptions{Model: req.Model, ToolCalls: pendingAssistantToolCalls}) insertIdx := len(temp.Messages) - 1 for insertIdx >= 0 && temp.Messages[insertIdx].Role == "tool" { insertIdx-- } if insertIdx < 0 { temp.Messages = append([]store.Message{synth}, temp.Messages...) } else { tmp := make([]store.Message, 0, len(temp.Messages)+1) tmp = append(tmp, temp.Messages[:insertIdx+1]...) tmp = append(tmp, synth) tmp = append(tmp, temp.Messages[insertIdx+1:]...) temp.Messages = tmp } reqChat = &temp } } chatReq, err := s.buildChatRequest(reqChat, req.Model, thinkValue, availableTools) if err != nil { return err } err = c.Chat(ctx, chatReq, func(res api.ChatResponse) error { if loading { // Remove the loading indicator on first token cancelLoading() loading = false } // Start thinking timer on first thinking content or after tool call when thinking again if res.Message.Thinking != "" && (thinkingTimeStart == nil || thinkingTimeEnd != nil) { now := time.Now() thinkingTimeStart = &now thinkingTimeEnd = nil } if res.Message.Content == "" && res.Message.Thinking == "" && len(res.Message.ToolCalls) == 0 { return nil } event := EventChat if thinkingTimeStart != nil && res.Message.Content == "" && len(res.Message.ToolCalls) == 0 { event = EventThinking } if len(res.Message.ToolCalls) > 0 { event = EventToolCall } if event == EventToolCall && thinkingTimeStart != nil && thinkingTimeEnd == nil { now := time.Now() thinkingTimeEnd = &now } if event == 
EventChat && thinkingTimeStart != nil && thinkingTimeEnd == nil && res.Message.Content != "" { now := time.Now() thinkingTimeEnd = &now } json.NewEncoder(w).Encode(chatEventFromApiChatResponse(res, thinkingTimeStart, thinkingTimeEnd)) flusher.Flush() switch event { case EventToolCall: if thinkingTimeEnd != nil { if len(chat.Messages) > 0 && chat.Messages[len(chat.Messages)-1].Role == "assistant" { lastMsg := &chat.Messages[len(chat.Messages)-1] lastMsg.ThinkingTimeEnd = thinkingTimeEnd lastMsg.UpdatedAt = time.Now() s.Store.UpdateLastMessage(chat.ID, *lastMsg) } thinkingTimeStart = nil thinkingTimeEnd = nil } // attach tool_calls to an existing assistant if present, // otherwise (for standalone web_search/web_fetch) buffer for request-only injection. if len(res.Message.ToolCalls) > 0 { if len(chat.Messages) > 0 && chat.Messages[len(chat.Messages)-1].Role == "assistant" { toolCalls := make([]store.ToolCall, len(res.Message.ToolCalls)) for i, tc := range res.Message.ToolCalls { argsJSON, _ := json.Marshal(tc.Function.Arguments) toolCalls[i] = store.ToolCall{ Type: "function", Function: store.ToolFunction{ Name: tc.Function.Name, Arguments: string(argsJSON), }, } } lastMsg := &chat.Messages[len(chat.Messages)-1] lastMsg.ToolCalls = toolCalls if err := s.Store.UpdateLastMessage(chat.ID, *lastMsg); err != nil { return err } } else { onlyStandalone := true for _, tc := range res.Message.ToolCalls { if !(tc.Function.Name == "web_search" || tc.Function.Name == "web_fetch") { onlyStandalone = false break } } if onlyStandalone { toolCalls := make([]store.ToolCall, len(res.Message.ToolCalls)) for i, tc := range res.Message.ToolCalls { argsJSON, _ := json.Marshal(tc.Function.Arguments) toolCalls[i] = store.ToolCall{ Type: "function", Function: store.ToolFunction{ Name: tc.Function.Name, Arguments: string(argsJSON), }, } } synth := store.NewMessage("assistant", "", &store.MessageOptions{Model: req.Model, ToolCalls: toolCalls}) chat.Messages = append(chat.Messages, synth) if err 
:= s.Store.AppendMessage(chat.ID, synth); err != nil { return err } // clear buffer to avoid-injecting again pendingAssistantToolCalls = nil } } } for _, toolCall := range res.Message.ToolCalls { // continues loop as tools were executed toolsExecuted = true result, content, err := registry.Execute(ctx, toolCall.Function.Name, toolCall.Function.Arguments.ToMap()) if err != nil { errContent := fmt.Sprintf("Error: %v", err) toolErrMsg := store.NewMessage("tool", errContent, nil) toolErrMsg.ToolName = toolCall.Function.Name chat.Messages = append(chat.Messages, toolErrMsg) if err := s.Store.AppendMessage(chat.ID, toolErrMsg); err != nil { return err } // Emit tool error event toolResult := true json.NewEncoder(w).Encode(responses.ChatEvent{ EventName: "tool", Content: &errContent, ToolName: &toolCall.Function.Name, }) flusher.Flush() json.NewEncoder(w).Encode(responses.ChatEvent{ EventName: "tool_result", Content: &errContent, ToolName: &toolCall.Function.Name, ToolResult: &toolResult, ToolResultData: nil, // No result data for errors }) flusher.Flush() continue } var tr json.RawMessage if strings.HasPrefix(toolCall.Function.Name, "browser.search") { // For standalone web_search, ensure the tool message has readable content // so the second-pass model can consume results, while keeping browser state flow intact. // We still persist tool msg with content below. // (No browser state update needed for standalone.) 
} else if strings.HasPrefix(toolCall.Function.Name, "browser") { stateBytes, err := json.Marshal(browser.State()) if err != nil { return fmt.Errorf("failed to marshal browser state: %w", err) } if err := s.Store.UpdateChatBrowserState(chat.ID, json.RawMessage(stateBytes)); err != nil { return fmt.Errorf("failed to persist browser state to chat: %w", err) } // tool result is not added to the tool message for the browser tool } else { var err error tr, err = json.Marshal(result) if err != nil { return fmt.Errorf("failed to marshal tool result: %w", err) } } // ensure tool message sent back to the model has content (if empty, use a sensible fallback) modelContent := content if toolCall.Function.Name == "web_fetch" && modelContent == "" { if str, ok := result.(string); ok { modelContent = str } } if modelContent == "" && len(tr) > 0 { s.log().Debug("tool message empty, sending json result") modelContent = string(tr) } toolMsg := store.NewMessage("tool", modelContent, &store.MessageOptions{ ToolResult: &tr, }) toolMsg.ToolName = toolCall.Function.Name chat.Messages = append(chat.Messages, toolMsg) s.Store.AppendMessage(chat.ID, toolMsg) // Emit tool message event (matching agent pattern) toolResult := true json.NewEncoder(w).Encode(responses.ChatEvent{ EventName: "tool", Content: &content, ToolName: &toolCall.Function.Name, }) flusher.Flush() var toolState any = nil if browser != nil { toolState = browser.State() } // Stream tool result to frontend
go
MIT
626af2d80973270c4d59b8df7153ac47ad67ed7b
2026-01-07T08:35:43.337630Z
true
ollama/ollama
https://github.com/ollama/ollama/blob/626af2d80973270c4d59b8df7153ac47ad67ed7b/app/ui/app.go
app/ui/app.go
//go:build windows || darwin package ui import ( "bytes" "embed" "errors" "io/fs" "net/http" "strings" "time" ) //go:embed app/dist var appFS embed.FS // appHandler returns an HTTP handler that serves the React SPA. // It tries to serve real files first, then falls back to index.html for React Router. func (s *Server) appHandler() http.Handler { // Strip the dist prefix so URLs look clean fsys, _ := fs.Sub(appFS, "app/dist") fileServer := http.FileServer(http.FS(fsys)) return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { p := strings.TrimPrefix(r.URL.Path, "/") if _, err := fsys.Open(p); err == nil { // Serve the file directly fileServer.ServeHTTP(w, r) return } // Fallback – serve index.html for unknown paths so React Router works data, err := fs.ReadFile(fsys, "index.html") if err != nil { if errors.Is(err, fs.ErrNotExist) { http.NotFound(w, r) } else { http.Error(w, "Internal Server Error", http.StatusInternalServerError) } return } http.ServeContent(w, r, "index.html", time.Time{}, bytes.NewReader(data)) }) }
go
MIT
626af2d80973270c4d59b8df7153ac47ad67ed7b
2026-01-07T08:35:43.337630Z
false
ollama/ollama
https://github.com/ollama/ollama/blob/626af2d80973270c4d59b8df7153ac47ad67ed7b/app/ui/responses/types.go
app/ui/responses/types.go
//go:build windows || darwin package responses import ( "time" "github.com/ollama/ollama/app/store" "github.com/ollama/ollama/types/model" ) type ChatInfo struct { ID string `json:"id"` Title string `json:"title"` UserExcerpt string `json:"userExcerpt"` CreatedAt time.Time `json:"createdAt" ts_type:"Date" ts_transform:"new Date(__VALUE__)"` UpdatedAt time.Time `json:"updatedAt" ts_type:"Date" ts_transform:"new Date(__VALUE__)"` } type ChatsResponse struct { ChatInfos []ChatInfo `json:"chatInfos"` } type ChatResponse struct { Chat store.Chat `json:"chat"` } type Model struct { Model string `json:"model"` Digest string `json:"digest,omitempty"` ModifiedAt *time.Time `json:"modified_at,omitempty"` } type ModelsResponse struct { Models []Model `json:"models"` } type InferenceCompute struct { Library string `json:"library"` Variant string `json:"variant"` Compute string `json:"compute"` Driver string `json:"driver"` Name string `json:"name"` VRAM string `json:"vram"` } type InferenceComputeResponse struct { InferenceComputes []InferenceCompute `json:"inferenceComputes"` } type ModelCapabilitiesResponse struct { Capabilities []model.Capability `json:"capabilities"` } // ChatEvent is for regular chat messages and assistant interactions type ChatEvent struct { EventName string `json:"eventName" ts_type:"\"chat\" | \"thinking\" | \"assistant_with_tools\" | \"tool_call\" | \"tool\" | \"tool_result\" | \"done\" | \"chat_created\""` // Chat/Assistant message fields Content *string `json:"content,omitempty"` Thinking *string `json:"thinking,omitempty"` ThinkingTimeStart *time.Time `json:"thinkingTimeStart,omitempty" ts_type:"Date | undefined" ts_transform:"__VALUE__ && new Date(__VALUE__)"` ThinkingTimeEnd *time.Time `json:"thinkingTimeEnd,omitempty" ts_type:"Date | undefined" ts_transform:"__VALUE__ && new Date(__VALUE__)"` // Tool-related fields ToolCalls []store.ToolCall `json:"toolCalls,omitempty"` ToolCall *store.ToolCall `json:"toolCall,omitempty"` ToolName *string 
`json:"toolName,omitempty"` ToolResult *bool `json:"toolResult,omitempty"` ToolResultData any `json:"toolResultData,omitempty"` // Chat creation fields ChatID *string `json:"chatId,omitempty"` // Tool state field from the new code ToolState any `json:"toolState,omitempty"` } // DownloadEvent is for model download progress type DownloadEvent struct { EventName string `json:"eventName" ts_type:"\"download\""` Total int64 `json:"total" ts_type:"number"` Completed int64 `json:"completed" ts_type:"number"` Done bool `json:"done" ts_type:"boolean"` } // ErrorEvent is for error messages type ErrorEvent struct { EventName string `json:"eventName" ts_type:"\"error\""` Error string `json:"error"` Code string `json:"code,omitempty"` // Optional error code for different error types Details string `json:"details,omitempty"` // Optional additional details } type SettingsResponse struct { Settings store.Settings `json:"settings"` } type HealthResponse struct { Healthy bool `json:"healthy"` } type User struct { ID string `json:"id"` Email string `json:"email"` Name string `json:"name"` Bio string `json:"bio,omitempty"` AvatarURL string `json:"avatarurl,omitempty"` FirstName string `json:"firstname,omitempty"` LastName string `json:"lastname,omitempty"` Plan string `json:"plan,omitempty"` } type Attachment struct { Filename string `json:"filename"` Data string `json:"data,omitempty"` // omitempty = optional, no data = existing file reference } type ChatRequest struct { Model string `json:"model"` Prompt string `json:"prompt"` Index *int `json:"index,omitempty"` Attachments []Attachment `json:"attachments,omitempty"` WebSearch *bool `json:"web_search,omitempty"` FileTools *bool `json:"file_tools,omitempty"` ForceUpdate bool `json:"forceUpdate,omitempty"` Think any `json:"think,omitempty"` } type Error struct { Error string `json:"error"` } type ModelUpstreamResponse struct { Digest string `json:"digest,omitempty"` PushTime int64 `json:"pushTime"` Error string 
`json:"error,omitempty"` } // Serializable data for the browser state type BrowserStateData struct { PageStack []string `json:"page_stack"` // Sequential list of page URLs ViewTokens int `json:"view_tokens"` // Number of tokens to show in viewport URLToPage map[string]*Page `json:"url_to_page"` // URL to page contents } // Page represents the contents of a page type Page struct { URL string `json:"url"` Title string `json:"title"` Text string `json:"text"` Lines []string `json:"lines"` Links map[int]string `json:"links,omitempty" ts_type:"Record<number, string>"` FetchedAt time.Time `json:"fetched_at"` }
go
MIT
626af2d80973270c4d59b8df7153ac47ad67ed7b
2026-01-07T08:35:43.337630Z
false