package cmd

import (
	"bufio"
	"context"
	"crypto/ed25519"
	"crypto/rand"
	"encoding/base64"
	"encoding/json"
	"encoding/pem"
	"errors"
	"fmt"
	"io"
	"log"
	"math"
	"net"
	"net/http"
	"net/url"
	"os"
	"os/signal"
	"path/filepath"
	"runtime"
	"slices"
	"sort"
	"strconv"
	"strings"
	"sync/atomic"
	"syscall"
	"time"

	"github.com/containerd/console"
	"github.com/mattn/go-runewidth"
	"github.com/olekukonko/tablewriter"
	"github.com/spf13/cobra"
	"golang.org/x/crypto/ssh"
	"golang.org/x/sync/errgroup"
	"golang.org/x/term"

	"github.com/ollama/ollama/api"
	"github.com/ollama/ollama/auth"
	"github.com/ollama/ollama/envconfig"
	"github.com/ollama/ollama/format"
	"github.com/ollama/ollama/parser"
	"github.com/ollama/ollama/progress"
	"github.com/ollama/ollama/readline"
	"github.com/ollama/ollama/runner"
	"github.com/ollama/ollama/server"
	"github.com/ollama/ollama/types/model"
	"github.com/ollama/ollama/types/syncmap"
	"github.com/ollama/ollama/version"
)

const ConnectInstructions = "To sign in, navigate to:\n https://ollama.com/connect?name=%s&key=%s\n\n"

// ensureThinkingSupport emits a warning if the model does not advertise thinking support
func ensureThinkingSupport(ctx context.Context, client *api.Client, name string) {
	if name == "" {
		return
	}
	resp, err := client.Show(ctx, &api.ShowRequest{Model: name})
	if err != nil {
		return
	}

	if slices.Contains(resp.Capabilities, model.CapabilityThinking) {
		return
	}
	fmt.Fprintf(os.Stderr, "warning: model %q does not support thinking output\n", name)
}

var errModelfileNotFound = errors.New("specified Modelfile wasn't found")
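
// getModelfileName resolves the --file flag (defaulting to "Modelfile") to an
// absolute path and stats it, returning an error if the file cannot be found.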
func getModelfileName(cmd *cobra.Command) (string, error) {
	filename, _ := cmd.Flags().GetString("file")

	if filename == "" {
		filename = "Modelfile"
	}

	absName, err := filepath.Abs(filename)
	if err != nil {
		return "", err
	}

	_, err = os.Stat(absName)
	if err != nil {
		return "", err
	}

	return absName, nil
}
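
// CreateHandler implements "ollama create": it parses the Modelfile (or defaults
// to "FROM ." when none is given), uploads referenced files and adapters as blobs,
// and streams create progress from the server.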
func CreateHandler(cmd *cobra.Command, args []string) error {
	p := progress.NewProgress(os.Stderr)
	defer p.Stop()

	var reader io.Reader

	filename, err := getModelfileName(cmd)
	if os.IsNotExist(err) {
		if filename == "" {
			reader = strings.NewReader("FROM .\n")
		} else {
			return errModelfileNotFound
		}
	} else if err != nil {
		return err
	} else {
		f, err := os.Open(filename)
		if err != nil {
			return err
		}

		reader = f
		defer f.Close()
	}

	modelfile, err := parser.ParseFile(reader)
	if err != nil {
		return err
	}

	status := "gathering model components"
	spinner := progress.NewSpinner(status)
	p.Add(status, spinner)

	req, err := modelfile.CreateRequest(filepath.Dir(filename))
	if err != nil {
		return err
	}
	spinner.Stop()

	req.Model = args[0]
	quantize, _ := cmd.Flags().GetString("quantize")
	if quantize != "" {
		req.Quantize = quantize
	}

	client, err := api.ClientFromEnvironment()
	if err != nil {
		return err
	}

	var g errgroup.Group
	g.SetLimit(max(runtime.GOMAXPROCS(0)-1, 1))

	files := syncmap.NewSyncMap[string, string]()
	for f, digest := range req.Files {
		g.Go(func() error {
			if _, err := createBlob(cmd, client, f, digest, p); err != nil {
				return err
			}

			// TODO: this is incorrect since the file might be in a subdirectory
			// instead this should take the path relative to the model directory
			// but the current implementation does not allow this
			files.Store(filepath.Base(f), digest)
			return nil
		})
	}

	adapters := syncmap.NewSyncMap[string, string]()
	for f, digest := range req.Adapters {
		g.Go(func() error {
			if _, err := createBlob(cmd, client, f, digest, p); err != nil {
				return err
			}

			// TODO: same here
			adapters.Store(filepath.Base(f), digest)
			return nil
		})
	}

	if err := g.Wait(); err != nil {
		return err
	}

	req.Files = files.Items()
	req.Adapters = adapters.Items()

	bars := make(map[string]*progress.Bar)
	fn := func(resp api.ProgressResponse) error {
		if resp.Digest != "" {
			bar, ok := bars[resp.Digest]
			if !ok {
				msg := resp.Status
				if msg == "" {
					msg = fmt.Sprintf("pulling %s...", resp.Digest[7:19])
				}
				bar = progress.NewBar(msg, resp.Total, resp.Completed)
				bars[resp.Digest] = bar
				p.Add(resp.Digest, bar)
			}

			bar.Set(resp.Completed)
		} else if status != resp.Status {
			spinner.Stop()

			status = resp.Status
			spinner = progress.NewSpinner(status)
			p.Add(status, spinner)
		}

		return nil
	}

	if err := client.Create(cmd.Context(), req, fn); err != nil {
		if strings.Contains(err.Error(), "path or Modelfile are required") {
			return fmt.Errorf("the ollama server must be updated to use `ollama create` with this client")
		}
		return err
	}

	return nil
}
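
// createBlob uploads a single local file to the server's blob store, reporting
// copy progress through a spinner while the file is streamed.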
func createBlob(cmd *cobra.Command, client *api.Client, path string, digest string, p *progress.Progress) (string, error) {
	realPath, err := filepath.EvalSymlinks(path)
	if err != nil {
		return "", err
	}

	bin, err := os.Open(realPath)
	if err != nil {
		return "", err
	}
	defer bin.Close()

	// Get file info to retrieve the size
	fileInfo, err := bin.Stat()
	if err != nil {
		return "", err
	}
	fileSize := fileInfo.Size()

	var pw progressWriter
	status := fmt.Sprintf("copying file %s 0%%", digest)
	spinner := progress.NewSpinner(status)
	p.Add(status, spinner)
	defer spinner.Stop()

	done := make(chan struct{})
	defer close(done)

	go func() {
		ticker := time.NewTicker(60 * time.Millisecond)
		defer ticker.Stop()
		for {
			select {
			case <-ticker.C:
				spinner.SetMessage(fmt.Sprintf("copying file %s %d%%", digest, int(100*pw.n.Load()/fileSize)))
			case <-done:
				spinner.SetMessage(fmt.Sprintf("copying file %s 100%%", digest))
				return
			}
		}
	}()

	if err := client.CreateBlob(cmd.Context(), digest, io.TeeReader(bin, &pw)); err != nil {
		return "", err
	}
	return digest, nil
}
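
// progressWriter counts the bytes written through it so the copy progress
// goroutine in createBlob can compute a percentage; it never buffers the data.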
type progressWriter struct {
	n atomic.Int64
}

func (w *progressWriter) Write(p []byte) (n int, err error) {
	w.n.Add(int64(len(p)))
	return len(p), nil
}
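
// loadOrUnloadModel issues an empty generate request, which loads the model into
// memory (or unloads it when KeepAlive is zero) and reports remote connections.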
func loadOrUnloadModel(cmd *cobra.Command, opts *runOptions) error {
	p := progress.NewProgress(os.Stderr)
	defer p.StopAndClear()

	spinner := progress.NewSpinner("")
	p.Add("", spinner)

	client, err := api.ClientFromEnvironment()
	if err != nil {
		return err
	}

	req := &api.GenerateRequest{
		Model:     opts.Model,
		KeepAlive: opts.KeepAlive,

		// pass Think here so we fail before getting to the chat prompt if the model doesn't support it
		Think: opts.Think,
	}

	return client.Generate(cmd.Context(), req, func(r api.GenerateResponse) error {
		if r.RemoteModel != "" && opts.ShowConnect {
			p.StopAndClear()
			if strings.HasPrefix(r.RemoteHost, "https://ollama.com") {
				fmt.Fprintf(os.Stderr, "Connecting to '%s' on 'ollama.com' ⚡\n", r.RemoteModel)
			} else {
				fmt.Fprintf(os.Stderr, "Connecting to '%s' on '%s'\n", r.RemoteModel, r.RemoteHost)
			}
		}
		return nil
	})
}
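
// StopHandler implements "ollama stop" by requesting a zero keep-alive for the
// named model.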
func StopHandler(cmd *cobra.Command, args []string) error {
	opts := &runOptions{
		Model:     args[0],
		KeepAlive: &api.Duration{Duration: 0},
	}
	if err := loadOrUnloadModel(cmd, opts); err != nil {
		if strings.Contains(err.Error(), "not found") {
			return fmt.Errorf("couldn't find model \"%s\" to stop", args[0])
		}
		return err
	}
	return nil
}
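
// RunHandler implements "ollama run": it resolves flags, pulls the model if it is
// not present locally, then either runs a one-shot prompt or starts an interactive
// session.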
func RunHandler(cmd *cobra.Command, args []string) error {
	interactive := true

	opts := runOptions{
		Model:       args[0],
		WordWrap:    os.Getenv("TERM") == "xterm-256color",
		Options:     map[string]any{},
		ShowConnect: true,
	}

	format, err := cmd.Flags().GetString("format")
	if err != nil {
		return err
	}
	opts.Format = format

	thinkFlag := cmd.Flags().Lookup("think")
	if thinkFlag.Changed {
		thinkStr, err := cmd.Flags().GetString("think")
		if err != nil {
			return err
		}

		// Handle different values for --think
		switch thinkStr {
		case "", "true":
			// --think or --think=true
			opts.Think = &api.ThinkValue{Value: true}
		case "false":
			opts.Think = &api.ThinkValue{Value: false}
		case "high", "medium", "low":
			opts.Think = &api.ThinkValue{Value: thinkStr}
		default:
			return fmt.Errorf("invalid value for --think: %q (must be true, false, high, medium, or low)", thinkStr)
		}
	} else {
		opts.Think = nil
	}

	hidethinking, err := cmd.Flags().GetBool("hidethinking")
	if err != nil {
		return err
	}
	opts.HideThinking = hidethinking

	keepAlive, err := cmd.Flags().GetString("keepalive")
	if err != nil {
		return err
	}
	if keepAlive != "" {
		d, err := time.ParseDuration(keepAlive)
		if err != nil {
			return err
		}
		opts.KeepAlive = &api.Duration{Duration: d}
	}

	prompts := args[1:]
	// prepend stdin to the prompt if provided
	if !term.IsTerminal(int(os.Stdin.Fd())) {
		in, err := io.ReadAll(os.Stdin)
		if err != nil {
			return err
		}

		prompts = append([]string{string(in)}, prompts...)
		opts.ShowConnect = false
		opts.WordWrap = false
		interactive = false
	}
	opts.Prompt = strings.Join(prompts, " ")
	if len(prompts) > 0 {
		interactive = false
	}

	// Be quiet if we're redirecting to a pipe or file
	if !term.IsTerminal(int(os.Stdout.Fd())) {
		interactive = false
	}

	nowrap, err := cmd.Flags().GetBool("nowordwrap")
	if err != nil {
		return err
	}
	opts.WordWrap = !nowrap

	// Fill out the rest of the options based on information about the
	// model.
	client, err := api.ClientFromEnvironment()
	if err != nil {
		return err
	}

	name := args[0]
	info, err := func() (*api.ShowResponse, error) {
		showReq := &api.ShowRequest{Name: name}
		info, err := client.Show(cmd.Context(), showReq)
		var se api.StatusError
		if errors.As(err, &se) && se.StatusCode == http.StatusNotFound {
			if err := PullHandler(cmd, []string{name}); err != nil {
				return nil, err
			}
			return client.Show(cmd.Context(), &api.ShowRequest{Name: name})
		}
		return info, err
	}()
	if err != nil {
		return err
	}

	opts.Think, err = inferThinkingOption(&info.Capabilities, &opts, thinkFlag.Changed)
	if err != nil {
		return err
	}

	opts.MultiModal = slices.Contains(info.Capabilities, model.CapabilityVision)

	// TODO: remove the projector info and vision info checks below,
	// these are left in for backwards compatibility with older servers
	// that don't have the capabilities field in the model info
	if len(info.ProjectorInfo) != 0 {
		opts.MultiModal = true
	}
	for k := range info.ModelInfo {
		if strings.Contains(k, ".vision.") {
			opts.MultiModal = true
			break
		}
	}

	opts.ParentModel = info.Details.ParentModel

	if interactive {
		if err := loadOrUnloadModel(cmd, &opts); err != nil {
			var sErr api.AuthorizationError
			if errors.As(err, &sErr) && sErr.StatusCode == http.StatusUnauthorized {
				pubKey, pkErr := auth.GetPublicKey()
				if pkErr != nil {
					return pkErr
				}
				// the server and the client both have the same public key
				if pubKey == sErr.PublicKey {
					h, _ := os.Hostname()
					encKey := base64.RawURLEncoding.EncodeToString([]byte(pubKey))
					fmt.Printf("You need to be signed in to Ollama to run Cloud models.\n\n")
					fmt.Printf(ConnectInstructions, url.PathEscape(h), encKey)
				}
				return nil
			}
			return err
		}

		for _, msg := range info.Messages {
			switch msg.Role {
			case "user":
				fmt.Printf(">>> %s\n", msg.Content)
			case "assistant":
				state := &displayResponseState{}
				displayResponse(msg.Content, opts.WordWrap, state)
				fmt.Println()
				fmt.Println()
			}
		}

		return generateInteractive(cmd, opts)
	}

	return generate(cmd, opts)
}
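
// SigninHandler prints the sign-in instructions for ollama.com unless the client
// is already signed in.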
func SigninHandler(cmd *cobra.Command, args []string) error {
	client, err := api.ClientFromEnvironment()
	if err != nil {
		return err
	}

	user, err := client.Whoami(cmd.Context())
	if err != nil {
		return err
	}

	if user != nil && user.Name != "" {
		fmt.Printf("You are already signed in as user '%s'\n", user.Name)
		fmt.Println()
		return nil
	}

	pubKey, pkErr := auth.GetPublicKey()
	if pkErr != nil {
		return pkErr
	}
	encKey := base64.RawURLEncoding.EncodeToString([]byte(pubKey))
	h, _ := os.Hostname()

	fmt.Printf(ConnectInstructions, url.PathEscape(h), encKey)

	return nil
}
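
// SignoutHandler signs the local public key out of ollama.com.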
func SignoutHandler(cmd *cobra.Command, args []string) error {
	pubKey, pkErr := auth.GetPublicKey()
	if pkErr != nil {
		return pkErr
	}
	encKey := base64.RawURLEncoding.EncodeToString([]byte(pubKey))

	client, err := api.ClientFromEnvironment()
	if err != nil {
		return err
	}

	err = client.Signout(cmd.Context(), encKey)
	if err != nil {
		return err
	}
	fmt.Println("You have signed out of ollama.com")
	fmt.Println()

	return nil
}
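
// PushHandler implements "ollama push": it uploads a model to a registry with
// per-layer progress bars and prints where the model can be found.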
func PushHandler(cmd *cobra.Command, args []string) error {
	client, err := api.ClientFromEnvironment()
	if err != nil {
		return err
	}

	insecure, err := cmd.Flags().GetBool("insecure")
	if err != nil {
		return err
	}

	p := progress.NewProgress(os.Stderr)
	defer p.Stop()

	bars := make(map[string]*progress.Bar)
	var status string
	var spinner *progress.Spinner

	fn := func(resp api.ProgressResponse) error {
		if resp.Digest != "" {
			if spinner != nil {
				spinner.Stop()
			}

			bar, ok := bars[resp.Digest]
			if !ok {
				bar = progress.NewBar(fmt.Sprintf("pushing %s...", resp.Digest[7:19]), resp.Total, resp.Completed)
				bars[resp.Digest] = bar
				p.Add(resp.Digest, bar)
			}

			bar.Set(resp.Completed)
		} else if status != resp.Status {
			if spinner != nil {
				spinner.Stop()
			}

			status = resp.Status
			spinner = progress.NewSpinner(status)
			p.Add(status, spinner)
		}

		return nil
	}

	request := api.PushRequest{Name: args[0], Insecure: insecure}

	n := model.ParseName(args[0])
	if err := client.Push(cmd.Context(), &request, fn); err != nil {
		if spinner != nil {
			spinner.Stop()
		}
		errStr := strings.ToLower(err.Error())
		if strings.Contains(errStr, "access denied") || strings.Contains(errStr, "unauthorized") {
			return errors.New("you are not authorized to push to this namespace, create the model under a namespace you own")
		}
		return err
	}

	p.Stop()
	spinner.Stop()

	destination := n.String()
	if strings.HasSuffix(n.Host, ".ollama.ai") || strings.HasSuffix(n.Host, ".ollama.com") {
		destination = "https://ollama.com/" + strings.TrimSuffix(n.DisplayShortest(), ":latest")
	}
	fmt.Printf("\nYou can find your model at:\n\n")
	fmt.Printf("\t%s\n", destination)

	return nil
}
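
// ListHandler implements "ollama list", printing local models as a table,
// optionally filtered by a name prefix.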
func ListHandler(cmd *cobra.Command, args []string) error {
	client, err := api.ClientFromEnvironment()
	if err != nil {
		return err
	}

	models, err := client.List(cmd.Context())
	if err != nil {
		return err
	}

	var data [][]string

	for _, m := range models.Models {
		if len(args) == 0 || strings.HasPrefix(strings.ToLower(m.Name), strings.ToLower(args[0])) {
			var size string
			if m.RemoteModel != "" {
				size = "-"
			} else {
				size = format.HumanBytes(m.Size)
			}
			data = append(data, []string{m.Name, m.Digest[:12], size, format.HumanTime(m.ModifiedAt, "Never")})
		}
	}

	table := tablewriter.NewWriter(os.Stdout)
	table.SetHeader([]string{"NAME", "ID", "SIZE", "MODIFIED"})
	table.SetHeaderAlignment(tablewriter.ALIGN_LEFT)
	table.SetAlignment(tablewriter.ALIGN_LEFT)
	table.SetHeaderLine(false)
	table.SetBorder(false)
	table.SetNoWhiteSpace(true)
	table.SetTablePadding("    ")
	table.AppendBulk(data)
	table.Render()

	return nil
}
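
// ListRunningHandler lists the models currently loaded into memory, showing their
// CPU/GPU split, context length, and expiry time.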
func ListRunningHandler(cmd *cobra.Command, args []string) error {
	client, err := api.ClientFromEnvironment()
	if err != nil {
		return err
	}

	models, err := client.ListRunning(cmd.Context())
	if err != nil {
		return err
	}

	var data [][]string

	for _, m := range models.Models {
		if len(args) == 0 || strings.HasPrefix(m.Name, args[0]) {
			var procStr string
			switch {
			case m.SizeVRAM == 0:
				procStr = "100% CPU"
			case m.SizeVRAM == m.Size:
				procStr = "100% GPU"
			case m.SizeVRAM > m.Size || m.Size == 0:
				procStr = "Unknown"
			default:
				sizeCPU := m.Size - m.SizeVRAM
				cpuPercent := math.Round(float64(sizeCPU) / float64(m.Size) * 100)
				procStr = fmt.Sprintf("%d%%/%d%% CPU/GPU", int(cpuPercent), int(100-cpuPercent))
			}

			var until string
			delta := time.Since(m.ExpiresAt)
			if delta > 0 {
				until = "Stopping..."
			} else {
				until = format.HumanTime(m.ExpiresAt, "Never")
			}

			ctxStr := strconv.Itoa(m.ContextLength)
			data = append(data, []string{m.Name, m.Digest[:12], format.HumanBytes(m.Size), procStr, ctxStr, until})
		}
	}

	table := tablewriter.NewWriter(os.Stdout)
	table.SetHeader([]string{"NAME", "ID", "SIZE", "PROCESSOR", "CONTEXT", "UNTIL"})
	table.SetHeaderAlignment(tablewriter.ALIGN_LEFT)
	table.SetAlignment(tablewriter.ALIGN_LEFT)
	table.SetHeaderLine(false)
	table.SetBorder(false)
	table.SetNoWhiteSpace(true)
	table.SetTablePadding("    ")
	table.AppendBulk(data)
	table.Render()

	return nil
}
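
// DeleteHandler implements "ollama rm": it tries to unload the first named model,
// then deletes every named model from local storage.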
func DeleteHandler(cmd *cobra.Command, args []string) error {
	client, err := api.ClientFromEnvironment()
	if err != nil {
		return err
	}

	// Unload the model if it's running before deletion
	opts := &runOptions{
		Model:     args[0],
		KeepAlive: &api.Duration{Duration: 0},
	}

	if err := loadOrUnloadModel(cmd, opts); err != nil {
		if !strings.Contains(strings.ToLower(err.Error()), "not found") {
			fmt.Fprintf(os.Stderr, "Warning: unable to stop model '%s'\n", args[0])
		}
	}

	for _, name := range args {
		req := api.DeleteRequest{Name: name}
		if err := client.Delete(cmd.Context(), &req); err != nil {
			return err
		}
		fmt.Printf("deleted '%s'\n", name)
	}
	return nil
}
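
// ShowHandler implements "ollama show": with one of the field flags set it prints
// just that field, otherwise it renders the full model summary via showInfo.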
func ShowHandler(cmd *cobra.Command, args []string) error {
	client, err := api.ClientFromEnvironment()
	if err != nil {
		return err
	}

	license, errLicense := cmd.Flags().GetBool("license")
	modelfile, errModelfile := cmd.Flags().GetBool("modelfile")
	parameters, errParams := cmd.Flags().GetBool("parameters")
	system, errSystem := cmd.Flags().GetBool("system")
	template, errTemplate := cmd.Flags().GetBool("template")
	verbose, errVerbose := cmd.Flags().GetBool("verbose")

	for _, boolErr := range []error{errLicense, errModelfile, errParams, errSystem, errTemplate, errVerbose} {
		if boolErr != nil {
			return errors.New("error retrieving flags")
		}
	}

	flagsSet := 0
	showType := ""

	if license {
		flagsSet++
		showType = "license"
	}

	if modelfile {
		flagsSet++
		showType = "modelfile"
	}

	if parameters {
		flagsSet++
		showType = "parameters"
	}

	if system {
		flagsSet++
		showType = "system"
	}

	if template {
		flagsSet++
		showType = "template"
	}

	if flagsSet > 1 {
		return errors.New("only one of '--license', '--modelfile', '--parameters', '--system', or '--template' can be specified")
	}

	req := api.ShowRequest{Name: args[0], Verbose: verbose}
	resp, err := client.Show(cmd.Context(), &req)
	if err != nil {
		return err
	}

	if flagsSet == 1 {
		switch showType {
		case "license":
			fmt.Println(resp.License)
		case "modelfile":
			fmt.Println(resp.Modelfile)
		case "parameters":
			fmt.Println(resp.Parameters)
		case "system":
			fmt.Print(resp.System)
		case "template":
			fmt.Print(resp.Template)
		}

		return nil
	}

	return showInfo(resp, verbose, os.Stdout)
}
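
// showInfo renders a ShowResponse as a series of small left-aligned tables; verbose
// mode additionally prints model metadata and tensor listings.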
func showInfo(resp *api.ShowResponse, verbose bool, w io.Writer) error {
	tableRender := func(header string, rows func() [][]string) {
		fmt.Fprintln(w, " ", header)
		table := tablewriter.NewWriter(w)
		table.SetAlignment(tablewriter.ALIGN_LEFT)
		table.SetBorder(false)
		table.SetNoWhiteSpace(true)
		table.SetTablePadding("    ")

		switch header {
		case "Template", "System", "License":
			table.SetColWidth(100)
		}

		table.AppendBulk(rows())
		table.Render()
		fmt.Fprintln(w)
	}

	tableRender("Model", func() (rows [][]string) {
		if resp.RemoteHost != "" {
			rows = append(rows, []string{"", "Remote model", resp.RemoteModel})
			rows = append(rows, []string{"", "Remote URL", resp.RemoteHost})
		}
		if resp.ModelInfo != nil {
			arch := resp.ModelInfo["general.architecture"].(string)
			rows = append(rows, []string{"", "architecture", arch})

			var paramStr string
			if resp.Details.ParameterSize != "" {
				paramStr = resp.Details.ParameterSize
			} else if v, ok := resp.ModelInfo["general.parameter_count"]; ok {
				if f, ok := v.(float64); ok {
					paramStr = format.HumanNumber(uint64(f))
				}
			}
			rows = append(rows, []string{"", "parameters", paramStr})

			if v, ok := resp.ModelInfo[fmt.Sprintf("%s.context_length", arch)]; ok {
				if f, ok := v.(float64); ok {
					rows = append(rows, []string{"", "context length", strconv.FormatFloat(f, 'f', -1, 64)})
				}
			}
			if v, ok := resp.ModelInfo[fmt.Sprintf("%s.embedding_length", arch)]; ok {
				if f, ok := v.(float64); ok {
					rows = append(rows, []string{"", "embedding length", strconv.FormatFloat(f, 'f', -1, 64)})
				}
			}
		} else {
			rows = append(rows, []string{"", "architecture", resp.Details.Family})
			rows = append(rows, []string{"", "parameters", resp.Details.ParameterSize})
		}
		rows = append(rows, []string{"", "quantization", resp.Details.QuantizationLevel})
		return
	})

	if len(resp.Capabilities) > 0 {
		tableRender("Capabilities", func() (rows [][]string) {
			for _, capability := range resp.Capabilities {
				rows = append(rows, []string{"", capability.String()})
			}
			return
		})
	}

	if resp.ProjectorInfo != nil {
		tableRender("Projector", func() (rows [][]string) {
			arch := resp.ProjectorInfo["general.architecture"].(string)
			rows = append(rows, []string{"", "architecture", arch})
			rows = append(rows, []string{"", "parameters", format.HumanNumber(uint64(resp.ProjectorInfo["general.parameter_count"].(float64)))})
			rows = append(rows, []string{"", "embedding length", strconv.FormatFloat(resp.ProjectorInfo[fmt.Sprintf("%s.vision.embedding_length", arch)].(float64), 'f', -1, 64)})
			rows = append(rows, []string{"", "dimensions", strconv.FormatFloat(resp.ProjectorInfo[fmt.Sprintf("%s.vision.projection_dim", arch)].(float64), 'f', -1, 64)})
			return
		})
	}

	if resp.Parameters != "" {
		tableRender("Parameters", func() (rows [][]string) {
			scanner := bufio.NewScanner(strings.NewReader(resp.Parameters))
			for scanner.Scan() {
				if text := scanner.Text(); text != "" {
					rows = append(rows, append([]string{""}, strings.Fields(text)...))
				}
			}
			return
		})
	}

	if resp.ModelInfo != nil && verbose {
		tableRender("Metadata", func() (rows [][]string) {
			keys := make([]string, 0, len(resp.ModelInfo))
			for k := range resp.ModelInfo {
				keys = append(keys, k)
			}
			sort.Strings(keys)

			for _, k := range keys {
				var v string
				switch vData := resp.ModelInfo[k].(type) {
				case bool:
					v = fmt.Sprintf("%t", vData)
				case string:
					v = vData
				case float64:
					v = fmt.Sprintf("%g", vData)
				case []any:
					targetWidth := 10 // Small width where we are displaying the data in a column
					var itemsToShow int
					totalWidth := 1 // Start with 1 for opening bracket

					// Find how many we can fit
					for i := range vData {
						itemStr := fmt.Sprintf("%v", vData[i])
						width := runewidth.StringWidth(itemStr)

						// Add separator width (", ") for all items except the first
						if i > 0 {
							width += 2
						}

						// Check if adding this item would exceed our width limit
						if totalWidth+width > targetWidth && i > 0 {
							break
						}

						totalWidth += width
						itemsToShow++
					}

					// Format the output
					if itemsToShow < len(vData) {
						v = fmt.Sprintf("%v", vData[:itemsToShow])
						v = strings.TrimSuffix(v, "]")
						v += fmt.Sprintf(" ...+%d more]", len(vData)-itemsToShow)
					} else {
						v = fmt.Sprintf("%v", vData)
					}
				default:
					v = fmt.Sprintf("%T", vData)
				}
				rows = append(rows, []string{"", k, v})
			}
			return
		})
	}

	if len(resp.Tensors) > 0 && verbose {
		tableRender("Tensors", func() (rows [][]string) {
			for _, t := range resp.Tensors {
				rows = append(rows, []string{"", t.Name, t.Type, fmt.Sprint(t.Shape)})
			}
			return
		})
	}

	head := func(s string, n int) (rows [][]string) {
		scanner := bufio.NewScanner(strings.NewReader(s))
		count := 0
		for scanner.Scan() {
			text := strings.TrimSpace(scanner.Text())
			if text == "" {
				continue
			}
			count++
			if n < 0 || count <= n {
				rows = append(rows, []string{"", text})
			}
		}
		if n >= 0 && count > n {
			rows = append(rows, []string{"", "..."})
		}
		return
	}

	if resp.System != "" {
		tableRender("System", func() [][]string {
			return head(resp.System, 2)
		})
	}

	if resp.License != "" {
		tableRender("License", func() [][]string {
			return head(resp.License, 2)
		})
	}

	return nil
}
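
// CopyHandler implements "ollama cp", copying a model to a new name.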
func CopyHandler(cmd *cobra.Command, args []string) error {
	client, err := api.ClientFromEnvironment()
	if err != nil {
		return err
	}

	req := api.CopyRequest{Source: args[0], Destination: args[1]}
	if err := client.Copy(cmd.Context(), &req); err != nil {
		return err
	}
	fmt.Printf("copied '%s' to '%s'\n", args[0], args[1])
	return nil
}
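
// PullHandler implements "ollama pull", downloading a model with one progress bar
// per layer.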
func PullHandler(cmd *cobra.Command, args []string) error {
	insecure, err := cmd.Flags().GetBool("insecure")
	if err != nil {
		return err
	}

	client, err := api.ClientFromEnvironment()
	if err != nil {
		return err
	}

	p := progress.NewProgress(os.Stderr)
	defer p.Stop()

	bars := make(map[string]*progress.Bar)
	var status string
	var spinner *progress.Spinner

	fn := func(resp api.ProgressResponse) error {
		if resp.Digest != "" {
			if resp.Completed == 0 {
				// This is the initial status update for the
				// layer, which the server sends before
				// beginning the download, for clients to
				// compute total size and prepare for
				// downloads, if needed.
				//
				// Skipping this here to avoid showing a 0%
				// progress bar, which *should* clue the user
				// into the fact that many things are being
				// downloaded and that the current active
				// download is not that last. However, in rare
				// cases it seems to be triggering to some, and
				// it isn't worth explaining, so just ignore
				// and regress to the old UI that keeps giving
				// you the "But wait, there is more!" after
				// each "100% done" bar, which is "better."
				return nil
			}

			if spinner != nil {
				spinner.Stop()
			}

			bar, ok := bars[resp.Digest]
			if !ok {
				name, isDigest := strings.CutPrefix(resp.Digest, "sha256:")
				name = strings.TrimSpace(name)
				if isDigest {
					name = name[:min(12, len(name))]
				}
				bar = progress.NewBar(fmt.Sprintf("pulling %s:", name), resp.Total, resp.Completed)
				bars[resp.Digest] = bar
				p.Add(resp.Digest, bar)
			}

			bar.Set(resp.Completed)
		} else if status != resp.Status {
			if spinner != nil {
				spinner.Stop()
			}

			status = resp.Status
			spinner = progress.NewSpinner(status)
			p.Add(status, spinner)
		}

		return nil
	}

	request := api.PullRequest{Name: args[0], Insecure: insecure}
	return client.Pull(cmd.Context(), &request, fn)
}

type generateContextKey string

type runOptions struct {
	Model        string
	ParentModel  string
	Prompt       string
	Messages     []api.Message
	WordWrap     bool
	Format       string
	System       string
	Images       []api.ImageData
	Options      map[string]any
	MultiModal   bool
	KeepAlive    *api.Duration
	Think        *api.ThinkValue
	HideThinking bool
	ShowConnect  bool
}

type displayResponseState struct {
	lineLength int
	wordBuffer string
}
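
// displayResponse writes streamed content to stdout, word-wrapping at the terminal
// width when enabled; wrap state is carried across calls in displayResponseState.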
func displayResponse(content string, wordWrap bool, state *displayResponseState) {
	termWidth, _, _ := term.GetSize(int(os.Stdout.Fd()))
	if wordWrap && termWidth >= 10 {
		for _, ch := range content {
			if state.lineLength+1 > termWidth-5 {
				if runewidth.StringWidth(state.wordBuffer) > termWidth-10 {
					fmt.Printf("%s%c", state.wordBuffer, ch)
					state.wordBuffer = ""
					state.lineLength = 0
					continue
				}

				// backtrack the length of the last word and clear to the end of the line
				a := runewidth.StringWidth(state.wordBuffer)
				if a > 0 {
					fmt.Printf("\x1b[%dD", a)
				}
				fmt.Printf("\x1b[K\n")
				fmt.Printf("%s%c", state.wordBuffer, ch)
				chWidth := runewidth.RuneWidth(ch)

				state.lineLength = runewidth.StringWidth(state.wordBuffer) + chWidth
			} else {
				fmt.Print(string(ch))
				state.lineLength += runewidth.RuneWidth(ch)
				if runewidth.RuneWidth(ch) >= 2 {
					state.wordBuffer = ""
					continue
				}

				switch ch {
				case ' ', '\t':
					state.wordBuffer = ""
				case '\n', '\r':
					state.lineLength = 0
					state.wordBuffer = ""
				default:
					state.wordBuffer += string(ch)
				}
			}
		}
	} else {
		fmt.Printf("%s%s", state.wordBuffer, content)
		if len(state.wordBuffer) > 0 {
			state.wordBuffer = ""
		}
	}
}

func thinkingOutputOpeningText(plainText bool) string {
	text := "Thinking...\n"

	if plainText {
		return text
	}

	return readline.ColorGrey + readline.ColorBold + text + readline.ColorDefault + readline.ColorGrey
}

func thinkingOutputClosingText(plainText bool) string {
	text := "...done thinking.\n\n"

	if plainText {
		return text
	}

	return readline.ColorGrey + readline.ColorBold + text + readline.ColorDefault
}
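
// chat streams one chat completion to the terminal, rendering thinking output and
// tool calls as they arrive, and returns the accumulated assistant message.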
func chat(cmd *cobra.Command, opts runOptions) (*api.Message, error) {
	client, err := api.ClientFromEnvironment()
	if err != nil {
		return nil, err
	}

	p := progress.NewProgress(os.Stderr)
	defer p.StopAndClear()

	spinner := progress.NewSpinner("")
	p.Add("", spinner)

	cancelCtx, cancel := context.WithCancel(cmd.Context())
	defer cancel()

	sigChan := make(chan os.Signal, 1)
	signal.Notify(sigChan, syscall.SIGINT)

	go func() {
		<-sigChan
		cancel()
	}()

	var state *displayResponseState = &displayResponseState{}
	var thinkingContent strings.Builder
	var latest api.ChatResponse
	var fullResponse strings.Builder
	var thinkTagOpened bool = false
	var thinkTagClosed bool = false

	role := "assistant"

	fn := func(response api.ChatResponse) error {
		if response.Message.Content != "" || !opts.HideThinking {
			p.StopAndClear()
		}

		latest = response

		role = response.Message.Role
		if response.Message.Thinking != "" && !opts.HideThinking {
			if !thinkTagOpened {
				fmt.Print(thinkingOutputOpeningText(false))
				thinkTagOpened = true
				thinkTagClosed = false
			}
			thinkingContent.WriteString(response.Message.Thinking)
			displayResponse(response.Message.Thinking, opts.WordWrap, state)
		}

		content := response.Message.Content
		if thinkTagOpened && !thinkTagClosed && (content != "" || len(response.Message.ToolCalls) > 0) {
			if !strings.HasSuffix(thinkingContent.String(), "\n") {
				fmt.Println()
			}
			fmt.Print(thinkingOutputClosingText(false))
			thinkTagOpened = false
			thinkTagClosed = true
			state = &displayResponseState{}
		}
		// purposefully not putting thinking blocks in the response, which would
		// only be needed if we later added tool calling to the cli (they get
		// filtered out anyway since current models don't expect them unless you're
		// about to finish some tool calls)
		fullResponse.WriteString(content)

		if response.Message.ToolCalls != nil {
			toolCalls := response.Message.ToolCalls
			if len(toolCalls) > 0 {
				fmt.Print(renderToolCalls(toolCalls, false))
			}
		}

		displayResponse(content, opts.WordWrap, state)

		return nil
	}

	if opts.Format == "json" {
		opts.Format = `"` + opts.Format + `"`
	}

	req := &api.ChatRequest{
		Model:    opts.Model,
		Messages: opts.Messages,
		Format:   json.RawMessage(opts.Format),
		Options:  opts.Options,
		Think:    opts.Think,
	}

	if opts.KeepAlive != nil {
		req.KeepAlive = opts.KeepAlive
	}

	if err := client.Chat(cancelCtx, req, fn); err != nil {
		if errors.Is(err, context.Canceled) {
			return nil, nil
		}
		// this error should ideally be wrapped properly by the client
		if strings.Contains(err.Error(), "upstream error") {
			p.StopAndClear()
			fmt.Println("An error occurred while processing your message. Please try again.")
			fmt.Println()
			return nil, nil
		}
		return nil, err
	}

	if len(opts.Messages) > 0 {
		fmt.Println()
		fmt.Println()
	}

	verbose, err := cmd.Flags().GetBool("verbose")
	if err != nil {
		return nil, err
	}

	if verbose {
		latest.Summary()
	}

	return &api.Message{Role: role, Content: fullResponse.String()}, nil
}
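
// generate streams a completion for a single prompt and saves the returned context
// on the command so subsequent prompts in the session can reuse it.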
func generate(cmd *cobra.Command, opts runOptions) error {
	client, err := api.ClientFromEnvironment()
	if err != nil {
		return err
	}

	p := progress.NewProgress(os.Stderr)
	defer p.StopAndClear()

	spinner := progress.NewSpinner("")
	p.Add("", spinner)

	var latest api.GenerateResponse

	generateContext, ok := cmd.Context().Value(generateContextKey("context")).([]int)
	if !ok {
		generateContext = []int{}
	}

	ctx, cancel := context.WithCancel(cmd.Context())
	defer cancel()

	sigChan := make(chan os.Signal, 1)
	signal.Notify(sigChan, syscall.SIGINT)

	go func() {
		<-sigChan
		cancel()
	}()

	var state *displayResponseState = &displayResponseState{}
	var thinkingContent strings.Builder
	var thinkTagOpened bool = false
	var thinkTagClosed bool = false

	plainText := !term.IsTerminal(int(os.Stdout.Fd()))

	fn := func(response api.GenerateResponse) error {
		latest = response
		content := response.Response

		if response.Response != "" || !opts.HideThinking {
			p.StopAndClear()
		}

		if response.Thinking != "" && !opts.HideThinking {
			if !thinkTagOpened {
				fmt.Print(thinkingOutputOpeningText(plainText))
				thinkTagOpened = true
				thinkTagClosed = false
			}
			thinkingContent.WriteString(response.Thinking)
			displayResponse(response.Thinking, opts.WordWrap, state)
		}

		if thinkTagOpened && !thinkTagClosed && (content != "" || len(response.ToolCalls) > 0) {
			if !strings.HasSuffix(thinkingContent.String(), "\n") {
				fmt.Println()
			}
			fmt.Print(thinkingOutputClosingText(plainText))
			thinkTagOpened = false
			thinkTagClosed = true
			state = &displayResponseState{}
		}

		displayResponse(content, opts.WordWrap, state)

		if response.ToolCalls != nil {
			toolCalls := response.ToolCalls
			if len(toolCalls) > 0 {
				fmt.Print(renderToolCalls(toolCalls, plainText))
			}
		}

		return nil
	}

	if opts.MultiModal {
		opts.Prompt, opts.Images, err = extractFileData(opts.Prompt)
		if err != nil {
			return err
		}
	}

	if opts.Format == "json" {
		opts.Format = `"` + opts.Format + `"`
	}

	request := api.GenerateRequest{
		Model:     opts.Model,
		Prompt:    opts.Prompt,
		Context:   generateContext,
		Images:    opts.Images,
		Format:    json.RawMessage(opts.Format),
		System:    opts.System,
		Options:   opts.Options,
		KeepAlive: opts.KeepAlive,
		Think:     opts.Think,
	}

	if err := client.Generate(ctx, &request, fn); err != nil {
		if errors.Is(err, context.Canceled) {
			return nil
		}
		return err
	}

	if opts.Prompt != "" {
		fmt.Println()
		fmt.Println()
	}

	if !latest.Done {
		return nil
	}

	verbose, err := cmd.Flags().GetBool("verbose")
	if err != nil {
		return err
	}

	if verbose {
		latest.Summary()
	}

	ctx = context.WithValue(cmd.Context(), generateContextKey("context"), latest.Context)
	cmd.SetContext(ctx)

	return nil
}
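
// RunServer implements "ollama serve": it makes sure a keypair exists, listens on
// the configured host address, and serves the API until shutdown.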
func RunServer(_ *cobra.Command, _ []string) error {
	if err := initializeKeypair(); err != nil {
		return err
	}

	ln, err := net.Listen("tcp", envconfig.Host().Host)
	if err != nil {
		return err
	}

	err = server.Serve(ln)
	if errors.Is(err, http.ErrServerClosed) {
		return nil
	}

	return err
}
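
// initializeKeypair generates an ed25519 keypair under ~/.ollama if no private key
// exists yet and prints the new public key.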
func initializeKeypair() error {
	home, err := os.UserHomeDir()
	if err != nil {
		return err
	}

	privKeyPath := filepath.Join(home, ".ollama", "id_ed25519")
	pubKeyPath := filepath.Join(home, ".ollama", "id_ed25519.pub")

	_, err = os.Stat(privKeyPath)
	if os.IsNotExist(err) {
		fmt.Printf("Couldn't find '%s'. Generating new private key.\n", privKeyPath)
		cryptoPublicKey, cryptoPrivateKey, err := ed25519.GenerateKey(rand.Reader)
		if err != nil {
			return err
		}

		privateKeyBytes, err := ssh.MarshalPrivateKey(cryptoPrivateKey, "")
		if err != nil {
			return err
		}

		if err := os.MkdirAll(filepath.Dir(privKeyPath), 0o755); err != nil {
			return fmt.Errorf("could not create directory %w", err)
		}

		if err := os.WriteFile(privKeyPath, pem.EncodeToMemory(privateKeyBytes), 0o600); err != nil {
			return err
		}

		sshPublicKey, err := ssh.NewPublicKey(cryptoPublicKey)
		if err != nil {
			return err
		}

		publicKeyBytes := ssh.MarshalAuthorizedKey(sshPublicKey)

		if err := os.WriteFile(pubKeyPath, publicKeyBytes, 0o644); err != nil {
			return err
		}

		fmt.Printf("Your new public key is: \n\n%s\n", publicKeyBytes)
	}
	return nil
}
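
// checkServerHeartbeat is used as a PreRunE hook: it pings the server and, if the
// connection is refused, tries to start it via startApp before giving up.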
func checkServerHeartbeat(cmd *cobra.Command, _ []string) error {
	client, err := api.ClientFromEnvironment()
	if err != nil {
		return err
	}
	if err := client.Heartbeat(cmd.Context()); err != nil {
		if !(strings.Contains(err.Error(), " refused") || strings.Contains(err.Error(), "could not connect")) {
			return err
		}
		if err := startApp(cmd.Context(), client); err != nil {
			return fmt.Errorf("ollama server not responding - %w", err)
		}
	}
	return nil
}
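
// versionHandler prints the server version when reachable and warns when the
// client version differs from it.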
func versionHandler ( cmd * cobra . Command , _ [ ] string ) {
client , err := api . ClientFromEnvironment ( )
if err != nil {
return
}
serverVersion , err := client . Version ( cmd . Context ( ) )
if err != nil {
2023-12-02 04:10:27 +08:00
fmt . Println ( "Warning: could not connect to a running Ollama instance" )
}
if serverVersion != "" {
fmt . Printf ( "ollama version is %s\n" , serverVersion )
2023-11-23 01:41:02 +08:00
}
2023-10-17 00:57:19 +08:00
if serverVersion != version . Version {
2023-12-02 04:10:27 +08:00
fmt . Printf ( "Warning: client version is %s\n" , version . Version )
2023-10-17 00:57:19 +08:00
}
2023-11-23 01:41:02 +08:00
}
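// appendEnvDocs appends an "Environment Variables" section for the given
// variables to the command's usage template.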
func appendEnvDocs(cmd *cobra.Command, envs []envconfig.EnvVar) {
	if len(envs) == 0 {
		return
	}

	envUsage := `
Environment Variables:
`
	for _, e := range envs {
		envUsage += fmt.Sprintf("  %-24s %s\n", e.Name, e.Description)
	}

	cmd.SetUsageTemplate(cmd.UsageTemplate() + envUsage)
}

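// NewCLI constructs the root "ollama" command, registers all subcommands and
// their flags, and attaches environment-variable documentation to each
// command's usage output.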
func NewCLI() *cobra.Command {
	log.SetFlags(log.LstdFlags | log.Lshortfile)
	cobra.EnableCommandSorting = false

	if runtime.GOOS == "windows" && term.IsTerminal(int(os.Stdout.Fd())) {
		console.ConsoleFromFile(os.Stdin) //nolint:errcheck
	}

	rootCmd := &cobra.Command{
		Use:           "ollama",
		Short:         "Large language model runner",
		SilenceUsage:  true,
		SilenceErrors: true,
		CompletionOptions: cobra.CompletionOptions{
			DisableDefaultCmd: true,
		},
		Run: func(cmd *cobra.Command, args []string) {
			if version, _ := cmd.Flags().GetBool("version"); version {
				versionHandler(cmd, args)
				return
			}

			cmd.Print(cmd.UsageString())
		},
	}

	rootCmd.Flags().BoolP("version", "v", false, "Show version information")

	createCmd := &cobra.Command{
		Use:     "create MODEL",
		Short:   "Create a model",
		Args:    cobra.ExactArgs(1),
		PreRunE: checkServerHeartbeat,
		RunE:    CreateHandler,
	}

	createCmd.Flags().StringP("file", "f", "", "Name of the Modelfile (default \"Modelfile\")")
	createCmd.Flags().StringP("quantize", "q", "", "Quantize model to this level (e.g. q4_K_M)")

	showCmd := &cobra.Command{
		Use:     "show MODEL",
		Short:   "Show information for a model",
		Args:    cobra.ExactArgs(1),
		PreRunE: checkServerHeartbeat,
		RunE:    ShowHandler,
	}

	showCmd.Flags().Bool("license", false, "Show license of a model")
	showCmd.Flags().Bool("modelfile", false, "Show Modelfile of a model")
	showCmd.Flags().Bool("parameters", false, "Show parameters of a model")
	showCmd.Flags().Bool("template", false, "Show template of a model")
	showCmd.Flags().Bool("system", false, "Show system message of a model")
	showCmd.Flags().BoolP("verbose", "v", false, "Show detailed model information")

	runCmd := &cobra.Command{
		Use:     "run MODEL [PROMPT]",
		Short:   "Run a model",
		Args:    cobra.MinimumNArgs(1),
		PreRunE: checkServerHeartbeat,
		RunE:    RunHandler,
	}

	runCmd.Flags().String("keepalive", "", "Duration to keep a model loaded (e.g. 5m)")
	runCmd.Flags().Bool("verbose", false, "Show timings for response")
	runCmd.Flags().Bool("insecure", false, "Use an insecure registry")
	runCmd.Flags().Bool("nowordwrap", false, "Don't wrap words to the next line automatically")
	runCmd.Flags().String("format", "", "Response format (e.g. json)")
	runCmd.Flags().String("think", "", "Enable thinking mode: true/false or high/medium/low for supported models")
	runCmd.Flags().Lookup("think").NoOptDefVal = "true"
	runCmd.Flags().Bool("hidethinking", false, "Hide thinking output (if provided)")

	stopCmd := &cobra.Command{
		Use:     "stop MODEL",
		Short:   "Stop a running model",
		Args:    cobra.ExactArgs(1),
		PreRunE: checkServerHeartbeat,
		RunE:    StopHandler,
	}

	serveCmd := &cobra.Command{
		Use:     "serve",
		Aliases: []string{"start"},
		Short:   "Start ollama",
		Args:    cobra.ExactArgs(0),
		RunE:    RunServer,
	}

	pullCmd := &cobra.Command{
		Use:     "pull MODEL",
		Short:   "Pull a model from a registry",
		Args:    cobra.ExactArgs(1),
		PreRunE: checkServerHeartbeat,
		RunE:    PullHandler,
	}

	pullCmd.Flags().Bool("insecure", false, "Use an insecure registry")

	pushCmd := &cobra.Command{
		Use:     "push MODEL",
		Short:   "Push a model to a registry",
		Args:    cobra.ExactArgs(1),
		PreRunE: checkServerHeartbeat,
		RunE:    PushHandler,
	}

	pushCmd.Flags().Bool("insecure", false, "Use an insecure registry")

	signinCmd := &cobra.Command{
		Use:     "signin",
		Short:   "Sign in to ollama.com",
		Args:    cobra.ExactArgs(0),
		PreRunE: checkServerHeartbeat,
		RunE:    SigninHandler,
	}

	signoutCmd := &cobra.Command{
		Use:     "signout",
		Short:   "Sign out from ollama.com",
		Args:    cobra.ExactArgs(0),
		PreRunE: checkServerHeartbeat,
		RunE:    SignoutHandler,
	}

	listCmd := &cobra.Command{
		Use:     "list",
		Aliases: []string{"ls"},
		Short:   "List models",
		PreRunE: checkServerHeartbeat,
		RunE:    ListHandler,
	}

	psCmd := &cobra.Command{
		Use:     "ps",
		Short:   "List running models",
		PreRunE: checkServerHeartbeat,
		RunE:    ListRunningHandler,
	}

	copyCmd := &cobra.Command{
		Use:     "cp SOURCE DESTINATION",
		Short:   "Copy a model",
		Args:    cobra.ExactArgs(2),
		PreRunE: checkServerHeartbeat,
		RunE:    CopyHandler,
	}

	deleteCmd := &cobra.Command{
		Use:     "rm MODEL [MODEL...]",
		Short:   "Remove a model",
		Args:    cobra.MinimumNArgs(1),
		PreRunE: checkServerHeartbeat,
		RunE:    DeleteHandler,
	}

	runnerCmd := &cobra.Command{
		Use:    "runner",
		Hidden: true,
		RunE: func(cmd *cobra.Command, args []string) error {
			return runner.Execute(os.Args[1:])
		},
		FParseErrWhitelist: cobra.FParseErrWhitelist{UnknownFlags: true},
	}
	runnerCmd.SetHelpFunc(func(cmd *cobra.Command, args []string) {
		_ = runner.Execute(args[1:])
	})

	envVars := envconfig.AsMap()
	envs := []envconfig.EnvVar{envVars["OLLAMA_HOST"]}

	for _, cmd := range []*cobra.Command{
		createCmd,
		showCmd,
		runCmd,
		stopCmd,
		pullCmd,
		pushCmd,
		listCmd,
		psCmd,
		copyCmd,
		deleteCmd,
		serveCmd,
	} {
		switch cmd {
		case runCmd:
			appendEnvDocs(cmd, []envconfig.EnvVar{envVars["OLLAMA_HOST"], envVars["OLLAMA_NOHISTORY"]})
		case serveCmd:
			appendEnvDocs(cmd, []envconfig.EnvVar{
				envVars["OLLAMA_DEBUG"],
				envVars["OLLAMA_HOST"],
				envVars["OLLAMA_CONTEXT_LENGTH"],
				envVars["OLLAMA_KEEP_ALIVE"],
				envVars["OLLAMA_MAX_LOADED_MODELS"],
				envVars["OLLAMA_MAX_QUEUE"],
				envVars["OLLAMA_MODELS"],
				envVars["OLLAMA_NUM_PARALLEL"],
				envVars["OLLAMA_NOPRUNE"],
				envVars["OLLAMA_ORIGINS"],
				envVars["OLLAMA_SCHED_SPREAD"],
				envVars["OLLAMA_FLASH_ATTENTION"],
				envVars["OLLAMA_KV_CACHE_TYPE"],
				envVars["OLLAMA_LLM_LIBRARY"],
				envVars["OLLAMA_GPU_OVERHEAD"],
				envVars["OLLAMA_LOAD_TIMEOUT"],
			})
		default:
			appendEnvDocs(cmd, envs)
		}
	}

	rootCmd.AddCommand(
		serveCmd,
		createCmd,
		showCmd,
		runCmd,
		stopCmd,
		pullCmd,
		pushCmd,
		signinCmd,
		signoutCmd,
		listCmd,
		psCmd,
		copyCmd,
		deleteCmd,
		runnerCmd,
	)

	return rootCmd
}

// If the user has explicitly set thinking options, either through the CLI or
// through the `/set think` or `/set nothink` interactive options, then we
// respect them. Otherwise, we check model capabilities to see if the model
// supports thinking. If the model does support thinking, we enable it.
// Otherwise, we unset the thinking option (which is different than setting it
// to false).
//
// If capabilities are not provided, we fetch them from the server.
func inferThinkingOption(caps *[]model.Capability, runOpts *runOptions, explicitlySetByUser bool) (*api.ThinkValue, error) {
	if explicitlySetByUser {
		return runOpts.Think, nil
	}

	if caps == nil {
		client, err := api.ClientFromEnvironment()
		if err != nil {
			return nil, err
		}
		ret, err := client.Show(context.Background(), &api.ShowRequest{
			Model: runOpts.Model,
		})
		if err != nil {
			return nil, err
		}
		caps = &ret.Capabilities
	}

	thinkingSupported := false
	for _, cap := range *caps {
		if cap == model.CapabilityThinking {
			thinkingSupported = true
		}
	}

	if thinkingSupported {
		return &api.ThinkValue{Value: true}, nil
	}

	return nil, nil
}

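// renderToolCalls formats tool calls returned by the model for display in the
// terminal. The CLI does not register any tools, so each call is reported as a
// call to a non-existent function; ANSI colors are applied unless plainText is
// set.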
func renderToolCalls(toolCalls []api.ToolCall, plainText bool) string {
	out := ""
	formatExplanation := ""
	formatValues := ""
	if !plainText {
		formatExplanation = readline.ColorGrey + readline.ColorBold
		formatValues = readline.ColorDefault
		out += formatExplanation
	}
	for i, toolCall := range toolCalls {
		argsAsJSON, err := json.Marshal(toolCall.Function.Arguments)
		if err != nil {
			return ""
		}
		if i > 0 {
			out += "\n"
		}
		// all tool calls are unexpected since we don't currently support registering any in the CLI
		out += fmt.Sprintf("  Model called a non-existent function '%s()' with arguments: %s", formatValues+toolCall.Function.Name+formatExplanation, formatValues+string(argsAsJSON)+formatExplanation)
	}
	if !plainText {
		out += readline.ColorDefault
	}
	return out
}