@@ -15,6 +15,7 @@ import (
1515 "strings"
1616 "sync"
1717
18+ "github.com/docker/model-runner/pkg/inference"
1819 "github.com/docker/model-runner/pkg/internal/dockerhub"
1920 "github.com/docker/model-runner/pkg/logging"
2021)
@@ -100,8 +101,7 @@ func (l *llamaCpp) downloadLatestLlamaCpp(ctx context.Context, log logging.Logge
100101 if err != nil {
101102 return fmt.Errorf("failed to read bundled llama.cpp version: %w", err)
102103 } else if strings .TrimSpace (string (data )) == latest {
103- l .status = fmt .Sprintf ("running llama.cpp %s (%s) version: %s" ,
104- desiredTag , latest , getLlamaCppVersion (log , filepath .Join (vendoredServerStoragePath , "com.docker.llama-server" )))
104+ l.setRunningStatus(log, filepath.Join(vendoredServerStoragePath, "com.docker.llama-server"), desiredTag, latest)
105105 return errLlamaCppUpToDate
106106 }
107107
@@ -112,8 +112,7 @@ func (l *llamaCpp) downloadLatestLlamaCpp(ctx context.Context, log logging.Logge
112112 } else if strings .TrimSpace (string (data )) == latest {
113113 log .Infoln ("current llama.cpp version is already up to date" )
114114 if _ , statErr := os .Stat (llamaCppPath ); statErr == nil {
115- l .status = fmt .Sprintf ("running llama.cpp %s (%s) version: %s" ,
116- desiredTag , latest , getLlamaCppVersion (log , llamaCppPath ))
115+ l.setRunningStatus(log, llamaCppPath, desiredTag, latest)
117116 return nil
118117 }
119118 log .Infoln ("llama.cpp binary must be updated, proceeding to update it" )
@@ -128,7 +127,7 @@ func (l *llamaCpp) downloadLatestLlamaCpp(ctx context.Context, log logging.Logge
128127 }
129128 defer os .RemoveAll (downloadDir )
130129
131- l .status = fmt .Sprintf ("downloading %s (%s) variant of llama.cpp" , desiredTag , latest )
130+ l.status = inference.FormatInstalling(fmt.Sprintf("%s llama.cpp %s ", inference.DetailDownloading, desiredTag))
132131 if extractErr := extractFromImage (ctx , log , image , runtime .GOOS , runtime .GOARCH , downloadDir ); extractErr != nil {
133132 return fmt .Errorf ("could not extract image: %w" , extractErr )
134133 }
@@ -164,7 +163,7 @@ func (l *llamaCpp) downloadLatestLlamaCpp(ctx context.Context, log logging.Logge
164163 }
165164
166165 log .Infoln ("successfully updated llama.cpp binary" )
167- l.status = fmt.Sprintf("running llama.cpp %s (%s) version: %s", desiredTag, latest, getLlamaCppVersion(log, llamaCppPath))
166+ l.setRunningStatus(log, llamaCppPath, desiredTag, latest)
168167 log .Infoln (l .status )
169168
170169 if err := os .WriteFile (currentVersionFile , []byte (latest ), 0o644 ); err != nil {
@@ -188,6 +187,15 @@ func extractFromImage(ctx context.Context, log logging.Logger, image, requiredOs
188187 return dockerhub .Extract (imageTar , requiredArch , requiredOs , destination )
189188}
190189
190+ func (l * llamaCpp ) setRunningStatus (log logging.Logger , binaryPath , variant , digest string ) {
191+ version := getLlamaCppVersion (log , binaryPath )
192+ if variant == "" && digest == "" {
193+ l .status = inference .FormatRunning (fmt .Sprintf ("llama.cpp %s" , version ))
194+ } else {
195+ l .status = inference .FormatRunning (fmt .Sprintf ("llama.cpp %s (%s) %s" , variant , digest , version ))
196+ }
197+ }
198+
191199func getLlamaCppVersion (log logging.Logger , llamaCpp string ) string {
192200 output , err := exec .Command (llamaCpp , "--version" ).CombinedOutput ()
193201 if err != nil {
0 commit comments