logutil: add Trace and TraceContext helpers (#12110)

Michael Yang · 2025-09-02 13:09:12 -07:00 · committed by GitHub
parent 8149a3c86e
commit fb92b61754
7 changed files with 22 additions and 18 deletions
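At its core, every touched call site trades the long-hand slog invocation for the new helper. Roughly, the pattern applied throughout (a sketch of the transformation, not an excerpt from the diff; the "encoded" example keys come from the tokenizer changes below):

    // before: context and level spelled out at every trace site
    slog.Log(context.TODO(), logutil.LevelTrace, "encoded", "string", s, "ids", ids)

    // after: the helper supplies context.TODO() and LevelTrace internally
    logutil.Trace("encoded", "string", s, "ids", ids)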

View File

@@ -1,7 +1,6 @@
 package harmony

 import (
-    "context"
     "fmt"
     "log/slog"
     "strings"
@@ -292,7 +291,7 @@ func (h *HarmonyMessageHandler) AddContent(content string, toolParser *HarmonyTo
     for _, event := range events {
         switch event := event.(type) {
         case HarmonyEventHeaderComplete:
-            slog.Log(context.TODO(), logutil.LevelTrace, "harmony event header complete", "header", event.Header)
+            logutil.Trace("harmony event header complete", "header", event.Header)
             switch event.Header.Channel {
             case "analysis":
                 if event.Header.Recipient != "" {
@@ -315,7 +314,7 @@ func (h *HarmonyMessageHandler) AddContent(content string, toolParser *HarmonyTo
                 h.state = harmonyMessageState_Normal
             }
         case HarmonyEventContentEmitted:
-            slog.Log(context.TODO(), logutil.LevelTrace, "harmony event content", "content", event.Content, "state", h.state)
+            logutil.Trace("harmony event content", "content", event.Content, "state", h.state)
             if h.state == harmonyMessageState_Normal {
                 contentSb.WriteString(event.Content)
             } else if h.state == harmonyMessageState_Thinking {

View File

@@ -865,7 +865,7 @@ func (s *ollamaServer) createLayout(systemInfo discover.SystemInfo, systemGPUs d
         }
         layers[i] += memory.CPU.Weights[i].Size
         layers[i] += memory.CPU.Cache[i].Size
-        slog.Log(context.TODO(), logutil.LevelTrace, "layer to assign", "layer", i, "size", format.HumanBytes2(layers[i]))
+        logutil.Trace("layer to assign", "layer", i, "size", format.HumanBytes2(layers[i]))
     }

     gpuLayers := ml.GPULayersList{}

View File

@@ -1,6 +1,7 @@
 package logutil

 import (
+    "context"
     "io"
     "log/slog"
     "path/filepath"
@@ -27,3 +28,11 @@ func NewLogger(w io.Writer, level slog.Level) *slog.Logger {
         },
     }))
 }
+
+func Trace(msg string, args ...any) {
+    slog.Log(context.TODO(), LevelTrace, msg, args...)
+}
+
+func TraceContext(ctx context.Context, msg string, args ...any) {
+    slog.Log(ctx, LevelTrace, msg, args...)
+}
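For reference, a minimal usage sketch of the new helpers. NewLogger and LevelTrace are the existing logutil APIs shown above; the writer choice and the "loaded tensor", name, and ctx values are illustrative only:

    // install a logger that keeps trace-level records (illustrative setup)
    slog.SetDefault(logutil.NewLogger(os.Stderr, logutil.LevelTrace))

    // trace without threading a context through the caller
    logutil.Trace("loaded tensor", "name", name)

    // trace while propagating a caller-supplied context
    logutil.TraceContext(ctx, "loaded tensor", "name", name)

Since Trace falls back to context.TODO(), call sites that already hold a context can prefer TraceContext; the call sites converted in this commit only need the context-free form.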

View File

@@ -271,7 +271,7 @@ func New(modelPath string, params ml.BackendParams) (ml.Backend, error) {
         tt := C.ggml_new_tensor(ctxs[bt], kind, C.int(len(t.source.Shape)), (*C.int64_t)(unsafe.Pointer(&t.source.Shape[0])))
         C.ggml_set_name(tt, cname)
-        slog.Log(context.TODO(), logutil.LevelTrace, "created tensor", "name", name, "shape", t.source.Shape, "dtype", t.source.Kind, "buffer_type", C.GoString(C.ggml_backend_buft_name(bt)))
+        logutil.Trace("created tensor", "name", name, "shape", t.source.Shape, "dtype", t.source.Kind, "buffer_type", C.GoString(C.ggml_backend_buft_name(bt)))

         size := pad(C.ggml_backend_buft_get_alloc_size(bt, tt), C.ggml_backend_buft_get_alignment(bt))
         if layer == -1 {
@@ -378,7 +378,7 @@ func New(modelPath string, params ml.BackendParams) (ml.Backend, error) {
     }

     for bs := range maps.Values(bbs) {
-        slog.Log(context.TODO(), logutil.LevelTrace, "model weights", "buffer", C.GoString(C.ggml_backend_buffer_name(bs)),
+        logutil.Trace("model weights", "buffer", C.GoString(C.ggml_backend_buffer_name(bs)),
             "size", format.HumanBytes2(uint64(C.ggml_backend_buffer_get_size(bs))))
     }
@@ -811,7 +811,7 @@ func (c *Context) Reserve() {
         }
     }

-    slog.Log(context.TODO(), logutil.LevelTrace, "compute graph", "backend", C.GoString(C.ggml_backend_name(c.b.schedBackends[i])),
+    logutil.Trace("compute graph", "backend", C.GoString(C.ggml_backend_name(c.b.schedBackends[i])),
         "buffer_type", C.GoString(C.ggml_backend_buft_name(c.b.schedBufts[i])), "size", format.HumanBytes2(uint64(bufferStatus.size)))
 }

View File

@@ -2,7 +2,6 @@ package model

 import (
     "cmp"
-    "context"
     "fmt"
     "iter"
     "log/slog"
@@ -202,7 +201,7 @@ func (bpe BytePairEncoding) Encode(s string, addSpecial bool) ([]int32, error) {
         }
     }

-    slog.Log(context.TODO(), logutil.LevelTrace, "encoded", "string", s, "ids", ids)
+    logutil.Trace("encoded", "string", s, "ids", ids)

     if addSpecial && len(ids) > 0 {
         ids = bpe.vocab.addSpecials(ids)
@@ -243,6 +242,6 @@ func (bpe BytePairEncoding) Decode(ids []int32) (string, error) {
         }
     }

-    slog.Log(context.TODO(), logutil.LevelTrace, "decoded", "string", sb.String(), "from", lazyIdsString{ids: ids})
+    logutil.Trace("decoded", "string", sb.String(), "from", lazyIdsString{ids: ids})
     return sb.String(), nil
 }

View File

@@ -1,12 +1,10 @@
 package model

 import (
-    "context"
     "errors"
     "fmt"
     _ "image/jpeg"
     _ "image/png"
-    "log/slog"
     "os"
     "reflect"
     "strconv"
@@ -198,7 +196,7 @@ func populateFields(base Base, v reflect.Value, tags ...Tag) reflect.Value {
     names := fn(tagsCopy)
     for _, name := range names {
         if tensor := base.Backend().Get(strings.Join(name, ".")); tensor != nil {
-            slog.Log(context.TODO(), logutil.LevelTrace, "found tensor", "", tensor)
+            logutil.Trace("found tensor", "", tensor)
             vv.Set(reflect.ValueOf(tensor))
             break
         }

View File

@@ -2,7 +2,6 @@ package model

 import (
     "container/heap"
-    "context"
     "fmt"
     "log/slog"
     "strconv"
@@ -25,7 +24,7 @@ func (spm SentencePieceModel) Vocabulary() *Vocabulary {
 }

 func NewSentencePieceModel(vocab *Vocabulary) SentencePieceModel {
-    slog.Log(context.TODO(), logutil.LevelTrace, "Tokens", "num tokens", len(vocab.Values), "vals", vocab.Values[:5], "scores", vocab.Scores[:5], "types", vocab.Types[:5])
+    logutil.Trace("Tokens", "num tokens", len(vocab.Values), "vals", vocab.Values[:5], "scores", vocab.Scores[:5], "types", vocab.Types[:5])

     counter := map[int]int{}
     var maxTokenLen int
@@ -39,7 +38,7 @@ func NewSentencePieceModel(vocab *Vocabulary) SentencePieceModel {
         }
     }

-    slog.Log(context.TODO(), logutil.LevelTrace, "Token counts", "normal", counter[TOKEN_TYPE_NORMAL], "unknown", counter[TOKEN_TYPE_UNKNOWN], "control", counter[TOKEN_TYPE_CONTROL],
+    logutil.Trace("Token counts", "normal", counter[TOKEN_TYPE_NORMAL], "unknown", counter[TOKEN_TYPE_UNKNOWN], "control", counter[TOKEN_TYPE_CONTROL],
         "user defined", counter[TOKEN_TYPE_USER_DEFINED], "unused", counter[TOKEN_TYPE_UNUSED], "byte", counter[TOKEN_TYPE_BYTE],
         "max token len", maxTokenLen)
@@ -182,7 +181,7 @@ func (spm SentencePieceModel) Encode(s string, addSpecial bool) ([]int32, error)
         }
     }

-    slog.Log(context.TODO(), logutil.LevelTrace, "encoded", "string", s, "ids", ids)
+    logutil.Trace("encoded", "string", s, "ids", ids)

     if addSpecial && len(ids) > 0 {
         ids = spm.vocab.addSpecials(ids)
@@ -246,6 +245,6 @@ func (spm SentencePieceModel) Decode(ids []int32) (string, error) {
         }
     }

-    slog.Log(context.TODO(), logutil.LevelTrace, "decoded", "ids", ids, "string", sb.String())
+    logutil.Trace("decoded", "ids", ids, "string", sb.String())
     return sb.String(), nil
 }