Skip to content

Commit 5af59d5

Browse files
committed
Add verbosity levels for all log commands
Signed-off-by: Maya Barnea <[email protected]>
1 parent 9a57299 commit 5af59d5

File tree

16 files changed

+103
-66
lines changed

16 files changed

+103
-66
lines changed

cmd/llm-d-inference-sim/main.go

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -24,6 +24,7 @@ import (
2424
"k8s.io/klog/v2"
2525

2626
"github.com/llm-d/llm-d-inference-sim/cmd/signals"
27+
"github.com/llm-d/llm-d-inference-sim/pkg/common/logging"
2728
vllmsim "github.com/llm-d/llm-d-inference-sim/pkg/llm-d-inference-sim"
2829
)
2930

@@ -33,11 +34,11 @@ func main() {
3334
ctx := klog.NewContext(context.Background(), logger)
3435
ctx = signals.SetupSignalHandler(ctx)
3536

36-
logger.Info("Starting vLLM simulator")
37+
logger.V(logging.INFO).Info("Starting vLLM simulator")
3738

3839
vllmSim, err := vllmsim.New(logger)
3940
if err != nil {
40-
logger.Error(err, "Failed to create vLLM simulator")
41+
logger.Error(err, "failed to create vLLM simulator")
4142
return
4243
}
4344
if err := vllmSim.Start(ctx); err != nil {

pkg/common/logging/levels.go

Lines changed: 24 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,24 @@
1+
/*
2+
Copyright 2025 The llm-d-inference-sim Authors.
3+
4+
Licensed under the Apache License, Version 2.0 (the "License");
5+
you may not use this file except in compliance with the License.
6+
You may obtain a copy of the License at
7+
8+
http://www.apache.org/licenses/LICENSE-2.0
9+
10+
Unless required by applicable law or agreed to in writing, software
11+
distributed under the License is distributed on an "AS IS" BASIS,
12+
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13+
See the License for the specific language governing permissions and
14+
limitations under the License.
15+
*/
16+
17+
package logging
18+
19+
const (
20+
WARN = 1
21+
INFO = 2
22+
DEBUG = 4
23+
TRACE = 5
24+
)

pkg/common/publisher.go

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -25,6 +25,7 @@ import (
2525
"sync/atomic"
2626
"time"
2727

28+
"github.com/llm-d/llm-d-inference-sim/pkg/common/logging"
2829
zmq "github.com/pebbe/zmq4"
2930
"github.com/vmihailenco/msgpack/v5"
3031
"k8s.io/klog/v2"
@@ -93,7 +94,7 @@ func (p *Publisher) PublishEvent(ctx context.Context, topic string, batch interf
9394
return fmt.Errorf("failed to send message to topic %s: %w", topic, err)
9495
}
9596

96-
logger.Info("Published event batch", "topic", topic, "seq", seq)
97+
logger.V(logging.TRACE).Info("Published event batch", "topic", topic, "seq", seq)
9798
return nil
9899
}
99100

pkg/common/utils.go

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -23,6 +23,7 @@ import (
2323

2424
"github.com/go-logr/logr"
2525
"github.com/google/uuid"
26+
"github.com/llm-d/llm-d-inference-sim/pkg/common/logging"
2627
)
2728

2829
// Definition of buckets for time-to-first-token and time-per-output-token metrics, each value is an upper boundary of a bucket
@@ -149,6 +150,6 @@ func WriteToChannel[T any](channel chan T, object T, logger logr.Logger, channel
149150
select {
150151
case channel <- object:
151152
default:
152-
logger.V(1).Info("failed to write to", "channel", channelName)
153+
logger.V(logging.WARN).Info("failed to write to", "channel", channelName)
153154
}
154155
}

pkg/dataset/custom_dataset.go

Lines changed: 12 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -33,6 +33,7 @@ import (
3333

3434
"github.com/go-logr/logr"
3535
"github.com/llm-d/llm-d-inference-sim/pkg/common"
36+
"github.com/llm-d/llm-d-inference-sim/pkg/common/logging"
3637
openaiserverapi "github.com/llm-d/llm-d-inference-sim/pkg/openai-server-api"
3738
)
3839

@@ -80,7 +81,7 @@ func (d *CustomDataset) downloadDataset(ctx context.Context, url string, path st
8081
}
8182
}()
8283

83-
d.logger.Info("Using dataset-url", "dataset-url", url)
84+
d.logger.V(logging.INFO).Info("Using dataset-url", "dataset-url", url)
8485
resp, err := http.Get(url)
8586
if err != nil {
8687
return err
@@ -181,9 +182,9 @@ func (pr *progressReader) logProgress(pct int) {
181182
speed := float64(pr.downloaded) / (1024 * 1024 * elapsedTime)
182183
remainingTime := float64(pr.total-pr.downloaded) / (float64(pr.downloaded) / elapsedTime)
183184
if pct != 100 {
184-
pr.logger.Info(fmt.Sprintf("Download progress: %d%%, Speed: %.2f MB/s, Remaining time: %.2fs", pct, speed, remainingTime))
185+
pr.logger.V(logging.INFO).Info("Dataset download progress", "%", pct, "speed (MB/s)", speed, "remaining time (s)", remainingTime)
185186
} else {
186-
pr.logger.Info(fmt.Sprintf("Download completed: 100%%, Average Speed: %.2f MB/s, Total time: %.2fs", speed, elapsedTime))
187+
pr.logger.V(logging.INFO).Info("Download completed", "average speed (MB/s)", speed, "total time (s)", elapsedTime)
187188
}
188189
}
189190

@@ -248,7 +249,7 @@ func (d *CustomDataset) getRecordsCount() (int, error) {
248249
}
249250

250251
func (d *CustomDataset) loadDatabaseInMemory(path string) error {
251-
d.logger.Info("Loading database into memory...")
252+
d.logger.V(logging.INFO).Info("Loading database into memory...")
252253
start := time.Now()
253254

254255
// Create in-memory database
@@ -301,7 +302,7 @@ func (d *CustomDataset) loadDatabaseInMemory(path string) error {
301302
}
302303

303304
loadTime := time.Since(start)
304-
d.logger.Info("Database loaded into memory", "load_time", loadTime.String())
305+
d.logger.V(logging.INFO).Info("Database loaded into memory", "load_time", loadTime.String())
305306
return nil
306307
}
307308

@@ -354,9 +355,9 @@ func (d *CustomDataset) connectToDB(path string, useInMemory bool) error {
354355
}
355356

356357
if useInMemory {
357-
d.logger.Info("In-memory database connected successfully", "path", path, "records count", count)
358+
d.logger.V(logging.INFO).Info("In-memory database connected successfully", "path", path, "records count", count)
358359
} else {
359-
d.logger.Info("Database connected successfully", "path", path, "records count", count)
360+
d.logger.V(logging.INFO).Info("Database connected successfully", "path", path, "records count", count)
360361
}
361362
return nil
362363
}
@@ -368,7 +369,7 @@ func (d *CustomDataset) Init(ctx context.Context, logger logr.Logger, path strin
368369
}
369370
d.hasWarned = false
370371
if url == "" {
371-
d.logger.Info("Using dataset from", "path", path)
372+
d.logger.V(logging.INFO).Info("Using dataset from", "path", path)
372373
return d.connectToDB(path, useInMemory)
373374
}
374375
_, err := os.Stat(path)
@@ -386,7 +387,7 @@ func (d *CustomDataset) Init(ctx context.Context, logger logr.Logger, path strin
386387
return fmt.Errorf("failed to download dataset: %w", err)
387388
}
388389
}
389-
d.logger.Info("Using dataset path", "dataset-path", path)
390+
d.logger.V(logging.INFO).Info("Using dataset path", "dataset-path", path)
390391

391392
return d.connectToDB(path, useInMemory)
392393
}
@@ -448,7 +449,7 @@ func (d *CustomDataset) query(query string, nTokens int, random *common.Random)
448449
rows, err := d.db.Query(query)
449450
if err != nil {
450451
if !d.hasWarned {
451-
d.logger.Error(err, "Failed to query database. Ensure dataset file is still valid. Will generate random tokens instead.")
452+
d.logger.Error(err, "failed to query database. Ensure dataset file is still valid. Will generate random tokens instead.")
452453
d.hasWarned = true
453454
}
454455
return [][]string{GenPresetRandomTokens(random, nTokens)}, nil
@@ -472,7 +473,7 @@ func (d *CustomDataset) GenerateTokens(req openaiserverapi.CompletionRequest, nT
472473
// filter out results according to finish reason
473474
var filteredTokensList [][]string
474475
if finishReason != LengthFinishReason && finishReason != StopFinishReason {
475-
d.logger.Error(errors.New("unknown finish reason"), "Unexpected finish reason", "reason", finishReason)
476+
d.logger.Error(errors.New("unknown finish reason"), "unexpected finish reason", "reason", finishReason)
476477
}
477478
for _, tokens := range tokensList {
478479
if finishReason == StopFinishReason && len(tokens) <= nTokens {

pkg/kv-cache/block_cache.go

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -73,7 +73,7 @@ func newBlockCache(config *common.Configuration, logger logr.Logger, usageChan c
7373
func (bc *blockCache) start(ctx context.Context) {
7474
err := bc.eventSender.Run(ctx)
7575
if err != nil {
76-
bc.logger.Info("sender stopped with error", "error", err)
76+
bc.logger.Error(err, "Sender stopped with error")
7777
}
7878
}
7979

pkg/kv-cache/kv_cache.go

Lines changed: 4 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -22,6 +22,7 @@ import (
2222

2323
"github.com/go-logr/logr"
2424
"github.com/llm-d/llm-d-inference-sim/pkg/common"
25+
"github.com/llm-d/llm-d-inference-sim/pkg/common/logging"
2526
openaiserverapi "github.com/llm-d/llm-d-inference-sim/pkg/openai-server-api"
2627
"github.com/llm-d/llm-d-kv-cache-manager/pkg/kvcache/kvblock"
2728
"github.com/llm-d/llm-d-kv-cache-manager/pkg/tokenization"
@@ -63,7 +64,7 @@ func (h *KVCacheHelper) Run(ctx context.Context) {
6364
}
6465

6566
func (h *KVCacheHelper) OnRequestStart(vllmReq openaiserverapi.CompletionRequest) error {
66-
h.logger.Info("KV cache - process request")
67+
h.logger.V(logging.TRACE).Info("KV cache - process request")
6768

6869
prompt := vllmReq.GetPrompt()
6970
modelName := vllmReq.GetModel()
@@ -72,13 +73,13 @@ func (h *KVCacheHelper) OnRequestStart(vllmReq openaiserverapi.CompletionRequest
7273
// tokenize the input
7374
tokens, _, err := h.tokenizer.Encode(prompt, modelName)
7475
if err != nil {
75-
h.logger.Info("Prompt tokenization failed", "error", err.Error())
76+
h.logger.Error(err, "prompt tokenization failed")
7677
return err
7778
}
7879

7980
// get block keys
8081
blockKeys := h.tokensProcessor.TokensToKVBlockKeys(tokens, modelName)
81-
h.logger.Info("found tokens", "tokens", tokens, "block-keys", blockKeys)
82+
h.logger.V(logging.TRACE).Info("Found tokens", "tokens", tokens, "block-keys", blockKeys)
8283

8384
blockHashes := make([]uint64, len(blockKeys))
8485
for i, key := range blockKeys {

pkg/kv-cache/kv_cache_sender.go

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -22,6 +22,7 @@ import (
2222

2323
"github.com/go-logr/logr"
2424
"github.com/llm-d/llm-d-inference-sim/pkg/common"
25+
"github.com/llm-d/llm-d-inference-sim/pkg/common/logging"
2526
"github.com/llm-d/llm-d-kv-cache-manager/pkg/kvcache/kvevents"
2627
"github.com/vmihailenco/msgpack/v5"
2728
)
@@ -70,15 +71,15 @@ func (s *KVEventSender) Run(ctx context.Context) error {
7071
case <-ctx.Done():
7172
// Exiting, discard remaining events if any
7273
if len(s.batch) > 0 {
73-
s.logger.Info("Existing, discard remaining events", "num of events", len(s.batch))
74+
s.logger.V(logging.INFO).Info("Exiting, discard remaining events", "num of events", len(s.batch))
7475
}
7576
return ctx.Err()
7677

7778
case eventData, ok := <-s.eventChan:
7879
if !ok {
7980
// Channel closed, discard remaining events and exit
8081
if len(s.batch) > 0 {
81-
s.logger.Info("Channel closed, discard remaining events", "num of events", len(s.batch))
82+
s.logger.V(logging.INFO).Info("Channel closed, discard remaining events", "num of events", len(s.batch))
8283
}
8384
return nil
8485
}

pkg/llm-d-inference-sim/helpers.go

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -22,6 +22,7 @@ import (
2222
"fmt"
2323

2424
"github.com/llm-d/llm-d-inference-sim/pkg/common"
25+
"github.com/llm-d/llm-d-inference-sim/pkg/common/logging"
2526
openaiserverapi "github.com/llm-d/llm-d-inference-sim/pkg/openai-server-api"
2627
)
2728

@@ -92,7 +93,7 @@ func (s *VllmSimulator) showConfig(dp bool) error {
9293
if err != nil {
9394
return fmt.Errorf("failed to marshal configuration to JSON: %w", err)
9495
}
95-
s.logger.Info("Configuration:", "", string(cfgJSON))
96+
s.logger.V(logging.INFO).Info("Configuration:", "", string(cfgJSON))
9697
return nil
9798
}
9899

pkg/llm-d-inference-sim/lora.go

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -21,6 +21,7 @@ import (
2121
"encoding/json"
2222

2323
"github.com/llm-d/llm-d-inference-sim/pkg/common"
24+
"github.com/llm-d/llm-d-inference-sim/pkg/common/logging"
2425
"github.com/valyala/fasthttp"
2526
)
2627

@@ -40,7 +41,7 @@ func (s *VllmSimulator) getLoras() []string {
4041
if lora, ok := key.(string); ok {
4142
loras = append(loras, lora)
4243
} else {
43-
s.logger.Info("Stored LoRA is not a string", "value", key)
44+
s.logger.V(logging.WARN).Info("Stored LoRA is not a string", "value", key)
4445
}
4546
return true
4647
})

0 commit comments

Comments
 (0)