Commit f25a5e9

feat(llm): Update tracing of llm host components

Signed-off-by: Caleb Schoepp <[email protected]>

1 parent 5b6deb9 · commit f25a5e9

Showing 4 changed files with 10 additions and 4 deletions.

crates/llm-local/Cargo.toml
Lines changed: 3 additions & 0 deletions

@@ -31,3 +31,6 @@ tracing = { workspace = true }
 default = []
 metal = ["llm/metal"]
 cublas = ["llm/cublas"]
+
+[lints]
+workspace = true

crates/llm-local/src/lib.rs
Lines changed: 2 additions & 2 deletions

@@ -33,7 +33,7 @@ pub struct LocalLlmEngine {
 
 #[async_trait]
 impl LlmEngine for LocalLlmEngine {
-    #[instrument(name = "generate_inference_local_llm", skip(self, prompt), err(level = Level::INFO))]
+    #[instrument(name = "spin_llm_local.infer", skip(self, prompt), err(level = Level::INFO))]
     async fn infer(
         &mut self,
         model: wasi_llm::InferencingModel,
@@ -93,7 +93,7 @@ impl LlmEngine for LocalLlmEngine {
         Ok(response)
     }
 
-    #[instrument(name = "generate_embeddings_local_llm", skip(self, data), err(level = Level::INFO))]
+    #[instrument(name = "spin_llm_local.generate_embeddings", skip(self, data), err(level = Level::INFO))]
     async fn generate_embeddings(
         &mut self,
         model: wasi_llm::EmbeddingModel,
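
As context for the rename above: the spans now follow a `<crate>.<method>` pattern (`spin_llm_local.infer`, `spin_llm_local.generate_embeddings`) rather than the older `generate_*_local_llm` names. Below is a minimal sketch of how an `#[instrument]` attribute of this shape behaves, assuming the `tracing` and `tracing-subscriber` crates; `infer_stub` and its arguments are hypothetical stand-ins, not code from this commit.

use tracing::{instrument, Level};

// Same shape as the renamed span: an explicit `name`, `skip` to keep the
// potentially large/sensitive prompt out of the span fields, and `err(...)`
// to record an INFO-level event whenever the function returns Err.
#[instrument(name = "spin_llm_local.infer", skip(prompt), err(level = Level::INFO))]
fn infer_stub(model: &str, prompt: &str) -> Result<String, String> {
    if prompt.is_empty() {
        return Err("empty prompt".to_string());
    }
    Ok(format!("completion from {model}"))
}

fn main() {
    // Emit trace output to stdout; the error event from the second call is
    // recorded under the `spin_llm_local.infer` span.
    tracing_subscriber::fmt().with_max_level(Level::INFO).init();
    let _ = infer_stub("llama2-chat", "hello");
    let _ = infer_stub("llama2-chat", "");
}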

crates/llm-remote-http/Cargo.toml
Lines changed: 3 additions & 0 deletions

@@ -16,3 +16,6 @@ spin-telemetry = { path = "../telemetry" }
 spin-world = { path = "../world" }
 reqwest = { version = "0.11", features = ["gzip", "json"] }
 tracing = { workspace = true }
+
+[lints]
+workspace = true

crates/llm-remote-http/src/lib.rs
Lines changed: 2 additions & 2 deletions

@@ -55,7 +55,7 @@ struct EmbeddingResponseBody {
 
 #[async_trait]
 impl LlmEngine for RemoteHttpLlmEngine {
-    #[instrument(name = "generate_inference_remote_llm", skip(self, prompt), err(level = Level::INFO), fields(otel.kind = "client"))]
+    #[instrument(name = "spin_llm_remote_http.infer", skip(self, prompt), err(level = Level::INFO), fields(otel.kind = "client"))]
     async fn infer(
         &mut self,
         model: wasi_llm::InferencingModel,
@@ -118,7 +118,7 @@ impl LlmEngine for RemoteHttpLlmEngine {
         }
     }
 
-    #[instrument(name = "generate_embeddings_remote_llm", skip(self, data), err(level = Level::INFO), fields(otel.kind = "client"))]
+    #[instrument(name = "spin_llm_remote_http.generate_embeddings", skip(self, data), err(level = Level::INFO), fields(otel.kind = "client"))]
    async fn generate_embeddings(
         &mut self,
         model: wasi_llm::EmbeddingModel,
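
The remote-HTTP spans additionally carry `otel.kind = "client"`. A small sketch of what that field does, under the assumption that the spans are exported through a `tracing-opentelemetry` layer (which treats a span field literally named `otel.kind` as the OpenTelemetry span kind); `call_remote_llm` is a hypothetical helper, not code from this commit.

use tracing::info_span;

fn call_remote_llm() {
    // Macro-form equivalent of the attribute used in the diff:
    // #[instrument(name = "spin_llm_remote_http.infer", fields(otel.kind = "client"))]
    // With a tracing-opentelemetry layer installed, the `otel.kind` field
    // marks the exported span as a CLIENT span rather than the default
    // INTERNAL kind, so outbound calls show up correctly in trace viewers.
    let span = info_span!("spin_llm_remote_http.infer", otel.kind = "client");
    let _guard = span.enter();
    // ... the outbound HTTP request to the remote LLM endpoint would go here ...
}

fn main() {
    call_remote_llm();
}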
