Skip to content

Commit 8dda467

Browse files
authored
fix: merge sdk.Initialize and sdk.NewClient + avoid panic on error (#7)
1 parent 2ee3974 commit 8dda467

File tree

13 files changed

+280
-167
lines changed

13 files changed

+280
-167
lines changed

README.md

Lines changed: 59 additions & 46 deletions
Original file line numberDiff line numberDiff line change
@@ -53,7 +53,7 @@ For a complete guide, go to our [docs](https://traceloop.com/docs/openllmetry/ge
5353
Install the SDK:
5454

5555
```bash
56-
go get traceloop-sdk
56+
go get github.com/traceloop/go-openllmetry/traceloop-sdk
5757
```
5858

5959
Then, initialize the SDK in your code:
@@ -65,19 +65,15 @@ import (
6565
"context"
6666

6767
sdk "github.com/traceloop/go-openllmetry/traceloop-sdk"
68-
"github.com/traceloop/go-openllmetry/traceloop-sdk/config"
6968
)
7069

7170
func main() {
7271
ctx := context.Background()
7372

74-
traceloop := sdk.NewClient(config.Config{
75-
BaseURL: "api.traceloop.com",
73+
traceloop := sdk.NewClient(ctx, sdk.Config{
7674
APIKey: os.Getenv("TRACELOOP_API_KEY"),
7775
})
7876
defer func() { traceloop.Shutdown(ctx) }()
79-
80-
traceloop.Initialize(ctx)
8177
}
8278
```
8379

@@ -114,22 +110,17 @@ import (
114110

115111
"github.com/sashabaranov/go-openai"
116112
sdk "github.com/traceloop/go-openllmetry/traceloop-sdk"
117-
"github.com/traceloop/go-openllmetry/traceloop-sdk/config"
118-
"github.com/traceloop/go-openllmetry/traceloop-sdk/dto"
119113
)
120114

121115
func main() {
122116
ctx := context.Background()
123117

124118
// Initialize Traceloop
125-
traceloop := sdk.NewClient(config.Config{
126-
BaseURL: "api.traceloop.com",
119+
traceloop := sdk.NewClient(ctx, sdk.Config{
127120
APIKey: os.Getenv("TRACELOOP_API_KEY"),
128121
})
129122
defer func() { traceloop.Shutdown(ctx) }()
130123

131-
traceloop.Initialize(ctx)
132-
133124
// Call OpenAI like you normally would
134125
resp, err := client.CreateChatCompletion(
135126
context.Background(),
@@ -144,40 +135,62 @@ func main() {
144135
},
145136
)
146137

147-
// Log the request and the response
148-
log := dto.PromptLogAttributes{
149-
Prompt: dto.Prompt{
150-
Vendor: "openai",
151-
Mode: "chat",
152-
Model: request.Model,
153-
},
154-
Completion: dto.Completion{
155-
Model: resp.Model,
156-
},
157-
Usage: dto.Usage{
158-
TotalTokens: resp.Usage.TotalTokens,
159-
CompletionTokens: resp.Usage.CompletionTokens,
160-
PromptTokens: resp.Usage.PromptTokens,
161-
},
162-
}
163-
164-
for i, message := range request.Messages {
165-
log.Prompt.Messages = append(log.Prompt.Messages, dto.Message{
166-
Index: i,
167-
Content: message.Content,
168-
Role: message.Role,
169-
})
170-
}
171-
172-
for _, choice := range resp.Choices {
173-
log.Completion.Messages = append(log.Completion.Messages, dto.Message{
174-
Index: choice.Index,
175-
Content: choice.Message.Content,
176-
Role: choice.Message.Role,
177-
})
178-
}
179-
180-
traceloop.LogPrompt(ctx, log)
138+
var promptMsgs []sdk.Message
139+
for i, message := range request.Messages {
140+
promptMsgs = append(promptMsgs, sdk.Message{
141+
Index: i,
142+
Content: message.Content,
143+
Role: message.Role,
144+
})
145+
}
146+
147+
// Log the request
148+
llmSpan, err := traceloop.LogPrompt(
149+
ctx,
150+
sdk.Prompt{
151+
Vendor: "openai",
152+
Mode: "chat",
153+
Model: request.Model,
154+
Messages: promptMsgs,
155+
},
156+
sdk.TraceloopAttributes{
157+
WorkflowName: "example-workflow",
158+
EntityName: "example-entity",
159+
},
160+
)
161+
if err != nil {
162+
fmt.Printf("LogPrompt error: %v\n", err)
163+
return
164+
}
165+
166+
client := openai.NewClient(os.Getenv("OPENAI_API_KEY"))
167+
resp, err := client.CreateChatCompletion(
168+
context.Background(),
169+
*request,
170+
)
171+
if err != nil {
172+
fmt.Printf("ChatCompletion error: %v\n", err)
173+
return
174+
}
175+
176+
var completionMsgs []sdk.Message
177+
for _, choice := range resp.Choices {
178+
completionMsgs = append(completionMsgs, sdk.Message{
179+
Index: choice.Index,
180+
Content: choice.Message.Content,
181+
Role: choice.Message.Role,
182+
})
183+
}
184+
185+
// Log the response
186+
llmSpan.LogCompletion(ctx, sdk.Completion{
187+
Model: resp.Model,
188+
Messages: completionMsgs,
189+
}, sdk.Usage{
190+
TotalTokens: resp.Usage.TotalTokens,
191+
CompletionTokens: resp.Usage.CompletionTokens,
192+
PromptTokens: resp.Usage.PromptTokens,
193+
})
181194
}
182195
```
183196

go.work.sum

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -21,6 +21,7 @@ github.com/google/uuid v1.3.1/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+
2121
github.com/kevinburke/ssh_config v1.2.0/go.mod h1:CT57kijsi8u/K/BOFA39wgDQJ9CxiF4nAY/ojJ6r6mM=
2222
github.com/klauspost/compress v1.15.15/go.mod h1:ZcK2JAFqKOpnBlxcLsJzYfrS9X1akm9fHZNnD9+Vo/4=
2323
github.com/onsi/gomega v1.27.10/go.mod h1:RsS8tutOdbdgzbPtzzATp12yT7kM5I5aElG3evPbQ0M=
24+
github.com/pjbgf/sha1cd v0.3.0 h1:4D5XXmUUBUl/xQ6IjCkEAbqXskkq/4O7LmGn0AqMDs4=
2425
github.com/pjbgf/sha1cd v0.3.0/go.mod h1:nZ1rrWOcGJ5uZgEEVL1VUM9iRQiZvWdbZjkKyFzPPsI=
2526
github.com/rogpeppe/fastuuid v1.2.0/go.mod h1:jVj6XXZzXRy/MSR5jhDC/2q6DgLz+nrA6LYCDYWNEvQ=
2627
github.com/sergi/go-diff v1.1.0/go.mod h1:STckp+ISIX8hZLjrqAeVduY0gWCT9IjLuqbuNXdaHfM=

sample-app/main.go

Lines changed: 46 additions & 36 deletions
Original file line numberDiff line numberDiff line change
@@ -7,74 +7,84 @@ import (
77
"time"
88

99
"github.com/sashabaranov/go-openai"
10-
sdk "github.com/traceloop/go-openllmetry/traceloop-sdk"
11-
"github.com/traceloop/go-openllmetry/traceloop-sdk/config"
12-
"github.com/traceloop/go-openllmetry/traceloop-sdk/dto"
10+
tlp "github.com/traceloop/go-openllmetry/traceloop-sdk"
1311
)
1412

1513
func main() {
1614
ctx := context.Background()
1715

18-
traceloop := sdk.NewClient(config.Config{
16+
traceloop, err := tlp.NewClient(ctx, tlp.Config{
1917
BaseURL: "api-staging.traceloop.com",
2018
APIKey: os.Getenv("TRACELOOP_API_KEY"),
2119
})
2220
defer func() { traceloop.Shutdown(ctx) }()
2321

24-
traceloop.Initialize(ctx)
25-
26-
27-
request, err := traceloop.GetOpenAIChatCompletionRequest("example-prompt", map[string]interface{}{ "date": time.Now().Format("01/02") })
2822
if err != nil {
29-
fmt.Printf("GetOpenAIChatCompletionRequest error: %v\n", err)
23+
fmt.Printf("NewClient error: %v\n", err)
3024
return
3125
}
32-
33-
client := openai.NewClient(os.Getenv("OPENAI_API_KEY"))
34-
resp, err := client.CreateChatCompletion(
35-
context.Background(),
36-
*request,
37-
)
3826

27+
request, err := traceloop.GetOpenAIChatCompletionRequest("example-prompt", map[string]interface{}{ "date": time.Now().Format("01/02") })
3928
if err != nil {
40-
fmt.Printf("ChatCompletion error: %v\n", err)
29+
fmt.Printf("GetOpenAIChatCompletionRequest error: %v\n", err)
4130
return
4231
}
4332

44-
fmt.Println(resp.Choices[0].Message.Content)
45-
33+
var promptMsgs []tlp.Message
34+
for i, message := range request.Messages {
35+
promptMsgs = append(promptMsgs, tlp.Message{
36+
Index: i,
37+
Content: message.Content,
38+
Role: message.Role,
39+
})
40+
}
4641

47-
log := dto.PromptLogAttributes{
48-
Prompt: dto.Prompt{
42+
llmSpan, err := traceloop.LogPrompt(
43+
ctx,
44+
tlp.Prompt{
4945
Vendor: "openai",
5046
Mode: "chat",
5147
Model: request.Model,
48+
Messages: promptMsgs,
5249
},
53-
Completion: dto.Completion{
54-
Model: resp.Model,
55-
},
56-
Usage: dto.Usage{
57-
TotalTokens: resp.Usage.TotalTokens,
58-
CompletionTokens: resp.Usage.CompletionTokens,
59-
PromptTokens: resp.Usage.PromptTokens,
50+
tlp.TraceloopAttributes{
51+
WorkflowName: "example-workflow",
52+
EntityName: "example-entity",
6053
},
61-
}
54+
)
55+
if err != nil {
56+
fmt.Printf("LogPrompt error: %v\n", err)
57+
return
58+
}
6259

63-
for i, message := range request.Messages {
64-
log.Prompt.Messages = append(log.Prompt.Messages, dto.Message{
65-
Index: i,
66-
Content: message.Content,
67-
Role: message.Role,
68-
})
60+
client := openai.NewClient(os.Getenv("OPENAI_API_KEY"))
61+
resp, err := client.CreateChatCompletion(
62+
context.Background(),
63+
*request,
64+
)
65+
if err != nil {
66+
fmt.Printf("ChatCompletion error: %v\n", err)
67+
return
6968
}
7069

70+
var completionMsgs []tlp.Message
7171
for _, choice := range resp.Choices {
72-
log.Completion.Messages = append(log.Completion.Messages, dto.Message{
72+
completionMsgs = append(completionMsgs, tlp.Message{
7373
Index: choice.Index,
7474
Content: choice.Message.Content,
7575
Role: choice.Message.Role,
7676
})
7777
}
7878

79-
traceloop.LogPrompt(ctx, log)
79+
llmSpan.LogCompletion(ctx, tlp.Completion{
80+
Model: resp.Model,
81+
Messages: completionMsgs,
82+
}, tlp.Usage{
83+
TotalTokens: resp.Usage.TotalTokens,
84+
CompletionTokens: resp.Usage.CompletionTokens,
85+
PromptTokens: resp.Usage.PromptTokens,
86+
})
87+
88+
89+
fmt.Println(resp.Choices[0].Message.Content)
8090
}

traceloop-sdk/config/config.go renamed to traceloop-sdk/config.go

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
1-
package config
1+
package traceloop
22

33
import "time"
44

@@ -9,6 +9,8 @@ type BackoffConfig struct {
99
type Config struct {
1010
BaseURL string
1111
APIKey string
12+
TracerName string
13+
ServiceName string
1214
PollingInterval time.Duration
1315
BackoffConfig BackoffConfig
1416
}

traceloop-sdk/dto/prompts_registry.go

Lines changed: 0 additions & 8 deletions
This file was deleted.

traceloop-sdk/go.mod

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -38,6 +38,7 @@ require (
3838
github.com/kluctl/go-embed-python v0.0.0-3.11.6-20231002-1 // indirect
3939
github.com/rogpeppe/go-internal v1.11.0 // indirect
4040
github.com/sirupsen/logrus v1.9.3 // indirect
41+
go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc v1.22.0
4142
go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.22.0
4243
go.opentelemetry.io/otel/exporters/stdout/stdouttrace v1.22.0
4344
go.opentelemetry.io/otel/sdk v1.22.0

traceloop-sdk/go.sum

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -63,6 +63,8 @@ go.opentelemetry.io/otel v1.22.0 h1:xS7Ku+7yTFvDfDraDIJVpw7XPyuHlB9MCiqqX5mcJ6Y=
6363
go.opentelemetry.io/otel v1.22.0/go.mod h1:eoV4iAi3Ea8LkAEI9+GFT44O6T/D0GWAVFyZVCC6pMI=
6464
go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.22.0 h1:9M3+rhx7kZCIQQhQRYaZCdNu1V73tm4TvXs2ntl98C4=
6565
go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.22.0/go.mod h1:noq80iT8rrHP1SfybmPiRGc9dc5M8RPmGvtwo7Oo7tc=
66+
go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc v1.22.0 h1:H2JFgRcGiyHg7H7bwcwaQJYrNFqCqrbTQ8K4p1OvDu8=
67+
go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc v1.22.0/go.mod h1:WfCWp1bGoYK8MeULtI15MmQVczfR+bFkk0DF3h06QmQ=
6668
go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.22.0 h1:FyjCyI9jVEfqhUh2MoSkmolPjfh5fp2hnV0b0irxH4Q=
6769
go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.22.0/go.mod h1:hYwym2nDEeZfG/motx0p7L7J1N1vyzIThemQsb4g2qY=
6870
go.opentelemetry.io/otel/exporters/stdout/stdouttrace v1.22.0 h1:zr8ymM5OWWjjiWRzwTfZ67c905+2TMHYp2lMJ52QTyM=

traceloop-sdk/prompt_registry.go

Lines changed: 6 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -7,10 +7,14 @@ import (
77

88
"github.com/kluctl/go-jinja2"
99
"github.com/sashabaranov/go-openai"
10-
"github.com/traceloop/go-openllmetry/traceloop-sdk/dto"
1110
"github.com/traceloop/go-openllmetry/traceloop-sdk/model"
1211
)
1312

13+
type PromptsResponse struct {
14+
Prompts []model.Prompt `json:"prompts"`
15+
Environment string `json:"environment"`
16+
}
17+
1418
func (instance *Traceloop) populatePromptRegistry() {
1519
resp, err := instance.fetchPathWithRetry(PromptsPath, instance.config.BackoffConfig.MaxRetries)
1620
if err != nil {
@@ -20,7 +24,7 @@ func (instance *Traceloop) populatePromptRegistry() {
2024
defer resp.Body.Close()
2125
decoder := json.NewDecoder(resp.Body)
2226

23-
var response dto.PromptsResponse
27+
var response PromptsResponse
2428
err = decoder.Decode(&response)
2529
if err != nil {
2630
fmt.Println("Failed to decode response", err)

0 commit comments

Comments
 (0)