Skip to content

Commit

Permalink
fix: merge sdk.Initialize and sdk.NewClient + avoid panic on error (#7)
Browse files Browse the repository at this point in the history
  • Loading branch information
galkleinman authored Jan 28, 2024
1 parent 2ee3974 commit 8dda467
Show file tree
Hide file tree
Showing 13 changed files with 280 additions and 167 deletions.
105 changes: 59 additions & 46 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -53,7 +53,7 @@ For a complete guide, go to our [docs](https://traceloop.com/docs/openllmetry/ge
Install the SDK:

```bash
go get traceloop-sdk
go get github.com/traceloop/go-openllmetry/traceloop-sdk
```

Then, initialize the SDK in your code:
Expand All @@ -65,19 +65,15 @@ import (
"context"

sdk "github.com/traceloop/go-openllmetry/traceloop-sdk"
"github.com/traceloop/go-openllmetry/traceloop-sdk/config"
)

func main() {
ctx := context.Background()

traceloop := sdk.NewClient(config.Config{
BaseURL: "api.traceloop.com",
traceloop := sdk.NewClient(ctx, sdk.Config{
APIKey: os.Getenv("TRACELOOP_API_KEY"),
})
defer func() { traceloop.Shutdown(ctx) }()

traceloop.Initialize(ctx)
}
```

Expand Down Expand Up @@ -114,22 +110,17 @@ import (

"github.com/sashabaranov/go-openai"
sdk "github.com/traceloop/go-openllmetry/traceloop-sdk"
"github.com/traceloop/go-openllmetry/traceloop-sdk/config"
"github.com/traceloop/go-openllmetry/traceloop-sdk/dto"
)

func main() {
ctx := context.Background()

// Initialize Traceloop
traceloop := sdk.NewClient(config.Config{
BaseURL: "api.traceloop.com",
	traceloop := sdk.NewClient(ctx, sdk.Config{
APIKey: os.Getenv("TRACELOOP_API_KEY"),
})
defer func() { traceloop.Shutdown(ctx) }()

traceloop.Initialize(ctx)

// Call OpenAI like you normally would
resp, err := client.CreateChatCompletion(
context.Background(),
Expand All @@ -144,40 +135,62 @@ func main() {
},
)

// Log the request and the response
log := dto.PromptLogAttributes{
Prompt: dto.Prompt{
Vendor: "openai",
Mode: "chat",
Model: request.Model,
},
Completion: dto.Completion{
Model: resp.Model,
},
Usage: dto.Usage{
TotalTokens: resp.Usage.TotalTokens,
CompletionTokens: resp.Usage.CompletionTokens,
PromptTokens: resp.Usage.PromptTokens,
},
}

for i, message := range request.Messages {
log.Prompt.Messages = append(log.Prompt.Messages, dto.Message{
Index: i,
Content: message.Content,
Role: message.Role,
})
}

for _, choice := range resp.Choices {
log.Completion.Messages = append(log.Completion.Messages, dto.Message{
Index: choice.Index,
Content: choice.Message.Content,
Role: choice.Message.Role,
})
}

traceloop.LogPrompt(ctx, log)
var promptMsgs []sdk.Message
for i, message := range request.Messages {
promptMsgs = append(promptMsgs, sdk.Message{
Index: i,
Content: message.Content,
Role: message.Role,
})
}

// Log the request
llmSpan, err := traceloop.LogPrompt(
ctx,
sdk.Prompt{
Vendor: "openai",
Mode: "chat",
Model: request.Model,
Messages: promptMsgs,
},
sdk.TraceloopAttributes{
WorkflowName: "example-workflow",
EntityName: "example-entity",
},
)
if err != nil {
fmt.Printf("LogPrompt error: %v\n", err)
return
}

client := openai.NewClient(os.Getenv("OPENAI_API_KEY"))
resp, err := client.CreateChatCompletion(
context.Background(),
*request,
)
if err != nil {
fmt.Printf("ChatCompletion error: %v\n", err)
return
}

var completionMsgs []sdk.Message
for _, choice := range resp.Choices {
completionMsgs = append(completionMsgs, sdk.Message{
Index: choice.Index,
Content: choice.Message.Content,
Role: choice.Message.Role,
})
}

// Log the response
llmSpan.LogCompletion(ctx, sdk.Completion{
Model: resp.Model,
Messages: completionMsgs,
}, sdk.Usage{
TotalTokens: resp.Usage.TotalTokens,
CompletionTokens: resp.Usage.CompletionTokens,
PromptTokens: resp.Usage.PromptTokens,
})
}
```

Expand Down
1 change: 1 addition & 0 deletions go.work.sum
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,7 @@ github.com/google/uuid v1.3.1/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+
github.com/kevinburke/ssh_config v1.2.0/go.mod h1:CT57kijsi8u/K/BOFA39wgDQJ9CxiF4nAY/ojJ6r6mM=
github.com/klauspost/compress v1.15.15/go.mod h1:ZcK2JAFqKOpnBlxcLsJzYfrS9X1akm9fHZNnD9+Vo/4=
github.com/onsi/gomega v1.27.10/go.mod h1:RsS8tutOdbdgzbPtzzATp12yT7kM5I5aElG3evPbQ0M=
github.com/pjbgf/sha1cd v0.3.0 h1:4D5XXmUUBUl/xQ6IjCkEAbqXskkq/4O7LmGn0AqMDs4=
github.com/pjbgf/sha1cd v0.3.0/go.mod h1:nZ1rrWOcGJ5uZgEEVL1VUM9iRQiZvWdbZjkKyFzPPsI=
github.com/rogpeppe/fastuuid v1.2.0/go.mod h1:jVj6XXZzXRy/MSR5jhDC/2q6DgLz+nrA6LYCDYWNEvQ=
github.com/sergi/go-diff v1.1.0/go.mod h1:STckp+ISIX8hZLjrqAeVduY0gWCT9IjLuqbuNXdaHfM=
Expand Down
82 changes: 46 additions & 36 deletions sample-app/main.go
Original file line number Diff line number Diff line change
Expand Up @@ -7,74 +7,84 @@ import (
"time"

"github.com/sashabaranov/go-openai"
sdk "github.com/traceloop/go-openllmetry/traceloop-sdk"
"github.com/traceloop/go-openllmetry/traceloop-sdk/config"
"github.com/traceloop/go-openllmetry/traceloop-sdk/dto"
tlp "github.com/traceloop/go-openllmetry/traceloop-sdk"
)

func main() {
ctx := context.Background()

traceloop := sdk.NewClient(config.Config{
traceloop, err := tlp.NewClient(ctx, tlp.Config{
BaseURL: "api-staging.traceloop.com",
APIKey: os.Getenv("TRACELOOP_API_KEY"),
})
defer func() { traceloop.Shutdown(ctx) }()

traceloop.Initialize(ctx)


request, err := traceloop.GetOpenAIChatCompletionRequest("example-prompt", map[string]interface{}{ "date": time.Now().Format("01/02") })
if err != nil {
fmt.Printf("GetOpenAIChatCompletionRequest error: %v\n", err)
fmt.Printf("NewClient error: %v\n", err)
return
}

client := openai.NewClient(os.Getenv("OPENAI_API_KEY"))
resp, err := client.CreateChatCompletion(
context.Background(),
*request,
)

request, err := traceloop.GetOpenAIChatCompletionRequest("example-prompt", map[string]interface{}{ "date": time.Now().Format("01/02") })
if err != nil {
fmt.Printf("ChatCompletion error: %v\n", err)
fmt.Printf("GetOpenAIChatCompletionRequest error: %v\n", err)
return
}

fmt.Println(resp.Choices[0].Message.Content)

var promptMsgs []tlp.Message
for i, message := range request.Messages {
promptMsgs = append(promptMsgs, tlp.Message{
Index: i,
Content: message.Content,
Role: message.Role,
})
}

log := dto.PromptLogAttributes{
Prompt: dto.Prompt{
llmSpan, err := traceloop.LogPrompt(
ctx,
tlp.Prompt{
Vendor: "openai",
Mode: "chat",
Model: request.Model,
Messages: promptMsgs,
},
Completion: dto.Completion{
Model: resp.Model,
},
Usage: dto.Usage{
TotalTokens: resp.Usage.TotalTokens,
CompletionTokens: resp.Usage.CompletionTokens,
PromptTokens: resp.Usage.PromptTokens,
tlp.TraceloopAttributes{
WorkflowName: "example-workflow",
EntityName: "example-entity",
},
}
)
if err != nil {
fmt.Printf("LogPrompt error: %v\n", err)
return
}

for i, message := range request.Messages {
log.Prompt.Messages = append(log.Prompt.Messages, dto.Message{
Index: i,
Content: message.Content,
Role: message.Role,
})
client := openai.NewClient(os.Getenv("OPENAI_API_KEY"))
resp, err := client.CreateChatCompletion(
context.Background(),
*request,
)
if err != nil {
fmt.Printf("ChatCompletion error: %v\n", err)
return
}

var completionMsgs []tlp.Message
for _, choice := range resp.Choices {
log.Completion.Messages = append(log.Completion.Messages, dto.Message{
completionMsgs = append(completionMsgs, tlp.Message{
Index: choice.Index,
Content: choice.Message.Content,
Role: choice.Message.Role,
})
}

traceloop.LogPrompt(ctx, log)
llmSpan.LogCompletion(ctx, tlp.Completion{
Model: resp.Model,
Messages: completionMsgs,
}, tlp.Usage{
TotalTokens: resp.Usage.TotalTokens,
CompletionTokens: resp.Usage.CompletionTokens,
PromptTokens: resp.Usage.PromptTokens,
})


fmt.Println(resp.Choices[0].Message.Content)
}
4 changes: 3 additions & 1 deletion traceloop-sdk/config/config.go → traceloop-sdk/config.go
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
package config
package traceloop

import "time"

Expand All @@ -9,6 +9,8 @@ type BackoffConfig struct {
type Config struct {
BaseURL string
APIKey string
TracerName string
ServiceName string
PollingInterval time.Duration
BackoffConfig BackoffConfig
}
8 changes: 0 additions & 8 deletions traceloop-sdk/dto/prompts_registry.go

This file was deleted.

1 change: 1 addition & 0 deletions traceloop-sdk/go.mod
Original file line number Diff line number Diff line change
Expand Up @@ -38,6 +38,7 @@ require (
github.com/kluctl/go-embed-python v0.0.0-3.11.6-20231002-1 // indirect
github.com/rogpeppe/go-internal v1.11.0 // indirect
github.com/sirupsen/logrus v1.9.3 // indirect
go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc v1.22.0
go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.22.0
go.opentelemetry.io/otel/exporters/stdout/stdouttrace v1.22.0
go.opentelemetry.io/otel/sdk v1.22.0
Expand Down
2 changes: 2 additions & 0 deletions traceloop-sdk/go.sum
Original file line number Diff line number Diff line change
Expand Up @@ -63,6 +63,8 @@ go.opentelemetry.io/otel v1.22.0 h1:xS7Ku+7yTFvDfDraDIJVpw7XPyuHlB9MCiqqX5mcJ6Y=
go.opentelemetry.io/otel v1.22.0/go.mod h1:eoV4iAi3Ea8LkAEI9+GFT44O6T/D0GWAVFyZVCC6pMI=
go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.22.0 h1:9M3+rhx7kZCIQQhQRYaZCdNu1V73tm4TvXs2ntl98C4=
go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.22.0/go.mod h1:noq80iT8rrHP1SfybmPiRGc9dc5M8RPmGvtwo7Oo7tc=
go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc v1.22.0 h1:H2JFgRcGiyHg7H7bwcwaQJYrNFqCqrbTQ8K4p1OvDu8=
go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc v1.22.0/go.mod h1:WfCWp1bGoYK8MeULtI15MmQVczfR+bFkk0DF3h06QmQ=
go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.22.0 h1:FyjCyI9jVEfqhUh2MoSkmolPjfh5fp2hnV0b0irxH4Q=
go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.22.0/go.mod h1:hYwym2nDEeZfG/motx0p7L7J1N1vyzIThemQsb4g2qY=
go.opentelemetry.io/otel/exporters/stdout/stdouttrace v1.22.0 h1:zr8ymM5OWWjjiWRzwTfZ67c905+2TMHYp2lMJ52QTyM=
Expand Down
8 changes: 6 additions & 2 deletions traceloop-sdk/prompt_registry.go
Original file line number Diff line number Diff line change
Expand Up @@ -7,10 +7,14 @@ import (

"github.com/kluctl/go-jinja2"
"github.com/sashabaranov/go-openai"
"github.com/traceloop/go-openllmetry/traceloop-sdk/dto"
"github.com/traceloop/go-openllmetry/traceloop-sdk/model"
)

type PromptsResponse struct {
Prompts []model.Prompt `json:"prompts"`
Environment string `json:"environment"`
}

func (instance *Traceloop) populatePromptRegistry() {
resp, err := instance.fetchPathWithRetry(PromptsPath, instance.config.BackoffConfig.MaxRetries)
if err != nil {
Expand All @@ -20,7 +24,7 @@ func (instance *Traceloop) populatePromptRegistry() {
defer resp.Body.Close()
decoder := json.NewDecoder(resp.Body)

var response dto.PromptsResponse
var response PromptsResponse
err = decoder.Decode(&response)
if err != nil {
fmt.Println("Failed to decode response", err)
Expand Down
Loading

0 comments on commit 8dda467

Please sign in to comment.