Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

release: 0.1.0-alpha.14 #35

Merged
merged 5 commits into from
Sep 3, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion .release-please-manifest.json
Original file line number Diff line number Diff line change
@@ -1,3 +1,3 @@
{
".": "0.1.0-alpha.13"
".": "0.1.0-alpha.14"
}
19 changes: 19 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
@@ -1,5 +1,24 @@
# Changelog

## 0.1.0-alpha.14 (2024-09-03)

Full Changelog: [v0.1.0-alpha.13...v0.1.0-alpha.14](https://github.com/openai/openai-go/compare/v0.1.0-alpha.13...v0.1.0-alpha.14)

### Features

* **examples/structure-outputs:** created an example for using structured outputs ([7d1e71e](https://github.com/openai/openai-go/commit/7d1e71e72b8c55d5b7228b72d967e4cae8165280))
* **stream-accumulators:** added streaming accumulator helpers and example ([29e80e7](https://github.com/openai/openai-go/commit/29e80e7dfb4571e93e616981ddc950e3058b6203))


### Bug Fixes

* **examples/fine-tuning:** used an old constant name ([#34](https://github.com/openai/openai-go/issues/34)) ([5d9ec26](https://github.com/openai/openai-go/commit/5d9ec26407b15c7effceb999bba3dfbeefc0adf2))


### Documentation

* **readme:** added some examples to readme ([#39](https://github.com/openai/openai-go/issues/39)) ([2dbfa62](https://github.com/openai/openai-go/commit/2dbfa62ffc89ead88e0fed586684a6b757836752))

## 0.1.0-alpha.13 (2024-08-29)

Full Changelog: [v0.1.0-alpha.12...v0.1.0-alpha.13](https://github.com/openai/openai-go/compare/v0.1.0-alpha.12...v0.1.0-alpha.13)
Expand Down
247 changes: 246 additions & 1 deletion README.md
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,7 @@ Or to pin the version:
<!-- x-release-please-start-version -->

```sh
go get -u 'github.com/openai/openai-go@v0.1.0-alpha.13'
go get -u 'github.com/openai/openai-go@v0.1.0-alpha.14'
```

<!-- x-release-please-end -->
Expand Down Expand Up @@ -64,6 +64,251 @@ func main() {
}

```
<details>
<summary>Conversations</summary>

```go
// Start a conversation with a single user message.
param := openai.ChatCompletionNewParams{
	Messages: openai.F([]openai.ChatCompletionMessageParamUnion{
		openai.UserMessage("What kind of houseplant is easy to take care of?"),
	}),
	Seed:  openai.Int(1),
	Model: openai.F(openai.ChatModelGPT4o),
}

completion, err := client.Chat.Completions.New(ctx, param)
if err != nil {
	panic(err)
}

// Continue the conversation: echo the assistant's reply back into the
// message history, then append a follow-up question.
param.Messages.Value = append(param.Messages.Value, completion.Choices[0].Message)
param.Messages.Value = append(param.Messages.Value, openai.UserMessage("How big are those?"))

completion, err = client.Chat.Completions.New(ctx, param)
if err != nil {
	panic(err)
}
```
</details>

<details>
<summary>Streaming responses</summary>

```go
// Stream a chat completion, printing deltas as they arrive.
question := "Write an epic"

stream := client.Chat.Completions.NewStreaming(ctx, openai.ChatCompletionNewParams{
Messages: openai.F([]openai.ChatCompletionMessageParamUnion{
openai.UserMessage(question),
}),
Seed: openai.Int(0),
Model: openai.F(openai.ChatModelGPT4o),
})

// optionally, an accumulator helper can be used to fold the streamed
// chunks back into a complete ChatCompletion
acc := openai.ChatCompletionAccumulator{}

for stream.Next() {
chunk := stream.Current()
acc.AddChunk(chunk)

// fires once, when the content portion of the message has fully streamed
if content, ok := acc.JustFinishedContent(); ok {
println("Content stream finished:", content)
println()
}

// if using tool calls: fires once per finished tool call
if tool, ok := acc.JustFinishedToolCall(); ok {
println("Tool call stream finished:", tool.Index, tool.Name, tool.Arguments)
println()
}

if refusal, ok := acc.JustFinishedRefusal(); ok {
println("Refusal stream finished:", refusal)
println()
}

// it's best to use chunks after handling JustFinished events
if len(chunk.Choices) > 0 {
println(chunk.Choices[0].Delta.Content)
}
}

// Err reports any error that terminated the stream early.
if err := stream.Err(); err != nil {
panic(err)
}

// After the stream is finished, acc can be used like a ChatCompletion
_ = acc.Choices[0].Message.Content
```

</details>

<details>
<summary>Tool calling</summary>

```go
package main

import (
	"context"
	"encoding/json"
	"fmt"

	"github.com/openai/openai-go"
)

// getWeather is a mock that simulates weather data retrieval.
// A real implementation would call a weather API for the given location.
func getWeather(location string) string {
	return "Sunny, 25°C"
}

func main() {
	client := openai.NewClient()
	ctx := context.Background()

	question := "What is the weather in New York City?"

	params := openai.ChatCompletionNewParams{
		Messages: openai.F([]openai.ChatCompletionMessageParamUnion{
			openai.UserMessage(question),
		}),
		Tools: openai.F([]openai.ChatCompletionToolParam{
			{
				Type: openai.F(openai.ChatCompletionToolTypeFunction),
				Function: openai.F(openai.FunctionDefinitionParam{
					Name:        openai.String("get_weather"),
					Description: openai.String("Get weather at the given location"),
					Parameters: openai.F(openai.FunctionParameters{
						"type": "object",
						"properties": map[string]interface{}{
							"location": map[string]string{
								"type": "string",
							},
						},
						"required": []string{"location"},
					}),
				}),
			},
		}),
		Model: openai.F(openai.ChatModelGPT4o),
	}

	// Make the initial chat completion request.
	completion, err := client.Chat.Completions.New(ctx, params)
	if err != nil {
		panic(err)
	}

	toolCalls := completion.Choices[0].Message.ToolCalls

	// Return early if the model did not request any tool calls.
	if len(toolCalls) == 0 {
		fmt.Println("No function call")
		return
	}

	// If there was a function call, continue the conversation: echo the
	// assistant message back, then answer each tool call it contains.
	params.Messages.Value = append(params.Messages.Value, completion.Choices[0].Message)
	for _, toolCall := range toolCalls {
		if toolCall.Function.Name == "get_weather" {
			// Extract the location from the function call arguments.
			var args map[string]interface{}
			if err := json.Unmarshal([]byte(toolCall.Function.Arguments), &args); err != nil {
				panic(err)
			}
			location := args["location"].(string)

			// Simulate getting weather data.
			weatherData := getWeather(location)

			// Respond to the tool call with its result, keyed by call ID.
			params.Messages.Value = append(params.Messages.Value, openai.ToolMessage(toolCall.ID, weatherData))
		}
	}

	completion, err = client.Chat.Completions.New(ctx, params)
	if err != nil {
		panic(err)
	}

	println(completion.Choices[0].Message.Content)
}
```

</details>

<details>
<summary>Structured outputs</summary>

```go
package main

import (
	"context"
	"encoding/json"
	"fmt"

	"github.com/invopop/jsonschema"
	"github.com/openai/openai-go"
)

// HistoricalComputer is converted to a Structured Outputs response schema.
type HistoricalComputer struct {
	Origin       Origin   `json:"origin" jsonschema_description:"The origin of the computer"`
	Name         string   `json:"full_name" jsonschema_description:"The name of the device model"`
	Legacy       string   `json:"legacy" jsonschema:"enum=positive,enum=neutral,enum=negative" jsonschema_description:"Its influence on the field of computing"`
	NotableFacts []string `json:"notable_facts" jsonschema_description:"A few key facts about the computer"`
}

// Origin describes when and by whom the computer was built.
type Origin struct {
	YearBuilt    int64  `json:"year_of_construction" jsonschema_description:"The year it was made"`
	Organization string `json:"organization" jsonschema_description:"The organization that was in charge of its development"`
}

// GenerateSchema reflects a JSON schema from the type parameter T.
// Structured Outputs requires additionalProperties to be false and the
// schema to be inlined rather than using $ref indirection.
func GenerateSchema[T any]() interface{} {
	reflector := jsonschema.Reflector{
		AllowAdditionalProperties: false,
		DoNotReference:            true,
	}
	var v T
	schema := reflector.Reflect(v)
	return schema
}

// Generate the JSON schema once at initialization time.
var HistoricalComputerResponseSchema = GenerateSchema[HistoricalComputer]()

func main() {
	client := openai.NewClient()
	ctx := context.Background()

	question := "What computer ran the first neural network?"

	// Name and description should match the schema being requested.
	schemaParam := openai.ResponseFormatJSONSchemaJSONSchemaParam{
		Name:        openai.F("historical_computer"),
		Description: openai.F("Notable information about a computer"),
		Schema:      openai.F(HistoricalComputerResponseSchema),
		Strict:      openai.Bool(true),
	}

	chat, err := client.Chat.Completions.New(ctx, openai.ChatCompletionNewParams{
		Messages: openai.F([]openai.ChatCompletionMessageParamUnion{
			openai.UserMessage(question),
		}),
		ResponseFormat: openai.F[openai.ChatCompletionNewParamsResponseFormatUnion](
			openai.ResponseFormatJSONSchemaParam{
				Type:       openai.F(openai.ResponseFormatJSONSchemaTypeJSONSchema),
				JSONSchema: openai.F(schemaParam),
			},
		),
		// only certain models can perform structured outputs
		Model: openai.F(openai.ChatModelGPT4o2024_08_06),
	})
	if err != nil {
		panic(err)
	}

	// Extract the model's JSON answer into a well-typed struct.
	historicalComputer := HistoricalComputer{}
	if err := json.Unmarshal([]byte(chat.Choices[0].Message.Content), &historicalComputer); err != nil {
		panic(err)
	}

	fmt.Printf("Name: %v\n", historicalComputer.Name)
	fmt.Printf("Year: %v\n", historicalComputer.Origin.YearBuilt)
	fmt.Printf("Org: %v\n", historicalComputer.Origin.Organization)
	fmt.Printf("Legacy: %v\n", historicalComputer.Legacy)
	fmt.Printf("Facts:\n")
	for i, fact := range historicalComputer.NotableFacts {
		fmt.Printf("%v. %v\n", i+1, fact)
	}
}
```

</details>

### Request fields

Expand Down
90 changes: 90 additions & 0 deletions examples/chat-completion-accumulating/main.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,90 @@
package main

import (
"context"

"github.com/openai/openai-go"
)

// getWeather stands in for a real weather lookup; a production
// implementation would query a weather API for the given location.
func getWeather(location string) string {
	const cannedForecast = "Sunny, 25°C"
	return cannedForecast
}

// main streams a chat completion that may invoke a weather tool,
// folding every chunk into an accumulator so the finished message,
// usage, and finish reason are available once the stream ends.
func main() {
	client := openai.NewClient()
	ctx := context.Background()

	question := "Begin a very brief introduction of Greece, then incorporate the local weather of a few towns"

	print("> ")
	println(question)
	println()

	// The single tool the model is allowed to call.
	weatherTool := openai.ChatCompletionToolParam{
		Type: openai.F(openai.ChatCompletionToolTypeFunction),
		Function: openai.F(openai.FunctionDefinitionParam{
			Name:        openai.String("get_live_weather"),
			Description: openai.String("Get weather at the given location"),
			Parameters: openai.F(openai.FunctionParameters{
				"type": "object",
				"properties": map[string]interface{}{
					"location": map[string]string{
						"type": "string",
					},
				},
				"required": []string{"location"},
			}),
		}),
	}

	req := openai.ChatCompletionNewParams{
		Messages: openai.F([]openai.ChatCompletionMessageParamUnion{
			openai.UserMessage(question),
		}),
		Seed:  openai.Int(0),
		Model: openai.F(openai.ChatModelGPT4o),
		Tools: openai.F([]openai.ChatCompletionToolParam{weatherTool}),
	}

	stream := client.Chat.Completions.NewStreaming(ctx, req)

	// Folds the streamed chunks back into a complete ChatCompletion.
	accumulator := openai.ChatCompletionAccumulator{}

	for stream.Next() {
		part := stream.Current()
		accumulator.AddChunk(part)

		// When a JustFinished event fires, the current chunk carries no
		// content data for that item.
		if content, ok := accumulator.JustFinishedContent(); ok {
			println("Content stream finished:", content)
			println()
		}

		if tool, ok := accumulator.JustFinishedToolCall(); ok {
			println("Tool call stream finished:", tool.Index, tool.Name, tool.Arguments)
			println()
		}

		if refusal, ok := accumulator.JustFinishedRefusal(); ok {
			println("Refusal stream finished:", refusal)
			println()
		}

		// Inspect raw chunk data only after the JustFinished events above.
		if len(part.Choices) > 0 {
			println(part.Choices[0].Delta.JSON.RawJSON())
		}
	}

	if err := stream.Err(); err != nil {
		panic(err)
	}

	// Once the stream is done, the accumulator behaves like a ChatCompletion.
	_ = accumulator.Choices[0].Message.Content

	println("Total Tokens:", accumulator.Usage.TotalTokens)
	println("Finish Reason:", accumulator.Choices[0].FinishReason)
}
2 changes: 1 addition & 1 deletion examples/fine-tuning/main.go
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,7 @@ func main() {
data, err := os.Open("./fine-tuning-data.jsonl")
file, err := client.Files.New(ctx, openai.FileNewParams{
File: openai.F[io.Reader](data),
Purpose: openai.F(openai.FileNewParamsPurposeFineTune),
Purpose: openai.F(openai.FilePurposeFineTune),
})
if err != nil {
panic(err)
Expand Down
Loading
Loading