
Commit 5f4ff3e

Add Readme example to example_test.go (#298)
* Add speech to text example in docs
* Add caption formats for audio transcription
* Add caption example to README
* Address sanity check errors
* Add tests for decodeResponse
* Use typechecker for audio response format
* Decoding response refactors
* Migrated examples to example_test.go
* Add some executable examples
* Update error docs
* Avoid linting example files which break conventions
* Restore README examples
* Enable linting for example_test
1 parent 39abb5a commit 5f4ff3e

8 files changed, +490 −1 lines changed

README.md

Lines changed: 1 addition & 0 deletions
@@ -485,4 +485,5 @@ if errors.As(err, &e) {
 ```
 </details>
 
+See the `examples/` folder for more.
 

common.go

Lines changed: 2 additions & 1 deletion
@@ -1,6 +1,7 @@
-// common.go defines common types used throughout the OpenAI API.
 package openai
 
+// common.go defines common types used throughout the OpenAI API.
+
 // Usage Represents the total token usage per request to OpenAI.
 type Usage struct {
 	PromptTokens int `json:"prompt_tokens"`

example_test.go

Lines changed: 350 additions & 0 deletions
@@ -0,0 +1,350 @@
package openai_test

import (
	"bufio"
	"context"
	"encoding/base64"
	"errors"
	"fmt"
	"io"
	"net/http"
	"net/url"
	"os"

	"github.com/sashabaranov/go-openai"
)

func Example() {
	client := openai.NewClient(os.Getenv("OPENAI_API_KEY"))
	resp, err := client.CreateChatCompletion(
		context.Background(),
		openai.ChatCompletionRequest{
			Model: openai.GPT3Dot5Turbo,
			Messages: []openai.ChatCompletionMessage{
				{
					Role:    openai.ChatMessageRoleUser,
					Content: "Hello!",
				},
			},
		},
	)

	if err != nil {
		fmt.Printf("ChatCompletion error: %v\n", err)
		return
	}

	fmt.Println(resp.Choices[0].Message.Content)
}

func ExampleClient_CreateChatCompletionStream() {
	client := openai.NewClient(os.Getenv("OPENAI_API_KEY"))

	stream, err := client.CreateChatCompletionStream(
		context.Background(),
		openai.ChatCompletionRequest{
			Model:     openai.GPT3Dot5Turbo,
			MaxTokens: 20,
			Messages: []openai.ChatCompletionMessage{
				{
					Role:    openai.ChatMessageRoleUser,
					Content: "Lorem ipsum",
				},
			},
			Stream: true,
		},
	)
	if err != nil {
		fmt.Printf("ChatCompletionStream error: %v\n", err)
		return
	}
	defer stream.Close()

	fmt.Printf("Stream response: ")
	for {
		var response openai.ChatCompletionStreamResponse
		response, err = stream.Recv()
		if errors.Is(err, io.EOF) {
			fmt.Println("\nStream finished")
			return
		}

		if err != nil {
			fmt.Printf("\nStream error: %v\n", err)
			return
		}

		fmt.Print(response.Choices[0].Delta.Content)
	}
}

func ExampleClient_CreateCompletion() {
	client := openai.NewClient(os.Getenv("OPENAI_API_KEY"))
	resp, err := client.CreateCompletion(
		context.Background(),
		openai.CompletionRequest{
			Model:     openai.GPT3Ada,
			MaxTokens: 5,
			Prompt:    "Lorem ipsum",
		},
	)
	if err != nil {
		fmt.Printf("Completion error: %v\n", err)
		return
	}
	fmt.Println(resp.Choices[0].Text)
}

func ExampleClient_CreateCompletionStream() {
	client := openai.NewClient(os.Getenv("OPENAI_API_KEY"))
	stream, err := client.CreateCompletionStream(
		context.Background(),
		openai.CompletionRequest{
			Model:     openai.GPT3Ada,
			MaxTokens: 5,
			Prompt:    "Lorem ipsum",
			Stream:    true,
		},
	)
	if err != nil {
		fmt.Printf("CompletionStream error: %v\n", err)
		return
	}
	defer stream.Close()

	for {
		var response openai.CompletionResponse
		response, err = stream.Recv()
		if errors.Is(err, io.EOF) {
			fmt.Println("Stream finished")
			return
		}

		if err != nil {
			fmt.Printf("Stream error: %v\n", err)
			return
		}

		fmt.Printf("Stream response: %#v\n", response)
	}
}

func ExampleClient_CreateTranscription() {
	client := openai.NewClient(os.Getenv("OPENAI_API_KEY"))
	resp, err := client.CreateTranscription(
		context.Background(),
		openai.AudioRequest{
			Model:    openai.Whisper1,
			FilePath: "recording.mp3",
		},
	)
	if err != nil {
		fmt.Printf("Transcription error: %v\n", err)
		return
	}
	fmt.Println(resp.Text)
}

func ExampleClient_CreateTranscription_captions() {
	client := openai.NewClient(os.Getenv("OPENAI_API_KEY"))

	resp, err := client.CreateTranscription(
		context.Background(),
		openai.AudioRequest{
			Model:    openai.Whisper1,
			FilePath: os.Args[1],
			Format:   openai.AudioResponseFormatSRT,
		},
	)
	if err != nil {
		fmt.Printf("Transcription error: %v\n", err)
		return
	}
	f, err := os.Create(os.Args[1] + ".srt")
	if err != nil {
		fmt.Printf("Could not open file: %v\n", err)
		return
	}
	defer f.Close()
	if _, err = f.WriteString(resp.Text); err != nil {
		fmt.Printf("Error writing to file: %v\n", err)
		return
	}
}

func ExampleClient_CreateTranslation() {
	client := openai.NewClient(os.Getenv("OPENAI_API_KEY"))
	resp, err := client.CreateTranslation(
		context.Background(),
		openai.AudioRequest{
			Model:    openai.Whisper1,
			FilePath: "recording.mp3",
		},
	)
	if err != nil {
		fmt.Printf("Translation error: %v\n", err)
		return
	}
	fmt.Println(resp.Text)
}

func ExampleClient_CreateImage() {
	client := openai.NewClient(os.Getenv("OPENAI_API_KEY"))

	respURL, err := client.CreateImage(
		context.Background(),
		openai.ImageRequest{
			Prompt:         "Parrot on a skateboard performs a trick, cartoon style, natural light, high detail",
			Size:           openai.CreateImageSize256x256,
			ResponseFormat: openai.CreateImageResponseFormatURL,
			N:              1,
		},
	)
	if err != nil {
		fmt.Printf("Image creation error: %v\n", err)
		return
	}
	fmt.Println(respURL.Data[0].URL)
}

func ExampleClient_CreateImage_base64() {
	client := openai.NewClient(os.Getenv("OPENAI_API_KEY"))

	resp, err := client.CreateImage(
		context.Background(),
		openai.ImageRequest{
			Prompt:         "Portrait of a humanoid parrot in a classic costume, high detail, realistic light, unreal engine",
			Size:           openai.CreateImageSize512x512,
			ResponseFormat: openai.CreateImageResponseFormatB64JSON,
			N:              1,
		},
	)
	if err != nil {
		fmt.Printf("Image creation error: %v\n", err)
		return
	}

	b, err := base64.StdEncoding.DecodeString(resp.Data[0].B64JSON)
	if err != nil {
		fmt.Printf("Base64 decode error: %v\n", err)
		return
	}

	f, err := os.Create("example.png")
	if err != nil {
		fmt.Printf("File creation error: %v\n", err)
		return
	}
	defer f.Close()

	_, err = f.Write(b)
	if err != nil {
		fmt.Printf("File write error: %v\n", err)
		return
	}

	fmt.Println("The image was saved as example.png")
}

func ExampleClientConfig_clientWithProxy() {
	config := openai.DefaultConfig(os.Getenv("OPENAI_API_KEY"))
	port := os.Getenv("OPENAI_PROXY_PORT")
	proxyURL, err := url.Parse(fmt.Sprintf("http://localhost:%s", port))
	if err != nil {
		panic(err)
	}
	transport := &http.Transport{
		Proxy: http.ProxyURL(proxyURL),
	}
	config.HTTPClient = &http.Client{
		Transport: transport,
	}

	client := openai.NewClientWithConfig(config)

	client.CreateChatCompletion( //nolint:errcheck // outside of the scope of this example.
		context.Background(),
		openai.ChatCompletionRequest{
			// etc...
		},
	)
}

func Example_chatbot() {
	client := openai.NewClient(os.Getenv("OPENAI_API_KEY"))

	req := openai.ChatCompletionRequest{
		Model: openai.GPT3Dot5Turbo,
		Messages: []openai.ChatCompletionMessage{
			{
				Role:    openai.ChatMessageRoleSystem,
				Content: "you are a helpful chatbot",
			},
		},
	}
	fmt.Println("Conversation")
	fmt.Println("---------------------")
	fmt.Print("> ")
	s := bufio.NewScanner(os.Stdin)
	for s.Scan() {
		req.Messages = append(req.Messages, openai.ChatCompletionMessage{
			Role:    openai.ChatMessageRoleUser,
			Content: s.Text(),
		})
		resp, err := client.CreateChatCompletion(context.Background(), req)
		if err != nil {
			fmt.Printf("ChatCompletion error: %v\n", err)
			continue
		}
		fmt.Printf("%s\n\n", resp.Choices[0].Message.Content)
		req.Messages = append(req.Messages, resp.Choices[0].Message)
		fmt.Print("> ")
	}
}

func ExampleDefaultAzureConfig() {
	azureKey := os.Getenv("AZURE_OPENAI_API_KEY")       // Your azure API key
	azureEndpoint := os.Getenv("AZURE_OPENAI_ENDPOINT") // Your azure OpenAI endpoint
	azureModel := os.Getenv("AZURE_OPENAI_MODEL")       // Your model deployment name
	config := openai.DefaultAzureConfig(azureKey, azureEndpoint, azureModel)
	client := openai.NewClientWithConfig(config)
	resp, err := client.CreateChatCompletion(
		context.Background(),
		openai.ChatCompletionRequest{
			Model: openai.GPT3Dot5Turbo,
			Messages: []openai.ChatCompletionMessage{
				{
					Role:    openai.ChatMessageRoleUser,
					Content: "Hello Azure OpenAI!",
				},
			},
		},
	)

	if err != nil {
		fmt.Printf("ChatCompletion error: %v\n", err)
		return
	}

	fmt.Println(resp.Choices[0].Message.Content)
}

// OpenAI maintains clear documentation on how to handle API errors.
//
// see: https://platform.openai.com/docs/guides/error-codes/api-errors
func ExampleAPIError() {
	var err error // Assume this is the error you are checking.
	e := &openai.APIError{}
	if errors.As(err, &e) {
		switch e.HTTPStatusCode {
		case 401:
			// invalid auth or key (do not retry)
		case 429:
			// rate limiting or engine overload (wait and retry)
		case 500:
			// openai server error (retry)
		default:
			// unhandled
		}
	}
}

examples/README.md

Lines changed: 6 additions & 0 deletions
@@ -0,0 +1,6 @@
To run an example:

```
export OPENAI_API_KEY="<your key here>"
go run ./examples/<target>
```
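The individual example programs under `examples/` are not shown in this diff. As a rough sketch only — the path `examples/chat/main.go` is a hypothetical target, not taken from this commit — a runnable `<target>` could reuse the same chat-completion call shown in `example_test.go` above:

```go
// Hypothetical examples/chat/main.go — a minimal sketch of a runnable target,
// not part of this commit.
package main

import (
	"context"
	"fmt"
	"os"

	"github.com/sashabaranov/go-openai"
)

func main() {
	// Read the API key from the environment, as examples/README.md instructs.
	client := openai.NewClient(os.Getenv("OPENAI_API_KEY"))
	resp, err := client.CreateChatCompletion(
		context.Background(),
		openai.ChatCompletionRequest{
			Model: openai.GPT3Dot5Turbo,
			Messages: []openai.ChatCompletionMessage{
				{Role: openai.ChatMessageRoleUser, Content: "Hello!"},
			},
		},
	)
	if err != nil {
		fmt.Printf("ChatCompletion error: %v\n", err)
		os.Exit(1)
	}
	fmt.Println(resp.Choices[0].Message.Content)
}
```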
