@@ -389,6 +389,335 @@ for _, embedding := range resp.Data {
}
```

+ ### Legacy Completions
+
+ **Before:**
+ ```go
+ resp, err := client.GetCompletions(context.TODO(), azopenai.CompletionsOptions{
+     Prompt: []string{"What is Azure OpenAI, in 20 words or less"},
+     MaxTokens: to.Ptr(int32(2048)),
+     Temperature: to.Ptr(float32(0.0)),
+     DeploymentName: to.Ptr("gpt-3.5-turbo-instruct"),
+ }, nil)
+
+ if err != nil {
+     // Handle error
+ }
+
+ for _, choice := range resp.Choices {
+     // Process each choice in the response
+     // *choice.Text contains the generated text
+ }
+ ```
+
+ **After:**
+ ```go
+ resp, err := client.Completions.New(context.TODO(), openai.CompletionNewParams{
+     Model: openai.CompletionNewParamsModel(model), // Azure deployment name here
+     Prompt: openai.CompletionNewParamsPromptUnion{
+         OfString: openai.String("What is Azure OpenAI, in 20 words or less"),
+     },
+     Temperature: openai.Float(0.0),
+ })
+
+ if err != nil {
+     // Handle error
+ }
+
+ for _, choice := range resp.Choices {
+     // Process each choice in the response
+     // choice.Text contains the generated text
+ }
+ ```
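+
+ The old snippet also capped the output with `MaxTokens`, which the new call above omits. A minimal sketch of carrying that setting over, assuming the parameter is exposed as a `MaxTokens` field set with `openai.Int` (mirroring the API's `max_tokens`):
+
+ ```go
+ resp, err := client.Completions.New(context.TODO(), openai.CompletionNewParams{
+     Model: openai.CompletionNewParamsModel(model), // Azure deployment name here
+     Prompt: openai.CompletionNewParamsPromptUnion{
+         OfString: openai.String("What is Azure OpenAI, in 20 words or less"),
+     },
+     MaxTokens: openai.Int(2048), // counterpart of the old MaxTokens: to.Ptr(int32(2048))
+     Temperature: openai.Float(0.0),
+ })
+ ```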
+
+ ### Audio
+
+ #### Transcription
+
+ **Before:**
+ ```go
+ mp3Bytes, err := os.ReadFile("audio.mp3")
+ if err != nil {
+     // Handle error
+ }
+ resp, err := client.GetAudioTranscription(context.TODO(), azopenai.AudioTranscriptionOptions{
+     File: mp3Bytes,
+
+     ResponseFormat: to.Ptr(azopenai.AudioTranscriptionFormatText),
+
+     // DeploymentName: &modelDeploymentID,
+ }, nil)
+
+ if err != nil {
+     // Handle error
+ }
+
+ // Access the transcribed text as *resp.Text
+ ```
+
+ **After:**
+ ```go
+ audioFile, err := os.Open("audio.mp3")
+ if err != nil {
+     // Handle error
+ }
+ defer audioFile.Close()
+
+ resp, err := client.Audio.Transcriptions.New(context.TODO(), openai.AudioTranscriptionNewParams{
+     Model: openai.AudioModel(model), // Azure deployment name here
+     File: audioFile, // Note that an open file handle is passed here, not raw bytes
+     ResponseFormat: openai.AudioResponseFormatJSON,
+ })
+
+ if err != nil {
+     // Handle error
+ }
+
+ // Access the transcribed text as resp.Text
+ ```
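+
+ If the audio is already in memory, as in the old snippet's `mp3Bytes`, an `os.File` is not strictly required, since the `File` field accepts an `io.Reader`. A minimal sketch, assuming the `openai.File` helper for wrapping a plain reader with a filename and content type (the filename and MIME type here are placeholders):
+
+ ```go
+ resp, err := client.Audio.Transcriptions.New(context.TODO(), openai.AudioTranscriptionNewParams{
+     Model: openai.AudioModel(model), // Azure deployment name here
+     // openai.File is assumed to attach filename/content-type metadata to the reader,
+     // which helps the service detect the audio format.
+     File: openai.File(bytes.NewReader(mp3Bytes), "audio.mp3", "audio/mpeg"),
+ })
+ ```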
+
+ #### Text to speech
+
+ **Before:**
+ ```go
+ audioResp, err := client.GenerateSpeechFromText(context.Background(), azopenai.SpeechGenerationOptions{
+     Input: to.Ptr("i am a computer"),
+     Voice: to.Ptr(azopenai.SpeechVoiceAlloy),
+     ResponseFormat: to.Ptr(azopenai.SpeechGenerationResponseFormatFlac),
+     DeploymentName: to.Ptr("tts-1"),
+ }, nil)
+
+ if err != nil {
+     // Handle error
+ }
+
+ defer audioResp.Body.Close()
+
+ audioBytes, err := io.ReadAll(audioResp.Body)
+
+ if err != nil {
+     // Handle error
+ }
+
+ // The generated audio is now in audioBytes (len(audioBytes) bytes)
+ ```
+
+ **After:**
+ ```go
+ audioResp, err := client.Audio.Speech.New(context.Background(), openai.AudioSpeechNewParams{
+     Model: openai.SpeechModel(model), // Azure deployment name here
+     Input: "i am a computer",
+     Voice: openai.AudioSpeechNewParamsVoiceAlloy,
+     ResponseFormat: openai.AudioSpeechNewParamsResponseFormatFLAC,
+ })
+
+ if err != nil {
+     // Handle error
+ }
+
+ defer audioResp.Body.Close()
+
+ audioBytes, err := io.ReadAll(audioResp.Body)
+
+ if err != nil {
+     // Handle error
+ }
+
+ // The generated audio is now in audioBytes (len(audioBytes) bytes)
+ ```
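+
+ If the generated speech is only being written to disk, the response body can be streamed straight into a file instead of buffered with `io.ReadAll`. A small sketch using only the standard library (`os`, `io`); the output path is a placeholder:
+
+ ```go
+ out, err := os.Create("speech.flac") // placeholder output path
+ if err != nil {
+     // Handle error
+ }
+ defer out.Close()
+
+ // Copy the FLAC bytes from the HTTP response body directly into the file.
+ if _, err := io.Copy(out, audioResp.Body); err != nil {
+     // Handle error
+ }
+ ```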
+
+ #### Translation
+
+ **Before:**
+ ```go
+ resp, err := client.GetAudioTranslation(context.TODO(), azopenai.AudioTranslationOptions{
+     File: mp3Bytes,
+     DeploymentName: &modelDeploymentID,
+     Prompt: to.Ptr("Translate the following Hindi audio to English"),
+ }, nil)
+
+ if err != nil {
+     // Handle error
+ }
+
+ // Access the translated text as *resp.Text
+ ```
+
+ **After:**
+ ```go
+ resp, err := client.Audio.Translations.New(context.TODO(), openai.AudioTranslationNewParams{
+     Model: openai.AudioModel(model), // Azure deployment name here
+     File: audioFile, // opened with os.Open, as in the transcription example
+     Prompt: openai.String("Translate the following Hindi audio to English"),
+ })
+
+ if err != nil {
+     // Handle error
+ }
+
+ // Access the translated text as resp.Text
+ ```
+
+ ### Image
+
+ **Before:**
+ ```go
+ resp, err := client.GetImageGenerations(context.TODO(), azopenai.ImageGenerationOptions{
+     Prompt: to.Ptr("a cat"),
+     ResponseFormat: to.Ptr(azopenai.ImageGenerationResponseFormatURL),
+     DeploymentName: &azureDeployment,
+ }, nil)
+
+ if err != nil {
+     // Handle error
+ }
+
+ for _, generatedImage := range resp.Data {
+     resp, err := http.Get(*generatedImage.URL)
+     if err != nil {
+         // Handle error
+     }
+     defer resp.Body.Close()
+
+     if resp.StatusCode != http.StatusOK {
+         // Handle non-200 status code
+         continue
+     }
+
+     imageData, err := io.ReadAll(resp.Body)
+     if err != nil {
+         // Handle error reading image data
+     }
+
+     // Use the imageData byte slice for the downloaded image
+     // For example, save it to a file:
+     // err = os.WriteFile("generated_image.png", imageData, 0644)
+ }
+ ```
+
+ **After:**
+ ```go
+ resp, err := client.Images.Generate(context.TODO(), openai.ImageGenerateParams{
+     Prompt: "a cat",
+     Model: openai.ImageModel(model), // Azure deployment name here
+     ResponseFormat: openai.ImageGenerateParamsResponseFormatURL,
+     Size: openai.ImageGenerateParamsSize1024x1024,
+ })
+
+ if err != nil {
+     // Handle error
+ }
+
+ for _, generatedImage := range resp.Data {
+     resp, err := http.Get(generatedImage.URL)
+     if err != nil {
+         // Handle error
+     }
+     defer resp.Body.Close()
+
+     if resp.StatusCode != http.StatusOK {
+         // Handle non-200 status code
+         continue
+     }
+
+     imageData, err := io.ReadAll(resp.Body)
+     if err != nil {
+         // Handle error reading image data
+     }
+
+     // Use the imageData byte slice for the downloaded image
+     // For example, save it to a file:
+     // err = os.WriteFile("generated_image.png", imageData, 0644)
+ }
+ ```
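+
+ One caveat carried over from the old snippet: `defer resp.Body.Close()` inside the loop only runs when the enclosing function returns, and the inner `resp` shadows the SDK response. A sketch of one way to tidy this up, using a hypothetical `downloadImage` helper built from the standard library (`fmt`, `io`, `net/http`):
+
+ ```go
+ // downloadImage is a hypothetical helper that fetches one generated image and
+ // closes the HTTP response body before returning.
+ func downloadImage(url string) ([]byte, error) {
+     httpResp, err := http.Get(url)
+     if err != nil {
+         return nil, err
+     }
+     defer httpResp.Body.Close()
+
+     if httpResp.StatusCode != http.StatusOK {
+         return nil, fmt.Errorf("unexpected status %d downloading image", httpResp.StatusCode)
+     }
+     return io.ReadAll(httpResp.Body)
+ }
+ ```
+
+ Each loop iteration then reduces to `imageData, err := downloadImage(generatedImage.URL)`, with no deferred work accumulating across iterations.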
+
+ ### Vision
+
+ **Before:**
+ ```go
+ imageURL := "https://www.bing.com/th?id=OHR.BradgateFallow_EN-US3932725763_1920x1080.jpg"
+
+ content := azopenai.NewChatRequestUserMessageContent([]azopenai.ChatCompletionRequestMessageContentPartClassification{
+     &azopenai.ChatCompletionRequestMessageContentPartText{
+         Text: to.Ptr("Describe this image"),
+     },
+     &azopenai.ChatCompletionRequestMessageContentPartImage{
+         ImageURL: &azopenai.ChatCompletionRequestMessageContentPartImageURL{
+             URL: &imageURL,
+         },
+     },
+ })
+
+ ctx, cancel := context.WithTimeout(context.TODO(), time.Minute)
+ defer cancel()
+
+ resp, err := client.GetChatCompletions(ctx, azopenai.ChatCompletionsOptions{
+     Messages: []azopenai.ChatRequestMessageClassification{
+         &azopenai.ChatRequestUserMessage{
+             Content: content,
+         },
+     },
+     MaxTokens: to.Ptr[int32](512),
+     DeploymentName: to.Ptr(modelDeployment),
+ }, nil)
+
+ if err != nil {
+     // Handle error
+ }
+
+ for _, choice := range resp.Choices {
+     if choice.Message != nil && choice.Message.Content != nil {
+         // Access the result as *choice.Message.Content
+     }
+ }
+ ```
+
+ **After:**
+ ```go
+ imageURL := "https://www.bing.com/th?id=OHR.BradgateFallow_EN-US3932725763_1920x1080.jpg"
+
+ ctx, cancel := context.WithTimeout(context.TODO(), time.Minute)
+ defer cancel()
+
+ resp, err := client.Chat.Completions.New(ctx, openai.ChatCompletionNewParams{
+     Model: openai.ChatModel(model), // Azure deployment name here
+     Messages: []openai.ChatCompletionMessageParamUnion{
+         {
+             OfUser: &openai.ChatCompletionUserMessageParam{
+                 Content: openai.ChatCompletionUserMessageParamContentUnion{
+                     OfArrayOfContentParts: []openai.ChatCompletionContentPartUnionParam{
+                         {
+                             OfText: &openai.ChatCompletionContentPartTextParam{
+                                 Text: "Describe this image",
+                             },
+                         },
+                         {
+                             OfImageURL: &openai.ChatCompletionContentPartImageParam{
+                                 ImageURL: openai.ChatCompletionContentPartImageImageURLParam{
+                                     URL: imageURL,
+                                 },
+                             },
+                         },
+                     },
+                 },
+             },
+         },
+     },
+     MaxTokens: openai.Int(512),
+ })
+
+ if err != nil {
+     // Handle error
+ }
+
+ for _, choice := range resp.Choices {
+     // In the new SDK, Message and Content are values rather than pointers
+     if choice.Message.Content != "" {
+         // Access the result as choice.Message.Content
+     }
+ }
+ ```
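+
+ If the image is a local file rather than a public URL, a common approach is to inline it as a base64 data URL and pass that string in the same `OfImageURL` content part shown above. A minimal sketch using only the standard library (`os`, `encoding/base64`); the file path and MIME type are placeholders:
+
+ ```go
+ imgBytes, err := os.ReadFile("cat.png") // placeholder path
+ if err != nil {
+     // Handle error
+ }
+ dataURL := "data:image/png;base64," + base64.StdEncoding.EncodeToString(imgBytes)
+
+ // Use dataURL in place of imageURL in the content part above:
+ // ImageURL: openai.ChatCompletionContentPartImageImageURLParam{URL: dataURL},
+ ```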
+
## Additional Resources

- [OpenAI Go Client Documentation](https://github.com/openai/openai-go)