
Commit 3afefca

Add new Vision feature to use local images (#37)
1 parent 7f506fa commit 3afefca

2 files changed: +64 lines, −12 lines

Demo/Demo/Vision/VisionView.swift

48 additions & 10 deletions

```diff
@@ -1,17 +1,51 @@
 import SwiftUI
+import PhotosUI
 
 struct VisionView: View {
-    var viewModel: VisionViewModel
+    @State private var visionStrategy = 0
+    @State var viewModel: VisionViewModel
 
     var body: some View {
         VStack {
-            AsyncImage(url: URL(string: viewModel.imageVisionURL)) { image in
-                image
-                    .resizable()
-                    .scaledToFit()
-                    .frame(width: 300, height: 300)
-            } placeholder: {
-                ProgressView()
+            Picker("What is your favorite color?", selection: $visionStrategy) {
+                Text("URL").tag(0)
+                Text("Gallery").tag(1)
+            }
+            .pickerStyle(.segmented)
+
+            if visionStrategy == 0 {
+                AsyncImage(url: URL(string: viewModel.imageVisionURL)) { image in
+                    image
+                        .resizable()
+                        .scaledToFit()
+                        .frame(width: 300, height: 300)
+                } placeholder: {
+                    ProgressView()
+                        .padding(.bottom, 20)
+                }
+            } else {
+                PhotosPicker(selection: $viewModel.photoSelection,
+                             matching: .images,
+                             photoLibrary: .shared()) {
+                    Label("Add video or audio",
+                          systemImage: "video.fill")
+                }
+                .frame(height: 300)
+                .photosPickerStyle(.inline)
+                .onChange(of: viewModel.photoSelection!) { oldValue, newValue in
+                    newValue.loadTransferable(type: Data.self) { [self] result in
+                        switch result {
+                        case .success(let data):
+                            if let data {
+                                viewModel.currentData = data
+                            } else {
+                                print("No supported content type found.")
+                            }
+                        case .failure(let error):
+                            fatalError(error.localizedDescription)
+                        }
+                    }
+                }
+            }
             }
 
             if !viewModel.isLoading {
@@ -20,18 +54,22 @@ struct VisionView: View {
                         await viewModel.send(message: "Please analyze the image and describe its contents, providing any relevant details or information")
                     }
                 }, label: {
-                    Text("Describe Image")
+                    Text("Describe Image from URL")
                 })
                 .buttonStyle(.borderedProminent)
             } else {
                 ProgressView()
             }
 
-            TextEditor(text: .constant( viewModel.message))
+            Divider()
+                .padding(.top, 20)
+
+            TextEditor(text: .constant(viewModel.message))
                 .font(.body)
                 .padding(.top, 12)
                 .padding(.horizontal)
         }
+        .padding(.horizontal, 32)
     }
 }
 
```
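For reference, a minimal, self-contained sketch of the gallery flow this commit adds (the view name `LocalImagePickerSketch` and its state properties are hypothetical, not part of the commit). It uses the async variant of `loadTransferable(type:)`, which avoids both the force-unwrap of `photoSelection` and the nested completion handler seen in the diff above:

```swift
import PhotosUI
import SwiftUI

// Hypothetical stand-alone sketch of the picker flow added in this commit.
// Requires iOS 17 for the two-parameter onChange, like the diff itself.
struct LocalImagePickerSketch: View {
    @State private var selection: PhotosPickerItem?
    @State private var imageData: Data?

    var body: some View {
        VStack {
            PhotosPicker(selection: $selection, matching: .images) {
                Label("Pick an image", systemImage: "photo")
            }
            if let imageData {
                Text("Loaded \(imageData.count) bytes")
            }
        }
        .onChange(of: selection) { _, newValue in
            Task {
                guard let item = newValue else { return }
                // A nil result here corresponds to the "No supported content
                // type found" branch in the commit's completion-handler version.
                imageData = try? await item.loadTransferable(type: Data.self)
            }
        }
    }
}
```

Either form works; the async one simply yields nil on a failed load instead of hitting the `fatalError` path in the commit's `.failure` case.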

Demo/Demo/Vision/VisionViewModel.swift

16 additions & 2 deletions

```diff
@@ -1,5 +1,7 @@
 import Foundation
 import SwiftOpenAI
+import PhotosUI
+import SwiftUI
 
 @Observable
 final class VisionViewModel {
@@ -8,13 +10,25 @@ final class VisionViewModel {
     var message: String = ""
     var isLoading = false
 
+    // Local Image
+    var photoSelection: PhotosPickerItem? = .init(itemIdentifier: "")
+    var currentData: Data?
+
     @MainActor
     func send(message: String) async {
         isLoading = true
 
         do {
+            let imageValue: String
+            if let data = currentData {
+                let base64Image = data.base64EncodedString()
+                imageValue = "data:image/jpeg;base64,\(base64Image)"
+            } else {
+                imageValue = imageVisionURL
+            }
+
             let myMessage = MessageChatImageInput(text: message,
-                                                  imageURL: imageVisionURL,
+                                                  imageURL: imageValue,
                                                   role: .user)
 
             let optionalParameters: ChatCompletionsOptionalParameters = .init(temperature: 0.5,
@@ -25,7 +39,7 @@ final class VisionViewModel {
             let result = try await openAI.createChatCompletionsWithImageInput(model: .gpt4(.gpt_4_vision_preview),
                                                                               messages: [myMessage],
                                                                               optionalParameters: optionalParameters)
-
+            self.currentData = nil
             self.message = result?.choices.first?.message.content ?? "No value"
             self.isLoading = false
```

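The core of the view-model change: when a local image has been picked, its bytes are inlined as a base64 `data:` URL in place of the remote `https:` URL, and `currentData` is reset after a successful request so the next send falls back to the URL. A sketch of that construction, with a hypothetical `makeImageValue` helper pulled out for illustration:

```swift
import Foundation

// Hypothetical helper mirroring the branch added to send(message:):
// picked image bytes become a base64 data URL; otherwise the remote
// URL string is passed through unchanged.
func makeImageValue(currentData: Data?, fallbackURL: String) -> String {
    guard let data = currentData else { return fallbackURL }
    // Note: the commit hardcodes image/jpeg even though the photo
    // picker may hand back PNG or HEIC data; most decoders tolerate
    // the mismatch, but sniffing the real content type would be stricter.
    return "data:image/jpeg;base64,\(data.base64EncodedString())"
}

// Example: the two JPEG magic bytes 0xFF 0xD8 encode to "/9g=", so
// makeImageValue(currentData: Data([0xFF, 0xD8]),
//                fallbackURL: "https://example.com/cat.jpg")
// returns "data:image/jpeg;base64,/9g=".
```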