Skip to content

Commit

Permalink
Add new Vision feature to use local images
Browse files Browse the repository at this point in the history
  • Loading branch information
SwiftBeta committed Mar 13, 2024
1 parent 7f506fa commit 952aa9a
Show file tree
Hide file tree
Showing 2 changed files with 64 additions and 12 deletions.
58 changes: 48 additions & 10 deletions Demo/Demo/Vision/VisionView.swift
Original file line number Diff line number Diff line change
@@ -1,17 +1,51 @@
import SwiftUI
import PhotosUI

struct VisionView: View {
var viewModel: VisionViewModel
@State private var visionStrategy = 0
@State var viewModel: VisionViewModel

var body: some View {
VStack {
AsyncImage(url: URL(string: viewModel.imageVisionURL)) { image in
image
.resizable()
.scaledToFit()
.frame(width: 300, height: 300)
} placeholder: {
ProgressView()
Picker("What is your favorite color?", selection: $visionStrategy) {
Text("URL").tag(0)
Text("Gallery").tag(1)
}
.pickerStyle(.segmented)

if visionStrategy == 0 {
AsyncImage(url: URL(string: viewModel.imageVisionURL)) { image in
image
.resizable()
.scaledToFit()
.frame(width: 300, height: 300)
} placeholder: {
ProgressView()
.padding(.bottom, 20)
}
} else {
PhotosPicker(selection: $viewModel.photoSelection,
matching: .images,
photoLibrary: .shared()) {
Label("Add video or audio",
systemImage: "video.fill")
}
.frame(height: 300)
.photosPickerStyle(.inline)
.onChange(of: viewModel.photoSelection!) { oldValue, newValue in
newValue.loadTransferable(type: Data.self) { [self] result in
switch result {
case .success(let data):
if let data {
viewModel.currentData = data
} else {
print("No supported content type found.")
}
case .failure(let error):
fatalError(error.localizedDescription)
}
}
}
}

if !viewModel.isLoading {
Expand All @@ -20,18 +54,22 @@ struct VisionView: View {
await viewModel.send(message: "Please analyze the image and describe its contents, providing any relevant details or information")
}
}, label: {
Text("Describe Image")
Text("Describe Image from URL")
})
.buttonStyle(.borderedProminent)
} else {
ProgressView()
}

TextEditor(text: .constant( viewModel.message))
Divider()
.padding(.top, 20)

TextEditor(text: .constant(viewModel.message))
.font(.body)
.padding(.top, 12)
.padding(.horizontal)
}
.padding(.horizontal, 32)
}
}

Expand Down
18 changes: 16 additions & 2 deletions Demo/Demo/Vision/VisionViewModel.swift
Original file line number Diff line number Diff line change
@@ -1,5 +1,7 @@
import Foundation
import SwiftOpenAI
import PhotosUI
import SwiftUI

@Observable
final class VisionViewModel {
Expand All @@ -8,13 +10,25 @@ final class VisionViewModel {
var message: String = ""
var isLoading = false

// Local Image
var photoSelection: PhotosPickerItem? = .init(itemIdentifier: "")
var currentData: Data?

@MainActor
func send(message: String) async {
isLoading = true

do {
let imageValue: String
if let data = currentData {
let base64Image = data.base64EncodedString()
imageValue = "data:image/jpeg;base64,\(base64Image)"
} else {
imageValue = imageVisionURL
}

let myMessage = MessageChatImageInput(text: message,
imageURL: imageVisionURL,
imageURL: imageValue,
role: .user)

let optionalParameters: ChatCompletionsOptionalParameters = .init(temperature: 0.5,
Expand All @@ -25,7 +39,7 @@ final class VisionViewModel {
let result = try await openAI.createChatCompletionsWithImageInput(model: .gpt4(.gpt_4_vision_preview),
messages: [myMessage],
optionalParameters: optionalParameters)

self.currentData = nil
self.message = result?.choices.first?.message.content ?? "No value"
self.isLoading = false

Expand Down

0 comments on commit 952aa9a

Please sign in to comment.