AIProxy: analyze image request
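This gist contains two files: AnalyzeView.swift, a SwiftUI view that lets the user pick a photo with PhotosPicker and encodes it as a URL via AIProxy, and OpenAIManager.swift, which sends that image to OpenAI's chat completion endpoint through AIProxy and expects a JSON object describing the photo in return.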
//
// AnalyzeView.swift
//
// Created by Arturo Carretero Calvo on 13/8/25.
// Copyright © 2025 ArtCC. All rights reserved.
//

import AIProxy
import PhotosUI
import SwiftUI

struct AnalyzeView: View {
    @State private var selectedPhotoItem: PhotosPickerItem?

    var body: some View {
        Text("Hello, World!")
    }

    func createAddView() -> some View {
        PhotosPicker(selection: $selectedPhotoItem, matching: .any(of: [.images]), photoLibrary: .shared()) {
            // Create the button or image that triggers the photo selection.
        }
        .onChange(of: selectedPhotoItem) { _, newState in
            Task {
                if let data = try? await newState?.loadTransferable(type: Data.self) {
                    if let image = UIImage(data: data),
                       let url = AIProxy.encodeImageAsURL(image: image, compressionQuality: 0.5) {
                        // Image selected, proceed with the post creation.
                        selectedPhotoItem = nil
                        // Use url for request.
                    }
                }
            }
        }
    }
}
//
// OpenAIManager.swift
//
// Created by Arturo Carretero Calvo on 11/5/25.
// Copyright © 2025 ArtCC. All rights reserved.
//

import AIProxy
import Foundation

struct AnalyzeRequestEntity {
    let imageUrl: URL
    let model: OpenAIModel
}

enum OpenAIModel: String {
    case gpt4o = "gpt-4o"
    case gpt4oMini = "gpt-4o-mini"
    case gpt4Turbo = "gpt-4-turbo"
}

enum OpenAIError: Error {
    case invalidRequest
    case invalidResponse
}

protocol OpenAIManagerProtocol {
    func configure()
    func analyze(with request: AnalyzeRequestEntity) async throws
}

final class OpenAIManager: OpenAIManagerProtocol {
    // MARK: - Properties
    private let openAIService = AIProxy.openAIService(
        partialKey: "partialKey",
        serviceURL: "serviceURL"
    )

    // MARK: - Public functions
    func configure() {
        var enabled: Bool
        #if DEBUG
        enabled = true
        #else
        enabled = false
        #endif
        AIProxy.configure(
            logLevel: .debug,
            printRequestBodies: enabled,
            printResponseBodies: enabled,
            resolveDNSOverTLS: true,
            useStableID: false
        )
    }

    // swiftlint:disable function_body_length
    func analyze(with request: AnalyzeRequestEntity) async throws {
        do {
            let prompt = """
            You are an expert photography critic. Analyze the image provided and return a JSON object with the following structure.
            Respond only with a JSON object like this:
            {
                "description": String
            }
            """
            let response = try await openAIService.chatCompletionRequest(body: .init(
                model: request.model.rawValue,
                messages: [
                    .system(
                        content: .text(prompt)
                    ),
                    .user(
                        content: .parts(
                            [
                                .imageURL(request.imageUrl, detail: .auto)
                            ]
                        )
                    )
                ],
                responseFormat: .jsonObject
            ), secondsToWait: 10)

            guard let content = response.choices.first?.message.content else {
                throw OpenAIError.invalidResponse
            }
            debugPrint("OpenAIManager: analyze: response: \(content)")
        } catch AIProxyError.unsuccessfulRequest(let statusCode, let responseBody) {
            debugPrint("OpenAIManager: analyze: statusCode: \(statusCode), error: \(responseBody)")
            throw OpenAIError.invalidResponse
        } catch {
            debugPrint("OpenAIManager: analyze: error: \(error)")
            throw OpenAIError.invalidResponse
        }
    }
}
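For completeness, here is a minimal sketch of how the two pieces could be wired together once createAddView() has produced an encoded image URL. The runAnalysis function name, the model choice, and the error handling are illustrative assumptions and are not part of the original gist.

//
// Usage sketch (illustrative, not part of the original gist).
//

import Foundation

// Configure AIProxy once (e.g. at app launch) and run an analysis for an
// image URL produced by AIProxy.encodeImageAsURL in AnalyzeView.
func runAnalysis(for imageUrl: URL) async {
    let manager: OpenAIManagerProtocol = OpenAIManager()
    manager.configure()

    do {
        try await manager.analyze(with: AnalyzeRequestEntity(imageUrl: imageUrl, model: .gpt4oMini))
    } catch {
        debugPrint("runAnalysis: failed: \(error)")
    }
}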