iOS SDK
Basic Usage
Learn how to make AI requests with ProxyKit iOS SDK
Basic Usage
After configuring the SDK, you can make AI requests through a simple, unified API.
Making Requests
OpenAI
import AIProxy
// Simple request: send a system prompt plus one user message and await the reply.
let response = try await AIProxy.openai.chat.completions.create(
model: "gpt-4",
messages: [
.system("You are a helpful assistant"),
.user("What is the capital of France?")
]
)
// The reply text lives on the first choice; nil-coalesce in case no choice is returned.
print(response.choices.first?.message.content ?? "")
Anthropic
// Using Claude: same `chat.completions.create` API, different provider namespace and model ID.
let response = try await AIProxy.anthropic.chat.completions.create(
model: "claude-3-opus-20240229",
messages: [
.user("Explain quantum computing in simple terms")
]
)
Message Types
// System message - sets the AI's behavior and persona for the conversation
.system("You are a helpful assistant")
// User message - input from the end user
.user("Hello, how are you?")
// Assistant message - a prior AI response, included to carry conversation history
.assistant("I'm doing well, thank you!")
Optional Parameters
// All parameters below `messages` are optional; omit any to use the provider's defaults.
let response = try await AIProxy.openai.chat.completions.create(
model: "gpt-4",
messages: messages,
temperature: 0.7, // Creativity/randomness (0.0 = deterministic, up to 2.0)
maxTokens: 1000, // Upper bound on tokens generated in the response
topP: 0.9, // Nucleus sampling cutoff (alternative to temperature)
frequencyPenalty: 0.5, // Penalize repeated tokens to reduce repetition
presencePenalty: 0.5 // Penalize already-seen topics to encourage new ones
)
Error Handling
do {
let response = try await AIProxy.openai.chat.completions.create(
model: "gpt-4",
messages: [.user("Hello!")]
)
// Handle response
} catch AIProxyError.attestationFailed(let reason) {
// Device attestation was rejected; the request never reached the provider.
print("Device verification failed: \(reason)")
} catch AIProxyError.rateLimited(let retryAfter) {
// Too many requests; `retryAfter` tells you how long to back off.
print("Rate limited. Retry after \(retryAfter) seconds")
} catch {
// Catch-all for network failures and any other provider errors.
print("Error: \(error)")
}
SwiftUI Example
/// A minimal chat screen: one text field, a send button, and a label
/// showing the latest model response.
struct ChatView: View {
@State private var userInput = ""
@State private var response = ""
@State private var isLoading = false
var body: some View {
VStack {
Text(response)
.padding()
TextField("Ask something...", text: $userInput)
.textFieldStyle(RoundedBorderTextFieldStyle())
.padding()
Button("Send") {
Task {
await sendMessage()
}
}
// Prevent duplicate in-flight requests and empty submissions.
.disabled(isLoading || userInput.isEmpty)
}
}
/// Sends the current `userInput` to the model and stores the reply in `response`.
private func sendMessage() async {
isLoading = true
// `defer` guarantees the button is re-enabled on every exit path,
// even if an early return or new throw site is added later.
defer { isLoading = false }
do {
// Named `completion` (not `response`) to avoid shadowing the @State property.
let completion = try await AIProxy.openai.chat.completions.create(
model: "gpt-3.5-turbo",
messages: [.user(userInput)]
)
response = completion.choices.first?.message.content ?? ""
} catch {
response = "Error: \(error.localizedDescription)"
}
}
}
Model Constants
// OpenAI — use these constants instead of raw strings to avoid typos in model IDs
ChatModel.gpt4 // "gpt-4"
ChatModel.gpt4Turbo // "gpt-4-turbo"
ChatModel.gpt35Turbo // "gpt-3.5-turbo"
// Anthropic — dated IDs pin a specific model snapshot
ChatModel.claude3Opus // "claude-3-opus-20240229"
ChatModel.claude3Sonnet // "claude-3-sonnet-20240229"
ChatModel.claude3Haiku // "claude-3-haiku-20240307"