iOS SDK
Streaming Responses
How to implement real-time streaming AI responses
Streaming Responses
ProxyKit's AIProxy SDK supports streaming responses for real-time AI interactions.
Basic Streaming
import AIProxy

// Open a streaming chat completion against OpenAI.
let stream = try await AIProxy.openai.chat.completions.stream(
    model: "gpt-4",
    messages: [.user("Write a story about a robot")]
)

// Consume the stream with an explicit async iterator; each chunk may
// carry a piece of the assistant's reply in `delta.content`.
var iterator = stream.makeAsyncIterator()
while let chunk = try await iterator.next() {
    guard let content = chunk.delta.content else { continue }
    print(content, terminator: "")
}
Streaming with Anthropic
// Open a streaming chat completion against Anthropic's Claude.
let stream = try await AIProxy.anthropic.chat.completions.stream(
    model: "claude-3-sonnet-20240229",
    messages: [.user("Tell me about AI")]
)

// Print each text fragment as it arrives; chunks without content
// (e.g. role or metadata updates) are skipped.
for try await chunk in stream {
    guard let content = chunk.delta.content else { continue }
    print(content, terminator: "")
}
SwiftUI Streaming Example
/// A minimal chat screen that renders an AI response incrementally
/// as streamed chunks arrive.
struct StreamingChatView: View {
    @State private var userInput = ""
    @State private var streamedText = ""
    @State private var isStreaming = false

    var body: some View {
        VStack {
            ScrollView {
                Text(streamedText)
                    .padding()
                    .frame(maxWidth: .infinity, alignment: .leading)
            }
            HStack {
                TextField("Type a message...", text: $userInput)
                    .textFieldStyle(RoundedBorderTextFieldStyle())
                    .disabled(isStreaming)
                Button("Send") {
                    Task {
                        await streamResponse()
                    }
                }
                // Prevent overlapping requests and empty submissions.
                .disabled(isStreaming || userInput.isEmpty)
            }
            .padding()
        }
    }

    /// Streams a chat completion for the current input, appending each
    /// chunk to `streamedText` as it arrives.
    ///
    /// Runs on the main actor because it mutates `@State` properties.
    /// On failure the error is shown in place of the response and the
    /// user's input is preserved so it can be retried.
    @MainActor
    func streamResponse() async {
        let prompt = userInput
        isStreaming = true
        streamedText = ""
        // Guarantee the UI is re-enabled on every exit path.
        defer { isStreaming = false }
        do {
            let stream = try await AIProxy.openai.chat.completions.stream(
                model: "gpt-4",
                messages: [
                    .system("You are a helpful assistant"),
                    .user(prompt)
                ]
            )
            for try await chunk in stream {
                if let content = chunk.delta.content {
                    streamedText += content
                }
            }
            // Clear the input only after a successful stream so a failed
            // request doesn't discard the user's message.
            userInput = ""
        } catch {
            streamedText = "Error: \(error.localizedDescription)"
        }
    }
}
Cancellable Streams
/// Owns at most one in-flight streaming task, cancelling any previous
/// stream before starting a new one.
class StreamManager {
    private var streamTask: Task<Void, Never>?

    /// Starts streaming a completion for `prompt`, replacing (and
    /// cancelling) any stream already in progress.
    func startStream(prompt: String) {
        // Cancel any existing stream so only one runs at a time.
        streamTask?.cancel()
        streamTask = Task {
            do {
                let stream = try await AIProxy.openai.chat.completions.stream(
                    model: "gpt-4",
                    messages: [.user(prompt)]
                )
                for try await chunk in stream {
                    // Cooperative cancellation: throws CancellationError
                    // as soon as cancelStream() (or a restart) runs.
                    try Task.checkCancellation()
                    processChunk(chunk)
                }
            } catch is CancellationError {
                // Deliberate cancellation is not a failure — don't report it.
            } catch {
                handleError(error)
            }
        }
    }

    /// Cancels the current stream, if any.
    func cancelStream() {
        streamTask?.cancel()
        streamTask = nil
    }
}
Error Handling
// Handle streaming failures: match known SDK errors first, then fall
// back to a catch-all for anything else (network drops, decoding, etc.).
do {
let stream = try await AIProxy.openai.chat.completions.stream(
model: "gpt-4",
messages: [.user("Hello")]
)
for try await chunk in stream {
processChunk(chunk)
}
} catch AIProxyError.streamInterrupted {
// The connection dropped mid-stream; any chunks already processed stand.
print("Stream was interrupted")
} catch {
// Any other error (thrown by the initial request or during iteration).
print("Stream error: \(error)")
}