iOS SDK
Streaming Responses
How to implement real-time streaming AI responses
Streaming Responses
ProxyKit supports streaming responses for real-time AI interactions.
Basic Streaming
import ProxyKit

// Stream a completion from OpenAI, printing each token as it arrives.
let stream = ProxyKitAdvance.openai.chat.completions.stream(
    model: "gpt-4",
    messages: [.user("Write a story about a robot")]
)
for try await chunk in stream {
    // Chunks without a content delta (e.g. role/stop events) are skipped.
    guard let content = chunk.choices.first?.delta.content else { continue }
    print(content, terminator: "")
}
Streaming with Anthropic
// Stream from Claude, printing each token as it arrives.
let claudeStream = ProxyKitAdvance.anthropic.chat.completions.stream(
    model: "claude-3-sonnet-20240229",
    messages: [.user("Tell me about AI")]
)
for try await chunk in claudeStream {
    // Chunks without a content delta (e.g. role/stop events) are skipped.
    guard let content = chunk.choices.first?.delta.content else { continue }
    print(content, terminator: "")
}
SwiftUI Streaming Example
/// A minimal chat screen that renders an AI response token-by-token as it
/// streams in, disabling input while a stream is in flight.
struct StreamingChatView: View {
    @State private var userInput = ""
    @State private var streamedText = ""
    @State private var isStreaming = false

    var body: some View {
        VStack {
            ScrollView {
                Text(streamedText)
                    .padding()
                    .frame(maxWidth: .infinity, alignment: .leading)
            }
            HStack {
                TextField("Type a message...", text: $userInput)
                    .textFieldStyle(RoundedBorderTextFieldStyle())
                    .disabled(isStreaming)
                Button("Send") {
                    Task {
                        await streamResponse()
                    }
                }
                .disabled(isStreaming || userInput.isEmpty)
            }
            .padding()
        }
    }

    /// Streams a completion for the current `userInput`, appending each
    /// content delta to `streamedText` as it arrives. On failure, replaces
    /// the transcript with a localized error description.
    ///
    /// Isolated to the main actor because it mutates `@State` properties.
    @MainActor
    func streamResponse() async {
        // Snapshot the prompt and clear the field up front so the request
        // cannot race later edits to `userInput`.
        let prompt = userInput
        userInput = ""
        isStreaming = true
        streamedText = ""
        // Guarantee the UI is re-enabled on every exit path — success,
        // thrown error, or task cancellation surfacing as CancellationError.
        defer { isStreaming = false }
        do {
            for try await chunk in ProxyKitAdvance.openai.chat.completions.stream(
                model: "gpt-4",
                messages: [
                    .system("You are a helpful assistant"),
                    .user(prompt)
                ]
            ) {
                if let content = chunk.choices.first?.delta.content {
                    streamedText += content
                }
            }
        } catch {
            streamedText = "Error: \(error.localizedDescription)"
        }
    }
}
Cancellable Streams
/// Owns at most one in-flight streaming request, replacing it whenever a new
/// prompt is started and letting the caller cancel it at any time.
class StreamManager {
    private var streamTask: Task<Void, Never>?

    /// Begins streaming a completion for `prompt`, first tearing down any
    /// stream that is already running.
    func startStream(prompt: String) {
        // Only one stream may be live at a time.
        streamTask?.cancel()
        streamTask = Task {
            do {
                let stream = ProxyKitAdvance.openai.chat.completions.stream(
                    model: "gpt-4",
                    messages: [.user(prompt)]
                )
                for try await chunk in stream {
                    // Stop quietly once this task has been cancelled; the
                    // partial output already delivered stands as-is.
                    guard !Task.isCancelled else { break }
                    processChunk(chunk)
                }
            } catch {
                handleError(error)
            }
        }
    }

    /// Cancels the in-flight stream, if any, and releases the task handle.
    func cancelStream() {
        streamTask?.cancel()
        streamTask = nil
    }
}
Error Handling
// Drain the stream, distinguishing an interrupted stream from other failures.
do {
    let stream = ProxyKitAdvance.openai.chat.completions.stream(
        model: "gpt-4",
        messages: [.user("Hello")]
    )
    for try await chunk in stream {
        processChunk(chunk)
    }
} catch {
    if case ProxyKitError.streamInterrupted = error {
        print("Stream was interrupted")
    } else {
        print("Stream error: \(error)")
    }
}