Files
sleepy_agent_ios/SleepyAgent/UI/Views/InputBar.swift
T
sleepy bbcf0c74bb Initial iOS port - Complete source code and build system
- 19 Swift source files (~4900 lines)
- Complete UI with SwiftUI (MainView, SettingsView, MessageBubble, InputBar)
- Inference layer (LlmEngine, Agent, ToolCalling, ConversationContext)
- Services (Audio, TTS, WebSearch, ModelDownload, Storage)
- Build system: Makefile, Package.swift, Podfile
- Documentation: BUILD.md, plan.md, PROJECT_STATUS.md
- Ready for Xcode build - just need LiteRT dependency added
2026-04-06 14:26:08 +02:00

274 lines
7.8 KiB
Swift

import SwiftUI
/// Bottom input bar: a growing text field plus image, voice, and send actions.
///
/// All state flags (`isRecording`, `isProcessing`, `isExecutingTool`) are owned
/// by the parent; this view only renders them and forwards user intents through
/// the `onSend` / `onVoiceTap` / `onImageTap` callbacks.
struct InputBar: View {
    /// Draft message text, owned by the parent so it can clear/prefill it.
    @Binding var text: String
    /// Called with the trimmed message when the user sends.
    let onSend: (String) -> Void
    /// Called when the mic/stop button is tapped (parent toggles recording).
    let onVoiceTap: () -> Void
    /// Called when the photo button is tapped.
    let onImageTap: () -> Void
    let isRecording: Bool
    let isProcessing: Bool
    let isExecutingTool: Bool

    @FocusState private var isFocused: Bool

    var body: some View {
        HStack(spacing: 12) {
            // Text input field
            textField
            // Action buttons
            actionButtons
        }
        .padding(.horizontal, 4)
        .padding(.vertical, 8)
        .background(Color(.systemBackground))
        // NOTE(review): `.overlay` sizes its content to this bar, so the
        // dimming layer in `voiceOverlay` likely does not cover the whole
        // screen despite its `ignoresSafeArea()` — confirm intended placement;
        // a `fullScreenCover` or parent-level overlay may be what was meant.
        .overlay(voiceOverlay)
    }

    /// Rounded text field with an inline clear (x) button.
    private var textField: some View {
        HStack(spacing: 8) {
            TextField(placeholderText, text: $text, axis: .vertical)
                .focused($isFocused)
                .lineLimit(1...4)
                .disabled(isProcessing || isExecutingTool)
                .submitLabel(.send)
                .onSubmit {
                    sendMessage()
                }
            if !text.isEmpty {
                Button(action: { text = "" }) {
                    Image(systemName: "xmark.circle.fill")
                        .foregroundColor(.secondary)
                }
            }
        }
        .padding(.horizontal, 12)
        .padding(.vertical, 10)
        .background(Color(.systemGray6))
        .cornerRadius(20)
    }

    /// Image picker, voice/record toggle, and send button.
    private var actionButtons: some View {
        HStack(spacing: 8) {
            // Image picker button
            Button(action: onImageTap) {
                Image(systemName: "photo")
                    .font(.system(size: 22))
                    .frame(width: 40, height: 40)
            }
            .disabled(isProcessing || isExecutingTool)
            .opacity(isProcessing || isExecutingTool ? 0.5 : 1)

            // Voice/Record button: spinner while busy, stop while recording,
            // mic otherwise.
            Button(action: handleVoiceTap) {
                ZStack {
                    Circle()
                        .fill(buttonBackgroundColor)
                        .frame(width: 44, height: 44)
                    if isProcessing || isExecutingTool {
                        ProgressView()
                            .scaleEffect(0.8)
                            .tint(.white)
                    } else if isRecording {
                        Image(systemName: "stop.fill")
                            .font(.system(size: 20))
                            .foregroundColor(.white)
                    } else {
                        Image(systemName: "mic.fill")
                            .font(.system(size: 20))
                            .foregroundColor(.white)
                    }
                }
            }
            .disabled(isProcessing || isExecutingTool)

            // Send button
            Button(action: sendMessage) {
                Image(systemName: "arrow.up.circle.fill")
                    .font(.system(size: 32))
                    .foregroundColor(canSend ? .accentColor : .secondary.opacity(0.5))
            }
            .disabled(!canSend)
        }
    }

    /// Full-bar dimmed overlay shown only while recording.
    @ViewBuilder
    private var voiceOverlay: some View {
        if isRecording {
            VoiceRecordingOverlay()
                .transition(.opacity)
        }
    }

    // MARK: - Computed Properties

    /// Placeholder reflecting the current agent state (tool > thinking > recording > idle).
    private var placeholderText: String {
        if isExecutingTool {
            return "🔧 Executing tool..."
        } else if isProcessing {
            return "Thinking..."
        } else if isRecording {
            return "Recording..."
        } else {
            return "Type a message..."
        }
    }

    /// Mic button fill: red while recording, orange while busy, accent otherwise.
    private var buttonBackgroundColor: Color {
        if isRecording {
            return .red
        } else if isProcessing || isExecutingTool {
            return .orange
        } else {
            return .accentColor
        }
    }

    /// Sending is allowed only with non-blank text while the agent is idle.
    private var canSend: Bool {
        !text.trimmingCharacters(in: .whitespacesAndNewlines).isEmpty &&
        !isProcessing &&
        !isExecutingTool
    }

    // MARK: - Actions

    /// Trims and forwards the current text, then clears the field and dismisses
    /// the keyboard. Guards on `canSend` (not just non-empty text) so every
    /// entry point — the send button AND the keyboard's `.onSubmit` — respects
    /// the processing/tool-execution state instead of relying on `.disabled`.
    private func sendMessage() {
        guard canSend else { return }
        onSend(text.trimmingCharacters(in: .whitespacesAndNewlines))
        text = ""
        isFocused = false
    }

    /// Forwards the voice tap inside an animation so dependent UI
    /// (overlay fade, button color) transitions smoothly.
    private func handleVoiceTap() {
        withAnimation(.spring()) {
            onVoiceTap()
        }
    }
}
// MARK: - Voice Recording Overlay

/// Dimmed full-area overlay with a pulsing red mic badge, shown while
/// audio is being recorded. Purely presentational — no callbacks.
struct VoiceRecordingOverlay: View {
    /// Scale of the translucent halo behind the mic; animated 1.0 → 1.3 forever.
    @State private var haloScale: CGFloat = 1.0

    var body: some View {
        ZStack {
            Color.black.opacity(0.5)
                .ignoresSafeArea()
            content
        }
        .onAppear(perform: beginPulse)
    }

    /// Centered column: pulsing badge, title, and hint text.
    private var content: some View {
        VStack(spacing: 24) {
            Spacer()
            recordingBadge
            Text("Recording...")
                .font(.title3)
                .foregroundColor(.white)
            Text("Tap stop button to finish")
                .font(.body)
                .foregroundColor(.white.opacity(0.7))
            Spacer()
        }
    }

    /// Soft halo (animated) behind a solid red circle with a mic glyph.
    private var recordingBadge: some View {
        ZStack {
            Circle()
                .fill(Color.red.opacity(0.3))
                .frame(width: 120, height: 120)
                .scaleEffect(haloScale)
            Circle()
                .fill(Color.red)
                .frame(width: 80, height: 80)
            Image(systemName: "mic.fill")
                .font(.system(size: 40))
                .foregroundColor(.white)
        }
    }

    /// Starts the endless breathe animation on the halo.
    private func beginPulse() {
        withAnimation(.easeInOut(duration: 1).repeatForever(autoreverses: true)) {
            haloScale = 1.3
        }
    }
}
// MARK: - Audio Waveform View (Visual feedback while recording)

/// Animated bar "waveform" shown while recording. Purely decorative:
/// bar heights are randomized 10x/second, not driven by real audio levels.
struct AudioWaveformView: View {
    /// Normalized bar heights (0...1); rendered at up to 40pt tall.
    @State private var bars: [CGFloat] = Array(repeating: 0.5, count: 20)
    // Retained so it can be invalidated on disappear. The original code
    // discarded the scheduledTimer result, so the timer (and its closure)
    // kept firing forever after the view left the hierarchy — a leak.
    @State private var timer: Timer?

    var body: some View {
        HStack(spacing: 4) {
            ForEach(0..<bars.count, id: \.self) { index in
                RoundedRectangle(cornerRadius: 2)
                    .fill(Color.red)
                    .frame(width: 4, height: bars[index] * 40)
                    // Per-bar stagger gives a rippling, organic look.
                    .animation(.easeInOut(duration: 0.1).delay(Double(index) * 0.02), value: bars[index])
            }
        }
        .frame(height: 40)
        .onAppear {
            startAnimating()
        }
        .onDisappear {
            stopAnimating()
        }
    }

    /// Starts a repeating 0.1s timer that randomizes every bar height.
    private func startAnimating() {
        timer = Timer.scheduledTimer(withTimeInterval: 0.1, repeats: true) { _ in
            for i in bars.indices {
                bars[i] = CGFloat.random(in: 0.3...1.0)
            }
        }
    }

    /// Stops and releases the timer so it cannot outlive the view.
    private func stopAnimating() {
        timer?.invalidate()
        timer = nil
    }
}
// MARK: - Preview

struct InputBar_Previews: PreviewProvider {
    /// Factory with defaulted flags so each preview row only spells out
    /// what it varies; all callbacks are inert stubs.
    private static func bar(
        text: String = "",
        isRecording: Bool = false,
        isProcessing: Bool = false
    ) -> InputBar {
        InputBar(
            text: .constant(text),
            onSend: { _ in },
            onVoiceTap: {},
            onImageTap: {},
            isRecording: isRecording,
            isProcessing: isProcessing,
            isExecutingTool: false
        )
    }

    static var previews: some View {
        VStack {
            bar(text: "Hello")       // idle with draft text (send enabled)
            bar(isRecording: true)   // recording state
            bar(isProcessing: true)  // model thinking
            Spacer()
        }
        .padding()
    }
}