/// Screen hosting an on-device voice assistant conversation.
///
/// Manages model loading, a push-to-talk voice session, and a chat-style
/// transcript of user utterances and assistant replies.
class VoiceAssistantScreen extends StatefulWidget {
  /// Creates the voice assistant screen.
  const VoiceAssistantScreen({super.key});

  @override
  // Return the public State type so the private implementation class does
  // not leak into the public API (library_private_types_in_public_api).
  State<VoiceAssistantScreen> createState() => _VoiceAssistantScreenState();
}
class _VoiceAssistantScreenState extends State<VoiceAssistantScreen> {
  /// Handle for the active voice session; null when no session is running.
  VoiceSessionHandle? _session;

  /// Conversation transcript rendered in the chat list.
  final List<ChatMessage> _messages = [];

  /// Most recent microphone level reported by the session.
  /// Assumed to be in [0, 1] — clamped before display; TODO confirm range.
  double _audioLevel = 0.0;

  /// Human-readable state shown beneath the chat list.
  String _status = 'Tap to start';

  /// Whether a session is currently running (drives the mic/stop button).
  bool _isActive = false;

  /// Downloads (if needed) and loads the STT, LLM, and TTS models the
  /// voice agent requires, updating [_status] along the way.
  Future<void> _initializeModels() async {
    setState(() => _status = 'Loading models...');
    if (!RunAnywhere.isSTTModelLoaded) {
      await _downloadAndLoad('sherpa-onnx-whisper-tiny.en', 'STT');
      await RunAnywhere.loadSTTModel('sherpa-onnx-whisper-tiny.en');
    }
    if (!RunAnywhere.isModelLoaded) {
      await _downloadAndLoad('smollm2-360m-q8_0', 'LLM');
      await RunAnywhere.loadModel('smollm2-360m-q8_0');
    }
    if (!RunAnywhere.isTTSVoiceLoaded) {
      await _downloadAndLoad('vits-piper-en_US-lessac-medium', 'TTS');
      await RunAnywhere.loadTTSVoice('vits-piper-en_US-lessac-medium');
    }
    // The awaits above can outlive this State; never setState after dispose.
    if (!mounted) return;
    setState(() => _status = 'Ready');
  }

  /// Streams download progress for [modelId], reflecting the percentage in
  /// [_status] under the given [label] (e.g. 'STT').
  Future<void> _downloadAndLoad(String modelId, String label) async {
    await for (final p in RunAnywhere.downloadModel(modelId)) {
      // The widget may be disposed mid-download; bail out quietly.
      if (!mounted) return;
      setState(() => _status =
          'Downloading $label: ${(p.percentage * 100).toStringAsFixed(0)}%');
      if (p.state.isCompleted) break;
    }
  }

  /// Stops the current session if one is running, otherwise starts a new
  /// one (lazily loading models on the first start).
  Future<void> _toggleSession() async {
    final session = _session;
    if (session != null) {
      session.stop();
      setState(() {
        _session = null;
        _isActive = false;
        _status = 'Stopped';
      });
      return;
    }

    // Ensure all models are loaded before starting.
    if (!RunAnywhere.isVoiceAgentReady) {
      await _initializeModels();
    }
    if (!mounted) return;

    try {
      _session = await RunAnywhere.startVoiceSession(
        config: VoiceSessionConfig(
          silenceDuration: 1.5,
          autoPlayTTS: true,
          continuousMode: true,
        ),
      );
    } catch (e) {
      // Surface startup failures in the status line instead of crashing.
      if (mounted) setState(() => _status = 'Error: $e');
      return;
    }
    if (!mounted) {
      // User navigated away while the session was starting; don't leak it.
      _session?.stop();
      _session = null;
      return;
    }
    setState(() => _isActive = true);

    // React to session lifecycle events.
    _session!.events.listen((event) {
      // Events can continue to arrive briefly after dispose.
      if (!mounted) return;
      setState(() {
        switch (event) {
          case VoiceSessionListening(:final audioLevel):
            _audioLevel = audioLevel;
            _status = 'Listening...';
          case VoiceSessionSpeechStarted():
            _status = 'Speech detected';
          case VoiceSessionProcessing():
            _status = 'Processing...';
          case VoiceSessionTranscribed(:final text):
            _messages.add(ChatMessage(text: text, isUser: true));
          case VoiceSessionResponded(:final text):
            _messages.add(ChatMessage(text: text, isUser: false));
          case VoiceSessionSpeaking():
            _status = 'Speaking...';
          case VoiceSessionTurnCompleted():
            _status = 'Listening...';
          case VoiceSessionError(:final message):
            _status = 'Error: $message';
          case VoiceSessionStopped():
            // Clear the handle so the next tap starts a fresh session
            // rather than calling stop() on an already-stopped one.
            _session = null;
            _status = 'Stopped';
            _isActive = false;
        }
      });
    });
  }

  @override
  Widget build(BuildContext context) {
    return Scaffold(
      appBar: AppBar(title: const Text('Voice Assistant')),
      body: Column(
        children: [
          // Chat transcript: user bubbles right-aligned, assistant left.
          Expanded(
            child: ListView.builder(
              itemCount: _messages.length,
              itemBuilder: (context, index) {
                final msg = _messages[index];
                return Align(
                  alignment: msg.isUser
                      ? Alignment.centerRight
                      : Alignment.centerLeft,
                  child: Container(
                    margin: const EdgeInsets.all(8),
                    padding: const EdgeInsets.all(12),
                    decoration: BoxDecoration(
                      color: msg.isUser ? Colors.blue : Colors.grey[300],
                      borderRadius: BorderRadius.circular(12),
                    ),
                    child: Text(
                      msg.text,
                      style: TextStyle(
                        color: msg.isUser ? Colors.white : Colors.black,
                      ),
                    ),
                  ),
                );
              },
            ),
          ),
          // Status line, audio meter, and the start/stop control.
          Container(
            padding: const EdgeInsets.all(16),
            child: Column(
              children: [
                // Live audio level indicator, shown only while active.
                if (_isActive)
                  SizedBox(
                    height: 4,
                    child: LinearProgressIndicator(
                      value: _audioLevel.clamp(0.0, 1.0),
                    ),
                  ),
                const SizedBox(height: 8),
                Text(_status),
                const SizedBox(height: 16),
                FloatingActionButton(
                  onPressed: _toggleSession,
                  backgroundColor: _isActive ? Colors.red : Colors.blue,
                  child: Icon(_isActive ? Icons.stop : Icons.mic),
                ),
              ],
            ),
          ),
        ],
      ),
    );
  }

  @override
  void dispose() {
    // Stop the session so audio capture/playback shuts down with the screen.
    // NOTE(review): assumes stop() also closes the events stream; the
    // listener's subscription cannot be cancelled here without a dart:async
    // import, which is not visible in this chunk — confirm SDK behavior.
    _session?.stop();
    super.dispose();
  }
}
/// A single chat bubble in the transcript: either the user's transcribed
/// speech or the assistant's reply.
class ChatMessage {
  /// The message body shown in the bubble.
  final String text;

  /// Whether the message came from the user (true) or the assistant (false).
  final bool isUser;

  /// Creates an immutable chat message; const so instances can be
  /// canonicalized when built from constant values.
  const ChatMessage({required this.text, required this.isUser});
}