diff --git a/packages/firebase_ai/firebase_ai/example/ios/Runner/Info.plist b/packages/firebase_ai/firebase_ai/example/ios/Runner/Info.plist
index a80f00ea0116..babcfb712863 100644
--- a/packages/firebase_ai/firebase_ai/example/ios/Runner/Info.plist
+++ b/packages/firebase_ai/firebase_ai/example/ios/Runner/Info.plist
@@ -45,6 +45,8 @@
UIApplicationSupportsIndirectInputEvents
+ NSCameraUsageDescription
+ We need camera access to take pictures and record video.
NSMicrophoneUsageDescription
We need access to the microphone to record audio.
diff --git a/packages/firebase_ai/firebase_ai/example/lib/main.dart b/packages/firebase_ai/firebase_ai/example/lib/main.dart
index ed9748965723..133f2c6ec257 100644
--- a/packages/firebase_ai/firebase_ai/example/lib/main.dart
+++ b/packages/firebase_ai/firebase_ai/example/lib/main.dart
@@ -18,7 +18,7 @@ import 'package:firebase_core/firebase_core.dart';
import 'package:flutter/material.dart';
// Import after file is generated through flutterfire_cli.
-// import 'package:firebase_ai_example/firebase_options.dart';
+//import 'package:firebase_ai_example/firebase_options.dart';
import 'pages/audio_page.dart';
import 'pages/bidi_page.dart';
@@ -54,7 +54,14 @@ class _GenerativeAISampleState extends State {
bool _useVertexBackend = false;
late GenerativeModel _currentModel;
late ImagenModel _currentImagenModel;
- int _currentBottomNavIndex = 0;
+
+ static final ThemeData _darkTheme = ThemeData(
+ colorScheme: ColorScheme.fromSeed(
+ brightness: Brightness.dark,
+ seedColor: const Color.fromARGB(255, 171, 222, 244),
+ ),
+ useMaterial3: true,
+ );
@override
void initState() {
@@ -98,25 +105,13 @@ class _GenerativeAISampleState extends State {
_initializeModel(_useVertexBackend);
}
- void _onBottomNavTapped(int index) {
- setState(() {
- _currentBottomNavIndex = index;
- });
- }
-
@override
Widget build(BuildContext context) {
return MaterialApp(
title: 'Flutter + ${_useVertexBackend ? 'Vertex AI' : 'Google AI'}',
debugShowCheckedModeBanner: false,
themeMode: ThemeMode.dark,
- theme: ThemeData(
- colorScheme: ColorScheme.fromSeed(
- brightness: Brightness.dark,
- seedColor: const Color.fromARGB(255, 171, 222, 244),
- ),
- useMaterial3: true,
- ),
+ theme: _darkTheme,
home: HomeScreen(
key: ValueKey(
'${_useVertexBackend}_${_currentModel.hashCode}',
@@ -125,8 +120,6 @@ class _GenerativeAISampleState extends State {
imagenModel: _currentImagenModel,
useVertexBackend: _useVertexBackend,
onBackendChanged: _toggleBackend,
- selectedIndex: _currentBottomNavIndex,
- onSelectedIndexChanged: _onBottomNavTapped,
),
);
}
@@ -137,8 +130,6 @@ class HomeScreen extends StatefulWidget {
final ImagenModel imagenModel;
final bool useVertexBackend;
final ValueChanged onBackendChanged;
- final int selectedIndex;
- final ValueChanged onSelectedIndexChanged;
const HomeScreen({
super.key,
@@ -146,8 +137,6 @@ class HomeScreen extends StatefulWidget {
required this.imagenModel,
required this.useVertexBackend,
required this.onBackendChanged,
- required this.selectedIndex,
- required this.onSelectedIndexChanged,
});
@override
@@ -155,8 +144,12 @@ class HomeScreen extends StatefulWidget {
}
class _HomeScreenState extends State {
+ int _selectedIndex = 0;
+
void _onItemTapped(int index) {
- widget.onSelectedIndexChanged(index);
+ setState(() {
+ _selectedIndex = index;
+ });
}
// Method to build the selected page on demand
@@ -264,7 +257,7 @@ class _HomeScreenState extends State {
),
body: Center(
child: _buildSelectedPage(
- widget.selectedIndex,
+ _selectedIndex,
widget.model,
widget.imagenModel,
widget.useVertexBackend,
@@ -344,7 +337,7 @@ class _HomeScreenState extends State {
tooltip: 'Server Template',
),
],
- currentIndex: widget.selectedIndex,
+ currentIndex: _selectedIndex,
onTap: _onItemTapped,
),
);
diff --git a/packages/firebase_ai/firebase_ai/example/lib/pages/bidi_page.dart b/packages/firebase_ai/firebase_ai/example/lib/pages/bidi_page.dart
index 70c7007cdd5b..e6c53963bd5f 100644
--- a/packages/firebase_ai/firebase_ai/example/lib/pages/bidi_page.dart
+++ b/packages/firebase_ai/firebase_ai/example/lib/pages/bidi_page.dart
@@ -13,13 +13,17 @@
// limitations under the License.
import 'dart:async';
import 'dart:developer' as developer;
+import 'package:flutter/foundation.dart';
import 'package:flutter/material.dart';
import 'package:firebase_ai/firebase_ai.dart';
import '../utils/audio_input.dart';
import '../utils/audio_output.dart';
+import '../utils/video_input.dart';
import '../widgets/message_widget.dart';
+import '../widgets/audio_visualizer.dart';
+import '../widgets/camera_previews.dart';
class BidiPage extends StatefulWidget {
const BidiPage({
@@ -54,11 +58,14 @@ class _BidiPageState extends State {
bool _recording = false;
late LiveGenerativeModel _liveModel;
late LiveSession _session;
- StreamController _stopController = StreamController();
final AudioOutput _audioOutput = AudioOutput();
final AudioInput _audioInput = AudioInput();
+ final VideoInput _videoInput = VideoInput();
+ StreamSubscription? _audioSubscription;
int? _inputTranscriptionMessageIndex;
int? _outputTranscriptionMessageIndex;
+ bool _isCameraOn = false;
+ bool _videoIsInitialized = false;
@override
void initState() {
@@ -73,7 +80,6 @@ class _BidiPageState extends State {
outputAudioTranscription: AudioTranscriptionConfig(),
);
- // ignore: deprecated_member_use
_liveModel = widget.useVertexBackend
? FirebaseAI.vertexAI().liveGenerativeModel(
model: 'gemini-live-2.5-flash-preview-native-audio-09-2025',
@@ -89,30 +95,44 @@ class _BidiPageState extends State {
Tool.functionDeclarations([lightControlTool]),
],
);
- _initAudio();
}
Future _initAudio() async {
- await _audioOutput.init();
- await _audioInput.init();
+ try {
+ await _audioOutput.init();
+ } catch (e) {
+ developer.log('Audio Output init error: $e');
+ }
+
+ try {
+ await _audioInput.init();
+ } catch (e) {
+ developer.log('Audio Input init error: $e');
+ }
+ }
+
+ Future _initVideo() async {
+ try {
+ await _videoInput.init();
+ setState(() {
+ _videoIsInitialized = true;
+ });
+ } catch (e) {
+ developer.log('Error during video initialization: $e');
+ }
}
void _scrollDown() {
- WidgetsBinding.instance.addPostFrameCallback(
- (_) => _scrollController.animateTo(
- _scrollController.position.maxScrollExtent,
- duration: const Duration(
- milliseconds: 750,
- ),
- curve: Curves.easeOutCirc,
- ),
+ if (!_scrollController.hasClients) return;
+
+ _scrollController.jumpTo(
+ _scrollController.position.maxScrollExtent,
);
}
@override
void dispose() {
if (_sessionOpening) {
- _stopController.close();
_sessionOpening = false;
_session.close();
}
@@ -121,102 +141,139 @@ class _BidiPageState extends State {
@override
Widget build(BuildContext context) {
- return Scaffold(
- appBar: AppBar(
- title: Text(widget.title),
- ),
- body: Padding(
- padding: const EdgeInsets.all(8),
- child: Column(
- mainAxisAlignment: MainAxisAlignment.center,
- crossAxisAlignment: CrossAxisAlignment.start,
- children: [
- Expanded(
- child: ListView.builder(
- controller: _scrollController,
- itemBuilder: (context, idx) {
- return MessageWidget(
- text: _messages[idx].text,
- image: _messages[idx].imageBytes != null
- ? Image.memory(
- _messages[idx].imageBytes!,
- cacheWidth: 400,
- cacheHeight: 400,
- )
- : null,
- isFromUser: _messages[idx].fromUser ?? false,
- );
- },
- itemCount: _messages.length,
- ),
+ return Padding(
+ padding: const EdgeInsets.all(8),
+ child: Column(
+ mainAxisAlignment: MainAxisAlignment.center,
+ crossAxisAlignment: CrossAxisAlignment.start,
+ children: [
+ if (_isCameraOn)
+ Container(
+ height: 200,
+ color: Colors.black,
+ alignment: Alignment.center,
+ child: (!kIsWeb && defaultTargetPlatform == TargetPlatform.macOS)
+ ? FullCameraPreview(
+ controller: _videoInput.cameraController,
+ deviceId: _videoInput.selectedCameraId,
+ onInitialized: (controller) {
+ // On macOS the controller is created by the preview widget itself,
+ _videoInput.setMacOSController(controller);
+ },
+ )
+ : (_videoInput.cameraController != null &&
+ _videoInput.controllerInitialized)
+ ? FullCameraPreview(
+ controller: _videoInput.cameraController,
+ deviceId: _videoInput.selectedCameraId,
+ onInitialized: (controller) {
+ // On web/mobile the controller is passed in above, so this callback is a no-op.
+ },
+ )
+ : const Center(child: CircularProgressIndicator()),
),
- Padding(
- padding: const EdgeInsets.symmetric(
- vertical: 25,
- horizontal: 15,
- ),
- child: Row(
- children: [
- Expanded(
- child: TextField(
- autofocus: true,
- focusNode: _textFieldFocus,
- controller: _textController,
- onSubmitted: _sendTextPrompt,
- ),
+ Expanded(
+ child: ListView.builder(
+ controller: _scrollController,
+ itemBuilder: (context, idx) {
+ return MessageWidget(
+ text: _messages[idx].text,
+ image: _messages[idx].imageBytes != null
+ ? Image.memory(
+ _messages[idx].imageBytes!,
+ cacheWidth: 400,
+ cacheHeight: 400,
+ )
+ : null,
+ isFromUser: _messages[idx].fromUser ?? false,
+ isThought: _messages[idx].isThought,
+ );
+ },
+ itemCount: _messages.length,
+ ),
+ ),
+ Padding(
+ padding: const EdgeInsets.symmetric(
+ vertical: 25,
+ horizontal: 15,
+ ),
+ child: Row(
+ children: [
+ Expanded(
+ child: TextField(
+ focusNode: _textFieldFocus,
+ controller: _textController,
+ onSubmitted: _sendTextPrompt,
),
- const SizedBox.square(
- dimension: 15,
+ ),
+ const SizedBox.square(
+ dimension: 15,
+ ),
+ AudioVisualizer(
+ audioStreamIsActive: _recording,
+ amplitudeStream: _audioInput.amplitudeStream,
+ ),
+ const SizedBox.square(
+ dimension: 15,
+ ),
+ IconButton(
+ tooltip: 'Start Streaming',
+ onPressed: !_loading
+ ? () async {
+ await _setupSession();
+ }
+ : null,
+ icon: Icon(
+ Icons.network_wifi,
+ color: _sessionOpening
+ ? Theme.of(context).colorScheme.secondary
+ : Theme.of(context).colorScheme.primary,
),
- IconButton(
- tooltip: 'Start Streaming',
- onPressed: !_loading
- ? () async {
- await _setupSession();
+ ),
+ IconButton(
+ tooltip: 'Send Stream Message',
+ onPressed: !_loading
+ ? () async {
+ if (_recording) {
+ await _stopRecording();
+ } else {
+ await _startRecording();
}
- : null,
- icon: Icon(
- Icons.network_wifi,
- color: _sessionOpening
- ? Theme.of(context).colorScheme.secondary
- : Theme.of(context).colorScheme.primary,
- ),
+ }
+ : null,
+ icon: Icon(
+ _recording ? Icons.stop : Icons.mic,
+ color: _loading
+ ? Theme.of(context).colorScheme.secondary
+ : Theme.of(context).colorScheme.primary,
+ ),
+ ),
+ IconButton(
+ tooltip: 'Toggle Camera',
+ onPressed: _isCameraOn ? _stopVideoStream : _startVideoStream,
+ icon: Icon(
+ _isCameraOn ? Icons.videocam_off : Icons.videocam,
+ color: _loading
+ ? Theme.of(context).colorScheme.secondary
+ : Theme.of(context).colorScheme.primary,
),
+ ),
+ if (!_loading)
IconButton(
- tooltip: 'Send Stream Message',
- onPressed: !_loading
- ? () async {
- if (_recording) {
- await _stopRecording();
- } else {
- await _startRecording();
- }
- }
- : null,
+ onPressed: () async {
+ await _sendTextPrompt(_textController.text);
+ },
icon: Icon(
- _recording ? Icons.stop : Icons.mic,
- color: _loading
- ? Theme.of(context).colorScheme.secondary
- : Theme.of(context).colorScheme.primary,
+ Icons.send,
+ color: Theme.of(context).colorScheme.primary,
),
- ),
- if (!_loading)
- IconButton(
- onPressed: () async {
- await _sendTextPrompt(_textController.text);
- },
- icon: Icon(
- Icons.send,
- color: Theme.of(context).colorScheme.primary,
- ),
- )
- else
- const CircularProgressIndicator(),
- ],
- ),
+ )
+ else
+ const CircularProgressIndicator(),
+ ],
),
- ],
- ),
+ ),
+ ],
),
);
}
@@ -251,20 +308,25 @@ class _BidiPageState extends State {
setState(() {
_loading = true;
});
+ await _initAudio();
+
+ try {
+ if (!_videoIsInitialized) {
+ await _initVideo();
+ } else {
+ await _videoInput.initializeCameraController();
+ }
+ } catch (e) {
+ developer.log('Video Hardware init error: $e');
+ }
if (!_sessionOpening) {
_session = await _liveModel.connect();
_sessionOpening = true;
- _stopController = StreamController();
unawaited(
- _processMessagesContinuously(
- stopSignal: _stopController,
- ),
+ _processMessagesContinuously(),
);
} else {
- _stopController.add(true);
- await _stopController.close();
-
await _session.close();
_sessionOpening = false;
}
@@ -275,6 +337,8 @@ class _BidiPageState extends State {
}
Future _startRecording() async {
+ await _audioSubscription?.cancel();
+ _audioSubscription = null;
setState(() {
_recording = true;
});
@@ -282,17 +346,27 @@ class _BidiPageState extends State {
var inputStream = await _audioInput.startRecordingStream();
await _audioOutput.playStream();
if (inputStream != null) {
- await for (final data in inputStream) {
- await _session.sendAudioRealtime(InlineDataPart('audio/pcm', data));
- }
+ _audioSubscription = inputStream.listen(
+ (data) {
+ _session.sendAudioRealtime(InlineDataPart('audio/pcm', data));
+ },
+ onError: (e) {
+ developer.log('Audio Stream Error: $e');
+ _stopRecording();
+ },
+ cancelOnError: true,
+ );
}
} catch (e) {
- developer.log(e.toString());
- _showError(e.toString());
+ developer.log('bidi_page._startRecording(): $e');
+ _showError('bidi_page._startRecording(): $e');
+ setState(() => _recording = false);
}
}
Future _stopRecording() async {
+ await _audioSubscription?.cancel();
+ _audioSubscription = null;
try {
await _audioInput.stopRecording();
} catch (e) {
@@ -304,49 +378,113 @@ class _BidiPageState extends State {
});
}
- Future _sendTextPrompt(String textPrompt) async {
+ Future _startVideoStream() async {
+ // 1. Re-entry Guard: Prevent multiple clicks while switching
+ if (_loading || !_videoIsInitialized) return;
+
+ // 2. Capture the current recording state
+ bool wasRecording = _recording;
+
setState(() {
- _loading = true;
+ _loading = true; // Lock the UI during the switch
});
+
try {
- final prompt = Content.text(textPrompt);
- await _session.send(input: prompt, turnComplete: true);
+ if (wasRecording) {
+ await _stopRecording();
+ }
+
+ // 4. Wait for ripple/UI (Prevent freeze)
+ await Future.delayed(const Duration(milliseconds: 250));
+
+ // 5. Initialize Camera if needed
+ if (!_videoInput.controllerInitialized ||
+ _videoInput.cameraController == null) {
+ await _videoInput.initializeCameraController();
+ }
+
+ // 6. Mount Camera UI
+ setState(() {
+ _isCameraOn = true;
+ });
+
+ if (!kIsWeb && defaultTargetPlatform == TargetPlatform.macOS) {
+ // _cameraController is reset to null in stopStreamingImages, so this
+ // loop correctly waits for the new view to (re)initialize the controller.
+ int attempts = 0;
+ while (_videoInput.cameraController == null) {
+ if (attempts > 50) break; // 5 second timeout safety
+ await Future.delayed(const Duration(milliseconds: 100));
+ attempts++;
+ }
+ }
+
+ // 7. Wait for Mac Camera to Settle (Prevent audio hijack)
+ await Future.delayed(const Duration(milliseconds: 1000));
+
+ // 8. CLEAN RESTART: Use the helper method!
+ // Only restart if we were recording before.
+ if (wasRecording) {
+ developer.log('Resuming audio session...');
+ await _startRecording();
+ }
+
+ // 9. Start Video Stream
+ _videoInput.startStreamingImages().listen(
+ (data) {
+ String mimeType = 'image/jpeg';
+ if (!kIsWeb && defaultTargetPlatform == TargetPlatform.macOS) {
+ if (data.length > 3 && data[0] == 0x89 && data[1] == 0x50) {
+ mimeType = 'image/png';
+ }
+ }
+ _session.sendVideoRealtime(InlineDataPart(mimeType, data));
+ },
+ onError: (e) => developer.log('Video Stream Error: $e'),
+ );
} catch (e) {
+ developer.log('Error switching to video: $e');
_showError(e.toString());
+ } finally {
+ // 10. Always unlock the UI
+ setState(() {
+ _loading = false;
+ });
}
+ }
+ Future _stopVideoStream() async {
+ await _videoInput.stopStreamingImages();
setState(() {
- _loading = false;
+ _isCameraOn = false;
});
}
- Future _processMessagesContinuously({
- required StreamController stopSignal,
- }) async {
- bool shouldContinue = true;
+ Future _sendTextPrompt(String textPrompt) async {
+ setState(() {
+ _loading = true;
+ });
+ try {
+ //final prompt = Content.text(textPrompt);
+ // await _session.send(input: prompt, turnComplete: true);
+ await _session.sendTextRealtime(textPrompt);
+ } catch (e) {
+ _showError(e.toString());
+ }
- //listen to the stop signal stream
- stopSignal.stream.listen((stop) {
- if (stop) {
- shouldContinue = false;
- }
+ setState(() {
+ _loading = false;
});
+ }
- while (shouldContinue) {
- try {
- await for (final message in _session.receive()) {
- // Process the received message
- await _handleLiveServerMessage(message);
- }
- } catch (e) {
- _showError(e.toString());
- break;
+ Future _processMessagesContinuously() async {
+ try {
+ await for (final message in _session.receive()) {
+ if (!mounted) break;
+ await _handleLiveServerMessage(message);
}
-
- // Optionally add a delay before restarting, if needed
- await Future.delayed(
- const Duration(milliseconds: 100),
- ); // Small delay to prevent tight loops
+ } catch (e) {
+ _showError(e.toString());
}
}
@@ -381,8 +519,13 @@ class _BidiPageState extends State {
}
if (transcription.finished ?? false) {
currentIndex = null;
+ setState(_scrollDown);
+ } else {
+ // Use a scheduled frame instead of an immediate setState
+ WidgetsBinding.instance.addPostFrameCallback((_) {
+ if (mounted) setState(() {});
+ });
}
- setState(_scrollDown);
}
return currentIndex;
}
@@ -405,6 +548,8 @@ class _BidiPageState extends State {
}
} else if (message is LiveServerToolCall && message.functionCalls != null) {
await _handleLiveServerToolCall(message);
+ } else if (message is GoingAwayNotice) {
+ developer.log('Session is going away in ${message.timeLeft} seconds');
}
}
@@ -429,7 +574,13 @@ class _BidiPageState extends State {
_loading = true;
});
}
- _messages.add(MessageData(text: part.text, fromUser: false));
+ _messages.add(
+ MessageData(
+ text: part.text,
+ fromUser: false,
+ isThought: part.isThought ?? false,
+ ),
+ );
setState(() {
_loading = false;
_scrollDown();
@@ -438,7 +589,7 @@ class _BidiPageState extends State {
Future _handleInlineDataPart(InlineDataPart part) async {
if (part.mimeType.startsWith('audio')) {
- _audioOutput.addAudioStream(part.bytes);
+ _audioOutput.addDataToAudioStream(part.bytes);
}
}
diff --git a/packages/firebase_ai/firebase_ai/example/lib/utils/audio_input.dart b/packages/firebase_ai/firebase_ai/example/lib/utils/audio_input.dart
index 869d4ee32781..2698537b048b 100644
--- a/packages/firebase_ai/firebase_ai/example/lib/utils/audio_input.dart
+++ b/packages/firebase_ai/firebase_ai/example/lib/utils/audio_input.dart
@@ -12,16 +12,26 @@
// See the License for the specific language governing permissions and
// limitations under the License.
-import 'package:flutter/material.dart';
+import 'package:flutter/foundation.dart';
import 'package:record/record.dart';
-import 'dart:typed_data';
+import 'dart:async';
+import 'package:waveform_flutter/waveform_flutter.dart' as wf;
class AudioInput extends ChangeNotifier {
- final _recorder = AudioRecorder();
+ AudioRecorder _recorder = AudioRecorder();
final AudioEncoder _encoder = AudioEncoder.pcm16bits;
+
bool isRecording = false;
bool isPaused = false;
- Stream? audioStream;
+
+ StreamController? _audioDataController;
+ StreamSubscription? _recorderStreamSub;
+
+ Stream? get audioStream => _audioDataController?.stream;
+
+ Stream? amplitudeStream;
+ StreamSubscription? _amplitudeSubscription;
+ StreamController? _amplitudeStreamController;
Future init() async {
await _checkPermission();
@@ -30,6 +40,7 @@ class AudioInput extends ChangeNotifier {
@override
void dispose() {
_recorder.dispose();
+ _audioDataController?.close();
super.dispose();
}
@@ -43,9 +54,61 @@ class AudioInput extends ChangeNotifier {
}
Future?> startRecordingStream() async {
+ await _amplitudeSubscription?.cancel();
+ if (_amplitudeStreamController != null &&
+ !_amplitudeStreamController!.isClosed) {
+ await _amplitudeStreamController!.close();
+ }
+
+ await _recorderStreamSub?.cancel();
+ if (_audioDataController != null && !_audioDataController!.isClosed) {
+ await _audioDataController!.close();
+ }
+
+ _audioDataController = StreamController();
+
+ // Re-instantiate the recorder to ensure we get a fresh stream.
+ // This fixes "Stream has already been listened to" errors when restarting recording.
+ try {
+ if (await _recorder.isRecording()) {
+ await _recorder.stop();
+ }
+ } catch (e) {
+ debugPrint('Error stopping recorder: $e');
+ }
+ await _recorder.dispose();
+ _recorder = AudioRecorder();
+
+ // 1. DEVICE SELECTION LOGIC
+ // Fetch all devices to find the real microphone
+ final devices = await _recorder.listInputDevices();
+ InputDevice? selectedDevice;
+
+ try {
+ // Find the device that is NOT BlackHole and looks like a built-in mic.
+ // Browsers often name it "Default - Internal Microphone" or "Built-in Audio".
+ selectedDevice = devices.firstWhere(
+ (device) {
+ final label = device.label.toLowerCase();
+ return !label.contains('blackhole') &&
+ (label.contains('internal') ||
+ label.contains('built-in') ||
+ label.contains('macbook'));
+ },
+ // Fallback: Just find anything that isn't Blackhole
+ orElse: () => devices.firstWhere(
+ (d) => !d.label.toLowerCase().contains('blackhole'),
+ orElse: () => devices.first, // Absolute fallback
+ ),
+ );
+ } catch (e) {
+ debugPrint('Error selecting device: $e');
+ }
+
var recordConfig = RecordConfig(
encoder: _encoder,
sampleRate: 24000,
+ device: selectedDevice,
numChannels: 1,
echoCancel: true,
noiseSuppress: true,
@@ -54,15 +117,60 @@ class AudioInput extends ChangeNotifier {
),
iosConfig: const IosRecordConfig(categoryOptions: []),
);
- await _recorder.listInputDevices();
- audioStream = await _recorder.startStream(recordConfig);
+
+ final rawStream = await _recorder.startStream(recordConfig);
+
+ _recorderStreamSub = rawStream.listen(
+ (data) {
+ if (data.isNotEmpty &&
+ _audioDataController != null &&
+ !_audioDataController!.isClosed) {
+ // debugPrint('AudioInput: received ${data.length} bytes');
+ _audioDataController!.add(data);
+ }
+ },
+ onError: (e) {
+ debugPrint('Recorder stream error: $e');
+ if (_audioDataController != null && !_audioDataController!.isClosed) {
+ _audioDataController!.addError(e);
+ }
+ },
+ onDone: () {
+ // Do not close the controller here automatically; let stopRecording handle it
+ // to prevent race conditions in the UI.
+ },
+ );
+
+ _amplitudeStreamController = StreamController.broadcast();
+ _amplitudeSubscription = _recorder
+ .onAmplitudeChanged(const Duration(milliseconds: 100))
+ .listen((amp) {
+ _amplitudeStreamController?.add(
+ wf.Amplitude(current: amp.current, max: amp.max),
+ );
+ });
+ amplitudeStream = _amplitudeStreamController?.stream;
+
isRecording = true;
notifyListeners();
- return audioStream;
+
+ return _audioDataController!.stream;
}
Future stopRecording() async {
- await _recorder.stop();
+ try {
+ await _recorder.stop();
+ } catch (e) {
+ debugPrint('Error stopping recorder hardware: $e');
+ }
+ await _amplitudeSubscription?.cancel();
+ await _amplitudeStreamController?.close();
+ amplitudeStream = null;
+
+ await _recorderStreamSub?.cancel();
+ await _audioDataController?.close();
+ _audioDataController = null;
+
isRecording = false;
notifyListeners();
}
diff --git a/packages/firebase_ai/firebase_ai/example/lib/utils/audio_output.dart b/packages/firebase_ai/firebase_ai/example/lib/utils/audio_output.dart
index b97ad3478f5b..4e0623eeef97 100644
--- a/packages/firebase_ai/firebase_ai/example/lib/utils/audio_output.dart
+++ b/packages/firebase_ai/firebase_ai/example/lib/utils/audio_output.dart
@@ -12,55 +12,86 @@
// See the License for the specific language governing permissions and
// limitations under the License.
+import 'dart:developer';
import 'dart:typed_data';
-
import 'package:flutter_soloud/flutter_soloud.dart';
class AudioOutput {
+ bool initialized = false;
AudioSource? stream;
SoundHandle? handle;
+ final int sampleRate = 24000;
+ final Channels channels = Channels.mono;
+ final BufferType format = BufferType.s16le; // pcm16bits
Future init() async {
- // Initialize the player.
- await SoLoud.instance.init(sampleRate: 24000, channels: Channels.mono);
- await setupNewStream();
- }
+ if (initialized) {
+ return;
+ }
- Future setupNewStream() async {
- if (SoLoud.instance.isInitialized) {
- // Stop and clear any previous playback handle if it's still valid
- await stopStream(); // Ensure previous sound is stopped
+ /// Initialize the player (singleton).
+ await SoLoud.instance.init(sampleRate: sampleRate, channels: channels);
+ initialized = true;
+ }
- stream = SoLoud.instance.setBufferStream(
- maxBufferSizeBytes:
- 1024 * 1024 * 10, // 10MB of max buffer (not allocated)
- bufferingType: BufferingType.released,
- bufferingTimeNeeds: 0,
- onBuffering: (isBuffering, handle, time) {},
- );
- // Reset handle to null until the stream is played again
- handle = null;
+ Future dispose() async {
+ if (initialized) {
+ await SoLoud.instance.disposeAllSources();
+ SoLoud.instance.deinit();
+ initialized = false;
}
}
- Future playStream() async {
- handle = await SoLoud.instance.play(stream!);
+ SoLoud get instance => SoLoud.instance;
+
+ AudioSource? setupNewStream() {
+ if (!SoLoud.instance.isInitialized) {
+ return null;
+ }
+
+ stream = SoLoud.instance.setBufferStream(
+ bufferingType: BufferingType.released,
+ bufferingTimeNeeds: 0,
+ sampleRate: sampleRate,
+ channels: channels,
+ format: format,
+ onBuffering: (isBuffering, handle, time) {
+ log('Buffering: $isBuffering, Time: $time');
+ },
+ );
+ log('New audio output stream buffer created.');
return stream;
}
- Future stopStream() async {
- if (stream != null &&
- handle != null &&
- SoLoud.instance.getIsValidVoiceHandle(handle!)) {
- SoLoud.instance.setDataIsEnded(stream!);
- await SoLoud.instance.stop(handle!);
+ Future playStream() async {
+ var myStream = setupNewStream();
+ if (!SoLoud.instance.isInitialized || myStream == null) {
+ return null;
+ }
+ // Play audio stream
+ handle = await SoLoud.instance.play(myStream);
+ return stream = myStream;
+ }
- // Clear old stream, set up new session for next time.
- await setupNewStream();
+ void addDataToAudioStream(Uint8List audioChunk) {
+ var currentStream = stream;
+ if (currentStream != null) {
+ SoLoud.instance.addAudioDataStream(currentStream, audioChunk);
}
}
- void addAudioStream(Uint8List audioChunk) {
- SoLoud.instance.addAudioDataStream(stream!, audioChunk);
+ Future stopStream() async {
+ var currentStream = stream;
+ var currentHandle = handle;
+
+ // Stream doesn't exist or handle is not valid - so nothing to stop.
+ if (currentStream == null ||
+ currentHandle == null ||
+ !SoLoud.instance.getIsValidVoiceHandle(currentHandle)) {
+ return;
+ }
+ // End data to stream & stop currently playing sound from handle
+ SoLoud.instance.setDataIsEnded(currentStream);
+ await SoLoud.instance.stop(currentHandle);
}
}
diff --git a/packages/firebase_ai/firebase_ai/example/lib/utils/video_input.dart b/packages/firebase_ai/firebase_ai/example/lib/utils/video_input.dart
new file mode 100644
index 000000000000..97dc8108343b
--- /dev/null
+++ b/packages/firebase_ai/firebase_ai/example/lib/utils/video_input.dart
@@ -0,0 +1,219 @@
+// Copyright 2025 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+import 'dart:developer';
+import 'dart:async';
+import 'package:camera/camera.dart';
+import 'package:camera_macos/camera_macos.dart';
+import 'package:flutter/foundation.dart';
+
+class VideoInput extends ChangeNotifier {
+ List _cameras = [];
+ dynamic _cameraController;
+ dynamic _selectedCamera;
+ bool controllerInitialized = false;
+ Timer? _captureTimer;
+ StreamController _imageStreamController =
+ StreamController.broadcast();
+ bool _isStreaming = false;
+
+ List get cameras => _cameras;
+ dynamic get cameraController => _cameraController;
+
+ Future init() async {
+ try {
+ if (!kIsWeb && defaultTargetPlatform == TargetPlatform.macOS) {
+ //await camera_macos_lib.loadLibrary();
+ _cameras = await CameraMacOS.instance.listDevices();
+ } else {
+ _cameras = await availableCameras();
+ }
+ if (_cameras.isNotEmpty) {
+ _selectedCamera = _cameras[0];
+ }
+ } catch (e) {
+ log('Error getting available cameras: $e');
+ }
+ }
+
+ @override
+ void dispose() {
+ super.dispose();
+ stopStreamingImages();
+ if (controllerInitialized && _cameraController != null) {
+ if (!kIsWeb && defaultTargetPlatform == TargetPlatform.macOS) {
+ (_cameraController as CameraMacOSController).destroy();
+ } else {
+ (_cameraController as CameraController).dispose();
+ }
+ }
+ }
+
+ String? get selectedCameraId {
+ if (_selectedCamera == null) return null;
+ if (!kIsWeb && defaultTargetPlatform == TargetPlatform.macOS) {
+ return _selectedCamera.deviceId;
+ }
+ return null;
+ }
+
+ void setMacOSController(dynamic controller) {
+ _cameraController = controller;
+ controllerInitialized = true;
+ notifyListeners();
+ }
+
+ Future initializeCameraController() async {
+ if (controllerInitialized && _cameraController != null) {
+ if (!kIsWeb && defaultTargetPlatform == TargetPlatform.macOS) {
+ await (_cameraController as CameraMacOSController).destroy();
+ } else {
+ await (_cameraController as CameraController).dispose();
+ }
+ controllerInitialized = false;
+ }
+
+ if (_selectedCamera == null) {
+ log('No camera selected or available.');
+ return;
+ }
+
+ if (!kIsWeb && defaultTargetPlatform == TargetPlatform.macOS) {
+ // On macOS, we rely on CameraMacOSView to initialize the controller.
+ controllerInitialized = false;
+ notifyListeners();
+ } else {
+ _cameraController = CameraController(
+ _selectedCamera as CameraDescription,
+ ResolutionPreset.medium,
+ enableAudio: false,
+ imageFormatGroup: ImageFormatGroup.jpeg,
+ );
+ try {
+ await (_cameraController as CameraController).initialize();
+ controllerInitialized = true;
+ notifyListeners();
+ } catch (e) {
+ log('Error initializing camera: $e');
+ }
+ }
+ }
+
+ Stream startStreamingImages() {
+ final bool isInitialized =
+ !kIsWeb && defaultTargetPlatform == TargetPlatform.macOS
+ ? _cameraController != null
+ : (_cameraController as CameraController?)?.value.isInitialized ??
+ false;
+
+ if (_cameraController == null || !isInitialized) {
+ throw ErrorSummary('Unable to start image stream');
+ }
+
+ _captureTimer?.cancel();
+
+ _captureTimer = Timer.periodic(
+ const Duration(seconds: 1), // Capture images at 1 frame per second
+ (timer) async {
+ final bool currentIsInitialized = !kIsWeb &&
+ defaultTargetPlatform == TargetPlatform.macOS
+ ? _cameraController != null
+ : (_cameraController as CameraController?)?.value.isInitialized ??
+ false;
+
+ if (_cameraController == null ||
+ !currentIsInitialized ||
+ !_isStreaming) {
+ log('Stopping timer due to invalid state.');
+ await stopStreamingImages();
+ return;
+ }
+
+ try {
+ if (!kIsWeb && defaultTargetPlatform == TargetPlatform.macOS) {
+ final controller = _cameraController as CameraMacOSController;
+ final CameraMacOSFile? image = await controller.takePicture();
+ if (image != null && image.bytes != null) {
+ if (!_imageStreamController.isClosed) {
+ _imageStreamController.add(image.bytes!);
+ }
+ }
+ } else {
+ final controller = _cameraController as CameraController;
+ if (controller.value.isTakingPicture) return;
+ final XFile imageFile = await controller.takePicture();
+ Uint8List imageBytes = await imageFile.readAsBytes();
+ if (!_imageStreamController.isClosed) {
+ _imageStreamController.add(imageBytes);
+ }
+ }
+ } catch (e) {
+ log('Error taking picture: $e');
+ }
+ },
+ );
+ _isStreaming = true;
+ return _imageStreamController.stream;
+ }
+
+ /// Stops the periodic image capture and closes the stream.
+ Future stopStreamingImages() async {
+ if (!_isStreaming) {
+ return; // Nothing to stop
+ }
+ _captureTimer?.cancel();
+ _captureTimer = null;
+
+ if (!_imageStreamController.isClosed) {
+ await _imageStreamController.close();
+ }
+ _imageStreamController = StreamController.broadcast();
+ _isStreaming = false;
+
+ if (!kIsWeb && defaultTargetPlatform == TargetPlatform.macOS) {
+ // On macOS, the view owns the controller, so we just forget our reference.
+ _cameraController = null;
+ controllerInitialized = false;
+ } else {
+ // On Web/Mobile, we dispose it manually.
+ await (_cameraController as CameraController?)?.dispose();
+ _cameraController = null;
+ controllerInitialized = false;
+ }
+ notifyListeners();
+ }
+
+ Future flipCamera() async {
+ if (_cameras.length > 1) {
+ if (!kIsWeb && defaultTargetPlatform == TargetPlatform.macOS) {
+ final currentSelected = _selectedCamera;
+ final otherCamera = _cameras.firstWhere(
+ (camera) => camera.deviceId != currentSelected.deviceId,
+ orElse: () => _cameras[0],
+ );
+ _selectedCamera = otherCamera;
+ } else {
+ final currentSelected = _selectedCamera as CameraDescription;
+ final otherCamera = _cameras.firstWhere(
+ (camera) =>
+ (camera as CameraDescription).lensDirection !=
+ currentSelected.lensDirection,
+ orElse: () => _cameras[0],
+ );
+ _selectedCamera = otherCamera;
+ }
+ await initializeCameraController();
+ }
+ }
+}
diff --git a/packages/firebase_ai/firebase_ai/example/lib/widgets/audio_visualizer.dart b/packages/firebase_ai/firebase_ai/example/lib/widgets/audio_visualizer.dart
new file mode 100644
index 000000000000..bbb1bd8fb5d6
--- /dev/null
+++ b/packages/firebase_ai/firebase_ai/example/lib/widgets/audio_visualizer.dart
@@ -0,0 +1,40 @@
+// Copyright 2025 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+import 'package:flutter/material.dart';
+import 'package:waveform_flutter/waveform_flutter.dart';
+import 'sound_waves.dart';
+
/// Shows a live waveform while audio is streaming; otherwise fills the
/// available space with a [Spacer] so surrounding layout stays stable.
class AudioVisualizer extends StatelessWidget {
  const AudioVisualizer({
    super.key,
    required this.audioStreamIsActive,
    this.amplitudeStream,
  });

  /// Whether audio is currently being streamed.
  final bool audioStreamIsActive;

  /// Amplitude samples driving the waveform; may be null when inactive.
  final Stream? amplitudeStream;

  @override
  Widget build(BuildContext context) {
    final stream = amplitudeStream;
    if (!audioStreamIsActive || stream == null) {
      return const Spacer();
    }
    return Expanded(
      child: ConstrainedBox(
        constraints: const BoxConstraints(maxWidth: 400),
        child: Soundwaves(amplitudeStream: stream),
      ),
    );
  }
}
diff --git a/packages/firebase_ai/firebase_ai/example/lib/widgets/camera_previews.dart b/packages/firebase_ai/firebase_ai/example/lib/widgets/camera_previews.dart
new file mode 100644
index 000000000000..5bf338f7e345
--- /dev/null
+++ b/packages/firebase_ai/firebase_ai/example/lib/widgets/camera_previews.dart
@@ -0,0 +1,151 @@
+// Copyright 2025 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+import 'package:camera/camera.dart';
+import 'package:camera_macos/camera_macos.dart' deferred as camera_macos_lib;
+import 'package:flutter/foundation.dart';
+import 'package:flutter_animate/flutter_animate.dart';
+import 'package:flutter/material.dart';
+
/// A fixed-size (352x352), rounded 1:1 camera preview.
///
/// The underlying camera preview is usually not square, so it is scaled
/// to cover the square and then clipped.
class SquareCameraPreview extends StatelessWidget {
  const SquareCameraPreview({
    required this.controller,
    this.deviceId,
    this.onInitialized,
    super.key,
  });

  /// Platform camera controller; a `CameraController` on Web/Mobile.
  /// On macOS the view creates its own controller.
  final dynamic controller;

  /// camera_macos device id (macOS only).
  final String? deviceId;

  /// Invoked with the platform controller once the macOS view is ready.
  final Function(dynamic)? onInitialized;

  /// Builds the platform-specific preview widget.
  Widget _buildPreview() {
    if (!kIsWeb && defaultTargetPlatform == TargetPlatform.macOS) {
      // The macOS camera library is deferred; load it before building.
      return FutureBuilder(
        future: camera_macos_lib.loadLibrary(),
        builder: (context, snapshot) {
          if (snapshot.connectionState != ConnectionState.done) {
            return const SizedBox.shrink();
          }
          return camera_macos_lib.CameraMacOSView(
            deviceId: deviceId,
            cameraMode: camera_macos_lib.CameraMacOSMode.photo,
            enableAudio: false,
            onCameraInizialized: (dynamic controller) {
              onInitialized?.call(controller);
            },
          );
        },
      );
    }
    // NOTE(review): assumes a non-null CameraController on Web/Mobile —
    // confirm callers only build this widget after initialization.
    return CameraPreview(controller as CameraController);
  }

  @override
  Widget build(BuildContext context) {
    // On macOS the controller exposes no aspect ratio here, so 1 is used.
    double aspectRatio = 1;
    if (kIsWeb || defaultTargetPlatform != TargetPlatform.macOS) {
      aspectRatio = (controller as CameraController?)?.value.aspectRatio ?? 1.0;
    }

    return Center(
      child: Container(
        width: 352, // Adjusted from 350 to be a multiple of 4
        height: 352,
        decoration: BoxDecoration(
          borderRadius: BorderRadius.circular(16),
        ),
        child: AspectRatio(
          aspectRatio: 1,
          child: ClipRRect(
            borderRadius: const BorderRadius.all(Radius.circular(16)),
            // The camera preview is often not a square. To fill the 1:1
            // aspect ratio, scale the preview to cover the area and clip it.
            child: Transform.scale(
              scale: aspectRatio,
              child: Center(child: _buildPreview()),
            ),
          ),
        ),
      ),
    );
  }
}
+
/// A full-bleed, rounded camera preview with an entrance animation.
///
/// Works on Web/Mobile via the `camera` plugin and on macOS via the
/// deferred `camera_macos` library.
class FullCameraPreview extends StatefulWidget {
  const FullCameraPreview({
    required this.controller,
    this.deviceId,
    this.onInitialized,
    super.key,
  });

  /// Platform camera controller; a `CameraController` on Web/Mobile.
  /// On macOS the view creates its own controller.
  final dynamic controller;

  /// camera_macos device id (macOS only).
  final String? deviceId;

  /// Invoked with the platform controller once the macOS view is ready.
  final Function(dynamic)? onInitialized;

  @override
  State createState() => _FullCameraPreviewState();
}
+
class _FullCameraPreviewState extends State<FullCameraPreview>
    with SingleTickerProviderStateMixin {
  /// Drives the scale/fade entrance animation of the preview.
  late AnimationController _animController;

  @override
  void initState() {
    super.initState();
    _animController = AnimationController(
      vsync: this, // the SingleTickerProviderStateMixin
      duration: const Duration(seconds: 1),
    );
  }

  @override
  void dispose() {
    // Release owned resources first; super.dispose() must come last per
    // the State.dispose contract.
    _animController.dispose();
    super.dispose();
  }

  @override
  Widget build(BuildContext context) {
    return Padding(
      padding: const EdgeInsets.all(16),
      child: ClipRRect(
        borderRadius: const BorderRadius.all(Radius.circular(16)),
        child: !kIsWeb && defaultTargetPlatform == TargetPlatform.macOS
            ? FutureBuilder(
                // The macOS camera library is deferred; load it first.
                future: camera_macos_lib.loadLibrary(),
                builder: (context, snapshot) {
                  if (snapshot.connectionState == ConnectionState.done) {
                    return camera_macos_lib.CameraMacOSView(
                      deviceId: widget.deviceId,
                      cameraMode: camera_macos_lib.CameraMacOSMode.photo,
                      enableAudio: false,
                      onCameraInizialized: (dynamic controller) {
                        widget.onInitialized?.call(controller);
                      },
                    );
                  }
                  return const SizedBox.shrink();
                },
              )
            : CameraPreview(widget.controller as CameraController),
      ),
    ).animate(controller: _animController).scaleXY().fadeIn();
  }
}
diff --git a/packages/firebase_ai/firebase_ai/example/lib/widgets/sound_waves.dart b/packages/firebase_ai/firebase_ai/example/lib/widgets/sound_waves.dart
new file mode 100644
index 000000000000..7f969e172cb3
--- /dev/null
+++ b/packages/firebase_ai/firebase_ai/example/lib/widgets/sound_waves.dart
@@ -0,0 +1,118 @@
+// Copyright 2025 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+import 'dart:math';
+import 'package:flutter/material.dart';
+import 'package:waveform_flutter/waveform_flutter.dart';
+
/// Centers [child] on top of a 160x160 pair of concentric outline
/// circles painted in the theme's primary color.
class CenterCircle extends StatelessWidget {
  const CenterCircle({required this.child, super.key});

  /// Widget drawn over the painted circles.
  final Widget child;

  @override
  Widget build(BuildContext context) {
    final painter = NestedCirclesPainter(
      color: Theme.of(context).colorScheme.primary,
      strokeWidth: 1,
    );
    return Center(
      child: CustomPaint(
        size: const Size(160, 160),
        painter: painter,
        child: child,
      ),
    );
  }
}
+
/// Renders an animated waveform for [amplitudeStream], 48 px tall with
/// horizontal padding.
class Soundwaves extends StatelessWidget {
  const Soundwaves({required this.amplitudeStream, super.key});

  /// Amplitude samples consumed by the waveform list.
  final Stream amplitudeStream;

  @override
  Widget build(BuildContext context) {
    final barColor = Theme.of(context).colorScheme.primary;
    return Padding(
      padding: const EdgeInsets.symmetric(horizontal: 16),
      child: SizedBox(
        height: 48,
        child: AnimatedWaveList(
          stream: amplitudeStream,
          barBuilder: (animation, amplitude) => WaveFormBar(
            amplitude: amplitude,
            animation: animation,
            color: barColor,
          ),
        ),
      ),
    );
  }
}
+
/// Custom painter that draws two nested outline circles centered in the
/// available size.
class NestedCirclesPainter extends CustomPainter {
  /// Outline color (rendered at 70% opacity).
  final Color color;

  /// Stroke width shared by both circles.
  final double strokeWidth;

  /// Radial gap between the outer and the inner circle.
  final double gapBetweenCircles;

  NestedCirclesPainter({
    this.color = Colors.white54, // Default color for the circles
    this.strokeWidth = 1.5, // Default stroke width for both circles
    this.gapBetweenCircles = 4.0, // Default gap between the circles
  });

  @override
  void paint(Canvas canvas, Size size) {
    // Center of the drawing area.
    final Offset center = Offset(size.width / 2, size.height / 2);

    // Same paint for both circles: slightly transparent outline.
    final Paint paint = Paint()
      ..color = color.withValues(alpha: 0.7)
      ..strokeWidth = strokeWidth
      ..style = PaintingStyle.stroke;

    // Largest circle that fits the bounds, accounting for stroke width.
    // Always drawn (the original drew it in both branches).
    final double outerRadius =
        min(size.width / 2, size.height / 2) - strokeWidth / 2;
    canvas.drawCircle(center, outerRadius, paint);

    // Inner circle, skipped when the gap leaves no positive radius.
    final double innerRadius =
        outerRadius - gapBetweenCircles - strokeWidth / 2;
    if (innerRadius > 0) {
      canvas.drawCircle(center, innerRadius, paint);
    }
  }

  @override
  bool shouldRepaint(covariant NestedCirclesPainter oldDelegate) {
    // The framework only calls shouldRepaint when the old delegate has the
    // same runtime type, so a covariant parameter replaces the `is` check.
    return oldDelegate.color != color ||
        oldDelegate.strokeWidth != strokeWidth ||
        oldDelegate.gapBetweenCircles != gapBetweenCircles;
  }
}
diff --git a/packages/firebase_ai/firebase_ai/example/macos/Runner/DebugProfile.entitlements b/packages/firebase_ai/firebase_ai/example/macos/Runner/DebugProfile.entitlements
index 8560da29b687..4c4d67d33f66 100644
--- a/packages/firebase_ai/firebase_ai/example/macos/Runner/DebugProfile.entitlements
+++ b/packages/firebase_ai/firebase_ai/example/macos/Runner/DebugProfile.entitlements
@@ -14,6 +14,8 @@
com.apple.security.device.audio-input
	<key>com.apple.security.device.camera</key>
	<true/>
com.apple.security.files.user-selected.read-only
diff --git a/packages/firebase_ai/firebase_ai/example/macos/Runner/Info.plist b/packages/firebase_ai/firebase_ai/example/macos/Runner/Info.plist
index d4369e6253fa..2025c8cb0b8d 100644
--- a/packages/firebase_ai/firebase_ai/example/macos/Runner/Info.plist
+++ b/packages/firebase_ai/firebase_ai/example/macos/Runner/Info.plist
@@ -30,6 +30,8 @@
NSApplication
NSMicrophoneUsageDescription
Permission to Record audio
	<key>NSCameraUsageDescription</key>
	<string>This app needs access to the camera to take pictures and record videos.</string>
NSPhotoLibraryUsageDescription
This app needs access to your photo library to let you select a profile picture.
diff --git a/packages/firebase_ai/firebase_ai/example/macos/Runner/Release.entitlements b/packages/firebase_ai/firebase_ai/example/macos/Runner/Release.entitlements
index 2f9659c917fb..383535f97985 100644
--- a/packages/firebase_ai/firebase_ai/example/macos/Runner/Release.entitlements
+++ b/packages/firebase_ai/firebase_ai/example/macos/Runner/Release.entitlements
@@ -6,5 +6,11 @@
com.apple.security.files.downloads.read-write
	<key>com.apple.security.network.client</key>
	<true/>
	<key>com.apple.security.device.audio-input</key>
	<true/>
	<key>com.apple.security.device.camera</key>
	<true/>
diff --git a/packages/firebase_ai/firebase_ai/example/pubspec.yaml b/packages/firebase_ai/firebase_ai/example/pubspec.yaml
index 210b42f4479d..29db616091be 100644
--- a/packages/firebase_ai/firebase_ai/example/pubspec.yaml
+++ b/packages/firebase_ai/firebase_ai/example/pubspec.yaml
@@ -19,18 +19,22 @@ dependencies:
# The following adds the Cupertino Icons font to your application.
# Use with the CupertinoIcons class for iOS style icons.
+ camera: ^0.11.2+1
+ camera_macos: ^0.0.9
cupertino_icons: ^1.0.6
firebase_ai: ^3.7.0
firebase_core: ^4.4.0
firebase_storage: ^13.0.6
flutter:
sdk: flutter
+ flutter_animate: ^4.5.2
flutter_markdown: ^0.6.20
flutter_soloud: ^3.1.6
image: ^4.5.4
image_picker: ^1.1.2
path_provider: ^2.1.5
record: ^5.2.1
+ waveform_flutter: ^1.2.0
dev_dependencies:
flutter_lints: ^4.0.0
diff --git a/packages/firebase_ai/firebase_ai/example/web/index.html b/packages/firebase_ai/firebase_ai/example/web/index.html
index adc47a626031..6d01e20f17a5 100644
--- a/packages/firebase_ai/firebase_ai/example/web/index.html
+++ b/packages/firebase_ai/firebase_ai/example/web/index.html
@@ -34,5 +34,7 @@
+
+