Skip to content
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
25 changes: 13 additions & 12 deletions packages/firebase_ai/firebase_ai/example/lib/pages/bidi_page.dart
Original file line number Diff line number Diff line change
Expand Up @@ -65,7 +65,7 @@ class _BidiPageState extends State<BidiPage> {
final config = LiveGenerationConfig(
speechConfig: SpeechConfig(voiceName: 'Fenrir'),
responseModalities: [
ResponseModalities.audio,
ResponseModalities.text,
],
);

Expand Down Expand Up @@ -133,11 +133,13 @@ class _BidiPageState extends State<BidiPage> {
itemBuilder: (context, idx) {
return MessageWidget(
text: _messages[idx].text,
image: Image.memory(
_messages[idx].imageBytes!,
cacheWidth: 400,
cacheHeight: 400,
),
image: _messages[idx].imageBytes != null
? Image.memory(
_messages[idx].imageBytes!,
cacheWidth: 400,
cacheHeight: 400,
)
: null,
isFromUser: _messages[idx].fromUser ?? false,
);
},
Expand Down Expand Up @@ -277,11 +279,9 @@ class _BidiPageState extends State<BidiPage> {
await _audioOutput.playStream();
// Map the Uint8List stream to InlineDataPart stream
if (inputStream != null) {
final inlineDataStream = inputStream.map((data) {
return InlineDataPart('audio/pcm', data);
});

await _session.sendMediaStream(inlineDataStream);
await for (final data in inputStream) {
await _session.sendAudio(InlineDataPart('audio/pcm', data));
}
}
} catch (e) {
developer.log(e.toString());
Expand All @@ -307,7 +307,8 @@ class _BidiPageState extends State<BidiPage> {
});
try {
final prompt = Content.text(textPrompt);
await _session.send(input: prompt, turnComplete: true);
// await _session.send(input: prompt, turnComplete: true);
await _session.sendText(textPrompt);
} catch (e) {
_showError(e.toString());
}
Expand Down
45 changes: 40 additions & 5 deletions packages/firebase_ai/firebase_ai/lib/src/live_api.dart
Original file line number Diff line number Diff line change
Expand Up @@ -179,18 +179,53 @@ class LiveServerResponse {
/// Represents realtime input from the client in a live stream.
class LiveClientRealtimeInput {
  /// Creates a [LiveClientRealtimeInput] instance.
  ///
  /// At most one of [mediaChunks], [audio], [video], or [text] should be
  /// provided; each populates a distinct key of the serialized payload.
  LiveClientRealtimeInput({
    @Deprecated('Use audio, video, or text instead') this.mediaChunks,
    this.audio,
    this.video,
    this.text,
  });

  /// Creates a [LiveClientRealtimeInput] with audio data.
  LiveClientRealtimeInput.audio(this.audio)
      : mediaChunks = null,
        video = null,
        text = null;

  /// Creates a [LiveClientRealtimeInput] with video data.
  LiveClientRealtimeInput.video(this.video)
      : mediaChunks = null,
        audio = null,
        text = null;

  /// Creates a [LiveClientRealtimeInput] with text data.
  LiveClientRealtimeInput.text(this.text)
      : mediaChunks = null,
        audio = null,
        video = null;

  /// The list of media chunks.
  @Deprecated('Use audio, video, or text instead')
  final List<InlineDataPart>? mediaChunks;

  /// Audio data.
  final InlineDataPart? audio;

  /// Video data.
  final InlineDataPart? video;

  /// Text data.
  final String? text;

  // ignore: public_member_api_docs
  Map<String, dynamic> toJson() => {
        'realtime_input': {
          // Emit only the populated keys so the server never receives
          // explicit nulls or a duplicate 'media_chunks' entry.
          if (mediaChunks != null)
            'media_chunks':
                mediaChunks!.map((e) => e.toMediaChunkJson()).toList(),
          if (audio != null) 'audio': audio!.toMediaChunkJson(),
          if (video != null) 'video': video!.toMediaChunkJson(),
          if (text != null) 'text': text,
        },
      };
}
Expand Down
32 changes: 32 additions & 0 deletions packages/firebase_ai/firebase_ai/lib/src/live_session.dart
Original file line number Diff line number Diff line change
Expand Up @@ -76,9 +76,40 @@ class LiveSession {
_ws.sink.add(clientJson);
}

/// Sends audio data to the server.
///
/// [audio]: The audio data to send.
Future<void> sendAudio(InlineDataPart audio) async {
  _checkWsStatus();
  final message = LiveClientRealtimeInput.audio(audio);
  _ws.sink.add(jsonEncode(message.toJson()));
}

/// Sends video data to the server.
///
/// [video]: The video data to send.
Future<void> sendVideo(InlineDataPart video) async {
  _checkWsStatus();
  final message = LiveClientRealtimeInput.video(video);
  _ws.sink.add(jsonEncode(message.toJson()));
}

/// Sends text data to the server.
///
/// [text]: The text data to send.
Future<void> sendText(String text) async {
  _checkWsStatus();
  final message = LiveClientRealtimeInput.text(text);
  _ws.sink.add(jsonEncode(message.toJson()));
}

/// Sends realtime input (media chunks) to the server.
///
/// [mediaChunks]: The list of media chunks to send.
@Deprecated('Use sendAudio, sendVideo, or sendText instead')
Future<void> sendMediaChunks({
required List<InlineDataPart> mediaChunks,
}) async {
Expand All @@ -95,6 +126,7 @@ class LiveSession {
///
/// Parameters:
/// - [mediaChunkStream]: The stream of [InlineDataPart] objects to send to the server.
@Deprecated('Use sendAudio, sendVideo, or sendText with a stream instead')
Future<void> sendMediaStream(Stream<InlineDataPart> mediaChunkStream) async {
_checkWsStatus();

Expand Down
Loading