Skip to content

Commit 0ceb1e2

Browse files
committed
Update the bidi (Live) input API: add sendAudio/sendVideo/sendText and deprecate mediaChunks-based input
1 parent 8c0802d commit 0ceb1e2

File tree

3 files changed

+85
-17
lines changed

3 files changed

+85
-17
lines changed

packages/firebase_ai/firebase_ai/example/lib/pages/bidi_page.dart

Lines changed: 13 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -65,7 +65,7 @@ class _BidiPageState extends State<BidiPage> {
6565
final config = LiveGenerationConfig(
6666
speechConfig: SpeechConfig(voiceName: 'Fenrir'),
6767
responseModalities: [
68-
ResponseModalities.audio,
68+
ResponseModalities.text,
6969
],
7070
);
7171

@@ -133,11 +133,13 @@ class _BidiPageState extends State<BidiPage> {
133133
itemBuilder: (context, idx) {
134134
return MessageWidget(
135135
text: _messages[idx].text,
136-
image: Image.memory(
137-
_messages[idx].imageBytes!,
138-
cacheWidth: 400,
139-
cacheHeight: 400,
140-
),
136+
image: _messages[idx].imageBytes != null
137+
? Image.memory(
138+
_messages[idx].imageBytes!,
139+
cacheWidth: 400,
140+
cacheHeight: 400,
141+
)
142+
: null,
141143
isFromUser: _messages[idx].fromUser ?? false,
142144
);
143145
},
@@ -277,11 +279,9 @@ class _BidiPageState extends State<BidiPage> {
277279
await _audioOutput.playStream();
278280
// Send each Uint8List chunk from the input stream as an audio InlineDataPart
279281
if (inputStream != null) {
280-
final inlineDataStream = inputStream.map((data) {
281-
return InlineDataPart('audio/pcm', data);
282-
});
283-
284-
await _session.sendMediaStream(inlineDataStream);
282+
await for (final data in inputStream) {
283+
await _session.sendAudio(InlineDataPart('audio/pcm', data));
284+
}
285285
}
286286
} catch (e) {
287287
developer.log(e.toString());
@@ -307,7 +307,8 @@ class _BidiPageState extends State<BidiPage> {
307307
});
308308
try {
309309
final prompt = Content.text(textPrompt);
310-
await _session.send(input: prompt, turnComplete: true);
310+
// await _session.send(input: prompt, turnComplete: true);
311+
await _session.sendText(textPrompt);
311312
} catch (e) {
312313
_showError(e.toString());
313314
}

packages/firebase_ai/firebase_ai/lib/src/live_api.dart

Lines changed: 40 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -180,18 +180,53 @@ class LiveServerResponse {
180180
/// Represents realtime input from the client in a live stream.
181181
class LiveClientRealtimeInput {
182182
/// Creates a [LiveClientRealtimeInput] instance.
183-
///
184-
/// [mediaChunks] (optional): The list of media chunks.
185-
LiveClientRealtimeInput({this.mediaChunks});
183+
LiveClientRealtimeInput({
184+
@Deprecated('Use audio, video, or text instead') this.mediaChunks,
185+
this.audio,
186+
this.video,
187+
this.text,
188+
});
189+
190+
/// Creates a [LiveClientRealtimeInput] with audio data.
191+
LiveClientRealtimeInput.audio(this.audio)
192+
: mediaChunks = null,
193+
video = null,
194+
text = null;
195+
196+
/// Creates a [LiveClientRealtimeInput] with video data.
197+
LiveClientRealtimeInput.video(this.video)
198+
: mediaChunks = null,
199+
audio = null,
200+
text = null;
201+
202+
/// Creates a [LiveClientRealtimeInput] with text data.
203+
LiveClientRealtimeInput.text(this.text)
204+
: mediaChunks = null,
205+
audio = null,
206+
video = null;
186207

187208
/// The list of media chunks.
209+
@Deprecated('Use audio, video, or text instead')
188210
final List<InlineDataPart>? mediaChunks;
189211

212+
/// Audio data.
213+
final InlineDataPart? audio;
214+
215+
/// Video data.
216+
final InlineDataPart? video;
217+
218+
/// Text data.
219+
final String? text;
220+
190221
// ignore: public_member_api_docs
191222
Map<String, dynamic> toJson() => {
192223
'realtime_input': {
193-
'media_chunks':
194-
mediaChunks?.map((e) => e.toMediaChunkJson()).toList(),
224+
if (mediaChunks != null)
225+
'media_chunks':
226+
mediaChunks?.map((e) => e.toMediaChunkJson()).toList(),
227+
if (audio != null) 'audio': audio!.toMediaChunkJson(),
228+
if (video != null) 'video': video!.toMediaChunkJson(),
229+
if (text != null) 'text': text,
195230
},
196231
};
197232
}

packages/firebase_ai/firebase_ai/lib/src/live_session.dart

Lines changed: 32 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -76,9 +76,40 @@ class LiveSession {
7676
_ws.sink.add(clientJson);
7777
}
7878

79+
/// Sends audio data to the server.
80+
///
81+
/// [audio]: The audio data to send.
82+
Future<void> sendAudio(InlineDataPart audio) async {
83+
_checkWsStatus();
84+
var clientMessage = LiveClientRealtimeInput.audio(audio);
85+
var clientJson = jsonEncode(clientMessage.toJson());
86+
_ws.sink.add(clientJson);
87+
}
88+
89+
/// Sends video data to the server.
90+
///
91+
/// [video]: The video data to send.
92+
Future<void> sendVideo(InlineDataPart video) async {
93+
_checkWsStatus();
94+
var clientMessage = LiveClientRealtimeInput.video(video);
95+
var clientJson = jsonEncode(clientMessage.toJson());
96+
_ws.sink.add(clientJson);
97+
}
98+
99+
/// Sends text data to the server.
100+
///
101+
/// [text]: The text data to send.
102+
Future<void> sendText(String text) async {
103+
_checkWsStatus();
104+
var clientMessage = LiveClientRealtimeInput.text(text);
105+
var clientJson = jsonEncode(clientMessage.toJson());
106+
_ws.sink.add(clientJson);
107+
}
108+
79109
/// Sends realtime input (media chunks) to the server.
80110
///
81111
/// [mediaChunks]: The list of media chunks to send.
112+
@Deprecated('Use sendAudio, sendVideo, or sendText instead')
82113
Future<void> sendMediaChunks({
83114
required List<InlineDataPart> mediaChunks,
84115
}) async {
@@ -95,6 +126,7 @@ class LiveSession {
95126
///
96127
/// Parameters:
97128
/// - [mediaChunkStream]: The stream of [InlineDataPart] objects to send to the server.
129+
@Deprecated('Use sendAudio, sendVideo, or sendText with a stream instead')
98130
Future<void> sendMediaStream(Stream<InlineDataPart> mediaChunkStream) async {
99131
_checkWsStatus();
100132

0 commit comments

Comments
 (0)