
Commit 5aee87f

andrewheard and google-labs-jules[bot] authored and committed

fix(firebaseai): Fix usageMetadata.thoughtsTokenCount (firebase#17608)

Co-authored-by: google-labs-jules[bot] <161369871+google-labs-jules[bot]@users.noreply.github.com>

1 parent e1a1a28 · commit 5aee87f

File tree

2 files changed (+46, -6 lines)


packages/firebase_ai/firebase_ai/lib/src/api.dart

Lines changed: 12 additions & 6 deletions
@@ -1299,6 +1299,10 @@ UsageMetadata _parseUsageMetadata(Object jsonObject) {
     {'totalTokenCount': final int totalTokenCount} => totalTokenCount,
     _ => null,
   };
+  final thoughtsTokenCount = switch (jsonObject) {
+    {'thoughtsTokenCount': final int thoughtsTokenCount} => thoughtsTokenCount,
+    _ => null,
+  };
   final promptTokensDetails = switch (jsonObject) {
     {'promptTokensDetails': final List<Object?> promptTokensDetails} =>
       promptTokensDetails.map(_parseModalityTokenCount).toList(),

@@ -1309,12 +1313,14 @@ UsageMetadata _parseUsageMetadata(Object jsonObject) {
       candidatesTokensDetails.map(_parseModalityTokenCount).toList(),
     _ => null,
   };
-  return UsageMetadata._(
-      promptTokenCount: promptTokenCount,
-      candidatesTokenCount: candidatesTokenCount,
-      totalTokenCount: totalTokenCount,
-      promptTokensDetails: promptTokensDetails,
-      candidatesTokensDetails: candidatesTokensDetails);
+  return createUsageMetadata(
+    promptTokenCount: promptTokenCount,
+    candidatesTokenCount: candidatesTokenCount,
+    totalTokenCount: totalTokenCount,
+    thoughtsTokenCount: thoughtsTokenCount,
+    promptTokensDetails: promptTokensDetails,
+    candidatesTokensDetails: candidatesTokensDetails,
+  );
 }

 ModalityTokenCount _parseModalityTokenCount(Object? jsonObject) {
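
The new thoughtsTokenCount lookup mirrors the Dart 3 map-pattern switch the parser already uses for the other counters: the case matches only when the key is present with an int value, and everything else (missing key, wrong value type, non-map input) falls through to null. Below is a minimal standalone sketch of that pattern; the helper name parseThoughtsTokenCount is hypothetical and not part of firebase_ai.

// Standalone sketch of the map-pattern switch used in _parseUsageMetadata.
// `parseThoughtsTokenCount` is a hypothetical name, for illustration only.
int? parseThoughtsTokenCount(Object jsonObject) {
  return switch (jsonObject) {
    // Matches only a Map that has 'thoughtsTokenCount' with an int value.
    {'thoughtsTokenCount': final int thoughtsTokenCount} => thoughtsTokenCount,
    // Missing key, wrong value type, or a non-map input all yield null.
    _ => null,
  };
}

void main() {
  print(parseThoughtsTokenCount({'thoughtsTokenCount': 5}));   // 5
  print(parseThoughtsTokenCount({'totalTokenCount': 30}));     // null
  print(parseThoughtsTokenCount({'thoughtsTokenCount': '5'})); // null (not an int)
}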

packages/firebase_ai/firebase_ai/test/api_test.dart

Lines changed: 34 additions & 0 deletions
@@ -650,6 +650,40 @@ void main() {
       expect(response.usageMetadata!.candidatesTokensDetails, hasLength(1));
     });
 
+    group('usageMetadata parsing', () {
+      test('parses usageMetadata when thoughtsTokenCount is set', () {
+        final json = {
+          'usageMetadata': {
+            'promptTokenCount': 10,
+            'candidatesTokenCount': 20,
+            'totalTokenCount': 30,
+            'thoughtsTokenCount': 5,
+          }
+        };
+        final response =
+            VertexSerialization().parseGenerateContentResponse(json);
+        expect(response.usageMetadata, isNotNull);
+        expect(response.usageMetadata!.promptTokenCount, 10);
+        expect(response.usageMetadata!.candidatesTokenCount, 20);
+        expect(response.usageMetadata!.totalTokenCount, 30);
+        expect(response.usageMetadata!.thoughtsTokenCount, 5);
+      });
+
+      test('parses usageMetadata when thoughtsTokenCount is missing', () {
+        final json = {
+          'usageMetadata': {
+            'promptTokenCount': 10,
+            'candidatesTokenCount': 20,
+            'totalTokenCount': 30,
+          }
+        };
+        final response =
+            VertexSerialization().parseGenerateContentResponse(json);
+        expect(response.usageMetadata, isNotNull);
+        expect(response.usageMetadata!.thoughtsTokenCount, isNull);
+      });
+    });
+
     group('groundingMetadata parsing', () {
       test('parses valid response with full grounding metadata', () {
         final jsonResponse = {
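
For context, a hedged consumer-side sketch of what the fix surfaces. It assumes the firebase_ai public entry points (FirebaseAI.googleAI(), generativeModel(), generateContent(), Content.text()) and that Firebase has already been initialized; 'gemini-2.5-flash' is a placeholder model id. With this change, usageMetadata.thoughtsTokenCount is populated whenever the backend reports thinking tokens and stays null otherwise (previously it was never parsed, so it was always null).

import 'package:firebase_ai/firebase_ai.dart';

// Usage sketch only: entry-point names are assumed from the firebase_ai
// package, and the model id is a placeholder.
Future<void> logTokenUsage() async {
  final model =
      FirebaseAI.googleAI().generativeModel(model: 'gemini-2.5-flash');
  final response = await model.generateContent([Content.text('Hello!')]);

  final usage = response.usageMetadata;
  print('prompt tokens:    ${usage?.promptTokenCount}');
  print('candidate tokens: ${usage?.candidatesTokenCount}');
  print('thought tokens:   ${usage?.thoughtsTokenCount}'); // null if not reported
  print('total tokens:     ${usage?.totalTokenCount}');
}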
