Skip to content

Commit 69cd2a6

Browse files
authored
feat(vertexai): Live API breaking changes (#17299)
* Add a new response api * make vertex android app gradle working * Some more update post release * remove unspecified modality * breaking api update * add todo for server content * fix an error of turncomplete and interrupted null * update to pass analyzer * ignore the documentation
1 parent ec1e6a5 commit 69cd2a6

File tree

13 files changed

+127
-112
lines changed

13 files changed

+127
-112
lines changed

packages/firebase_vertexai/firebase_vertexai/example/android/app/build.gradle

+13-12
Original file line numberDiff line numberDiff line change
@@ -1,3 +1,12 @@
1+
plugins {
2+
id "com.android.application"
3+
// START: FlutterFire Configuration
4+
id 'com.google.gms.google-services'
5+
// END: FlutterFire Configuration
6+
id "kotlin-android"
7+
id "dev.flutter.flutter-gradle-plugin"
8+
}
9+
110
def localProperties = new Properties()
211
def localPropertiesFile = rootProject.file('local.properties')
312
if (localPropertiesFile.exists()) {
@@ -6,11 +15,6 @@ if (localPropertiesFile.exists()) {
615
}
716
}
817

9-
def flutterRoot = localProperties.getProperty('flutter.sdk')
10-
if (flutterRoot == null) {
11-
throw new GradleException("Flutter SDK not found. Define location with flutter.sdk in the local.properties file.")
12-
}
13-
1418
def flutterVersionCode = localProperties.getProperty('flutter.versionCode')
1519
if (flutterVersionCode == null) {
1620
flutterVersionCode = '1'
@@ -21,20 +25,14 @@ if (flutterVersionName == null) {
2125
flutterVersionName = '1.0'
2226
}
2327

24-
apply plugin: 'com.android.application'
25-
// START: FlutterFire Configuration
26-
apply plugin: 'com.google.gms.google-services'
27-
// END: FlutterFire Configuration
28-
apply from: "$flutterRoot/packages/flutter_tools/gradle/flutter.gradle"
29-
3028
android {
3129
namespace "com.example.example"
3230

3331
compileSdk 35
3432

3533
defaultConfig {
3634
applicationId "com.example.example"
37-
minSdk 21
35+
minSdk 23
3836
targetSdk 33
3937
versionCode flutterVersionCode.toInteger()
4038
versionName flutterVersionName
@@ -51,6 +49,9 @@ android {
5149
signingConfig signingConfigs.debug
5250
}
5351
}
52+
kotlinOptions {
53+
jvmTarget = '1.8' // Or '11'
54+
}
5455
}
5556

5657
flutter {

packages/firebase_vertexai/firebase_vertexai/example/android/app/src/main/AndroidManifest.xml

+4-1
Original file line numberDiff line numberDiff line change
@@ -2,7 +2,8 @@
22
<application
33
android:label="example"
44
android:name="${applicationName}"
5-
android:icon="@mipmap/ic_launcher">
5+
android:icon="@mipmap/ic_launcher"
6+
android:usesCleartextTraffic="true">
67
<activity
78
android:name=".MainActivity"
89
android:exported="true"
@@ -44,4 +45,6 @@
4445
<uses-permission android:name="android.permission.RECORD_AUDIO"/>
4546
<uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE"/>
4647
<uses-permission android:name="android.permission.READ_EXTERNAL_STORAGE"/>
48+
<uses-permission android:name="android.permission.INTERNET" />
49+
<uses-permission android:name="android.permission.ACCESS_NETWORK_STATE" />
4750
</manifest>

packages/firebase_vertexai/firebase_vertexai/example/android/build.gradle

-14
Original file line numberDiff line numberDiff line change
@@ -1,17 +1,3 @@
1-
buildscript {
2-
repositories {
3-
google()
4-
mavenCentral()
5-
}
6-
7-
dependencies {
8-
classpath 'com.android.tools.build:gradle:8.1.2'
9-
// START: FlutterFire Configuration
10-
classpath 'com.google.gms:google-services:4.4.0'
11-
// END: FlutterFire Configuration
12-
}
13-
}
14-
151
allprojects {
162
repositories {
173
google()

packages/firebase_vertexai/firebase_vertexai/example/android/gradle/wrapper/gradle-wrapper.properties

+1-1
Original file line numberDiff line numberDiff line change
@@ -2,4 +2,4 @@ distributionBase=GRADLE_USER_HOME
22
distributionPath=wrapper/dists
33
zipStoreBase=GRADLE_USER_HOME
44
zipStorePath=wrapper/dists
5-
distributionUrl=https\://services.gradle.org/distributions/gradle-7.6.3-all.zip
5+
distributionUrl=https\://services.gradle.org/distributions/gradle-8.5-all.zip

packages/firebase_vertexai/firebase_vertexai/example/android/settings.gradle

+2-3
Original file line numberDiff line numberDiff line change
@@ -5,10 +5,9 @@ pluginManagement {
55
def flutterSdkPath = properties.getProperty("flutter.sdk")
66
assert flutterSdkPath != null, "flutter.sdk not set in local.properties"
77
return flutterSdkPath
8-
}
9-
settings.ext.flutterSdkPath = flutterSdkPath()
8+
}()
109

11-
includeBuild("${settings.ext.flutterSdkPath}/packages/flutter_tools/gradle")
10+
includeBuild("$flutterSdkPath/packages/flutter_tools/gradle")
1211

1312
repositories {
1413
google()

packages/firebase_vertexai/firebase_vertexai/example/lib/main.dart

+6-3
Original file line numberDiff line numberDiff line change
@@ -17,6 +17,9 @@ import 'package:firebase_auth/firebase_auth.dart';
1717
import 'package:firebase_vertexai/firebase_vertexai.dart';
1818
import 'package:flutter/material.dart';
1919

20+
// Import this after the file is generated through flutterfire_cli.
21+
// import 'package:vertex_ai_example/firebase_options.dart';
22+
2023
import 'pages/chat_page.dart';
2124
import 'pages/audio_page.dart';
2225
import 'pages/function_calling_page.dart';
@@ -28,11 +31,11 @@ import 'pages/document.dart';
2831
import 'pages/video_page.dart';
2932
import 'pages/bidi_page.dart';
3033

31-
// REQUIRED if you want to run on Web
32-
const FirebaseOptions? options = null;
33-
3434
void main() async {
3535
WidgetsFlutterBinding.ensureInitialized();
36+
// Enable this line instead once you have firebase_options.dart generated and
37+
// imported through flutterfire_cli.
38+
// await Firebase.initializeApp(options: DefaultFirebaseOptions.currentPlatform);
3639
await Firebase.initializeApp();
3740
await FirebaseAuth.instance.signInAnonymously();
3841

packages/firebase_vertexai/firebase_vertexai/example/lib/pages/bidi_page.dart

+15-19
Original file line numberDiff line numberDiff line change
@@ -59,7 +59,7 @@ class _BidiPageState extends State<BidiPage> {
5959
super.initState();
6060

6161
final config = LiveGenerationConfig(
62-
speechConfig: SpeechConfig(voice: Voice.fenrir),
62+
speechConfig: SpeechConfig(voiceName: 'Fenrir'),
6363
responseModalities: [
6464
ResponseModalities.audio,
6565
],
@@ -328,25 +328,21 @@ class _BidiPageState extends State<BidiPage> {
328328
}
329329
}
330330

331-
Future<void> _handleLiveServerMessage(LiveServerMessage response) async {
332-
if (response is LiveServerContent && response.modelTurn != null) {
333-
await _handleLiveServerContent(response);
334-
}
335-
336-
if (response is LiveServerContent &&
337-
response.turnComplete != null &&
338-
response.turnComplete!) {
339-
await _handleTurnComplete();
340-
}
341-
342-
if (response is LiveServerContent &&
343-
response.interrupted != null &&
344-
response.interrupted!) {
345-
log('Interrupted: $response');
346-
}
331+
Future<void> _handleLiveServerMessage(LiveServerResponse response) async {
332+
final message = response.message;
347333

348-
if (response is LiveServerToolCall && response.functionCalls != null) {
349-
await _handleLiveServerToolCall(response);
334+
if (message is LiveServerContent) {
335+
if (message.modelTurn != null) {
336+
await _handleLiveServerContent(message);
337+
}
338+
if (message.turnComplete != null && message.turnComplete!) {
339+
await _handleTurnComplete();
340+
}
341+
if (message.interrupted != null && message.interrupted!) {
342+
log('Interrupted: $response');
343+
}
344+
} else if (message is LiveServerToolCall && message.functionCalls != null) {
345+
await _handleLiveServerToolCall(message);
350346
}
351347
}
352348

packages/firebase_vertexai/firebase_vertexai/example/lib/utils/audio_recorder.dart

+4
Original file line numberDiff line numberDiff line change
@@ -137,6 +137,10 @@ class InMemoryAudioRecorder {
137137
encoder: _encoder,
138138
sampleRate: 16000,
139139
numChannels: 1,
140+
androidConfig: const AndroidRecordConfig(
141+
muteAudio: true,
142+
audioSource: AndroidAudioSource.mic,
143+
),
140144
);
141145
final devs = await _recorder.listInputDevices();
142146
debugPrint(devs.toString());

packages/firebase_vertexai/firebase_vertexai/example/macos/Runner.xcodeproj/xcshareddata/xcschemes/Runner.xcscheme

+1
Original file line numberDiff line numberDiff line change
@@ -59,6 +59,7 @@
5959
ignoresPersistentStateOnLaunch = "NO"
6060
debugDocumentVersioning = "YES"
6161
debugServiceExtension = "internal"
62+
enableGPUValidationMode = "1"
6263
allowLocationSimulation = "YES">
6364
<BuildableProductRunnable
6465
runnableDebuggingMode = "0">

packages/firebase_vertexai/firebase_vertexai/lib/firebase_vertexai.dart

+2-2
Original file line numberDiff line numberDiff line change
@@ -72,11 +72,11 @@ export 'src/live_api.dart'
7272
show
7373
LiveGenerationConfig,
7474
SpeechConfig,
75-
Voice,
7675
ResponseModalities,
7776
LiveServerMessage,
7877
LiveServerContent,
7978
LiveServerToolCall,
80-
LiveServerToolCallCancellation;
79+
LiveServerToolCallCancellation,
80+
LiveServerResponse;
8181
export 'src/live_session.dart' show LiveSession;
8282
export 'src/schema.dart' show Schema, SchemaType;

packages/firebase_vertexai/firebase_vertexai/lib/src/live_api.dart

+59-29
Original file line numberDiff line numberDiff line change
@@ -15,53 +15,64 @@ import 'api.dart';
1515
import 'content.dart';
1616
import 'error.dart';
1717

18-
/// The available voice options for speech synthesis.
19-
enum Voice {
18+
/// Configuration for a prebuilt voice.
19+
///
20+
/// This class allows specifying a voice by its name.
21+
class PrebuiltVoiceConfig {
2022
// ignore: public_member_api_docs
21-
aoede('Aoede'),
23+
const PrebuiltVoiceConfig({this.voiceName});
2224

25+
/// The voice name to use for speech synthesis.
26+
///
27+
/// See https://cloud.google.com/text-to-speech/docs/chirp3-hd for names and
28+
/// sound demos.
29+
final String? voiceName;
2330
// ignore: public_member_api_docs
24-
charon('Charon'),
31+
Map<String, Object?> toJson() =>
32+
{if (voiceName case final voiceName?) 'voice_name': voiceName};
33+
}
2534

35+
/// Configuration for the voice to be used in speech synthesis.
36+
///
37+
/// This class currently supports using a prebuilt voice configuration.
38+
class VoiceConfig {
2639
// ignore: public_member_api_docs
27-
fenrir('Fenrir'),
40+
VoiceConfig({this.prebuiltVoiceConfig});
2841

2942
// ignore: public_member_api_docs
30-
kore('Kore'),
31-
43+
final PrebuiltVoiceConfig? prebuiltVoiceConfig;
3244
// ignore: public_member_api_docs
33-
puck('Puck');
34-
35-
const Voice(this._jsonString);
36-
final String _jsonString;
37-
38-
// ignore: public_member_api_docs
39-
String toJson() => _jsonString;
45+
Map<String, Object?> toJson() => {
46+
if (prebuiltVoiceConfig case final prebuiltVoiceConfig?)
47+
'prebuilt_voice_config': prebuiltVoiceConfig.toJson()
48+
};
4049
}
4150

4251
/// Configures speech synthesis settings.
52+
///
53+
/// Allows specifying the desired voice for speech synthesis.
4354
class SpeechConfig {
4455
/// Creates a [SpeechConfig] instance.
4556
///
46-
/// [voice] (optional): The desired voice for speech synthesis.
47-
SpeechConfig({this.voice});
48-
49-
/// The voice to use for speech synthesis.
50-
final Voice? voice;
57+
/// [voiceName] See https://cloud.google.com/text-to-speech/docs/chirp3-hd
58+
/// for names and sound demos.
59+
SpeechConfig({String? voiceName})
60+
: voiceConfig = voiceName != null
61+
? VoiceConfig(
62+
prebuiltVoiceConfig: PrebuiltVoiceConfig(voiceName: voiceName))
63+
: null;
64+
65+
/// The voice config to use for speech synthesis.
66+
final VoiceConfig? voiceConfig;
5167
// ignore: public_member_api_docs
5268
Map<String, Object?> toJson() => {
53-
if (voice case final voice?)
54-
'voice_config': {
55-
'prebuilt_voice_config': {'voice_name': voice.toJson()}
56-
}
69+
if (voiceConfig case final voiceConfig?)
70+
'voice_config': voiceConfig.toJson()
5771
};
5872
}
5973

6074
/// The available response modalities.
6175
enum ResponseModalities {
62-
/// Unspecified response modality.
63-
unspecified('MODALITY_UNSPECIFIED'),
64-
6576
/// Text response modality.
6677
text('TEXT'),
6778

@@ -132,6 +143,7 @@ class LiveServerContent implements LiveServerMessage {
132143
/// [interrupted] (optional): Indicates if the generation was interrupted.
133144
LiveServerContent({this.modelTurn, this.turnComplete, this.interrupted});
134145

146+
// TODO(cynthia): Add accessor for media content
135147
/// The content generated by the model.
136148
final Content? modelTurn;
137149

@@ -176,6 +188,19 @@ class LiveServerToolCallCancellation implements LiveServerMessage {
176188
final List<String>? functionIds;
177189
}
178190

191+
/// A single response chunk received during a live content generation.
192+
///
193+
/// It can contain generated content, function calls to be executed, or
194+
/// instructions to cancel previous function calls, along with the status of the
195+
/// ongoing generation.
196+
class LiveServerResponse {
197+
// ignore: public_member_api_docs
198+
LiveServerResponse({required this.message});
199+
200+
/// The server message generated by the live model.
201+
final LiveServerMessage message;
202+
}
203+
179204
/// Represents realtime input from the client in a live stream.
180205
class LiveClientRealtimeInput {
181206
/// Creates a [LiveClientRealtimeInput] instance.
@@ -237,7 +262,7 @@ class LiveClientToolResponse {
237262
};
238263
}
239264

240-
/// Parses a JSON object received from the live server into a [LiveServerMessage].
265+
/// Parses a JSON object received from the live server into a [LiveServerResponse].
241266
///
242267
/// This function handles different types of server messages, including:
243268
/// - Error messages, which result in a [VertexAIException] being thrown.
@@ -275,8 +300,13 @@ class LiveClientToolResponse {
275300
/// - [jsonObject]: The JSON object received from the live server.
276301
///
277302
/// Returns:
278-
/// - A [LiveServerMessage] object representing the parsed message.
279-
LiveServerMessage parseServerMessage(Object jsonObject) {
303+
/// - A [LiveServerResponse] object representing the parsed message.
304+
LiveServerResponse parseServerResponse(Object jsonObject) {
305+
LiveServerMessage message = _parseServerMessage(jsonObject);
306+
return LiveServerResponse(message: message);
307+
}
308+
309+
LiveServerMessage _parseServerMessage(Object jsonObject) {
280310
if (jsonObject case {'error': final Object error}) {
281311
throw parseError(error);
282312
}

packages/firebase_vertexai/firebase_vertexai/lib/src/live_session.dart

+4-4
Original file line numberDiff line numberDiff line change
@@ -33,7 +33,7 @@ class LiveSession {
3333
var jsonString = utf8.decode(message);
3434
var response = json.decode(jsonString);
3535

36-
_messageController.add(parseServerMessage(response));
36+
_messageController.add(parseServerResponse(response));
3737
} catch (e) {
3838
_messageController.addError(e);
3939
}
@@ -45,7 +45,7 @@ class LiveSession {
4545
);
4646
}
4747
final WebSocketChannel _ws;
48-
final _messageController = StreamController<LiveServerMessage>.broadcast();
48+
final _messageController = StreamController<LiveServerResponse>.broadcast();
4949
late StreamSubscription _wsSubscription;
5050

5151
/// Sends content to the server.
@@ -107,10 +107,10 @@ class LiveSession {
107107

108108
/// Receives messages from the server.
109109
///
110-
/// Returns a [Stream] of [LiveServerMessage] objects representing the
110+
/// Returns a [Stream] of [LiveServerResponse] objects representing the
111111
/// messages received from the server. The stream will stops once the server
112112
/// sends turn complete message.
113-
Stream<LiveServerMessage> receive() async* {
113+
Stream<LiveServerResponse> receive() async* {
114114
_checkWsStatus();
115115

116116
await for (final result in _messageController.stream) {

0 commit comments

Comments
 (0)