author    Anuj Solanki <anuj603362@gmail.com>              2024-09-29 21:31:03 +0530
committer Jan-Simon Moeller <jsmoeller@linuxfoundation.org>  2024-10-07 10:52:41 +0000
commit    f870bbe3c49d421ff8ea561752b3b0a38ad04e96 (patch)
tree      e6607974a3ea6591f622a3ebe8010561b0a6ad26 /lib/presentation/common_widget/voice_assistant_button.dart
parent    29ae7d2d9e04bd8e3a7d37dcfa87a02dd1ab385f (diff)
Integrate voice assistant into flutter-ics-homescreen
- Implement a voice-agent client that connects to agl-service-voiceagent for command execution and wake-word detection.
- Add a setting tile on the settings page for configuring voice assistant settings.
- Add toggle buttons for wake-word mode, online mode, overlay, and speech-to-text model in the voice assistant settings.
- Add a button on the homepage to start the voice assistant.
- Update gRPC protos to retrieve online-mode status from the voice service.
- Make the online-mode tile conditional in the voice-assistant settings, removing it from the UI if not enabled in the service.
- Automatically hide the overlay 3 seconds after command execution.

Bug-AGL: SPEC-5200
Change-Id: I4efaaf16ebc570b28816dc7203364efe2b658c2e
Signed-off-by: Anuj Solanki <anuj603362@gmail.com>
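The 3-second auto-hide is driven from the state side: the widget in this diff only watches the overlay flag and pops the bottom sheet when the flag clears. A minimal sketch of how the notifier could flip that flag after a command completes — the Timer wiring here is an assumption, while updateCommandResponse and toggleShowOverlay are the notifier methods this diff actually calls:

    import 'dart:async';

    Timer? _overlayHideTimer;

    // Hypothetical notifier-side hook: publish the response, then clear the
    // overlay flag three seconds later so the button widget pops the sheet.
    void onCommandExecuted(String response) {
      updateCommandResponse(response);
      _overlayHideTimer?.cancel(); // restart the countdown if a new response lands
      _overlayHideTimer = Timer(const Duration(seconds: 3), () {
        toggleShowOverlay(false);
      });
    }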
Diffstat (limited to 'lib/presentation/common_widget/voice_assistant_button.dart')
-rw-r--r--  lib/presentation/common_widget/voice_assistant_button.dart  214
1 file changed, 214 insertions(+), 0 deletions(-)
diff --git a/lib/presentation/common_widget/voice_assistant_button.dart b/lib/presentation/common_widget/voice_assistant_button.dart
new file mode 100644
index 0000000..2a82a0a
--- /dev/null
+++ b/lib/presentation/common_widget/voice_assistant_button.dart
@@ -0,0 +1,214 @@
+import 'package:flutter_ics_homescreen/export.dart';
+
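+/// Home screen button that starts the voice assistant and displays a
+/// bottom-sheet overlay with the recognized command and its response.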
+class VoiceAssistantButton extends ConsumerStatefulWidget {
+  const VoiceAssistantButton({super.key});
+
+  @override
+  ConsumerState<VoiceAssistantButton> createState() =>
+      _VoiceAssistantButtonState();
+}
+
+class _VoiceAssistantButtonState extends ConsumerState<VoiceAssistantButton>
+    with SingleTickerProviderStateMixin {
+  bool _showOverlay = false;
+  late AnimationController _animationController;
+  late Animation<double> _pulseAnimation;
+  int overlayLock = 0; // 0 = no overlay shown, 1 = bottom sheet is open
+
+  @override
+  void initState() {
+    super.initState();
+    _animationController = AnimationController(
+      vsync: this,
+      duration: const Duration(milliseconds: 700),
+    )..stop(); // Stay idle until the assistant starts listening.
+
+    // Gentle 1.0 -> 1.05 scale pulse applied to the mic icon while recording.
+    _pulseAnimation = Tween<double>(begin: 1.0, end: 1.05).animate(
+      CurvedAnimation(parent: _animationController, curve: Curves.easeInOut),
+    );
+  }
+
+  @override
+  void dispose() {
+    _animationController.dispose();
+    super.dispose();
+  }
+
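+  // Tapping clears the previous command/response, flips the pressed state,
+  // and, when toggled on, asks the gRPC client to start the voice assistant.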
+  void _onTap() {
+    ref.read(voiceAssistantStateProvider.notifier).updateCommandResponse("");
+    ref.read(voiceAssistantStateProvider.notifier).updateCommand("");
+    final bool state =
+        ref.read(voiceAssistantStateProvider.notifier).toggleButtonPressed();
+    if (state) {
+      final voiceAgentClient = ref.read(voiceAgentClientProvider);
+      voiceAgentClient.startVoiceAssistant();
+    }
+  }
+
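+  // Presents the assistant overlay as a modal bottom sheet and keeps it in
+  // sync with the recording / processing state from the provider.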
+  void _showAssistantPopup(BuildContext context) {
+    showModalBottomSheet(
+      context: context,
+      isScrollControlled: true,
+      backgroundColor: Colors.transparent,
+      builder: (context) {
+        return Consumer(
+          builder: (context, ref, child) {
+            final String? command = ref.watch(
+                voiceAssistantStateProvider.select((value) => value.command));
+            final String? commandResponse = ref.watch(voiceAssistantStateProvider
+                .select((value) => value.commandResponse));
+            final bool isRecording = ref.watch(voiceAssistantStateProvider
+                .select((value) => value.isRecording));
+            final bool isProcessing = ref.watch(voiceAssistantStateProvider
+                .select((value) => value.isCommandProcessing));
+
+            // Pulse the mic icon only while audio is being recorded.
+            if (isRecording) {
+              _animationController.repeat(reverse: true);
+            } else {
+              _animationController.stop();
+            }
+
+            return Container(
+              height: MediaQuery.of(context).size.height * 0.35,
+              decoration: const BoxDecoration(
+                image: DecorationImage(
+                  image: AssetImage('assets/VoiceAssistantBottomSheetBg.png'),
+                  fit: BoxFit.cover,
+                ),
+              ),
+              child: Padding(
+                padding: const EdgeInsets.fromLTRB(30, 0, 40, 0),
+                child: Column(
+                  mainAxisAlignment: MainAxisAlignment.end,
+                  crossAxisAlignment: CrossAxisAlignment.center,
+                  children: [
+                    // Idle: show the last command and its response.
+                    if (!isRecording && !isProcessing)
+                      Column(
+                        crossAxisAlignment: CrossAxisAlignment.center,
+                        children: [
+                          Padding(
+                            padding: const EdgeInsets.symmetric(horizontal: 16.0),
+                            child: Text(
+                              command ?? "No Command Detected",
+                              textAlign: TextAlign.center,
+                              style: const TextStyle(
+                                color: Colors.white70,
+                                fontSize: 43,
+                                fontWeight: FontWeight.w400,
+                              ),
+                            ),
+                          ),
+                          SizedBox(
+                            height: MediaQuery.of(context).size.height * 0.03,
+                          ),
+                          Padding(
+                            padding: const EdgeInsets.symmetric(horizontal: 16.0),
+                            child: Text(
+                              commandResponse ?? "No Response",
+                              textAlign: TextAlign.center,
+                              style: const TextStyle(
+                                color: Color.fromRGBO(41, 95, 248, 1),
+                                fontSize: 43,
+                                fontWeight: FontWeight.w800,
+                              ),
+                            ),
+                          ),
+                          SizedBox(
+                            height: MediaQuery.of(context).size.height * 0.02,
+                          ),
+                        ],
+                      ),
+                    // Recording: show the pulsing mic icon.
+                    if (isRecording)
+                      Column(
+                        children: [
+                          const Text(
+                            "Listening...",
+                            style: TextStyle(
+                              color: Colors.white70,
+                              fontSize: 43,
+                              fontWeight: FontWeight.w400,
+                            ),
+                          ),
+                          SizedBox(
+                            height: MediaQuery.of(context).size.height * 0.02,
+                          ),
+                          ScaleTransition(
+                            scale: _pulseAnimation, // Apply the pulse animation here.
+                            child: SvgPicture.asset(
+                              'assets/VoiceControlButton.svg',
+                              fit: BoxFit.cover,
+                              semanticsLabel: 'Voice Assistant',
+                            ),
+                          ),
+                        ],
+                      ),
+                    // Processing: show the loading animation.
+                    if (!isRecording && isProcessing)
+                      Column(
+                        children: [
+                          const Text(
+                            "Processing...",
+                            style: TextStyle(
+                              color: Colors.white70,
+                              fontSize: 43,
+                              fontWeight: FontWeight.w400,
+                            ),
+                          ),
+                          SizedBox(
+                            height: MediaQuery.of(context).size.height * 0.05,
+                          ),
+                          Lottie.asset(
+                            'animations/LoadingAnimation.json',
+                            fit: BoxFit.cover,
+                            repeat: true,
+                          ),
+                        ],
+                      ),
+                    SizedBox(
+                      height: MediaQuery.of(context).size.height * 0.035,
+                    ),
+                  ],
+                ),
+              ),
+            );
+          },
+        );
+      },
+    ).whenComplete(() {
+      // Reset state once the sheet is dismissed so the next session starts clean.
+      ref.read(voiceAssistantStateProvider.notifier).updateCommandResponse(null);
+      ref.read(voiceAssistantStateProvider.notifier).updateCommand(null);
+      ref.read(voiceAssistantStateProvider.notifier).toggleShowOverlay(false);
+      overlayLock = 0;
+    });
+  }
+
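+  // The sheet is opened from a post-frame callback so it is never shown
+  // during build; overlayLock ensures it is scheduled at most once per
+  // state change.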
+  @override
+  Widget build(BuildContext context) {
+    _showOverlay = ref.watch(
+        voiceAssistantStateProvider.select((value) => value.showOverLay));
+
+    if (_showOverlay) {
+      WidgetsBinding.instance.addPostFrameCallback((_) {
+        if (overlayLock == 0) {
+          overlayLock = 1;
+          _showAssistantPopup(context);
+        }
+      });
+    } else if (overlayLock == 1) {
+      // The provider cleared the overlay flag (e.g. the 3-second auto-hide),
+      // so dismiss the bottom sheet that is currently on screen.
+      overlayLock = 0;
+      Navigator.of(context).pop();
+    }
+
+    final String svgPath = ref.watch(voiceAssistantStateProvider
+            .select((value) => value.buttonPressed))
+        ? 'assets/VoiceAssistantActive.svg'
+        : 'assets/VoiceAssistantEnabled.svg';
+
+    return Padding(
+      padding: const EdgeInsets.only(left: 8),
+      child: GestureDetector(
+        onTap: _onTap,
+        child: Container(
+          padding: EdgeInsets.zero,
+          child: SvgPicture.asset(
+            svgPath,
+            fit: BoxFit.cover,
+            semanticsLabel: 'Voice Assistant',
+          ),
+        ),
+      ),
+    );
+  }
+}
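For context, a minimal, hypothetical way to place the button on the home screen — the actual layout in flutter-ics-homescreen may differ, and HomeBar is not part of this change:

    // Hypothetical host widget; relies on the same app-wide export.dart
    // import used above for Flutter and widget symbols.
    class HomeBar extends StatelessWidget {
      const HomeBar({super.key});

      @override
      Widget build(BuildContext context) {
        return const Row(
          mainAxisAlignment: MainAxisAlignment.end,
          children: [
            VoiceAssistantButton(), // opens the assistant overlay on tap
          ],
        );
      }
    }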