diff options
author | 2024-09-29 21:31:03 +0530 | |
---|---|---|
committer | 2024-10-07 10:52:41 +0000 | |
commit | f870bbe3c49d421ff8ea561752b3b0a38ad04e96 (patch) | |
tree | e6607974a3ea6591f622a3ebe8010561b0a6ad26 /lib | |
parent | 29ae7d2d9e04bd8e3a7d37dcfa87a02dd1ab385f (diff) |
Integrate voice assistant into flutter-ics-homescreen
- Implement voice-agent client to connect with agl-service-voiceagent
for command execution and wake word detection.
- Add a setting tile on the settings page for configuring voice
assistant settings.
- Add toggle buttons for wake word mode, online mode, overlay, and
speech-to-text model in the voice assistant settings.
- Add a button on the homepage to start the voice assistant.
- Update gRPC protos to retrieve online-mode status from the voice
service.
- Make online-mode tile conditional in voice-assistant settings,
removing it from the UI if not enabled in the service.
- Automatically hide the overlay 3 seconds after command execution.
Bug-AGL: SPEC-5200
Change-Id: I4efaaf16ebc570b28816dc7203364efe2b658c2e
Signed-off-by: Anuj Solanki <anuj603362@gmail.com>
Diffstat (limited to 'lib')
14 files changed, 1480 insertions, 4 deletions
diff --git a/lib/data/data_providers/app_config_provider.dart b/lib/data/data_providers/app_config_provider.dart index b82eb54..9e187a9 100644 --- a/lib/data/data_providers/app_config_provider.dart +++ b/lib/data/data_providers/app_config_provider.dart @@ -88,6 +88,21 @@ class MpdConfig { } } +class VoiceAgentConfig { + final String hostname; + final int port; + + static String defaultHostname = 'localhost'; + static int defaultPort = 51053; + + VoiceAgentConfig({required this.hostname,required this.port}); + + static VoiceAgentConfig defaultConfig() { + return VoiceAgentConfig( + hostname: VoiceAgentConfig.defaultHostname, port: VoiceAgentConfig.defaultPort); + } +} + class AppConfig { final bool disableBkgAnimation; final bool plainBackground; @@ -96,6 +111,7 @@ class AppConfig { final RadioConfig radioConfig; final StorageConfig storageConfig; final MpdConfig mpdConfig; + final VoiceAgentConfig voiceAgentConfig; static String configFilePath = '/etc/xdg/AGL/ics-homescreen.yaml'; @@ -106,7 +122,8 @@ class AppConfig { required this.kuksaConfig, required this.radioConfig, required this.storageConfig, - required this.mpdConfig}); + required this.mpdConfig, + required this.voiceAgentConfig}); static KuksaConfig parseKuksaConfig(YamlMap kuksaMap) { try { @@ -238,6 +255,25 @@ class AppConfig { return MpdConfig.defaultConfig(); } } + + static VoiceAgentConfig parseVoiceAgentConfig(YamlMap voiceAgentMap) { + try { + String hostname = VoiceAgentConfig.defaultHostname; + if (voiceAgentMap.containsKey('hostname')) { + hostname = voiceAgentMap['hostname']; + } + + int port = VoiceAgentConfig.defaultPort; + if (voiceAgentMap.containsKey('port')) { + port = voiceAgentMap['port']; + } + + return VoiceAgentConfig(hostname: hostname, port: port); + } catch (_) { + debugPrint("Invalid VoiceAgent configuration, using defaults"); + return VoiceAgentConfig.defaultConfig(); + } + } } final appConfigProvider = Provider((ref) { @@ -281,6 +317,13 @@ final appConfigProvider = 
Provider((ref) { mpdConfig = MpdConfig.defaultConfig(); } + VoiceAgentConfig voiceAgentConfig; + if(yamlMap.containsKey('voiceAgent')){ + voiceAgentConfig = AppConfig.parseVoiceAgentConfig(yamlMap['voiceAgent']); + } else { + voiceAgentConfig = VoiceAgentConfig.defaultConfig(); + } + bool disableBkgAnimation = disableBkgAnimationDefault; if (yamlMap.containsKey('disable-bg-animation')) { var value = yamlMap['disable-bg-animation']; @@ -312,7 +355,8 @@ final appConfigProvider = Provider((ref) { kuksaConfig: kuksaConfig, radioConfig: radioConfig, storageConfig: storageConfig, - mpdConfig: mpdConfig); + mpdConfig: mpdConfig, + voiceAgentConfig: voiceAgentConfig); } catch (_) { return AppConfig( disableBkgAnimation: false, @@ -321,6 +365,7 @@ final appConfigProvider = Provider((ref) { kuksaConfig: KuksaConfig.defaultConfig(), radioConfig: RadioConfig.defaultConfig(), storageConfig: StorageConfig.defaultConfig(), - mpdConfig: MpdConfig.defaultConfig()); + mpdConfig: MpdConfig.defaultConfig(), + voiceAgentConfig: VoiceAgentConfig.defaultConfig()); } }); diff --git a/lib/data/data_providers/app_provider.dart b/lib/data/data_providers/app_provider.dart index 0f7ed0c..64c0e47 100644 --- a/lib/data/data_providers/app_provider.dart +++ b/lib/data/data_providers/app_provider.dart @@ -16,10 +16,14 @@ import 'package:flutter_ics_homescreen/data/data_providers/radio_client.dart'; import 'package:flutter_ics_homescreen/data/data_providers/storage_client.dart'; import 'package:flutter_ics_homescreen/data/data_providers/mpd_client.dart'; import 'package:flutter_ics_homescreen/data/data_providers/play_controller.dart'; +import 'package:flutter_ics_homescreen/data/data_providers/voice_agent_client.dart'; +import 'package:flutter_ics_homescreen/data/data_providers/voice_assistant_notifier.dart'; import 'package:flutter_ics_homescreen/export.dart'; import 'package:flutter_ics_homescreen/data/models/users.dart'; +import '../models/voice_assistant_state.dart'; + enum AppState { home, 
dashboard, @@ -44,7 +48,9 @@ enum AppState { clock, date, time, - year + year, + voiceAssistant, + sttModel, } class AppStateNotifier extends Notifier<AppState> { @@ -73,6 +79,11 @@ final valClientProvider = Provider((ref) { return ValClient(config: config, ref: ref); }); +final voiceAgentClientProvider = Provider((ref){ + VoiceAgentConfig config = ref.watch(appConfigProvider).voiceAgentConfig; + return VoiceAgentClient(config: config, ref: ref); +}); + final appLauncherProvider = Provider((ref) { return AppLauncher(ref: ref); }); @@ -154,3 +165,7 @@ final currentTimeProvider = StateNotifierProvider<CurrentTimeNotifier, DateTime>((ref) { return CurrentTimeNotifier(); }); + + +final voiceAssistantStateProvider = + NotifierProvider<VoiceAssistantStateNotifier, VoiceAssistantState>(VoiceAssistantStateNotifier.new); diff --git a/lib/data/data_providers/voice_agent_client.dart b/lib/data/data_providers/voice_agent_client.dart new file mode 100644 index 0000000..295e138 --- /dev/null +++ b/lib/data/data_providers/voice_agent_client.dart @@ -0,0 +1,312 @@ +import 'dart:async'; +import 'package:flutter_ics_homescreen/data/models/voice_assistant_state.dart'; +import 'package:protos/val_api.dart'; + +import '../../export.dart'; + +class VoiceAgentClient { + final VoiceAgentConfig config; + late ClientChannel _channel; + late VoiceAgentServiceClient _client; + final Ref ref; + StreamSubscription<WakeWordStatus>? 
_wakeWordStatusSubscription; + + VoiceAgentClient({required this.config,required this.ref}) { + // Initialize the client channel without connecting immediately + String host = config.hostname; + int port = config.port; + _channel = ClientChannel( + host, + port: port, + options: const ChannelOptions( + credentials: ChannelCredentials.insecure(), + ), + ); + debugPrint("Connecting to Voice Assistant"); + _client = VoiceAgentServiceClient(_channel); + + } + + Future<ServiceStatus> checkServiceStatus() async { + final empty = Empty(); + try { + final response = await _client.checkServiceStatus(empty); + return response; + } catch (e) { + // Handle the error gracefully, such as returning an error status + return ServiceStatus()..status = false; + } + } + + Stream<WakeWordStatus> detectWakeWord() { + final empty = Empty(); + try { + return _client.detectWakeWord(empty); + } catch (e) { + // Handle the error gracefully, such as returning a default status + return const Stream.empty(); // An empty stream as a placeholder + } + } + + Future<RecognizeResult> recognizeVoiceCommand( + Stream<RecognizeVoiceControl> controlStream) async { + try { + final response = await _client.recognizeVoiceCommand(controlStream); + return response; + } catch (e) { + // Handle the error gracefully, such as returning a default RecognizeResult + return RecognizeResult()..status = RecognizeStatusType.REC_ERROR; + } + } + + Future<RecognizeResult> recognizeTextCommandGrpc( + RecognizeTextControl controlInput) async { + try { + final response = await _client.recognizeTextCommand(controlInput); + return response; + } catch (e) { + // Handle the error gracefully, such as returning a default RecognizeResult + return RecognizeResult()..status = RecognizeStatusType.REC_ERROR; + } + } + + Future<ExecuteResult> executeCommandGrpc(ExecuteInput input) async { + try { + final response = await _client.executeCommand(input); + return response; + } catch (e) { + // Handle the error gracefully, such as 
returning an error status + return ExecuteResult()..status = ExecuteStatusType.EXEC_ERROR; + } + } + + Future<void> shutdown() async { + // await _channel.shutdown(); + } + + // Grpc helper methods + Future<void> startWakeWordDetection() async { + // Capture the state before any async operations + _wakeWordStatusSubscription?.cancel(); + final isWakeWordModeActive = ref.read(voiceAssistantStateProvider.select((value) => value.isWakeWordMode)); + + if (isWakeWordModeActive) { + debugPrint("Wake Word Detection Started"); + } else { + debugPrint("Wake Word Detection Stopped"); + return; + } + _wakeWordStatusSubscription = detectWakeWord().listen( + (response) async { + if (response.status) { + await startVoiceAssistant(); + // Wait for 2-3 seconds and then restart wake word detection + await Future.delayed(const Duration(seconds: 2)); + startWakeWordDetection(); + } + if(!ref.read(voiceAssistantStateProvider.select((value) => value.isWakeWordMode))){ + _wakeWordStatusSubscription?.cancel(); + return; + } + }, + onError: (error) { + }, + cancelOnError: true, + ); + } + + Future<String> startRecording() async { + String streamId = ""; + try { + // Create a RecognizeControl message to start recording + final controlMessage = RecognizeVoiceControl() + ..action = RecordAction.START + ..recordMode = RecordMode + .MANUAL; // You can change this to your desired record mode + + // Create a Stream with the control message + final controlStream = Stream.fromIterable([controlMessage]); + + // Call the gRPC method to start recording + final response = + await recognizeVoiceCommand(controlStream); + + streamId = response.streamId; + } catch (e) { + } + return streamId; + } + + Future<RecognizeResult> stopRecording( + String streamId, String nluModel, String stt,bool isOnlineMode) async { + + try { + NLUModel model = NLUModel.RASA; + if (nluModel == "snips") { + model = NLUModel.SNIPS; + } + STTFramework sttFramework = STTFramework.VOSK; + if (stt == "whisper") { + sttFramework = 
STTFramework.WHISPER; + } + OnlineMode onlineMode = OnlineMode.OFFLINE; + if (isOnlineMode) { + onlineMode = OnlineMode.ONLINE; + } + // Create a RecognizeControl message to stop recording + final controlMessage = RecognizeVoiceControl() + ..action = RecordAction.STOP + ..nluModel = model + ..streamId = + streamId // Use the same stream ID as when starting recording + ..recordMode = RecordMode.MANUAL + ..sttFramework = sttFramework + ..onlineMode = onlineMode; + + + // Create a Stream with the control message + final controlStream = Stream.fromIterable([controlMessage]); + + // Call the gRPC method to stop recording + final response = + await recognizeVoiceCommand(controlStream); + + // Process and store the result + if (response.status == RecognizeStatusType.REC_SUCCESS) { + } else if (response.status == RecognizeStatusType.INTENT_NOT_RECOGNIZED) { + final command = response.command; + debugPrint("Command is : $command"); + } + else { + debugPrint('Failed to process your voice command. Please try again.'); + } + await shutdown(); + return response; + } catch (e) { + // addChatMessage(/**/'Failed to process your voice command. 
Please try again.'); + await shutdown(); + return RecognizeResult()..status = RecognizeStatusType.REC_ERROR; + } + // await voiceAgentClient.shutdown(); + } + + Future<RecognizeResult> recognizeTextCommand(String command, String nluModel) async { + debugPrint("Recognizing Text Command: $command"); + try { + NLUModel model = NLUModel.RASA; + if (nluModel == "snips") { + model = NLUModel.SNIPS; + } + // Create a RecognizeControl message to stop recording + final controlMessage = RecognizeTextControl() + ..textCommand = command + ..nluModel = model; + + // Call the gRPC method to stop recording + final response = + await recognizeTextCommandGrpc(controlMessage); + debugPrint("Response is : $response"); + + // Process and store the result + if (response.status == RecognizeStatusType.REC_SUCCESS) { + // Do nothing + } else if (response.status == RecognizeStatusType.INTENT_NOT_RECOGNIZED) { + final command = response.command; + debugPrint("Command is : $command"); + } else { + debugPrint('Failed to process your voice command. 
Please try again.'); + } + return response; + } catch (e) { + return RecognizeResult()..status = RecognizeStatusType.REC_ERROR; + } + } + + Future<void> executeCommand(RecognizeResult response) async { + try { + // Create an ExecuteInput message using the response from stopRecording + final executeInput = ExecuteInput() + ..intent = response.intent + ..intentSlots.addAll(response.intentSlots); + + // Call the gRPC method to execute the voice command + final execResponse = await executeCommandGrpc(executeInput); + + // Handle the response as needed + if (execResponse.status == ExecuteStatusType.EXEC_SUCCESS) { + final commandResponse = execResponse.response; + ref.read(voiceAssistantStateProvider.notifier).updateCommandResponse(commandResponse); + debugPrint("Command Response is : $commandResponse"); + } else if (execResponse.status == ExecuteStatusType.KUKSA_CONN_ERROR) { + final commandResponse = execResponse.response; + ref.read(voiceAssistantStateProvider.notifier).updateCommandResponse(commandResponse); + } else { + ref.read(voiceAssistantStateProvider.notifier).updateCommandResponse("Sorry, I couldn't execute your command. 
Please try again."); + } + } catch (e) { + } + await shutdown(); + } + + + Future<void> disableOverlay() async{ + await Future.delayed(Duration(seconds: 3)); + ref.read(voiceAssistantStateProvider.notifier).toggleShowOverlay(false); + } + + Future<void> startVoiceAssistant()async { + ref.read(voiceAssistantStateProvider.notifier).updateCommand(null); + ref.read(voiceAssistantStateProvider.notifier).updateCommandResponse(null); + + SttModel stt = ref.read(voiceAssistantStateProvider.select((value)=>value.sttModel)); + bool isOnlineMode = ref.read(voiceAssistantStateProvider.select((value)=>value.isOnlineMode)); + String nluModel = "snips"; + String sttModel = "whisper"; + if(stt == SttModel.vosk){ + sttModel = "vosk"; + } + bool isOverlayEnabled = ref.read(voiceAssistantStateProvider.select((value)=>value.voiceAssistantOverlay)); + bool overlayState = ref.read(voiceAssistantStateProvider.select((value)=>value.showOverLay)); + + String streamId = await startRecording(); + if (streamId.isNotEmpty) { + debugPrint('Recording started. 
Please speak your command.'); + if(isOverlayEnabled){ + if(!overlayState){ + ref.read(voiceAssistantStateProvider.notifier).toggleShowOverlay(true); + } + } + + ref.read(voiceAssistantStateProvider.notifier).updateButtonPressed(true); + ref.read(voiceAssistantStateProvider.notifier).updateIsRecording(); + ref.read(voiceAssistantStateProvider.notifier).updateIsCommandProcessing(false); + + // wait for the recording time + await Future.delayed(Duration(seconds: ref.watch(voiceAssistantStateProvider.select((value)=>value.recordingTime)))); + + ref.read(voiceAssistantStateProvider.notifier).updateIsRecording(); + ref.read(voiceAssistantStateProvider.notifier).updateIsCommandProcessing(true); + + // stop the recording and process the command + RecognizeResult recognizeResult = await stopRecording(streamId, nluModel, sttModel,isOnlineMode); + + ref.read(voiceAssistantStateProvider.notifier).updateCommand(recognizeResult.command); + debugPrint('Recording stopped. Processing the command...'); + + // Execute the command + await executeCommand(recognizeResult); + + ref.read(voiceAssistantStateProvider.notifier).updateIsCommandProcessing(false); + ref.read(voiceAssistantStateProvider.notifier).updateButtonPressed(false); + ref.read(voiceAssistantStateProvider.notifier).updateCommand(null); + ref.read(voiceAssistantStateProvider.notifier).updateCommandResponse(null); + disableOverlay(); + + } else { + debugPrint('Failed to start recording. 
Please try again.'); + } + + } + + +} diff --git a/lib/data/data_providers/voice_assistant_notifier.dart b/lib/data/data_providers/voice_assistant_notifier.dart new file mode 100644 index 0000000..0bc681a --- /dev/null +++ b/lib/data/data_providers/voice_assistant_notifier.dart @@ -0,0 +1,148 @@ +import 'package:protos/val_api.dart'; + +import '../../export.dart'; +import '../models/voice_assistant_state.dart'; + + +class VoiceAssistantStateNotifier extends Notifier<VoiceAssistantState>{ + @override + VoiceAssistantState build() { + return const VoiceAssistantState.initial(); + } + + void updateVoiceAssistantState(VoiceAssistantState newState){ + state = newState; + } + + void updateVoiceAssistantStateWith({ + bool? isWakeWordMode, + bool? isVoiceAssistantEnable, + bool? voiceAssistantOverlay, + bool? isOnlineMode, + bool? isOnlineModeAvailable, + String? wakeWord, + SttModel? sttModel, + String? streamId, + bool? isCommandProcessing, + String? commandProcessingText, + int? recordingTime, + bool? buttonPressed, + bool? isRecording, + String? command, + String? commandResponse, + bool? isWakeWordDetected, + bool? 
showOverLay, + }){ + state = state.copyWith( + isWakeWordMode: isWakeWordMode, + isVoiceAssistantEnable: isVoiceAssistantEnable, + voiceAssistantOverlay: voiceAssistantOverlay, + isOnlineMode: isOnlineMode, + isOnlineModeAvailable: isOnlineModeAvailable, + wakeWord: wakeWord, + sttModel: sttModel, + streamId: streamId, + isCommandProcessing: isCommandProcessing, + commandProcessingText: commandProcessingText, + recordingTime: recordingTime, + buttonPressed: buttonPressed, + isRecording: isRecording, + command: command, + commandResponse: commandResponse, + isWakeWordDetected: isWakeWordDetected, + showOverLay: showOverLay, + ); + } + + void resetToDefaults(){ + state = const VoiceAssistantState.initial(); + } + + void updateWakeWordDetected(bool isWakeWordDetected){ + state = state.copyWith(isWakeWordDetected: isWakeWordDetected); + } + + void toggleShowOverlay(bool value){ + state = state.copyWith(showOverLay: value); + } + + bool toggleWakeWordMode(){ + state = state.copyWith(isWakeWordMode: !state.isWakeWordMode); + return state.isWakeWordMode; + } + + Future<void> toggleVoiceAssistant(ServiceStatus status) async { + bool prevState = state.isVoiceAssistantEnable; + if(!prevState){ + if(status.status){ + state = state.copyWith(isVoiceAssistantEnable: !state.isVoiceAssistantEnable); + state = state.copyWith(wakeWord: status.wakeWord); + state = state.copyWith(isOnlineModeAvailable: status.onlineMode); + } + else{ + debugPrint("Failed to start the Voice Assistant"); + } + } + else{ + state = state.copyWith(isVoiceAssistantEnable: !state.isVoiceAssistantEnable); + if(state.isWakeWordMode){ + state = state.copyWith(isWakeWordMode: false); + } + } + } + + void toggleVoiceAssistantOverlay(){ + state = state.copyWith(voiceAssistantOverlay: !state.voiceAssistantOverlay); + } + + void toggleOnlineMode(){ + state = state.copyWith(isOnlineMode: !state.isOnlineMode); + } + + void updateWakeWord(String wakeWord){ + state = state.copyWith(wakeWord: wakeWord); + } + + void 
updateSttModel(SttModel sttModel){ + state = state.copyWith(sttModel: sttModel); + } + + void updateStreamId(String streamId){ + state = state.copyWith(streamId: streamId); + } + + void updateIsCommandProcessing(bool isCommandProcessing){ + state = state.copyWith(isCommandProcessing: isCommandProcessing); + } + + void updateCommandProcessingText(String commandProcessingText){ + state = state.copyWith(commandProcessingText: commandProcessingText); + } + + void updateRecordingTime(int recordingTime){ + state = state.copyWith(recordingTime: recordingTime); + } + + void updateIsRecording(){ + state = state.copyWith(isRecording: !state.isRecording); + } + + void updateCommand(String? command){ + state = state.copyWith(command: command); + } + + void updateCommandResponse(String? commandResponse){ + state = state.copyWith(commandResponse: commandResponse); + } + + + bool toggleButtonPressed(){ + bool prevState = state.buttonPressed; + state = state.copyWith(buttonPressed: !state.buttonPressed); + return !prevState; + } + + void updateButtonPressed(bool buttonPressed){ + state = state.copyWith(buttonPressed: buttonPressed); + } +}
\ No newline at end of file diff --git a/lib/data/models/voice_assistant_state.dart b/lib/data/models/voice_assistant_state.dart new file mode 100644 index 0000000..f898dd5 --- /dev/null +++ b/lib/data/models/voice_assistant_state.dart @@ -0,0 +1,104 @@ +enum SttModel { + whisper, + vosk +} + +class VoiceAssistantState{ + final bool isWakeWordMode; + final bool isVoiceAssistantEnable; + final bool voiceAssistantOverlay; + final bool isOnlineMode; + final bool isOnlineModeAvailable; + final String wakeWord; + final SttModel sttModel; + final String streamId; + final bool isCommandProcessing; + final String commandProcessingText; + final int recordingTime; + final bool buttonPressed; + final bool isRecording; + final String command; + final String commandResponse; + final bool isWakeWordDetected; + final bool showOverLay; + + + const VoiceAssistantState({ + required this.isWakeWordMode, + required this.isVoiceAssistantEnable, + required this.voiceAssistantOverlay, + required this.isOnlineMode, + required this.isOnlineModeAvailable, + required this.wakeWord, + required this.sttModel, + required this.streamId, + required this.isCommandProcessing, + required this.commandProcessingText, + required this.recordingTime, + required this.buttonPressed, + required this.isRecording, + required this.command, + required this.commandResponse, + required this.isWakeWordDetected, + required this.showOverLay, + }); + + const VoiceAssistantState.initial() + : wakeWord = "hello auto", + sttModel = SttModel.whisper, + streamId = "", + isWakeWordMode = false, + isVoiceAssistantEnable = false, + voiceAssistantOverlay = false, + isOnlineMode = false, + isOnlineModeAvailable = false, + isCommandProcessing = false, + commandProcessingText = "Processing...", + recordingTime = 4, + buttonPressed = false, + isRecording = false, + command = "", + commandResponse = "", + isWakeWordDetected = false, + showOverLay = false; + + VoiceAssistantState copyWith({ + bool? isWakeWordMode, + bool? 
isVoiceAssistantEnable, + bool? voiceAssistantOverlay, + bool? isOnlineMode, + bool? isOnlineModeAvailable, + String? wakeWord, + SttModel? sttModel, + String? streamId, + bool? isCommandProcessing, + String? commandProcessingText, + int? recordingTime, + bool? buttonPressed, + bool? isRecording, + String? command, + String? commandResponse, + bool? isWakeWordDetected, + bool? showOverLay, + }) { + return VoiceAssistantState( + isVoiceAssistantEnable : isVoiceAssistantEnable ?? this.isVoiceAssistantEnable, + isWakeWordMode : isWakeWordMode ?? this.isWakeWordMode, + voiceAssistantOverlay : voiceAssistantOverlay ?? this.voiceAssistantOverlay, + isOnlineMode : isOnlineMode ?? this.isOnlineMode, + isOnlineModeAvailable : isOnlineModeAvailable ?? this.isOnlineModeAvailable, + wakeWord : wakeWord ?? this.wakeWord, + sttModel : sttModel ?? this.sttModel, + streamId : streamId ?? this.streamId, + isCommandProcessing : isCommandProcessing ?? this.isCommandProcessing, + commandProcessingText : commandProcessingText ?? this.commandProcessingText, + recordingTime : recordingTime ?? this.recordingTime, + buttonPressed : buttonPressed ?? this.buttonPressed, + isRecording : isRecording ?? this.isRecording, + command : command ?? this.command, + commandResponse : commandResponse ?? this.commandResponse, + isWakeWordDetected: isWakeWordDetected ?? this.isWakeWordDetected, + showOverLay: showOverLay ?? this.showOverLay, + ); + } +}
\ No newline at end of file diff --git a/lib/presentation/common_widget/voice_assistant_button.dart b/lib/presentation/common_widget/voice_assistant_button.dart new file mode 100644 index 0000000..2a82a0a --- /dev/null +++ b/lib/presentation/common_widget/voice_assistant_button.dart @@ -0,0 +1,214 @@ +import 'package:flutter_ics_homescreen/export.dart'; + +class VoiceAssistantButton extends ConsumerStatefulWidget { + const VoiceAssistantButton({super.key}); + + @override + ConsumerState<VoiceAssistantButton> createState() => _VoiceAssistantButtonState(); +} + +class _VoiceAssistantButtonState extends ConsumerState<VoiceAssistantButton> with SingleTickerProviderStateMixin { + bool _showOverlay = false; + late AnimationController _animationController; + late Animation<double> _pulseAnimation; + int overlayLock = 0; + + @override + void initState() { + super.initState(); + _animationController = AnimationController( + vsync: this, + duration: const Duration(milliseconds: 700), + )..stop(); // Stop the animation initially + + _pulseAnimation = Tween<double>(begin: 1.0, end: 1.05).animate( + CurvedAnimation(parent: _animationController, curve: Curves.easeInOut), + ); + } + + @override + void dispose() { + _animationController.dispose(); + super.dispose(); + } + + void _onTap() { + ref.read(voiceAssistantStateProvider.notifier).updateCommandResponse(""); + ref.read(voiceAssistantStateProvider.notifier).updateCommand(""); + bool state = ref.read(voiceAssistantStateProvider.notifier).toggleButtonPressed(); + if(state){ + var voiceAgentClient = ref.read(voiceAgentClientProvider); + voiceAgentClient.startVoiceAssistant(); + } + } + + void _showAssistantPopup(BuildContext context) { + showModalBottomSheet( + context: context, + isScrollControlled: true, + backgroundColor: Colors.transparent, + builder: (context) { + return Consumer( + builder: (context, ref, child) { + final String? 
command = ref.watch(voiceAssistantStateProvider.select((value) => value.command)); + final String? commandResponse = ref.watch(voiceAssistantStateProvider.select((value) => value.commandResponse)); + final bool isRecording = ref.watch(voiceAssistantStateProvider.select((value)=>value.isRecording)); + final bool isProcessing = ref.watch(voiceAssistantStateProvider.select((value)=>value.isCommandProcessing)); + + if (isRecording) { + _animationController.repeat(reverse: true); + } else { + _animationController.stop(); + } + + return Container( + height: MediaQuery.of(context).size.height * 0.35, + decoration: const BoxDecoration( + image: DecorationImage( + image: AssetImage('assets/VoiceAssistantBottomSheetBg.png'), + fit: BoxFit.cover, + ), + ), + child: Padding( + padding: const EdgeInsets.fromLTRB(30, 0, 40, 0), + child: Column( + mainAxisAlignment: MainAxisAlignment.end, + crossAxisAlignment: CrossAxisAlignment.center, + children: [ + if(!isRecording && !isProcessing) + Column( + crossAxisAlignment: CrossAxisAlignment.center, + children: [ + Padding( + padding: const EdgeInsets.symmetric(horizontal: 16.0), + child: Text( + command ?? "No Command Detected", + textAlign: TextAlign.center, + style: const TextStyle( + color: Colors.white70, + fontSize: 43, + fontWeight: FontWeight.w400, + ), + ), + ), + SizedBox( + height: MediaQuery.of(context).size.height * 0.03 + ), + Padding( + padding: const EdgeInsets.symmetric(horizontal: 16.0), + child: Text( + commandResponse ?? 
"No Response", + textAlign: TextAlign.center, + style: const TextStyle( + color: Color.fromRGBO(41, 95, 248, 1), + fontSize: 43, + fontWeight: FontWeight.w800, + ), + ), + ), + + SizedBox( + height: MediaQuery.of(context).size.height * 0.02, + ), + ], + ), + + if(isRecording) + Column( + children: [ + const Text("Listening...", + style: TextStyle( + color: Colors.white70, + fontSize: 43, + fontWeight: FontWeight.w400, + ), + ), + SizedBox( + height: MediaQuery.of(context).size.height*0.02, + ), + ScaleTransition( + scale: _pulseAnimation, // Apply the pulse animation here + child: SvgPicture.asset( + 'assets/VoiceControlButton.svg', + fit: BoxFit.cover, + semanticsLabel: 'Voice Assistant', + ), + ), + ], + ), + + if(!isRecording && isProcessing) + Column( + children: [ + const Text("Processing...", + style: TextStyle( + color: Colors.white70, + fontSize: 43, + fontWeight: FontWeight.w400, + ), + ), + SizedBox( + height: MediaQuery.of(context).size.height*0.05, + ), + Lottie.asset( + 'animations/LoadingAnimation.json', + fit: BoxFit.cover, + repeat: true, + ), + ], + ), + SizedBox( + height: MediaQuery.of(context).size.height * 0.035, + ), + ], + ), + ), + ); + }, + ); + }, + ).whenComplete(() { + ref.read(voiceAssistantStateProvider.notifier).updateCommandResponse(null); + ref.read(voiceAssistantStateProvider.notifier).updateCommand(null); + ref.read(voiceAssistantStateProvider.notifier).toggleShowOverlay(false); + overlayLock = 0; + }); + } + + @override + Widget build(BuildContext context) { + _showOverlay = ref.watch(voiceAssistantStateProvider.select((value) => value.showOverLay)); + + if(_showOverlay){ + WidgetsBinding.instance!.addPostFrameCallback((_) { + if(overlayLock == 0){ + overlayLock = 1; + _showAssistantPopup(context); + } + }); + } + else if(overlayLock == 1){ + overlayLock = 0; + Navigator.of(context).pop(); + } + + String svgPath = ref.watch(voiceAssistantStateProvider.select((value) => value.buttonPressed)) + ? 
'assets/VoiceAssistantActive.svg' + : 'assets/VoiceAssistantEnabled.svg'; + + return Padding( + padding: const EdgeInsets.only(left: 8), + child: GestureDetector( + onTap: _onTap, + child: Container( + padding: EdgeInsets.zero, + child: SvgPicture.asset( + svgPath, + fit: BoxFit.cover, + semanticsLabel: 'Voice Assistant', + ), + ), + ), + ); + } +} diff --git a/lib/presentation/router/routes/routes.dart b/lib/presentation/router/routes/routes.dart index 328d495..24eab3a 100644 --- a/lib/presentation/router/routes/routes.dart +++ b/lib/presentation/router/routes/routes.dart @@ -3,6 +3,8 @@ import 'package:flutter_ics_homescreen/presentation/screens/settings/settings_sc import 'package:flutter_ics_homescreen/presentation/screens/settings/settings_screens/date_time/time/time_screen.dart'; import '../../../../export.dart'; +import '../../screens/settings/settings_screens/voice_assistant/voice_assistant_screen.dart'; +import '../../screens/settings/settings_screens/voice_assistant/widgets/stt_model/stt_model_screen.dart'; List<Page<dynamic>> onGenerateAppViewPages( AppState state, @@ -57,5 +59,9 @@ List<Page<dynamic>> onGenerateAppViewPages( return [TimePage.page()]; case AppState.year: return [SelectYearPage.page()]; + case AppState.voiceAssistant: + return [VoiceAssistantPage.page()]; + case AppState.sttModel: + return [STTModelPage.page()]; } } diff --git a/lib/presentation/screens/home/home.dart b/lib/presentation/screens/home/home.dart index 0ee52ac..6e3e119 100644 --- a/lib/presentation/screens/home/home.dart +++ b/lib/presentation/screens/home/home.dart @@ -1,4 +1,6 @@ import 'package:flutter_ics_homescreen/export.dart'; + +import '../../common_widget/voice_assistant_button.dart'; // import 'package:media_kit_video/media_kit_video.dart'; final bkgImageProvider = Provider((ref) { @@ -76,6 +78,15 @@ class HomeScreenState extends ConsumerState<HomeScreen> { height: 500, child: const VolumeFanControl()), ), + // Voice Assistant Button + if (appState != 
// ─── lib/presentation/screens/settings/settings_screens/voice_assistant/voice_assistant_screen.dart ───
import 'package:flutter_ics_homescreen/export.dart';
import 'widgets/voice_assistant_content.dart';

/// Settings sub-page hosting all voice-assistant configuration tiles.
class VoiceAssistantPage extends ConsumerWidget {
  const VoiceAssistantPage({super.key});

  static Page<void> page() =>
      const MaterialPage<void>(child: VoiceAssistantPage());

  @override
  Widget build(BuildContext context, WidgetRef ref) {
    return Scaffold(
      body: Column(
        children: [
          CommonTitle(
            title: 'Voice Assistant',
            hasBackButton: true,
            onPressed: () => ref.read(appProvider.notifier).back(),
          ),
          const Expanded(child: VoiceAssistantContent()),
        ],
      ),
    );
  }
}

// ─── lib/presentation/screens/settings/settings_screens/voice_assistant/widgets/stt_model/stt_model_screen.dart ───
import 'package:flutter_ics_homescreen/export.dart';
import '../../../../../../../data/models/voice_assistant_state.dart';

/// Lets the user pick the speech-to-text backend (Whisper AI or Vosk).
class STTModelPage extends ConsumerWidget {
  const STTModelPage({super.key});

  static Page<void> page() =>
      const MaterialPage<void>(child: STTModelPage());

  /// Builds one selectable model row; highlighted when [model] equals
  /// [selected]. Extracted because the Whisper and Vosk tiles were
  /// byte-for-byte duplicates apart from label and enum value.
  Widget _modelTile(
    BuildContext context,
    WidgetRef ref, {
    required SttModel model,
    required SttModel selected,
    required String label,
  }) {
    final bool isSelected = model == selected;
    return Container(
      height: 130,
      decoration: BoxDecoration(
        gradient: LinearGradient(
          begin: Alignment.centerLeft,
          end: Alignment.centerRight,
          stops: isSelected ? const [0, 0.01, 0.8] : const [0.1, 1],
          colors: isSelected
              ? const <Color>[
                  Colors.white,
                  Colors.blue,
                  Color.fromARGB(16, 41, 98, 255),
                ]
              : const <Color>[Colors.black, Colors.black12],
        ),
      ),
      child: ListTile(
        minVerticalPadding: 0.0,
        contentPadding:
            const EdgeInsets.symmetric(horizontal: 16.0, vertical: 40.0),
        leading: Text(label, style: Theme.of(context).textTheme.titleMedium),
        // Checkmark only on the currently active model.
        trailing: isSelected
            ? const Icon(
                Icons.done,
                color: AGLDemoColors.periwinkleColor,
                size: 48,
              )
            : null,
        onTap: () => ref
            .read(voiceAssistantStateProvider.notifier)
            .updateSttModel(model),
      ),
    );
  }

  @override
  Widget build(BuildContext context, WidgetRef ref) {
    final SttModel sttModel = ref
        .watch(voiceAssistantStateProvider.select((value) => value.sttModel));

    return Scaffold(
      body: Column(
        children: [
          CommonTitle(
            title: 'Speech to Text Model',
            hasBackButton: true,
            onPressed: () {
              // Back returns to the voice-assistant settings page, not the
              // generic settings stack.
              context
                  .flow<AppState>()
                  .update((state) => AppState.voiceAssistant);
            },
          ),
          Expanded(
            child: Padding(
              padding:
                  const EdgeInsets.symmetric(vertical: 0, horizontal: 144),
              child: ListView(
                children: [
                  _modelTile(context, ref,
                      model: SttModel.whisper,
                      selected: sttModel,
                      label: 'Whisper AI'),
                  const SizedBox(height: 8),
                  _modelTile(context, ref,
                      model: SttModel.vosk,
                      selected: sttModel,
                      label: 'Vosk'),
                ],
              ),
            ),
          ),
        ],
      ),
    );
  }
}

// ─── lib/presentation/screens/settings/settings_screens/voice_assistant/widgets/voice_assistant_content.dart ───
// NOTE(review): the original imported export.dart twice; deduplicated here.
import 'package:flutter_ics_homescreen/export.dart';
import 'package:flutter_ics_homescreen/presentation/screens/settings/settings_screens/voice_assistant/widgets/voice_assistant_tile.dart';

import '../../../../../../core/utils/helpers.dart';
import '../../../../../../data/models/voice_assistant_state.dart';

/// Scrollable list of voice-assistant toggles (overlay, online mode, wake
/// word) plus the speech-to-text model selector.
///
/// Originally this was declared `@immutable` yet carried four mutable
/// instance fields that were reassigned on every build — the provider values
/// are now plain build-locals, restoring real immutability.
@immutable
class VoiceAssistantContent extends ConsumerWidget {
  const VoiceAssistantContent({super.key});

  @override
  Widget build(BuildContext context, WidgetRef ref) {
    final bool isWakeWordMode = ref.watch(
        voiceAssistantStateProvider.select((value) => value.isWakeWordMode));
    final bool isVoiceAssistantOverlay = ref.watch(voiceAssistantStateProvider
        .select((value) => value.voiceAssistantOverlay));
    final bool isOnlineMode = ref.watch(
        voiceAssistantStateProvider.select((value) => value.isOnlineMode));
    final SttModel sttModel = ref
        .watch(voiceAssistantStateProvider.select((value) => value.sttModel));
    // Online mode tile is hidden entirely when the service did not report it.
    final bool isOnlineModeAvailable = ref.watch(voiceAssistantStateProvider
        .select((value) => value.isOnlineModeAvailable));

    // Toggling wake-word mode ON also kicks off detection on the service.
    void wakeWordCallback() {
      final bool status =
          ref.read(voiceAssistantStateProvider.notifier).toggleWakeWordMode();
      if (status) {
        final voiceAgentClient = ref.read(voiceAgentClientProvider);
        voiceAgentClient.startWakeWordDetection();
      }
    }

    void voiceAssistantOverlayCallback() {
      ref
          .read(voiceAssistantStateProvider.notifier)
          .toggleVoiceAssistantOverlay();
    }

    void onlineModeCallback() {
      ref.read(voiceAssistantStateProvider.notifier).toggleOnlineMode();
    }

    return Column(
      children: [
        Expanded(
          child: ListView(
            padding:
                const EdgeInsets.symmetric(vertical: 50, horizontal: 144),
            children: [
              VoiceAssistantTile(
                icon: Icons.insert_comment_outlined,
                title: "Voice Assistant Overlay",
                hasSwitch: true,
                voidCallback: voiceAssistantOverlayCallback,
                isSwitchOn: isVoiceAssistantOverlay,
              ),
              if (isOnlineModeAvailable)
                VoiceAssistantTile(
                  icon: Icons.cloud_circle,
                  title: "Online Mode",
                  hasSwitch: true,
                  voidCallback: onlineModeCallback,
                  isSwitchOn: isOnlineMode,
                ),
              VoiceAssistantTile(
                icon: Icons.mic_none_outlined,
                title: "Wake Word Mode",
                hasSwitch: true,
                voidCallback: wakeWordCallback,
                isSwitchOn: isWakeWordMode,
              ),
              // Wake-word display row only makes sense while the mode is on.
              if (isWakeWordMode) const WakeWordTile(),
              SttTile(
                title: " Speech To Text",
                sttName:
                    sttModel == SttModel.whisper ? "Whisper AI" : "Vosk",
                hasSwich: true,
                // Was an `async` closure with no `await`; plain closure now.
                voidCallback: () {
                  context.flow<AppState>().update((next) => AppState.sttModel);
                },
              ),
            ],
          ),
        ),
      ],
    );
  }
}

/// Navigation row showing the currently selected STT backend; tapping it
/// opens [STTModelPage].
class SttTile extends ConsumerStatefulWidget {
  final IconData? icon;
  final String title;
  // Display name of the active model ("Whisper AI" / "Vosk").
  final String sttName;
  // NOTE(review): "hasSwich" is a typo but part of the public constructor
  // signature; renaming would break callers, so it is preserved.
  final bool hasSwich;
  final VoidCallback voidCallback;
  final String? image;

  const SttTile({
    super.key,
    this.icon,
    required this.title,
    required this.sttName,
    required this.hasSwich,
    required this.voidCallback,
    this.image,
  });

  @override
  SttTileState createState() => SttTileState();
}

class SttTileState extends ConsumerState<SttTile> {
  @override
  Widget build(BuildContext context) {
    return Column(
      children: [
        Container(
          margin: const EdgeInsets.symmetric(vertical: 8),
          padding: const EdgeInsets.symmetric(vertical: 15),
          decoration: const BoxDecoration(
            gradient: LinearGradient(
                begin: Alignment.centerLeft,
                end: Alignment.centerRight,
                stops: [0.3, 1],
                colors: <Color>[Colors.black, Colors.black12]),
          ),
          child: ListTile(
            contentPadding:
                const EdgeInsets.symmetric(vertical: 17, horizontal: 24),
            leading: const Icon(
              Icons.transcribe_outlined,
              color: AGLDemoColors.periwinkleColor,
              size: 48,
            ),
            title: Text(
              widget.title,
              style: TextStyle(
                  color: AGLDemoColors.periwinkleColor,
                  shadows: [Helpers.dropShadowRegular],
                  fontSize: 40),
            ),
            // Current model name + chevron hinting at the selection page.
            trailing: Row(
              mainAxisSize: MainAxisSize.min,
              mainAxisAlignment: MainAxisAlignment.end,
              children: [
                Text(
                  widget.sttName,
                  style: TextStyle(
                    color: AGLDemoColors.periwinkleColor,
                    shadows: [Helpers.dropShadowRegular],
                    fontSize: 40,
                  ),
                ),
                const SizedBox(width: 24),
                const Icon(
                  Icons.arrow_forward_ios,
                  color: AGLDemoColors.periwinkleColor,
                  size: 48,
                ),
              ],
            ),
            onTap: widget.voidCallback,
          ),
        ),
        const SizedBox(height: 8),
      ],
    );
  }
}

/// Read-only row showing the configured wake word (or "Not Set").
class WakeWordTile extends ConsumerStatefulWidget {
  const WakeWordTile({super.key});

  @override
  WakeWordTileState createState() => WakeWordTileState();
}

class WakeWordTileState extends ConsumerState<WakeWordTile> {
  @override
  Widget build(BuildContext context) {
    // Provider exposes a nullable wake word; fall back to a placeholder.
    final String wakeWord = ref.watch(voiceAssistantStateProvider
            .select((value) => value.wakeWord)) ??
        "Not Set";
    return Column(
      children: [
        Container(
          margin: const EdgeInsets.symmetric(vertical: 8),
          padding: const EdgeInsets.symmetric(vertical: 15),
          decoration: const BoxDecoration(
            gradient: LinearGradient(
                begin: Alignment.centerLeft,
                end: Alignment.centerRight,
                stops: [0.3, 1],
                colors: <Color>[Colors.black, Colors.black12]),
          ),
          child: ListTile(
            contentPadding:
                const EdgeInsets.symmetric(vertical: 17, horizontal: 24),
            leading: const Icon(
              Icons.mic_none_outlined,
              color: AGLDemoColors.periwinkleColor,
              size: 48,
            ),
            title: Text(
              "Wake Word",
              style: TextStyle(
                  color: AGLDemoColors.periwinkleColor,
                  shadows: [Helpers.dropShadowRegular],
                  fontSize: 40),
            ),
            trailing: Row(
              mainAxisSize: MainAxisSize.min,
              mainAxisAlignment: MainAxisAlignment.end,
              children: [
                Text(
                  wakeWord,
                  style: TextStyle(
                    color: AGLDemoColors.periwinkleColor,
                    shadows: [Helpers.dropShadowRegular],
                    fontSize: 40,
                  ),
                ),
                const SizedBox(width: 50),
              ],
            ),
          ),
        ),
        const SizedBox(height: 8),
      ],
    );
  }
}

// ─── lib/presentation/screens/settings/settings_screens/voice_assistant/widgets/voice_assistant_settings_list_tile.dart ───
import 'package:flutter_ics_homescreen/export.dart';
import 'package:protos/val_api.dart';

/// Settings-page entry tile that both enables/disables the voice assistant
/// (via the trailing switch) and navigates to [VoiceAssistantPage] when the
/// assistant is enabled.
class VoiceAssistantSettingsTile extends ConsumerStatefulWidget {
  final IconData icon;
  final String title;
  // NOTE(review): "hasSwich" typo kept — public constructor parameter.
  final bool hasSwich;
  final VoidCallback voidCallback;

  const VoiceAssistantSettingsTile({
    super.key,
    required this.icon,
    required this.title,
    required this.hasSwich,
    required this.voidCallback,
  });

  @override
  VoiceAssistantSettingsTileState createState() =>
      VoiceAssistantSettingsTileState();
}

class VoiceAssistantSettingsTileState
    extends ConsumerState<VoiceAssistantSettingsTile> {
  @override
  Widget build(BuildContext context) {
    // The provider is the single source of truth for the switch position.
    // (The original kept a local copy and wrote the raw toggle value into it
    // with setState, only for this watch to overwrite it on the rebuild the
    // provider update triggers — the redundant local write is removed.)
    final bool isSwitchOn = ref.watch(voiceAssistantStateProvider
        .select((voiceAssistant) => voiceAssistant.isVoiceAssistantEnable));
    return Column(
      children: [
        GestureDetector(
          // Navigation into the settings page only works while enabled.
          onTap: isSwitchOn ? widget.voidCallback : () {},
          child: Container(
            height: 130,
            decoration: BoxDecoration(
              gradient: LinearGradient(
                begin: Alignment.centerLeft,
                end: Alignment.centerRight,
                stops: isSwitchOn ? const [0.3, 1] : const [0.8, 1],
                colors: isSwitchOn
                    ? const <Color>[Colors.black, Colors.black12]
                    : const <Color>[
                        Color.fromARGB(50, 0, 0, 0),
                        Colors.transparent,
                      ],
              ),
            ),
            child: Card(
              color: Colors.transparent,
              elevation: 5,
              child: Padding(
                padding:
                    const EdgeInsets.symmetric(vertical: 0, horizontal: 24),
                child: Row(
                  children: [
                    Icon(
                      widget.icon,
                      color: AGLDemoColors.periwinkleColor,
                      size: 48,
                    ),
                    const SizedBox(width: 24),
                    Expanded(
                      child: Text(
                        widget.title,
                        style: const TextStyle(fontSize: 40),
                      ),
                    ),
                    widget.hasSwich
                        ? Container(
                            width: 126,
                            height: 80,
                            decoration: const ShapeDecoration(
                              color: AGLDemoColors
                                  .gradientBackgroundDarkColor,
                              shape: StadiumBorder(
                                  side: BorderSide(
                                color: Color(0xFF5477D4),
                                width: 4,
                              )),
                            ),
                            child: FittedBox(
                              fit: BoxFit.fill,
                              child: Switch(
                                value: isSwitchOn,
                                onChanged: (bool value) async {
                                  // Confirm the agl-service-voiceagent is
                                  // reachable before flipping the state; the
                                  // provider update rebuilds this tile.
                                  final voiceAgentClient =
                                      ref.read(voiceAgentClientProvider);
                                  final ServiceStatus status =
                                      await voiceAgentClient
                                          .checkServiceStatus();
                                  ref
                                      .read(voiceAssistantStateProvider
                                          .notifier)
                                      .toggleVoiceAssistant(status);
                                },
                                inactiveTrackColor: Colors.transparent,
                                activeTrackColor: Colors.transparent,
                                thumbColor: MaterialStateProperty.all<Color>(
                                    AGLDemoColors.periwinkleColor),
                              ),
                            ),
                          )
                        : const SizedBox(),
                  ],
                ),
              ),
            ),
          ),
        ),
        const SizedBox(height: 8),
      ],
    );
  }
}

// ─── lib/presentation/screens/settings/settings_screens/voice_assistant/widgets/voice_assistant_tile.dart ───
import 'package:flutter_ics_homescreen/export.dart';

/// Generic toggle row used by [VoiceAssistantContent] (overlay, online mode,
/// wake-word mode). The parent owns the real boolean; [isSwitchOn] seeds the
/// visual state and [voidCallback] reports a toggle back to the parent.
class VoiceAssistantTile extends ConsumerStatefulWidget {
  final IconData icon;
  final String title;
  final bool hasSwitch;
  final VoidCallback voidCallback;
  final bool isSwitchOn;

  const VoiceAssistantTile(
      {super.key,
      required this.icon,
      required this.title,
      required this.hasSwitch,
      required this.voidCallback,
      required this.isSwitchOn});

  @override
  ConsumerState<VoiceAssistantTile> createState() =>
      _VoiceAssistantTileState();
}

class _VoiceAssistantTileState extends ConsumerState<VoiceAssistantTile> {
  // Mirrors widget.isSwitchOn each build so provider-driven updates win over
  // the optimistic local flip performed in onChanged.
  bool isSwitchOn = true;

  @override
  Widget build(BuildContext context) {
    isSwitchOn = widget.isSwitchOn;
    return Column(
      children: [
        Container(
          height: 130,
          decoration: BoxDecoration(
            gradient: LinearGradient(
              begin: Alignment.centerLeft,
              end: Alignment.centerRight,
              stops: isSwitchOn ? const [0.3, 1] : const [0.8, 1],
              colors: isSwitchOn
                  ? const <Color>[Colors.black, Colors.black12]
                  : const <Color>[
                      Color.fromARGB(50, 0, 0, 0),
                      Colors.transparent,
                    ],
            ),
          ),
          child: Card(
            color: Colors.transparent,
            elevation: 5,
            child: Padding(
              padding:
                  const EdgeInsets.symmetric(vertical: 0, horizontal: 24),
              child: Row(
                children: [
                  Icon(
                    widget.icon,
                    color: AGLDemoColors.periwinkleColor,
                    size: 48,
                  ),
                  const SizedBox(width: 24),
                  Expanded(
                    child: Text(
                      widget.title,
                      style: const TextStyle(fontSize: 40),
                    ),
                  ),
                  widget.hasSwitch
                      ? Container(
                          width: 126,
                          height: 80,
                          decoration: const ShapeDecoration(
                            color:
                                AGLDemoColors.gradientBackgroundDarkColor,
                            shape: StadiumBorder(
                                side: BorderSide(
                              color: Color(0xFF5477D4),
                              width: 4,
                            )),
                          ),
                          child: FittedBox(
                            fit: BoxFit.fill,
                            child: Switch(
                              value: isSwitchOn,
                              onChanged: (bool value) {
                                // Optimistic flip, then notify the owner.
                                setState(() {
                                  isSwitchOn = value;
                                });
                                widget.voidCallback();
                              },
                              inactiveTrackColor: Colors.transparent,
                              activeTrackColor: Colors.transparent,
                              thumbColor: MaterialStateProperty.all<Color>(
                                  AGLDemoColors.periwinkleColor),
                            ),
                          ),
                        )
                      : const SizedBox(),
                ],
              ),
            ),
          ),
        ),
        const SizedBox(height: 14),
      ],
    );
  }
}
Icons.keyboard_voice_outlined, + title: "Voice Assistant", + hasSwich: true, + voidCallback: (){ + ref.read(appProvider.notifier).update(AppState.voiceAssistant); + } + ), SettingsTile( icon: Icons.person_2_outlined, title: 'Profiles', |