diff --git a/lib/components/recordingSession/recorder.dart b/lib/components/recordingSession/recorder.dart
index f9d9b5b..b85ca02 100644
--- a/lib/components/recordingSession/recorder.dart
+++ b/lib/components/recordingSession/recorder.dart
@@ -31,6 +31,8 @@ class Recorder extends StatefulWidget {
 
 class _RecorderState extends State {
   bool _isRecording = false;
+  bool _recording = false;
+  Stopwatch stopwatch = new Stopwatch();
   StreamSubscription _recorderSubscription;
   FlutterSoundRecorder recorderModule;
   String _recorderTxt = '00:00:00';
@@ -156,7 +158,6 @@ class _RecorderState extends State {
       await recorderModule.stopRecorder();
       final DateTime exportedTime = getExportedTime();
       ws.sendAudioFile(savePath, userId, topic, exportedTime);
-      await cancelRecorderSubscriptions();
       await closeAudioSession();
     } catch (err) {
@@ -172,6 +173,44 @@ class _RecorderState extends State {
     prefs.setString('session', value);
   }
 
+  Future recorderPressed() async {
+    if (!_isRecording) {
+      await showDialog(
+        context: context,
+        builder: (BuildContext context) => MyTopicDialog(
+          onTopicChanged: (String childTopic) {
+            topic = childTopic;
+          },
+        ),
+      );
+      setState(() {
+        _recording = true;
+        stopwatch.reset();
+        stopwatch.start();
+      });
+      while (_recording) {
+        await startRecorder();
+        await Future.delayed(
+            const Duration(seconds: 5), () async => await stopRecorder());
+      }
+    } else {
+      if (_isRecording) {
+        setState(() {
+          _recording = false;
+          stopwatch.stop();
+        });
+        stopRecorder();
+      }
+    }
+  }
+
+  DateTime getExportedTime() {
+    final DateTime now = DateTime.now();
+    final DateTime date = DateTime(
+        now.year, now.month, now.day, now.hour, now.minute, now.second);
+    return date;
+  }
+
   @override
   Widget build(BuildContext context) {
     return Expanded(
@@ -180,22 +219,8 @@ class _RecorderState extends State {
         children: [
           FloatingActionButton(
             heroTag: 'recorder',
-            onPressed: () async {
-              if (!_isRecording) {
-                await showDialog(
-                  context: context,
-                  builder: (BuildContext context) => MyTopicDialog(
-                    onTopicChanged: (String childTopic) {
-                      topic = childTopic;
-                    },
-                  ),
-                );
-                return startRecorder();
-              } else {
-                stopRecorder();
-              }
-            },
-            child: _isRecording
+            onPressed: () async => await recorderPressed(),
+            child: _recording
                 ? Icon(Icons.stop)
                 : Icon(
                     Icons.mic,
@@ -205,7 +230,7 @@ class _RecorderState extends State {
           ),
           Container(
             child: AutoSizeText(
-              _recorderTxt,
+              stopwatch.elapsed.toString().substring(2, 10),
              style: TextStyle(
                fontSize: 22.0,
                color: Colors.black,
@@ -213,7 +238,7 @@ class _RecorderState extends State {
            ),
          ),
          Container(
-            child: _isRecording
+            child: _recording
                ? LinearProgressIndicator(
                    value: 100.0 / 160.0 * (_dbLevel ?? 1) / 100,
                    valueColor: AlwaysStoppedAnimation(Colors.green),
@@ -225,17 +250,4 @@ class _RecorderState extends State {
        ),
      ),
    );
  }
-
-  DateTime getExportedTime() {
-    final DateTime now = DateTime.now();
-    final DateTime date = DateTime(
-      now.year,
-      now.month,
-      now.day,
-      now.hour,
-      now.minute,
-      now.second
-    );
-    return date;
-  }
 }
diff --git a/lib/components/recordingSession/websocket.dart b/lib/components/recordingSession/websocket.dart
index 97871ec..7e854f8 100644
--- a/lib/components/recordingSession/websocket.dart
+++ b/lib/components/recordingSession/websocket.dart
@@ -21,16 +21,11 @@ class MyWebSocket {
   Future sendAudioFile(
       String exportedAudioData, int userID, String topic, DateTime date) async {
     final List audioData = await processAudioFile(exportedAudioData);
-    final IncomingAudioEvent incomingAudioEvent = IncomingAudioEvent(
-      audioData,
-      userID,
-      topic,
-      date.toString()
-    );
+    print(audioData.toString() + '\n'); // array of integers/bytes
+    final IncomingAudioEvent incomingAudioEvent =
+        IncomingAudioEvent(audioData, userID, topic, date.toString());
     channel.sink.add(
-      json.encode(
-        incomingAudioEvent.toJson()
-      ),
+      json.encode(incomingAudioEvent.toJson()),
     );
   }
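
For context on what sendAudioFile puts on the socket: the IncomingAudioEvent class itself is not part of this diff, so the sketch below is only an assumption about its shape, inferred from the constructor call IncomingAudioEvent(audioData, userID, topic, date.toString()) and the json.encode(incomingAudioEvent.toJson()) call above. Field names and JSON keys are guesses; the real model may differ.

// Hypothetical sketch only; field names and JSON keys are assumptions,
// not taken from the repository.
import 'dart:convert';

class IncomingAudioEvent {
  IncomingAudioEvent(this.audioData, this.userID, this.topic, this.date);

  final List<int> audioData; // bytes returned by processAudioFile()
  final int userID;
  final String topic;
  final String date; // DateTime rendered as a string, as in the diff

  Map<String, dynamic> toJson() => <String, dynamic>{
        'audioData': audioData,
        'userID': userID,
        'topic': topic,
        'date': date,
      };
}

void main() {
  // Mirrors what channel.sink.add(json.encode(incomingAudioEvent.toJson()))
  // would send, here with a tiny fake payload.
  final IncomingAudioEvent event = IncomingAudioEvent(
      <int>[82, 73, 70, 70], 1, 'standup', '2021-01-01 10:00:00.000');
  print(json.encode(event.toJson()));
}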