diff --git a/CHANGELOG.md b/CHANGELOG.md index 1cb6be93..dd0a72cc 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,9 +1,10 @@ -## 1.0.2 (Unreleased) +## 1.0.2 - Now, calling `stopAllPlayers` is not mandatory for disposing streams and it will also not dispose controller. With last remaining player they will be disposed (Streams can be re-initialised by creating a new PlayerController). - Added legacy normalization with this fixed [#144](https://github.com/SimformSolutionsPvtLtd/audio_waveforms/issues/144). - Added `onRecorderStateChanged` stream to monitor Recorder state changes. -- Added `onCurrentDuration` stream to get latest recorded audio duration +- Added `onCurrentDuration` stream to get latest recorded audio duration. +- Added `onRecordingEnded` stream to get recorded audio file duration. Fixes [#157](https://github.com/SimformSolutionsPvtLtd/audio_waveforms/issues/157). ## 1.0.1 diff --git a/README.md b/README.md index a7393a31..1ec17015 100644 --- a/README.md +++ b/README.md @@ -60,12 +60,13 @@ import 'package:audio_waveforms/audio_waveforms.dart'; ### Usage 1. Recording audio ```dart -RecorderController controller = RecorderController(); // Initialise -await controller.record(path: 'path'); // Record (path is optional) -await controller.pause(); // Pause recording -final path = await controller.stop(); // Stop recording and get the path -controller.refresh(); // Refresh waveform to original position -controller.dispose(); // Dispose controller +RecorderController controller = RecorderController(); // Initialise +await controller.record(path: 'path'); // Record (path is optional) +final hasPermission = await controller.checkPermission(); // Check mic permission (also called during record) +await controller.pause(); // Pause recording +final path = await controller.stop(); // Stop recording and get the path +controller.refresh(); // Refresh waveform to original position +controller.dispose(); // Dispose controller ``` 2. 
Use `AudioWaveforms` widget in widget tree @@ -101,6 +102,7 @@ controller.sampleRate = 44100; // Updating sam controller.bitRate = 48000; // Updating bitrate controller.onRecorderStateChanged.listen((state){}); // Listening to recorder state changes controller.onCurrentDuration.listen((duration){}); // Listening to current duration updates +controller.onRecordingEnded.listen((duration){}); // Listening to audio file duration controller.recordedDuration; // Get recorded audio duration controller.currentScrolledDuration; // Current duration position notifier ``` diff --git a/ios/Classes/AudioRecorder.swift b/ios/Classes/AudioRecorder.swift index 47adc740..9987ebf6 100644 --- a/ios/Classes/AudioRecorder.swift +++ b/ios/Classes/AudioRecorder.swift @@ -6,6 +6,8 @@ public class AudioRecorder: NSObject, AVAudioRecorderDelegate{ var path: String? var hasPermission: Bool = false var useLegacyNormalization: Bool = false + var audioUrl: URL? + var recordedDuration: CMTime = CMTime.zero public func startRecording(_ result: @escaping FlutterResult,_ path: String?,_ encoder : Int?,_ sampleRate : Int?,_ bitRate : Int?,_ fileNameFormat: String, _ useLegacy: Bool?){ useLegacyNormalization = useLegacy ?? false @@ -41,21 +43,44 @@ public class AudioRecorder: NSObject, AVAudioRecorderDelegate{ try AVAudioSession.sharedInstance().setCategory(AVAudioSession.Category.playAndRecord, options: options) try AVAudioSession.sharedInstance().setActive(true) - let url = URL(string: self.path!) ?? URL(fileURLWithPath: self.path!) - audioRecorder = try AVAudioRecorder(url: url, settings: bitRate != nil ? settingsWithBitrate as [String : Any] : settings as [String : Any]) + audioUrl = URL(fileURLWithPath: self.path!) + + if(audioUrl == nil){ + result(FlutterError(code: Constants.audioWaveforms, message: "Failed to initialise file URL", details: nil)) + } + audioRecorder = try AVAudioRecorder(url: audioUrl!, settings: bitRate != nil ? 
settingsWithBitrate as [String : Any] : settings as [String : Any]) + audioRecorder?.delegate = self audioRecorder?.isMeteringEnabled = true audioRecorder?.record() result(true) } catch { - result(FlutterError(code: "", message: "Failed to start recording", details: nil)) + result(FlutterError(code: Constants.audioWaveforms, message: "Failed to start recording", details: nil)) } } public func stopRecording(_ result: @escaping FlutterResult) { audioRecorder?.stop() + if(audioUrl != nil) { + let asset = AVURLAsset(url: audioUrl!) + if #available(iOS 15.0, *) { + Task { + do { + recordedDuration = try await asset.load(.duration) + result([path,Int(recordedDuration.seconds * 1000).description]) + } catch let err { + debugPrint(err.localizedDescription) + result([path,CMTime.zero.seconds.description]) + } + } + } else { + recordedDuration = asset.duration + result([path,Int(recordedDuration.seconds * 1000).description]) + } + } else { + result([path,CMTime.zero.seconds.description]) + } audioRecorder = nil - result(path) } public func pauseRecording(_ result: @escaping FlutterResult) { diff --git a/lib/src/audio_file_waveforms.dart b/lib/src/audio_file_waveforms.dart index a40340c8..e3049de5 100644 --- a/lib/src/audio_file_waveforms.dart +++ b/lib/src/audio_file_waveforms.dart @@ -65,8 +65,7 @@ class AudioFileWaveforms extends StatefulWidget { /// Generate waveforms from audio file. You play those audio file using /// [PlayerController]. /// - /// When you play the audio file, another waveform - /// will drawn on top of it to show + /// When you play the audio file, waves change their color according to /// how much audio has been played and how much is left. 
/// /// With seeking gesture enabled, playing audio can be seeked to diff --git a/lib/src/controllers/recorder_controller.dart b/lib/src/controllers/recorder_controller.dart index a26c8918..695ba28b 100644 --- a/lib/src/controllers/recorder_controller.dart +++ b/lib/src/controllers/recorder_controller.dart @@ -96,11 +96,20 @@ class RecorderController extends ChangeNotifier { final StreamController _recorderStateController = StreamController.broadcast(); + final StreamController _recordedFileDurationController = + StreamController.broadcast(); + /// A Stream to monitor change in RecorderState. Events are emitted whenever /// there is change in the RecorderState. Stream get onRecorderStateChanged => _recorderStateController.stream; + /// A stream to get duration of recording when audio recorder has + /// been stopped. Events are only emitted if platform could extract the + /// duration of audio file when recording is ended. + Stream get onRecordingEnded => + _recordedFileDurationController.stream; + /// A class having controls for recording audio and other useful handlers. /// /// Use [useLegacyNormalization] parameter to use normalization before @@ -281,6 +290,7 @@ class RecorderController extends ChangeNotifier { var duration = int.tryParse(audioInfo[1]!); if (duration != null) { recordedDuration = Duration(milliseconds: duration); + _recordedFileDurationController.add(recordedDuration); } } elapsedDuration = Duration.zero; @@ -415,6 +425,7 @@ class RecorderController extends ChangeNotifier { _currentScrolledDuration.dispose(); _currentDurationController.close(); _recorderStateController.close(); + _recordedFileDurationController.close(); _recorderTimer?.cancel(); _timer?.cancel(); _timer = null;