Commit
✨ Added recordedDuration to get recorded duration for iOS and added onRecordingEnded stream
Ujas-Majithiya authored and ujas-m-simformsolutions committed Feb 20, 2023
1 parent 06f327e commit c7e8d06
Showing 5 changed files with 52 additions and 14 deletions.
5 changes: 3 additions & 2 deletions CHANGELOG.md
@@ -1,9 +1,10 @@
## 1.0.2 (Unreleased)
## 1.0.2

- Now, calling `stopAllPlayers` is not mandatory for disposing streams, and it no longer disposes the controller. Streams are disposed along with the last remaining player (they can be re-initialised by creating a new PlayerController).
- Added legacy normalization with this fixed [#144](https://github.com/SimformSolutionsPvtLtd/audio_waveforms/issues/144).
- Added `onRecorderStateChanged` stream to monitor Recorder state changes.
- Added `onCurrentDuration` stream to get latest recorded audio duration
- Added `onCurrentDuration` stream to get latest recorded audio duration.
- Added `onRecordingEnded` stream to get recorded audio file duration. Fixes [#157](https://github.com/SimformSolutionsPvtLtd/audio_waveforms/issues/157).

## 1.0.1

14 changes: 8 additions & 6 deletions README.md
@@ -60,12 +60,13 @@ import 'package:audio_waveforms/audio_waveforms.dart';
### Usage
1. Recording audio
```dart
RecorderController controller = RecorderController(); // Initialise
await controller.record(path: 'path'); // Record (path is optional)
await controller.pause(); // Pause recording
final path = await controller.stop(); // Stop recording and get the path
controller.refresh(); // Refresh waveform to original position
controller.dispose(); // Dispose controller
RecorderController controller = RecorderController(); // Initialise
await controller.record(path: 'path'); // Record (path is optional)
final hasPermission = await controller.checkPermission(); // Check mic permission (also called during record)
await controller.pause(); // Pause recording
final path = await controller.stop(); // Stop recording and get the path
controller.refresh(); // Refresh waveform to original position
controller.dispose(); // Dispose controller
```

2. Use `AudioWaveforms` widget in widget tree
@@ -101,6 +102,7 @@ controller.sampleRate = 44100; // Updating sample rate
controller.bitRate = 48000; // Updating bitrate
controller.onRecorderStateChanged.listen((state){}); // Listening to recorder state changes
controller.onCurrentDuration.listen((duration){}); // Listening to current duration updates
controller.onRecordingEnded.listen((duration){}); // Listening to audio file duration
controller.recordedDuration; // Get recorded audio duration
controller.currentScrolledDuration; // Current duration position notifier
```
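
To tie the snippets above together, here is a minimal Dart sketch (not taken from the repository) that listens to the new `onRecordingEnded` stream and reads `recordedDuration` after stopping. The function name, the literal `'path'`, and the five-second delay are placeholders; only the `RecorderController` members shown in the README above are assumed.

```dart
import 'dart:async';

import 'package:audio_waveforms/audio_waveforms.dart';

Future<void> recordAndReportDuration() async {
  final controller = RecorderController();

  // Emits the recorded file's duration once recording has ended,
  // provided the platform could extract it.
  final StreamSubscription<Duration> subscription =
      controller.onRecordingEnded.listen((duration) {
    print('Recorded ${duration.inMilliseconds} ms');
  });

  await controller.record(path: 'path'); // Path is optional.
  await Future<void>.delayed(const Duration(seconds: 5)); // Record briefly.
  final path = await controller.stop();

  // After stop, the duration is also available synchronously.
  print('Saved $path, duration: ${controller.recordedDuration}');

  await subscription.cancel();
  controller.dispose();
}
```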
33 changes: 29 additions & 4 deletions ios/Classes/AudioRecorder.swift
@@ -6,6 +6,8 @@ public class AudioRecorder: NSObject, AVAudioRecorderDelegate{
var path: String?
var hasPermission: Bool = false
var useLegacyNormalization: Bool = false
var audioUrl: URL?
var recordedDuration: CMTime = CMTime.zero

public func startRecording(_ result: @escaping FlutterResult,_ path: String?,_ encoder : Int?,_ sampleRate : Int?,_ bitRate : Int?,_ fileNameFormat: String, _ useLegacy: Bool?){
useLegacyNormalization = useLegacy ?? false
@@ -41,21 +43,44 @@ public class AudioRecorder: NSObject, AVAudioRecorderDelegate{
try AVAudioSession.sharedInstance().setCategory(AVAudioSession.Category.playAndRecord, options: options)
try AVAudioSession.sharedInstance().setActive(true)

let url = URL(string: self.path!) ?? URL(fileURLWithPath: self.path!)
audioRecorder = try AVAudioRecorder(url: url, settings: bitRate != nil ? settingsWithBitrate as [String : Any] : settings as [String : Any])
audioUrl = URL(fileURLWithPath: self.path!)

if(audioUrl == nil){
result(FlutterError(code: Constants.audioWaveforms, message: "Failed to initialise file URL", details: nil))
return
}
audioRecorder = try AVAudioRecorder(url: audioUrl!, settings: bitRate != nil ? settingsWithBitrate as [String : Any] : settings as [String : Any])

audioRecorder?.delegate = self
audioRecorder?.isMeteringEnabled = true
audioRecorder?.record()
result(true)
} catch {
result(FlutterError(code: "", message: "Failed to start recording", details: nil))
result(FlutterError(code: Constants.audioWaveforms, message: "Failed to start recording", details: nil))
}
}

public func stopRecording(_ result: @escaping FlutterResult) {
audioRecorder?.stop()
if(audioUrl != nil) {
let asset = AVURLAsset(url: audioUrl!)
if #available(iOS 15.0, *) {
Task {
do {
recordedDuration = try await asset.load(.duration)
result([path,Int(recordedDuration.seconds * 1000).description])
} catch let err {
debugPrint(err.localizedDescription)
result([path,CMTime.zero.seconds.description])
}
}
} else {
recordedDuration = asset.duration
result([path,Int(recordedDuration.seconds * 1000).description])
}
} else {
result([path,CMTime.zero.seconds.description])
}
audioRecorder = nil
result(path)
}

public func pauseRecording(_ result: @escaping FlutterResult) {
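With this change, the iOS `stopRecording` reply is a two-element list, `[path, recorded duration in milliseconds as a string]`, rather than the bare path. Purely as an illustration of that contract (the plugin's real handling lives in `recorder_controller.dart`, shown further down), here is a small Dart sketch of decoding such a payload; the helper name is made up.

```dart
// Illustrative only: decodes a reply shaped like the one stopRecording above
// now sends back, e.g. ['path/to/file.m4a', '4230'] (duration in milliseconds).
Duration? durationFromStopReply(List<Object?> reply) {
  if (reply.length < 2) return null;
  final millis = int.tryParse(reply[1]?.toString() ?? '');
  // A non-integer value (such as '0.0' from the failure path) yields null.
  return millis == null ? null : Duration(milliseconds: millis);
}

void main() {
  print(durationFromStopReply(['path/to/file.m4a', '4230'])); // 0:00:04.230000
  print(durationFromStopReply(['path/to/file.m4a', '0.0'])); // null
}
```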
3 changes: 1 addition & 2 deletions lib/src/audio_file_waveforms.dart
@@ -65,8 +65,7 @@ class AudioFileWaveforms extends StatefulWidget {
/// Generates waveforms from an audio file. You can play that audio file using
/// [PlayerController].
///
/// When you play the audio file, another waveform
/// will drawn on top of it to show
/// When you play the audio file, waves change their color according to
/// how much audio has been played and how much is left.
///
/// With seeking gesture enabled, playing audio can be seeked to
11 changes: 11 additions & 0 deletions lib/src/controllers/recorder_controller.dart
@@ -96,11 +96,20 @@ class RecorderController extends ChangeNotifier {
final StreamController<RecorderState> _recorderStateController =
StreamController.broadcast();

final StreamController<Duration> _recordedFileDurationController =
StreamController.broadcast();

/// A stream to monitor changes in the RecorderState. Events are emitted
/// whenever there is a change in the RecorderState.
Stream<RecorderState> get onRecorderStateChanged =>
_recorderStateController.stream;

/// A stream to get the duration of the recording once the audio recorder has
/// been stopped. Events are only emitted if the platform could extract the
/// duration of the audio file when recording ended.
Stream<Duration> get onRecordingEnded =>
_recordedFileDurationController.stream;

/// A class having controls for recording audio and other useful handlers.
///
/// Use [useLegacyNormalization] parameter to use normalization before
@@ -281,6 +290,7 @@ var duration = int.tryParse(audioInfo[1]!);
var duration = int.tryParse(audioInfo[1]!);
if (duration != null) {
recordedDuration = Duration(milliseconds: duration);
_recordedFileDurationController.add(recordedDuration);
}
}
elapsedDuration = Duration.zero;
@@ -415,6 +425,7 @@ _currentScrolledDuration.dispose();
_currentScrolledDuration.dispose();
_currentDurationController.close();
_recorderStateController.close();
_recordedFileDurationController.close();
_recorderTimer?.cancel();
_timer?.cancel();
_timer = null;
