diff --git a/CHANGELOG.md b/CHANGELOG.md index b99b7839..c8f9054a 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,4 +1,8 @@ ## Changelogs +- **[3.0.0-beta.1]** + - [iOS] + * Codebase re-written in `Swift`. + * Migrate `AVAudioPlayer` to `AVPlayer`. - **[2.7.0]** - Migrate `android` module to `kotlin`. - **[2.6.2]** diff --git a/Example/App.tsx b/Example/App.tsx index dfa9bbc7..e0ba9040 100644 --- a/Example/App.tsx +++ b/Example/App.tsx @@ -298,8 +298,9 @@ class Page extends Component { private onStartPlay = async () => { console.log('onStartPlay'); const msg = await this.audioRecorderPlayer.startPlayer(this.path); - this.audioRecorderPlayer.setVolume(1.0); - console.log(msg); + const volume = await this.audioRecorderPlayer.setVolume(1.0); + console.log(`file: ${msg}`, `volume: ${volume}`); + this.audioRecorderPlayer.addPlayBackListener((e: any) => { if (e.current_position === e.duration) { console.log('finished'); diff --git a/Example/ios/Podfile b/Example/ios/Podfile index 665d3504..c206f808 100644 --- a/Example/ios/Podfile +++ b/Example/ios/Podfile @@ -23,6 +23,7 @@ target 'RNAudioRecorderPlayer' do # you should disable the next line. use_flipper!({ 'Flipper-Folly' => '2.5.3', 'Flipper' => '0.87.0', 'Flipper-RSocket' => '1.3.1' }) + post_install do |installer| react_native_post_install(installer) end diff --git a/Example/ios/Podfile.lock b/Example/ios/Podfile.lock index 41e2dc55..9d8fa99f 100644 --- a/Example/ios/Podfile.lock +++ b/Example/ios/Podfile.lock @@ -505,10 +505,10 @@ SPEC CHECKSUMS: React-runtimeexecutor: cad74a1eaa53ee6e7a3620231939d8fe2c6afcf0 ReactCommon: cfe2b7fd20e0dbd2d1185cd7d8f99633fbc5ff05 rn-fetch-blob: f065bb7ab7fb48dd002629f8bdcb0336602d3cba - RNAudioRecorderPlayer: afbacfc9312e836a4299407b896d42cf3bab6c3a + RNAudioRecorderPlayer: 415f616f2db152d9e6e11011fd8b192d3ef6606d Yoga: 8c8436d4171c87504c648ae23b1d81242bdf3bbf YogaKit: f782866e155069a2cca2517aafea43200b01fd5a -PODFILE CHECKSUM: 1764a8118c59a5127e35c36ef37ef22f785b1a62 +PODFILE CHECKSUM: 6398d8875e876b57a0b08c900810453dfb882c9c COCOAPODS: 1.10.1 diff --git a/Example/ios/RNAudioRecorderPlayer.xcodeproj/project.pbxproj b/Example/ios/RNAudioRecorderPlayer.xcodeproj/project.pbxproj index 639fb395..3d7a6c8d 100644 --- a/Example/ios/RNAudioRecorderPlayer.xcodeproj/project.pbxproj +++ b/Example/ios/RNAudioRecorderPlayer.xcodeproj/project.pbxproj @@ -8,6 +8,7 @@ /* Begin PBXBuildFile section */ 00E356F31AD99517003FC87E /* RNAudioRecorderPlayerTests.m in Sources */ = {isa = PBXBuildFile; fileRef = 00E356F21AD99517003FC87E /* RNAudioRecorderPlayerTests.m */; }; + 0E7EAFD72642888200F4C51F /* SwiftBridge.swift in Sources */ = {isa = PBXBuildFile; fileRef = 0E7EAFD62642888200F4C51F /* SwiftBridge.swift */; }; 13B07FBC1A68108700A75B9A /* AppDelegate.m in Sources */ = {isa = PBXBuildFile; fileRef = 13B07FB01A68108700A75B9A /* AppDelegate.m */; }; 13B07FBF1A68108700A75B9A /* Images.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = 13B07FB51A68108700A75B9A /* Images.xcassets */; }; 13B07FC11A68108700A75B9A /* main.m in Sources */ = {isa = PBXBuildFile; fileRef = 13B07FB71A68108700A75B9A /* main.m */; }; @@ -31,6 +32,7 @@ 00E356F11AD99517003FC87E /* Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist.xml; path = Info.plist; sourceTree = ""; }; 00E356F21AD99517003FC87E /* RNAudioRecorderPlayerTests.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = RNAudioRecorderPlayerTests.m; sourceTree = ""; }; 0B595183C5E56B684890DD40 /* Pods-RNAudioRecorderPlayer.debug.xcconfig */ = 
{isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-RNAudioRecorderPlayer.debug.xcconfig"; path = "Target Support Files/Pods-RNAudioRecorderPlayer/Pods-RNAudioRecorderPlayer.debug.xcconfig"; sourceTree = ""; }; + 0E7EAFD62642888200F4C51F /* SwiftBridge.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SwiftBridge.swift; sourceTree = ""; }; 13B07F961A680F5B00A75B9A /* RNAudioRecorderPlayer.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = RNAudioRecorderPlayer.app; sourceTree = BUILT_PRODUCTS_DIR; }; 13B07FAF1A68108700A75B9A /* AppDelegate.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = AppDelegate.h; path = RNAudioRecorderPlayer/AppDelegate.h; sourceTree = ""; }; 13B07FB01A68108700A75B9A /* AppDelegate.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = AppDelegate.m; path = RNAudioRecorderPlayer/AppDelegate.m; sourceTree = ""; }; @@ -116,6 +118,7 @@ 83CBB9F61A601CBA00E9B192 = { isa = PBXGroup; children = ( + 0E7EAFD62642888200F4C51F /* SwiftBridge.swift */, 13B07FAE1A68108700A75B9A /* RNAudioRecorderPlayer */, 832341AE1AAA6A7D00B99B32 /* Libraries */, 00E356EF1AD99517003FC87E /* RNAudioRecorderPlayerTests */, @@ -145,7 +148,6 @@ B1F18E999E49CF88AAD205B3 /* Pods-RNAudioRecorderPlayer-RNAudioRecorderPlayerTests.debug.xcconfig */, DE6831B173502234E195B22A /* Pods-RNAudioRecorderPlayer-RNAudioRecorderPlayerTests.release.xcconfig */, ); - name = Pods; path = Pods; sourceTree = ""; }; @@ -208,7 +210,7 @@ TestTargetID = 13B07F861A680F5B00A75B9A; }; 13B07F861A680F5B00A75B9A = { - LastSwiftMigration = 1120; + LastSwiftMigration = 1250; }; }; }; @@ -413,6 +415,7 @@ files = ( 13B07FBC1A68108700A75B9A /* AppDelegate.m in Sources */, 13B07FC11A68108700A75B9A /* main.m in Sources */, + 0E7EAFD72642888200F4C51F /* SwiftBridge.swift in Sources */, ); runOnlyForDeploymentPostprocessing = 0; }; @@ -431,6 +434,7 @@ isa = XCBuildConfiguration; baseConfigurationReference = B1F18E999E49CF88AAD205B3 /* Pods-RNAudioRecorderPlayer-RNAudioRecorderPlayerTests.debug.xcconfig */; buildSettings = { + ALWAYS_EMBED_SWIFT_STANDARD_LIBRARIES = YES; BUNDLE_LOADER = "$(TEST_HOST)"; GCC_PREPROCESSOR_DEFINITIONS = ( "DEBUG=1", @@ -458,6 +462,7 @@ isa = XCBuildConfiguration; baseConfigurationReference = DE6831B173502234E195B22A /* Pods-RNAudioRecorderPlayer-RNAudioRecorderPlayerTests.release.xcconfig */; buildSettings = { + ALWAYS_EMBED_SWIFT_STANDARD_LIBRARIES = YES; BUNDLE_LOADER = "$(TEST_HOST)"; COPY_PHASE_STRIP = NO; INFOPLIST_FILE = RNAudioRecorderPlayerTests/Info.plist; diff --git a/Example/ios/RNAudioRecorderPlayer/Info.plist b/Example/ios/RNAudioRecorderPlayer/Info.plist index eeeccfad..6969e586 100644 --- a/Example/ios/RNAudioRecorderPlayer/Info.plist +++ b/Example/ios/RNAudioRecorderPlayer/Info.plist @@ -51,5 +51,7 @@ UIViewControllerBasedStatusBarAppearance + NSMicrophoneUsageDescription + Give $(PRODUCT_NAME) permission to use your microphone. Your record wont be shared without your permission. diff --git a/Example/ios/SwiftBridge.swift b/Example/ios/SwiftBridge.swift new file mode 100644 index 00000000..f67a080a --- /dev/null +++ b/Example/ios/SwiftBridge.swift @@ -0,0 +1,8 @@ +// +// SwiftBridge.swift +// RNAudioRecorderPlayer +// +// Created by hyochan on 2021/05/05. 
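+// NOTE: Added (and left almost empty) presumably so that Xcode generates an Objective-C bridging header for the example target and embeds the Swift standard libraries (see ALWAYS_EMBED_SWIFT_STANDARD_LIBRARIES above), which the Swift rewrite of the native module needs.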
+// + +import Foundation diff --git a/README.md b/README.md index cc15f3c8..2b17715c 100644 --- a/README.md +++ b/README.md @@ -20,6 +20,11 @@ This is a react-native link module for audio recorder and player. This is not a ## Breaking Changes +- From version `3.0.+`, the native modules have gone through a major migration. Usage is mostly unchanged from version `2.0.+`, but the internals have changed significantly. + 1. The codebase has been rewritten in [Kotlin for Android](https://kotlinlang.org) and [Swift for iOS](https://swift.org). + [iOS] + * [AVAudioPlayer](https://developer.apple.com/documentation/avfaudio/avaudioplayer) has been migrated to [AVPlayer](https://developer.apple.com/documentation/avfoundation/avplayer), which supports streaming and more playback capabilities [#231](https://github.com/hyochan/react-native-audio-recorder-player/issues/231), [#245](https://github.com/hyochan/react-native-audio-recorder-player/issues/245), [#275](https://github.com/hyochan/react-native-audio-recorder-player/issues/275). + - There has been vast improvements in [#114](https://github.com/dooboolab/react-native-audio-recorder-player/pull/114) which is released in `2.3.0`. We now support all `RN` versions without any version differenciating. See below installation guide for your understanding. ## Migration Guide @@ -34,6 +39,7 @@ This is a react-native link module for audio recorder and player. This is not a | `resume` | `resumePlayer` | | `seekTo` | `seekToPlayer` | | | `setSubscriptionDuration` | +| `addPlayBackListener` | `addPlayBackListener` | | `setRecordInterval` | `addRecordBackListener` | | `removeRecordInterval` | `` | | | `setVolume` | @@ -81,15 +87,24 @@ npx pod-install compile project(':react-native-audio-recorder-player') ``` -### Post installation +## Post installation + +### iOS On _iOS_ you need to add a usage description to `Info.plist`: ```xml NSMicrophoneUsageDescription -This sample uses the microphone to record your speech and convert it to text. +Give $(PRODUCT_NAME) permission to use your microphone. Your recordings won't be shared without your permission. ``` +Also, add a [Swift bridging header](https://stackoverflow.com/questions/31716413/xcode-not-automatically-creating-bridging-header) to your project if you haven't created one already, since the iOS module is now written in Swift. + +1 + + +### Android + On _Android_ you need to add a permission to `AndroidManifest.xml`: ```xml @@ -173,7 +188,7 @@ All methods are implemented with promises. | seekToPlayer | `number` miliseconds | `Promise` | Seek audio. | | setVolume | `doulbe` value | `Promise` | Set volume of audio player (default 1.0, range: 0.0 ~ 1.0). 
| -## Customizing recorded audio quality (from `2.3.0`) +## Able to customize recorded audio quality (from `2.3.0`) ``` interface AudioSet { diff --git a/RNAudioRecorderPlayer.podspec b/RNAudioRecorderPlayer.podspec index 26006312..1baba69e 100644 --- a/RNAudioRecorderPlayer.podspec +++ b/RNAudioRecorderPlayer.podspec @@ -10,10 +10,10 @@ Pod::Spec.new do |s| s.authors = package['author'] s.homepage = package['homepage'] - s.platform = :ios, "9.0" + s.platform = :ios, "11.0" - s.source = { :git => "https://github.com/dooboolab/react-native-audio-recorder-player.git", :tag => "#{s.version}" } - s.source_files = "ios/**/*.{h,m}" + s.source = { :git => "https://github.com/hyochan/react-native-audio-recorder-player.git", :tag => "#{s.version}" } + s.source_files = "ios/**/*.{h,c,cc,cpp,m,mm,swift}" s.dependency 'React' end diff --git a/ios/RNAudioRecorderPlayer.h b/ios/RNAudioRecorderPlayer.h index 18261e51..eed96f76 100644 --- a/ios/RNAudioRecorderPlayer.h +++ b/ios/RNAudioRecorderPlayer.h @@ -8,6 +8,4 @@ successfully:(BOOL)flag; - (void)updateRecorderProgress:(NSTimer*) timer; - (void)updateProgress:(NSTimer*) timer; -- (void)startRecorderTimer; -- (void)startPlayerTimer; @end diff --git a/ios/RNAudioRecorderPlayer.m b/ios/RNAudioRecorderPlayer.m index 336ae579..bc358672 100644 --- a/ios/RNAudioRecorderPlayer.m +++ b/ios/RNAudioRecorderPlayer.m @@ -1,378 +1,45 @@ +#import "React/RCTBridgeModule.h" +#import "React/RCTEventEmitter.h" + // RNAudioRecorderPlayer.m // dooboolab // // Created by dooboolab on 16/04/2018. // Copyright © 2018 Facebook. All rights reserved. -// - -#import "RNAudioRecorderPlayer.h" -#import -#import -#import - -NSString* GetDirectoryOfType_Sound(NSSearchPathDirectory dir) { - NSArray* paths = NSSearchPathForDirectoriesInDomains(dir, NSUserDomainMask, YES); - return [paths.firstObject stringByAppendingString:@"/"]; -} - -@implementation RNAudioRecorderPlayer { - NSURL *audioFileURL; - AVAudioRecorder *audioRecorder; - AVAudioPlayer *audioPlayer; - NSTimer *recordTimer; - NSTimer *playTimer; - BOOL _meteringEnabled; -} -double subscriptionDuration = 0.1; - -- (void)audioPlayerDidFinishPlaying:(AVAudioPlayer *)player successfully:(BOOL)flag { - NSLog(@"audioPlayerDidFinishPlaying"); - NSNumber *duration = [NSNumber numberWithDouble:audioPlayer.duration * 1000]; - - // Send last event then finish it. 
- // NSString* status = [NSString stringWithFormat:@"{\"duration\": \"%@\", \"current_position\": \"%@\"}", [duration stringValue], [currentTime stringValue]]; - NSDictionary *status = @{ - @"duration" : [duration stringValue], - @"current_position" : [duration stringValue], - }; - [self sendEventWithName:@"rn-playback" body: status]; - if (playTimer != nil) { - [playTimer invalidate]; - playTimer = nil; - } -} - -- (void)updateRecorderProgress:(NSTimer*) timer -{ - NSNumber *currentTime = [NSNumber numberWithDouble:audioRecorder.currentTime * 1000]; - // NSString* status = [NSString stringWithFormat:@"{\"current_position\": \"%@\"}", [currentTime stringValue]]; - NSNumber *currentMetering = [NSNumber numberWithDouble:0]; - if (_meteringEnabled) { - [audioRecorder updateMeters]; - currentMetering = [NSNumber numberWithDouble:[audioRecorder averagePowerForChannel: 0]]; - } - - NSDictionary *status = @{ - @"current_position" : [currentTime stringValue], - @"current_metering" : [currentMetering stringValue], - }; - [self sendEventWithName:@"rn-recordback" body:status]; -} - -- (void)updateProgress:(NSTimer*) timer -{ - NSNumber *duration = [NSNumber numberWithDouble:audioPlayer.duration * 1000]; - NSNumber *currentTime = [NSNumber numberWithDouble:audioPlayer.currentTime * 1000]; - - NSLog(@"updateProgress: %@", duration); - if ([duration intValue] == 0) { - [playTimer invalidate]; - [audioPlayer stop]; - return; - } +@interface RCT_EXTERN_MODULE(RNAudioRecorderPlayer, RCTEventEmitter) - // NSString* status = [NSString stringWithFormat:@"{\"duration\": \"%@\", \"current_position\": \"%@\"}", [duration stringValue], [currentTime stringValue]]; - NSDictionary *status = @{ - @"duration" : [duration stringValue], - @"current_position" : [currentTime stringValue], - }; +RCT_EXTERN_METHOD(setSubscriptionDuration:(double)duration); - [self sendEventWithName:@"rn-playback" body:status]; -} - -- (void)startRecorderTimer -{ - dispatch_async(dispatch_get_main_queue(), ^{ - self->recordTimer = [NSTimer scheduledTimerWithTimeInterval: subscriptionDuration - target:self - selector:@selector(updateRecorderProgress:) - userInfo:nil - repeats:YES]; - }); -} - -- (void)startPlayerTimer -{ - dispatch_async(dispatch_get_main_queue(), ^{ - self->playTimer = [NSTimer scheduledTimerWithTimeInterval: subscriptionDuration - target:self - selector:@selector(updateProgress:) - userInfo:nil - repeats:YES]; - }); -} - -- (dispatch_queue_t)methodQueue -{ - return dispatch_get_main_queue(); -} - -RCT_EXPORT_MODULE(); - -- (NSArray *)supportedEvents -{ - return @[@"rn-recordback", @"rn-playback"]; -} - -RCT_EXPORT_METHOD(setSubscriptionDuration:(double)duration - resolve:(RCTPromiseResolveBlock)resolve - reject:(RCTPromiseRejectBlock)reject) { - subscriptionDuration = duration; - resolve(@"set subscription duration."); -} - -RCT_EXPORT_METHOD(startRecorder:(NSString*)path +RCT_EXTERN_METHOD(startRecorder:(NSString *)path meteringEnabled:(BOOL)meteringEnabled - audioSets: (NSDictionary*)audioSets + audioSets:(NSDictionary *)audioSets resolve:(RCTPromiseResolveBlock)resolve - reject:(RCTPromiseRejectBlock)reject) { - - NSString *encoding = [RCTConvert NSString:audioSets[@"AVFormatIDKeyIOS"]]; - NSNumber *sampleRate = [RCTConvert NSNumber:audioSets[@"AVSampleRateKeyIOS"]]; - NSNumber *numberOfChannel = [RCTConvert NSNumber:audioSets[@"AVNumberOfChannelsKeyIOS"]]; - NSNumber *avFormat; - NSNumber *audioQuality = [RCTConvert NSNumber:audioSets[@"AVEncoderAudioQualityKeyIOS"]]; - _meteringEnabled = meteringEnabled; - NSNumber 
*avLPCMBitDepth = [RCTConvert NSNumber:audioSets[@"AVLinearPCMBitDepthKeyIOS"]]; - BOOL *avLPCMIsBigEndian = [RCTConvert BOOL:audioSets[@"AVLinearPCMIsBigEndianKeyIOS"]]; - BOOL *avLPCMIsFloatKey = [RCTConvert BOOL:audioSets[@"AVLinearPCMIsFloatKeyIOS"]]; - BOOL *avLPCMIsNonInterleaved = [RCTConvert BOOL:audioSets[@"AVLinearPCMIsNonInterleavedIOS"]]; - - if ([path isEqualToString:@"DEFAULT"]) { - audioFileURL = [NSURL fileURLWithPath:[GetDirectoryOfType_Sound(NSCachesDirectory) stringByAppendingString:@"sound.m4a"]]; - } else { - if ([path rangeOfString:@"file://"].location == NSNotFound) { - audioFileURL = [NSURL fileURLWithPath: [GetDirectoryOfType_Sound(NSCachesDirectory) stringByAppendingString:path]]; - } else { - audioFileURL = [NSURL URLWithString:path]; - } - } + reject:(RCTPromiseRejectBlock)reject); - if (!sampleRate) { - sampleRate = [NSNumber numberWithFloat:44100]; - } - if (!encoding) { - avFormat = [NSNumber numberWithInt:kAudioFormatAppleLossless]; - } else { - if ([encoding isEqual: @"lpcm"]) { - avFormat =[NSNumber numberWithInt:kAudioFormatLinearPCM]; - } else if ([encoding isEqual: @"ima4"]) { - avFormat =[NSNumber numberWithInt:kAudioFormatAppleIMA4]; - } else if ([encoding isEqual: @"aac"]) { - avFormat =[NSNumber numberWithInt:kAudioFormatMPEG4AAC]; - } else if ([encoding isEqual: @"MAC3"]) { - avFormat =[NSNumber numberWithInt:kAudioFormatMACE3]; - } else if ([encoding isEqual: @"MAC6"]) { - avFormat =[NSNumber numberWithInt:kAudioFormatMACE6]; - } else if ([encoding isEqual: @"ulaw"]) { - avFormat =[NSNumber numberWithInt:kAudioFormatULaw]; - } else if ([encoding isEqual: @"alaw"]) { - avFormat =[NSNumber numberWithInt:kAudioFormatALaw]; - } else if ([encoding isEqual: @"mp1"]) { - avFormat =[NSNumber numberWithInt:kAudioFormatMPEGLayer1]; - } else if ([encoding isEqual: @"mp2"]) { - avFormat =[NSNumber numberWithInt:kAudioFormatMPEGLayer2]; - } else if ([encoding isEqual: @"alac"]) { - avFormat =[NSNumber numberWithInt:kAudioFormatAppleLossless]; - } else if ([encoding isEqual: @"amr"]) { - avFormat =[NSNumber numberWithInt:kAudioFormatAMR]; - } else if ([encoding isEqual: @"flac"]) { - if (@available(iOS 11, *)) avFormat =[NSNumber numberWithInt:kAudioFormatFLAC]; - } else if ([encoding isEqual: @"opus"]) { - if (@available(iOS 11, *)) avFormat =[NSNumber numberWithInt:kAudioFormatOpus]; - } - } - if (!numberOfChannel) { - numberOfChannel = [NSNumber numberWithInt:2]; - } - if (!audioQuality) { - audioQuality = [NSNumber numberWithInt:AVAudioQualityMedium]; - } +RCT_EXTERN_METHOD(stopRecorder:(RCTPromiseResolveBlock)resolve + rejecter:(RCTPromiseRejectBlock)reject); - NSDictionary *audioSettings = [NSDictionary dictionaryWithObjectsAndKeys: - sampleRate, AVSampleRateKey, - avFormat, AVFormatIDKey, - numberOfChannel, AVNumberOfChannelsKey, - audioQuality, AVEncoderAudioQualityKey, - avLPCMBitDepth, AVLinearPCMBitDepthKey, - avLPCMIsBigEndian, AVLinearPCMIsBigEndianKey, - avLPCMIsFloatKey, AVLinearPCMIsFloatKey, - avLPCMIsNonInterleaved, AVLinearPCMIsNonInterleaved, - nil]; - // Setup audio session - AVAudioSession *session = [AVAudioSession sharedInstance]; - [session setCategory:AVAudioSessionCategoryPlayAndRecord withOptions:AVAudioSessionCategoryOptionAllowBluetooth error:nil]; - - // set volume default to speaker - UInt32 doChangeDefaultRoute = 1; - AudioSessionSetProperty(kAudioSessionProperty_OverrideCategoryDefaultToSpeaker, sizeof(doChangeDefaultRoute), &doChangeDefaultRoute); - - audioRecorder = [[AVAudioRecorder alloc] - initWithURL:audioFileURL - 
settings:audioSettings - error:nil]; - audioRecorder.meteringEnabled = _meteringEnabled; - - [audioRecorder setDelegate:self]; - [audioRecorder record]; - [self startRecorderTimer]; - - NSString *filePath = self->audioFileURL.absoluteString; - resolve(filePath); -} - -RCT_EXPORT_METHOD(stopRecorder:(RCTPromiseResolveBlock)resolve - reject:(RCTPromiseRejectBlock)reject) { - if (audioRecorder) { - [audioRecorder stop]; - if (recordTimer != nil) { - [recordTimer invalidate]; - recordTimer = nil; - } - - AVAudioSession *audioSession = [AVAudioSession sharedInstance]; - [audioSession setActive:NO error:nil]; - - NSString *filePath = audioFileURL.absoluteString; - resolve(filePath); - } else { - reject(@"audioRecorder record", @"audioRecorder is not set", nil); - } -} - -RCT_EXPORT_METHOD(setVolume:(double) volume +RCT_EXTERN_METHOD(setVolume:(float)volume resolve:(RCTPromiseResolveBlock) resolve - reject:(RCTPromiseRejectBlock) reject) { - [audioPlayer setVolume: volume]; - resolve(@"setVolume"); -} + rejecter:(RCTPromiseRejectBlock) reject); -RCT_EXPORT_METHOD(startPlayer:(NSString*)path +RCT_EXTERN_METHOD(startPlayer:(NSString*)path resolve:(RCTPromiseResolveBlock)resolve - reject:(RCTPromiseRejectBlock)reject) { - NSError *error; - if ([[path substringToIndex:4] isEqualToString:@"http"]) { - audioFileURL = [NSURL URLWithString:path]; - - NSURLSessionDataTask *downloadTask = [[NSURLSession sharedSession] - dataTaskWithURL:audioFileURL completionHandler:^(NSData *data, NSURLResponse *response, NSError *error) { - // NSData *data = [NSData dataWithContentsOfURL:audioFileURL]; - if (!audioPlayer) { - audioPlayer = [[AVAudioPlayer alloc] initWithData:data error:&error]; - audioPlayer.delegate = self; - } - - // Able to play in silent mode - [[AVAudioSession sharedInstance] - setCategory: AVAudioSessionCategoryPlayback - error: &error]; - // Able to play in background - [[AVAudioSession sharedInstance] setActive: YES error: nil]; - [[UIApplication sharedApplication] beginReceivingRemoteControlEvents]; + rejecter:(RCTPromiseRejectBlock)reject); - [audioPlayer play]; - [self startPlayerTimer]; - NSString *filePath = audioFileURL.absoluteString; - resolve(filePath); - }]; +RCT_EXTERN_METHOD(resumePlayer:(RCTPromiseResolveBlock)resolve + rejecter:(RCTPromiseRejectBlock)reject); - [downloadTask resume]; - } else { - if ([path isEqualToString:@"DEFAULT"]) { - audioFileURL = [NSURL fileURLWithPath:[GetDirectoryOfType_Sound(NSCachesDirectory) stringByAppendingString:@"sound.m4a"]]; - } else { - if ([path rangeOfString:@"file://"].location == NSNotFound) { - audioFileURL = [NSURL fileURLWithPath: [GetDirectoryOfType_Sound(NSCachesDirectory) stringByAppendingString:path]]; - } else { - audioFileURL = [NSURL URLWithString:path]; - } - } - - NSLog(@"Error %@",error); - - if (!audioPlayer) { - RCTLogInfo(@"audio player alloc"); - audioPlayer = [[AVAudioPlayer alloc] initWithContentsOfURL:audioFileURL error:nil]; - audioPlayer.delegate = self; - } - - // Able to play in silent mode - [[AVAudioSession sharedInstance] - setCategory: AVAudioSessionCategoryPlayback - error: nil]; - - NSLog(@"Error %@",error); - [audioPlayer play]; - [self startPlayerTimer]; - - NSString *filePath = audioFileURL.absoluteString; - resolve(filePath); - } -} - -RCT_EXPORT_METHOD(resumePlayer: (RCTPromiseResolveBlock)resolve - reject:(RCTPromiseRejectBlock)reject) { - if (!audioFileURL) { - reject(@"audioRecorder resume", @"no audioFileURL", nil); - return; - } - - if (!audioPlayer) { - reject(@"audioRecorder resume", @"no audioPlayer", 
nil); - return; - } - - [[AVAudioSession sharedInstance] - setCategory: AVAudioSessionCategoryPlayback - error: nil]; - [audioPlayer play]; - [self startPlayerTimer]; - NSString *filePath = audioFileURL.absoluteString; - resolve(filePath); -} - -RCT_EXPORT_METHOD(seekToPlayer: (nonnull NSNumber*) time +RCT_EXTERN_METHOD(seekToPlayer:(nonnull NSNumber*) time resolve:(RCTPromiseResolveBlock)resolve - reject:(RCTPromiseRejectBlock)reject) { - if (audioPlayer) { - audioPlayer.currentTime = [time doubleValue]; - resolve(@"seekTo"); - } else { - reject(@"audioPlayer seekTo", @"audioPlayer is not set", nil); - } -} - -RCT_EXPORT_METHOD(pausePlayer: (RCTPromiseResolveBlock)resolve - reject:(RCTPromiseRejectBlock)reject) { - RCTLogInfo(@"pause"); - if (audioPlayer && [audioPlayer isPlaying]) { - [audioPlayer pause]; - if (playTimer != nil) { - [playTimer invalidate]; - playTimer = nil; - } - resolve(@"pause play"); - } else { - reject(@"audioPlayer pause", @"audioPlayer is not playing", nil); - } -} + rejecter:(RCTPromiseRejectBlock)reject); +RCT_EXTERN_METHOD(pausePlayer:(RCTPromiseResolveBlock)resolve + rejecter:(RCTPromiseRejectBlock)reject); -RCT_EXPORT_METHOD(stopPlayer:(RCTPromiseResolveBlock)resolve - reject:(RCTPromiseRejectBlock)reject) { - if (audioPlayer) { - if (playTimer != nil) { - [playTimer invalidate]; - playTimer = nil; - } - [audioPlayer stop]; - audioPlayer = nil; - resolve(@"stop play"); - } else { - reject(@"audioPlayer stop", @"audioPlayer is not set", nil); - } -} +RCT_EXTERN_METHOD(stopPlayer:(RCTPromiseResolveBlock)resolve + rejecter:(RCTPromiseRejectBlock)reject); @end diff --git a/ios/RNAudioRecorderPlayer.swift b/ios/RNAudioRecorderPlayer.swift new file mode 100644 index 00000000..81d9ab9d --- /dev/null +++ b/ios/RNAudioRecorderPlayer.swift @@ -0,0 +1,349 @@ +// +// RNAudioRecorderPlayer.swift +// RNAudioRecorderPlayer +// +// Created by hyochan on 2021/05/05. +// + +import Foundation +import AVFoundation + +@objc(RNAudioRecorderPlayer) +class RNAudioRecorderPlayer: RCTEventEmitter, AVAudioRecorderDelegate { + var subscriptionDuration: Double = 0.5 + var audioFileURL: URL? + + // Recorder + var audioRecorder: AVAudioRecorder! + var recordingSession: AVAudioSession! + var recordTimer: Timer? + var _meteringEnabled: Bool = false + + // Player + var pausedPlayTime: CMTime? + var audioPlayerItem: AVPlayerItem! + var audioPlayer: AVPlayer! + var playTimer: Timer? + var timeObserverToken: Any? + + override static func requiresMainQueueSetup() -> Bool { + return true + } + + override func supportedEvents() -> [String]! { + return ["rn-playback", "rn-recordback"] + } + + func setAudioFileURL(path: String) { + if (path == "DEFAULT") { + let cachesDirectory = FileManager.default.urls(for: .cachesDirectory, in: .userDomainMask).first! + audioFileURL = cachesDirectory.appendingPathComponent("sound.m4a") + } else if (path.contains("http")){ + audioFileURL = URL(string: path) + } else { + let cachesDirectory = FileManager.default.urls(for: .cachesDirectory, in: .userDomainMask).first! 
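+ // Note: any other value is treated as a bare file name and resolved against the app's Caches directory (e.g. "hello.m4a" becomes <Caches>/hello.m4a); unlike the removed Objective-C version, "file://" URLs are not special-cased here.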
+ audioFileURL = cachesDirectory.appendingPathComponent(path) + } + } + + @objc(updateRecorderProgress:) + public func updateRecorderProgress(timer: Timer) -> Void { + if (audioRecorder != nil) { + var currentMetering: Float = 0 + + if (_meteringEnabled) { + audioRecorder.updateMeters() + currentMetering = audioRecorder.averagePower(forChannel: 0) + } + + let status = [ + "is_recording": audioRecorder.isRecording, + "current_position": audioRecorder.currentTime * 1000, + "current_metering": currentMetering, + ] as [String : Any]; + + sendEvent(withName: "rn-recordback", body: status) + } + } + + @objc(startRecorderTimer) + func startRecorderTimer() -> Void { + DispatchQueue.main.async { + self.recordTimer = Timer.scheduledTimer( + timeInterval: self.subscriptionDuration, + target: self, + selector: #selector(self.updateRecorderProgress), + userInfo: nil, + repeats: true + ) + } + } + + @objc + func construct() { + self.subscriptionDuration = 0.1 + } + + @objc(audioPlayerDidFinishPlaying:) + public static func audioPlayerDidFinishPlaying(player: AVAudioRecorder) -> Bool { + return true + } + + @objc(setSubscriptionDuration:) + func setSubscriptionDuration(duration: Double) -> Void { + subscriptionDuration = duration + } + + /********** Player **********/ + + @objc(startRecorder:meteringEnabled:audioSets:resolve:reject:) + func startRecorder(path: String, meteringEnabled: Bool, audioSets: [String: Any], resolve: @escaping RCTPromiseResolveBlock, + rejecter reject: @escaping RCTPromiseRejectBlock) -> Void { + + _meteringEnabled = meteringEnabled; + + let encoding = audioSets["AVFormatIDKeyIOS"] as? String + let avLPCMBitDepth = audioSets["AVLinearPCMBitDepthKeyIOS"] as? Int + let avLPCMIsBigEndian = audioSets["AVLinearPCMIsBigEndianKeyIOS"] as? Bool + let avLPCMIsFloatKey = audioSets["AVLinearPCMIsFloatKeyIOS"] as? Bool + let avLPCMIsNonInterleaved = audioSets["AVLinearPCMIsNonInterleavedIOS"] as? Bool + + var avFormat: Int? = nil + var sampleRate = audioSets["AVSampleRateKeyIOS"] as? Int + var numberOfChannel = audioSets["AVNumberOfChannelsKeyIOS"] as? Int + var audioQuality = audioSets["AVEncoderAudioQualityKeyIOS"] as? 
Int + + setAudioFileURL(path: path) + + if (sampleRate == nil) { + sampleRate = 44100; + } + + if (encoding == nil) { + avFormat = Int(kAudioFormatAppleLossless) + } else { + if (encoding == "lpcm") { + avFormat = Int(kAudioFormatLinearPCM) + } else if (encoding == "ima4") { + avFormat = Int(kAudioFormatAppleIMA4) + } else if (encoding == "aac") { + avFormat = Int(kAudioFormatMPEG4AAC) + } else if (encoding == "MAC3") { + avFormat = Int(kAudioFormatMACE3) + } else if (encoding == "MAC6") { + avFormat = Int(kAudioFormatMACE6) + } else if (encoding == "ulaw") { + avFormat = Int(kAudioFormatULaw) + } else if (encoding == "alaw") { + avFormat = Int(kAudioFormatALaw) + } else if (encoding == "mp1") { + avFormat = Int(kAudioFormatMPEGLayer1) + } else if (encoding == "mp2") { + avFormat = Int(kAudioFormatMPEGLayer2) + } else if (encoding == "alac") { + avFormat = Int(kAudioFormatAppleLossless) + } else if (encoding == "amr") { + avFormat = Int(kAudioFormatAMR) + } else if (encoding == "flac") { + if #available(iOS 11.0, *) { + avFormat = Int(kAudioFormatFLAC) + } + } else if (encoding == "opus") { + if #available(iOS 11.0, *) { + avFormat = Int(kAudioFormatOpus) + } + } + } + + if (numberOfChannel == nil) { + numberOfChannel = 2 + } + + if (audioQuality == nil) { + audioQuality = AVAudioQuality.medium.rawValue + } + + func startRecording() { + let settings = [ + AVSampleRateKey: sampleRate!, + AVFormatIDKey: avFormat!, + AVNumberOfChannelsKey: numberOfChannel!, + AVEncoderAudioQualityKey: audioQuality!, + AVLinearPCMBitDepthKey: avLPCMBitDepth ?? 16, + AVLinearPCMIsBigEndianKey: avLPCMIsBigEndian ?? true, + AVLinearPCMIsFloatKey: avLPCMIsFloatKey ?? false, + AVLinearPCMIsNonInterleaved: avLPCMIsNonInterleaved ?? false + ] as [String : Any] + + do { + audioRecorder = try AVAudioRecorder(url: audioFileURL!, settings: settings) + + if (audioRecorder != nil) { + audioRecorder.prepareToRecord() + audioRecorder.delegate = self + audioRecorder.isMeteringEnabled = _meteringEnabled + audioRecorder.record() + startRecorderTimer() + + resolve(audioFileURL?.absoluteString) + return + } + + reject("RNAudioPlayerRecorder", "Error occurred while initiating the recorder", nil) + } catch { + reject("RNAudioPlayerRecorder", "Error occurred during recording", nil) + } + } + + recordingSession = AVAudioSession.sharedInstance() + + do { + try recordingSession.setCategory(.playAndRecord, mode: .default, options: AVAudioSession.CategoryOptions.allowBluetooth) + try recordingSession.setActive(true) + + recordingSession.requestRecordPermission { granted in + DispatchQueue.main.async { + if granted { + startRecording() + } else { + reject("RNAudioPlayerRecorder", "Record permission not granted", nil) + } + } + } + } catch { + reject("RNAudioPlayerRecorder", "Failed to record", nil) + } + } + + @objc(stopRecorder:rejecter:) + public func stopRecorder( + resolve: @escaping RCTPromiseResolveBlock, + rejecter reject: @escaping RCTPromiseRejectBlock + ) -> Void { + if (audioRecorder != nil) { + do { + try recordingSession.setActive(false) + + audioRecorder.stop() + + if (recordTimer != nil) { + recordTimer!.invalidate() + recordTimer = nil + } + + resolve(audioFileURL?.absoluteString) + } catch { + reject("RNAudioPlayerRecorder", "Failed to stop recorder", nil) + } + } else { + reject("RNAudioPlayerRecorder", "Recorder is not recording", nil) + } + } + + + /********** Player **********/ + + func addPeriodicTimeObserver() { + let timeScale = CMTimeScale(NSEC_PER_SEC) + let time = CMTime(seconds: subscriptionDuration, preferredTimescale: timeScale) + + timeObserverToken = audioPlayer.addPeriodicTimeObserver(forInterval: time, + queue: .main) {_ in + self.sendEvent(withName: "rn-playback", body: [ + "isMuted": self.audioPlayer.isMuted, + "current_position": self.audioPlayerItem.currentTime().seconds * 1000, + "duration": self.audioPlayerItem.duration.seconds * 1000, + ]) + } + } + + func removePeriodicTimeObserver() { + if let timeObserverToken = timeObserverToken { + audioPlayer.removeTimeObserver(timeObserverToken) + self.timeObserverToken = nil + } + } + + + @objc(startPlayer:resolve:rejecter:) + public func startPlayer( + path: String, + resolve: @escaping RCTPromiseResolveBlock, + rejecter reject: @escaping RCTPromiseRejectBlock + ) -> Void { + setAudioFileURL(path: path) + audioPlayerItem = AVPlayerItem(url: audioFileURL!) + + if (audioPlayer == nil) { + audioPlayer = AVPlayer(playerItem: audioPlayerItem) + } else { + audioPlayer.replaceCurrentItem(with: audioPlayerItem) + } + + addPeriodicTimeObserver() + audioPlayer.play() + resolve(audioFileURL?.absoluteString) + } + + @objc(stopPlayer:rejecter:) + public func stopPlayer( + resolve: @escaping RCTPromiseResolveBlock, + rejecter reject: @escaping RCTPromiseRejectBlock + ) -> Void { + if (audioPlayer == nil) { + return reject("RNAudioPlayerRecorder", "Player is already stopped.", nil) + } + + audioPlayer.pause() + self.removePeriodicTimeObserver() + self.audioPlayer = nil + + resolve(audioFileURL?.absoluteString) + } + + @objc(pausePlayer:rejecter:) + public func pausePlayer( + resolve: @escaping RCTPromiseResolveBlock, + rejecter reject: @escaping RCTPromiseRejectBlock + ) -> Void { + if (audioPlayer == nil) { + return reject("RNAudioPlayerRecorder", "Player is not playing", nil) + } + + audioPlayer.pause() + resolve("Paused!") + } + + @objc(resumePlayer:rejecter:) + public func resumePlayer( + resolve: @escaping RCTPromiseResolveBlock, + rejecter reject: @escaping RCTPromiseRejectBlock + ) -> Void { + if (audioPlayer == nil) { + return reject("RNAudioPlayerRecorder", "Player is null", nil) + } + + audioPlayer.play() + resolve("Resumed!") + } + + @objc(seekToPlayer:resolve:rejecter:) + public func seekToPlayer( + time: Double, + resolve: @escaping RCTPromiseResolveBlock, + rejecter reject: @escaping RCTPromiseRejectBlock + ) -> Void { + if (audioPlayer == nil) { + return reject("RNAudioPlayerRecorder", "Player is null", nil) + } + + audioPlayer.seek(to: CMTime(seconds: time, preferredTimescale: CMTimeScale(NSEC_PER_SEC))) + resolve("Seek successful") + } + + @objc(setVolume:resolve:rejecter:) + public func setVolume( + volume: Float, + resolve: @escaping RCTPromiseResolveBlock, + rejecter reject: @escaping RCTPromiseRejectBlock + ) -> Void { + if (audioPlayer == nil) { + return reject("RNAudioPlayerRecorder", "Player is null", nil) + } + + audioPlayer.volume = volume + resolve(volume) + } +} diff --git a/package.json b/package.json index 09a4cc0e..55b90c84 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "react-native-audio-recorder-player", - "version": "2.7.0", + "version": "3.0.0-beta.1", "description": "React Native Audio Recorder and Player.", "homepage": "https://github.com/dooboolab/react-native-audio-recorder-player", "main": "index.ts",
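Note on the `AVPlayer` migration: the removed Objective-C module emitted a final `rn-playback` event from `audioPlayerDidFinishPlaying`, whereas the new Swift player only reports progress through its periodic time observer. Below is a minimal sketch of how a play-to-end event could additionally be surfaced with `AVPlayer`; the `observePlaybackEnd` helper name is hypothetical and not part of this diff, and the extension assumes it compiles in the same module as `RNAudioRecorderPlayer` so it can reach that class's properties and `sendEvent`.

```swift
import AVFoundation
import Foundation

extension RNAudioRecorderPlayer {
    /// Sketch only: emit a final "rn-playback" event when the current AVPlayerItem
    /// finishes, mirroring the removed AVAudioPlayer delegate callback.
    /// Keep the returned token and remove it (alongside the periodic time observer)
    /// when the item is replaced or playback is stopped.
    func observePlaybackEnd() -> NSObjectProtocol {
        return NotificationCenter.default.addObserver(
            forName: .AVPlayerItemDidPlayToEndTime,
            object: audioPlayerItem,
            queue: .main
        ) { [weak self] _ in
            guard let self = self, let item = self.audioPlayerItem else { return }
            // Report position == duration so the JS side can detect "finished".
            let durationMs = item.duration.seconds * 1000
            let body: [String: Any] = [
                "isMuted": self.audioPlayer?.isMuted ?? false,
                "current_position": durationMs,
                "duration": durationMs,
            ]
            self.sendEvent(withName: "rn-playback", body: body)
        }
    }
}
```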