diff --git a/packages/audiofileplayer/android/build.gradle b/packages/audiofileplayer/android/build.gradle index d7b1e95..62b25a4 100644 --- a/packages/audiofileplayer/android/build.gradle +++ b/packages/audiofileplayer/android/build.gradle @@ -22,7 +22,7 @@ rootProject.allprojects { apply plugin: 'com.android.library' android { - compileSdkVersion 29 + compileSdkVersion 31 compileOptions { sourceCompatibility JavaVersion.VERSION_1_8 @@ -40,5 +40,5 @@ android { dependencies { implementation 'androidx.core:core:1.0.0' - implementation 'androidx.media:media:1.0.0' + implementation 'androidx.media:media:1.3.0' } \ No newline at end of file diff --git a/packages/audiofileplayer/android/src/main/java/com/google/flutter/plugins/audiofileplayer/AudiofileplayerPlugin.java b/packages/audiofileplayer/android/src/main/java/com/google/flutter/plugins/audiofileplayer/AudiofileplayerPlugin.java index a12fc3b..5072b64 100644 --- a/packages/audiofileplayer/android/src/main/java/com/google/flutter/plugins/audiofileplayer/AudiofileplayerPlugin.java +++ b/packages/audiofileplayer/android/src/main/java/com/google/flutter/plugins/audiofileplayer/AudiofileplayerPlugin.java @@ -39,6 +39,7 @@ import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.concurrent.atomic.AtomicBoolean; /** * Flutter audio file player plugin. @@ -347,14 +348,17 @@ private void onLoad(MethodCall call, Result result) { // Note that this will throw an exception on invalid URL or lack of network connectivity. 
RemoteManagedMediaPlayer newPlayer = new RemoteManagedMediaPlayer(audioId, remoteUrl, this, looping, playInBackground); + final AtomicBoolean remoteLoad = new AtomicBoolean(false); newPlayer.setOnRemoteLoadListener( (success) -> { - if (success) { - handleDurationForPlayer(newPlayer, audioId); - result.success(null); - } else { - mediaPlayers.remove(audioId); - result.error(ERROR_CODE, "Remote URL loading failed for URL: " + remoteUrl, null); + if (remoteLoad.compareAndSet(false, true)) { + if (success) { + handleDurationForPlayer(newPlayer, audioId); + result.success(null); + } else { + mediaPlayers.remove(audioId); + result.error(ERROR_CODE, "Remote URL loading failed for URL: " + remoteUrl, null); + } } }); // Add player to data structure immediately; will be removed if async loading fails. @@ -509,16 +513,16 @@ public void onActivityDestroyed(Activity activity) { @Override public void onConnected() { Log.i(TAG, "ConnectionCallback.onConnected"); - try { +// try { MediaSessionCompat.Token token = mediaBrowser.getSessionToken(); mediaController = new MediaControllerCompat(activity, token); MediaControllerCompat.setMediaController(activity, mediaController); mediaController.registerCallback(controllerCallback); AudiofileplayerService.instance.setPendingIntentActivity(activity); AudiofileplayerService.instance.setListener(AudiofileplayerPlugin.this); - } catch (RemoteException e) { - throw new RuntimeException(e); - } +// } catch (RemoteException e) { +// throw new RuntimeException(e); +// } } @Override diff --git a/packages/audiofileplayer/android/src/main/java/com/google/flutter/plugins/audiofileplayer/AudiofileplayerService.java b/packages/audiofileplayer/android/src/main/java/com/google/flutter/plugins/audiofileplayer/AudiofileplayerService.java index 6859878..51a0140 100644 --- a/packages/audiofileplayer/android/src/main/java/com/google/flutter/plugins/audiofileplayer/AudiofileplayerService.java +++ 
b/packages/audiofileplayer/android/src/main/java/com/google/flutter/plugins/audiofileplayer/AudiofileplayerService.java @@ -137,7 +137,7 @@ public void setPendingIntentActivity(Activity activity) { Context context = activity.getApplicationContext(); Intent intent = new Intent(context, activity.getClass()); PendingIntent pendingIntent = - PendingIntent.getActivity(context, 99, intent, PendingIntent.FLAG_UPDATE_CURRENT); + PendingIntent.getActivity(context, 99, intent, PendingIntent.FLAG_IMMUTABLE | PendingIntent.FLAG_UPDATE_CURRENT); mediaSession.setSessionActivity(pendingIntent); } diff --git a/packages/audiofileplayer/example/.flutter-plugins-dependencies b/packages/audiofileplayer/example/.flutter-plugins-dependencies new file mode 100644 index 0000000..bfeadd4 --- /dev/null +++ b/packages/audiofileplayer/example/.flutter-plugins-dependencies @@ -0,0 +1 @@ +{"info":"This is a generated file; do not edit or check into version control.","plugins":{"ios":[{"name":"audiofileplayer","path":"/Users/dengxianshun/pluginspace/flutter.plugins/packages/audiofileplayer/","dependencies":[]},{"name":"path_provider","path":"/Users/dengxianshun/development/flutter/.pub-cache/hosted/pub.flutter-io.cn/path_provider-2.0.3/","dependencies":[]}],"android":[{"name":"audiofileplayer","path":"/Users/dengxianshun/pluginspace/flutter.plugins/packages/audiofileplayer/","dependencies":[]},{"name":"path_provider","path":"/Users/dengxianshun/development/flutter/.pub-cache/hosted/pub.flutter-io.cn/path_provider-2.0.3/","dependencies":[]}],"macos":[{"name":"path_provider_macos","path":"/Users/dengxianshun/development/flutter/.pub-cache/hosted/pub.flutter-io.cn/path_provider_macos-2.0.2/","dependencies":[]}],"linux":[{"name":"path_provider_linux","path":"/Users/dengxianshun/development/flutter/.pub-cache/hosted/pub.flutter-io.cn/path_provider_linux-2.0.2/","dependencies":[]}],"windows":[{"name":"path_provider_windows","path":"/Users/dengxianshun/development/flutter/.pub-cache/hosted/pub.flutter-i
o.cn/path_provider_windows-2.0.3/","dependencies":[]}],"web":[]},"dependencyGraph":[{"name":"audiofileplayer","dependencies":[]},{"name":"path_provider","dependencies":["path_provider_linux","path_provider_macos","path_provider_windows"]},{"name":"path_provider_linux","dependencies":[]},{"name":"path_provider_macos","dependencies":[]},{"name":"path_provider_windows","dependencies":[]}],"date_created":"2021-09-10 17:27:37.935922","version":"2.5.0"} \ No newline at end of file diff --git a/packages/audiofileplayer/example/ios/Flutter/Flutter.podspec b/packages/audiofileplayer/example/ios/Flutter/Flutter.podspec new file mode 100644 index 0000000..5ca3041 --- /dev/null +++ b/packages/audiofileplayer/example/ios/Flutter/Flutter.podspec @@ -0,0 +1,18 @@ +# +# NOTE: This podspec is NOT to be published. It is only used as a local source! +# + +Pod::Spec.new do |s| + s.name = 'Flutter' + s.version = '1.0.0' + s.summary = 'High-performance, high-fidelity mobile apps.' + s.description = <<-DESC +Flutter provides an easy and productive way to build and deploy high-performance mobile apps for Android and iOS. + DESC + s.homepage = 'https://flutter.io' + s.license = { :type => 'MIT' } + s.author = { 'Flutter Dev Team' => 'flutter-dev@googlegroups.com' } + s.source = { :git => 'https://github.com/flutter/engine', :tag => s.version.to_s } + s.ios.deployment_target = '8.0' + s.vendored_frameworks = 'Flutter.framework' +end diff --git a/packages/audiofileplayer/example/ios/Flutter/flutter_export_environment.sh b/packages/audiofileplayer/example/ios/Flutter/flutter_export_environment.sh new file mode 100755 index 0000000..396bd79 --- /dev/null +++ b/packages/audiofileplayer/example/ios/Flutter/flutter_export_environment.sh @@ -0,0 +1,15 @@ +#!/bin/sh +# This is a generated file; do not edit or check into version control. 
+export "FLUTTER_ROOT=/Users/dengxianshun/development/flutter" +export "FLUTTER_APPLICATION_PATH=/Users/dengxianshun/pluginspace/flutter.plugins/packages/audiofileplayer/example" +export "FLUTTER_TARGET=lib/main.dart" +export "FLUTTER_BUILD_DIR=build" +export "SYMROOT=${SOURCE_ROOT}/../build/ios" +export "OTHER_LDFLAGS=$(inherited) -framework Flutter" +export "FLUTTER_FRAMEWORK_DIR=/Users/dengxianshun/development/flutter/bin/cache/artifacts/engine/ios" +export "FLUTTER_BUILD_NAME=1.0.0" +export "FLUTTER_BUILD_NUMBER=1" +export "DART_OBFUSCATION=false" +export "TRACK_WIDGET_CREATION=false" +export "TREE_SHAKE_ICONS=false" +export "PACKAGE_CONFIG=.packages" diff --git a/packages/audiofileplayer/example/ios/Runner.xcodeproj/project.pbxproj b/packages/audiofileplayer/example/ios/Runner.xcodeproj/project.pbxproj index 1da2f11..14c74cf 100644 --- a/packages/audiofileplayer/example/ios/Runner.xcodeproj/project.pbxproj +++ b/packages/audiofileplayer/example/ios/Runner.xcodeproj/project.pbxproj @@ -16,6 +16,7 @@ 97C146FC1CF9000F007C117D /* Main.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 97C146FA1CF9000F007C117D /* Main.storyboard */; }; 97C146FE1CF9000F007C117D /* Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = 97C146FD1CF9000F007C117D /* Assets.xcassets */; }; 97C147011CF9000F007C117D /* LaunchScreen.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 97C146FF1CF9000F007C117D /* LaunchScreen.storyboard */; }; + 997AD79126329BE4001D2FCD /* ios in Resources */ = {isa = PBXBuildFile; fileRef = 997AD79026329BE4001D2FCD /* ios */; }; /* End PBXBuildFile section */ /* Begin PBXCopyFilesBuildPhase section */ @@ -48,6 +49,7 @@ 97C146FD1CF9000F007C117D /* Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = Assets.xcassets; sourceTree = ""; }; 97C147001CF9000F007C117D /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/LaunchScreen.storyboard; 
sourceTree = ""; }; 97C147021CF9000F007C117D /* Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist.xml; path = Info.plist; sourceTree = ""; }; + 997AD79026329BE4001D2FCD /* ios */ = {isa = PBXFileReference; lastKnownFileType = folder; name = ios; path = ../../../ios; sourceTree = ""; }; C1B130F899C72F55F2976942 /* Pods-Runner.debug.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-Runner.debug.xcconfig"; path = "Pods/Target Support Files/Pods-Runner/Pods-Runner.debug.xcconfig"; sourceTree = ""; }; EC8F38AE54ABD5EB70CDAE54 /* Pods-Runner.release.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-Runner.release.xcconfig"; path = "Pods/Target Support Files/Pods-Runner/Pods-Runner.release.xcconfig"; sourceTree = ""; }; /* End PBXFileReference section */ @@ -107,6 +109,7 @@ 97C146F01CF9000F007C117D /* Runner */ = { isa = PBXGroup; children = ( + 997AD79026329BE4001D2FCD /* ios */, 7AFFD8ED1D35381100E5BB4D /* AppDelegate.h */, 7AFFD8EE1D35381100E5BB4D /* AppDelegate.m */, 97C146FA1CF9000F007C117D /* Main.storyboard */, @@ -150,6 +153,7 @@ 97C146EC1CF9000F007C117D /* Resources */, 9705A1C41CF9048500538489 /* Embed Frameworks */, 3B06AD1E1E4923F5004D2608 /* Thin Binary */, + 819ED315A953D3AAA113DCE2 /* [CP] Embed Pods Frameworks */, ); buildRules = ( ); @@ -203,6 +207,7 @@ isa = PBXResourcesBuildPhase; buildActionMask = 2147483647; files = ( + 997AD79126329BE4001D2FCD /* ios in Resources */, 97C147011CF9000F007C117D /* LaunchScreen.storyboard in Resources */, 3B3967161E833CAA004F5970 /* AppFrameworkInfo.plist in Resources */, 9740EEB41CF90195004384FC /* Debug.xcconfig in Resources */, @@ -246,6 +251,24 @@ shellScript = "diff \"${PODS_PODFILE_DIR_PATH}/Podfile.lock\" \"${PODS_ROOT}/Manifest.lock\" > /dev/null\nif [ $? != 0 ] ; then\n # print error to STDERR\n echo \"error: The sandbox is not in sync with the Podfile.lock. 
Run 'pod install' or update your CocoaPods installation.\" >&2\n exit 1\nfi\n# This output is used by Xcode 'outputs' to avoid re-running this script phase.\necho \"SUCCESS\" > \"${SCRIPT_OUTPUT_FILE_0}\"\n"; showEnvVarsInLog = 0; }; + 819ED315A953D3AAA113DCE2 /* [CP] Embed Pods Frameworks */ = { + isa = PBXShellScriptBuildPhase; + buildActionMask = 2147483647; + files = ( + ); + inputPaths = ( + "${PODS_ROOT}/Target Support Files/Pods-Runner/Pods-Runner-frameworks.sh", + "${PODS_ROOT}/../Flutter/Flutter.framework", + ); + name = "[CP] Embed Pods Frameworks"; + outputPaths = ( + "${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}/Flutter.framework", + ); + runOnlyForDeploymentPostprocessing = 0; + shellPath = /bin/sh; + shellScript = "\"${PODS_ROOT}/Target Support Files/Pods-Runner/Pods-Runner-frameworks.sh\"\n"; + showEnvVarsInLog = 0; + }; 9740EEB61CF901F6004384FC /* Run Script */ = { isa = PBXShellScriptBuildPhase; buildActionMask = 2147483647; diff --git a/packages/audiofileplayer/example/ios/Runner.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist b/packages/audiofileplayer/example/ios/Runner.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist new file mode 100644 index 0000000..18d9810 --- /dev/null +++ b/packages/audiofileplayer/example/ios/Runner.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist @@ -0,0 +1,8 @@ + + + + + IDEDidComputeMac32BitWarning + + + diff --git a/packages/audiofileplayer/ios/Classes/AudiofileplayerPlugin.m b/packages/audiofileplayer/ios/Classes/AudiofileplayerPlugin.m index 1671aca..fec6e88 100644 --- a/packages/audiofileplayer/ios/Classes/AudiofileplayerPlugin.m +++ b/packages/audiofileplayer/ios/Classes/AudiofileplayerPlugin.m @@ -25,6 +25,7 @@ static NSString *const kOnPositionCallback = @"onPosition"; static NSString *const kPositionSeconds = @"position_seconds"; static NSString *const kStopBackgroundDisplay = @"stopBackgroundDisplay"; +static NSString *const kHideBackgroundDisplay = @"hideBackgroundDisplay"; static NSString *const 
kErrorCode = @"AudioPluginError"; static NSString *const kAudioCategoryMethod = @"iosAudioCategory"; @@ -32,6 +33,7 @@ static NSString *const kAudioCategoryAmbientSolo = @"iosAudioCategoryAmbientSolo"; static NSString *const kAudioCategoryAmbientMixed = @"iosAudioCategoryAmbientMixed"; static NSString *const kAudioCategoryPlayback = @"iosAudioCategoryPlayback"; +static NSString *const kAudioCategoryPlayAndRecord = @"iosAudioCategoryPlayAndRecord"; // static NSString *const kSetPlaybackStateMethod = @"setPlaybackState"; @@ -71,444 +73,454 @@ @interface AudiofileplayerPlugin () @end @implementation AudiofileplayerPlugin { - NSObject *_registrar; - FlutterMethodChannel *_channel; - NSMutableDictionary *_playersDict; - NSMutableDictionary *_nowPlayingInfo; + NSObject *_registrar; + FlutterMethodChannel *_channel; + NSMutableDictionary *_playersDict; + NSMutableDictionary *_nowPlayingInfo; } + (void)registerWithRegistrar:(NSObject *)registrar { - FlutterMethodChannel *channel = - [FlutterMethodChannel methodChannelWithName:kChannel binaryMessenger:[registrar messenger]]; - AudiofileplayerPlugin *instance = - [[AudiofileplayerPlugin alloc] initWithRegistrar:registrar channel:channel]; - [registrar addMethodCallDelegate:instance channel:channel]; - [registrar addApplicationDelegate:instance]; + FlutterMethodChannel *channel = + [FlutterMethodChannel methodChannelWithName:kChannel binaryMessenger:[registrar messenger]]; + AudiofileplayerPlugin *instance = + [[AudiofileplayerPlugin alloc] initWithRegistrar:registrar channel:channel]; + [registrar addMethodCallDelegate:instance channel:channel]; + [registrar addApplicationDelegate:instance]; } - (instancetype)initWithRegistrar:(NSObject *)registrar channel:(FlutterMethodChannel *)channel { - self = [super init]; - if (self) { - _registrar = registrar; - _channel = channel; - _playersDict = [NSMutableDictionary dictionary]; - _nowPlayingInfo = [NSMutableDictionary dictionary]; - [self addCommandHandlers]; - [self 
disableCommandHandlers]; - // Set audio category to initial default of 'playback'. - [[AVAudioSession sharedInstance] setCategory:AVAudioSessionCategoryPlayback error:nil]; - } - return self; + self = [super init]; + if (self) { + _registrar = registrar; + _channel = channel; + _playersDict = [NSMutableDictionary dictionary]; + _nowPlayingInfo = [NSMutableDictionary dictionary]; + [self addCommandHandlers]; + [self disableCommandHandlers]; + // Set audio category to initial default of 'playback'. + [[AVAudioSession sharedInstance] setCategory:AVAudioSessionCategoryPlayback error:nil]; + } + return self; } - (void)dealloc { - [self removeCommandHandlers]; + [self removeCommandHandlers]; } - (void)handleMethodCall:(FlutterMethodCall *)call result:(FlutterResult)result { - NSLog(@"handleMethodCall: method = %@", call.method); - - if ([call.method isEqualToString:kLoadMethod]) { - // Loading an audio instance. - [self handleLoadWithCall:call result:result]; - return; - } else if ([call.method isEqualToString:kAudioCategoryMethod]) { - // Setting the audio category. - NSString *categoryString = call.arguments[kAudioCategory]; - AVAudioSessionCategory category; - if ([categoryString isEqualToString:kAudioCategoryAmbientSolo]) { - category = AVAudioSessionCategorySoloAmbient; - } else if ([categoryString isEqualToString:kAudioCategoryAmbientMixed]) { - category = AVAudioSessionCategoryAmbient; + NSLog(@"handleMethodCall: method = %@", call.method); + + if ([call.method isEqualToString:kLoadMethod]) { + // Loading an audio instance. + [self handleLoadWithCall:call result:result]; + return; + } else if ([call.method isEqualToString:kAudioCategoryMethod]) { + // Setting the audio category. 
+ NSString *categoryString = call.arguments[kAudioCategory]; + AVAudioSessionCategory category; + if ([categoryString isEqualToString:kAudioCategoryPlayAndRecord]) { + category = AVAudioSessionCategoryPlayAndRecord; + [[AVAudioSession sharedInstance] setCategory:category withOptions:AVAudioSessionCategoryOptionMixWithOthers error:nil]; + [[AVAudioSession sharedInstance] overrideOutputAudioPort:AVAudioSessionPortOverrideSpeaker error:nil]; + result(nil); + return; + } + if ([categoryString isEqualToString:kAudioCategoryAmbientSolo]) { + category = AVAudioSessionCategorySoloAmbient; + } else if ([categoryString isEqualToString:kAudioCategoryAmbientMixed]) { + category = AVAudioSessionCategoryAmbient; + } else if ([categoryString isEqualToString:kAudioCategoryPlayback]) { + category = AVAudioSessionCategoryPlayback; + } + [[AVAudioSession sharedInstance] setCategory:category error:nil]; + result(nil); + return; + } else if ([call.method isEqualToString:kSetPlaybackStateMethod]) { + [self updateNowPlayingInfoFromPlaybackState:call.arguments]; + result(nil); + return; + } else if ([call.method isEqualToString:kSetMetadataMethod]) { + [self updateNowPlayingInfoFromMetadata:call.arguments]; + result(nil); + return; + } else if ([call.method isEqualToString:kSetSupportedMediaActionsMethod]) { + NSArray *mediaActionTypes = call.arguments[kMediaActions]; + NSNumber *skipIntervalNumber = call.arguments[kMediaSkipIntervalSeconds]; + [self enableCommandHandlersFromMediaActionTypes:mediaActionTypes + skipInterval:skipIntervalNumber]; + result(nil); + return; + } else if ([call.method isEqualToString:kStopBackgroundDisplay]) { + // Clear now playing info and all command handlers. 
+ [_nowPlayingInfo removeAllObjects]; + MPNowPlayingInfoCenter.defaultCenter.nowPlayingInfo = _nowPlayingInfo; + [self disableCommandHandlers]; + result(nil); + return; + } else if ([call.method isEqualToString:kHideBackgroundDisplay]) { + [self removeCommandHandlers]; + result(nil); + return; } - if ([categoryString isEqualToString:kAudioCategoryPlayback]) { - category = AVAudioSessionCategoryPlayback; + + // All subsequent calls need a valid player. + NSString *audioId = call.arguments[@"audioId"]; + if (!audioId) { + result([FlutterError + errorWithCode:kErrorCode + message:[NSString + stringWithFormat:@"Received %@ call without an audioId", call.method] + details:nil]); + return; } - [[AVAudioSession sharedInstance] setCategory:category error:nil]; - result(nil); - return; - } else if ([call.method isEqualToString:kSetPlaybackStateMethod]) { - [self updateNowPlayingInfoFromPlaybackState:call.arguments]; - result(nil); - return; - } else if ([call.method isEqualToString:kSetMetadataMethod]) { - [self updateNowPlayingInfoFromMetadata:call.arguments]; - result(nil); - return; - } else if ([call.method isEqualToString:kSetSupportedMediaActionsMethod]) { - NSArray *mediaActionTypes = call.arguments[kMediaActions]; - NSNumber *skipIntervalNumber = call.arguments[kMediaSkipIntervalSeconds]; - [self enableCommandHandlersFromMediaActionTypes:mediaActionTypes - skipInterval:skipIntervalNumber]; - result(nil); - return; - } else if ([call.method isEqualToString:kStopBackgroundDisplay]) { - // Clear now playing info and all command handlers. - [_nowPlayingInfo removeAllObjects]; - MPNowPlayingInfoCenter.defaultCenter.nowPlayingInfo = _nowPlayingInfo; - [self disableCommandHandlers]; - result(nil); - return; - } - - // All subsequent calls need a valid player. 
- NSString *audioId = call.arguments[@"audioId"]; - if (!audioId) { - result([FlutterError - errorWithCode:kErrorCode - message:[NSString - stringWithFormat:@"Received %@ call without an audioId", call.method] - details:nil]); - return; - } - FLTManagedPlayer *player = _playersDict[audioId]; - if (!player) { - result([FlutterError - errorWithCode:kErrorCode - message:[NSString stringWithFormat:@"Called %@ on an unloaded player: %@", - call.method, audioId] - details:nil]); - return; - } - - if ([call.method isEqualToString:kPlayMethod]) { - bool playFromStart = [call.arguments[kPlayFromStart] boolValue]; - NSNumber *endpointSecondsNumber = call.arguments[kEndpointSeconds]; - NSTimeInterval endpoint = + FLTManagedPlayer *player = _playersDict[audioId]; + if (!player) { + result([FlutterError + errorWithCode:kErrorCode + message:[NSString stringWithFormat:@"Called %@ on an unloaded player: %@", + call.method, audioId] + details:nil]); + return; + } + + if ([call.method isEqualToString:kPlayMethod]) { + bool playFromStart = [call.arguments[kPlayFromStart] boolValue]; + NSNumber *endpointSecondsNumber = call.arguments[kEndpointSeconds]; + NSTimeInterval endpoint = endpointSecondsNumber ? 
[endpointSecondsNumber doubleValue] : FLTManagedPlayerPlayToEnd; - [player play:playFromStart endpoint:endpoint]; - result(nil); - } else if ([call.method isEqualToString:kReleaseMethod]) { - [player releasePlayer]; - [_playersDict removeObjectForKey:audioId]; - result(nil); - } else if ([call.method isEqualToString:kSeekMethod]) { - NSTimeInterval position = [call.arguments[kPositionSeconds] doubleValue]; - [player seek:position - completionHandler:^() { - result(nil); + [player play:playFromStart endpoint:endpoint]; + result(nil); + } else if ([call.method isEqualToString:kReleaseMethod]) { + [player releasePlayer]; + [_playersDict removeObjectForKey:audioId]; + result(nil); + } else if ([call.method isEqualToString:kSeekMethod]) { + NSTimeInterval position = [call.arguments[kPositionSeconds] doubleValue]; + [player seek:position + completionHandler:^() { + result(nil); }]; - } else if ([call.method isEqualToString:kSetVolumeMethod]) { - double volume = [call.arguments[kVolume] doubleValue]; - [player setVolume:volume]; - result(nil); - } else if ([call.method isEqualToString:kPauseMethod]) { - [player pause]; - result(nil); - } else { - result(FlutterMethodNotImplemented); - } + } else if ([call.method isEqualToString:kSetVolumeMethod]) { + double volume = [call.arguments[kVolume] doubleValue]; + [player setVolume:volume]; + result(nil); + } else if ([call.method isEqualToString:kPauseMethod]) { + [player pause]; + result(nil); + } else { + result(FlutterMethodNotImplemented); + } } - (void)handleLoadWithCall:(FlutterMethodCall *)call result:(FlutterResult)result { - NSString *audioId = call.arguments[kAudioId]; - if (!audioId) { - result([FlutterError errorWithCode:kErrorCode - message:@"Received load call without an audioId" - details:nil]); - return; - } - if (_playersDict[audioId]) { - result([FlutterError - errorWithCode:kErrorCode - message:[NSString - stringWithFormat:@"Tried to load an already-loaded player: %@", audioId] - details:nil]); - return; - } - 
- bool isLooping = [call.arguments[kLooping] boolValue]; - - FLTManagedPlayer *player = nil; - if (call.arguments[kFlutterPath] != [NSNull null]) { - NSString *flutterPath = call.arguments[kFlutterPath]; - NSString *key = [_registrar lookupKeyForAsset:flutterPath]; - NSString *path = [[NSBundle mainBundle] pathForResource:key ofType:nil]; - if (!path) { - result([FlutterError - errorWithCode:kErrorCode - message:[NSString stringWithFormat: - @"Could not get path for flutter asset %@ for audio %@ ", - flutterPath, audioId] - details:nil]); - return; + NSString *audioId = call.arguments[kAudioId]; + if (!audioId) { + result([FlutterError errorWithCode:kErrorCode + message:@"Received load call without an audioId" + details:nil]); + return; } - player = [[FLTManagedPlayer alloc] initWithAudioId:audioId - path:path - delegate:self - isLooping:isLooping]; - _playersDict[audioId] = player; - result(nil); - } else if (call.arguments[kAbsolutePath] != [NSNull null]) { - NSString *absolutePath = call.arguments[kAbsolutePath]; - player = [[FLTManagedPlayer alloc] initWithAudioId:audioId - path:absolutePath - delegate:self - isLooping:isLooping]; - if (!player) { - result([FlutterError - errorWithCode:kErrorCode + if (_playersDict[audioId]) { + result([FlutterError + errorWithCode:kErrorCode message:[NSString - stringWithFormat:@"Could not load from absolute path %@ for audio %@ ", - absolutePath, audioId] - details:nil]); - return; - } - _playersDict[audioId] = player; - result(nil); - } else if (call.arguments[kAudioBytes] != [NSNull null]) { - FlutterStandardTypedData *flutterData = call.arguments[kAudioBytes]; - player = [[FLTManagedPlayer alloc] initWithAudioId:audioId - data:[flutterData data] - delegate:self - isLooping:isLooping]; - if (!player) { - result([FlutterError - errorWithCode:kErrorCode - message:[NSString stringWithFormat:@"Could not load from audio bytes for audio %@ ", - audioId] + stringWithFormat:@"Tried to load an already-loaded player: %@", audioId] 
details:nil]); - return; + return; } - _playersDict[audioId] = player; - result(nil); - } else if (call.arguments[kRemoteUrl] != [NSNull null]) { - NSString *urlString = call.arguments[kRemoteUrl]; - // Load player, but wait for remote loading to succeed/fail before returning the methodCall. - __weak AudiofileplayerPlugin *weakSelf = self; - player = [[FLTManagedPlayer alloc] - initWithAudioId:audioId - remoteUrl:urlString - delegate:self - isLooping:isLooping - remoteLoadHandler:^(BOOL success) { - if (success) { - result(nil); - } else { - AudiofileplayerPlugin *strongSelf = weakSelf; - if (strongSelf) { - [strongSelf->_playersDict removeObjectForKey:audioId]; - } + + bool isLooping = [call.arguments[kLooping] boolValue]; + + FLTManagedPlayer *player = nil; + if (call.arguments[kFlutterPath] != [NSNull null]) { + NSString *flutterPath = call.arguments[kFlutterPath]; + NSString *key = [_registrar lookupKeyForAsset:flutterPath]; + NSString *path = [[NSBundle mainBundle] pathForResource:key ofType:nil]; + if (!path) { result([FlutterError - errorWithCode:kErrorCode - message:[NSString - stringWithFormat:@"Could not load remote URL %@ for player %@", - urlString, audioId] - details:nil]); - } + errorWithCode:kErrorCode + message:[NSString stringWithFormat: + @"Could not get path for flutter asset %@ for audio %@ ", + flutterPath, audioId] + details:nil]); + return; + } + player = [[FLTManagedPlayer alloc] initWithAudioId:audioId + path:path + delegate:self + isLooping:isLooping]; + _playersDict[audioId] = player; + result(nil); + } else if (call.arguments[kAbsolutePath] != [NSNull null]) { + NSString *absolutePath = call.arguments[kAbsolutePath]; + player = [[FLTManagedPlayer alloc] initWithAudioId:audioId + path:absolutePath + delegate:self + isLooping:isLooping]; + if (!player) { + result([FlutterError + errorWithCode:kErrorCode + message:[NSString + stringWithFormat:@"Could not load from absolute path %@ for audio %@ ", + absolutePath, audioId] + details:nil]); + 
return; + } + _playersDict[audioId] = player; + result(nil); + } else if (call.arguments[kAudioBytes] != [NSNull null]) { + FlutterStandardTypedData *flutterData = call.arguments[kAudioBytes]; + player = [[FLTManagedPlayer alloc] initWithAudioId:audioId + data:[flutterData data] + delegate:self + isLooping:isLooping]; + if (!player) { + result([FlutterError + errorWithCode:kErrorCode + message:[NSString stringWithFormat:@"Could not load from audio bytes for audio %@ ", + audioId] + details:nil]); + return; + } + _playersDict[audioId] = player; + result(nil); + } else if (call.arguments[kRemoteUrl] != [NSNull null]) { + NSString *urlString = call.arguments[kRemoteUrl]; + // Load player, but wait for remote loading to succeed/fail before returning the methodCall. + __weak AudiofileplayerPlugin *weakSelf = self; + player = [[FLTManagedPlayer alloc] + initWithAudioId:audioId + remoteUrl:urlString + delegate:self + isLooping:isLooping + remoteLoadHandler:^(BOOL success) { + if (success) { + result(nil); + } else { + AudiofileplayerPlugin *strongSelf = weakSelf; + if (strongSelf) { + [strongSelf->_playersDict removeObjectForKey:audioId]; + } + result([FlutterError + errorWithCode:kErrorCode + message:[NSString + stringWithFormat:@"Could not load remote URL %@ for player %@", + urlString, audioId] + details:nil]); + } }]; - // Put AVPlayer into dictionary syncl'y on creation. Will be removed in the remoteLoadHandler - // if remote loading fails. - _playersDict[audioId] = player; - } else { - result([FlutterError errorWithCode:kErrorCode - message:@"Could not create ManagedMediaPlayer with neither " - @"flutterPath nor absolutePath nor audioBytes nor remoteUrl" - details:nil]); - } + // Put AVPlayer into dictionary syncl'y on creation. Will be removed in the remoteLoadHandler + // if remote loading fails. 
+ _playersDict[audioId] = player; + } else { + result([FlutterError errorWithCode:kErrorCode + message:@"Could not create ManagedMediaPlayer with neither " + @"flutterPath nor absolutePath nor audioBytes nor remoteUrl" + details:nil]); + } } #pragma mark - FLTManagedPlayerDelegate - (void)managedPlayerDidFinishPlaying:(NSString *)audioId { - [_channel invokeMethod:kOnCompleteCallback arguments:@{kAudioId : audioId}]; + [_channel invokeMethod:kOnCompleteCallback arguments:@{kAudioId : audioId}]; } - (void)managedPlayerDidUpdatePosition:(NSTimeInterval)position forAudioId:(NSString *)audioId { - [_channel invokeMethod:kOnPositionCallback - arguments:@{ - kAudioId : audioId, - kPositionSeconds : @(position), - }]; + [_channel invokeMethod:kOnPositionCallback + arguments:@{ + kAudioId : audioId, + kPositionSeconds : @(position), + }]; } - (void)managedPlayerDidLoadWithDuration:(NSTimeInterval)duration forAudioId:(NSString *)audioId { - [_channel invokeMethod:kOnDurationCallback - arguments:@{ - kAudioId : audioId, - kDurationSeconds : @(duration), - }]; + [_channel invokeMethod:kOnDurationCallback + arguments:@{ + kAudioId : audioId, + kDurationSeconds : @(duration), + }]; } #pragma mark - MPRemoteCommandCenter targets - (void)addCommandHandlers { - MPRemoteCommandCenter *remoteCommandCenter = [MPRemoteCommandCenter sharedCommandCenter]; - [remoteCommandCenter.togglePlayPauseCommand addTarget:self - action:@selector(handleTogglePlayPauseCommand:)]; - [remoteCommandCenter.playCommand addTarget:self action:@selector(handlePlayCommand:)]; - [remoteCommandCenter.pauseCommand addTarget:self action:@selector(handlePauseCommand:)]; - [remoteCommandCenter.nextTrackCommand addTarget:self action:@selector(handleNextTrackCommand:)]; - [remoteCommandCenter.previousTrackCommand addTarget:self - action:@selector(handlePreviousTrackCommand:)]; - [remoteCommandCenter.seekForwardCommand addTarget:self - action:@selector(handleSeekForwardCommand:)]; - 
[remoteCommandCenter.seekBackwardCommand addTarget:self - action:@selector(handleSeekBackwardCommand:)]; - [remoteCommandCenter.changePlaybackPositionCommand - addTarget:self - action:@selector(handleChangePlaybackPositionCommand:)]; - [remoteCommandCenter.skipForwardCommand addTarget:self - action:@selector(handleSkipForwardCommand:)]; - [remoteCommandCenter.skipBackwardCommand addTarget:self - action:@selector(handleSkipBackwardCommand:)]; + MPRemoteCommandCenter *remoteCommandCenter = [MPRemoteCommandCenter sharedCommandCenter]; + [remoteCommandCenter.togglePlayPauseCommand addTarget:self + action:@selector(handleTogglePlayPauseCommand:)]; + [remoteCommandCenter.playCommand addTarget:self action:@selector(handlePlayCommand:)]; + [remoteCommandCenter.pauseCommand addTarget:self action:@selector(handlePauseCommand:)]; + [remoteCommandCenter.nextTrackCommand addTarget:self action:@selector(handleNextTrackCommand:)]; + [remoteCommandCenter.previousTrackCommand addTarget:self + action:@selector(handlePreviousTrackCommand:)]; + [remoteCommandCenter.seekForwardCommand addTarget:self + action:@selector(handleSeekForwardCommand:)]; + [remoteCommandCenter.seekBackwardCommand addTarget:self + action:@selector(handleSeekBackwardCommand:)]; + [remoteCommandCenter.changePlaybackPositionCommand + addTarget:self + action:@selector(handleChangePlaybackPositionCommand:)]; + [remoteCommandCenter.skipForwardCommand addTarget:self + action:@selector(handleSkipForwardCommand:)]; + [remoteCommandCenter.skipBackwardCommand addTarget:self + action:@selector(handleSkipBackwardCommand:)]; } - (void)disableCommandHandlers { - MPRemoteCommandCenter *remoteCommandCenter = [MPRemoteCommandCenter sharedCommandCenter]; - remoteCommandCenter.togglePlayPauseCommand.enabled = NO; - remoteCommandCenter.playCommand.enabled = NO; - remoteCommandCenter.pauseCommand.enabled = NO; - remoteCommandCenter.nextTrackCommand.enabled = NO; - remoteCommandCenter.previousTrackCommand.enabled = NO; - 
remoteCommandCenter.seekForwardCommand.enabled = NO; - remoteCommandCenter.seekBackwardCommand.enabled = NO; - remoteCommandCenter.changePlaybackPositionCommand.enabled = NO; - remoteCommandCenter.skipForwardCommand.enabled = NO; - remoteCommandCenter.skipBackwardCommand.enabled = NO; + MPRemoteCommandCenter *remoteCommandCenter = [MPRemoteCommandCenter sharedCommandCenter]; + remoteCommandCenter.togglePlayPauseCommand.enabled = NO; + remoteCommandCenter.playCommand.enabled = NO; + remoteCommandCenter.pauseCommand.enabled = NO; + remoteCommandCenter.nextTrackCommand.enabled = NO; + remoteCommandCenter.previousTrackCommand.enabled = NO; + remoteCommandCenter.seekForwardCommand.enabled = NO; + remoteCommandCenter.seekBackwardCommand.enabled = NO; + remoteCommandCenter.changePlaybackPositionCommand.enabled = NO; + remoteCommandCenter.skipForwardCommand.enabled = NO; + remoteCommandCenter.skipBackwardCommand.enabled = NO; } - (void)removeCommandHandlers { - MPRemoteCommandCenter *remoteCommandCenter = [MPRemoteCommandCenter sharedCommandCenter]; - [remoteCommandCenter.togglePlayPauseCommand removeTarget:self]; - [remoteCommandCenter.playCommand removeTarget:self]; - [remoteCommandCenter.pauseCommand removeTarget:self]; - [remoteCommandCenter.nextTrackCommand removeTarget:self]; - [remoteCommandCenter.previousTrackCommand removeTarget:self]; - [remoteCommandCenter.seekForwardCommand removeTarget:self]; - [remoteCommandCenter.seekBackwardCommand removeTarget:self]; - [remoteCommandCenter.changePlaybackPositionCommand removeTarget:self]; - [remoteCommandCenter.skipForwardCommand removeTarget:self]; - [remoteCommandCenter.skipBackwardCommand removeTarget:self]; + MPRemoteCommandCenter *remoteCommandCenter = [MPRemoteCommandCenter sharedCommandCenter]; + [remoteCommandCenter.togglePlayPauseCommand removeTarget:self]; + [remoteCommandCenter.playCommand removeTarget:self]; + [remoteCommandCenter.pauseCommand removeTarget:self]; + [remoteCommandCenter.nextTrackCommand 
removeTarget:self]; + [remoteCommandCenter.previousTrackCommand removeTarget:self]; + [remoteCommandCenter.seekForwardCommand removeTarget:self]; + [remoteCommandCenter.seekBackwardCommand removeTarget:self]; + [remoteCommandCenter.changePlaybackPositionCommand removeTarget:self]; + [remoteCommandCenter.skipForwardCommand removeTarget:self]; + [remoteCommandCenter.skipBackwardCommand removeTarget:self]; } - (void)enableCommandHandlersFromMediaActionTypes:(NSArray *)mediaActionTypes skipInterval:(NSNumber *)skipIntervalNumber { - NSLog(@"mediaActionTypes: %@", mediaActionTypes); - MPRemoteCommandCenter *remoteCommandCenter = [MPRemoteCommandCenter sharedCommandCenter]; - remoteCommandCenter.togglePlayPauseCommand.enabled = - [mediaActionTypes containsObject:kMediaPlayPause]; - remoteCommandCenter.playCommand.enabled = [mediaActionTypes containsObject:kMediaPlay]; - remoteCommandCenter.pauseCommand.enabled = [mediaActionTypes containsObject:kMediaPause]; - remoteCommandCenter.nextTrackCommand.enabled = [mediaActionTypes containsObject:kMediaNext]; - remoteCommandCenter.previousTrackCommand.enabled = - [mediaActionTypes containsObject:kMediaPrevious]; - remoteCommandCenter.seekForwardCommand.enabled = - [mediaActionTypes containsObject:kMediaSeekForward]; - remoteCommandCenter.seekBackwardCommand.enabled = - [mediaActionTypes containsObject:kMediaSeekBackward]; - remoteCommandCenter.changePlaybackPositionCommand.enabled = - [mediaActionTypes containsObject:kMediaSeekTo]; - remoteCommandCenter.skipForwardCommand.enabled = - [mediaActionTypes containsObject:kMediaSkipForward]; - remoteCommandCenter.skipBackwardCommand.enabled = - [mediaActionTypes containsObject:kMediaSkipBackward]; - if (skipIntervalNumber) { - remoteCommandCenter.skipForwardCommand.preferredIntervals = @[ skipIntervalNumber ]; - remoteCommandCenter.skipBackwardCommand.preferredIntervals = @[ skipIntervalNumber ]; - } + NSLog(@"mediaActionTypes: %@", mediaActionTypes); + MPRemoteCommandCenter 
*remoteCommandCenter = [MPRemoteCommandCenter sharedCommandCenter]; + remoteCommandCenter.togglePlayPauseCommand.enabled = + [mediaActionTypes containsObject:kMediaPlayPause]; + remoteCommandCenter.playCommand.enabled = [mediaActionTypes containsObject:kMediaPlay]; + remoteCommandCenter.pauseCommand.enabled = [mediaActionTypes containsObject:kMediaPause]; + remoteCommandCenter.nextTrackCommand.enabled = [mediaActionTypes containsObject:kMediaNext]; + remoteCommandCenter.previousTrackCommand.enabled = + [mediaActionTypes containsObject:kMediaPrevious]; + remoteCommandCenter.seekForwardCommand.enabled = + [mediaActionTypes containsObject:kMediaSeekForward]; + remoteCommandCenter.seekBackwardCommand.enabled = + [mediaActionTypes containsObject:kMediaSeekBackward]; + remoteCommandCenter.changePlaybackPositionCommand.enabled = + [mediaActionTypes containsObject:kMediaSeekTo]; + remoteCommandCenter.skipForwardCommand.enabled = + [mediaActionTypes containsObject:kMediaSkipForward]; + remoteCommandCenter.skipBackwardCommand.enabled = + [mediaActionTypes containsObject:kMediaSkipBackward]; + if (skipIntervalNumber) { + remoteCommandCenter.skipForwardCommand.preferredIntervals = @[ skipIntervalNumber ]; + remoteCommandCenter.skipBackwardCommand.preferredIntervals = @[ skipIntervalNumber ]; + } } - (MPRemoteCommandHandlerStatus)handleTogglePlayPauseCommand: - (MPRemoteCommandEvent *)remoteCommandEvent { - [_channel invokeMethod:kOnMediaEventCallback arguments:@{kMediaEventType : kMediaPlayPause}]; - return MPRemoteCommandHandlerStatusSuccess; +(MPRemoteCommandEvent *)remoteCommandEvent { + [_channel invokeMethod:kOnMediaEventCallback arguments:@{kMediaEventType : kMediaPlayPause}]; + return MPRemoteCommandHandlerStatusSuccess; } - (MPRemoteCommandHandlerStatus)handlePlayCommand:(MPRemoteCommandEvent *)remoteCommandEvent { - [_channel invokeMethod:kOnMediaEventCallback arguments:@{kMediaEventType : kMediaPlay}]; - return MPRemoteCommandHandlerStatusSuccess; + [_channel 
invokeMethod:kOnMediaEventCallback arguments:@{kMediaEventType : kMediaPlay}]; + return MPRemoteCommandHandlerStatusSuccess; } - (MPRemoteCommandHandlerStatus)handlePauseCommand:(MPRemoteCommandEvent *)remoteCommandEvent { - [_channel invokeMethod:kOnMediaEventCallback arguments:@{kMediaEventType : kMediaPause}]; - return MPRemoteCommandHandlerStatusSuccess; + [_channel invokeMethod:kOnMediaEventCallback arguments:@{kMediaEventType : kMediaPause}]; + return MPRemoteCommandHandlerStatusSuccess; } - (MPRemoteCommandHandlerStatus)handleNextTrackCommand:(MPRemoteCommandEvent *)remoteCommandEvent { - [_channel invokeMethod:kOnMediaEventCallback arguments:@{kMediaEventType : kMediaNext}]; - return MPRemoteCommandHandlerStatusSuccess; + [_channel invokeMethod:kOnMediaEventCallback arguments:@{kMediaEventType : kMediaNext}]; + return MPRemoteCommandHandlerStatusSuccess; } - (MPRemoteCommandHandlerStatus)handlePreviousTrackCommand: - (MPRemoteCommandEvent *)remoteCommandEvent { - [_channel invokeMethod:kOnMediaEventCallback arguments:@{kMediaEventType : kMediaPrevious}]; - return MPRemoteCommandHandlerStatusSuccess; +(MPRemoteCommandEvent *)remoteCommandEvent { + [_channel invokeMethod:kOnMediaEventCallback arguments:@{kMediaEventType : kMediaPrevious}]; + return MPRemoteCommandHandlerStatusSuccess; } - (MPRemoteCommandHandlerStatus)handleSeekForwardCommand: - (MPRemoteCommandEvent *)remoteCommandEvent { - [_channel invokeMethod:kOnMediaEventCallback arguments:@{kMediaEventType : kMediaSeekForward}]; - return MPRemoteCommandHandlerStatusSuccess; +(MPRemoteCommandEvent *)remoteCommandEvent { + [_channel invokeMethod:kOnMediaEventCallback arguments:@{kMediaEventType : kMediaSeekForward}]; + return MPRemoteCommandHandlerStatusSuccess; } - (MPRemoteCommandHandlerStatus)handleSeekBackwardCommand: - (MPRemoteCommandEvent *)remoteCommandEvent { - [_channel invokeMethod:kOnMediaEventCallback arguments:@{kMediaEventType : kMediaSeekBackward}]; - return 
MPRemoteCommandHandlerStatusSuccess; +(MPRemoteCommandEvent *)remoteCommandEvent { + [_channel invokeMethod:kOnMediaEventCallback arguments:@{kMediaEventType : kMediaSeekBackward}]; + return MPRemoteCommandHandlerStatusSuccess; } - (MPRemoteCommandHandlerStatus)handleChangePlaybackPositionCommand: - (MPChangePlaybackPositionCommandEvent *)changePlaybackPositionCommandEvent { - NSTimeInterval positionSeconds = changePlaybackPositionCommandEvent.positionTime; - [_channel invokeMethod:kOnMediaEventCallback - arguments:@{ - kMediaEventType : kMediaSeekTo, - kMediaSeekToPositionSeconds : @(positionSeconds) - }]; - return MPRemoteCommandHandlerStatusSuccess; +(MPChangePlaybackPositionCommandEvent *)changePlaybackPositionCommandEvent { + NSTimeInterval positionSeconds = changePlaybackPositionCommandEvent.positionTime; + [_channel invokeMethod:kOnMediaEventCallback + arguments:@{ + kMediaEventType : kMediaSeekTo, + kMediaSeekToPositionSeconds : @(positionSeconds) + }]; + return MPRemoteCommandHandlerStatusSuccess; } - (MPRemoteCommandHandlerStatus)handleSkipForwardCommand: - (MPSkipIntervalCommandEvent *)skipIntervalCommandEvent { - [_channel invokeMethod:kOnMediaEventCallback - arguments:@{ - kMediaEventType : kMediaSkipForward, - kMediaSkipIntervalSeconds : @(skipIntervalCommandEvent.interval) - }]; - return MPRemoteCommandHandlerStatusSuccess; +(MPSkipIntervalCommandEvent *)skipIntervalCommandEvent { + [_channel invokeMethod:kOnMediaEventCallback + arguments:@{ + kMediaEventType : kMediaSkipForward, + kMediaSkipIntervalSeconds : @(skipIntervalCommandEvent.interval) + }]; + return MPRemoteCommandHandlerStatusSuccess; } - (MPRemoteCommandHandlerStatus)handleSkipBackwardCommand: - (MPRemoteCommandEvent *)remoteCommandEvent { - [_channel invokeMethod:kOnMediaEventCallback arguments:@{kMediaEventType : kMediaSkipBackward}]; - return MPRemoteCommandHandlerStatusSuccess; +(MPRemoteCommandEvent *)remoteCommandEvent { + [_channel invokeMethod:kOnMediaEventCallback 
arguments:@{kMediaEventType : kMediaSkipBackward}]; + return MPRemoteCommandHandlerStatusSuccess; } #pragma mark - MPNowPlayingInfoCenter - (void)updateNowPlayingInfoFromPlaybackState:(NSDictionary *)playbackState { - bool isPlaying = [playbackState[kPlaybackIsPlaying] boolValue]; - _nowPlayingInfo[MPNowPlayingInfoPropertyPlaybackRate] = isPlaying ? @(1.0) : @(0.0); - _nowPlayingInfo[MPNowPlayingInfoPropertyElapsedPlaybackTime] = - playbackState[kPlaybackPositionSeconds]; - - MPNowPlayingInfoCenter.defaultCenter.nowPlayingInfo = _nowPlayingInfo; + bool isPlaying = [playbackState[kPlaybackIsPlaying] boolValue]; + _nowPlayingInfo[MPNowPlayingInfoPropertyPlaybackRate] = isPlaying ? @(1.0) : @(0.0); + _nowPlayingInfo[MPNowPlayingInfoPropertyElapsedPlaybackTime] = + playbackState[kPlaybackPositionSeconds]; + + MPNowPlayingInfoCenter.defaultCenter.nowPlayingInfo = _nowPlayingInfo; } - (void)updateNowPlayingInfoFromMetadata:(NSDictionary *)metadata { - _nowPlayingInfo[MPMediaItemPropertyPersistentID] = metadata[kMetadataId]; - _nowPlayingInfo[MPMediaItemPropertyTitle] = metadata[kMetadataTitle]; - _nowPlayingInfo[MPMediaItemPropertyArtist] = metadata[kMetadataArtist]; - _nowPlayingInfo[MPMediaItemPropertyAlbumTitle] = metadata[kMetadataAlbum]; - _nowPlayingInfo[MPMediaItemPropertyGenre] = metadata[kMetadataGenre]; - _nowPlayingInfo[MPMediaItemPropertyPlaybackDuration] = metadata[kMetadataDurationSeconds]; - - if (metadata[kMetadataArtBytes]) { - FlutterStandardTypedData *flutterData = metadata[kMetadataArtBytes]; - NSData *data = [flutterData data]; - UIImage *image = [UIImage imageWithData:data]; - _nowPlayingInfo[MPMediaItemPropertyArtwork] = + _nowPlayingInfo[MPMediaItemPropertyPersistentID] = metadata[kMetadataId]; + _nowPlayingInfo[MPMediaItemPropertyTitle] = metadata[kMetadataTitle]; + _nowPlayingInfo[MPMediaItemPropertyArtist] = metadata[kMetadataArtist]; + _nowPlayingInfo[MPMediaItemPropertyAlbumTitle] = metadata[kMetadataAlbum]; + 
_nowPlayingInfo[MPMediaItemPropertyGenre] = metadata[kMetadataGenre]; + _nowPlayingInfo[MPMediaItemPropertyPlaybackDuration] = metadata[kMetadataDurationSeconds]; + + if (metadata[kMetadataArtBytes]) { + FlutterStandardTypedData *flutterData = metadata[kMetadataArtBytes]; + NSData *data = [flutterData data]; + UIImage *image = [UIImage imageWithData:data]; + _nowPlayingInfo[MPMediaItemPropertyArtwork] = [[MPMediaItemArtwork alloc] initWithBoundsSize:CGSizeMake(200.0, 200.0) requestHandler:^UIImage *(CGSize size) { - return image; - }]; - } else { - _nowPlayingInfo[MPMediaItemPropertyArtwork] = nil; - } - - MPNowPlayingInfoCenter.defaultCenter.nowPlayingInfo = _nowPlayingInfo; + return image; + }]; + } else { + _nowPlayingInfo[MPMediaItemPropertyArtwork] = nil; + } + + MPNowPlayingInfoCenter.defaultCenter.nowPlayingInfo = _nowPlayingInfo; } @end diff --git a/packages/audiofileplayer/ios/Classes/ManagedPlayer.m b/packages/audiofileplayer/ios/Classes/ManagedPlayer.m index fc2ce79..6fc1fbc 100644 --- a/packages/audiofileplayer/ios/Classes/ManagedPlayer.m +++ b/packages/audiofileplayer/ios/Classes/ManagedPlayer.m @@ -53,7 +53,7 @@ - (instancetype)initWithAudioId:(NSString *)audioId if (strongSelf) { if (strongSelf->_audioPlayer.playing) { [strongSelf->_delegate - managedPlayerDidUpdatePosition:_audioPlayer.currentTime + managedPlayerDidUpdatePosition:strongSelf->_audioPlayer.currentTime forAudioId:strongSelf->_audioId]; } } @@ -84,7 +84,7 @@ - (instancetype)initWithAudioId:(NSString *)audioId FLTManagedPlayer *strongSelf = weakSelf; if (strongSelf) { [strongSelf->_avPlayer seekToTime:kCMTimeZero]; - [strongSelf->_delegate managedPlayerDidFinishPlaying:_audioId]; + [strongSelf->_delegate managedPlayerDidFinishPlaying:strongSelf->_audioId]; } }]; [_avPlayer.currentItem addObserver:self @@ -195,7 +195,7 @@ - (void)play:(bool)playFromStart endpoint:(NSTimeInterval)endpoint { FLTManagedPlayer *strongSelf = weakSelf; if (strongSelf) { [strongSelf pause]; - 
[strongSelf->_delegate managedPlayerDidFinishPlaying:_audioId]; + [strongSelf->_delegate managedPlayerDidFinishPlaying:strongSelf->_audioId]; } }]; } @@ -214,7 +214,7 @@ - (void)releasePlayer { } } -- (void)seek:(NSTimeInterval)position completionHandler:(void (^)())completionHandler { +- (void)seek:(NSTimeInterval)position completionHandler:(void (^)(void))completionHandler { if (_audioPlayer) { _audioPlayer.currentTime = position; completionHandler(); diff --git a/packages/audiofileplayer/lib/audio_system.dart b/packages/audiofileplayer/lib/audio_system.dart index 70f6c13..9e1195e 100644 --- a/packages/audiofileplayer/lib/audio_system.dart +++ b/packages/audiofileplayer/lib/audio_system.dart @@ -15,6 +15,7 @@ const String setAndroidMediaButtonsMethod = 'setAndroidMediaButtons'; const String mediaButtonsKey = 'mediaButtons'; const String mediaCompactIndicesKey = 'mediaCompactIndices'; const String stopBackgroundDisplayMethod = 'stopBackgroundDisplay'; +const String hideBackgroundDisplayMethod = 'hideBackgroundDisplay'; // Constants for iOS category. const String iosAudioCategoryMethod = 'iosAudioCategory'; @@ -22,6 +23,7 @@ const String iosAudioCategoryKey = 'iosAudioCategory'; const String iosAudioCategoryAmbientSolo = 'iosAudioCategoryAmbientSolo'; const String iosAudioCategoryAmbientMixed = 'iosAudioCategoryAmbientMixed'; const String iosAudioCategoryPlayback = 'iosAudioCategoryPlayback'; +const String iosAudioCategoryPlayAndRecord = 'iosAudioCategoryPlayAndRecord'; // Constants for [setPlaybackState]. const String setPlaybackStateMethod = 'setPlaybackState'; @@ -62,7 +64,10 @@ enum IosAudioCategory { /// with other apps' audio. /// /// The default value. - playback + playback, + + /*! Use this category when recording and playing back audio. 
*/ + playAndRecord, } /// A button to be added to the Android notification display via @@ -85,8 +90,7 @@ enum AndroidMediaButtonType { /// A custom button to be added to the Android notification display via /// [setAndroidMediaButtons]. class AndroidCustomMediaButton { - const AndroidCustomMediaButton( - this.title, this.eventId, this.drawableResource); + const AndroidCustomMediaButton(this.title, this.eventId, this.drawableResource); final String title; @@ -133,14 +137,11 @@ class AudioSystem { static AudioSystem get instance => _instance; /// Send media events to the client for handling. - final Set> _mediaEventListeners = - >{}; + final Set> _mediaEventListeners = >{}; - void addMediaEventListener(ValueChanged listener) => - _mediaEventListeners.add(listener); + void addMediaEventListener(ValueChanged listener) => _mediaEventListeners.add(listener); - void removeMediaEventListener(ValueChanged listener) => - _mediaEventListeners.remove(listener); + void removeMediaEventListener(ValueChanged listener) => _mediaEventListeners.remove(listener); /// Inform the OS's background audio system about the playback state; used to /// set the progress bar in lockscreen/notification. @@ -151,11 +152,7 @@ class AudioSystem { /// every second). 
void setPlaybackState(bool isPlaying, double positionSeconds) async { try { - await audioMethodChannel.invokeMethod( - setPlaybackStateMethod, { - playbackIsPlayingKey: isPlaying, - playbackPositionSeconds: positionSeconds - }); + await audioMethodChannel.invokeMethod(setPlaybackStateMethod, {playbackIsPlayingKey: isPlaying, playbackPositionSeconds: positionSeconds}); } on PlatformException catch (e) { _logger.severe('setPlaybackState error, category: ', e); } @@ -192,8 +189,7 @@ class AudioSystem { metadataMap[metadataArtBytesKey] = metadata.artBytes; } - await audioMethodChannel.invokeMethod( - setMetadataMethod, metadataMap); + await audioMethodChannel.invokeMethod(setMetadataMethod, metadataMap); } on PlatformException catch (e) { _logger.severe('setMetadata error, category: ', e); } @@ -203,10 +199,8 @@ class AudioSystem { /// /// Informs device displays and external controllers (e.g. watch/auto) on /// what controls to display. - void setSupportedMediaActions(Set actions, - {double? skipIntervalSeconds}) async { - const Map mediaActionTypeToString = - { + void setSupportedMediaActions(Set actions, {double? 
skipIntervalSeconds}) async { + const Map mediaActionTypeToString = { MediaActionType.playPause: mediaPlayPause, MediaActionType.pause: mediaPause, MediaActionType.play: mediaPlay, @@ -219,19 +213,15 @@ class AudioSystem { MediaActionType.skipBackward: mediaSkipBackward, }; - final List actionStrings = actions - .map((MediaActionType type) => mediaActionTypeToString[type]!).toList(); + final List actionStrings = actions.map((MediaActionType type) => mediaActionTypeToString[type]!).toList(); - final Map map = { - mediaActionsKey: actionStrings - }; + final Map map = {mediaActionsKey: actionStrings}; if (skipIntervalSeconds != null) { map[mediaSkipIntervalSecondsKey] = skipIntervalSeconds; } - await audioMethodChannel.invokeMethod( - setSupportedMediaActionsMethod, map); + await audioMethodChannel.invokeMethod(setSupportedMediaActionsMethod, map); } /// Specify buttons for display in the Android notification. @@ -244,10 +234,8 @@ class AudioSystem { /// of three. /// /// Only supported on Android; no-op otherwise. - void setAndroidNotificationButtons(List androidMediaButtons, - {List? androidCompactIndices}) async { - const Map androidMediaButtonTypeToString = - { + void setAndroidNotificationButtons(List androidMediaButtons, {List? 
androidCompactIndices}) async { + const Map androidMediaButtonTypeToString = { AndroidMediaButtonType.stop: mediaStop, AndroidMediaButtonType.pause: mediaPause, AndroidMediaButtonType.play: mediaPlay, @@ -260,14 +248,12 @@ class AudioSystem { if (!Platform.isAndroid) return; try { - final List androidMediaButtonsData = - androidMediaButtons.map((dynamic buttonTypeOrCustomButton) { + final List androidMediaButtonsData = androidMediaButtons.map((dynamic buttonTypeOrCustomButton) { if (buttonTypeOrCustomButton is AndroidMediaButtonType) { final AndroidMediaButtonType buttonType = buttonTypeOrCustomButton; return androidMediaButtonTypeToString[buttonType]; } else if (buttonTypeOrCustomButton is AndroidCustomMediaButton) { - final AndroidCustomMediaButton customMediaButton = - buttonTypeOrCustomButton; + final AndroidCustomMediaButton customMediaButton = buttonTypeOrCustomButton; return { mediaCustomTitleKey: customMediaButton.title, mediaCustomEventIdKey: customMediaButton.eventId, @@ -280,11 +266,7 @@ class AudioSystem { } }).toList(); - await audioMethodChannel.invokeMethod( - setAndroidMediaButtonsMethod, { - mediaButtonsKey: androidMediaButtonsData, - mediaCompactIndicesKey: androidCompactIndices - }); + await audioMethodChannel.invokeMethod(setAndroidMediaButtonsMethod, {mediaButtonsKey: androidMediaButtonsData, mediaCompactIndicesKey: androidCompactIndices}); } on PlatformException catch (e) { _logger.severe('setAndroidMediaButtonsMethod error', e); } @@ -309,27 +291,35 @@ class AudioSystem { /// (and its associated Dart process) active. void stopBackgroundDisplay() async { try { - await audioMethodChannel - .invokeMethod(stopBackgroundDisplayMethod); + await audioMethodChannel.invokeMethod(stopBackgroundDisplayMethod); } on PlatformException catch (e) { _logger.severe('stopBackgroundDisplay error', e); } } + /// this hides the supported controls and metadata from the + /// lockscreen/control center. 
+ void hideBackgroundDisplay() async { + try { + await audioMethodChannel.invokeMethod(hideBackgroundDisplayMethod); + } on PlatformException catch (e) { + _logger.severe('hideBackgroundDisplay error', e); + } + } + /// Sets the iOS audio category. /// /// Only supported on iOS; no-op otherwise. Future setIosAudioCategory(IosAudioCategory category) async { - const Map categoryToString = - { + const Map categoryToString = { IosAudioCategory.ambientSolo: iosAudioCategoryAmbientSolo, IosAudioCategory.ambientMixed: iosAudioCategoryAmbientMixed, - IosAudioCategory.playback: iosAudioCategoryPlayback + IosAudioCategory.playback: iosAudioCategoryPlayback, + IosAudioCategory.playAndRecord: iosAudioCategoryPlayAndRecord, }; if (!Platform.isIOS) return; try { - await audioMethodChannel.invokeMethod(iosAudioCategoryMethod, - {iosAudioCategoryKey: categoryToString[category]}); + await audioMethodChannel.invokeMethod(iosAudioCategoryMethod, {iosAudioCategoryKey: categoryToString[category]}); } on PlatformException catch (e) { _logger.severe('setIosAudioCategory error, category: $category', e); } @@ -337,8 +327,7 @@ class AudioSystem { /// Handle the [MethodCall]s from the native implementation layer. void handleNativeMediaEventCallback(Map arguments) { - const Map stringToMediaActionType = - { + const Map stringToMediaActionType = { mediaPause: MediaActionType.pause, mediaPlay: MediaActionType.play, mediaPlayPause: MediaActionType.playPause, @@ -355,22 +344,17 @@ class AudioSystem { final String? mediaEventTypeString = arguments[mediaEventTypeKey]; if (mediaEventTypeString == null) { - _logger - .severe('[arguments] did not contain value for [mediaEventTypeKey]'); + _logger.severe('[arguments] did not contain value for [mediaEventTypeKey]'); return; } final MediaActionType? 
type = stringToMediaActionType[mediaEventTypeString]; if (type == null) { - _logger - .severe('Unknown MediaActionType for string $mediaEventTypeString'); + _logger.severe('Unknown MediaActionType for string $mediaEventTypeString'); return; } - final MediaEvent event = MediaEvent(type, - customEventId: arguments[mediaCustomEventIdKey], - seekToPositionSeconds: arguments[mediaSeekToPositionSecondsKey], - skipIntervalSeconds: arguments[mediaSkipIntervalSecondsKey]); - for (final ValueChanged mediaEventListener - in _mediaEventListeners) { + final MediaEvent event = + MediaEvent(type, customEventId: arguments[mediaCustomEventIdKey], seekToPositionSeconds: arguments[mediaSeekToPositionSecondsKey], skipIntervalSeconds: arguments[mediaSkipIntervalSecondsKey]); + for (final ValueChanged mediaEventListener in _mediaEventListeners) { mediaEventListener(event); } } diff --git a/packages/audiofileplayer/lib/audiofileplayer.dart b/packages/audiofileplayer/lib/audiofileplayer.dart index 315aed7..e1b2ba4 100644 --- a/packages/audiofileplayer/lib/audiofileplayer.dart +++ b/packages/audiofileplayer/lib/audiofileplayer.dart @@ -1,4 +1,5 @@ import 'dart:async'; +import 'dart:io'; import 'dart:typed_data'; import 'dart:ui' show AppLifecycleState; @@ -57,8 +58,7 @@ const String mediaCustomTitleKey = 'customTitle'; const String mediaCustomEventIdKey = 'customEventId'; const String mediaCustomDrawableResourceKey = 'customDrawableResource'; -MethodChannel audioMethodChannel = const MethodChannel(channelName) - ..setMethodCallHandler(Audio.handleMethodCall); +MethodChannel audioMethodChannel = const MethodChannel(channelName)..setMethodCallHandler(Audio.handleMethodCall); /// Specifies an action that the OS's background audio system may support. /// @@ -102,10 +102,7 @@ enum MediaActionType { /// lockscreen/control center, bluetooth controllers, etc) and from Android /// buttons in the notification. 
class MediaEvent { - const MediaEvent(this.type, - {this.customEventId, - this.seekToPositionSeconds, - this.skipIntervalSeconds}); + const MediaEvent(this.type, {this.customEventId, this.seekToPositionSeconds, this.skipIntervalSeconds}); final MediaActionType type; @@ -239,8 +236,7 @@ class MediaEvent { /// and use all the methods in [AudioSystem] to communicate desired state and /// supported behavior to the OS's background audio system. class Audio with WidgetsBindingObserver { - Audio._path(this._path, this._onComplete, this._onDuration, this._onPosition, - this._onError, this._looping, this._playInBackground) + Audio._path(this._path, this._onComplete, this._onDuration, this._onPosition, this._onError, this._looping, this._playInBackground) : _audioId = _uuid.v4(), _absolutePath = null, _audioBytes = null, @@ -248,8 +244,7 @@ class Audio with WidgetsBindingObserver { WidgetsBinding.instance!.addObserver(this); } - Audio._absolutePath(this._absolutePath, this._onComplete, this._onDuration, - this._onPosition, this._onError, this._looping, this._playInBackground) + Audio._absolutePath(this._absolutePath, this._onComplete, this._onDuration, this._onPosition, this._onError, this._looping, this._playInBackground) : _audioId = _uuid.v4(), _path = null, _audioBytes = null, @@ -257,8 +252,7 @@ class Audio with WidgetsBindingObserver { WidgetsBinding.instance!.addObserver(this); } - Audio._byteData(ByteData byteData, this._onComplete, this._onDuration, - this._onPosition, this._onError, this._looping, this._playInBackground) + Audio._byteData(ByteData byteData, this._onComplete, this._onDuration, this._onPosition, this._onError, this._looping, this._playInBackground) : _audioId = _uuid.v4(), _audioBytes = Uint8List.view(byteData.buffer), _path = null, @@ -267,8 +261,7 @@ class Audio with WidgetsBindingObserver { WidgetsBinding.instance!.addObserver(this); } - Audio._remoteUrl(this._remoteUrl, this._onComplete, this._onDuration, - this._onPosition, this._onError, 
this._looping, this._playInBackground) + Audio._remoteUrl(this._remoteUrl, this._onComplete, this._onDuration, this._onPosition, this._onError, this._looping, this._playInBackground) : _audioId = _uuid.v4(), _audioBytes = null, _path = null, @@ -292,15 +285,13 @@ class Audio with WidgetsBindingObserver { // onComplete callback. static final Map _awaitingOnCompleteAudios = {}; @visibleForTesting - static int get awaitingOnCompleteAudiosCount => - _awaitingOnCompleteAudios.length; + static int get awaitingOnCompleteAudiosCount => _awaitingOnCompleteAudios.length; // All Audio objects (including disposed ones), that are awaiting an // onDuration callback. static final Map _awaitingOnDurationAudios = {}; @visibleForTesting - static int get awaitingOnDurationAudiosCount => - _awaitingOnDurationAudios.length; + static int get awaitingOnDurationAudiosCount => _awaitingOnDurationAudios.length; // All Audio objects (including disposed ones), that are using an onPosition // callback. Audios are added on play()/resume() and removed on @@ -344,15 +335,8 @@ class Audio with WidgetsBindingObserver { /// Returns null if asset cannot be loaded. /// Note that it returns an Audio sync'ly, though loading occurs async'ly. static Audio load(String path, - {void onComplete()?, - void onDuration(double duration)?, - void onPosition(double position)?, - void onError(String? message)?, - bool looping = false, - bool playInBackground = false}) { - final Audio audio = Audio._path(path, onComplete, onDuration, onPosition, - onError, looping, playInBackground) - .._load(); + {void onComplete()?, void onDuration(double duration)?, void onPosition(double position)?, void onError(String? message)?, bool looping = false, bool playInBackground = false}) { + final Audio audio = Audio._path(path, onComplete, onDuration, onPosition, onError, looping, playInBackground).._load(); return audio; } @@ -361,15 +345,8 @@ class Audio with WidgetsBindingObserver { /// Returns null if asset cannot be loaded. 
/// Note that it returns an Audio sync'ly, though loading occurs async'ly. static Audio loadFromAbsolutePath(String path, - {void onComplete()?, - void onDuration(double duration)?, - void onPosition(double position)?, - void onError(String? message)?, - bool looping = false, - bool playInBackground = false}) { - final Audio audio = Audio._absolutePath(path, onComplete, onDuration, - onPosition, onError, looping, playInBackground) - .._load(); + {void onComplete()?, void onDuration(double duration)?, void onPosition(double position)?, void onError(String? message)?, bool looping = false, bool playInBackground = false}) { + final Audio audio = Audio._absolutePath(path, onComplete, onDuration, onPosition, onError, looping, playInBackground).._load(); return audio; } @@ -378,15 +355,8 @@ class Audio with WidgetsBindingObserver { /// Returns null if asset cannot be loaded. /// Note that it returns an Audio sync'ly, though loading occurs async'ly. static Audio loadFromByteData(ByteData byteData, - {void onComplete()?, - void onDuration(double duration)?, - void onPosition(double position)?, - void onError(String? message)?, - bool looping = false, - bool playInBackground = false}) { - final Audio audio = Audio._byteData(byteData, onComplete, onDuration, - onPosition, onError, looping, playInBackground) - .._load(); + {void onComplete()?, void onDuration(double duration)?, void onPosition(double position)?, void onError(String? message)?, bool looping = false, bool playInBackground = false}) { + final Audio audio = Audio._byteData(byteData, onComplete, onDuration, onPosition, onError, looping, playInBackground).._load(); return audio; } @@ -398,16 +368,9 @@ class Audio with WidgetsBindingObserver { /// invalid url, etc); this usually is fairly quick on iOS, but waits for /// a longer timeout on Android. static Audio? loadFromRemoteUrl(String url, - {void onComplete()?, - void onDuration(double duration)?, - void onPosition(double position)?, - void onError(String? 
message)?, - bool looping = false, - bool playInBackground = false}) { + {void onComplete()?, void onDuration(double duration)?, void onPosition(double position)?, void onError(String? message)?, bool looping = false, bool playInBackground = false}) { if (Uri.tryParse(url) == null) return null; - final Audio audio = Audio._remoteUrl(url, onComplete, onDuration, - onPosition, onError, looping, playInBackground) - .._load(); + final Audio audio = Audio._remoteUrl(url, onComplete, onDuration, onPosition, onError, looping, playInBackground).._load(); return audio; } @@ -416,10 +379,7 @@ class Audio with WidgetsBindingObserver { /// Keeps strong reference to this Audio (for channel callback routing) /// and requests underlying resource loading. Future _load() async { - assert(_path != null || - _absolutePath != null || - _audioBytes != null || - _remoteUrl != null); + assert(_path != null || _absolutePath != null || _audioBytes != null || _remoteUrl != null); assert(!_undisposedAudios.containsKey(_audioId)); _logger.info('Loading audio $_audioId'); // Note that we add the _audioId to _undisposedAudios before invoking a @@ -509,11 +469,14 @@ class Audio with WidgetsBindingObserver { /// If [endpointSeconds] is specified, playback will resume until that point, /// then stop playback and trigger an onComplete callback. If not specified, /// audio will play to the end of the file. - Future play({double? endpointSeconds}) async { + Future play({double? endpointSeconds, bool hideBackgroundDisplay = true}) async { if (!_undisposedAudios.containsKey(_audioId)) { _logger.severe('Called play() on a disposed Audio'); return; } + if (Platform.isIOS && hideBackgroundDisplay) { + AudioSystem.instance.hideBackgroundDisplay(); + } await _playHelper(playFromStart: true, endpointSeconds: endpointSeconds); } @@ -534,8 +497,7 @@ class Audio with WidgetsBindingObserver { } // Shared code for both [play] and [resume]. - Future _playHelper( - {required bool playFromStart, required double? 
endpointSeconds}) async { + Future _playHelper({required bool playFromStart, required double? endpointSeconds}) async { _playing = true; _playingAudios[_audioId] = this; _endpointSeconds = endpointSeconds; @@ -585,10 +547,7 @@ } try { - await _sendMethodCall(_audioId, seekMethod, { - audioIdKey: _audioId, - positionSecondsKey: positionSeconds - }); + await _sendMethodCall(_audioId, seekMethod, {audioIdKey: _audioId, positionSecondsKey: positionSeconds}); } on PlatformException catch (e) { if (_usingOnErrorAudios.containsKey(_audioId)) { // Audio has an onError callback. @@ -613,16 +572,14 @@ return; } if (volume < 0.0 || volume > 1.0) { - _logger.warning( - 'Invalid volume value $volume is begin clamped to 0.0 to 1.0.'); + _logger.warning('Invalid volume value $volume is being clamped to 0.0 to 1.0.'); volume.clamp(0.0, 1.0); } _volume = volume; try { - await _sendMethodCall(_audioId, setVolumeMethod, - {audioIdKey: _audioId, volumeKey: volume}); + await _sendMethodCall(_audioId, setVolumeMethod, {audioIdKey: _audioId, volumeKey: volume}); } on PlatformException catch (e) { if (_usingOnErrorAudios.containsKey(_audioId)) { // Audio has an onError callback. @@ -653,10 +610,7 @@ /// Sends method call for starting playback. Future _playNative(bool playFromStart, double? endpointSeconds) async { try { - final Map args = { - audioIdKey: _audioId, - playFromStartKey: playFromStart - }; + final Map args = {audioIdKey: _audioId, playFromStartKey: playFromStart}; if (endpointSeconds != null) args[endpointSecondsKey] = endpointSeconds; await _sendMethodCall(_audioId, playMethod, args); } on PlatformException catch (e) { @@ -673,8 +627,7 @@ /// Sends method call for pausing playback.
Future _pauseNative() async { try { - await _sendMethodCall( - _audioId, pauseMethod, {audioIdKey: _audioId}); + await _sendMethodCall(_audioId, pauseMethod, {audioIdKey: _audioId}); } on PlatformException catch (e) { if (_usingOnErrorAudios.containsKey(_audioId)) { // Audio has an onError callback. @@ -729,8 +682,7 @@ class Audio with WidgetsBindingObserver { /// Release underlying audio assets. static Future _releaseNative(String audioId) async { try { - await _sendMethodCall( - audioId, releaseMethod, {audioIdKey: audioId}); + await _sendMethodCall(audioId, releaseMethod, {audioIdKey: audioId}); } on PlatformException catch (e) { if (_usingOnErrorAudios.containsKey(audioId)) { // Audio has an onError callback. @@ -745,13 +697,11 @@ class Audio with WidgetsBindingObserver { // Subsequent methods interact directly with native layers. /// Call channel.invokeMethod, wrapped in a block to highlight/report errors. - static Future _sendMethodCall(String audioId, String method, - [dynamic arguments]) async { + static Future _sendMethodCall(String audioId, String method, [dynamic arguments]) async { try { await audioMethodChannel.invokeMethod(method, arguments); } on PlatformException catch (e) { - _logger.severe( - '_sendMethodCall error: audioId: $audioId method: $method', e); + _logger.severe('_sendMethodCall error: audioId: $audioId method: $method', e); // Calling methods should do any cleanup. Then, either call the _onError // callback (if the audio uses it), or rethrow again. rethrow;