diff --git a/just_audio/android/src/main/java/com/ryanheise/just_audio/AudioPlayer.java b/just_audio/android/src/main/java/com/ryanheise/just_audio/AudioPlayer.java index 7b4b94c65..84a7a3adb 100644 --- a/just_audio/android/src/main/java/com/ryanheise/just_audio/AudioPlayer.java +++ b/just_audio/android/src/main/java/com/ryanheise/just_audio/AudioPlayer.java @@ -755,29 +755,39 @@ private void loudnessEnhancerSetTargetGain(double targetGain) { ((LoudnessEnhancer)audioEffectsMap.get("AndroidLoudnessEnhancer")).setTargetGain(targetGainMillibels); } + // 1 dB = 100 millibels; the 0.8 factor equalizes the effective gain level between iOS and Android. + private short dbToMillibels(double value) { + return (short)(Math.round(value * 100.0 * 0.8)); + } + + // Inverse of dbToMillibels; no rounding, so fractional decibel values survive the round trip. + private double millibelsToDb(int value) { + return value / 100.0 / 0.8; + } + private Map<String, Object> equalizerAudioEffectGetParameters() { Equalizer equalizer = (Equalizer)audioEffectsMap.get("AndroidEqualizer"); ArrayList<Object> rawBands = new ArrayList<>(); for (short i = 0; i < equalizer.getNumberOfBands(); i++) { rawBands.add(mapOf( "index", i, "lowerFrequency", (double)equalizer.getBandFreqRange(i)[0] / 1000.0, "upperFrequency", (double)equalizer.getBandFreqRange(i)[1] / 1000.0, "centerFrequency", (double)equalizer.getCenterFreq(i) / 1000.0, - "gain", equalizer.getBandLevel(i) / 1000.0 + "gain", millibelsToDb(equalizer.getBandLevel(i)) )); } return mapOf( "parameters", mapOf( - "minDecibels", equalizer.getBandLevelRange()[0] / 1000.0, - "maxDecibels", equalizer.getBandLevelRange()[1] / 1000.0, + "minDecibels", millibelsToDb(equalizer.getBandLevelRange()[0]), + "maxDecibels", millibelsToDb(equalizer.getBandLevelRange()[1]), "bands", rawBands ) ); } private void equalizerBandSetGain(int bandIndex, double gain) { - ((Equalizer)audioEffectsMap.get("AndroidEqualizer")).setBandLevel((short)bandIndex, (short)(Math.round(gain * 1000.0))); + ((Equalizer)audioEffectsMap.get("AndroidEqualizer")).setBandLevel((short)bandIndex, dbToMillibels(gain)); } /// Creates an event based on the current state. diff --git a/just_audio/darwin/Classes/AudioPlayer.m b/just_audio/darwin/Classes/AudioPlayer.m deleted file mode 100644 index 0c9ad3f4b..000000000 --- a/just_audio/darwin/Classes/AudioPlayer.m +++ /dev/null @@ -1,1328 +0,0 @@ -#import "BetterEventChannel.h" -#import "AudioPlayer.h" -#import "AudioSource.h" -#import "IndexedAudioSource.h" -#import "LoadControl.h" -#import "UriAudioSource.h" -#import "ConcatenatingAudioSource.h" -#import "LoopingAudioSource.h" -#import "ClippingAudioSource.h" -#import <AVFoundation/AVFoundation.h> -#import -#include - -// TODO: Check for and report invalid state transitions.
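Aside on the Java equalizer hunk above — a worked example of the conversion helpers (the input value is hypothetical):

    short gainMb = dbToMillibels(5.0);     // Math.round(5.0 * 100.0 * 0.8) == 400 millibels
    double gainDb = millibelsToDb(gainMb); // 400 / 100.0 / 0.8 == 5.0 dB, an exact round trip once rounding is dropped

Note that the band frequency fields keep the / 1000.0 conversion: Equalizer.getBandFreqRange() and getCenterFreq() report millihertz rather than millibels, so millibelsToDb() applies only to band levels.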
-// TODO: Apply Apple's guidance on seeking: https://developer.apple.com/library/archive/qa/qa1820/_index.html -@implementation AudioPlayer { - NSObject* _registrar; - FlutterMethodChannel *_methodChannel; - BetterEventChannel *_eventChannel; - BetterEventChannel *_dataEventChannel; - NSString *_playerId; - AVQueuePlayer *_player; - AudioSource *_audioSource; - NSMutableArray *_indexedAudioSources; - NSArray *_order; - NSMutableArray *_orderInv; - int _index; - enum ProcessingState _processingState; - enum LoopMode _loopMode; - BOOL _shuffleModeEnabled; - long long _updateTime; - int _updatePosition; - int _lastPosition; - int _bufferedPosition; - // Set when the current item hasn't been played yet so we aren't sure whether sufficient audio has been buffered. - BOOL _bufferUnconfirmed; - CMTime _seekPos; - FlutterResult _loadResult; - FlutterResult _playResult; - id _timeObserver; - BOOL _automaticallyWaitsToMinimizeStalling; - LoadControl *_loadControl; - BOOL _playing; - float _speed; - float _volume; - BOOL _justAdvanced; - NSDictionary *_icyMetadata; -} - -- (instancetype)initWithRegistrar:(NSObject *)registrar playerId:(NSString*)idParam loadConfiguration:(NSDictionary *)loadConfiguration { - self = [super init]; - NSAssert(self, @"super init cannot be nil"); - _registrar = registrar; - _playerId = idParam; - _methodChannel = - [FlutterMethodChannel methodChannelWithName:[NSMutableString stringWithFormat:@"com.ryanheise.just_audio.methods.%@", _playerId] - binaryMessenger:[registrar messenger]]; - _eventChannel = [[BetterEventChannel alloc] - initWithName:[NSMutableString stringWithFormat:@"com.ryanheise.just_audio.events.%@", _playerId] - messenger:[registrar messenger]]; - _dataEventChannel = [[BetterEventChannel alloc] - initWithName:[NSMutableString stringWithFormat:@"com.ryanheise.just_audio.data.%@", _playerId] - messenger:[registrar messenger]]; - _index = 0; - _processingState = none; - _loopMode = loopOff; - _shuffleModeEnabled = NO; - _player = nil; - _audioSource = nil; - _indexedAudioSources = nil; - _order = nil; - _orderInv = nil; - _seekPos = kCMTimeInvalid; - _timeObserver = 0; - _updatePosition = 0; - _updateTime = 0; - _lastPosition = 0; - _bufferedPosition = 0; - _bufferUnconfirmed = NO; - _playing = NO; - _loadResult = nil; - _playResult = nil; - _automaticallyWaitsToMinimizeStalling = YES; - _loadControl = nil; - if (loadConfiguration != (id)[NSNull null]) { - NSDictionary *map = loadConfiguration[@"darwinLoadControl"]; - if (map != (id)[NSNull null]) { - _loadControl = [[LoadControl alloc] init]; - _loadControl.preferredForwardBufferDuration = (NSNumber *)map[@"preferredForwardBufferDuration"]; - _loadControl.canUseNetworkResourcesForLiveStreamingWhilePaused = (BOOL)[map[@"canUseNetworkResourcesForLiveStreamingWhilePaused"] boolValue]; - _loadControl.preferredPeakBitRate = (NSNumber *)map[@"preferredPeakBitRate"]; - _automaticallyWaitsToMinimizeStalling = (BOOL)[map[@"automaticallyWaitsToMinimizeStalling"] boolValue]; - } - } - if (!_loadControl) { - _loadControl = [[LoadControl alloc] init]; - _loadControl.preferredForwardBufferDuration = (NSNumber *)[NSNull null]; - _loadControl.canUseNetworkResourcesForLiveStreamingWhilePaused = NO; - _loadControl.preferredPeakBitRate = (NSNumber *)[NSNull null]; - } - _speed = 1.0f; - _volume = 1.0f; - _justAdvanced = NO; - _icyMetadata = @{}; - __weak __typeof__(self) weakSelf = self; - [_methodChannel setMethodCallHandler:^(FlutterMethodCall* call, FlutterResult result) { - [weakSelf handleMethodCall:call result:result]; - 
}]; - return self; -} - -- (void)handleMethodCall:(FlutterMethodCall*)call result:(FlutterResult)result { - @try { - NSDictionary *request = (NSDictionary *)call.arguments; - if ([@"load" isEqualToString:call.method]) { - CMTime initialPosition = request[@"initialPosition"] == (id)[NSNull null] ? kCMTimeInvalid : CMTimeMake([request[@"initialPosition"] longLongValue], 1000000); - [self load:request[@"audioSource"] initialPosition:initialPosition initialIndex:request[@"initialIndex"] result:result]; - } else if ([@"play" isEqualToString:call.method]) { - [self play:result]; - } else if ([@"pause" isEqualToString:call.method]) { - [self pause]; - result(@{}); - } else if ([@"setVolume" isEqualToString:call.method]) { - [self setVolume:(float)[request[@"volume"] doubleValue]]; - result(@{}); - } else if ([@"setSkipSilence" isEqualToString:call.method]) { - /// TODO on iOS side; Seems more involved, so someone with ObjectiveC experience might look at it. - result(@{}); - } else if ([@"setSpeed" isEqualToString:call.method]) { - [self setSpeed:(float)[request[@"speed"] doubleValue]]; - result(@{}); - } else if ([@"setLoopMode" isEqualToString:call.method]) { - [self setLoopMode:[request[@"loopMode"] intValue]]; - result(@{}); - } else if ([@"setShuffleMode" isEqualToString:call.method]) { - [self setShuffleModeEnabled:(BOOL)([request[@"shuffleMode"] intValue] == 1)]; - result(@{}); - } else if ([@"setShuffleOrder" isEqualToString:call.method]) { - [self setShuffleOrder:(NSDictionary *)request[@"audioSource"]]; - result(@{}); - } else if ([@"setAutomaticallyWaitsToMinimizeStalling" isEqualToString:call.method]) { - [self setAutomaticallyWaitsToMinimizeStalling:(BOOL)[request[@"enabled"] boolValue]]; - result(@{}); - } else if ([@"setCanUseNetworkResourcesForLiveStreamingWhilePaused" isEqualToString:call.method]) { - [self setCanUseNetworkResourcesForLiveStreamingWhilePaused:(BOOL)[request[@"enabled"] boolValue]]; - result(@{}); - } else if ([@"setPreferredPeakBitRate" isEqualToString:call.method]) { - [self setPreferredPeakBitRate:(NSNumber *)request[@"bitRate"]]; - result(@{}); - } else if ([@"seek" isEqualToString:call.method]) { - CMTime position = request[@"position"] == (id)[NSNull null] ? 
kCMTimePositiveInfinity : CMTimeMake([request[@"position"] longLongValue], 1000000); - [self seek:position index:request[@"index"] completionHandler:^(BOOL finished) { - result(@{}); - }]; - } else if ([@"concatenatingInsertAll" isEqualToString:call.method]) { - [self concatenatingInsertAll:(NSString *)request[@"id"] index:[request[@"index"] intValue] sources:(NSArray *)request[@"children"] shuffleOrder:(NSArray *)request[@"shuffleOrder"]]; - result(@{}); - } else if ([@"concatenatingRemoveRange" isEqualToString:call.method]) { - [self concatenatingRemoveRange:(NSString *)request[@"id"] start:[request[@"startIndex"] intValue] end:[request[@"endIndex"] intValue] shuffleOrder:(NSArray *)request[@"shuffleOrder"]]; - result(@{}); - } else if ([@"concatenatingMove" isEqualToString:call.method]) { - [self concatenatingMove:(NSString *)request[@"id"] currentIndex:[request[@"currentIndex"] intValue] newIndex:[request[@"newIndex"] intValue] shuffleOrder:(NSArray *)request[@"shuffleOrder"]]; - result(@{}); - } else if ([@"setAndroidAudioAttributes" isEqualToString:call.method]) { - result(@{}); - } else { - result(FlutterMethodNotImplemented); - } - } @catch (id exception) { - //NSLog(@"Error in handleMethodCall"); - FlutterError *flutterError = [FlutterError errorWithCode:@"error" message:@"Error in handleMethodCall" details:nil]; - result(flutterError); - } -} - -- (AVQueuePlayer *)player { - return _player; -} - -- (float)speed { - return _speed; -} - -// Untested -- (void)concatenatingInsertAll:(NSString *)catId index:(int)index sources:(NSArray *)sources shuffleOrder:(NSArray *)shuffleOrder { - // Find all duplicates of the identified ConcatenatingAudioSource. - NSMutableArray *matches = [[NSMutableArray alloc] init]; - [_audioSource findById:catId matches:matches]; - // Add each new source to each match. - for (int i = 0; i < matches.count; i++) { - ConcatenatingAudioSource *catSource = (ConcatenatingAudioSource *)matches[i]; - int idx = index >= 0 ? index : catSource.count; - NSMutableArray *audioSources = [self decodeAudioSources:sources]; - for (int j = 0; j < audioSources.count; j++) { - AudioSource *audioSource = audioSources[j]; - [catSource insertSource:audioSource atIndex:(idx + j)]; - } - [catSource setShuffleOrder:shuffleOrder]; - } - // Index the new audio sources. - _indexedAudioSources = [[NSMutableArray alloc] init]; - [_audioSource buildSequence:_indexedAudioSources treeIndex:0]; - for (int i = 0; i < [_indexedAudioSources count]; i++) { - IndexedAudioSource *audioSource = _indexedAudioSources[i]; - if (!audioSource.isAttached) { - audioSource.playerItem.audioSource = audioSource; - [self addItemObservers:audioSource.playerItem]; - } - } - [self updateOrder]; - if (_player.currentItem) { - _index = [self indexForItem:(IndexedPlayerItem *)_player.currentItem]; - } else { - _index = 0; - } - [self enqueueFrom:_index]; - // Notify each new IndexedAudioSource that it's been attached to the player. - for (int i = 0; i < [_indexedAudioSources count]; i++) { - if (!_indexedAudioSources[i].isAttached) { - [_indexedAudioSources[i] attach:_player initialPos:kCMTimeInvalid]; - } - } - [self broadcastPlaybackEvent]; -} - -// Untested -- (void)concatenatingRemoveRange:(NSString *)catId start:(int)start end:(int)end shuffleOrder:(NSArray *)shuffleOrder { - // Find all duplicates of the identified ConcatenatingAudioSource. - NSMutableArray *matches = [[NSMutableArray alloc] init]; - [_audioSource findById:catId matches:matches]; - // Remove range from each match. 
- for (int i = 0; i < matches.count; i++) { - ConcatenatingAudioSource *catSource = (ConcatenatingAudioSource *)matches[i]; - int endIndex = end >= 0 ? end : catSource.count; - [catSource removeSourcesFromIndex:start toIndex:endIndex]; - [catSource setShuffleOrder:shuffleOrder]; - } - // Re-index the remaining audio sources. - NSArray *oldIndexedAudioSources = _indexedAudioSources; - _indexedAudioSources = [[NSMutableArray alloc] init]; - [_audioSource buildSequence:_indexedAudioSources treeIndex:0]; - for (int i = 0, j = 0; i < _indexedAudioSources.count; i++, j++) { - IndexedAudioSource *audioSource = _indexedAudioSources[i]; - while (audioSource != oldIndexedAudioSources[j]) { - [self removeItemObservers:oldIndexedAudioSources[j].playerItem]; - if (oldIndexedAudioSources[j].playerItem2) { - [self removeItemObservers:oldIndexedAudioSources[j].playerItem2]; - } - if (j < _index) { - _index--; - } else if (j == _index) { - // The currently playing item was removed. - } - j++; - } - } - [self updateOrder]; - if (_index >= _indexedAudioSources.count) _index = (int)_indexedAudioSources.count - 1; - if (_index < 0) _index = 0; - [self enqueueFrom:_index]; - [self broadcastPlaybackEvent]; -} - -// Untested -- (void)concatenatingMove:(NSString *)catId currentIndex:(int)currentIndex newIndex:(int)newIndex shuffleOrder:(NSArray *)shuffleOrder { - // Find all duplicates of the identified ConcatenatingAudioSource. - NSMutableArray *matches = [[NSMutableArray alloc] init]; - [_audioSource findById:catId matches:matches]; - // Move range within each match. - for (int i = 0; i < matches.count; i++) { - ConcatenatingAudioSource *catSource = (ConcatenatingAudioSource *)matches[i]; - [catSource moveSourceFromIndex:currentIndex toIndex:newIndex]; - [catSource setShuffleOrder:shuffleOrder]; - } - // Re-index the audio sources. 
- _indexedAudioSources = [[NSMutableArray alloc] init]; - [_audioSource buildSequence:_indexedAudioSources treeIndex:0]; - [self updateOrder]; - [self enqueueFrom:[self indexForItem:(IndexedPlayerItem *)_player.currentItem]]; - [self broadcastPlaybackEvent]; -} - -- (void)checkForDiscontinuity { - if (!_playing || CMTIME_IS_VALID(_seekPos) || _processingState == completed) return; - int position = [self getCurrentPosition]; - if (_processingState == buffering) { - if (position > _lastPosition) { - [self leaveBuffering:@"stall ended"]; - [self updatePosition]; - [self broadcastPlaybackEvent]; - } - } else { - long long now = (long long)([[NSDate date] timeIntervalSince1970] * 1000.0); - long long timeSinceLastUpdate = now - _updateTime; - long long expectedPosition = _updatePosition + (long long)(timeSinceLastUpdate * _player.rate); - long long drift = position - expectedPosition; - //NSLog(@"position: %d, drift: %lld", position, drift); - // Update if we've drifted or just started observing - if (_updateTime == 0L) { - [self broadcastPlaybackEvent]; - } else if (drift < -100) { - [self enterBuffering:@"stalling"]; - //NSLog(@"Drift: %lld", drift); - [self updatePosition]; - [self broadcastPlaybackEvent]; - } - } - _lastPosition = position; -} - -- (void)enterBuffering:(NSString *)reason { - //NSLog(@"ENTER BUFFERING: %@", reason); - _processingState = buffering; -} - -- (void)leaveBuffering:(NSString *)reason { - //NSLog(@"LEAVE BUFFERING: %@", reason); - _processingState = ready; -} - -- (void)broadcastPlaybackEvent { - [_eventChannel sendEvent:@{ - @"processingState": @(_processingState), - @"updatePosition": @((long long)1000 * _updatePosition), - @"updateTime": @(_updateTime), - @"bufferedPosition": @((long long)1000 * [self getBufferedPosition]), - @"icyMetadata": _icyMetadata, - @"duration": @([self getDurationMicroseconds]), - @"currentIndex": @(_index), - }]; -} - -- (int)getCurrentPosition { - if (CMTIME_IS_VALID(_seekPos)) { - return (int)(1000 * CMTimeGetSeconds(_seekPos)); - } else if (_indexedAudioSources && _indexedAudioSources.count > 0) { - int ms = (int)(1000 * CMTimeGetSeconds(_indexedAudioSources[_index].position)); - if (ms < 0) ms = 0; - return ms; - } else { - return 0; - } -} - -- (int)getBufferedPosition { - if (_processingState == none || _processingState == loading) { - return 0; - } else if (_indexedAudioSources && _indexedAudioSources.count > 0) { - int ms = (int)(1000 * CMTimeGetSeconds(_indexedAudioSources[_index].bufferedPosition)); - if (ms < 0) ms = 0; - return ms; - } else { - return 0; - } -} - -- (int)getDuration { - if (_processingState == none || _processingState == loading) { - return -1; - } else if (_indexedAudioSources && _indexedAudioSources.count > 0) { - int v = (int)(1000 * CMTimeGetSeconds(_indexedAudioSources[_index].duration)); - return v; - } else { - return 0; - } -} - -- (long long)getDurationMicroseconds { - int duration = [self getDuration]; - return duration < 0 ? 
-1 : ((long long)1000 * duration); -} - -- (void)removeItemObservers:(AVPlayerItem *)playerItem { - [playerItem removeObserver:self forKeyPath:@"status"]; - [playerItem removeObserver:self forKeyPath:@"playbackBufferEmpty"]; - [playerItem removeObserver:self forKeyPath:@"playbackBufferFull"]; - [playerItem removeObserver:self forKeyPath:@"loadedTimeRanges"]; - //[playerItem removeObserver:self forKeyPath:@"playbackLikelyToKeepUp"]; - [[NSNotificationCenter defaultCenter] removeObserver:self name:AVPlayerItemDidPlayToEndTimeNotification object:playerItem]; - [[NSNotificationCenter defaultCenter] removeObserver:self name:AVPlayerItemFailedToPlayToEndTimeNotification object:playerItem]; - [[NSNotificationCenter defaultCenter] removeObserver:self name:AVPlayerItemPlaybackStalledNotification object:playerItem]; -} - -- (void)addItemObservers:(AVPlayerItem *)playerItem { - // Get notified when the item is loaded or had an error loading - [playerItem addObserver:self forKeyPath:@"status" options:NSKeyValueObservingOptionNew context:nil]; - // Get notified of the buffer state - [playerItem addObserver:self forKeyPath:@"playbackBufferEmpty" options:NSKeyValueObservingOptionNew context:nil]; - [playerItem addObserver:self forKeyPath:@"playbackBufferFull" options:NSKeyValueObservingOptionNew context:nil]; - [playerItem addObserver:self forKeyPath:@"loadedTimeRanges" options:NSKeyValueObservingOptionNew context:nil]; - //[playerItem addObserver:self forKeyPath:@"playbackLikelyToKeepUp" options:NSKeyValueObservingOptionNew context:nil]; - // Get notified when playback has reached the end - [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(onComplete:) name:AVPlayerItemDidPlayToEndTimeNotification object:playerItem]; - // Get notified when playback stops due to a failure (currently unused) - [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(onFailToComplete:) name:AVPlayerItemFailedToPlayToEndTimeNotification object:playerItem]; - // Get notified when playback stalls (currently unused) - [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(onItemStalled:) name:AVPlayerItemPlaybackStalledNotification object:playerItem]; - - AVPlayerItemMetadataOutput *metadataOutput = [[AVPlayerItemMetadataOutput alloc] initWithIdentifiers:nil]; - [metadataOutput setDelegate:self queue:dispatch_get_main_queue()]; - // Since the delegate is stored as a weak reference, - // there shouldn't be a retain cycle. - // TODO: Check this. Shouldn't need to removeOutput - // later? - [playerItem addOutput:metadataOutput]; -} - -- (void)metadataOutput:(AVPlayerItemMetadataOutput *)output didOutputTimedMetadataGroups:(NSArray *)groups fromPlayerItemTrack:(AVPlayerItemTrack *)track { - // ICY headers aren't available here. Maybe do this in the proxy. 
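To make the drift arithmetic in checkForDiscontinuity above concrete, a worked example with hypothetical numbers:

    // Last broadcast: _updateTime = 10000 ms, _updatePosition = 2000 ms, _player.rate = 1.0.
    // One second later: now = 11000 ms, so timeSinceLastUpdate = 1000 ms and
    // expectedPosition = 2000 + 1000 * 1.0 = 3000 ms.
    // If the player reports position = 2850 ms, then drift = 2850 - 3000 = -150,
    // which is below the -100 ms tolerance, so [self enterBuffering:@"stalling"] fires.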
- BOOL hasIcyData = NO; - NSString *title = (NSString *)[NSNull null]; - NSString *url = (NSString *)[NSNull null]; - for (int i = 0; i < groups.count; i++) { - AVTimedMetadataGroup *group = groups[i]; - for (int j = 0; j < group.items.count; j++) { - AVMetadataItem *item = group.items[j]; - if ([@"icy/StreamTitle" isEqualToString:item.identifier]) { - hasIcyData = YES; - title = (NSString *)item.value; - } else if ([@"icy/StreamUrl" isEqualToString:item.identifier]) { - hasIcyData = YES; - url = (NSString *)item.value; - } - } - } - if (hasIcyData) { - _icyMetadata = @{ - @"info": @{ - @"title": title, - @"url": url, - }, - }; - [self broadcastPlaybackEvent]; - } -} - -- (NSMutableArray *)decodeAudioSources:(NSArray *)data { - NSMutableArray *array = (NSMutableArray *)[[NSMutableArray alloc] init]; - for (int i = 0; i < [data count]; i++) { - AudioSource *source = [self decodeAudioSource:data[i]]; - [array addObject:source]; - } - return array; -} - -- (AudioSource *)decodeAudioSource:(NSDictionary *)data { - NSString *type = data[@"type"]; - if ([@"progressive" isEqualToString:type]) { - return [[UriAudioSource alloc] initWithId:data[@"id"] uri:data[@"uri"] loadControl:_loadControl]; - } else if ([@"dash" isEqualToString:type]) { - return [[UriAudioSource alloc] initWithId:data[@"id"] uri:data[@"uri"] loadControl:_loadControl]; - } else if ([@"hls" isEqualToString:type]) { - return [[UriAudioSource alloc] initWithId:data[@"id"] uri:data[@"uri"] loadControl:_loadControl]; - } else if ([@"concatenating" isEqualToString:type]) { - return [[ConcatenatingAudioSource alloc] initWithId:data[@"id"] - audioSources:[self decodeAudioSources:data[@"children"]] - shuffleOrder:(NSArray *)data[@"shuffleOrder"]]; - } else if ([@"clipping" isEqualToString:type]) { - return [[ClippingAudioSource alloc] initWithId:data[@"id"] - audioSource:(UriAudioSource *)[self decodeAudioSource:data[@"child"]] - start:data[@"start"] - end:data[@"end"]]; - } else if ([@"looping" isEqualToString:type]) { - NSMutableArray *childSources = [NSMutableArray new]; - int count = [data[@"count"] intValue]; - for (int i = 0; i < count; i++) { - [childSources addObject:[self decodeAudioSource:data[@"child"]]]; - } - return [[LoopingAudioSource alloc] initWithId:data[@"id"] audioSources:childSources]; - } else { - return nil; - } -} - -- (void)enqueueFrom:(int)index { - //NSLog(@"### enqueueFrom:%d", index); - _index = index; - - // Update the queue while keeping the currently playing item untouched. - - /* NSLog(@"before reorder: _player.items.count: ", _player.items.count); */ - /* [self dumpQueue]; */ - - // First, remove all _player items except for the currently playing one (if any). - IndexedPlayerItem *oldItem = (IndexedPlayerItem *)_player.currentItem; - IndexedPlayerItem *existingItem = nil; - IndexedPlayerItem *newItem = _indexedAudioSources.count > 0 ? _indexedAudioSources[_index].playerItem : nil; - NSArray *oldPlayerItems = [NSArray arrayWithArray:_player.items]; - // In the first pass, preserve the old and new items. - for (int i = 0; i < oldPlayerItems.count; i++) { - if (oldPlayerItems[i] == newItem) { - // Preserve and tag new item if it is already in the queue. - existingItem = oldPlayerItems[i]; - //NSLog(@"Preserving existing item %d", [self indexForItem:existingItem]); - } else if (oldPlayerItems[i] == oldItem) { - //NSLog(@"Preserving old item %d", [self indexForItem:oldItem]); - // Temporarily preserve old item, just to avoid jumping to - // intermediate queue positions unnecessarily. 
We only want to jump - // once to _index. - } else { - //NSLog(@"Removing item %d", [self indexForItem:oldPlayerItems[i]]); - [_player removeItem:oldPlayerItems[i]]; - } - } - // In the second pass, remove the old item (if different from new item). - if (oldItem && newItem != oldItem) { - //NSLog(@"removing old item %d", [self indexForItem:oldItem]); - [_player removeItem:oldItem]; - } - - /* NSLog(@"inter order: _player.items.count: ", _player.items.count); */ - /* [self dumpQueue]; */ - - // Regenerate queue - if (!existingItem || _loopMode != loopOne) { - BOOL include = NO; - for (int i = 0; i < [_order count]; i++) { - int si = [_order[i] intValue]; - if (si == _index) include = YES; - if (include && _indexedAudioSources[si].playerItem != existingItem) { - //NSLog(@"inserting item %d", si); - [_player insertItem:_indexedAudioSources[si].playerItem afterItem:nil]; - if (_loopMode == loopOne) { - // We only want one item in the queue. - break; - } - } - } - } - - // Add next loop item if we're looping - if (_order.count > 0) { - if (_loopMode == loopAll) { - int si = [_order[0] intValue]; - //NSLog(@"### add loop item:%d", si); - if (!_indexedAudioSources[si].playerItem2) { - [_indexedAudioSources[si] preparePlayerItem2]; - [self addItemObservers:_indexedAudioSources[si].playerItem2]; - } - [_player insertItem:_indexedAudioSources[si].playerItem2 afterItem:nil]; - } else if (_loopMode == loopOne) { - //NSLog(@"### add loop item:%d", _index); - if (!_indexedAudioSources[_index].playerItem2) { - [_indexedAudioSources[_index] preparePlayerItem2]; - [self addItemObservers:_indexedAudioSources[_index].playerItem2]; - } - [_player insertItem:_indexedAudioSources[_index].playerItem2 afterItem:nil]; - } - } - - /* NSLog(@"after reorder: _player.items.count: ", _player.items.count); */ - /* [self dumpQueue]; */ - - if (_processingState != loading && oldItem != newItem) { - // || !_player.currentItem.playbackLikelyToKeepUp; - if (_player.currentItem.playbackBufferEmpty) { - [self enterBuffering:@"enqueueFrom playbackBufferEmpty"]; - } else { - [self leaveBuffering:@"enqueueFrom !playbackBufferEmpty"]; - } - [self updatePosition]; - } - - [self updateEndAction]; -} - -- (void)updatePosition { - _updatePosition = [self getCurrentPosition]; - _updateTime = (long long)([[NSDate date] timeIntervalSince1970] * 1000.0); -} - -- (void)load:(NSDictionary *)source initialPosition:(CMTime)initialPosition initialIndex:(NSNumber *)initialIndex result:(FlutterResult)result { - if (_playing) { - [_player pause]; - } - if (_processingState == loading) { - [self abortExistingConnection]; - } - _loadResult = result; - _index = (initialIndex != (id)[NSNull null]) ? [initialIndex intValue] : 0; - _processingState = loading; - [self updatePosition]; - // Remove previous observers - if (_indexedAudioSources) { - for (int i = 0; i < [_indexedAudioSources count]; i++) { - [self removeItemObservers:_indexedAudioSources[i].playerItem]; - if (_indexedAudioSources[i].playerItem2) { - [self removeItemObservers:_indexedAudioSources[i].playerItem2]; - } - } - } - // Decode audio source - if (_audioSource && [@"clipping" isEqualToString:source[@"type"]]) { - // Check if we're clipping an audio source that was previously loaded. 
- UriAudioSource *child = nil; - if ([_audioSource isKindOfClass:[ClippingAudioSource class]]) { - ClippingAudioSource *clipper = (ClippingAudioSource *)_audioSource; - child = clipper.audioSource; - } else if ([_audioSource isKindOfClass:[UriAudioSource class]]) { - child = (UriAudioSource *)_audioSource; - } - NSString *type = source[@"child"][@"type"]; - NSString *uri = nil; - if ([@"progressive" isEqualToString:type] || [@"dash" isEqualToString:type] || [@"hls" isEqualToString:type]) { - uri = source[@"child"][@"uri"]; - } - if (child && uri && [child.uri isEqualToString:uri]) { - ClippingAudioSource *clipper = - [[ClippingAudioSource alloc] initWithId:source[@"id"] - audioSource:child - start:source[@"start"] - end:source[@"end"]]; - clipper.playerItem.audioSource = clipper; - if (clipper.playerItem2) { - clipper.playerItem2.audioSource = clipper; - } - _audioSource = clipper; - } else { - _audioSource = [self decodeAudioSource:source]; - } - } else { - _audioSource = [self decodeAudioSource:source]; - } - _indexedAudioSources = [[NSMutableArray alloc] init]; - [_audioSource buildSequence:_indexedAudioSources treeIndex:0]; - for (int i = 0; i < [_indexedAudioSources count]; i++) { - IndexedAudioSource *source = _indexedAudioSources[i]; - [self addItemObservers:source.playerItem]; - source.playerItem.audioSource = source; - } - [self updateOrder]; - // Set up an empty player - if (!_player) { - _player = [[AVQueuePlayer alloc] initWithItems:@[]]; - if (@available(macOS 10.12, iOS 10.0, *)) { - _player.automaticallyWaitsToMinimizeStalling = _automaticallyWaitsToMinimizeStalling; - // TODO: Remove these observers in dispose. - [_player addObserver:self - forKeyPath:@"timeControlStatus" - options:NSKeyValueObservingOptionNew - context:nil]; - } - [_player addObserver:self - forKeyPath:@"currentItem" - options:NSKeyValueObservingOptionNew - context:nil]; - // TODO: learn about the different ways to define weakSelf. - //__weak __typeof__(self) weakSelf = self; - //typeof(self) __weak weakSelf = self; - __unsafe_unretained typeof(self) weakSelf = self; - if (@available(macOS 10.12, iOS 10.0, *)) {} - else { - _timeObserver = [_player addPeriodicTimeObserverForInterval:CMTimeMake(200, 1000) - queue:nil - usingBlock:^(CMTime time) { - [weakSelf checkForDiscontinuity]; - } - ]; - } - } - // Initialise the AVQueuePlayer with items. - [self enqueueFrom:_index]; - // Notify each IndexedAudioSource that it's been attached to the player. - for (int i = 0; i < [_indexedAudioSources count]; i++) { - [_indexedAudioSources[i] attach:_player initialPos:(i == _index ? initialPosition : kCMTimeInvalid)]; - } - - if (_player.currentItem.status == AVPlayerItemStatusReadyToPlay) { - _processingState = ready; - _loadResult(@{@"duration": @([self getDurationMicroseconds])}); - _loadResult = nil; - } else { - // We send result after the playerItem is ready in observeValueForKeyPath. 
- } - if (_playing) { - _player.rate = _speed; - } - [_player setVolume:_volume]; - [self broadcastPlaybackEvent]; - /* NSLog(@"load:"); */ - /* for (int i = 0; i < [_indexedAudioSources count]; i++) { */ - /* NSLog(@"- %@", _indexedAudioSources[i].sourceId); */ - /* } */ -} - -- (void)updateOrder { - _orderInv = [NSMutableArray arrayWithCapacity:[_indexedAudioSources count]]; - for (int i = 0; i < [_indexedAudioSources count]; i++) { - [_orderInv addObject:@(0)]; - } - if (_shuffleModeEnabled) { - _order = [_audioSource getShuffleIndices]; - } else { - NSMutableArray *order = [[NSMutableArray alloc] init]; - for (int i = 0; i < [_indexedAudioSources count]; i++) { - [order addObject:@(i)]; - } - _order = order; - } - for (int i = 0; i < [_indexedAudioSources count]; i++) { - _orderInv[[_order[i] intValue]] = @(i); - } -} - -- (void)onItemStalled:(NSNotification *)notification { - //IndexedPlayerItem *playerItem = (IndexedPlayerItem *)notification.object; - //NSLog(@"onItemStalled"); -} - -- (void)onFailToComplete:(NSNotification *)notification { - //IndexedPlayerItem *playerItem = (IndexedPlayerItem *)notification.object; - //NSLog(@"onFailToComplete"); -} - -- (void)onComplete:(NSNotification *)notification { - //NSLog(@"onComplete"); - - IndexedPlayerItem *endedPlayerItem = (IndexedPlayerItem *)notification.object; - IndexedAudioSource *endedSource = endedPlayerItem.audioSource; - - if (_loopMode == loopOne) { - [endedSource seek:kCMTimeZero]; - _justAdvanced = YES; - } else if (_loopMode == loopAll) { - [endedSource seek:kCMTimeZero]; - _index = [_order[([_orderInv[_index] intValue] + 1) % _order.count] intValue]; - [self broadcastPlaybackEvent]; - _justAdvanced = YES; - } else if ([_orderInv[_index] intValue] + 1 < [_order count]) { - [endedSource seek:kCMTimeZero]; - _index = [_order[([_orderInv[_index] intValue] + 1)] intValue]; - [self updateEndAction]; - [self broadcastPlaybackEvent]; - _justAdvanced = YES; - } else { - // reached end of playlist - [self complete]; - } -} - -- (void)observeValueForKeyPath:(NSString *)keyPath - ofObject:(id)object - change:(NSDictionary *)change - context:(void *)context { - - if ([keyPath isEqualToString:@"status"]) { - IndexedPlayerItem *playerItem = (IndexedPlayerItem *)object; - AVPlayerItemStatus status = AVPlayerItemStatusUnknown; - NSNumber *statusNumber = change[NSKeyValueChangeNewKey]; - if ([statusNumber isKindOfClass:[NSNumber class]]) { - status = statusNumber.intValue; - } - [playerItem.audioSource onStatusChanged:status]; - switch (status) { - case AVPlayerItemStatusReadyToPlay: { - if (playerItem != _player.currentItem) return; - // Detect buffering in different ways depending on whether we're playing - if (_playing) { - if (@available(macOS 10.12, iOS 10.0, *)) { - if (_player.timeControlStatus == AVPlayerTimeControlStatusWaitingToPlayAtSpecifiedRate) { - [self enterBuffering:@"ready to play: playing, waitingToPlay"]; - } else { - [self leaveBuffering:@"ready to play: playing, !waitingToPlay"]; - } - [self updatePosition]; - } else { - // If this happens when we're playing, check whether buffer is confirmed - if (_bufferUnconfirmed && !_player.currentItem.playbackBufferFull) { - // Stay in bufering - XXX Test - [self enterBuffering:@"ready to play: playing, bufferUnconfirmed && !playbackBufferFull"]; - } else { - if (_player.currentItem.playbackBufferEmpty) { - // !_player.currentItem.playbackLikelyToKeepUp; - [self enterBuffering:@"ready to play: playing, playbackBufferEmpty"]; - } else { - [self leaveBuffering:@"ready to play: 
playing, !playbackBufferEmpty"]; - } - [self updatePosition]; - } - } - } else { - if (_player.currentItem.playbackBufferEmpty) { - [self enterBuffering:@"ready to play: !playing, playbackBufferEmpty"]; - // || !_player.currentItem.playbackLikelyToKeepUp; - } else { - [self leaveBuffering:@"ready to play: !playing, !playbackBufferEmpty"]; - } - [self updatePosition]; - } - [self broadcastPlaybackEvent]; - if (_loadResult) { - _loadResult(@{@"duration": @([self getDurationMicroseconds])}); - _loadResult = nil; - } - break; - } - case AVPlayerItemStatusFailed: { - //NSLog(@"AVPlayerItemStatusFailed"); - [self sendErrorForItem:playerItem]; - break; - } - case AVPlayerItemStatusUnknown: - break; - } - } else if ([keyPath isEqualToString:@"playbackBufferEmpty"] || [keyPath isEqualToString:@"playbackBufferFull"]) { - // Use these values to detect buffering. - IndexedPlayerItem *playerItem = (IndexedPlayerItem *)object; - if (playerItem != _player.currentItem) return; - // If there's a seek in progress, these values are unreliable - if (CMTIME_IS_VALID(_seekPos)) return; - // Detect buffering in different ways depending on whether we're playing - if (_playing) { - if (@available(macOS 10.12, iOS 10.0, *)) { - // We handle this with timeControlStatus instead. - } else { - if (_bufferUnconfirmed && playerItem.playbackBufferFull) { - _bufferUnconfirmed = NO; - [self leaveBuffering:@"playing, _bufferUnconfirmed && playbackBufferFull"]; - [self updatePosition]; - //NSLog(@"Buffering confirmed! leaving buffering"); - [self broadcastPlaybackEvent]; - } - } - } else { - if (playerItem.playbackBufferEmpty) { - [self enterBuffering:@"!playing, playbackBufferEmpty"]; - [self updatePosition]; - [self broadcastPlaybackEvent]; - } else if (!playerItem.playbackBufferEmpty || playerItem.playbackBufferFull) { - _processingState = ready; - [self leaveBuffering:@"!playing, !playbackBufferEmpty || playbackBufferFull"]; - [self updatePosition]; - [self broadcastPlaybackEvent]; - } - } - /* } else if ([keyPath isEqualToString:@"playbackLikelyToKeepUp"]) { */ - } else if ([keyPath isEqualToString:@"timeControlStatus"]) { - if (@available(macOS 10.12, iOS 10.0, *)) { - AVPlayerTimeControlStatus status = AVPlayerTimeControlStatusPaused; - NSNumber *statusNumber = change[NSKeyValueChangeNewKey]; - if ([statusNumber isKindOfClass:[NSNumber class]]) { - status = statusNumber.intValue; - } - switch (status) { - case AVPlayerTimeControlStatusPaused: - //NSLog(@"AVPlayerTimeControlStatusPaused"); - break; - case AVPlayerTimeControlStatusWaitingToPlayAtSpecifiedRate: - //NSLog(@"AVPlayerTimeControlStatusWaitingToPlayAtSpecifiedRate"); - if (_processingState != completed) { - [self enterBuffering:@"timeControlStatus"]; - [self updatePosition]; - [self broadcastPlaybackEvent]; - } else { - //NSLog(@"Ignoring wait signal because we reached the end"); - } - break; - case AVPlayerTimeControlStatusPlaying: - [self leaveBuffering:@"timeControlStatus"]; - [self updatePosition]; - [self broadcastPlaybackEvent]; - break; - } - } - } else if ([keyPath isEqualToString:@"currentItem"] && _player.currentItem) { - IndexedPlayerItem *playerItem = (IndexedPlayerItem *)change[NSKeyValueChangeNewKey]; - //IndexedPlayerItem *oldPlayerItem = (IndexedPlayerItem *)change[NSKeyValueChangeOldKey]; - if (playerItem.status == AVPlayerItemStatusFailed) { - if ([_orderInv[_index] intValue] + 1 < [_order count]) { - // account for automatic move to next item - _index = [_order[[_orderInv[_index] intValue] + 1] intValue]; - //NSLog(@"advance to next on 
error: index = %d", _index); - [self updateEndAction]; - [self broadcastPlaybackEvent]; - } else { - //NSLog(@"error on last item"); - } - return; - } else { - int expectedIndex = [self indexForItem:playerItem]; - if (_index != expectedIndex) { - // AVQueuePlayer will sometimes skip over error items without - // notifying this observer. - //NSLog(@"Queue change detected. Adjusting index from %d -> %d", _index, expectedIndex); - _index = expectedIndex; - [self updateEndAction]; - [self broadcastPlaybackEvent]; - } - } - //NSLog(@"currentItem changed. _index=%d", _index); - _bufferUnconfirmed = YES; - // If we've skipped or transitioned to a new item and we're not - // currently in the middle of a seek - /* if (CMTIME_IS_INVALID(_seekPos) && playerItem.status == AVPlayerItemStatusReadyToPlay) { */ - /* [self updatePosition]; */ - /* IndexedAudioSource *source = playerItem.audioSource; */ - /* // We should already be at position zero but for */ - /* // ClippingAudioSource it might be off by some milliseconds so we */ - /* // consider anything <= 100 as close enough. */ - /* if ((int)(1000 * CMTimeGetSeconds(source.position)) > 100) { */ - /* NSLog(@"On currentItem change, seeking back to zero"); */ - /* BOOL shouldResumePlayback = NO; */ - /* AVPlayerActionAtItemEnd originalEndAction = _player.actionAtItemEnd; */ - /* if (_playing && CMTimeGetSeconds(CMTimeSubtract(source.position, source.duration)) >= 0) { */ - /* NSLog(@"Need to pause while rewinding because we're at the end"); */ - /* shouldResumePlayback = YES; */ - /* _player.actionAtItemEnd = AVPlayerActionAtItemEndPause; */ - /* [_player pause]; */ - /* } */ - /* [self enterBuffering:@"currentItem changed, seeking"]; */ - /* [self updatePosition]; */ - /* [self broadcastPlaybackEvent]; */ - /* __weak __typeof__(self) weakSelf = self; */ - /* [source seek:kCMTimeZero completionHandler:^(BOOL finished) { */ - /* [weakSelf leaveBuffering:@"currentItem changed, finished seek"]; */ - /* [weakSelf updatePosition]; */ - /* [weakSelf broadcastPlaybackEvent]; */ - /* if (shouldResumePlayback) { */ - /* weakSelf.player.actionAtItemEnd = originalEndAction; */ - /* // TODO: This logic is almost duplicated in seek. See if we can reuse this code. */ - /* weakSelf.player.rate = weakSelf.speed; */ - /* } */ - /* }]; */ - /* } else { */ - /* // Already at zero, no need to seek. 
*/ - /* } */ - /* } */ - - if (_justAdvanced) { - IndexedAudioSource *audioSource = playerItem.audioSource; - if (_loopMode == loopOne) { - [audioSource flip]; - [self enqueueFrom:_index]; - } else if (_loopMode == loopAll) { - if (_index == [_order[0] intValue] && playerItem == audioSource.playerItem2) { - [audioSource flip]; - [self enqueueFrom:_index]; - } else { - [self updateEndAction]; - } - } - _justAdvanced = NO; - } - } else if ([keyPath isEqualToString:@"loadedTimeRanges"]) { - IndexedPlayerItem *playerItem = (IndexedPlayerItem *)object; - if (playerItem != _player.currentItem) return; - int pos = [self getBufferedPosition]; - if (pos != _bufferedPosition) { - _bufferedPosition = pos; - [self broadcastPlaybackEvent]; - } - } -} - -- (void)sendErrorForItem:(IndexedPlayerItem *)playerItem { - FlutterError *flutterError = [FlutterError errorWithCode:[NSString stringWithFormat:@"%d", (int)playerItem.error.code] - message:playerItem.error.localizedDescription - details:nil]; - [self sendError:flutterError playerItem:playerItem]; -} - -- (void)sendError:(FlutterError *)flutterError playerItem:(IndexedPlayerItem *)playerItem { - //NSLog(@"sendError"); - if (_loadResult && playerItem == _player.currentItem) { - _loadResult(flutterError); - _loadResult = nil; - } - // Broadcast all errors even if they aren't on the current item. - [_eventChannel sendEvent:flutterError]; -} - -- (void)abortExistingConnection { - FlutterError *flutterError = [FlutterError errorWithCode:@"abort" - message:@"Connection aborted" - details:nil]; - [self sendError:flutterError playerItem:nil]; -} - -- (int)indexForItem:(IndexedPlayerItem *)playerItem { - for (int i = 0; i < _indexedAudioSources.count; i++) { - if (_indexedAudioSources[i].playerItem == playerItem || _indexedAudioSources[i].playerItem2 == playerItem) { - return i; - } - } - return -1; -} - -- (void)play { - [self play:nil]; -} - -- (void)play:(FlutterResult)result { - if (_playing) { - if (result) { - result(@{}); - } - return; - } - if (result) { - if (_playResult) { - //NSLog(@"INTERRUPTING PLAY"); - _playResult(@{}); - } - _playResult = result; - } - _playing = YES; - _player.rate = _speed; - [self updatePosition]; - if (@available(macOS 10.12, iOS 10.0, *)) {} - else { - if (_bufferUnconfirmed && !_player.currentItem.playbackBufferFull) { - [self enterBuffering:@"play, _bufferUnconfirmed && !playbackBufferFull"]; - [self broadcastPlaybackEvent]; - } - } -} - -- (void)pause { - if (!_playing) return; - _playing = NO; - [_player pause]; - [self updatePosition]; - [self broadcastPlaybackEvent]; - if (_playResult) { - //NSLog(@"PLAY FINISHED DUE TO PAUSE"); - _playResult(@{}); - _playResult = nil; - } -} - -- (void)complete { - [self updatePosition]; - _processingState = completed; - [self broadcastPlaybackEvent]; - if (_playResult) { - //NSLog(@"PLAY FINISHED DUE TO COMPLETE"); - _playResult(@{}); - _playResult = nil; - } -} - -- (void)setVolume:(float)volume { - _volume = volume; - if (_player) { - [_player setVolume:volume]; - } -} - -- (void)setSpeed:(float)speed { - // NOTE: We ideally should check _player.currentItem.canPlaySlowForward and - // canPlayFastForward, but these properties are unreliable and the official - // documentation is unclear and contradictory. 
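For illustration, a naive version of the capability check described in the note above — deliberately not used by the plugin, for the reasons given in the sources cited below. This sketch reads Source #1 literally:

    AVPlayerItem *item = _player.currentItem;
    BOOL canApplyRate = speed == 1.0f
        || (speed < 1.0f && item.canPlaySlowForward)
        || (speed > 1.0f && item.canPlayFastForward);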
- // - // Source #1: - // https://developer.apple.com/documentation/avfoundation/avplayer/1388846-rate?language=objc - // - // Rates other than 0.0 and 1.0 can be used if the associated player - // item returns YES for the AVPlayerItem properties canPlaySlowForward - // or canPlayFastForward. - // - // Source #2: - // https://developer.apple.com/library/archive/qa/qa1772/_index.html - // - // An AVPlayerItem whose status property equals - // AVPlayerItemStatusReadyToPlay can be played at rates between 1.0 and - // 2.0, inclusive, even if AVPlayerItem.canPlayFastForward is NO. - // AVPlayerItem.canPlayFastForward indicates whether the item can be - // played at rates greater than 2.0. - // - // But in practice, it appears that even if AVPlayerItem.canPlayFastForward - // is NO, rates greater than 2.0 still work sometimes. - // - // So for now, we just let the app pass in any speed and hope for the best. - // There is no way to reliably query whether the requested speed is - // supported. - _speed = speed; - if (_playing && _player) { - _player.rate = speed; - } - [self updatePosition]; -} - -- (void)setLoopMode:(int)loopMode { - if (loopMode == _loopMode) return; - _loopMode = loopMode; - [self enqueueFrom:_index]; -} - -- (void)updateEndAction { - // Should be called in the following situations: - // - when the audio source changes - // - when _index changes - // - when the loop mode changes. - // - when the shuffle order changes. (TODO) - // - when the shuffle mode changes. - if (!_player) return; - if (_audioSource && (_loopMode != loopOff || ([_order count] > 0 && [_orderInv[_index] intValue] + 1 < [_order count]))) { - _player.actionAtItemEnd = AVPlayerActionAtItemEndAdvance; - } else { - _player.actionAtItemEnd = AVPlayerActionAtItemEndPause; // AVPlayerActionAtItemEndNone - } -} - -- (void)setShuffleModeEnabled:(BOOL)shuffleModeEnabled { - //NSLog(@"setShuffleModeEnabled: %d", shuffleModeEnabled); - _shuffleModeEnabled = shuffleModeEnabled; - if (!_audioSource) return; - - [self updateOrder]; - - [self enqueueFrom:_index]; -} - -- (void)setShuffleOrder:(NSDictionary *)dict { - if (!_audioSource) return; - - [_audioSource decodeShuffleOrder:dict]; - - [self updateOrder]; - - [self enqueueFrom:_index]; -} - -- (void)dumpQueue { - for (int i = 0; i < _player.items.count; i++) { - IndexedPlayerItem *playerItem = (IndexedPlayerItem *)_player.items[i]; - int j = [self indexForItem:playerItem]; - NSLog(@"- %d", j); - } -} - -- (void)setAutomaticallyWaitsToMinimizeStalling:(bool)automaticallyWaitsToMinimizeStalling { - _automaticallyWaitsToMinimizeStalling = automaticallyWaitsToMinimizeStalling; - if (@available(macOS 10.12, iOS 10.0, *)) { - if(_player) { - _player.automaticallyWaitsToMinimizeStalling = automaticallyWaitsToMinimizeStalling; - } - } -} - -- (void)setCanUseNetworkResourcesForLiveStreamingWhilePaused:(BOOL)enabled { - _loadControl.canUseNetworkResourcesForLiveStreamingWhilePaused = enabled; - if (!_indexedAudioSources) return; - for (int i = 0; i < [_indexedAudioSources count]; i++) { - [_indexedAudioSources[i] applyCanUseNetworkResourcesForLiveStreamingWhilePaused]; - } -} - -- (void)setPreferredPeakBitRate:(NSNumber *)preferredPeakBitRate { - _loadControl.preferredPeakBitRate = preferredPeakBitRate; - if (!_indexedAudioSources) return; - for (int i = 0; i < [_indexedAudioSources count]; i++) { - [_indexedAudioSources[i] applyPreferredPeakBitRate]; - } -} - -- (void)seek:(CMTime)position index:(NSNumber *)newIndex completionHandler:(void (^)(BOOL))completionHandler { - if 
(_processingState == none || _processingState == loading) { - if (completionHandler) { - completionHandler(NO); - } - return; - } - int index = _index; - if (newIndex != (id)[NSNull null]) { - index = [newIndex intValue]; - } - if (index != _index) { - // Jump to a new item - /* if (_playing && index == _index + 1) { */ - /* // Special case for jumping to the very next item */ - /* NSLog(@"seek to next item: %d -> %d", _index, index); */ - /* [_indexedAudioSources[_index] seek:kCMTimeZero]; */ - /* _index = index; */ - /* [_player advanceToNextItem]; */ - /* [self broadcastPlaybackEvent]; */ - /* } else */ - { - // Jump to a distant item - //NSLog(@"seek# jump to distant item: %d -> %d", _index, index); - if (_playing) { - [_player pause]; - } - [_indexedAudioSources[_index] seek:kCMTimeZero]; - // The "currentItem" key observer will respect that a seek is already in progress - _seekPos = position; - [self updatePosition]; - [self enqueueFrom:index]; - IndexedAudioSource *source = _indexedAudioSources[_index]; - if (abs((int)(1000 * CMTimeGetSeconds(CMTimeSubtract(source.position, position)))) > 100) { - [self enterBuffering:@"seek to index"]; - [self updatePosition]; - [self broadcastPlaybackEvent]; - [source seek:position completionHandler:^(BOOL finished) { - if (@available(macOS 10.12, iOS 10.0, *)) { - if (self->_playing) { - // Handled by timeControlStatus - } else { - if (self->_bufferUnconfirmed && !self->_player.currentItem.playbackBufferFull) { - // Stay in buffering - } else if (source.playerItem.status == AVPlayerItemStatusReadyToPlay) { - [self leaveBuffering:@"seek to index finished, (!bufferUnconfirmed || playbackBufferFull) && ready to play"]; - [self updatePosition]; - [self broadcastPlaybackEvent]; - } - } - } else { - if (self->_bufferUnconfirmed && !self->_player.currentItem.playbackBufferFull) { - // Stay in buffering - } else if (source.playerItem.status == AVPlayerItemStatusReadyToPlay) { - [self leaveBuffering:@"seek to index finished, (!bufferUnconfirmed || playbackBufferFull) && ready to play"]; - [self updatePosition]; - [self broadcastPlaybackEvent]; - } - } - if (self->_playing) { - self->_player.rate = self->_speed; - } - self->_seekPos = kCMTimeInvalid; - [self broadcastPlaybackEvent]; - if (completionHandler) { - completionHandler(finished); - } - }]; - } else { - _seekPos = kCMTimeInvalid; - if (_playing) { - if (@available(iOS 10.0, *)) { - // NOTE: Re-enable this line only after figuring out - // how to detect buffering when buffered audio is not - // immediately available. - //[_player playImmediatelyAtRate:_speed]; - _player.rate = _speed; - } else { - _player.rate = _speed; - } - } - completionHandler(YES); - } - } - } else { - // Seek within an item - if (_playing) { - [_player pause]; - } - _seekPos = position; - //NSLog(@"seek. enter buffering. pos = %d", (int)(1000*CMTimeGetSeconds(_indexedAudioSources[_index].position))); - // TODO: Move this into a separate method so it can also - // be used in skip. - [self enterBuffering:@"seek"]; - [self updatePosition]; - [self broadcastPlaybackEvent]; - [_indexedAudioSources[_index] seek:position completionHandler:^(BOOL finished) { - [self updatePosition]; - if (self->_playing) { - // If playing, buffering will be detected either by: - // 1. checkForDiscontinuity - // 2. timeControlStatus - if (@available(iOS 10.0, *)) { - // NOTE: Re-enable this line only after figuring out how to - // detect buffering when buffered audio is not immediately - // available. 
- //[_player playImmediatelyAtRate:_speed]; - self->_player.rate = self->_speed; - } else { - self->_player.rate = self->_speed; - } - } else { - // If not playing, there is no reliable way to detect - // when buffering has completed, so we use - // !playbackBufferEmpty. Although this always seems to - // be full even right after a seek. - if (self->_player.currentItem.playbackBufferEmpty) { - [self enterBuffering:@"seek finished, playbackBufferEmpty"]; - } else { - [self leaveBuffering:@"seek finished, !playbackBufferEmpty"]; - } - [self updatePosition]; - if (self->_processingState != buffering) { - [self broadcastPlaybackEvent]; - } - } - self->_seekPos = kCMTimeInvalid; - [self broadcastPlaybackEvent]; - if (completionHandler) { - completionHandler(finished); - } - }]; - } -} - -- (void)dispose { - if (!_player) return; - if (_processingState != none) { - [_player pause]; - _processingState = none; - // If used just before destroying the current FlutterEngine, this will result in: - // NSInternalInconsistencyException: 'Sending a message before the FlutterEngine has been run.' - //[self broadcastPlaybackEvent]; - } - if (_timeObserver) { - [_player removeTimeObserver:_timeObserver]; - _timeObserver = 0; - } - if (_indexedAudioSources) { - for (int i = 0; i < [_indexedAudioSources count]; i++) { - [self removeItemObservers:_indexedAudioSources[i].playerItem]; - if (_indexedAudioSources[i].playerItem2) { - [self removeItemObservers:_indexedAudioSources[i].playerItem2]; - } - } - _indexedAudioSources = nil; - } - _audioSource = nil; - if (_player) { - [_player removeObserver:self forKeyPath:@"currentItem"]; - if (@available(macOS 10.12, iOS 10.0, *)) { - [_player removeObserver:self forKeyPath:@"timeControlStatus"]; - } - _player = nil; - } - // Untested: - [_eventChannel dispose]; - [_dataEventChannel dispose]; - [_methodChannel setMethodCallHandler:nil]; -} - -@end diff --git a/just_audio/darwin/Classes/AudioSource.m b/just_audio/darwin/Classes/AudioSource.m deleted file mode 100644 index 899055754..000000000 --- a/just_audio/darwin/Classes/AudioSource.m +++ /dev/null @@ -1,36 +0,0 @@ -#import "AudioSource.h" -#import - -@implementation AudioSource { - NSString *_sourceId; -} - -- (instancetype)initWithId:(NSString *)sid { - self = [super init]; - NSAssert(self, @"super init cannot be nil"); - _sourceId = sid; - return self; -} - -- (NSString *)sourceId { - return _sourceId; -} - -- (int)buildSequence:(NSMutableArray *)sequence treeIndex:(int)treeIndex { - return 0; -} - -- (void)findById:(NSString *)sourceId matches:(NSMutableArray *)matches { - if ([_sourceId isEqualToString:sourceId]) { - [matches addObject:self]; - } -} - -- (NSArray *)getShuffleIndices { - return @[]; -} - -- (void)decodeShuffleOrder:(NSDictionary *)dict { -} - -@end diff --git a/just_audio/darwin/Classes/AudioSource.swift b/just_audio/darwin/Classes/AudioSource.swift new file mode 100644 index 000000000..9f34cce6a --- /dev/null +++ b/just_audio/darwin/Classes/AudioSource.swift @@ -0,0 +1,37 @@ +import AVFoundation + +class AudioSource { + let sourceId: String + + init(sid: String) { + sourceId = sid + } + + func buildSequence() -> Array<IndexedAudioSource> { + return [] + } + + func getShuffleIndices() -> [Int] { + return [] + } + + static func fromListJson(_ data: [[String: Any]]) throws -> [AudioSource] { + return try data.map { item in + try AudioSource.fromJson(item) + } + } + + static func fromJson(_ data: [String: Any]) throws -> AudioSource { + let type = data["type"] as!
String + + switch type { + case "progressive": + return UriAudioSource(sid: data["id"] as! String, uri: data["uri"] as! String) + case "concatenating": + return ConcatenatingAudioSource(sid: data["id"] as! String, audioSources: try AudioSource.fromListJson(data["children"] as! [Dictionary<String, Any>]), shuffleOrder: data["shuffleOrder"] as! Array<Int>) + default: + throw PluginError.notSupported(type, "When decoding audio source") + } + } + +} diff --git a/just_audio/darwin/Classes/BetterEventChannel.m b/just_audio/darwin/Classes/BetterEventChannel.m deleted file mode 100644 index d92a05626..000000000 --- a/just_audio/darwin/Classes/BetterEventChannel.m +++ /dev/null @@ -1,37 +0,0 @@ -#import "BetterEventChannel.h" - -@implementation BetterEventChannel { - FlutterEventChannel *_eventChannel; - FlutterEventSink _eventSink; -} - -- (instancetype)initWithName:(NSString*)name messenger:(NSObject *)messenger { - self = [super init]; - NSAssert(self, @"super init cannot be nil"); - _eventChannel = - [FlutterEventChannel eventChannelWithName:name binaryMessenger:messenger]; - [_eventChannel setStreamHandler:self]; - _eventSink = nil; - return self; -} - -- (FlutterError*)onListenWithArguments:(id)arguments eventSink:(FlutterEventSink)eventSink { - _eventSink = eventSink; - return nil; -} - -- (FlutterError*)onCancelWithArguments:(id)arguments { - _eventSink = nil; - return nil; -} - -- (void)sendEvent:(id)event { - if (!_eventSink) return; - _eventSink(event); -} - -- (void)dispose { - [_eventChannel setStreamHandler:nil]; -} - -@end diff --git a/just_audio/darwin/Classes/BetterEventChannel.swift b/just_audio/darwin/Classes/BetterEventChannel.swift new file mode 100644 index 000000000..774f0d702 --- /dev/null +++ b/just_audio/darwin/Classes/BetterEventChannel.swift @@ -0,0 +1,30 @@ +import Flutter + +class BetterEventChannel: NSObject, FlutterStreamHandler { + let eventChannel: FlutterEventChannel + var eventSink: FlutterEventSink? + + init(name: String, messenger: FlutterBinaryMessenger) { + eventChannel = FlutterEventChannel(name: name, binaryMessenger: messenger) + super.init() + eventChannel.setStreamHandler(self) + } + + func onListen(withArguments _: Any?, eventSink events: @escaping FlutterEventSink) -> FlutterError? { + eventSink = events + return nil + } + + func onCancel(withArguments _: Any?) -> FlutterError? { + eventSink = nil + return nil + } + + func sendEvent(_ event: Any) { + eventSink?(event) + } + + func dispose() { + eventChannel.setStreamHandler(nil) + } +} diff --git a/just_audio/darwin/Classes/ClippingAudioSource.m b/just_audio/darwin/Classes/ClippingAudioSource.m deleted file mode 100644 index f976925f3..000000000 --- a/just_audio/darwin/Classes/ClippingAudioSource.m +++ /dev/null @@ -1,113 +0,0 @@ -#import "AudioSource.h" -#import "ClippingAudioSource.h" -#import "IndexedPlayerItem.h" -#import "UriAudioSource.h" -#import - -@implementation ClippingAudioSource { - UriAudioSource *_audioSource; - CMTime _start; - CMTime _end; -} - -- (instancetype)initWithId:(NSString *)sid audioSource:(UriAudioSource *)audioSource start:(NSNumber *)start end:(NSNumber *)end { - self = [super initWithId:sid]; - NSAssert(self, @"super init cannot be nil"); - _audioSource = audioSource; - _start = start == (id)[NSNull null] ? kCMTimeZero : CMTimeMake([start longLongValue], 1000000); - _end = end == (id)[NSNull null] ?
kCMTimeInvalid : CMTimeMake([end longLongValue], 1000000); - return self; -} - -- (UriAudioSource *)audioSource { - return _audioSource; -} - -- (void)findById:(NSString *)sourceId matches:(NSMutableArray *)matches { - [super findById:sourceId matches:matches]; - [_audioSource findById:sourceId matches:matches]; -} - -- (void)attach:(AVQueuePlayer *)player initialPos:(CMTime)initialPos { - // Force super.attach to correct for the initial position. - if (CMTIME_IS_INVALID(initialPos)) { - initialPos = kCMTimeZero; - } - // Prepare clip to start/end at the right timestamps. - _audioSource.playerItem.forwardPlaybackEndTime = _end; - [super attach:player initialPos:initialPos]; -} - -- (IndexedPlayerItem *)playerItem { - return _audioSource.playerItem; -} - -- (IndexedPlayerItem *)playerItem2 { - return _audioSource.playerItem2; -} - -- (NSArray *)getShuffleIndices { - return @[@(0)]; -} - -- (void)play:(AVQueuePlayer *)player { -} - -- (void)pause:(AVQueuePlayer *)player { -} - -- (void)stop:(AVQueuePlayer *)player { -} - -- (void)seek:(CMTime)position completionHandler:(void (^)(BOOL))completionHandler { - if (!completionHandler || (self.playerItem.status == AVPlayerItemStatusReadyToPlay)) { - CMTime absPosition = CMTimeAdd(_start, position); - [_audioSource.playerItem seekToTime:absPosition toleranceBefore:kCMTimeZero toleranceAfter:kCMTimeZero completionHandler:completionHandler]; - } else { - [super seek:position completionHandler:completionHandler]; - } -} - -- (void)flip { - [_audioSource flip]; -} - -- (void)preparePlayerItem2 { - if (self.playerItem2) return; - [_audioSource preparePlayerItem2]; - IndexedPlayerItem *item = _audioSource.playerItem2; - // Prepare loop clip to start/end at the right timestamps. - item.forwardPlaybackEndTime = _end; - [item seekToTime:_start toleranceBefore:kCMTimeZero toleranceAfter:kCMTimeZero completionHandler:nil]; -} - -- (CMTime)duration { - return CMTimeSubtract(CMTIME_IS_INVALID(_end) ? self.playerItem.duration : _end, _start); -} - -- (void)setDuration:(CMTime)duration { -} - -- (CMTime)position { - return CMTimeSubtract(self.playerItem.currentTime, _start); -} - -- (CMTime)bufferedPosition { - CMTime pos = CMTimeSubtract(_audioSource.bufferedPosition, _start); - CMTime dur = [self duration]; - return CMTimeCompare(pos, dur) >= 0 ? 
dur : pos; -} - -- (void)applyPreferredForwardBufferDuration { - [_audioSource applyPreferredForwardBufferDuration]; -} - -- (void)applyCanUseNetworkResourcesForLiveStreamingWhilePaused { - [_audioSource applyCanUseNetworkResourcesForLiveStreamingWhilePaused]; -} - -- (void)applyPreferredPeakBitRate { - [_audioSource applyPreferredPeakBitRate]; -} - -@end diff --git a/just_audio/darwin/Classes/ConcatenatingAudioSource.m b/just_audio/darwin/Classes/ConcatenatingAudioSource.m deleted file mode 100644 index 5385c7be3..000000000 --- a/just_audio/darwin/Classes/ConcatenatingAudioSource.m +++ /dev/null @@ -1,92 +0,0 @@ -#import "AudioSource.h" -#import "ConcatenatingAudioSource.h" -#import -#import - -@implementation ConcatenatingAudioSource { - NSMutableArray *_audioSources; - NSArray *_shuffleOrder; -} - -- (instancetype)initWithId:(NSString *)sid audioSources:(NSMutableArray *)audioSources shuffleOrder:(NSArray *)shuffleOrder { - self = [super initWithId:sid]; - NSAssert(self, @"super init cannot be nil"); - _audioSources = audioSources; - _shuffleOrder = shuffleOrder; - return self; -} - -- (int)count { - return (int)_audioSources.count; -} - -- (void)insertSource:(AudioSource *)audioSource atIndex:(int)index { - [_audioSources insertObject:audioSource atIndex:index]; -} - -- (void)removeSourcesFromIndex:(int)start toIndex:(int)end { - if (end == -1) end = (int)_audioSources.count; - for (int i = start; i < end; i++) { - [_audioSources removeObjectAtIndex:start]; - } -} - -- (void)moveSourceFromIndex:(int)currentIndex toIndex:(int)newIndex { - AudioSource *source = _audioSources[currentIndex]; - [_audioSources removeObjectAtIndex:currentIndex]; - [_audioSources insertObject:source atIndex:newIndex]; -} - -- (int)buildSequence:(NSMutableArray *)sequence treeIndex:(int)treeIndex { - for (int i = 0; i < [_audioSources count]; i++) { - treeIndex = [_audioSources[i] buildSequence:sequence treeIndex:treeIndex]; - } - return treeIndex; -} - -- (void)findById:(NSString *)sourceId matches:(NSMutableArray *)matches { - [super findById:sourceId matches:matches]; - for (int i = 0; i < [_audioSources count]; i++) { - [_audioSources[i] findById:sourceId matches:matches]; - } -} - -- (NSArray *)getShuffleIndices { - NSMutableArray *order = [NSMutableArray new]; - int offset = (int)[order count]; - NSMutableArray *> *childOrders = [NSMutableArray new]; // array of array of ints - for (int i = 0; i < [_audioSources count]; i++) { - AudioSource *audioSource = _audioSources[i]; - NSArray *childShuffleIndices = [audioSource getShuffleIndices]; - NSMutableArray *offsetChildShuffleOrder = [NSMutableArray new]; - for (int j = 0; j < [childShuffleIndices count]; j++) { - [offsetChildShuffleOrder addObject:@([childShuffleIndices[j] integerValue] + offset)]; - } - [childOrders addObject:offsetChildShuffleOrder]; - offset += [childShuffleIndices count]; - } - for (int i = 0; i < [_audioSources count]; i++) { - [order addObjectsFromArray:childOrders[[_shuffleOrder[i] integerValue]]]; - } - return order; -} - -- (void)setShuffleOrder:(NSArray *)shuffleOrder { - _shuffleOrder = shuffleOrder; -} - -- (void)decodeShuffleOrder:(NSDictionary *)dict { - _shuffleOrder = (NSArray *)dict[@"shuffleOrder"]; - NSArray *dictChildren = (NSArray *)dict[@"children"]; - if (_audioSources.count != dictChildren.count) { - NSLog(@"decodeShuffleOrder Concatenating children don't match"); - return; - } - for (int i = 0; i < [_audioSources count]; i++) { - AudioSource *child = _audioSources[i]; - NSDictionary *dictChild = (NSDictionary 
*)dictChildren[i]; - [child decodeShuffleOrder:dictChild]; - } -} - -@end diff --git a/just_audio/darwin/Classes/ConcatenatingAudioSource.swift b/just_audio/darwin/Classes/ConcatenatingAudioSource.swift new file mode 100644 index 000000000..eb28806e8 --- /dev/null +++ b/just_audio/darwin/Classes/ConcatenatingAudioSource.swift @@ -0,0 +1,22 @@ +import AVFoundation + +class ConcatenatingAudioSource: AudioSource { + let audioSources: [AudioSource] + let shuffleOrder: [Int] + + init(sid: String, audioSources: [AudioSource], shuffleOrder: [Int]) { + self.audioSources = audioSources + self.shuffleOrder = shuffleOrder + super.init(sid: sid) + } + + override func buildSequence() -> [IndexedAudioSource] { + return audioSources.flatMap { + $0.buildSequence() + } + } + + override func getShuffleIndices() -> [Int] { + return shuffleOrder; + } +} diff --git a/just_audio/darwin/Classes/IndexedAudioSource.m b/just_audio/darwin/Classes/IndexedAudioSource.m deleted file mode 100644 index 219d31043..000000000 --- a/just_audio/darwin/Classes/IndexedAudioSource.m +++ /dev/null @@ -1,106 +0,0 @@ -#import "IndexedAudioSource.h" -#import "IndexedPlayerItem.h" -#import - -@implementation IndexedAudioSource { - BOOL _isAttached; - CMTime _queuedSeekPos; - void (^_queuedSeekCompletionHandler)(BOOL); -} - -- (instancetype)initWithId:(NSString *)sid { - self = [super initWithId:sid]; - NSAssert(self, @"super init cannot be nil"); - _isAttached = NO; - _queuedSeekPos = kCMTimeInvalid; - _queuedSeekCompletionHandler = nil; - return self; -} - -- (void)onStatusChanged:(AVPlayerItemStatus)status { - if (status == AVPlayerItemStatusReadyToPlay) { - // This handles a pending seek during a load. - // TODO: Test seeking during a seek. - if (_queuedSeekCompletionHandler) { - [self seek:_queuedSeekPos completionHandler:_queuedSeekCompletionHandler]; - _queuedSeekPos = kCMTimeInvalid; - _queuedSeekCompletionHandler = nil; - } - } -} - -- (IndexedPlayerItem *)playerItem { - return nil; -} - -- (IndexedPlayerItem *)playerItem2 { - return nil; -} - -- (BOOL)isAttached { - return _isAttached; -} - -- (int)buildSequence:(NSMutableArray *)sequence treeIndex:(int)treeIndex { - [sequence addObject:self]; - return treeIndex + 1; -} - -- (void)attach:(AVQueuePlayer *)player initialPos:(CMTime)initialPos { - _isAttached = YES; - if (CMTIME_IS_VALID(initialPos)) { - [self seek:initialPos]; - } -} - -- (void)play:(AVQueuePlayer *)player { -} - -- (void)pause:(AVQueuePlayer *)player { -} - -- (void)stop:(AVQueuePlayer *)player { -} - -- (void)seek:(CMTime)position { - [self seek:position completionHandler:nil]; -} - -- (void)seek:(CMTime)position completionHandler:(void (^)(BOOL))completionHandler { - if (completionHandler && (self.playerItem.status != AVPlayerItemStatusReadyToPlay)) { - _queuedSeekPos = position; - _queuedSeekCompletionHandler = completionHandler; - } -} - -- (void)flip { -} - -- (void)preparePlayerItem2 { -} - -- (CMTime)duration { - return kCMTimeInvalid; -} - -- (void)setDuration:(CMTime)duration { -} - -- (CMTime)position { - return kCMTimeInvalid; -} - -- (CMTime)bufferedPosition { - return kCMTimeInvalid; -} - -- (void)applyPreferredForwardBufferDuration { -} - -- (void)applyCanUseNetworkResourcesForLiveStreamingWhilePaused { -} - -- (void)applyPreferredPeakBitRate { -} - -@end diff --git a/just_audio/darwin/Classes/IndexedAudioSource.swift b/just_audio/darwin/Classes/IndexedAudioSource.swift new file mode 100644 index 000000000..fa315ead1 --- /dev/null +++ b/just_audio/darwin/Classes/IndexedAudioSource.swift @@ -0,0 
+1,15 @@ +import AVFoundation + +class IndexedAudioSource: AudioSource { + func load(engine _: AVAudioEngine, playerNode _: AVAudioPlayerNode, speedControl _: AVAudioUnitVarispeed, position _: CMTime?, completionHandler _: @escaping () -> Void) throws { + throw PluginError.notImplemented("Not implemented IndexedAudioSource.load") + } + + func getDuration() -> CMTime { + return CMTime.invalid + } + + override func buildSequence() -> Array<IndexedAudioSource> { + return [self] + } +} diff --git a/just_audio/darwin/Classes/IndexedPlayerItem.m b/just_audio/darwin/Classes/IndexedPlayerItem.m deleted file mode 100644 index 7a0218617..000000000 --- a/just_audio/darwin/Classes/IndexedPlayerItem.m +++ /dev/null @@ -1,6 +0,0 @@ -#import "IndexedPlayerItem.h" -#import "IndexedAudioSource.h" - -@implementation IndexedPlayerItem -@synthesize audioSource; -@end diff --git a/just_audio/darwin/Classes/JustAudioPlayer.swift b/just_audio/darwin/Classes/JustAudioPlayer.swift new file mode 100644 index 000000000..a5cee6ff0 --- /dev/null +++ b/just_audio/darwin/Classes/JustAudioPlayer.swift @@ -0,0 +1,600 @@ +import AVFoundation + +class PluginError : Error { + let code: Int + let message: String + + init(_ code: Int, _ message: String) { + self.code = code + self.message = message + } + + static func notImplemented(_ message: String) -> PluginError { + return PluginError(500, message) + } + + static func notInitialized(_ message: String) -> PluginError { + return PluginError(403, message) + } + + static func notSupported(_ value: Any, _ message: Any) -> PluginError { + return PluginError(400, "Not supported: \(value)\n\(message)") + } +} + +public class JustAudioPlayer: NSObject { + let playerId: String + let audioEffects: [[String: Any]] + + let methodChannel: FlutterMethodChannel + let eventChannel: BetterEventChannel + let dataChannel: BetterEventChannel + + var player: Player! + + init(registrar: FlutterPluginRegistrar, playerId: String, loadConfiguration: [String: Any], audioEffects: [[String: Any]]) { + self.playerId = playerId + self.audioEffects = audioEffects + + methodChannel = FlutterMethodChannel(name: String(format: "com.ryanheise.just_audio.methods.%@", playerId), binaryMessenger: registrar.messenger()) + eventChannel = BetterEventChannel(name: String(format: "com.ryanheise.just_audio.events.%@", playerId), messenger: registrar.messenger()) + dataChannel = BetterEventChannel(name: String(format: "com.ryanheise.just_audio.data.%@", playerId), messenger: registrar.messenger()) + +// print("TODO: loadConfiguration", loadConfiguration) + + super.init() + // Capture self weakly to avoid a retain cycle between the channel and the player. + methodChannel.setMethodCallHandler { [weak self] call, result in + self?.handleMethodCall(call: call, result: result) + } + } + + func handleMethodCall(call: FlutterMethodCall, result: @escaping FlutterResult) { + do { + if (player == nil) { + player = Player(audioEffects: try audioEffects.map(Mapping.effectFrom), onEvent: onPlaybackEvent) + } + + let request = call.arguments as! [String: Any] + print("=========== \(call.method) \(request)") + + switch call.method { + case "load": + let source = try AudioSource.fromJson(request["audioSource"] as! [String: Any]) + let initialPosition = request["initialPosition"] == nil ? CMTime.invalid : CMTimeMake(value: request["initialPosition"] as! Int64, timescale: 1_000_000) + let initialIndex = request["initialIndex"] as? Int ?? 0
+ + let duration = player.load(source: source, initialPosition: initialPosition, initialIndex: initialIndex) + result(["duration": duration.microSeconds]) + case "play": + player.play() + result([:]) + case "pause": + player.pause() + result([:]) + case "setVolume": + player.setVolume(Float(request["volume"] as! Double)) + result([:]) +// case "setSkipSilence": +// print("TODO: setSkipSilence", request) +// result([:]) + case "setSpeed": + player.setSpeed(Float(request["speed"] as! Double)) + result([:]) + case "setLoopMode": + player.setLoopMode(mode: Mapping.loopModeFrom(request["loopMode"] as! Int)) + result([:]) + case "setShuffleMode": + player.setShuffleMode(isEnabled: Mapping.shuffleModeFrom(request["shuffleMode"] as! Int)) + result([:]) +// case "setShuffleOrder": +// print("TODO: setShuffleOrder", request) +// result([:]) +// case "setAutomaticallyWaitsToMinimizeStalling": +// print("TODO: setAutomaticallyWaitsToMinimizeStalling", request) +// result([:]) +// case "setCanUseNetworkResourcesForLiveStreamingWhilePaused": +// print("TODO: setCanUseNetworkResourcesForLiveStreamingWhilePaused", request) +// result([:]) +// case "setPreferredPeakBitRate": +// print("TODO: setPreferredPeakBitRate", request) +// result([:]) + case "seek": + let position = Mapping.timeFrom(microseconds: request["position"] as! Int64) + let index = request["index"] as? Int + + player.seek(index: index, position: position) + result([:]) +// case "concatenatingInsertAll": +// print("TODO: concatenatingInsertAll", request) +// result([:]) +// case "concatenatingRemoveRange": +// print("TODO: concatenatingRemoveRange", request) +// result([:]) +// case "concatenatingMove": +// print("TODO: concatenatingMove", request) +// result([:]) + case "audioEffectSetEnabled": + try player.enableEffect(type: request["type"] as! String, enabled: request["enabled"] as! Bool) + result([:]) + case "darwinEqualizerBandSetGain": + player.setEqualizerBandGain(bandIndex: request["bandIndex"] as! Int, gain: Float(request["gain"] as! Double)) + result([:]) + default: + result(FlutterMethodNotImplemented) + } + } catch let error as PluginError { + result(FlutterError(code: "\(error.code)", message: error.message, details: nil)) + } catch let error { + print(error) + result(FlutterError(code: "500", message: error.localizedDescription, details: nil)) + } + } + + func onPlaybackEvent(event: PlaybackEvent) { + eventChannel.sendEvent([ + "processingState": event.processingState.rawValue, + "updatePosition": event.updatePosition.microSeconds, + "updateTime": event.updateTime, + "bufferedPosition": 0, + "icyMetadata": [:], + "duration": event.duration.microSeconds, + "currentIndex": event.currentIndex, + ]) + } + + func dispose() { + player?.dispose() + player = nil + + eventChannel.dispose() + dataChannel.dispose() + methodChannel.setMethodCallHandler(nil) + } +} + +enum ProcessingState: Int, Codable { + case none, loading, buffering, ready, completed +} + +enum LoopMode: Int { + case loopOff, loopOne, loopAll +} + +class Player { + let onEvent: (PlaybackEvent) -> Void + let audioEffects: [EffectData] + + var engine: AVAudioEngine! + var playerNode: AVAudioPlayerNode! + var speedControl: AVAudioUnitVarispeed! + var audioUnitEQ: AVAudioUnitEQ? + + // State properties + var processingState: ProcessingState = .none + var shuffleModeEnabled = false + var loopMode: LoopMode = .loopOff + + // Queue properties + var indexedAudioSources: [IndexedAudioSource] = [] + var currentSource: IndexedAudioSource?
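+ // Reviewer note (not part of the original patch): `order` below maps a playback slot to a source index, while `orderInv` maps a source index back to its playback slot; both are rebuilt by updateOrder(). Worked example, assuming three sources and getShuffleIndices() == [2, 0, 1]: then order == [2, 0, 1] and orderInv == [1, 2, 0], i.e. source 0 is played in slot 1.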
+ var order: [Int] = [] + var orderInv: [Int] = [] + + // Current Source + var index: Int = 0 + var audioSource: AudioSource! + var duration: CMTime { + if processingState == .none || processingState == .loading { + return CMTime.invalid + } else if indexedAudioSources.count > 0 { + return currentSource!.getDuration() + } else { + return CMTime.zero + } + } + + // Positions properties + var positionUpdatedAt: Int64 = 0 + var positionUpdate: CMTime = .zero + var positionOffset: CMTime = .zero + var currentPosition: CMTime { return positionUpdate + positionOffset } + + // Extra properties + var volume: Float = 1 + var rate: Float = 1 + + init(audioEffects: [EffectData], onEvent: @escaping (PlaybackEvent) -> Void) { + self.audioEffects = audioEffects + self.onEvent = onEvent + } + + func load(source: AudioSource, initialPosition _: CMTime, initialIndex: Int) -> CMTime { + if playerNode != nil { + playerNode.pause() + } + + index = initialIndex + processingState = .loading + updatePosition(CMTime.zero) + // Decode audio source + audioSource = source + + indexedAudioSources = audioSource.buildSequence() + + updateOrder() + + if indexedAudioSources.isEmpty { + + processingState = .none + broadcastPlaybackEvent() + + return CMTime.zero + } + + if engine == nil { + engine = AVAudioEngine() + playerNode = AVAudioPlayerNode() + speedControl = AVAudioUnitVarispeed() + + try! createAudioEffects() + + playerNode.volume = volume + speedControl.rate = rate + + var nodes = [playerNode, speedControl] + + // add equalizer node + if audioUnitEQ != nil { + nodes.append(audioUnitEQ!) + } + + // attach all nodes to engine + for node in nodes { + engine.attach(node!) + } + + // add mainMixerNode + nodes.append(engine.mainMixerNode) + + for i in 1 ..< nodes.count { + engine.connect(nodes[i - 1]!, to: nodes[i]!, format: nil) + } + + // Observe for changes in the audio engine configuration + NotificationCenter.default.addObserver(self, + selector: #selector(_handleInterruption), + name: NSNotification.Name.AVAudioEngineConfigurationChange, + object: nil + ) + } + + try! setQueueFrom(index) + + _loadCurrentSource() + + if !engine.isRunning { + try! engine.start() + } + + processingState = .ready + broadcastPlaybackEvent() + + return duration + } + + @objc func _handleInterruption(notification: Notification) { + _resume() + } + + func play() { + _play() + updatePosition(nil) + broadcastPlaybackEvent() + } + + func pause() { + updatePosition(nil) + playerNode.pause() + broadcastPlaybackEvent() + } + + func _resume() { + let wasPlaying = playerNode.isPlaying + + playerNode.pause() + if (!engine.isRunning) { + try! engine.start() + } + + if (wasPlaying) { + playerNode.play() + } + } + + func seek(index: Int?, position: CMTime) { + let wasPlaying = self.playerNode.isPlaying + + if let index = index { + try! setQueueFrom(index) + } + + _stop() + + updatePosition(position) + + processingState = .ready + + _loadCurrentSource() + + // Restart playback if the player was playing + if (wasPlaying) { + _play() + } + + broadcastPlaybackEvent() + } + + func updatePosition(_ positionUpdate: CMTime?) { + self.positionUpdatedAt = Int64(Date().timeIntervalSince1970 * 1000) + if let positionUpdate = positionUpdate { self.positionUpdate = positionUpdate } + self.positionOffset = indexedAudioSources.count > 0 && positionUpdate == nil ? self.playerNode.currentTime : CMTime.zero + }
+ + var _isStopping = false + // Used to detect whether [load]'s completionHandler fired because of a forced stop + func _stop() { + _isStopping = true + playerNode.stop() + _isStopping = false + } + + func _play() { + if (!self.engine.isRunning) { + try! self.engine.start() + } + playerNode.play() + } + + func _loadCurrentSource() { + try! currentSource!.load(engine: engine, playerNode: playerNode, speedControl: speedControl, position: positionUpdate, completionHandler: { + if (self._isStopping) {return} + DispatchQueue.main.async { + self._playNext() + } + }) + } + + func _getRelativeIndex(_ offset: Int) -> Int { + switch (loopMode) { + case .loopOne: + return self.index + case .loopAll: + return offset >= self.indexedAudioSources.count ? 0 : self.orderInv[offset] + case .loopOff: + return self.orderInv[offset] + } + } + + func _playNext() { + let newIndex = self.index + 1 + if newIndex >= self.indexedAudioSources.count { + self._complete() + } else { + self.seek(index: self._getRelativeIndex(newIndex), position: CMTime.zero) + self.play() + } + } + + func _complete() { + updatePosition(nil) + processingState = .completed + if playerNode != nil { + playerNode.stop() + } + broadcastPlaybackEvent() + } + + // ========== QUEUE + + func setQueueFrom(_ index: Int) throws { + guard !indexedAudioSources.isEmpty else { + preconditionFailure("no songs in the library") + } + self.index = index + currentSource = indexedAudioSources[index] + } + + // ========== MODES + + func setShuffleMode(isEnabled: Bool) { + shuffleModeEnabled = isEnabled + updateOrder() + broadcastPlaybackEvent() + } + + func setLoopMode(mode: LoopMode) { + loopMode = mode + broadcastPlaybackEvent() + } + + func updateOrder() { + orderInv = Array(repeating: 0, count: indexedAudioSources.count) + if shuffleModeEnabled { + order = audioSource.getShuffleIndices() + } else { + order = indexedAudioSources.enumerated().map { index, _ in + index + } + } + for i in 0 ..< indexedAudioSources.count { + orderInv[order[i]] = i + } + } + + // ========== EFFECTS + + func createAudioEffects() throws { + for effect in audioEffects { + if let effect = effect as? EqualizerEffectData {
+ audioUnitEQ = AVAudioUnitEQ(numberOfBands: effect.parameters.bands.count) + + for (i, band) in effect.parameters.bands.enumerated() { + audioUnitEQ!.bands[i].filterType = .parametric + audioUnitEQ!.bands[i].frequency = band.centerFrequency + audioUnitEQ!.bands[i].bandwidth = 1 // one octave (AVAudioUnitEQ bandwidth is specified in octaves) + audioUnitEQ!.bands[i].gain = Mapping.gainFrom(band.gain) + audioUnitEQ!.bands[i].bypass = false + } + + audioUnitEQ!.bypass = !effect.enabled + } else { + throw PluginError.notSupported(effect.type, "When initializing effect") + } + } + } + + func enableEffect(type: String, enabled: Bool) throws { + switch type { + case "DarwinEqualizer": + audioUnitEQ!.bypass = !enabled + default: + throw PluginError.notInitialized("Effect \(type) has not been initialized") + } + } + + func setEqualizerBandGain(bandIndex: Int, gain: Float) { + audioUnitEQ?.bands[bandIndex].gain = gain + } + + // ======== EXTRA + + func setVolume(_ value: Float) { + volume = value + if playerNode != nil { + playerNode.volume = volume + } + broadcastPlaybackEvent() + } + + func setSpeed(_ value: Float) { + rate = value + if speedControl != nil { + speedControl.rate = rate + } + updatePosition(nil) + } + + func broadcastPlaybackEvent() { + onEvent(PlaybackEvent( + processingState: processingState, + updatePosition: self.currentPosition, + updateTime: self.positionUpdatedAt, + duration: duration, + currentIndex: index + )) + } + + func dispose() { + if processingState != .none { + playerNode?.pause() + processingState = .none + } + audioSource = nil + indexedAudioSources = [] + playerNode?.stop() + engine?.stop() + } +} + +extension CMTime { + var milliSeconds: Int64 { + return self == CMTime.invalid ? -1 : Int64(value * 1000 / Int64(timescale)) + } + + var microSeconds: Int64 { + return self == CMTime.invalid ? -1 : Int64(value * 1_000_000 / Int64(timescale)) + } +} + +extension AVAudioPlayerNode { + var currentTime: CMTime { + if let nodeTime: AVAudioTime = lastRenderTime, let playerTime: AVAudioTime = playerTime(forNodeTime: nodeTime) { + let currentTime = Double(playerTime.sampleTime) / playerTime.sampleRate + let milliSeconds = Int64(currentTime * 1000) + return milliSeconds < 0 ? CMTime.zero : CMTime(value: milliSeconds, timescale: 1000) + } + return CMTime.zero + } +} + +class Mapping { + static func timeFrom(microseconds: Int64) -> CMTime { + return CMTimeMake(value: microseconds, timescale: 1_000_000) + } + + static func loopModeFrom(_ value: Int) -> LoopMode { + switch (value) { + case 1: + return LoopMode.loopOne + case 2: + return LoopMode.loopAll + default: + return LoopMode.loopOff + } + } + + static func shuffleModeFrom(_ value: Int) -> Bool { + return value == 1 + } + + static func gainFrom(_ value: Float) -> Float { + // Equalize the level between iOS and Android + return value * 2.8 + } + + static func effectFrom(_ map: [String: Any]) throws -> EffectData { + let type = map["type"] as! String + switch (type) { + case EffectType.darwinEqualizer.rawValue: + return EqualizerEffectData.fromJson(map) + default: + throw PluginError.notSupported(type, "When decoding effect") + } + } +} + +enum EffectType : String, Codable { + case darwinEqualizer = "DarwinEqualizer" +} + +protocol EffectData { + var type: EffectType { get } +} + +struct EqualizerEffectData : EffectData, Codable { + let type: EffectType + let enabled: Bool + let parameters: ParamsEqualizerData + + static func fromJson(_ map: [String: Any]) -> EqualizerEffectData { + return try! JSONDecoder().decode(EqualizerEffectData.self, from: JSONSerialization.data(withJSONObject: map)) + }
+} + +struct ParamsEqualizerData : Codable { + let bands: Array<BandEqualizerData> +} + +struct BandEqualizerData : Codable { + let index: Int + let centerFrequency: Float + let gain: Float +} + +struct PlaybackEvent { + let processingState: ProcessingState + let updatePosition: CMTime + let updateTime: Int64 + let duration: CMTime + let currentIndex: Int +} diff --git a/just_audio/darwin/Classes/JustAudioPlugin.m b/just_audio/darwin/Classes/JustAudioPlugin.m index e2a6e6e75..aa8dee497 100644 --- a/just_audio/darwin/Classes/JustAudioPlugin.m +++ b/just_audio/darwin/Classes/JustAudioPlugin.m @@ -1,58 +1,17 @@ #import "JustAudioPlugin.h" -#import "AudioPlayer.h" -#import -#include +#if __has_include(<just_audio/just_audio-Swift.h>) +#import <just_audio/just_audio-Swift.h> +#else +// Support project import fallback if the generated compatibility header +// is not copied when this plugin is created as a library. +// https://forums.swift.org/t/swift-static-libraries-dont-copy-generated-objective-c-header/19816 +#import "just_audio-Swift.h" +#endif -@implementation JustAudioPlugin { - NSObject<FlutterPluginRegistrar>* _registrar; - NSMutableDictionary *_players; -} +#import +@implementation JustAudioPlugin + (void)registerWithRegistrar:(NSObject<FlutterPluginRegistrar>*)registrar { - FlutterMethodChannel* channel = [FlutterMethodChannel - methodChannelWithName:@"com.ryanheise.just_audio.methods" - binaryMessenger:[registrar messenger]]; - JustAudioPlugin* instance = [[JustAudioPlugin alloc] initWithRegistrar:registrar]; - [registrar addMethodCallDelegate:instance channel:channel]; -} - -- (instancetype)initWithRegistrar:(NSObject<FlutterPluginRegistrar> *)registrar { - self = [super init]; - NSAssert(self, @"super init cannot be nil"); - _registrar = registrar; - _players = [[NSMutableDictionary alloc] init]; - return self; + [SwiftJustAudioPlugin registerWithRegistrar:registrar]; } - -- (void)handleMethodCall:(FlutterMethodCall*)call result:(FlutterResult)result { - if ([@"init" isEqualToString:call.method]) { - NSDictionary *request = (NSDictionary *)call.arguments; - NSString *playerId = (NSString *)request[@"id"]; - NSDictionary *loadConfiguration = (NSDictionary *)request[@"audioLoadConfiguration"]; - if ([_players objectForKey:playerId] != nil) { - FlutterError *flutterError = [FlutterError errorWithCode:@"error" message:@"Platform player already exists" details:nil]; - result(flutterError); - } else { - AudioPlayer* player = [[AudioPlayer alloc] initWithRegistrar:_registrar playerId:playerId loadConfiguration:loadConfiguration]; - [_players setValue:player forKey:playerId]; - result(nil); - } - } else if ([@"disposePlayer" isEqualToString:call.method]) { - NSDictionary *request = (NSDictionary *)call.arguments; - NSString *playerId = request[@"id"]; - [_players[playerId] dispose]; - [_players setValue:nil forKey:playerId]; - result(@{}); - } else { - result(FlutterMethodNotImplemented); - } -} - -- (void)dealloc { - for (NSString *playerId in _players) { - [_players[playerId] dispose]; - } - [_players removeAllObjects]; -} - @end diff --git a/just_audio/darwin/Classes/LoadControl.m b/just_audio/darwin/Classes/LoadControl.m deleted file mode 100644 index b2969cb5a..000000000 --- a/just_audio/darwin/Classes/LoadControl.m +++ /dev/null @@ -1,7 +0,0 @@ -#import "LoadControl.h" - -@implementation LoadControl -@synthesize preferredForwardBufferDuration; -@synthesize canUseNetworkResourcesForLiveStreamingWhilePaused; -@synthesize preferredPeakBitRate; -@end diff --git a/just_audio/darwin/Classes/LoopingAudioSource.m
b/just_audio/darwin/Classes/LoopingAudioSource.m deleted file mode 100644 index a8bae2ede..000000000 --- a/just_audio/darwin/Classes/LoopingAudioSource.m +++ /dev/null @@ -1,53 +0,0 @@ -#import "AudioSource.h" -#import "LoopingAudioSource.h" -#import - -@implementation LoopingAudioSource { - // An array of duplicates - NSArray *_audioSources; // -} - -- (instancetype)initWithId:(NSString *)sid audioSources:(NSArray *)audioSources { - self = [super initWithId:sid]; - NSAssert(self, @"super init cannot be nil"); - _audioSources = audioSources; - return self; -} - -- (int)buildSequence:(NSMutableArray *)sequence treeIndex:(int)treeIndex { - for (int i = 0; i < [_audioSources count]; i++) { - treeIndex = [_audioSources[i] buildSequence:sequence treeIndex:treeIndex]; - } - return treeIndex; -} - -- (void)findById:(NSString *)sourceId matches:(NSMutableArray *)matches { - [super findById:sourceId matches:matches]; - for (int i = 0; i < [_audioSources count]; i++) { - [_audioSources[i] findById:sourceId matches:matches]; - } -} - -- (NSArray *)getShuffleIndices { - NSMutableArray *order = [NSMutableArray new]; - int offset = (int)[order count]; - for (int i = 0; i < [_audioSources count]; i++) { - AudioSource *audioSource = _audioSources[i]; - NSArray *childShuffleOrder = [audioSource getShuffleIndices]; - for (int j = 0; j < [childShuffleOrder count]; j++) { - [order addObject:@([childShuffleOrder[j] integerValue] + offset)]; - } - offset += [childShuffleOrder count]; - } - return order; -} - -- (void)decodeShuffleOrder:(NSDictionary *)dict { - NSDictionary *dictChild = (NSDictionary *)dict[@"child"]; - for (int i = 0; i < [_audioSources count]; i++) { - AudioSource *child = _audioSources[i]; - [child decodeShuffleOrder:dictChild]; - } -} - -@end diff --git a/just_audio/darwin/Classes/SwiftJustAudioPlugin.swift b/just_audio/darwin/Classes/SwiftJustAudioPlugin.swift new file mode 100644 index 000000000..f3696deb8 --- /dev/null +++ b/just_audio/darwin/Classes/SwiftJustAudioPlugin.swift @@ -0,0 +1,45 @@ +import Flutter +import UIKit + +public class SwiftJustAudioPlugin: NSObject, FlutterPlugin { + var players: [String: JustAudioPlayer] = [:] + let registrar: FlutterPluginRegistrar + + init(registrar: FlutterPluginRegistrar) { + self.registrar = registrar + } + + public static func register(with registrar: FlutterPluginRegistrar) { + let channel = FlutterMethodChannel(name: "com.ryanheise.just_audio.methods", binaryMessenger: registrar.messenger()) + let instance = SwiftJustAudioPlugin(registrar: registrar) + registrar.addMethodCallDelegate(instance, channel: channel) + } + + public func handle(_ call: FlutterMethodCall, result: @escaping FlutterResult) { + switch call.method { + case "init": + let request = call.arguments as! [String: Any] + let playerId = request["id"] as! String + + print("init: ", request) + + let loadConfiguration = request["audioLoadConfiguration"] as? [String: Any] ?? [:] + let audioEffects = request["darwinAudioEffects"] as? [[String: Any]] ?? [] + if players[playerId] != nil { + let flutterError = FlutterError(code: "error", message: "Platform player already exists", details: nil) + result(flutterError) + } else { + let player = JustAudioPlayer(registrar: registrar, playerId: playerId, loadConfiguration: loadConfiguration, audioEffects: audioEffects) + players[playerId] = player + result(nil) + } + case "disposePlayer": + let request = call.arguments as! [String: Any] + let playerId = request["id"] as! 
String + players.removeValue(forKey: playerId)?.dispose() + result([:]) + default: + result(FlutterMethodNotImplemented) + } + } +} diff --git a/just_audio/darwin/Classes/UriAudioSource.m b/just_audio/darwin/Classes/UriAudioSource.m deleted file mode 100644 index baccf6c41..000000000 --- a/just_audio/darwin/Classes/UriAudioSource.m +++ /dev/null @@ -1,175 +0,0 @@ -#import "UriAudioSource.h" -#import "IndexedAudioSource.h" -#import "IndexedPlayerItem.h" -#import "LoadControl.h" -#import - -@implementation UriAudioSource { - NSString *_uri; - IndexedPlayerItem *_playerItem; - IndexedPlayerItem *_playerItem2; - /* CMTime _duration; */ - LoadControl *_loadControl; -} - -- (instancetype)initWithId:(NSString *)sid uri:(NSString *)uri loadControl:(LoadControl *)loadControl { - self = [super initWithId:sid]; - NSAssert(self, @"super init cannot be nil"); - _uri = uri; - _loadControl = loadControl; - _playerItem = [self createPlayerItem:uri]; - _playerItem2 = nil; - return self; -} - -- (NSString *)uri { - return _uri; -} - -- (IndexedPlayerItem *)createPlayerItem:(NSString *)uri { - IndexedPlayerItem *item; - if ([uri hasPrefix:@"file://"]) { - item = [[IndexedPlayerItem alloc] initWithURL:[NSURL fileURLWithPath:[[uri stringByRemovingPercentEncoding] substringFromIndex:7]]]; - } else { - item = [[IndexedPlayerItem alloc] initWithURL:[NSURL URLWithString:uri]]; - } - if (@available(macOS 10.13, iOS 11.0, *)) { - // This does the best at reducing distortion on voice with speeds below 1.0 - item.audioTimePitchAlgorithm = AVAudioTimePitchAlgorithmTimeDomain; - } - if (@available(macOS 10.12, iOS 10.0, *)) { - if (_loadControl.preferredForwardBufferDuration != (id)[NSNull null]) { - item.preferredForwardBufferDuration = (double)([_loadControl.preferredForwardBufferDuration longLongValue]/1000) / 1000.0; - } - } - if (@available(iOS 9.0, macOS 10.11, *)) { - item.canUseNetworkResourcesForLiveStreamingWhilePaused = _loadControl.canUseNetworkResourcesForLiveStreamingWhilePaused; - } - if (@available(iOS 8.0, macOS 10.10, *)) { - if (_loadControl.preferredPeakBitRate != (id)[NSNull null]) { - item.preferredPeakBitRate = [_loadControl.preferredPeakBitRate doubleValue]; - } - } - - return item; -} - -// Not used. XXX: Remove? 
-- (void)applyPreferredForwardBufferDuration { - if (@available(macOS 10.12, iOS 10.0, *)) { - if (_loadControl.preferredForwardBufferDuration != (id)[NSNull null]) { - double value = (double)([_loadControl.preferredForwardBufferDuration longLongValue]/1000) / 1000.0; - _playerItem.preferredForwardBufferDuration = value; - if (_playerItem2) { - _playerItem2.preferredForwardBufferDuration = value; - } - } - } -} - -- (void)applyCanUseNetworkResourcesForLiveStreamingWhilePaused { - if (@available(iOS 9.0, macOS 10.11, *)) { - _playerItem.canUseNetworkResourcesForLiveStreamingWhilePaused = _loadControl.canUseNetworkResourcesForLiveStreamingWhilePaused; - if (_playerItem2) { - _playerItem2.canUseNetworkResourcesForLiveStreamingWhilePaused = _loadControl.canUseNetworkResourcesForLiveStreamingWhilePaused; - } - } -} - -- (void)applyPreferredPeakBitRate { - if (@available(iOS 8.0, macOS 10.10, *)) { - if (_loadControl.preferredPeakBitRate != (id)[NSNull null]) { - double value = [_loadControl.preferredPeakBitRate doubleValue]; - _playerItem.preferredPeakBitRate = value; - if (_playerItem2) { - _playerItem2.preferredPeakBitRate = value; - } - } - } -} - -- (IndexedPlayerItem *)playerItem { - return _playerItem; -} - -- (IndexedPlayerItem *)playerItem2 { - return _playerItem2; -} - -- (NSArray<NSNumber *> *)getShuffleIndices { - return @[@(0)]; -} - -- (void)play:(AVQueuePlayer *)player { -} - -- (void)pause:(AVQueuePlayer *)player { -} - -- (void)stop:(AVQueuePlayer *)player { -} - -- (void)seek:(CMTime)position completionHandler:(void (^)(BOOL))completionHandler { - if (!completionHandler || (_playerItem.status == AVPlayerItemStatusReadyToPlay)) { - NSValue *seekableRange = _playerItem.seekableTimeRanges.lastObject; - if (seekableRange) { - CMTimeRange range = [seekableRange CMTimeRangeValue]; - position = CMTimeAdd(position, range.start); - } - [_playerItem seekToTime:position toleranceBefore:kCMTimeZero toleranceAfter:kCMTimeZero completionHandler:completionHandler]; - } else { - [super seek:position completionHandler:completionHandler]; - } -} - -- (void)flip { - IndexedPlayerItem *temp = _playerItem; - _playerItem = _playerItem2; - _playerItem2 = temp; -} - -- (void)preparePlayerItem2 { - if (!_playerItem2) { - _playerItem2 = [self createPlayerItem:_uri]; - _playerItem2.audioSource = _playerItem.audioSource; - } -} - -- (CMTime)duration { - NSValue *seekableRange = _playerItem.seekableTimeRanges.lastObject; - if (seekableRange) { - CMTimeRange seekableDuration = [seekableRange CMTimeRangeValue]; - return seekableDuration.duration; - } - else { - return _playerItem.duration; - } - return kCMTimeInvalid; -} - -- (void)setDuration:(CMTime)duration { -} - -- (CMTime)position { - NSValue *seekableRange = _playerItem.seekableTimeRanges.lastObject; - if (seekableRange) { - CMTimeRange range = [seekableRange CMTimeRangeValue]; - return CMTimeSubtract(_playerItem.currentTime, range.start); - } else { - return _playerItem.currentTime; - } - -} - -- (CMTime)bufferedPosition { - NSValue *last = _playerItem.loadedTimeRanges.lastObject; - if (last) { - CMTimeRange timeRange = [last CMTimeRangeValue]; - return CMTimeAdd(timeRange.start, timeRange.duration); - } else { - return _playerItem.currentTime; - } - return kCMTimeInvalid; -} - -@end diff --git a/just_audio/darwin/Classes/UriAudioSource.swift b/just_audio/darwin/Classes/UriAudioSource.swift new file mode 100644 index 000000000..3622e0b34 --- /dev/null +++ b/just_audio/darwin/Classes/UriAudioSource.swift @@ -0,0 +1,52 @@ +import AVFoundation + +class UriAudioSource: IndexedAudioSource { + var url: URL + var duration: CMTime = CMTime.invalid + + init(sid: String, uri: String) { + self.url = UriAudioSource.urlFrom(uri: uri) + + super.init(sid: sid) + } + + override func load(engine _: AVAudioEngine, playerNode: AVAudioPlayerNode, speedControl _: AVAudioUnitVarispeed, position: CMTime?, completionHandler: @escaping () -> Void) throws { + let audioFile = try AVAudioFile(forReading: url) + let audioFormat = audioFile.fileFormat + + duration = UriAudioSource.durationFrom(audioFile: audioFile) + let sampleRate = audioFormat.sampleRate + + if let position = position, position.seconds > 0 { + let framePosition = AVAudioFramePosition(sampleRate * position.seconds) + + let missingTime = duration.seconds - position.seconds + let framesToPlay = AVAudioFrameCount(sampleRate * missingTime) + + // Only schedule a segment when a meaningful number of frames remains. + if framesToPlay > 1000 { + playerNode.scheduleSegment(audioFile, startingFrame: framePosition, frameCount: framesToPlay, at: nil, completionHandler: completionHandler) + } + } else { + playerNode.scheduleFile(audioFile, at: nil, completionHandler: completionHandler) + } + } + + override func getDuration() -> CMTime { + return duration + } + + static func durationFrom(audioFile: AVAudioFile) -> CMTime { + let seconds = Double(audioFile.length) / audioFile.fileFormat.sampleRate + return CMTime(value: Int64(seconds * 1_000), timescale: 1_000) + } + + static func urlFrom(uri: String) -> URL { + if (uri.hasPrefix("ipod-library://") || uri.hasPrefix("file://")) { + return URL(string: uri)! + } else { + return URL(fileURLWithPath: uri) + } + } +} diff --git a/just_audio/example/ios/Podfile b/just_audio/example/ios/Podfile index f7d6a5e68..bfd60c486 100644 --- a/just_audio/example/ios/Podfile +++ b/just_audio/example/ios/Podfile @@ -1,5 +1,5 @@ # Uncomment this line to define a global platform for your project -# platform :ios, '9.0' +platform :ios, '11.0' # CocoaPods analytics sends network stats synchronously affecting flutter build latency.
ENV['COCOAPODS_DISABLE_STATS'] = 'true' diff --git a/just_audio/example/ios/Podfile.lock b/just_audio/example/ios/Podfile.lock index 9d1853c97..f2ae0f284 100644 --- a/just_audio/example/ios/Podfile.lock +++ b/just_audio/example/ios/Podfile.lock @@ -4,14 +4,14 @@ PODS: - Flutter (1.0.0) - just_audio (0.0.1): - Flutter - - path_provider (0.0.1): + - path_provider_ios (0.0.1): - Flutter DEPENDENCIES: - audio_session (from `.symlinks/plugins/audio_session/ios`) - Flutter (from `Flutter`) - just_audio (from `.symlinks/plugins/just_audio/ios`) - - path_provider (from `.symlinks/plugins/path_provider/ios`) + - path_provider_ios (from `.symlinks/plugins/path_provider_ios/ios`) EXTERNAL SOURCES: audio_session: @@ -20,15 +20,15 @@ EXTERNAL SOURCES: :path: Flutter just_audio: :path: ".symlinks/plugins/just_audio/ios" - path_provider: - :path: ".symlinks/plugins/path_provider/ios" + path_provider_ios: + :path: ".symlinks/plugins/path_provider_ios/ios" SPEC CHECKSUMS: audio_session: 4f3e461722055d21515cf3261b64c973c062f345 Flutter: 50d75fe2f02b26cc09d224853bb45737f8b3214a - just_audio: baa7252489dbcf47a4c7cc9ca663e9661c99aafa - path_provider: abfe2b5c733d04e238b0d8691db0cfd63a27a93c + just_audio: c649315c5051cae39ecd06d55cc456ec44447507 + path_provider_ios: 7d7ce634493af4477d156294792024ec3485acd5 -PODFILE CHECKSUM: 8e679eca47255a8ca8067c4c67aab20e64cb974d +PODFILE CHECKSUM: e5ee00144d04e7b168ba7ea28a9753540e444f3d -COCOAPODS: 1.10.1 +COCOAPODS: 1.11.2 diff --git a/just_audio/example/ios/Runner.xcodeproj/project.pbxproj b/just_audio/example/ios/Runner.xcodeproj/project.pbxproj index f5fffe947..1fefb72a4 100644 --- a/just_audio/example/ios/Runner.xcodeproj/project.pbxproj +++ b/just_audio/example/ios/Runner.xcodeproj/project.pbxproj @@ -3,7 +3,7 @@ archiveVersion = 1; classes = { }; - objectVersion = 46; + objectVersion = 50; objects = { /* Begin PBXBuildFile section */ @@ -156,6 +156,8 @@ dependencies = ( ); name = Runner; + packageProductDependencies = ( + ); productName = Runner; productReference = 97C146EE1CF9000F007C117D /* Runner.app */; productType = "com.apple.product-type.application"; @@ -166,11 +168,13 @@ 97C146E61CF9000F007C117D /* Project object */ = { isa = PBXProject; attributes = { - LastUpgradeCheck = 1020; + LastUpgradeCheck = 1300; ORGANIZATIONNAME = "The Chromium Authors"; TargetAttributes = { 97C146ED1CF9000F007C117D = { CreatedOnToolsVersion = 7.3.1; + DevelopmentTeam = 766J5VD34Q; + ProvisioningStyle = Automatic; }; }; }; @@ -183,6 +187,8 @@ Base, ); mainGroup = 97C146E51CF9000F007C117D; + packageReferences = ( + ); productRefGroup = 97C146EF1CF9000F007C117D /* Products */; projectDirPath = ""; projectRoot = ""; @@ -348,6 +354,7 @@ buildSettings = { ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; CURRENT_PROJECT_VERSION = "$(FLUTTER_BUILD_NUMBER)"; + DEVELOPMENT_TEAM = 766J5VD34Q; ENABLE_BITCODE = NO; FRAMEWORK_SEARCH_PATHS = ( "$(inherited)", @@ -355,7 +362,7 @@ ); INFOPLIST_FILE = Runner/Info.plist; IPHONEOS_DEPLOYMENT_TARGET = 9.0; - LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks"; + LD_RUNPATH_SEARCH_PATHS = "/usr/lib/swift $(inherited) @executable_path/Frameworks"; LIBRARY_SEARCH_PATHS = ( "$(inherited)", "$(PROJECT_DIR)/Flutter", @@ -475,7 +482,10 @@ baseConfigurationReference = 9740EEB21CF90195004384FC /* Debug.xcconfig */; buildSettings = { ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; + CODE_SIGN_IDENTITY = "Apple Development"; + CODE_SIGN_STYLE = Automatic; CURRENT_PROJECT_VERSION = "$(FLUTTER_BUILD_NUMBER)"; + DEVELOPMENT_TEAM = 766J5VD34Q; 
ENABLE_BITCODE = NO; FRAMEWORK_SEARCH_PATHS = ( "$(inherited)", @@ -483,13 +493,14 @@ ); INFOPLIST_FILE = Runner/Info.plist; IPHONEOS_DEPLOYMENT_TARGET = 9.0; - LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks"; + LD_RUNPATH_SEARCH_PATHS = "/usr/lib/swift $(inherited) @executable_path/Frameworks"; LIBRARY_SEARCH_PATHS = ( "$(inherited)", "$(PROJECT_DIR)/Flutter", ); PRODUCT_BUNDLE_IDENTIFIER = com.ryanheise.audioPlayerExample; PRODUCT_NAME = "$(TARGET_NAME)"; + PROVISIONING_PROFILE_SPECIFIER = ""; VERSIONING_SYSTEM = "apple-generic"; }; name = Debug; @@ -500,6 +511,7 @@ buildSettings = { ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; CURRENT_PROJECT_VERSION = "$(FLUTTER_BUILD_NUMBER)"; + DEVELOPMENT_TEAM = 766J5VD34Q; ENABLE_BITCODE = NO; FRAMEWORK_SEARCH_PATHS = ( "$(inherited)", @@ -507,7 +519,7 @@ ); INFOPLIST_FILE = Runner/Info.plist; IPHONEOS_DEPLOYMENT_TARGET = 9.0; - LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks"; + LD_RUNPATH_SEARCH_PATHS = "/usr/lib/swift $(inherited) @executable_path/Frameworks"; LIBRARY_SEARCH_PATHS = ( "$(inherited)", "$(PROJECT_DIR)/Flutter", diff --git a/just_audio/example/ios/Runner.xcodeproj/xcshareddata/xcschemes/Runner.xcscheme b/just_audio/example/ios/Runner.xcodeproj/xcshareddata/xcschemes/Runner.xcscheme index a28140cfd..3db53b6e1 100644 --- a/just_audio/example/ios/Runner.xcodeproj/xcshareddata/xcschemes/Runner.xcscheme +++ b/just_audio/example/ios/Runner.xcodeproj/xcshareddata/xcschemes/Runner.xcscheme @@ -1,6 +1,6 @@ with WidgetsBindingObserver { - final _equalizer = AndroidEqualizer(); - final _loudnessEnhancer = AndroidLoudnessEnhancer(); - late final AudioPlayer _player = AudioPlayer( - audioPipeline: AudioPipeline( - androidAudioEffects: [ - _loudnessEnhancer, - _equalizer, + final _equalizer = Equalizer( + darwinMessageParameters: DarwinEqualizerParametersMessage( + minDecibels: -26.0, + maxDecibels: 24.0, + bands: [ + DarwinEqualizerBandMessage(index: 0, centerFrequency: 60, gain: 0), + DarwinEqualizerBandMessage(index: 1, centerFrequency: 230, gain: 0), + DarwinEqualizerBandMessage(index: 2, centerFrequency: 910, gain: 0), + DarwinEqualizerBandMessage(index: 3, centerFrequency: 3600, gain: 0), + DarwinEqualizerBandMessage(index: 4, centerFrequency: 14000, gain: 0), ], ), ); + final _loudnessEnhancer = AndroidLoudnessEnhancer(); + late final AudioPlayer _player = AudioPlayer( + audioPipeline: AudioPipeline(androidAudioEffects: [ + _loudnessEnhancer, + _equalizer, + ], darwinAudioEffects: [ + _equalizer + ]), + ); + @override void initState() { super.initState(); @@ -47,8 +61,8 @@ class _MyAppState extends State with WidgetsBindingObserver { final session = await AudioSession.instance; await session.configure(AudioSessionConfiguration.speech()); try { - await _player.setAudioSource(AudioSource.uri(Uri.parse( - "https://s3.amazonaws.com/scifri-episodes/scifri20181123-episode.mp3"))); + await _player.setAudioSource( + AudioSource.uri(Uri.parse("asset:///audio/nature.mp3"))); } catch (e) { print("Error loading audio source: $e"); } @@ -164,7 +178,7 @@ class LoudnessEnhancerControls extends StatelessWidget { } class EqualizerControls extends StatelessWidget { - final AndroidEqualizer equalizer; + final Equalizer equalizer; const EqualizerControls({ Key? 
key, @@ -173,7 +187,7 @@ class EqualizerControls extends StatelessWidget { @override Widget build(BuildContext context) { - return FutureBuilder( + return FutureBuilder( future: equalizer.parameters, builder: (context, snapshot) { final parameters = snapshot.data; diff --git a/just_audio/example/lib/main.dart b/just_audio/example/lib/main.dart index 0e621fe20..ac8b7ee05 100644 --- a/just_audio/example/lib/main.dart +++ b/just_audio/example/lib/main.dart @@ -41,8 +41,11 @@ class _MyAppState extends State with WidgetsBindingObserver { }); // Try to load audio from a source and catch any errors. try { - await _player.setAudioSource(AudioSource.uri(Uri.parse( - "https://s3.amazonaws.com/scifri-episodes/scifri20181123-episode.mp3"))); + await _player.setAudioSource( + ConcatenatingAudioSource( + children: [AudioSource.uri(Uri.parse("asset:///audio/nature.mp3"))], + ), + ); } catch (e) { print("Error loading audio source: $e"); } diff --git a/just_audio/example/pubspec.yaml b/just_audio/example/pubspec.yaml index 2602e9b10..7733f6496 100644 --- a/just_audio/example/pubspec.yaml +++ b/just_audio/example/pubspec.yaml @@ -10,7 +10,7 @@ dependencies: sdk: flutter audio_session: ^0.1.5 rxdart: '^0.27.0' - just_audio_libwinmedia: ^0.0.4 + # just_audio_libwinmedia: ^0.0.4 # just_audio_libwinmedia: # path: ../../../just_audio_libwinmedia just_audio: diff --git a/just_audio/ios/.gitignore b/just_audio/ios/.gitignore index aa479fd3c..0c885071e 100644 --- a/just_audio/ios/.gitignore +++ b/just_audio/ios/.gitignore @@ -34,4 +34,5 @@ Icon? .tags* /Flutter/Generated.xcconfig +/Flutter/ephemeral/ /Flutter/flutter_export_environment.sh \ No newline at end of file diff --git a/just_audio/ios/Classes/AudioSource.swift b/just_audio/ios/Classes/AudioSource.swift new file mode 120000 index 000000000..a23d59187 --- /dev/null +++ b/just_audio/ios/Classes/AudioSource.swift @@ -0,0 +1 @@ +../../darwin/Classes/AudioSource.swift \ No newline at end of file diff --git a/just_audio/ios/Classes/BetterEventChannel.swift b/just_audio/ios/Classes/BetterEventChannel.swift new file mode 120000 index 000000000..2fc1f3129 --- /dev/null +++ b/just_audio/ios/Classes/BetterEventChannel.swift @@ -0,0 +1 @@ +../../darwin/Classes/BetterEventChannel.swift \ No newline at end of file diff --git a/just_audio/ios/Classes/ConcatenatingAudioSource.swift b/just_audio/ios/Classes/ConcatenatingAudioSource.swift new file mode 120000 index 000000000..cfac383e7 --- /dev/null +++ b/just_audio/ios/Classes/ConcatenatingAudioSource.swift @@ -0,0 +1 @@ +../../darwin/Classes/ConcatenatingAudioSource.swift \ No newline at end of file diff --git a/just_audio/ios/Classes/IndexedAudioSource.swift b/just_audio/ios/Classes/IndexedAudioSource.swift new file mode 120000 index 000000000..9a2781a20 --- /dev/null +++ b/just_audio/ios/Classes/IndexedAudioSource.swift @@ -0,0 +1 @@ +../../darwin/Classes/IndexedAudioSource.swift \ No newline at end of file diff --git a/just_audio/ios/Classes/JustAudioPlayer.swift b/just_audio/ios/Classes/JustAudioPlayer.swift new file mode 120000 index 000000000..3ec439582 --- /dev/null +++ b/just_audio/ios/Classes/JustAudioPlayer.swift @@ -0,0 +1 @@ +../../darwin/Classes/JustAudioPlayer.swift \ No newline at end of file diff --git a/just_audio/ios/Classes/JustAudioPlugin.h b/just_audio/ios/Classes/JustAudioPlugin.h index a694322cd..b7471f17a 100644 --- a/just_audio/ios/Classes/JustAudioPlugin.h +++ b/just_audio/ios/Classes/JustAudioPlugin.h @@ -1,4 +1,5 @@ #import +#import @interface JustAudioPlugin : NSObject @end diff --git 
a/just_audio/ios/Classes/SwiftJustAudioPlugin.swift b/just_audio/ios/Classes/SwiftJustAudioPlugin.swift new file mode 120000 index 000000000..d7d88e199 --- /dev/null +++ b/just_audio/ios/Classes/SwiftJustAudioPlugin.swift @@ -0,0 +1 @@ +../../darwin/Classes/SwiftJustAudioPlugin.swift \ No newline at end of file diff --git a/just_audio/ios/Classes/UriAudioSource.swift b/just_audio/ios/Classes/UriAudioSource.swift new file mode 120000 index 000000000..b7d2e89d7 --- /dev/null +++ b/just_audio/ios/Classes/UriAudioSource.swift @@ -0,0 +1 @@ +../../darwin/Classes/UriAudioSource.swift \ No newline at end of file diff --git a/just_audio/ios/just_audio.podspec b/just_audio/ios/just_audio.podspec index ba5c7d261..44f5be947 100644 --- a/just_audio/ios/just_audio.podspec +++ b/just_audio/ios/just_audio.podspec @@ -1,5 +1,6 @@ # -# To learn more about a Podspec see http://guides.cocoapods.org/syntax/podspec.html +# To learn more about a Podspec see http://guides.cocoapods.org/syntax/podspec.html. +# Run `pod lib lint just_audio.podspec` to validate before publishing. # Pod::Spec.new do |s| s.name = 'just_audio' @@ -13,9 +14,11 @@ A new flutter plugin project. s.author = { 'Your Company' => 'email@example.com' } s.source = { :path => '.' } s.source_files = 'Classes/**/*' - s.public_header_files = 'Classes/**/*.h' + s.dependency 'Flutter' - s.platform = :ios, '8.0' - s.pod_target_xcconfig = { 'DEFINES_MODULE' => 'YES', 'VALID_ARCHS[sdk=iphonesimulator*]' => 'x86_64' } -end + s.platform = :ios, '9.0' + # Flutter.framework does not contain a i386 slice. + s.pod_target_xcconfig = { 'DEFINES_MODULE' => 'YES', 'EXCLUDED_ARCHS[sdk=iphonesimulator*]' => 'i386' } + s.swift_version = '5.0' +end diff --git a/just_audio/macos/.gitignore b/just_audio/ios_old/.gitignore similarity index 100% rename from just_audio/macos/.gitignore rename to just_audio/ios_old/.gitignore diff --git a/just_audio/macos/Assets/.gitkeep b/just_audio/ios_old/Assets/.gitkeep similarity index 100% rename from just_audio/macos/Assets/.gitkeep rename to just_audio/ios_old/Assets/.gitkeep diff --git a/just_audio/ios/Classes/AudioPlayer.h b/just_audio/ios_old/Classes/AudioPlayer.h similarity index 100% rename from just_audio/ios/Classes/AudioPlayer.h rename to just_audio/ios_old/Classes/AudioPlayer.h diff --git a/just_audio/ios/Classes/AudioPlayer.m b/just_audio/ios_old/Classes/AudioPlayer.m similarity index 100% rename from just_audio/ios/Classes/AudioPlayer.m rename to just_audio/ios_old/Classes/AudioPlayer.m diff --git a/just_audio/ios/Classes/AudioSource.h b/just_audio/ios_old/Classes/AudioSource.h similarity index 100% rename from just_audio/ios/Classes/AudioSource.h rename to just_audio/ios_old/Classes/AudioSource.h diff --git a/just_audio/ios/Classes/AudioSource.m b/just_audio/ios_old/Classes/AudioSource.m similarity index 100% rename from just_audio/ios/Classes/AudioSource.m rename to just_audio/ios_old/Classes/AudioSource.m diff --git a/just_audio/ios/Classes/BetterEventChannel.h b/just_audio/ios_old/Classes/BetterEventChannel.h similarity index 100% rename from just_audio/ios/Classes/BetterEventChannel.h rename to just_audio/ios_old/Classes/BetterEventChannel.h diff --git a/just_audio/ios/Classes/BetterEventChannel.m b/just_audio/ios_old/Classes/BetterEventChannel.m similarity index 100% rename from just_audio/ios/Classes/BetterEventChannel.m rename to just_audio/ios_old/Classes/BetterEventChannel.m diff --git a/just_audio/ios/Classes/ClippingAudioSource.h b/just_audio/ios_old/Classes/ClippingAudioSource.h similarity index 100% rename 
from just_audio/ios/Classes/ClippingAudioSource.h rename to just_audio/ios_old/Classes/ClippingAudioSource.h diff --git a/just_audio/ios/Classes/ClippingAudioSource.m b/just_audio/ios_old/Classes/ClippingAudioSource.m similarity index 100% rename from just_audio/ios/Classes/ClippingAudioSource.m rename to just_audio/ios_old/Classes/ClippingAudioSource.m diff --git a/just_audio/ios/Classes/ConcatenatingAudioSource.h b/just_audio/ios_old/Classes/ConcatenatingAudioSource.h similarity index 100% rename from just_audio/ios/Classes/ConcatenatingAudioSource.h rename to just_audio/ios_old/Classes/ConcatenatingAudioSource.h diff --git a/just_audio/ios/Classes/ConcatenatingAudioSource.m b/just_audio/ios_old/Classes/ConcatenatingAudioSource.m similarity index 100% rename from just_audio/ios/Classes/ConcatenatingAudioSource.m rename to just_audio/ios_old/Classes/ConcatenatingAudioSource.m diff --git a/just_audio/ios/Classes/IndexedAudioSource.h b/just_audio/ios_old/Classes/IndexedAudioSource.h similarity index 100% rename from just_audio/ios/Classes/IndexedAudioSource.h rename to just_audio/ios_old/Classes/IndexedAudioSource.h diff --git a/just_audio/ios/Classes/IndexedAudioSource.m b/just_audio/ios_old/Classes/IndexedAudioSource.m similarity index 100% rename from just_audio/ios/Classes/IndexedAudioSource.m rename to just_audio/ios_old/Classes/IndexedAudioSource.m diff --git a/just_audio/ios/Classes/IndexedPlayerItem.h b/just_audio/ios_old/Classes/IndexedPlayerItem.h similarity index 100% rename from just_audio/ios/Classes/IndexedPlayerItem.h rename to just_audio/ios_old/Classes/IndexedPlayerItem.h diff --git a/just_audio/ios/Classes/IndexedPlayerItem.m b/just_audio/ios_old/Classes/IndexedPlayerItem.m similarity index 100% rename from just_audio/ios/Classes/IndexedPlayerItem.m rename to just_audio/ios_old/Classes/IndexedPlayerItem.m diff --git a/just_audio/ios_old/Classes/JustAudioPlugin.h b/just_audio/ios_old/Classes/JustAudioPlugin.h new file mode 100644 index 000000000..a694322cd --- /dev/null +++ b/just_audio/ios_old/Classes/JustAudioPlugin.h @@ -0,0 +1,4 @@ +#import + +@interface JustAudioPlugin : NSObject +@end diff --git a/just_audio/ios_old/Classes/JustAudioPlugin.m b/just_audio/ios_old/Classes/JustAudioPlugin.m new file mode 120000 index 000000000..8583f76e2 --- /dev/null +++ b/just_audio/ios_old/Classes/JustAudioPlugin.m @@ -0,0 +1 @@ +../../darwin/Classes/JustAudioPlugin.m \ No newline at end of file diff --git a/just_audio/ios/Classes/LoadControl.h b/just_audio/ios_old/Classes/LoadControl.h similarity index 100% rename from just_audio/ios/Classes/LoadControl.h rename to just_audio/ios_old/Classes/LoadControl.h diff --git a/just_audio/ios/Classes/LoadControl.m b/just_audio/ios_old/Classes/LoadControl.m similarity index 100% rename from just_audio/ios/Classes/LoadControl.m rename to just_audio/ios_old/Classes/LoadControl.m diff --git a/just_audio/ios/Classes/LoopingAudioSource.h b/just_audio/ios_old/Classes/LoopingAudioSource.h similarity index 100% rename from just_audio/ios/Classes/LoopingAudioSource.h rename to just_audio/ios_old/Classes/LoopingAudioSource.h diff --git a/just_audio/ios/Classes/LoopingAudioSource.m b/just_audio/ios_old/Classes/LoopingAudioSource.m similarity index 100% rename from just_audio/ios/Classes/LoopingAudioSource.m rename to just_audio/ios_old/Classes/LoopingAudioSource.m diff --git a/just_audio/ios/Classes/UriAudioSource.h b/just_audio/ios_old/Classes/UriAudioSource.h similarity index 100% rename from just_audio/ios/Classes/UriAudioSource.h rename to 
just_audio/ios_old/Classes/UriAudioSource.h diff --git a/just_audio/ios/Classes/UriAudioSource.m b/just_audio/ios_old/Classes/UriAudioSource.m similarity index 100% rename from just_audio/ios/Classes/UriAudioSource.m rename to just_audio/ios_old/Classes/UriAudioSource.m diff --git a/just_audio/ios_old/just_audio.podspec b/just_audio/ios_old/just_audio.podspec new file mode 100644 index 000000000..ba5c7d261 --- /dev/null +++ b/just_audio/ios_old/just_audio.podspec @@ -0,0 +1,21 @@ +# +# To learn more about a Podspec see http://guides.cocoapods.org/syntax/podspec.html +# +Pod::Spec.new do |s| + s.name = 'just_audio' + s.version = '0.0.1' + s.summary = 'A new flutter plugin project.' + s.description = <<-DESC +A new flutter plugin project. + DESC + s.homepage = 'http://example.com' + s.license = { :file => '../LICENSE' } + s.author = { 'Your Company' => 'email@example.com' } + s.source = { :path => '.' } + s.source_files = 'Classes/**/*' + s.public_header_files = 'Classes/**/*.h' + s.dependency 'Flutter' + s.platform = :ios, '8.0' + s.pod_target_xcconfig = { 'DEFINES_MODULE' => 'YES', 'VALID_ARCHS[sdk=iphonesimulator*]' => 'x86_64' } +end + diff --git a/just_audio/lib/just_audio.dart b/just_audio/lib/just_audio.dart index b9ed232c3..47e0dfe1b 100644 --- a/just_audio/lib/just_audio.dart +++ b/just_audio/lib/just_audio.dart @@ -15,6 +15,9 @@ import 'package:path_provider/path_provider.dart'; import 'package:rxdart/rxdart.dart'; import 'package:uuid/uuid.dart'; +// export just_audio_platform +export 'package:just_audio_platform_interface/just_audio_platform_interface.dart'; + final _uuid = Uuid(); /// An object to manage playing audio from a URL, a locale file or an asset. @@ -3366,6 +3369,13 @@ class _IdleAudioPlayer extends AudioPlayerPlatform { throw UnimplementedError( "androidEqualizerBandSetGain() has not been implemented."); } + + @override + Future<DarwinEqualizerBandSetGainResponse> darwinEqualizerBandSetGain( + DarwinEqualizerBandSetGainRequest request) { + throw UnimplementedError( + "darwinEqualizerBandSetGain() has not been implemented."); + } } /// Holds the initial requested position and index for a newly loaded audio @@ -3391,8 +3401,10 @@ class AudioPipeline { androidAudioEffects = androidAudioEffects ?? const [], darwinAudioEffects = darwinAudioEffects ?? const []; - List<AudioEffect> get _audioEffects => - [...androidAudioEffects, ...darwinAudioEffects]; + List<AudioEffect> get _audioEffects => [ + if (_isAndroid()) ...androidAudioEffects, + if (_isDarwin()) ...darwinAudioEffects + ]; void _setup(AudioPlayer player) { _audioEffects.forEach((effect) => effect._setup(player)); @@ -3543,8 +3555,59 @@ class AndroidEqualizerBand { ); } -/// The parameter values of an [AndroidEqualizer]. -class AndroidEqualizerParameters { +/// A frequency band within an [Equalizer]. +class DarwinEqualizerBand { + final AudioPlayer _player; + + /// A zero-based index of the position of this band within its [Equalizer]. + final int index; + + /// The center frequency of this band in hertz. + final double centerFrequency; + final _gainSubject = BehaviorSubject<double>(); + + DarwinEqualizerBand._({ + required AudioPlayer player, + required this.index, + required this.centerFrequency, + required double gain, + }) : _player = player { + _gainSubject.add(gain); + } + + /// The gain for this band in decibels. + double get gain => _gainSubject.nvalue!; + + /// A stream of the current gain for this band in decibels. + Stream<double> get gainStream => _gainSubject.stream; + + /// Sets the gain for this band in decibels.
+  Future<void> setGain(double gain) async {
+    _gainSubject.add(gain);
+    if (_player._active) {
+      await (await _player._platform).darwinEqualizerBandSetGain(
+          DarwinEqualizerBandSetGainRequest(bandIndex: index, gain: gain));
+    }
+  }
+
+  /// Restores the gain after reactivating.
+  Future<void> _restore() async {
+    await (await _player._platform).darwinEqualizerBandSetGain(
+        DarwinEqualizerBandSetGainRequest(bandIndex: index, gain: gain));
+  }
+
+  static DarwinEqualizerBand _fromMessage(
+          AudioPlayer player, DarwinEqualizerBandMessage message) =>
+      DarwinEqualizerBand._(
+        player: player,
+        index: message.index,
+        centerFrequency: message.centerFrequency,
+        gain: message.gain,
+      );
+}
+
+/// The parameter values of a [DarwinEqualizer].
+class DarwinEqualizerParameters {
   /// The minimum gain value supported by the equalizer.
   final double minDecibels;
@@ -3552,9 +3615,9 @@ class AndroidEqualizerParameters {
   final double maxDecibels;
 
   /// The frequency bands of the equalizer.
-  final List<AndroidEqualizerBand> bands;
+  final List<DarwinEqualizerBand> bands;
 
-  AndroidEqualizerParameters({
+  DarwinEqualizerParameters({
     required this.minDecibels,
     required this.maxDecibels,
     required this.bands,
@@ -3567,52 +3630,196 @@ class AndroidEqualizerParameters {
     }
   }
 
-  static AndroidEqualizerParameters _fromMessage(
-          AudioPlayer player, AndroidEqualizerParametersMessage message) =>
-      AndroidEqualizerParameters(
+  static DarwinEqualizerParameters _fromMessage(
+          AudioPlayer player, DarwinEqualizerParametersMessage message) =>
+      DarwinEqualizerParameters(
         minDecibels: message.minDecibels,
         maxDecibels: message.maxDecibels,
         bands: message.bands
             .map((bandMessage) =>
-                AndroidEqualizerBand._fromMessage(player, bandMessage))
+                DarwinEqualizerBand._fromMessage(player, bandMessage))
             .toList(),
       );
 }
 
-/// An [AudioEffect] for Android that can adjust the gain for different
+/// An [AudioEffect] for both Android and iOS/macOS that can adjust the gain
+/// for different
 /// frequency bands of an [AudioPlayer]'s audio signal.
-class AndroidEqualizer extends AudioEffect with AndroidAudioEffect {
-  AndroidEqualizerParameters? _parameters;
-  final Completer<AndroidEqualizerParameters> _parametersCompleter =
-      Completer<AndroidEqualizerParameters>();
+class Equalizer extends AudioEffect with DarwinAudioEffect, AndroidAudioEffect {
+  EqualizerParameters? _parameters;
+  final DarwinEqualizerParametersMessage _darwinMessageParameters;
+  final Completer<EqualizerParameters> _parametersCompleter =
+      Completer<EqualizerParameters>();
+
+  Equalizer({required DarwinEqualizerParametersMessage darwinMessageParameters})
+      : _darwinMessageParameters = darwinMessageParameters;
 
   @override
-  String get _type => 'AndroidEqualizer';
+  String get _type => _isAndroid() ? 'AndroidEqualizer' : 'DarwinEqualizer';
 
   @override
   Future<void> _activate() async {
     await super._activate();
-    if (_parametersCompleter.isCompleted) {
-      await (await parameters)._restore();
-      return;
+    if (_isAndroid()) {
+      if (_parametersCompleter.isCompleted) {
+        await (await parameters)._restore();
+        return;
+      }
+      final response = await (await _player!._platform)
+          .androidEqualizerGetParameters(
+              AndroidEqualizerGetParametersRequest());
+      _parameters = EqualizerParameters._fromAndroidMessage(
+          _player!, response.parameters);
+      _parametersCompleter.complete(_parameters);
+    } else if (_isDarwin()) {
+      if (_parametersCompleter.isCompleted) {
+        await (await parameters)._restore();
+        return;
+      }
+      _parameters = EqualizerParameters._fromDarwinMessage(
+          _player!, _darwinMessageParameters);
+      _parametersCompleter.complete(_parameters);
+    } else {
+      throw UnsupportedError("Equalizer is not supported on this platform.");
     }
-    final response = await (await _player!._platform)
-        .androidEqualizerGetParameters(AndroidEqualizerGetParametersRequest());
-    _parameters =
-        AndroidEqualizerParameters._fromMessage(_player!, response.parameters);
-    _parametersCompleter.complete(_parameters);
   }
 
   /// The parameter values of this equalizer.
-  Future<AndroidEqualizerParameters> get parameters =>
-      _parametersCompleter.future;
+  Future<EqualizerParameters> get parameters => _parametersCompleter.future;
 
   @override
-  AudioEffectMessage _toMessage() => AndroidEqualizerMessage(
+  AudioEffectMessage _toMessage() {
+    if (_isAndroid()) {
+      return AndroidEqualizerMessage(
         enabled: enabled,
         // Parameters are only communicated from the platform.
         parameters: null,
       );
+    } else if (_isDarwin()) {
+      return DarwinEqualizerMessage(
+        enabled: enabled,
+        parameters: _darwinMessageParameters,
+      );
+    } else {
+      throw UnsupportedError("Equalizer is not supported on this platform.");
+    }
+  }
+}
+
+/// The parameter values of an [Equalizer].
+class EqualizerParameters {
+  /// The minimum gain value supported by the equalizer.
+  final double minDecibels;
+
+  /// The maximum gain value supported by the equalizer.
+  final double maxDecibels;
+
+  /// The frequency bands of the equalizer.
+  final List<EqualizerBand> bands;
+
+  EqualizerParameters({
+    required this.minDecibels,
+    required this.maxDecibels,
+    required this.bands,
+  });
+
+  /// Restores platform state after reactivating.
+  Future<void> _restore() async {
+    for (var band in bands) {
+      await band._restore();
+    }
+  }
+
+  static EqualizerParameters _fromAndroidMessage(
+          AudioPlayer player, AndroidEqualizerParametersMessage message) =>
+      EqualizerParameters(
+        minDecibels: message.minDecibels,
+        maxDecibels: message.maxDecibels,
+        bands: message.bands
+            .map((bandMessage) =>
+                EqualizerBand._fromAndroidMessage(player, bandMessage))
+            .toList(),
+      );
+
+  static EqualizerParameters _fromDarwinMessage(
+          AudioPlayer player, DarwinEqualizerParametersMessage message) =>
+      EqualizerParameters(
+        minDecibels: message.minDecibels,
+        maxDecibels: message.maxDecibels,
+        bands: message.bands
+            .map((bandMessage) =>
+                EqualizerBand._fromDarwinMessage(player, bandMessage))
+            .toList(),
+      );
+}
+
+/// A frequency band within an [Equalizer].
+class EqualizerBand {
+  final AudioPlayer _player;
+
+  /// A zero-based index of the position of this band within its [Equalizer].
+  final int index;
+
+  /// The center frequency of this band in hertz.
+  final double centerFrequency;
+  final _gainSubject = BehaviorSubject<double>();
+
+  EqualizerBand._({
+    required AudioPlayer player,
+    required this.index,
+    required this.centerFrequency,
+    required double gain,
+  }) : _player = player {
+    _gainSubject.add(gain);
+  }
+
+  /// The gain for this band in decibels.
+  double get gain => _gainSubject.nvalue!;
+
+  /// A stream of the current gain for this band in decibels.
+  Stream<double> get gainStream => _gainSubject.stream;
+
+  /// Sets the gain for this band in decibels.
+  Future<void> setGain(double gain) async {
+    _gainSubject.add(gain);
+    if (_player._active) {
+      if (_isAndroid()) {
+        await (await _player._platform).androidEqualizerBandSetGain(
+            AndroidEqualizerBandSetGainRequest(bandIndex: index, gain: gain));
+      } else if (_isDarwin()) {
+        await (await _player._platform).darwinEqualizerBandSetGain(
+            DarwinEqualizerBandSetGainRequest(bandIndex: index, gain: gain));
+      }
+    }
+  }
+
+  /// Restores the gain after reactivating.
+  Future<void> _restore() async {
+    if (_isAndroid()) {
+      await (await _player._platform).androidEqualizerBandSetGain(
+          AndroidEqualizerBandSetGainRequest(bandIndex: index, gain: gain));
+    } else if (_isDarwin()) {
+      await (await _player._platform).darwinEqualizerBandSetGain(
+          DarwinEqualizerBandSetGainRequest(bandIndex: index, gain: gain));
+    }
+  }
+
+  static EqualizerBand _fromAndroidMessage(
+          AudioPlayer player, AndroidEqualizerBandMessage message) =>
+      EqualizerBand._(
+        player: player,
+        index: message.index,
+        centerFrequency: message.centerFrequency,
+        gain: message.gain,
+      );
+
+  static EqualizerBand _fromDarwinMessage(
+          AudioPlayer player, DarwinEqualizerBandMessage message) =>
+      EqualizerBand._(
+        player: player,
+        index: message.index,
+        centerFrequency: message.centerFrequency,
+        gain: message.gain,
+      );
 }
 
 bool _isAndroid() => !kIsWeb && Platform.isAndroid;
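Reviewer note: the hunk above replaces the Android-only `AndroidEqualizer` with a cross-platform `Equalizer`. Below is a minimal usage sketch based only on the API added in this hunk; the URL, band frequencies and gain values are illustrative placeholders, not defaults shipped by the plugin.

```dart
import 'package:just_audio/just_audio.dart';

Future<void> main() async {
  // iOS/macOS cannot report a band layout, so it must be supplied up front.
  // These five center frequencies are illustrative placeholders.
  const centerFrequencies = [60.0, 230.0, 910.0, 3600.0, 14000.0];
  final equalizer = Equalizer(
    darwinMessageParameters: DarwinEqualizerParametersMessage(
      minDecibels: -24.0,
      maxDecibels: 24.0,
      bands: [
        for (var i = 0; i < centerFrequencies.length; i++)
          DarwinEqualizerBandMessage(
              index: i, centerFrequency: centerFrequencies[i], gain: 0.0),
      ],
    ),
  );
  // The same instance can be registered for both platforms because Equalizer
  // mixes in AndroidAudioEffect and DarwinAudioEffect, and _audioEffects now
  // selects whichever list matches the current platform.
  final player = AudioPlayer(
    audioPipeline: AudioPipeline(
      androidAudioEffects: [equalizer],
      darwinAudioEffects: [equalizer],
    ),
  );
  await equalizer.setEnabled(true);
  await player.setUrl('https://example.com/track.mp3'); // placeholder URL
  // On Android the parameters come back from the platform; on iOS/macOS they
  // echo the message passed to the constructor.
  final parameters = await equalizer.parameters;
  await parameters.bands.first.setGain(6.0); // boost the lowest band by 6 dB
  await player.dispose();
}
```

Passing one instance to both effect lists is the intended pattern here: the platform split in `_audioEffects` guarantees only the matching half is wired up at runtime.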
diff --git a/just_audio/macos/Classes/AudioSource.swift b/just_audio/macos/Classes/AudioSource.swift
new file mode 120000
index 000000000..a23d59187
--- /dev/null
+++ b/just_audio/macos/Classes/AudioSource.swift
@@ -0,0 +1 @@
+../../darwin/Classes/AudioSource.swift
\ No newline at end of file
diff --git a/just_audio/macos/Classes/BetterEventChannel.swift b/just_audio/macos/Classes/BetterEventChannel.swift
new file mode 120000
index 000000000..2fc1f3129
--- /dev/null
+++ b/just_audio/macos/Classes/BetterEventChannel.swift
@@ -0,0 +1 @@
+../../darwin/Classes/BetterEventChannel.swift
\ No newline at end of file
diff --git a/just_audio/macos/Classes/ConcatenatingAudioSource.swift b/just_audio/macos/Classes/ConcatenatingAudioSource.swift
new file mode 120000
index 000000000..cfac383e7
--- /dev/null
+++ b/just_audio/macos/Classes/ConcatenatingAudioSource.swift
@@ -0,0 +1 @@
+../../darwin/Classes/ConcatenatingAudioSource.swift
\ No newline at end of file
diff --git a/just_audio/macos/Classes/IndexedAudioSource.swift b/just_audio/macos/Classes/IndexedAudioSource.swift
new file mode 120000
index 000000000..9a2781a20
--- /dev/null
+++ b/just_audio/macos/Classes/IndexedAudioSource.swift
@@ -0,0 +1 @@
+../../darwin/Classes/IndexedAudioSource.swift
\ No newline at end of file
diff --git a/just_audio/macos/Classes/JustAudioPlayer.swift b/just_audio/macos/Classes/JustAudioPlayer.swift
new file mode 120000
index 000000000..3ec439582
--- /dev/null
+++ b/just_audio/macos/Classes/JustAudioPlayer.swift
@@ -0,0 +1 @@
+../../darwin/Classes/JustAudioPlayer.swift
\ No newline at end of file
diff --git a/just_audio/macos/Classes/JustAudioPlugin.h b/just_audio/macos/Classes/JustAudioPlugin.h
index 3f4068dea..56187a554 100644
--- a/just_audio/macos/Classes/JustAudioPlugin.h
+++ b/just_audio/macos/Classes/JustAudioPlugin.h
@@ -1,4 +1,4 @@
 #import <FlutterMacOS/FlutterMacOS.h>
 
 @interface JustAudioPlugin : NSObject<FlutterPlugin>
-@end
+@end
\ No newline at end of file
diff --git a/just_audio/macos/Classes/SwiftJustAudioPlugin.swift b/just_audio/macos/Classes/SwiftJustAudioPlugin.swift
new file mode 120000
index 000000000..d7d88e199
--- /dev/null
+++ b/just_audio/macos/Classes/SwiftJustAudioPlugin.swift
@@ -0,0 +1 @@
+../../darwin/Classes/SwiftJustAudioPlugin.swift
\ No newline at end of file
diff --git a/just_audio/macos/Classes/UriAudioSource.swift b/just_audio/macos/Classes/UriAudioSource.swift
new file mode 120000
index 000000000..b7d2e89d7
--- /dev/null
+++ b/just_audio/macos/Classes/UriAudioSource.swift
@@ -0,0 +1 @@
+../../darwin/Classes/UriAudioSource.swift
\ No newline at end of file
diff --git a/just_audio/macos/just_audio.podspec b/just_audio/macos/just_audio.podspec
index d1a251079..77c38a5c8 100644
--- a/just_audio/macos/just_audio.podspec
+++ b/just_audio/macos/just_audio.podspec
@@ -1,5 +1,6 @@
 #
-# To learn more about a Podspec see http://guides.cocoapods.org/syntax/podspec.html
+# To learn more about a Podspec see http://guides.cocoapods.org/syntax/podspec.html.
+# Run `pod lib lint just_audio.podspec` to validate before publishing.
 #
 Pod::Spec.new do |s|
   s.name             = 'just_audio'
@@ -12,10 +13,10 @@ A new flutter plugin project.
   s.license          = { :file => '../LICENSE' }
   s.author           = { 'Your Company' => 'email@example.com' }
   s.source           = { :path => '.' }
-  s.source_files = 'Classes/**/*'
-  s.public_header_files = 'Classes/**/*.h'
+  s.source_files     = 'Classes/**/*'
   s.dependency 'FlutterMacOS'
-  s.platform = :osx, '10.12.2'
+
+  s.platform = :osx, '10.11'
   s.pod_target_xcconfig = { 'DEFINES_MODULE' => 'YES' }
+  s.swift_version = '5.0'
 end
-
diff --git a/just_audio/macos_old/.gitignore b/just_audio/macos_old/.gitignore
new file mode 100644
index 000000000..aa479fd3c
--- /dev/null
+++ b/just_audio/macos_old/.gitignore
@@ -0,0 +1,37 @@
+.idea/
+.vagrant/
+.sconsign.dblite
+.svn/
+
+.DS_Store
+*.swp
+profile
+
+DerivedData/
+build/
+GeneratedPluginRegistrant.h
+GeneratedPluginRegistrant.m
+
+.generated/
+
+*.pbxuser
+*.mode1v3
+*.mode2v3
+*.perspectivev3
+
+!default.pbxuser
+!default.mode1v3
+!default.mode2v3
+!default.perspectivev3
+
+xcuserdata
+
+*.moved-aside
+
+*.pyc
+*sync/
+Icon?
+.tags*
+
+/Flutter/Generated.xcconfig
+/Flutter/flutter_export_environment.sh
\ No newline at end of file
diff --git a/just_audio/macos_old/Assets/.gitkeep b/just_audio/macos_old/Assets/.gitkeep
new file mode 100644
index 000000000..e69de29bb
diff --git a/just_audio/macos/Classes/AudioPlayer.h b/just_audio/macos_old/Classes/AudioPlayer.h
similarity index 100%
rename from just_audio/macos/Classes/AudioPlayer.h
rename to just_audio/macos_old/Classes/AudioPlayer.h
diff --git a/just_audio/macos/Classes/AudioPlayer.m b/just_audio/macos_old/Classes/AudioPlayer.m
similarity index 100%
rename from just_audio/macos/Classes/AudioPlayer.m
rename to just_audio/macos_old/Classes/AudioPlayer.m
diff --git a/just_audio/macos/Classes/AudioSource.h b/just_audio/macos_old/Classes/AudioSource.h
similarity index 100%
rename from just_audio/macos/Classes/AudioSource.h
rename to just_audio/macos_old/Classes/AudioSource.h
diff --git a/just_audio/macos/Classes/AudioSource.m b/just_audio/macos_old/Classes/AudioSource.m
similarity index 100%
rename from just_audio/macos/Classes/AudioSource.m
rename to just_audio/macos_old/Classes/AudioSource.m
diff --git a/just_audio/macos/Classes/BetterEventChannel.h b/just_audio/macos_old/Classes/BetterEventChannel.h
similarity index 100%
rename from just_audio/macos/Classes/BetterEventChannel.h
rename to just_audio/macos_old/Classes/BetterEventChannel.h
diff --git a/just_audio/macos/Classes/BetterEventChannel.m b/just_audio/macos_old/Classes/BetterEventChannel.m
similarity index 100%
rename from just_audio/macos/Classes/BetterEventChannel.m
rename to just_audio/macos_old/Classes/BetterEventChannel.m
diff --git a/just_audio/macos/Classes/ClippingAudioSource.h b/just_audio/macos_old/Classes/ClippingAudioSource.h
similarity index 100%
rename from just_audio/macos/Classes/ClippingAudioSource.h
rename to just_audio/macos_old/Classes/ClippingAudioSource.h
diff --git a/just_audio/macos/Classes/ClippingAudioSource.m b/just_audio/macos_old/Classes/ClippingAudioSource.m
similarity index 100%
rename from just_audio/macos/Classes/ClippingAudioSource.m
rename to just_audio/macos_old/Classes/ClippingAudioSource.m
diff --git a/just_audio/macos/Classes/ConcatenatingAudioSource.h b/just_audio/macos_old/Classes/ConcatenatingAudioSource.h
similarity index 100%
rename from just_audio/macos/Classes/ConcatenatingAudioSource.h
rename to just_audio/macos_old/Classes/ConcatenatingAudioSource.h
diff --git a/just_audio/macos/Classes/ConcatenatingAudioSource.m b/just_audio/macos_old/Classes/ConcatenatingAudioSource.m
similarity index 100%
rename from just_audio/macos/Classes/ConcatenatingAudioSource.m
rename to just_audio/macos_old/Classes/ConcatenatingAudioSource.m
diff --git a/just_audio/macos/Classes/IndexedAudioSource.h b/just_audio/macos_old/Classes/IndexedAudioSource.h
similarity index 100%
rename from just_audio/macos/Classes/IndexedAudioSource.h
rename to just_audio/macos_old/Classes/IndexedAudioSource.h
diff --git a/just_audio/macos/Classes/IndexedAudioSource.m b/just_audio/macos_old/Classes/IndexedAudioSource.m
similarity index 100%
rename from just_audio/macos/Classes/IndexedAudioSource.m
rename to just_audio/macos_old/Classes/IndexedAudioSource.m
diff --git a/just_audio/macos/Classes/IndexedPlayerItem.h b/just_audio/macos_old/Classes/IndexedPlayerItem.h
similarity index 100%
rename from just_audio/macos/Classes/IndexedPlayerItem.h
rename to just_audio/macos_old/Classes/IndexedPlayerItem.h
diff --git a/just_audio/macos/Classes/IndexedPlayerItem.m b/just_audio/macos_old/Classes/IndexedPlayerItem.m
similarity index 100%
rename from just_audio/macos/Classes/IndexedPlayerItem.m
rename to just_audio/macos_old/Classes/IndexedPlayerItem.m
diff --git a/just_audio/macos_old/Classes/JustAudioPlugin.h b/just_audio/macos_old/Classes/JustAudioPlugin.h
new file mode 100644
index 000000000..3f4068dea
--- /dev/null
+++ b/just_audio/macos_old/Classes/JustAudioPlugin.h
@@ -0,0 +1,4 @@
+#import <FlutterMacOS/FlutterMacOS.h>
+
+@interface JustAudioPlugin : NSObject<FlutterPlugin>
+@end
diff --git a/just_audio/macos_old/Classes/JustAudioPlugin.m b/just_audio/macos_old/Classes/JustAudioPlugin.m
new file mode 120000
index 000000000..8583f76e2
--- /dev/null
+++ b/just_audio/macos_old/Classes/JustAudioPlugin.m
@@ -0,0 +1 @@
+../../darwin/Classes/JustAudioPlugin.m
\ No newline at end of file
diff --git a/just_audio/macos/Classes/LoadControl.h b/just_audio/macos_old/Classes/LoadControl.h
similarity index 100%
rename from just_audio/macos/Classes/LoadControl.h
rename to just_audio/macos_old/Classes/LoadControl.h
diff --git a/just_audio/macos/Classes/LoadControl.m b/just_audio/macos_old/Classes/LoadControl.m
similarity index 100%
rename from just_audio/macos/Classes/LoadControl.m
rename to just_audio/macos_old/Classes/LoadControl.m
diff --git a/just_audio/macos/Classes/LoopingAudioSource.h b/just_audio/macos_old/Classes/LoopingAudioSource.h
similarity index 100%
rename from just_audio/macos/Classes/LoopingAudioSource.h
rename to just_audio/macos_old/Classes/LoopingAudioSource.h
diff --git a/just_audio/macos/Classes/LoopingAudioSource.m b/just_audio/macos_old/Classes/LoopingAudioSource.m
similarity index 100%
rename from just_audio/macos/Classes/LoopingAudioSource.m
rename to just_audio/macos_old/Classes/LoopingAudioSource.m
diff --git a/just_audio/macos/Classes/UriAudioSource.h b/just_audio/macos_old/Classes/UriAudioSource.h
similarity index 100%
rename from just_audio/macos/Classes/UriAudioSource.h
rename to just_audio/macos_old/Classes/UriAudioSource.h
diff --git a/just_audio/macos/Classes/UriAudioSource.m b/just_audio/macos_old/Classes/UriAudioSource.m
similarity index 100%
rename from just_audio/macos/Classes/UriAudioSource.m
rename to just_audio/macos_old/Classes/UriAudioSource.m
diff --git a/just_audio/macos_old/just_audio.podspec b/just_audio/macos_old/just_audio.podspec
new file mode 100644
index 000000000..d1a251079
--- /dev/null
+++ b/just_audio/macos_old/just_audio.podspec
@@ -0,0 +1,21 @@
+#
+# To learn more about a Podspec see http://guides.cocoapods.org/syntax/podspec.html
+#
+Pod::Spec.new do |s|
+  s.name             = 'just_audio'
+  s.version          = '0.0.1'
+  s.summary          = 'A new flutter plugin project.'
+  s.description      = <<-DESC
+A new flutter plugin project.
+                       DESC
+  s.homepage         = 'http://example.com'
+  s.license          = { :file => '../LICENSE' }
+  s.author           = { 'Your Company' => 'email@example.com' }
+  s.source           = { :path => '.' }
+  s.source_files     = 'Classes/**/*'
+  s.public_header_files = 'Classes/**/*.h'
+  s.dependency 'FlutterMacOS'
+  s.platform = :osx, '10.12.2'
+  s.pod_target_xcconfig = { 'DEFINES_MODULE' => 'YES' }
+end
+
diff --git a/just_audio/pubspec.yaml b/just_audio/pubspec.yaml
index e9f8b71cc..5a45bd23d 100644
--- a/just_audio/pubspec.yaml
+++ b/just_audio/pubspec.yaml
@@ -8,10 +8,17 @@ environment:
   flutter: ">=1.12.13+hotfix.5"
 
 dependencies:
-  just_audio_platform_interface: ^4.0.0
-  # just_audio_platform_interface:
-  #   path: ../just_audio_platform_interface
-  just_audio_web: ^0.4.2
+  # just_audio_platform_interface: ^4.0.0
+  just_audio_platform_interface:
+    git:
+      url: https://github.com/Kuama-IT/just_audio.git
+      ref: feature/swift_implementation
+      path: just_audio_platform_interface
+  just_audio_web:
+    git:
+      url: https://github.com/Kuama-IT/just_audio.git
+      ref: feature/swift_implementation
+      path: just_audio_web
   # just_audio_web:
   #   path: ../just_audio_web
   audio_session: ^0.1.6+1
@@ -32,6 +39,13 @@ dev_dependencies:
   plugin_platform_interface: ^2.0.0
   pedantic: ^1.10.0
 
+# Uncomment when testing platform interface changes.
+# dependency_overrides:
+#   just_audio_platform_interface:
+#     path: ../just_audio_platform_interface
+#   just_audio_web:
+#     path: ../../just_audio_web
+
 flutter:
   plugin:
     platforms:
diff --git a/just_audio/test/just_audio_test.dart b/just_audio/test/just_audio_test.dart
index 2c96733b0..00d3e8f6a 100644
--- a/just_audio/test/just_audio_test.dart
+++ b/just_audio/test/just_audio_test.dart
@@ -1207,7 +1207,9 @@ void runTests() {
     });
 
     test('AndroidEqualizer', () async {
-      final equalizer = AndroidEqualizer();
+      final equalizer = Equalizer(
+          darwinMessageParameters: DarwinEqualizerParametersMessage(
+              maxDecibels: 24, minDecibels: -24, bands: []));
       final player = AudioPlayer(
         audioPipeline: AudioPipeline(androidAudioEffects: [equalizer]),
       );
@@ -1227,8 +1229,6 @@ void runTests() {
       for (var i = 0; i < 5; i++) {
         final band = bands[i];
         expect(band.index, equals(i));
-        expect(band.lowerFrequency, equals(i * 1000));
-        expect(band.upperFrequency, equals((i + 1) * 1000));
         expect(band.centerFrequency, equals((i + 0.5) * 1000));
         expect(band.gain, equals(i * 0.1));
       }
@@ -1583,6 +1583,12 @@ class MockAudioPlayer implements AudioPlayerPlatform {
       AndroidEqualizerBandSetGainRequest request) async {
     return AndroidEqualizerBandSetGainResponse();
   }
+
+  @override
+  Future<DarwinEqualizerBandSetGainResponse> darwinEqualizerBandSetGain(
+      DarwinEqualizerBandSetGainRequest request) async {
+    return DarwinEqualizerBandSetGainResponse();
+  }
 }
 
 final byteRangeData = List.generate(200, (i) => i);
diff --git a/just_audio_background/pubspec.yaml b/just_audio_background/pubspec.yaml
index 0753cd6eb..0d6996315 100644
--- a/just_audio_background/pubspec.yaml
+++ b/just_audio_background/pubspec.yaml
@@ -4,7 +4,12 @@ homepage: https://github.com/ryanheise/just_audio/tree/master/just_audio_backgro
 version: 0.0.1-beta.1
 
 dependencies:
-  just_audio_platform_interface: ^4.0.0
+  just_audio_platform_interface:
+    git:
+      url: git@github.com:wavy-assistant/just_audio.git
+      ref: feature/new_ios_implementation
+      path: just_audio_platform_interface
+  # just_audio_platform_interface: ^4.0.0
   # just_audio_platform_interface:
   #   path: ../just_audio_platform_interface
   audio_service: ^0.18.0-beta.0
diff --git a/just_audio_platform_interface/lib/just_audio_platform_interface.dart b/just_audio_platform_interface/lib/just_audio_platform_interface.dart
index d1a58eb1c..c2f48e789 100644
--- a/just_audio_platform_interface/lib/just_audio_platform_interface.dart
+++ b/just_audio_platform_interface/lib/just_audio_platform_interface.dart
@@ -216,6 +216,13 @@ abstract class AudioPlayerPlatform {
     throw UnimplementedError(
         "androidEqualizerBandSetGain() has not been implemented.");
   }
+
+  /// Sets the gain for a Darwin equalizer band.
+  Future<DarwinEqualizerBandSetGainResponse> darwinEqualizerBandSetGain(
+      DarwinEqualizerBandSetGainRequest request) {
+    throw UnimplementedError(
+        "darwinEqualizerBandSetGain() has not been implemented.");
+  }
 }
 
 /// A data update communicated from the platform implementation to the Flutter
@@ -1259,6 +1266,33 @@ class AndroidEqualizerBandSetGainResponse {
   AndroidEqualizerBandSetGainResponse();
 }
 
+/// Information communicated to the platform implementation when setting the
+/// gain for an equalizer band.
+class DarwinEqualizerBandSetGainRequest {
+  final int bandIndex;
+  final double gain;
+
+  DarwinEqualizerBandSetGainRequest({
+    required this.bandIndex,
+    required this.gain,
+  });
+
+  Map<String, dynamic> toMap() => <String, dynamic>{
+        'bandIndex': bandIndex,
+        'gain': gain,
+      };
+}
+
+/// Information returned by the platform implementation after setting the gain
+/// for an equalizer band.
+class DarwinEqualizerBandSetGainResponse {
+  DarwinEqualizerBandSetGainResponse();
+
+  static DarwinEqualizerBandSetGainResponse fromMap(
+          Map<dynamic, dynamic> map) =>
+      DarwinEqualizerBandSetGainResponse();
+}
+
 /// Information about an audio effect to be communicated with the platform
 /// implementation.
 abstract class AudioEffectMessage {
@@ -1378,3 +1412,83 @@ class AndroidEqualizerMessage extends AudioEffectMessage {
         'parameters': parameters?.toMap(),
       };
 }
+
+/// Information about the equalizer parameters to be communicated with the
+/// platform implementation.
+class DarwinEqualizerParametersMessage {
+  final double minDecibels;
+  final double maxDecibels;
+  final List<DarwinEqualizerBandMessage> bands;
+
+  DarwinEqualizerParametersMessage({
+    required this.minDecibels,
+    required this.maxDecibels,
+    required this.bands,
+  });
+
+  Map<String, dynamic> toMap() => <String, dynamic>{
+        'minDecibels': minDecibels,
+        'maxDecibels': maxDecibels,
+        'bands': bands.map((band) => band.toMap()).toList(),
+      };
+
+  static DarwinEqualizerParametersMessage fromMap(Map<dynamic, dynamic> map) =>
+      DarwinEqualizerParametersMessage(
+        minDecibels: map['minDecibels'] as double,
+        maxDecibels: map['maxDecibels'] as double,
+        bands: (map['bands'] as List<dynamic>)
+            .map((dynamic bandMap) => DarwinEqualizerBandMessage.fromMap(
+                bandMap as Map<dynamic, dynamic>))
+            .toList(),
+      );
+}
+
+/// Information about the equalizer to be communicated with the platform
+/// implementation.
+class DarwinEqualizerMessage extends AudioEffectMessage {
+  final DarwinEqualizerParametersMessage parameters;
+
+  DarwinEqualizerMessage({
+    required bool enabled,
+    required this.parameters,
+  }) : super(enabled: enabled);
+
+  @override
+  Map<String, dynamic> toMap() => <String, dynamic>{
+        'type': 'DarwinEqualizer',
+        'enabled': enabled,
+        'parameters': parameters.toMap(),
+      };
+}
+
+/// Information about an equalizer band to be communicated with the platform
+/// implementation.
+class DarwinEqualizerBandMessage {
+  /// A zero-based index of the position of this band within its
+  /// [DarwinEqualizer].
+  final int index;
+
+  /// The center frequency of this band in hertz.
+  final double centerFrequency;
+
+  /// The gain for this band in decibels.
+  final double gain;
+
+  DarwinEqualizerBandMessage({
+    required this.index,
+    required this.centerFrequency,
+    required this.gain,
+  });
+
+  Map<String, dynamic> toMap() => <String, dynamic>{
+        'index': index,
+        'centerFrequency': centerFrequency,
+        'gain': gain,
+      };
+
+  static DarwinEqualizerBandMessage fromMap(Map<dynamic, dynamic> map) =>
+      DarwinEqualizerBandMessage(
+        index: map['index'] as int,
+        centerFrequency: map['centerFrequency'] as double,
+        gain: map['gain'] as double,
+      );
+}
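The new message classes above are plain value objects. The sketch below round-trips a `DarwinEqualizerParametersMessage` through `toMap`/`fromMap` exactly as a method-channel hop would, using only the shapes defined in this hunk; the values are arbitrary.

```dart
import 'package:just_audio_platform_interface/just_audio_platform_interface.dart';

void main() {
  final message = DarwinEqualizerParametersMessage(
    minDecibels: -24.0,
    maxDecibels: 24.0,
    bands: [
      DarwinEqualizerBandMessage(index: 0, centerFrequency: 60.0, gain: 3.0),
    ],
  );
  // toMap() produces the map sent across the method channel...
  final map = message.toMap();
  assert(map['maxDecibels'] == 24.0);
  // ...and fromMap() rebuilds an equivalent message on the receiving side.
  final decoded = DarwinEqualizerParametersMessage.fromMap(map);
  assert(decoded.bands.single.gain == 3.0);
}
```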
diff --git a/just_audio_platform_interface/lib/method_channel_just_audio.dart b/just_audio_platform_interface/lib/method_channel_just_audio.dart
index a71b8ee03..56bc6b0df 100644
--- a/just_audio_platform_interface/lib/method_channel_just_audio.dart
+++ b/just_audio_platform_interface/lib/method_channel_just_audio.dart
@@ -215,4 +215,12 @@ class MethodChannelAudioPlayer extends AudioPlayerPlatform {
         (await _channel.invokeMethod<Map<dynamic, dynamic>>(
             'androidEqualizerBandSetGain', request.toMap()))!);
   }
+
+  @override
+  Future<DarwinEqualizerBandSetGainResponse> darwinEqualizerBandSetGain(
+      DarwinEqualizerBandSetGainRequest request) async {
+    return DarwinEqualizerBandSetGainResponse.fromMap(
+        (await _channel.invokeMethod<Map<dynamic, dynamic>>(
+            'darwinEqualizerBandSetGain', request.toMap()))!);
+  }
 }
diff --git a/just_audio_web/pubspec.yaml b/just_audio_web/pubspec.yaml
index e3567a080..9987b7880 100644
--- a/just_audio_web/pubspec.yaml
+++ b/just_audio_web/pubspec.yaml
@@ -11,7 +11,11 @@ flutter:
         fileName: just_audio_web.dart
 
 dependencies:
-  just_audio_platform_interface: ^4.0.0
+  just_audio_platform_interface:
+    git:
+      url: https://github.com/Kuama-IT/just_audio.git
+      ref: feature/swift_implementation
+      path: just_audio_platform_interface
   flutter:
     sdk: flutter
   flutter_web_plugins:
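Because `darwinEqualizerBandSetGain` has a throwing default on `AudioPlayerPlatform`, test doubles only need to override the methods they actually exercise. Below is a sketch in the spirit of the `MockAudioPlayer` change above; the `RecordingAudioPlayer` name is hypothetical and it extends (rather than implements) the platform class so every other method keeps its default.

```dart
import 'package:just_audio_platform_interface/just_audio_platform_interface.dart';

/// Hypothetical test double that records the gains set by the code under
/// test. Only the new Darwin method is overridden; all other methods retain
/// their UnimplementedError defaults from AudioPlayerPlatform.
class RecordingAudioPlayer extends AudioPlayerPlatform {
  RecordingAudioPlayer(String id) : super(id);

  final Map<int, double> gains = {};

  @override
  Future<DarwinEqualizerBandSetGainResponse> darwinEqualizerBandSetGain(
      DarwinEqualizerBandSetGainRequest request) async {
    gains[request.bandIndex] = request.gain;
    return DarwinEqualizerBandSetGainResponse();
  }
}
```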