Update just_audio dart code to use platform interface.

This commit is contained in:
Ryan Heise 2020-09-26 23:32:43 +10:00
parent 3a44f844e2
commit b8ff881ccf
4 changed files with 152 additions and 78 deletions

View File

@@ -113,7 +113,14 @@ packages:
path: ".."
relative: true
source: path
version: "0.4.4"
version: "0.4.5"
just_audio_platform_interface:
dependency: transitive
description:
path: "../../just_audio_platform_interface"
relative: true
source: path
version: "0.0.1"
matcher:
dependency: transitive
description:

View File

@@ -6,6 +6,7 @@ import 'package:audio_session/audio_session.dart';
import 'package:flutter/foundation.dart';
import 'package:flutter/services.dart';
import 'package:flutter/widgets.dart';
import 'package:just_audio_platform_interface/just_audio_platform_interface.dart';
import 'package:path/path.dart' as p;
import 'package:path_provider/path_provider.dart';
import 'package:rxdart/rxdart.dart';
@@ -32,22 +33,17 @@ final _uuid = Uuid();
/// You must call [dispose] to release the resources used by this player,
/// including any temporary files created to cache assets.
class AudioPlayer {
static final _mainChannel = MethodChannel('com.ryanheise.just_audio.methods');
static Future<MethodChannel> _init(String id) async {
await _mainChannel.invokeMethod('init', [id]);
return MethodChannel('com.ryanheise.just_audio.methods.$id');
static Future<AudioPlayerPlatform> _init(String id) async {
return await JustAudioPlatform.instance.init(InitRequest(id: id));
}
final Future<MethodChannel> _channel;
final Future<AudioPlayerPlatform> _platform;
final String _id;
_ProxyHttpServer _proxy;
Stream<PlaybackEvent> _eventChannelStream;
AudioSource _audioSource;
Map<String, AudioSource> _audioSources = {};
PlaybackEvent _playbackEvent;
StreamSubscription<PlaybackEvent> _eventChannelStreamSubscription;
final _playbackEventSubject = BehaviorSubject<PlaybackEvent>();
Future<Duration> _durationFuture;
final _durationSubject = BehaviorSubject<Duration>();
@@ -78,7 +74,7 @@ class AudioPlayer {
AudioPlayer._internal(_uuid.v4(), handleInterruptions);
AudioPlayer._internal(this._id, bool handleInterruptions)
: _channel = _init(_id) {
: _platform = _init(_id) {
_playbackEvent = PlaybackEvent(
processingState: ProcessingState.none,
updatePosition: Duration.zero,
@@ -90,38 +86,6 @@ class AudioPlayer {
androidAudioSessionId: null,
);
_playbackEventSubject.add(_playbackEvent);
_eventChannelStream = EventChannel('com.ryanheise.just_audio.events.$_id')
.receiveBroadcastStream()
.map((data) {
try {
//print("received raw event: $data");
final duration = (data['duration'] ?? -1) < 0
? null
: Duration(milliseconds: data['duration']);
_durationFuture = Future.value(duration);
if (duration != _playbackEvent.duration) {
_durationSubject.add(duration);
}
_playbackEvent = PlaybackEvent(
processingState: ProcessingState.values[data['processingState']],
updatePosition: Duration(milliseconds: data['updatePosition']),
updateTime: DateTime.fromMillisecondsSinceEpoch(data['updateTime']),
bufferedPosition: Duration(milliseconds: data['bufferedPosition']),
duration: duration,
icyMetadata: data['icyMetadata'] == null
? null
: IcyMetadata.fromJson(data['icyMetadata']),
currentIndex: data['currentIndex'],
androidAudioSessionId: data['androidAudioSessionId'],
);
//print("created event object with state: ${_playbackEvent.state}");
return _playbackEvent;
} catch (e, stacktrace) {
print("Error parsing event: $e");
print("$stacktrace");
rethrow;
}
});
_processingStateSubject.addStream(playbackEventStream
.map((event) => event.processingState)
.distinct()
@@ -160,10 +124,34 @@ class AudioPlayer {
(playing, event) => PlayerState(playing, event.processingState))
.distinct()
.handleError((err, stack) {/* noop */}));
_eventChannelStreamSubscription = _eventChannelStream.listen(
_playbackEventSubject.add,
onError: _playbackEventSubject.addError,
_platform.then((platform) {
platform.playbackEventMessageStream.listen((message) {
try {
final playbackEvent = PlaybackEvent(
processingState:
ProcessingState.values[message.processingState.index],
updateTime: message.updateTime,
updatePosition: message.updatePosition,
bufferedPosition: message.bufferedPosition,
duration: message.duration,
icyMetadata: message.icyMetadata == null
? null
: IcyMetadata._fromMessage(message.icyMetadata),
currentIndex: message.currentIndex,
androidAudioSessionId: message.androidAudioSessionId,
);
_durationFuture = Future.value(playbackEvent.duration);
if (playbackEvent.duration != _playbackEvent.duration) {
_durationSubject.add(playbackEvent.duration);
}
_playbackEventSubject.add(_playbackEvent = playbackEvent);
} catch (e, stacktrace) {
print("Error parsing event: $e");
print("$stacktrace");
rethrow;
}
}, onError: _playbackEventSubject.addError);
});
_sequenceSubject.add(null);
// Respond to changes to AndroidAudioAttributes configuration.
AudioSession.instance.then((audioSession) {
@@ -491,8 +479,9 @@ class AudioPlayer {
}
}
await source._setup(this);
_durationFuture = _invokeMethod('load', [source.toJson()]).then(
(ms) => (ms == null || ms < 0) ? null : Duration(milliseconds: ms));
_durationFuture = (await _platform)
.load(LoadRequest(audioSourceMessage: source._toMessage()))
.then((response) => response.duration);
final duration = await _durationFuture;
_durationSubject.add(duration);
return duration;
@@ -549,7 +538,7 @@ class AudioPlayer {
final audioSession = await AudioSession.instance;
if (await audioSession.setActive(true)) {
_playingSubject.add(true);
await _invokeMethod('play');
await (await _platform).play(PlayRequest());
}
}
@@ -567,7 +556,7 @@ class AudioPlayer {
_playingSubject.add(false);
// TODO: perhaps modify platform side to ensure new state is broadcast
// before this method returns.
await _invokeMethod('pause');
await (await _platform).pause(PauseRequest());
}
/// Convenience method to pause and seek to zero.
@@ -579,7 +568,7 @@ class AudioPlayer {
/// Sets the volume of this player, where 1.0 is normal volume.
Future<void> setVolume(final double volume) async {
_volumeSubject.add(volume);
await _invokeMethod('setVolume', [volume]);
await (await _platform).setVolume(SetVolumeRequest(volume: volume));
}
/// Sets the playback speed of this player, where 1.0 is normal speed.
@@ -590,7 +579,7 @@ class AudioPlayer {
);
_playbackEventSubject.add(_playbackEvent);
_speedSubject.add(speed);
await _invokeMethod('setSpeed', [speed]);
await (await _platform).setSpeed(SetSpeedRequest(speed: speed));
}
/// Sets the [LoopMode]. The gapless looping support is as follows:
@@ -601,13 +590,16 @@ class AudioPlayer {
/// * Web: not supported
Future<void> setLoopMode(LoopMode mode) async {
_loopModeSubject.add(mode);
await _invokeMethod('setLoopMode', [mode.index]);
await (await _platform).setLoopMode(
SetLoopModeRequest(loopMode: LoopModeMessage.values[mode.index]));
}
/// Sets whether shuffle mode is enabled.
Future<void> setShuffleModeEnabled(bool enabled) async {
_shuffleModeEnabledSubject.add(enabled);
await _invokeMethod('setShuffleModeEnabled', [enabled]);
await (await _platform).setShuffleMode(SetShuffleModeRequest(
shuffleMode:
enabled ? ShuffleModeMessage.all : ShuffleModeMessage.none));
}
/// Sets automaticallyWaitsToMinimizeStalling for AVPlayer in iOS 10.0 or later, defaults to true.
@@ -616,8 +608,9 @@ class AudioPlayer {
final bool automaticallyWaitsToMinimizeStalling) async {
_automaticallyWaitsToMinimizeStalling =
automaticallyWaitsToMinimizeStalling;
await _invokeMethod('setAutomaticallyWaitsToMinimizeStalling',
[automaticallyWaitsToMinimizeStalling]);
await (await _platform).setAutomaticallyWaitsToMinimizeStalling(
SetAutomaticallyWaitsToMinimizeStallingRequest(
enabled: automaticallyWaitsToMinimizeStalling));
}
/// Seeks to a particular [position]. If a composition of multiple
@@ -635,7 +628,8 @@ class AudioPlayer {
updateTime: DateTime.now(),
);
_playbackEventSubject.add(_playbackEvent);
await _invokeMethod('seek', [position?.inMilliseconds, index]);
await (await _platform)
.seek(SeekRequest(position: position, index: index));
}
}
@@ -658,20 +652,22 @@ class AudioPlayer {
Future<void> setAndroidAudioAttributes(
AndroidAudioAttributes audioAttributes) async {
if (audioAttributes == null) return;
await _invokeMethod(
'setAndroidAudioAttributes', [audioAttributes.toJson()]);
await (await _platform).setAndroidAudioAttributes(
SetAndroidAudioAttributesRequest(
contentType: audioAttributes.contentType.index,
flags: audioAttributes.flags.value,
usage: audioAttributes.usage.value));
}
/// Release all resources associated with this player. You must invoke this
/// after you are done with the player.
Future<void> dispose() async {
await _invokeMethod('dispose');
await (await _platform).dispose(DisposeRequest());
_audioSource = null;
_audioSources.values.forEach((s) => s._dispose());
_audioSources.clear();
_proxy?.stop();
await _durationSubject.close();
await _eventChannelStreamSubscription.cancel();
await _loopModeSubject.close();
await _shuffleModeEnabledSubject.close();
await _playingSubject.close();
@@ -679,9 +675,6 @@ class AudioPlayer {
await _speedSubject.close();
await _sequenceSubject.close();
}
Future<dynamic> _invokeMethod(String method, [dynamic args]) async =>
(await _channel).invokeMethod(method, args);
}
/// Captures the details of any error accessing, loading or playing an audio
@@ -829,6 +822,11 @@ class IcyInfo {
final String title;
final String url;
static IcyInfo _fromMessage(IcyInfoMessage message) => IcyInfo(
title: message.title,
url: message.url,
);
IcyInfo({@required this.title, @required this.url});
IcyInfo.fromJson(Map json) : this(title: json['title'], url: json['url']);
@@ -852,6 +850,15 @@ class IcyHeaders {
final String url;
final bool isPublic;
static IcyHeaders _fromMessage(IcyHeadersMessage message) => IcyHeaders(
bitrate: message.bitrate,
genre: message.genre,
name: message.name,
metadataInterval: message.metadataInterval,
url: message.url,
isPublic: message.isPublic,
);
IcyHeaders({
@required this.bitrate,
@required this.genre,
@@ -887,6 +894,13 @@ class IcyMetadata {
final IcyInfo info;
final IcyHeaders headers;
static IcyMetadata _fromMessage(IcyMetadataMessage message) => IcyMetadata(
info: message.info == null ? null : IcyInfo._fromMessage(message.info),
headers: message.headers == null
? null
: IcyHeaders._fromMessage(message.headers),
);
IcyMetadata({@required this.info, @required this.headers});
IcyMetadata.fromJson(Map json)
@@ -1109,6 +1123,8 @@ abstract class AudioSource {
_player = null;
}
AudioSourceMessage _toMessage();
bool get _requiresHeaders;
List<IndexedAudioSource> get sequence;
@@ -1205,6 +1221,10 @@ abstract class UriAudioSource extends IndexedAudioSource {
class ProgressiveAudioSource extends UriAudioSource {
ProgressiveAudioSource(Uri uri, {Map headers, dynamic tag})
: super(uri, headers: headers, tag: tag, type: 'progressive');
@override
AudioSourceMessage _toMessage() => ProgressiveAudioSourceMessage(
id: _id, uri: uri.toString(), headers: headers);
}
/// An [AudioSource] representing a DASH stream. The following URI schemes are
@@ -1221,6 +1241,10 @@ class ProgressiveAudioSource extends UriAudioSource {
class DashAudioSource extends UriAudioSource {
DashAudioSource(Uri uri, {Map headers, dynamic tag})
: super(uri, headers: headers, tag: tag, type: 'dash');
@override
AudioSourceMessage _toMessage() =>
DashAudioSourceMessage(id: _id, uri: uri.toString(), headers: headers);
}
/// An [AudioSource] representing an HLS stream. The following URI schemes are
@@ -1236,6 +1260,10 @@ class DashAudioSource extends UriAudioSource {
class HlsAudioSource extends UriAudioSource {
HlsAudioSource(Uri uri, {Map headers, dynamic tag})
: super(uri, headers: headers, tag: tag, type: 'hls');
@override
AudioSourceMessage _toMessage() =>
HlsAudioSourceMessage(id: _id, uri: uri.toString(), headers: headers);
}
/// An [AudioSource] representing a concatenation of multiple audio sources to
@@ -1267,8 +1295,11 @@ class ConcatenatingAudioSource extends AudioSource {
children.add(audioSource);
_player._broadcastSequence();
if (_player != null) {
await _player
._invokeMethod('concatenating.add', [_id, audioSource.toJson()]);
await (await _player._platform).concatenatingInsertAll(
ConcatenatingInsertAllRequest(
id: _id,
index: children.length,
children: [audioSource._toMessage()]));
}
}
@@ -1277,8 +1308,9 @@ class ConcatenatingAudioSource extends AudioSource {
children.insert(index, audioSource);
_player._broadcastSequence();
if (_player != null) {
await _player._invokeMethod(
'concatenating.insert', [_id, index, audioSource.toJson()]);
await (await _player._platform).concatenatingInsertAll(
ConcatenatingInsertAllRequest(
id: _id, index: index, children: [audioSource._toMessage()]));
}
}
@@ -1287,8 +1319,11 @@ class ConcatenatingAudioSource extends AudioSource {
this.children.addAll(children);
_player._broadcastSequence();
if (_player != null) {
await _player._invokeMethod('concatenating.addAll',
[_id, children.map((s) => s.toJson()).toList()]);
await (await _player._platform).concatenatingInsertAll(
ConcatenatingInsertAllRequest(
id: _id,
index: this.children.length,
children: children.map((child) => child._toMessage()).toList()));
}
}
@@ -1297,8 +1332,11 @@ class ConcatenatingAudioSource extends AudioSource {
this.children.insertAll(index, children);
_player._broadcastSequence();
if (_player != null) {
await _player._invokeMethod('concatenating.insertAll',
[_id, index, children.map((s) => s.toJson()).toList()]);
await (await _player._platform).concatenatingInsertAll(
ConcatenatingInsertAllRequest(
id: _id,
index: index,
children: children.map((child) => child._toMessage()).toList()));
}
}
@@ -1308,7 +1346,9 @@ class ConcatenatingAudioSource extends AudioSource {
children.removeAt(index);
_player._broadcastSequence();
if (_player != null) {
await _player._invokeMethod('concatenating.removeAt', [_id, index]);
await (await _player._platform).concatenatingRemoveRange(
ConcatenatingRemoveRangeRequest(
id: _id, startIndex: index, endIndex: index + 1));
}
}
@@ -1318,8 +1358,9 @@ class ConcatenatingAudioSource extends AudioSource {
children.removeRange(start, end);
_player._broadcastSequence();
if (_player != null) {
await _player
._invokeMethod('concatenating.removeRange', [_id, start, end]);
await (await _player._platform).concatenatingRemoveRange(
ConcatenatingRemoveRangeRequest(
id: _id, startIndex: start, endIndex: end));
}
}
@@ -1328,8 +1369,9 @@ class ConcatenatingAudioSource extends AudioSource {
children.insert(newIndex, children.removeAt(currentIndex));
_player._broadcastSequence();
if (_player != null) {
await _player
._invokeMethod('concatenating.move', [_id, currentIndex, newIndex]);
await (await _player._platform).concatenatingMove(
ConcatenatingMoveRequest(
id: _id, currentIndex: currentIndex, newIndex: newIndex));
}
}
@@ -1338,7 +1380,9 @@ class ConcatenatingAudioSource extends AudioSource {
children.clear();
_player._broadcastSequence();
if (_player != null) {
await _player._invokeMethod('concatenating.clear', [_id]);
await (await _player._platform).concatenatingRemoveRange(
ConcatenatingRemoveRangeRequest(
id: _id, startIndex: 0, endIndex: children.length));
}
}
@@ -1362,6 +1406,12 @@ class ConcatenatingAudioSource extends AudioSource {
'audioSources': children.map((source) => source.toJson()).toList(),
'useLazyPreparation': useLazyPreparation,
};
@override
AudioSourceMessage _toMessage() => ConcatenatingAudioSourceMessage(
id: _id,
children: children.map((child) => child._toMessage()).toList(),
useLazyPreparation: useLazyPreparation);
}
/// An [AudioSource] that clips the audio of a [UriAudioSource] between a
@@ -1398,6 +1448,10 @@ class ClippingAudioSource extends IndexedAudioSource {
'start': start?.inMilliseconds,
'end': end?.inMilliseconds,
};
@override
AudioSourceMessage _toMessage() => ClippingAudioSourceMessage(
id: _id, child: child._toMessage(), start: start, end: end);
}
// An [AudioSource] that loops a nested [AudioSource] a finite number of times.
@@ -1436,6 +1490,10 @@ class LoopingAudioSource extends AudioSource {
'audioSource': child.toJson(),
'count': count,
};
@override
AudioSourceMessage _toMessage() => LoopingAudioSourceMessage(
id: _id, child: child._toMessage(), count: count);
}
enum LoopMode { off, one, all }

View File

@@ -100,6 +100,13 @@ packages:
url: "https://pub.dartlang.org"
source: hosted
version: "0.16.1"
just_audio_platform_interface:
dependency: "direct main"
description:
path: "../just_audio_platform_interface"
relative: true
source: path
version: "0.0.1"
matcher:
dependency: transitive
description:

View File

@@ -8,6 +8,8 @@ environment:
flutter: ">=1.12.13+hotfix.5"
dependencies:
just_audio_platform_interface:
path: ../just_audio_platform_interface
audio_session: ^0.0.7
rxdart: ^0.24.1
path: ^1.6.4