Update just_audio dart code to use platform interface.
This commit is contained in:
parent 3a44f844e2 · commit b8ff881ccf

@@ -113,7 +113,14 @@ packages:
       path: ".."
       relative: true
     source: path
-    version: "0.4.4"
+    version: "0.4.5"
+  just_audio_platform_interface:
+    dependency: transitive
+    description:
+      path: "../../just_audio_platform_interface"
+      relative: true
+    source: path
+    version: "0.0.1"
   matcher:
     dependency: transitive
     description:
@@ -6,6 +6,7 @@ import 'package:audio_session/audio_session.dart';
 import 'package:flutter/foundation.dart';
 import 'package:flutter/services.dart';
 import 'package:flutter/widgets.dart';
+import 'package:just_audio_platform_interface/just_audio_platform_interface.dart';
 import 'package:path/path.dart' as p;
 import 'package:path_provider/path_provider.dart';
 import 'package:rxdart/rxdart.dart';
@@ -32,22 +33,17 @@ final _uuid = Uuid();
 /// You must call [dispose] to release the resources used by this player,
 /// including any temporary files created to cache assets.
 class AudioPlayer {
-  static final _mainChannel = MethodChannel('com.ryanheise.just_audio.methods');
-
-  static Future<MethodChannel> _init(String id) async {
-    await _mainChannel.invokeMethod('init', [id]);
-    return MethodChannel('com.ryanheise.just_audio.methods.$id');
+  static Future<AudioPlayerPlatform> _init(String id) async {
+    return await JustAudioPlatform.instance.init(InitRequest(id: id));
   }
 
-  final Future<MethodChannel> _channel;
+  final Future<AudioPlayerPlatform> _platform;
   final String _id;
   _ProxyHttpServer _proxy;
-  Stream<PlaybackEvent> _eventChannelStream;
   AudioSource _audioSource;
   Map<String, AudioSource> _audioSources = {};
 
   PlaybackEvent _playbackEvent;
-  StreamSubscription<PlaybackEvent> _eventChannelStreamSubscription;
   final _playbackEventSubject = BehaviorSubject<PlaybackEvent>();
   Future<Duration> _durationFuture;
   final _durationSubject = BehaviorSubject<Duration>();
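
Note: this hunk is the heart of the refactor. The frontend no longer
constructs MethodChannels itself; it asks JustAudioPlatform.instance for an
AudioPlayerPlatform. A minimal sketch of the pattern, with simplified names
and bodies rather than the real just_audio_platform_interface surface:

    // Sketch only: the swappable singleton of a federated plugin.
    class InitRequest {
      const InitRequest({this.id});
      final String id;
    }

    // One platform player per AudioPlayer, created by init().
    abstract class AudioPlayerPlatform {}

    abstract class JustAudioPlatform {
      // Defaults to the method-channel implementation; just_audio_web or a
      // test double can replace it at registration time.
      static JustAudioPlatform instance = MethodChannelJustAudio();

      Future<AudioPlayerPlatform> init(InitRequest request);
    }

    class MethodChannelJustAudio extends JustAudioPlatform {
      @override
      Future<AudioPlayerPlatform> init(InitRequest request) async =>
          throw UnimplementedError('sketch: would invoke the init call');
    }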
@@ -78,7 +74,7 @@ class AudioPlayer {
       AudioPlayer._internal(_uuid.v4(), handleInterruptions);
 
   AudioPlayer._internal(this._id, bool handleInterruptions)
-      : _channel = _init(_id) {
+      : _platform = _init(_id) {
     _playbackEvent = PlaybackEvent(
       processingState: ProcessingState.none,
       updatePosition: Duration.zero,
@@ -90,38 +86,6 @@ class AudioPlayer {
       androidAudioSessionId: null,
     );
     _playbackEventSubject.add(_playbackEvent);
-    _eventChannelStream = EventChannel('com.ryanheise.just_audio.events.$_id')
-        .receiveBroadcastStream()
-        .map((data) {
-      try {
-        //print("received raw event: $data");
-        final duration = (data['duration'] ?? -1) < 0
-            ? null
-            : Duration(milliseconds: data['duration']);
-        _durationFuture = Future.value(duration);
-        if (duration != _playbackEvent.duration) {
-          _durationSubject.add(duration);
-        }
-        _playbackEvent = PlaybackEvent(
-          processingState: ProcessingState.values[data['processingState']],
-          updatePosition: Duration(milliseconds: data['updatePosition']),
-          updateTime: DateTime.fromMillisecondsSinceEpoch(data['updateTime']),
-          bufferedPosition: Duration(milliseconds: data['bufferedPosition']),
-          duration: duration,
-          icyMetadata: data['icyMetadata'] == null
-              ? null
-              : IcyMetadata.fromJson(data['icyMetadata']),
-          currentIndex: data['currentIndex'],
-          androidAudioSessionId: data['androidAudioSessionId'],
-        );
-        //print("created event object with state: ${_playbackEvent.state}");
-        return _playbackEvent;
-      } catch (e, stacktrace) {
-        print("Error parsing event: $e");
-        print("$stacktrace");
-        rethrow;
-      }
-    });
     _processingStateSubject.addStream(playbackEventStream
         .map((event) => event.processingState)
         .distinct()
@@ -160,10 +124,34 @@ class AudioPlayer {
             (playing, event) => PlayerState(playing, event.processingState))
         .distinct()
         .handleError((err, stack) {/* noop */}));
-    _eventChannelStreamSubscription = _eventChannelStream.listen(
-      _playbackEventSubject.add,
-      onError: _playbackEventSubject.addError,
-    );
+    _platform.then((platform) {
+      platform.playbackEventMessageStream.listen((message) {
+        try {
+          final playbackEvent = PlaybackEvent(
+            processingState:
+                ProcessingState.values[message.processingState.index],
+            updateTime: message.updateTime,
+            updatePosition: message.updatePosition,
+            bufferedPosition: message.bufferedPosition,
+            duration: message.duration,
+            icyMetadata: message.icyMetadata == null
+                ? null
+                : IcyMetadata._fromMessage(message.icyMetadata),
+            currentIndex: message.currentIndex,
+            androidAudioSessionId: message.androidAudioSessionId,
+          );
+          _durationFuture = Future.value(playbackEvent.duration);
+          if (playbackEvent.duration != _playbackEvent.duration) {
+            _durationSubject.add(playbackEvent.duration);
+          }
+          _playbackEventSubject.add(_playbackEvent = playbackEvent);
+        } catch (e, stacktrace) {
+          print("Error parsing event: $e");
+          print("$stacktrace");
+          rethrow;
+        }
+      }, onError: _playbackEventSubject.addError);
+    });
     _sequenceSubject.add(null);
     // Respond to changes to AndroidAudioAttributes configuration.
     AudioSession.instance.then((audioSession) {
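
Note: the raw EventChannel map decoding deleted above now lives behind the
interface; the frontend consumes a typed playbackEventMessageStream and only
translates each message into the public PlaybackEvent model. Client code is
unaffected, e.g.:

    import 'package:just_audio/just_audio.dart';

    void main() {
      final player = AudioPlayer();
      // Same public stream as before the refactor; only the plumbing moved.
      player.playbackEventStream.listen((event) {
        print('${event.processingState} @ ${event.updatePosition}');
      });
    }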
@@ -491,8 +479,9 @@ class AudioPlayer {
       }
     }
     await source._setup(this);
-    _durationFuture = _invokeMethod('load', [source.toJson()]).then(
-        (ms) => (ms == null || ms < 0) ? null : Duration(milliseconds: ms));
+    _durationFuture = (await _platform)
+        .load(LoadRequest(audioSourceMessage: source._toMessage()))
+        .then((response) => response.duration);
     final duration = await _durationFuture;
     _durationSubject.add(duration);
     return duration;
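
Note: load() previously decoded a raw millisecond count, with negative values
standing in for an unknown duration. The typed response carries a nullable
Duration directly, so that sentinel decoding moves into each platform
implementation. The response shape below is an assumption inferred from the
call site, not the actual interface source:

    class LoadResponse {
      const LoadResponse({this.duration});
      final Duration duration; // null when unknown, e.g. a live stream
    }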
@@ -549,7 +538,7 @@ class AudioPlayer {
     final audioSession = await AudioSession.instance;
     if (await audioSession.setActive(true)) {
       _playingSubject.add(true);
-      await _invokeMethod('play');
+      await (await _platform).play(PlayRequest());
     }
   }
 
@@ -567,7 +556,7 @@ class AudioPlayer {
     _playingSubject.add(false);
     // TODO: perhaps modify platform side to ensure new state is broadcast
     // before this method returns.
-    await _invokeMethod('pause');
+    await (await _platform).pause(PauseRequest());
   }
 
   /// Convenience method to pause and seek to zero.
@@ -579,7 +568,7 @@ class AudioPlayer {
   /// Sets the volume of this player, where 1.0 is normal volume.
   Future<void> setVolume(final double volume) async {
     _volumeSubject.add(volume);
-    await _invokeMethod('setVolume', [volume]);
+    await (await _platform).setVolume(SetVolumeRequest(volume: volume));
   }
 
   /// Sets the playback speed of this player, where 1.0 is normal speed.
@@ -590,7 +579,7 @@ class AudioPlayer {
     );
     _playbackEventSubject.add(_playbackEvent);
     _speedSubject.add(speed);
-    await _invokeMethod('setSpeed', [speed]);
+    await (await _platform).setSpeed(SetSpeedRequest(speed: speed));
   }
 
   /// Sets the [LoopMode]. The gapless looping support is as follows:
@@ -601,13 +590,16 @@ class AudioPlayer {
   /// * Web: not supported
   Future<void> setLoopMode(LoopMode mode) async {
     _loopModeSubject.add(mode);
-    await _invokeMethod('setLoopMode', [mode.index]);
+    await (await _platform).setLoopMode(
+        SetLoopModeRequest(loopMode: LoopModeMessage.values[mode.index]));
   }
 
   /// Sets whether shuffle mode is enabled.
   Future<void> setShuffleModeEnabled(bool enabled) async {
     _shuffleModeEnabledSubject.add(enabled);
-    await _invokeMethod('setShuffleModeEnabled', [enabled]);
+    await (await _platform).setShuffleMode(SetShuffleModeRequest(
+        shuffleMode:
+            enabled ? ShuffleModeMessage.all : ShuffleModeMessage.none));
   }
 
   /// Sets automaticallyWaitsToMinimizeStalling for AVPlayer in iOS 10.0 or later, defaults to true.
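
Note: frontend enums are translated to message-layer enums by index
(LoopModeMessage.values[mode.index]), which is safe only while both
declarations keep the same order; the bool shuffle flag likewise maps onto
ShuffleModeMessage.all/none. The idea in isolation:

    enum LoopMode { off, one, all }
    enum LoopModeMessage { off, one, all }

    // Index-based translation between parallel enums.
    LoopModeMessage toMessage(LoopMode mode) =>
        LoopModeMessage.values[mode.index];

    void main() {
      assert(toMessage(LoopMode.one) == LoopModeMessage.one);
    }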
@@ -616,8 +608,9 @@ class AudioPlayer {
       final bool automaticallyWaitsToMinimizeStalling) async {
     _automaticallyWaitsToMinimizeStalling =
         automaticallyWaitsToMinimizeStalling;
-    await _invokeMethod('setAutomaticallyWaitsToMinimizeStalling',
-        [automaticallyWaitsToMinimizeStalling]);
+    await (await _platform).setAutomaticallyWaitsToMinimizeStalling(
+        SetAutomaticallyWaitsToMinimizeStallingRequest(
+            enabled: automaticallyWaitsToMinimizeStalling));
   }
 
   /// Seeks to a particular [position]. If a composition of multiple
@@ -635,7 +628,8 @@ class AudioPlayer {
           updateTime: DateTime.now(),
         );
         _playbackEventSubject.add(_playbackEvent);
-        await _invokeMethod('seek', [position?.inMilliseconds, index]);
+        await (await _platform)
+            .seek(SeekRequest(position: position, index: index));
     }
   }
 
@@ -658,20 +652,22 @@ class AudioPlayer {
   Future<void> setAndroidAudioAttributes(
       AndroidAudioAttributes audioAttributes) async {
     if (audioAttributes == null) return;
-    await _invokeMethod(
-        'setAndroidAudioAttributes', [audioAttributes.toJson()]);
+    await (await _platform).setAndroidAudioAttributes(
+        SetAndroidAudioAttributesRequest(
+            contentType: audioAttributes.contentType.index,
+            flags: audioAttributes.flags.value,
+            usage: audioAttributes.usage.value));
   }
 
   /// Release all resources associated with this player. You must invoke this
   /// after you are done with the player.
   Future<void> dispose() async {
-    await _invokeMethod('dispose');
+    await (await _platform).dispose(DisposeRequest());
     _audioSource = null;
     _audioSources.values.forEach((s) => s._dispose());
     _audioSources.clear();
     _proxy?.stop();
     await _durationSubject.close();
-    await _eventChannelStreamSubscription.cancel();
     await _loopModeSubject.close();
     await _shuffleModeEnabledSubject.close();
     await _playingSubject.close();
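
Note: setAndroidAudioAttributes no longer forwards audio_session's toJson()
map; it flattens the attributes into three primitive fields so the platform
interface avoids a dependency on audio_session. The request shape below is
assumed from the call site:

    class SetAndroidAudioAttributesRequest {
      const SetAndroidAudioAttributesRequest(
          {this.contentType, this.flags, this.usage});
      final int contentType; // AndroidAudioContentType index
      final int flags; // AndroidAudioFlags bitmask value
      final int usage; // AndroidAudioUsage value
    }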
@@ -679,9 +675,6 @@ class AudioPlayer {
     await _speedSubject.close();
     await _sequenceSubject.close();
   }
-
-  Future<dynamic> _invokeMethod(String method, [dynamic args]) async =>
-      (await _channel).invokeMethod(method, args);
 }
 
 /// Captures the details of any error accessing, loading or playing an audio
|
@ -829,6 +822,11 @@ class IcyInfo {
|
||||||
final String title;
|
final String title;
|
||||||
final String url;
|
final String url;
|
||||||
|
|
||||||
|
static IcyInfo _fromMessage(IcyInfoMessage message) => IcyInfo(
|
||||||
|
title: message.title,
|
||||||
|
url: message.url,
|
||||||
|
);
|
||||||
|
|
||||||
IcyInfo({@required this.title, @required this.url});
|
IcyInfo({@required this.title, @required this.url});
|
||||||
|
|
||||||
IcyInfo.fromJson(Map json) : this(title: json['title'], url: json['url']);
|
IcyInfo.fromJson(Map json) : this(title: json['title'], url: json['url']);
|
||||||
|
@@ -852,6 +850,15 @@ class IcyHeaders {
   final String url;
   final bool isPublic;
 
+  static IcyHeaders _fromMessage(IcyHeadersMessage message) => IcyHeaders(
+        bitrate: message.bitrate,
+        genre: message.genre,
+        name: message.name,
+        metadataInterval: message.metadataInterval,
+        url: message.url,
+        isPublic: message.isPublic,
+      );
+
   IcyHeaders({
     @required this.bitrate,
     @required this.genre,
@@ -887,6 +894,13 @@ class IcyMetadata {
   final IcyInfo info;
   final IcyHeaders headers;
 
+  static IcyMetadata _fromMessage(IcyMetadataMessage message) => IcyMetadata(
+        info: message.info == null ? null : IcyInfo._fromMessage(message.info),
+        headers: message.headers == null
+            ? null
+            : IcyHeaders._fromMessage(message.headers),
+      );
+
   IcyMetadata({@required this.info, @required this.headers});
 
   IcyMetadata.fromJson(Map json)
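
Note: the _fromMessage factories added to IcyInfo, IcyHeaders and IcyMetadata
mirror the existing fromJson constructors field for field, but read from typed
message objects; IcyMetadata._fromMessage propagates null for a missing info
or headers block exactly as the old map decoding did.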
@@ -1109,6 +1123,8 @@ abstract class AudioSource {
     _player = null;
   }
 
+  AudioSourceMessage _toMessage();
+
   bool get _requiresHeaders;
 
   List<IndexedAudioSource> get sequence;
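
Note: each AudioSource now serializes itself through _toMessage() instead of
toJson(), handing the platform interface a typed tree rather than nested maps.
A sketch of the message side that the overrides below target, with names taken
from the diff but fields simplified:

    abstract class AudioSourceMessage {
      const AudioSourceMessage({this.id});
      final String id;
    }

    class ProgressiveAudioSourceMessage extends AudioSourceMessage {
      const ProgressiveAudioSourceMessage({String id, this.uri, this.headers})
          : super(id: id);
      final String uri;
      final Map headers;
    }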
@@ -1205,6 +1221,10 @@ abstract class UriAudioSource extends IndexedAudioSource {
 class ProgressiveAudioSource extends UriAudioSource {
   ProgressiveAudioSource(Uri uri, {Map headers, dynamic tag})
       : super(uri, headers: headers, tag: tag, type: 'progressive');
+
+  @override
+  AudioSourceMessage _toMessage() => ProgressiveAudioSourceMessage(
+      id: _id, uri: uri.toString(), headers: headers);
 }
 
 /// An [AudioSource] representing a DASH stream. The following URI schemes are
|
@ -1221,6 +1241,10 @@ class ProgressiveAudioSource extends UriAudioSource {
|
||||||
class DashAudioSource extends UriAudioSource {
|
class DashAudioSource extends UriAudioSource {
|
||||||
DashAudioSource(Uri uri, {Map headers, dynamic tag})
|
DashAudioSource(Uri uri, {Map headers, dynamic tag})
|
||||||
: super(uri, headers: headers, tag: tag, type: 'dash');
|
: super(uri, headers: headers, tag: tag, type: 'dash');
|
||||||
|
|
||||||
|
@override
|
||||||
|
AudioSourceMessage _toMessage() =>
|
||||||
|
DashAudioSourceMessage(id: _id, uri: uri.toString(), headers: headers);
|
||||||
}
|
}
|
||||||
|
|
||||||
/// An [AudioSource] representing an HLS stream. The following URI schemes are
|
/// An [AudioSource] representing an HLS stream. The following URI schemes are
|
||||||
|
@ -1236,6 +1260,10 @@ class DashAudioSource extends UriAudioSource {
|
||||||
class HlsAudioSource extends UriAudioSource {
|
class HlsAudioSource extends UriAudioSource {
|
||||||
HlsAudioSource(Uri uri, {Map headers, dynamic tag})
|
HlsAudioSource(Uri uri, {Map headers, dynamic tag})
|
||||||
: super(uri, headers: headers, tag: tag, type: 'hls');
|
: super(uri, headers: headers, tag: tag, type: 'hls');
|
||||||
|
|
||||||
|
@override
|
||||||
|
AudioSourceMessage _toMessage() =>
|
||||||
|
HlsAudioSourceMessage(id: _id, uri: uri.toString(), headers: headers);
|
||||||
}
|
}
|
||||||
|
|
||||||
/// An [AudioSource] representing a concatenation of multiple audio sources to
|
/// An [AudioSource] representing a concatenation of multiple audio sources to
|
||||||
|
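
Note: each Uri-based subtype maps to its own message class, preserving the
stream-type distinction that the old string type field ('progressive', 'dash',
'hls') encoded. Client-side construction is unchanged (URLs hypothetical):

    final progressive =
        ProgressiveAudioSource(Uri.parse('https://example.com/track.mp3'));
    final dash = DashAudioSource(Uri.parse('https://example.com/stream.mpd'));
    final hls = HlsAudioSource(Uri.parse('https://example.com/stream.m3u8'));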
@@ -1267,8 +1295,11 @@ class ConcatenatingAudioSource extends AudioSource {
     children.add(audioSource);
     _player._broadcastSequence();
     if (_player != null) {
-      await _player
-          ._invokeMethod('concatenating.add', [_id, audioSource.toJson()]);
+      await (await _player._platform).concatenatingInsertAll(
+          ConcatenatingInsertAllRequest(
+              id: _id,
+              index: children.length,
+              children: [audioSource._toMessage()]));
     }
   }
 
@@ -1277,8 +1308,9 @@ class ConcatenatingAudioSource extends AudioSource {
     children.insert(index, audioSource);
     _player._broadcastSequence();
     if (_player != null) {
-      await _player._invokeMethod(
-          'concatenating.insert', [_id, index, audioSource.toJson()]);
+      await (await _player._platform).concatenatingInsertAll(
+          ConcatenatingInsertAllRequest(
+              id: _id, index: index, children: [audioSource._toMessage()]));
     }
   }
 
@@ -1287,8 +1319,11 @@ class ConcatenatingAudioSource extends AudioSource {
     this.children.addAll(children);
     _player._broadcastSequence();
     if (_player != null) {
-      await _player._invokeMethod('concatenating.addAll',
-          [_id, children.map((s) => s.toJson()).toList()]);
+      await (await _player._platform).concatenatingInsertAll(
+          ConcatenatingInsertAllRequest(
+              id: _id,
+              index: this.children.length,
+              children: children.map((child) => child._toMessage()).toList()));
     }
   }
 
@@ -1297,8 +1332,11 @@ class ConcatenatingAudioSource extends AudioSource {
     this.children.insertAll(index, children);
     _player._broadcastSequence();
     if (_player != null) {
-      await _player._invokeMethod('concatenating.insertAll',
-          [_id, index, children.map((s) => s.toJson()).toList()]);
+      await (await _player._platform).concatenatingInsertAll(
+          ConcatenatingInsertAllRequest(
+              id: _id,
+              index: index,
+              children: children.map((child) => child._toMessage()).toList()));
     }
   }
 
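
Note: add, insert, addAll and insertAll all funnel into the single
concatenatingInsertAll call, differing only in index and child count. Client
usage is unchanged; a short example assuming the children: named parameter on
the ConcatenatingAudioSource constructor (URLs hypothetical):

    Future<void> buildPlaylist() async {
      final playlist = ConcatenatingAudioSource(children: []);
      await playlist.add(
          ProgressiveAudioSource(Uri.parse('https://example.com/a.mp3')));
      await playlist.insert(
          0, ProgressiveAudioSource(Uri.parse('https://example.com/b.mp3')));
    }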
@@ -1308,7 +1346,9 @@ class ConcatenatingAudioSource extends AudioSource {
     children.removeAt(index);
     _player._broadcastSequence();
     if (_player != null) {
-      await _player._invokeMethod('concatenating.removeAt', [_id, index]);
+      await (await _player._platform).concatenatingRemoveRange(
+          ConcatenatingRemoveRangeRequest(
+              id: _id, startIndex: index, endIndex: index + 1));
     }
   }
 
@@ -1318,8 +1358,9 @@ class ConcatenatingAudioSource extends AudioSource {
     children.removeRange(start, end);
     _player._broadcastSequence();
     if (_player != null) {
-      await _player
-          ._invokeMethod('concatenating.removeRange', [_id, start, end]);
+      await (await _player._platform).concatenatingRemoveRange(
+          ConcatenatingRemoveRangeRequest(
+              id: _id, startIndex: start, endIndex: end));
     }
   }
 
@@ -1328,8 +1369,9 @@ class ConcatenatingAudioSource extends AudioSource {
     children.insert(newIndex, children.removeAt(currentIndex));
     _player._broadcastSequence();
     if (_player != null) {
-      await _player
-          ._invokeMethod('concatenating.move', [_id, currentIndex, newIndex]);
+      await (await _player._platform).concatenatingMove(
+          ConcatenatingMoveRequest(
+              id: _id, currentIndex: currentIndex, newIndex: newIndex));
     }
   }
 
@@ -1338,7 +1380,9 @@ class ConcatenatingAudioSource extends AudioSource {
     children.clear();
     _player._broadcastSequence();
     if (_player != null) {
-      await _player._invokeMethod('concatenating.clear', [_id]);
+      await (await _player._platform).concatenatingRemoveRange(
+          ConcatenatingRemoveRangeRequest(
+              id: _id, startIndex: 0, endIndex: children.length));
     }
   }
 
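
Reviewer note: in clear(), children.clear() runs before the request is built,
so endIndex: children.length appears to evaluate to 0 and the platform would
receive removeRange(0, 0), a no-op; capturing the length before clearing would
preserve the old 'concatenating.clear' semantics.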
@@ -1362,6 +1406,12 @@ class ConcatenatingAudioSource extends AudioSource {
         'audioSources': children.map((source) => source.toJson()).toList(),
         'useLazyPreparation': useLazyPreparation,
       };
+
+  @override
+  AudioSourceMessage _toMessage() => ConcatenatingAudioSourceMessage(
+      id: _id,
+      children: children.map((child) => child._toMessage()).toList(),
+      useLazyPreparation: useLazyPreparation);
 }
 
 /// An [AudioSource] that clips the audio of a [UriAudioSource] between a
||||||
/// An [AudioSource] that clips the audio of a [UriAudioSource] between a
|
/// An [AudioSource] that clips the audio of a [UriAudioSource] between a
|
||||||
|
@ -1398,6 +1448,10 @@ class ClippingAudioSource extends IndexedAudioSource {
|
||||||
'start': start?.inMilliseconds,
|
'start': start?.inMilliseconds,
|
||||||
'end': end?.inMilliseconds,
|
'end': end?.inMilliseconds,
|
||||||
};
|
};
|
||||||
|
|
||||||
|
@override
|
||||||
|
AudioSourceMessage _toMessage() => ClippingAudioSourceMessage(
|
||||||
|
id: _id, child: child._toMessage(), start: start, end: end);
|
||||||
}
|
}
|
||||||
|
|
||||||
// An [AudioSource] that loops a nested [AudioSource] a finite number of times.
|
// An [AudioSource] that loops a nested [AudioSource] a finite number of times.
|
||||||
|
@ -1436,6 +1490,10 @@ class LoopingAudioSource extends AudioSource {
|
||||||
'audioSource': child.toJson(),
|
'audioSource': child.toJson(),
|
||||||
'count': count,
|
'count': count,
|
||||||
};
|
};
|
||||||
|
|
||||||
|
@override
|
||||||
|
AudioSourceMessage _toMessage() => LoopingAudioSourceMessage(
|
||||||
|
id: _id, child: child._toMessage(), count: count);
|
||||||
}
|
}
|
||||||
|
|
||||||
enum LoopMode { off, one, all }
|
enum LoopMode { off, one, all }
|
||||||
|
|
|
@@ -100,6 +100,13 @@ packages:
       url: "https://pub.dartlang.org"
     source: hosted
     version: "0.16.1"
+  just_audio_platform_interface:
+    dependency: "direct main"
+    description:
+      path: "../just_audio_platform_interface"
+      relative: true
+    source: path
+    version: "0.0.1"
   matcher:
     dependency: transitive
     description:
@@ -8,6 +8,8 @@ environment:
   flutter: ">=1.12.13+hotfix.5"
 
 dependencies:
+  just_audio_platform_interface:
+    path: ../just_audio_platform_interface
   audio_session: ^0.0.7
   rxdart: ^0.24.1
   path: ^1.6.4
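
Note: the pubspec changes close the loop: the frontend takes a "direct main"
dependency on just_audio_platform_interface via a local path while both
packages live in the same repository; a hosted version constraint would
presumably replace the path reference once the interface package is published.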