Add sequenceStream and sequenceStateStream.

parent b18e2bd530
commit 0a772d1927
@@ -147,6 +147,8 @@ player.playerStateStream.listen((state) {
 // - durationStream
 // - positionStream
 // - bufferedPositionStream
+// - sequenceStateStream
+// - sequenceStream
 // - currentIndexStream
 // - icyMetadataStream
 // - playingStream
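For illustration, a minimal sketch of consuming the new sequenceStateStream listed above; the `player` instance and the AudioMetadata tag type are assumptions carried over from the example app, not part of this diff:

player.sequenceStateStream.listen((state) {
  // The state (or its sequence) may be null or empty before a source is loaded.
  if (state?.sequence?.isEmpty ?? true) return;
  final metadata = state.currentSource.tag as AudioMetadata;
  print('Now playing item ${state.currentIndex}: ${metadata.title}');
});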
@@ -50,11 +50,6 @@ class _MyAppState extends State<MyApp> {
     ),
   ]);
 
-  List<IndexedAudioSource> get _sequence => _playlist.sequence;
-
-  List<AudioMetadata> get _metadataSequence =>
-      _sequence.map((s) => s.tag as AudioMetadata).toList();
-
   @override
   void initState() {
     super.initState();
@@ -92,11 +87,12 @@ class _MyAppState extends State<MyApp> {
             mainAxisAlignment: MainAxisAlignment.center,
             children: [
               Expanded(
-                child: StreamBuilder<int>(
-                  stream: _player.currentIndexStream,
+                child: StreamBuilder<SequenceState>(
+                  stream: _player.sequenceStateStream,
                   builder: (context, snapshot) {
-                    final index = snapshot.data ?? 0;
-                    final metadata = _metadataSequence[index];
+                    final state = snapshot.data;
+                    if (state?.sequence?.isEmpty ?? true) return SizedBox();
+                    final metadata = state.currentSource.tag as AudioMetadata;
                     return Column(
                       crossAxisAlignment: CrossAxisAlignment.center,
                       children: [
@@ -191,17 +187,19 @@ class _MyAppState extends State<MyApp> {
             ),
             Container(
               height: 240.0,
-              child: StreamBuilder<int>(
-                stream: _player.currentIndexStream,
+              child: StreamBuilder<SequenceState>(
+                stream: _player.sequenceStateStream,
                 builder: (context, snapshot) {
-                  final currentIndex = snapshot.data ?? 0;
+                  final state = snapshot.data;
+                  final sequence = state?.sequence ?? [];
                   return ListView.builder(
-                    itemCount: _metadataSequence.length,
+                    itemCount: sequence.length,
                     itemBuilder: (context, index) => Material(
-                      color:
-                          index == currentIndex ? Colors.grey.shade300 : null,
+                      color: index == state.currentIndex
+                          ? Colors.grey.shade300
+                          : null,
                       child: ListTile(
-                        title: Text(_metadataSequence[index].title),
+                        title: Text(sequence[index].tag.title),
                         onTap: () {
                           _player.seek(Duration.zero, index: index);
                         },
@@ -243,8 +241,8 @@ class ControlButtons extends StatelessWidget {
             );
           },
         ),
-        StreamBuilder<int>(
-          stream: player.currentIndexStream,
+        StreamBuilder<SequenceState>(
+          stream: player.sequenceStateStream,
           builder: (context, snapshot) => IconButton(
             icon: Icon(Icons.skip_previous),
             onPressed: player.hasPrevious ? player.seekToPrevious : null,
@@ -285,8 +283,8 @@ class ControlButtons extends StatelessWidget {
             }
           },
         ),
-        StreamBuilder(
-          stream: player.currentIndexStream,
+        StreamBuilder<SequenceState>(
+          stream: player.sequenceStateStream,
           builder: (context, snapshot) => IconButton(
             icon: Icon(Icons.skip_next),
             onPressed: player.hasNext ? player.seekToNext : null,
@@ -1,5 +1,6 @@
 import 'dart:async';
 import 'dart:io';
+import 'dart:math';
 
 import 'package:flutter/foundation.dart';
 import 'package:flutter/services.dart';
@@ -76,7 +77,9 @@ class AudioPlayer {
   final _bufferedPositionSubject = BehaviorSubject<Duration>();
   final _icyMetadataSubject = BehaviorSubject<IcyMetadata>();
   final _playerStateSubject = BehaviorSubject<PlayerState>();
+  final _sequenceSubject = BehaviorSubject<List<IndexedAudioSource>>();
   final _currentIndexSubject = BehaviorSubject<int>();
+  final _sequenceStateSubject = BehaviorSubject<SequenceState>();
   final _loopModeSubject = BehaviorSubject<LoopMode>();
   final _shuffleModeEnabledSubject = BehaviorSubject<bool>();
   BehaviorSubject<Duration> _positionSubject;
@@ -127,10 +130,6 @@ class AudioPlayer {
         rethrow;
       }
     });
-    _eventChannelStreamSubscription = _eventChannelStream.listen(
-      _playbackEventSubject.add,
-      onError: _playbackEventSubject.addError,
-    );
     _processingStateSubject.addStream(playbackEventStream
         .map((event) => event.processingState)
         .distinct()
@@ -147,6 +146,17 @@ class AudioPlayer {
         .map((event) => event.currentIndex)
         .distinct()
         .handleError((err, stack) {/* noop */}));
+    _sequenceStateSubject.addStream(
+        Rx.combineLatest2<List<IndexedAudioSource>, int, SequenceState>(
+      sequenceStream,
+      currentIndexStream,
+      (sequence, currentIndex) {
+        if (sequence == null) return null;
+        if (currentIndex == null) currentIndex = 0;
+        currentIndex = min(sequence.length - 1, max(0, currentIndex));
+        return SequenceState(sequence, currentIndex);
+      },
+    ).distinct().handleError((err, stack) {/* noop */}));
     _playerStateSubject.addStream(
         Rx.combineLatest2<bool, PlaybackEvent, PlayerState>(
             playingStream,
@@ -154,6 +164,11 @@ class AudioPlayer {
             (playing, event) => PlayerState(playing, event.processingState))
         .distinct()
         .handleError((err, stack) {/* noop */}));
+    _eventChannelStreamSubscription = _eventChannelStream.listen(
+      _playbackEventSubject.add,
+      onError: _playbackEventSubject.addError,
+    );
+    _sequenceSubject.add(null);
   }
 
   /// The latest [PlaybackEvent].
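As a side note, a standalone sketch of the Rx.combineLatest2 pattern used above to derive SequenceState; the subjects, list type, and main() here are illustrative only, and only the shape of the combineLatest2 call mirrors the constructor code in this commit:

import 'dart:math';

import 'package:rxdart/rxdart.dart';

void main() {
  final sequence = BehaviorSubject<List<String>>();
  final index = BehaviorSubject<int>();

  // Emits whenever either source emits, combining the latest value of each
  // and clamping the index into the range of the current list.
  Rx.combineLatest2<List<String>, int, String>(sequence, index, (items, i) {
    if (items.isEmpty) return '(empty)';
    return items[min(items.length - 1, max(0, i))];
  }).listen(print);

  sequence.add(['a', 'b', 'c']);
  index.add(5); // clamped to the last index, prints: c
}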
@@ -216,17 +231,31 @@ class AudioPlayer {
   /// A stream of [PlayerState]s.
   Stream<PlayerState> get playerStateStream => _playerStateSubject.stream;
 
+  /// The current sequence of indexed audio sources.
+  List<IndexedAudioSource> get sequence => _sequenceSubject.value;
+
+  /// A stream broadcasting the current sequence of indexed audio sources.
+  Stream<List<IndexedAudioSource>> get sequenceStream =>
+      _sequenceSubject.stream;
+
   /// The index of the current item.
   int get currentIndex => _currentIndexSubject.value;
 
   /// A stream broadcasting the current item.
   Stream<int> get currentIndexStream => _currentIndexSubject.stream;
 
+  /// The current [SequenceState], or `null` if either [sequence] or
+  /// [currentIndex] is `null`.
+  SequenceState get sequenceState => _sequenceStateSubject.value;
+
+  /// A stream broadcasting the current [SequenceState].
+  Stream<SequenceState> get sequenceStateStream => _sequenceStateSubject.stream;
+
   /// Whether there is another item after the current index.
   bool get hasNext =>
       _audioSource != null &&
       currentIndex != null &&
-      currentIndex + 1 < _audioSource.sequence.length;
+      currentIndex + 1 < sequence.length;
 
   /// Whether there is another item before the current index.
   bool get hasPrevious =>
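A brief illustrative sketch of the synchronous getters added above; `player` is assumed to be an AudioPlayer with a loaded playlist, and the values are null until a source has been loaded:

final state = player.sequenceState;
if (state != null) {
  // sequence and sequenceState mirror the latest values of the streams.
  print('Item ${state.currentIndex + 1} of ${player.sequence.length}');
  print('hasNext: ${player.hasNext}, hasPrevious: ${player.hasPrevious}');
}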
@@ -378,6 +407,7 @@ class AudioPlayer {
   Future<Duration> load(AudioSource source) async {
     try {
       _audioSource = source;
+      _broadcastSequence();
       final duration = await _load(source);
       // Wait for loading state to pass.
       await processingStateStream
@@ -390,6 +420,10 @@ class AudioPlayer {
     }
   }
 
+  void _broadcastSequence() {
+    _sequenceSubject.add(_audioSource?.sequence);
+  }
+
   _registerAudioSource(AudioSource source) {
     _audioSources[source._id] = source;
   }
@@ -572,6 +606,7 @@ class AudioPlayer {
     await _playingSubject.close();
     await _volumeSubject.close();
     await _speedSubject.close();
+    await _sequenceSubject.close();
     if (_positionSubject != null) {
       await _positionSubject.close();
     }
@@ -790,6 +825,23 @@ class IcyMetadata {
       other is IcyMetadata && other?.info == info && other?.headers == headers;
 }
 
+/// Encapsulates the [sequence] and [currentIndex] state and ensures
+/// consistency such that [currentIndex] is within the range of
+/// [sequence.length]. If [sequence.length] is 0, then [currentIndex] is also
+/// 0.
+class SequenceState {
+  /// The sequence of the current [AudioSource].
+  final List<IndexedAudioSource> sequence;
+
+  /// The index of the current source in the sequence.
+  final int currentIndex;
+
+  SequenceState(this.sequence, this.currentIndex);
+
+  /// The current source in the sequence.
+  IndexedAudioSource get currentSource => sequence[currentIndex];
+}
+
 /// The audio session categories on iOS, to be used with
 /// [AudioPlayer.setIosCategory].
 enum IosCategory {
@@ -937,7 +989,7 @@ abstract class AudioSource {
   /// stream type on Android. If you know in advance what type of audio stream
   /// it is, you should instantiate [DashAudioSource] or [HlsAudioSource]
   /// directly.
-  static AudioSource uri(Uri uri, {Map headers, Object tag}) {
+  static AudioSource uri(Uri uri, {Map headers, dynamic tag}) {
     bool hasExtension(Uri uri, String extension) =>
         uri.path.toLowerCase().endsWith('.$extension') ||
         uri.fragment.toLowerCase().endsWith('.$extension');
@@ -1003,7 +1055,7 @@ abstract class AudioSource {
 
 /// An [AudioSource] that can appear in a sequence.
 abstract class IndexedAudioSource extends AudioSource {
-  final Object tag;
+  final dynamic tag;
 
   IndexedAudioSource(this.tag);
 
@@ -1019,7 +1071,7 @@ abstract class UriAudioSource extends IndexedAudioSource {
   Uri _overrideUri;
   File _cacheFile;
 
-  UriAudioSource(this.uri, {this.headers, Object tag, @required String type})
+  UriAudioSource(this.uri, {this.headers, dynamic tag, @required String type})
       : _type = type,
         super(tag);
 
@@ -1081,7 +1133,7 @@ abstract class UriAudioSource extends IndexedAudioSource {
 /// On platforms except for the web, the supplied [headers] will be passed with
 /// the HTTP(S) request.
 class ProgressiveAudioSource extends UriAudioSource {
-  ProgressiveAudioSource(Uri uri, {Map headers, Object tag})
+  ProgressiveAudioSource(Uri uri, {Map headers, dynamic tag})
       : super(uri, headers: headers, tag: tag, type: 'progressive');
 }
 
@@ -1091,7 +1143,7 @@ class ProgressiveAudioSource extends UriAudioSource {
 /// the HTTP(S) request. Currently headers are not recursively applied to items
 /// the HTTP(S) request. Currently headers are not applied recursively.
 class DashAudioSource extends UriAudioSource {
-  DashAudioSource(Uri uri, {Map headers, Object tag})
+  DashAudioSource(Uri uri, {Map headers, dynamic tag})
       : super(uri, headers: headers, tag: tag, type: 'dash');
 }
 
@@ -1100,7 +1152,7 @@ class DashAudioSource extends UriAudioSource {
 /// On platforms except for the web, the supplied [headers] will be passed with
 /// the HTTP(S) request. Currently headers are not applied recursively.
 class HlsAudioSource extends UriAudioSource {
-  HlsAudioSource(Uri uri, {Map headers, Object tag})
+  HlsAudioSource(Uri uri, {Map headers, dynamic tag})
       : super(uri, headers: headers, tag: tag, type: 'hls');
 }
 
@@ -1131,6 +1183,7 @@ class ConcatenatingAudioSource extends AudioSource {
   /// (Untested) Appends an [AudioSource].
   Future<void> add(AudioSource audioSource) async {
     children.add(audioSource);
+    _player._broadcastSequence();
     if (_player != null) {
       await _player
           ._invokeMethod('concatenating.add', [_id, audioSource.toJson()]);
@@ -1140,6 +1193,7 @@ class ConcatenatingAudioSource extends AudioSource {
   /// (Untested) Inserts an [AudioSource] at [index].
   Future<void> insert(int index, AudioSource audioSource) async {
     children.insert(index, audioSource);
+    _player._broadcastSequence();
     if (_player != null) {
       await _player._invokeMethod(
           'concatenating.insert', [_id, index, audioSource.toJson()]);
@@ -1149,6 +1203,7 @@ class ConcatenatingAudioSource extends AudioSource {
   /// (Untested) Appends multiple [AudioSource]s.
   Future<void> addAll(List<AudioSource> children) async {
     this.children.addAll(children);
+    _player._broadcastSequence();
     if (_player != null) {
       await _player._invokeMethod('concatenating.addAll',
           [_id, children.map((s) => s.toJson()).toList()]);
@@ -1158,6 +1213,7 @@ class ConcatenatingAudioSource extends AudioSource {
   /// (Untested) Insert multiple [AudioSource]s at [index].
   Future<void> insertAll(int index, List<AudioSource> children) async {
     this.children.insertAll(index, children);
+    _player._broadcastSequence();
     if (_player != null) {
       await _player._invokeMethod('concatenating.insertAll',
           [_id, index, children.map((s) => s.toJson()).toList()]);
@@ -1168,6 +1224,7 @@ class ConcatenatingAudioSource extends AudioSource {
   /// [ConcatenatingAudioSource] has already been loaded.
   Future<void> removeAt(int index) async {
     children.removeAt(index);
+    _player._broadcastSequence();
     if (_player != null) {
       await _player._invokeMethod('concatenating.removeAt', [_id, index]);
     }
@@ -1177,6 +1234,7 @@ class ConcatenatingAudioSource extends AudioSource {
   /// to [end] exclusive.
   Future<void> removeRange(int start, int end) async {
     children.removeRange(start, end);
+    _player._broadcastSequence();
     if (_player != null) {
       await _player
           ._invokeMethod('concatenating.removeRange', [_id, start, end]);
@@ -1186,6 +1244,7 @@ class ConcatenatingAudioSource extends AudioSource {
   /// (Untested) Moves an [AudioSource] from [currentIndex] to [newIndex].
   Future<void> move(int currentIndex, int newIndex) async {
     children.insert(newIndex, children.removeAt(currentIndex));
+    _player._broadcastSequence();
     if (_player != null) {
       await _player
           ._invokeMethod('concatenating.move', [_id, currentIndex, newIndex]);
@@ -1195,6 +1254,7 @@ class ConcatenatingAudioSource extends AudioSource {
   /// (Untested) Removes all [AudioSources].
   Future<void> clear() async {
     children.clear();
+    _player._broadcastSequence();
     if (_player != null) {
       await _player._invokeMethod('concatenating.clear', [_id]);
     }
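For illustration, a hedged sketch of how these playlist edits surface through the new streams; the demo() helper, URLs, and tags are assumptions, not code from this commit:

import 'package:just_audio/just_audio.dart';

Future<void> demo(AudioPlayer player) async {
  final playlist = ConcatenatingAudioSource(children: [
    ProgressiveAudioSource(Uri.parse('https://example.com/a.mp3'), tag: 'A'),
  ]);
  player.sequenceStream.listen(
      (sequence) => print('Sequence now has ${sequence?.length ?? 0} item(s)'));
  await player.load(playlist);

  // add() mutates the playlist and calls _broadcastSequence(), so listeners
  // of sequenceStream and sequenceStateStream see the updated list.
  await playlist.add(
      ProgressiveAudioSource(Uri.parse('https://example.com/b.mp3'), tag: 'B'));
}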
@@ -1236,7 +1296,7 @@ class ClippingAudioSource extends IndexedAudioSource {
     @required this.child,
     this.start,
     this.end,
-    Object tag,
+    dynamic tag,
   }) : super(tag);
 
   @override
|
|
Loading…
Reference in New Issue