New state model, processingState + playing

This commit is contained in:
Ryan Heise 2020-08-04 00:16:15 +10:00
parent 6d14d257a0
commit e4789d9cd2
7 changed files with 620 additions and 644 deletions

View File

@ -12,7 +12,8 @@ A Flutter plugin to play audio from URLs, files, assets, DASH/HLS streams and pl
| request headers | ✅ | ✅ | ✅ | | | request headers | ✅ | ✅ | ✅ | |
| DASH | ✅ | (untested) | (untested) | (untested) | | DASH | ✅ | (untested) | (untested) | (untested) |
| HLS | ✅ | ✅ | (untested) | (untested) | | HLS | ✅ | ✅ | (untested) | (untested) |
| play/pause/stop/seek | ✅ | ✅ | ✅ | ✅ | | buffer status/position | ✅ | ✅ | (untested) | ✅ |
| play/pause/seek | ✅ | ✅ | ✅ | ✅ |
| set volume | ✅ | ✅ | (untested) | ✅ | | set volume | ✅ | ✅ | (untested) | ✅ |
| set speed | ✅ | ✅ | ✅ | ✅ | | set speed | ✅ | ✅ | ✅ | ✅ |
| clip audio | ✅ | ✅ | (untested) | ✅ | | clip audio | ✅ | ✅ | (untested) | ✅ |
@ -23,7 +24,7 @@ A Flutter plugin to play audio from URLs, files, assets, DASH/HLS streams and pl
| gapless playback | ✅ | ✅ | (untested) | | | gapless playback | ✅ | ✅ | (untested) | |
| report player errors | ✅ | ✅ | ✅ | ✅ | | report player errors | ✅ | ✅ | ✅ | ✅ |
This plugin has been tested on Android and Web, and is being made available for testing on iOS and MacOS. Please consider reporting any bugs you encounter [here](https://github.com/ryanheise/just_audio/issues) or submitting pull requests [here](https://github.com/ryanheise/just_audio/pulls). This plugin has been tested on Android, iOS and Web, and is being made available for testing on MacOS. Please consider reporting any bugs you encounter [here](https://github.com/ryanheise/just_audio/issues) or submitting pull requests [here](https://github.com/ryanheise/just_audio/pulls).
## Example ## Example
@ -40,7 +41,6 @@ Standard controls:
player.play(); // Usually you don't want to wait for playback to finish. player.play(); // Usually you don't want to wait for playback to finish.
await player.seek(Duration(seconds: 10)); await player.seek(Duration(seconds: 10));
await player.pause(); await player.pause();
await player.stop();
``` ```
Clipping audio: Clipping audio:
@ -122,28 +122,30 @@ try {
Listening to state changes: Listening to state changes:
```dart ```dart
player.playbackStateStream.listen((state) { player.playerStateStream.listen((state) {
switch (state) { if (state.playing) ... else ...
switch (state.processingState) {
case AudioPlaybackState.none: ... case AudioPlaybackState.none: ...
case AudioPlaybackState.stopped: ... case AudioPlaybackState.loading: ...
case AudioPlaybackState.paused: ... case AudioPlaybackState.buffering: ...
case AudioPlaybackState.playing: ... case AudioPlaybackState.ready: ...
case AudioPlaybackState.connecting: ...
case AudioPlaybackState.completed: ... case AudioPlaybackState.completed: ...
} }
}); });
// See also: // See also:
// - durationStream // - durationStream
// - bufferingStream // - positionStream
// - icyMetadataStream
// - bufferedPositionStream // - bufferedPositionStream
// - fullPlaybackStateStream
// - playbackEventStream
// - currentIndexStream // - currentIndexStream
// - icyMetadataStream
// - playingStream
// - processingStateStream
// - loopModeStream // - loopModeStream
// - shuffleModeEnabledStream // - shuffleModeEnabledStream
// - durationStream // - volumeStream
// - speedStream
// - playbackEventStream
``` ```
## Platform specific configuration ## Platform specific configuration

View File

@ -58,21 +58,19 @@ public class AudioPlayer implements MethodCallHandler, Player.EventListener, Met
private final EventChannel eventChannel; private final EventChannel eventChannel;
private EventSink eventSink; private EventSink eventSink;
private volatile PlaybackState state; private ProcessingState processingState;
private long updateTime; private long updateTime;
private long updatePosition; private long updatePosition;
private long bufferedPosition; private long bufferedPosition;
private long duration; private long duration;
private Long start; private Long start;
private Long end; private Long end;
private float volume = 1.0f;
private float speed = 1.0f;
private Long seekPos; private Long seekPos;
private Result prepareResult; private Result prepareResult;
private Result playResult;
private Result seekResult; private Result seekResult;
private boolean seekProcessed; private boolean seekProcessed;
private boolean buffering; private boolean playing;
private boolean justConnected;
private Map<String, MediaSource> mediaSources = new HashMap<String, MediaSource>(); private Map<String, MediaSource> mediaSources = new HashMap<String, MediaSource>();
private IcyInfo icyInfo; private IcyInfo icyInfo;
private IcyHeaders icyHeaders; private IcyHeaders icyHeaders;
@ -95,15 +93,18 @@ public class AudioPlayer implements MethodCallHandler, Player.EventListener, Met
bufferedPosition = newBufferedPosition; bufferedPosition = newBufferedPosition;
broadcastPlaybackEvent(); broadcastPlaybackEvent();
} }
if (buffering) { switch (processingState) {
case buffering:
handler.postDelayed(this, 200); handler.postDelayed(this, 200);
} else if (state == PlaybackState.playing) { break;
case ready:
if (playing) {
handler.postDelayed(this, 500); handler.postDelayed(this, 500);
} else if (state == PlaybackState.paused) { } else {
handler.postDelayed(this, 1000);
} else if (justConnected) {
handler.postDelayed(this, 1000); handler.postDelayed(this, 1000);
} }
break;
}
} }
}; };
@ -127,7 +128,7 @@ public class AudioPlayer implements MethodCallHandler, Player.EventListener, Met
eventSink = null; eventSink = null;
} }
}); });
state = PlaybackState.none; processingState = ProcessingState.none;
} }
private void startWatchingBuffer() { private void startWatchingBuffer() {
@ -198,31 +199,31 @@ public class AudioPlayer implements MethodCallHandler, Player.EventListener, Met
case Player.STATE_READY: case Player.STATE_READY:
if (prepareResult != null) { if (prepareResult != null) {
duration = getDuration(); duration = getDuration();
justConnected = true; transition(ProcessingState.ready);
transition(PlaybackState.stopped);
prepareResult.success(duration); prepareResult.success(duration);
prepareResult = null; prepareResult = null;
} else {
transition(ProcessingState.ready);
} }
if (seekProcessed) { if (seekProcessed) {
completeSeek(); completeSeek();
} }
break; break;
case Player.STATE_ENDED: case Player.STATE_BUFFERING:
if (state != PlaybackState.completed) { if (processingState != ProcessingState.buffering) {
player.setPlayWhenReady(false); transition(ProcessingState.buffering);
transition(PlaybackState.completed);
}
break;
}
final boolean buffering = playbackState == Player.STATE_BUFFERING;
// don't notify buffering if (buffering && state == stopped)
final boolean notifyBuffering = !buffering || state != PlaybackState.stopped;
if (notifyBuffering && (buffering != this.buffering)) {
this.buffering = buffering;
broadcastPlaybackEvent();
if (buffering) {
startWatchingBuffer(); startWatchingBuffer();
} }
break;
case Player.STATE_ENDED:
if (processingState != ProcessingState.completed) {
transition(ProcessingState.completed);
}
if (playResult != null) {
playResult.success(null);
playResult = null;
}
break;
} }
} }
@ -275,16 +276,12 @@ public class AudioPlayer implements MethodCallHandler, Player.EventListener, Met
load(getAudioSource(args.get(0)), result); load(getAudioSource(args.get(0)), result);
break; break;
case "play": case "play":
play(); play(result);
result.success(null);
break; break;
case "pause": case "pause":
pause(); pause();
result.success(null); result.success(null);
break; break;
case "stop":
stop(result);
break;
case "setVolume": case "setVolume":
setVolume((float) ((double) ((Double) args.get(0)))); setVolume((float) ((double) ((Double) args.get(0))));
result.success(null); result.success(null);
@ -497,22 +494,19 @@ public class AudioPlayer implements MethodCallHandler, Player.EventListener, Met
} }
private void load(final MediaSource mediaSource, final Result result) { private void load(final MediaSource mediaSource, final Result result) {
justConnected = false; switch (processingState) {
switch (state) {
case none: case none:
break; break;
case connecting: case loading:
abortExistingConnection(); abortExistingConnection();
player.stop(); player.stop();
player.setPlayWhenReady(false);
break; break;
default: default:
player.stop(); player.stop();
player.setPlayWhenReady(false);
break; break;
} }
prepareResult = result; prepareResult = result;
transition(PlaybackState.connecting); transition(ProcessingState.loading);
if (player.getShuffleModeEnabled()) { if (player.getShuffleModeEnabled()) {
setShuffleOrder(mediaSource, 0); setShuffleOrder(mediaSource, 0);
} }
@ -530,8 +524,7 @@ public class AudioPlayer implements MethodCallHandler, Player.EventListener, Met
private void broadcastPlaybackEvent() { private void broadcastPlaybackEvent() {
final Map<String, Object> event = new HashMap<String, Object>(); final Map<String, Object> event = new HashMap<String, Object>();
event.put("state", state.ordinal()); event.put("processingState", processingState.ordinal());
event.put("buffering", buffering);
event.put("updatePosition", updatePosition = getCurrentPosition()); event.put("updatePosition", updatePosition = getCurrentPosition());
event.put("updateTime", updateTime = System.currentTimeMillis()); event.put("updateTime", updateTime = System.currentTimeMillis());
event.put("bufferedPosition", Math.max(updatePosition, bufferedPosition)); event.put("bufferedPosition", Math.max(updatePosition, bufferedPosition));
@ -566,7 +559,7 @@ public class AudioPlayer implements MethodCallHandler, Player.EventListener, Met
} }
private long getCurrentPosition() { private long getCurrentPosition() {
if (state == PlaybackState.none || state == PlaybackState.connecting) { if (processingState == ProcessingState.none || processingState == ProcessingState.loading) {
return 0; return 0;
} else if (seekPos != null && seekPos != C.TIME_UNSET) { } else if (seekPos != null && seekPos != C.TIME_UNSET) {
return seekPos; return seekPos;
@ -576,7 +569,7 @@ public class AudioPlayer implements MethodCallHandler, Player.EventListener, Met
} }
private long getDuration() { private long getDuration() {
if (state == PlaybackState.none || state == PlaybackState.connecting) { if (processingState == ProcessingState.none || processingState == ProcessingState.loading) {
return C.TIME_UNSET; return C.TIME_UNSET;
} else { } else {
return player.getDuration(); return player.getDuration();
@ -594,9 +587,8 @@ public class AudioPlayer implements MethodCallHandler, Player.EventListener, Met
} }
} }
private void transition(final PlaybackState newState) { private void transition(final ProcessingState newState) {
final PlaybackState oldState = state; processingState = newState;
state = newState;
broadcastPlaybackEvent(); broadcastPlaybackEvent();
} }
@ -610,70 +602,35 @@ public class AudioPlayer implements MethodCallHandler, Player.EventListener, Met
return filename.replaceAll("^.*\\.", "").toLowerCase(); return filename.replaceAll("^.*\\.", "").toLowerCase();
} }
public void play() { public void play(Result result) {
switch (state) { if (player.getPlayWhenReady()) return;
case playing: if (playResult != null) {
break; result.success(null);
case stopped: } else {
case completed: playResult = result;
case paused: }
justConnected = false;
transition(PlaybackState.playing);
startWatchingBuffer(); startWatchingBuffer();
player.setPlayWhenReady(true); player.setPlayWhenReady(true);
break; if (processingState == ProcessingState.completed && playResult != null) {
default: playResult.success(null);
throw new IllegalStateException( playResult = null;
"Cannot call play from connecting/none states (" + state + ")");
} }
} }
public void pause() { public void pause() {
switch (state) { if (!player.getPlayWhenReady()) return;
case paused:
break;
case playing:
player.setPlayWhenReady(false); player.setPlayWhenReady(false);
transition(PlaybackState.paused); if (playResult != null) {
break; playResult.success(null);
default: playResult = null;
throw new IllegalStateException(
"Can call pause only from playing and buffering states (" + state + ")");
}
}
public void stop(final Result result) {
switch (state) {
case stopped:
result.success(null);
break;
case connecting:
abortExistingConnection();
buffering = false;
transition(PlaybackState.stopped);
result.success(null);
break;
case completed:
case playing:
case paused:
abortSeek();
player.setPlayWhenReady(false);
transition(PlaybackState.stopped);
player.seekTo(0L);
result.success(null);
break;
default:
throw new IllegalStateException("Cannot call stop from none state");
} }
} }
public void setVolume(final float volume) { public void setVolume(final float volume) {
this.volume = volume;
player.setVolume(volume); player.setVolume(volume);
} }
public void setSpeed(final float speed) { public void setSpeed(final float speed) {
this.speed = speed;
player.setPlaybackParameters(new PlaybackParameters(speed)); player.setPlaybackParameters(new PlaybackParameters(speed));
broadcastPlaybackEvent(); broadcastPlaybackEvent();
} }
@ -690,8 +647,8 @@ public class AudioPlayer implements MethodCallHandler, Player.EventListener, Met
} }
public void seek(final long position, final Result result, final Integer index) { public void seek(final long position, final Result result, final Integer index) {
if (state == PlaybackState.none || state == PlaybackState.connecting) { if (processingState == ProcessingState.none || processingState == ProcessingState.loading) {
throw new IllegalStateException("Cannot call seek from none/connecting states"); return;
} }
abortSeek(); abortSeek();
seekPos = position; seekPos = position;
@ -708,8 +665,7 @@ public class AudioPlayer implements MethodCallHandler, Player.EventListener, Met
if (player != null) { if (player != null) {
player.release(); player.release();
player = null; player = null;
buffering = false; transition(ProcessingState.none);
transition(PlaybackState.none);
} }
onDispose.run(); onDispose.run();
} }
@ -731,12 +687,11 @@ public class AudioPlayer implements MethodCallHandler, Player.EventListener, Met
return (o == null || o instanceof Long) ? (Long)o : new Long(((Integer)o).intValue()); return (o == null || o instanceof Long) ? (Long)o : new Long(((Integer)o).intValue());
} }
enum PlaybackState { enum ProcessingState {
none, none,
stopped, loading,
paused, buffering,
playing, ready,
connecting,
completed completed
} }
} }

View File

@ -101,24 +101,29 @@ class _MyAppState extends State<MyApp> {
); );
}, },
), ),
StreamBuilder<FullAudioPlaybackState>( StreamBuilder<PlayerState>(
stream: _player.fullPlaybackStateStream, stream: _player.playerStateStream,
builder: (context, snapshot) { builder: (context, snapshot) {
final fullState = snapshot.data; final playerState = snapshot.data;
final state = fullState?.state; final processingState = playerState?.processingState;
final buffering = fullState?.buffering; final playing = playerState?.playing;
return Row( return Row(
mainAxisSize: MainAxisSize.min, mainAxisSize: MainAxisSize.min,
children: [ children: [
if (state == AudioPlaybackState.connecting || if (processingState == ProcessingState.buffering)
buffering == true)
Container( Container(
margin: EdgeInsets.all(8.0), margin: EdgeInsets.all(8.0),
width: 64.0, width: 64.0,
height: 64.0, height: 64.0,
child: CircularProgressIndicator(), child: CircularProgressIndicator(),
) )
else if (state == AudioPlaybackState.playing) else if (playing != true)
IconButton(
icon: Icon(Icons.play_arrow),
iconSize: 64.0,
onPressed: _player.play,
)
else if (processingState != ProcessingState.completed)
IconButton( IconButton(
icon: Icon(Icons.pause), icon: Icon(Icons.pause),
iconSize: 64.0, iconSize: 64.0,
@ -126,17 +131,10 @@ class _MyAppState extends State<MyApp> {
) )
else else
IconButton( IconButton(
icon: Icon(Icons.play_arrow), icon: Icon(Icons.replay),
iconSize: 64.0, iconSize: 64.0,
onPressed: _player.play, onPressed: () =>
), _player.seek(Duration.zero, index: 0),
IconButton(
icon: Icon(Icons.stop),
iconSize: 64.0,
onPressed: state == AudioPlaybackState.stopped ||
state == AudioPlaybackState.none
? null
: _player.stop,
), ),
], ],
); );
@ -148,7 +146,7 @@ class _MyAppState extends State<MyApp> {
builder: (context, snapshot) { builder: (context, snapshot) {
final duration = snapshot.data ?? Duration.zero; final duration = snapshot.data ?? Duration.zero;
return StreamBuilder<Duration>( return StreamBuilder<Duration>(
stream: _player.getPositionStream(), stream: _player.positionStream,
builder: (context, snapshot) { builder: (context, snapshot) {
var position = snapshot.data ?? Duration.zero; var position = snapshot.data ?? Duration.zero;
if (position > duration) { if (position > duration) {
@ -308,10 +306,10 @@ class _SeekBarState extends State<SeekBar> {
} }
}, },
onChangeEnd: (value) { onChangeEnd: (value) {
_dragValue = null;
if (widget.onChangeEnd != null) { if (widget.onChangeEnd != null) {
widget.onChangeEnd(Duration(milliseconds: value.round())); widget.onChangeEnd(Duration(milliseconds: value.round()));
} }
_dragValue = null;
}, },
); );
} }

View File

@ -1,27 +1,13 @@
# Generated by pub # Generated by pub
# See https://dart.dev/tools/pub/glossary#lockfile # See https://dart.dev/tools/pub/glossary#lockfile
packages: packages:
archive:
dependency: transitive
description:
name: archive
url: "https://pub.dartlang.org"
source: hosted
version: "2.0.13"
args:
dependency: transitive
description:
name: args
url: "https://pub.dartlang.org"
source: hosted
version: "1.6.0"
async: async:
dependency: transitive dependency: transitive
description: description:
name: async name: async
url: "https://pub.dartlang.org" url: "https://pub.dartlang.org"
source: hosted source: hosted
version: "2.4.1" version: "2.4.2"
boolean_selector: boolean_selector:
dependency: transitive dependency: transitive
description: description:
@ -29,6 +15,13 @@ packages:
url: "https://pub.dartlang.org" url: "https://pub.dartlang.org"
source: hosted source: hosted
version: "2.0.0" version: "2.0.0"
characters:
dependency: transitive
description:
name: characters
url: "https://pub.dartlang.org"
source: hosted
version: "1.0.0"
charcode: charcode:
dependency: transitive dependency: transitive
description: description:
@ -36,13 +29,20 @@ packages:
url: "https://pub.dartlang.org" url: "https://pub.dartlang.org"
source: hosted source: hosted
version: "1.1.3" version: "1.1.3"
clock:
dependency: transitive
description:
name: clock
url: "https://pub.dartlang.org"
source: hosted
version: "1.0.1"
collection: collection:
dependency: transitive dependency: transitive
description: description:
name: collection name: collection
url: "https://pub.dartlang.org" url: "https://pub.dartlang.org"
source: hosted source: hosted
version: "1.14.12" version: "1.14.13"
convert: convert:
dependency: transitive dependency: transitive
description: description:
@ -64,6 +64,13 @@ packages:
url: "https://pub.dartlang.org" url: "https://pub.dartlang.org"
source: hosted source: hosted
version: "0.1.3" version: "0.1.3"
fake_async:
dependency: transitive
description:
name: fake_async
url: "https://pub.dartlang.org"
source: hosted
version: "1.1.0"
file: file:
dependency: transitive dependency: transitive
description: description:
@ -86,13 +93,6 @@ packages:
description: flutter description: flutter
source: sdk source: sdk
version: "0.0.0" version: "0.0.0"
image:
dependency: transitive
description:
name: image
url: "https://pub.dartlang.org"
source: hosted
version: "2.1.12"
intl: intl:
dependency: transitive dependency: transitive
description: description:
@ -113,7 +113,7 @@ packages:
name: matcher name: matcher
url: "https://pub.dartlang.org" url: "https://pub.dartlang.org"
source: hosted source: hosted
version: "0.12.6" version: "0.12.8"
meta: meta:
dependency: transitive dependency: transitive
description: description:
@ -127,7 +127,7 @@ packages:
name: path name: path
url: "https://pub.dartlang.org" url: "https://pub.dartlang.org"
source: hosted source: hosted
version: "1.6.4" version: "1.7.0"
path_provider: path_provider:
dependency: transitive dependency: transitive
description: description:
@ -156,13 +156,6 @@ packages:
url: "https://pub.dartlang.org" url: "https://pub.dartlang.org"
source: hosted source: hosted
version: "1.0.2" version: "1.0.2"
petitparser:
dependency: transitive
description:
name: petitparser
url: "https://pub.dartlang.org"
source: hosted
version: "2.4.0"
platform: platform:
dependency: transitive dependency: transitive
description: description:
@ -184,13 +177,6 @@ packages:
url: "https://pub.dartlang.org" url: "https://pub.dartlang.org"
source: hosted source: hosted
version: "3.0.13" version: "3.0.13"
quiver:
dependency: transitive
description:
name: quiver
url: "https://pub.dartlang.org"
source: hosted
version: "2.1.3"
rxdart: rxdart:
dependency: "direct main" dependency: "direct main"
description: description:
@ -216,7 +202,7 @@ packages:
name: stack_trace name: stack_trace
url: "https://pub.dartlang.org" url: "https://pub.dartlang.org"
source: hosted source: hosted
version: "1.9.3" version: "1.9.5"
stream_channel: stream_channel:
dependency: transitive dependency: transitive
description: description:
@ -244,14 +230,14 @@ packages:
name: test_api name: test_api
url: "https://pub.dartlang.org" url: "https://pub.dartlang.org"
source: hosted source: hosted
version: "0.2.15" version: "0.2.17"
typed_data: typed_data:
dependency: transitive dependency: transitive
description: description:
name: typed_data name: typed_data
url: "https://pub.dartlang.org" url: "https://pub.dartlang.org"
source: hosted source: hosted
version: "1.1.6" version: "1.2.0"
uuid: uuid:
dependency: transitive dependency: transitive
description: description:
@ -273,13 +259,6 @@ packages:
url: "https://pub.dartlang.org" url: "https://pub.dartlang.org"
source: hosted source: hosted
version: "0.1.0" version: "0.1.0"
xml:
dependency: transitive
description:
name: xml
url: "https://pub.dartlang.org"
source: hosted
version: "3.6.1"
sdks: sdks:
dart: ">=2.6.0 <3.0.0" dart: ">=2.9.0-14.0.dev <3.0.0"
flutter: ">=1.12.13+hotfix.5 <2.0.0" flutter: ">=1.12.13+hotfix.5 <2.0.0"

View File

@ -17,35 +17,18 @@ final _uuid = Uuid();
/// final player = AudioPlayer(); /// final player = AudioPlayer();
/// await player.setUrl('https://foo.com/bar.mp3'); /// await player.setUrl('https://foo.com/bar.mp3');
/// player.play(); /// player.play();
/// player.pause(); /// await player.pause();
/// player.play();
/// await player.stop();
/// await player.setClip(start: Duration(seconds: 10), end: Duration(seconds: 20)); /// await player.setClip(start: Duration(seconds: 10), end: Duration(seconds: 20));
/// await player.play(); /// await player.play();
/// await player.setUrl('https://foo.com/baz.mp3'); /// await player.setUrl('https://foo.com/baz.mp3');
/// await player.seek(Duration(minutes: 5)); /// await player.seek(Duration(minutes: 5));
/// player.play(); /// player.play();
/// await player.stop(); /// await player.pause();
/// await player.dispose(); /// await player.dispose();
/// ``` /// ```
/// ///
/// You must call [dispose] to release the resources used by this player, /// You must call [dispose] to release the resources used by this player,
/// including any temporary files created to cache assets. /// including any temporary files created to cache assets.
///
/// The [AudioPlayer] instance transitions through different states as follows:
///
/// * [AudioPlaybackState.none]: immediately after instantiation and [dispose].
/// * [AudioPlaybackState.stopped]: eventually after [load] completes, and
/// immediately after [stop].
/// * [AudioPlaybackState.paused]: after [pause].
/// * [AudioPlaybackState.playing]: after [play].
/// * [AudioPlaybackState.connecting]: immediately after [load] while waiting
/// for the media to load.
/// * [AudioPlaybackState.completed]: immediately after playback reaches the
/// end of the media or the end of the clip.
///
/// Additionally, after a [seek] request completes, the state will return to
/// whatever state the player was in prior to the seek request.
class AudioPlayer { class AudioPlayer {
static final _mainChannel = MethodChannel('com.ryanheise.just_audio.methods'); static final _mainChannel = MethodChannel('com.ryanheise.just_audio.methods');
@ -75,64 +58,44 @@ class AudioPlayer {
} }
final Future<MethodChannel> _channel; final Future<MethodChannel> _channel;
_ProxyHttpServer _proxy;
final String _id; final String _id;
_ProxyHttpServer _proxy;
Future<Duration> _durationFuture; Stream<PlaybackEvent> _eventChannelStream;
final _durationSubject = BehaviorSubject<Duration>();
// TODO: also broadcast this event on instantiation.
AudioPlaybackEvent _audioPlaybackEvent = AudioPlaybackEvent(
state: AudioPlaybackState.none,
buffering: false,
updatePosition: Duration.zero,
updateTime: Duration.zero,
bufferedPosition: Duration.zero,
speed: 1.0,
duration: null,
icyMetadata: null,
currentIndex: null,
);
Stream<AudioPlaybackEvent> _eventChannelStream;
StreamSubscription<AudioPlaybackEvent> _eventChannelStreamSubscription;
final _playbackEventSubject = BehaviorSubject<AudioPlaybackEvent>();
final _playbackStateSubject = BehaviorSubject<AudioPlaybackState>();
final _bufferingSubject = BehaviorSubject<bool>();
final _bufferedPositionSubject = BehaviorSubject<Duration>();
final _icyMetadataSubject = BehaviorSubject<IcyMetadata>();
final _fullPlaybackStateSubject = BehaviorSubject<FullAudioPlaybackState>();
final _currentIndexSubject = BehaviorSubject<int>();
final _loopModeSubject = BehaviorSubject<LoopMode>();
final _shuffleModeEnabledSubject = BehaviorSubject<bool>();
double _volume = 1.0;
double _speed = 1.0;
bool _automaticallyWaitsToMinimizeStalling = true;
AudioSource _audioSource; AudioSource _audioSource;
Map<String, AudioSource> _audioSources = {}; Map<String, AudioSource> _audioSources = {};
PlaybackEvent _playbackEvent;
StreamSubscription<PlaybackEvent> _eventChannelStreamSubscription;
final _playbackEventSubject = BehaviorSubject<PlaybackEvent>();
Future<Duration> _durationFuture;
final _durationSubject = BehaviorSubject<Duration>();
final _processingStateSubject = BehaviorSubject<ProcessingState>();
final _playingSubject = BehaviorSubject.seeded(false);
final _volumeSubject = BehaviorSubject.seeded(1.0);
final _speedSubject = BehaviorSubject.seeded(1.0);
final _bufferedPositionSubject = BehaviorSubject<Duration>();
final _icyMetadataSubject = BehaviorSubject<IcyMetadata>();
final _playerStateSubject = BehaviorSubject<PlayerState>();
final _currentIndexSubject = BehaviorSubject<int>();
final _loopModeSubject = BehaviorSubject<LoopMode>();
final _shuffleModeEnabledSubject = BehaviorSubject<bool>();
BehaviorSubject<Duration> _positionSubject;
bool _automaticallyWaitsToMinimizeStalling = true;
/// Creates an [AudioPlayer]. /// Creates an [AudioPlayer].
factory AudioPlayer() => AudioPlayer._internal(_uuid.v4()); factory AudioPlayer() => AudioPlayer._internal(_uuid.v4());
AudioPlayer._internal(this._id) : _channel = _init(_id) { AudioPlayer._internal(this._id) : _channel = _init(_id) {
_playbackEvent = PlaybackEvent(
processingState: ProcessingState.none,
updatePosition: Duration.zero,
updateTime: DateTime.now(),
bufferedPosition: Duration.zero,
duration: null,
icyMetadata: null,
currentIndex: null,
);
_playbackEventSubject.add(_playbackEvent);
_eventChannelStream = EventChannel('com.ryanheise.just_audio.events.$_id') _eventChannelStream = EventChannel('com.ryanheise.just_audio.events.$_id')
.receiveBroadcastStream() .receiveBroadcastStream()
.map((data) { .map((data) {
@ -142,22 +105,22 @@ class AudioPlayer {
? null ? null
: Duration(milliseconds: data['duration']); : Duration(milliseconds: data['duration']);
_durationFuture = Future.value(duration); _durationFuture = Future.value(duration);
if (duration != _playbackEvent.duration) {
_durationSubject.add(duration); _durationSubject.add(duration);
_audioPlaybackEvent = AudioPlaybackEvent( }
state: AudioPlaybackState.values[data['state']], _playbackEvent = PlaybackEvent(
buffering: data['buffering'], processingState: ProcessingState.values[data['processingState']],
updatePosition: Duration(milliseconds: data['updatePosition']), updatePosition: Duration(milliseconds: data['updatePosition']),
updateTime: Duration(milliseconds: data['updateTime']), updateTime: DateTime.fromMillisecondsSinceEpoch(data['updateTime']),
bufferedPosition: Duration(milliseconds: data['bufferedPosition']), bufferedPosition: Duration(milliseconds: data['bufferedPosition']),
speed: _speed,
duration: duration, duration: duration,
icyMetadata: data['icyMetadata'] == null icyMetadata: data['icyMetadata'] == null
? null ? null
: IcyMetadata.fromJson(data['icyMetadata']), : IcyMetadata.fromJson(data['icyMetadata']),
currentIndex: data['currentIndex'], currentIndex: data['currentIndex'],
); );
//print("created event object with state: ${_audioPlaybackEvent.state}"); //print("created event object with state: ${_playbackEvent.state}");
return _audioPlaybackEvent; return _playbackEvent;
} catch (e, stacktrace) { } catch (e, stacktrace) {
print("Error parsing event: $e"); print("Error parsing event: $e");
print("$stacktrace"); print("$stacktrace");
@ -166,110 +129,208 @@ class AudioPlayer {
}); });
_eventChannelStreamSubscription = _eventChannelStream.listen( _eventChannelStreamSubscription = _eventChannelStream.listen(
_playbackEventSubject.add, _playbackEventSubject.add,
onError: _playbackEventSubject.addError); onError: _playbackEventSubject.addError,
_playbackStateSubject.addStream(playbackEventStream );
.map((state) => state.state) _processingStateSubject.addStream(playbackEventStream
.distinct() .map((event) => event.processingState)
.handleError((err, stack) {/* noop */}));
_bufferingSubject.addStream(playbackEventStream
.map((state) => state.buffering)
.distinct() .distinct()
.handleError((err, stack) {/* noop */})); .handleError((err, stack) {/* noop */}));
_bufferedPositionSubject.addStream(playbackEventStream _bufferedPositionSubject.addStream(playbackEventStream
.map((state) => state.bufferedPosition) .map((event) => event.bufferedPosition)
.distinct() .distinct()
.handleError((err, stack) {/* noop */})); .handleError((err, stack) {/* noop */}));
_icyMetadataSubject.addStream(playbackEventStream _icyMetadataSubject.addStream(playbackEventStream
.map((state) => state.icyMetadata) .map((event) => event.icyMetadata)
.distinct() .distinct()
.handleError((err, stack) {/* noop */})); .handleError((err, stack) {/* noop */}));
_currentIndexSubject.addStream(playbackEventStream _currentIndexSubject.addStream(playbackEventStream
.map((state) => state.currentIndex) .map((event) => event.currentIndex)
.distinct() .distinct()
.handleError((err, stack) {/* noop */})); .handleError((err, stack) {/* noop */}));
_fullPlaybackStateSubject.addStream(playbackEventStream _playerStateSubject.addStream(
.map((event) => FullAudioPlaybackState( Rx.combineLatest2<bool, PlaybackEvent, PlayerState>(
event.state, event.buffering, event.icyMetadata)) playingStream,
playbackEventStream,
(playing, event) => PlayerState(playing, event.processingState))
.distinct() .distinct()
.handleError((err, stack) {/* noop */})); .handleError((err, stack) {/* noop */}));
} }
/// The duration of any media loaded via [load], or null if unknown. /// The latest [PlaybackEvent].
PlaybackEvent get playbackEvent => _playbackEvent;
/// A stream of [PlaybackEvent]s.
Stream<PlaybackEvent> get playbackEventStream => _playbackEventSubject.stream;
/// The duration of the current audio or null if unknown.
Duration get duration => _playbackEvent.duration;
/// The duration of the current audio or null if unknown.
Future<Duration> get durationFuture => _durationFuture; Future<Duration> get durationFuture => _durationFuture;
/// The duration of any media loaded via [load]. /// The duration of the current audio.
Stream<Duration> get durationStream => _durationSubject.stream; Stream<Duration> get durationStream => _durationSubject.stream;
/// The latest [AudioPlaybackEvent]. /// The current [ProcessingState].
AudioPlaybackEvent get playbackEvent => _audioPlaybackEvent; ProcessingState get processingState => _playbackEvent.processingState;
/// A stream of [AudioPlaybackEvent]s. /// A stream of [ProcessingState]s.
Stream<AudioPlaybackEvent> get playbackEventStream => Stream<ProcessingState> get processingStateStream =>
_playbackEventSubject.stream; _processingStateSubject.stream;
/// The current [AudioPlaybackState]. /// Whether the player is playing.
AudioPlaybackState get playbackState => _audioPlaybackEvent.state; bool get playing => _playingSubject.value;
/// A stream of [AudioPlaybackState]s. /// A stream of changing [playing] states.
Stream<AudioPlaybackState> get playbackStateStream => Stream<bool> get playingStream => _playingSubject.stream;
_playbackStateSubject.stream;
/// A stream broadcasting the current item. /// The current volume of the player.
Stream<int> get currentIndexStream => _currentIndexSubject.stream; double get volume => _volumeSubject.value;
/// Whether the player is buffering. /// A stream of [volume] changes.
bool get buffering => _audioPlaybackEvent.buffering; Stream<double> get volumeStream => _volumeSubject.stream;
/// The current position of the player. /// The current speed of the player.
Duration get position => _audioPlaybackEvent.position; double get speed => _speedSubject.value;
IcyMetadata get icyMetadata => _audioPlaybackEvent.icyMetadata; /// A stream of current speed values.
Stream<double> get speedStream => _speedSubject.stream;
/// A stream of buffering state changes. /// The position up to which buffered audio is available.
Stream<bool> get bufferingStream => _bufferingSubject.stream; Duration get bufferedPosition => _bufferedPositionSubject.value;
Stream<IcyMetadata> get icyMetadataStream => _icyMetadataSubject.stream;
/// A stream of buffered positions. /// A stream of buffered positions.
Stream<Duration> get bufferedPositionStream => Stream<Duration> get bufferedPositionStream =>
_bufferedPositionSubject.stream; _bufferedPositionSubject.stream;
/// A stream of [FullAudioPlaybackState]s. /// The latest ICY metadata received through the audio source.
Stream<FullAudioPlaybackState> get fullPlaybackStateStream => IcyMetadata get icyMetadata => _playbackEvent.icyMetadata;
_fullPlaybackStateSubject.stream;
/// A stream periodically tracking the current position of this player. /// A stream of ICY metadata received through the audio source.
Stream<Duration> getPositionStream( Stream<IcyMetadata> get icyMetadataStream => _icyMetadataSubject.stream;
[final Duration period = const Duration(milliseconds: 200)]) =>
Rx.combineLatest2<AudioPlaybackEvent, void, Duration>( /// The current player state containing only the processing and playing
playbackEventStream, /// states.
// TODO: emit periodically only in playing state. PlayerState get playerState => _playerStateSubject.value;
Stream.periodic(period),
(state, _) => state.position).distinct(); /// A stream of [PlayerState]s.
Stream<PlayerState> get playerStateStream => _playerStateSubject.stream;
/// The index of the current item.
int get currentIndex => _currentIndexSubject.value;
/// A stream broadcasting the current item.
Stream<int> get currentIndexStream => _currentIndexSubject.stream;
/// The current loop mode.
LoopMode get loopMode => _loopModeSubject.value;
/// A stream of [LoopMode]s. /// A stream of [LoopMode]s.
Stream<LoopMode> get loopModeStream => _loopModeSubject.stream; Stream<LoopMode> get loopModeStream => _loopModeSubject.stream;
/// Whether shuffle mode is currently enabled.
bool get shuffleModeEnabled => _shuffleModeEnabledSubject.value;
/// A stream of the shuffle mode status. /// A stream of the shuffle mode status.
Stream<bool> get shuffleModeEnabledStream => Stream<bool> get shuffleModeEnabledStream =>
_shuffleModeEnabledSubject.stream; _shuffleModeEnabledSubject.stream;
/// The current volume of the player.
double get volume => _volume;
/// The current speed of the player.
double get speed => _speed;
/// Whether the player should automatically delay playback in order to /// Whether the player should automatically delay playback in order to
/// minimize stalling. (iOS 10.0 or later only) /// minimize stalling. (iOS 10.0 or later only)
bool get automaticallyWaitsToMinimizeStalling => bool get automaticallyWaitsToMinimizeStalling =>
_automaticallyWaitsToMinimizeStalling; _automaticallyWaitsToMinimizeStalling;
/// The current position of the player.
Duration get position {
if (playing && processingState == ProcessingState.ready) {
final result = _playbackEvent.updatePosition +
(DateTime.now().difference(_playbackEvent.updateTime)) * speed;
return _playbackEvent.duration == null ||
result <= _playbackEvent.duration
? result
: _playbackEvent.duration;
} else {
return _playbackEvent.updatePosition;
}
}
/// A stream tracking the current position of this player, suitable for
/// animating a seek bar. To ensure a smooth animation, this stream emits
/// values more frequently on short items where the seek bar moves more
/// quickly, and less frequenly on long items where the seek bar moves more
/// slowly. The interval between each update will be no quicker than once
/// every 16ms and no slower than once every 200ms.
///
/// See [createPositionStream] for more control over the stream parameters.
Stream<Duration> get positionStream {
if (_positionSubject == null) {
_positionSubject = BehaviorSubject<Duration>();
_positionSubject.addStream(createPositionStream(
steps: 800,
minPeriod: Duration(milliseconds: 16),
maxPeriod: Duration(milliseconds: 11200)));
}
return _positionSubject.stream;
}
/// Creates a new stream periodically tracking the current position of this
/// player. The stream will aim to emit [steps] position updates from the
/// beginning to the end of the current audio source, at intervals of
/// [duration] / [steps]. This interval will be clipped between [minPeriod]
/// and [maxPeriod]. This stream will not emit values while audio playback is
/// paused or stalled.
///
/// Note: each time this method is called, a new stream is created. If you
/// intend to use this stream multiple times, you should hold a reference to
/// the returned stream and close it once you are done.
Stream<Duration> createPositionStream({
int steps = 800,
Duration minPeriod = const Duration(milliseconds: 200),
Duration maxPeriod = const Duration(milliseconds: 200),
}) {
assert(minPeriod <= maxPeriod);
assert(minPeriod > Duration.zero);
Duration duration() => this.duration ?? Duration.zero;
Duration step() {
var s = duration() ~/ steps;
if (s < minPeriod) s = minPeriod;
if (s > maxPeriod) s = maxPeriod;
return s;
}
StreamController<Duration> controller = StreamController.broadcast();
Timer currentTimer;
StreamSubscription durationSubscription;
void yieldPosition(Timer timer) {
if (controller.isClosed) {
timer.cancel();
durationSubscription.cancel();
return;
}
if (_durationSubject.isClosed) {
timer.cancel();
durationSubscription.cancel();
controller.close();
return;
}
controller.add(position);
}
currentTimer = Timer.periodic(step(), yieldPosition);
durationSubscription = durationStream.listen((duration) {
currentTimer.cancel();
currentTimer = Timer.periodic(step(), yieldPosition);
});
return Rx.combineLatest2<void, void, Duration>(
playbackEventStream, controller.stream, (event, period) => position)
.distinct();
}
/// Convenience method to load audio from a URL with optional headers, /// Convenience method to load audio from a URL with optional headers,
/// equivalent to: /// equivalent to:
/// ///
/// ``` /// ```
/// load(ProgressiveAudioSource(Uri.parse(url), headers: headers)); /// load(AudioSource.uri(Uri.parse(url), headers: headers));
/// ``` /// ```
/// ///
/// ///
@ -279,39 +340,32 @@ class AudioPlayer {
/// Convenience method to load audio from a file, equivalent to: /// Convenience method to load audio from a file, equivalent to:
/// ///
/// ``` /// ```
/// load(ProgressiveAudioSource(Uri.file(filePath))); /// load(AudioSource.uri(Uri.file(filePath)));
/// ``` /// ```
Future<Duration> setFilePath(String filePath) => Future<Duration> setFilePath(String filePath) =>
load(ProgressiveAudioSource(Uri.file(filePath))); load(AudioSource.uri(Uri.file(filePath)));
/// Convenience method to load audio from an asset, equivalent to: /// Convenience method to load audio from an asset, equivalent to:
/// ///
/// ``` /// ```
/// load(ProgressiveAudioSource(Uri.parse('asset://$filePath'))); /// load(AudioSource.uri(Uri.parse('asset://$filePath')));
/// ``` /// ```
Future<Duration> setAsset(String assetPath) => Future<Duration> setAsset(String assetPath) =>
load(ProgressiveAudioSource(Uri.parse('asset://$assetPath'))); load(AudioSource.uri(Uri.parse('asset://$assetPath')));
/// Loads audio from an [AudioSource] and completes with the duration of that /// Loads audio from an [AudioSource] and completes when the audio is ready
/// audio, or an exception if this call was interrupted by another /// to play with the duration of that audio, or an exception if this call was
/// call to [load], or if for any reason the audio source was unable to be /// interrupted by another call to [load], or if for any reason the audio
/// loaded. /// source was unable to be loaded.
/// ///
/// If the duration is unknown, null will be returned. /// If the duration is unknown, null will be returned.
///
/// On Android, DASH and HLS streams are detected only when the URL's path
/// has an "mpd" or "m3u8" extension. If the URL does not have such an
/// extension and you have no control over the server, and you also know the
/// type of the stream in advance, you may as a workaround supply the
/// extension as a URL fragment. e.g.
/// https://somewhere.com/somestream?x=etc#.m3u8
Future<Duration> load(AudioSource source) async { Future<Duration> load(AudioSource source) async {
try { try {
_audioSource = source; _audioSource = source;
final duration = await _load(source); final duration = await _load(source);
// Wait for connecting state to pass. // Wait for loading state to pass.
await playbackStateStream await processingStateStream
.firstWhere((state) => state != AudioPlaybackState.connecting); .firstWhere((state) => state != ProcessingState.loading);
return duration; return duration;
} catch (e) { } catch (e) {
_audioSource = null; _audioSource = null;
@ -357,112 +411,67 @@ class AudioPlayer {
start: start, start: start,
end: end, end: end,
)); ));
// Wait for connecting state to pass. // Wait for loading state to pass.
await playbackStateStream await processingStateStream
.firstWhere((state) => state != AudioPlaybackState.connecting); .firstWhere((state) => state != ProcessingState.loading);
return duration; return duration;
} }
/// Plays the currently loaded media from the current position. The [Future] /// Tells the player to play audio as soon as an audio source is loaded and
/// returned by this method completes when playback completes or is paused or /// ready to play. The [Future] returned by this method completes when the
/// stopped. This method can be called from any state except for: /// playback completes or is paused or stopped. If the player is already
/// playing, this method completes immediately.
/// ///
/// * [AudioPlaybackState.connecting] /// This method causes [playing] to become true, and it will remain true
/// * [AudioPlaybackState.none] /// until [pause] or [stop] is called. This means that if playback completes,
/// and then you [seek] to an earlier position in the audio, playback will
/// continue playing from that position. If you instead wish to [pause] or
/// [stop] playback on completion, you can call either method as soon as
/// [processingState] becomes [ProcessingState.completed] by listening to
/// [processingStateStream].
Future<void> play() async { Future<void> play() async {
switch (playbackState) { if (playing) return;
case AudioPlaybackState.playing: _playingSubject.add(true);
case AudioPlaybackState.stopped: // TODO: Make platform side wait for playback to stop on iOS.
case AudioPlaybackState.completed:
case AudioPlaybackState.paused:
// Update local state immediately so that queries aren't surprised.
_audioPlaybackEvent = _audioPlaybackEvent.copyWith(
state: AudioPlaybackState.playing,
);
StreamSubscription subscription;
Completer completer = Completer();
bool startedPlaying = false;
subscription = playbackStateStream.listen((state) {
// TODO: It will be more reliable to let the platform
// side wait for completion since events on the flutter
// side can lag behind the platform side.
if (startedPlaying &&
(state == AudioPlaybackState.paused ||
state == AudioPlaybackState.stopped ||
state == AudioPlaybackState.completed)) {
subscription.cancel();
completer.complete();
} else if (state == AudioPlaybackState.playing) {
startedPlaying = true;
}
});
await _invokeMethod('play'); await _invokeMethod('play');
await completer.future;
break;
default:
throw Exception(
"Cannot call play from connecting/none states ($playbackState)");
}
} }
/// Pauses the currently playing media. It is legal to invoke this method /// Pauses the currently playing media. This method does nothing if
/// only from the [AudioPlaybackState.playing] state. /// ![playing].
Future<void> pause() async { Future<void> pause() async {
switch (playbackState) { if (!playing) return;
case AudioPlaybackState.paused:
break;
case AudioPlaybackState.playing:
// Update local state immediately so that queries aren't surprised. // Update local state immediately so that queries aren't surprised.
_audioPlaybackEvent = _audioPlaybackEvent.copyWith( _playbackEvent = _playbackEvent.copyWith(
state: AudioPlaybackState.paused, updatePosition: position,
updateTime: DateTime.now(),
); );
// TODO: For pause, perhaps modify platform side to ensure new state _playbackEventSubject.add(_playbackEvent);
// is broadcast before this method returns. _playingSubject.add(false);
// TODO: perhaps modify platform side to ensure new state is broadcast
// before this method returns.
await _invokeMethod('pause'); await _invokeMethod('pause');
break;
default:
throw Exception(
"Can call pause only from playing and buffering states ($playbackState)");
}
} }
/// Stops the currently playing media such that the next [play] invocation /// Convenience method to pause and seek to zero.
/// will start from position 0. It is legal to invoke this method only from
/// the following states:
///
/// * [AudioPlaybackState.playing]
/// * [AudioPlaybackState.paused]
/// * [AudioPlaybackState.completed]
Future<void> stop() async { Future<void> stop() async {
switch (playbackState) { await pause();
case AudioPlaybackState.stopped: await seek(Duration.zero);
break;
case AudioPlaybackState.connecting:
case AudioPlaybackState.completed:
case AudioPlaybackState.playing:
case AudioPlaybackState.paused:
// Update local state immediately so that queries aren't surprised.
// NOTE: Android implementation already handles this.
// TODO: Do the same for iOS so the line below becomes unnecessary.
_audioPlaybackEvent = _audioPlaybackEvent.copyWith(
state: AudioPlaybackState.paused,
);
await _invokeMethod('stop');
break;
default:
throw Exception("Cannot call stop from none state");
}
} }
/// Sets the volume of this player, where 1.0 is normal volume. /// Sets the volume of this player, where 1.0 is normal volume.
Future<void> setVolume(final double volume) async { Future<void> setVolume(final double volume) async {
_volume = volume; _volumeSubject.add(volume);
await _invokeMethod('setVolume', [volume]); await _invokeMethod('setVolume', [volume]);
} }
/// Sets the playback speed of this player, where 1.0 is normal speed. /// Sets the playback speed of this player, where 1.0 is normal speed.
Future<void> setSpeed(final double speed) async { Future<void> setSpeed(final double speed) async {
_speed = speed; _playbackEvent = _playbackEvent.copyWith(
updatePosition: position,
updateTime: DateTime.now(),
);
_playbackEventSubject.add(_playbackEvent);
_speedSubject.add(speed);
await _invokeMethod('setSpeed', [speed]); await _invokeMethod('setSpeed', [speed]);
} }
@ -490,25 +499,25 @@ class AudioPlayer {
/// Seeks to a particular [position]. If a composition of multiple /// Seeks to a particular [position]. If a composition of multiple
/// [AudioSource]s has been loaded, you may also specify [index] to seek to a /// [AudioSource]s has been loaded, you may also specify [index] to seek to a
/// particular item within that sequence. It is legal to invoke this method /// particular item within that sequence. This method has no effect unless
/// from any state except for [AudioPlaybackState.none] and /// an audio source has been loaded.
/// [AudioPlaybackState.connecting].
Future<void> seek(final Duration position, {int index}) async { Future<void> seek(final Duration position, {int index}) async {
// Update local state immediately so that queries aren't surprised. switch (processingState) {
_audioPlaybackEvent = _audioPlaybackEvent.copyWith( case ProcessingState.none:
case ProcessingState.loading:
return;
default:
_playbackEvent = _playbackEvent.copyWith(
updatePosition: position, updatePosition: position,
updateTime: Duration(milliseconds: DateTime.now().millisecondsSinceEpoch), updateTime: DateTime.now(),
); );
_playbackEventSubject.add(_audioPlaybackEvent); _playbackEventSubject.add(_playbackEvent);
await _invokeMethod('seek', [position?.inMilliseconds, index]); await _invokeMethod('seek', [position?.inMilliseconds, index]);
} }
}
/// Release all resources associated with this player. You must invoke this /// Release all resources associated with this player. You must invoke this
/// after you are done with the player. This method can be invoked from any /// after you are done with the player.
/// state except for:
///
/// * [AudioPlaybackState.none]
/// * [AudioPlaybackState.connecting]
Future<void> dispose() async { Future<void> dispose() async {
await _invokeMethod('dispose'); await _invokeMethod('dispose');
_audioSource = null; _audioSource = null;
@ -520,6 +529,14 @@ class AudioPlayer {
await _playbackEventSubject.close(); await _playbackEventSubject.close();
await _loopModeSubject.close(); await _loopModeSubject.close();
await _shuffleModeEnabledSubject.close(); await _shuffleModeEnabledSubject.close();
await _playingSubject.close();
await _volumeSubject.close();
await _speedSubject.close();
await _playerStateSubject.drain();
await _playerStateSubject.close();
if (_positionSubject != null) {
await _positionSubject.close();
}
} }
Future<dynamic> _invokeMethod(String method, [dynamic args]) async => Future<dynamic> _invokeMethod(String method, [dynamic args]) async =>
@ -527,16 +544,13 @@ class AudioPlayer {
} }
/// Encapsulates the playback state and current position of the player. /// Encapsulates the playback state and current position of the player.
class AudioPlaybackEvent { class PlaybackEvent {
/// The current playback state. /// The current processing state.
final AudioPlaybackState state; final ProcessingState processingState;
/// Whether the player is buffering.
final bool buffering;
/// When the last time a position discontinuity happened, as measured in time /// When the last time a position discontinuity happened, as measured in time
/// since the epoch. /// since the epoch.
final Duration updateTime; final DateTime updateTime;
/// The position at [updateTime]. /// The position at [updateTime].
final Duration updatePosition; final Duration updatePosition;
@ -544,33 +558,28 @@ class AudioPlaybackEvent {
/// The buffer position. /// The buffer position.
final Duration bufferedPosition; final Duration bufferedPosition;
/// The playback speed.
final double speed;
/// The media duration, or null if unknown. /// The media duration, or null if unknown.
final Duration duration; final Duration duration;
/// The latest ICY metadata received through the audio stream.
final IcyMetadata icyMetadata; final IcyMetadata icyMetadata;
/// The index of the currently playing item. /// The index of the currently playing item.
final int currentIndex; final int currentIndex;
AudioPlaybackEvent({ PlaybackEvent({
@required this.state, @required this.processingState,
@required this.buffering,
@required this.updateTime, @required this.updateTime,
@required this.updatePosition, @required this.updatePosition,
@required this.bufferedPosition, @required this.bufferedPosition,
@required this.speed,
@required this.duration, @required this.duration,
@required this.icyMetadata, @required this.icyMetadata,
@required this.currentIndex, @required this.currentIndex,
}); });
AudioPlaybackEvent copyWith({ PlaybackEvent copyWith({
AudioPlaybackState state, ProcessingState processingState,
bool buffering, DateTime updateTime,
Duration updateTime,
Duration updatePosition, Duration updatePosition,
Duration bufferedPosition, Duration bufferedPosition,
double speed, double speed,
@ -578,71 +587,64 @@ class AudioPlaybackEvent {
IcyMetadata icyMetadata, IcyMetadata icyMetadata,
UriAudioSource currentIndex, UriAudioSource currentIndex,
}) => }) =>
AudioPlaybackEvent( PlaybackEvent(
state: state ?? this.state, processingState: processingState ?? this.processingState,
buffering: buffering ?? this.buffering,
updateTime: updateTime ?? this.updateTime, updateTime: updateTime ?? this.updateTime,
updatePosition: updatePosition ?? this.updatePosition, updatePosition: updatePosition ?? this.updatePosition,
bufferedPosition: bufferedPosition ?? this.bufferedPosition, bufferedPosition: bufferedPosition ?? this.bufferedPosition,
speed: speed ?? this.speed,
duration: duration ?? this.duration, duration: duration ?? this.duration,
icyMetadata: icyMetadata ?? this.icyMetadata, icyMetadata: icyMetadata ?? this.icyMetadata,
currentIndex: currentIndex ?? this.currentIndex, currentIndex: currentIndex ?? this.currentIndex,
); );
/// The current position of the player.
Duration get position {
if (state == AudioPlaybackState.playing && !buffering) {
final result = updatePosition +
(Duration(milliseconds: DateTime.now().millisecondsSinceEpoch) -
updateTime) *
speed;
return duration == null || result <= duration ? result : duration;
} else {
return updatePosition;
}
}
@override @override
String toString() => String toString() =>
"{state=$state, updateTime=$updateTime, updatePosition=$updatePosition, speed=$speed}"; "{processingState=$processingState, updateTime=$updateTime, updatePosition=$updatePosition}";
} }
/// Enumerates the different playback states of a player. /// Enumerates the different processing states of a player.
/// enum ProcessingState {
/// If you also need access to the buffering state, use /// The player has not loaded an [AudioSource].
/// [FullAudioPlaybackState].
enum AudioPlaybackState {
none, none,
stopped,
paused, /// The player is loading an [AudioSource].
playing, loading,
connecting,
/// The player is buffering audio and unable to play.
buffering,
/// The player is has enough audio buffered and is able to play.
ready,
/// The player has reached the end of the audio.
completed, completed,
} }
/// Encapsulates the playback state and the buffering state. /// Encapsulates the playing and processing states. These two states vary
/// /// orthogonally, and so if [processingState] is [ProcessingState.buffering],
/// These two states vary orthogonally, and so if [buffering] is true, you can /// you can check [playing] to determine whether the buffering occurred while
/// check [state] to determine whether this buffering is occurring during the /// the player was playing or while the player was paused.
/// playing state or the paused state. class PlayerState {
class FullAudioPlaybackState { /// Whether the player will play when [processingState] is
final AudioPlaybackState state; /// [ProcessingState.ready].
final bool buffering; final bool playing;
final IcyMetadata icyMetadata;
FullAudioPlaybackState(this.state, this.buffering, this.icyMetadata); /// The current processing state of the player.
final ProcessingState processingState;
PlayerState(this.playing, this.processingState);
@override @override
int get hashCode => String toString() => 'playing=$playing,processingState=$processingState';
icyMetadata.hashCode * (state.index + 1) * (buffering ? 2 : 1);
@override
int get hashCode => toString().hashCode;
@override @override
bool operator ==(dynamic other) => bool operator ==(dynamic other) =>
other is FullAudioPlaybackState && other is PlayerState &&
other?.state == state && other?.playing == playing &&
other?.buffering == buffering && other?.processingState == processingState;
other?.icyMetadata == icyMetadata;
} }
class IcyInfo { class IcyInfo {
@ -861,6 +863,12 @@ abstract class AudioSource {
/// attempting to guess the type of stream. On iOS, this uses Apple's SDK to /// attempting to guess the type of stream. On iOS, this uses Apple's SDK to
/// automatically detect the stream type. On Android, the type of stream will /// automatically detect the stream type. On Android, the type of stream will
/// be guessed from the extension. /// be guessed from the extension.
///
/// If you are loading DASH or HLS streams that do not have standard "mpd" or
/// "m3u8" extensions in their URIs, this method will fail to detect the
/// stream type on Android. If you know in advance what type of audio stream
/// it is, you should instantiate [DashAudioSource] or [HlsAudioSource]
/// directly.
static AudioSource uri(Uri uri, {Map headers, Object tag}) { static AudioSource uri(Uri uri, {Map headers, Object tag}) {
bool hasExtension(Uri uri, String extension) => bool hasExtension(Uri uri, String extension) =>
uri.path.toLowerCase().endsWith('.$extension') || uri.path.toLowerCase().endsWith('.$extension') ||

View File

@ -2,7 +2,6 @@ import 'dart:async';
import 'dart:html'; import 'dart:html';
import 'dart:math'; import 'dart:math';
import 'package:async/async.dart';
import 'package:flutter/services.dart'; import 'package:flutter/services.dart';
import 'package:flutter/widgets.dart'; import 'package:flutter/widgets.dart';
import 'package:flutter_web_plugins/flutter_web_plugins.dart'; import 'package:flutter_web_plugins/flutter_web_plugins.dart';
@ -44,8 +43,8 @@ abstract class JustAudioPlayer {
final MethodChannel methodChannel; final MethodChannel methodChannel;
final PluginEventChannel eventChannel; final PluginEventChannel eventChannel;
final StreamController eventController = StreamController(); final StreamController eventController = StreamController();
AudioPlaybackState _state = AudioPlaybackState.none; ProcessingState _processingState = ProcessingState.none;
bool _buffering = false; bool _playing = false;
int _index; int _index;
JustAudioPlayer({@required this.id, @required this.registrar}) JustAudioPlayer({@required this.id, @required this.registrar})
@ -67,8 +66,6 @@ abstract class JustAudioPlayer {
return await play(); return await play();
case 'pause': case 'pause':
return await pause(); return await pause();
case 'stop':
return await stop();
case 'setVolume': case 'setVolume':
return await setVolume(args[0]); return await setVolume(args[0]);
case 'setSpeed': case 'setSpeed':
@ -114,8 +111,6 @@ abstract class JustAudioPlayer {
Future<void> pause(); Future<void> pause();
Future<void> stop();
Future<void> setVolume(double volume); Future<void> setVolume(double volume);
Future<void> setSpeed(double speed); Future<void> setSpeed(double speed);
@ -133,6 +128,8 @@ abstract class JustAudioPlayer {
Duration getCurrentPosition(); Duration getCurrentPosition();
Duration getBufferedPosition();
Duration getDuration(); Duration getDuration();
concatenatingAdd(String playerId, Map source); concatenatingAdd(String playerId, Map source);
@ -154,12 +151,10 @@ abstract class JustAudioPlayer {
broadcastPlaybackEvent() { broadcastPlaybackEvent() {
var updateTime = DateTime.now().millisecondsSinceEpoch; var updateTime = DateTime.now().millisecondsSinceEpoch;
eventController.add({ eventController.add({
'state': _state.index, 'processingState': _processingState.index,
'buffering': _buffering,
'updatePosition': getCurrentPosition()?.inMilliseconds, 'updatePosition': getCurrentPosition()?.inMilliseconds,
'updateTime': updateTime, 'updateTime': updateTime,
// TODO: buffered position 'bufferedPosition': getBufferedPosition()?.inMilliseconds,
'bufferedPosition': getCurrentPosition()?.inMilliseconds,
// TODO: Icy Metadata // TODO: Icy Metadata
'icyMetadata': null, 'icyMetadata': null,
'duration': getDuration()?.inMilliseconds, 'duration': getDuration()?.inMilliseconds,
@ -167,8 +162,8 @@ abstract class JustAudioPlayer {
}); });
} }
transition(AudioPlaybackState state) { transition(ProcessingState processingState) {
_state = state; _processingState = processingState;
broadcastPlaybackEvent(); broadcastPlaybackEvent();
} }
} }
@ -179,26 +174,36 @@ class Html5AudioPlayer extends JustAudioPlayer {
AudioSourcePlayer _audioSourcePlayer; AudioSourcePlayer _audioSourcePlayer;
LoopMode _loopMode = LoopMode.off; LoopMode _loopMode = LoopMode.off;
bool _shuffleModeEnabled = false; bool _shuffleModeEnabled = false;
bool _playing = false;
final Map<String, AudioSourcePlayer> _audioSourcePlayers = {}; final Map<String, AudioSourcePlayer> _audioSourcePlayers = {};
Html5AudioPlayer({@required String id, @required Registrar registrar}) Html5AudioPlayer({@required String id, @required Registrar registrar})
: super(id: id, registrar: registrar) { : super(id: id, registrar: registrar) {
_audioElement.addEventListener('durationchange', (event) { _audioElement.addEventListener('durationchange', (event) {
_durationCompleter?.complete(); _durationCompleter?.complete();
broadcastPlaybackEvent();
}); });
_audioElement.addEventListener('error', (event) { _audioElement.addEventListener('error', (event) {
_durationCompleter?.completeError(_audioElement.error); _durationCompleter?.completeError(_audioElement.error);
}); });
_audioElement.addEventListener('ended', (event) async { _audioElement.addEventListener('ended', (event) async {
onEnded(); _currentAudioSourcePlayer.complete();
}); });
_audioElement.addEventListener('seek', (event) { _audioElement.addEventListener('timeupdate', (event) {
_buffering = true; _currentAudioSourcePlayer.timeUpdated(_audioElement.currentTime);
broadcastPlaybackEvent();
}); });
_audioElement.addEventListener('seeked', (event) { _audioElement.addEventListener('loadstart', (event) {
_buffering = false; transition(ProcessingState.buffering);
});
_audioElement.addEventListener('waiting', (event) {
transition(ProcessingState.buffering);
});
_audioElement.addEventListener('stalled', (event) {
transition(ProcessingState.buffering);
});
_audioElement.addEventListener('canplaythrough', (event) {
transition(ProcessingState.ready);
});
_audioElement.addEventListener('progress', (event) {
broadcastPlaybackEvent(); broadcastPlaybackEvent();
}); });
} }
@ -245,18 +250,17 @@ class Html5AudioPlayer extends JustAudioPlayer {
// Loop back to the beginning // Loop back to the beginning
if (order.length == 1) { if (order.length == 1) {
await seek(0, null); await seek(0, null);
await play(); play();
} else { } else {
_index = order[0]; _index = order[0];
await _currentAudioSourcePlayer.load(); await _currentAudioSourcePlayer.load();
// Should always be true... // Should always be true...
if (_playing) { if (_playing) {
await play(); play();
} }
} }
} else { } else {
_playing = false; transition(ProcessingState.completed);
transition(AudioPlaybackState.completed);
} }
} }
} }
@ -280,7 +284,7 @@ class Html5AudioPlayer extends JustAudioPlayer {
} }
Future<Duration> loadUri(final Uri uri) async { Future<Duration> loadUri(final Uri uri) async {
transition(AudioPlaybackState.connecting); transition(ProcessingState.loading);
final src = uri.toString(); final src = uri.toString();
if (src != _audioElement.src) { if (src != _audioElement.src) {
_durationCompleter = Completer<num>(); _durationCompleter = Completer<num>();
@ -296,7 +300,7 @@ class Html5AudioPlayer extends JustAudioPlayer {
_durationCompleter = null; _durationCompleter = null;
} }
} }
transition(AudioPlaybackState.stopped); transition(ProcessingState.ready);
final seconds = _audioElement.duration; final seconds = _audioElement.duration;
return seconds.isFinite return seconds.isFinite
? Duration(milliseconds: (seconds * 1000).toInt()) ? Duration(milliseconds: (seconds * 1000).toInt())
@ -306,22 +310,13 @@ class Html5AudioPlayer extends JustAudioPlayer {
@override @override
Future<void> play() async { Future<void> play() async {
_playing = true; _playing = true;
_currentAudioSourcePlayer.play(); await _currentAudioSourcePlayer.play();
transition(AudioPlaybackState.playing);
} }
@override @override
Future<void> pause() async { Future<void> pause() async {
_playing = false; _playing = false;
_currentAudioSourcePlayer.pause(); _currentAudioSourcePlayer.pause();
transition(AudioPlaybackState.paused);
}
@override
Future<void> stop() async {
_playing = false;
_currentAudioSourcePlayer.stop();
transition(AudioPlaybackState.stopped);
} }
@override @override
@ -356,7 +351,7 @@ class Html5AudioPlayer extends JustAudioPlayer {
await _currentAudioSourcePlayer.load(); await _currentAudioSourcePlayer.load();
await _currentAudioSourcePlayer.seek(position); await _currentAudioSourcePlayer.seek(position);
if (_playing) { if (_playing) {
await play(); _currentAudioSourcePlayer.play();
} }
} else { } else {
await _currentAudioSourcePlayer.seek(position); await _currentAudioSourcePlayer.seek(position);
@ -447,13 +442,16 @@ class Html5AudioPlayer extends JustAudioPlayer {
} }
concatenatingClear(String playerId) { concatenatingClear(String playerId) {
_currentAudioSourcePlayer.stop(); _currentAudioSourcePlayer.pause();
_concatenating(playerId).clear(); _concatenating(playerId).clear();
} }
@override @override
Duration getCurrentPosition() => _currentAudioSourcePlayer?.position; Duration getCurrentPosition() => _currentAudioSourcePlayer?.position;
@override
Duration getBufferedPosition() => _currentAudioSourcePlayer?.bufferedPosition;
@override @override
Duration getDuration() => _currentAudioSourcePlayer?.duration; Duration getDuration() => _currentAudioSourcePlayer?.duration;
@ -462,7 +460,7 @@ class Html5AudioPlayer extends JustAudioPlayer {
_currentAudioSourcePlayer?.pause(); _currentAudioSourcePlayer?.pause();
_audioElement.removeAttribute('src'); _audioElement.removeAttribute('src');
_audioElement.load(); _audioElement.load();
transition(AudioPlaybackState.none); transition(ProcessingState.none);
super.dispose(); super.dispose();
} }
@ -540,14 +538,18 @@ abstract class IndexedAudioSourcePlayer extends AudioSourcePlayer {
Future<void> pause(); Future<void> pause();
Future<void> stop();
Future<void> seek(int position); Future<void> seek(int position);
Future<void> complete();
Future<void> timeUpdated(double seconds) async {}
Duration get duration; Duration get duration;
Duration get position; Duration get position;
Duration get bufferedPosition;
AudioElement get _audioElement => html5AudioPlayer._audioElement; AudioElement get _audioElement => html5AudioPlayer._audioElement;
@override @override
@ -562,6 +564,7 @@ abstract class UriAudioSourcePlayer extends IndexedAudioSourcePlayer {
final Map headers; final Map headers;
double _resumePos; double _resumePos;
Duration _duration; Duration _duration;
Completer _completer;
UriAudioSourcePlayer( UriAudioSourcePlayer(
Html5AudioPlayer html5AudioPlayer, String id, this.uri, this.headers) Html5AudioPlayer html5AudioPlayer, String id, this.uri, this.headers)
@ -583,12 +586,16 @@ abstract class UriAudioSourcePlayer extends IndexedAudioSourcePlayer {
Future<void> play() async { Future<void> play() async {
_audioElement.currentTime = _resumePos; _audioElement.currentTime = _resumePos;
_audioElement.play(); _audioElement.play();
_completer = Completer();
await _completer.future;
_completer = null;
} }
@override @override
Future<void> pause() async { Future<void> pause() async {
_resumePos = _audioElement.currentTime; _resumePos = _audioElement.currentTime;
_audioElement.pause(); _audioElement.pause();
_interruptPlay();
} }
@override @override
@ -597,10 +604,15 @@ abstract class UriAudioSourcePlayer extends IndexedAudioSourcePlayer {
} }
@override @override
Future<void> stop() async { Future<void> complete() async {
_resumePos = 0.0; _interruptPlay();
_audioElement.pause(); html5AudioPlayer.onEnded();
_audioElement.currentTime = 0.0; }
_interruptPlay() {
if (_completer?.isCompleted == false) {
_completer.complete();
}
} }
@override @override
@ -617,6 +629,19 @@ abstract class UriAudioSourcePlayer extends IndexedAudioSourcePlayer {
double seconds = _audioElement.currentTime; double seconds = _audioElement.currentTime;
return Duration(milliseconds: (seconds * 1000).toInt()); return Duration(milliseconds: (seconds * 1000).toInt());
} }
@override
Duration get bufferedPosition {
if (_audioElement.buffered.length > 0) {
return Duration(
milliseconds:
(_audioElement.buffered.end(_audioElement.buffered.length - 1) *
1000)
.toInt());
} else {
return Duration.zero;
}
}
} }
class ProgressiveAudioSourcePlayer extends UriAudioSourcePlayer { class ProgressiveAudioSourcePlayer extends UriAudioSourcePlayer {
@ -775,7 +800,7 @@ class ClippingAudioSourcePlayer extends IndexedAudioSourcePlayer {
final UriAudioSourcePlayer audioSourcePlayer; final UriAudioSourcePlayer audioSourcePlayer;
final Duration start; final Duration start;
final Duration end; final Duration end;
CancelableOperation _playOperation; Completer<ClipInterruptReason> _completer;
double _resumePos; double _resumePos;
Duration _duration; Duration _duration;
@ -791,55 +816,61 @@ class ClippingAudioSourcePlayer extends IndexedAudioSourcePlayer {
@override @override
Future<Duration> load() async { Future<Duration> load() async {
_resumePos = start.inMilliseconds / 1000.0; _resumePos = (start ?? Duration.zero).inMilliseconds / 1000.0;
Duration fullDuration = Duration fullDuration =
await html5AudioPlayer.loadUri(audioSourcePlayer.uri); await html5AudioPlayer.loadUri(audioSourcePlayer.uri);
_audioElement.currentTime = _resumePos; _audioElement.currentTime = _resumePos;
_duration = Duration( _duration = Duration(
milliseconds: min(end.inMilliseconds, fullDuration.inMilliseconds) - milliseconds: min((end ?? fullDuration).inMilliseconds,
start.inMilliseconds); fullDuration.inMilliseconds) -
(start ?? Duration.zero).inMilliseconds);
return _duration; return _duration;
} }
double get remaining => end.inMilliseconds / 1000 - _audioElement.currentTime;
@override @override
Future<void> play() async { Future<void> play() async {
_interruptPlay(); _interruptPlay(ClipInterruptReason.simultaneous);
//_playing = true;
final duration =
end == null ? null : end.inMilliseconds / 1000 - _resumePos;
_audioElement.currentTime = _resumePos; _audioElement.currentTime = _resumePos;
_audioElement.play(); _audioElement.play();
if (duration != null) { _completer = Completer<ClipInterruptReason>();
_playOperation = CancelableOperation.fromFuture(Future.delayed(Duration( ClipInterruptReason reason;
milliseconds: duration * 1000 ~/ _audioElement.playbackRate))) while ((reason = await _completer.future) == ClipInterruptReason.seek) {
.then((_) { _completer = Completer<ClipInterruptReason>();
_playOperation = null;
pause();
html5AudioPlayer.onEnded();
});
} }
if (reason == ClipInterruptReason.end) {
html5AudioPlayer.onEnded();
}
_completer = null;
} }
@override @override
Future<void> pause() async { Future<void> pause() async {
_interruptPlay(); _interruptPlay(ClipInterruptReason.pause);
_resumePos = _audioElement.currentTime; _resumePos = _audioElement.currentTime;
_audioElement.pause(); _audioElement.pause();
} }
@override @override
Future<void> seek(int position) async { Future<void> seek(int position) async {
_interruptPlay(); _interruptPlay(ClipInterruptReason.seek);
_audioElement.currentTime = _audioElement.currentTime =
_resumePos = start.inMilliseconds / 1000.0 + position / 1000.0; _resumePos = start.inMilliseconds / 1000.0 + position / 1000.0;
} }
@override @override
Future<void> stop() async { Future<void> complete() async {
_resumePos = 0.0; _interruptPlay(ClipInterruptReason.end);
_audioElement.pause(); }
_audioElement.currentTime = start.inMilliseconds / 1000.0;
@override
Future<void> timeUpdated(double seconds) async {
if (end != null) {
if (seconds >= end.inMilliseconds / 1000) {
_interruptPlay(ClipInterruptReason.end);
}
}
} }
@override @override
@ -860,12 +891,36 @@ class ClippingAudioSourcePlayer extends IndexedAudioSourcePlayer {
return position; return position;
} }
_interruptPlay() { @override
_playOperation?.cancel(); Duration get bufferedPosition {
_playOperation = null; if (_audioElement.buffered.length > 0) {
var seconds =
_audioElement.buffered.end(_audioElement.buffered.length - 1);
var position = Duration(milliseconds: (seconds * 1000).toInt());
if (start != null) {
position -= start;
}
if (position < Duration.zero) {
position = Duration.zero;
}
if (duration != null && position > duration) {
position = duration;
}
return position;
} else {
return Duration.zero;
}
}
_interruptPlay(ClipInterruptReason reason) {
if (_completer?.isCompleted == false) {
_completer.complete(reason);
}
} }
} }
enum ClipInterruptReason { end, pause, seek, simultaneous }
class LoopingAudioSourcePlayer extends AudioSourcePlayer { class LoopingAudioSourcePlayer extends AudioSourcePlayer {
final AudioSourcePlayer audioSourcePlayer; final AudioSourcePlayer audioSourcePlayer;
final int count; final int count;

View File

@ -1,27 +1,13 @@
# Generated by pub # Generated by pub
# See https://dart.dev/tools/pub/glossary#lockfile # See https://dart.dev/tools/pub/glossary#lockfile
packages: packages:
archive:
dependency: transitive
description:
name: archive
url: "https://pub.dartlang.org"
source: hosted
version: "2.0.13"
args:
dependency: transitive
description:
name: args
url: "https://pub.dartlang.org"
source: hosted
version: "1.6.0"
async: async:
dependency: "direct main" dependency: "direct main"
description: description:
name: async name: async
url: "https://pub.dartlang.org" url: "https://pub.dartlang.org"
source: hosted source: hosted
version: "2.4.1" version: "2.4.2"
boolean_selector: boolean_selector:
dependency: transitive dependency: transitive
description: description:
@ -29,6 +15,13 @@ packages:
url: "https://pub.dartlang.org" url: "https://pub.dartlang.org"
source: hosted source: hosted
version: "2.0.0" version: "2.0.0"
characters:
dependency: transitive
description:
name: characters
url: "https://pub.dartlang.org"
source: hosted
version: "1.0.0"
charcode: charcode:
dependency: transitive dependency: transitive
description: description:
@ -36,13 +29,20 @@ packages:
url: "https://pub.dartlang.org" url: "https://pub.dartlang.org"
source: hosted source: hosted
version: "1.1.3" version: "1.1.3"
clock:
dependency: transitive
description:
name: clock
url: "https://pub.dartlang.org"
source: hosted
version: "1.0.1"
collection: collection:
dependency: transitive dependency: transitive
description: description:
name: collection name: collection
url: "https://pub.dartlang.org" url: "https://pub.dartlang.org"
source: hosted source: hosted
version: "1.14.12" version: "1.14.13"
convert: convert:
dependency: transitive dependency: transitive
description: description:
@ -57,6 +57,13 @@ packages:
url: "https://pub.dartlang.org" url: "https://pub.dartlang.org"
source: hosted source: hosted
version: "2.1.4" version: "2.1.4"
fake_async:
dependency: transitive
description:
name: fake_async
url: "https://pub.dartlang.org"
source: hosted
version: "1.1.0"
file: file:
dependency: transitive dependency: transitive
description: description:
@ -79,13 +86,6 @@ packages:
description: flutter description: flutter
source: sdk source: sdk
version: "0.0.0" version: "0.0.0"
image:
dependency: transitive
description:
name: image
url: "https://pub.dartlang.org"
source: hosted
version: "2.1.12"
intl: intl:
dependency: transitive dependency: transitive
description: description:
@ -99,7 +99,7 @@ packages:
name: matcher name: matcher
url: "https://pub.dartlang.org" url: "https://pub.dartlang.org"
source: hosted source: hosted
version: "0.12.6" version: "0.12.8"
meta: meta:
dependency: transitive dependency: transitive
description: description:
@ -113,7 +113,7 @@ packages:
name: path name: path
url: "https://pub.dartlang.org" url: "https://pub.dartlang.org"
source: hosted source: hosted
version: "1.6.4" version: "1.7.0"
path_provider: path_provider:
dependency: "direct main" dependency: "direct main"
description: description:
@ -142,13 +142,6 @@ packages:
url: "https://pub.dartlang.org" url: "https://pub.dartlang.org"
source: hosted source: hosted
version: "1.0.2" version: "1.0.2"
petitparser:
dependency: transitive
description:
name: petitparser
url: "https://pub.dartlang.org"
source: hosted
version: "2.4.0"
platform: platform:
dependency: transitive dependency: transitive
description: description:
@ -170,13 +163,6 @@ packages:
url: "https://pub.dartlang.org" url: "https://pub.dartlang.org"
source: hosted source: hosted
version: "3.0.13" version: "3.0.13"
quiver:
dependency: transitive
description:
name: quiver
url: "https://pub.dartlang.org"
source: hosted
version: "2.1.3"
rxdart: rxdart:
dependency: "direct main" dependency: "direct main"
description: description:
@ -202,7 +188,7 @@ packages:
name: stack_trace name: stack_trace
url: "https://pub.dartlang.org" url: "https://pub.dartlang.org"
source: hosted source: hosted
version: "1.9.3" version: "1.9.5"
stream_channel: stream_channel:
dependency: transitive dependency: transitive
description: description:
@ -230,14 +216,14 @@ packages:
name: test_api name: test_api
url: "https://pub.dartlang.org" url: "https://pub.dartlang.org"
source: hosted source: hosted
version: "0.2.15" version: "0.2.17"
typed_data: typed_data:
dependency: transitive dependency: transitive
description: description:
name: typed_data name: typed_data
url: "https://pub.dartlang.org" url: "https://pub.dartlang.org"
source: hosted source: hosted
version: "1.1.6" version: "1.2.0"
uuid: uuid:
dependency: "direct main" dependency: "direct main"
description: description:
@ -259,13 +245,6 @@ packages:
url: "https://pub.dartlang.org" url: "https://pub.dartlang.org"
source: hosted source: hosted
version: "0.1.0" version: "0.1.0"
xml:
dependency: transitive
description:
name: xml
url: "https://pub.dartlang.org"
source: hosted
version: "3.6.1"
sdks: sdks:
dart: ">=2.6.0 <3.0.0" dart: ">=2.9.0-14.0.dev <3.0.0"
flutter: ">=1.12.13+hotfix.5 <2.0.0" flutter: ">=1.12.13+hotfix.5 <2.0.0"