import 'dart:async';
import 'dart:io';

import 'package:flutter/services.dart';
import 'package:flutter/widgets.dart';
import 'package:path/path.dart' as p;
import 'package:path_provider/path_provider.dart';
import 'package:rxdart/rxdart.dart';

/// An object to manage playing audio from a URL, a local file or an asset.
///
/// ```
/// final player = AudioPlayer();
/// await player.setUrl('https://foo.com/bar.mp3');
/// player.play();
/// player.pause();
/// player.play();
/// await player.stop();
/// await player.setClip(start: Duration(seconds: 10), end: Duration(seconds: 20));
/// await player.play();
/// await player.setUrl('https://foo.com/baz.mp3');
/// await player.seek(Duration(minutes: 5));
/// player.play();
/// await player.stop();
/// await player.dispose();
/// ```
///
/// You must call [dispose] to release the resources used by this player,
/// including any temporary files created to cache assets.
///
/// The [AudioPlayer] instance transitions through different states as follows:
///
/// * [AudioPlaybackState.none]: immediately after instantiation and [dispose].
/// * [AudioPlaybackState.stopped]: eventually after [setUrl], [setFilePath],
///   [setAsset] or [setClip] completes, and immediately after [stop].
/// * [AudioPlaybackState.paused]: after [pause].
/// * [AudioPlaybackState.playing]: after [play].
/// * [AudioPlaybackState.connecting]: immediately after [setUrl],
///   [setFilePath] and [setAsset] while waiting for the media to load.
/// * [AudioPlaybackState.completed]: immediately after playback reaches the
///   end of the media or the end of the clip.
///
/// Additionally, after a [seek] request completes, the state will return to
/// whatever state the player was in prior to the seek request.
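///
/// A minimal sketch of observing these transitions via [playbackStateStream]:
///
/// ```
/// player.playbackStateStream.listen((state) {
///   if (state == AudioPlaybackState.completed) {
///     print('Playback reached the end of the media.');
///   }
/// });
/// ```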
class AudioPlayer {
  static final _mainChannel = MethodChannel('com.ryanheise.just_audio.methods');

  static Future<MethodChannel> _init(int id) async {
    await _mainChannel.invokeMethod('init', ['$id']);
    return MethodChannel('com.ryanheise.just_audio.methods.$id');
  }

  final Future<MethodChannel> _channel;

  final int _id;

  Duration _duration;

  Future<Duration> _durationFuture;

  final _durationSubject = BehaviorSubject<Duration>();

  // TODO: also broadcast this event on instantiation.
  AudioPlaybackEvent _audioPlaybackEvent = AudioPlaybackEvent(
    state: AudioPlaybackState.none,
    buffering: false,
    updatePosition: Duration.zero,
    updateTime: Duration.zero,
    bufferedPosition: Duration.zero,
    speed: 1.0,
    duration: Duration.zero,
    icyMetadata: IcyMetadata(
        info: IcyInfo(title: null, url: null),
        headers: IcyHeaders(
            bitrate: null,
            genre: null,
            name: null,
            metadataInterval: null,
            url: null,
            isPublic: null)),
  );

  Stream<AudioPlaybackEvent> _eventChannelStream;

  StreamSubscription<AudioPlaybackEvent> _eventChannelStreamSubscription;

  final _playbackEventSubject = BehaviorSubject<AudioPlaybackEvent>();

  final _playbackStateSubject = BehaviorSubject<AudioPlaybackState>();

  final _bufferingSubject = BehaviorSubject<bool>();

  final _bufferedPositionSubject = BehaviorSubject<Duration>();

  final _icyMetadataSubject = BehaviorSubject<IcyMetadata>();

  final _fullPlaybackStateSubject = BehaviorSubject<FullAudioPlaybackState>();

  double _volume = 1.0;

  double _speed = 1.0;

  bool _automaticallyWaitsToMinimizeStalling = true;

  File _cacheFile;

  /// Creates an [AudioPlayer].
  factory AudioPlayer() =>
      AudioPlayer._internal(DateTime.now().microsecondsSinceEpoch);

  AudioPlayer._internal(this._id) : _channel = _init(_id) {
    _eventChannelStream = EventChannel('com.ryanheise.just_audio.events.$_id')
        .receiveBroadcastStream()
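        // Each platform event arrives as a list: [0] state index,
        // [1] buffering flag, [2] position (ms), [3] update time (ms),
        // [4] buffered position (ms), [5] optional ICY metadata as nested
        // lists, matching the decoding below.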
        .map((data) => _audioPlaybackEvent = AudioPlaybackEvent(
              state: AudioPlaybackState.values[data[0]],
              buffering: data[1],
              updatePosition: Duration(milliseconds: data[2]),
              updateTime: Duration(milliseconds: data[3]),
              bufferedPosition: Duration(milliseconds: data[4]),
              speed: _speed,
              duration: _duration,
              icyMetadata: data[5] == null
                  ? null
                  : IcyMetadata(
                      info: IcyInfo(title: data[5][0][0], url: data[5][0][1]),
                      headers: IcyHeaders(
                          bitrate: data[5][1][0],
                          genre: data[5][1][1],
                          name: data[5][1][2],
                          metadataInterval: data[5][1][3],
                          url: data[5][1][4],
                          isPublic: data[5][1][5])),
            ));
    _eventChannelStreamSubscription =
        _eventChannelStream.listen(_playbackEventSubject.add);
    _playbackStateSubject
        .addStream(playbackEventStream.map((state) => state.state).distinct());
    _bufferingSubject.addStream(
        playbackEventStream.map((state) => state.buffering).distinct());
    _bufferedPositionSubject.addStream(
        playbackEventStream.map((state) => state.bufferedPosition).distinct());
    _icyMetadataSubject.addStream(
        playbackEventStream.map((state) => state.icyMetadata).distinct());
    _fullPlaybackStateSubject.addStream(Rx.combineLatest3<AudioPlaybackState,
            bool, IcyMetadata, FullAudioPlaybackState>(
        playbackStateStream,
        bufferingStream,
        icyMetadataStream,
        (state, buffering, icyMetadata) =>
            FullAudioPlaybackState(state, buffering, icyMetadata)));
  }

  /// The duration of any media set via [setUrl], [setFilePath] or [setAsset],
  /// or null otherwise.
  Future<Duration> get durationFuture => _durationFuture;

  /// The duration of any media set via [setUrl], [setFilePath] or [setAsset].
  Stream<Duration> get durationStream => _durationSubject.stream;

  /// The latest [AudioPlaybackEvent].
  AudioPlaybackEvent get playbackEvent => _audioPlaybackEvent;

  /// A stream of [AudioPlaybackEvent]s.
  Stream<AudioPlaybackEvent> get playbackEventStream =>
      _playbackEventSubject.stream;

  /// The current [AudioPlaybackState].
  AudioPlaybackState get playbackState => _audioPlaybackEvent.state;

  /// A stream of [AudioPlaybackState]s.
  Stream<AudioPlaybackState> get playbackStateStream =>
      _playbackStateSubject.stream;

  /// Whether the player is buffering.
  bool get buffering => _audioPlaybackEvent.buffering;

  /// The latest [IcyMetadata], if any.
  IcyMetadata get icyMetadata => _audioPlaybackEvent.icyMetadata;

  /// A stream of buffering state changes.
  Stream<bool> get bufferingStream => _bufferingSubject.stream;

  /// A stream of [IcyMetadata] updates.
  Stream<IcyMetadata> get icyMetadataStream => _icyMetadataSubject.stream;

  /// A stream of buffered positions.
  Stream<Duration> get bufferedPositionStream =>
      _bufferedPositionSubject.stream;

  /// A stream of [FullAudioPlaybackState]s.
  Stream<FullAudioPlaybackState> get fullPlaybackStateStream =>
      _fullPlaybackStateSubject.stream;

  /// A stream periodically tracking the current position of this player.
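  ///
  /// A minimal usage sketch (the UI update is left as a comment):
  ///
  /// ```
  /// player.getPositionStream().listen((position) {
  ///   // Update a seek bar or position label here.
  /// });
  /// ```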
  Stream<Duration> getPositionStream(
          [final Duration period = const Duration(milliseconds: 200)]) =>
      Rx.combineLatest2<AudioPlaybackEvent, void, Duration>(
          playbackEventStream,
          // TODO: emit periodically only in playing state.
          Stream.periodic(period),
          (state, _) => state.position).distinct();

  /// The current volume of the player.
  double get volume => _volume;

  /// The current speed of the player.
  double get speed => _speed;

  /// Whether the player should automatically delay playback in order to
  /// minimize stalling. (iOS 10.0 or later only)
  bool get automaticallyWaitsToMinimizeStalling =>
      _automaticallyWaitsToMinimizeStalling;

  /// Loads audio media from a URL and completes with the duration of that
  /// audio, or null if this call was interrupted by another call to [setUrl],
  /// [setFilePath] or [setAsset].
  ///
  /// On Android, DASH and HLS streams are detected only when the URL's path
  /// has an "mpd" or "m3u8" extension. If the URL does not have such an
  /// extension, you have no control over the server, and you know the type of
  /// the stream in advance, you may as a workaround supply the extension as a
  /// URL fragment, e.g.
  /// https://somewhere.com/somestream?x=etc#.m3u8
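  ///
  /// A sketch of that workaround (the URL above is illustrative only):
  ///
  /// ```
  /// await player.setUrl('https://somewhere.com/somestream?x=etc#.m3u8');
  /// ```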
  Future<Duration> setUrl(final String url) async {
    try {
      _durationFuture = _invokeMethod('setUrl', [url])
          .then((ms) => ms == null ? null : Duration(milliseconds: ms));
      _duration = await _durationFuture;
      _durationSubject.add(_duration);
      return _duration;
    } on PlatformException catch (e) {
      return Future.error(e.message);
    }
  }

  /// Loads audio media from a file and completes with the duration of that
  /// audio, or null if this call was interrupted by another call to [setUrl],
  /// [setFilePath] or [setAsset].
  Future<Duration> setFilePath(final String filePath) => setUrl(
      Platform.isAndroid ? File(filePath).uri.toString() : 'file://$filePath');

  /// Loads audio media from an asset and completes with the duration of that
  /// audio, or null if this call was interrupted by another call to [setUrl],
  /// [setFilePath] or [setAsset].
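  ///
  /// A minimal sketch, assuming a hypothetical asset declared in
  /// pubspec.yaml:
  ///
  /// ```
  /// final duration = await player.setAsset('assets/audio/intro.mp3');
  /// ```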
  Future<Duration> setAsset(final String assetPath) async {
    final file = await _getCacheFile(assetPath);
    this._cacheFile = file;
    if (!file.existsSync()) {
      await file.create(recursive: true);
    }
    await file
        .writeAsBytes((await rootBundle.load(assetPath)).buffer.asUint8List());
    return await setFilePath(file.path);
  }

  /// Gets the file for caching asset media, preserving the asset's extension.
  Future<File> _getCacheFile(final String assetPath) async => File(p.join(
      (await getTemporaryDirectory()).path,
      'just_audio_asset_cache',
      '$_id${p.extension(assetPath)}'));

  /// Clips the audio to the given [start] and [end] timestamps. This method
  /// cannot be called from the [AudioPlaybackState.none] state.
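  ///
  /// For example, to play only the ten seconds starting at 0:10, assuming
  /// media has already been loaded:
  ///
  /// ```
  /// await player.setClip(start: Duration(seconds: 10), end: Duration(seconds: 20));
  /// await player.play();
  /// ```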
  Future<Duration> setClip({Duration start, Duration end}) async {
    _durationFuture =
        _invokeMethod('setClip', [start?.inMilliseconds, end?.inMilliseconds])
            .then((ms) => ms == null ? null : Duration(milliseconds: ms));
    final duration = await _durationFuture;
    _durationSubject.add(duration);
    return duration;
  }

  /// Plays the currently loaded media from the current position. The [Future]
  /// returned by this method completes when playback completes or is paused or
  /// stopped. This method can be called from any state except for:
  ///
  /// * [AudioPlaybackState.connecting]
  /// * [AudioPlaybackState.none]
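  ///
  /// Because the returned [Future] completes only when playback finishes or
  /// is interrupted, both of these styles are valid:
  ///
  /// ```
  /// player.play(); // Fire and forget; do not wait for completion.
  /// await player.play(); // Suspends until paused, stopped or completed.
  /// ```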
  Future<void> play() async {
    switch (playbackState) {
      case AudioPlaybackState.playing:
      case AudioPlaybackState.stopped:
      case AudioPlaybackState.completed:
      case AudioPlaybackState.paused:
        // Update local state immediately so that queries aren't surprised.
        _audioPlaybackEvent = _audioPlaybackEvent.copyWith(
          state: AudioPlaybackState.playing,
        );
        StreamSubscription subscription;
        Completer completer = Completer();
        bool startedPlaying = false;
        subscription = playbackStateStream.listen((state) {
          // TODO: It will be more reliable to let the platform
          // side wait for completion since events on the flutter
          // side can lag behind the platform side.
          if (startedPlaying &&
              (state == AudioPlaybackState.paused ||
                  state == AudioPlaybackState.stopped ||
                  state == AudioPlaybackState.completed)) {
            subscription.cancel();
            completer.complete();
          } else if (state == AudioPlaybackState.playing) {
            startedPlaying = true;
          }
        });
        await _invokeMethod('play');
        await completer.future;
        break;
      default:
        throw Exception(
            "Cannot call play from connecting/none states ($playbackState)");
    }
  }

  /// Pauses the currently playing media. It is legal to invoke this method
  /// only from the [AudioPlaybackState.playing] state.
  Future<void> pause() async {
    switch (playbackState) {
      case AudioPlaybackState.paused:
        break;
      case AudioPlaybackState.playing:
        // Update local state immediately so that queries aren't surprised.
        _audioPlaybackEvent = _audioPlaybackEvent.copyWith(
          state: AudioPlaybackState.paused,
        );
        // TODO: For pause, perhaps modify platform side to ensure new state
        // is broadcast before this method returns.
        await _invokeMethod('pause');
        break;
      default:
        throw Exception(
            "Can call pause only from the playing state ($playbackState)");
    }
  }

  /// Stops the currently playing media such that the next [play] invocation
  /// will start from position 0. It is legal to invoke this method only from
  /// the following states:
  ///
  /// * [AudioPlaybackState.playing]
  /// * [AudioPlaybackState.paused]
  /// * [AudioPlaybackState.completed]
  Future<void> stop() async {
    switch (playbackState) {
      case AudioPlaybackState.stopped:
        break;
      case AudioPlaybackState.connecting:
      case AudioPlaybackState.completed:
      case AudioPlaybackState.playing:
      case AudioPlaybackState.paused:
        // Update local state immediately so that queries aren't surprised.
        // NOTE: Android implementation already handles this.
        // TODO: Do the same for iOS so the line below becomes unnecessary.
        _audioPlaybackEvent = _audioPlaybackEvent.copyWith(
          state: AudioPlaybackState.stopped,
        );
        await _invokeMethod('stop');
        break;
      default:
        throw Exception("Cannot call stop from the none state");
    }
  }

  /// Sets the volume of this player, where 1.0 is normal volume.
  Future<void> setVolume(final double volume) async {
    _volume = volume;
    await _invokeMethod('setVolume', [volume]);
  }

  /// Sets the playback speed of this player, where 1.0 is normal speed.
  Future<void> setSpeed(final double speed) async {
    _speed = speed;
    await _invokeMethod('setSpeed', [speed]);
  }

  /// Sets the automaticallyWaitsToMinimizeStalling option of AVPlayer on iOS
  /// 10.0 or later. Defaults to true. Has no effect on Android.
  Future<void> setAutomaticallyWaitsToMinimizeStalling(
      final bool automaticallyWaitsToMinimizeStalling) async {
    _automaticallyWaitsToMinimizeStalling =
        automaticallyWaitsToMinimizeStalling;
    await _invokeMethod('setAutomaticallyWaitsToMinimizeStalling',
        [automaticallyWaitsToMinimizeStalling]);
  }

  /// Seeks to a particular position. It is legal to invoke this method from
  /// any state except for [AudioPlaybackState.none] and
  /// [AudioPlaybackState.connecting].
  Future<void> seek(final Duration position) async {
    await _invokeMethod('seek', [position.inMilliseconds]);
  }

  /// Releases all resources associated with this player. You must invoke this
  /// after you are done with the player. This method can be invoked from any
  /// state except for:
  ///
  /// * [AudioPlaybackState.none]
  /// * [AudioPlaybackState.connecting]
  Future<void> dispose() async {
    if (this._cacheFile?.existsSync() ?? false) {
      this._cacheFile?.deleteSync();
    }
    await _invokeMethod('dispose');
    await _durationSubject.close();
    await _eventChannelStreamSubscription.cancel();
    await _playbackEventSubject.close();
  }

  Future<dynamic> _invokeMethod(String method, [dynamic args]) async =>
      (await _channel).invokeMethod(method, args);
}

/// Encapsulates the playback state and current position of the player.
class AudioPlaybackEvent {
  /// The current playback state.
  final AudioPlaybackState state;

  /// Whether the player is buffering.
  final bool buffering;

  /// The time of the last position discontinuity, measured as time since the
  /// epoch.
  final Duration updateTime;

  /// The position at [updateTime].
  final Duration updatePosition;

  /// The buffer position.
  final Duration bufferedPosition;

  /// The playback speed.
  final double speed;

  /// The media duration.
  final Duration duration;

  /// The latest ICY metadata, if any.
  final IcyMetadata icyMetadata;

  AudioPlaybackEvent({
    @required this.state,
    @required this.buffering,
    @required this.updateTime,
    @required this.updatePosition,
    @required this.bufferedPosition,
    @required this.speed,
    @required this.duration,
    @required this.icyMetadata,
  });

  AudioPlaybackEvent copyWith({
    AudioPlaybackState state,
    bool buffering,
    Duration updateTime,
    Duration updatePosition,
    Duration bufferedPosition,
    double speed,
    Duration duration,
    IcyMetadata icyMetadata,
  }) =>
      AudioPlaybackEvent(
        state: state ?? this.state,
        buffering: buffering ?? this.buffering,
        updateTime: updateTime ?? this.updateTime,
        updatePosition: updatePosition ?? this.updatePosition,
        bufferedPosition: bufferedPosition ?? this.bufferedPosition,
        speed: speed ?? this.speed,
        duration: duration ?? this.duration,
        icyMetadata: icyMetadata ?? this.icyMetadata,
      );

  /// The current position of the player.
  Duration get position {
    if (state == AudioPlaybackState.playing && !buffering) {
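      // Extrapolate from the last known position: add the wall-clock time
      // elapsed since [updateTime], scaled by the playback speed, and cap
      // the result at the media duration.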
      final result = updatePosition +
          (Duration(milliseconds: DateTime.now().millisecondsSinceEpoch) -
                  updateTime) *
              speed;
      return result <= duration ? result : duration;
    } else {
      return updatePosition;
    }
  }

  @override
  String toString() =>
      "{state=$state, updateTime=$updateTime, updatePosition=$updatePosition, speed=$speed}";
}

/// Enumerates the different playback states of a player.
///
/// If you also need access to the buffering state, use
/// [FullAudioPlaybackState].
enum AudioPlaybackState {
  none,
  stopped,
  paused,
  playing,
  connecting,
  completed,
}

/// Encapsulates the playback state and the buffering state.
///
/// These two states vary orthogonally, and so if [buffering] is true, you can
/// check [state] to determine whether this buffering is occurring during the
/// playing state or the paused state.
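///
/// A sketch of distinguishing the two via
/// [AudioPlayer.fullPlaybackStateStream]:
///
/// ```
/// player.fullPlaybackStateStream.listen((fullState) {
///   if (fullState.buffering && fullState.state == AudioPlaybackState.playing) {
///     // Buffering while nominally playing; show a spinner.
///   }
/// });
/// ```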
class FullAudioPlaybackState {
  final AudioPlaybackState state;
  final bool buffering;
  final IcyMetadata icyMetadata;

  FullAudioPlaybackState(this.state, this.buffering, this.icyMetadata);
}

class IcyInfo {
  final String title;
  final String url;

  IcyInfo({@required this.title, @required this.url});
}

class IcyHeaders {
  final int bitrate;
  final String genre;
  final String name;
  final int metadataInterval;
  final String url;
  final bool isPublic;

  IcyHeaders(
      {@required this.bitrate,
      @required this.genre,
      @required this.name,
      @required this.metadataInterval,
      @required this.url,
      @required this.isPublic});
}

class IcyMetadata {
  final IcyInfo info;
  final IcyHeaders headers;

  IcyMetadata({@required this.info, @required this.headers});
}