Separate buffering from playbackState. Improve Android buffering detection.

This commit is contained in:
Ryan Heise 2020-02-05 12:26:21 +11:00
parent 280f1a8208
commit eee50d712e
5 changed files with 84 additions and 104 deletions

View File

@ -35,23 +35,9 @@ public class AudioPlayer implements MethodCallHandler, Player.EventListener {
private final MethodChannel methodChannel;
private final EventChannel eventChannel;
private EventSink eventSink;
private final Handler handler = new Handler();
private final Runnable positionObserver = new Runnable() {
@Override
public void run() {
if (state != PlaybackState.playing && state != PlaybackState.buffering)
return;
if (eventSink != null) {
checkForDiscontinuity();
}
handler.postDelayed(this, 200);
}
};
private final String id;
private volatile PlaybackState state;
private PlaybackState stateBeforeSeek;
private long updateTime;
private long updatePosition;
@ -64,6 +50,7 @@ public class AudioPlayer implements MethodCallHandler, Player.EventListener {
private Result prepareResult;
private Result seekResult;
private boolean seekProcessed;
private boolean buffering;
private MediaSource mediaSource;
private SimpleExoPlayer player;
@ -106,15 +93,19 @@ public class AudioPlayer implements MethodCallHandler, Player.EventListener {
completeSeek();
}
break;
case Player.STATE_BUFFERING:
// TODO: use this instead of checkForDiscontinuity.
break;
case Player.STATE_ENDED:
if (state != PlaybackState.completed) {
transition(PlaybackState.completed);
}
break;
}
final boolean buffering = playbackState == Player.STATE_BUFFERING;
// don't notify buffering if (buffering && state == stopped)
final boolean notifyBuffering = !buffering || state != PlaybackState.stopped;
if (notifyBuffering && (buffering != this.buffering)) {
this.buffering = buffering;
broadcastPlaybackEvent();
}
}
@Override
@ -130,29 +121,10 @@ public class AudioPlayer implements MethodCallHandler, Player.EventListener {
// Finalizes a pending seek: clears the in-flight seek bookkeeping, restores
// the playback state captured before the seek began, and resolves the
// Flutter Result the caller is awaiting.
// NOTE(review): this span interleaves removed and added lines from the diff
// (the stateBeforeSeek handling is deleted by this commit) — confirm against
// the full post-commit file.
// NOTE(review): assumes seekResult and stateBeforeSeek are non-null here,
// i.e. a seek is actually in progress — confirm at call sites.
private void completeSeek() {
seekProcessed = false;
seekPos = null;
// Leave the transient buffering state entered when the seek started.
transition(stateBeforeSeek);
// Resolve the pending platform-channel result before clearing the field.
seekResult.success(null);
stateBeforeSeek = null;
seekResult = null;
}
/**
 * Compares the player's actual position against the position extrapolated
 * from the last broadcast snapshot (updatePosition at updateTime, advanced
 * by the current speed). A large negative drift is treated as a buffering
 * stall; once drift recovers, the player transitions back to playing.
 */
private void checkForDiscontinuity() {
    final long nowMs = System.currentTimeMillis();
    final long actualPosition = getCurrentPosition();
    final long elapsedMs = nowMs - updateTime;
    final long predictedPosition = updatePosition + (long)(elapsedMs * speed);
    final long drift = actualPosition - predictedPosition;
    if (updateTime == 0L) {
        // First observation: no baseline yet, so just broadcast to seed
        // updateTime/updatePosition for the next check.
        broadcastPlaybackEvent();
        return;
    }
    if (drift < -100) {
        // Position fell more than 100ms behind the prediction: assume the
        // player has stalled waiting for data.
        System.out.println("time discontinuity detected: " + drift);
        transition(PlaybackState.buffering);
    } else if (state == PlaybackState.buffering) {
        // Drift is back within tolerance; resume the playing state.
        transition(PlaybackState.playing);
    }
}
@Override
public void onMethodCall(final MethodCall call, final Result result) {
final List<?> args = (List<?>)call.arguments;
@ -219,6 +191,7 @@ public class AudioPlayer implements MethodCallHandler, Player.EventListener {
private void broadcastPlaybackEvent() {
final ArrayList<Object> event = new ArrayList<Object>();
event.add(state.ordinal());
event.add(buffering);
event.add(updatePosition = getCurrentPosition());
event.add(updateTime = System.currentTimeMillis());
eventSink.success(event);
@ -237,9 +210,6 @@ public class AudioPlayer implements MethodCallHandler, Player.EventListener {
/**
 * Moves the player into newState, kicks off the periodic position observer
 * when we are entering the playing state from any other state, and
 * broadcasts the resulting playback event to the Dart side.
 */
private void transition(final PlaybackState newState) {
    final PlaybackState previous = state;
    state = newState;
    final boolean enteredPlaying =
            newState == PlaybackState.playing && previous != PlaybackState.playing;
    if (enteredPlaying) {
        startObservingPosition();
    }
    broadcastPlaybackEvent();
}
@ -282,14 +252,9 @@ public class AudioPlayer implements MethodCallHandler, Player.EventListener {
break;
case stopped:
case completed:
case buffering:
case paused:
transition(PlaybackState.playing);
player.setPlayWhenReady(true);
if (seekResult != null) {
stateBeforeSeek = PlaybackState.playing;
} else {
transition(PlaybackState.playing);
}
break;
default:
throw new IllegalStateException("Cannot call play from connecting/none states (" + state + ")");
@ -301,12 +266,8 @@ public class AudioPlayer implements MethodCallHandler, Player.EventListener {
case paused:
break;
case playing:
case buffering:
player.setPlayWhenReady(false);
transition(PlaybackState.paused);
if (seekResult != null) {
stateBeforeSeek = PlaybackState.paused;
}
break;
default:
throw new IllegalStateException("Can call pause only from playing and buffering states (" + state + ")");
@ -320,18 +281,17 @@ public class AudioPlayer implements MethodCallHandler, Player.EventListener {
break;
case connecting:
abortExistingConnection();
buffering = false;
transition(PlaybackState.stopped);
result.success(null);
break;
case buffering:
abortSeek();
// no break
case completed:
case playing:
case paused:
abortSeek();
player.setPlayWhenReady(false);
player.seekTo(0L);
transition(PlaybackState.stopped);
player.seekTo(0L);
result.success(null);
break;
default:
@ -358,11 +318,6 @@ public class AudioPlayer implements MethodCallHandler, Player.EventListener {
seekPos = position;
seekResult = result;
seekProcessed = false;
if (stateBeforeSeek == null) {
stateBeforeSeek = state;
}
handler.removeCallbacks(positionObserver);
transition(PlaybackState.buffering);
player.seekTo(position);
}
@ -376,7 +331,6 @@ public class AudioPlayer implements MethodCallHandler, Player.EventListener {
seekResult.success(null);
seekResult = null;
seekPos = null;
stateBeforeSeek = null;
seekProcessed = false;
}
}
@ -388,17 +342,11 @@ public class AudioPlayer implements MethodCallHandler, Player.EventListener {
}
}
// (Re)starts the periodic position observer. Removing any queued callback
// first guarantees at most one positionObserver loop is ever active; the
// observer then reschedules itself (see positionObserver's postDelayed).
private void startObservingPosition() {
handler.removeCallbacks(positionObserver);
handler.post(positionObserver);
}
// Player lifecycle states reported to the Dart side. Ordinals are sent over
// the platform channel (broadcastPlaybackEvent calls state.ordinal()), so
// the declaration order must stay in sync with the Dart AudioPlaybackState
// enum — do not reorder.
// NOTE(review): the buffering constant shown here is a removed line in this
// diff view; confirm against the post-commit file.
enum PlaybackState {
none,
stopped,
paused,
playing,
buffering,
connecting,
completed
}

View File

@ -43,27 +43,29 @@ class _MyAppState extends State<MyApp> {
children: [
Text("Science Friday"),
Text("Science Friday and WNYC Studios"),
StreamBuilder<AudioPlaybackState>(
stream: _player.playbackStateStream,
StreamBuilder<FullAudioPlaybackState>(
stream: _player.fullPlaybackStateStream,
builder: (context, snapshot) {
final state = snapshot.data;
final fullState = snapshot.data;
final state = fullState?.state;
final buffering = fullState?.buffering;
return Row(
mainAxisSize: MainAxisSize.min,
children: [
if (state == AudioPlaybackState.playing)
IconButton(
icon: Icon(Icons.pause),
iconSize: 64.0,
onPressed: _player.pause,
)
else if (state == AudioPlaybackState.buffering ||
state == AudioPlaybackState.connecting)
if (state == AudioPlaybackState.connecting ||
buffering == true)
Container(
margin: EdgeInsets.all(8.0),
width: 64.0,
height: 64.0,
child: CircularProgressIndicator(),
)
else if (state == AudioPlaybackState.playing)
IconButton(
icon: Icon(Icons.pause),
iconSize: 64.0,
onPressed: _player.pause,
)
else
IconButton(
icon: Icon(Icons.play_arrow),

View File

@ -11,7 +11,6 @@ enum PlaybackState {
stopped,
paused,
playing,
buffering,
connecting,
completed
};

View File

@ -10,11 +10,11 @@
NSString* _playerId;
AVPlayer* _player;
enum PlaybackState _state;
enum PlaybackState _stateBeforeSeek;
long long _updateTime;
int _updatePosition;
int _seekPos;
FlutterResult _connectionResult;
BOOL _buffering;
id _endObserver;
id _timeObserver;
}
@ -32,9 +32,9 @@
binaryMessenger:[registrar messenger]];
[_eventChannel setStreamHandler:self];
_state = none;
_stateBeforeSeek = none;
_player = nil;
_seekPos = -1;
_buffering = NO;
_endObserver = 0;
_timeObserver = 0;
__weak __typeof__(self) weakSelf = self;
@ -94,7 +94,7 @@
- (void)checkForDiscontinuity {
if (!_eventSink) return;
if (_state != playing && _state != buffering) return;
if ((_state != playing) && !_buffering) return;
long long now = (long long)([[NSDate date] timeIntervalSince1970] * 1000.0);
int position = [self getCurrentPosition];
long long timeSinceLastUpdate = now - _updateTime;
@ -105,9 +105,11 @@
[self broadcastPlaybackEvent];
} else if (drift < -100) {
NSLog(@"time discontinuity detected: %lld", drift);
[self setPlaybackState:buffering];
} else if (_state == buffering) {
[self setPlaybackState:playing];
_buffering = YES;
[self broadcastPlaybackEvent];
} else if (_buffering) {
_buffering = NO;
[self broadcastPlaybackEvent];
}
}
@ -118,6 +120,8 @@
_eventSink(@[
// state
@(_state),
// buffering
@(_buffering),
// updatePosition
@(_updatePosition),
// updateTime
@ -156,14 +160,14 @@
_endObserver = 0;
}
AVPlayerItem *playerItem;
AVPlayerItem *playerItem;
//Allow iOs playing both external links and local files.
if ([url hasPrefix:@"file://"]) {
playerItem = [[AVPlayerItem alloc] initWithURL:[NSURL fileURLWithPath:[url substringFromIndex:7]]];
} else {
playerItem = [[AVPlayerItem alloc] initWithURL:[NSURL URLWithString:url]];
}
if ([url hasPrefix:@"file://"]) {
playerItem = [[AVPlayerItem alloc] initWithURL:[NSURL fileURLWithPath:[url substringFromIndex:7]]];
} else {
playerItem = [[AVPlayerItem alloc] initWithURL:[NSURL URLWithString:url]];
}
[playerItem addObserver:self
forKeyPath:@"status"
@ -292,10 +296,10 @@
}
- (void)seek:(int)position result:(FlutterResult)result {
_stateBeforeSeek = _state;
_seekPos = position;
NSLog(@"seek. enter buffering");
[self setPlaybackState:buffering];
_buffering = YES;
[self broadcastPlaybackEvent];
[_player seekToTime:CMTimeMake(position, 1000)
completionHandler:^(BOOL finished) {
NSLog(@"seek completed");
@ -305,8 +309,8 @@
- (void)onSeekCompletion:(FlutterResult)result {
_seekPos = -1;
[self setPlaybackState:_stateBeforeSeek];
_stateBeforeSeek = none;
_buffering = NO;
[self broadcastPlaybackEvent];
result(nil);
}

View File

@ -34,10 +34,7 @@ import 'package:rxdart/rxdart.dart';
/// * [AudioPlaybackState.stopped]: eventually after [setUrl], [setFilePath],
/// [setAsset] or [setClip] completes, and immediately after [stop].
/// * [AudioPlaybackState.paused]: after [pause].
/// * [AudioPlaybackState.playing]: after [play] and after sufficiently
/// buffering during normal playback.
/// * [AudioPlaybackState.buffering]: immediately after a seek request and
/// during normal playback when the next buffer is not ready to be played.
/// * [AudioPlaybackState.playing]: after [play].
/// * [AudioPlaybackState.connecting]: immediately after [setUrl],
/// [setFilePath] and [setAsset] while waiting for the media to load.
/// * [AudioPlaybackState.completed]: immediately after playback reaches the
@ -64,6 +61,7 @@ class AudioPlayer {
// TODO: also broadcast this event on instantiation.
AudioPlaybackEvent _audioPlaybackEvent = AudioPlaybackEvent(
state: AudioPlaybackState.none,
buffering: false,
updatePosition: Duration.zero,
updateTime: Duration.zero,
speed: 1.0,
@ -77,6 +75,10 @@ class AudioPlayer {
final _playbackStateSubject = BehaviorSubject<AudioPlaybackState>();
final _bufferingSubject = BehaviorSubject<bool>();
final _fullPlaybackStateSubject = BehaviorSubject<FullAudioPlaybackState>();
double _volume = 1.0;
double _speed = 1.0;
@ -90,14 +92,22 @@ class AudioPlayer {
.receiveBroadcastStream()
.map((data) => _audioPlaybackEvent = AudioPlaybackEvent(
state: AudioPlaybackState.values[data[0]],
updatePosition: Duration(milliseconds: data[1]),
updateTime: Duration(milliseconds: data[2]),
buffering: data[1],
updatePosition: Duration(milliseconds: data[2]),
updateTime: Duration(milliseconds: data[3]),
speed: _speed,
));
_eventChannelStreamSubscription =
_eventChannelStream.listen(_playbackEventSubject.add);
_playbackStateSubject
.addStream(playbackEventStream.map((state) => state.state).distinct());
_bufferingSubject.addStream(
playbackEventStream.map((state) => state.buffering).distinct());
_fullPlaybackStateSubject.addStream(
Rx.combineLatest2<AudioPlaybackState, bool, FullAudioPlaybackState>(
playbackStateStream,
bufferingStream,
(state, buffering) => FullAudioPlaybackState(state, buffering)));
}
/// The duration of any media set via [setUrl], [setFilePath] or [setAsset],
@ -121,6 +131,16 @@ class AudioPlayer {
Stream<AudioPlaybackState> get playbackStateStream =>
_playbackStateSubject.stream;
/// Whether the player is buffering.
bool get buffering => _audioPlaybackEvent.buffering;
/// A stream of buffering state changes.
Stream<bool> get bufferingStream => _bufferingSubject.stream;
/// A stream of [FullAudioPlaybackState]s.
Stream<FullAudioPlaybackState> get fullPlaybackStateStream =>
_fullPlaybackStateSubject.stream;
/// A stream periodically tracking the current position of this player.
Stream<Duration> getPositionStream(
[final Duration period = const Duration(milliseconds: 200)]) =>
@ -209,10 +229,7 @@ class AudioPlayer {
}
/// Pauses the currently playing media. It is legal to invoke this method
/// only from the following states:
///
/// * [AudioPlaybackState.playing]
/// * [AudioPlaybackState.buffering]
/// only from the [AudioPlaybackState.playing] state.
Future<void> pause() async {
await _invokeMethod('pause');
}
@ -272,6 +289,9 @@ class AudioPlaybackEvent {
/// The current playback state.
final AudioPlaybackState state;
/// Whether the player is buffering.
final bool buffering;
/// When the last time a position discontinuity happened, as measured in time
/// since the epoch.
final Duration updateTime;
@ -284,6 +304,7 @@ class AudioPlaybackEvent {
AudioPlaybackEvent({
@required this.state,
@required this.buffering,
@required this.updateTime,
@required this.updatePosition,
@required this.speed,
@ -308,7 +329,13 @@ enum AudioPlaybackState {
stopped,
paused,
playing,
buffering,
connecting,
completed,
}
/// An immutable snapshot pairing the player's [AudioPlaybackState] with
/// whether it is currently buffering, as emitted by
/// [AudioPlayer.fullPlaybackStateStream].
class FullAudioPlaybackState {
  /// The current playback state.
  final AudioPlaybackState state;

  /// Whether the player is currently buffering audio.
  final bool buffering;

  /// Creates a snapshot of [state] and [buffering].
  ///
  /// All fields are final, so the constructor is `const` to allow
  /// compile-time instances; existing non-const call sites are unaffected.
  const FullAudioPlaybackState(this.state, this.buffering);
}