Support web, limit position to duration.

commit 78d043b4db (parent 04d7d88751)
Ryan Heise 2020-03-07 13:50:59 +11:00

5 changed files with 288 additions and 67 deletions

lib/just_audio.dart

@@ -54,6 +54,8 @@ class AudioPlayer {
   final int _id;
+  Duration _duration;
   Future<Duration> _durationFuture;
   final _durationSubject = BehaviorSubject<Duration>();
@@ -65,6 +67,7 @@ class AudioPlayer {
     updatePosition: Duration.zero,
     updateTime: Duration.zero,
     speed: 1.0,
+    duration: Duration.zero,
   );

   Stream<AudioPlaybackEvent> _eventChannelStream;
@@ -100,6 +103,7 @@ class AudioPlayer {
               updatePosition: Duration(milliseconds: data[2]),
               updateTime: Duration(milliseconds: data[3]),
               speed: _speed,
+              duration: _duration,
             ));
     _eventChannelStreamSubscription =
         _eventChannelStream.listen(_playbackEventSubject.add);
@@ -152,7 +156,7 @@ class AudioPlayer {
           playbackEventStream,
           // TODO: emit periodically only in playing state.
           Stream.periodic(period),
-          (state, _) => state.position);
+          (state, _) => state.position).distinct();

   /// The current volume of the player.
   double get volume => _volume;
@@ -170,9 +174,9 @@ class AudioPlayer {
   Future<Duration> setUrl(final String url) async {
     _durationFuture = _invokeMethod('setUrl', [url])
         .then((ms) => ms == null ? null : Duration(milliseconds: ms));
-    final duration = await _durationFuture;
-    _durationSubject.add(duration);
-    return duration;
+    _duration = await _durationFuture;
+    _durationSubject.add(_duration);
+    return _duration;
   }

   /// Loads audio media from a file and completes with the duration of that
@@ -197,7 +201,9 @@ class AudioPlayer {
   /// Get file for caching asset media with proper extension
   Future<File> _getCacheFile(final String assetPath) async => File(p.join(
-      (await getTemporaryDirectory()).path, 'just_audio_asset_cache', '$_id${p.extension(assetPath)}'));
+      (await getTemporaryDirectory()).path,
+      'just_audio_asset_cache',
+      '$_id${p.extension(assetPath)}'));

   /// Clip the audio to the given [start] and [end] timestamps. This method
   /// cannot be called from the [AudioPlaybackState.none] state.
@@ -322,21 +328,30 @@ class AudioPlaybackEvent {
   /// The playback speed.
   final double speed;

+  /// The media duration.
+  final Duration duration;
+
   AudioPlaybackEvent({
     @required this.state,
     @required this.buffering,
     @required this.updateTime,
     @required this.updatePosition,
     @required this.speed,
+    @required this.duration,
   });

   /// The current position of the player.
-  Duration get position => state == AudioPlaybackState.playing && !buffering
-      ? updatePosition +
-          (Duration(milliseconds: DateTime.now().millisecondsSinceEpoch) -
-                  updateTime) *
-              speed
-      : updatePosition;
+  Duration get position {
+    if (state == AudioPlaybackState.playing && !buffering) {
+      final result = updatePosition +
+          (Duration(milliseconds: DateTime.now().millisecondsSinceEpoch) -
+                  updateTime) *
+              speed;
+      return result <= duration ? result : duration;
+    } else {
+      return updatePosition;
+    }
+  }

   @override
   String toString() =>
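
With duration now carried on every event, the position getter clamps its
extrapolation so the reported position never runs past the end of the media.
A minimal sketch of the new behaviour (hypothetical values):

final event = AudioPlaybackEvent(
  state: AudioPlaybackState.playing,
  buffering: false,
  updatePosition: Duration(seconds: 9),
  updateTime:
      Duration(milliseconds: DateTime.now().millisecondsSinceEpoch - 5000),
  speed: 1.0,
  duration: Duration(seconds: 10),
);
// Extrapolation alone would put the position near 14s; the getter returns
// the 10s duration instead.
assert(event.position == Duration(seconds: 10));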

lib/just_audio_web.dart (new file)
@@ -0,0 +1,240 @@
import 'dart:async';
import 'dart:html';
import 'package:async/async.dart';
import 'package:flutter/services.dart';
import 'package:flutter/widgets.dart';
import 'package:flutter_web_plugins/flutter_web_plugins.dart';
import 'package:just_audio/just_audio.dart';
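
/// Entry point for the web implementation, registered by the app's generated
/// web plugin registrant; it listens on the top-level method channel and
/// creates one [Html5AudioPlayer] per 'init' call.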
class JustAudioPlugin {
  static void registerWith(Registrar registrar) {
    final MethodChannel channel = MethodChannel(
        'com.ryanheise.just_audio.methods',
        const StandardMethodCodec(),
        registrar.messenger);
    final JustAudioPlugin instance = JustAudioPlugin(registrar);
    channel.setMethodCallHandler(instance.handleMethodCall);
  }

  final Registrar registrar;

  JustAudioPlugin(this.registrar);

  Future<dynamic> handleMethodCall(MethodCall call) async {
    switch (call.method) {
      case 'init':
        final String id = call.arguments[0];
        new Html5AudioPlayer(id: id, registrar: registrar);
        return null;
      default:
        throw PlatformException(code: 'Unimplemented');
    }
  }
}
abstract class JustAudioPlayer {
  final String id;
  final Registrar registrar;
  final MethodChannel methodChannel;
  final PluginEventChannel eventChannel;
  final StreamController eventController = StreamController();
  AudioPlaybackState _state = AudioPlaybackState.none;
  bool _buffering = false;

  JustAudioPlayer({@required this.id, @required this.registrar})
      : methodChannel = MethodChannel('com.ryanheise.just_audio.methods.$id',
            const StandardMethodCodec(), registrar.messenger),
        eventChannel = PluginEventChannel('com.ryanheise.just_audio.events.$id',
            const StandardMethodCodec(), registrar.messenger) {
    methodChannel.setMethodCallHandler(_methodHandler);
    eventChannel.controller = eventController;
  }
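
  // Dispatches calls arriving on this player's method channel to the
  // abstract operations below, which the platform player implements.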
  Future<dynamic> _methodHandler(MethodCall call) async {
    final args = call.arguments;
    switch (call.method) {
      case 'setUrl':
        return await setUrl(args[0]);
      case 'setClip':
        return await setClip(args[0], args[1]);
      case 'play':
        return await play();
      case 'pause':
        return await pause();
      case 'stop':
        return await stop();
      case 'setVolume':
        return await setVolume(args[0]);
      case 'setSpeed':
        return await setSpeed(args[0]);
      case 'seek':
        return await seek(args[0]);
      case 'dispose':
        return await dispose();
      default:
        throw PlatformException(code: 'Unimplemented');
    }
  }
  Future<int> setUrl(final String url);

  Future<void> setClip(int start, int end);

  Future<void> play();

  Future<void> pause();

  Future<void> stop();

  Future<void> setVolume(double volume);

  Future<void> setSpeed(double speed);

  Future<void> seek(int position);

  @mustCallSuper
  Future<void> dispose() async {
    eventController.close();
  }

  double getCurrentPosition();
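
  // Emits [state index, buffering flag, position (ms), update time (ms)];
  // this ordering matches the data[0]..data[3] decoding in just_audio.dart.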
  broadcastPlaybackEvent() {
    var updateTime = DateTime.now().millisecondsSinceEpoch;
    eventController.add([
      _state.index,
      _buffering,
      (getCurrentPosition() * 1000).toInt(),
      updateTime,
    ]);
  }

  transition(AudioPlaybackState state) {
    _state = state;
    broadcastPlaybackEvent();
  }
}
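
/// A [JustAudioPlayer] backed by an HTML5 [AudioElement].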
class Html5AudioPlayer extends JustAudioPlayer {
  AudioElement _audioElement = AudioElement();
  Completer<num> _durationCompleter;
  double _volume = 1.0;
  double _startPos = 0.0;
  double _start = 0.0;
  double _end;
  CancelableOperation _playOperation;

  Html5AudioPlayer({@required String id, @required Registrar registrar})
      : super(id: id, registrar: registrar) {
    _audioElement.addEventListener('durationchange', (event) {
      // 'durationchange' can fire more than once per source, so complete
      // the pending completer only if it is still waiting.
      if (_durationCompleter?.isCompleted == false) {
        _durationCompleter.complete(_audioElement.duration);
      }
    });
    _audioElement.addEventListener('ended', (event) {
      transition(AudioPlaybackState.completed);
    });
    // The HTML5 media events for a seek are 'seeking' and 'seeked'.
    _audioElement.addEventListener('seeking', (event) {
      _buffering = true;
      broadcastPlaybackEvent();
    });
    _audioElement.addEventListener('seeked', (event) {
      _buffering = false;
      broadcastPlaybackEvent();
    });
  }
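
  // Resolves once the element reports its duration, returning it in
  // milliseconds over the method channel.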
  @override
  Future<int> setUrl(final String url) async {
    _interruptPlay();
    transition(AudioPlaybackState.connecting);
    _durationCompleter = Completer<num>();
    _audioElement.src = url;
    _audioElement.preload = 'auto';
    _audioElement.load();
    final duration = await _durationCompleter.future;
    transition(AudioPlaybackState.stopped);
    return (duration * 1000).toInt();
  }
  @override
  Future<void> setClip(int start, int end) async {
    _interruptPlay();
    _start = start / 1000.0;
    _end = end / 1000.0;
    _startPos = _start;
  }
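
  // HTML5 audio has no native clip support, so the clip end is emulated by
  // scheduling a cancelable pause after the remaining clip duration, scaled
  // by the current playbackRate.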
  @override
  Future<void> play() async {
    _interruptPlay();
    final duration = _end == null ? null : _end - _startPos;
    _audioElement.currentTime = _startPos;
    _audioElement.play();
    if (duration != null) {
      _playOperation = CancelableOperation.fromFuture(Future.delayed(Duration(
              milliseconds:
                  (duration * 1000 / _audioElement.playbackRate).toInt())))
          .then((_) {
        pause();
        _playOperation = null;
      });
    }
    transition(AudioPlaybackState.playing);
  }
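
  // Cancels any pending clip-end pause scheduled by play().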
  _interruptPlay() {
    if (_playOperation != null) {
      _playOperation.cancel();
      _playOperation = null;
    }
  }
  @override
  Future<void> pause() async {
    _interruptPlay();
    _startPos = _audioElement.currentTime;
    _audioElement.pause();
    transition(AudioPlaybackState.paused);
  }

  @override
  Future<void> stop() async {
    _interruptPlay();
    _startPos = _start;
    _audioElement.pause();
    _audioElement.currentTime = _start;
    transition(AudioPlaybackState.stopped);
  }

  @override
  Future<void> setVolume(double volume) async {
    _volume = volume;
    _audioElement.volume = volume;
  }

  @override
  Future<void> setSpeed(double speed) async {
    _audioElement.playbackRate = speed;
  }

  @override
  Future<void> seek(int position) async {
    _interruptPlay();
    _startPos = _start + position / 1000.0;
    _audioElement.currentTime = _startPos;
  }

  @override
  double getCurrentPosition() => _audioElement.currentTime;

  @override
  Future<void> dispose() async {
    _interruptPlay();
    _audioElement.pause();
    _audioElement.removeAttribute('src');
    _audioElement.load();
    transition(AudioPlaybackState.none);
    await super.dispose();
  }
}
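
For context, a sketch (not part of this commit) of how an app's generated web
plugin registrant would wire this up, assuming the 2020-era
flutter_web_plugins API:

// generated_plugin_registrant.dart (hypothetical sketch)
import 'package:flutter_web_plugins/flutter_web_plugins.dart';
import 'package:just_audio/just_audio_web.dart';

void registerPlugins(PluginRegistry registry) {
  JustAudioPlugin.registerWith(registry.registrarFor(JustAudioPlugin));
  registry.registerMessageHandler();
}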