Update web implementation to use platform interface.

commit 1be1c212f6 (parent d07d0f358a)
Author: Ryan Heise
Date:   2020-09-27 13:30:30 +10:00

11 changed files with 323 additions and 308 deletions
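
In outline: the web player previously spoke to the frontend over hand-rolled MethodChannels; after this commit it implements the typed request/response API from just_audio_platform_interface and registers itself via JustAudioPlatform.instance. A minimal caller-side sketch of the new flow (not part of this commit; named parameters such as InitRequest's id are assumed from the fields the diff below reads):

    import 'package:just_audio_platform_interface/just_audio_platform_interface.dart';

    // Hypothetical usage: once JustAudioPlugin.registerWith() has set
    // JustAudioPlatform.instance (see the Dart hunks below), the frontend
    // talks to the web player through typed messages instead of invokeMethod().
    Future<void> demo() async {
      // 'player0' is an arbitrary example id.
      final player =
          await JustAudioPlatform.instance.init(InitRequest(id: 'player0'));
      // State arrives as typed PlaybackEventMessage objects rather than maps.
      player.playbackEventMessageStream
          .listen((event) => print(event.processingState));
      await player.play(PlayRequest());
    }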

just_audio/example/lib/generated_plugin_registrant.dart

@@ -6,7 +6,7 @@
 import 'dart:ui';
 
 import 'package:audio_session/audio_session_web.dart';
-import 'package:just_audio/just_audio_web.dart';
+import 'package:just_audio_web/just_audio_web.dart';
 import 'package:flutter_web_plugins/flutter_web_plugins.dart';

just_audio/example/pubspec.lock

@@ -121,6 +121,13 @@ packages:
       relative: true
     source: path
     version: "0.0.1"
+  just_audio_web:
+    dependency: "direct dev"
+    description:
+      path: "../../just_audio_web"
+      relative: true
+    source: path
+    version: "0.0.1"
   matcher:
     dependency: transitive
     description:

just_audio/example/pubspec.yaml

@@ -18,6 +18,8 @@ dev_dependencies:
   just_audio:
     path: ../
+  just_audio_web:
+    path: ../../just_audio_web
 
 # For information on the generic Dart part of this file, see the
 # following page: https://dart.dev/tools/pub/pubspec

just_audio/pubspec.yaml

@@ -1,7 +1,7 @@
 name: just_audio
 description: Flutter plugin to play audio from streams, files, assets, DASH/HLS streams and playlists. Works with audio_service to play audio in the background.
 version: 0.4.5
-homepage: https://github.com/ryanheise/just_audio
+homepage: https://github.com/ryanheise/just_audio/tree/master/just_audio
 
 environment:
   sdk: ">=2.7.0 <3.0.0"
@@ -35,6 +35,8 @@ flutter:
       pluginClass: JustAudioPlugin
     macos:
       pluginClass: JustAudioPlugin
-    web:
-      pluginClass: JustAudioPlugin
-      fileName: just_audio_web.dart
+    # web:
+    #   default_package: just_audio_web
+    # web:
+    #   pluginClass: JustAudioPlugin
+    #   fileName: just_audio_web.dart

just_audio_platform_interface/pubspec.yaml

@@ -1,6 +1,6 @@
 name: just_audio_platform_interface
 description: A common platform interface for the just_audio plugin.
-homepage: https://github.com/ryanheise/just_audio/just_audio_platform_interface
+homepage: https://github.com/ryanheise/just_audio/tree/master/just_audio_platform_interface
 # NOTE: We strongly prefer non-breaking changes, even at the expense of a
 # less-clean API. See https://flutter.dev/go/platform-interface-breaking-changes
 version: 0.0.1

just_audio_web/.gitignore (new file)

@@ -0,0 +1,44 @@
# Miscellaneous
*.class
*.log
*.pyc
*.swp
.DS_Store
.atom/
.buildlog/
.history
.svn/

# IntelliJ related
*.iml
*.ipr
*.iws
.idea/

# The .vscode folder contains launch configuration and tasks you configure in
# VS Code which you may wish to be included in version control, so this line
# is commented out by default.
#.vscode/

# Flutter/Dart/Pub related
**/doc/api/
**/ios/Flutter/.last_build_id
.dart_tool/
.flutter-plugins
.flutter-plugins-dependencies
.packages
.pub-cache/
.pub/
/build/

# Web related
lib/generated_plugin_registrant.dart

# Symbolication related
app.*.symbols

# Obfuscation related
app.*.map.json

# Exceptions to above rules.
!/packages/flutter_tools/test/data/dart_dependencies_test/**/.packages

just_audio_web/.metadata (new file)

@@ -0,0 +1,10 @@
# This file tracks properties of this Flutter project.
# Used by Flutter tool to assess capabilities and perform upgrades etc.
#
# This file should be version controlled and should not be manually edited.

version:
  revision: bbfbf1770cca2da7c82e887e4e4af910034800b6
  channel: stable

project_type: app

just_audio_web/README.md (new file)

@@ -0,0 +1,5 @@
# just_audio_web

The web implementation of [`just_audio`][1].

[1]: ../just_audio

just_audio/lib/just_audio_web.dart → just_audio_web/lib/just_audio_web.dart

@@ -5,127 +5,33 @@ import 'dart:math';
 import 'package:flutter/services.dart';
 import 'package:flutter/widgets.dart';
 import 'package:flutter_web_plugins/flutter_web_plugins.dart';
-import 'package:just_audio/just_audio.dart';
+import 'package:just_audio_platform_interface/just_audio_platform_interface.dart';
 
 final Random _random = Random();
 
-class JustAudioPlugin {
+class JustAudioPlugin extends JustAudioPlatform {
   static void registerWith(Registrar registrar) {
-    final MethodChannel channel = MethodChannel(
-        'com.ryanheise.just_audio.methods',
-        const StandardMethodCodec(),
-        registrar.messenger);
-    final JustAudioPlugin instance = JustAudioPlugin(registrar);
-    channel.setMethodCallHandler(instance.handleMethodCall);
+    JustAudioPlatform.instance = JustAudioPlugin();
   }
 
-  final Registrar registrar;
-
-  JustAudioPlugin(this.registrar);
-
-  Future<dynamic> handleMethodCall(MethodCall call) async {
-    switch (call.method) {
-      case 'init':
-        final String id = call.arguments[0];
-        new Html5AudioPlayer(id: id, registrar: registrar);
-        return null;
-      case 'setIosCategory':
-        return null;
-      default:
-        throw PlatformException(code: 'Unimplemented');
-    }
+  Future<AudioPlayerPlatform> init(InitRequest request) async {
+    return Html5AudioPlayer(id: request.id);
   }
 }
 
-abstract class JustAudioPlayer {
+abstract class JustAudioPlayer extends AudioPlayerPlatform {
   final String id;
-  final Registrar registrar;
-  final MethodChannel methodChannel;
-  final PluginEventChannel eventChannel;
-  final StreamController eventController = StreamController();
-  ProcessingState _processingState = ProcessingState.none;
+  final eventController = StreamController<PlaybackEventMessage>();
+  ProcessingStateMessage _processingState = ProcessingStateMessage.none;
   bool _playing = false;
   int _index;
 
-  JustAudioPlayer({@required this.id, @required this.registrar})
-      : methodChannel = MethodChannel('com.ryanheise.just_audio.methods.$id',
-            const StandardMethodCodec(), registrar.messenger),
-        eventChannel = PluginEventChannel('com.ryanheise.just_audio.events.$id',
-            const StandardMethodCodec(), registrar.messenger) {
-    methodChannel.setMethodCallHandler(_methodHandler);
-    eventChannel.controller = eventController;
-  }
-
-  Future<dynamic> _methodHandler(MethodCall call) async {
-    try {
-      final args = call.arguments;
-      switch (call.method) {
-        case 'load':
-          return await load(args[0]);
-        case 'play':
-          return await play();
-        case 'pause':
-          return await pause();
-        case 'setVolume':
-          return await setVolume(args[0]);
-        case 'setSpeed':
-          return await setSpeed(args[0]);
-        case 'setLoopMode':
-          return await setLoopMode(args[0]);
-        case 'setShuffleModeEnabled':
-          return await setShuffleModeEnabled(args[0]);
-        case 'setAutomaticallyWaitsToMinimizeStalling':
-          return null;
-        case 'seek':
-          return await seek(args[0], args[1]);
-        case 'dispose':
-          return dispose();
-        case 'concatenating.add':
-          return await concatenatingAdd(args[0], args[1]);
-        case "concatenating.insert":
-          return await concatenatingInsert(args[0], args[1], args[2]);
-        case "concatenating.addAll":
-          return await concatenatingAddAll(args[0], args[1]);
-        case "concatenating.insertAll":
-          return await concatenatingInsertAll(args[0], args[1], args[2]);
-        case "concatenating.removeAt":
-          return await concatenatingRemoveAt(args[0], args[1]);
-        case "concatenating.removeRange":
-          return await concatenatingRemoveRange(args[0], args[1], args[2]);
-        case "concatenating.move":
-          return await concatenatingMove(args[0], args[1], args[2]);
-        case "concatenating.clear":
-          return await concatenatingClear(args[0]);
-        case "setAndroidAudioAttributes":
-          return null;
-        default:
-          throw PlatformException(code: 'Unimplemented');
-      }
-    } catch (e, stacktrace) {
-      print("$stacktrace");
-      rethrow;
-    }
-  }
-
-  Future<int> load(Map source);
-
-  Future<void> play();
-
-  Future<void> pause();
-
-  Future<void> setVolume(double volume);
-
-  Future<void> setSpeed(double speed);
-
-  Future<void> setLoopMode(int mode);
-
-  Future<void> setShuffleModeEnabled(bool enabled);
-
-  Future<void> seek(int position, int index);
+  JustAudioPlayer({@required this.id});
 
   @mustCallSuper
-  void dispose() {
+  Future<DisposeResponse> dispose(DisposeRequest request) async {
     eventController.close();
+    return DisposeResponse();
   }
 
   Duration getCurrentPosition();
@@ -134,37 +40,22 @@ abstract class JustAudioPlayer {
 
   Duration getDuration();
 
-  concatenatingAdd(String playerId, Map source);
-
-  concatenatingInsert(String playerId, int index, Map source);
-
-  concatenatingAddAll(String playerId, List sources);
-
-  concatenatingInsertAll(String playerId, int index, List sources);
-
-  concatenatingRemoveAt(String playerId, int index);
-
-  concatenatingRemoveRange(String playerId, int start, int end);
-
-  concatenatingMove(String playerId, int currentIndex, int newIndex);
-
-  concatenatingClear(String playerId);
-
   broadcastPlaybackEvent() {
-    var updateTime = DateTime.now().millisecondsSinceEpoch;
-    eventController.add({
-      'processingState': _processingState.index,
-      'updatePosition': getCurrentPosition()?.inMilliseconds,
-      'updateTime': updateTime,
-      'bufferedPosition': getBufferedPosition()?.inMilliseconds,
+    var updateTime = DateTime.now();
+    eventController.add(PlaybackEventMessage(
+      processingState: _processingState,
+      updatePosition: getCurrentPosition(),
+      updateTime: updateTime,
+      bufferedPosition: getBufferedPosition(),
       // TODO: Icy Metadata
-      'icyMetadata': null,
-      'duration': getDuration()?.inMilliseconds,
-      'currentIndex': _index,
-    });
+      icyMetadata: null,
+      duration: getDuration(),
+      currentIndex: _index,
+      androidAudioSessionId: null,
+    ));
   }
 
-  transition(ProcessingState processingState) {
+  transition(ProcessingStateMessage processingState) {
     _processingState = processingState;
     broadcastPlaybackEvent();
   }
@@ -174,12 +65,11 @@ class Html5AudioPlayer extends JustAudioPlayer {
   AudioElement _audioElement = AudioElement();
   Completer _durationCompleter;
   AudioSourcePlayer _audioSourcePlayer;
-  LoopMode _loopMode = LoopMode.off;
+  LoopModeMessage _loopMode = LoopModeMessage.off;
   bool _shuffleModeEnabled = false;
   final Map<String, AudioSourcePlayer> _audioSourcePlayers = {};
 
-  Html5AudioPlayer({@required String id, @required Registrar registrar})
-      : super(id: id, registrar: registrar) {
+  Html5AudioPlayer({@required String id}) : super(id: id) {
     _audioElement.addEventListener('durationchange', (event) {
       _durationCompleter?.complete();
       broadcastPlaybackEvent();
@@ -194,16 +84,16 @@ class Html5AudioPlayer extends JustAudioPlayer {
       _currentAudioSourcePlayer.timeUpdated(_audioElement.currentTime);
     });
     _audioElement.addEventListener('loadstart', (event) {
-      transition(ProcessingState.buffering);
+      transition(ProcessingStateMessage.buffering);
     });
     _audioElement.addEventListener('waiting', (event) {
-      transition(ProcessingState.buffering);
+      transition(ProcessingStateMessage.buffering);
     });
     _audioElement.addEventListener('stalled', (event) {
-      transition(ProcessingState.buffering);
+      transition(ProcessingStateMessage.buffering);
     });
     _audioElement.addEventListener('canplaythrough', (event) {
-      transition(ProcessingState.ready);
+      transition(ProcessingStateMessage.ready);
     });
     _audioElement.addEventListener('progress', (event) {
       broadcastPlaybackEvent();
@@ -232,9 +122,9 @@ class Html5AudioPlayer extends JustAudioPlayer {
   }
 
   onEnded() async {
-    if (_loopMode == LoopMode.one) {
-      await seek(0, null);
-      play();
+    if (_loopMode == LoopModeMessage.one) {
+      await _seek(0, null);
+      _play();
     } else {
       final order = this.order;
       final orderInv = getInv(order);
@@ -244,25 +134,25 @@ class Html5AudioPlayer extends JustAudioPlayer {
         await _currentAudioSourcePlayer.load();
         // Should always be true...
         if (_playing) {
-          play();
+          _play();
         }
       } else {
         // reached end of playlist
-        if (_loopMode == LoopMode.all) {
+        if (_loopMode == LoopModeMessage.all) {
           // Loop back to the beginning
           if (order.length == 1) {
-            await seek(0, null);
-            play();
+            await _seek(0, null);
+            _play();
           } else {
             _index = order[0];
             await _currentAudioSourcePlayer.load();
             // Should always be true...
             if (_playing) {
-              play();
+              _play();
             }
           }
         } else {
-          transition(ProcessingState.completed);
+          transition(ProcessingStateMessage.completed);
         }
       }
     }
@@ -275,18 +165,22 @@ class Html5AudioPlayer extends JustAudioPlayer {
       : null;
 
   @override
-  Future<int> load(Map source) async {
+  Stream<PlaybackEventMessage> get playbackEventMessageStream =>
+      eventController.stream;
+
+  @override
+  Future<LoadResponse> load(LoadRequest request) async {
     _currentAudioSourcePlayer?.pause();
-    _audioSourcePlayer = getAudioSource(source);
+    _audioSourcePlayer = getAudioSource(request.audioSourceMessage);
     _index = 0;
     if (_shuffleModeEnabled) {
       _audioSourcePlayer?.shuffle(0, _index);
     }
-    return (await _currentAudioSourcePlayer.load())?.inMilliseconds;
+    return LoadResponse(duration: await _currentAudioSourcePlayer.load());
   }
 
   Future<Duration> loadUri(final Uri uri) async {
-    transition(ProcessingState.loading);
+    transition(ProcessingStateMessage.loading);
     final src = uri.toString();
     if (src != _audioElement.src) {
       _durationCompleter = Completer<num>();
@@ -302,7 +196,7 @@ class Html5AudioPlayer extends JustAudioPlayer {
         _durationCompleter = null;
       }
     }
-    transition(ProcessingState.ready);
+    transition(ProcessingStateMessage.ready);
     final seconds = _audioElement.duration;
     return seconds.isFinite
         ? Duration(milliseconds: (seconds * 1000).toInt())
@@ -310,42 +204,58 @@ class Html5AudioPlayer extends JustAudioPlayer {
   }
 
   @override
-  Future<void> play() async {
+  Future<PlayResponse> play(PlayRequest request) async {
+    await _play();
+    return PlayResponse();
+  }
+
+  Future<void> _play() async {
     _playing = true;
     await _currentAudioSourcePlayer.play();
   }
 
   @override
-  Future<void> pause() async {
+  Future<PauseResponse> pause(PauseRequest request) async {
     _playing = false;
     _currentAudioSourcePlayer.pause();
+    return PauseResponse();
   }
 
   @override
-  Future<void> setVolume(double volume) async {
-    _audioElement.volume = volume;
+  Future<SetVolumeResponse> setVolume(SetVolumeRequest request) async {
+    _audioElement.volume = request.volume;
+    return SetVolumeResponse();
   }
 
   @override
-  Future<void> setSpeed(double speed) async {
-    _audioElement.playbackRate = speed;
+  Future<SetSpeedResponse> setSpeed(SetSpeedRequest request) async {
+    _audioElement.playbackRate = request.speed;
+    return SetSpeedResponse();
  }
 
   @override
-  Future<void> setLoopMode(int mode) async {
-    _loopMode = LoopMode.values[mode];
+  Future<SetLoopModeResponse> setLoopMode(SetLoopModeRequest request) async {
+    _loopMode = request.loopMode;
+    return SetLoopModeResponse();
   }
 
   @override
-  Future<void> setShuffleModeEnabled(bool enabled) async {
-    _shuffleModeEnabled = enabled;
-    if (enabled) {
+  Future<SetShuffleModeResponse> setShuffleMode(
+      SetShuffleModeRequest request) async {
+    _shuffleModeEnabled = request.shuffleMode == ShuffleModeMessage.all;
+    if (_shuffleModeEnabled) {
       _audioSourcePlayer?.shuffle(0, _index);
     }
+    return SetShuffleModeResponse();
   }
 
   @override
-  Future<void> seek(int position, int newIndex) async {
+  Future<SeekResponse> seek(SeekRequest request) async {
+    await _seek(request.position.inMilliseconds, request.index);
+    return SeekResponse();
+  }
+
+  Future<void> _seek(int position, int newIndex) async {
     int index = newIndex ?? _index;
     if (index != _index) {
       _currentAudioSourcePlayer.pause();
@@ -363,89 +273,70 @@ class Html5AudioPlayer extends JustAudioPlayer {
   ConcatenatingAudioSourcePlayer _concatenating(String playerId) =>
       _audioSourcePlayers[playerId] as ConcatenatingAudioSourcePlayer;
 
-  concatenatingAdd(String playerId, Map source) {
-    final playlist = _concatenating(playerId);
-    playlist.add(getAudioSource(source));
-  }
-
-  concatenatingInsert(String playerId, int index, Map source) {
-    _concatenating(playerId).insert(index, getAudioSource(source));
-    if (index <= _index) {
-      _index++;
-    }
-  }
-
-  concatenatingAddAll(String playerId, List sources) {
-    _concatenating(playerId).addAll(getAudioSources(sources));
-  }
-
-  concatenatingInsertAll(String playerId, int index, List sources) {
-    _concatenating(playerId).insertAll(index, getAudioSources(sources));
-    if (index <= _index) {
-      _index += sources.length;
+  @override
+  Future<ConcatenatingInsertAllResponse> concatenatingInsertAll(
+      ConcatenatingInsertAllRequest request) async {
+    _concatenating(request.id)
+        .insertAll(request.index, getAudioSources(request.children));
+    if (request.index <= _index) {
+      _index += request.children.length;
     }
+    return ConcatenatingInsertAllResponse();
   }
 
-  concatenatingRemoveAt(String playerId, int index) async {
-    // Pause if removing current item
-    if (_index == index && _playing) {
-      _currentAudioSourcePlayer.pause();
-    }
-    _concatenating(playerId).removeAt(index);
-    if (_index == index) {
-      // Skip backward if there's nothing after this
-      if (index == _audioSourcePlayer.sequence.length) {
-        _index--;
-      }
-      // Resume playback at the new item (if it exists)
-      if (_playing && _currentAudioSourcePlayer != null) {
-        await _currentAudioSourcePlayer.load();
-        _currentAudioSourcePlayer.play();
-      }
-    } else if (index < _index) {
-      // Reflect that the current item has shifted its position
-      _index--;
-    }
-  }
-
-  concatenatingRemoveRange(String playerId, int start, int end) async {
-    if (_index >= start && _index < end && _playing) {
+  @override
+  Future<ConcatenatingRemoveRangeResponse> concatenatingRemoveRange(
+      ConcatenatingRemoveRangeRequest request) async {
+    if (_index >= request.startIndex && _index < request.endIndex && _playing) {
       // Pause if removing current item
       _currentAudioSourcePlayer.pause();
     }
-    _concatenating(playerId).removeRange(start, end);
-    if (_index >= start && _index < end) {
+    _concatenating(request.id)
+        .removeRange(request.startIndex, request.endIndex);
+    if (_index >= request.startIndex && _index < request.endIndex) {
       // Skip backward if there's nothing after this
-      if (start >= _audioSourcePlayer.sequence.length) {
-        _index = start - 1;
+      if (request.startIndex >= _audioSourcePlayer.sequence.length) {
+        _index = request.startIndex - 1;
       } else {
-        _index = start;
+        _index = request.startIndex;
      }
       // Resume playback at the new item (if it exists)
       if (_playing && _currentAudioSourcePlayer != null) {
         await _currentAudioSourcePlayer.load();
         _currentAudioSourcePlayer.play();
       }
-    } else if (end <= _index) {
+    } else if (request.endIndex <= _index) {
       // Reflect that the current item has shifted its position
-      _index -= (end - start);
+      _index -= (request.endIndex - request.startIndex);
     }
+    return ConcatenatingRemoveRangeResponse();
   }
 
-  concatenatingMove(String playerId, int currentIndex, int newIndex) {
-    _concatenating(playerId).move(currentIndex, newIndex);
-    if (currentIndex == _index) {
-      _index = newIndex;
-    } else if (currentIndex < _index && newIndex >= _index) {
+  @override
+  Future<ConcatenatingMoveResponse> concatenatingMove(
+      ConcatenatingMoveRequest request) async {
+    _concatenating(request.id).move(request.currentIndex, request.newIndex);
+    if (request.currentIndex == _index) {
+      _index = request.newIndex;
+    } else if (request.currentIndex < _index && request.newIndex >= _index) {
       _index--;
-    } else if (currentIndex > _index && newIndex <= _index) {
+    } else if (request.currentIndex > _index && request.newIndex <= _index) {
       _index++;
     }
+    return ConcatenatingMoveResponse();
   }
 
-  concatenatingClear(String playerId) {
-    _currentAudioSourcePlayer.pause();
-    _concatenating(playerId).clear();
+  @override
+  Future<SetAndroidAudioAttributesResponse> setAndroidAudioAttributes(
+      SetAndroidAudioAttributesRequest request) async {
+    return SetAndroidAudioAttributesResponse();
+  }
+
+  @override
+  Future<SetAutomaticallyWaitsToMinimizeStallingResponse>
+      setAutomaticallyWaitsToMinimizeStalling(
+          SetAutomaticallyWaitsToMinimizeStallingRequest request) async {
+    return SetAutomaticallyWaitsToMinimizeStallingResponse();
   }
 
   @override
@@ -458,57 +349,56 @@ class Html5AudioPlayer extends JustAudioPlayer {
   Duration getDuration() => _currentAudioSourcePlayer?.duration;
 
   @override
-  void dispose() {
+  Future<DisposeResponse> dispose(DisposeRequest request) async {
     _currentAudioSourcePlayer?.pause();
     _audioElement.removeAttribute('src');
     _audioElement.load();
-    transition(ProcessingState.none);
-    super.dispose();
+    transition(ProcessingStateMessage.none);
+    return await super.dispose(request);
   }
 
-  List<AudioSourcePlayer> getAudioSources(List json) =>
-      json.map((s) => getAudioSource(s)).toList();
+  List<AudioSourcePlayer> getAudioSources(List messages) =>
+      messages.map((message) => getAudioSource(message)).toList();
 
-  AudioSourcePlayer getAudioSource(Map json) {
-    final String id = json['id'];
+  AudioSourcePlayer getAudioSource(AudioSourceMessage audioSourceMessage) {
+    final String id = audioSourceMessage.id;
     var audioSourcePlayer = _audioSourcePlayers[id];
     if (audioSourcePlayer == null) {
-      audioSourcePlayer = decodeAudioSource(json);
+      audioSourcePlayer = decodeAudioSource(audioSourceMessage);
       _audioSourcePlayers[id] = audioSourcePlayer;
     }
     return audioSourcePlayer;
   }
 
-  AudioSourcePlayer decodeAudioSource(Map json) {
+  AudioSourcePlayer decodeAudioSource(AudioSourceMessage audioSourceMessage) {
     try {
-      switch (json['type']) {
-        case 'progressive':
-          return ProgressiveAudioSourcePlayer(
-              this, json['id'], Uri.parse(json['uri']), json['headers']);
-        case "dash":
-          return DashAudioSourcePlayer(
-              this, json['id'], Uri.parse(json['uri']), json['headers']);
-        case "hls":
-          return HlsAudioSourcePlayer(
-              this, json['id'], Uri.parse(json['uri']), json['headers']);
-        case "concatenating":
-          return ConcatenatingAudioSourcePlayer(
-              this,
-              json['id'],
-              getAudioSources(json['audioSources']),
-              json['useLazyPreparation']);
-        case "clipping":
-          return ClippingAudioSourcePlayer(
-              this,
-              json['id'],
-              getAudioSource(json['audioSource']),
-              Duration(milliseconds: json['start']),
-              Duration(milliseconds: json['end']));
-        case "looping":
-          return LoopingAudioSourcePlayer(this, json['id'],
-              getAudioSource(json['audioSource']), json['count']);
-        default:
-          throw Exception("Unknown AudioSource type: " + json['type']);
+      if (audioSourceMessage is ProgressiveAudioSourceMessage) {
+        return ProgressiveAudioSourcePlayer(this, audioSourceMessage.id,
+            Uri.parse(audioSourceMessage.uri), audioSourceMessage.headers);
+      } else if (audioSourceMessage is DashAudioSourceMessage) {
+        return DashAudioSourcePlayer(this, audioSourceMessage.id,
+            Uri.parse(audioSourceMessage.uri), audioSourceMessage.headers);
+      } else if (audioSourceMessage is HlsAudioSourceMessage) {
+        return HlsAudioSourcePlayer(this, audioSourceMessage.id,
+            Uri.parse(audioSourceMessage.uri), audioSourceMessage.headers);
+      } else if (audioSourceMessage is ConcatenatingAudioSourceMessage) {
+        return ConcatenatingAudioSourcePlayer(
+            this,
+            audioSourceMessage.id,
+            getAudioSources(audioSourceMessage.children),
+            audioSourceMessage.useLazyPreparation);
+      } else if (audioSourceMessage is ClippingAudioSourceMessage) {
+        return ClippingAudioSourcePlayer(
+            this,
+            audioSourceMessage.id,
+            getAudioSource(audioSourceMessage.child),
+            audioSourceMessage.start,
+            audioSourceMessage.end);
+      } else if (audioSourceMessage is LoopingAudioSourceMessage) {
+        return LoopingAudioSourcePlayer(this, audioSourceMessage.id,
+            getAudioSource(audioSourceMessage.child), audioSourceMessage.count);
+      } else {
+        throw Exception("Unknown AudioSource type: $audioSourceMessage");
       }
     } catch (e, stacktrace) {
       print("$stacktrace");
@@ -731,29 +621,6 @@ class ConcatenatingAudioSourcePlayer extends AudioSourcePlayer {
     return treeIndex;
   }
 
-  add(AudioSourcePlayer player) {
-    audioSourcePlayers.add(player);
-    _shuffleOrder.add(audioSourcePlayers.length - 1);
-  }
-
-  insert(int index, AudioSourcePlayer player) {
-    audioSourcePlayers.insert(index, player);
-    for (var i = 0; i < audioSourcePlayers.length; i++) {
-      if (_shuffleOrder[i] >= index) {
-        _shuffleOrder[i]++;
-      }
-    }
-    _shuffleOrder.add(index);
-  }
-
-  addAll(List<AudioSourcePlayer> players) {
-    audioSourcePlayers.addAll(players);
-    _shuffleOrder.addAll(
-        List.generate(players.length, (i) => audioSourcePlayers.length + i)
-            .toList()
-              ..shuffle());
-  }
-
   insertAll(int index, List<AudioSourcePlayer> players) {
     audioSourcePlayers.insertAll(index, players);
     for (var i = 0; i < audioSourcePlayers.length; i++) {
@@ -765,18 +632,6 @@ class ConcatenatingAudioSourcePlayer extends AudioSourcePlayer {
         List.generate(players.length, (i) => index + i).toList()..shuffle());
   }
 
-  removeAt(int index) {
-    audioSourcePlayers.removeAt(index);
-    // 0 1 2 3
-    // 3 2 0 1
-    for (var i = 0; i < audioSourcePlayers.length; i++) {
-      if (_shuffleOrder[i] > index) {
-        _shuffleOrder[i]--;
-      }
-    }
-    _shuffleOrder.removeWhere((i) => i == index);
-  }
-
   removeRange(int start, int end) {
     audioSourcePlayers.removeRange(start, end);
     for (var i = 0; i < audioSourcePlayers.length; i++) {
@@ -791,11 +646,6 @@ class ConcatenatingAudioSourcePlayer extends AudioSourcePlayer {
     audioSourcePlayers.insert(
         newIndex, audioSourcePlayers.removeAt(currentIndex));
   }
-
-  clear() {
-    audioSourcePlayers.clear();
-    _shuffleOrder.clear();
-  }
 }
 
 class ClippingAudioSourcePlayer extends IndexedAudioSourcePlayer {
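
The decodeAudioSource() change above is the heart of the migration: string-keyed JSON maps ('type', 'uri', 'headers', ...) become a hierarchy of AudioSourceMessage subclasses dispatched with is-checks. A hedged sketch of driving it from the caller side (constructor parameter names are assumed from the fields the implementation reads; the real signatures live in just_audio_platform_interface):

    import 'package:just_audio_platform_interface/just_audio_platform_interface.dart';

    // Loads one progressive (plain HTTP) source into an already-initialised
    // player and prints the duration the browser reports.
    Future<void> loadTrack(AudioPlayerPlatform player) async {
      final response = await player.load(LoadRequest(
        audioSourceMessage: ProgressiveAudioSourceMessage(
          id: 'track1',                          // example id
          uri: 'https://example.com/track.mp3',  // example URL
          headers: null,
        ),
      ));
      // LoadResponse.duration may be null, e.g. for live streams
      // (see loadUri() in the diff above).
      print(response.duration);
    }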

just_audio_web/pubspec.lock (new file)

@@ -0,0 +1,70 @@
# Generated by pub
# See https://dart.dev/tools/pub/glossary#lockfile
packages:
  characters:
    dependency: transitive
    description:
      name: characters
      url: "https://pub.dartlang.org"
    source: hosted
    version: "1.0.0"
  collection:
    dependency: transitive
    description:
      name: collection
      url: "https://pub.dartlang.org"
    source: hosted
    version: "1.14.13"
  flutter:
    dependency: "direct main"
    description: flutter
    source: sdk
    version: "0.0.0"
  flutter_web_plugins:
    dependency: "direct main"
    description: flutter
    source: sdk
    version: "0.0.0"
  just_audio_platform_interface:
    dependency: "direct main"
    description:
      path: "../just_audio_platform_interface"
      relative: true
    source: path
    version: "0.0.1"
  meta:
    dependency: "direct main"
    description:
      name: meta
      url: "https://pub.dartlang.org"
    source: hosted
    version: "1.1.8"
  plugin_platform_interface:
    dependency: transitive
    description:
      name: plugin_platform_interface
      url: "https://pub.dartlang.org"
    source: hosted
    version: "1.0.2"
  sky_engine:
    dependency: transitive
    description: flutter
    source: sdk
    version: "0.0.99"
  typed_data:
    dependency: transitive
    description:
      name: typed_data
      url: "https://pub.dartlang.org"
    source: hosted
    version: "1.2.0"
  vector_math:
    dependency: transitive
    description:
      name: vector_math
      url: "https://pub.dartlang.org"
    source: hosted
    version: "2.0.8"
sdks:
  dart: ">=2.9.0-14.0.dev <3.0.0"
  flutter: ">=1.12.13+hotfix.5"

just_audio_web/pubspec.yaml (new file)

@@ -0,0 +1,25 @@
name: just_audio_web
description: Web platform implementation of just_audio
homepage: https://github.com/ryanheise/just_audio/tree/master/just_audio_web
version: 0.0.1

flutter:
  plugin:
    platforms:
      web:
        pluginClass: JustAudioPlugin
        fileName: just_audio_web.dart

dependencies:
  #just_audio_platform_interface: ^0.0.1
  just_audio_platform_interface:
    path: ../just_audio_platform_interface
  flutter:
    sdk: flutter
  flutter_web_plugins:
    sdk: flutter
  meta: ^1.1.8

environment:
  sdk: ">=2.7.0 <3.0.0"
  flutter: ">=1.12.13+hotfix.5"
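
One behavioural detail from the Dart hunks worth noting: shuffle crosses the interface as a ShuffleModeMessage enum, which this implementation collapses to a bool, so anything other than ShuffleModeMessage.all means shuffling is off. A hedged sketch, with the named shuffleMode parameter assumed from the field the implementation reads:

    import 'package:just_audio_platform_interface/just_audio_platform_interface.dart';

    // Enables shuffling on an already-initialised player.
    Future<void> enableShuffle(AudioPlayerPlatform player) async {
      await player.setShuffleMode(
          SetShuffleModeRequest(shuffleMode: ShuffleModeMessage.all));
    }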