Playlists, looping, shuffling for Android and web

This commit is contained in:
Ryan Heise 2020-07-09 13:27:53 +10:00
parent 6f776eee87
commit c0c5d0c2bf
8 changed files with 1795 additions and 379 deletions

View File

@ -1,6 +1,6 @@
# just_audio
A Flutter plugin to play audio from URLs, files, assets and DASH/HLS streams. This plugin can be used with [audio_service](https://pub.dev/packages/audio_service) to play audio in the background and control playback from the lock screen, Android notifications, the iOS Control Center, and headset buttons.
A Flutter plugin to play audio from URLs, files, assets, DASH/HLS streams and playlists. This plugin can be used with [audio_service](https://pub.dev/packages/audio_service) to play audio in the background and control playback from the lock screen, Android notifications, the iOS Control Center, and headset buttons.
## Features
@ -18,6 +18,10 @@ A Flutter plugin to play audio from URLs, files, assets and DASH/HLS streams. Th
| clip audio | ✅ | | | ✅ |
| dispose | ✅ | ✅ | ✅ | ✅ |
| report player errors | ✅ | ✅ | ✅ | ✅ |
| playlists | ✅ | | | ✅ |
| looping | ✅ | | | ✅ |
| shuffle | ✅ | | | ✅ |
| gapless playback | ✅ | | | |
This plugin has been tested on Android and Web, and is being made available for testing on iOS. Please consider reporting any bugs you encounter [here](https://github.com/ryanheise/just_audio/issues) or submitting pull requests [here](https://github.com/ryanheise/just_audio/pulls).
@ -123,7 +127,3 @@ If you wish to connect to non-HTTPS URLS, add the following to your `Info.plist`
<true/>
</dict>
```
## Todo
* Gapless playback
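For reference, the playlist, looping and shuffle APIs added by this commit combine roughly as follows. This is a minimal sketch based on the Dart API introduced in this diff; the URLs are placeholders.

```dart
import 'package:just_audio/just_audio.dart';

Future<void> playShuffledPlaylist() async {
  final player = AudioPlayer();
  // Compose a playlist from individual audio sources.
  final playlist = ConcatenatingAudioSource(audioSources: [
    AudioSource.uri(Uri.parse("https://example.com/track1.mp3")),
    AudioSource.uri(Uri.parse("https://example.com/track2.mp3")),
    AudioSource.uri(Uri.parse("https://example.com/track3.mp3")),
  ]);
  await player.load(playlist);
  // Loop the whole playlist and randomise the playback order.
  await player.setLoopMode(LoopMode.all);
  await player.setShuffleModeEnabled(true);
  await player.play();
}
```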

View File

@ -8,13 +8,18 @@ import com.google.android.exoplayer2.ExoPlaybackException;
import com.google.android.exoplayer2.PlaybackParameters;
import com.google.android.exoplayer2.Player;
import com.google.android.exoplayer2.SimpleExoPlayer;
import com.google.android.exoplayer2.Timeline;
import com.google.android.exoplayer2.metadata.Metadata;
import com.google.android.exoplayer2.metadata.MetadataOutput;
import com.google.android.exoplayer2.metadata.icy.IcyHeaders;
import com.google.android.exoplayer2.metadata.icy.IcyInfo;
import com.google.android.exoplayer2.source.ClippingMediaSource;
import com.google.android.exoplayer2.source.ConcatenatingMediaSource;
import com.google.android.exoplayer2.source.LoopingMediaSource;
import com.google.android.exoplayer2.source.MediaSource;
import com.google.android.exoplayer2.source.ProgressiveMediaSource;
import com.google.android.exoplayer2.source.ShuffleOrder;
import com.google.android.exoplayer2.source.ShuffleOrder.DefaultShuffleOrder;
import com.google.android.exoplayer2.source.TrackGroup;
import com.google.android.exoplayer2.source.TrackGroupArray;
import com.google.android.exoplayer2.source.dash.DashMediaSource;
@ -36,12 +41,18 @@ import io.flutter.plugin.common.MethodChannel.Result;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Random;
import java.util.stream.Collectors;
public class AudioPlayer implements MethodCallHandler, Player.EventListener, MetadataOutput {
static final String TAG = "AudioPlayer";
private static Random random = new Random();
private final Context context;
private final MethodChannel methodChannel;
private final EventChannel eventChannel;
@ -62,11 +73,15 @@ public class AudioPlayer implements MethodCallHandler, Player.EventListener, Met
private boolean seekProcessed;
private boolean buffering;
private boolean justConnected;
private MediaSource mediaSource;
private Map<String, MediaSource> mediaSources = new HashMap<String, MediaSource>();
private IcyInfo icyInfo;
private IcyHeaders icyHeaders;
private SimpleExoPlayer player;
private MediaSource mediaSource;
private Integer currentIndex;
private Map<LoopingMediaSource, MediaSource> loopingChildren = new HashMap<>();
private Map<LoopingMediaSource, Integer> loopingCounts = new HashMap<>();
private final Handler handler = new Handler();
private final Runnable bufferWatcher = new Runnable() {
@Override
@ -152,6 +167,31 @@ public class AudioPlayer implements MethodCallHandler, Player.EventListener, Met
}
}
@Override
public void onPositionDiscontinuity(int reason) {
switch (reason) {
case Player.DISCONTINUITY_REASON_PERIOD_TRANSITION:
case Player.DISCONTINUITY_REASON_SEEK:
onItemMayHaveChanged();
break;
}
}
@Override
public void onTimelineChanged(Timeline timeline, int reason) {
if (reason == Player.TIMELINE_CHANGE_REASON_DYNAMIC) {
onItemMayHaveChanged();
}
}
private void onItemMayHaveChanged() {
Integer newIndex = player.getCurrentWindowIndex();
if (newIndex != currentIndex) {
currentIndex = newIndex;
}
broadcastPlaybackEvent();
}
@Override
public void onPlayerStateChanged(boolean playWhenReady, int playbackState) {
switch (playbackState) {
@ -169,6 +209,7 @@ public class AudioPlayer implements MethodCallHandler, Player.EventListener, Met
break;
case Player.STATE_ENDED:
if (state != PlaybackState.completed) {
player.setPlayWhenReady(false);
transition(PlaybackState.completed);
}
break;
@ -230,19 +271,8 @@ public class AudioPlayer implements MethodCallHandler, Player.EventListener, Met
final List<?> args = (List<?>) call.arguments;
try {
switch (call.method) {
case "setUrl": case "load":
setUrl((String) args.get(0), result); load(getAudioSource(args.get(0)), result);
break;
case "setClip":
Object start = args.get(0);
if (start != null && start instanceof Integer) {
start = new Long((Integer) start);
}
Object end = args.get(1);
if (end != null && end instanceof Integer) {
end = new Long((Integer) end);
}
setClip((Long) start, (Long) end, result);
break;
case "play":
play();
@ -263,17 +293,57 @@ public class AudioPlayer implements MethodCallHandler, Player.EventListener, Met
setSpeed((float) ((double) ((Double) args.get(0))));
result.success(null);
break;
case "setLoopMode":
setLoopMode((Integer) args.get(0));
result.success(null);
break;
case "setShuffleModeEnabled":
setShuffleModeEnabled((Boolean) args.get(0));
result.success(null);
break;
case "setAutomaticallyWaitsToMinimizeStalling": case "setAutomaticallyWaitsToMinimizeStalling":
result.success(null); result.success(null);
break; break;
case "seek": case "seek":
Long position = getLong(args.get(0)); Long position = getLong(args.get(0));
seek(position == null ? C.TIME_UNSET : position, result); Integer index = (Integer)args.get(1);
seek(position == null ? C.TIME_UNSET : position, result, index);
break; break;
case "dispose": case "dispose":
dispose(); dispose();
result.success(null); result.success(null);
break; break;
case "concatenating.add":
concatenating(args.get(0))
.addMediaSource(getAudioSource(args.get(1)), null, () -> result.success(null));
break;
case "concatenating.insert":
concatenating(args.get(0))
.addMediaSource((Integer)args.get(1), getAudioSource(args.get(2)), null, () -> result.success(null));
break;
case "concatenating.addAll":
concatenating(args.get(0))
.addMediaSources(getAudioSources(args.get(1)), null, () -> result.success(null));
break;
case "concatenating.insertAll":
concatenating(args.get(0))
.addMediaSources((Integer)args.get(1), getAudioSources(args.get(2)), null, () -> result.success(null));
break;
case "concatenating.removeAt":
concatenating(args.get(0))
.removeMediaSource((Integer)args.get(1), null, () -> result.success(null));
break;
case "concatenating.removeRange":
concatenating(args.get(0))
.removeMediaSourceRange((Integer)args.get(1), (Integer)args.get(2), null, () -> result.success(null));
break;
case "concatenating.move":
concatenating(args.get(0))
.moveMediaSource((Integer)args.get(1), (Integer)args.get(2), null, () -> result.success(null));
break;
case "concatenating.clear":
concatenating(args.get(0)).clear(null, () -> result.success(null));
break;
default:
result.notImplemented();
break;
@ -287,6 +357,169 @@ public class AudioPlayer implements MethodCallHandler, Player.EventListener, Met
}
}
// Set the shuffle order for mediaSource, with currentIndex at
// the first position. Traverse the tree incrementing index at each
// node.
private int setShuffleOrder(MediaSource mediaSource, int index) {
if (mediaSource instanceof ConcatenatingMediaSource) {
final ConcatenatingMediaSource source = (ConcatenatingMediaSource)mediaSource;
// Find which child is current
Integer currentChildIndex = null;
for (int i = 0; i < source.getSize(); i++) {
final int indexBefore = index;
final MediaSource child = source.getMediaSource(i);
index = setShuffleOrder(child, index);
// If currentIndex falls within this child, make this child come first.
if (currentIndex >= indexBefore && currentIndex < index) {
currentChildIndex = i;
}
}
// Shuffle so that the current child is first in the shuffle order
source.setShuffleOrder(createShuffleOrder(source.getSize(), currentChildIndex));
} else if (mediaSource instanceof LoopingMediaSource) {
final LoopingMediaSource source = (LoopingMediaSource)mediaSource;
// The ExoPlayer API doesn't provide accessors for these so we have
// to index them ourselves.
MediaSource child = loopingChildren.get(source);
int count = loopingCounts.get(source);
for (int i = 0; i < count; i++) {
index = setShuffleOrder(child, index);
}
} else {
// An actual media item takes up one spot in the playlist.
index++;
}
return index;
}
private static int[] shuffle(int length, Integer firstIndex) {
final int[] shuffleOrder = new int[length];
for (int i = 0; i < length; i++) {
final int j = random.nextInt(i + 1);
shuffleOrder[i] = shuffleOrder[j];
shuffleOrder[j] = i;
}
if (firstIndex != null) {
for (int i = 1; i < length; i++) {
if (shuffleOrder[i] == firstIndex) {
final int v = shuffleOrder[0];
shuffleOrder[0] = shuffleOrder[i];
shuffleOrder[i] = v;
break;
}
}
}
return shuffleOrder;
}
// Create a shuffle order optionally fixing the first index.
private ShuffleOrder createShuffleOrder(int length, Integer firstIndex) {
int[] shuffleIndices = shuffle(length, firstIndex);
return new DefaultShuffleOrder(shuffleIndices, random.nextLong());
}
private ConcatenatingMediaSource concatenating(final Object index) {
return (ConcatenatingMediaSource)mediaSources.get((Integer)index);
}
private MediaSource getAudioSource(final Object json) {
Map<?, ?> map = (Map<?, ?>)json;
String id = (String)map.get("id");
MediaSource mediaSource = mediaSources.get(id);
if (mediaSource == null) {
mediaSource = decodeAudioSource(map);
mediaSources.put(id, mediaSource);
}
return mediaSource;
}
private MediaSource decodeAudioSource(final Object json) {
Map<?, ?> map = (Map<?, ?>)json;
String id = (String)map.get("id");
switch ((String)map.get("type")) {
case "progressive":
return new ProgressiveMediaSource.Factory(buildDataSourceFactory())
.setTag(id)
.createMediaSource(Uri.parse((String)map.get("uri")));
case "dash":
return new DashMediaSource.Factory(buildDataSourceFactory())
.setTag(id)
.createMediaSource(Uri.parse((String)map.get("uri")));
case "hls":
return new HlsMediaSource.Factory(buildDataSourceFactory())
.setTag(id)
.createMediaSource(Uri.parse((String)map.get("uri")));
case "concatenating":
List<Object> audioSources = (List<Object>)map.get("audioSources");
return new ConcatenatingMediaSource(
false, // isAtomic
(Boolean)map.get("useLazyPreparation"),
new DefaultShuffleOrder(audioSources.size()),
audioSources
.stream()
.map(s -> getAudioSource(s))
.toArray(MediaSource[]::new));
case "clipping":
Long start = getLong(map.get("start"));
Long end = getLong(map.get("end"));
return new ClippingMediaSource(getAudioSource(map.get("audioSource")),
(start != null ? start : 0) * 1000L,
(end != null ? end : C.TIME_END_OF_SOURCE) * 1000L);
case "looping":
Integer count = (Integer)map.get("count");
MediaSource looperChild = getAudioSource(map.get("audioSource"));
LoopingMediaSource looper = new LoopingMediaSource(looperChild, count);
// TODO: store both in a single map
loopingChildren.put(looper, looperChild);
loopingCounts.put(looper, count);
return looper;
default:
throw new IllegalArgumentException("Unknown AudioSource type: " + map.get("type"));
}
}
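`decodeAudioSource` consumes the maps produced by the Dart side's `toJson()` methods (see the Dart changes further below), keyed by a unique `id` so that each source is only instantiated once. As a rough sketch, with field names taken from the readers above and illustrative UUID-like `id` values, a concatenation of two progressive sources arrives over the platform channel in this shape:

```dart
// Illustrative shape of the message decoded by decodeAudioSource on Android.
final message = {
  'id': 'f3b1c2d4-0000-0000-0000-000000000001', // id of the concatenating source
  'type': 'concatenating',
  'useLazyPreparation': false,
  'audioSources': [
    {
      'id': 'f3b1c2d4-0000-0000-0000-000000000002',
      'type': 'progressive',
      'uri': 'https://example.com/track1.mp3',
      'headers': null,
    },
    {
      'id': 'f3b1c2d4-0000-0000-0000-000000000003',
      'type': 'progressive',
      'uri': 'https://example.com/track2.mp3',
      'headers': null,
    },
  ],
};
```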
private List<MediaSource> getAudioSources(final Object json) {
return ((List<Object>)json)
.stream()
.map(s -> getAudioSource(s))
.collect(Collectors.toList());
}
private DataSource.Factory buildDataSourceFactory() {
String userAgent = Util.getUserAgent(context, "just_audio");
DataSource.Factory httpDataSourceFactory = new DefaultHttpDataSourceFactory(
userAgent,
DefaultHttpDataSource.DEFAULT_CONNECT_TIMEOUT_MILLIS,
DefaultHttpDataSource.DEFAULT_READ_TIMEOUT_MILLIS,
true
);
return new DefaultDataSourceFactory(context, httpDataSourceFactory);
}
private void load(final MediaSource mediaSource, final Result result) {
justConnected = false;
switch (state) {
case none:
break;
case connecting:
abortExistingConnection();
player.stop();
player.setPlayWhenReady(false);
break;
default:
player.stop();
player.setPlayWhenReady(false);
break;
}
prepareResult = result;
transition(PlaybackState.connecting);
if (player.getShuffleModeEnabled()) {
setShuffleOrder(mediaSource, 0);
}
this.mediaSource = mediaSource;
player.prepare(mediaSource);
}
private void ensurePlayerInitialized() {
if (player == null) {
player = new SimpleExoPlayer.Builder(context).build();
@ -296,44 +529,39 @@ public class AudioPlayer implements MethodCallHandler, Player.EventListener, Met
}
private void broadcastPlaybackEvent() {
final ArrayList<Object> event = new ArrayList<Object>();
event.add(state.ordinal());
event.add(buffering);
event.add(updatePosition = getCurrentPosition());
event.add(updateTime = System.currentTimeMillis());
event.add(Math.max(updatePosition, bufferedPosition));
event.add(collectIcyMetadata());
event.add(duration = getDuration());
final Map<String, Object> event = new HashMap<String, Object>();
event.put("state", state.ordinal());
event.put("buffering", buffering);
event.put("updatePosition", updatePosition = getCurrentPosition());
event.put("updateTime", updateTime = System.currentTimeMillis());
event.put("bufferedPosition", Math.max(updatePosition, bufferedPosition));
event.put("icyMetadata", collectIcyMetadata());
event.put("duration", duration = getDuration());
event.put("currentIndex", currentIndex);
if (eventSink != null) {
eventSink.success(event);
}
}
private ArrayList<Object> collectIcyMetadata() {
final ArrayList<Object> icyData = new ArrayList<>();
final ArrayList<String> info;
final ArrayList<Object> headers;
if (icyInfo != null) {
info = new ArrayList<>();
info.add(icyInfo.title);
info.add(icyInfo.url);
} else {
info = new ArrayList<>(Collections.nCopies(2, null));
}
if (icyHeaders != null) {
headers = new ArrayList<>();
headers.add(icyHeaders.bitrate);
headers.add(icyHeaders.genre);
headers.add(icyHeaders.name);
headers.add(icyHeaders.metadataInterval);
headers.add(icyHeaders.url);
headers.add(icyHeaders.isPublic);
} else {
headers = new ArrayList<>(Collections.nCopies(6, null));
}
icyData.add(info);
icyData.add(headers);
return icyData;
}
private Map<String, Object> collectIcyMetadata() {
final Map<String, Object> icyData = new HashMap<>();
if (icyInfo != null) {
final Map<String, String> info = new HashMap<>();
info.put("title", icyInfo.title);
info.put("url", icyInfo.url);
icyData.put("info", info);
}
if (icyHeaders != null) {
final Map<String, Object> headers = new HashMap<>();
headers.put("bitrate", icyHeaders.bitrate);
headers.put("genre", icyHeaders.genre);
headers.put("name", icyHeaders.name);
headers.put("metadataInterval", icyHeaders.metadataInterval);
headers.put("url", icyHeaders.url);
headers.put("isPublic", icyHeaders.isPublic);
icyData.put("headers", headers);
}
return icyData;
}
@ -372,32 +600,6 @@ public class AudioPlayer implements MethodCallHandler, Player.EventListener, Met
broadcastPlaybackEvent();
}
public void setUrl(final String url, final Result result) throws IOException {
justConnected = false;
abortExistingConnection();
prepareResult = result;
transition(PlaybackState.connecting);
String userAgent = Util.getUserAgent(context, "just_audio");
DataSource.Factory httpDataSourceFactory = new DefaultHttpDataSourceFactory(
userAgent,
DefaultHttpDataSource.DEFAULT_CONNECT_TIMEOUT_MILLIS,
DefaultHttpDataSource.DEFAULT_READ_TIMEOUT_MILLIS,
true
);
DataSource.Factory dataSourceFactory = new DefaultDataSourceFactory(context,
httpDataSourceFactory);
Uri uri = Uri.parse(url);
String extension = getLowerCaseExtension(uri);
if (extension.equals("mpd")) {
mediaSource = new DashMediaSource.Factory(dataSourceFactory).createMediaSource(uri);
} else if (extension.equals("m3u8")) {
mediaSource = new HlsMediaSource.Factory(dataSourceFactory).createMediaSource(uri);
} else {
mediaSource = new ProgressiveMediaSource.Factory(dataSourceFactory).createMediaSource(uri);
}
player.prepare(mediaSource);
}
private String getLowerCaseExtension(Uri uri) {
// Until ExoPlayer provides automatic detection of media source types, we
// rely on the file extension. When this is absent, as a temporary
@ -408,23 +610,6 @@ public class AudioPlayer implements MethodCallHandler, Player.EventListener, Met
return filename.replaceAll("^.*\\.", "").toLowerCase();
}
public void setClip(final Long start, final Long end, final Result result) {
if (state == PlaybackState.none) {
throw new IllegalStateException("Cannot call setClip from none state");
}
abortExistingConnection();
this.start = start;
this.end = end;
prepareResult = result;
if (start != null || end != null) {
player.prepare(new ClippingMediaSource(mediaSource,
(start != null ? start : 0) * 1000L,
(end != null ? end : C.TIME_END_OF_SOURCE) * 1000L));
} else {
player.prepare(mediaSource);
}
}
public void play() {
switch (state) {
case playing:
@ -493,7 +678,18 @@ public class AudioPlayer implements MethodCallHandler, Player.EventListener, Met
broadcastPlaybackEvent();
}
public void seek(final long position, final Result result) {
public void setLoopMode(final int mode) {
player.setRepeatMode(mode);
}
public void setShuffleModeEnabled(final boolean enabled) {
if (enabled) {
setShuffleOrder(mediaSource, 0);
}
player.setShuffleModeEnabled(enabled);
}
public void seek(final long position, final Result result, final Integer index) {
if (state == PlaybackState.none || state == PlaybackState.connecting) {
throw new IllegalStateException("Cannot call seek from none/connecting states");
}
@ -501,10 +697,14 @@ public class AudioPlayer implements MethodCallHandler, Player.EventListener, Met
seekPos = position;
seekResult = result;
seekProcessed = false;
player.seekTo(position);
int windowIndex = index != null ? index : player.getCurrentWindowIndex();
player.seekTo(windowIndex, position);
}
public void dispose() {
mediaSources.clear();
mediaSource = null;
loopingChildren.clear();
if (player != null) {
player.release();
player = null;

View File

@ -14,19 +14,57 @@ class _MyAppState extends State<MyApp> {
final _volumeSubject = BehaviorSubject.seeded(1.0);
final _speedSubject = BehaviorSubject.seeded(1.0);
AudioPlayer _player;
ConcatenatingAudioSource _playlist = ConcatenatingAudioSource(audioSources: [
LoopingAudioSource(
count: 2,
audioSource: ClippingAudioSource(
start: Duration(seconds: 60),
end: Duration(seconds: 65),
audioSource: AudioSource.uri(Uri.parse(
"https://s3.amazonaws.com/scifri-episodes/scifri20181123-episode.mp3")),
tag: AudioMetadata(
album: "Science Friday",
title: "A Salute To Head-Scratching Science (5 seconds)",
),
),
),
AudioSource.uri(
Uri.parse(
"https://s3.amazonaws.com/scifri-episodes/scifri20181123-episode.mp3"),
tag: AudioMetadata(
album: "Science Friday",
title: "A Salute To Head-Scratching Science (full)",
),
),
AudioSource.uri(
Uri.parse("https://s3.amazonaws.com/scifri-segments/scifri201711241.mp3"),
tag: AudioMetadata(
album: "Science Friday",
title: "From Cat Rheology To Operatic Incompetence",
),
),
]);
List<IndexedAudioSource> get _sequence => _playlist.sequence;
List<AudioMetadata> get _metadataSequence =>
_sequence.map((s) => s.tag as AudioMetadata).toList();
@override
void initState() {
super.initState();
AudioPlayer.setIosCategory(IosCategory.playback);
_player = AudioPlayer();
_player
.setUrl(
"https://s3.amazonaws.com/scifri-episodes/scifri20181123-episode.mp3")
.catchError((error) {
// catch audio error ex: 404 url, wrong url ...
print(error);
});
_loadAudio();
}
_loadAudio() async {
try {
await _player.load(_playlist);
} catch (e) {
// catch audio error ex: 404 url, wrong url ...
print("$e");
}
}
@override
@ -47,8 +85,21 @@ class _MyAppState extends State<MyApp> {
crossAxisAlignment: CrossAxisAlignment.center,
mainAxisAlignment: MainAxisAlignment.center,
children: [
Text("Science Friday"), StreamBuilder<int>(
Text("Science Friday and WNYC Studios"), stream: _player.currentIndexStream,
builder: (context, snapshot) {
final index = snapshot.data ?? 0;
final metadata = _metadataSequence[index];
return Column(
crossAxisAlignment: CrossAxisAlignment.center,
children: [
Text(metadata.album ?? '',
style: Theme.of(context).textTheme.headline6),
Text(metadata.title ?? ''),
],
);
},
),
StreamBuilder<FullAudioPlaybackState>(
stream: _player.fullPlaybackStateStream,
builder: (context, snapshot) {
@ -141,6 +192,77 @@ class _MyAppState extends State<MyApp> {
},
),
),
Row(
children: [
StreamBuilder<LoopMode>(
stream: _player.loopModeStream,
builder: (context, snapshot) {
final loopMode = snapshot.data ?? LoopMode.off;
const icons = [
Icon(Icons.repeat, color: Colors.grey),
Icon(Icons.repeat, color: Colors.orange),
Icon(Icons.repeat_one, color: Colors.orange),
];
const cycleModes = [
LoopMode.off,
LoopMode.all,
LoopMode.one,
];
final index = cycleModes.indexOf(loopMode);
return IconButton(
icon: icons[index],
onPressed: () {
_player.setLoopMode(cycleModes[
(cycleModes.indexOf(loopMode) + 1) %
cycleModes.length]);
},
);
},
),
Expanded(
child: Text(
"Playlist",
style: Theme.of(context).textTheme.headline6,
textAlign: TextAlign.center,
),
),
StreamBuilder<bool>(
stream: _player.shuffleModeEnabledStream,
builder: (context, snapshot) {
final shuffleModeEnabled = snapshot.data ?? false;
return IconButton(
icon: shuffleModeEnabled
? Icon(Icons.shuffle, color: Colors.orange)
: Icon(Icons.shuffle, color: Colors.grey),
onPressed: () {
_player.setShuffleModeEnabled(!shuffleModeEnabled);
},
);
},
),
],
),
Expanded(
child: StreamBuilder<int>(
stream: _player.currentIndexStream,
builder: (context, snapshot) {
final currentIndex = snapshot.data ?? 0;
return ListView.builder(
itemCount: _metadataSequence.length,
itemBuilder: (context, index) => Material(
color:
index == currentIndex ? Colors.grey.shade300 : null,
child: ListTile(
title: Text(_metadataSequence[index].title),
onTap: () {
_player.seek(Duration.zero, index: index);
},
),
),
);
},
),
),
],
),
),
@ -192,3 +314,10 @@ class _SeekBarState extends State<SeekBar> {
);
}
}
class AudioMetadata {
final String album;
final String title;
AudioMetadata({this.album, this.title});
}

View File

@ -1,20 +1,6 @@
# Generated by pub
# See https://dart.dev/tools/pub/glossary#lockfile
packages:
archive:
dependency: transitive
description:
name: archive
url: "https://pub.dartlang.org"
source: hosted
version: "2.0.13"
args:
dependency: transitive
description:
name: args
url: "https://pub.dartlang.org"
source: hosted
version: "1.6.0"
async:
dependency: transitive
description:
@ -36,6 +22,13 @@ packages:
url: "https://pub.dartlang.org"
source: hosted
version: "1.1.3"
clock:
dependency: transitive
description:
name: clock
url: "https://pub.dartlang.org"
source: hosted
version: "1.0.1"
collection:
dependency: transitive
description:
@ -64,6 +57,13 @@ packages:
url: "https://pub.dartlang.org"
source: hosted
version: "0.1.3"
fake_async:
dependency: transitive
description:
name: fake_async
url: "https://pub.dartlang.org"
source: hosted
version: "1.1.0"
file:
dependency: transitive
description:
@ -86,13 +86,6 @@ packages:
description: flutter
source: sdk
version: "0.0.0"
image:
dependency: transitive
description:
name: image
url: "https://pub.dartlang.org"
source: hosted
version: "2.1.12"
intl:
dependency: transitive
description:
@ -106,7 +99,7 @@ packages:
path: ".."
relative: true
source: path
version: "0.2.1"
version: "0.2.2"
matcher:
dependency: transitive
description:
@ -127,7 +120,7 @@ packages:
name: path
url: "https://pub.dartlang.org"
source: hosted
version: "1.6.4"
version: "1.7.0"
path_provider:
dependency: transitive
description:
@ -156,13 +149,6 @@ packages:
url: "https://pub.dartlang.org"
source: hosted
version: "1.0.2"
petitparser:
dependency: transitive
description:
name: petitparser
url: "https://pub.dartlang.org"
source: hosted
version: "2.4.0"
platform:
dependency: transitive
description:
@ -184,13 +170,6 @@ packages:
url: "https://pub.dartlang.org"
source: hosted
version: "3.0.13"
quiver:
dependency: transitive
description:
name: quiver
url: "https://pub.dartlang.org"
source: hosted
version: "2.1.3"
rxdart:
dependency: "direct main"
description:
@ -244,7 +223,7 @@ packages:
name: test_api
url: "https://pub.dartlang.org"
source: hosted
version: "0.2.15"
version: "0.2.16"
typed_data:
dependency: transitive
description:
@ -252,6 +231,13 @@ packages:
url: "https://pub.dartlang.org"
source: hosted
version: "1.1.6"
uuid:
dependency: transitive
description:
name: uuid
url: "https://pub.dartlang.org"
source: hosted
version: "2.2.0"
vector_math:
dependency: transitive
description:
@ -266,13 +252,6 @@ packages:
url: "https://pub.dartlang.org"
source: hosted
version: "0.1.0"
xml:
dependency: transitive
description:
name: xml
url: "https://pub.dartlang.org"
source: hosted
version: "3.6.1"
sdks:
dart: ">=2.6.0 <3.0.0"
dart: ">=2.7.0 <3.0.0"
flutter: ">=1.12.13+hotfix.5 <2.0.0"

View File

@ -7,6 +7,9 @@ import 'package:flutter/widgets.dart';
import 'package:path/path.dart' as p;
import 'package:path_provider/path_provider.dart';
import 'package:rxdart/rxdart.dart';
import 'package:uuid/uuid.dart';
final _uuid = Uuid();
/// An object to manage playing audio from a URL, a local file or an asset.
///
@ -32,12 +35,12 @@ import 'package:rxdart/rxdart.dart';
/// The [AudioPlayer] instance transitions through different states as follows:
///
/// * [AudioPlaybackState.none]: immediately after instantiation and [dispose].
/// * [AudioPlaybackState.stopped]: eventually after [setUrl], [setFilePath],
/// [setAsset] or [setClip] completes, and immediately after [stop].
/// * [AudioPlaybackState.stopped]: eventually after [load] completes, and
/// immediately after [stop].
/// * [AudioPlaybackState.paused]: after [pause].
/// * [AudioPlaybackState.playing]: after [play].
/// * [AudioPlaybackState.connecting]: immediately after [setUrl],
/// [setFilePath] and [setAsset] while waiting for the media to load.
/// * [AudioPlaybackState.connecting]: immediately after [load] while waiting
/// for the media to load.
/// * [AudioPlaybackState.completed]: immediately after playback reaches the
/// end of the media or the end of the clip.
///
@ -46,8 +49,8 @@ import 'package:rxdart/rxdart.dart';
class AudioPlayer {
static final _mainChannel = MethodChannel('com.ryanheise.just_audio.methods');
static Future<MethodChannel> _init(int id) async {
await _mainChannel.invokeMethod('init', ['$id']);
static Future<MethodChannel> _init(String id) async {
await _mainChannel.invokeMethod('init', [id]);
return MethodChannel('com.ryanheise.just_audio.methods.$id');
}
@ -75,7 +78,7 @@ class AudioPlayer {
_ProxyHttpServer _proxy;
final int _id;
final String _id;
Future<Duration> _durationFuture;
@ -90,15 +93,8 @@ class AudioPlayer {
bufferedPosition: Duration.zero,
speed: 1.0,
duration: null,
icyMetadata: IcyMetadata(
info: IcyInfo(title: null, url: null),
headers: IcyHeaders(
bitrate: null,
genre: null,
name: null,
metadataInterval: null,
url: null,
isPublic: null)),
icyMetadata: null,
currentIndex: null,
);
Stream<AudioPlaybackEvent> _eventChannelStream;
@ -117,47 +113,48 @@ class AudioPlayer {
final _fullPlaybackStateSubject = BehaviorSubject<FullAudioPlaybackState>();
final _currentIndexSubject = BehaviorSubject<int>();
final _loopModeSubject = BehaviorSubject<LoopMode>();
final _shuffleModeEnabledSubject = BehaviorSubject<bool>();
double _volume = 1.0;
double _speed = 1.0;
bool _automaticallyWaitsToMinimizeStalling = true;
File _cacheFile;
AudioSource _audioSource;
Map<String, AudioSource> _audioSources = {};
/// Creates an [AudioPlayer].
factory AudioPlayer() =>
AudioPlayer._internal(DateTime.now().microsecondsSinceEpoch);
factory AudioPlayer() => AudioPlayer._internal(_uuid.v4());
AudioPlayer._internal(this._id) : _channel = _init(_id) {
_eventChannelStream = EventChannel('com.ryanheise.just_audio.events.$_id')
.receiveBroadcastStream()
.map((data) {
final duration = data.length < 7 || data[6] < 0
? null
: Duration(milliseconds: data[6]);
_durationFuture = Future.value(duration);
_durationSubject.add(duration);
return _audioPlaybackEvent = AudioPlaybackEvent(
state: AudioPlaybackState.values[data[0]],
buffering: data[1],
updatePosition: Duration(milliseconds: data[2]),
updateTime: Duration(milliseconds: data[3]),
bufferedPosition: Duration(milliseconds: data[4]),
speed: _speed,
duration: duration,
icyMetadata: data.length < 6 || data[5] == null
? null
: IcyMetadata(
info: IcyInfo(title: data[5][0][0], url: data[5][0][1]),
headers: IcyHeaders(
bitrate: data[5][1][0],
genre: data[5][1][1],
name: data[5][1][2],
metadataInterval: data[5][1][3],
url: data[5][1][4],
isPublic: data[5][1][5])),
);
final duration = (data['duration'] ?? -1) < 0
? null
: Duration(milliseconds: data['duration']);
_durationFuture = Future.value(duration);
_durationSubject.add(duration);
_audioPlaybackEvent = AudioPlaybackEvent(
state: AudioPlaybackState.values[data['state']],
buffering: data['buffering'],
updatePosition: Duration(milliseconds: data['updatePosition']),
updateTime: Duration(milliseconds: data['updateTime']),
bufferedPosition: Duration(milliseconds: data['bufferedPosition']),
speed: _speed,
duration: duration,
icyMetadata: data['icyMetadata'] == null
? null
: IcyMetadata.fromJson(data['icyMetadata']),
currentIndex: data['currentIndex'],
);
return _audioPlaybackEvent;
});
_eventChannelStreamSubscription = _eventChannelStream.listen(
_playbackEventSubject.add,
@ -178,20 +175,21 @@ class AudioPlayer {
.map((state) => state.icyMetadata)
.distinct()
.handleError((err, stack) {/* noop */}));
_fullPlaybackStateSubject.addStream(Rx.combineLatest3<AudioPlaybackState,
bool, IcyMetadata, FullAudioPlaybackState>(
playbackStateStream,
bufferingStream,
icyMetadataStream,
(state, buffering, icyMetadata) =>
FullAudioPlaybackState(state, buffering, icyMetadata)));
_currentIndexSubject.addStream(playbackEventStream
.map((state) => state.currentIndex)
.distinct()
.handleError((err, stack) {/* noop */}));
_fullPlaybackStateSubject.addStream(playbackEventStream
.map((event) => FullAudioPlaybackState(
event.state, event.buffering, event.icyMetadata))
.distinct()
.handleError((err, stack) {/* noop */}));
}
/// The duration of any media set via [setUrl], [setFilePath] or [setAsset],
/// or null otherwise.
/// The duration of any media loaded via [load], or null if unknown.
Future<Duration> get durationFuture => _durationFuture;
/// The duration of any media set via [setUrl], [setFilePath] or [setAsset].
/// The duration of any media loaded via [load].
Stream<Duration> get durationStream => _durationSubject.stream;
/// The latest [AudioPlaybackEvent].
@ -208,6 +206,9 @@ class AudioPlayer {
Stream<AudioPlaybackState> get playbackStateStream =>
_playbackStateSubject.stream;
/// A stream broadcasting the index of the current item.
Stream<int> get currentIndexStream => _currentIndexSubject.stream;
/// Whether the player is buffering.
bool get buffering => _audioPlaybackEvent.buffering;
@ -238,6 +239,13 @@ class AudioPlayer {
Stream.periodic(period),
(state, _) => state.position).distinct();
/// A stream of [LoopMode]s.
Stream<LoopMode> get loopModeStream => _loopModeSubject.stream;
/// A stream of the shuffle mode status.
Stream<bool> get shuffleModeEnabledStream =>
_shuffleModeEnabledSubject.stream;
/// The current volume of the player.
double get volume => _volume;
@ -249,79 +257,94 @@ class AudioPlayer {
bool get automaticallyWaitsToMinimizeStalling =>
_automaticallyWaitsToMinimizeStalling;
/// Loads audio media from a URL and completes with the duration of that
/// audio, or a [PlatformException] if this call was interrupted by another
/// call to [setUrl], [setFilePath], [setAsset] or [stop].
/// Convenience method to load audio from a URL with optional headers,
/// equivalent to:
///
/// ```
/// load(ProgressiveAudioSource(Uri.parse(url), headers: headers));
/// ```
///
///
Future<Duration> setUrl(String url, {Map headers}) =>
load(AudioSource.uri(Uri.parse(url), headers: headers));
/// Convenience method to load audio from a file, equivalent to:
///
/// ```
/// load(ProgressiveAudioSource(Uri.file(filePath)));
/// ```
Future<Duration> setFilePath(String filePath) =>
load(ProgressiveAudioSource(Uri.file(filePath)));
/// Convenience method to load audio from an asset, equivalent to:
///
/// ```
/// load(ProgressiveAudioSource(Uri.parse('asset://$filePath')));
/// ```
Future<Duration> setAsset(String assetPath) =>
load(ProgressiveAudioSource(Uri.parse('asset://$assetPath')));
/// Loads audio from an [AudioSource] and completes with the duration of that
/// audio, or an exception if this call was interrupted by another
/// call to [load], or if for any reason the audio source was unable to be
/// loaded.
///
/// If the duration is unknown, null will be returned.
///
/// On platforms except for the web, the supplied [headers] will be passed
/// with the request. Currently headers are not recursively applied to items
/// within playlist files such as m3u8.
///
/// On Android, DASH and HLS streams are detected only when the URL's path
/// has an "mpd" or "m3u8" extension. If the URL does not have such an
/// extension and you have no control over the server, and you also know the
/// type of the stream in advance, you may as a workaround supply the
/// extension as a URL fragment. e.g.
/// https://somewhere.com/somestream?x=etc#.m3u8
Future<Duration> setUrl(String url, {Map<String, String> headers}) async {
Future<Duration> load(AudioSource source) async {
try {
if (!kIsWeb && headers != null) {
_audioSource = source;
return await _load(source);
} catch (e) {
_audioSource = null;
_audioSources.clear();
rethrow;
}
}
_registerAudioSource(AudioSource source) {
_audioSources[source._id] = source;
}
Future<Duration> _load(AudioSource source) async {
try {
if (!kIsWeb && source._requiresHeaders) {
if (_proxy == null) {
_proxy = _ProxyHttpServer();
await _proxy.start();
}
url = _proxy.addUrl(url, headers);
}
_durationFuture = _invokeMethod('setUrl', [url]).then(
await source._setup(this);
_durationFuture = _invokeMethod('load', [source.toJson()]).then(
(ms) => (ms == null || ms < 0) ? null : Duration(milliseconds: ms));
final duration = await _durationFuture;
_durationSubject.add(duration);
return duration;
} on PlatformException catch (e) {
return Future.error(e.message);
// TODO: Create own exception type.
throw Exception(e.message);
}
}
/// Clips the current [AudioSource] to the given [start] and [end]
/// timestamps. If [start] is null, it will be reset to the start of the
/// original [AudioSource]. If [end] is null, it will be reset to the end of
/// the original [AudioSource]. This method cannot be called from the
/// [AudioPlaybackState.none] state.
Future<Duration> setClip({Duration start, Duration end}) =>
_load(start == null && end == null
? _audioSource
: ClippingAudioSource(
audioSource: _audioSource,
start: start,
end: end,
));
/// Loads audio media from a file and completes with the duration of that
/// audio, or null if this call was interrupted by another call so [setUrl],
/// [setFilePath] or [setAsset].
Future<Duration> setFilePath(final String filePath) => setUrl(
Platform.isAndroid ? File(filePath).uri.toString() : 'file://$filePath');
/// Loads audio media from an asset and completes with the duration of that
/// audio, or null if this call was interrupted by another call so [setUrl],
/// [setFilePath] or [setAsset].
Future<Duration> setAsset(final String assetPath) async {
final file = await _getCacheFile(assetPath);
this._cacheFile = file;
if (!file.existsSync()) {
await file.create(recursive: true);
}
await file
.writeAsBytes((await rootBundle.load(assetPath)).buffer.asUint8List());
return await setFilePath(file.path);
}
/// Get file for caching asset media with proper extension
Future<File> _getCacheFile(final String assetPath) async => File(p.join(
(await getTemporaryDirectory()).path,
'just_audio_asset_cache',
'$_id${p.extension(assetPath)}'));
/// Clip the audio to the given [start] and [end] timestamps. This method
/// cannot be called from the [AudioPlaybackState.none] state.
Future<Duration> setClip({Duration start, Duration end}) async {
_durationFuture =
_invokeMethod('setClip', [start?.inMilliseconds, end?.inMilliseconds])
.then((ms) => (ms == null || ms < 0)
? const Duration(milliseconds: -1)
: Duration(milliseconds: ms));
final duration = await _durationFuture;
_durationSubject.add(duration);
return duration;
}
/// Plays the currently loaded media from the current position. The [Future]
/// returned by this method completes when playback completes or is paused or
@ -426,6 +449,18 @@ class AudioPlayer {
await _invokeMethod('setSpeed', [speed]);
}
/// Sets the [LoopMode].
Future<void> setLoopMode(LoopMode mode) async {
_loopModeSubject.add(mode);
await _invokeMethod('setLoopMode', [mode.index]);
}
/// Sets whether shuffle mode is enabled.
Future<void> setShuffleModeEnabled(bool enabled) async {
_shuffleModeEnabledSubject.add(enabled);
await _invokeMethod('setShuffleModeEnabled', [enabled]);
}
/// Sets automaticallyWaitsToMinimizeStalling for AVPlayer in iOS 10.0 or later, defaults to true.
/// Has no effect on Android clients
Future<void> setAutomaticallyWaitsToMinimizeStalling(
@ -436,12 +471,13 @@ class AudioPlayer {
[automaticallyWaitsToMinimizeStalling]);
}
/// Seeks to a particular position. Specify [null] to seek to the end of live streams.
/// It is legal to invoke this method from
/// any state except for [AudioPlaybackState.none] and
/// [AudioPlaybackState.connecting].
Future<void> seek(final Duration position) async {
await _invokeMethod('seek', [position?.inMilliseconds]);
}
/// Seeks to a particular [position]. If a composition of multiple
/// [AudioSource]s has been loaded, you may also specify [index] to seek to a
/// particular item within that sequence. It is legal to invoke this method
/// from any state except for [AudioPlaybackState.none] and
/// [AudioPlaybackState.connecting].
Future<void> seek(final Duration position, {int index}) async {
await _invokeMethod('seek', [position?.inMilliseconds, index]);
}
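For example, assuming a playlist has been loaded via a [ConcatenatingAudioSource] into a player named `player`, the following illustrative call jumps to the start of the third item:

```dart
// Jump to the start of the third item in the loaded playlist (illustrative).
await player.seek(Duration.zero, index: 2);
```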
/// Release all resources associated with this player. You must invoke this
@ -452,13 +488,15 @@ class AudioPlayer {
/// * [AudioPlaybackState.connecting]
Future<void> dispose() async {
await _invokeMethod('dispose');
if (_cacheFile?.existsSync() == true) {
_cacheFile?.deleteSync();
}
_audioSource = null;
_audioSources.values.forEach((s) => s._dispose());
_audioSources.clear();
_proxy?.stop();
await _durationSubject.close();
await _eventChannelStreamSubscription.cancel();
await _playbackEventSubject.close();
await _loopModeSubject.close();
await _shuffleModeEnabledSubject.close();
}
Future<dynamic> _invokeMethod(String method, [dynamic args]) async =>
@ -491,6 +529,9 @@ class AudioPlaybackEvent {
final IcyMetadata icyMetadata;
/// The index of the currently playing item.
final int currentIndex;
AudioPlaybackEvent({
@required this.state,
@required this.buffering,
@ -500,6 +541,7 @@ class AudioPlaybackEvent {
@required this.speed,
@required this.duration,
@required this.icyMetadata,
@required this.currentIndex,
});
AudioPlaybackEvent copyWith({
@ -511,6 +553,7 @@ class AudioPlaybackEvent {
double speed,
Duration duration,
IcyMetadata icyMetadata,
int currentIndex,
}) =>
AudioPlaybackEvent(
state: state ?? this.state,
@ -521,6 +564,7 @@ class AudioPlaybackEvent {
speed: speed ?? this.speed,
duration: duration ?? this.duration,
icyMetadata: icyMetadata ?? this.icyMetadata,
currentIndex: currentIndex ?? this.currentIndex,
);
/// The current position of the player.
@ -565,6 +609,17 @@ class FullAudioPlaybackState {
final IcyMetadata icyMetadata;
FullAudioPlaybackState(this.state, this.buffering, this.icyMetadata);
@override
int get hashCode =>
icyMetadata.hashCode * (state.index + 1) * (buffering ? 2 : 1);
@override
bool operator ==(dynamic other) =>
other is FullAudioPlaybackState &&
other?.state == state &&
other?.buffering == buffering &&
other?.icyMetadata == icyMetadata;
}
class IcyInfo {
@ -572,6 +627,18 @@ class IcyInfo {
final String url;
IcyInfo({@required this.title, @required this.url});
IcyInfo.fromJson(Map json) : this(title: json['title'], url: json['url']);
@override
String toString() => 'title=$title,url=$url';
@override
int get hashCode => toString().hashCode;
@override
bool operator ==(dynamic other) =>
other is IcyInfo && other?.toString() == toString();
}
class IcyHeaders {
@ -582,13 +649,35 @@ class IcyHeaders {
final String url;
final bool isPublic;
IcyHeaders(
{@required this.bitrate,
@required this.genre,
@required this.name,
@required this.metadataInterval,
@required this.url,
@required this.isPublic});
IcyHeaders({
@required this.bitrate,
@required this.genre,
@required this.name,
@required this.metadataInterval,
@required this.url,
@required this.isPublic,
});
IcyHeaders.fromJson(Map json)
: this(
bitrate: json['bitrate'],
genre: json['genre'],
name: json['name'],
metadataInterval: json['metadataInterval'],
url: json['url'],
isPublic: json['isPublic'],
);
@override
String toString() =>
'bitrate=$bitrate,genre=$genre,name=$name,metadataInterval=$metadataInterval,url=$url,isPublic=$isPublic';
@override
int get hashCode => toString().hashCode;
@override
bool operator ==(dynamic other) =>
other is IcyHeaders && other?.toString() == toString();
}
class IcyMetadata {
@ -596,6 +685,16 @@ class IcyMetadata {
final IcyHeaders headers;
IcyMetadata({@required this.info, @required this.headers});
IcyMetadata.fromJson(Map json)
: this(
info: json['info'] == null ? null : IcyInfo.fromJson(json['info']),
headers: json['headers'] == null ? null : IcyHeaders.fromJson(json['headers']));
@override
int get hashCode => info.hashCode ^ headers.hashCode;
@override
bool operator ==(dynamic other) =>
other is IcyMetadata && other?.info == info && other?.headers == headers;
}
/// The audio session categories on iOS, to be used with
@ -624,17 +723,14 @@ class _ProxyHttpServer {
/// Associate headers with a URL. This may be called only after [start] has
/// completed.
String addUrl(String url, Map<String, String> headers) {
final uri = Uri.parse(url);
final path = _requestKey(uri);
_uriMap[path] = _ProxyRequest(uri, headers);
return uri
.replace(
scheme: 'http',
host: InternetAddress.loopbackIPv4.address,
port: port,
)
.toString();
}
Uri addUrl(Uri url, Map<String, String> headers) {
final path = _requestKey(url);
_uriMap[path] = _ProxyRequest(url, headers);
return url.replace(
scheme: 'http',
host: InternetAddress.loopbackIPv4.address,
port: port,
);
}
/// A unique key for each request that can be processed by this proxy,
@ -730,3 +826,358 @@ class _ProxyRequest {
_ProxyRequest(this.uri, this.headers);
}
/// Specifies a source of audio to be played. Audio sources are composable
/// using the subclasses of this class. The same [AudioSource] instance should
/// not be used simultaneously by more than one [AudioPlayer].
abstract class AudioSource {
final String _id;
AudioPlayer _player;
/// Creates an [AudioSource] from a [Uri] with optional headers by
/// attempting to guess the type of stream. On iOS, this uses Apple's SDK to
/// automatically detect the stream type. On Android, the type of stream will
/// be guessed from the extension.
static AudioSource uri(Uri uri, {Map headers, Object tag}) {
bool hasExtension(Uri uri, String extension) =>
uri.path.toLowerCase().endsWith('.$extension') ||
uri.fragment.toLowerCase().endsWith('.$extension');
if (hasExtension(uri, 'mpd')) {
return DashAudioSource(uri, headers: headers, tag: tag);
} else if (hasExtension(uri, 'm3u8')) {
return HlsAudioSource(uri, headers: headers, tag: tag);
} else {
return ProgressiveAudioSource(uri, headers: headers, tag: tag);
}
}
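Illustrative examples of the detection above (placeholder URLs):

```dart
// The stream type is guessed from the path or fragment extension.
final dash = AudioSource.uri(Uri.parse('https://example.com/stream.mpd')); // DashAudioSource
final hls = AudioSource.uri(Uri.parse('https://example.com/stream.m3u8')); // HlsAudioSource
final mp3 = AudioSource.uri(Uri.parse('https://example.com/track.mp3')); // ProgressiveAudioSource
```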
static AudioSource fromJson(Map json) {
switch (json['type']) {
case 'progressive':
return ProgressiveAudioSource(Uri.parse(json['uri']),
headers: json['headers']);
case "dash":
return DashAudioSource(Uri.parse(json['uri']),
headers: json['headers']);
case "hls":
return HlsAudioSource(Uri.parse(json['uri']), headers: json['headers']);
case "concatenating":
return ConcatenatingAudioSource(
audioSources: (json['audioSources'] as List)
.map((s) => AudioSource.fromJson(s))
.toList());
case "clipping":
return ClippingAudioSource(
audioSource: AudioSource.fromJson(json['audioSource']),
start: Duration(milliseconds: json['start']),
end: Duration(milliseconds: json['end']));
default:
throw Exception("Unknown AudioSource type: " + json['type']);
}
}
AudioSource() : _id = _uuid.v4();
@mustCallSuper
Future<void> _setup(AudioPlayer player) async {
_player = player;
player._registerAudioSource(this);
}
@mustCallSuper
void _dispose() {
_player = null;
}
bool get _requiresHeaders;
List<IndexedAudioSource> get sequence;
Map toJson();
@override
int get hashCode => _id.hashCode;
@override
bool operator ==(dynamic other) => other is AudioSource && other._id == _id;
}
/// An [AudioSource] that can appear in a sequence.
abstract class IndexedAudioSource extends AudioSource {
final Object tag;
IndexedAudioSource(this.tag);
@override
List<IndexedAudioSource> get sequence => [this];
}
abstract class UriAudioSource extends IndexedAudioSource {
final Uri uri;
final Map headers;
final String _type;
Uri _overrideUri;
File _cacheFile;
UriAudioSource(this.uri, {this.headers, Object tag, @required String type})
: _type = type,
super(tag);
@override
Future<void> _setup(AudioPlayer player) async {
await super._setup(player);
if (uri.scheme == 'asset') {
_overrideUri = Uri.file((await _loadAsset(uri.path)).path);
} else if (headers != null) {
_overrideUri = player._proxy.addUrl(uri, headers);
}
}
@override
void _dispose() {
if (_cacheFile?.existsSync() == true) {
_cacheFile?.deleteSync();
}
super._dispose();
}
Future<File> _loadAsset(String assetPath) async {
final file = await _getCacheFile(assetPath);
this._cacheFile = file;
if (!file.existsSync()) {
await file.create(recursive: true);
await file.writeAsBytes(
(await rootBundle.load(assetPath)).buffer.asUint8List());
}
return file;
}
/// Get file for caching asset media with proper extension
Future<File> _getCacheFile(final String assetPath) async => File(p.join(
(await getTemporaryDirectory()).path,
'just_audio_asset_cache',
'${_player._id}_$_id${p.extension(assetPath)}'));
@override
bool get _requiresHeaders => headers != null;
@override
Map toJson() => {
'id': _id,
'type': _type,
'uri': (_overrideUri ?? uri).toString(),
'headers': headers,
};
}
/// An [AudioSource] representing a regular media file such as an MP3 or M4A
/// file. The following URI schemes are supported:
///
/// * file: loads from a local file (provided you give your app permission to
/// access that file).
/// * asset: loads from a Flutter asset (not supported on Web).
/// * http(s): loads from an HTTP(S) resource.
///
/// On platforms except for the web, the supplied [headers] will be passed with
/// the HTTP(S) request.
class ProgressiveAudioSource extends UriAudioSource {
ProgressiveAudioSource(Uri uri, {Map headers, Object tag})
: super(uri, headers: headers, tag: tag, type: 'progressive');
}
/// An [AudioSource] representing a DASH stream.
///
/// On all platforms except the web, the supplied [headers] will be passed with
/// the HTTP(S) request. Currently, headers are not applied recursively.
class DashAudioSource extends UriAudioSource {
DashAudioSource(Uri uri, {Map headers, Object tag})
: super(uri, headers: headers, tag: tag, type: 'dash');
}
/// An [AudioSource] representing an HLS stream.
///
/// On all platforms except the web, the supplied [headers] will be passed with
/// the HTTP(S) request. Currently, headers are not applied recursively.
class HlsAudioSource extends UriAudioSource {
HlsAudioSource(Uri uri, {Map headers, Object tag})
: super(uri, headers: headers, tag: tag, type: 'hls');
}
/// An [AudioSource] representing a concatenation of multiple audio sources to
/// be played in succession. This can be used to create playlists. Audio sources
/// can be dynamically added, removed and reordered while the audio is playing.
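///
/// A minimal sketch of building and editing a playlist (the URLs are
/// illustrative, and it is assumed the player exposes a `load` method for
/// audio sources):
///
/// ```dart
/// final playlist = ConcatenatingAudioSource(audioSources: [
///   ProgressiveAudioSource(Uri.parse('https://example.com/track1.mp3')),
///   ProgressiveAudioSource(Uri.parse('https://example.com/track2.mp3')),
/// ]);
/// await player.load(playlist);
/// // Items can be edited while playing:
/// await playlist.add(
///     ProgressiveAudioSource(Uri.parse('https://example.com/track3.mp3')));
/// ```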
class ConcatenatingAudioSource extends AudioSource {
final List<AudioSource> audioSources;
final bool useLazyPreparation;
ConcatenatingAudioSource({
@required this.audioSources,
this.useLazyPreparation = false,
});
@override
Future<void> _setup(AudioPlayer player) async {
await super._setup(player);
for (var source in audioSources) {
await source._setup(player);
}
}
/// Appends an [AudioSource].
Future<void> add(AudioSource audioSource) async {
audioSources.add(audioSource);
if (_player != null) {
await _player
._invokeMethod('concatenating.add', [_id, audioSource.toJson()]);
}
}
/// Inserts an [AudioSource] at [index].
Future<void> insert(int index, AudioSource audioSource) async {
audioSources.insert(index, audioSource);
if (_player != null) {
await _player._invokeMethod(
'concatenating.insert', [_id, index, audioSource.toJson()]);
}
}
/// Appends multiple [AudioSource]s.
Future<void> addAll(List<AudioSource> audioSources) async {
this.audioSources.addAll(audioSources);
if (_player != null) {
await _player._invokeMethod('concatenating.addAll',
[_id, audioSources.map((s) => s.toJson()).toList()]);
}
}
/// Inserts multiple [AudioSource]s at [index].
Future<void> insertAll(int index, List<AudioSource> audioSources) async {
this.audioSources.insertAll(index, audioSources);
if (_player != null) {
await _player._invokeMethod('concatenating.insertAll',
[_id, index, audioSources.map((s) => s.toJson()).toList()]);
}
}
/// Dynamically removes an [AudioSource] at [index] after this
/// [ConcatenatingAudioSource] has already been loaded.
Future<void> removeAt(int index) async {
audioSources.removeAt(index);
if (_player != null) {
await _player._invokeMethod('concatenating.removeAt', [_id, index]);
}
}
/// Removes a range of [AudioSource]s from index [start] inclusive to [end]
/// exclusive.
Future<void> removeRange(int start, int end) async {
audioSources.removeRange(start, end);
if (_player != null) {
await _player
._invokeMethod('concatenating.removeRange', [_id, start, end]);
}
}
/// Moves an [AudioSource] from [currentIndex] to [newIndex].
Future<void> move(int currentIndex, int newIndex) async {
audioSources.insert(newIndex, audioSources.removeAt(currentIndex));
if (_player != null) {
await _player
._invokeMethod('concatenating.move', [_id, currentIndex, newIndex]);
}
}
/// Removes all [AudioSource]s.
Future<void> clear() async {
audioSources.clear();
if (_player != null) {
await _player._invokeMethod('concatenating.clear', [_id]);
}
}
/// The number of [AudioSource]s.
int get length => audioSources.length;
operator [](int index) => audioSources[index];
@override
List<IndexedAudioSource> get sequence =>
audioSources.expand((s) => s.sequence).toList();
@override
bool get _requiresHeaders =>
audioSources.any((source) => source._requiresHeaders);
@override
Map toJson() => {
'id': _id,
'type': 'concatenating',
'audioSources': audioSources.map((source) => source.toJson()).toList(),
'useLazyPreparation': useLazyPreparation,
};
}
/// An [AudioSource] that clips the audio of a [UriAudioSource] between a
/// certain start and end time.
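///
/// A minimal sketch playing only the portion between 30 and 60 seconds (the
/// URL and clip boundaries are illustrative):
///
/// ```dart
/// final clip = ClippingAudioSource(
///   audioSource:
///       ProgressiveAudioSource(Uri.parse('https://example.com/speech.mp3')),
///   start: Duration(seconds: 30),
///   end: Duration(seconds: 60),
/// );
/// ```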
class ClippingAudioSource extends IndexedAudioSource {
final UriAudioSource audioSource;
final Duration start;
final Duration end;
ClippingAudioSource({
@required this.audioSource,
this.start,
this.end,
Object tag,
}) : super(tag);
@override
Future<void> _setup(AudioPlayer player) async {
await super._setup(player);
await audioSource._setup(player);
}
@override
bool get _requiresHeaders => audioSource._requiresHeaders;
@override
Map toJson() => {
'id': _id,
'type': 'clipping',
'audioSource': audioSource.toJson(),
'start': start?.inMilliseconds,
'end': end?.inMilliseconds,
};
}
/// An [AudioSource] that loops a nested [AudioSource] a specified number of
/// times.
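///
/// A minimal sketch repeating one source three times (the URL and count are
/// illustrative):
///
/// ```dart
/// final repeated = LoopingAudioSource(
///   audioSource:
///       ProgressiveAudioSource(Uri.parse('https://example.com/jingle.mp3')),
///   count: 3,
/// );
/// ```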
class LoopingAudioSource extends AudioSource {
AudioSource audioSource;
final int count;
LoopingAudioSource({
@required this.audioSource,
this.count,
}) : super();
@override
List<IndexedAudioSource> get sequence =>
List.generate(count, (i) => audioSource)
.expand((s) => s.sequence)
.toList();
@override
bool get _requiresHeaders => audioSource._requiresHeaders;
@override
Map toJson() => {
'id': _id,
'type': 'looping',
'audioSource': audioSource.toJson(),
'count': count,
};
}
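/// The looping behaviour of the player: [off] plays the sequence through once,
/// [one] repeats the current item, and [all] repeats the whole sequence.
///
/// A minimal sketch of enabling looping and shuffling, assuming the player
/// exposes `setLoopMode` and `setShuffleModeEnabled` methods:
///
/// ```dart
/// await player.setLoopMode(LoopMode.all);
/// await player.setShuffleModeEnabled(true);
/// ```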
enum LoopMode { off, one, all }

View File

@@ -1,5 +1,6 @@
import 'dart:async'; import 'dart:async';
import 'dart:html'; import 'dart:html';
import 'dart:math';
import 'package:async/async.dart'; import 'package:async/async.dart';
import 'package:flutter/services.dart'; import 'package:flutter/services.dart';
@@ -7,6 +8,8 @@ import 'package:flutter/widgets.dart';
import 'package:flutter_web_plugins/flutter_web_plugins.dart'; import 'package:flutter_web_plugins/flutter_web_plugins.dart';
import 'package:just_audio/just_audio.dart'; import 'package:just_audio/just_audio.dart';
final Random _random = Random();
class JustAudioPlugin { class JustAudioPlugin {
static void registerWith(Registrar registrar) { static void registerWith(Registrar registrar) {
final MethodChannel channel = MethodChannel( final MethodChannel channel = MethodChannel(
@@ -43,6 +46,7 @@ abstract class JustAudioPlayer {
final StreamController eventController = StreamController(); final StreamController eventController = StreamController();
AudioPlaybackState _state = AudioPlaybackState.none; AudioPlaybackState _state = AudioPlaybackState.none;
bool _buffering = false; bool _buffering = false;
int _index;
JustAudioPlayer({@required this.id, @required this.registrar}) JustAudioPlayer({@required this.id, @required this.registrar})
: methodChannel = MethodChannel('com.ryanheise.just_audio.methods.$id', : methodChannel = MethodChannel('com.ryanheise.just_audio.methods.$id',
@@ -54,34 +58,55 @@ abstract class JustAudioPlayer {
} }
Future<dynamic> _methodHandler(MethodCall call) async { Future<dynamic> _methodHandler(MethodCall call) async {
final args = call.arguments; try {
switch (call.method) { final args = call.arguments;
case 'setUrl': switch (call.method) {
return await setUrl(args[0]); case 'load':
case 'setClip': return await load(args[0]);
return await setClip(args[0], args[1]); case 'play':
case 'play': return await play();
return await play(); case 'pause':
case 'pause': return await pause();
return await pause(); case 'stop':
case 'stop': return await stop();
return await stop(); case 'setVolume':
case 'setVolume': return await setVolume(args[0]);
return await setVolume(args[0]); case 'setSpeed':
case 'setSpeed': return await setSpeed(args[0]);
return await setSpeed(args[0]); case 'setLoopMode':
case 'seek': return await setLoopMode(args[0]);
return await seek(args[0]); case 'setShuffleModeEnabled':
case 'dispose': return await setShuffleModeEnabled(args[0]);
return dispose(); case 'seek':
default: return await seek(args[0], args[1]);
throw PlatformException(code: 'Unimplemented'); case 'dispose':
return dispose();
case 'concatenating.add':
return await concatenatingAdd(args[0], args[1]);
case "concatenating.insert":
return await concatenatingInsert(args[0], args[1], args[2]);
case "concatenating.addAll":
return await concatenatingAddAll(args[0], args[1]);
case "concatenating.insertAll":
return await concatenatingInsertAll(args[0], args[1], args[2]);
case "concatenating.removeAt":
return await concatenatingRemoveAt(args[0], args[1]);
case "concatenating.removeRange":
return await concatenatingRemoveRange(args[0], args[1], args[2]);
case "concatenating.move":
return await concatenatingMove(args[0], args[1], args[2]);
case "concatenating.clear":
return await concatenatingClear(args[0]);
default:
throw PlatformException(code: 'Unimplemented');
}
} catch (e, stacktrace) {
print("$stacktrace");
rethrow;
} }
} }
Future<int> setUrl(final String url); Future<int> load(Map source);
Future<void> setClip(int start, int end);
Future<void> play(); Future<void> play();
@@ -93,37 +118,51 @@ abstract class JustAudioPlayer {
Future<void> setSpeed(double speed); Future<void> setSpeed(double speed);
Future<void> seek(int position); Future<void> setLoopMode(int mode);
Future<void> setShuffleModeEnabled(bool enabled);
Future<void> seek(int position, int index);
@mustCallSuper @mustCallSuper
void dispose() { void dispose() {
eventController.close(); eventController.close();
} }
double getCurrentPosition(); Duration getCurrentPosition();
int getCurrentPositionMs() => (getCurrentPosition() * 1000).toInt(); Duration getDuration();
double getDuration(); concatenatingAdd(String playerId, Map source);
int getDurationMs() { concatenatingInsert(String playerId, int index, Map source);
final duration = getDuration();
return duration.isFinite ? (duration * 1000).toInt() : -1; concatenatingAddAll(String playerId, List sources);
}
concatenatingInsertAll(String playerId, int index, List sources);
concatenatingRemoveAt(String playerId, int index);
concatenatingRemoveRange(String playerId, int start, int end);
concatenatingMove(String playerId, int currentIndex, int newIndex);
concatenatingClear(String playerId);
broadcastPlaybackEvent() { broadcastPlaybackEvent() {
var updateTime = DateTime.now().millisecondsSinceEpoch; var updateTime = DateTime.now().millisecondsSinceEpoch;
eventController.add([ eventController.add({
_state.index, 'state': _state.index,
_buffering, 'buffering': _buffering,
getCurrentPositionMs(), 'updatePosition': getCurrentPosition()?.inMilliseconds,
updateTime, 'updateTime': updateTime,
// TODO: buffered position // TODO: buffered position
getCurrentPositionMs(), 'bufferedPosition': getCurrentPosition()?.inMilliseconds,
// TODO: Icy Metadata // TODO: Icy Metadata
null, 'icyMetadata': null,
getDurationMs(), 'duration': getDuration()?.inMilliseconds,
]); 'currentIndex': _index,
});
} }
transition(AudioPlaybackState state) { transition(AudioPlaybackState state) {
@@ -134,22 +173,23 @@ abstract class JustAudioPlayer {
class Html5AudioPlayer extends JustAudioPlayer { class Html5AudioPlayer extends JustAudioPlayer {
AudioElement _audioElement = AudioElement(); AudioElement _audioElement = AudioElement();
Completer<num> _durationCompleter; Completer _durationCompleter;
double _startPos = 0.0; AudioSourcePlayer _audioSourcePlayer;
double _start = 0.0; LoopMode _loopMode = LoopMode.off;
double _end; bool _shuffleModeEnabled = false;
CancelableOperation _playOperation; bool _playing = false;
final Map<String, AudioSourcePlayer> _audioSourcePlayers = {};
Html5AudioPlayer({@required String id, @required Registrar registrar}) Html5AudioPlayer({@required String id, @required Registrar registrar})
: super(id: id, registrar: registrar) { : super(id: id, registrar: registrar) {
_audioElement.addEventListener('durationchange', (event) { _audioElement.addEventListener('durationchange', (event) {
_durationCompleter?.complete(getDuration()); _durationCompleter?.complete();
}); });
_audioElement.addEventListener('error', (event) { _audioElement.addEventListener('error', (event) {
_durationCompleter?.completeError(_audioElement.error); _durationCompleter?.completeError(_audioElement.error);
}); });
_audioElement.addEventListener('ended', (event) { _audioElement.addEventListener('ended', (event) async {
transition(AudioPlaybackState.completed); onEnded();
}); });
_audioElement.addEventListener('seek', (event) { _audioElement.addEventListener('seek', (event) {
_buffering = true; _buffering = true;
@@ -161,72 +201,124 @@ class Html5AudioPlayer extends JustAudioPlayer {
}); });
} }
@override List<int> get order {
Future<int> setUrl(final String url) async { final sequence = _audioSourcePlayer.sequence;
_interruptPlay(); List<int> order = List<int>(sequence.length);
transition(AudioPlaybackState.connecting); if (_shuffleModeEnabled) {
_durationCompleter = Completer<num>(); order = _audioSourcePlayer.shuffleOrder;
_audioElement.src = url; } else {
_audioElement.preload = 'auto'; for (var i = 0; i < order.length; i++) {
_audioElement.load(); order[i] = i;
try { }
await _durationCompleter.future;
} on MediaError catch (e) {
throw PlatformException(code: "${e.code}", message: "Failed to load URL");
} finally {
_durationCompleter = null;
} }
transition(AudioPlaybackState.stopped); return order;
return getDurationMs();
} }
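// Computes the inverse permutation of [order], i.e. orderInv[order[i]] == i,
// giving each item's position within the current play order.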
List<int> getInv(List<int> order) {
List<int> orderInv = List<int>(order.length);
for (var i = 0; i < order.length; i++) {
orderInv[order[i]] = i;
}
return orderInv;
}
onEnded() async {
if (_loopMode == LoopMode.one) {
await seek(0, null);
play();
} else {
final order = this.order;
final orderInv = getInv(order);
if (orderInv[_index] + 1 < order.length) {
// move to next item
_index = order[orderInv[_index] + 1];
await _currentAudioSourcePlayer.load();
// Should always be true...
if (_playing) {
play();
}
} else {
// reached end of playlist
if (_loopMode == LoopMode.all) {
// Loop back to the beginning
if (order.length == 1) {
await seek(0, null);
await play();
} else {
_index = order[0];
await _currentAudioSourcePlayer.load();
// Should always be true...
if (_playing) {
await play();
}
}
} else {
_playing = false;
transition(AudioPlaybackState.completed);
}
}
}
}
// TODO: Improve efficiency.
IndexedAudioSourcePlayer get _currentAudioSourcePlayer =>
_audioSourcePlayer != null && _index < _audioSourcePlayer.sequence.length
? _audioSourcePlayer.sequence[_index]
: null;
@override @override
Future<void> setClip(int start, int end) async { Future<int> load(Map source) async {
_interruptPlay(); _currentAudioSourcePlayer?.pause();
_start = start / 1000.0; _audioSourcePlayer = getAudioSource(source);
_end = end / 1000.0; _index = 0;
_startPos = _start; if (_shuffleModeEnabled) {
_audioSourcePlayer?.shuffle(0, _index);
}
return (await _currentAudioSourcePlayer.load()).inMilliseconds;
}
Future<Duration> loadUri(final Uri uri) async {
transition(AudioPlaybackState.connecting);
final src = uri.toString();
if (src != _audioElement.src) {
_durationCompleter = Completer<num>();
_audioElement.src = src;
_audioElement.preload = 'auto';
_audioElement.load();
try {
await _durationCompleter.future;
} on MediaError catch (e) {
throw PlatformException(
code: "${e.code}", message: "Failed to load URL");
} finally {
_durationCompleter = null;
}
}
transition(AudioPlaybackState.stopped);
final seconds = _audioElement.duration;
return seconds.isFinite
? Duration(milliseconds: (seconds * 1000).toInt())
: null;
} }
@override @override
Future<void> play() async { Future<void> play() async {
_interruptPlay(); _playing = true;
final duration = _end == null ? null : _end - _startPos; _currentAudioSourcePlayer.play();
_audioElement.currentTime = _startPos;
_audioElement.play();
if (duration != null) {
_playOperation = CancelableOperation.fromFuture(Future.delayed(Duration(
milliseconds: duration * 1000 ~/ _audioElement.playbackRate)))
.then((_) {
pause();
_playOperation = null;
});
}
transition(AudioPlaybackState.playing); transition(AudioPlaybackState.playing);
} }
_interruptPlay() {
if (_playOperation != null) {
_playOperation.cancel();
_playOperation = null;
}
}
@override @override
Future<void> pause() async { Future<void> pause() async {
_interruptPlay(); _playing = false;
_startPos = _audioElement.currentTime; _currentAudioSourcePlayer.pause();
_audioElement.pause();
transition(AudioPlaybackState.paused); transition(AudioPlaybackState.paused);
} }
@override @override
Future<void> stop() async { Future<void> stop() async {
_interruptPlay(); _playing = false;
_startPos = _start; _currentAudioSourcePlayer.stop();
_audioElement.pause();
_audioElement.currentTime = _start;
transition(AudioPlaybackState.stopped); transition(AudioPlaybackState.stopped);
} }
@@ -241,25 +333,568 @@ class Html5AudioPlayer extends JustAudioPlayer {
} }
@override @override
Future<void> seek(int position) async { Future<void> setLoopMode(int mode) async {
_interruptPlay(); _loopMode = LoopMode.values[mode];
_startPos = _start + position / 1000.0;
_audioElement.currentTime = _startPos;
} }
@override @override
double getCurrentPosition() => _audioElement.currentTime; Future<void> setShuffleModeEnabled(bool enabled) async {
_shuffleModeEnabled = enabled;
if (enabled) {
_audioSourcePlayer?.shuffle(0, _index);
}
}
@override @override
double getDuration() => _audioElement.duration; Future<void> seek(int position, int newIndex) async {
int index = newIndex ?? _index;
if (index != _index) {
_currentAudioSourcePlayer.pause();
_index = index;
await _currentAudioSourcePlayer.load();
await _currentAudioSourcePlayer.seek(position);
if (_playing) {
await play();
}
} else {
await _currentAudioSourcePlayer.seek(position);
}
}
ConcatenatingAudioSourcePlayer _concatenating(String playerId) =>
_audioSourcePlayers[playerId] as ConcatenatingAudioSourcePlayer;
concatenatingAdd(String playerId, Map source) {
final playlist = _concatenating(playerId);
playlist.add(getAudioSource(source));
}
concatenatingInsert(String playerId, int index, Map source) {
_concatenating(playerId).insert(index, getAudioSource(source));
if (index <= _index) {
_index++;
}
}
concatenatingAddAll(String playerId, List sources) {
_concatenating(playerId).addAll(getAudioSources(sources));
}
concatenatingInsertAll(String playerId, int index, List sources) {
_concatenating(playerId).insertAll(index, getAudioSources(sources));
if (index <= _index) {
_index += sources.length;
}
}
concatenatingRemoveAt(String playerId, int index) async {
// Pause if removing current item
if (_index == index && _playing) {
_currentAudioSourcePlayer.pause();
}
_concatenating(playerId).removeAt(index);
if (_index == index) {
// Skip backward if there's nothing after this
if (index == _audioSourcePlayer.sequence.length) {
_index--;
}
// Resume playback at the new item (if it exists)
if (_playing && _currentAudioSourcePlayer != null) {
await _currentAudioSourcePlayer.load();
_currentAudioSourcePlayer.play();
}
} else if (index < _index) {
// Reflect that the current item has shifted its position
_index--;
}
}
concatenatingRemoveRange(String playerId, int start, int end) async {
if (_index >= start && _index < end && _playing) {
// Pause if removing current item
_currentAudioSourcePlayer.pause();
}
_concatenating(playerId).removeRange(start, end);
if (_index >= start && _index < end) {
// Skip backward if there's nothing after this
if (start >= _audioSourcePlayer.sequence.length) {
_index = start - 1;
} else {
_index = start;
}
// Resume playback at the new item (if it exists)
if (_playing && _currentAudioSourcePlayer != null) {
await _currentAudioSourcePlayer.load();
_currentAudioSourcePlayer.play();
}
} else if (end <= _index) {
// Reflect that the current item has shifted its position
_index -= (end - start);
}
}
concatenatingMove(String playerId, int currentIndex, int newIndex) {
_concatenating(playerId).move(currentIndex, newIndex);
if (currentIndex == _index) {
_index = newIndex;
} else if (currentIndex < _index && newIndex >= _index) {
_index--;
} else if (currentIndex > _index && newIndex <= _index) {
_index++;
}
}
concatenatingClear(String playerId) {
_currentAudioSourcePlayer.stop();
_concatenating(playerId).clear();
}
@override
Duration getCurrentPosition() => _currentAudioSourcePlayer?.position;
@override
Duration getDuration() => _currentAudioSourcePlayer?.duration;
@override @override
void dispose() { void dispose() {
_interruptPlay(); _currentAudioSourcePlayer?.pause();
_audioElement.pause();
_audioElement.removeAttribute('src'); _audioElement.removeAttribute('src');
_audioElement.load(); _audioElement.load();
transition(AudioPlaybackState.none); transition(AudioPlaybackState.none);
super.dispose(); super.dispose();
} }
List<AudioSourcePlayer> getAudioSources(List json) =>
json.map((s) => getAudioSource(s)).toList();
AudioSourcePlayer getAudioSource(Map json) {
final String id = json['id'];
var audioSourcePlayer = _audioSourcePlayers[id];
if (audioSourcePlayer == null) {
audioSourcePlayer = decodeAudioSource(json);
_audioSourcePlayers[id] = audioSourcePlayer;
}
return audioSourcePlayer;
}
AudioSourcePlayer decodeAudioSource(Map json) {
try {
switch (json['type']) {
case 'progressive':
return ProgressiveAudioSourcePlayer(
this, json['id'], Uri.parse(json['uri']), json['headers']);
case "dash":
return DashAudioSourcePlayer(
this, json['id'], Uri.parse(json['uri']), json['headers']);
case "hls":
return HlsAudioSourcePlayer(
this, json['id'], Uri.parse(json['uri']), json['headers']);
case "concatenating":
return ConcatenatingAudioSourcePlayer(
this,
json['id'],
getAudioSources(json['audioSources']),
json['useLazyPreparation']);
case "clipping":
return ClippingAudioSourcePlayer(
this,
json['id'],
getAudioSource(json['audioSource']),
Duration(milliseconds: json['start']),
Duration(milliseconds: json['end']));
case "looping":
return LoopingAudioSourcePlayer(this, json['id'],
getAudioSource(json['audioSource']), json['count']);
default:
throw Exception("Unknown AudioSource type: " + json['type']);
}
} catch (e, stacktrace) {
print("$stacktrace");
rethrow;
}
}
}
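// Web-side counterpart of an [AudioSource]: each player mirrors one node of
// the audio source tree and contributes its leaf items to the flattened
// [sequence] used for indexing and shuffling.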
abstract class AudioSourcePlayer {
Html5AudioPlayer html5AudioPlayer;
final String id;
AudioSourcePlayer(this.html5AudioPlayer, this.id);
List<IndexedAudioSourcePlayer> get sequence;
List<int> get shuffleOrder;
int shuffle(int treeIndex, int currentIndex);
}
abstract class IndexedAudioSourcePlayer extends AudioSourcePlayer {
IndexedAudioSourcePlayer(Html5AudioPlayer html5AudioPlayer, String id)
: super(html5AudioPlayer, id);
Future<Duration> load();
Future<void> play();
Future<void> pause();
Future<void> stop();
Future<void> seek(int position);
Duration get duration;
Duration get position;
AudioElement get _audioElement => html5AudioPlayer._audioElement;
@override
int shuffle(int treeIndex, int currentIndex) => treeIndex + 1;
@override
String toString() => "${this.runtimeType}";
}
abstract class UriAudioSourcePlayer extends IndexedAudioSourcePlayer {
final Uri uri;
final Map headers;
double _resumePos;
Duration _duration;
UriAudioSourcePlayer(
Html5AudioPlayer html5AudioPlayer, String id, this.uri, this.headers)
: super(html5AudioPlayer, id);
@override
List<IndexedAudioSourcePlayer> get sequence => [this];
@override
List<int> get shuffleOrder => [0];
@override
Future<Duration> load() async {
_resumePos = 0.0;
return _duration = await html5AudioPlayer.loadUri(uri);
}
@override
Future<void> play() async {
_audioElement.currentTime = _resumePos;
_audioElement.play();
}
@override
Future<void> pause() async {
_resumePos = _audioElement.currentTime;
_audioElement.pause();
}
@override
Future<void> seek(int position) async {
_audioElement.currentTime = _resumePos = position / 1000.0;
}
@override
Future<void> stop() async {
_resumePos = 0.0;
_audioElement.pause();
_audioElement.currentTime = 0.0;
}
@override
Duration get duration {
return _duration;
//final seconds = _audioElement.duration;
//return seconds.isFinite
// ? Duration(milliseconds: (seconds * 1000).toInt())
// : null;
}
@override
Duration get position {
double seconds = _audioElement.currentTime;
return Duration(milliseconds: (seconds * 1000).toInt());
}
}
class ProgressiveAudioSourcePlayer extends UriAudioSourcePlayer {
ProgressiveAudioSourcePlayer(
Html5AudioPlayer html5AudioPlayer, String id, Uri uri, Map headers)
: super(html5AudioPlayer, id, uri, headers);
}
class DashAudioSourcePlayer extends UriAudioSourcePlayer {
DashAudioSourcePlayer(
Html5AudioPlayer html5AudioPlayer, String id, Uri uri, Map headers)
: super(html5AudioPlayer, id, uri, headers);
}
class HlsAudioSourcePlayer extends UriAudioSourcePlayer {
HlsAudioSourcePlayer(
Html5AudioPlayer html5AudioPlayer, String id, Uri uri, Map headers)
: super(html5AudioPlayer, id, uri, headers);
}
class ConcatenatingAudioSourcePlayer extends AudioSourcePlayer {
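// Generates a random permutation of 0..length-1 using an inside-out
// Fisher-Yates shuffle; if [firstIndex] is given, that value is then swapped
// to the front so the current item plays first in shuffle mode.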
static List<int> generateShuffleOrder(int length, [int firstIndex]) {
final shuffleOrder = List<int>.filled(length, 0, growable: true);
for (var i = 0; i < length; i++) {
final j = _random.nextInt(i + 1);
shuffleOrder[i] = shuffleOrder[j];
shuffleOrder[j] = i;
}
if (firstIndex != null) {
for (var i = 1; i < length; i++) {
if (shuffleOrder[i] == firstIndex) {
final v = shuffleOrder[0];
shuffleOrder[0] = shuffleOrder[i];
shuffleOrder[i] = v;
break;
}
}
}
return shuffleOrder;
}
final List<AudioSourcePlayer> audioSourcePlayers;
final bool useLazyPreparation;
List<int> _shuffleOrder;
ConcatenatingAudioSourcePlayer(Html5AudioPlayer html5AudioPlayer, String id,
this.audioSourcePlayers, this.useLazyPreparation)
: _shuffleOrder = generateShuffleOrder(audioSourcePlayers.length),
super(html5AudioPlayer, id);
@override
List<IndexedAudioSourcePlayer> get sequence =>
audioSourcePlayers.expand((p) => p.sequence).toList();
@override
List<int> get shuffleOrder {
final order = <int>[];
var offset = order.length;
final childOrders = <List<int>>[];
for (var audioSourcePlayer in audioSourcePlayers) {
final childShuffleOrder = audioSourcePlayer.shuffleOrder;
childOrders.add(childShuffleOrder.map((i) => i + offset).toList());
offset += childShuffleOrder.length;
}
for (var i = 0; i < childOrders.length; i++) {
order.addAll(childOrders[_shuffleOrder[i]]);
}
return order;
}
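// Re-shuffles this node's children. [treeIndex] is the flat sequence index of
// this node's first item and the return value is the index just past its last
// item; [currentIndex] identifies the currently playing item so that the
// child containing it can be placed first in the new shuffle order.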
@override
int shuffle(int treeIndex, int currentIndex) {
int currentChildIndex;
for (var i = 0; i < audioSourcePlayers.length; i++) {
final indexBefore = treeIndex;
final child = audioSourcePlayers[i];
treeIndex = child.shuffle(treeIndex, currentIndex);
if (currentIndex >= indexBefore && currentIndex < treeIndex) {
currentChildIndex = i;
} else {}
}
// Shuffle so that the current child is first in the shuffle order
_shuffleOrder =
generateShuffleOrder(audioSourcePlayers.length, currentChildIndex);
return treeIndex;
}
add(AudioSourcePlayer player) {
audioSourcePlayers.add(player);
_shuffleOrder.add(audioSourcePlayers.length - 1);
}
insert(int index, AudioSourcePlayer player) {
audioSourcePlayers.insert(index, player);
// Shift existing shuffle positions at or after the insertion point.
for (var i = 0; i < _shuffleOrder.length; i++) {
if (_shuffleOrder[i] >= index) {
_shuffleOrder[i]++;
}
}
_shuffleOrder.add(index);
}
addAll(List<AudioSourcePlayer> players) {
final offset = audioSourcePlayers.length;
audioSourcePlayers.addAll(players);
// Append shuffle positions for the new players, starting at their first
// index, in a random order.
_shuffleOrder.addAll(
List.generate(players.length, (i) => offset + i).toList()..shuffle());
}
insertAll(int index, List<AudioSourcePlayer> players) {
audioSourcePlayers.insertAll(index, players);
// Shift existing shuffle positions at or after the insertion point.
for (var i = 0; i < _shuffleOrder.length; i++) {
if (_shuffleOrder[i] >= index) {
_shuffleOrder[i] += players.length;
}
}
_shuffleOrder.addAll(
List.generate(players.length, (i) => index + i).toList()..shuffle());
}
removeAt(int index) {
audioSourcePlayers.removeAt(index);
// Drop the removed player's shuffle position first, then shift down every
// position that referred to a later player.
_shuffleOrder.remove(index);
for (var i = 0; i < _shuffleOrder.length; i++) {
if (_shuffleOrder[i] > index) {
_shuffleOrder[i]--;
}
}
}
removeRange(int start, int end) {
audioSourcePlayers.removeRange(start, end);
// Drop the removed players' shuffle positions first, then shift down every
// position that referred to a later player.
_shuffleOrder.removeWhere((i) => i >= start && i < end);
for (var i = 0; i < _shuffleOrder.length; i++) {
if (_shuffleOrder[i] >= end) {
_shuffleOrder[i] -= (end - start);
}
}
}
move(int currentIndex, int newIndex) {
audioSourcePlayers.insert(
newIndex, audioSourcePlayers.removeAt(currentIndex));
}
clear() {
audioSourcePlayers.clear();
_shuffleOrder.clear();
}
}
class ClippingAudioSourcePlayer extends IndexedAudioSourcePlayer {
final UriAudioSourcePlayer audioSourcePlayer;
final Duration start;
final Duration end;
CancelableOperation _playOperation;
double _resumePos;
Duration _duration;
ClippingAudioSourcePlayer(Html5AudioPlayer html5AudioPlayer, String id,
this.audioSourcePlayer, this.start, this.end)
: super(html5AudioPlayer, id);
@override
List<IndexedAudioSourcePlayer> get sequence => [this];
@override
List<int> get shuffleOrder => [0];
@override
Future<Duration> load() async {
_resumePos = start.inMilliseconds / 1000.0;
Duration fullDuration =
await html5AudioPlayer.loadUri(audioSourcePlayer.uri);
_audioElement.currentTime = _resumePos;
_duration = Duration(
milliseconds: min(end.inMilliseconds, fullDuration.inMilliseconds) -
start.inMilliseconds);
return _duration;
}
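// Plays from the current resume position and schedules an automatic pause at
// the clip end, since the HTML audio element cannot stop itself at an
// arbitrary offset.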
@override
Future<void> play() async {
_interruptPlay();
//_playing = true;
final duration =
end == null ? null : end.inMilliseconds / 1000 - _resumePos;
_audioElement.currentTime = _resumePos;
_audioElement.play();
if (duration != null) {
_playOperation = CancelableOperation.fromFuture(Future.delayed(Duration(
milliseconds: duration * 1000 ~/ _audioElement.playbackRate)))
.then((_) {
_playOperation = null;
pause();
html5AudioPlayer.onEnded();
});
}
}
@override
Future<void> pause() async {
_interruptPlay();
_resumePos = _audioElement.currentTime;
_audioElement.pause();
}
@override
Future<void> seek(int position) async {
_interruptPlay();
_audioElement.currentTime =
_resumePos = start.inMilliseconds / 1000.0 + position / 1000.0;
}
@override
Future<void> stop() async {
_resumePos = 0.0;
_audioElement.pause();
_audioElement.currentTime = start.inMilliseconds / 1000.0;
}
@override
Duration get duration {
return _duration;
}
@override
Duration get position {
double seconds = _audioElement.currentTime;
var position = Duration(milliseconds: (seconds * 1000).toInt());
if (start != null) {
position -= start;
}
if (position < Duration.zero) {
position = Duration.zero;
}
return position;
}
_interruptPlay() {
_playOperation?.cancel();
_playOperation = null;
}
}
class LoopingAudioSourcePlayer extends AudioSourcePlayer {
final AudioSourcePlayer audioSourcePlayer;
final int count;
LoopingAudioSourcePlayer(Html5AudioPlayer html5AudioPlayer, String id,
this.audioSourcePlayer, this.count)
: super(html5AudioPlayer, id);
@override
List<IndexedAudioSourcePlayer> get sequence =>
List.generate(count, (i) => audioSourcePlayer)
.expand((p) => p.sequence)
.toList();
@override
List<int> get shuffleOrder {
final order = <int>[];
var offset = order.length;
for (var i = 0; i < count; i++) {
final childShuffleOrder = audioSourcePlayer.shuffleOrder;
order.addAll(childShuffleOrder.map((i) => i + offset).toList());
offset += childShuffleOrder.length;
}
return order;
}
@override
int shuffle(int treeIndex, int currentIndex) {
for (var i = 0; i < count; i++) {
treeIndex = audioSourcePlayer.shuffle(treeIndex, currentIndex);
}
return treeIndex;
}
} }

View File

@@ -36,6 +36,20 @@ packages:
url: "https://pub.dartlang.org" url: "https://pub.dartlang.org"
source: hosted source: hosted
version: "1.14.12" version: "1.14.12"
convert:
dependency: transitive
description:
name: convert
url: "https://pub.dartlang.org"
source: hosted
version: "2.1.1"
crypto:
dependency: transitive
description:
name: crypto
url: "https://pub.dartlang.org"
source: hosted
version: "2.1.5"
fake_async: fake_async:
dependency: transitive dependency: transitive
description: description:
@@ -195,7 +209,7 @@ packages:
name: test_api name: test_api
url: "https://pub.dartlang.org" url: "https://pub.dartlang.org"
source: hosted source: hosted
version: "0.2.15" version: "0.2.16"
typed_data: typed_data:
dependency: transitive dependency: transitive
description: description:
@@ -203,6 +217,13 @@ packages:
url: "https://pub.dartlang.org" url: "https://pub.dartlang.org"
source: hosted source: hosted
version: "1.1.6" version: "1.1.6"
uuid:
dependency: "direct main"
description:
name: uuid
url: "https://pub.dartlang.org"
source: hosted
version: "2.2.0"
vector_math: vector_math:
dependency: transitive dependency: transitive
description: description:
@@ -218,5 +239,5 @@ packages:
source: hosted source: hosted
version: "0.1.0" version: "0.1.0"
sdks: sdks:
dart: ">=2.6.0 <3.0.0" dart: ">=2.7.0 <3.0.0"
flutter: ">=1.12.13+hotfix.5 <2.0.0" flutter: ">=1.12.13+hotfix.5 <2.0.0"

View File

@@ -1,5 +1,5 @@
name: just_audio name: just_audio
description: Flutter plugin to play audio from streams, files, assets and DASH/HLS streams. Works with audio_service to play audio in the background. description: Flutter plugin to play audio from streams, files, assets, DASH/HLS streams and playlists. Works with audio_service to play audio in the background.
version: 0.2.2 version: 0.2.2
homepage: https://github.com/ryanheise/just_audio homepage: https://github.com/ryanheise/just_audio
@@ -12,6 +12,7 @@ dependencies:
path: ^1.6.4 path: ^1.6.4
path_provider: ^1.6.10 path_provider: ^1.6.10
async: ^2.4.1 async: ^2.4.1
uuid: ^2.2.0
flutter: flutter:
sdk: flutter sdk: flutter
flutter_web_plugins: flutter_web_plugins: