Integrate the ExoPlayer library

This commit is contained in:
Ryan Heise 2019-12-31 20:38:46 +11:00
parent 5e0fc0fd76
commit fc9e65b371
7 changed files with 169 additions and 1519 deletions

View File

@@ -22,7 +22,7 @@ SOFTWARE.
==============================================================================
This software includes the sonic library which is licensed under the Apache
This software includes the ExoPlayer library which is licensed under the Apache
License, Version 2.0.

View File

@@ -17,9 +17,11 @@ This plugin has been tested on Android, and is being made available for testing
final player = AudioPlayer();
await player.setUrl('https://foo.com/bar.mp3');
player.play();
await player.pause();
await player.play(untilPosition: Duration(minutes: 1));
await player.stop()
player.pause();
player.play();
await player.stop();
await player.setClip(start: Duration(seconds: 10), end: Duration(seconds: 20));
await player.play();
await player.setUrl('https://foo.com/baz.mp3');
await player.seek(Duration(minutes: 5));
player.play();

View File

@@ -38,3 +38,10 @@ android {
targetCompatibility 1.8
}
}
dependencies {
implementation 'com.google.android.exoplayer:exoplayer-core:2.11.1'
implementation 'com.google.android.exoplayer:exoplayer-dash:2.11.1'
implementation 'com.google.android.exoplayer:exoplayer-hls:2.11.1'
implementation 'com.google.android.exoplayer:exoplayer-smoothstreaming:2.11.1'
}

View File

@@ -1,13 +1,16 @@
package com.ryanheise.just_audio;
import android.media.AudioFormat;
import android.media.AudioManager;
import android.media.AudioTrack;
import android.media.MediaCodec;
import android.media.MediaExtractor;
import android.media.MediaFormat;
import android.media.MediaTimestamp;
import android.os.Handler;
import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.Player;
import com.google.android.exoplayer2.PlaybackParameters;
import com.google.android.exoplayer2.SimpleExoPlayer;
import com.google.android.exoplayer2.source.ClippingMediaSource;
import com.google.android.exoplayer2.source.MediaSource;
import com.google.android.exoplayer2.source.ProgressiveMediaSource;
import com.google.android.exoplayer2.upstream.DataSource;
import com.google.android.exoplayer2.upstream.DefaultDataSourceFactory;
import com.google.android.exoplayer2.util.Util;
import io.flutter.plugin.common.EventChannel;
import io.flutter.plugin.common.EventChannel.EventSink;
import io.flutter.plugin.common.MethodCall;
@@ -19,13 +22,14 @@ import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Deque;
import java.util.LinkedList;
import android.content.Context;
import android.net.Uri;
import java.util.List;
import sonic.Sonic;
public class AudioPlayer implements MethodCallHandler {
public class AudioPlayer implements MethodCallHandler, Player.EventListener {
private final Registrar registrar;
private final Context context;
private final MethodChannel methodChannel;
private final EventChannel eventChannel;
private EventSink eventSink;
@@ -44,33 +48,26 @@ public class AudioPlayer implements MethodCallHandler {
};
private final String id;
private String url;
private volatile PlaybackState state;
private PlaybackState stateBeforeSeek;
private long updateTime;
private int updatePosition;
private Deque<SeekRequest> seekRequests = new LinkedList<>();
private long updatePosition;
private MediaExtractor extractor;
private MediaFormat format;
private Sonic sonic;
private int channelCount;
private int sampleRate;
private int duration;
private MediaCodec codec;
private AudioTrack audioTrack;
private PlayThread playThread;
private int start;
private Integer untilPosition;
private Object monitor = new Object();
private long duration;
private Long start;
private Long end;
private float volume = 1.0f;
private float speed = 1.0f;
private Thread mainThread;
private byte[] chunk;
private Long seekPos;
private Result prepareResult;
private Result seekResult;
private MediaSource mediaSource;
private SimpleExoPlayer player;
public AudioPlayer(final Registrar registrar, final String id) {
mainThread = Thread.currentThread();
this.registrar = registrar;
this.context = registrar.activeContext();
this.id = id;
methodChannel = new MethodChannel(registrar.messenger(), "com.ryanheise.just_audio.methods." + id);
methodChannel.setMethodCallHandler(this);
@@ -87,11 +84,47 @@ public class AudioPlayer implements MethodCallHandler {
}
});
state = PlaybackState.none;
player = new SimpleExoPlayer.Builder(context).build();
player.addListener(this);
}
@Override
public void onPlayerStateChanged(boolean playWhenReady, int playbackState) {
switch (playbackState) {
case Player.STATE_READY:
if (prepareResult != null) {
duration = player.getDuration();
prepareResult.success(duration);
prepareResult = null;
transition(PlaybackState.stopped);
}
break;
case Player.STATE_BUFFERING:
// TODO: use this instead of checkForDiscontinuity.
break;
case Player.STATE_ENDED:
if (state != PlaybackState.completed) {
transition(PlaybackState.completed);
}
break;
}
}
@Override
public void onSeekProcessed() {
if (seekResult != null) {
seekPos = null;
transition(stateBeforeSeek);
seekResult.success(null);
stateBeforeSeek = null;
seekResult = null;
}
}
private void checkForDiscontinuity() {
final long now = System.currentTimeMillis();
final int position = getCurrentPosition();
final long position = getCurrentPosition();
final long timeSinceLastUpdate = now - updateTime;
final long expectedPosition = updatePosition + (long)(timeSinceLastUpdate * speed);
final long drift = position - expectedPosition;
@@ -114,8 +147,19 @@ public class AudioPlayer implements MethodCallHandler {
case "setUrl":
setUrl((String)args.get(0), result);
break;
case "setClip":
Object start = args.get(0);
if (start != null && start instanceof Integer) {
start = new Long((Integer)start);
}
Object end = args.get(1);
if (end != null && end instanceof Integer) {
end = new Long((Integer)end);
}
setClip((Long)start, (Long)end, result);
break;
case "play":
play((Integer)args.get(0));
play();
result.success(null);
break;
case "pause":
@@ -134,7 +178,12 @@ public class AudioPlayer implements MethodCallHandler {
result.success(null);
break;
case "seek":
seek((Integer)args.get(0), result);
Object position = args.get(0);
if (position instanceof Integer) {
seek((Integer)position, result);
} else {
seek((Long)position, result);
}
break;
case "dispose":
dispose();
@@ -161,13 +210,13 @@ public class AudioPlayer implements MethodCallHandler {
eventSink.success(event);
}
private int getCurrentPosition() {
private long getCurrentPosition() {
if (state == PlaybackState.none || state == PlaybackState.connecting) {
return 0;
} else if (seekRequests.size() > 0) {
return seekRequests.peekFirst().pos;
} else if (seekPos != null) {
return seekPos;
} else {
return (int)(extractor.getSampleTime() / 1000);
return player.getCurrentPosition();
}
}
@@ -183,92 +232,47 @@ public class AudioPlayer implements MethodCallHandler {
broadcastPlaybackEvent();
}
private void bgTransition(final PlaybackState newState) {
bgTransition(state, newState);
}
private void bgTransition(final PlaybackState oldState, final PlaybackState newState) {
// Redundant assignment which ensures the state is set
// immediately in the background thread.
state = newState;
handler.post(new Runnable() {
@Override
public void run() {
transition(oldState, newState);
}
});
}
public void setUrl(final String url, final Result result) throws IOException {
if (state != PlaybackState.none && state != PlaybackState.stopped && state != PlaybackState.completed) {
throw new IllegalStateException("Can call setUrl only from none/stopped/completed states (" + state + ")");
}
ensureStopped();
prepareResult = result;
transition(PlaybackState.connecting);
this.url = url;
if (extractor != null) {
extractor.release();
}
new Thread(() -> {
try {
blockingInitExtractorAndCodec();
sonic = new Sonic(sampleRate, channelCount);
sonic.setVolume(volume);
sonic.setSpeed(speed);
bgTransition(PlaybackState.stopped);
handler.post(() -> result.success(duration));
} catch (Exception e) {
e.printStackTrace();
handler.post(() -> result.error("Error: " + e, null, null));
}
}).start();
DataSource.Factory dataSourceFactory = new DefaultDataSourceFactory(context, Util.getUserAgent(context, "just_audio"));
mediaSource = new ProgressiveMediaSource.Factory(dataSourceFactory).createMediaSource(Uri.parse(url));
player.prepare(mediaSource);
}
public void play(final Integer untilPosition) {
if (untilPosition != null && untilPosition <= start) {
throw new IllegalArgumentException("untilPosition must be >= 0");
public void setClip(final Long start, final Long end, final Result result) {
this.start = start;
this.end = end;
prepareResult = result;
if (start != null || end != null) {
player.prepare(new ClippingMediaSource(mediaSource,
(start != null ? start : 0) * 1000L,
(end != null ? end : C.TIME_END_OF_SOURCE) * 1000L));
} else {
player.prepare(mediaSource);
}
this.untilPosition = untilPosition;
}
public void play() {
switch (state) {
case stopped:
case completed:
ensureStopped();
transition(PlaybackState.playing);
playThread = new PlayThread();
playThread.start();
break;
case buffering:
case paused:
synchronized (monitor) {
transition(PlaybackState.playing);
monitor.notifyAll();
}
player.setPlayWhenReady(true);
transition(PlaybackState.playing);
break;
default:
throw new IllegalStateException("Can call play only from stopped, completed and paused states (" + state + ")");
}
}
private void ensureStopped() {
synchronized (monitor) {
try {
while (playThread != null) {
monitor.wait();
}
} catch (Exception e) {}
}
}
public void pause() {
switch (state) {
case playing:
case buffering:
synchronized (monitor) {
transition(PlaybackState.paused);
audioTrack.pause();
monitor.notifyAll();
}
player.setPlayWhenReady(false);
transition(PlaybackState.paused);
break;
default:
throw new IllegalStateException("Can call pause only from playing and buffering states (" + state + ")");
@@ -280,29 +284,14 @@ public class AudioPlayer implements MethodCallHandler {
case stopped:
result.success(null);
break;
case completed:
transition(PlaybackState.stopped);
result.success(null);
break;
// TODO: Allow stopping from buffered/connecting states.
case completed:
case playing:
case paused:
synchronized (monitor) {
// It takes some time for the PlayThread to actually wind down
// so other methods that transition from the stopped state should
// wait for playThread == null with ensureStopped().
PlaybackState oldState = state;
transition(PlaybackState.stopped);
if (oldState == PlaybackState.paused) {
monitor.notifyAll();
} else if (audioTrack != null) {
audioTrack.pause();
}
new Thread(() -> {
ensureStopped();
handler.post(() -> result.success(null));
}).start();
}
player.setPlayWhenReady(false);
player.seekTo(0L);
transition(PlaybackState.stopped);
result.success(null);
break;
default:
throw new IllegalStateException("Can call stop only from playing/paused/stopped/completed states (" + state + ")");
@@ -311,104 +300,32 @@ public class AudioPlayer implements MethodCallHandler {
public void setVolume(final float volume) {
this.volume = volume;
if (sonic != null) {
sonic.setVolume(volume);
}
player.setVolume(volume);
}
public void setSpeed(final float speed) {
// NOTE: existing audio data in the pipeline will continue
// to play out at the speed it was already processed at. So
// for a brief moment, checkForDiscontinuity() may erroneously
// detect some buffering.
// TODO: Sort this out. The cheap workaround would be to disable
// checks for discontinuity during this brief moment.
this.speed = speed;
if (sonic != null) {
sonic.setSpeed(speed);
}
player.setPlaybackParameters(new PlaybackParameters(speed));
broadcastPlaybackEvent();
}
// TODO: Test whether this times out the MediaCodec on Ogg files.
// See: https://stackoverflow.com/questions/22109050/mediacodec-dequeueoutputbuffer-times-out-when-seeking-with-ogg-audio-files
public void seek(final int position, final Result result) {
synchronized (monitor) {
if (state == PlaybackState.none || state == PlaybackState.connecting) {
throw new IllegalStateException("Cannot call seek in none or connecting states (" + state + ")");
}
if (state == PlaybackState.stopped) {
ensureStopped();
}
start = position;
if (seekRequests.size() == 0) {
stateBeforeSeek = state;
}
seekRequests.addLast(new SeekRequest(position, result));
handler.removeCallbacks(positionObserver);
transition(PlaybackState.buffering);
if (stateBeforeSeek == PlaybackState.stopped) {
new Thread(() -> {
processSeekRequests();
}).start();
} else {
monitor.notifyAll();
}
public void seek(final long position, final Result result) {
seekPos = position;
seekResult = result;
if (stateBeforeSeek == null) {
stateBeforeSeek = state;
}
handler.removeCallbacks(positionObserver);
transition(PlaybackState.buffering);
player.seekTo(position);
}
public void dispose() {
if (state != PlaybackState.stopped && state != PlaybackState.completed && state != PlaybackState.none) {
throw new IllegalStateException("Can call dispose only from stopped/completed/none states (" + state + ")");
}
if (extractor != null) {
ensureStopped();
transition(PlaybackState.none);
extractor.release();
extractor = null;
codec.stop();
codec.release();
codec = null;
chunk = null;
}
}
private void blockingInitExtractorAndCodec() throws IOException {
extractor = new MediaExtractor();
extractor.setDataSource(url);
format = selectAudioTrack(extractor);
channelCount = format.getInteger(MediaFormat.KEY_CHANNEL_COUNT);
sampleRate = format.getInteger(MediaFormat.KEY_SAMPLE_RATE);
long durationMs = format.getLong(MediaFormat.KEY_DURATION);
duration = (int)(durationMs / 1000);
start = 0;
codec = MediaCodec.createDecoderByType(format.getString(MediaFormat.KEY_MIME));
codec.configure(format, null, null, 0);
codec.start();
}
private MediaFormat selectAudioTrack(MediaExtractor extractor) throws IOException {
int trackCount = extractor.getTrackCount();
for (int i = 0; i < trackCount; i++) {
MediaFormat format = extractor.getTrackFormat(i);
if (format.getString(MediaFormat.KEY_MIME).startsWith("audio/")) {
extractor.selectTrack(i);
return format;
}
}
throw new RuntimeException("No audio track found");
}
private void processSeekRequests() {
while (seekRequests.size() > 0) {
SeekRequest seekRequest = seekRequests.removeFirst();
extractor.seekTo(seekRequest.pos*1000L, MediaExtractor.SEEK_TO_CLOSEST_SYNC);
if (seekRequests.size() == 0) {
bgTransition(stateBeforeSeek);
stateBeforeSeek = null;
}
handler.post(() -> seekRequest.result.success(null));
}
player.release();
transition(PlaybackState.none);
}
private void startObservingPosition() {
@@ -416,208 +333,6 @@ public class AudioPlayer implements MethodCallHandler {
handler.post(positionObserver);
}
private class PlayThread extends Thread {
private static final int TIMEOUT = 1000;
private static final int FRAME_SIZE = 1024*2;
private static final int BEHIND_LIMIT = 500; // ms
private byte[] silence;
private boolean finishedDecoding = false;
@Override
public void run() {
boolean reachedEnd = false;
int encoding = AudioFormat.ENCODING_PCM_16BIT;
int channelFormat = channelCount==1?AudioFormat.CHANNEL_OUT_MONO:AudioFormat.CHANNEL_OUT_STEREO;
int minSize = AudioTrack.getMinBufferSize(sampleRate, channelFormat, encoding);
int audioTrackBufferSize = minSize * 4;
audioTrack = new AudioTrack(
AudioManager.STREAM_MUSIC,
sampleRate,
channelFormat,
encoding,
audioTrackBufferSize,
AudioTrack.MODE_STREAM);
silence = new byte[audioTrackBufferSize];
MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
boolean firstSample = true;
int decoderIdleCount = 0;
boolean finishedReading = false;
int progress = 0;
byte[] sonicOut = new byte[audioTrackBufferSize];
try {
audioTrack.play();
while (!finishedDecoding) {
if (checkForRequest()) continue;
// put data into input buffer
if (!finishedReading) {
int inputBufferIndex = codec.dequeueInputBuffer(TIMEOUT);
if (inputBufferIndex >= 0) {
ByteBuffer inputBuffer = codec.getInputBuffer(inputBufferIndex);
long presentationTime = extractor.getSampleTime();
int presentationTimeMs = (int)(presentationTime / 1000);
int sampleSize = extractor.readSampleData(inputBuffer, 0);
if (firstSample && sampleSize == 2 && format.getString(MediaFormat.KEY_MIME).equals("audio/mp4a-latm")) {
// Skip initial frames.
extractor.advance();
} else if (sampleSize >= 0) {
codec.queueInputBuffer(inputBufferIndex, 0, sampleSize, presentationTime, 0);
extractor.advance();
} else {
codec.queueInputBuffer(inputBufferIndex, 0, 0, -1, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
finishedReading = true;
}
firstSample = false;
}
}
if (checkForRequest()) continue;
// read data from output buffer
int outputBufferIndex = codec.dequeueOutputBuffer(info, TIMEOUT);
decoderIdleCount++;
if (outputBufferIndex >= 0) {
int currentPosition = (int)(info.presentationTimeUs/1000);
ByteBuffer buf = codec.getOutputBuffer(outputBufferIndex);
if (info.size > 0) {
decoderIdleCount = 0;
if (chunk == null || chunk.length < info.size) {
chunk = new byte[info.size];
}
buf.get(chunk, 0, info.size);
buf.clear();
// put decoded data into sonic
if (chunk.length > 0) {
sonic.writeBytesToStream(chunk, chunk.length);
} else {
sonic.flushStream();
}
// output sonic'd data to audioTrack
int numWritten;
do {
numWritten = sonic.readBytesFromStream(sonicOut, sonicOut.length);
if (numWritten > 0) {
audioTrack.write(sonicOut, 0, numWritten);
}
} while (numWritten > 0);
}
// Detect end of playback
codec.releaseOutputBuffer(outputBufferIndex, false);
if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
if (untilPosition != null) {
extractor.release();
codec.flush();
codec.stop();
codec.release();
blockingInitExtractorAndCodec();
finishedReading = false;
finishedDecoding = false;
decoderIdleCount = 0;
audioTrack.pause();
bgTransition(PlaybackState.paused);
} else {
audioTrack.pause();
finishedDecoding = true;
reachedEnd = true;
}
} else if (untilPosition != null && currentPosition >= untilPosition) {
// NOTE: When streaming audio over bluetooth, it clips off
// the last 200-300ms of the clip, even though it has been
// written to the AudioTrack. So, we need an option to pad the
// audio with an extra 200-300ms of silence.
// Could be a good idea to do the same at the start
// since some bluetooth headphones fade in and miss the
// first little bit.
Arrays.fill(sonicOut, (byte)0);
audioTrack.write(sonicOut, 0, sonicOut.length);
bgTransition(PlaybackState.paused);
audioTrack.pause();
}
} else if (outputBufferIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
// Don't expect this to happen in audio files, but could be wrong.
// TODO: Investigate.
//MediaFormat newFormat = codec.getOutputFormat();
}
if (decoderIdleCount >= 100) {
// Data has stopped coming through the pipeline despite not receiving a
// BUFFER_FLAG_END_OF_STREAM signal, so stop.
System.out.println("decoderIdleCount >= 100. finishedDecoding = true");
finishedDecoding = true;
audioTrack.pause();
}
}
} catch (IOException e) {
e.printStackTrace();
} finally {
codec.flush();
audioTrack.flush();
audioTrack.release();
audioTrack = null;
synchronized (monitor) {
start = 0;
untilPosition = null;
bgTransition(reachedEnd ? PlaybackState.completed : PlaybackState.stopped);
extractor.seekTo(0L, MediaExtractor.SEEK_TO_CLOSEST_SYNC);
handler.post(() -> broadcastPlaybackEvent());
playThread = null;
monitor.notifyAll();
}
}
}
// Return true to "continue" to the audio loop
private boolean checkForRequest() {
try {
synchronized (monitor) {
if (state == PlaybackState.paused) {
while (state == PlaybackState.paused) {
monitor.wait();
}
// Unpaused
// Reset updateTime for higher accuracy.
bgTransition(state);
if (state == PlaybackState.playing) {
audioTrack.play();
} else if (state == PlaybackState.buffering) {
// TODO: What if we are in the second checkForRequest call and
// we ask to continue the loop, we may forget about dequeued
// input buffers. Need to handle this correctly.
return true;
}
} else if (state == PlaybackState.buffering && seekRequests.size() > 0) {
// Seek requested
codec.flush();
audioTrack.flush();
processSeekRequests();
if (state != PlaybackState.stopped) {
// The == stopped case is handled below.
return true;
}
}
if (state == PlaybackState.stopped) {
finishedDecoding = true;
return true;
}
}
}
catch (Exception e) {}
return false;
}
}
enum PlaybackState {
none,
stopped,
@@ -627,14 +342,4 @@ public class AudioPlayer implements MethodCallHandler {
connecting,
completed
}
class SeekRequest {
public final int pos;
public final Result result;
public SeekRequest(int pos, Result result) {
this.pos = pos;
this.result = result;
}
}
}

File diff suppressed because it is too large Load Diff

View File

@@ -48,8 +48,11 @@
NSArray* args = (NSArray*)call.arguments;
if ([@"setUrl" isEqualToString:call.method]) {
[self setUrl:args[0] result:result];
} else if ([@"setClip" isEqualToString:call.method]) {
[self setClip:args[0] end:args[1]];
result(nil);
} else if ([@"play" isEqualToString:call.method]) {
[self play:args[0]];
[self play];
result(nil);
} else if ([@"pause" isEqualToString:call.method]) {
[self pause];
@@ -215,13 +218,14 @@
}
}
- (void)play:(NSNumber*)untilPosition {
- (void)setClip:(NSNumber*)start end:(NSNumber*)end {
// TODO
}
- (void)play {
// TODO: dynamically adjust the lag.
//int lag = 6;
int start = [self getCurrentPosition];
if (untilPosition != [NSNull null] && [untilPosition intValue] <= start) {
return;
}
//int start = [self getCurrentPosition];
[_player play];
[self setPlaybackState:playing];
// TODO: convert this Android code to iOS

View File

@@ -13,9 +13,11 @@ import 'package:rxdart/rxdart.dart';
/// final player = AudioPlayer();
/// await player.setUrl('https://foo.com/bar.mp3');
/// player.play();
/// await player.pause();
/// await player.play(untilPosition: Duration(minutes: 1));
/// await player.stop()
/// player.pause();
/// player.play();
/// await player.stop();
/// await player.setClip(start: Duration(seconds: 10), end: Duration(seconds: 20));
/// await player.play();
/// await player.setUrl('https://foo.com/baz.mp3');
/// await player.seek(Duration(minutes: 5));
/// player.play();
@@ -171,27 +173,38 @@ class AudioPlayer {
Future<File> get _cacheFile async => File(p.join(
(await getTemporaryDirectory()).path, 'just_audio_asset_cache', '$_id'));
/// Plays the currently loaded media from the current position, until the
/// given position if specified. The [Future] returned by this method
/// completes when playback completes or is paused or stopped. It is legal to
/// invoke this method only from one of the following states:
/// Clip the audio to the given [start] and [end] timestamps.
Future<Duration> setClip({Duration start, Duration end}) async {
_durationFuture =
_invokeMethod('setClip', [start?.inMilliseconds, end?.inMilliseconds])
.then((ms) => Duration(milliseconds: ms));
final duration = await _durationFuture;
_durationSubject.add(duration);
return duration;
}
/// Plays the currently loaded media from the current position. The [Future]
/// returned by this method completes when playback completes or is paused or
/// stopped. It is legal to invoke this method only from one of the following
/// states:
///
/// * [AudioPlaybackState.stopped]
/// * [AudioPlaybackState.completed]
/// * [AudioPlaybackState.paused]
Future<void> play({final Duration untilPosition}) async {
Future<void> play() async {
StreamSubscription subscription;
Completer completer = Completer();
subscription = playbackStateStream
.skip(1)
.where((state) =>
state == AudioPlaybackState.paused ||
state == AudioPlaybackState.stopped)
state == AudioPlaybackState.stopped ||
state == AudioPlaybackState.completed)
.listen((state) {
subscription.cancel();
completer.complete();
});
await _invokeMethod('play', [untilPosition?.inMilliseconds]);
await _invokeMethod('play');
await completer.future;
}