Visualizer implementation for Android.

This commit is contained in:
Ryan Heise 2021-01-18 20:24:57 +11:00
parent 602dc44029
commit 1fecd5ac1f
13 changed files with 457 additions and 43 deletions

View File

@ -1,8 +1,14 @@
package com.ryanheise.just_audio;
import android.Manifest;
import android.app.Activity;
import android.content.Context;
import android.content.pm.PackageManager;
import android.media.audiofx.Visualizer;
import android.net.Uri;
import android.os.Handler;
import androidx.core.app.ActivityCompat;
import androidx.core.content.ContextCompat;
import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.ExoPlaybackException;
import com.google.android.exoplayer2.MediaItem;
@ -35,13 +41,13 @@ import com.google.android.exoplayer2.upstream.DefaultHttpDataSourceFactory;
import com.google.android.exoplayer2.util.MimeTypes;
import com.google.android.exoplayer2.util.Util;
import io.flutter.Log;
import io.flutter.embedding.engine.plugins.activity.ActivityPluginBinding;
import io.flutter.plugin.common.BinaryMessenger;
import io.flutter.plugin.common.EventChannel;
import io.flutter.plugin.common.EventChannel.EventSink;
import io.flutter.plugin.common.MethodCall;
import io.flutter.plugin.common.MethodChannel;
import io.flutter.plugin.common.MethodChannel.MethodCallHandler;
import io.flutter.plugin.common.MethodChannel.Result;
import io.flutter.plugin.common.PluginRegistry;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
@ -50,7 +56,7 @@ import java.util.List;
import java.util.Map;
import java.util.Random;
public class AudioPlayer implements MethodCallHandler, Player.EventListener, AudioListener, MetadataOutput {
public class AudioPlayer implements MethodCallHandler, Player.EventListener, AudioListener, MetadataOutput, PluginRegistry.RequestPermissionsResultListener {
static final String TAG = "AudioPlayer";
@ -58,8 +64,8 @@ public class AudioPlayer implements MethodCallHandler, Player.EventListener, Aud
private final Context context;
private final MethodChannel methodChannel;
private final EventChannel eventChannel;
private EventSink eventSink;
private final BetterEventChannel eventChannel;
private ActivityPluginBinding activityPluginBinding;
private ProcessingState processingState;
private long bufferedPosition;
@ -77,6 +83,12 @@ public class AudioPlayer implements MethodCallHandler, Player.EventListener, Aud
private IcyHeaders icyHeaders;
private int errorCount;
private AudioAttributes pendingAudioAttributes;
private BetterVisualizer visualizer;
private Result startVisualizerResult;
private boolean enableWaveform;
private boolean enableFft;
private Integer visualizerCaptureRate;
private Integer visualizerCaptureSize;
private SimpleExoPlayer player;
private Integer audioSessionId;
@ -114,26 +126,50 @@ public class AudioPlayer implements MethodCallHandler, Player.EventListener, Aud
this.context = applicationContext;
methodChannel = new MethodChannel(messenger, "com.ryanheise.just_audio.methods." + id);
methodChannel.setMethodCallHandler(this);
eventChannel = new EventChannel(messenger, "com.ryanheise.just_audio.events." + id);
eventChannel.setStreamHandler(new EventChannel.StreamHandler() {
@Override
public void onListen(final Object arguments, final EventSink eventSink) {
AudioPlayer.this.eventSink = eventSink;
}
@Override
public void onCancel(final Object arguments) {
eventSink = null;
}
});
eventChannel = new BetterEventChannel(messenger, "com.ryanheise.just_audio.events." + id);
visualizer = new BetterVisualizer(messenger, id);
processingState = ProcessingState.none;
}
// Asks the current activity for RECORD_AUDIO (required by the visualizer).
// The grant/deny outcome is delivered to onRequestPermissionsResult below.
// Precondition: activityPluginBinding is non-null (callers check this).
private void requestPermissions() {
ActivityCompat.requestPermissions(activityPluginBinding.getActivity(), new String[] { Manifest.permission.RECORD_AUDIO }, 1);
}
// Attaches this player to the given activity binding (or detaches when null),
// keeping the permission-result listener registered on exactly one binding.
public void setActivityPluginBinding(ActivityPluginBinding activityPluginBinding) {
// Unregister from the previous binding before switching to a new one.
if (this.activityPluginBinding != null && this.activityPluginBinding != activityPluginBinding) {
this.activityPluginBinding.removeRequestPermissionsResultListener(this);
}
this.activityPluginBinding = activityPluginBinding;
if (activityPluginBinding != null) {
activityPluginBinding.addRequestPermissionsResultListener(this);
// If there is a pending startVisualizer request
// (startVisualizer was called before an activity was available),
// request the RECORD_AUDIO permission now that we have an activity.
if (startVisualizerResult != null) {
requestPermissions();
}
}
}
// (Re)schedules bufferWatcher (defined elsewhere in this class) on the
// handler; removing it first ensures at most one callback chain is active.
private void startWatchingBuffer() {
handler.removeCallbacks(bufferWatcher);
handler.post(bufferWatcher);
}
@Override
// Handles the outcome of the RECORD_AUDIO request made by requestPermissions().
// Completes the pending startVisualizer request with success or failure.
// Returns true only when the permission was granted and handled here.
public boolean onRequestPermissionsResult(int requestCode, String[] permissions, int[] grantResults) {
    for (int i = 0; i < permissions.length; i++) {
        if (permissions[i].equals(Manifest.permission.RECORD_AUDIO)) {
            // Per the Android docs, grantResults may be empty (or shorter than
            // permissions) if the request was interrupted — guard the index so
            // an interrupted request is treated as a denial, not a crash.
            if (i < grantResults.length && grantResults[i] == PackageManager.PERMISSION_GRANTED) {
                visualizer.setHasPermission(true);
                completeStartVisualizer(true);
                return true;
            }
            completeStartVisualizer(false);
            break;
        }
    }
    return false;
}
@Override
public void onAudioSessionId(int audioSessionId) {
if (audioSessionId == C.AUDIO_SESSION_ID_UNSET) {
@ -141,6 +177,7 @@ public class AudioPlayer implements MethodCallHandler, Player.EventListener, Aud
} else {
this.audioSessionId = audioSessionId;
}
visualizer.onAudioSessionId(this.audioSessionId);
}
@Override
@ -277,12 +314,48 @@ public class AudioPlayer implements MethodCallHandler, Player.EventListener, Aud
seekResult = null;
}
// Completes the pending "startVisualizer" method-channel call, if any.
// On success, starts the visualizer with the cached parameters and replies
// with the sampling rate; on failure, replies with a permission error.
private void completeStartVisualizer(boolean success) {
    final Result pendingResult = startVisualizerResult;
    if (pendingResult == null) return;
    startVisualizerResult = null;
    if (!success) {
        pendingResult.error("RECORD_AUDIO permission denied", null, null);
        return;
    }
    visualizer.start(visualizerCaptureRate, visualizerCaptureSize, enableWaveform, enableFft);
    Map<String, Object> resultMap = new HashMap<String, Object>();
    resultMap.put("samplingRate", visualizer.getSamplingRate());
    pendingResult.success(resultMap);
}
@Override
public void onMethodCall(final MethodCall call, final Result result) {
ensurePlayerInitialized();
try {
switch (call.method) {
case "startVisualizer":
Boolean enableWaveform = call.argument("enableWaveform");
Boolean enableFft = call.argument("enableFft");
Integer captureRate = call.argument("captureRate");
Integer captureSize = call.argument("captureSize");
this.enableWaveform = enableWaveform;
this.enableFft = enableFft;
visualizerCaptureRate = captureRate;
visualizerCaptureSize = captureSize;
startVisualizerResult = result;
if (ContextCompat.checkSelfPermission(context, Manifest.permission.RECORD_AUDIO) == PackageManager.PERMISSION_GRANTED) {
visualizer.setHasPermission(true);
completeStartVisualizer(true);
} else if (activityPluginBinding != null && activityPluginBinding.getActivity() != null) {
requestPermissions();
} else {
// Will request permission in setActivityPluginBinding
}
break;
case "stopVisualizer":
visualizer.stop();
result.success(new HashMap<String, Object>());
break;
case "load":
Long initialPosition = getLong(call.argument("initialPosition"));
Integer initialIndex = call.argument("initialIndex");
@ -565,9 +638,7 @@ public class AudioPlayer implements MethodCallHandler, Player.EventListener, Aud
event.put("currentIndex", currentIndex);
event.put("androidAudioSessionId", audioSessionId);
if (eventSink != null) {
eventSink.success(event);
}
eventChannel.success(event);
}
private Map<String, Object> collectIcyMetadata() {
@ -615,9 +686,7 @@ public class AudioPlayer implements MethodCallHandler, Player.EventListener, Aud
prepareResult = null;
}
if (eventSink != null) {
eventSink.error(errorCode, errorMsg, null);
}
eventChannel.error(errorCode, errorMsg, null);
}
private void transition(final ProcessingState newState) {
@ -706,9 +775,8 @@ public class AudioPlayer implements MethodCallHandler, Player.EventListener, Aud
player = null;
transition(ProcessingState.none);
}
if (eventSink != null) {
eventSink.endOfStream();
}
eventChannel.endOfStream();
visualizer.dispose();
}
private void abortSeek() {

View File

@ -0,0 +1,39 @@
package com.ryanheise.just_audio;
import io.flutter.plugin.common.BinaryMessenger;
import io.flutter.plugin.common.EventChannel;
import io.flutter.plugin.common.EventChannel.EventSink;
/**
 * An EventSink wrapper around a Flutter EventChannel that is always safe to
 * call: events are forwarded to the currently attached listener, and silently
 * dropped while no listener is attached.
 */
public class BetterEventChannel implements EventSink {
    // The sink of the currently attached listener, or null when detached.
    private EventSink delegate;

    public BetterEventChannel(final BinaryMessenger messenger, final String id) {
        new EventChannel(messenger, id).setStreamHandler(new EventChannel.StreamHandler() {
            @Override
            public void onListen(final Object arguments, final EventSink sink) {
                delegate = sink;
            }
            @Override
            public void onCancel(final Object arguments) {
                delegate = null;
            }
        });
    }

    @Override
    public void success(Object event) {
        final EventSink sink = delegate;
        if (sink != null) sink.success(event);
    }

    @Override
    public void error(String errorCode, String errorMessage, Object errorDetails) {
        final EventSink sink = delegate;
        if (sink != null) sink.error(errorCode, errorMessage, errorDetails);
    }

    @Override
    public void endOfStream() {
        final EventSink sink = delegate;
        if (sink != null) sink.endOfStream();
    }
}

View File

@ -0,0 +1,86 @@
package com.ryanheise.just_audio;
import android.media.audiofx.Visualizer;
import io.flutter.plugin.common.BinaryMessenger;
/**
 * Wraps android.media.audiofx.Visualizer, broadcasting waveform and FFT
 * captures over dedicated event channels. A start request made before an
 * audio session id or the RECORD_AUDIO permission is available is deferred
 * and replayed from onAudioSessionId once both preconditions hold.
 */
public class BetterVisualizer {
    private Visualizer visualizer;
    private final BetterEventChannel waveformEventChannel;
    private final BetterEventChannel fftEventChannel;
    private Integer audioSessionId;
    // Parameters cached by start() so a deferred start can replay them.
    private int captureRate;
    private int captureSize;
    private boolean enableWaveform;
    private boolean enableFft;
    private boolean pendingStartRequest;
    private boolean hasPermission;

    public BetterVisualizer(final BinaryMessenger messenger, String id) {
        waveformEventChannel = new BetterEventChannel(messenger, "com.ryanheise.just_audio.waveform_events." + id);
        fftEventChannel = new BetterEventChannel(messenger, "com.ryanheise.just_audio.fft_events." + id);
    }

    /**
     * Returns the sampling rate reported by the underlying Visualizer, or 0 if
     * the visualizer has not been created yet (e.g. start() was deferred).
     */
    public int getSamplingRate() {
        return visualizer != null ? visualizer.getSamplingRate() : 0;
    }

    public void setHasPermission(boolean hasPermission) {
        this.hasPermission = hasPermission;
    }

    // Called when the player's audio session changes; replays a deferred start.
    public void onAudioSessionId(Integer audioSessionId) {
        this.audioSessionId = audioSessionId;
        if (audioSessionId != null && hasPermission && pendingStartRequest) {
            start(captureRate, captureSize, enableWaveform, enableFft);
        }
    }

    /**
     * Starts capturing. captureRate (millihertz) and captureSize are clamped
     * to the device-supported ranges; null selects a default. No-op if already
     * started. If preconditions aren't met yet, the request is deferred.
     */
    public void start(Integer captureRate, Integer captureSize, final boolean enableWaveform, final boolean enableFft) {
        if (visualizer != null) return;
        if (captureRate == null) {
            captureRate = Visualizer.getMaxCaptureRate() / 2;
        } else if (captureRate > Visualizer.getMaxCaptureRate()) {
            captureRate = Visualizer.getMaxCaptureRate();
        }
        if (captureSize == null) {
            captureSize = Visualizer.getCaptureSizeRange()[1];
        } else if (captureSize > Visualizer.getCaptureSizeRange()[1]) {
            captureSize = Visualizer.getCaptureSizeRange()[1];
        } else if (captureSize < Visualizer.getCaptureSizeRange()[0]) {
            captureSize = Visualizer.getCaptureSizeRange()[0];
        }
        // Cache the clamped parameters for a possible deferred restart.
        // Fixes two bugs: the parameter was previously misspelled
        // ("enableWavefrom"), turning the line below into a field
        // self-assignment, and captureSize was never saved at all, so a
        // deferred start captured with size 0.
        this.enableWaveform = enableWaveform;
        this.enableFft = enableFft;
        this.captureRate = captureRate;
        this.captureSize = captureSize;
        if (audioSessionId == null || !hasPermission) {
            pendingStartRequest = true;
            return;
        }
        pendingStartRequest = false;
        visualizer = new Visualizer(audioSessionId);
        visualizer.setCaptureSize(captureSize);
        visualizer.setDataCaptureListener(new Visualizer.OnDataCaptureListener() {
            public void onWaveFormDataCapture(Visualizer visualizer, byte[] waveform, int samplingRate) {
                waveformEventChannel.success(waveform);
            }
            public void onFftDataCapture(Visualizer visualizer, byte[] fft, int samplingRate) {
                fftEventChannel.success(fft);
            }
        }, captureRate, enableWaveform, enableFft);
        visualizer.setEnabled(true);
    }

    // Stops capturing and releases the native visualizer. Safe to call twice.
    public void stop() {
        if (visualizer == null) return;
        visualizer.setDataCaptureListener(null, captureRate, enableWaveform, enableFft);
        visualizer.setEnabled(false);
        visualizer.release();
        visualizer = null;
    }

    // Stops capturing and closes both event channels.
    public void dispose() {
        stop();
        waveformEventChannel.endOfStream();
        fftEventChannel.endOfStream();
    }
}

View File

@ -3,6 +3,8 @@ package com.ryanheise.just_audio;
import android.content.Context;
import androidx.annotation.NonNull;
import io.flutter.embedding.engine.plugins.FlutterPlugin;
import io.flutter.embedding.engine.plugins.activity.ActivityAware;
import io.flutter.embedding.engine.plugins.activity.ActivityPluginBinding;
import io.flutter.plugin.common.BinaryMessenger;
import io.flutter.plugin.common.MethodChannel;
import io.flutter.plugin.common.PluginRegistry.Registrar;
@ -10,7 +12,7 @@ import io.flutter.plugin.common.PluginRegistry.Registrar;
/**
* JustAudioPlugin
*/
public class JustAudioPlugin implements FlutterPlugin {
public class JustAudioPlugin implements FlutterPlugin, ActivityAware {
private MethodChannel channel;
private MainMethodCallHandler methodCallHandler;
@ -41,6 +43,25 @@ public class JustAudioPlugin implements FlutterPlugin {
stopListening();
}
@Override
public void onAttachedToActivity(ActivityPluginBinding binding) {
methodCallHandler.setActivityPluginBinding(binding);
}
@Override
public void onDetachedFromActivityForConfigChanges() {
}
@Override
public void onReattachedToActivityForConfigChanges(ActivityPluginBinding binding) {
methodCallHandler.setActivityPluginBinding(binding);
}
@Override
public void onDetachedFromActivity() {
methodCallHandler.setActivityPluginBinding(null);
}
private void startListening(Context applicationContext, BinaryMessenger messenger) {
methodCallHandler = new MainMethodCallHandler(applicationContext, messenger);

View File

@ -1,7 +1,9 @@
package com.ryanheise.just_audio;
import android.app.Activity;
import android.content.Context;
import androidx.annotation.NonNull;
import io.flutter.embedding.engine.plugins.activity.ActivityPluginBinding;
import io.flutter.plugin.common.BinaryMessenger;
import io.flutter.plugin.common.MethodCall;
import io.flutter.plugin.common.MethodChannel.MethodCallHandler;
@ -15,6 +17,7 @@ public class MainMethodCallHandler implements MethodCallHandler {
private final Context applicationContext;
private final BinaryMessenger messenger;
private ActivityPluginBinding activityPluginBinding;
private final Map<String, AudioPlayer> players = new HashMap<>();
@ -24,6 +27,13 @@ public class MainMethodCallHandler implements MethodCallHandler {
this.messenger = messenger;
}
// Stores the current activity binding (null when detached) and propagates it
// to every existing player so each can (un)register its permission listener.
void setActivityPluginBinding(ActivityPluginBinding activityPluginBinding) {
this.activityPluginBinding = activityPluginBinding;
for (AudioPlayer player : players.values()) {
player.setActivityPluginBinding(activityPluginBinding);
}
}
@Override
public void onMethodCall(MethodCall call, @NonNull Result result) {
final Map<?, ?> request = call.arguments();
@ -34,7 +44,11 @@ public class MainMethodCallHandler implements MethodCallHandler {
result.error("Platform player " + id + " already exists", null, null);
break;
}
players.put(id, new AudioPlayer(applicationContext, messenger, id));
final AudioPlayer player = new AudioPlayer(applicationContext, messenger, id);
players.put(id, player);
if (activityPluginBinding != null) {
player.setActivityPluginBinding(activityPluginBinding);
}
result.success(null);
break;
}

View File

@ -1,6 +1,8 @@
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
package="com.ryanheise.just_audio_example">
<uses-permission android:name="android.permission.RECORD_AUDIO"/>
<!-- io.flutter.app.FlutterApplication is an android.app.Application that
calls FlutterMain.startInitialization(this); in its onCreate method.
In most cases you can leave this as-is, but if you want to provide

View File

@ -124,16 +124,16 @@ packages:
just_audio_platform_interface:
dependency: transitive
description:
name: just_audio_platform_interface
url: "https://pub.dartlang.org"
source: hosted
path: "../../just_audio_platform_interface"
relative: true
source: path
version: "2.0.0"
just_audio_web:
dependency: transitive
description:
name: just_audio_web
url: "https://pub.dartlang.org"
source: hosted
path: "../../just_audio_web"
relative: true
source: path
version: "0.2.1"
matcher:
dependency: transitive

View File

@ -2,6 +2,7 @@ import 'dart:async';
import 'dart:convert';
import 'dart:io';
import 'dart:math';
import 'dart:typed_data';
import 'package:audio_session/audio_session.dart';
import 'package:crypto/crypto.dart';
@ -59,15 +60,24 @@ class AudioPlayer {
/// subscribe to the new platform's events.
StreamSubscription _playbackEventSubscription;
/// The subscription to the event channel for waveform data.
StreamSubscription _visualizerWaveformSubscription;
/// The subscription to the event channel for FFT data.
StreamSubscription _visualizerFftSubscription;
final String _id;
_ProxyHttpServer _proxy;
AudioSource _audioSource;
Map<String, AudioSource> _audioSources = {};
bool _disposed = false;
_InitialSeekValues _initialSeekValues;
StartVisualizerRequest _startVisualizerRequest;
PlaybackEvent _playbackEvent;
final _playbackEventSubject = BehaviorSubject<PlaybackEvent>(sync: true);
final _visualizerWaveformSubject = BehaviorSubject<Uint8List>();
final _visualizerFftSubject = BehaviorSubject<Uint8List>();
Future<Duration> _durationFuture;
final _durationSubject = BehaviorSubject<Duration>();
final _processingStateSubject = BehaviorSubject<ProcessingState>();
@ -227,6 +237,13 @@ class AudioPlayer {
/// A stream of [PlaybackEvent]s.
Stream<PlaybackEvent> get playbackEventStream => _playbackEventSubject.stream;
/// A stream of visualizer waveform data in unsigned 8 bit PCM.
Stream<Uint8List> get visualizerWaveformStream =>
_visualizerWaveformSubject.stream;
/// A stream of visualizer FFT data.
Stream<Uint8List> get visualizerFftStream => _visualizerFftSubject.stream;
/// The duration of the current audio or null if unknown.
Duration get duration => _playbackEvent.duration;
@ -894,11 +911,42 @@ class AudioPlayer {
usage: audioAttributes.usage.value));
}
/// Start the visualizer by capturing [captureSize] samples of audio at
/// [captureRate] millihertz and return the sampling rate of the audio. If
/// [enableWaveform] is `true`, the captured samples will be broadcast via
/// [visualizerWaveformStream]. If [enableFft] is `true`, the FFT data for
/// each capture will be broadcast via [visualizerFftStream]. You should call
/// [stopVisualizer] to stop capturing audio data.
Future<int> startVisualizer({
bool enableWaveform = true,
bool enableFft = true,
int captureRate,
int captureSize,
}) async {
// The request is cached in _startVisualizerRequest so it can be re-issued
// if the underlying platform implementation is swapped out later.
return (await (await _platform).startVisualizer(_startVisualizerRequest =
StartVisualizerRequest(
enableWaveform: enableWaveform,
enableFft: enableFft,
captureRate: captureRate,
captureSize: captureSize)))
?.samplingRate;
}
/// Stop capturing audio data for the visualizer.
Future<void> stopVisualizer() async {
// Clear the cached request so a future platform switch won't restart it.
_startVisualizerRequest = null;
// Await the platform call so the returned future completes only after the
// platform has stopped, and so errors propagate instead of being dropped
// (previously this was fire-and-forget).
await (await _platform).stopVisualizer(StopVisualizerRequest());
}
/// Release all resources associated with this player. You must invoke this
/// after you are done with the player.
Future<void> dispose() async {
if (_disposed) return;
_disposed = true;
await _visualizerWaveformSubscription?.cancel();
await _visualizerFftSubscription?.cancel();
await _visualizerWaveformSubject.close();
await _visualizerFftSubject.close();
if (_nativePlatform != null) {
await _disposePlatform(await _nativePlatform);
_nativePlatform = null;
@ -940,6 +988,8 @@ class AudioPlayer {
final durationCompleter = Completer<Duration>();
_platform = Future<AudioPlayerPlatform>(() async {
_playbackEventSubscription?.cancel();
_visualizerWaveformSubscription?.cancel();
_visualizerFftSubscription?.cancel();
if (oldPlatformFuture != null) {
final oldPlatform = await oldPlatformFuture;
if (oldPlatform != _idlePlatform) {
@ -953,6 +1003,10 @@ class AudioPlayer {
? await (_nativePlatform =
JustAudioPlatform.instance.init(InitRequest(id: _id)))
: _idlePlatform;
_visualizerWaveformSubscription = platform.visualizerWaveformStream
.listen(_visualizerWaveformSubject.add);
_visualizerFftSubscription =
platform.visualizerFftStream.listen(_visualizerFftSubject.add);
_playbackEventSubscription =
platform.playbackEventMessageStream.listen((message) {
var duration = message.duration;
@ -1005,6 +1059,9 @@ class AudioPlayer {
SetAutomaticallyWaitsToMinimizeStallingRequest(
enabled: automaticallyWaitsToMinimizeStalling));
}
if (_startVisualizerRequest != null) {
await platform.startVisualizer(_startVisualizerRequest);
}
await platform.setVolume(SetVolumeRequest(volume: volume));
await platform.setSpeed(SetSpeedRequest(speed: speed));
await platform.setLoopMode(SetLoopModeRequest(
@ -2273,6 +2330,8 @@ enum LoopMode { off, one, all }
/// state and the native platform is deallocated.
class _IdleAudioPlayer extends AudioPlayerPlatform {
final _eventSubject = BehaviorSubject<PlaybackEventMessage>();
final _visualizerWaveformSubject = BehaviorSubject<Uint8List>();
final _visualizerFftSubject = BehaviorSubject<Uint8List>();
Duration _position;
int _index;
List<IndexedAudioSource> _sequence;
@ -2310,6 +2369,13 @@ class _IdleAudioPlayer extends AudioPlayerPlatform {
Stream<PlaybackEventMessage> get playbackEventMessageStream =>
_eventSubject.stream;
@override
Stream<Uint8List> get visualizerWaveformStream =>
_visualizerWaveformSubject.stream;
@override
Stream<Uint8List> get visualizerFftStream => _visualizerFftSubject.stream;
@override
Future<LoadResponse> load(LoadRequest request) async {
_index = request.initialIndex ?? 0;
@ -2379,6 +2445,9 @@ class _IdleAudioPlayer extends AudioPlayerPlatform {
@override
Future<DisposeResponse> dispose(DisposeRequest request) async {
await _eventSubject.close();
await _visualizerWaveformSubject.close();
await _visualizerFftSubject.close();
return DisposeResponse();
}
@ -2399,6 +2468,18 @@ class _IdleAudioPlayer extends AudioPlayerPlatform {
ConcatenatingMoveRequest request) async {
return ConcatenatingMoveResponse();
}
@override
Future<StartVisualizerResponse> startVisualizer(
StartVisualizerRequest request) async {
return StartVisualizerResponse();
}
@override
Future<StopVisualizerResponse> stopVisualizer(
StopVisualizerRequest request) async {
return StopVisualizerResponse();
}
}
/// Holds the initial requested position and index for a newly loaded audio

View File

@ -194,16 +194,16 @@ packages:
just_audio_platform_interface:
dependency: "direct main"
description:
name: just_audio_platform_interface
url: "https://pub.dartlang.org"
source: hosted
path: "../just_audio_platform_interface"
relative: true
source: path
version: "2.0.0"
just_audio_web:
dependency: "direct main"
description:
name: just_audio_web
url: "https://pub.dartlang.org"
source: hosted
path: "../just_audio_web"
relative: true
source: path
version: "0.2.1"
logging:
dependency: transitive

View File

@ -8,8 +8,12 @@ environment:
flutter: ">=1.12.13+hotfix.5"
dependencies:
just_audio_platform_interface: ^2.0.0
just_audio_web: ^0.2.1
# just_audio_platform_interface: ^2.0.0
just_audio_platform_interface:
path: ../just_audio_platform_interface
# just_audio_web: ^0.2.1
just_audio_web:
path: ../just_audio_web
audio_session: ^0.0.10
rxdart: ">= 0.24.1 < 0.26.0"
path: ^1.6.4

View File

@ -1,4 +1,5 @@
import 'dart:async';
import 'dart:typed_data';
import 'package:flutter/foundation.dart';
import 'package:meta/meta.dart' show required;
@ -68,6 +69,17 @@ abstract class AudioPlayerPlatform {
'playbackEventMessageStream has not been implemented.');
}
/// A stream of visualizer waveform data.
Stream<Uint8List> get visualizerWaveformStream {
throw UnimplementedError(
'visualizerWaveformStream has not been implemented.');
}
/// A stream of visualizer fft data.
Stream<Uint8List> get visualizerFftStream {
throw UnimplementedError('visualizerFftStream has not been implemented.');
}
/// Loads an audio source.
Future<LoadResponse> load(LoadRequest request) {
throw UnimplementedError("load() has not been implemented.");
@ -157,6 +169,17 @@ abstract class AudioPlayerPlatform {
ConcatenatingMoveRequest request) {
throw UnimplementedError("concatenatingMove() has not been implemented.");
}
/// Starts the visualizer.
Future<StartVisualizerResponse> startVisualizer(
StartVisualizerRequest request) {
throw UnimplementedError("startVisualizer() has not been implemented.");
}
/// Stops the visualizer.
Future<StopVisualizerResponse> stopVisualizer(StopVisualizerRequest request) {
throw UnimplementedError("stopVisualizer() has not been implemented.");
}
}
/// A playback event communicated from the platform implementation to the
@ -624,6 +647,53 @@ class ConcatenatingMoveResponse {
ConcatenatingMoveResponse();
}
/// Information communicated to the platform implementation when starting the
/// visualizer.
class StartVisualizerRequest {
// Whether captured waveform samples should be broadcast.
final bool enableWaveform;
// Whether FFT data for each capture should be broadcast.
final bool enableFft;
// Capture rate in millihertz; null lets the platform choose a default.
final int captureRate;
// Number of samples per capture; null lets the platform choose a default.
final int captureSize;
StartVisualizerRequest({
@required this.enableWaveform,
@required this.enableFft,
@required this.captureRate,
@required this.captureSize,
});
// Serializes this request for transmission over the method channel.
Map<dynamic, dynamic> toMap() => {
'enableWaveform': enableWaveform,
'enableFft': enableFft,
'captureRate': captureRate,
'captureSize': captureSize,
};
}
/// Information returned by the platform implementation after starting the
/// visualizer.
class StartVisualizerResponse {
// The sampling rate of the captured audio as reported by the platform.
// NOTE(review): Android's Visualizer reports this value in millihertz —
// confirm the units expected by callers.
final int samplingRate;
StartVisualizerResponse({@required this.samplingRate});
// Deserializes a response received over the method channel.
static StartVisualizerResponse fromMap(Map<dynamic, dynamic> map) =>
StartVisualizerResponse(samplingRate: map['samplingRate']);
}
/// Information communicated to the platform implementation when stopping the
/// visualizer.
class StopVisualizerRequest {
Map<dynamic, dynamic> toMap() => {};
}
/// Information returned by the platform implementation after stopping the
/// visualizer.
class StopVisualizerResponse {
static StopVisualizerResponse fromMap(Map<dynamic, dynamic> map) =>
StopVisualizerResponse();
}
/// Information about an audio source to be communicated with the platform
/// implementation.
abstract class AudioSourceMessage {

View File

@ -1,4 +1,5 @@
import 'dart:async';
import 'dart:typed_data';
import 'package:flutter/services.dart';
@ -36,6 +37,18 @@ class MethodChannelAudioPlayer extends AudioPlayerPlatform {
.receiveBroadcastStream()
.map((map) => PlaybackEventMessage.fromMap(map));
@override
Stream<Uint8List> get visualizerWaveformStream =>
EventChannel('com.ryanheise.just_audio.waveform_events.$id')
.receiveBroadcastStream()
.cast<Uint8List>();
@override
Stream<Uint8List> get visualizerFftStream =>
EventChannel('com.ryanheise.just_audio.fft_events.$id')
.receiveBroadcastStream()
.cast<Uint8List>();
@override
Future<LoadResponse> load(LoadRequest request) async {
return LoadResponse.fromMap(
@ -134,4 +147,18 @@ class MethodChannelAudioPlayer extends AudioPlayerPlatform {
return ConcatenatingMoveResponse.fromMap(
await _channel.invokeMethod('concatenatingMove', request?.toMap()));
}
@override
Future<StartVisualizerResponse> startVisualizer(
StartVisualizerRequest request) async {
return StartVisualizerResponse.fromMap(
await _channel.invokeMethod('startVisualizer', request?.toMap()));
}
@override
Future<StopVisualizerResponse> stopVisualizer(
StopVisualizerRequest request) async {
return StopVisualizerResponse.fromMap(
await _channel.invokeMethod('stopVisualizer', request?.toMap()));
}
}

View File

@ -11,7 +11,9 @@ flutter:
fileName: just_audio_web.dart
dependencies:
just_audio_platform_interface: ^2.0.0
# just_audio_platform_interface: ^2.0.0
just_audio_platform_interface:
path: ../just_audio_platform_interface
flutter:
sdk: flutter
flutter_web_plugins: