Use audio_session.
parent e60a672722
commit b8ae308eec
@@ -8,7 +8,7 @@ buildscript {
    }

    dependencies {
        classpath 'com.android.tools.build:gradle:3.6.3'
        classpath 'com.android.tools.build:gradle:3.5.0'
    }
}
@@ -34,7 +34,7 @@ android {
    defaultConfig {
        // TODO: Specify your own unique Application ID (https://developer.android.com/studio/build/application-id.html).
        applicationId "com.ryanheise.just_audio_example"
        minSdkVersion 16
        minSdkVersion 19
        targetSdkVersion 28
        versionCode flutterVersionCode.toInteger()
        versionName flutterVersionName
@@ -5,7 +5,7 @@ buildscript {
    }

    dependencies {
        classpath 'com.android.tools.build:gradle:3.6.3'
        classpath 'com.android.tools.build:gradle:3.5.0'
    }
}
@@ -1,6 +1,5 @@
#Sun Jun 07 15:20:36 BST 2020
distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
distributionUrl=https\://services.gradle.org/distributions/gradle-5.6.2-bin.zip
zipStoreBase=GRADLE_USER_HOME
zipStorePath=wrapper/dists
distributionUrl=https\://services.gradle.org/distributions/gradle-5.6.4-all.zip
@@ -0,0 +1 @@
include ':app'
@@ -5,12 +5,14 @@
// ignore: unused_import
import 'dart:ui';

import 'package:audio_session/audio_session_web.dart';
import 'package:just_audio/just_audio_web.dart';

import 'package:flutter_web_plugins/flutter_web_plugins.dart';

// ignore: public_member_api_docs
void registerPlugins(PluginRegistry registry) {
  AudioSessionWeb.registerWith(registry.registrarFor(AudioSessionWeb));
  JustAudioPlugin.registerWith(registry.registrarFor(JustAudioPlugin));
  registry.registerMessageHandler();
}
@@ -1,5 +1,6 @@
import 'dart:math';

import 'package:audio_session/audio_session.dart';
import 'package:flutter/material.dart';
import 'package:flutter/services.dart';
import 'package:just_audio/just_audio.dart';
@@ -53,15 +54,16 @@ class _MyAppState extends State<MyApp> {
  @override
  void initState() {
    super.initState();
    AudioPlayer.setIosCategory(IosCategory.playback);
    _player = AudioPlayer();
    _player = AudioPlayer(handleInterruptions: true);
    SystemChrome.setSystemUIOverlayStyle(SystemUiOverlayStyle(
      statusBarColor: Colors.black,
    ));
    _loadAudio();
    _init();
  }

  _loadAudio() async {
  _init() async {
    final session = await AudioSession.instance;
    await session.configure(AudioSessionConfiguration.speech());
    try {
      await _player.load(_playlist);
    } catch (e) {
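
Note (not part of the diff): pieced together from the hunk above, the example's new start-up flow reads roughly as follows. This is only a sketch; _playlist is the audio source defined elsewhere in the example, and the catch body shown here is an assumed placeholder rather than the example's actual error handling.

  @override
  void initState() {
    super.initState();
    _player = AudioPlayer(handleInterruptions: true);
    _init();
  }

  Future<void> _init() async {
    // Obtain the shared session and configure it for spoken-word audio.
    final session = await AudioSession.instance;
    await session.configure(AudioSessionConfiguration.speech());
    try {
      // _playlist is defined elsewhere in the example app.
      await _player.load(_playlist);
    } catch (e) {
      // Placeholder: log load errors (e.g. no network) so the UI can still build.
      print("Error loading audio source: $e");
    }
  }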
@@ -8,6 +8,13 @@ packages:
      url: "https://pub.dartlang.org"
    source: hosted
    version: "2.4.2"
  audio_session:
    dependency: "direct main"
    description:
      path: "../../audio_session"
      relative: true
    source: path
    version: "0.0.1"
  boolean_selector:
    dependency: transitive
    description:
@@ -106,7 +113,7 @@ packages:
      path: ".."
      relative: true
    source: path
    version: "0.3.2"
    version: "0.3.3"
  matcher:
    dependency: transitive
    description:
@@ -261,4 +268,4 @@ packages:
    version: "0.1.0"
sdks:
  dart: ">=2.9.0-14.0.dev <3.0.0"
  flutter: ">=1.12.13+hotfix.5 <2.0.0"
  flutter: ">=1.20.0 <2.0.0"
@@ -7,6 +7,8 @@ environment:
  flutter: ">=1.12.8 <2.0.0"

dependencies:
  audio_session:
    path: ../../audio_session
  flutter:
    sdk: flutter
@@ -2,6 +2,7 @@ import 'dart:async';
import 'dart:io';
import 'dart:math';

import 'package:audio_session/audio_session.dart';
import 'package:flutter/foundation.dart';
import 'package:flutter/services.dart';
import 'package:flutter/widgets.dart';
@@ -38,26 +39,6 @@ class AudioPlayer {
    return MethodChannel('com.ryanheise.just_audio.methods.$id');
  }

  /// Configure the audio session category on iOS. This method should be called
  /// before playing any audio. It has no effect on Android or Flutter for Web.
  ///
  /// Note that the default category on iOS is [IosCategory.soloAmbient], but
  /// for a typical media app, Apple recommends setting this to
  /// [IosCategory.playback]. If you don't call this method, `just_audio` will
  /// respect any prior category that was already set on your app's audio
  /// session and will leave it alone. If it hasn't been previously set, this
  /// will be [IosCategory.soloAmbient]. But if another audio plugin in your
  /// app has configured a particular category, that will also be left alone.
  ///
  /// Note: If you use other audio plugins in conjunction with this one, it is
  /// possible that each of those audio plugins may override the setting you
  /// choose here. (You may consider asking the developers of the other plugins
  /// to provide similar configurability so that you have complete control over
  /// setting the overall category that you want for your app.)
  static Future<void> setIosCategory(IosCategory category) async {
    await _mainChannel.invokeMethod('setIosCategory', category.index);
  }

  final Future<MethodChannel> _channel;
  final String _id;
  _ProxyHttpServer _proxy;
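
Note (not part of the diff): with setIosCategory gone, the equivalent configuration is done once through audio_session before any audio plays. A minimal migration sketch, assuming AudioSessionConfiguration.music() maps to the iOS playback category that the removed docs recommended:

  // Before (removed in this commit):
  //   await AudioPlayer.setIosCategory(IosCategory.playback);

  // After: configure the shared session up front via audio_session.
  final session = await AudioSession.instance;
  await session.configure(AudioSessionConfiguration.music());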
@@ -85,11 +66,14 @@ class AudioPlayer {
  final _androidAudioSessionIdSubject = BehaviorSubject<int>();
  BehaviorSubject<Duration> _positionSubject;
  bool _automaticallyWaitsToMinimizeStalling = true;
  bool _playInterrupted = false;

  /// Creates an [AudioPlayer].
  factory AudioPlayer() => AudioPlayer._internal(_uuid.v4());
  factory AudioPlayer({bool handleInterruptions = false}) =>
      AudioPlayer._internal(_uuid.v4(), handleInterruptions);

  AudioPlayer._internal(this._id) : _channel = _init(_id) {
  AudioPlayer._internal(this._id, bool handleInterruptions)
      : _channel = _init(_id) {
    _playbackEvent = PlaybackEvent(
      processingState: ProcessingState.none,
      updatePosition: Duration.zero,
@@ -126,6 +110,43 @@ class AudioPlayer {
        androidAudioSessionId: data['androidAudioSessionId'],
      );
      //print("created event object with state: ${_playbackEvent.state}");
      if (handleInterruptions) {
        AudioSession.instance.then((session) {
          session.interruptionEventStream.listen((event) {
            switch (event) {
              case AudioInterruptionEvent.pauseIndefinitely:
              case AudioInterruptionEvent.pauseTemporarily:
                if (playing) {
                  pause();
                  // Although pause is async and sets _playInterrupted = false,
                  // this is done in the sync portion.
                  _playInterrupted = true;
                }
                break;
              case AudioInterruptionEvent.duck:
                if (session.androidAudioAttributes.usage ==
                    AndroidAudioUsage.game) {
                  setVolume(volume / 2);
                }
                _playInterrupted = false;
                break;
              case AudioInterruptionEvent.end:
                _playInterrupted = false;
                break;
              case AudioInterruptionEvent.resume:
                if (_playInterrupted) play();
                _playInterrupted = false;
                break;
              case AudioInterruptionEvent.unduck:
                setVolume(min(1.0, volume * 2));
                _playInterrupted = false;
                break;
              default:
                break;
            }
          });
        });
      }
      return _playbackEvent;
    } catch (e, stacktrace) {
      print("Error parsing event: $e");
@@ -502,7 +523,9 @@ class AudioPlayer {
  /// [processingStateStream].
  Future<void> play() async {
    if (playing) return;
    _playInterrupted = false;
    _playingSubject.add(true);
    await AudioSession.ensurePrepared(ensureActive: true);
    await _invokeMethod('play');
  }
@@ -510,6 +533,7 @@ class AudioPlayer {
  /// ![playing].
  Future<void> pause() async {
    if (!playing) return;
    _playInterrupted = false;
    // Update local state immediately so that queries aren't surprised.
    _playbackEvent = _playbackEvent.copyWith(
      updatePosition: position,
@@ -871,67 +895,6 @@ class SequenceState {
  IndexedAudioSource get currentSource => sequence[currentIndex];
}

/// The audio session categories on iOS, to be used with
/// [AudioPlayer.setIosCategory].
enum IosCategory {
  ambient,
  soloAmbient,
  playback,
  record,
  playAndRecord,
  multiRoute,
}

/// The Android AudioAttributes to use with a player.
class AndroidAudioAttributes {
  static const FLAG_AUDIBILITY_ENFORCED = 0x1 << 0;
  final AndroidAudioContentType contentType;
  final int flags;
  final AndroidAudioUsage usage;
  final AndroidAudioAllowedCapturePolicy allowedCapturePolicy;

  AndroidAudioAttributes({
    this.contentType = AndroidAudioContentType.unknown,
    this.flags = 0,
    this.usage = AndroidAudioUsage.unknown,
    this.allowedCapturePolicy = AndroidAudioAllowedCapturePolicy.all,
  });

  Map toJson() => {
        'contentType': contentType.index,
        'flags': flags,
        'usage': usage.index,
        // The Android constant values for this enum are 1-indexed
        'allowedCapturePolicy': allowedCapturePolicy.index + 1,
      };
}

/// The content type options for [AndroidAudioAttributes].
enum AndroidAudioContentType { unknown, speech, music, movie, sonification }

/// The usage options for [AndroidAudioAttributes].
enum AndroidAudioUsage {
  unknown,
  media,
  voiceCommunication,
  voiceCommunicationSignalling,
  alarm,
  notification,
  notificationRingtone,
  notificationCommunicationRequest,
  notificationCommunicationInstant,
  notificationCommunicationDelayed,
  notificationEvent,
  assistanceAccessibility,
  assistanceNavigationGuidance,
  assistanceSonification,
  unused_1,
  assistant,
}

/// The allowed capture policy options for [AndroidAudioAttributes].
enum AndroidAudioAllowedCapturePolicy { all, system, none }

/// A local proxy HTTP server for making remote GET requests with headers.
///
/// TODO: Recursively attach headers to items in playlists like m3u8.
/// TODO: Recursively attach headers to items in playlists like m3u8.
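
Note (not part of the diff): the IosCategory enum and Android audio attribute types removed here are superseded by audio_session, where attributes are set on the shared session configuration rather than per player. A hedged sketch, assuming audio_session exposes AndroidAudioAttributes under the same names used in the interruption hunk above:

  final session = await AudioSession.instance;
  await session.configure(AudioSessionConfiguration(
    androidAudioAttributes: AndroidAudioAttributes(
      contentType: AndroidAudioContentType.music,
      usage: AndroidAudioUsage.media,
    ),
  ));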
@@ -8,6 +8,13 @@ packages:
      url: "https://pub.dartlang.org"
    source: hosted
    version: "2.4.2"
  audio_session:
    dependency: "direct main"
    description:
      path: "../audio_session"
      relative: true
    source: path
    version: "0.0.1"
  boolean_selector:
    dependency: transitive
    description:
@@ -247,4 +254,4 @@ packages:
    version: "0.1.0"
sdks:
  dart: ">=2.9.0-14.0.dev <3.0.0"
  flutter: ">=1.12.13+hotfix.5 <2.0.0"
  flutter: ">=1.20.0 <2.0.0"
@@ -8,6 +8,8 @@ environment:
  flutter: ">=1.12.8 <2.0.0"

dependencies:
  audio_session:
    path: ../audio_session
  rxdart: ^0.24.1
  path: ^1.6.4
  path_provider: ^1.6.10