Use audio_session.

parent e60a672722
commit b8ae308eec
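Summary of the change (drawn from the hunks below): both the plugin and the example add a path dependency on the new audio_session package, the web registrant registers AudioSessionWeb, and the example configures the session via AudioSession.instance before loading audio. AudioPlayer.setIosCategory and the IosCategory, AndroidAudioAttributes, AndroidAudioContentType, AndroidAudioUsage and AndroidAudioAllowedCapturePolicy types are removed from just_audio.dart; in their place, AudioPlayer gains an opt-in handleInterruptions flag that pauses, ducks, unducks and resumes playback in response to audio interruption events. The Android Gradle plugin version, Gradle wrapper distribution and the example's minSdkVersion are adjusted as part of the same change.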
@@ -8,7 +8,7 @@ buildscript {
     }
 
     dependencies {
-        classpath 'com.android.tools.build:gradle:3.6.3'
+        classpath 'com.android.tools.build:gradle:3.5.0'
     }
 }
 
@@ -34,7 +34,7 @@ android {
     defaultConfig {
         // TODO: Specify your own unique Application ID (https://developer.android.com/studio/build/application-id.html).
         applicationId "com.ryanheise.just_audio_example"
-        minSdkVersion 16
+        minSdkVersion 19
         targetSdkVersion 28
         versionCode flutterVersionCode.toInteger()
         versionName flutterVersionName
@@ -5,7 +5,7 @@ buildscript {
     }
 
     dependencies {
-        classpath 'com.android.tools.build:gradle:3.6.3'
+        classpath 'com.android.tools.build:gradle:3.5.0'
     }
 }
 
@@ -1,6 +1,5 @@
-#Sun Jun 07 15:20:36 BST 2020
 distributionBase=GRADLE_USER_HOME
 distributionPath=wrapper/dists
+distributionUrl=https\://services.gradle.org/distributions/gradle-5.6.2-bin.zip
 zipStoreBase=GRADLE_USER_HOME
 zipStorePath=wrapper/dists
-distributionUrl=https\://services.gradle.org/distributions/gradle-5.6.4-all.zip
@@ -0,0 +1 @@
+include ':app'
@@ -5,12 +5,14 @@
 // ignore: unused_import
 import 'dart:ui';
 
+import 'package:audio_session/audio_session_web.dart';
 import 'package:just_audio/just_audio_web.dart';
 
 import 'package:flutter_web_plugins/flutter_web_plugins.dart';
 
 // ignore: public_member_api_docs
 void registerPlugins(PluginRegistry registry) {
+  AudioSessionWeb.registerWith(registry.registrarFor(AudioSessionWeb));
   JustAudioPlugin.registerWith(registry.registrarFor(JustAudioPlugin));
   registry.registerMessageHandler();
 }
@@ -1,5 +1,6 @@
 import 'dart:math';
 
+import 'package:audio_session/audio_session.dart';
 import 'package:flutter/material.dart';
 import 'package:flutter/services.dart';
 import 'package:just_audio/just_audio.dart';
@@ -53,15 +54,16 @@ class _MyAppState extends State<MyApp> {
   @override
   void initState() {
     super.initState();
-    AudioPlayer.setIosCategory(IosCategory.playback);
-    _player = AudioPlayer();
+    _player = AudioPlayer(handleInterruptions: true);
     SystemChrome.setSystemUIOverlayStyle(SystemUiOverlayStyle(
       statusBarColor: Colors.black,
     ));
-    _loadAudio();
+    _init();
   }
 
-  _loadAudio() async {
+  _init() async {
+    final session = await AudioSession.instance;
+    await session.configure(AudioSessionConfiguration.speech());
     try {
       await _player.load(_playlist);
     } catch (e) {
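For apps following this change, the example's new flow is: create the player (opting in to interruption handling), configure the shared audio session, then load audio. A minimal sketch of that flow, assuming only the APIs visible in this diff; initPlayer and its source parameter are hypothetical stand-ins for the example's _init() and _playlist:

import 'package:audio_session/audio_session.dart';
import 'package:just_audio/just_audio.dart';

Future<AudioPlayer> initPlayer(AudioSource source) async {
  // Opt in to the interruption handling added in this commit.
  final player = AudioPlayer(handleInterruptions: true);
  // Configure the shared audio session before any playback starts.
  final session = await AudioSession.instance;
  await session.configure(AudioSessionConfiguration.speech());
  try {
    await player.load(source);
  } catch (e) {
    // Load errors (e.g. an unreachable URL) surface here, as in the example.
    print('Failed to load audio: $e');
  }
  return player;
}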
@@ -8,6 +8,13 @@ packages:
       url: "https://pub.dartlang.org"
     source: hosted
     version: "2.4.2"
+  audio_session:
+    dependency: "direct main"
+    description:
+      path: "../../audio_session"
+      relative: true
+    source: path
+    version: "0.0.1"
   boolean_selector:
     dependency: transitive
     description:
@@ -106,7 +113,7 @@ packages:
       path: ".."
       relative: true
     source: path
-    version: "0.3.2"
+    version: "0.3.3"
   matcher:
     dependency: transitive
     description:
@@ -261,4 +268,4 @@ packages:
     version: "0.1.0"
 sdks:
   dart: ">=2.9.0-14.0.dev <3.0.0"
-  flutter: ">=1.12.13+hotfix.5 <2.0.0"
+  flutter: ">=1.20.0 <2.0.0"
@@ -7,6 +7,8 @@ environment:
   flutter: ">=1.12.8 <2.0.0"
 
 dependencies:
+  audio_session:
+    path: ../../audio_session
   flutter:
     sdk: flutter
 
@@ -2,6 +2,7 @@ import 'dart:async';
 import 'dart:io';
 import 'dart:math';
 
+import 'package:audio_session/audio_session.dart';
 import 'package:flutter/foundation.dart';
 import 'package:flutter/services.dart';
 import 'package:flutter/widgets.dart';
@@ -38,26 +39,6 @@ class AudioPlayer {
     return MethodChannel('com.ryanheise.just_audio.methods.$id');
   }
 
-  /// Configure the audio session category on iOS. This method should be called
-  /// before playing any audio. It has no effect on Android or Flutter for Web.
-  ///
-  /// Note that the default category on iOS is [IosCategory.soloAmbient], but
-  /// for a typical media app, Apple recommends setting this to
-  /// [IosCategory.playback]. If you don't call this method, `just_audio` will
-  /// respect any prior category that was already set on your app's audio
-  /// session and will leave it alone. If it hasn't been previously set, this
-  /// will be [IosCategory.soloAmbient]. But if another audio plugin in your
-  /// app has configured a particular category, that will also be left alone.
-  ///
-  /// Note: If you use other audio plugins in conjunction with this one, it is
-  /// possible that each of those audio plugins may override the setting you
-  /// choose here. (You may consider asking the developers of the other plugins
-  /// to provide similar configurability so that you have complete control over
-  /// setting the overall category that you want for your app.)
-  static Future<void> setIosCategory(IosCategory category) async {
-    await _mainChannel.invokeMethod('setIosCategory', category.index);
-  }
-
   final Future<MethodChannel> _channel;
   final String _id;
   _ProxyHttpServer _proxy;
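Code that called the removed method now configures the session through audio_session instead. A hedged before/after sketch; only AudioSessionConfiguration.speech() appears in this diff, so any other preset name would need checking against the audio_session package:

import 'package:audio_session/audio_session.dart';

// Before (removed in this commit):
//   AudioPlayer.setIosCategory(IosCategory.playback);

// After: configure the shared session once, before playing any audio.
Future<void> configureSession() async {
  final session = await AudioSession.instance;
  await session.configure(AudioSessionConfiguration.speech());
}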
@@ -85,11 +66,14 @@ class AudioPlayer {
   final _androidAudioSessionIdSubject = BehaviorSubject<int>();
   BehaviorSubject<Duration> _positionSubject;
   bool _automaticallyWaitsToMinimizeStalling = true;
+  bool _playInterrupted = false;
 
   /// Creates an [AudioPlayer].
-  factory AudioPlayer() => AudioPlayer._internal(_uuid.v4());
+  factory AudioPlayer({bool handleInterruptions = false}) =>
+      AudioPlayer._internal(_uuid.v4(), handleInterruptions);
 
-  AudioPlayer._internal(this._id) : _channel = _init(_id) {
+  AudioPlayer._internal(this._id, bool handleInterruptions)
+      : _channel = _init(_id) {
     _playbackEvent = PlaybackEvent(
       processingState: ProcessingState.none,
       updatePosition: Duration.zero,
@@ -126,6 +110,43 @@ class AudioPlayer {
           androidAudioSessionId: data['androidAudioSessionId'],
         );
         //print("created event object with state: ${_playbackEvent.state}");
+        if (handleInterruptions) {
+          AudioSession.instance.then((session) {
+            session.interruptionEventStream.listen((event) {
+              switch (event) {
+                case AudioInterruptionEvent.pauseIndefinitely:
+                case AudioInterruptionEvent.pauseTemporarily:
+                  if (playing) {
+                    pause();
+                    // Although pause is async and sets _playInterrupted = false,
+                    // this is done in the sync portion.
+                    _playInterrupted = true;
+                  }
+                  break;
+                case AudioInterruptionEvent.duck:
+                  if (session.androidAudioAttributes.usage ==
+                      AndroidAudioUsage.game) {
+                    setVolume(volume / 2);
+                  }
+                  _playInterrupted = false;
+                  break;
+                case AudioInterruptionEvent.end:
+                  _playInterrupted = false;
+                  break;
+                case AudioInterruptionEvent.resume:
+                  if (_playInterrupted) play();
+                  _playInterrupted = false;
+                  break;
+                case AudioInterruptionEvent.unduck:
+                  setVolume(min(1.0, volume * 2));
+                  _playInterrupted = false;
+                  break;
+                default:
+                  break;
+              }
+            });
+          });
+        }
         return _playbackEvent;
       } catch (e, stacktrace) {
         print("Error parsing event: $e");
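Apps that leave handleInterruptions at its default of false can reproduce this behaviour themselves with the same stream the plugin listens to. A rough sketch using only the names appearing in this hunk (interruptionEventStream and the AudioInterruptionEvent values); handleInterruptionsManually is a hypothetical helper:

import 'package:audio_session/audio_session.dart';
import 'package:just_audio/just_audio.dart';

Future<void> handleInterruptionsManually(AudioPlayer player) async {
  final session = await AudioSession.instance;
  session.interruptionEventStream.listen((event) {
    switch (event) {
      case AudioInterruptionEvent.pauseIndefinitely:
      case AudioInterruptionEvent.pauseTemporarily:
        // Another app has taken the audio focus: stop playback for now.
        player.pause();
        break;
      case AudioInterruptionEvent.duck:
        // Lower the volume for the duration of the interruption.
        player.setVolume(player.volume / 2);
        break;
      case AudioInterruptionEvent.resume:
        player.play();
        break;
      case AudioInterruptionEvent.unduck:
        player.setVolume(1.0);
        break;
      default:
        break;
    }
  });
}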
@@ -502,7 +523,9 @@ class AudioPlayer {
   /// [processingStateStream].
   Future<void> play() async {
     if (playing) return;
+    _playInterrupted = false;
     _playingSubject.add(true);
+    await AudioSession.ensurePrepared(ensureActive: true);
     await _invokeMethod('play');
   }
 
@@ -510,6 +533,7 @@ class AudioPlayer {
   /// ![playing].
   Future<void> pause() async {
     if (!playing) return;
+    _playInterrupted = false;
     // Update local state immediately so that queries aren't surprised.
     _playbackEvent = _playbackEvent.copyWith(
       updatePosition: position,
@@ -871,67 +895,6 @@ class SequenceState {
   IndexedAudioSource get currentSource => sequence[currentIndex];
 }
 
-/// The audio session categories on iOS, to be used with
-/// [AudioPlayer.setIosCategory].
-enum IosCategory {
-  ambient,
-  soloAmbient,
-  playback,
-  record,
-  playAndRecord,
-  multiRoute,
-}
-
-/// The Android AudioAttributes to use with a player.
-class AndroidAudioAttributes {
-  static const FLAG_AUDIBILITY_ENFORCED = 0x1 << 0;
-  final AndroidAudioContentType contentType;
-  final int flags;
-  final AndroidAudioUsage usage;
-  final AndroidAudioAllowedCapturePolicy allowedCapturePolicy;
-
-  AndroidAudioAttributes({
-    this.contentType = AndroidAudioContentType.unknown,
-    this.flags = 0,
-    this.usage = AndroidAudioUsage.unknown,
-    this.allowedCapturePolicy = AndroidAudioAllowedCapturePolicy.all,
-  });
-
-  Map toJson() => {
-        'contentType': contentType.index,
-        'flags': flags,
-        'usage': usage.index,
-        // The Android constant values for this enum are 1-indexed
-        'allowedCapturePolicy': allowedCapturePolicy.index + 1,
-      };
-}
-
-/// The content type options for [AndroidAudioAttributes].
-enum AndroidAudioContentType { unknown, speech, music, movie, sonification }
-
-/// The usage options for [AndroidAudioAttributes].
-enum AndroidAudioUsage {
-  unknown,
-  media,
-  voiceCommunication,
-  voiceCommunicationSignalling,
-  alarm,
-  notification,
-  notificationRingtone,
-  notificationCommunicationRequest,
-  notificationCommunicationInstant,
-  notificationCommunicationDelayed,
-  notificationEvent,
-  assistanceAccessibility,
-  assistanceNavigationGuidance,
-  assistanceSonification,
-  unused_1,
-  assistant,
-}
-
-/// The allowed capture policy options for [AndroidAudioAttributes].
-enum AndroidAudioAllowedCapturePolicy { all, system, none }
-
 /// A local proxy HTTP server for making remote GET requests with headers.
 ///
 /// TODO: Recursively attach headers to items in playlists like m3u8.
@@ -8,6 +8,13 @@ packages:
       url: "https://pub.dartlang.org"
     source: hosted
     version: "2.4.2"
+  audio_session:
+    dependency: "direct main"
+    description:
+      path: "../audio_session"
+      relative: true
+    source: path
+    version: "0.0.1"
   boolean_selector:
     dependency: transitive
     description:
@@ -247,4 +254,4 @@ packages:
     version: "0.1.0"
 sdks:
   dart: ">=2.9.0-14.0.dev <3.0.0"
-  flutter: ">=1.12.13+hotfix.5 <2.0.0"
+  flutter: ">=1.20.0 <2.0.0"
@@ -8,6 +8,8 @@ environment:
   flutter: ">=1.12.8 <2.0.0"
 
 dependencies:
+  audio_session:
+    path: ../audio_session
   rxdart: ^0.24.1
   path: ^1.6.4
   path_provider: ^1.6.10