diff --git a/.gitignore b/.gitignore index 7f66c9d..e78915c 100644 --- a/.gitignore +++ b/.gitignore @@ -2,8 +2,6 @@ freezerkey.jsk android/key.properties -just_audio/ - # Miscellaneous *.class *.log diff --git a/README.md b/README.md index a607241..7cb3b4e 100644 --- a/README.md +++ b/README.md @@ -27,7 +27,7 @@ https://t.me/freezerandroid Tobs: Beta tester Bas Curtiz: Icon, Logo, Banner, Design suggestions Deemix: https://notabug.org/RemixDev/deemix -just_audio: https://github.com/ryanheise/just_audio +just_audio && audio_service: https://github.com/ryanheise/just_audio ## Support me @@ -35,11 +35,12 @@ BTC: `14hcr4PGbgqeXd3SoXY9QyJFNpyurgrL9y` ETH: `0xb4D1893195404E1F4b45e5BDA77F202Ac4012288` ## just_audio -This app depends on modified just_audio plugin with Deezer support. Repo: https://notabug.org/exttex/just_audio +This app depends on modified just_audio plugin with Deezer support. +The fork repo is deprecated, current version available in this repo. ## Disclaimer ``` Freezer was not developed for piracy, but educational and private usage. It may be illegal to use this in your country! I am not responsible in any way for the usage of this app. -``` +``` \ No newline at end of file diff --git a/just_audio/.github/FUNDING.yml b/just_audio/.github/FUNDING.yml new file mode 100644 index 0000000..738822d --- /dev/null +++ b/just_audio/.github/FUNDING.yml @@ -0,0 +1 @@ +github: ryanheise diff --git a/just_audio/.github/ISSUE_TEMPLATE/bug-report.md b/just_audio/.github/ISSUE_TEMPLATE/bug-report.md new file mode 100644 index 0000000..bbd2a31 --- /dev/null +++ b/just_audio/.github/ISSUE_TEMPLATE/bug-report.md @@ -0,0 +1,53 @@ +--- +name: Bug report +about: Create a report to help us improve +title: '' +labels: 1 backlog, bug +assignees: ryanheise + +--- + + +**Which API doesn't behave as documented, and how does it misbehave?** +Name here the specific methods or fields that are not behaving as documented, and explain clearly what is happening. 
+ +**Minimal reproduction project** +Provide a link here using one of two options: +1. Fork this repository and modify the example to reproduce the bug, then provide a link here. +2. If the unmodified official example already reproduces the bug, just write "The example". + +**To Reproduce (i.e. user steps, not code)** +Steps to reproduce the behavior: +1. Go to '...' +2. Click on '....' +3. Scroll down to '....' +4. See error + +**Error messages** + +``` +If applicable, copy & paste error message here, within the triple quotes to preserve formatting. +``` + +**Expected behavior** +A clear and concise description of what you expected to happen. + + +**Screenshots** +If applicable, add screenshots to help explain your problem. + +**Desktop (please complete the following information):** + - OS: [e.g. MacOS + version] + - Browser [e.g. chrome, safari + version] + +**Smartphone (please complete the following information):** + - Device: [e.g. iPhone6] + - OS: [e.g. iOS8.1] + +**Flutter SDK version** +``` +insert output of "flutter doctor" here +``` + +**Additional context** +Add any other context about the problem here. diff --git a/just_audio/.github/ISSUE_TEMPLATE/config.yml b/just_audio/.github/ISSUE_TEMPLATE/config.yml new file mode 100644 index 0000000..dc89ca6 --- /dev/null +++ b/just_audio/.github/ISSUE_TEMPLATE/config.yml @@ -0,0 +1,8 @@ +blank_issues_enabled: false +contact_links: + - name: Community Support + url: https://stackoverflow.com/search?q=just_audio + about: Ask for help on Stack Overflow. + - name: New to Flutter? + url: https://gitter.im/flutter/flutter + about: Chat with other Flutter developers on Gitter. 
diff --git a/just_audio/.github/ISSUE_TEMPLATE/documentation-request.md b/just_audio/.github/ISSUE_TEMPLATE/documentation-request.md new file mode 100644 index 0000000..1d61cd1 --- /dev/null +++ b/just_audio/.github/ISSUE_TEMPLATE/documentation-request.md @@ -0,0 +1,39 @@ +--- +name: Documentation request +about: Suggest an improvement to the documentation +title: '' +labels: 1 backlog, documentation +assignees: ryanheise + +--- + + + +**To which pages does your suggestion apply?** + +- Direct URL 1 +- Direct URL 2 +- ... + +**Quote the sentences(s) from the documentation to be improved (if any)** + +> Insert here. (Skip if you are proposing an entirely new section.) + +**Describe your suggestion** + +... diff --git a/just_audio/.github/ISSUE_TEMPLATE/feature_request.md b/just_audio/.github/ISSUE_TEMPLATE/feature_request.md new file mode 100644 index 0000000..1444d42 --- /dev/null +++ b/just_audio/.github/ISSUE_TEMPLATE/feature_request.md @@ -0,0 +1,37 @@ +--- +name: Feature request +about: Suggest an idea for this project +title: '' +labels: 1 backlog, enhancement +assignees: ryanheise + +--- + + + +**Is your feature request related to a problem? Please describe.** +A clear and concise description of what the problem is. Ex. I'm always frustrated when [...] + +**Describe the solution you'd like** +A clear and concise description of what you want to happen. + +**Describe alternatives you've considered** +A clear and concise description of any alternative solutions or features you've considered. + +**Additional context** +Add any other context or screenshots about the feature request here. 
diff --git a/just_audio/.gitignore b/just_audio/.gitignore new file mode 100644 index 0000000..07488ba --- /dev/null +++ b/just_audio/.gitignore @@ -0,0 +1,70 @@ +# Miscellaneous +*.class +*.log +*.pyc +*.swp +.DS_Store +.atom/ +.buildlog/ +.history +.svn/ + +# IntelliJ related +*.iml +*.ipr +*.iws +.idea/ + +# Visual Studio Code related +.vscode/ + +# Flutter/Dart/Pub related +**/doc/api/ +.dart_tool/ +.flutter-plugins +.packages +.pub-cache/ +.pub/ +/build/ + +# Android related +**/android/**/gradle-wrapper.jar +**/android/.gradle +**/android/captures/ +**/android/gradlew +**/android/gradlew.bat +**/android/local.properties +**/android/**/GeneratedPluginRegistrant.java + +# iOS/XCode related +**/ios/**/*.mode1v3 +**/ios/**/*.mode2v3 +**/ios/**/*.moved-aside +**/ios/**/*.pbxuser +**/ios/**/*.perspectivev3 +**/ios/**/*sync/ +**/ios/**/.sconsign.dblite +**/ios/**/.tags* +**/ios/**/.vagrant/ +**/ios/**/DerivedData/ +**/ios/**/Icon? +**/ios/**/Pods/ +**/ios/**/.symlinks/ +**/ios/**/profile +**/ios/**/xcuserdata +**/ios/.generated/ +**/ios/Flutter/App.framework +**/ios/Flutter/Flutter.framework +**/ios/Flutter/Generated.xcconfig +**/ios/Flutter/app.flx +**/ios/Flutter/app.zip +**/ios/Flutter/flutter_assets/ +**/ios/ServiceDefinitions.json +**/ios/Runner/GeneratedPluginRegistrant.* + +# Exceptions to above rules. +!**/ios/**/default.mode1v3 +!**/ios/**/default.mode2v3 +!**/ios/**/default.pbxuser +!**/ios/**/default.perspectivev3 +!/packages/flutter_tools/test/data/dart_dependencies_test/**/.packages diff --git a/just_audio/.metadata b/just_audio/.metadata new file mode 100644 index 0000000..8536f75 --- /dev/null +++ b/just_audio/.metadata @@ -0,0 +1,10 @@ +# This file tracks properties of this Flutter project. +# Used by Flutter tool to assess capabilities and perform upgrades etc. +# +# This file should be version controlled and should not be manually edited. 
+ +version: + revision: 68587a0916366e9512a78df22c44163d041dd5f3 + channel: stable + +project_type: plugin diff --git a/just_audio/CHANGELOG.md b/just_audio/CHANGELOG.md new file mode 100644 index 0000000..fc87c15 --- /dev/null +++ b/just_audio/CHANGELOG.md @@ -0,0 +1,114 @@ +## 0.3.1 + +* Prevent hang in dispose + +## 0.3.0 + +* Playlists +* Looping +* Shuffling +* Composing +* Clipping support added for iOS/macOS +* New player state model consisting of: + * playing: true/false + * processingState: none/loading/buffering/ready/completed +* Feature complete on iOS and macOS (except for DASH) +* Improved example +* Exception classes + +## 0.2.2 + +* Fix dependencies for stable channel. + +## 0.2.1 + +* Improve handling of headers. +* Report setUrl errors and duration on web. + +## 0.2.0 + +* Support dynamic duration +* Support seeking to end of live streams +* Support request headers +* V2 implementation +* Report setUrl errors on iOS +* setUrl throws exception if interrupted +* Return null when duration is unknown + +## 0.1.10 + +* Option to set audio session category on iOS. + +## 0.1.9 + +* Bug fixes. + +## 0.1.8 + +* Reduce distortion at slow speeds on iOS + +## 0.1.7 + +* Minor bug fixes. + +## 0.1.6 + +* Eliminate event lag over method channels. +* Report setUrl errors on Android. +* Report Icy Metadata on Android. +* Bug fixes. + +## 0.1.5 + +* Update dependencies and documentation. + +## 0.1.4 + +* Add MacOS implementation. +* Support cross-platform redirects on Android. +* Bug fixes. + +## 0.1.3 + +* Fix bug in web implementation. + +## 0.1.2 + +* Broadcast how much audio has been buffered. + +## 0.1.1 + +* Web implementation. +* iOS option to minimize stalling. +* Fix setAsset on iOS. + +## 0.1.0 + +* Separate buffering state from PlaybackState. +* More permissive state transitions. +* Support playing local files on iOS. + +## 0.0.6 + +* Bug fixes. + +## 0.0.5 + +* API change for audio clipping. +* Performance improvements and bug fixes on Android. 
+ +## 0.0.4 + +* Remove reseeking hack. + +## 0.0.3 + +* Feature to change audio speed. + +## 0.0.2 + +* iOS implementation for testing (may not work). + +## 0.0.1 + +* Initial release with Android implementation. diff --git a/just_audio/LICENSE b/just_audio/LICENSE new file mode 100644 index 0000000..27a8b32 --- /dev/null +++ b/just_audio/LICENSE @@ -0,0 +1,229 @@ +MIT License + +Copyright (c) 2019-2020 Ryan Heise. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + +============================================================================== + +This software includes the ExoPlayer library which is licensed under the Apache +License, Version 2.0. + + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. 
+ + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of 
the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/just_audio/README.md b/just_audio/README.md new file mode 100644 index 0000000..60fb32f --- /dev/null +++ b/just_audio/README.md @@ -0,0 +1,222 @@ +# just_audio + +This Flutter plugin plays audio from URLs, files, assets, DASH/HLS streams and playlists. Furthermore, it can clip, concatenate, loop, shuffle and compose audio into complex arrangements with gapless playback. This plugin can be used with [audio_service](https://pub.dev/packages/audio_service) to play audio in the background and control playback from the lock screen, Android notifications, the iOS Control Center, and headset buttons. 
+ +## Features + +| Feature | Android | iOS | MacOS | Web | +| ------- | :-------: | :-----: | :-----: | :-----: | +| read from URL | ✅ | ✅ | ✅ | ✅ | +| read from file | ✅ | ✅ | ✅ | | +| read from asset | ✅ | ✅ | ✅ | | +| request headers | ✅ | ✅ | ✅ | | +| DASH | ✅ | | | | +| HLS | ✅ | ✅ | ✅ | | +| buffer status/position | ✅ | ✅ | ✅ | ✅ | +| play/pause/seek | ✅ | ✅ | ✅ | ✅ | +| set volume | ✅ | ✅ | ✅ | ✅ | +| set speed | ✅ | ✅ | ✅ | ✅ | +| clip audio | ✅ | ✅ | ✅ | ✅ | +| playlists | ✅ | ✅ | ✅ | ✅ | +| looping | ✅ | ✅ | ✅ | ✅ | +| shuffle | ✅ | ✅ | ✅ | ✅ | +| compose audio | ✅ | ✅ | ✅ | ✅ | +| gapless playback | ✅ | ✅ | ✅ | | +| report player errors | ✅ | ✅ | ✅ | ✅ | + +Please consider reporting any bugs you encounter [here](https://github.com/ryanheise/just_audio/issues) or submitting pull requests [here](https://github.com/ryanheise/just_audio/pulls). + +## Example + +![just_audio](https://user-images.githubusercontent.com/19899190/89558581-bf369080-d857-11ea-9376-3a5055284bab.png) + +Initialisation: + +```dart +final player = AudioPlayer(); +var duration = await player.setUrl('https://foo.com/bar.mp3'); +``` + +Standard controls: + +```dart +player.play(); // Usually you don't want to wait for playback to finish. 
+await player.seek(Duration(seconds: 10)); +await player.pause(); +``` + +Clipping audio: + +```dart +await player.setClip(start: Duration(seconds: 10), end: Duration(seconds: 20)); +await player.play(); // Waits until the clip has finished playing +``` +Adjusting audio: + +```dart +await player.setSpeed(2.0); // Double speed +await player.setVolume(0.5); // Halve volume +``` + +Gapless playlists: + +```dart +await player.load( + ConcatenatingAudioSource( + children: [ + AudioSource.uri(Uri.parse("https://example.com/track1.mp3")), + AudioSource.uri(Uri.parse("https://example.com/track2.mp3")), + AudioSource.uri(Uri.parse("https://example.com/track3.mp3")), + ], + ), +); +player.seekToNext(); +player.seekToPrevious(); +// Jump to the beginning of track3.mp3. +player.seek(Duration(milliseconds: 0), index: 2); +``` + +Looping and shuffling: + +```dart +player.setLoopMode(LoopMode.off); // no looping (default) +player.setLoopMode(LoopMode.all); // loop playlist +player.setLoopMode(LoopMode.one); // loop current item +player.setShuffleModeEnabled(true); // shuffle except for current item +``` + +Composing audio sources: + +```dart +player.load( + // Loop child 4 times + LoopingAudioSource( + count: 4, + // Play children one after the other + child: ConcatenatingAudioSource( + children: [ + // Play a regular media file + ProgressiveAudioSource(Uri.parse("https://example.com/foo.mp3")), + // Play a DASH stream + DashAudioSource(Uri.parse("https://example.com/audio.mdp")), + // Play an HLS stream + HlsAudioSource(Uri.parse("https://example.com/audio.m3u8")), + // Play a segment of the child + ClippingAudioSource( + child: ProgressiveAudioSource(Uri.parse("https://w.xyz/p.mp3")), + start: Duration(seconds: 25), + end: Duration(seconds: 30), + ), + ], + ), + ), +); +``` + +Releasing resources: + +```dart +await player.dispose(); +``` + +Catching player errors: + +```dart +try { + await player.setUrl("https://s3.amazonaws.com/404-file.mp3"); +} catch (e) { + print("Error: 
$e"); +} +``` + +Listening to state changes: + +```dart +player.playerStateStream.listen((state) { + if (state.playing) ... else ... + switch (state.processingState) { + case AudioPlaybackState.none: ... + case AudioPlaybackState.loading: ... + case AudioPlaybackState.buffering: ... + case AudioPlaybackState.ready: ... + case AudioPlaybackState.completed: ... + } +}); + +// See also: +// - durationStream +// - positionStream +// - bufferedPositionStream +// - currentIndexStream +// - icyMetadataStream +// - playingStream +// - processingStateStream +// - loopModeStream +// - shuffleModeEnabledStream +// - volumeStream +// - speedStream +// - playbackEventStream +``` + +## Platform specific configuration + +### Android + +If you wish to connect to non-HTTPS URLS, add the following attribute to the `application` element of your `AndroidManifest.xml` file: + +```xml + +``` + +### iOS + +If you wish to connect to non-HTTPS URLS, add the following to your `Info.plist` file: + +```xml +NSAppTransportSecurity + + NSAllowsArbitraryLoads + + NSAllowsArbitraryLoadsForMedia + + +``` + +By default, iOS will mute your app's audio when your phone is switched to +silent mode. Depending on the requirements of your app, you can change the +default audio session category using `AudioPlayer.setIosCategory`. For example, +if you are writing a media app, Apple recommends that you set the category to +`AVAudioSessionCategoryPlayback`, which you can achieve by adding the following +code to your app's initialisation: + +```dart +AudioPlayer.setIosCategory(IosCategory.playback); +``` + +Note: If your app uses a number of different audio plugins in combination, e.g. +for audio recording, or text to speech, or background audio, it is possible +that those plugins may internally override the setting you choose here. 
You may +consider asking the developer of each other plugin you use to provide a similar +method so that you can configure the same audio session category universally +across all plugins you use. + +### MacOS + +To allow your MacOS application to access audio files on the Internet, add the following to your `DebugProfile.entitlements` and `Release.entitlements` files: + +```xml + com.apple.security.network.client + +``` + +If you wish to connect to non-HTTPS URLS, add the following to your `Info.plist` file: + +```xml +NSAppTransportSecurity + + NSAllowsArbitraryLoads + + NSAllowsArbitraryLoadsForMedia + + +``` diff --git a/just_audio/android/.gitignore b/just_audio/android/.gitignore new file mode 100644 index 0000000..c6cbe56 --- /dev/null +++ b/just_audio/android/.gitignore @@ -0,0 +1,8 @@ +*.iml +.gradle +/local.properties +/.idea/workspace.xml +/.idea/libraries +.DS_Store +/build +/captures diff --git a/just_audio/android/build.gradle b/just_audio/android/build.gradle new file mode 100644 index 0000000..d63baca --- /dev/null +++ b/just_audio/android/build.gradle @@ -0,0 +1,48 @@ +group 'com.ryanheise.just_audio' +version '1.0' + +buildscript { + repositories { + google() + jcenter() + } + + dependencies { + classpath 'com.android.tools.build:gradle:3.6.3' + } +} + +rootProject.allprojects { + repositories { + google() + jcenter() + } +} + +apply plugin: 'com.android.library' + +android { + compileSdkVersion 28 + + defaultConfig { + minSdkVersion 16 + testInstrumentationRunner "androidx.test.runner.AndroidJUnitRunner" + } + + lintOptions { + disable 'InvalidPackage' + } + + compileOptions { + sourceCompatibility 1.8 + targetCompatibility 1.8 + } +} + +dependencies { + implementation 'com.google.android.exoplayer:exoplayer-core:2.11.4' + implementation 'com.google.android.exoplayer:exoplayer-dash:2.11.4' + implementation 'com.google.android.exoplayer:exoplayer-hls:2.11.4' + implementation 'com.google.android.exoplayer:exoplayer-smoothstreaming:2.11.4' + compile 
files('libs/extension-flac.aar') +} diff --git a/just_audio/android/gradle.properties b/just_audio/android/gradle.properties new file mode 100644 index 0000000..38c8d45 --- /dev/null +++ b/just_audio/android/gradle.properties @@ -0,0 +1,4 @@ +org.gradle.jvmargs=-Xmx1536M +android.enableR8=true +android.useAndroidX=true +android.enableJetifier=true diff --git a/just_audio/android/gradle/wrapper/gradle-wrapper.properties b/just_audio/android/gradle/wrapper/gradle-wrapper.properties new file mode 100644 index 0000000..212deb2 --- /dev/null +++ b/just_audio/android/gradle/wrapper/gradle-wrapper.properties @@ -0,0 +1,6 @@ +#Mon Aug 10 13:15:44 CEST 2020 +distributionBase=GRADLE_USER_HOME +distributionPath=wrapper/dists +zipStoreBase=GRADLE_USER_HOME +zipStorePath=wrapper/dists +distributionUrl=https\://services.gradle.org/distributions/gradle-5.6.4-all.zip diff --git a/just_audio/android/libs/extension-flac.aar b/just_audio/android/libs/extension-flac.aar new file mode 100644 index 0000000..62d38a2 Binary files /dev/null and b/just_audio/android/libs/extension-flac.aar differ diff --git a/just_audio/android/settings.gradle b/just_audio/android/settings.gradle new file mode 100644 index 0000000..c17dc37 --- /dev/null +++ b/just_audio/android/settings.gradle @@ -0,0 +1 @@ +rootProject.name = 'just_audio' diff --git a/just_audio/android/src/main/AndroidManifest.xml b/just_audio/android/src/main/AndroidManifest.xml new file mode 100644 index 0000000..e83e841 --- /dev/null +++ b/just_audio/android/src/main/AndroidManifest.xml @@ -0,0 +1,3 @@ + + diff --git a/just_audio/android/src/main/java/com/ryanheise/just_audio/AudioPlayer.java b/just_audio/android/src/main/java/com/ryanheise/just_audio/AudioPlayer.java new file mode 100644 index 0000000..f3418f0 --- /dev/null +++ b/just_audio/android/src/main/java/com/ryanheise/just_audio/AudioPlayer.java @@ -0,0 +1,723 @@ +package com.ryanheise.just_audio; + +import android.content.Context; +import android.net.Uri; +import 
android.os.Handler; +import com.google.android.exoplayer2.C; +import com.google.android.exoplayer2.ExoPlaybackException; +import com.google.android.exoplayer2.PlaybackParameters; +import com.google.android.exoplayer2.Player; +import com.google.android.exoplayer2.SimpleExoPlayer; +import com.google.android.exoplayer2.Timeline; +import com.google.android.exoplayer2.metadata.Metadata; +import com.google.android.exoplayer2.metadata.MetadataOutput; +import com.google.android.exoplayer2.metadata.icy.IcyHeaders; +import com.google.android.exoplayer2.metadata.icy.IcyInfo; +import com.google.android.exoplayer2.source.ClippingMediaSource; +import com.google.android.exoplayer2.source.ConcatenatingMediaSource; +import com.google.android.exoplayer2.source.LoopingMediaSource; +import com.google.android.exoplayer2.source.MediaSource; +import com.google.android.exoplayer2.source.ProgressiveMediaSource; +import com.google.android.exoplayer2.source.ShuffleOrder; +import com.google.android.exoplayer2.source.ShuffleOrder.DefaultShuffleOrder; +import com.google.android.exoplayer2.source.TrackGroup; +import com.google.android.exoplayer2.source.TrackGroupArray; +import com.google.android.exoplayer2.source.dash.DashMediaSource; +import com.google.android.exoplayer2.source.hls.HlsMediaSource; +import com.google.android.exoplayer2.trackselection.TrackSelectionArray; +import com.google.android.exoplayer2.upstream.DataSource; +import com.google.android.exoplayer2.upstream.DefaultDataSourceFactory; +import com.google.android.exoplayer2.upstream.DefaultHttpDataSource; +import com.google.android.exoplayer2.upstream.DefaultHttpDataSourceFactory; +import com.google.android.exoplayer2.upstream.HttpDataSource; +import com.google.android.exoplayer2.util.Util; +import io.flutter.Log; +import io.flutter.plugin.common.BinaryMessenger; +import io.flutter.plugin.common.EventChannel; +import io.flutter.plugin.common.EventChannel.EventSink; +import io.flutter.plugin.common.MethodCall; +import 
io.flutter.plugin.common.MethodChannel; +import io.flutter.plugin.common.MethodChannel.MethodCallHandler; +import io.flutter.plugin.common.MethodChannel.Result; +import java.io.IOException; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Random; +import java.util.stream.Collectors; +import com.ryanheise.just_audio.DeezerDataSource; + +public class AudioPlayer implements MethodCallHandler, Player.EventListener, MetadataOutput { + + static final String TAG = "AudioPlayer"; + + private static Random random = new Random(); + + private final Context context; + private final MethodChannel methodChannel; + private final EventChannel eventChannel; + private EventSink eventSink; + + private ProcessingState processingState; + private long updateTime; + private long updatePosition; + private long bufferedPosition; + private long duration; + private Long start; + private Long end; + private Long seekPos; + private Result prepareResult; + private Result playResult; + private Result seekResult; + private boolean seekProcessed; + private boolean playing; + private Map mediaSources = new HashMap(); + private IcyInfo icyInfo; + private IcyHeaders icyHeaders; + private int errorCount; + + private SimpleExoPlayer player; + private MediaSource mediaSource; + private Integer currentIndex; + private Map loopingChildren = new HashMap<>(); + private Map loopingCounts = new HashMap<>(); + private final Handler handler = new Handler(); + private final Runnable bufferWatcher = new Runnable() { + @Override + public void run() { + if (player == null) { + return; + } + + long newBufferedPosition = player.getBufferedPosition(); + if (newBufferedPosition != bufferedPosition) { + bufferedPosition = newBufferedPosition; + broadcastPlaybackEvent(); + } + switch (processingState) { + case buffering: + handler.postDelayed(this, 200); + break; + case ready: + if (playing) { + 
handler.postDelayed(this, 500); + } else { + handler.postDelayed(this, 1000); + } + break; + } + } + }; + + private final Runnable onDispose; + + public AudioPlayer(final Context applicationContext, final BinaryMessenger messenger, + final String id, final Runnable onDispose) { + this.context = applicationContext; + this.onDispose = onDispose; + methodChannel = new MethodChannel(messenger, "com.ryanheise.just_audio.methods." + id); + methodChannel.setMethodCallHandler(this); + eventChannel = new EventChannel(messenger, "com.ryanheise.just_audio.events." + id); + eventChannel.setStreamHandler(new EventChannel.StreamHandler() { + @Override + public void onListen(final Object arguments, final EventSink eventSink) { + AudioPlayer.this.eventSink = eventSink; + } + + @Override + public void onCancel(final Object arguments) { + eventSink = null; + } + }); + processingState = ProcessingState.none; + } + + private void startWatchingBuffer() { + handler.removeCallbacks(bufferWatcher); + handler.post(bufferWatcher); + } + + @Override + public void onMetadata(Metadata metadata) { + for (int i = 0; i < metadata.length(); i++) { + final Metadata.Entry entry = metadata.get(i); + if (entry instanceof IcyInfo) { + icyInfo = (IcyInfo) entry; + broadcastPlaybackEvent(); + } + } + } + + @Override + public void onTracksChanged(TrackGroupArray trackGroups, TrackSelectionArray trackSelections) { + for (int i = 0; i < trackGroups.length; i++) { + TrackGroup trackGroup = trackGroups.get(i); + + for (int j = 0; j < trackGroup.length; j++) { + Metadata metadata = trackGroup.getFormat(j).metadata; + + if (metadata != null) { + for (int k = 0; k < metadata.length(); k++) { + final Metadata.Entry entry = metadata.get(k); + if (entry instanceof IcyHeaders) { + icyHeaders = (IcyHeaders) entry; + broadcastPlaybackEvent(); + } + } + } + } + } + } + + @Override + public void onPositionDiscontinuity(int reason) { + switch (reason) { + case Player.DISCONTINUITY_REASON_PERIOD_TRANSITION: + case 
Player.DISCONTINUITY_REASON_SEEK: + onItemMayHaveChanged(); + break; + } + } + + @Override + public void onTimelineChanged(Timeline timeline, int reason) { + if (reason == Player.TIMELINE_CHANGE_REASON_DYNAMIC) { + onItemMayHaveChanged(); + } + } + + private void onItemMayHaveChanged() { + Integer newIndex = player.getCurrentWindowIndex(); + if (newIndex != currentIndex) { + currentIndex = newIndex; + } + broadcastPlaybackEvent(); + } + + @Override + public void onPlayerStateChanged(boolean playWhenReady, int playbackState) { + switch (playbackState) { + case Player.STATE_READY: + if (prepareResult != null) { + duration = getDuration(); + transition(ProcessingState.ready); + prepareResult.success(duration); + prepareResult = null; + } else { + transition(ProcessingState.ready); + } + if (seekProcessed) { + completeSeek(); + } + break; + case Player.STATE_BUFFERING: + if (processingState != ProcessingState.buffering) { + transition(ProcessingState.buffering); + startWatchingBuffer(); + } + break; + case Player.STATE_ENDED: + if (processingState != ProcessingState.completed) { + transition(ProcessingState.completed); + } + if (playResult != null) { + playResult.success(null); + playResult = null; + } + break; + } + } + + @Override + public void onPlayerError(ExoPlaybackException error) { + switch (error.type) { + case ExoPlaybackException.TYPE_SOURCE: + Log.e(TAG, "TYPE_SOURCE: " + error.getSourceException().getMessage()); + break; + + case ExoPlaybackException.TYPE_RENDERER: + Log.e(TAG, "TYPE_RENDERER: " + error.getRendererException().getMessage()); + break; + + case ExoPlaybackException.TYPE_UNEXPECTED: + Log.e(TAG, "TYPE_UNEXPECTED: " + error.getUnexpectedException().getMessage()); + break; + + default: + Log.e(TAG, "default: " + error.getUnexpectedException().getMessage()); + } + sendError(String.valueOf(error.type), error.getMessage()); + errorCount++; + if (player.hasNext() && currentIndex != null && errorCount <= 5) { + int nextIndex = currentIndex + 1; + 
player.prepare(mediaSource); + player.seekTo(nextIndex, 0); + } + } + + @Override + public void onSeekProcessed() { + if (seekResult != null) { + seekProcessed = true; + if (player.getPlaybackState() == Player.STATE_READY) { + completeSeek(); + } + } + } + + private void completeSeek() { + seekProcessed = false; + seekPos = null; + seekResult.success(null); + seekResult = null; + } + + @Override + public void onMethodCall(final MethodCall call, final Result result) { + ensurePlayerInitialized(); + + final List args = (List) call.arguments; + try { + switch (call.method) { + case "load": + load(getAudioSource(args.get(0)), result); + break; + case "play": + play(result); + break; + case "pause": + pause(); + result.success(null); + break; + case "setVolume": + setVolume((float) ((double) ((Double) args.get(0)))); + result.success(null); + break; + case "setSpeed": + setSpeed((float) ((double) ((Double) args.get(0)))); + result.success(null); + break; + case "setLoopMode": + setLoopMode((Integer) args.get(0)); + result.success(null); + break; + case "setShuffleModeEnabled": + setShuffleModeEnabled((Boolean) args.get(0)); + result.success(null); + break; + case "setAutomaticallyWaitsToMinimizeStalling": + result.success(null); + break; + case "seek": + Long position = getLong(args.get(0)); + Integer index = (Integer)args.get(1); + seek(position == null ? 
C.TIME_UNSET : position, result, index); + break; + case "dispose": + dispose(); + result.success(null); + break; + case "concatenating.add": + concatenating(args.get(0)) + .addMediaSource(getAudioSource(args.get(1)), handler, () -> result.success(null)); + break; + case "concatenating.insert": + concatenating(args.get(0)) + .addMediaSource((Integer)args.get(1), getAudioSource(args.get(2)), handler, () -> result.success(null)); + break; + case "concatenating.addAll": + concatenating(args.get(0)) + .addMediaSources(getAudioSources(args.get(1)), handler, () -> result.success(null)); + break; + case "concatenating.insertAll": + concatenating(args.get(0)) + .addMediaSources((Integer)args.get(1), getAudioSources(args.get(2)), handler, () -> result.success(null)); + break; + case "concatenating.removeAt": + concatenating(args.get(0)) + .removeMediaSource((Integer)args.get(1), handler, () -> result.success(null)); + break; + case "concatenating.removeRange": + concatenating(args.get(0)) + .removeMediaSourceRange((Integer)args.get(1), (Integer)args.get(2), handler, () -> result.success(null)); + break; + case "concatenating.move": + concatenating(args.get(0)) + .moveMediaSource((Integer)args.get(1), (Integer)args.get(2), handler, () -> result.success(null)); + break; + case "concatenating.clear": + concatenating(args.get(0)).clear(handler, () -> result.success(null)); + break; + default: + result.notImplemented(); + break; + } + } catch (IllegalStateException e) { + e.printStackTrace(); + result.error("Illegal state: " + e.getMessage(), null, null); + } catch (Exception e) { + e.printStackTrace(); + result.error("Error: " + e, null, null); + } + } + + // Set the shuffle order for mediaSource, with currentIndex at + // the first position. Traverse the tree incrementing index at each + // node. 
+ private int setShuffleOrder(MediaSource mediaSource, int index) { + if (mediaSource instanceof ConcatenatingMediaSource) { + final ConcatenatingMediaSource source = (ConcatenatingMediaSource)mediaSource; + // Find which child is current + Integer currentChildIndex = null; + for (int i = 0; i < source.getSize(); i++) { + final int indexBefore = index; + final MediaSource child = source.getMediaSource(i); + index = setShuffleOrder(child, index); + // If currentIndex falls within this child, make this child come first. + if (currentIndex >= indexBefore && currentIndex < index) { + currentChildIndex = i; + } + } + // Shuffle so that the current child is first in the shuffle order + source.setShuffleOrder(createShuffleOrder(source.getSize(), currentChildIndex)); + } else if (mediaSource instanceof LoopingMediaSource) { + final LoopingMediaSource source = (LoopingMediaSource)mediaSource; + // The ExoPlayer API doesn't provide accessors for these so we have + // to index them ourselves. + MediaSource child = loopingChildren.get(source); + int count = loopingCounts.get(source); + for (int i = 0; i < count; i++) { + index = setShuffleOrder(child, index); + } + } else { + // An actual media item takes up one spot in the playlist. + index++; + } + return index; + } + + private static int[] shuffle(int length, Integer firstIndex) { + final int[] shuffleOrder = new int[length]; + for (int i = 0; i < length; i++) { + final int j = random.nextInt(i + 1); + shuffleOrder[i] = shuffleOrder[j]; + shuffleOrder[j] = i; + } + if (firstIndex != null) { + for (int i = 1; i < length; i++) { + if (shuffleOrder[i] == firstIndex) { + final int v = shuffleOrder[0]; + shuffleOrder[0] = shuffleOrder[i]; + shuffleOrder[i] = v; + break; + } + } + } + return shuffleOrder; + } + + // Create a shuffle order optionally fixing the first index. 
+ private ShuffleOrder createShuffleOrder(int length, Integer firstIndex) { + int[] shuffleIndices = shuffle(length, firstIndex); + return new DefaultShuffleOrder(shuffleIndices, random.nextLong()); + } + + private ConcatenatingMediaSource concatenating(final Object index) { + return (ConcatenatingMediaSource)mediaSources.get((String)index); + } + + private MediaSource getAudioSource(final Object json) { + Map map = (Map)json; + String id = (String)map.get("id"); + MediaSource mediaSource = mediaSources.get(id); + if (mediaSource == null) { + mediaSource = decodeAudioSource(map); + mediaSources.put(id, mediaSource); + } + return mediaSource; + } + + private MediaSource decodeAudioSource(final Object json) { + Map map = (Map)json; + String id = (String)map.get("id"); + switch ((String)map.get("type")) { + case "progressive": + Uri uri = Uri.parse((String)map.get("uri")); + //Deezer + if (uri.getHost().contains("dzcdn.net")) { + //Track id is stored in URL fragment (after #) + String fragment = uri.getFragment(); + uri = Uri.parse(((String)map.get("uri")).replace("#" + fragment, "")); + return new ProgressiveMediaSource.Factory( + () -> { + HttpDataSource deezerDataSource = new DeezerDataSource(fragment); + return deezerDataSource; + } + ).setTag(id).createMediaSource(uri); + } + + return new ProgressiveMediaSource.Factory(buildDataSourceFactory()) + .setTag(id) + .createMediaSource(uri); + case "dash": + return new DashMediaSource.Factory(buildDataSourceFactory()) + .setTag(id) + .createMediaSource(Uri.parse((String)map.get("uri"))); + case "hls": + return new HlsMediaSource.Factory(buildDataSourceFactory()) + .setTag(id) + .createMediaSource(Uri.parse((String)map.get("uri"))); + case "concatenating": + List audioSources = (List)map.get("audioSources"); + return new ConcatenatingMediaSource( + false, // isAtomic + (Boolean)map.get("useLazyPreparation"), + new DefaultShuffleOrder(audioSources.size()), + audioSources + .stream() + .map(s -> getAudioSource(s)) + 
.toArray(MediaSource[]::new)); + case "clipping": + Long start = getLong(map.get("start")); + Long end = getLong(map.get("end")); + return new ClippingMediaSource(getAudioSource(map.get("audioSource")), + (start != null ? start : 0) * 1000L, + (end != null ? end : C.TIME_END_OF_SOURCE) * 1000L); + case "looping": + Integer count = (Integer)map.get("count"); + MediaSource looperChild = getAudioSource(map.get("audioSource")); + LoopingMediaSource looper = new LoopingMediaSource(looperChild, count); + // TODO: store both in a single map + loopingChildren.put(looper, looperChild); + loopingCounts.put(looper, count); + return looper; + default: + throw new IllegalArgumentException("Unknown AudioSource type: " + map.get("type")); + } + } + + private List getAudioSources(final Object json) { + return ((List)json) + .stream() + .map(s -> getAudioSource(s)) + .collect(Collectors.toList()); + } + + private DataSource.Factory buildDataSourceFactory() { + String userAgent = Util.getUserAgent(context, "just_audio"); + DataSource.Factory httpDataSourceFactory = new DefaultHttpDataSourceFactory( + userAgent, + DefaultHttpDataSource.DEFAULT_CONNECT_TIMEOUT_MILLIS, + DefaultHttpDataSource.DEFAULT_READ_TIMEOUT_MILLIS, + true + ); + return new DefaultDataSourceFactory(context, httpDataSourceFactory); + } + + private void load(final MediaSource mediaSource, final Result result) { + switch (processingState) { + case none: + break; + case loading: + abortExistingConnection(); + player.stop(); + break; + default: + player.stop(); + break; + } + errorCount = 0; + prepareResult = result; + transition(ProcessingState.loading); + if (player.getShuffleModeEnabled()) { + setShuffleOrder(mediaSource, 0); + } + this.mediaSource = mediaSource; + player.prepare(mediaSource); + } + + private void ensurePlayerInitialized() { + if (player == null) { + player = new SimpleExoPlayer.Builder(context).build(); + player.addMetadataOutput(this); + player.addListener(this); + } + } + + private void 
broadcastPlaybackEvent() { + final Map event = new HashMap(); + event.put("processingState", processingState.ordinal()); + event.put("updatePosition", updatePosition = getCurrentPosition()); + event.put("updateTime", updateTime = System.currentTimeMillis()); + event.put("bufferedPosition", Math.max(updatePosition, bufferedPosition)); + event.put("icyMetadata", collectIcyMetadata()); + event.put("duration", duration = getDuration()); + event.put("currentIndex", currentIndex); + + if (eventSink != null) { + eventSink.success(event); + } + } + + private Map collectIcyMetadata() { + final Map icyData = new HashMap<>(); + if (icyInfo != null) { + final Map info = new HashMap<>(); + info.put("title", icyInfo.title); + info.put("url", icyInfo.url); + icyData.put("info", info); + } + if (icyHeaders != null) { + final Map headers = new HashMap<>(); + headers.put("bitrate", icyHeaders.bitrate); + headers.put("genre", icyHeaders.genre); + headers.put("name", icyHeaders.name); + headers.put("metadataInterval", icyHeaders.metadataInterval); + headers.put("url", icyHeaders.url); + headers.put("isPublic", icyHeaders.isPublic); + icyData.put("headers", headers); + } + return icyData; + } + + private long getCurrentPosition() { + if (processingState == ProcessingState.none || processingState == ProcessingState.loading) { + return 0; + } else if (seekPos != null && seekPos != C.TIME_UNSET) { + return seekPos; + } else { + return player.getCurrentPosition(); + } + } + + private long getDuration() { + if (processingState == ProcessingState.none || processingState == ProcessingState.loading) { + return C.TIME_UNSET; + } else { + return player.getDuration(); + } + } + + private void sendError(String errorCode, String errorMsg) { + if (prepareResult != null) { + prepareResult.error(errorCode, errorMsg, null); + prepareResult = null; + } + + if (eventSink != null) { + eventSink.error(errorCode, errorMsg, null); + } + } + + private void transition(final ProcessingState newState) { + 
processingState = newState; + broadcastPlaybackEvent(); + } + + private String getLowerCaseExtension(Uri uri) { + // Until ExoPlayer provides automatic detection of media source types, we + // rely on the file extension. When this is absent, as a temporary + // workaround we allow the app to supply a fake extension in the URL + // fragment. e.g. https://somewhere.com/somestream?x=etc#.m3u8 + String fragment = uri.getFragment(); + String filename = fragment != null && fragment.contains(".") ? fragment : uri.getPath(); + return filename.replaceAll("^.*\\.", "").toLowerCase(); + } + + public void play(Result result) { + if (player.getPlayWhenReady()) return; + if (playResult != null) { + playResult.success(null); + } + playResult = result; + startWatchingBuffer(); + player.setPlayWhenReady(true); + if (processingState == ProcessingState.completed && playResult != null) { + playResult.success(null); + playResult = null; + } + } + + public void pause() { + if (!player.getPlayWhenReady()) return; + player.setPlayWhenReady(false); + if (playResult != null) { + playResult.success(null); + playResult = null; + } + } + + public void setVolume(final float volume) { + player.setVolume(volume); + } + + public void setSpeed(final float speed) { + player.setPlaybackParameters(new PlaybackParameters(speed)); + broadcastPlaybackEvent(); + } + + public void setLoopMode(final int mode) { + player.setRepeatMode(mode); + } + + public void setShuffleModeEnabled(final boolean enabled) { + if (enabled) { + setShuffleOrder(mediaSource, 0); + } + player.setShuffleModeEnabled(enabled); + } + + public void seek(final long position, final Result result, final Integer index) { + if (processingState == ProcessingState.none || processingState == ProcessingState.loading) { + return; + } + abortSeek(); + seekPos = position; + seekResult = result; + seekProcessed = false; + int windowIndex = index != null ? 
index : player.getCurrentWindowIndex(); + player.seekTo(windowIndex, position); + } + + public void dispose() { + mediaSources.clear(); + mediaSource = null; + loopingChildren.clear(); + if (player != null) { + player.release(); + player = null; + transition(ProcessingState.none); + } + if (eventSink != null) { + eventSink.endOfStream(); + } + onDispose.run(); + } + + private void abortSeek() { + if (seekResult != null) { + seekResult.success(null); + seekResult = null; + seekPos = null; + seekProcessed = false; + } + } + + private void abortExistingConnection() { + sendError("abort", "Connection aborted"); + } + + public static Long getLong(Object o) { + return (o == null || o instanceof Long) ? (Long)o : new Long(((Integer)o).intValue()); + } + + enum ProcessingState { + none, + loading, + buffering, + ready, + completed + } +} diff --git a/just_audio/android/src/main/java/com/ryanheise/just_audio/DeezerDataSource.java b/just_audio/android/src/main/java/com/ryanheise/just_audio/DeezerDataSource.java new file mode 100644 index 0000000..7b45092 --- /dev/null +++ b/just_audio/android/src/main/java/com/ryanheise/just_audio/DeezerDataSource.java @@ -0,0 +1,264 @@ +package com.ryanheise.just_audio; + +import android.net.Uri; +import android.util.Log; +import com.google.android.exoplayer2.upstream.DataSpec; +import com.google.android.exoplayer2.upstream.HttpDataSource; +import com.google.android.exoplayer2.upstream.TransferListener; +import java.io.BufferedInputStream; +import java.io.ByteArrayOutputStream; +import java.io.FilterInputStream; +import java.io.IOException; +import java.io.InputStream; +import java.net.HttpURLConnection; +import java.net.URL; +import java.security.MessageDigest; +import java.util.Arrays; +import java.util.List; +import java.util.Map; +import javax.crypto.Cipher; +import javax.crypto.spec.SecretKeySpec; + +public class DeezerDataSource implements HttpDataSource { + HttpURLConnection connection; + InputStream inputStream; + int counter = 0; + 
byte[] key; + DataSpec dataSpec; + + //Quality fallback stuff + String trackId; + int quality = 0; + String md5origin; + String mediaVersion; + + public DeezerDataSource(String trackId) { + this.trackId = trackId; + this.key = getKey(trackId); + } + + @Override + public long open(DataSpec dataSpec) throws HttpDataSource.HttpDataSourceException { + this.dataSpec = dataSpec; + try { + //Check if real url or placeholder for quality fallback + URL url = new URL(dataSpec.uri.toString()); + String[] qp = url.getQuery().split("&"); + //Real deezcdn url doesnt have query params + if (qp.length >= 3) { + //Parse query parameters + for (int i = 0; i < qp.length; i++) { + String p = qp[i].replace("?", ""); + if (p.startsWith("md5")) { + this.md5origin = p.replace("md5=", ""); + } + if (p.startsWith("mv")) { + this.mediaVersion = p.replace("mv=", ""); + } + if (p.startsWith("q")) { + if (this.quality == 0) { + this.quality = Integer.parseInt(p.replace("q=", "")); + } + } + } + //Get real url + url = new URL(this.getTrackUrl(trackId, md5origin, mediaVersion, quality)); + } + + + this.connection = (HttpURLConnection) url.openConnection(); + this.connection.setChunkedStreamingMode(2048); + if (dataSpec.position > 0) { + this.counter = (int) (dataSpec.position/2048); + this.connection.setRequestProperty("Range", + "bytes=" + Long.toString(this.counter*2048) + "-"); + } + + InputStream is = this.connection.getInputStream(); + this.inputStream = new BufferedInputStream(new FilterInputStream(is) { + @Override + public int read(byte buffer[], int offset, int len) throws IOException { + byte[] b = new byte[2048]; + int t = 0; + int read = 0; + while (read != -1 && t != 2048) { + t += read = in.read(b, t, 2048-t); + } + + if (counter % 3 == 0) { + byte[] dec = decryptChunk(key, b); + System.arraycopy(dec, 0, buffer, offset, 2048); + } else { + System.arraycopy(b, 0, buffer, offset, 2048); + } + counter++; + + return t; + + } + },2048); + + + } catch (Exception e) { + //Quality fallback 
+ if (this.quality == 1) { + Log.e("E", e.toString()); + throw new HttpDataSourceException("Error loading URL", dataSpec, HttpDataSourceException.TYPE_OPEN); + } + if (this.quality == 3) this.quality = 1; + if (this.quality == 9) this.quality = 3; + // r e c u r s i o n + return this.open(dataSpec); + } + String size = this.connection.getHeaderField("Content-Length"); + return Long.parseLong(size); + } + + @Override + public int read(byte[] buffer, int offset, int length) throws HttpDataSourceException { + int read = 0; + try { + read = this.inputStream.read(buffer, offset, length); + } catch (Exception e) { + Log.e("E", e.toString()); + //throw new HttpDataSourceException("Error reading from stream", this.dataSpec, HttpDataSourceException.TYPE_READ); + } + return read; + } + @Override + public void close() { + try { + if (this.inputStream != null) this.inputStream.close(); + if (this.connection != null) this.connection.disconnect(); + } catch (Exception e) { + Log.e("E", e.toString()); + } + } + + @Override + public void setRequestProperty(String name, String value) { + Log.d("D", "setRequestProperty"); + } + + @Override + public void clearRequestProperty(String name) { + Log.d("D", "clearRequestProperty"); + } + + @Override + public void clearAllRequestProperties() { + Log.d("D", "clearAllRequestProperties"); + } + + @Override + public int getResponseCode() { + Log.d("D", "getResponseCode"); + return 0; + } + + @Override + public Map> getResponseHeaders() { + return this.connection.getHeaderFields(); + } + + public final void addTransferListener(TransferListener transferListener) { + Log.d("D", "addTransferListener"); + } + + @Override + public Uri getUri() { + return Uri.parse(this.connection.getURL().toString()); + } + + public static String bytesToHex(byte[] bytes) { + final char[] HEX_ARRAY = "0123456789ABCDEF".toCharArray(); + char[] hexChars = new char[bytes.length * 2]; + for (int j = 0; j < bytes.length; j++) { + int v = bytes[j] & 0xFF; + hexChars[j * 2] 
= HEX_ARRAY[v >>> 4]; + hexChars[j * 2 + 1] = HEX_ARRAY[v & 0x0F]; + } + return new String(hexChars); + } + + byte[] getKey(String id) { + String secret = "g4el58wc0zvf9na1"; + try { + MessageDigest md5 = MessageDigest.getInstance("MD5"); + md5.update(id.getBytes()); + byte[] md5id = md5.digest(); + String idmd5 = bytesToHex(md5id).toLowerCase(); + String key = ""; + for(int i=0; i<16; i++) { + int s0 = idmd5.charAt(i); + int s1 = idmd5.charAt(i+16); + int s2 = secret.charAt(i); + key += (char)(s0^s1^s2); + } + return key.getBytes(); + } catch (Exception e) { + Log.e("E", e.toString()); + return new byte[0]; + } + } + + + byte[] decryptChunk(byte[] key, byte[] data) { + try { + byte[] IV = {00, 01, 02, 03, 04, 05, 06, 07}; + SecretKeySpec Skey = new SecretKeySpec(key, "Blowfish"); + Cipher cipher = Cipher.getInstance("Blowfish/CBC/NoPadding"); + cipher.init(Cipher.DECRYPT_MODE, Skey, new javax.crypto.spec.IvParameterSpec(IV)); + return cipher.doFinal(data); + }catch (Exception e) { + Log.e("D", e.toString()); + return new byte[0]; + } + } + + public String getTrackUrl(String trackId, String md5origin, String mediaVersion, int quality) { + try { + int magic = 164; + + ByteArrayOutputStream step1 = new ByteArrayOutputStream(); + step1.write(md5origin.getBytes()); + step1.write(magic); + step1.write(Integer.toString(quality).getBytes()); + step1.write(magic); + step1.write(trackId.getBytes()); + step1.write(magic); + step1.write(mediaVersion.getBytes()); + //Get MD5 + MessageDigest md5 = MessageDigest.getInstance("MD5"); + md5.update(step1.toByteArray()); + byte[] digest = md5.digest(); + String md5hex = bytesToHex(digest).toLowerCase(); + + ByteArrayOutputStream step2 = new ByteArrayOutputStream(); + step2.write(md5hex.getBytes()); + step2.write(magic); + step2.write(step1.toByteArray()); + step2.write(magic); + + //Pad step2 with dots, to get correct length + while(step2.size()%16 > 0) step2.write(46); + + //Prepare AES encryption + Cipher cipher = 
Cipher.getInstance("AES/ECB/NoPadding"); + SecretKeySpec key = new SecretKeySpec("jo6aey6haid2Teih".getBytes(), "AES"); + cipher.init(Cipher.ENCRYPT_MODE, key); + //Encrypt + StringBuilder step3 = new StringBuilder(); + for (int i=0; i { + plugin.stopListening(); + return false; + }); + } + + @Override + public void onAttachedToEngine(@NonNull FlutterPluginBinding binding) { + startListening(binding.getApplicationContext(), binding.getBinaryMessenger()); + } + + @Override + public void onDetachedFromEngine(@NonNull FlutterPluginBinding binding) { + stopListening(); + } + + private void startListening(Context applicationContext, BinaryMessenger messenger) { + methodCallHandler = new MainMethodCallHandler(applicationContext, messenger); + + channel = new MethodChannel(messenger, "com.ryanheise.just_audio.methods"); + channel.setMethodCallHandler(methodCallHandler); + } + + private void stopListening() { + methodCallHandler.dispose(); + methodCallHandler = null; + + channel.setMethodCallHandler(null); + } +} diff --git a/just_audio/android/src/main/java/com/ryanheise/just_audio/MainMethodCallHandler.java b/just_audio/android/src/main/java/com/ryanheise/just_audio/MainMethodCallHandler.java new file mode 100644 index 0000000..990a71a --- /dev/null +++ b/just_audio/android/src/main/java/com/ryanheise/just_audio/MainMethodCallHandler.java @@ -0,0 +1,52 @@ +package com.ryanheise.just_audio; + +import android.content.Context; +import androidx.annotation.NonNull; +import io.flutter.plugin.common.BinaryMessenger; +import io.flutter.plugin.common.MethodCall; +import io.flutter.plugin.common.MethodChannel.MethodCallHandler; +import io.flutter.plugin.common.MethodChannel.Result; +import java.util.HashMap; +import java.util.List; +import java.util.ArrayList; +import java.util.Map; + +public class MainMethodCallHandler implements MethodCallHandler { + + private final Context applicationContext; + private final BinaryMessenger messenger; + + private final Map players = new 
HashMap<>(); + + public MainMethodCallHandler(Context applicationContext, + BinaryMessenger messenger) { + this.applicationContext = applicationContext; + this.messenger = messenger; + } + + @Override + public void onMethodCall(MethodCall call, @NonNull Result result) { + switch (call.method) { + case "init": + final List ids = call.arguments(); + String id = ids.get(0); + players.put(id, new AudioPlayer(applicationContext, messenger, id, + () -> players.remove(id) + )); + result.success(null); + break; + case "setIosCategory": + result.success(null); + break; + default: + result.notImplemented(); + break; + } + } + + void dispose() { + for (AudioPlayer player : new ArrayList(players.values())) { + player.dispose(); + } + } +} diff --git a/just_audio/darwin/Classes/AudioPlayer.m b/just_audio/darwin/Classes/AudioPlayer.m new file mode 100644 index 0000000..ccbfdea --- /dev/null +++ b/just_audio/darwin/Classes/AudioPlayer.m @@ -0,0 +1,1138 @@ +#import "AudioPlayer.h" +#import "AudioSource.h" +#import "IndexedAudioSource.h" +#import "UriAudioSource.h" +#import "ConcatenatingAudioSource.h" +#import "LoopingAudioSource.h" +#import "ClippingAudioSource.h" +#import +#import +#include + +// TODO: Check for and report invalid state transitions. 
+// TODO: Apply Apple's guidance on seeking: https://developer.apple.com/library/archive/qa/qa1820/_index.html +@implementation AudioPlayer { + NSObject* _registrar; + FlutterMethodChannel *_methodChannel; + FlutterEventChannel *_eventChannel; + FlutterEventSink _eventSink; + NSString *_playerId; + AVQueuePlayer *_player; + AudioSource *_audioSource; + NSMutableArray *_indexedAudioSources; + NSMutableArray *_order; + NSMutableArray *_orderInv; + int _index; + enum ProcessingState _processingState; + enum LoopMode _loopMode; + BOOL _shuffleModeEnabled; + long long _updateTime; + int _updatePosition; + int _lastPosition; + int _bufferedPosition; + // Set when the current item hasn't been played yet so we aren't sure whether sufficient audio has been buffered. + BOOL _bufferUnconfirmed; + CMTime _seekPos; + FlutterResult _loadResult; + FlutterResult _playResult; + id _timeObserver; + BOOL _automaticallyWaitsToMinimizeStalling; + BOOL _configuredSession; + BOOL _playing; +} + +- (instancetype)initWithRegistrar:(NSObject *)registrar playerId:(NSString*)idParam configuredSession:(BOOL)configuredSession { + self = [super init]; + NSAssert(self, @"super init cannot be nil"); + _registrar = registrar; + _playerId = idParam; + _configuredSession = configuredSession; + _methodChannel = + [FlutterMethodChannel methodChannelWithName:[NSMutableString stringWithFormat:@"com.ryanheise.just_audio.methods.%@", _playerId] + binaryMessenger:[registrar messenger]]; + _eventChannel = + [FlutterEventChannel eventChannelWithName:[NSMutableString stringWithFormat:@"com.ryanheise.just_audio.events.%@", _playerId] + binaryMessenger:[registrar messenger]]; + [_eventChannel setStreamHandler:self]; + _index = 0; + _processingState = none; + _loopMode = loopOff; + _shuffleModeEnabled = NO; + _player = nil; + _audioSource = nil; + _indexedAudioSources = nil; + _order = nil; + _orderInv = nil; + _seekPos = kCMTimeInvalid; + _timeObserver = 0; + _updatePosition = 0; + _updateTime = 0; + 
_lastPosition = 0; + _bufferedPosition = 0; + _bufferUnconfirmed = NO; + _playing = NO; + _loadResult = nil; + _playResult = nil; + _automaticallyWaitsToMinimizeStalling = YES; + __weak __typeof__(self) weakSelf = self; + [_methodChannel setMethodCallHandler:^(FlutterMethodCall* call, FlutterResult result) { + [weakSelf handleMethodCall:call result:result]; + }]; + return self; +} + +- (void)handleMethodCall:(FlutterMethodCall*)call result:(FlutterResult)result { + NSArray* args = (NSArray*)call.arguments; + if ([@"load" isEqualToString:call.method]) { + [self load:args[0] result:result]; + } else if ([@"play" isEqualToString:call.method]) { + [self play:result]; + } else if ([@"pause" isEqualToString:call.method]) { + [self pause]; + result(nil); + } else if ([@"setVolume" isEqualToString:call.method]) { + [self setVolume:(float)[args[0] doubleValue]]; + result(nil); + } else if ([@"setSpeed" isEqualToString:call.method]) { + [self setSpeed:(float)[args[0] doubleValue]]; + result(nil); + } else if ([@"setLoopMode" isEqualToString:call.method]) { + [self setLoopMode:[args[0] intValue]]; + result(nil); + } else if ([@"setShuffleModeEnabled" isEqualToString:call.method]) { + [self setShuffleModeEnabled:(BOOL)[args[0] boolValue]]; + result(nil); + } else if ([@"setAutomaticallyWaitsToMinimizeStalling" isEqualToString:call.method]) { + [self setAutomaticallyWaitsToMinimizeStalling:(BOOL)[args[0] boolValue]]; + result(nil); + } else if ([@"seek" isEqualToString:call.method]) { + CMTime position = args[0] == [NSNull null] ? 
kCMTimePositiveInfinity : CMTimeMake([args[0] intValue], 1000); + [self seek:position index:args[1] completionHandler:^(BOOL finished) { + result(nil); + }]; + result(nil); + } else if ([@"dispose" isEqualToString:call.method]) { + [self dispose]; + result(nil); + } else if ([@"concatenating.add" isEqualToString:call.method]) { + [self concatenatingAdd:(NSString*)args[0] source:(NSDictionary*)args[1]]; + result(nil); + } else if ([@"concatenating.insert" isEqualToString:call.method]) { + [self concatenatingInsert:(NSString*)args[0] index:[args[1] intValue] source:(NSDictionary*)args[2]]; + result(nil); + } else if ([@"concatenating.addAll" isEqualToString:call.method]) { + [self concatenatingAddAll:(NSString*)args[0] sources:(NSArray*)args[1]]; + result(nil); + } else if ([@"concatenating.insertAll" isEqualToString:call.method]) { + [self concatenatingInsertAll:(NSString*)args[0] index:[args[1] intValue] sources:(NSArray*)args[2]]; + result(nil); + } else if ([@"concatenating.removeAt" isEqualToString:call.method]) { + [self concatenatingRemoveAt:(NSString*)args[0] index:(int)args[1]]; + result(nil); + } else if ([@"concatenating.removeRange" isEqualToString:call.method]) { + [self concatenatingRemoveRange:(NSString*)args[0] start:[args[1] intValue] end:[args[2] intValue]]; + result(nil); + } else if ([@"concatenating.move" isEqualToString:call.method]) { + [self concatenatingMove:(NSString*)args[0] currentIndex:[args[1] intValue] newIndex:[args[2] intValue]]; + result(nil); + } else if ([@"concatenating.clear" isEqualToString:call.method]) { + [self concatenatingClear:(NSString*)args[0]]; + result(nil); + } else { + result(FlutterMethodNotImplemented); + } +} + +// Untested +- (void)concatenatingAdd:(NSString *)catId source:(NSDictionary *)source { + [self concatenatingInsertAll:catId index:-1 sources:@[source]]; +} + +// Untested +- (void)concatenatingInsert:(NSString *)catId index:(int)index source:(NSDictionary *)source { + [self concatenatingInsertAll:catId 
index:index sources:@[source]]; +} + +// Untested +- (void)concatenatingAddAll:(NSString *)catId sources:(NSArray *)sources { + [self concatenatingInsertAll:catId index:-1 sources:sources]; +} + +// Untested +- (void)concatenatingInsertAll:(NSString *)catId index:(int)index sources:(NSArray *)sources { + // Find all duplicates of the identified ConcatenatingAudioSource. + NSMutableArray *matches = [[NSMutableArray alloc] init]; + [_audioSource findById:catId matches:matches]; + // Add each new source to each match. + for (int i = 0; i < matches.count; i++) { + ConcatenatingAudioSource *catSource = (ConcatenatingAudioSource *)matches[i]; + int idx = index >= 0 ? index : catSource.count; + NSMutableArray *audioSources = [self decodeAudioSources:sources]; + for (int j = 0; j < audioSources.count; j++) { + AudioSource *audioSource = audioSources[j]; + [catSource insertSource:audioSource atIndex:(idx + j)]; + } + } + // Index the new audio sources. + _indexedAudioSources = [[NSMutableArray alloc] init]; + [_audioSource buildSequence:_indexedAudioSources treeIndex:0]; + for (int i = 0; i < [_indexedAudioSources count]; i++) { + IndexedAudioSource *audioSource = _indexedAudioSources[i]; + if (!audioSource.isAttached) { + audioSource.playerItem.audioSource = audioSource; + [self addItemObservers:audioSource.playerItem]; + } + } + [self updateOrder]; + if (_player.currentItem) { + _index = [self indexForItem:_player.currentItem]; + } else { + _index = 0; + } + [self enqueueFrom:_index]; + // Notify each new IndexedAudioSource that it's been attached to the player. 
+ for (int i = 0; i < [_indexedAudioSources count]; i++) { + if (!_indexedAudioSources[i].isAttached) { + [_indexedAudioSources[i] attach:_player]; + } + } + [self broadcastPlaybackEvent]; +} + +// Untested +- (void)concatenatingRemoveAt:(NSString *)catId index:(int)index { + [self concatenatingRemoveRange:catId start:index end:(index + 1)]; +} + +// Untested +- (void)concatenatingRemoveRange:(NSString *)catId start:(int)start end:(int)end { + // Find all duplicates of the identified ConcatenatingAudioSource. + NSMutableArray *matches = [[NSMutableArray alloc] init]; + [_audioSource findById:catId matches:matches]; + // Remove range from each match. + for (int i = 0; i < matches.count; i++) { + ConcatenatingAudioSource *catSource = (ConcatenatingAudioSource *)matches[i]; + int endIndex = end >= 0 ? end : catSource.count; + [catSource removeSourcesFromIndex:start toIndex:endIndex]; + } + // Re-index the remaining audio sources. + NSArray *oldIndexedAudioSources = _indexedAudioSources; + _indexedAudioSources = [[NSMutableArray alloc] init]; + [_audioSource buildSequence:_indexedAudioSources treeIndex:0]; + for (int i = 0, j = 0; i < _indexedAudioSources.count; i++, j++) { + IndexedAudioSource *audioSource = _indexedAudioSources[i]; + while (audioSource != oldIndexedAudioSources[j]) { + [self removeItemObservers:oldIndexedAudioSources[j].playerItem]; + if (j < _index) { + _index--; + } else if (j == _index) { + // The currently playing item was removed. + } + j++; + } + } + [self updateOrder]; + if (_index >= _indexedAudioSources.count) _index = _indexedAudioSources.count - 1; + if (_index < 0) _index = 0; + [self enqueueFrom:_index]; + [self broadcastPlaybackEvent]; +} + +// Untested +- (void)concatenatingMove:(NSString *)catId currentIndex:(int)currentIndex newIndex:(int)newIndex { + // Find all duplicates of the identified ConcatenatingAudioSource. 
+ NSMutableArray *matches = [[NSMutableArray alloc] init]; + [_audioSource findById:catId matches:matches]; + // Move range within each match. + for (int i = 0; i < matches.count; i++) { + ConcatenatingAudioSource *catSource = (ConcatenatingAudioSource *)matches[i]; + [catSource moveSourceFromIndex:currentIndex toIndex:newIndex]; + } + // Re-index the audio sources. + _indexedAudioSources = [[NSMutableArray alloc] init]; + [_audioSource buildSequence:_indexedAudioSources treeIndex:0]; + _index = [self indexForItem:_player.currentItem]; + [self broadcastPlaybackEvent]; +} + +// Untested +- (void)concatenatingClear:(NSString *)catId { + [self concatenatingRemoveRange:catId start:0 end:-1]; +} + +- (FlutterError*)onListenWithArguments:(id)arguments eventSink:(FlutterEventSink)eventSink { + _eventSink = eventSink; + return nil; +} + +- (FlutterError*)onCancelWithArguments:(id)arguments { + _eventSink = nil; + return nil; +} + +- (void)checkForDiscontinuity { + if (!_eventSink) return; + if (!_playing || CMTIME_IS_VALID(_seekPos) || _processingState == completed) return; + int position = [self getCurrentPosition]; + if (_processingState == buffering) { + if (position > _lastPosition) { + [self leaveBuffering:@"stall ended"]; + [self updatePosition]; + [self broadcastPlaybackEvent]; + } + } else { + long long now = (long long)([[NSDate date] timeIntervalSince1970] * 1000.0); + long long timeSinceLastUpdate = now - _updateTime; + long long expectedPosition = _updatePosition + (long long)(timeSinceLastUpdate * _player.rate); + long long drift = position - expectedPosition; + //NSLog(@"position: %d, drift: %lld", position, drift); + // Update if we've drifted or just started observing + if (_updateTime == 0L) { + [self broadcastPlaybackEvent]; + } else if (drift < -100) { + [self enterBuffering:@"stalling"]; + NSLog(@"Drift: %lld", drift); + [self updatePosition]; + [self broadcastPlaybackEvent]; + } + } + _lastPosition = position; +} + +- (void)enterBuffering:(NSString 
*)reason { + NSLog(@"ENTER BUFFERING: %@", reason); + _processingState = buffering; +} + +- (void)leaveBuffering:(NSString *)reason { + NSLog(@"LEAVE BUFFERING: %@", reason); + _processingState = ready; +} + +- (void)broadcastPlaybackEvent { + if (!_eventSink) return; + _eventSink(@{ + @"processingState": @(_processingState), + @"updatePosition": @(_updatePosition), + @"updateTime": @(_updateTime), + // TODO: buffer position + @"bufferedPosition": @(_updatePosition), + // TODO: Icy Metadata + @"icyMetadata": [NSNull null], + @"duration": @([self getDuration]), + @"currentIndex": @(_index), + }); +} + +- (int)getCurrentPosition { + if (_processingState == none || _processingState == loading) { + return 0; + } else if (CMTIME_IS_VALID(_seekPos)) { + return (int)(1000 * CMTimeGetSeconds(_seekPos)); + } else if (_indexedAudioSources) { + int ms = (int)(1000 * CMTimeGetSeconds(_indexedAudioSources[_index].position)); + if (ms < 0) ms = 0; + return ms; + } else { + return 0; + } +} + +- (int)getBufferedPosition { + if (_processingState == none || _processingState == loading) { + return 0; + } else if (_indexedAudioSources) { + int ms = (int)(1000 * CMTimeGetSeconds(_indexedAudioSources[_index].bufferedPosition)); + if (ms < 0) ms = 0; + return ms; + } else { + return 0; + } +} + +- (int)getDuration { + if (_processingState == none) { + return -1; + } else if (_indexedAudioSources) { + int v = (int)(1000 * CMTimeGetSeconds(_indexedAudioSources[_index].duration)); + return v; + } else { + return 0; + } +} + +- (void)removeItemObservers:(AVPlayerItem *)playerItem { + [playerItem removeObserver:self forKeyPath:@"status"]; + [playerItem removeObserver:self forKeyPath:@"playbackBufferEmpty"]; + [playerItem removeObserver:self forKeyPath:@"playbackBufferFull"]; + //[playerItem removeObserver:self forKeyPath:@"playbackLikelyToKeepUp"]; + [[NSNotificationCenter defaultCenter] removeObserver:self name:AVPlayerItemDidPlayToEndTimeNotification object:playerItem]; + 
[[NSNotificationCenter defaultCenter] removeObserver:self name:AVPlayerItemFailedToPlayToEndTimeNotification object:playerItem]; + [[NSNotificationCenter defaultCenter] removeObserver:self name:AVPlayerItemPlaybackStalledNotification object:playerItem]; +} + +- (void)addItemObservers:(AVPlayerItem *)playerItem { + // Get notified when the item is loaded or had an error loading + [playerItem addObserver:self forKeyPath:@"status" options:NSKeyValueObservingOptionNew context:nil]; + // Get notified of the buffer state + [playerItem addObserver:self forKeyPath:@"playbackBufferEmpty" options:NSKeyValueObservingOptionNew context:nil]; + [playerItem addObserver:self forKeyPath:@"playbackBufferFull" options:NSKeyValueObservingOptionNew context:nil]; + [playerItem addObserver:self forKeyPath:@"loadedTimeRanges" options:NSKeyValueObservingOptionNew context:nil]; + //[playerItem addObserver:self forKeyPath:@"playbackLikelyToKeepUp" options:NSKeyValueObservingOptionNew context:nil]; + // Get notified when playback has reached the end + [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(onComplete:) name:AVPlayerItemDidPlayToEndTimeNotification object:playerItem]; + // Get notified when playback stops due to a failure (currently unused) + [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(onFailToComplete:) name:AVPlayerItemFailedToPlayToEndTimeNotification object:playerItem]; + // Get notified when playback stalls (currently unused) + [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(onItemStalled:) name:AVPlayerItemPlaybackStalledNotification object:playerItem]; +} + +- (NSMutableArray *)decodeAudioSources:(NSArray *)data { + NSMutableArray *array = [[NSMutableArray alloc] init]; + for (int i = 0; i < [data count]; i++) { + AudioSource *source = [self decodeAudioSource:data[i]]; + [array addObject:source]; + } + return array; +} + +- (AudioSource *)decodeAudioSource:(NSDictionary *)data { + NSString *type = 
data[@"type"]; + if ([@"progressive" isEqualToString:type]) { + return [[UriAudioSource alloc] initWithId:data[@"id"] uri:data[@"uri"]]; + } else if ([@"dash" isEqualToString:type]) { + return [[UriAudioSource alloc] initWithId:data[@"id"] uri:data[@"uri"]]; + } else if ([@"hls" isEqualToString:type]) { + return [[UriAudioSource alloc] initWithId:data[@"id"] uri:data[@"uri"]]; + } else if ([@"concatenating" isEqualToString:type]) { + return [[ConcatenatingAudioSource alloc] initWithId:data[@"id"] + audioSources:[self decodeAudioSources:data[@"audioSources"]]]; + } else if ([@"clipping" isEqualToString:type]) { + return [[ClippingAudioSource alloc] initWithId:data[@"id"] + audioSource:[self decodeAudioSource:data[@"audioSource"]] + start:data[@"start"] + end:data[@"end"]]; + } else if ([@"looping" isEqualToString:type]) { + NSMutableArray *childSources = [NSMutableArray new]; + int count = [data[@"count"] intValue]; + for (int i = 0; i < count; i++) { + [childSources addObject:[self decodeAudioSource:data[@"audioSource"]]]; + } + return [[LoopingAudioSource alloc] initWithId:data[@"id"] audioSources:childSources]; + } else { + return nil; + } +} + +- (void)enqueueFrom:(int)index { + int oldIndex = _index; + _index = index; + + // Update the queue while keeping the currently playing item untouched. + + /* NSLog(@"before reorder: _player.items.count: ", _player.items.count); */ + /* [self dumpQueue]; */ + + // First, remove all _player items except for the currently playing one (if any). + IndexedPlayerItem *oldItem = _player.currentItem; + IndexedPlayerItem *existingItem = nil; + NSArray *oldPlayerItems = [NSArray arrayWithArray:_player.items]; + // In the first pass, preserve the old and new items. + for (int i = 0; i < oldPlayerItems.count; i++) { + if (oldPlayerItems[i] == _indexedAudioSources[_index].playerItem) { + // Preserve and tag new item if it is already in the queue. 
+ existingItem = oldPlayerItems[i]; + } else if (oldPlayerItems[i] == oldItem) { + // Temporarily preserve old item, just to avoid jumping to + // intermediate queue positions unnecessarily. We only want to jump + // once to _index. + } else { + [_player removeItem:oldPlayerItems[i]]; + } + } + // In the second pass, remove the old item (if different from new item). + if (_index != oldIndex) { + [_player removeItem:oldItem]; + } + + /* NSLog(@"inter order: _player.items.count: ", _player.items.count); */ + /* [self dumpQueue]; */ + + // Regenerate queue + BOOL include = NO; + for (int i = 0; i < [_order count]; i++) { + int si = [_order[i] intValue]; + if (si == _index) include = YES; + if (include && _indexedAudioSources[si].playerItem != existingItem) { + [_player insertItem:_indexedAudioSources[si].playerItem afterItem:nil]; + } + } + + /* NSLog(@"after reorder: _player.items.count: ", _player.items.count); */ + /* [self dumpQueue]; */ + + if (_processingState != loading && oldItem != _indexedAudioSources[_index].playerItem) { + // || !_player.currentItem.playbackLikelyToKeepUp; + if (_player.currentItem.playbackBufferEmpty) { + [self enterBuffering:@"enqueueFrom playbackBufferEmpty"]; + } else { + [self leaveBuffering:@"enqueueFrom !playbackBufferEmpty"]; + } + [self updatePosition]; + } +} + +- (void)updatePosition { + _updatePosition = [self getCurrentPosition]; + _updateTime = (long long)([[NSDate date] timeIntervalSince1970] * 1000.0); +} + +- (void)load:(NSDictionary *)source result:(FlutterResult)result { + if (!_playing) { + [_player pause]; + } + if (_processingState == loading) { + [self abortExistingConnection]; + } + _loadResult = result; + _index = 0; + [self updatePosition]; + _processingState = loading; + [self broadcastPlaybackEvent]; + // Remove previous observers + if (_indexedAudioSources) { + for (int i = 0; i < [_indexedAudioSources count]; i++) { + [self removeItemObservers:_indexedAudioSources[i].playerItem]; + } + } + // Decode audio 
source + if (_audioSource && [@"clipping" isEqualToString:source[@"type"]]) { + // Check if we're clipping an audio source that was previously loaded. + UriAudioSource *child = nil; + if ([_audioSource isKindOfClass:[ClippingAudioSource class]]) { + ClippingAudioSource *clipper = (ClippingAudioSource *)_audioSource; + child = clipper.audioSource; + } else if ([_audioSource isKindOfClass:[UriAudioSource class]]) { + child = (UriAudioSource *)_audioSource; + } + if (child) { + _audioSource = [[ClippingAudioSource alloc] initWithId:source[@"id"] + audioSource:child + start:source[@"start"] + end:source[@"end"]]; + } else { + _audioSource = [self decodeAudioSource:source]; + } + } else { + _audioSource = [self decodeAudioSource:source]; + } + _indexedAudioSources = [[NSMutableArray alloc] init]; + [_audioSource buildSequence:_indexedAudioSources treeIndex:0]; + for (int i = 0; i < [_indexedAudioSources count]; i++) { + IndexedAudioSource *source = _indexedAudioSources[i]; + [self addItemObservers:source.playerItem]; + source.playerItem.audioSource = source; + } + [self updateOrder]; + // Set up an empty player + if (!_player) { + _player = [[AVQueuePlayer alloc] initWithItems:@[]]; + if (@available(macOS 10.12, iOS 10.0, *)) { + _player.automaticallyWaitsToMinimizeStalling = _automaticallyWaitsToMinimizeStalling; + // TODO: Remove these observers in dispose. + [_player addObserver:self + forKeyPath:@"timeControlStatus" + options:NSKeyValueObservingOptionNew + context:nil]; + } + [_player addObserver:self + forKeyPath:@"currentItem" + options:NSKeyValueObservingOptionNew + context:nil]; + // TODO: learn about the different ways to define weakSelf. 
+ //__weak __typeof__(self) weakSelf = self; + //typeof(self) __weak weakSelf = self; + __unsafe_unretained typeof(self) weakSelf = self; + if (@available(macOS 10.12, iOS 10.0, *)) {} + else { + _timeObserver = [_player addPeriodicTimeObserverForInterval:CMTimeMake(200, 1000) + queue:nil + usingBlock:^(CMTime time) { + [weakSelf checkForDiscontinuity]; + } + ]; + } + } + // Initialise the AVQueuePlayer with items. + [self enqueueFrom:0]; + // Notify each IndexedAudioSource that it's been attached to the player. + for (int i = 0; i < [_indexedAudioSources count]; i++) { + [_indexedAudioSources[i] attach:_player]; + } + + if (_player.currentItem.status == AVPlayerItemStatusReadyToPlay) { + _loadResult(@([self getDuration])); + _loadResult = nil; + } else { + // We send result after the playerItem is ready in observeValueForKeyPath. + } + [self broadcastPlaybackEvent]; +} + +- (void)updateOrder { + if (_shuffleModeEnabled) { + [_audioSource shuffle:0 currentIndex: _index]; + } + _orderInv = [NSMutableArray arrayWithCapacity:[_indexedAudioSources count]]; + for (int i = 0; i < [_indexedAudioSources count]; i++) { + [_orderInv addObject:@(0)]; + } + if (_shuffleModeEnabled) { + _order = [_audioSource getShuffleOrder]; + } else { + NSMutableArray *order = [[NSMutableArray alloc] init]; + for (int i = 0; i < [_indexedAudioSources count]; i++) { + [order addObject:@(i)]; + } + _order = order; + } + for (int i = 0; i < [_indexedAudioSources count]; i++) { + _orderInv[[_order[i] intValue]] = @(i); + } +} + +- (void)onItemStalled:(NSNotification *)notification { + IndexedPlayerItem *playerItem = (IndexedPlayerItem *)notification.object; + NSLog(@"onItemStalled"); +} + +- (void)onFailToComplete:(NSNotification *)notification { + IndexedPlayerItem *playerItem = (IndexedPlayerItem *)notification.object; + NSLog(@"onFailToComplete"); +} + +- (void)onComplete:(NSNotification *)notification { + NSLog(@"onComplete"); + if (_loopMode == loopOne) { + [self seek:kCMTimeZero 
index:@(_index) completionHandler:^(BOOL finished) { + // XXX: Not necessary? + [self play]; + }]; + } else { + IndexedPlayerItem *endedPlayerItem = (IndexedPlayerItem *)notification.object; + IndexedAudioSource *endedSource = endedPlayerItem.audioSource; + // When an item ends, seek back to its beginning. + [endedSource seek:kCMTimeZero]; + + if ([_orderInv[_index] intValue] + 1 < [_order count]) { + // account for automatic move to next item + _index = [_order[[_orderInv[_index] intValue] + 1] intValue]; + NSLog(@"advance to next: index = %d", _index); + [self broadcastPlaybackEvent]; + } else { + // reached end of playlist + if (_loopMode == loopAll) { + NSLog(@"Loop back to first item"); + // Loop back to the beginning + // TODO: Currently there will be a gap at the loop point. + // Maybe we can do something clever by temporarily adding the + // first playlist item at the end of the queue, although this + // will affect any code that assumes the queue always + // corresponds to a contiguous region of the indexed audio + // sources. + // For now we just do a seek back to the start. + if ([_order count] == 1) { + [self seek:kCMTimeZero index:[NSNull null] completionHandler:^(BOOL finished) { + // XXX: Necessary? + [self play]; + }]; + } else { + [self seek:kCMTimeZero index:_order[0] completionHandler:^(BOOL finished) { + // XXX: Necessary? 
+ [self play]; + }]; + } + } else { + [self complete]; + } + } + } +} + +- (void)observeValueForKeyPath:(NSString *)keyPath + ofObject:(id)object + change:(NSDictionary *)change + context:(void *)context { + + if ([keyPath isEqualToString:@"status"]) { + IndexedPlayerItem *playerItem = (IndexedPlayerItem *)object; + AVPlayerItemStatus status = AVPlayerItemStatusUnknown; + NSNumber *statusNumber = change[NSKeyValueChangeNewKey]; + if ([statusNumber isKindOfClass:[NSNumber class]]) { + status = statusNumber.intValue; + } + switch (status) { + case AVPlayerItemStatusReadyToPlay: { + if (playerItem != _player.currentItem) return; + // Detect buffering in different ways depending on whether we're playing + if (_playing) { + if (@available(macOS 10.12, iOS 10.0, *)) { + if (_player.timeControlStatus == AVPlayerTimeControlStatusWaitingToPlayAtSpecifiedRate) { + [self enterBuffering:@"ready to play: playing, waitingToPlay"]; + } else { + [self leaveBuffering:@"ready to play: playing, !waitingToPlay"]; + } + [self updatePosition]; + } else { + // If this happens when we're playing, check whether buffer is confirmed + if (_bufferUnconfirmed && !_player.currentItem.playbackBufferFull) { + // Stay in bufering - XXX Test + [self enterBuffering:@"ready to play: playing, bufferUnconfirmed && !playbackBufferFull"]; + } else { + if (_player.currentItem.playbackBufferEmpty) { + // !_player.currentItem.playbackLikelyToKeepUp; + [self enterBuffering:@"ready to play: playing, playbackBufferEmpty"]; + } else { + [self leaveBuffering:@"ready to play: playing, !playbackBufferEmpty"]; + } + [self updatePosition]; + } + } + } else { + if (_player.currentItem.playbackBufferEmpty) { + [self enterBuffering:@"ready to play: !playing, playbackBufferEmpty"]; + // || !_player.currentItem.playbackLikelyToKeepUp; + } else { + [self leaveBuffering:@"ready to play: !playing, !playbackBufferEmpty"]; + } + [self updatePosition]; + } + [self broadcastPlaybackEvent]; + if (_loadResult) { + 
_loadResult(@([self getDuration])); + _loadResult = nil; + } + break; + } + case AVPlayerItemStatusFailed: { + NSLog(@"AVPlayerItemStatusFailed"); + [self sendErrorForItem:playerItem]; + break; + } + case AVPlayerItemStatusUnknown: + break; + } + } else if ([keyPath isEqualToString:@"playbackBufferEmpty"] || [keyPath isEqualToString:@"playbackBufferFull"]) { + // Use these values to detect buffering. + IndexedPlayerItem *playerItem = (IndexedPlayerItem *)object; + if (playerItem != _player.currentItem) return; + // If there's a seek in progress, these values are unreliable + if (CMTIME_IS_VALID(_seekPos)) return; + // Detect buffering in different ways depending on whether we're playing + if (_playing) { + if (@available(macOS 10.12, iOS 10.0, *)) { + // We handle this with timeControlStatus instead. + } else { + if (_bufferUnconfirmed && playerItem.playbackBufferFull) { + _bufferUnconfirmed = NO; + [self leaveBuffering:@"playing, _bufferUnconfirmed && playbackBufferFull"]; + [self updatePosition]; + NSLog(@"Buffering confirmed! 
leaving buffering"); + [self broadcastPlaybackEvent]; + } + } + } else { + if (playerItem.playbackBufferEmpty) { + [self enterBuffering:@"!playing, playbackBufferEmpty"]; + [self updatePosition]; + [self broadcastPlaybackEvent]; + } else if (!playerItem.playbackBufferEmpty || playerItem.playbackBufferFull) { + _processingState = ready; + [self leaveBuffering:@"!playing, !playbackBufferEmpty || playbackBufferFull"]; + [self updatePosition]; + [self broadcastPlaybackEvent]; + } + } + /* } else if ([keyPath isEqualToString:@"playbackLikelyToKeepUp"]) { */ + } else if ([keyPath isEqualToString:@"timeControlStatus"]) { + if (@available(macOS 10.12, iOS 10.0, *)) { + AVPlayerTimeControlStatus status = AVPlayerTimeControlStatusPaused; + NSNumber *statusNumber = change[NSKeyValueChangeNewKey]; + if ([statusNumber isKindOfClass:[NSNumber class]]) { + status = statusNumber.intValue; + } + switch (status) { + case AVPlayerTimeControlStatusPaused: + //NSLog(@"AVPlayerTimeControlStatusPaused"); + break; + case AVPlayerTimeControlStatusWaitingToPlayAtSpecifiedRate: + //NSLog(@"AVPlayerTimeControlStatusWaitingToPlayAtSpecifiedRate"); + if (_processingState != completed) { + [self enterBuffering:@"timeControlStatus"]; + [self updatePosition]; + [self broadcastPlaybackEvent]; + } else { + NSLog(@"Ignoring wait signal because we reached the end"); + } + break; + case AVPlayerTimeControlStatusPlaying: + [self leaveBuffering:@"timeControlStatus"]; + [self updatePosition]; + [self broadcastPlaybackEvent]; + break; + } + } + } else if ([keyPath isEqualToString:@"currentItem"] && _player.currentItem) { + if (_player.currentItem.status == AVPlayerItemStatusFailed) { + if ([_orderInv[_index] intValue] + 1 < [_order count]) { + // account for automatic move to next item + _index = [_order[[_orderInv[_index] intValue] + 1] intValue]; + NSLog(@"advance to next on error: index = %d", _index); + [self broadcastPlaybackEvent]; + } else { + NSLog(@"error on last item"); + } + return; + } else { + 
int expectedIndex = [self indexForItem:_player.currentItem]; + if (_index != expectedIndex) { + // AVQueuePlayer will sometimes skip over error items without + // notifying this observer. + NSLog(@"Queue change detected. Adjusting index from %d -> %d", _index, expectedIndex); + _index = expectedIndex; + [self broadcastPlaybackEvent]; + } + } + //NSLog(@"currentItem changed. _index=%d", _index); + _bufferUnconfirmed = YES; + // If we've skipped or transitioned to a new item and we're not + // currently in the middle of a seek + if (CMTIME_IS_INVALID(_seekPos) && _player.currentItem.status == AVPlayerItemStatusReadyToPlay) { + [self updatePosition]; + IndexedAudioSource *source = ((IndexedPlayerItem *)_player.currentItem).audioSource; + // We should already be at position zero but for + // ClippingAudioSource it might be off by some milliseconds so we + // consider anything <= 100 as close enough. + if ((int)(1000 * CMTimeGetSeconds(source.position)) > 100) { + NSLog(@"On currentItem change, seeking back to zero"); + BOOL shouldResumePlayback = NO; + AVPlayerActionAtItemEnd originalEndAction = _player.actionAtItemEnd; + if (_playing && CMTimeGetSeconds(CMTimeSubtract(source.position, source.duration)) >= 0) { + NSLog(@"Need to pause while rewinding because we're at the end"); + shouldResumePlayback = YES; + _player.actionAtItemEnd = AVPlayerActionAtItemEndPause; + [_player pause]; + } + [self enterBuffering:@"currentItem changed, seeking"]; + [self updatePosition]; + [self broadcastPlaybackEvent]; + [source seek:kCMTimeZero completionHandler:^(BOOL finished) { + [self leaveBuffering:@"currentItem changed, finished seek"]; + [self updatePosition]; + [self broadcastPlaybackEvent]; + if (shouldResumePlayback) { + _player.actionAtItemEnd = originalEndAction; + // TODO: This logic is almost duplicated in seek. See if we can reuse this code. + [_player play]; + } + }]; + } else { + // Already at zero, no need to seek. 
+ } + } + } else if ([keyPath isEqualToString:@"loadedTimeRanges"]) { + IndexedPlayerItem *playerItem = (IndexedPlayerItem *)object; + if (playerItem != _player.currentItem) return; + int pos = [self getBufferedPosition]; + if (pos != _bufferedPosition) { + _bufferedPosition = pos; + [self broadcastPlaybackEvent]; + } + } +} + +- (void)sendErrorForItem:(IndexedPlayerItem *)playerItem { + FlutterError *flutterError = [FlutterError errorWithCode:[NSString stringWithFormat:@"%d", playerItem.error.code] + message:playerItem.error.localizedDescription + details:nil]; + [self sendError:flutterError playerItem:playerItem]; +} + +- (void)sendError:(FlutterError *)flutterError playerItem:(IndexedPlayerItem *)playerItem { + NSLog(@"sendError"); + if (_loadResult && playerItem == _player.currentItem) { + _loadResult(flutterError); + _loadResult = nil; + } + if (_eventSink) { + // Broadcast all errors even if they aren't on the current item. + _eventSink(flutterError); + } +} + +- (void)abortExistingConnection { + FlutterError *flutterError = [FlutterError errorWithCode:@"abort" + message:@"Connection aborted" + details:nil]; + [self sendError:flutterError playerItem:nil]; +} + +- (int)indexForItem:(IndexedPlayerItem *)playerItem { + for (int i = 0; i < _indexedAudioSources.count; i++) { + if (_indexedAudioSources[i].playerItem == playerItem) { + return i; + } + } + return -1; +} + +- (void)play { + [self play:nil]; +} + +- (void)play:(FlutterResult)result { + if (result) { + if (_playResult) { + NSLog(@"INTERRUPTING PLAY"); + _playResult(nil); + } + _playResult = result; + } + _playing = YES; +#if TARGET_OS_IPHONE + if (_configuredSession) { + [[AVAudioSession sharedInstance] setActive:YES error:nil]; + } +#endif + [_player play]; + [self updatePosition]; + if (@available(macOS 10.12, iOS 10.0, *)) {} + else { + if (_bufferUnconfirmed && !_player.currentItem.playbackBufferFull) { + [self enterBuffering:@"play, _bufferUnconfirmed && !playbackBufferFull"]; + [self 
broadcastPlaybackEvent]; + } + } +} + +- (void)pause { + _playing = NO; + [_player pause]; + [self updatePosition]; + [self broadcastPlaybackEvent]; + if (_playResult) { + NSLog(@"PLAY FINISHED DUE TO PAUSE"); + _playResult(nil); + _playResult = nil; + } +} + +- (void)complete { + [self updatePosition]; + _processingState = completed; + [self broadcastPlaybackEvent]; + if (_playResult) { + NSLog(@"PLAY FINISHED DUE TO COMPLETE"); + _playResult(nil); + _playResult = nil; + } +} + +- (void)setVolume:(float)volume { + [_player setVolume:volume]; +} + +- (void)setSpeed:(float)speed { + if (speed == 1.0 + || (speed < 1.0 && _player.currentItem.canPlaySlowForward) + || (speed > 1.0 && _player.currentItem.canPlayFastForward)) { + _player.rate = speed; + } + [self updatePosition]; +} + +- (void)setLoopMode:(int)loopMode { + _loopMode = loopMode; + if (_player) { + switch (_loopMode) { + case loopOne: + _player.actionAtItemEnd = AVPlayerActionAtItemEndPause; // AVPlayerActionAtItemEndNone + break; + default: + _player.actionAtItemEnd = AVPlayerActionAtItemEndAdvance; + } + } +} + +- (void)setShuffleModeEnabled:(BOOL)shuffleModeEnabled { + NSLog(@"setShuffleModeEnabled: %d", shuffleModeEnabled); + _shuffleModeEnabled = shuffleModeEnabled; + if (!_audioSource) return; + + [self updateOrder]; + + [self enqueueFrom:_index]; +} + +- (void)dumpQueue { + for (int i = 0; i < _player.items.count; i++) { + IndexedPlayerItem *playerItem = _player.items[i]; + for (int j = 0; j < _indexedAudioSources.count; j++) { + IndexedAudioSource *source = _indexedAudioSources[j]; + if (source.playerItem == playerItem) { + NSLog(@"- %d", j); + break; + } + } + } +} + +- (void)setAutomaticallyWaitsToMinimizeStalling:(bool)automaticallyWaitsToMinimizeStalling { + _automaticallyWaitsToMinimizeStalling = automaticallyWaitsToMinimizeStalling; + if (@available(macOS 10.12, iOS 10.0, *)) { + if(_player) { + _player.automaticallyWaitsToMinimizeStalling = automaticallyWaitsToMinimizeStalling; + } + } +} + 
+- (void)seek:(CMTime)position index:(NSNumber *)newIndex completionHandler:(void (^)(BOOL))completionHandler { + int index = _index; + if (newIndex != [NSNull null]) { + index = [newIndex intValue]; + } + if (index != _index) { + // Jump to a new item + /* if (_playing && index == _index + 1) { */ + /* // Special case for jumping to the very next item */ + /* NSLog(@"seek to next item: %d -> %d", _index, index); */ + /* [_indexedAudioSources[_index] seek:kCMTimeZero]; */ + /* _index = index; */ + /* [_player advanceToNextItem]; */ + /* [self broadcastPlaybackEvent]; */ + /* } else */ + { + // Jump to a distant item + //NSLog(@"seek# jump to distant item: %d -> %d", _index, index); + if (_playing) { + [_player pause]; + } + [_indexedAudioSources[_index] seek:kCMTimeZero]; + // The "currentItem" key observer will respect that a seek is already in progress + _seekPos = position; + [self updatePosition]; + [self enqueueFrom:index]; + IndexedAudioSource *source = _indexedAudioSources[_index]; + if (abs((int)(1000 * CMTimeGetSeconds(CMTimeSubtract(source.position, position)))) > 100) { + [self enterBuffering:@"seek to index"]; + [self updatePosition]; + [self broadcastPlaybackEvent]; + [source seek:position completionHandler:^(BOOL finished) { + if (@available(macOS 10.12, iOS 10.0, *)) { + if (_playing) { + // Handled by timeControlStatus + } else { + if (_bufferUnconfirmed && !_player.currentItem.playbackBufferFull) { + // Stay in buffering + } else if (source.playerItem.status == AVPlayerItemStatusReadyToPlay) { + [self leaveBuffering:@"seek to index finished, (!bufferUnconfirmed || playbackBufferFull) && ready to play"]; + [self updatePosition]; + [self broadcastPlaybackEvent]; + } + } + } else { + if (_bufferUnconfirmed && !_player.currentItem.playbackBufferFull) { + // Stay in buffering + } else if (source.playerItem.status == AVPlayerItemStatusReadyToPlay) { + [self leaveBuffering:@"seek to index finished, (!bufferUnconfirmed || playbackBufferFull) && ready to 
play"]; + [self updatePosition]; + [self broadcastPlaybackEvent]; + } + } + if (_playing) { + [_player play]; + } + _seekPos = kCMTimeInvalid; + [self broadcastPlaybackEvent]; + if (completionHandler) { + completionHandler(finished); + } + }]; + } else { + _seekPos = kCMTimeInvalid; + if (_playing) { + [_player play]; + } + } + } + } else { + // Seek within an item + if (_playing) { + [_player pause]; + } + _seekPos = position; + //NSLog(@"seek. enter buffering. pos = %d", (int)(1000*CMTimeGetSeconds(_indexedAudioSources[_index].position))); + // TODO: Move this into a separate method so it can also + // be used in skip. + [self enterBuffering:@"seek"]; + [self updatePosition]; + [self broadcastPlaybackEvent]; + [_indexedAudioSources[_index] seek:position completionHandler:^(BOOL finished) { + [self updatePosition]; + if (_playing) { + // If playing, buffering will be detected either by: + // 1. checkForDiscontinuity + // 2. timeControlStatus + [_player play]; + } else { + // If not playing, there is no reliable way to detect + // when buffering has completed, so we use + // !playbackBufferEmpty. Although this always seems to + // be full even right after a seek. 
+ if (_player.currentItem.playbackBufferEmpty) { + [self enterBuffering:@"seek finished, playbackBufferEmpty"]; + } else { + [self leaveBuffering:@"seek finished, !playbackBufferEmpty"]; + } + [self updatePosition]; + if (_processingState != buffering) { + [self broadcastPlaybackEvent]; + } + } + _seekPos = kCMTimeInvalid; + [self broadcastPlaybackEvent]; + if (completionHandler) { + completionHandler(finished); + } + }]; + } +} + +- (void)dispose { + if (_processingState != none) { + [_player pause]; + _processingState = none; + [self broadcastPlaybackEvent]; + } + if (_timeObserver) { + [_player removeTimeObserver:_timeObserver]; + _timeObserver = 0; + } + if (_indexedAudioSources) { + for (int i = 0; i < [_indexedAudioSources count]; i++) { + [self removeItemObservers:_indexedAudioSources[i].playerItem]; + } + } + if (_player) { + [_player removeObserver:self forKeyPath:@"currentItem"]; + if (@available(macOS 10.12, iOS 10.0, *)) { + [_player removeObserver:self forKeyPath:@"timeControlStatus"]; + } + _player = nil; + } + // Untested: + // [_eventChannel setStreamHandler:nil]; + // [_methodChannel setMethodHandler:nil]; +} + +@end diff --git a/just_audio/darwin/Classes/AudioSource.m b/just_audio/darwin/Classes/AudioSource.m new file mode 100644 index 0000000..81534f1 --- /dev/null +++ b/just_audio/darwin/Classes/AudioSource.m @@ -0,0 +1,37 @@ +#import "AudioSource.h" +#import + +@implementation AudioSource { + NSString *_sourceId; +} + +- (instancetype)initWithId:(NSString *)sid { + self = [super init]; + NSAssert(self, @"super init cannot be nil"); + _sourceId = sid; + return self; +} + +- (NSString *)sourceId { + return _sourceId; +} + +- (int)buildSequence:(NSMutableArray *)sequence treeIndex:(int)treeIndex { + return 0; +} + +- (void)findById:(NSString *)sourceId matches:(NSMutableArray *)matches { + if ([_sourceId isEqualToString:sourceId]) { + [matches addObject:self]; + } +} + +- (NSArray *)getShuffleOrder { + return @[]; +} + +- 
(int)shuffle:(int)treeIndex currentIndex:(int)currentIndex { + return 0; +} + +@end diff --git a/just_audio/darwin/Classes/ClippingAudioSource.m b/just_audio/darwin/Classes/ClippingAudioSource.m new file mode 100644 index 0000000..2f3b174 --- /dev/null +++ b/just_audio/darwin/Classes/ClippingAudioSource.m @@ -0,0 +1,79 @@ +#import "AudioSource.h" +#import "ClippingAudioSource.h" +#import "IndexedPlayerItem.h" +#import "UriAudioSource.h" +#import + +@implementation ClippingAudioSource { + UriAudioSource *_audioSource; + CMTime _start; + CMTime _end; +} + +- (instancetype)initWithId:(NSString *)sid audioSource:(UriAudioSource *)audioSource start:(NSNumber *)start end:(NSNumber *)end { + self = [super initWithId:sid]; + NSAssert(self, @"super init cannot be nil"); + _audioSource = audioSource; + _start = start == [NSNull null] ? kCMTimeZero : CMTimeMake([start intValue], 1000); + _end = end == [NSNull null] ? kCMTimeInvalid : CMTimeMake([end intValue], 1000); + return self; +} + +- (UriAudioSource *)audioSource { + return _audioSource; +} + +- (void)findById:(NSString *)sourceId matches:(NSMutableArray *)matches { + [super findById:sourceId matches:matches]; + [_audioSource findById:sourceId matches:matches]; +} + +- (void)attach:(AVQueuePlayer *)player { + [super attach:player]; + _audioSource.playerItem.forwardPlaybackEndTime = _end; + // XXX: Not needed since currentItem observer handles it? 
+ [self seek:kCMTimeZero]; +} + +- (IndexedPlayerItem *)playerItem { + return _audioSource.playerItem; +} + +- (NSArray *)getShuffleOrder { + return @[@(0)]; +} + +- (void)play:(AVQueuePlayer *)player { +} + +- (void)pause:(AVQueuePlayer *)player { +} + +- (void)stop:(AVQueuePlayer *)player { +} + +- (void)seek:(CMTime)position completionHandler:(void (^)(BOOL))completionHandler { + if (!completionHandler || (self.playerItem.status == AVPlayerItemStatusReadyToPlay)) { + CMTime absPosition = CMTimeAdd(_start, position); + [_audioSource.playerItem seekToTime:absPosition toleranceBefore:kCMTimeZero toleranceAfter:kCMTimeZero completionHandler:completionHandler]; + } +} + +- (CMTime)duration { + return CMTimeSubtract(CMTIME_IS_INVALID(_end) ? self.playerItem.duration : _end, _start); +} + +- (void)setDuration:(CMTime)duration { +} + +- (CMTime)position { + return CMTimeSubtract(self.playerItem.currentTime, _start); +} + +- (CMTime)bufferedPosition { + CMTime pos = CMTimeSubtract(_audioSource.bufferedPosition, _start); + CMTime dur = [self duration]; + return CMTimeCompare(pos, dur) >= 0 ? 
dur : pos; +} + +@end diff --git a/just_audio/darwin/Classes/ConcatenatingAudioSource.m b/just_audio/darwin/Classes/ConcatenatingAudioSource.m new file mode 100644 index 0000000..bd7b713 --- /dev/null +++ b/just_audio/darwin/Classes/ConcatenatingAudioSource.m @@ -0,0 +1,109 @@ +#import "AudioSource.h" +#import "ConcatenatingAudioSource.h" +#import +#import + +@implementation ConcatenatingAudioSource { + NSMutableArray *_audioSources; + NSMutableArray *_shuffleOrder; +} + +- (instancetype)initWithId:(NSString *)sid audioSources:(NSMutableArray *)audioSources { + self = [super initWithId:sid]; + NSAssert(self, @"super init cannot be nil"); + _audioSources = audioSources; + return self; +} + +- (int)count { + return _audioSources.count; +} + +- (void)insertSource:(AudioSource *)audioSource atIndex:(int)index { + [_audioSources insertObject:audioSource atIndex:index]; +} + +- (void)removeSourcesFromIndex:(int)start toIndex:(int)end { + if (end == -1) end = _audioSources.count; + for (int i = start; i < end; i++) { + [_audioSources removeObjectAtIndex:start]; + } +} + +- (void)moveSourceFromIndex:(int)currentIndex toIndex:(int)newIndex { + AudioSource *source = _audioSources[currentIndex]; + [_audioSources removeObjectAtIndex:currentIndex]; + [_audioSources insertObject:source atIndex:newIndex]; +} + +- (int)buildSequence:(NSMutableArray *)sequence treeIndex:(int)treeIndex { + for (int i = 0; i < [_audioSources count]; i++) { + treeIndex = [_audioSources[i] buildSequence:sequence treeIndex:treeIndex]; + } + return treeIndex; +} + +- (void)findById:(NSString *)sourceId matches:(NSMutableArray *)matches { + [super findById:sourceId matches:matches]; + for (int i = 0; i < [_audioSources count]; i++) { + [_audioSources[i] findById:sourceId matches:matches]; + } +} + +- (NSArray *)getShuffleOrder { + NSMutableArray *order = [NSMutableArray new]; + int offset = [order count]; + NSMutableArray *childOrders = [NSMutableArray new]; // array of array of ints + for (int i = 0; i < 
[_audioSources count]; i++) { + AudioSource *audioSource = _audioSources[i]; + NSArray *childShuffleOrder = [audioSource getShuffleOrder]; + NSMutableArray *offsetChildShuffleOrder = [NSMutableArray new]; + for (int j = 0; j < [childShuffleOrder count]; j++) { + [offsetChildShuffleOrder addObject:@([childShuffleOrder[j] integerValue] + offset)]; + } + [childOrders addObject:offsetChildShuffleOrder]; + offset += [childShuffleOrder count]; + } + for (int i = 0; i < [_audioSources count]; i++) { + [order addObjectsFromArray:childOrders[[_shuffleOrder[i] integerValue]]]; + } + return order; +} + +- (int)shuffle:(int)treeIndex currentIndex:(int)currentIndex { + int currentChildIndex = -1; + for (int i = 0; i < [_audioSources count]; i++) { + int indexBefore = treeIndex; + AudioSource *child = _audioSources[i]; + treeIndex = [child shuffle:treeIndex currentIndex:currentIndex]; + if (currentIndex >= indexBefore && currentIndex < treeIndex) { + currentChildIndex = i; + } else {} + } + // Shuffle so that the current child is first in the shuffle order + _shuffleOrder = [NSMutableArray arrayWithCapacity:[_audioSources count]]; + for (int i = 0; i < [_audioSources count]; i++) { + [_shuffleOrder addObject:@(0)]; + } + NSLog(@"shuffle: audioSources.count=%d and shuffleOrder.count=%d", [_audioSources count], [_shuffleOrder count]); + // First generate a random shuffle + for (int i = 0; i < [_audioSources count]; i++) { + int j = arc4random_uniform(i + 1); + _shuffleOrder[i] = _shuffleOrder[j]; + _shuffleOrder[j] = @(i); + } + // Then bring currentIndex to the front + if (currentChildIndex != -1) { + for (int i = 1; i < [_audioSources count]; i++) { + if ([_shuffleOrder[i] integerValue] == currentChildIndex) { + NSNumber *v = _shuffleOrder[0]; + _shuffleOrder[0] = _shuffleOrder[i]; + _shuffleOrder[i] = v; + break; + } + } + } + return treeIndex; +} + +@end diff --git a/just_audio/darwin/Classes/IndexedAudioSource.m b/just_audio/darwin/Classes/IndexedAudioSource.m new file mode 
100644 index 0000000..316f900 --- /dev/null +++ b/just_audio/darwin/Classes/IndexedAudioSource.m @@ -0,0 +1,68 @@ +#import "IndexedAudioSource.h" +#import "IndexedPlayerItem.h" +#import + +@implementation IndexedAudioSource { + BOOL _isAttached; +} + +- (instancetype)initWithId:(NSString *)sid { + self = [super init]; + NSAssert(self, @"super init cannot be nil"); + _isAttached = NO; + return self; +} + +- (IndexedPlayerItem *)playerItem { + return nil; +} + +- (BOOL)isAttached { + return _isAttached; +} + +- (int)buildSequence:(NSMutableArray *)sequence treeIndex:(int)treeIndex { + [sequence addObject:self]; + return treeIndex + 1; +} + +- (int)shuffle:(int)treeIndex currentIndex:(int)currentIndex { + return treeIndex + 1; +} + +- (void)attach:(AVQueuePlayer *)player { + _isAttached = YES; +} + +- (void)play:(AVQueuePlayer *)player { +} + +- (void)pause:(AVQueuePlayer *)player { +} + +- (void)stop:(AVQueuePlayer *)player { +} + +- (void)seek:(CMTime)position { + [self seek:position completionHandler:nil]; +} + +- (void)seek:(CMTime)position completionHandler:(void (^)(BOOL))completionHandler { +} + +- (CMTime)duration { + return kCMTimeInvalid; +} + +- (void)setDuration:(CMTime)duration { +} + +- (CMTime)position { + return kCMTimeInvalid; +} + +- (CMTime)bufferedPosition { + return kCMTimeInvalid; +} + +@end diff --git a/just_audio/darwin/Classes/IndexedPlayerItem.m b/just_audio/darwin/Classes/IndexedPlayerItem.m new file mode 100644 index 0000000..87fafe0 --- /dev/null +++ b/just_audio/darwin/Classes/IndexedPlayerItem.m @@ -0,0 +1,16 @@ +#import "IndexedPlayerItem.h" +#import "IndexedAudioSource.h" + +@implementation IndexedPlayerItem { + IndexedAudioSource *_audioSource; +} + +-(void)setAudioSource:(IndexedAudioSource *)audioSource { + _audioSource = audioSource; +} + +-(IndexedAudioSource *)audioSource { + return _audioSource; +} + +@end diff --git a/just_audio/darwin/Classes/JustAudioPlugin.m b/just_audio/darwin/Classes/JustAudioPlugin.m new file mode 100644 
index 0000000..982a260 --- /dev/null +++ b/just_audio/darwin/Classes/JustAudioPlugin.m @@ -0,0 +1,55 @@ +#import "JustAudioPlugin.h" +#import "AudioPlayer.h" +#import +#include + +@implementation JustAudioPlugin { + NSObject* _registrar; + BOOL _configuredSession; +} + ++ (void)registerWithRegistrar:(NSObject*)registrar { + FlutterMethodChannel* channel = [FlutterMethodChannel + methodChannelWithName:@"com.ryanheise.just_audio.methods" + binaryMessenger:[registrar messenger]]; + JustAudioPlugin* instance = [[JustAudioPlugin alloc] initWithRegistrar:registrar]; + [registrar addMethodCallDelegate:instance channel:channel]; +} + +- (instancetype)initWithRegistrar:(NSObject *)registrar { + self = [super init]; + NSAssert(self, @"super init cannot be nil"); + _registrar = registrar; + return self; +} + +- (void)handleMethodCall:(FlutterMethodCall*)call result:(FlutterResult)result { + if ([@"init" isEqualToString:call.method]) { + NSArray* args = (NSArray*)call.arguments; + NSString* playerId = args[0]; + /*AudioPlayer* player =*/ [[AudioPlayer alloc] initWithRegistrar:_registrar playerId:playerId configuredSession:_configuredSession]; + result(nil); + } else if ([@"setIosCategory" isEqualToString:call.method]) { +#if TARGET_OS_IPHONE + NSNumber* categoryIndex = (NSNumber*)call.arguments; + AVAudioSessionCategory category = nil; + switch (categoryIndex.integerValue) { + case 0: category = AVAudioSessionCategoryAmbient; break; + case 1: category = AVAudioSessionCategorySoloAmbient; break; + case 2: category = AVAudioSessionCategoryPlayback; break; + case 3: category = AVAudioSessionCategoryRecord; break; + case 4: category = AVAudioSessionCategoryPlayAndRecord; break; + case 5: category = AVAudioSessionCategoryMultiRoute; break; + } + if (category) { + _configuredSession = YES; + } + [[AVAudioSession sharedInstance] setCategory:category error:nil]; +#endif + result(nil); + } else { + result(FlutterMethodNotImplemented); + } +} + +@end diff --git 
a/just_audio/darwin/Classes/LoopingAudioSource.m b/just_audio/darwin/Classes/LoopingAudioSource.m new file mode 100644 index 0000000..ba4b52b --- /dev/null +++ b/just_audio/darwin/Classes/LoopingAudioSource.m @@ -0,0 +1,53 @@ +#import "AudioSource.h" +#import "LoopingAudioSource.h" +#import + +@implementation LoopingAudioSource { + // An array of duplicates + NSArray *_audioSources; // +} + +- (instancetype)initWithId:(NSString *)sid audioSources:(NSArray *)audioSources { + self = [super initWithId:sid]; + NSAssert(self, @"super init cannot be nil"); + _audioSources = audioSources; + return self; +} + +- (int)buildSequence:(NSMutableArray *)sequence treeIndex:(int)treeIndex { + for (int i = 0; i < [_audioSources count]; i++) { + treeIndex = [_audioSources[i] buildSequence:sequence treeIndex:treeIndex]; + } + return treeIndex; +} + +- (void)findById:(NSString *)sourceId matches:(NSMutableArray *)matches { + [super findById:sourceId matches:matches]; + for (int i = 0; i < [_audioSources count]; i++) { + [_audioSources[i] findById:sourceId matches:matches]; + } +} + +- (NSArray *)getShuffleOrder { + NSMutableArray *order = [NSMutableArray new]; + int offset = (int)[order count]; + for (int i = 0; i < [_audioSources count]; i++) { + AudioSource *audioSource = _audioSources[i]; + NSArray *childShuffleOrder = [audioSource getShuffleOrder]; + for (int j = 0; j < [childShuffleOrder count]; j++) { + [order addObject:@([childShuffleOrder[j] integerValue] + offset)]; + } + offset += [childShuffleOrder count]; + } + return order; +} + +- (int)shuffle:(int)treeIndex currentIndex:(int)currentIndex { + // TODO: This should probably shuffle the same way on all duplicates. 
+ for (int i = 0; i < [_audioSources count]; i++) { + treeIndex = [_audioSources[i] shuffle:treeIndex currentIndex:currentIndex]; + } + return treeIndex; +} + +@end diff --git a/just_audio/darwin/Classes/UriAudioSource.m b/just_audio/darwin/Classes/UriAudioSource.m new file mode 100644 index 0000000..91321d4 --- /dev/null +++ b/just_audio/darwin/Classes/UriAudioSource.m @@ -0,0 +1,79 @@ +#import "UriAudioSource.h" +#import "IndexedAudioSource.h" +#import "IndexedPlayerItem.h" +#import + +@implementation UriAudioSource { + NSString *_uri; + IndexedPlayerItem *_playerItem; + /* CMTime _duration; */ +} + +- (instancetype)initWithId:(NSString *)sid uri:(NSString *)uri { + self = [super initWithId:sid]; + NSAssert(self, @"super init cannot be nil"); + _uri = uri; + if ([_uri hasPrefix:@"file://"]) { + _playerItem = [[IndexedPlayerItem alloc] initWithURL:[NSURL fileURLWithPath:[_uri substringFromIndex:7]]]; + } else { + _playerItem = [[IndexedPlayerItem alloc] initWithURL:[NSURL URLWithString:_uri]]; + } + if (@available(macOS 10.13, iOS 11.0, *)) { + // This does the best at reducing distortion on voice with speeds below 1.0 + _playerItem.audioTimePitchAlgorithm = AVAudioTimePitchAlgorithmTimeDomain; + } + /* NSKeyValueObservingOptions options = */ + /* NSKeyValueObservingOptionOld | NSKeyValueObservingOptionNew; */ + /* [_playerItem addObserver:self */ + /* forKeyPath:@"duration" */ + /* options:options */ + /* context:nil]; */ + return self; +} + +- (IndexedPlayerItem *)playerItem { + return _playerItem; +} + +- (NSArray *)getShuffleOrder { + return @[@(0)]; +} + +- (void)play:(AVQueuePlayer *)player { +} + +- (void)pause:(AVQueuePlayer *)player { +} + +- (void)stop:(AVQueuePlayer *)player { +} + +- (void)seek:(CMTime)position completionHandler:(void (^)(BOOL))completionHandler { + if (!completionHandler || (_playerItem.status == AVPlayerItemStatusReadyToPlay)) { + [_playerItem seekToTime:position toleranceBefore:kCMTimeZero toleranceAfter:kCMTimeZero 
completionHandler:completionHandler]; + } +} + +- (CMTime)duration { + return _playerItem.duration; +} + +- (void)setDuration:(CMTime)duration { +} + +- (CMTime)position { + return _playerItem.currentTime; +} + +- (CMTime)bufferedPosition { + NSValue *last = _playerItem.loadedTimeRanges.lastObject; + if (last) { + CMTimeRange timeRange = [last CMTimeRangeValue]; + return CMTimeAdd(timeRange.start, timeRange.duration); + } else { + return _playerItem.currentTime; + } + return kCMTimeInvalid; +} + +@end diff --git a/just_audio/ios/.gitignore b/just_audio/ios/.gitignore new file mode 100644 index 0000000..aa479fd --- /dev/null +++ b/just_audio/ios/.gitignore @@ -0,0 +1,37 @@ +.idea/ +.vagrant/ +.sconsign.dblite +.svn/ + +.DS_Store +*.swp +profile + +DerivedData/ +build/ +GeneratedPluginRegistrant.h +GeneratedPluginRegistrant.m + +.generated/ + +*.pbxuser +*.mode1v3 +*.mode2v3 +*.perspectivev3 + +!default.pbxuser +!default.mode1v3 +!default.mode2v3 +!default.perspectivev3 + +xcuserdata + +*.moved-aside + +*.pyc +*sync/ +Icon? 
+.tags* + +/Flutter/Generated.xcconfig +/Flutter/flutter_export_environment.sh \ No newline at end of file diff --git a/just_audio/ios/Assets/.gitkeep b/just_audio/ios/Assets/.gitkeep new file mode 100644 index 0000000..e69de29 diff --git a/just_audio/ios/Classes/AudioPlayer.h b/just_audio/ios/Classes/AudioPlayer.h new file mode 100644 index 0000000..1a985f3 --- /dev/null +++ b/just_audio/ios/Classes/AudioPlayer.h @@ -0,0 +1,21 @@ +#import + +@interface AudioPlayer : NSObject + +- (instancetype)initWithRegistrar:(NSObject *)registrar playerId:(NSString*)idParam configuredSession:(BOOL)configuredSession; + +@end + +enum ProcessingState { + none, + loading, + buffering, + ready, + completed +}; + +enum LoopMode { + loopOff, + loopOne, + loopAll +}; diff --git a/just_audio/ios/Classes/AudioPlayer.m b/just_audio/ios/Classes/AudioPlayer.m new file mode 100644 index 0000000..ccbfdea --- /dev/null +++ b/just_audio/ios/Classes/AudioPlayer.m @@ -0,0 +1,1138 @@ +#import "AudioPlayer.h" +#import "AudioSource.h" +#import "IndexedAudioSource.h" +#import "UriAudioSource.h" +#import "ConcatenatingAudioSource.h" +#import "LoopingAudioSource.h" +#import "ClippingAudioSource.h" +#import +#import +#include + +// TODO: Check for and report invalid state transitions. 
+// TODO: Apply Apple's guidance on seeking: https://developer.apple.com/library/archive/qa/qa1820/_index.html +@implementation AudioPlayer { + NSObject* _registrar; + FlutterMethodChannel *_methodChannel; + FlutterEventChannel *_eventChannel; + FlutterEventSink _eventSink; + NSString *_playerId; + AVQueuePlayer *_player; + AudioSource *_audioSource; + NSMutableArray *_indexedAudioSources; + NSMutableArray *_order; + NSMutableArray *_orderInv; + int _index; + enum ProcessingState _processingState; + enum LoopMode _loopMode; + BOOL _shuffleModeEnabled; + long long _updateTime; + int _updatePosition; + int _lastPosition; + int _bufferedPosition; + // Set when the current item hasn't been played yet so we aren't sure whether sufficient audio has been buffered. + BOOL _bufferUnconfirmed; + CMTime _seekPos; + FlutterResult _loadResult; + FlutterResult _playResult; + id _timeObserver; + BOOL _automaticallyWaitsToMinimizeStalling; + BOOL _configuredSession; + BOOL _playing; +} + +- (instancetype)initWithRegistrar:(NSObject *)registrar playerId:(NSString*)idParam configuredSession:(BOOL)configuredSession { + self = [super init]; + NSAssert(self, @"super init cannot be nil"); + _registrar = registrar; + _playerId = idParam; + _configuredSession = configuredSession; + _methodChannel = + [FlutterMethodChannel methodChannelWithName:[NSMutableString stringWithFormat:@"com.ryanheise.just_audio.methods.%@", _playerId] + binaryMessenger:[registrar messenger]]; + _eventChannel = + [FlutterEventChannel eventChannelWithName:[NSMutableString stringWithFormat:@"com.ryanheise.just_audio.events.%@", _playerId] + binaryMessenger:[registrar messenger]]; + [_eventChannel setStreamHandler:self]; + _index = 0; + _processingState = none; + _loopMode = loopOff; + _shuffleModeEnabled = NO; + _player = nil; + _audioSource = nil; + _indexedAudioSources = nil; + _order = nil; + _orderInv = nil; + _seekPos = kCMTimeInvalid; + _timeObserver = 0; + _updatePosition = 0; + _updateTime = 0; + 
_lastPosition = 0; + _bufferedPosition = 0; + _bufferUnconfirmed = NO; + _playing = NO; + _loadResult = nil; + _playResult = nil; + _automaticallyWaitsToMinimizeStalling = YES; + __weak __typeof__(self) weakSelf = self; + [_methodChannel setMethodCallHandler:^(FlutterMethodCall* call, FlutterResult result) { + [weakSelf handleMethodCall:call result:result]; + }]; + return self; +} + +- (void)handleMethodCall:(FlutterMethodCall*)call result:(FlutterResult)result { + NSArray* args = (NSArray*)call.arguments; + if ([@"load" isEqualToString:call.method]) { + [self load:args[0] result:result]; + } else if ([@"play" isEqualToString:call.method]) { + [self play:result]; + } else if ([@"pause" isEqualToString:call.method]) { + [self pause]; + result(nil); + } else if ([@"setVolume" isEqualToString:call.method]) { + [self setVolume:(float)[args[0] doubleValue]]; + result(nil); + } else if ([@"setSpeed" isEqualToString:call.method]) { + [self setSpeed:(float)[args[0] doubleValue]]; + result(nil); + } else if ([@"setLoopMode" isEqualToString:call.method]) { + [self setLoopMode:[args[0] intValue]]; + result(nil); + } else if ([@"setShuffleModeEnabled" isEqualToString:call.method]) { + [self setShuffleModeEnabled:(BOOL)[args[0] boolValue]]; + result(nil); + } else if ([@"setAutomaticallyWaitsToMinimizeStalling" isEqualToString:call.method]) { + [self setAutomaticallyWaitsToMinimizeStalling:(BOOL)[args[0] boolValue]]; + result(nil); + } else if ([@"seek" isEqualToString:call.method]) { + CMTime position = args[0] == [NSNull null] ? 
kCMTimePositiveInfinity : CMTimeMake([args[0] intValue], 1000); + [self seek:position index:args[1] completionHandler:^(BOOL finished) { + result(nil); + }]; + result(nil); + } else if ([@"dispose" isEqualToString:call.method]) { + [self dispose]; + result(nil); + } else if ([@"concatenating.add" isEqualToString:call.method]) { + [self concatenatingAdd:(NSString*)args[0] source:(NSDictionary*)args[1]]; + result(nil); + } else if ([@"concatenating.insert" isEqualToString:call.method]) { + [self concatenatingInsert:(NSString*)args[0] index:[args[1] intValue] source:(NSDictionary*)args[2]]; + result(nil); + } else if ([@"concatenating.addAll" isEqualToString:call.method]) { + [self concatenatingAddAll:(NSString*)args[0] sources:(NSArray*)args[1]]; + result(nil); + } else if ([@"concatenating.insertAll" isEqualToString:call.method]) { + [self concatenatingInsertAll:(NSString*)args[0] index:[args[1] intValue] sources:(NSArray*)args[2]]; + result(nil); + } else if ([@"concatenating.removeAt" isEqualToString:call.method]) { + [self concatenatingRemoveAt:(NSString*)args[0] index:(int)args[1]]; + result(nil); + } else if ([@"concatenating.removeRange" isEqualToString:call.method]) { + [self concatenatingRemoveRange:(NSString*)args[0] start:[args[1] intValue] end:[args[2] intValue]]; + result(nil); + } else if ([@"concatenating.move" isEqualToString:call.method]) { + [self concatenatingMove:(NSString*)args[0] currentIndex:[args[1] intValue] newIndex:[args[2] intValue]]; + result(nil); + } else if ([@"concatenating.clear" isEqualToString:call.method]) { + [self concatenatingClear:(NSString*)args[0]]; + result(nil); + } else { + result(FlutterMethodNotImplemented); + } +} + +// Untested +- (void)concatenatingAdd:(NSString *)catId source:(NSDictionary *)source { + [self concatenatingInsertAll:catId index:-1 sources:@[source]]; +} + +// Untested +- (void)concatenatingInsert:(NSString *)catId index:(int)index source:(NSDictionary *)source { + [self concatenatingInsertAll:catId 
index:index sources:@[source]]; +} + +// Untested +- (void)concatenatingAddAll:(NSString *)catId sources:(NSArray *)sources { + [self concatenatingInsertAll:catId index:-1 sources:sources]; +} + +// Untested +- (void)concatenatingInsertAll:(NSString *)catId index:(int)index sources:(NSArray *)sources { + // Find all duplicates of the identified ConcatenatingAudioSource. + NSMutableArray *matches = [[NSMutableArray alloc] init]; + [_audioSource findById:catId matches:matches]; + // Add each new source to each match. + for (int i = 0; i < matches.count; i++) { + ConcatenatingAudioSource *catSource = (ConcatenatingAudioSource *)matches[i]; + int idx = index >= 0 ? index : catSource.count; + NSMutableArray *audioSources = [self decodeAudioSources:sources]; + for (int j = 0; j < audioSources.count; j++) { + AudioSource *audioSource = audioSources[j]; + [catSource insertSource:audioSource atIndex:(idx + j)]; + } + } + // Index the new audio sources. + _indexedAudioSources = [[NSMutableArray alloc] init]; + [_audioSource buildSequence:_indexedAudioSources treeIndex:0]; + for (int i = 0; i < [_indexedAudioSources count]; i++) { + IndexedAudioSource *audioSource = _indexedAudioSources[i]; + if (!audioSource.isAttached) { + audioSource.playerItem.audioSource = audioSource; + [self addItemObservers:audioSource.playerItem]; + } + } + [self updateOrder]; + if (_player.currentItem) { + _index = [self indexForItem:_player.currentItem]; + } else { + _index = 0; + } + [self enqueueFrom:_index]; + // Notify each new IndexedAudioSource that it's been attached to the player. 
+ for (int i = 0; i < [_indexedAudioSources count]; i++) { + if (!_indexedAudioSources[i].isAttached) { + [_indexedAudioSources[i] attach:_player]; + } + } + [self broadcastPlaybackEvent]; +} + +// Untested +- (void)concatenatingRemoveAt:(NSString *)catId index:(int)index { + [self concatenatingRemoveRange:catId start:index end:(index + 1)]; +} + +// Untested +- (void)concatenatingRemoveRange:(NSString *)catId start:(int)start end:(int)end { + // Find all duplicates of the identified ConcatenatingAudioSource. + NSMutableArray *matches = [[NSMutableArray alloc] init]; + [_audioSource findById:catId matches:matches]; + // Remove range from each match. + for (int i = 0; i < matches.count; i++) { + ConcatenatingAudioSource *catSource = (ConcatenatingAudioSource *)matches[i]; + int endIndex = end >= 0 ? end : catSource.count; + [catSource removeSourcesFromIndex:start toIndex:endIndex]; + } + // Re-index the remaining audio sources. + NSArray *oldIndexedAudioSources = _indexedAudioSources; + _indexedAudioSources = [[NSMutableArray alloc] init]; + [_audioSource buildSequence:_indexedAudioSources treeIndex:0]; + for (int i = 0, j = 0; i < _indexedAudioSources.count; i++, j++) { + IndexedAudioSource *audioSource = _indexedAudioSources[i]; + while (audioSource != oldIndexedAudioSources[j]) { + [self removeItemObservers:oldIndexedAudioSources[j].playerItem]; + if (j < _index) { + _index--; + } else if (j == _index) { + // The currently playing item was removed. + } + j++; + } + } + [self updateOrder]; + if (_index >= _indexedAudioSources.count) _index = _indexedAudioSources.count - 1; + if (_index < 0) _index = 0; + [self enqueueFrom:_index]; + [self broadcastPlaybackEvent]; +} + +// Untested +- (void)concatenatingMove:(NSString *)catId currentIndex:(int)currentIndex newIndex:(int)newIndex { + // Find all duplicates of the identified ConcatenatingAudioSource. 
+ NSMutableArray *matches = [[NSMutableArray alloc] init]; + [_audioSource findById:catId matches:matches]; + // Move range within each match. + for (int i = 0; i < matches.count; i++) { + ConcatenatingAudioSource *catSource = (ConcatenatingAudioSource *)matches[i]; + [catSource moveSourceFromIndex:currentIndex toIndex:newIndex]; + } + // Re-index the audio sources. + _indexedAudioSources = [[NSMutableArray alloc] init]; + [_audioSource buildSequence:_indexedAudioSources treeIndex:0]; + _index = [self indexForItem:_player.currentItem]; + [self broadcastPlaybackEvent]; +} + +// Untested +- (void)concatenatingClear:(NSString *)catId { + [self concatenatingRemoveRange:catId start:0 end:-1]; +} + +- (FlutterError*)onListenWithArguments:(id)arguments eventSink:(FlutterEventSink)eventSink { + _eventSink = eventSink; + return nil; +} + +- (FlutterError*)onCancelWithArguments:(id)arguments { + _eventSink = nil; + return nil; +} + +- (void)checkForDiscontinuity { + if (!_eventSink) return; + if (!_playing || CMTIME_IS_VALID(_seekPos) || _processingState == completed) return; + int position = [self getCurrentPosition]; + if (_processingState == buffering) { + if (position > _lastPosition) { + [self leaveBuffering:@"stall ended"]; + [self updatePosition]; + [self broadcastPlaybackEvent]; + } + } else { + long long now = (long long)([[NSDate date] timeIntervalSince1970] * 1000.0); + long long timeSinceLastUpdate = now - _updateTime; + long long expectedPosition = _updatePosition + (long long)(timeSinceLastUpdate * _player.rate); + long long drift = position - expectedPosition; + //NSLog(@"position: %d, drift: %lld", position, drift); + // Update if we've drifted or just started observing + if (_updateTime == 0L) { + [self broadcastPlaybackEvent]; + } else if (drift < -100) { + [self enterBuffering:@"stalling"]; + NSLog(@"Drift: %lld", drift); + [self updatePosition]; + [self broadcastPlaybackEvent]; + } + } + _lastPosition = position; +} + +- (void)enterBuffering:(NSString 
*)reason { + NSLog(@"ENTER BUFFERING: %@", reason); + _processingState = buffering; +} + +- (void)leaveBuffering:(NSString *)reason { + NSLog(@"LEAVE BUFFERING: %@", reason); + _processingState = ready; +} + +- (void)broadcastPlaybackEvent { + if (!_eventSink) return; + _eventSink(@{ + @"processingState": @(_processingState), + @"updatePosition": @(_updatePosition), + @"updateTime": @(_updateTime), + // TODO: buffer position + @"bufferedPosition": @(_updatePosition), + // TODO: Icy Metadata + @"icyMetadata": [NSNull null], + @"duration": @([self getDuration]), + @"currentIndex": @(_index), + }); +} + +- (int)getCurrentPosition { + if (_processingState == none || _processingState == loading) { + return 0; + } else if (CMTIME_IS_VALID(_seekPos)) { + return (int)(1000 * CMTimeGetSeconds(_seekPos)); + } else if (_indexedAudioSources) { + int ms = (int)(1000 * CMTimeGetSeconds(_indexedAudioSources[_index].position)); + if (ms < 0) ms = 0; + return ms; + } else { + return 0; + } +} + +- (int)getBufferedPosition { + if (_processingState == none || _processingState == loading) { + return 0; + } else if (_indexedAudioSources) { + int ms = (int)(1000 * CMTimeGetSeconds(_indexedAudioSources[_index].bufferedPosition)); + if (ms < 0) ms = 0; + return ms; + } else { + return 0; + } +} + +- (int)getDuration { + if (_processingState == none) { + return -1; + } else if (_indexedAudioSources) { + int v = (int)(1000 * CMTimeGetSeconds(_indexedAudioSources[_index].duration)); + return v; + } else { + return 0; + } +} + +- (void)removeItemObservers:(AVPlayerItem *)playerItem { + [playerItem removeObserver:self forKeyPath:@"status"]; + [playerItem removeObserver:self forKeyPath:@"playbackBufferEmpty"]; + [playerItem removeObserver:self forKeyPath:@"playbackBufferFull"]; + //[playerItem removeObserver:self forKeyPath:@"playbackLikelyToKeepUp"]; + [[NSNotificationCenter defaultCenter] removeObserver:self name:AVPlayerItemDidPlayToEndTimeNotification object:playerItem]; + 
[[NSNotificationCenter defaultCenter] removeObserver:self name:AVPlayerItemFailedToPlayToEndTimeNotification object:playerItem]; + [[NSNotificationCenter defaultCenter] removeObserver:self name:AVPlayerItemPlaybackStalledNotification object:playerItem]; +} + +- (void)addItemObservers:(AVPlayerItem *)playerItem { + // Get notified when the item is loaded or had an error loading + [playerItem addObserver:self forKeyPath:@"status" options:NSKeyValueObservingOptionNew context:nil]; + // Get notified of the buffer state + [playerItem addObserver:self forKeyPath:@"playbackBufferEmpty" options:NSKeyValueObservingOptionNew context:nil]; + [playerItem addObserver:self forKeyPath:@"playbackBufferFull" options:NSKeyValueObservingOptionNew context:nil]; + [playerItem addObserver:self forKeyPath:@"loadedTimeRanges" options:NSKeyValueObservingOptionNew context:nil]; + //[playerItem addObserver:self forKeyPath:@"playbackLikelyToKeepUp" options:NSKeyValueObservingOptionNew context:nil]; + // Get notified when playback has reached the end + [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(onComplete:) name:AVPlayerItemDidPlayToEndTimeNotification object:playerItem]; + // Get notified when playback stops due to a failure (currently unused) + [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(onFailToComplete:) name:AVPlayerItemFailedToPlayToEndTimeNotification object:playerItem]; + // Get notified when playback stalls (currently unused) + [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(onItemStalled:) name:AVPlayerItemPlaybackStalledNotification object:playerItem]; +} + +- (NSMutableArray *)decodeAudioSources:(NSArray *)data { + NSMutableArray *array = [[NSMutableArray alloc] init]; + for (int i = 0; i < [data count]; i++) { + AudioSource *source = [self decodeAudioSource:data[i]]; + [array addObject:source]; + } + return array; +} + +- (AudioSource *)decodeAudioSource:(NSDictionary *)data { + NSString *type = 
data[@"type"]; + if ([@"progressive" isEqualToString:type]) { + return [[UriAudioSource alloc] initWithId:data[@"id"] uri:data[@"uri"]]; + } else if ([@"dash" isEqualToString:type]) { + return [[UriAudioSource alloc] initWithId:data[@"id"] uri:data[@"uri"]]; + } else if ([@"hls" isEqualToString:type]) { + return [[UriAudioSource alloc] initWithId:data[@"id"] uri:data[@"uri"]]; + } else if ([@"concatenating" isEqualToString:type]) { + return [[ConcatenatingAudioSource alloc] initWithId:data[@"id"] + audioSources:[self decodeAudioSources:data[@"audioSources"]]]; + } else if ([@"clipping" isEqualToString:type]) { + return [[ClippingAudioSource alloc] initWithId:data[@"id"] + audioSource:[self decodeAudioSource:data[@"audioSource"]] + start:data[@"start"] + end:data[@"end"]]; + } else if ([@"looping" isEqualToString:type]) { + NSMutableArray *childSources = [NSMutableArray new]; + int count = [data[@"count"] intValue]; + for (int i = 0; i < count; i++) { + [childSources addObject:[self decodeAudioSource:data[@"audioSource"]]]; + } + return [[LoopingAudioSource alloc] initWithId:data[@"id"] audioSources:childSources]; + } else { + return nil; + } +} + +- (void)enqueueFrom:(int)index { + int oldIndex = _index; + _index = index; + + // Update the queue while keeping the currently playing item untouched. + + /* NSLog(@"before reorder: _player.items.count: ", _player.items.count); */ + /* [self dumpQueue]; */ + + // First, remove all _player items except for the currently playing one (if any). + IndexedPlayerItem *oldItem = _player.currentItem; + IndexedPlayerItem *existingItem = nil; + NSArray *oldPlayerItems = [NSArray arrayWithArray:_player.items]; + // In the first pass, preserve the old and new items. + for (int i = 0; i < oldPlayerItems.count; i++) { + if (oldPlayerItems[i] == _indexedAudioSources[_index].playerItem) { + // Preserve and tag new item if it is already in the queue. 
+ existingItem = oldPlayerItems[i]; + } else if (oldPlayerItems[i] == oldItem) { + // Temporarily preserve old item, just to avoid jumping to + // intermediate queue positions unnecessarily. We only want to jump + // once to _index. + } else { + [_player removeItem:oldPlayerItems[i]]; + } + } + // In the second pass, remove the old item (if different from new item). + if (_index != oldIndex) { + [_player removeItem:oldItem]; + } + + /* NSLog(@"inter order: _player.items.count: ", _player.items.count); */ + /* [self dumpQueue]; */ + + // Regenerate queue + BOOL include = NO; + for (int i = 0; i < [_order count]; i++) { + int si = [_order[i] intValue]; + if (si == _index) include = YES; + if (include && _indexedAudioSources[si].playerItem != existingItem) { + [_player insertItem:_indexedAudioSources[si].playerItem afterItem:nil]; + } + } + + /* NSLog(@"after reorder: _player.items.count: ", _player.items.count); */ + /* [self dumpQueue]; */ + + if (_processingState != loading && oldItem != _indexedAudioSources[_index].playerItem) { + // || !_player.currentItem.playbackLikelyToKeepUp; + if (_player.currentItem.playbackBufferEmpty) { + [self enterBuffering:@"enqueueFrom playbackBufferEmpty"]; + } else { + [self leaveBuffering:@"enqueueFrom !playbackBufferEmpty"]; + } + [self updatePosition]; + } +} + +- (void)updatePosition { + _updatePosition = [self getCurrentPosition]; + _updateTime = (long long)([[NSDate date] timeIntervalSince1970] * 1000.0); +} + +- (void)load:(NSDictionary *)source result:(FlutterResult)result { + if (!_playing) { + [_player pause]; + } + if (_processingState == loading) { + [self abortExistingConnection]; + } + _loadResult = result; + _index = 0; + [self updatePosition]; + _processingState = loading; + [self broadcastPlaybackEvent]; + // Remove previous observers + if (_indexedAudioSources) { + for (int i = 0; i < [_indexedAudioSources count]; i++) { + [self removeItemObservers:_indexedAudioSources[i].playerItem]; + } + } + // Decode audio 
source + if (_audioSource && [@"clipping" isEqualToString:source[@"type"]]) { + // Check if we're clipping an audio source that was previously loaded. + UriAudioSource *child = nil; + if ([_audioSource isKindOfClass:[ClippingAudioSource class]]) { + ClippingAudioSource *clipper = (ClippingAudioSource *)_audioSource; + child = clipper.audioSource; + } else if ([_audioSource isKindOfClass:[UriAudioSource class]]) { + child = (UriAudioSource *)_audioSource; + } + if (child) { + _audioSource = [[ClippingAudioSource alloc] initWithId:source[@"id"] + audioSource:child + start:source[@"start"] + end:source[@"end"]]; + } else { + _audioSource = [self decodeAudioSource:source]; + } + } else { + _audioSource = [self decodeAudioSource:source]; + } + _indexedAudioSources = [[NSMutableArray alloc] init]; + [_audioSource buildSequence:_indexedAudioSources treeIndex:0]; + for (int i = 0; i < [_indexedAudioSources count]; i++) { + IndexedAudioSource *source = _indexedAudioSources[i]; + [self addItemObservers:source.playerItem]; + source.playerItem.audioSource = source; + } + [self updateOrder]; + // Set up an empty player + if (!_player) { + _player = [[AVQueuePlayer alloc] initWithItems:@[]]; + if (@available(macOS 10.12, iOS 10.0, *)) { + _player.automaticallyWaitsToMinimizeStalling = _automaticallyWaitsToMinimizeStalling; + // TODO: Remove these observers in dispose. + [_player addObserver:self + forKeyPath:@"timeControlStatus" + options:NSKeyValueObservingOptionNew + context:nil]; + } + [_player addObserver:self + forKeyPath:@"currentItem" + options:NSKeyValueObservingOptionNew + context:nil]; + // TODO: learn about the different ways to define weakSelf. 
+ //__weak __typeof__(self) weakSelf = self; + //typeof(self) __weak weakSelf = self; + __unsafe_unretained typeof(self) weakSelf = self; + if (@available(macOS 10.12, iOS 10.0, *)) {} + else { + _timeObserver = [_player addPeriodicTimeObserverForInterval:CMTimeMake(200, 1000) + queue:nil + usingBlock:^(CMTime time) { + [weakSelf checkForDiscontinuity]; + } + ]; + } + } + // Initialise the AVQueuePlayer with items. + [self enqueueFrom:0]; + // Notify each IndexedAudioSource that it's been attached to the player. + for (int i = 0; i < [_indexedAudioSources count]; i++) { + [_indexedAudioSources[i] attach:_player]; + } + + if (_player.currentItem.status == AVPlayerItemStatusReadyToPlay) { + _loadResult(@([self getDuration])); + _loadResult = nil; + } else { + // We send result after the playerItem is ready in observeValueForKeyPath. + } + [self broadcastPlaybackEvent]; +} + +- (void)updateOrder { + if (_shuffleModeEnabled) { + [_audioSource shuffle:0 currentIndex: _index]; + } + _orderInv = [NSMutableArray arrayWithCapacity:[_indexedAudioSources count]]; + for (int i = 0; i < [_indexedAudioSources count]; i++) { + [_orderInv addObject:@(0)]; + } + if (_shuffleModeEnabled) { + _order = [_audioSource getShuffleOrder]; + } else { + NSMutableArray *order = [[NSMutableArray alloc] init]; + for (int i = 0; i < [_indexedAudioSources count]; i++) { + [order addObject:@(i)]; + } + _order = order; + } + for (int i = 0; i < [_indexedAudioSources count]; i++) { + _orderInv[[_order[i] intValue]] = @(i); + } +} + +- (void)onItemStalled:(NSNotification *)notification { + IndexedPlayerItem *playerItem = (IndexedPlayerItem *)notification.object; + NSLog(@"onItemStalled"); +} + +- (void)onFailToComplete:(NSNotification *)notification { + IndexedPlayerItem *playerItem = (IndexedPlayerItem *)notification.object; + NSLog(@"onFailToComplete"); +} + +- (void)onComplete:(NSNotification *)notification { + NSLog(@"onComplete"); + if (_loopMode == loopOne) { + [self seek:kCMTimeZero 
index:@(_index) completionHandler:^(BOOL finished) { + // XXX: Not necessary? + [self play]; + }]; + } else { + IndexedPlayerItem *endedPlayerItem = (IndexedPlayerItem *)notification.object; + IndexedAudioSource *endedSource = endedPlayerItem.audioSource; + // When an item ends, seek back to its beginning. + [endedSource seek:kCMTimeZero]; + + if ([_orderInv[_index] intValue] + 1 < [_order count]) { + // account for automatic move to next item + _index = [_order[[_orderInv[_index] intValue] + 1] intValue]; + NSLog(@"advance to next: index = %d", _index); + [self broadcastPlaybackEvent]; + } else { + // reached end of playlist + if (_loopMode == loopAll) { + NSLog(@"Loop back to first item"); + // Loop back to the beginning + // TODO: Currently there will be a gap at the loop point. + // Maybe we can do something clever by temporarily adding the + // first playlist item at the end of the queue, although this + // will affect any code that assumes the queue always + // corresponds to a contiguous region of the indexed audio + // sources. + // For now we just do a seek back to the start. + if ([_order count] == 1) { + [self seek:kCMTimeZero index:[NSNull null] completionHandler:^(BOOL finished) { + // XXX: Necessary? + [self play]; + }]; + } else { + [self seek:kCMTimeZero index:_order[0] completionHandler:^(BOOL finished) { + // XXX: Necessary? 
+ [self play]; + }]; + } + } else { + [self complete]; + } + } + } +} + +- (void)observeValueForKeyPath:(NSString *)keyPath + ofObject:(id)object + change:(NSDictionary *)change + context:(void *)context { + + if ([keyPath isEqualToString:@"status"]) { + IndexedPlayerItem *playerItem = (IndexedPlayerItem *)object; + AVPlayerItemStatus status = AVPlayerItemStatusUnknown; + NSNumber *statusNumber = change[NSKeyValueChangeNewKey]; + if ([statusNumber isKindOfClass:[NSNumber class]]) { + status = statusNumber.intValue; + } + switch (status) { + case AVPlayerItemStatusReadyToPlay: { + if (playerItem != _player.currentItem) return; + // Detect buffering in different ways depending on whether we're playing + if (_playing) { + if (@available(macOS 10.12, iOS 10.0, *)) { + if (_player.timeControlStatus == AVPlayerTimeControlStatusWaitingToPlayAtSpecifiedRate) { + [self enterBuffering:@"ready to play: playing, waitingToPlay"]; + } else { + [self leaveBuffering:@"ready to play: playing, !waitingToPlay"]; + } + [self updatePosition]; + } else { + // If this happens when we're playing, check whether buffer is confirmed + if (_bufferUnconfirmed && !_player.currentItem.playbackBufferFull) { + // Stay in bufering - XXX Test + [self enterBuffering:@"ready to play: playing, bufferUnconfirmed && !playbackBufferFull"]; + } else { + if (_player.currentItem.playbackBufferEmpty) { + // !_player.currentItem.playbackLikelyToKeepUp; + [self enterBuffering:@"ready to play: playing, playbackBufferEmpty"]; + } else { + [self leaveBuffering:@"ready to play: playing, !playbackBufferEmpty"]; + } + [self updatePosition]; + } + } + } else { + if (_player.currentItem.playbackBufferEmpty) { + [self enterBuffering:@"ready to play: !playing, playbackBufferEmpty"]; + // || !_player.currentItem.playbackLikelyToKeepUp; + } else { + [self leaveBuffering:@"ready to play: !playing, !playbackBufferEmpty"]; + } + [self updatePosition]; + } + [self broadcastPlaybackEvent]; + if (_loadResult) { + 
_loadResult(@([self getDuration])); + _loadResult = nil; + } + break; + } + case AVPlayerItemStatusFailed: { + NSLog(@"AVPlayerItemStatusFailed"); + [self sendErrorForItem:playerItem]; + break; + } + case AVPlayerItemStatusUnknown: + break; + } + } else if ([keyPath isEqualToString:@"playbackBufferEmpty"] || [keyPath isEqualToString:@"playbackBufferFull"]) { + // Use these values to detect buffering. + IndexedPlayerItem *playerItem = (IndexedPlayerItem *)object; + if (playerItem != _player.currentItem) return; + // If there's a seek in progress, these values are unreliable + if (CMTIME_IS_VALID(_seekPos)) return; + // Detect buffering in different ways depending on whether we're playing + if (_playing) { + if (@available(macOS 10.12, iOS 10.0, *)) { + // We handle this with timeControlStatus instead. + } else { + if (_bufferUnconfirmed && playerItem.playbackBufferFull) { + _bufferUnconfirmed = NO; + [self leaveBuffering:@"playing, _bufferUnconfirmed && playbackBufferFull"]; + [self updatePosition]; + NSLog(@"Buffering confirmed! 
leaving buffering"); + [self broadcastPlaybackEvent]; + } + } + } else { + if (playerItem.playbackBufferEmpty) { + [self enterBuffering:@"!playing, playbackBufferEmpty"]; + [self updatePosition]; + [self broadcastPlaybackEvent]; + } else if (!playerItem.playbackBufferEmpty || playerItem.playbackBufferFull) { + _processingState = ready; + [self leaveBuffering:@"!playing, !playbackBufferEmpty || playbackBufferFull"]; + [self updatePosition]; + [self broadcastPlaybackEvent]; + } + } + /* } else if ([keyPath isEqualToString:@"playbackLikelyToKeepUp"]) { */ + } else if ([keyPath isEqualToString:@"timeControlStatus"]) { + if (@available(macOS 10.12, iOS 10.0, *)) { + AVPlayerTimeControlStatus status = AVPlayerTimeControlStatusPaused; + NSNumber *statusNumber = change[NSKeyValueChangeNewKey]; + if ([statusNumber isKindOfClass:[NSNumber class]]) { + status = statusNumber.intValue; + } + switch (status) { + case AVPlayerTimeControlStatusPaused: + //NSLog(@"AVPlayerTimeControlStatusPaused"); + break; + case AVPlayerTimeControlStatusWaitingToPlayAtSpecifiedRate: + //NSLog(@"AVPlayerTimeControlStatusWaitingToPlayAtSpecifiedRate"); + if (_processingState != completed) { + [self enterBuffering:@"timeControlStatus"]; + [self updatePosition]; + [self broadcastPlaybackEvent]; + } else { + NSLog(@"Ignoring wait signal because we reached the end"); + } + break; + case AVPlayerTimeControlStatusPlaying: + [self leaveBuffering:@"timeControlStatus"]; + [self updatePosition]; + [self broadcastPlaybackEvent]; + break; + } + } + } else if ([keyPath isEqualToString:@"currentItem"] && _player.currentItem) { + if (_player.currentItem.status == AVPlayerItemStatusFailed) { + if ([_orderInv[_index] intValue] + 1 < [_order count]) { + // account for automatic move to next item + _index = [_order[[_orderInv[_index] intValue] + 1] intValue]; + NSLog(@"advance to next on error: index = %d", _index); + [self broadcastPlaybackEvent]; + } else { + NSLog(@"error on last item"); + } + return; + } else { + 
int expectedIndex = [self indexForItem:_player.currentItem]; + if (_index != expectedIndex) { + // AVQueuePlayer will sometimes skip over error items without + // notifying this observer. + NSLog(@"Queue change detected. Adjusting index from %d -> %d", _index, expectedIndex); + _index = expectedIndex; + [self broadcastPlaybackEvent]; + } + } + //NSLog(@"currentItem changed. _index=%d", _index); + _bufferUnconfirmed = YES; + // If we've skipped or transitioned to a new item and we're not + // currently in the middle of a seek + if (CMTIME_IS_INVALID(_seekPos) && _player.currentItem.status == AVPlayerItemStatusReadyToPlay) { + [self updatePosition]; + IndexedAudioSource *source = ((IndexedPlayerItem *)_player.currentItem).audioSource; + // We should already be at position zero but for + // ClippingAudioSource it might be off by some milliseconds so we + // consider anything <= 100 as close enough. + if ((int)(1000 * CMTimeGetSeconds(source.position)) > 100) { + NSLog(@"On currentItem change, seeking back to zero"); + BOOL shouldResumePlayback = NO; + AVPlayerActionAtItemEnd originalEndAction = _player.actionAtItemEnd; + if (_playing && CMTimeGetSeconds(CMTimeSubtract(source.position, source.duration)) >= 0) { + NSLog(@"Need to pause while rewinding because we're at the end"); + shouldResumePlayback = YES; + _player.actionAtItemEnd = AVPlayerActionAtItemEndPause; + [_player pause]; + } + [self enterBuffering:@"currentItem changed, seeking"]; + [self updatePosition]; + [self broadcastPlaybackEvent]; + [source seek:kCMTimeZero completionHandler:^(BOOL finished) { + [self leaveBuffering:@"currentItem changed, finished seek"]; + [self updatePosition]; + [self broadcastPlaybackEvent]; + if (shouldResumePlayback) { + _player.actionAtItemEnd = originalEndAction; + // TODO: This logic is almost duplicated in seek. See if we can reuse this code. + [_player play]; + } + }]; + } else { + // Already at zero, no need to seek. 
+ } + } + } else if ([keyPath isEqualToString:@"loadedTimeRanges"]) { + IndexedPlayerItem *playerItem = (IndexedPlayerItem *)object; + if (playerItem != _player.currentItem) return; + int pos = [self getBufferedPosition]; + if (pos != _bufferedPosition) { + _bufferedPosition = pos; + [self broadcastPlaybackEvent]; + } + } +} + +- (void)sendErrorForItem:(IndexedPlayerItem *)playerItem { + FlutterError *flutterError = [FlutterError errorWithCode:[NSString stringWithFormat:@"%d", playerItem.error.code] + message:playerItem.error.localizedDescription + details:nil]; + [self sendError:flutterError playerItem:playerItem]; +} + +- (void)sendError:(FlutterError *)flutterError playerItem:(IndexedPlayerItem *)playerItem { + NSLog(@"sendError"); + if (_loadResult && playerItem == _player.currentItem) { + _loadResult(flutterError); + _loadResult = nil; + } + if (_eventSink) { + // Broadcast all errors even if they aren't on the current item. + _eventSink(flutterError); + } +} + +- (void)abortExistingConnection { + FlutterError *flutterError = [FlutterError errorWithCode:@"abort" + message:@"Connection aborted" + details:nil]; + [self sendError:flutterError playerItem:nil]; +} + +- (int)indexForItem:(IndexedPlayerItem *)playerItem { + for (int i = 0; i < _indexedAudioSources.count; i++) { + if (_indexedAudioSources[i].playerItem == playerItem) { + return i; + } + } + return -1; +} + +- (void)play { + [self play:nil]; +} + +- (void)play:(FlutterResult)result { + if (result) { + if (_playResult) { + NSLog(@"INTERRUPTING PLAY"); + _playResult(nil); + } + _playResult = result; + } + _playing = YES; +#if TARGET_OS_IPHONE + if (_configuredSession) { + [[AVAudioSession sharedInstance] setActive:YES error:nil]; + } +#endif + [_player play]; + [self updatePosition]; + if (@available(macOS 10.12, iOS 10.0, *)) {} + else { + if (_bufferUnconfirmed && !_player.currentItem.playbackBufferFull) { + [self enterBuffering:@"play, _bufferUnconfirmed && !playbackBufferFull"]; + [self 
broadcastPlaybackEvent]; + } + } +} + +- (void)pause { + _playing = NO; + [_player pause]; + [self updatePosition]; + [self broadcastPlaybackEvent]; + if (_playResult) { + NSLog(@"PLAY FINISHED DUE TO PAUSE"); + _playResult(nil); + _playResult = nil; + } +} + +- (void)complete { + [self updatePosition]; + _processingState = completed; + [self broadcastPlaybackEvent]; + if (_playResult) { + NSLog(@"PLAY FINISHED DUE TO COMPLETE"); + _playResult(nil); + _playResult = nil; + } +} + +- (void)setVolume:(float)volume { + [_player setVolume:volume]; +} + +- (void)setSpeed:(float)speed { + if (speed == 1.0 + || (speed < 1.0 && _player.currentItem.canPlaySlowForward) + || (speed > 1.0 && _player.currentItem.canPlayFastForward)) { + _player.rate = speed; + } + [self updatePosition]; +} + +- (void)setLoopMode:(int)loopMode { + _loopMode = loopMode; + if (_player) { + switch (_loopMode) { + case loopOne: + _player.actionAtItemEnd = AVPlayerActionAtItemEndPause; // AVPlayerActionAtItemEndNone + break; + default: + _player.actionAtItemEnd = AVPlayerActionAtItemEndAdvance; + } + } +} + +- (void)setShuffleModeEnabled:(BOOL)shuffleModeEnabled { + NSLog(@"setShuffleModeEnabled: %d", shuffleModeEnabled); + _shuffleModeEnabled = shuffleModeEnabled; + if (!_audioSource) return; + + [self updateOrder]; + + [self enqueueFrom:_index]; +} + +- (void)dumpQueue { + for (int i = 0; i < _player.items.count; i++) { + IndexedPlayerItem *playerItem = _player.items[i]; + for (int j = 0; j < _indexedAudioSources.count; j++) { + IndexedAudioSource *source = _indexedAudioSources[j]; + if (source.playerItem == playerItem) { + NSLog(@"- %d", j); + break; + } + } + } +} + +- (void)setAutomaticallyWaitsToMinimizeStalling:(bool)automaticallyWaitsToMinimizeStalling { + _automaticallyWaitsToMinimizeStalling = automaticallyWaitsToMinimizeStalling; + if (@available(macOS 10.12, iOS 10.0, *)) { + if(_player) { + _player.automaticallyWaitsToMinimizeStalling = automaticallyWaitsToMinimizeStalling; + } + } +} + 
+- (void)seek:(CMTime)position index:(NSNumber *)newIndex completionHandler:(void (^)(BOOL))completionHandler { + int index = _index; + if (newIndex != [NSNull null]) { + index = [newIndex intValue]; + } + if (index != _index) { + // Jump to a new item + /* if (_playing && index == _index + 1) { */ + /* // Special case for jumping to the very next item */ + /* NSLog(@"seek to next item: %d -> %d", _index, index); */ + /* [_indexedAudioSources[_index] seek:kCMTimeZero]; */ + /* _index = index; */ + /* [_player advanceToNextItem]; */ + /* [self broadcastPlaybackEvent]; */ + /* } else */ + { + // Jump to a distant item + //NSLog(@"seek# jump to distant item: %d -> %d", _index, index); + if (_playing) { + [_player pause]; + } + [_indexedAudioSources[_index] seek:kCMTimeZero]; + // The "currentItem" key observer will respect that a seek is already in progress + _seekPos = position; + [self updatePosition]; + [self enqueueFrom:index]; + IndexedAudioSource *source = _indexedAudioSources[_index]; + if (abs((int)(1000 * CMTimeGetSeconds(CMTimeSubtract(source.position, position)))) > 100) { + [self enterBuffering:@"seek to index"]; + [self updatePosition]; + [self broadcastPlaybackEvent]; + [source seek:position completionHandler:^(BOOL finished) { + if (@available(macOS 10.12, iOS 10.0, *)) { + if (_playing) { + // Handled by timeControlStatus + } else { + if (_bufferUnconfirmed && !_player.currentItem.playbackBufferFull) { + // Stay in buffering + } else if (source.playerItem.status == AVPlayerItemStatusReadyToPlay) { + [self leaveBuffering:@"seek to index finished, (!bufferUnconfirmed || playbackBufferFull) && ready to play"]; + [self updatePosition]; + [self broadcastPlaybackEvent]; + } + } + } else { + if (_bufferUnconfirmed && !_player.currentItem.playbackBufferFull) { + // Stay in buffering + } else if (source.playerItem.status == AVPlayerItemStatusReadyToPlay) { + [self leaveBuffering:@"seek to index finished, (!bufferUnconfirmed || playbackBufferFull) && ready to 
play"]; + [self updatePosition]; + [self broadcastPlaybackEvent]; + } + } + if (_playing) { + [_player play]; + } + _seekPos = kCMTimeInvalid; + [self broadcastPlaybackEvent]; + if (completionHandler) { + completionHandler(finished); + } + }]; + } else { + _seekPos = kCMTimeInvalid; + if (_playing) { + [_player play]; + } + } + } + } else { + // Seek within an item + if (_playing) { + [_player pause]; + } + _seekPos = position; + //NSLog(@"seek. enter buffering. pos = %d", (int)(1000*CMTimeGetSeconds(_indexedAudioSources[_index].position))); + // TODO: Move this into a separate method so it can also + // be used in skip. + [self enterBuffering:@"seek"]; + [self updatePosition]; + [self broadcastPlaybackEvent]; + [_indexedAudioSources[_index] seek:position completionHandler:^(BOOL finished) { + [self updatePosition]; + if (_playing) { + // If playing, buffering will be detected either by: + // 1. checkForDiscontinuity + // 2. timeControlStatus + [_player play]; + } else { + // If not playing, there is no reliable way to detect + // when buffering has completed, so we use + // !playbackBufferEmpty. Although this always seems to + // be full even right after a seek. 
+ if (_player.currentItem.playbackBufferEmpty) { + [self enterBuffering:@"seek finished, playbackBufferEmpty"]; + } else { + [self leaveBuffering:@"seek finished, !playbackBufferEmpty"]; + } + [self updatePosition]; + if (_processingState != buffering) { + [self broadcastPlaybackEvent]; + } + } + _seekPos = kCMTimeInvalid; + [self broadcastPlaybackEvent]; + if (completionHandler) { + completionHandler(finished); + } + }]; + } +} + +- (void)dispose { + if (_processingState != none) { + [_player pause]; + _processingState = none; + [self broadcastPlaybackEvent]; + } + if (_timeObserver) { + [_player removeTimeObserver:_timeObserver]; + _timeObserver = 0; + } + if (_indexedAudioSources) { + for (int i = 0; i < [_indexedAudioSources count]; i++) { + [self removeItemObservers:_indexedAudioSources[i].playerItem]; + } + } + if (_player) { + [_player removeObserver:self forKeyPath:@"currentItem"]; + if (@available(macOS 10.12, iOS 10.0, *)) { + [_player removeObserver:self forKeyPath:@"timeControlStatus"]; + } + _player = nil; + } + // Untested: + // [_eventChannel setStreamHandler:nil]; + // [_methodChannel setMethodHandler:nil]; +} + +@end diff --git a/just_audio/ios/Classes/AudioSource.h b/just_audio/ios/Classes/AudioSource.h new file mode 100644 index 0000000..c192f33 --- /dev/null +++ b/just_audio/ios/Classes/AudioSource.h @@ -0,0 +1,13 @@ +#import + +@interface AudioSource : NSObject + +@property (readonly, nonatomic) NSString* sourceId; + +- (instancetype)initWithId:(NSString *)sid; +- (int)buildSequence:(NSMutableArray *)sequence treeIndex:(int)treeIndex; +- (void)findById:(NSString *)sourceId matches:(NSMutableArray *)matches; +- (NSArray *)getShuffleOrder; +- (int)shuffle:(int)treeIndex currentIndex:(int)currentIndex; + +@end diff --git a/just_audio/ios/Classes/AudioSource.m b/just_audio/ios/Classes/AudioSource.m new file mode 100644 index 0000000..81534f1 --- /dev/null +++ b/just_audio/ios/Classes/AudioSource.m @@ -0,0 +1,37 @@ +#import "AudioSource.h" +#import 
+ +@implementation AudioSource { + NSString *_sourceId; +} + +- (instancetype)initWithId:(NSString *)sid { + self = [super init]; + NSAssert(self, @"super init cannot be nil"); + _sourceId = sid; + return self; +} + +- (NSString *)sourceId { + return _sourceId; +} + +- (int)buildSequence:(NSMutableArray *)sequence treeIndex:(int)treeIndex { + return 0; +} + +- (void)findById:(NSString *)sourceId matches:(NSMutableArray *)matches { + if ([_sourceId isEqualToString:sourceId]) { + [matches addObject:self]; + } +} + +- (NSArray *)getShuffleOrder { + return @[]; +} + +- (int)shuffle:(int)treeIndex currentIndex:(int)currentIndex { + return 0; +} + +@end diff --git a/just_audio/ios/Classes/ClippingAudioSource.h b/just_audio/ios/Classes/ClippingAudioSource.h new file mode 100644 index 0000000..127019e --- /dev/null +++ b/just_audio/ios/Classes/ClippingAudioSource.h @@ -0,0 +1,11 @@ +#import "AudioSource.h" +#import "UriAudioSource.h" +#import + +@interface ClippingAudioSource : IndexedAudioSource + +@property (readonly, nonatomic) UriAudioSource* audioSource; + +- (instancetype)initWithId:(NSString *)sid audioSource:(UriAudioSource *)audioSource start:(NSNumber *)start end:(NSNumber *)end; + +@end diff --git a/just_audio/ios/Classes/ClippingAudioSource.m b/just_audio/ios/Classes/ClippingAudioSource.m new file mode 100644 index 0000000..2f3b174 --- /dev/null +++ b/just_audio/ios/Classes/ClippingAudioSource.m @@ -0,0 +1,79 @@ +#import "AudioSource.h" +#import "ClippingAudioSource.h" +#import "IndexedPlayerItem.h" +#import "UriAudioSource.h" +#import + +@implementation ClippingAudioSource { + UriAudioSource *_audioSource; + CMTime _start; + CMTime _end; +} + +- (instancetype)initWithId:(NSString *)sid audioSource:(UriAudioSource *)audioSource start:(NSNumber *)start end:(NSNumber *)end { + self = [super initWithId:sid]; + NSAssert(self, @"super init cannot be nil"); + _audioSource = audioSource; + _start = start == [NSNull null] ? 
kCMTimeZero : CMTimeMake([start intValue], 1000); + _end = end == [NSNull null] ? kCMTimeInvalid : CMTimeMake([end intValue], 1000); + return self; +} + +- (UriAudioSource *)audioSource { + return _audioSource; +} + +- (void)findById:(NSString *)sourceId matches:(NSMutableArray *)matches { + [super findById:sourceId matches:matches]; + [_audioSource findById:sourceId matches:matches]; +} + +- (void)attach:(AVQueuePlayer *)player { + [super attach:player]; + _audioSource.playerItem.forwardPlaybackEndTime = _end; + // XXX: Not needed since currentItem observer handles it? + [self seek:kCMTimeZero]; +} + +- (IndexedPlayerItem *)playerItem { + return _audioSource.playerItem; +} + +- (NSArray *)getShuffleOrder { + return @[@(0)]; +} + +- (void)play:(AVQueuePlayer *)player { +} + +- (void)pause:(AVQueuePlayer *)player { +} + +- (void)stop:(AVQueuePlayer *)player { +} + +- (void)seek:(CMTime)position completionHandler:(void (^)(BOOL))completionHandler { + if (!completionHandler || (self.playerItem.status == AVPlayerItemStatusReadyToPlay)) { + CMTime absPosition = CMTimeAdd(_start, position); + [_audioSource.playerItem seekToTime:absPosition toleranceBefore:kCMTimeZero toleranceAfter:kCMTimeZero completionHandler:completionHandler]; + } +} + +- (CMTime)duration { + return CMTimeSubtract(CMTIME_IS_INVALID(_end) ? self.playerItem.duration : _end, _start); +} + +- (void)setDuration:(CMTime)duration { +} + +- (CMTime)position { + return CMTimeSubtract(self.playerItem.currentTime, _start); +} + +- (CMTime)bufferedPosition { + CMTime pos = CMTimeSubtract(_audioSource.bufferedPosition, _start); + CMTime dur = [self duration]; + return CMTimeCompare(pos, dur) >= 0 ? 
dur : pos; +} + +@end diff --git a/just_audio/ios/Classes/ConcatenatingAudioSource.h b/just_audio/ios/Classes/ConcatenatingAudioSource.h new file mode 100644 index 0000000..2c2350a --- /dev/null +++ b/just_audio/ios/Classes/ConcatenatingAudioSource.h @@ -0,0 +1,13 @@ +#import "AudioSource.h" +#import + +@interface ConcatenatingAudioSource : AudioSource + +@property (readonly, nonatomic) int count; + +- (instancetype)initWithId:(NSString *)sid audioSources:(NSMutableArray *)audioSources; +- (void)insertSource:(AudioSource *)audioSource atIndex:(int)index; +- (void)removeSourcesFromIndex:(int)start toIndex:(int)end; +- (void)moveSourceFromIndex:(int)currentIndex toIndex:(int)newIndex; + +@end diff --git a/just_audio/ios/Classes/ConcatenatingAudioSource.m b/just_audio/ios/Classes/ConcatenatingAudioSource.m new file mode 100644 index 0000000..bd7b713 --- /dev/null +++ b/just_audio/ios/Classes/ConcatenatingAudioSource.m @@ -0,0 +1,109 @@ +#import "AudioSource.h" +#import "ConcatenatingAudioSource.h" +#import +#import + +@implementation ConcatenatingAudioSource { + NSMutableArray *_audioSources; + NSMutableArray *_shuffleOrder; +} + +- (instancetype)initWithId:(NSString *)sid audioSources:(NSMutableArray *)audioSources { + self = [super initWithId:sid]; + NSAssert(self, @"super init cannot be nil"); + _audioSources = audioSources; + return self; +} + +- (int)count { + return _audioSources.count; +} + +- (void)insertSource:(AudioSource *)audioSource atIndex:(int)index { + [_audioSources insertObject:audioSource atIndex:index]; +} + +- (void)removeSourcesFromIndex:(int)start toIndex:(int)end { + if (end == -1) end = _audioSources.count; + for (int i = start; i < end; i++) { + [_audioSources removeObjectAtIndex:start]; + } +} + +- (void)moveSourceFromIndex:(int)currentIndex toIndex:(int)newIndex { + AudioSource *source = _audioSources[currentIndex]; + [_audioSources removeObjectAtIndex:currentIndex]; + [_audioSources insertObject:source atIndex:newIndex]; +} + +- 
(int)buildSequence:(NSMutableArray *)sequence treeIndex:(int)treeIndex { + for (int i = 0; i < [_audioSources count]; i++) { + treeIndex = [_audioSources[i] buildSequence:sequence treeIndex:treeIndex]; + } + return treeIndex; +} + +- (void)findById:(NSString *)sourceId matches:(NSMutableArray *)matches { + [super findById:sourceId matches:matches]; + for (int i = 0; i < [_audioSources count]; i++) { + [_audioSources[i] findById:sourceId matches:matches]; + } +} + +- (NSArray *)getShuffleOrder { + NSMutableArray *order = [NSMutableArray new]; + int offset = [order count]; + NSMutableArray *childOrders = [NSMutableArray new]; // array of array of ints + for (int i = 0; i < [_audioSources count]; i++) { + AudioSource *audioSource = _audioSources[i]; + NSArray *childShuffleOrder = [audioSource getShuffleOrder]; + NSMutableArray *offsetChildShuffleOrder = [NSMutableArray new]; + for (int j = 0; j < [childShuffleOrder count]; j++) { + [offsetChildShuffleOrder addObject:@([childShuffleOrder[j] integerValue] + offset)]; + } + [childOrders addObject:offsetChildShuffleOrder]; + offset += [childShuffleOrder count]; + } + for (int i = 0; i < [_audioSources count]; i++) { + [order addObjectsFromArray:childOrders[[_shuffleOrder[i] integerValue]]]; + } + return order; +} + +- (int)shuffle:(int)treeIndex currentIndex:(int)currentIndex { + int currentChildIndex = -1; + for (int i = 0; i < [_audioSources count]; i++) { + int indexBefore = treeIndex; + AudioSource *child = _audioSources[i]; + treeIndex = [child shuffle:treeIndex currentIndex:currentIndex]; + if (currentIndex >= indexBefore && currentIndex < treeIndex) { + currentChildIndex = i; + } else {} + } + // Shuffle so that the current child is first in the shuffle order + _shuffleOrder = [NSMutableArray arrayWithCapacity:[_audioSources count]]; + for (int i = 0; i < [_audioSources count]; i++) { + [_shuffleOrder addObject:@(0)]; + } + NSLog(@"shuffle: audioSources.count=%d and shuffleOrder.count=%d", [_audioSources count], 
[_shuffleOrder count]); + // First generate a random shuffle + for (int i = 0; i < [_audioSources count]; i++) { + int j = arc4random_uniform(i + 1); + _shuffleOrder[i] = _shuffleOrder[j]; + _shuffleOrder[j] = @(i); + } + // Then bring currentIndex to the front + if (currentChildIndex != -1) { + for (int i = 1; i < [_audioSources count]; i++) { + if ([_shuffleOrder[i] integerValue] == currentChildIndex) { + NSNumber *v = _shuffleOrder[0]; + _shuffleOrder[0] = _shuffleOrder[i]; + _shuffleOrder[i] = v; + break; + } + } + } + return treeIndex; +} + +@end diff --git a/just_audio/ios/Classes/IndexedAudioSource.h b/just_audio/ios/Classes/IndexedAudioSource.h new file mode 100644 index 0000000..a308a4f --- /dev/null +++ b/just_audio/ios/Classes/IndexedAudioSource.h @@ -0,0 +1,21 @@ +#import "AudioSource.h" +#import "IndexedPlayerItem.h" +#import +#import + +@interface IndexedAudioSource : AudioSource + +@property (readonly, nonatomic) IndexedPlayerItem *playerItem; +@property (readwrite, nonatomic) CMTime duration; +@property (readonly, nonatomic) CMTime position; +@property (readonly, nonatomic) CMTime bufferedPosition; +@property (readonly, nonatomic) BOOL isAttached; + +- (void)attach:(AVQueuePlayer *)player; +- (void)play:(AVQueuePlayer *)player; +- (void)pause:(AVQueuePlayer *)player; +- (void)stop:(AVQueuePlayer *)player; +- (void)seek:(CMTime)position; +- (void)seek:(CMTime)position completionHandler:(void (^)(BOOL))completionHandler; + +@end diff --git a/just_audio/ios/Classes/IndexedAudioSource.m b/just_audio/ios/Classes/IndexedAudioSource.m new file mode 100644 index 0000000..316f900 --- /dev/null +++ b/just_audio/ios/Classes/IndexedAudioSource.m @@ -0,0 +1,68 @@ +#import "IndexedAudioSource.h" +#import "IndexedPlayerItem.h" +#import + +@implementation IndexedAudioSource { + BOOL _isAttached; +} + +- (instancetype)initWithId:(NSString *)sid { + self = [super init]; + NSAssert(self, @"super init cannot be nil"); + _isAttached = NO; + return self; +} + +- 
(IndexedPlayerItem *)playerItem { + return nil; +} + +- (BOOL)isAttached { + return _isAttached; +} + +- (int)buildSequence:(NSMutableArray *)sequence treeIndex:(int)treeIndex { + [sequence addObject:self]; + return treeIndex + 1; +} + +- (int)shuffle:(int)treeIndex currentIndex:(int)currentIndex { + return treeIndex + 1; +} + +- (void)attach:(AVQueuePlayer *)player { + _isAttached = YES; +} + +- (void)play:(AVQueuePlayer *)player { +} + +- (void)pause:(AVQueuePlayer *)player { +} + +- (void)stop:(AVQueuePlayer *)player { +} + +- (void)seek:(CMTime)position { + [self seek:position completionHandler:nil]; +} + +- (void)seek:(CMTime)position completionHandler:(void (^)(BOOL))completionHandler { +} + +- (CMTime)duration { + return kCMTimeInvalid; +} + +- (void)setDuration:(CMTime)duration { +} + +- (CMTime)position { + return kCMTimeInvalid; +} + +- (CMTime)bufferedPosition { + return kCMTimeInvalid; +} + +@end diff --git a/just_audio/ios/Classes/IndexedPlayerItem.h b/just_audio/ios/Classes/IndexedPlayerItem.h new file mode 100644 index 0000000..5d4a11c --- /dev/null +++ b/just_audio/ios/Classes/IndexedPlayerItem.h @@ -0,0 +1,9 @@ +#import + +@class IndexedAudioSource; + +@interface IndexedPlayerItem : AVPlayerItem + +@property (readwrite, nonatomic) IndexedAudioSource *audioSource; + +@end diff --git a/just_audio/ios/Classes/IndexedPlayerItem.m b/just_audio/ios/Classes/IndexedPlayerItem.m new file mode 100644 index 0000000..87fafe0 --- /dev/null +++ b/just_audio/ios/Classes/IndexedPlayerItem.m @@ -0,0 +1,16 @@ +#import "IndexedPlayerItem.h" +#import "IndexedAudioSource.h" + +@implementation IndexedPlayerItem { + IndexedAudioSource *_audioSource; +} + +-(void)setAudioSource:(IndexedAudioSource *)audioSource { + _audioSource = audioSource; +} + +-(IndexedAudioSource *)audioSource { + return _audioSource; +} + +@end diff --git a/just_audio/ios/Classes/JustAudioPlugin.h b/just_audio/ios/Classes/JustAudioPlugin.h new file mode 100644 index 0000000..a694322 --- /dev/null 
+++ b/just_audio/ios/Classes/JustAudioPlugin.h @@ -0,0 +1,4 @@ +#import + +@interface JustAudioPlugin : NSObject +@end diff --git a/just_audio/ios/Classes/JustAudioPlugin.m b/just_audio/ios/Classes/JustAudioPlugin.m new file mode 100644 index 0000000..982a260 --- /dev/null +++ b/just_audio/ios/Classes/JustAudioPlugin.m @@ -0,0 +1,55 @@ +#import "JustAudioPlugin.h" +#import "AudioPlayer.h" +#import +#include + +@implementation JustAudioPlugin { + NSObject* _registrar; + BOOL _configuredSession; +} + ++ (void)registerWithRegistrar:(NSObject*)registrar { + FlutterMethodChannel* channel = [FlutterMethodChannel + methodChannelWithName:@"com.ryanheise.just_audio.methods" + binaryMessenger:[registrar messenger]]; + JustAudioPlugin* instance = [[JustAudioPlugin alloc] initWithRegistrar:registrar]; + [registrar addMethodCallDelegate:instance channel:channel]; +} + +- (instancetype)initWithRegistrar:(NSObject *)registrar { + self = [super init]; + NSAssert(self, @"super init cannot be nil"); + _registrar = registrar; + return self; +} + +- (void)handleMethodCall:(FlutterMethodCall*)call result:(FlutterResult)result { + if ([@"init" isEqualToString:call.method]) { + NSArray* args = (NSArray*)call.arguments; + NSString* playerId = args[0]; + /*AudioPlayer* player =*/ [[AudioPlayer alloc] initWithRegistrar:_registrar playerId:playerId configuredSession:_configuredSession]; + result(nil); + } else if ([@"setIosCategory" isEqualToString:call.method]) { +#if TARGET_OS_IPHONE + NSNumber* categoryIndex = (NSNumber*)call.arguments; + AVAudioSessionCategory category = nil; + switch (categoryIndex.integerValue) { + case 0: category = AVAudioSessionCategoryAmbient; break; + case 1: category = AVAudioSessionCategorySoloAmbient; break; + case 2: category = AVAudioSessionCategoryPlayback; break; + case 3: category = AVAudioSessionCategoryRecord; break; + case 4: category = AVAudioSessionCategoryPlayAndRecord; break; + case 5: category = AVAudioSessionCategoryMultiRoute; break; + } + if 
(category) { + _configuredSession = YES; + } + [[AVAudioSession sharedInstance] setCategory:category error:nil]; +#endif + result(nil); + } else { + result(FlutterMethodNotImplemented); + } +} + +@end diff --git a/just_audio/ios/Classes/LoopingAudioSource.h b/just_audio/ios/Classes/LoopingAudioSource.h new file mode 100644 index 0000000..7c524a9 --- /dev/null +++ b/just_audio/ios/Classes/LoopingAudioSource.h @@ -0,0 +1,8 @@ +#import "AudioSource.h" +#import + +@interface LoopingAudioSource : AudioSource + +- (instancetype)initWithId:(NSString *)sid audioSources:(NSArray *)audioSources; + +@end diff --git a/just_audio/ios/Classes/LoopingAudioSource.m b/just_audio/ios/Classes/LoopingAudioSource.m new file mode 100644 index 0000000..ba4b52b --- /dev/null +++ b/just_audio/ios/Classes/LoopingAudioSource.m @@ -0,0 +1,53 @@ +#import "AudioSource.h" +#import "LoopingAudioSource.h" +#import + +@implementation LoopingAudioSource { + // An array of duplicates + NSArray *_audioSources; // +} + +- (instancetype)initWithId:(NSString *)sid audioSources:(NSArray *)audioSources { + self = [super initWithId:sid]; + NSAssert(self, @"super init cannot be nil"); + _audioSources = audioSources; + return self; +} + +- (int)buildSequence:(NSMutableArray *)sequence treeIndex:(int)treeIndex { + for (int i = 0; i < [_audioSources count]; i++) { + treeIndex = [_audioSources[i] buildSequence:sequence treeIndex:treeIndex]; + } + return treeIndex; +} + +- (void)findById:(NSString *)sourceId matches:(NSMutableArray *)matches { + [super findById:sourceId matches:matches]; + for (int i = 0; i < [_audioSources count]; i++) { + [_audioSources[i] findById:sourceId matches:matches]; + } +} + +- (NSArray *)getShuffleOrder { + NSMutableArray *order = [NSMutableArray new]; + int offset = (int)[order count]; + for (int i = 0; i < [_audioSources count]; i++) { + AudioSource *audioSource = _audioSources[i]; + NSArray *childShuffleOrder = [audioSource getShuffleOrder]; + for (int j = 0; j < [childShuffleOrder 
count]; j++) { + [order addObject:@([childShuffleOrder[j] integerValue] + offset)]; + } + offset += [childShuffleOrder count]; + } + return order; +} + +- (int)shuffle:(int)treeIndex currentIndex:(int)currentIndex { + // TODO: This should probably shuffle the same way on all duplicates. + for (int i = 0; i < [_audioSources count]; i++) { + treeIndex = [_audioSources[i] shuffle:treeIndex currentIndex:currentIndex]; + } + return treeIndex; +} + +@end diff --git a/just_audio/ios/Classes/UriAudioSource.h b/just_audio/ios/Classes/UriAudioSource.h new file mode 100644 index 0000000..6ee3c2e --- /dev/null +++ b/just_audio/ios/Classes/UriAudioSource.h @@ -0,0 +1,8 @@ +#import "IndexedAudioSource.h" +#import + +@interface UriAudioSource : IndexedAudioSource + +- (instancetype)initWithId:(NSString *)sid uri:(NSString *)uri; + +@end diff --git a/just_audio/ios/Classes/UriAudioSource.m b/just_audio/ios/Classes/UriAudioSource.m new file mode 100644 index 0000000..91321d4 --- /dev/null +++ b/just_audio/ios/Classes/UriAudioSource.m @@ -0,0 +1,79 @@ +#import "UriAudioSource.h" +#import "IndexedAudioSource.h" +#import "IndexedPlayerItem.h" +#import + +@implementation UriAudioSource { + NSString *_uri; + IndexedPlayerItem *_playerItem; + /* CMTime _duration; */ +} + +- (instancetype)initWithId:(NSString *)sid uri:(NSString *)uri { + self = [super initWithId:sid]; + NSAssert(self, @"super init cannot be nil"); + _uri = uri; + if ([_uri hasPrefix:@"file://"]) { + _playerItem = [[IndexedPlayerItem alloc] initWithURL:[NSURL fileURLWithPath:[_uri substringFromIndex:7]]]; + } else { + _playerItem = [[IndexedPlayerItem alloc] initWithURL:[NSURL URLWithString:_uri]]; + } + if (@available(macOS 10.13, iOS 11.0, *)) { + // This does the best at reducing distortion on voice with speeds below 1.0 + _playerItem.audioTimePitchAlgorithm = AVAudioTimePitchAlgorithmTimeDomain; + } + /* NSKeyValueObservingOptions options = */ + /* NSKeyValueObservingOptionOld | NSKeyValueObservingOptionNew; */ + /* 
[_playerItem addObserver:self */ + /* forKeyPath:@"duration" */ + /* options:options */ + /* context:nil]; */ + return self; +} + +- (IndexedPlayerItem *)playerItem { + return _playerItem; +} + +- (NSArray *)getShuffleOrder { + return @[@(0)]; +} + +- (void)play:(AVQueuePlayer *)player { +} + +- (void)pause:(AVQueuePlayer *)player { +} + +- (void)stop:(AVQueuePlayer *)player { +} + +- (void)seek:(CMTime)position completionHandler:(void (^)(BOOL))completionHandler { + if (!completionHandler || (_playerItem.status == AVPlayerItemStatusReadyToPlay)) { + [_playerItem seekToTime:position toleranceBefore:kCMTimeZero toleranceAfter:kCMTimeZero completionHandler:completionHandler]; + } +} + +- (CMTime)duration { + return _playerItem.duration; +} + +- (void)setDuration:(CMTime)duration { +} + +- (CMTime)position { + return _playerItem.currentTime; +} + +- (CMTime)bufferedPosition { + NSValue *last = _playerItem.loadedTimeRanges.lastObject; + if (last) { + CMTimeRange timeRange = [last CMTimeRangeValue]; + return CMTimeAdd(timeRange.start, timeRange.duration); + } else { + return _playerItem.currentTime; + } + return kCMTimeInvalid; +} + +@end diff --git a/just_audio/ios/just_audio.podspec b/just_audio/ios/just_audio.podspec new file mode 100644 index 0000000..ba5c7d2 --- /dev/null +++ b/just_audio/ios/just_audio.podspec @@ -0,0 +1,21 @@ +# +# To learn more about a Podspec see http://guides.cocoapods.org/syntax/podspec.html +# +Pod::Spec.new do |s| + s.name = 'just_audio' + s.version = '0.0.1' + s.summary = 'A new flutter plugin project.' + s.description = <<-DESC +A new flutter plugin project. + DESC + s.homepage = 'http://example.com' + s.license = { :file => '../LICENSE' } + s.author = { 'Your Company' => 'email@example.com' } + s.source = { :path => '.' 
} + s.source_files = 'Classes/**/*' + s.public_header_files = 'Classes/**/*.h' + s.dependency 'Flutter' + s.platform = :ios, '8.0' + s.pod_target_xcconfig = { 'DEFINES_MODULE' => 'YES', 'VALID_ARCHS[sdk=iphonesimulator*]' => 'x86_64' } +end + diff --git a/just_audio/lib/just_audio.dart b/just_audio/lib/just_audio.dart new file mode 100644 index 0000000..15f1b6f --- /dev/null +++ b/just_audio/lib/just_audio.dart @@ -0,0 +1,1293 @@ +import 'dart:async'; +import 'dart:io'; + +import 'package:flutter/foundation.dart'; +import 'package:flutter/services.dart'; +import 'package:flutter/widgets.dart'; +import 'package:path/path.dart' as p; +import 'package:path_provider/path_provider.dart'; +import 'package:rxdart/rxdart.dart'; +import 'package:uuid/uuid.dart'; + +final _uuid = Uuid(); + +/// An object to manage playing audio from a URL, a locale file or an asset. +/// +/// ``` +/// final player = AudioPlayer(); +/// await player.setUrl('https://foo.com/bar.mp3'); +/// player.play(); +/// await player.pause(); +/// await player.setClip(start: Duration(seconds: 10), end: Duration(seconds: 20)); +/// await player.play(); +/// await player.setUrl('https://foo.com/baz.mp3'); +/// await player.seek(Duration(minutes: 5)); +/// player.play(); +/// await player.pause(); +/// await player.dispose(); +/// ``` +/// +/// You must call [dispose] to release the resources used by this player, +/// including any temporary files created to cache assets. +class AudioPlayer { + static final _mainChannel = MethodChannel('com.ryanheise.just_audio.methods'); + + static Future _init(String id) async { + await _mainChannel.invokeMethod('init', [id]); + return MethodChannel('com.ryanheise.just_audio.methods.$id'); + } + + /// Configure the audio session category on iOS. This method should be called + /// before playing any audio. It has no effect on Android or Flutter for Web. 
+ /// + /// Note that the default category on iOS is [IosCategory.soloAmbient], but + /// for a typical media app, Apple recommends setting this to + /// [IosCategory.playback]. If you don't call this method, `just_audio` will + /// respect any prior category that was already set on your app's audio + /// session and will leave it alone. If it hasn't been previously set, this + /// will be [IosCategory.soloAmbient]. But if another audio plugin in your + /// app has configured a particular category, that will also be left alone. + /// + /// Note: If you use other audio plugins in conjunction with this one, it is + /// possible that each of those audio plugins may override the setting you + /// choose here. (You may consider asking the developers of the other plugins + /// to provide similar configurability so that you have complete control over + /// setting the overall category that you want for your app.) + static Future setIosCategory(IosCategory category) async { + await _mainChannel.invokeMethod('setIosCategory', category.index); + } + + final Future _channel; + final String _id; + _ProxyHttpServer _proxy; + Stream _eventChannelStream; + AudioSource _audioSource; + Map _audioSources = {}; + + PlaybackEvent _playbackEvent; + StreamSubscription _eventChannelStreamSubscription; + final _playbackEventSubject = BehaviorSubject(); + Future _durationFuture; + final _durationSubject = BehaviorSubject(); + final _processingStateSubject = BehaviorSubject(); + final _playingSubject = BehaviorSubject.seeded(false); + final _volumeSubject = BehaviorSubject.seeded(1.0); + final _speedSubject = BehaviorSubject.seeded(1.0); + final _bufferedPositionSubject = BehaviorSubject(); + final _icyMetadataSubject = BehaviorSubject(); + final _playerStateSubject = BehaviorSubject(); + final _currentIndexSubject = BehaviorSubject(); + final _loopModeSubject = BehaviorSubject(); + final _shuffleModeEnabledSubject = BehaviorSubject(); + BehaviorSubject _positionSubject; + bool 
_automaticallyWaitsToMinimizeStalling = true; + + /// Creates an [AudioPlayer]. + factory AudioPlayer() => AudioPlayer._internal(_uuid.v4()); + + AudioPlayer._internal(this._id) : _channel = _init(_id) { + _playbackEvent = PlaybackEvent( + processingState: ProcessingState.none, + updatePosition: Duration.zero, + updateTime: DateTime.now(), + bufferedPosition: Duration.zero, + duration: null, + icyMetadata: null, + currentIndex: null, + ); + _playbackEventSubject.add(_playbackEvent); + _eventChannelStream = EventChannel('com.ryanheise.just_audio.events.$_id') + .receiveBroadcastStream() + .map((data) { + try { + //print("received raw event: $data"); + final duration = (data['duration'] ?? -1) < 0 + ? null + : Duration(milliseconds: data['duration']); + _durationFuture = Future.value(duration); + if (duration != _playbackEvent.duration) { + _durationSubject.add(duration); + } + _playbackEvent = PlaybackEvent( + processingState: ProcessingState.values[data['processingState']], + updatePosition: Duration(milliseconds: data['updatePosition']), + updateTime: DateTime.fromMillisecondsSinceEpoch(data['updateTime']), + bufferedPosition: Duration(milliseconds: data['bufferedPosition']), + duration: duration, + icyMetadata: data['icyMetadata'] == null + ? 
null + : IcyMetadata.fromJson(data['icyMetadata']), + currentIndex: data['currentIndex'], + ); + //print("created event object with state: ${_playbackEvent.state}"); + return _playbackEvent; + } catch (e, stacktrace) { + print("Error parsing event: $e"); + print("$stacktrace"); + rethrow; + } + }); + _eventChannelStreamSubscription = _eventChannelStream.listen( + _playbackEventSubject.add, + onError: _playbackEventSubject.addError, + ); + _processingStateSubject.addStream(playbackEventStream + .map((event) => event.processingState) + .distinct() + .handleError((err, stack) {/* noop */})); + _bufferedPositionSubject.addStream(playbackEventStream + .map((event) => event.bufferedPosition) + .distinct() + .handleError((err, stack) {/* noop */})); + _icyMetadataSubject.addStream(playbackEventStream + .map((event) => event.icyMetadata) + .distinct() + .handleError((err, stack) {/* noop */})); + _currentIndexSubject.addStream(playbackEventStream + .map((event) => event.currentIndex) + .distinct() + .handleError((err, stack) {/* noop */})); + _playerStateSubject.addStream( + Rx.combineLatest2( + playingStream, + playbackEventStream, + (playing, event) => PlayerState(playing, event.processingState)) + .distinct() + .handleError((err, stack) {/* noop */})); + } + + /// The latest [PlaybackEvent]. + PlaybackEvent get playbackEvent => _playbackEvent; + + /// A stream of [PlaybackEvent]s. + Stream get playbackEventStream => _playbackEventSubject.stream; + + /// The duration of the current audio or null if unknown. + Duration get duration => _playbackEvent.duration; + + /// The duration of the current audio or null if unknown. + Future get durationFuture => _durationFuture; + + /// The duration of the current audio. + Stream get durationStream => _durationSubject.stream; + + /// The current [ProcessingState]. + ProcessingState get processingState => _playbackEvent.processingState; + + /// A stream of [ProcessingState]s. 
+ Stream get processingStateStream => + _processingStateSubject.stream; + + /// Whether the player is playing. + bool get playing => _playingSubject.value; + + /// A stream of changing [playing] states. + Stream get playingStream => _playingSubject.stream; + + /// The current volume of the player. + double get volume => _volumeSubject.value; + + /// A stream of [volume] changes. + Stream get volumeStream => _volumeSubject.stream; + + /// The current speed of the player. + double get speed => _speedSubject.value; + + /// A stream of current speed values. + Stream get speedStream => _speedSubject.stream; + + /// The position up to which buffered audio is available. + Duration get bufferedPosition => _bufferedPositionSubject.value; + + /// A stream of buffered positions. + Stream get bufferedPositionStream => + _bufferedPositionSubject.stream; + + /// The latest ICY metadata received through the audio source. + IcyMetadata get icyMetadata => _playbackEvent.icyMetadata; + + /// A stream of ICY metadata received through the audio source. + Stream get icyMetadataStream => _icyMetadataSubject.stream; + + /// The current player state containing only the processing and playing + /// states. + PlayerState get playerState => _playerStateSubject.value; + + /// A stream of [PlayerState]s. + Stream get playerStateStream => _playerStateSubject.stream; + + /// The index of the current item. + int get currentIndex => _currentIndexSubject.value; + + /// A stream broadcasting the current item. + Stream get currentIndexStream => _currentIndexSubject.stream; + + /// Whether there is another item after the current index. + bool get hasNext => + _audioSource != null && + currentIndex != null && + currentIndex + 1 < _audioSource.sequence.length; + + /// Whether there is another item before the current index. + bool get hasPrevious => + _audioSource != null && currentIndex != null && currentIndex > 0; + + /// The current loop mode. 
+ LoopMode get loopMode => _loopModeSubject.value; + + /// A stream of [LoopMode]s. + Stream get loopModeStream => _loopModeSubject.stream; + + /// Whether shuffle mode is currently enabled. + bool get shuffleModeEnabled => _shuffleModeEnabledSubject.value; + + /// A stream of the shuffle mode status. + Stream get shuffleModeEnabledStream => + _shuffleModeEnabledSubject.stream; + + /// Whether the player should automatically delay playback in order to + /// minimize stalling. (iOS 10.0 or later only) + bool get automaticallyWaitsToMinimizeStalling => + _automaticallyWaitsToMinimizeStalling; + + /// The current position of the player. + Duration get position { + if (playing && processingState == ProcessingState.ready) { + final result = _playbackEvent.updatePosition + + (DateTime.now().difference(_playbackEvent.updateTime)) * speed; + return _playbackEvent.duration == null || + result <= _playbackEvent.duration + ? result + : _playbackEvent.duration; + } else { + return _playbackEvent.updatePosition; + } + } + + /// A stream tracking the current position of this player, suitable for + /// animating a seek bar. To ensure a smooth animation, this stream emits + /// values more frequently on short items where the seek bar moves more + /// quickly, and less frequenly on long items where the seek bar moves more + /// slowly. The interval between each update will be no quicker than once + /// every 16ms and no slower than once every 200ms. + /// + /// See [createPositionStream] for more control over the stream parameters. + Stream get positionStream { + if (_positionSubject == null) { + _positionSubject = BehaviorSubject(); + _positionSubject.addStream(createPositionStream( + steps: 800, + minPeriod: Duration(milliseconds: 16), + maxPeriod: Duration(milliseconds: 200))); + } + return _positionSubject.stream; + } + + /// Creates a new stream periodically tracking the current position of this + /// player. 
The stream will aim to emit [steps] position updates from the + /// beginning to the end of the current audio source, at intervals of + /// [duration] / [steps]. This interval will be clipped between [minPeriod] + /// and [maxPeriod]. This stream will not emit values while audio playback is + /// paused or stalled. + /// + /// Note: each time this method is called, a new stream is created. If you + /// intend to use this stream multiple times, you should hold a reference to + /// the returned stream and close it once you are done. + Stream createPositionStream({ + int steps = 800, + Duration minPeriod = const Duration(milliseconds: 200), + Duration maxPeriod = const Duration(milliseconds: 200), + }) { + assert(minPeriod <= maxPeriod); + assert(minPeriod > Duration.zero); + Duration duration() => this.duration ?? Duration.zero; + Duration step() { + var s = duration() ~/ steps; + if (s < minPeriod) s = minPeriod; + if (s > maxPeriod) s = maxPeriod; + return s; + } + + StreamController controller = StreamController.broadcast(); + Timer currentTimer; + StreamSubscription durationSubscription; + StreamSubscription playbackEventSubscription; + void yieldPosition(Timer timer) { + if (controller.isClosed) { + timer.cancel(); + durationSubscription?.cancel(); + playbackEventSubscription?.cancel(); + return; + } + if (_durationSubject.isClosed) { + timer.cancel(); + durationSubscription?.cancel(); + playbackEventSubscription?.cancel(); + controller.close(); + return; + } + controller.add(position); + } + + currentTimer = Timer.periodic(step(), yieldPosition); + durationSubscription = durationStream.listen((duration) { + currentTimer.cancel(); + currentTimer = Timer.periodic(step(), yieldPosition); + }); + playbackEventSubscription = playbackEventStream.listen((event) { + controller.add(position); + }); + return controller.stream.distinct(); + } + + /// Convenience method to load audio from a URL with optional headers, + /// equivalent to: + /// + /// ``` + /// 
load(AudioSource.uri(Uri.parse(url), headers: headers)); + /// ``` + /// + /// + Future setUrl(String url, {Map headers}) => + load(AudioSource.uri(Uri.parse(url), headers: headers)); + + /// Convenience method to load audio from a file, equivalent to: + /// + /// ``` + /// load(AudioSource.uri(Uri.file(filePath))); + /// ``` + Future setFilePath(String filePath) => + load(AudioSource.uri(Uri.file(filePath))); + + /// Convenience method to load audio from an asset, equivalent to: + /// + /// ``` + /// load(AudioSource.uri(Uri.parse('asset://$filePath'))); + /// ``` + Future setAsset(String assetPath) => + load(AudioSource.uri(Uri.parse('asset://$assetPath'))); + + /// Loads audio from an [AudioSource] and completes when the audio is ready + /// to play with the duration of that audio, or null if the duration is unknown. + /// + /// This method throws: + /// + /// * [PlayerException] if the audio source was unable to be loaded. + /// * [PlayerInterruptedException] if another call to [load] happened before + /// this call completed. + Future load(AudioSource source) async { + try { + _audioSource = source; + final duration = await _load(source); + // Wait for loading state to pass. + await processingStateStream + .firstWhere((state) => state != ProcessingState.loading); + return duration; + } catch (e) { + _audioSource = null; + _audioSources.clear(); + rethrow; + } + } + + _registerAudioSource(AudioSource source) { + _audioSources[source._id] = source; + } + + Future _load(AudioSource source) async { + try { + if (!kIsWeb && source._requiresHeaders) { + if (_proxy == null) { + _proxy = _ProxyHttpServer(); + await _proxy.start(); + } + } + await source._setup(this); + _durationFuture = _invokeMethod('load', [source.toJson()]).then( + (ms) => (ms == null || ms < 0) ? 
null : Duration(milliseconds: ms)); + final duration = await _durationFuture; + _durationSubject.add(duration); + return duration; + } on PlatformException catch (e) { + try { + throw PlayerException(int.parse(e.code), e.message); + } on FormatException catch (_) { + if (e.code == 'abort') { + throw PlayerInterruptedException(e.message); + } else { + throw PlayerException(9999999, e.message); + } + } + } + } + + /// Clips the current [AudioSource] to the given [start] and [end] + /// timestamps. If [start] is null, it will be reset to the start of the + /// original [AudioSource]. If [end] is null, it will be reset to the end of + /// the original [AudioSource]. This method cannot be called from the + /// [AudioPlaybackState.none] state. + Future setClip({Duration start, Duration end}) async { + final duration = await _load(start == null && end == null + ? _audioSource + : ClippingAudioSource( + child: _audioSource, + start: start, + end: end, + )); + // Wait for loading state to pass. + await processingStateStream + .firstWhere((state) => state != ProcessingState.loading); + return duration; + } + + /// Tells the player to play audio as soon as an audio source is loaded and + /// ready to play. The [Future] returned by this method completes when the + /// playback completes or is paused or stopped. If the player is already + /// playing, this method completes immediately. + /// + /// This method causes [playing] to become true, and it will remain true + /// until [pause] or [stop] is called. This means that if playback completes, + /// and then you [seek] to an earlier position in the audio, playback will + /// continue playing from that position. If you instead wish to [pause] or + /// [stop] playback on completion, you can call either method as soon as + /// [processingState] becomes [ProcessingState.completed] by listening to + /// [processingStateStream]. 
+ Future play() async { + if (playing) return; + _playingSubject.add(true); + await _invokeMethod('play'); + } + + /// Pauses the currently playing media. This method does nothing if + /// ![playing]. + Future pause() async { + if (!playing) return; + // Update local state immediately so that queries aren't surprised. + _playbackEvent = _playbackEvent.copyWith( + updatePosition: position, + updateTime: DateTime.now(), + ); + _playbackEventSubject.add(_playbackEvent); + _playingSubject.add(false); + // TODO: perhaps modify platform side to ensure new state is broadcast + // before this method returns. + await _invokeMethod('pause'); + } + + /// Convenience method to pause and seek to zero. + Future stop() async { + await pause(); + await seek(Duration.zero); + } + + /// Sets the volume of this player, where 1.0 is normal volume. + Future setVolume(final double volume) async { + _volumeSubject.add(volume); + await _invokeMethod('setVolume', [volume]); + } + + /// Sets the playback speed of this player, where 1.0 is normal speed. + Future setSpeed(final double speed) async { + _playbackEvent = _playbackEvent.copyWith( + updatePosition: position, + updateTime: DateTime.now(), + ); + _playbackEventSubject.add(_playbackEvent); + _speedSubject.add(speed); + await _invokeMethod('setSpeed', [speed]); + } + + /// Sets the [LoopMode]. The gapless looping support is as follows: + /// + /// * Android: supported + /// * iOS/macOS: not supported, however, gapless looping can be achieved by + /// using [LoopingAudioSource]. + /// * Web: not supported + Future setLoopMode(LoopMode mode) async { + _loopModeSubject.add(mode); + await _invokeMethod('setLoopMode', [mode.index]); + } + + /// Sets whether shuffle mode is enabled. + Future setShuffleModeEnabled(bool enabled) async { + _shuffleModeEnabledSubject.add(enabled); + await _invokeMethod('setShuffleModeEnabled', [enabled]); + } + + /// Sets automaticallyWaitsToMinimizeStalling for AVPlayer in iOS 10.0 or later, defaults to true. 
+ /// Has no effect on Android clients + Future setAutomaticallyWaitsToMinimizeStalling( + final bool automaticallyWaitsToMinimizeStalling) async { + _automaticallyWaitsToMinimizeStalling = + automaticallyWaitsToMinimizeStalling; + await _invokeMethod('setAutomaticallyWaitsToMinimizeStalling', + [automaticallyWaitsToMinimizeStalling]); + } + + /// Seeks to a particular [position]. If a composition of multiple + /// [AudioSource]s has been loaded, you may also specify [index] to seek to a + /// particular item within that sequence. This method has no effect unless + /// an audio source has been loaded. + Future seek(final Duration position, {int index}) async { + switch (processingState) { + case ProcessingState.none: + case ProcessingState.loading: + return; + default: + _playbackEvent = _playbackEvent.copyWith( + updatePosition: position, + updateTime: DateTime.now(), + ); + _playbackEventSubject.add(_playbackEvent); + await _invokeMethod('seek', [position?.inMilliseconds, index]); + } + } + + /// Seek to the next item. + Future seekToNext() async { + if (hasNext) { + await seek(Duration.zero, index: currentIndex + 1); + } + } + + /// Seek to the previous item. + Future seekToPrevious() async { + if (hasPrevious) { + await seek(Duration.zero, index: currentIndex - 1); + } + } + + /// Release all resources associated with this player. You must invoke this + /// after you are done with the player. 
+ Future dispose() async { + await _invokeMethod('dispose'); + _audioSource = null; + _audioSources.values.forEach((s) => s._dispose()); + _audioSources.clear(); + _proxy?.stop(); + await _durationSubject.close(); + await _eventChannelStreamSubscription.cancel(); + await _loopModeSubject.close(); + await _shuffleModeEnabledSubject.close(); + await _playingSubject.close(); + await _volumeSubject.close(); + await _speedSubject.close(); + if (_positionSubject != null) { + await _positionSubject.close(); + } + } + + Future _invokeMethod(String method, [dynamic args]) async => + (await _channel).invokeMethod(method, args); +} + +/// Captures the details of any error accessing, loading or playing an audio +/// source, including an invalid or inaccessible URL, or an audio encoding that +/// could not be understood. +class PlayerException { + /// On iOS and macOS, maps to `NSError.code`. On Android, maps to + /// `ExoPlaybackException.type`. On Web, maps to `MediaError.code`. + final int code; + + /// On iOS and macOS, maps to `NSError.localizedDescription`. On Android, + /// maps to `ExoPlaybackException.getMessage()`. On Web, a generic message + /// is provided. + final String message; + + PlayerException(this.code, this.message); + + @override + String toString() => "($code) $message"; +} + +/// An error that occurs when one operation on the player has been interrupted +/// (e.g. by another simultaneous operation). +class PlayerInterruptedException { + final String message; + + PlayerInterruptedException(this.message); + + @override + String toString() => "$message"; +} + +/// Encapsulates the playback state and current position of the player. +class PlaybackEvent { + /// The current processing state. + final ProcessingState processingState; + + /// When the last time a position discontinuity happened, as measured in time + /// since the epoch. + final DateTime updateTime; + + /// The position at [updateTime]. 
+ final Duration updatePosition; + + /// The buffer position. + final Duration bufferedPosition; + + /// The media duration, or null if unknown. + final Duration duration; + + /// The latest ICY metadata received through the audio stream. + final IcyMetadata icyMetadata; + + /// The index of the currently playing item. + final int currentIndex; + + PlaybackEvent({ + @required this.processingState, + @required this.updateTime, + @required this.updatePosition, + @required this.bufferedPosition, + @required this.duration, + @required this.icyMetadata, + @required this.currentIndex, + }); + + PlaybackEvent copyWith({ + ProcessingState processingState, + DateTime updateTime, + Duration updatePosition, + Duration bufferedPosition, + double speed, + Duration duration, + IcyMetadata icyMetadata, + UriAudioSource currentIndex, + }) => + PlaybackEvent( + processingState: processingState ?? this.processingState, + updateTime: updateTime ?? this.updateTime, + updatePosition: updatePosition ?? this.updatePosition, + bufferedPosition: bufferedPosition ?? this.bufferedPosition, + duration: duration ?? this.duration, + icyMetadata: icyMetadata ?? this.icyMetadata, + currentIndex: currentIndex ?? this.currentIndex, + ); + + @override + String toString() => + "{processingState=$processingState, updateTime=$updateTime, updatePosition=$updatePosition}"; +} + +/// Enumerates the different processing states of a player. +enum ProcessingState { + /// The player has not loaded an [AudioSource]. + none, + + /// The player is loading an [AudioSource]. + loading, + + /// The player is buffering audio and unable to play. + buffering, + + /// The player is has enough audio buffered and is able to play. + ready, + + /// The player has reached the end of the audio. + completed, +} + +/// Encapsulates the playing and processing states. 
These two states vary +/// orthogonally, and so if [processingState] is [ProcessingState.buffering], +/// you can check [playing] to determine whether the buffering occurred while +/// the player was playing or while the player was paused. +class PlayerState { + /// Whether the player will play when [processingState] is + /// [ProcessingState.ready]. + final bool playing; + + /// The current processing state of the player. + final ProcessingState processingState; + + PlayerState(this.playing, this.processingState); + + @override + String toString() => 'playing=$playing,processingState=$processingState'; + + @override + int get hashCode => toString().hashCode; + + @override + bool operator ==(dynamic other) => + other is PlayerState && + other?.playing == playing && + other?.processingState == processingState; +} + +class IcyInfo { + final String title; + final String url; + + IcyInfo({@required this.title, @required this.url}); + + IcyInfo.fromJson(Map json) : this(title: json['title'], url: json['url']); + + @override + String toString() => 'title=$title,url=$url'; + + @override + int get hashCode => toString().hashCode; + + @override + bool operator ==(dynamic other) => + other is IcyInfo && other?.toString() == toString(); +} + +class IcyHeaders { + final int bitrate; + final String genre; + final String name; + final int metadataInterval; + final String url; + final bool isPublic; + + IcyHeaders({ + @required this.bitrate, + @required this.genre, + @required this.name, + @required this.metadataInterval, + @required this.url, + @required this.isPublic, + }); + + IcyHeaders.fromJson(Map json) + : this( + bitrate: json['bitrate'], + genre: json['genre'], + name: json['name'], + metadataInterval: json['metadataInterval'], + url: json['url'], + isPublic: json['isPublic'], + ); + + @override + String toString() => + 'bitrate=$bitrate,genre=$genre,name=$name,metadataInterval=$metadataInterval,url=$url,isPublic=$isPublic'; + + @override + int get hashCode => 
toString().hashCode; + + @override + bool operator ==(dynamic other) => + other is IcyHeaders && other?.toString() == toString(); +} + +class IcyMetadata { + final IcyInfo info; + final IcyHeaders headers; + + IcyMetadata({@required this.info, @required this.headers}); + + IcyMetadata.fromJson(Map json) + : this(info: json['info'], headers: json['headers']); + + @override + int get hashCode => info.hashCode ^ headers.hashCode; + + @override + bool operator ==(dynamic other) => + other is IcyMetadata && other?.info == info && other?.headers == headers; +} + +/// The audio session categories on iOS, to be used with +/// [AudioPlayer.setIosCategory]. +enum IosCategory { + ambient, + soloAmbient, + playback, + record, + playAndRecord, + multiRoute, +} + +/// A local proxy HTTP server for making remote GET requests with headers. +/// +/// TODO: Recursively attach headers to items in playlists like m3u8. +class _ProxyHttpServer { + HttpServer _server; + + /// Maps request keys to [_ProxyRequest]s. + final Map _uriMap = {}; + + /// The port this server is bound to on localhost. This is set only after + /// [start] has completed. + int get port => _server.port; + + /// Associate headers with a URL. This may be called only after [start] has + /// completed. + Uri addUrl(Uri url, Map headers) { + final path = _requestKey(url); + _uriMap[path] = _ProxyRequest(url, headers); + return url.replace( + scheme: 'http', + host: InternetAddress.loopbackIPv4.address, + port: port, + ); + } + + /// A unique key for each request that can be processed by this proxy, + /// made up of the URL path and query string. It is not possible to + /// simultaneously track requests that have the same URL path and query + /// but differ in other respects such as the port or headers. + String _requestKey(Uri uri) => '${uri.path}?${uri.query}'; + + /// Starts the server. 
+ Future start() async { + _server = await HttpServer.bind(InternetAddress.loopbackIPv4, 0); + _server.listen((request) async { + if (request.method == 'GET') { + final path = _requestKey(request.uri); + final proxyRequest = _uriMap[path]; + final originRequest = await HttpClient().getUrl(proxyRequest.uri); + + // Rewrite request headers + final host = originRequest.headers.value('host'); + originRequest.headers.clear(); + request.headers.forEach((name, value) { + originRequest.headers.set(name, value); + }); + for (var name in proxyRequest.headers.keys) { + originRequest.headers.set(name, proxyRequest.headers[name]); + } + originRequest.headers.set('host', host); + + // Try to make normal request + try { + final originResponse = await originRequest.close(); + + request.response.headers.clear(); + originResponse.headers.forEach((name, value) { + request.response.headers.set(name, value); + }); + + // Pipe response + await originResponse.pipe(request.response); + } on HttpException { + // We likely are dealing with a streaming protocol + if (proxyRequest.uri.scheme == 'http') { + // Try parsing HTTP 0.9 response + //request.response.headers.clear(); + final socket = await Socket.connect( + proxyRequest.uri.host, proxyRequest.uri.port); + final clientSocket = + await request.response.detachSocket(writeHeaders: false); + Completer done = Completer(); + socket.listen( + clientSocket.add, + onDone: () async { + await clientSocket.flush(); + socket.close(); + clientSocket.close(); + done.complete(); + }, + ); + // Rewrite headers + final headers = {}; + request.headers.forEach((name, value) { + if (name.toLowerCase() != 'host') { + headers[name] = value.join(","); + } + }); + for (var name in proxyRequest.headers.keys) { + headers[name] = proxyRequest.headers[name]; + } + socket.write("GET ${proxyRequest.uri.path} HTTP/1.1\n"); + if (host != null) { + socket.write("Host: $host\n"); + } + for (var name in headers.keys) { + socket.write("$name: ${headers[name]}\n"); + } + 
socket.write("\n"); + await socket.flush(); + await done.future; + } + } + } + }); + } + + /// Stops the server + Future stop() => _server.close(); +} + +/// A request for a URL and headers made by a [_ProxyHttpServer]. +class _ProxyRequest { + final Uri uri; + final Map headers; + + _ProxyRequest(this.uri, this.headers); +} + +/// Specifies a source of audio to be played. Audio sources are composable +/// using the subclasses of this class. The same [AudioSource] instance should +/// not be used simultaneously by more than one [AudioPlayer]. +abstract class AudioSource { + final String _id; + AudioPlayer _player; + + /// Creates an [AudioSource] from a [Uri] with optional headers by + /// attempting to guess the type of stream. On iOS, this uses Apple's SDK to + /// automatically detect the stream type. On Android, the type of stream will + /// be guessed from the extension. + /// + /// If you are loading DASH or HLS streams that do not have standard "mpd" or + /// "m3u8" extensions in their URIs, this method will fail to detect the + /// stream type on Android. If you know in advance what type of audio stream + /// it is, you should instantiate [DashAudioSource] or [HlsAudioSource] + /// directly. 
+ static AudioSource uri(Uri uri, {Map headers, Object tag}) { + bool hasExtension(Uri uri, String extension) => + uri.path.toLowerCase().endsWith('.$extension') || + uri.fragment.toLowerCase().endsWith('.$extension'); + if (hasExtension(uri, 'mpd')) { + return DashAudioSource(uri, headers: headers, tag: tag); + } else if (hasExtension(uri, 'm3u8')) { + return HlsAudioSource(uri, headers: headers, tag: tag); + } else { + return ProgressiveAudioSource(uri, headers: headers, tag: tag); + } + } + + static AudioSource fromJson(Map json) { + switch (json['type']) { + case 'progressive': + return ProgressiveAudioSource(Uri.parse(json['uri']), + headers: json['headers']); + case "dash": + return DashAudioSource(Uri.parse(json['uri']), + headers: json['headers']); + case "hls": + return HlsAudioSource(Uri.parse(json['uri']), headers: json['headers']); + case "concatenating": + return ConcatenatingAudioSource( + children: (json['audioSources'] as List) + .map((s) => AudioSource.fromJson(s)) + .toList()); + case "clipping": + return ClippingAudioSource( + child: AudioSource.fromJson(json['audioSource']), + start: Duration(milliseconds: json['start']), + end: Duration(milliseconds: json['end'])); + default: + throw Exception("Unknown AudioSource type: " + json['type']); + } + } + + AudioSource() : _id = _uuid.v4(); + + @mustCallSuper + Future _setup(AudioPlayer player) async { + _player = player; + player._registerAudioSource(this); + } + + @mustCallSuper + void _dispose() { + _player = null; + } + + bool get _requiresHeaders; + + List get sequence; + + Map toJson(); + + @override + int get hashCode => _id.hashCode; + + @override + bool operator ==(dynamic other) => other is AudioSource && other._id == _id; +} + +/// An [AudioSource] that can appear in a sequence. 
+abstract class IndexedAudioSource extends AudioSource { + final Object tag; + + IndexedAudioSource(this.tag); + + @override + List get sequence => [this]; +} + +/// An abstract class representing audio sources that are loaded from a URI. +abstract class UriAudioSource extends IndexedAudioSource { + final Uri uri; + final Map headers; + final String _type; + Uri _overrideUri; + File _cacheFile; + + UriAudioSource(this.uri, {this.headers, Object tag, @required String type}) + : _type = type, + super(tag); + + @override + Future _setup(AudioPlayer player) async { + await super._setup(player); + if (uri.scheme == 'asset') { + _overrideUri = Uri.file((await _loadAsset(uri.path)).path); + } else if (headers != null) { + _overrideUri = player._proxy.addUrl(uri, headers); + } + } + + @override + void _dispose() { + if (_cacheFile?.existsSync() == true) { + _cacheFile?.deleteSync(); + } + super._dispose(); + } + + Future _loadAsset(String assetPath) async { + final file = await _getCacheFile(assetPath); + this._cacheFile = file; + if (!file.existsSync()) { + await file.create(recursive: true); + await file.writeAsBytes( + (await rootBundle.load(assetPath)).buffer.asUint8List()); + } + return file; + } + + /// Get file for caching asset media with proper extension + Future _getCacheFile(final String assetPath) async => File(p.join( + (await getTemporaryDirectory()).path, + 'just_audio_asset_cache', + '${_player._id}_$_id${p.extension(assetPath)}')); + + @override + bool get _requiresHeaders => headers != null; + + @override + Map toJson() => { + 'id': _id, + 'type': _type, + 'uri': (_overrideUri ?? uri).toString(), + 'headers': headers, + }; +} + +/// An [AudioSource] representing a regular media file such asn an MP3 or M4A +/// file. The following URI schemes are supported: +/// +/// * file: loads from a local file (provided you give your app permission to +/// access that file). +/// * asset: loads from a Flutter asset (not supported on Web). 
+/// * http(s): loads from an HTTP(S) resource.
+///
+/// On platforms except for the web, the supplied [headers] will be passed with
+/// the HTTP(S) request.
+class ProgressiveAudioSource extends UriAudioSource {
+  ProgressiveAudioSource(Uri uri, {Map headers, Object tag})
+      : super(uri, headers: headers, tag: tag, type: 'progressive');
+}
+
+/// An [AudioSource] representing a DASH stream.
+///
+/// On platforms except for the web, the supplied [headers] will be passed with
+/// the HTTP(S) request. Currently headers are not applied recursively.
+class DashAudioSource extends UriAudioSource {
+  DashAudioSource(Uri uri, {Map headers, Object tag})
+      : super(uri, headers: headers, tag: tag, type: 'dash');
+}
+
+/// An [AudioSource] representing an HLS stream.
+///
+/// On platforms except for the web, the supplied [headers] will be passed with
+/// the HTTP(S) request. Currently headers are not applied recursively.
+class HlsAudioSource extends UriAudioSource {
+  HlsAudioSource(Uri uri, {Map headers, Object tag})
+      : super(uri, headers: headers, tag: tag, type: 'hls');
+}
+
+/// An [AudioSource] representing a concatenation of multiple audio sources to
+/// be played in succession. This can be used to create playlists. Playback
+/// between items will be gapless on Android, iOS and macOS, while there will
+/// be a slight gap on Web.
+///
+/// (Untested) Audio sources can be dynamically added, removed and reordered
+/// while the audio is playing.
+class ConcatenatingAudioSource extends AudioSource {
+  final List children;
+  final bool useLazyPreparation;
+
+  ConcatenatingAudioSource({
+    @required this.children,
+    this.useLazyPreparation = false,
+  });
+
+  @override
+  Future _setup(AudioPlayer player) async {
+    await super._setup(player);
+    for (var source in children) {
+      await source._setup(player);
+    }
+  }
+
+  /// (Untested) Appends an [AudioSource].
+  Future add(AudioSource audioSource) async {
+    children.add(audioSource);
+    if (_player != null) {
+      await _player
+          ._invokeMethod('concatenating.add', [_id, audioSource.toJson()]);
+    }
+  }
+
+  /// (Untested) Inserts an [AudioSource] at [index].
+  Future insert(int index, AudioSource audioSource) async {
+    children.insert(index, audioSource);
+    if (_player != null) {
+      await _player._invokeMethod(
+          'concatenating.insert', [_id, index, audioSource.toJson()]);
+    }
+  }
+
+  /// (Untested) Appends multiple [AudioSource]s.
+  Future addAll(List children) async {
+    this.children.addAll(children);
+    if (_player != null) {
+      await _player._invokeMethod('concatenating.addAll',
+          [_id, children.map((s) => s.toJson()).toList()]);
+    }
+  }
+
+  /// (Untested) Insert multiple [AudioSource]s at [index].
+  Future insertAll(int index, List children) async {
+    this.children.insertAll(index, children);
+    if (_player != null) {
+      await _player._invokeMethod('concatenating.insertAll',
+          [_id, index, children.map((s) => s.toJson()).toList()]);
+    }
+  }
+
+  /// (Untested) Dynamically remove an [AudioSource] at [index] after this
+  /// [ConcatenatingAudioSource] has already been loaded.
+  Future removeAt(int index) async {
+    children.removeAt(index);
+    if (_player != null) {
+      await _player._invokeMethod('concatenating.removeAt', [_id, index]);
+    }
+  }
+
+  /// (Untested) Removes a range of [AudioSource]s from index [start] inclusive
+  /// to [end] exclusive.
+  Future removeRange(int start, int end) async {
+    children.removeRange(start, end);
+    if (_player != null) {
+      await _player
+          ._invokeMethod('concatenating.removeRange', [_id, start, end]);
+    }
+  }
+
+  /// (Untested) Moves an [AudioSource] from [currentIndex] to [newIndex].
+ Future move(int currentIndex, int newIndex) async { + children.insert(newIndex, children.removeAt(currentIndex)); + if (_player != null) { + await _player + ._invokeMethod('concatenating.move', [_id, currentIndex, newIndex]); + } + } + + /// (Untested) Removes all [AudioSources]. + Future clear() async { + children.clear(); + if (_player != null) { + await _player._invokeMethod('concatenating.clear', [_id]); + } + } + + /// The number of [AudioSource]s. + int get length => children.length; + + operator [](int index) => children[index]; + + @override + List get sequence => + children.expand((s) => s.sequence).toList(); + + @override + bool get _requiresHeaders => + children.any((source) => source._requiresHeaders); + + @override + Map toJson() => { + 'id': _id, + 'type': 'concatenating', + 'audioSources': children.map((source) => source.toJson()).toList(), + 'useLazyPreparation': useLazyPreparation, + }; +} + +/// An [AudioSource] that clips the audio of a [UriAudioSource] between a +/// certain start and end time. +class ClippingAudioSource extends IndexedAudioSource { + final UriAudioSource child; + final Duration start; + final Duration end; + + /// Creates an audio source that clips [child] to the range [start]..[end], + /// where [start] and [end] default to the beginning and end of the original + /// [child] source. + ClippingAudioSource({ + @required this.child, + this.start, + this.end, + Object tag, + }) : super(tag); + + @override + Future _setup(AudioPlayer player) async { + await super._setup(player); + await child._setup(player); + } + + @override + bool get _requiresHeaders => child._requiresHeaders; + + @override + Map toJson() => { + 'id': _id, + 'type': 'clipping', + 'audioSource': child.toJson(), + 'start': start?.inMilliseconds, + 'end': end?.inMilliseconds, + }; +} + +// An [AudioSource] that loops a nested [AudioSource] a finite number of times. +// NOTE: this can be inefficient when using a large loop count. 
If you wish to +// loop an infinite number of times, use [AudioPlayer.setLoopMode]. +// +// On iOS and macOS, note that [LoopingAudioSource] will provide gapless +// playback while [AudioPlayer.setLoopMode] will not. (This will be supported +// in a future release.) +class LoopingAudioSource extends AudioSource { + AudioSource child; + final int count; + + LoopingAudioSource({ + @required this.child, + this.count, + }) : super(); + + @override + List get sequence => + List.generate(count, (i) => child).expand((s) => s.sequence).toList(); + + @override + bool get _requiresHeaders => child._requiresHeaders; + + @override + Map toJson() => { + 'id': _id, + 'type': 'looping', + 'audioSource': child.toJson(), + 'count': count, + }; +} + +enum LoopMode { off, one, all } diff --git a/just_audio/lib/just_audio_web.dart b/just_audio/lib/just_audio_web.dart new file mode 100644 index 0000000..1df5735 --- /dev/null +++ b/just_audio/lib/just_audio_web.dart @@ -0,0 +1,957 @@ +import 'dart:async'; +import 'dart:html'; +import 'dart:math'; + +import 'package:flutter/services.dart'; +import 'package:flutter/widgets.dart'; +import 'package:flutter_web_plugins/flutter_web_plugins.dart'; +import 'package:just_audio/just_audio.dart'; + +final Random _random = Random(); + +class JustAudioPlugin { + static void registerWith(Registrar registrar) { + final MethodChannel channel = MethodChannel( + 'com.ryanheise.just_audio.methods', + const StandardMethodCodec(), + registrar.messenger); + final JustAudioPlugin instance = JustAudioPlugin(registrar); + channel.setMethodCallHandler(instance.handleMethodCall); + } + + final Registrar registrar; + + JustAudioPlugin(this.registrar); + + Future handleMethodCall(MethodCall call) async { + switch (call.method) { + case 'init': + final String id = call.arguments[0]; + new Html5AudioPlayer(id: id, registrar: registrar); + return null; + case 'setIosCategory': + return null; + default: + throw PlatformException(code: 'Unimplemented'); + } + } +} + 
+abstract class JustAudioPlayer { + final String id; + final Registrar registrar; + final MethodChannel methodChannel; + final PluginEventChannel eventChannel; + final StreamController eventController = StreamController(); + ProcessingState _processingState = ProcessingState.none; + bool _playing = false; + int _index; + + JustAudioPlayer({@required this.id, @required this.registrar}) + : methodChannel = MethodChannel('com.ryanheise.just_audio.methods.$id', + const StandardMethodCodec(), registrar.messenger), + eventChannel = PluginEventChannel('com.ryanheise.just_audio.events.$id', + const StandardMethodCodec(), registrar.messenger) { + methodChannel.setMethodCallHandler(_methodHandler); + eventChannel.controller = eventController; + } + + Future _methodHandler(MethodCall call) async { + try { + final args = call.arguments; + switch (call.method) { + case 'load': + return await load(args[0]); + case 'play': + return await play(); + case 'pause': + return await pause(); + case 'setVolume': + return await setVolume(args[0]); + case 'setSpeed': + return await setSpeed(args[0]); + case 'setLoopMode': + return await setLoopMode(args[0]); + case 'setShuffleModeEnabled': + return await setShuffleModeEnabled(args[0]); + case 'setAutomaticallyWaitsToMinimizeStalling': + return null; + case 'seek': + return await seek(args[0], args[1]); + case 'dispose': + return dispose(); + case 'concatenating.add': + return await concatenatingAdd(args[0], args[1]); + case "concatenating.insert": + return await concatenatingInsert(args[0], args[1], args[2]); + case "concatenating.addAll": + return await concatenatingAddAll(args[0], args[1]); + case "concatenating.insertAll": + return await concatenatingInsertAll(args[0], args[1], args[2]); + case "concatenating.removeAt": + return await concatenatingRemoveAt(args[0], args[1]); + case "concatenating.removeRange": + return await concatenatingRemoveRange(args[0], args[1], args[2]); + case "concatenating.move": + return await 
concatenatingMove(args[0], args[1], args[2]); + case "concatenating.clear": + return await concatenatingClear(args[0]); + default: + throw PlatformException(code: 'Unimplemented'); + } + } catch (e, stacktrace) { + print("$stacktrace"); + rethrow; + } + } + + Future load(Map source); + + Future play(); + + Future pause(); + + Future setVolume(double volume); + + Future setSpeed(double speed); + + Future setLoopMode(int mode); + + Future setShuffleModeEnabled(bool enabled); + + Future seek(int position, int index); + + @mustCallSuper + void dispose() { + eventController.close(); + } + + Duration getCurrentPosition(); + + Duration getBufferedPosition(); + + Duration getDuration(); + + concatenatingAdd(String playerId, Map source); + + concatenatingInsert(String playerId, int index, Map source); + + concatenatingAddAll(String playerId, List sources); + + concatenatingInsertAll(String playerId, int index, List sources); + + concatenatingRemoveAt(String playerId, int index); + + concatenatingRemoveRange(String playerId, int start, int end); + + concatenatingMove(String playerId, int currentIndex, int newIndex); + + concatenatingClear(String playerId); + + broadcastPlaybackEvent() { + var updateTime = DateTime.now().millisecondsSinceEpoch; + eventController.add({ + 'processingState': _processingState.index, + 'updatePosition': getCurrentPosition()?.inMilliseconds, + 'updateTime': updateTime, + 'bufferedPosition': getBufferedPosition()?.inMilliseconds, + // TODO: Icy Metadata + 'icyMetadata': null, + 'duration': getDuration()?.inMilliseconds, + 'currentIndex': _index, + }); + } + + transition(ProcessingState processingState) { + _processingState = processingState; + broadcastPlaybackEvent(); + } +} + +class Html5AudioPlayer extends JustAudioPlayer { + AudioElement _audioElement = AudioElement(); + Completer _durationCompleter; + AudioSourcePlayer _audioSourcePlayer; + LoopMode _loopMode = LoopMode.off; + bool _shuffleModeEnabled = false; + final Map _audioSourcePlayers = 
{}; + + Html5AudioPlayer({@required String id, @required Registrar registrar}) + : super(id: id, registrar: registrar) { + _audioElement.addEventListener('durationchange', (event) { + _durationCompleter?.complete(); + broadcastPlaybackEvent(); + }); + _audioElement.addEventListener('error', (event) { + _durationCompleter?.completeError(_audioElement.error); + }); + _audioElement.addEventListener('ended', (event) async { + _currentAudioSourcePlayer.complete(); + }); + _audioElement.addEventListener('timeupdate', (event) { + _currentAudioSourcePlayer.timeUpdated(_audioElement.currentTime); + }); + _audioElement.addEventListener('loadstart', (event) { + transition(ProcessingState.buffering); + }); + _audioElement.addEventListener('waiting', (event) { + transition(ProcessingState.buffering); + }); + _audioElement.addEventListener('stalled', (event) { + transition(ProcessingState.buffering); + }); + _audioElement.addEventListener('canplaythrough', (event) { + transition(ProcessingState.ready); + }); + _audioElement.addEventListener('progress', (event) { + broadcastPlaybackEvent(); + }); + } + + List get order { + final sequence = _audioSourcePlayer.sequence; + List order = List(sequence.length); + if (_shuffleModeEnabled) { + order = _audioSourcePlayer.shuffleOrder; + } else { + for (var i = 0; i < order.length; i++) { + order[i] = i; + } + } + return order; + } + + List getInv(List order) { + List orderInv = List(order.length); + for (var i = 0; i < order.length; i++) { + orderInv[order[i]] = i; + } + return orderInv; + } + + onEnded() async { + if (_loopMode == LoopMode.one) { + await seek(0, null); + play(); + } else { + final order = this.order; + final orderInv = getInv(order); + if (orderInv[_index] + 1 < order.length) { + // move to next item + _index = order[orderInv[_index] + 1]; + await _currentAudioSourcePlayer.load(); + // Should always be true... 
+ if (_playing) { + play(); + } + } else { + // reached end of playlist + if (_loopMode == LoopMode.all) { + // Loop back to the beginning + if (order.length == 1) { + await seek(0, null); + play(); + } else { + _index = order[0]; + await _currentAudioSourcePlayer.load(); + // Should always be true... + if (_playing) { + play(); + } + } + } else { + transition(ProcessingState.completed); + } + } + } + } + + // TODO: Improve efficiency. + IndexedAudioSourcePlayer get _currentAudioSourcePlayer => + _audioSourcePlayer != null && _index < _audioSourcePlayer.sequence.length + ? _audioSourcePlayer.sequence[_index] + : null; + + @override + Future load(Map source) async { + _currentAudioSourcePlayer?.pause(); + _audioSourcePlayer = getAudioSource(source); + _index = 0; + if (_shuffleModeEnabled) { + _audioSourcePlayer?.shuffle(0, _index); + } + return (await _currentAudioSourcePlayer.load()).inMilliseconds; + } + + Future loadUri(final Uri uri) async { + transition(ProcessingState.loading); + final src = uri.toString(); + if (src != _audioElement.src) { + _durationCompleter = Completer(); + _audioElement.src = src; + _audioElement.preload = 'auto'; + _audioElement.load(); + try { + await _durationCompleter.future; + } on MediaError catch (e) { + throw PlatformException( + code: "${e.code}", message: "Failed to load URL"); + } finally { + _durationCompleter = null; + } + } + transition(ProcessingState.ready); + final seconds = _audioElement.duration; + return seconds.isFinite + ? 
Duration(milliseconds: (seconds * 1000).toInt()) + : null; + } + + @override + Future play() async { + _playing = true; + await _currentAudioSourcePlayer.play(); + } + + @override + Future pause() async { + _playing = false; + _currentAudioSourcePlayer.pause(); + } + + @override + Future setVolume(double volume) async { + _audioElement.volume = volume; + } + + @override + Future setSpeed(double speed) async { + _audioElement.playbackRate = speed; + } + + @override + Future setLoopMode(int mode) async { + _loopMode = LoopMode.values[mode]; + } + + @override + Future setShuffleModeEnabled(bool enabled) async { + _shuffleModeEnabled = enabled; + if (enabled) { + _audioSourcePlayer?.shuffle(0, _index); + } + } + + @override + Future seek(int position, int newIndex) async { + int index = newIndex ?? _index; + if (index != _index) { + _currentAudioSourcePlayer.pause(); + _index = index; + await _currentAudioSourcePlayer.load(); + await _currentAudioSourcePlayer.seek(position); + if (_playing) { + _currentAudioSourcePlayer.play(); + } + } else { + await _currentAudioSourcePlayer.seek(position); + } + } + + ConcatenatingAudioSourcePlayer _concatenating(String playerId) => + _audioSourcePlayers[playerId] as ConcatenatingAudioSourcePlayer; + + concatenatingAdd(String playerId, Map source) { + final playlist = _concatenating(playerId); + playlist.add(getAudioSource(source)); + } + + concatenatingInsert(String playerId, int index, Map source) { + _concatenating(playerId).insert(index, getAudioSource(source)); + if (index <= _index) { + _index++; + } + } + + concatenatingAddAll(String playerId, List sources) { + _concatenating(playerId).addAll(getAudioSources(sources)); + } + + concatenatingInsertAll(String playerId, int index, List sources) { + _concatenating(playerId).insertAll(index, getAudioSources(sources)); + if (index <= _index) { + _index += sources.length; + } + } + + concatenatingRemoveAt(String playerId, int index) async { + // Pause if removing current item + if 
(_index == index && _playing) { + _currentAudioSourcePlayer.pause(); + } + _concatenating(playerId).removeAt(index); + if (_index == index) { + // Skip backward if there's nothing after this + if (index == _audioSourcePlayer.sequence.length) { + _index--; + } + // Resume playback at the new item (if it exists) + if (_playing && _currentAudioSourcePlayer != null) { + await _currentAudioSourcePlayer.load(); + _currentAudioSourcePlayer.play(); + } + } else if (index < _index) { + // Reflect that the current item has shifted its position + _index--; + } + } + + concatenatingRemoveRange(String playerId, int start, int end) async { + if (_index >= start && _index < end && _playing) { + // Pause if removing current item + _currentAudioSourcePlayer.pause(); + } + _concatenating(playerId).removeRange(start, end); + if (_index >= start && _index < end) { + // Skip backward if there's nothing after this + if (start >= _audioSourcePlayer.sequence.length) { + _index = start - 1; + } else { + _index = start; + } + // Resume playback at the new item (if it exists) + if (_playing && _currentAudioSourcePlayer != null) { + await _currentAudioSourcePlayer.load(); + _currentAudioSourcePlayer.play(); + } + } else if (end <= _index) { + // Reflect that the current item has shifted its position + _index -= (end - start); + } + } + + concatenatingMove(String playerId, int currentIndex, int newIndex) { + _concatenating(playerId).move(currentIndex, newIndex); + if (currentIndex == _index) { + _index = newIndex; + } else if (currentIndex < _index && newIndex >= _index) { + _index--; + } else if (currentIndex > _index && newIndex <= _index) { + _index++; + } + } + + concatenatingClear(String playerId) { + _currentAudioSourcePlayer.pause(); + _concatenating(playerId).clear(); + } + + @override + Duration getCurrentPosition() => _currentAudioSourcePlayer?.position; + + @override + Duration getBufferedPosition() => _currentAudioSourcePlayer?.bufferedPosition; + + @override + Duration 
getDuration() => _currentAudioSourcePlayer?.duration; + + @override + void dispose() { + _currentAudioSourcePlayer?.pause(); + _audioElement.removeAttribute('src'); + _audioElement.load(); + transition(ProcessingState.none); + super.dispose(); + } + + List getAudioSources(List json) => + json.map((s) => getAudioSource(s)).toList(); + + AudioSourcePlayer getAudioSource(Map json) { + final String id = json['id']; + var audioSourcePlayer = _audioSourcePlayers[id]; + if (audioSourcePlayer == null) { + audioSourcePlayer = decodeAudioSource(json); + _audioSourcePlayers[id] = audioSourcePlayer; + } + return audioSourcePlayer; + } + + AudioSourcePlayer decodeAudioSource(Map json) { + try { + switch (json['type']) { + case 'progressive': + return ProgressiveAudioSourcePlayer( + this, json['id'], Uri.parse(json['uri']), json['headers']); + case "dash": + return DashAudioSourcePlayer( + this, json['id'], Uri.parse(json['uri']), json['headers']); + case "hls": + return HlsAudioSourcePlayer( + this, json['id'], Uri.parse(json['uri']), json['headers']); + case "concatenating": + return ConcatenatingAudioSourcePlayer( + this, + json['id'], + getAudioSources(json['audioSources']), + json['useLazyPreparation']); + case "clipping": + return ClippingAudioSourcePlayer( + this, + json['id'], + getAudioSource(json['audioSource']), + Duration(milliseconds: json['start']), + Duration(milliseconds: json['end'])); + case "looping": + return LoopingAudioSourcePlayer(this, json['id'], + getAudioSource(json['audioSource']), json['count']); + default: + throw Exception("Unknown AudioSource type: " + json['type']); + } + } catch (e, stacktrace) { + print("$stacktrace"); + rethrow; + } + } +} + +abstract class AudioSourcePlayer { + Html5AudioPlayer html5AudioPlayer; + final String id; + + AudioSourcePlayer(this.html5AudioPlayer, this.id); + + List get sequence; + + List get shuffleOrder; + + int shuffle(int treeIndex, int currentIndex); +} + +abstract class IndexedAudioSourcePlayer extends 
AudioSourcePlayer { + IndexedAudioSourcePlayer(Html5AudioPlayer html5AudioPlayer, String id) + : super(html5AudioPlayer, id); + + Future load(); + + Future play(); + + Future pause(); + + Future seek(int position); + + Future complete(); + + Future timeUpdated(double seconds) async {} + + Duration get duration; + + Duration get position; + + Duration get bufferedPosition; + + AudioElement get _audioElement => html5AudioPlayer._audioElement; + + @override + int shuffle(int treeIndex, int currentIndex) => treeIndex + 1; + + @override + String toString() => "${this.runtimeType}"; +} + +abstract class UriAudioSourcePlayer extends IndexedAudioSourcePlayer { + final Uri uri; + final Map headers; + double _resumePos; + Duration _duration; + Completer _completer; + + UriAudioSourcePlayer( + Html5AudioPlayer html5AudioPlayer, String id, this.uri, this.headers) + : super(html5AudioPlayer, id); + + @override + List get sequence => [this]; + + @override + List get shuffleOrder => [0]; + + @override + Future load() async { + _resumePos = 0.0; + return _duration = await html5AudioPlayer.loadUri(uri); + } + + @override + Future play() async { + _audioElement.currentTime = _resumePos; + _audioElement.play(); + _completer = Completer(); + await _completer.future; + _completer = null; + } + + @override + Future pause() async { + _resumePos = _audioElement.currentTime; + _audioElement.pause(); + _interruptPlay(); + } + + @override + Future seek(int position) async { + _audioElement.currentTime = _resumePos = position / 1000.0; + } + + @override + Future complete() async { + _interruptPlay(); + html5AudioPlayer.onEnded(); + } + + _interruptPlay() { + if (_completer?.isCompleted == false) { + _completer.complete(); + } + } + + @override + Duration get duration { + return _duration; + //final seconds = _audioElement.duration; + //return seconds.isFinite + // ? 
Duration(milliseconds: (seconds * 1000).toInt()) + // : null; + } + + @override + Duration get position { + double seconds = _audioElement.currentTime; + return Duration(milliseconds: (seconds * 1000).toInt()); + } + + @override + Duration get bufferedPosition { + if (_audioElement.buffered.length > 0) { + return Duration( + milliseconds: + (_audioElement.buffered.end(_audioElement.buffered.length - 1) * + 1000) + .toInt()); + } else { + return Duration.zero; + } + } +} + +class ProgressiveAudioSourcePlayer extends UriAudioSourcePlayer { + ProgressiveAudioSourcePlayer( + Html5AudioPlayer html5AudioPlayer, String id, Uri uri, Map headers) + : super(html5AudioPlayer, id, uri, headers); +} + +class DashAudioSourcePlayer extends UriAudioSourcePlayer { + DashAudioSourcePlayer( + Html5AudioPlayer html5AudioPlayer, String id, Uri uri, Map headers) + : super(html5AudioPlayer, id, uri, headers); +} + +class HlsAudioSourcePlayer extends UriAudioSourcePlayer { + HlsAudioSourcePlayer( + Html5AudioPlayer html5AudioPlayer, String id, Uri uri, Map headers) + : super(html5AudioPlayer, id, uri, headers); +} + +class ConcatenatingAudioSourcePlayer extends AudioSourcePlayer { + static List generateShuffleOrder(int length, [int firstIndex]) { + final shuffleOrder = List(length); + for (var i = 0; i < length; i++) { + final j = _random.nextInt(i + 1); + shuffleOrder[i] = shuffleOrder[j]; + shuffleOrder[j] = i; + } + if (firstIndex != null) { + for (var i = 1; i < length; i++) { + if (shuffleOrder[i] == firstIndex) { + final v = shuffleOrder[0]; + shuffleOrder[0] = shuffleOrder[i]; + shuffleOrder[i] = v; + break; + } + } + } + return shuffleOrder; + } + + final List audioSourcePlayers; + final bool useLazyPreparation; + List _shuffleOrder; + + ConcatenatingAudioSourcePlayer(Html5AudioPlayer html5AudioPlayer, String id, + this.audioSourcePlayers, this.useLazyPreparation) + : _shuffleOrder = generateShuffleOrder(audioSourcePlayers.length), + super(html5AudioPlayer, id); + + @override + 
List get sequence => + audioSourcePlayers.expand((p) => p.sequence).toList(); + + @override + List get shuffleOrder { + final order = []; + var offset = order.length; + final childOrders = >[]; + for (var audioSourcePlayer in audioSourcePlayers) { + final childShuffleOrder = audioSourcePlayer.shuffleOrder; + childOrders.add(childShuffleOrder.map((i) => i + offset).toList()); + offset += childShuffleOrder.length; + } + for (var i = 0; i < childOrders.length; i++) { + order.addAll(childOrders[_shuffleOrder[i]]); + } + return order; + } + + @override + int shuffle(int treeIndex, int currentIndex) { + int currentChildIndex; + for (var i = 0; i < audioSourcePlayers.length; i++) { + final indexBefore = treeIndex; + final child = audioSourcePlayers[i]; + treeIndex = child.shuffle(treeIndex, currentIndex); + if (currentIndex >= indexBefore && currentIndex < treeIndex) { + currentChildIndex = i; + } else {} + } + // Shuffle so that the current child is first in the shuffle order + _shuffleOrder = + generateShuffleOrder(audioSourcePlayers.length, currentChildIndex); + return treeIndex; + } + + add(AudioSourcePlayer player) { + audioSourcePlayers.add(player); + _shuffleOrder.add(audioSourcePlayers.length - 1); + } + + insert(int index, AudioSourcePlayer player) { + audioSourcePlayers.insert(index, player); + for (var i = 0; i < audioSourcePlayers.length; i++) { + if (_shuffleOrder[i] >= index) { + _shuffleOrder[i]++; + } + } + _shuffleOrder.add(index); + } + + addAll(List players) { + audioSourcePlayers.addAll(players); + _shuffleOrder.addAll( + List.generate(players.length, (i) => audioSourcePlayers.length + i) + .toList() + ..shuffle()); + } + + insertAll(int index, List players) { + audioSourcePlayers.insertAll(index, players); + for (var i = 0; i < audioSourcePlayers.length; i++) { + if (_shuffleOrder[i] >= index) { + _shuffleOrder[i] += players.length; + } + } + _shuffleOrder.addAll( + List.generate(players.length, (i) => index + i).toList()..shuffle()); + } + + 
removeAt(int index) { + audioSourcePlayers.removeAt(index); + // 0 1 2 3 + // 3 2 0 1 + for (var i = 0; i < audioSourcePlayers.length; i++) { + if (_shuffleOrder[i] > index) { + _shuffleOrder[i]--; + } + } + _shuffleOrder.removeWhere((i) => i == index); + } + + removeRange(int start, int end) { + audioSourcePlayers.removeRange(start, end); + for (var i = 0; i < audioSourcePlayers.length; i++) { + if (_shuffleOrder[i] >= end) { + _shuffleOrder[i] -= (end - start); + } + } + _shuffleOrder.removeWhere((i) => i >= start && i < end); + } + + move(int currentIndex, int newIndex) { + audioSourcePlayers.insert( + newIndex, audioSourcePlayers.removeAt(currentIndex)); + } + + clear() { + audioSourcePlayers.clear(); + _shuffleOrder.clear(); + } +} + +class ClippingAudioSourcePlayer extends IndexedAudioSourcePlayer { + final UriAudioSourcePlayer audioSourcePlayer; + final Duration start; + final Duration end; + Completer _completer; + double _resumePos; + Duration _duration; + + ClippingAudioSourcePlayer(Html5AudioPlayer html5AudioPlayer, String id, + this.audioSourcePlayer, this.start, this.end) + : super(html5AudioPlayer, id); + + @override + List get sequence => [this]; + + @override + List get shuffleOrder => [0]; + + @override + Future load() async { + _resumePos = (start ?? Duration.zero).inMilliseconds / 1000.0; + Duration fullDuration = + await html5AudioPlayer.loadUri(audioSourcePlayer.uri); + _audioElement.currentTime = _resumePos; + _duration = Duration( + milliseconds: min((end ?? fullDuration).inMilliseconds, + fullDuration.inMilliseconds) - + (start ?? 
Duration.zero).inMilliseconds); + return _duration; + } + + double get remaining => end.inMilliseconds / 1000 - _audioElement.currentTime; + + @override + Future play() async { + _interruptPlay(ClipInterruptReason.simultaneous); + _audioElement.currentTime = _resumePos; + _audioElement.play(); + _completer = Completer(); + ClipInterruptReason reason; + while ((reason = await _completer.future) == ClipInterruptReason.seek) { + _completer = Completer(); + } + if (reason == ClipInterruptReason.end) { + html5AudioPlayer.onEnded(); + } + _completer = null; + } + + @override + Future pause() async { + _interruptPlay(ClipInterruptReason.pause); + _resumePos = _audioElement.currentTime; + _audioElement.pause(); + } + + @override + Future seek(int position) async { + _interruptPlay(ClipInterruptReason.seek); + _audioElement.currentTime = + _resumePos = start.inMilliseconds / 1000.0 + position / 1000.0; + } + + @override + Future complete() async { + _interruptPlay(ClipInterruptReason.end); + } + + @override + Future timeUpdated(double seconds) async { + if (end != null) { + if (seconds >= end.inMilliseconds / 1000) { + _interruptPlay(ClipInterruptReason.end); + } + } + } + + @override + Duration get duration { + return _duration; + } + + @override + Duration get position { + double seconds = _audioElement.currentTime; + var position = Duration(milliseconds: (seconds * 1000).toInt()); + if (start != null) { + position -= start; + } + if (position < Duration.zero) { + position = Duration.zero; + } + return position; + } + + @override + Duration get bufferedPosition { + if (_audioElement.buffered.length > 0) { + var seconds = + _audioElement.buffered.end(_audioElement.buffered.length - 1); + var position = Duration(milliseconds: (seconds * 1000).toInt()); + if (start != null) { + position -= start; + } + if (position < Duration.zero) { + position = Duration.zero; + } + if (duration != null && position > duration) { + position = duration; + } + return position; + } else { + 
return Duration.zero; + } + } + + _interruptPlay(ClipInterruptReason reason) { + if (_completer?.isCompleted == false) { + _completer.complete(reason); + } + } +} + +enum ClipInterruptReason { end, pause, seek, simultaneous } + +class LoopingAudioSourcePlayer extends AudioSourcePlayer { + final AudioSourcePlayer audioSourcePlayer; + final int count; + + LoopingAudioSourcePlayer(Html5AudioPlayer html5AudioPlayer, String id, + this.audioSourcePlayer, this.count) + : super(html5AudioPlayer, id); + + @override + List get sequence => + List.generate(count, (i) => audioSourcePlayer) + .expand((p) => p.sequence) + .toList(); + + @override + List get shuffleOrder { + final order = []; + var offset = order.length; + for (var i = 0; i < count; i++) { + final childShuffleOrder = audioSourcePlayer.shuffleOrder; + order.addAll(childShuffleOrder.map((i) => i + offset).toList()); + offset += childShuffleOrder.length; + } + return order; + } + + @override + int shuffle(int treeIndex, int currentIndex) { + for (var i = 0; i < count; i++) { + treeIndex = audioSourcePlayer.shuffle(treeIndex, currentIndex); + } + return treeIndex; + } +} diff --git a/just_audio/macos/.gitignore b/just_audio/macos/.gitignore new file mode 100644 index 0000000..aa479fd --- /dev/null +++ b/just_audio/macos/.gitignore @@ -0,0 +1,37 @@ +.idea/ +.vagrant/ +.sconsign.dblite +.svn/ + +.DS_Store +*.swp +profile + +DerivedData/ +build/ +GeneratedPluginRegistrant.h +GeneratedPluginRegistrant.m + +.generated/ + +*.pbxuser +*.mode1v3 +*.mode2v3 +*.perspectivev3 + +!default.pbxuser +!default.mode1v3 +!default.mode2v3 +!default.perspectivev3 + +xcuserdata + +*.moved-aside + +*.pyc +*sync/ +Icon? 
+.tags* + +/Flutter/Generated.xcconfig +/Flutter/flutter_export_environment.sh \ No newline at end of file diff --git a/just_audio/macos/Assets/.gitkeep b/just_audio/macos/Assets/.gitkeep new file mode 100644 index 0000000..e69de29 diff --git a/just_audio/macos/Classes/AudioPlayer.h b/just_audio/macos/Classes/AudioPlayer.h new file mode 100644 index 0000000..d64e13d --- /dev/null +++ b/just_audio/macos/Classes/AudioPlayer.h @@ -0,0 +1,21 @@ +#import + +@interface AudioPlayer : NSObject + +- (instancetype)initWithRegistrar:(NSObject *)registrar playerId:(NSString*)idParam configuredSession:(BOOL)configuredSession; + +@end + +enum ProcessingState { + none, + loading, + buffering, + ready, + completed +}; + +enum LoopMode { + loopOff, + loopOne, + loopAll +}; diff --git a/just_audio/macos/Classes/AudioPlayer.m b/just_audio/macos/Classes/AudioPlayer.m new file mode 100644 index 0000000..ccbfdea --- /dev/null +++ b/just_audio/macos/Classes/AudioPlayer.m @@ -0,0 +1,1138 @@ +#import "AudioPlayer.h" +#import "AudioSource.h" +#import "IndexedAudioSource.h" +#import "UriAudioSource.h" +#import "ConcatenatingAudioSource.h" +#import "LoopingAudioSource.h" +#import "ClippingAudioSource.h" +#import +#import +#include + +// TODO: Check for and report invalid state transitions. 
+// TODO: Apply Apple's guidance on seeking: https://developer.apple.com/library/archive/qa/qa1820/_index.html +@implementation AudioPlayer { + NSObject* _registrar; + FlutterMethodChannel *_methodChannel; + FlutterEventChannel *_eventChannel; + FlutterEventSink _eventSink; + NSString *_playerId; + AVQueuePlayer *_player; + AudioSource *_audioSource; + NSMutableArray *_indexedAudioSources; + NSMutableArray *_order; + NSMutableArray *_orderInv; + int _index; + enum ProcessingState _processingState; + enum LoopMode _loopMode; + BOOL _shuffleModeEnabled; + long long _updateTime; + int _updatePosition; + int _lastPosition; + int _bufferedPosition; + // Set when the current item hasn't been played yet so we aren't sure whether sufficient audio has been buffered. + BOOL _bufferUnconfirmed; + CMTime _seekPos; + FlutterResult _loadResult; + FlutterResult _playResult; + id _timeObserver; + BOOL _automaticallyWaitsToMinimizeStalling; + BOOL _configuredSession; + BOOL _playing; +} + +- (instancetype)initWithRegistrar:(NSObject *)registrar playerId:(NSString*)idParam configuredSession:(BOOL)configuredSession { + self = [super init]; + NSAssert(self, @"super init cannot be nil"); + _registrar = registrar; + _playerId = idParam; + _configuredSession = configuredSession; + _methodChannel = + [FlutterMethodChannel methodChannelWithName:[NSMutableString stringWithFormat:@"com.ryanheise.just_audio.methods.%@", _playerId] + binaryMessenger:[registrar messenger]]; + _eventChannel = + [FlutterEventChannel eventChannelWithName:[NSMutableString stringWithFormat:@"com.ryanheise.just_audio.events.%@", _playerId] + binaryMessenger:[registrar messenger]]; + [_eventChannel setStreamHandler:self]; + _index = 0; + _processingState = none; + _loopMode = loopOff; + _shuffleModeEnabled = NO; + _player = nil; + _audioSource = nil; + _indexedAudioSources = nil; + _order = nil; + _orderInv = nil; + _seekPos = kCMTimeInvalid; + _timeObserver = 0; + _updatePosition = 0; + _updateTime = 0; + 
_lastPosition = 0; + _bufferedPosition = 0; + _bufferUnconfirmed = NO; + _playing = NO; + _loadResult = nil; + _playResult = nil; + _automaticallyWaitsToMinimizeStalling = YES; + __weak __typeof__(self) weakSelf = self; + [_methodChannel setMethodCallHandler:^(FlutterMethodCall* call, FlutterResult result) { + [weakSelf handleMethodCall:call result:result]; + }]; + return self; +} + +- (void)handleMethodCall:(FlutterMethodCall*)call result:(FlutterResult)result { + NSArray* args = (NSArray*)call.arguments; + if ([@"load" isEqualToString:call.method]) { + [self load:args[0] result:result]; + } else if ([@"play" isEqualToString:call.method]) { + [self play:result]; + } else if ([@"pause" isEqualToString:call.method]) { + [self pause]; + result(nil); + } else if ([@"setVolume" isEqualToString:call.method]) { + [self setVolume:(float)[args[0] doubleValue]]; + result(nil); + } else if ([@"setSpeed" isEqualToString:call.method]) { + [self setSpeed:(float)[args[0] doubleValue]]; + result(nil); + } else if ([@"setLoopMode" isEqualToString:call.method]) { + [self setLoopMode:[args[0] intValue]]; + result(nil); + } else if ([@"setShuffleModeEnabled" isEqualToString:call.method]) { + [self setShuffleModeEnabled:(BOOL)[args[0] boolValue]]; + result(nil); + } else if ([@"setAutomaticallyWaitsToMinimizeStalling" isEqualToString:call.method]) { + [self setAutomaticallyWaitsToMinimizeStalling:(BOOL)[args[0] boolValue]]; + result(nil); + } else if ([@"seek" isEqualToString:call.method]) { + CMTime position = args[0] == [NSNull null] ? 
kCMTimePositiveInfinity : CMTimeMake([args[0] intValue], 1000); + [self seek:position index:args[1] completionHandler:^(BOOL finished) { + result(nil); + }]; + result(nil); + } else if ([@"dispose" isEqualToString:call.method]) { + [self dispose]; + result(nil); + } else if ([@"concatenating.add" isEqualToString:call.method]) { + [self concatenatingAdd:(NSString*)args[0] source:(NSDictionary*)args[1]]; + result(nil); + } else if ([@"concatenating.insert" isEqualToString:call.method]) { + [self concatenatingInsert:(NSString*)args[0] index:[args[1] intValue] source:(NSDictionary*)args[2]]; + result(nil); + } else if ([@"concatenating.addAll" isEqualToString:call.method]) { + [self concatenatingAddAll:(NSString*)args[0] sources:(NSArray*)args[1]]; + result(nil); + } else if ([@"concatenating.insertAll" isEqualToString:call.method]) { + [self concatenatingInsertAll:(NSString*)args[0] index:[args[1] intValue] sources:(NSArray*)args[2]]; + result(nil); + } else if ([@"concatenating.removeAt" isEqualToString:call.method]) { + [self concatenatingRemoveAt:(NSString*)args[0] index:(int)args[1]]; + result(nil); + } else if ([@"concatenating.removeRange" isEqualToString:call.method]) { + [self concatenatingRemoveRange:(NSString*)args[0] start:[args[1] intValue] end:[args[2] intValue]]; + result(nil); + } else if ([@"concatenating.move" isEqualToString:call.method]) { + [self concatenatingMove:(NSString*)args[0] currentIndex:[args[1] intValue] newIndex:[args[2] intValue]]; + result(nil); + } else if ([@"concatenating.clear" isEqualToString:call.method]) { + [self concatenatingClear:(NSString*)args[0]]; + result(nil); + } else { + result(FlutterMethodNotImplemented); + } +} + +// Untested +- (void)concatenatingAdd:(NSString *)catId source:(NSDictionary *)source { + [self concatenatingInsertAll:catId index:-1 sources:@[source]]; +} + +// Untested +- (void)concatenatingInsert:(NSString *)catId index:(int)index source:(NSDictionary *)source { + [self concatenatingInsertAll:catId 
index:index sources:@[source]]; +} + +// Untested +- (void)concatenatingAddAll:(NSString *)catId sources:(NSArray *)sources { + [self concatenatingInsertAll:catId index:-1 sources:sources]; +} + +// Untested +- (void)concatenatingInsertAll:(NSString *)catId index:(int)index sources:(NSArray *)sources { + // Find all duplicates of the identified ConcatenatingAudioSource. + NSMutableArray *matches = [[NSMutableArray alloc] init]; + [_audioSource findById:catId matches:matches]; + // Add each new source to each match. + for (int i = 0; i < matches.count; i++) { + ConcatenatingAudioSource *catSource = (ConcatenatingAudioSource *)matches[i]; + int idx = index >= 0 ? index : catSource.count; + NSMutableArray *audioSources = [self decodeAudioSources:sources]; + for (int j = 0; j < audioSources.count; j++) { + AudioSource *audioSource = audioSources[j]; + [catSource insertSource:audioSource atIndex:(idx + j)]; + } + } + // Index the new audio sources. + _indexedAudioSources = [[NSMutableArray alloc] init]; + [_audioSource buildSequence:_indexedAudioSources treeIndex:0]; + for (int i = 0; i < [_indexedAudioSources count]; i++) { + IndexedAudioSource *audioSource = _indexedAudioSources[i]; + if (!audioSource.isAttached) { + audioSource.playerItem.audioSource = audioSource; + [self addItemObservers:audioSource.playerItem]; + } + } + [self updateOrder]; + if (_player.currentItem) { + _index = [self indexForItem:_player.currentItem]; + } else { + _index = 0; + } + [self enqueueFrom:_index]; + // Notify each new IndexedAudioSource that it's been attached to the player. 
+ for (int i = 0; i < [_indexedAudioSources count]; i++) { + if (!_indexedAudioSources[i].isAttached) { + [_indexedAudioSources[i] attach:_player]; + } + } + [self broadcastPlaybackEvent]; +} + +// Untested +- (void)concatenatingRemoveAt:(NSString *)catId index:(int)index { + [self concatenatingRemoveRange:catId start:index end:(index + 1)]; +} + +// Untested +- (void)concatenatingRemoveRange:(NSString *)catId start:(int)start end:(int)end { + // Find all duplicates of the identified ConcatenatingAudioSource. + NSMutableArray *matches = [[NSMutableArray alloc] init]; + [_audioSource findById:catId matches:matches]; + // Remove range from each match. + for (int i = 0; i < matches.count; i++) { + ConcatenatingAudioSource *catSource = (ConcatenatingAudioSource *)matches[i]; + int endIndex = end >= 0 ? end : catSource.count; + [catSource removeSourcesFromIndex:start toIndex:endIndex]; + } + // Re-index the remaining audio sources. + NSArray *oldIndexedAudioSources = _indexedAudioSources; + _indexedAudioSources = [[NSMutableArray alloc] init]; + [_audioSource buildSequence:_indexedAudioSources treeIndex:0]; + for (int i = 0, j = 0; i < _indexedAudioSources.count; i++, j++) { + IndexedAudioSource *audioSource = _indexedAudioSources[i]; + while (audioSource != oldIndexedAudioSources[j]) { + [self removeItemObservers:oldIndexedAudioSources[j].playerItem]; + if (j < _index) { + _index--; + } else if (j == _index) { + // The currently playing item was removed. + } + j++; + } + } + [self updateOrder]; + if (_index >= _indexedAudioSources.count) _index = _indexedAudioSources.count - 1; + if (_index < 0) _index = 0; + [self enqueueFrom:_index]; + [self broadcastPlaybackEvent]; +} + +// Untested +- (void)concatenatingMove:(NSString *)catId currentIndex:(int)currentIndex newIndex:(int)newIndex { + // Find all duplicates of the identified ConcatenatingAudioSource. 
+ NSMutableArray *matches = [[NSMutableArray alloc] init]; + [_audioSource findById:catId matches:matches]; + // Move range within each match. + for (int i = 0; i < matches.count; i++) { + ConcatenatingAudioSource *catSource = (ConcatenatingAudioSource *)matches[i]; + [catSource moveSourceFromIndex:currentIndex toIndex:newIndex]; + } + // Re-index the audio sources. + _indexedAudioSources = [[NSMutableArray alloc] init]; + [_audioSource buildSequence:_indexedAudioSources treeIndex:0]; + _index = [self indexForItem:_player.currentItem]; + [self broadcastPlaybackEvent]; +} + +// Untested +- (void)concatenatingClear:(NSString *)catId { + [self concatenatingRemoveRange:catId start:0 end:-1]; +} + +- (FlutterError*)onListenWithArguments:(id)arguments eventSink:(FlutterEventSink)eventSink { + _eventSink = eventSink; + return nil; +} + +- (FlutterError*)onCancelWithArguments:(id)arguments { + _eventSink = nil; + return nil; +} + +- (void)checkForDiscontinuity { + if (!_eventSink) return; + if (!_playing || CMTIME_IS_VALID(_seekPos) || _processingState == completed) return; + int position = [self getCurrentPosition]; + if (_processingState == buffering) { + if (position > _lastPosition) { + [self leaveBuffering:@"stall ended"]; + [self updatePosition]; + [self broadcastPlaybackEvent]; + } + } else { + long long now = (long long)([[NSDate date] timeIntervalSince1970] * 1000.0); + long long timeSinceLastUpdate = now - _updateTime; + long long expectedPosition = _updatePosition + (long long)(timeSinceLastUpdate * _player.rate); + long long drift = position - expectedPosition; + //NSLog(@"position: %d, drift: %lld", position, drift); + // Update if we've drifted or just started observing + if (_updateTime == 0L) { + [self broadcastPlaybackEvent]; + } else if (drift < -100) { + [self enterBuffering:@"stalling"]; + NSLog(@"Drift: %lld", drift); + [self updatePosition]; + [self broadcastPlaybackEvent]; + } + } + _lastPosition = position; +} + +- (void)enterBuffering:(NSString 
*)reason { + NSLog(@"ENTER BUFFERING: %@", reason); + _processingState = buffering; +} + +- (void)leaveBuffering:(NSString *)reason { + NSLog(@"LEAVE BUFFERING: %@", reason); + _processingState = ready; +} + +- (void)broadcastPlaybackEvent { + if (!_eventSink) return; + _eventSink(@{ + @"processingState": @(_processingState), + @"updatePosition": @(_updatePosition), + @"updateTime": @(_updateTime), + // TODO: buffer position + @"bufferedPosition": @(_updatePosition), + // TODO: Icy Metadata + @"icyMetadata": [NSNull null], + @"duration": @([self getDuration]), + @"currentIndex": @(_index), + }); +} + +- (int)getCurrentPosition { + if (_processingState == none || _processingState == loading) { + return 0; + } else if (CMTIME_IS_VALID(_seekPos)) { + return (int)(1000 * CMTimeGetSeconds(_seekPos)); + } else if (_indexedAudioSources) { + int ms = (int)(1000 * CMTimeGetSeconds(_indexedAudioSources[_index].position)); + if (ms < 0) ms = 0; + return ms; + } else { + return 0; + } +} + +- (int)getBufferedPosition { + if (_processingState == none || _processingState == loading) { + return 0; + } else if (_indexedAudioSources) { + int ms = (int)(1000 * CMTimeGetSeconds(_indexedAudioSources[_index].bufferedPosition)); + if (ms < 0) ms = 0; + return ms; + } else { + return 0; + } +} + +- (int)getDuration { + if (_processingState == none) { + return -1; + } else if (_indexedAudioSources) { + int v = (int)(1000 * CMTimeGetSeconds(_indexedAudioSources[_index].duration)); + return v; + } else { + return 0; + } +} + +- (void)removeItemObservers:(AVPlayerItem *)playerItem { + [playerItem removeObserver:self forKeyPath:@"status"]; + [playerItem removeObserver:self forKeyPath:@"playbackBufferEmpty"]; + [playerItem removeObserver:self forKeyPath:@"playbackBufferFull"]; + //[playerItem removeObserver:self forKeyPath:@"playbackLikelyToKeepUp"]; + [[NSNotificationCenter defaultCenter] removeObserver:self name:AVPlayerItemDidPlayToEndTimeNotification object:playerItem]; + 
[[NSNotificationCenter defaultCenter] removeObserver:self name:AVPlayerItemFailedToPlayToEndTimeNotification object:playerItem]; + [[NSNotificationCenter defaultCenter] removeObserver:self name:AVPlayerItemPlaybackStalledNotification object:playerItem]; +} + +- (void)addItemObservers:(AVPlayerItem *)playerItem { + // Get notified when the item is loaded or had an error loading + [playerItem addObserver:self forKeyPath:@"status" options:NSKeyValueObservingOptionNew context:nil]; + // Get notified of the buffer state + [playerItem addObserver:self forKeyPath:@"playbackBufferEmpty" options:NSKeyValueObservingOptionNew context:nil]; + [playerItem addObserver:self forKeyPath:@"playbackBufferFull" options:NSKeyValueObservingOptionNew context:nil]; + [playerItem addObserver:self forKeyPath:@"loadedTimeRanges" options:NSKeyValueObservingOptionNew context:nil]; + //[playerItem addObserver:self forKeyPath:@"playbackLikelyToKeepUp" options:NSKeyValueObservingOptionNew context:nil]; + // Get notified when playback has reached the end + [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(onComplete:) name:AVPlayerItemDidPlayToEndTimeNotification object:playerItem]; + // Get notified when playback stops due to a failure (currently unused) + [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(onFailToComplete:) name:AVPlayerItemFailedToPlayToEndTimeNotification object:playerItem]; + // Get notified when playback stalls (currently unused) + [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(onItemStalled:) name:AVPlayerItemPlaybackStalledNotification object:playerItem]; +} + +- (NSMutableArray *)decodeAudioSources:(NSArray *)data { + NSMutableArray *array = [[NSMutableArray alloc] init]; + for (int i = 0; i < [data count]; i++) { + AudioSource *source = [self decodeAudioSource:data[i]]; + [array addObject:source]; + } + return array; +} + +- (AudioSource *)decodeAudioSource:(NSDictionary *)data { + NSString *type = 
data[@"type"]; + if ([@"progressive" isEqualToString:type]) { + return [[UriAudioSource alloc] initWithId:data[@"id"] uri:data[@"uri"]]; + } else if ([@"dash" isEqualToString:type]) { + return [[UriAudioSource alloc] initWithId:data[@"id"] uri:data[@"uri"]]; + } else if ([@"hls" isEqualToString:type]) { + return [[UriAudioSource alloc] initWithId:data[@"id"] uri:data[@"uri"]]; + } else if ([@"concatenating" isEqualToString:type]) { + return [[ConcatenatingAudioSource alloc] initWithId:data[@"id"] + audioSources:[self decodeAudioSources:data[@"audioSources"]]]; + } else if ([@"clipping" isEqualToString:type]) { + return [[ClippingAudioSource alloc] initWithId:data[@"id"] + audioSource:[self decodeAudioSource:data[@"audioSource"]] + start:data[@"start"] + end:data[@"end"]]; + } else if ([@"looping" isEqualToString:type]) { + NSMutableArray *childSources = [NSMutableArray new]; + int count = [data[@"count"] intValue]; + for (int i = 0; i < count; i++) { + [childSources addObject:[self decodeAudioSource:data[@"audioSource"]]]; + } + return [[LoopingAudioSource alloc] initWithId:data[@"id"] audioSources:childSources]; + } else { + return nil; + } +} + +- (void)enqueueFrom:(int)index { + int oldIndex = _index; + _index = index; + + // Update the queue while keeping the currently playing item untouched. + + /* NSLog(@"before reorder: _player.items.count: ", _player.items.count); */ + /* [self dumpQueue]; */ + + // First, remove all _player items except for the currently playing one (if any). + IndexedPlayerItem *oldItem = _player.currentItem; + IndexedPlayerItem *existingItem = nil; + NSArray *oldPlayerItems = [NSArray arrayWithArray:_player.items]; + // In the first pass, preserve the old and new items. + for (int i = 0; i < oldPlayerItems.count; i++) { + if (oldPlayerItems[i] == _indexedAudioSources[_index].playerItem) { + // Preserve and tag new item if it is already in the queue. 
+ existingItem = oldPlayerItems[i]; + } else if (oldPlayerItems[i] == oldItem) { + // Temporarily preserve old item, just to avoid jumping to + // intermediate queue positions unnecessarily. We only want to jump + // once to _index. + } else { + [_player removeItem:oldPlayerItems[i]]; + } + } + // In the second pass, remove the old item (if different from new item). + if (_index != oldIndex) { + [_player removeItem:oldItem]; + } + + /* NSLog(@"inter order: _player.items.count: ", _player.items.count); */ + /* [self dumpQueue]; */ + + // Regenerate queue + BOOL include = NO; + for (int i = 0; i < [_order count]; i++) { + int si = [_order[i] intValue]; + if (si == _index) include = YES; + if (include && _indexedAudioSources[si].playerItem != existingItem) { + [_player insertItem:_indexedAudioSources[si].playerItem afterItem:nil]; + } + } + + /* NSLog(@"after reorder: _player.items.count: ", _player.items.count); */ + /* [self dumpQueue]; */ + + if (_processingState != loading && oldItem != _indexedAudioSources[_index].playerItem) { + // || !_player.currentItem.playbackLikelyToKeepUp; + if (_player.currentItem.playbackBufferEmpty) { + [self enterBuffering:@"enqueueFrom playbackBufferEmpty"]; + } else { + [self leaveBuffering:@"enqueueFrom !playbackBufferEmpty"]; + } + [self updatePosition]; + } +} + +- (void)updatePosition { + _updatePosition = [self getCurrentPosition]; + _updateTime = (long long)([[NSDate date] timeIntervalSince1970] * 1000.0); +} + +- (void)load:(NSDictionary *)source result:(FlutterResult)result { + if (!_playing) { + [_player pause]; + } + if (_processingState == loading) { + [self abortExistingConnection]; + } + _loadResult = result; + _index = 0; + [self updatePosition]; + _processingState = loading; + [self broadcastPlaybackEvent]; + // Remove previous observers + if (_indexedAudioSources) { + for (int i = 0; i < [_indexedAudioSources count]; i++) { + [self removeItemObservers:_indexedAudioSources[i].playerItem]; + } + } + // Decode audio 
source + if (_audioSource && [@"clipping" isEqualToString:source[@"type"]]) { + // Check if we're clipping an audio source that was previously loaded. + UriAudioSource *child = nil; + if ([_audioSource isKindOfClass:[ClippingAudioSource class]]) { + ClippingAudioSource *clipper = (ClippingAudioSource *)_audioSource; + child = clipper.audioSource; + } else if ([_audioSource isKindOfClass:[UriAudioSource class]]) { + child = (UriAudioSource *)_audioSource; + } + if (child) { + _audioSource = [[ClippingAudioSource alloc] initWithId:source[@"id"] + audioSource:child + start:source[@"start"] + end:source[@"end"]]; + } else { + _audioSource = [self decodeAudioSource:source]; + } + } else { + _audioSource = [self decodeAudioSource:source]; + } + _indexedAudioSources = [[NSMutableArray alloc] init]; + [_audioSource buildSequence:_indexedAudioSources treeIndex:0]; + for (int i = 0; i < [_indexedAudioSources count]; i++) { + IndexedAudioSource *source = _indexedAudioSources[i]; + [self addItemObservers:source.playerItem]; + source.playerItem.audioSource = source; + } + [self updateOrder]; + // Set up an empty player + if (!_player) { + _player = [[AVQueuePlayer alloc] initWithItems:@[]]; + if (@available(macOS 10.12, iOS 10.0, *)) { + _player.automaticallyWaitsToMinimizeStalling = _automaticallyWaitsToMinimizeStalling; + // TODO: Remove these observers in dispose. + [_player addObserver:self + forKeyPath:@"timeControlStatus" + options:NSKeyValueObservingOptionNew + context:nil]; + } + [_player addObserver:self + forKeyPath:@"currentItem" + options:NSKeyValueObservingOptionNew + context:nil]; + // TODO: learn about the different ways to define weakSelf. 
+ //__weak __typeof__(self) weakSelf = self; + //typeof(self) __weak weakSelf = self; + __unsafe_unretained typeof(self) weakSelf = self; + if (@available(macOS 10.12, iOS 10.0, *)) {} + else { + _timeObserver = [_player addPeriodicTimeObserverForInterval:CMTimeMake(200, 1000) + queue:nil + usingBlock:^(CMTime time) { + [weakSelf checkForDiscontinuity]; + } + ]; + } + } + // Initialise the AVQueuePlayer with items. + [self enqueueFrom:0]; + // Notify each IndexedAudioSource that it's been attached to the player. + for (int i = 0; i < [_indexedAudioSources count]; i++) { + [_indexedAudioSources[i] attach:_player]; + } + + if (_player.currentItem.status == AVPlayerItemStatusReadyToPlay) { + _loadResult(@([self getDuration])); + _loadResult = nil; + } else { + // We send result after the playerItem is ready in observeValueForKeyPath. + } + [self broadcastPlaybackEvent]; +} + +- (void)updateOrder { + if (_shuffleModeEnabled) { + [_audioSource shuffle:0 currentIndex: _index]; + } + _orderInv = [NSMutableArray arrayWithCapacity:[_indexedAudioSources count]]; + for (int i = 0; i < [_indexedAudioSources count]; i++) { + [_orderInv addObject:@(0)]; + } + if (_shuffleModeEnabled) { + _order = [_audioSource getShuffleOrder]; + } else { + NSMutableArray *order = [[NSMutableArray alloc] init]; + for (int i = 0; i < [_indexedAudioSources count]; i++) { + [order addObject:@(i)]; + } + _order = order; + } + for (int i = 0; i < [_indexedAudioSources count]; i++) { + _orderInv[[_order[i] intValue]] = @(i); + } +} + +- (void)onItemStalled:(NSNotification *)notification { + IndexedPlayerItem *playerItem = (IndexedPlayerItem *)notification.object; + NSLog(@"onItemStalled"); +} + +- (void)onFailToComplete:(NSNotification *)notification { + IndexedPlayerItem *playerItem = (IndexedPlayerItem *)notification.object; + NSLog(@"onFailToComplete"); +} + +- (void)onComplete:(NSNotification *)notification { + NSLog(@"onComplete"); + if (_loopMode == loopOne) { + [self seek:kCMTimeZero 
index:@(_index) completionHandler:^(BOOL finished) { + // XXX: Not necessary? + [self play]; + }]; + } else { + IndexedPlayerItem *endedPlayerItem = (IndexedPlayerItem *)notification.object; + IndexedAudioSource *endedSource = endedPlayerItem.audioSource; + // When an item ends, seek back to its beginning. + [endedSource seek:kCMTimeZero]; + + if ([_orderInv[_index] intValue] + 1 < [_order count]) { + // account for automatic move to next item + _index = [_order[[_orderInv[_index] intValue] + 1] intValue]; + NSLog(@"advance to next: index = %d", _index); + [self broadcastPlaybackEvent]; + } else { + // reached end of playlist + if (_loopMode == loopAll) { + NSLog(@"Loop back to first item"); + // Loop back to the beginning + // TODO: Currently there will be a gap at the loop point. + // Maybe we can do something clever by temporarily adding the + // first playlist item at the end of the queue, although this + // will affect any code that assumes the queue always + // corresponds to a contiguous region of the indexed audio + // sources. + // For now we just do a seek back to the start. + if ([_order count] == 1) { + [self seek:kCMTimeZero index:[NSNull null] completionHandler:^(BOOL finished) { + // XXX: Necessary? + [self play]; + }]; + } else { + [self seek:kCMTimeZero index:_order[0] completionHandler:^(BOOL finished) { + // XXX: Necessary? 
+ [self play]; + }]; + } + } else { + [self complete]; + } + } + } +} + +- (void)observeValueForKeyPath:(NSString *)keyPath + ofObject:(id)object + change:(NSDictionary *)change + context:(void *)context { + + if ([keyPath isEqualToString:@"status"]) { + IndexedPlayerItem *playerItem = (IndexedPlayerItem *)object; + AVPlayerItemStatus status = AVPlayerItemStatusUnknown; + NSNumber *statusNumber = change[NSKeyValueChangeNewKey]; + if ([statusNumber isKindOfClass:[NSNumber class]]) { + status = statusNumber.intValue; + } + switch (status) { + case AVPlayerItemStatusReadyToPlay: { + if (playerItem != _player.currentItem) return; + // Detect buffering in different ways depending on whether we're playing + if (_playing) { + if (@available(macOS 10.12, iOS 10.0, *)) { + if (_player.timeControlStatus == AVPlayerTimeControlStatusWaitingToPlayAtSpecifiedRate) { + [self enterBuffering:@"ready to play: playing, waitingToPlay"]; + } else { + [self leaveBuffering:@"ready to play: playing, !waitingToPlay"]; + } + [self updatePosition]; + } else { + // If this happens when we're playing, check whether buffer is confirmed + if (_bufferUnconfirmed && !_player.currentItem.playbackBufferFull) { + // Stay in bufering - XXX Test + [self enterBuffering:@"ready to play: playing, bufferUnconfirmed && !playbackBufferFull"]; + } else { + if (_player.currentItem.playbackBufferEmpty) { + // !_player.currentItem.playbackLikelyToKeepUp; + [self enterBuffering:@"ready to play: playing, playbackBufferEmpty"]; + } else { + [self leaveBuffering:@"ready to play: playing, !playbackBufferEmpty"]; + } + [self updatePosition]; + } + } + } else { + if (_player.currentItem.playbackBufferEmpty) { + [self enterBuffering:@"ready to play: !playing, playbackBufferEmpty"]; + // || !_player.currentItem.playbackLikelyToKeepUp; + } else { + [self leaveBuffering:@"ready to play: !playing, !playbackBufferEmpty"]; + } + [self updatePosition]; + } + [self broadcastPlaybackEvent]; + if (_loadResult) { + 
_loadResult(@([self getDuration])); + _loadResult = nil; + } + break; + } + case AVPlayerItemStatusFailed: { + NSLog(@"AVPlayerItemStatusFailed"); + [self sendErrorForItem:playerItem]; + break; + } + case AVPlayerItemStatusUnknown: + break; + } + } else if ([keyPath isEqualToString:@"playbackBufferEmpty"] || [keyPath isEqualToString:@"playbackBufferFull"]) { + // Use these values to detect buffering. + IndexedPlayerItem *playerItem = (IndexedPlayerItem *)object; + if (playerItem != _player.currentItem) return; + // If there's a seek in progress, these values are unreliable + if (CMTIME_IS_VALID(_seekPos)) return; + // Detect buffering in different ways depending on whether we're playing + if (_playing) { + if (@available(macOS 10.12, iOS 10.0, *)) { + // We handle this with timeControlStatus instead. + } else { + if (_bufferUnconfirmed && playerItem.playbackBufferFull) { + _bufferUnconfirmed = NO; + [self leaveBuffering:@"playing, _bufferUnconfirmed && playbackBufferFull"]; + [self updatePosition]; + NSLog(@"Buffering confirmed! 
leaving buffering"); + [self broadcastPlaybackEvent]; + } + } + } else { + if (playerItem.playbackBufferEmpty) { + [self enterBuffering:@"!playing, playbackBufferEmpty"]; + [self updatePosition]; + [self broadcastPlaybackEvent]; + } else if (!playerItem.playbackBufferEmpty || playerItem.playbackBufferFull) { + _processingState = ready; + [self leaveBuffering:@"!playing, !playbackBufferEmpty || playbackBufferFull"]; + [self updatePosition]; + [self broadcastPlaybackEvent]; + } + } + /* } else if ([keyPath isEqualToString:@"playbackLikelyToKeepUp"]) { */ + } else if ([keyPath isEqualToString:@"timeControlStatus"]) { + if (@available(macOS 10.12, iOS 10.0, *)) { + AVPlayerTimeControlStatus status = AVPlayerTimeControlStatusPaused; + NSNumber *statusNumber = change[NSKeyValueChangeNewKey]; + if ([statusNumber isKindOfClass:[NSNumber class]]) { + status = statusNumber.intValue; + } + switch (status) { + case AVPlayerTimeControlStatusPaused: + //NSLog(@"AVPlayerTimeControlStatusPaused"); + break; + case AVPlayerTimeControlStatusWaitingToPlayAtSpecifiedRate: + //NSLog(@"AVPlayerTimeControlStatusWaitingToPlayAtSpecifiedRate"); + if (_processingState != completed) { + [self enterBuffering:@"timeControlStatus"]; + [self updatePosition]; + [self broadcastPlaybackEvent]; + } else { + NSLog(@"Ignoring wait signal because we reached the end"); + } + break; + case AVPlayerTimeControlStatusPlaying: + [self leaveBuffering:@"timeControlStatus"]; + [self updatePosition]; + [self broadcastPlaybackEvent]; + break; + } + } + } else if ([keyPath isEqualToString:@"currentItem"] && _player.currentItem) { + if (_player.currentItem.status == AVPlayerItemStatusFailed) { + if ([_orderInv[_index] intValue] + 1 < [_order count]) { + // account for automatic move to next item + _index = [_order[[_orderInv[_index] intValue] + 1] intValue]; + NSLog(@"advance to next on error: index = %d", _index); + [self broadcastPlaybackEvent]; + } else { + NSLog(@"error on last item"); + } + return; + } else { + 
int expectedIndex = [self indexForItem:_player.currentItem]; + if (_index != expectedIndex) { + // AVQueuePlayer will sometimes skip over error items without + // notifying this observer. + NSLog(@"Queue change detected. Adjusting index from %d -> %d", _index, expectedIndex); + _index = expectedIndex; + [self broadcastPlaybackEvent]; + } + } + //NSLog(@"currentItem changed. _index=%d", _index); + _bufferUnconfirmed = YES; + // If we've skipped or transitioned to a new item and we're not + // currently in the middle of a seek + if (CMTIME_IS_INVALID(_seekPos) && _player.currentItem.status == AVPlayerItemStatusReadyToPlay) { + [self updatePosition]; + IndexedAudioSource *source = ((IndexedPlayerItem *)_player.currentItem).audioSource; + // We should already be at position zero but for + // ClippingAudioSource it might be off by some milliseconds so we + // consider anything <= 100 as close enough. + if ((int)(1000 * CMTimeGetSeconds(source.position)) > 100) { + NSLog(@"On currentItem change, seeking back to zero"); + BOOL shouldResumePlayback = NO; + AVPlayerActionAtItemEnd originalEndAction = _player.actionAtItemEnd; + if (_playing && CMTimeGetSeconds(CMTimeSubtract(source.position, source.duration)) >= 0) { + NSLog(@"Need to pause while rewinding because we're at the end"); + shouldResumePlayback = YES; + _player.actionAtItemEnd = AVPlayerActionAtItemEndPause; + [_player pause]; + } + [self enterBuffering:@"currentItem changed, seeking"]; + [self updatePosition]; + [self broadcastPlaybackEvent]; + [source seek:kCMTimeZero completionHandler:^(BOOL finished) { + [self leaveBuffering:@"currentItem changed, finished seek"]; + [self updatePosition]; + [self broadcastPlaybackEvent]; + if (shouldResumePlayback) { + _player.actionAtItemEnd = originalEndAction; + // TODO: This logic is almost duplicated in seek. See if we can reuse this code. + [_player play]; + } + }]; + } else { + // Already at zero, no need to seek. 
+ } + } + } else if ([keyPath isEqualToString:@"loadedTimeRanges"]) { + IndexedPlayerItem *playerItem = (IndexedPlayerItem *)object; + if (playerItem != _player.currentItem) return; + int pos = [self getBufferedPosition]; + if (pos != _bufferedPosition) { + _bufferedPosition = pos; + [self broadcastPlaybackEvent]; + } + } +} + +- (void)sendErrorForItem:(IndexedPlayerItem *)playerItem { + FlutterError *flutterError = [FlutterError errorWithCode:[NSString stringWithFormat:@"%d", playerItem.error.code] + message:playerItem.error.localizedDescription + details:nil]; + [self sendError:flutterError playerItem:playerItem]; +} + +- (void)sendError:(FlutterError *)flutterError playerItem:(IndexedPlayerItem *)playerItem { + NSLog(@"sendError"); + if (_loadResult && playerItem == _player.currentItem) { + _loadResult(flutterError); + _loadResult = nil; + } + if (_eventSink) { + // Broadcast all errors even if they aren't on the current item. + _eventSink(flutterError); + } +} + +- (void)abortExistingConnection { + FlutterError *flutterError = [FlutterError errorWithCode:@"abort" + message:@"Connection aborted" + details:nil]; + [self sendError:flutterError playerItem:nil]; +} + +- (int)indexForItem:(IndexedPlayerItem *)playerItem { + for (int i = 0; i < _indexedAudioSources.count; i++) { + if (_indexedAudioSources[i].playerItem == playerItem) { + return i; + } + } + return -1; +} + +- (void)play { + [self play:nil]; +} + +- (void)play:(FlutterResult)result { + if (result) { + if (_playResult) { + NSLog(@"INTERRUPTING PLAY"); + _playResult(nil); + } + _playResult = result; + } + _playing = YES; +#if TARGET_OS_IPHONE + if (_configuredSession) { + [[AVAudioSession sharedInstance] setActive:YES error:nil]; + } +#endif + [_player play]; + [self updatePosition]; + if (@available(macOS 10.12, iOS 10.0, *)) {} + else { + if (_bufferUnconfirmed && !_player.currentItem.playbackBufferFull) { + [self enterBuffering:@"play, _bufferUnconfirmed && !playbackBufferFull"]; + [self 
broadcastPlaybackEvent]; + } + } +} + +- (void)pause { + _playing = NO; + [_player pause]; + [self updatePosition]; + [self broadcastPlaybackEvent]; + if (_playResult) { + NSLog(@"PLAY FINISHED DUE TO PAUSE"); + _playResult(nil); + _playResult = nil; + } +} + +- (void)complete { + [self updatePosition]; + _processingState = completed; + [self broadcastPlaybackEvent]; + if (_playResult) { + NSLog(@"PLAY FINISHED DUE TO COMPLETE"); + _playResult(nil); + _playResult = nil; + } +} + +- (void)setVolume:(float)volume { + [_player setVolume:volume]; +} + +- (void)setSpeed:(float)speed { + if (speed == 1.0 + || (speed < 1.0 && _player.currentItem.canPlaySlowForward) + || (speed > 1.0 && _player.currentItem.canPlayFastForward)) { + _player.rate = speed; + } + [self updatePosition]; +} + +- (void)setLoopMode:(int)loopMode { + _loopMode = loopMode; + if (_player) { + switch (_loopMode) { + case loopOne: + _player.actionAtItemEnd = AVPlayerActionAtItemEndPause; // AVPlayerActionAtItemEndNone + break; + default: + _player.actionAtItemEnd = AVPlayerActionAtItemEndAdvance; + } + } +} + +- (void)setShuffleModeEnabled:(BOOL)shuffleModeEnabled { + NSLog(@"setShuffleModeEnabled: %d", shuffleModeEnabled); + _shuffleModeEnabled = shuffleModeEnabled; + if (!_audioSource) return; + + [self updateOrder]; + + [self enqueueFrom:_index]; +} + +- (void)dumpQueue { + for (int i = 0; i < _player.items.count; i++) { + IndexedPlayerItem *playerItem = _player.items[i]; + for (int j = 0; j < _indexedAudioSources.count; j++) { + IndexedAudioSource *source = _indexedAudioSources[j]; + if (source.playerItem == playerItem) { + NSLog(@"- %d", j); + break; + } + } + } +} + +- (void)setAutomaticallyWaitsToMinimizeStalling:(bool)automaticallyWaitsToMinimizeStalling { + _automaticallyWaitsToMinimizeStalling = automaticallyWaitsToMinimizeStalling; + if (@available(macOS 10.12, iOS 10.0, *)) { + if(_player) { + _player.automaticallyWaitsToMinimizeStalling = automaticallyWaitsToMinimizeStalling; + } + } +} + 
+- (void)seek:(CMTime)position index:(NSNumber *)newIndex completionHandler:(void (^)(BOOL))completionHandler { + int index = _index; + if (newIndex != [NSNull null]) { + index = [newIndex intValue]; + } + if (index != _index) { + // Jump to a new item + /* if (_playing && index == _index + 1) { */ + /* // Special case for jumping to the very next item */ + /* NSLog(@"seek to next item: %d -> %d", _index, index); */ + /* [_indexedAudioSources[_index] seek:kCMTimeZero]; */ + /* _index = index; */ + /* [_player advanceToNextItem]; */ + /* [self broadcastPlaybackEvent]; */ + /* } else */ + { + // Jump to a distant item + //NSLog(@"seek# jump to distant item: %d -> %d", _index, index); + if (_playing) { + [_player pause]; + } + [_indexedAudioSources[_index] seek:kCMTimeZero]; + // The "currentItem" key observer will respect that a seek is already in progress + _seekPos = position; + [self updatePosition]; + [self enqueueFrom:index]; + IndexedAudioSource *source = _indexedAudioSources[_index]; + if (abs((int)(1000 * CMTimeGetSeconds(CMTimeSubtract(source.position, position)))) > 100) { + [self enterBuffering:@"seek to index"]; + [self updatePosition]; + [self broadcastPlaybackEvent]; + [source seek:position completionHandler:^(BOOL finished) { + if (@available(macOS 10.12, iOS 10.0, *)) { + if (_playing) { + // Handled by timeControlStatus + } else { + if (_bufferUnconfirmed && !_player.currentItem.playbackBufferFull) { + // Stay in buffering + } else if (source.playerItem.status == AVPlayerItemStatusReadyToPlay) { + [self leaveBuffering:@"seek to index finished, (!bufferUnconfirmed || playbackBufferFull) && ready to play"]; + [self updatePosition]; + [self broadcastPlaybackEvent]; + } + } + } else { + if (_bufferUnconfirmed && !_player.currentItem.playbackBufferFull) { + // Stay in buffering + } else if (source.playerItem.status == AVPlayerItemStatusReadyToPlay) { + [self leaveBuffering:@"seek to index finished, (!bufferUnconfirmed || playbackBufferFull) && ready to 
play"]; + [self updatePosition]; + [self broadcastPlaybackEvent]; + } + } + if (_playing) { + [_player play]; + } + _seekPos = kCMTimeInvalid; + [self broadcastPlaybackEvent]; + if (completionHandler) { + completionHandler(finished); + } + }]; + } else { + _seekPos = kCMTimeInvalid; + if (_playing) { + [_player play]; + } + } + } + } else { + // Seek within an item + if (_playing) { + [_player pause]; + } + _seekPos = position; + //NSLog(@"seek. enter buffering. pos = %d", (int)(1000*CMTimeGetSeconds(_indexedAudioSources[_index].position))); + // TODO: Move this into a separate method so it can also + // be used in skip. + [self enterBuffering:@"seek"]; + [self updatePosition]; + [self broadcastPlaybackEvent]; + [_indexedAudioSources[_index] seek:position completionHandler:^(BOOL finished) { + [self updatePosition]; + if (_playing) { + // If playing, buffering will be detected either by: + // 1. checkForDiscontinuity + // 2. timeControlStatus + [_player play]; + } else { + // If not playing, there is no reliable way to detect + // when buffering has completed, so we use + // !playbackBufferEmpty. Although this always seems to + // be full even right after a seek. 
+ if (_player.currentItem.playbackBufferEmpty) { + [self enterBuffering:@"seek finished, playbackBufferEmpty"]; + } else { + [self leaveBuffering:@"seek finished, !playbackBufferEmpty"]; + } + [self updatePosition]; + if (_processingState != buffering) { + [self broadcastPlaybackEvent]; + } + } + _seekPos = kCMTimeInvalid; + [self broadcastPlaybackEvent]; + if (completionHandler) { + completionHandler(finished); + } + }]; + } +} + +- (void)dispose { + if (_processingState != none) { + [_player pause]; + _processingState = none; + [self broadcastPlaybackEvent]; + } + if (_timeObserver) { + [_player removeTimeObserver:_timeObserver]; + _timeObserver = 0; + } + if (_indexedAudioSources) { + for (int i = 0; i < [_indexedAudioSources count]; i++) { + [self removeItemObservers:_indexedAudioSources[i].playerItem]; + } + } + if (_player) { + [_player removeObserver:self forKeyPath:@"currentItem"]; + if (@available(macOS 10.12, iOS 10.0, *)) { + [_player removeObserver:self forKeyPath:@"timeControlStatus"]; + } + _player = nil; + } + // Untested: + // [_eventChannel setStreamHandler:nil]; + // [_methodChannel setMethodHandler:nil]; +} + +@end diff --git a/just_audio/macos/Classes/AudioSource.h b/just_audio/macos/Classes/AudioSource.h new file mode 100644 index 0000000..3dd1bf5 --- /dev/null +++ b/just_audio/macos/Classes/AudioSource.h @@ -0,0 +1,13 @@ +#import + +@interface AudioSource : NSObject + +@property (readonly, nonatomic) NSString* sourceId; + +- (instancetype)initWithId:(NSString *)sid; +- (int)buildSequence:(NSMutableArray *)sequence treeIndex:(int)treeIndex; +- (void)findById:(NSString *)sourceId matches:(NSMutableArray *)matches; +- (NSArray *)getShuffleOrder; +- (int)shuffle:(int)treeIndex currentIndex:(int)currentIndex; + +@end diff --git a/just_audio/macos/Classes/AudioSource.m b/just_audio/macos/Classes/AudioSource.m new file mode 100644 index 0000000..81534f1 --- /dev/null +++ b/just_audio/macos/Classes/AudioSource.m @@ -0,0 +1,37 @@ +#import 
"AudioSource.h" +#import + +@implementation AudioSource { + NSString *_sourceId; +} + +- (instancetype)initWithId:(NSString *)sid { + self = [super init]; + NSAssert(self, @"super init cannot be nil"); + _sourceId = sid; + return self; +} + +- (NSString *)sourceId { + return _sourceId; +} + +- (int)buildSequence:(NSMutableArray *)sequence treeIndex:(int)treeIndex { + return 0; +} + +- (void)findById:(NSString *)sourceId matches:(NSMutableArray *)matches { + if ([_sourceId isEqualToString:sourceId]) { + [matches addObject:self]; + } +} + +- (NSArray *)getShuffleOrder { + return @[]; +} + +- (int)shuffle:(int)treeIndex currentIndex:(int)currentIndex { + return 0; +} + +@end diff --git a/just_audio/macos/Classes/ClippingAudioSource.h b/just_audio/macos/Classes/ClippingAudioSource.h new file mode 100644 index 0000000..8122e3a --- /dev/null +++ b/just_audio/macos/Classes/ClippingAudioSource.h @@ -0,0 +1,11 @@ +#import "AudioSource.h" +#import "UriAudioSource.h" +#import + +@interface ClippingAudioSource : IndexedAudioSource + +@property (readonly, nonatomic) UriAudioSource* audioSource; + +- (instancetype)initWithId:(NSString *)sid audioSource:(UriAudioSource *)audioSource start:(NSNumber *)start end:(NSNumber *)end; + +@end diff --git a/just_audio/macos/Classes/ClippingAudioSource.m b/just_audio/macos/Classes/ClippingAudioSource.m new file mode 100644 index 0000000..2f3b174 --- /dev/null +++ b/just_audio/macos/Classes/ClippingAudioSource.m @@ -0,0 +1,79 @@ +#import "AudioSource.h" +#import "ClippingAudioSource.h" +#import "IndexedPlayerItem.h" +#import "UriAudioSource.h" +#import + +@implementation ClippingAudioSource { + UriAudioSource *_audioSource; + CMTime _start; + CMTime _end; +} + +- (instancetype)initWithId:(NSString *)sid audioSource:(UriAudioSource *)audioSource start:(NSNumber *)start end:(NSNumber *)end { + self = [super initWithId:sid]; + NSAssert(self, @"super init cannot be nil"); + _audioSource = audioSource; + _start = start == [NSNull null] ? 
kCMTimeZero : CMTimeMake([start intValue], 1000); + _end = end == [NSNull null] ? kCMTimeInvalid : CMTimeMake([end intValue], 1000); + return self; +} + +- (UriAudioSource *)audioSource { + return _audioSource; +} + +- (void)findById:(NSString *)sourceId matches:(NSMutableArray *)matches { + [super findById:sourceId matches:matches]; + [_audioSource findById:sourceId matches:matches]; +} + +- (void)attach:(AVQueuePlayer *)player { + [super attach:player]; + _audioSource.playerItem.forwardPlaybackEndTime = _end; + // XXX: Not needed since currentItem observer handles it? + [self seek:kCMTimeZero]; +} + +- (IndexedPlayerItem *)playerItem { + return _audioSource.playerItem; +} + +- (NSArray *)getShuffleOrder { + return @[@(0)]; +} + +- (void)play:(AVQueuePlayer *)player { +} + +- (void)pause:(AVQueuePlayer *)player { +} + +- (void)stop:(AVQueuePlayer *)player { +} + +- (void)seek:(CMTime)position completionHandler:(void (^)(BOOL))completionHandler { + if (!completionHandler || (self.playerItem.status == AVPlayerItemStatusReadyToPlay)) { + CMTime absPosition = CMTimeAdd(_start, position); + [_audioSource.playerItem seekToTime:absPosition toleranceBefore:kCMTimeZero toleranceAfter:kCMTimeZero completionHandler:completionHandler]; + } +} + +- (CMTime)duration { + return CMTimeSubtract(CMTIME_IS_INVALID(_end) ? self.playerItem.duration : _end, _start); +} + +- (void)setDuration:(CMTime)duration { +} + +- (CMTime)position { + return CMTimeSubtract(self.playerItem.currentTime, _start); +} + +- (CMTime)bufferedPosition { + CMTime pos = CMTimeSubtract(_audioSource.bufferedPosition, _start); + CMTime dur = [self duration]; + return CMTimeCompare(pos, dur) >= 0 ? 
dur : pos; +} + +@end diff --git a/just_audio/macos/Classes/ConcatenatingAudioSource.h b/just_audio/macos/Classes/ConcatenatingAudioSource.h new file mode 100644 index 0000000..68455af --- /dev/null +++ b/just_audio/macos/Classes/ConcatenatingAudioSource.h @@ -0,0 +1,13 @@ +#import "AudioSource.h" +#import + +@interface ConcatenatingAudioSource : AudioSource + +@property (readonly, nonatomic) int count; + +- (instancetype)initWithId:(NSString *)sid audioSources:(NSMutableArray *)audioSources; +- (void)insertSource:(AudioSource *)audioSource atIndex:(int)index; +- (void)removeSourcesFromIndex:(int)start toIndex:(int)end; +- (void)moveSourceFromIndex:(int)currentIndex toIndex:(int)newIndex; + +@end diff --git a/just_audio/macos/Classes/ConcatenatingAudioSource.m b/just_audio/macos/Classes/ConcatenatingAudioSource.m new file mode 100644 index 0000000..bd7b713 --- /dev/null +++ b/just_audio/macos/Classes/ConcatenatingAudioSource.m @@ -0,0 +1,109 @@ +#import "AudioSource.h" +#import "ConcatenatingAudioSource.h" +#import +#import + +@implementation ConcatenatingAudioSource { + NSMutableArray *_audioSources; + NSMutableArray *_shuffleOrder; +} + +- (instancetype)initWithId:(NSString *)sid audioSources:(NSMutableArray *)audioSources { + self = [super initWithId:sid]; + NSAssert(self, @"super init cannot be nil"); + _audioSources = audioSources; + return self; +} + +- (int)count { + return _audioSources.count; +} + +- (void)insertSource:(AudioSource *)audioSource atIndex:(int)index { + [_audioSources insertObject:audioSource atIndex:index]; +} + +- (void)removeSourcesFromIndex:(int)start toIndex:(int)end { + if (end == -1) end = _audioSources.count; + for (int i = start; i < end; i++) { + [_audioSources removeObjectAtIndex:start]; + } +} + +- (void)moveSourceFromIndex:(int)currentIndex toIndex:(int)newIndex { + AudioSource *source = _audioSources[currentIndex]; + [_audioSources removeObjectAtIndex:currentIndex]; + [_audioSources insertObject:source atIndex:newIndex]; +} + 
+- (int)buildSequence:(NSMutableArray *)sequence treeIndex:(int)treeIndex { + for (int i = 0; i < [_audioSources count]; i++) { + treeIndex = [_audioSources[i] buildSequence:sequence treeIndex:treeIndex]; + } + return treeIndex; +} + +- (void)findById:(NSString *)sourceId matches:(NSMutableArray *)matches { + [super findById:sourceId matches:matches]; + for (int i = 0; i < [_audioSources count]; i++) { + [_audioSources[i] findById:sourceId matches:matches]; + } +} + +- (NSArray *)getShuffleOrder { + NSMutableArray *order = [NSMutableArray new]; + int offset = [order count]; + NSMutableArray *childOrders = [NSMutableArray new]; // array of array of ints + for (int i = 0; i < [_audioSources count]; i++) { + AudioSource *audioSource = _audioSources[i]; + NSArray *childShuffleOrder = [audioSource getShuffleOrder]; + NSMutableArray *offsetChildShuffleOrder = [NSMutableArray new]; + for (int j = 0; j < [childShuffleOrder count]; j++) { + [offsetChildShuffleOrder addObject:@([childShuffleOrder[j] integerValue] + offset)]; + } + [childOrders addObject:offsetChildShuffleOrder]; + offset += [childShuffleOrder count]; + } + for (int i = 0; i < [_audioSources count]; i++) { + [order addObjectsFromArray:childOrders[[_shuffleOrder[i] integerValue]]]; + } + return order; +} + +- (int)shuffle:(int)treeIndex currentIndex:(int)currentIndex { + int currentChildIndex = -1; + for (int i = 0; i < [_audioSources count]; i++) { + int indexBefore = treeIndex; + AudioSource *child = _audioSources[i]; + treeIndex = [child shuffle:treeIndex currentIndex:currentIndex]; + if (currentIndex >= indexBefore && currentIndex < treeIndex) { + currentChildIndex = i; + } else {} + } + // Shuffle so that the current child is first in the shuffle order + _shuffleOrder = [NSMutableArray arrayWithCapacity:[_audioSources count]]; + for (int i = 0; i < [_audioSources count]; i++) { + [_shuffleOrder addObject:@(0)]; + } + NSLog(@"shuffle: audioSources.count=%d and shuffleOrder.count=%d", [_audioSources count], 
[_shuffleOrder count]); + // First generate a random shuffle + for (int i = 0; i < [_audioSources count]; i++) { + int j = arc4random_uniform(i + 1); + _shuffleOrder[i] = _shuffleOrder[j]; + _shuffleOrder[j] = @(i); + } + // Then bring currentIndex to the front + if (currentChildIndex != -1) { + for (int i = 1; i < [_audioSources count]; i++) { + if ([_shuffleOrder[i] integerValue] == currentChildIndex) { + NSNumber *v = _shuffleOrder[0]; + _shuffleOrder[0] = _shuffleOrder[i]; + _shuffleOrder[i] = v; + break; + } + } + } + return treeIndex; +} + +@end diff --git a/just_audio/macos/Classes/IndexedAudioSource.h b/just_audio/macos/Classes/IndexedAudioSource.h new file mode 100644 index 0000000..7d343d8 --- /dev/null +++ b/just_audio/macos/Classes/IndexedAudioSource.h @@ -0,0 +1,21 @@ +#import "AudioSource.h" +#import "IndexedPlayerItem.h" +#import +#import + +@interface IndexedAudioSource : AudioSource + +@property (readonly, nonatomic) IndexedPlayerItem *playerItem; +@property (readwrite, nonatomic) CMTime duration; +@property (readonly, nonatomic) CMTime position; +@property (readonly, nonatomic) CMTime bufferedPosition; +@property (readonly, nonatomic) BOOL isAttached; + +- (void)attach:(AVQueuePlayer *)player; +- (void)play:(AVQueuePlayer *)player; +- (void)pause:(AVQueuePlayer *)player; +- (void)stop:(AVQueuePlayer *)player; +- (void)seek:(CMTime)position; +- (void)seek:(CMTime)position completionHandler:(void (^)(BOOL))completionHandler; + +@end diff --git a/just_audio/macos/Classes/IndexedAudioSource.m b/just_audio/macos/Classes/IndexedAudioSource.m new file mode 100644 index 0000000..316f900 --- /dev/null +++ b/just_audio/macos/Classes/IndexedAudioSource.m @@ -0,0 +1,68 @@ +#import "IndexedAudioSource.h" +#import "IndexedPlayerItem.h" +#import + +@implementation IndexedAudioSource { + BOOL _isAttached; +} + +- (instancetype)initWithId:(NSString *)sid { + self = [super init]; + NSAssert(self, @"super init cannot be nil"); + _isAttached = NO; + return self; +} + 
+- (IndexedPlayerItem *)playerItem { + return nil; +} + +- (BOOL)isAttached { + return _isAttached; +} + +- (int)buildSequence:(NSMutableArray *)sequence treeIndex:(int)treeIndex { + [sequence addObject:self]; + return treeIndex + 1; +} + +- (int)shuffle:(int)treeIndex currentIndex:(int)currentIndex { + return treeIndex + 1; +} + +- (void)attach:(AVQueuePlayer *)player { + _isAttached = YES; +} + +- (void)play:(AVQueuePlayer *)player { +} + +- (void)pause:(AVQueuePlayer *)player { +} + +- (void)stop:(AVQueuePlayer *)player { +} + +- (void)seek:(CMTime)position { + [self seek:position completionHandler:nil]; +} + +- (void)seek:(CMTime)position completionHandler:(void (^)(BOOL))completionHandler { +} + +- (CMTime)duration { + return kCMTimeInvalid; +} + +- (void)setDuration:(CMTime)duration { +} + +- (CMTime)position { + return kCMTimeInvalid; +} + +- (CMTime)bufferedPosition { + return kCMTimeInvalid; +} + +@end diff --git a/just_audio/macos/Classes/IndexedPlayerItem.h b/just_audio/macos/Classes/IndexedPlayerItem.h new file mode 100644 index 0000000..5d4a11c --- /dev/null +++ b/just_audio/macos/Classes/IndexedPlayerItem.h @@ -0,0 +1,9 @@ +#import + +@class IndexedAudioSource; + +@interface IndexedPlayerItem : AVPlayerItem + +@property (readwrite, nonatomic) IndexedAudioSource *audioSource; + +@end diff --git a/just_audio/macos/Classes/IndexedPlayerItem.m b/just_audio/macos/Classes/IndexedPlayerItem.m new file mode 100644 index 0000000..87fafe0 --- /dev/null +++ b/just_audio/macos/Classes/IndexedPlayerItem.m @@ -0,0 +1,16 @@ +#import "IndexedPlayerItem.h" +#import "IndexedAudioSource.h" + +@implementation IndexedPlayerItem { + IndexedAudioSource *_audioSource; +} + +-(void)setAudioSource:(IndexedAudioSource *)audioSource { + _audioSource = audioSource; +} + +-(IndexedAudioSource *)audioSource { + return _audioSource; +} + +@end diff --git a/just_audio/macos/Classes/JustAudioPlugin.h b/just_audio/macos/Classes/JustAudioPlugin.h new file mode 100644 index 
0000000..3f4068d --- /dev/null +++ b/just_audio/macos/Classes/JustAudioPlugin.h @@ -0,0 +1,4 @@ +#import + +@interface JustAudioPlugin : NSObject +@end diff --git a/just_audio/macos/Classes/JustAudioPlugin.m b/just_audio/macos/Classes/JustAudioPlugin.m new file mode 100644 index 0000000..982a260 --- /dev/null +++ b/just_audio/macos/Classes/JustAudioPlugin.m @@ -0,0 +1,55 @@ +#import "JustAudioPlugin.h" +#import "AudioPlayer.h" +#import +#include + +@implementation JustAudioPlugin { + NSObject* _registrar; + BOOL _configuredSession; +} + ++ (void)registerWithRegistrar:(NSObject*)registrar { + FlutterMethodChannel* channel = [FlutterMethodChannel + methodChannelWithName:@"com.ryanheise.just_audio.methods" + binaryMessenger:[registrar messenger]]; + JustAudioPlugin* instance = [[JustAudioPlugin alloc] initWithRegistrar:registrar]; + [registrar addMethodCallDelegate:instance channel:channel]; +} + +- (instancetype)initWithRegistrar:(NSObject *)registrar { + self = [super init]; + NSAssert(self, @"super init cannot be nil"); + _registrar = registrar; + return self; +} + +- (void)handleMethodCall:(FlutterMethodCall*)call result:(FlutterResult)result { + if ([@"init" isEqualToString:call.method]) { + NSArray* args = (NSArray*)call.arguments; + NSString* playerId = args[0]; + /*AudioPlayer* player =*/ [[AudioPlayer alloc] initWithRegistrar:_registrar playerId:playerId configuredSession:_configuredSession]; + result(nil); + } else if ([@"setIosCategory" isEqualToString:call.method]) { +#if TARGET_OS_IPHONE + NSNumber* categoryIndex = (NSNumber*)call.arguments; + AVAudioSessionCategory category = nil; + switch (categoryIndex.integerValue) { + case 0: category = AVAudioSessionCategoryAmbient; break; + case 1: category = AVAudioSessionCategorySoloAmbient; break; + case 2: category = AVAudioSessionCategoryPlayback; break; + case 3: category = AVAudioSessionCategoryRecord; break; + case 4: category = AVAudioSessionCategoryPlayAndRecord; break; + case 5: category = 
AVAudioSessionCategoryMultiRoute; break; + } + if (category) { + _configuredSession = YES; + } + [[AVAudioSession sharedInstance] setCategory:category error:nil]; +#endif + result(nil); + } else { + result(FlutterMethodNotImplemented); + } +} + +@end diff --git a/just_audio/macos/Classes/LoopingAudioSource.h b/just_audio/macos/Classes/LoopingAudioSource.h new file mode 100644 index 0000000..a77636b --- /dev/null +++ b/just_audio/macos/Classes/LoopingAudioSource.h @@ -0,0 +1,8 @@ +#import "AudioSource.h" +#import + +@interface LoopingAudioSource : AudioSource + +- (instancetype)initWithId:(NSString *)sid audioSources:(NSArray *)audioSources; + +@end diff --git a/just_audio/macos/Classes/LoopingAudioSource.m b/just_audio/macos/Classes/LoopingAudioSource.m new file mode 100644 index 0000000..ba4b52b --- /dev/null +++ b/just_audio/macos/Classes/LoopingAudioSource.m @@ -0,0 +1,53 @@ +#import "AudioSource.h" +#import "LoopingAudioSource.h" +#import + +@implementation LoopingAudioSource { + // An array of duplicates + NSArray *_audioSources; // +} + +- (instancetype)initWithId:(NSString *)sid audioSources:(NSArray *)audioSources { + self = [super initWithId:sid]; + NSAssert(self, @"super init cannot be nil"); + _audioSources = audioSources; + return self; +} + +- (int)buildSequence:(NSMutableArray *)sequence treeIndex:(int)treeIndex { + for (int i = 0; i < [_audioSources count]; i++) { + treeIndex = [_audioSources[i] buildSequence:sequence treeIndex:treeIndex]; + } + return treeIndex; +} + +- (void)findById:(NSString *)sourceId matches:(NSMutableArray *)matches { + [super findById:sourceId matches:matches]; + for (int i = 0; i < [_audioSources count]; i++) { + [_audioSources[i] findById:sourceId matches:matches]; + } +} + +- (NSArray *)getShuffleOrder { + NSMutableArray *order = [NSMutableArray new]; + int offset = (int)[order count]; + for (int i = 0; i < [_audioSources count]; i++) { + AudioSource *audioSource = _audioSources[i]; + NSArray *childShuffleOrder = 
[audioSource getShuffleOrder]; + for (int j = 0; j < [childShuffleOrder count]; j++) { + [order addObject:@([childShuffleOrder[j] integerValue] + offset)]; + } + offset += [childShuffleOrder count]; + } + return order; +} + +- (int)shuffle:(int)treeIndex currentIndex:(int)currentIndex { + // TODO: This should probably shuffle the same way on all duplicates. + for (int i = 0; i < [_audioSources count]; i++) { + treeIndex = [_audioSources[i] shuffle:treeIndex currentIndex:currentIndex]; + } + return treeIndex; +} + +@end diff --git a/just_audio/macos/Classes/UriAudioSource.h b/just_audio/macos/Classes/UriAudioSource.h new file mode 100644 index 0000000..9b74125 --- /dev/null +++ b/just_audio/macos/Classes/UriAudioSource.h @@ -0,0 +1,8 @@ +#import "IndexedAudioSource.h" +#import + +@interface UriAudioSource : IndexedAudioSource + +- (instancetype)initWithId:(NSString *)sid uri:(NSString *)uri; + +@end diff --git a/just_audio/macos/Classes/UriAudioSource.m b/just_audio/macos/Classes/UriAudioSource.m new file mode 100644 index 0000000..91321d4 --- /dev/null +++ b/just_audio/macos/Classes/UriAudioSource.m @@ -0,0 +1,79 @@ +#import "UriAudioSource.h" +#import "IndexedAudioSource.h" +#import "IndexedPlayerItem.h" +#import + +@implementation UriAudioSource { + NSString *_uri; + IndexedPlayerItem *_playerItem; + /* CMTime _duration; */ +} + +- (instancetype)initWithId:(NSString *)sid uri:(NSString *)uri { + self = [super initWithId:sid]; + NSAssert(self, @"super init cannot be nil"); + _uri = uri; + if ([_uri hasPrefix:@"file://"]) { + _playerItem = [[IndexedPlayerItem alloc] initWithURL:[NSURL fileURLWithPath:[_uri substringFromIndex:7]]]; + } else { + _playerItem = [[IndexedPlayerItem alloc] initWithURL:[NSURL URLWithString:_uri]]; + } + if (@available(macOS 10.13, iOS 11.0, *)) { + // This does the best at reducing distortion on voice with speeds below 1.0 + _playerItem.audioTimePitchAlgorithm = AVAudioTimePitchAlgorithmTimeDomain; + } + /* NSKeyValueObservingOptions 
options = */ + /* NSKeyValueObservingOptionOld | NSKeyValueObservingOptionNew; */ + /* [_playerItem addObserver:self */ + /* forKeyPath:@"duration" */ + /* options:options */ + /* context:nil]; */ + return self; +} + +- (IndexedPlayerItem *)playerItem { + return _playerItem; +} + +- (NSArray *)getShuffleOrder { + return @[@(0)]; +} + +- (void)play:(AVQueuePlayer *)player { +} + +- (void)pause:(AVQueuePlayer *)player { +} + +- (void)stop:(AVQueuePlayer *)player { +} + +- (void)seek:(CMTime)position completionHandler:(void (^)(BOOL))completionHandler { + if (!completionHandler || (_playerItem.status == AVPlayerItemStatusReadyToPlay)) { + [_playerItem seekToTime:position toleranceBefore:kCMTimeZero toleranceAfter:kCMTimeZero completionHandler:completionHandler]; + } +} + +- (CMTime)duration { + return _playerItem.duration; +} + +- (void)setDuration:(CMTime)duration { +} + +- (CMTime)position { + return _playerItem.currentTime; +} + +- (CMTime)bufferedPosition { + NSValue *last = _playerItem.loadedTimeRanges.lastObject; + if (last) { + CMTimeRange timeRange = [last CMTimeRangeValue]; + return CMTimeAdd(timeRange.start, timeRange.duration); + } else { + return _playerItem.currentTime; + } + return kCMTimeInvalid; +} + +@end diff --git a/just_audio/macos/just_audio.podspec b/just_audio/macos/just_audio.podspec new file mode 100644 index 0000000..ff946e8 --- /dev/null +++ b/just_audio/macos/just_audio.podspec @@ -0,0 +1,21 @@ +# +# To learn more about a Podspec see http://guides.cocoapods.org/syntax/podspec.html +# +Pod::Spec.new do |s| + s.name = 'just_audio' + s.version = '0.0.1' + s.summary = 'A new flutter plugin project.' + s.description = <<-DESC +A new flutter plugin project. + DESC + s.homepage = 'http://example.com' + s.license = { :file => '../LICENSE' } + s.author = { 'Your Company' => 'email@example.com' } + s.source = { :path => '.' 
} + s.source_files = 'Classes/**/*' + s.public_header_files = 'Classes/**/*.h' + s.dependency 'FlutterMacOS' + s.platform = :osx, '10.11' + s.pod_target_xcconfig = { 'DEFINES_MODULE' => 'YES' } +end + diff --git a/just_audio/pubspec.lock b/just_audio/pubspec.lock new file mode 100644 index 0000000..6c8bd0b --- /dev/null +++ b/just_audio/pubspec.lock @@ -0,0 +1,250 @@ +# Generated by pub +# See https://dart.dev/tools/pub/glossary#lockfile +packages: + async: + dependency: "direct main" + description: + name: async + url: "https://pub.dartlang.org" + source: hosted + version: "2.4.2" + boolean_selector: + dependency: transitive + description: + name: boolean_selector + url: "https://pub.dartlang.org" + source: hosted + version: "2.0.0" + characters: + dependency: transitive + description: + name: characters + url: "https://pub.dartlang.org" + source: hosted + version: "1.0.0" + charcode: + dependency: transitive + description: + name: charcode + url: "https://pub.dartlang.org" + source: hosted + version: "1.1.3" + clock: + dependency: transitive + description: + name: clock + url: "https://pub.dartlang.org" + source: hosted + version: "1.0.1" + collection: + dependency: transitive + description: + name: collection + url: "https://pub.dartlang.org" + source: hosted + version: "1.14.13" + convert: + dependency: transitive + description: + name: convert + url: "https://pub.dartlang.org" + source: hosted + version: "2.1.1" + crypto: + dependency: transitive + description: + name: crypto + url: "https://pub.dartlang.org" + source: hosted + version: "2.1.4" + fake_async: + dependency: transitive + description: + name: fake_async + url: "https://pub.dartlang.org" + source: hosted + version: "1.1.0" + file: + dependency: transitive + description: + name: file + url: "https://pub.dartlang.org" + source: hosted + version: "5.1.0" + flutter: + dependency: "direct main" + description: flutter + source: sdk + version: "0.0.0" + flutter_test: + dependency: "direct dev" + 
description: flutter + source: sdk + version: "0.0.0" + flutter_web_plugins: + dependency: "direct main" + description: flutter + source: sdk + version: "0.0.0" + intl: + dependency: transitive + description: + name: intl + url: "https://pub.dartlang.org" + source: hosted + version: "0.16.1" + matcher: + dependency: transitive + description: + name: matcher + url: "https://pub.dartlang.org" + source: hosted + version: "0.12.8" + meta: + dependency: transitive + description: + name: meta + url: "https://pub.dartlang.org" + source: hosted + version: "1.1.8" + path: + dependency: "direct main" + description: + name: path + url: "https://pub.dartlang.org" + source: hosted + version: "1.7.0" + path_provider: + dependency: "direct main" + description: + name: path_provider + url: "https://pub.dartlang.org" + source: hosted + version: "1.6.10" + path_provider_linux: + dependency: transitive + description: + name: path_provider_linux + url: "https://pub.dartlang.org" + source: hosted + version: "0.0.1+1" + path_provider_macos: + dependency: transitive + description: + name: path_provider_macos + url: "https://pub.dartlang.org" + source: hosted + version: "0.0.4+3" + path_provider_platform_interface: + dependency: transitive + description: + name: path_provider_platform_interface + url: "https://pub.dartlang.org" + source: hosted + version: "1.0.2" + platform: + dependency: transitive + description: + name: platform + url: "https://pub.dartlang.org" + source: hosted + version: "2.2.1" + plugin_platform_interface: + dependency: transitive + description: + name: plugin_platform_interface + url: "https://pub.dartlang.org" + source: hosted + version: "1.0.2" + process: + dependency: transitive + description: + name: process + url: "https://pub.dartlang.org" + source: hosted + version: "3.0.13" + rxdart: + dependency: "direct main" + description: + name: rxdart + url: "https://pub.dartlang.org" + source: hosted + version: "0.24.1" + sky_engine: + dependency: transitive + 
description: flutter + source: sdk + version: "0.0.99" + source_span: + dependency: transitive + description: + name: source_span + url: "https://pub.dartlang.org" + source: hosted + version: "1.7.0" + stack_trace: + dependency: transitive + description: + name: stack_trace + url: "https://pub.dartlang.org" + source: hosted + version: "1.9.5" + stream_channel: + dependency: transitive + description: + name: stream_channel + url: "https://pub.dartlang.org" + source: hosted + version: "2.0.0" + string_scanner: + dependency: transitive + description: + name: string_scanner + url: "https://pub.dartlang.org" + source: hosted + version: "1.0.5" + term_glyph: + dependency: transitive + description: + name: term_glyph + url: "https://pub.dartlang.org" + source: hosted + version: "1.1.0" + test_api: + dependency: transitive + description: + name: test_api + url: "https://pub.dartlang.org" + source: hosted + version: "0.2.17" + typed_data: + dependency: transitive + description: + name: typed_data + url: "https://pub.dartlang.org" + source: hosted + version: "1.2.0" + uuid: + dependency: "direct main" + description: + name: uuid + url: "https://pub.dartlang.org" + source: hosted + version: "2.2.0" + vector_math: + dependency: transitive + description: + name: vector_math + url: "https://pub.dartlang.org" + source: hosted + version: "2.0.8" + xdg_directories: + dependency: transitive + description: + name: xdg_directories + url: "https://pub.dartlang.org" + source: hosted + version: "0.1.0" +sdks: + dart: ">=2.9.0-14.0.dev <3.0.0" + flutter: ">=1.12.13+hotfix.5 <2.0.0" diff --git a/just_audio/pubspec.yaml b/just_audio/pubspec.yaml new file mode 100644 index 0000000..12616f4 --- /dev/null +++ b/just_audio/pubspec.yaml @@ -0,0 +1,37 @@ +name: just_audio +description: Flutter plugin to play audio from streams, files, assets, DASH/HLS streams and playlists. Works with audio_service to play audio in the background. 
+version: 0.3.1 +homepage: https://github.com/ryanheise/just_audio + +environment: + sdk: '>=2.6.0 <3.0.0' + flutter: ">=1.12.8 <2.0.0" + +dependencies: + rxdart: ^0.24.1 + path: ^1.6.4 + path_provider: ^1.6.10 + async: ^2.4.1 + uuid: ^2.2.0 + flutter: + sdk: flutter + flutter_web_plugins: + sdk: flutter + +dev_dependencies: + flutter_test: + sdk: flutter + +flutter: + plugin: + platforms: + android: + package: com.ryanheise.just_audio + pluginClass: JustAudioPlugin + ios: + pluginClass: JustAudioPlugin + macos: + pluginClass: JustAudioPlugin + web: + pluginClass: JustAudioPlugin + fileName: just_audio_web.dart diff --git a/just_audio/test/just_audio_test.dart b/just_audio/test/just_audio_test.dart new file mode 100644 index 0000000..14c6a7a --- /dev/null +++ b/just_audio/test/just_audio_test.dart @@ -0,0 +1,21 @@ +import 'package:flutter/services.dart'; +import 'package:flutter_test/flutter_test.dart'; +import 'package:just_audio/just_audio.dart'; + +void main() { + const MethodChannel channel = MethodChannel('just_audio'); + + setUp(() { + channel.setMockMethodCallHandler((MethodCall methodCall) async { + return '42'; + }); + }); + + tearDown(() { + channel.setMockMethodCallHandler(null); + }); + +// test('getPlatformVersion', () async { +// expect(await AudioPlayer.platformVersion, '42'); +// }); +} diff --git a/lib/api/deezer.dart b/lib/api/deezer.dart index 2bd3ef3..d5743bd 100644 --- a/lib/api/deezer.dart +++ b/lib/api/deezer.dart @@ -360,6 +360,18 @@ class DeezerAPI { //Return playlistId return data['results'].toString(); } - + + //Get part of discography + Future> discographyPage(String artistId, {int start = 0, int nb = 50}) async { + Map data = await callApi('album.getDiscography', params: { + 'art_id': int.parse(artistId), + 'discography_mode': 'all', + 'nb': nb, + 'start': start, + 'nb_songs': 30 + }); + + return data['results']['data'].map((a) => Album.fromPrivateJson(a)).toList(); + } } diff --git a/lib/api/definitions.dart b/lib/api/definitions.dart 
index 61e754f..7faced5 100644 --- a/lib/api/definitions.dart +++ b/lib/api/definitions.dart @@ -80,6 +80,7 @@ class Track { id: this.id, extras: { "playbackDetails": jsonEncode(this.playbackDetails), + "thumb": this.albumArt.thumb, "lyrics": jsonEncode(this.lyrics.toJson()), "albumId": this.album.id, "artists": jsonEncode(this.artists.map((art) => art.toJson()).toList()) @@ -102,7 +103,10 @@ class Track { artists: artists, album: album, id: mi.id, - albumArt: ImageDetails(fullUrl: mi.artUri), + albumArt: ImageDetails( + fullUrl: mi.artUri, + thumbUrl: mi.extras['thumb'] + ), duration: mi.duration, playbackDetails: null, // So it gets updated from api lyrics: Lyrics.fromJson(jsonDecode(((mi.extras??{})['lyrics'])??"{}")) @@ -116,7 +120,7 @@ class Track { title = "${json['SNG_TITLE']} ${json['VERSION']}"; } return Track( - id: json['SNG_ID'], + id: json['SNG_ID'].toString(), title: title, duration: Duration(seconds: int.parse(json['DURATION'])), albumArt: ImageDetails.fromPrivateString(json['ALB_PICTURE']), @@ -180,7 +184,7 @@ class Album { //JSON factory Album.fromPrivateJson(Map json, {Map songsJson = const {}, bool library = false}) => Album( - id: json['ALB_ID'], + id: json['ALB_ID'].toString(), title: json['ALB_TITLE'], art: ImageDetails.fromPrivateString(json['ALB_PICTURE']), artists: (json['ARTISTS']??[json]).map((dynamic art) => Artist.fromPrivateJson(art)).toList(), @@ -240,7 +244,7 @@ class Artist { Map topJson = const {}, bool library = false }) => Artist( - id: json['ART_ID'], + id: json['ART_ID'].toString(), name: json['ART_NAME'], fans: json['NB_FAN'], picture: ImageDetails.fromPrivateString(json['ART_PICTURE'], type: 'artist'), @@ -299,7 +303,7 @@ class Playlist { //JSON factory Playlist.fromPrivateJson(Map json, {Map songsJson = const {}, bool library = false}) => Playlist( - id: json['PLAYLIST_ID'], + id: json['PLAYLIST_ID'].toString(), title: json['TITLE'], trackCount: json['NB_SONG']??songsJson['total'], image: 
ImageDetails.fromPrivateString(json['PLAYLIST_PICTURE'], type: 'playlist'), diff --git a/lib/api/download.dart b/lib/api/download.dart index b7bf692..daec483 100644 --- a/lib/api/download.dart +++ b/lib/api/download.dart @@ -328,7 +328,8 @@ class DownloadManager { List duplicate = await db.rawQuery('SELECT * FROM downloads WHERE trackId == ?', [track.id]); if (duplicate.length != 0) return; //Save art - await imagesDatabase.getImage(track.albumArt.full, permanent: true); + //await imagesDatabase.getImage(track.albumArt.full); + imagesDatabase.saveImage(track.albumArt.full); //Save to db b.insert('tracks', track.toSQL(off: true), conflictAlgorithm: ConflictAlgorithm.replace); b.insert('albums', track.album.toSQL(), conflictAlgorithm: ConflictAlgorithm.ignore); diff --git a/lib/api/player.dart b/lib/api/player.dart index e98707a..dbf4078 100644 --- a/lib/api/player.dart +++ b/lib/api/player.dart @@ -1,9 +1,6 @@ import 'package:audio_service/audio_service.dart'; -import 'package:collection/collection.dart'; -import 'package:dio/dio.dart'; import 'package:fluttertoast/fluttertoast.dart'; import 'package:freezer/api/deezer.dart'; -import 'package:freezer/ui/cached_image.dart'; import 'package:just_audio/just_audio.dart'; import 'package:connectivity/connectivity.dart'; import 'package:path/path.dart' as p; @@ -23,7 +20,8 @@ class PlayerHelper { StreamSubscription _customEventSubscription; StreamSubscription _playbackStateStreamSubscription; QueueSource queueSource; - RepeatType repeatType = RepeatType.NONE; + LoopMode repeatType = LoopMode.off; + bool shuffle = false; //Find queue index by id int get queueIndex => AudioService.queue.indexWhere((mi) => mi.id == AudioService.currentMediaItem?.id??'Random string so it returns -1'); @@ -45,7 +43,6 @@ class PlayerHelper { if (event['action'] == 'queueEnd') { //If last song is played, load more queue this.queueSource = QueueSource.fromJson(event['queueSource']); - print(queueSource.toJson()); return; } }); @@ -74,20 +71,24 @@ 
class PlayerHelper { ); } - + Future toggleShuffle() async { + this.shuffle = !this.shuffle; + await AudioService.customAction('shuffle', this.shuffle); + } + //Repeat toggle Future changeRepeat() async { //Change to next repeat type switch (repeatType) { - case RepeatType.NONE: - repeatType = RepeatType.LIST; break; - case RepeatType.LIST: - repeatType = RepeatType.TRACK; break; + case LoopMode.one: + repeatType = LoopMode.off; break; + case LoopMode.all: + repeatType = LoopMode.one; break; default: - repeatType = RepeatType.NONE; break; + repeatType = LoopMode.all; break; } //Set repeat type - await AudioService.customAction("repeatType", RepeatType.values.indexOf(repeatType)); + await AudioService.customAction("repeatType", LoopMode.values.indexOf(repeatType)); } //Executed before exit @@ -101,7 +102,7 @@ class PlayerHelper { await startService(); await settings.updateAudioServiceQuality(); await AudioService.updateQueue(queue); - await AudioService.playFromMediaId(trackId); + await AudioService.skipToQueueItem(trackId); } //Play track from album @@ -178,277 +179,229 @@ void backgroundTaskEntrypoint() async { } class AudioPlayerTask extends BackgroundAudioTask { + AudioPlayer _player = AudioPlayer(); - AudioPlayer _audioPlayer = AudioPlayer(); - + //Queue List _queue = []; - int _queueIndex = -1; + int _queueIndex = 0; + ConcatenatingAudioSource _audioSource; - bool _playing; - bool _interrupted; AudioProcessingState _skipState; - Duration _lastPosition; + bool _interrupted; + Seeker _seeker; - ImagesDatabase imagesDB; + //Stream subscriptions + StreamSubscription _eventSub; + + //Loaded from file/frontend int mobileQuality; int wifiQuality; - - StreamSubscription _eventSub; - StreamSubscription _playerStateSub; - QueueSource queueSource; - int repeatType = 0; + Duration _lastPosition; MediaItem get mediaItem => _queue[_queueIndex]; - //Controls - final playControl = MediaControl( - androidIcon: 'drawable/ic_play_arrow', - label: 'Play', - action: 
MediaAction.play - ); - final pauseControl = MediaControl( - androidIcon: 'drawable/ic_pause', - label: 'Pause', - action: MediaAction.pause - ); - final stopControl = MediaControl( - androidIcon: 'drawable/ic_stop', - label: 'Stop', - action: MediaAction.stop - ); - final nextControl = MediaControl( - androidIcon: 'drawable/ic_skip_next', - label: 'Next', - action: MediaAction.skipToNext - ); - final previousControl = MediaControl( - androidIcon: 'drawable/ic_skip_previous', - label: 'Previous', - action: MediaAction.skipToPrevious - ); - @override - Future onStart(Map params) async { - _playerStateSub = _audioPlayer.playbackStateStream - .where((state) => state == AudioPlaybackState.completed) - .listen((_event) { - if (_queue.length > _queueIndex + 1) { - onSkipToNext(); - return; - } else { - //Repeat whole list (if enabled) - if (repeatType == 1) { - _skip(-_queueIndex); - return; - } - //Ask for more tracks in queue - AudioServiceBackground.sendCustomEvent({ - 'action': 'queueEnd', - 'queueSource': (queueSource??QueueSource()).toJson() - }); - if (_playing) _playing = false; - _setState(AudioProcessingState.none); - return; + Future onStart(Map params) { + + //Update track index + _player.currentIndexStream.listen((index) { + if (index != null) { + _queueIndex = index; + AudioServiceBackground.setMediaItem(mediaItem); + } + }); + //Update state on all clients on change + _eventSub = _player.playbackEventStream.listen((event) { + _broadcastState(); + }); + _player.processingStateStream.listen((state) { + switch(state) { + case ProcessingState.completed: + //Player ended, get more songs + AudioServiceBackground.sendCustomEvent({ + 'action': 'queueEnd', + 'queueSource': (queueSource??QueueSource()).toJson() + }); + break; + case ProcessingState.ready: + //Ready to play + _skipState = null; + break; + default: + break; } }); - //Read audio player events - _eventSub = _audioPlayer.playbackEventStream.listen((event) { - AudioProcessingState bufferingState = 
event.buffering ? AudioProcessingState.buffering : null; - switch (event.state) { - case AudioPlaybackState.paused: - case AudioPlaybackState.playing: - _setState(bufferingState ?? AudioProcessingState.ready, pos: event.position); - break; - case AudioPlaybackState.connecting: - _setState(_skipState ?? AudioProcessingState.connecting, pos: event.position); - break; - default: - break; - } - }); - - //Initialize later - //await imagesDB.init(); - + //Load queue AudioServiceBackground.setQueue(_queue); AudioServiceBackground.sendCustomEvent({'action': 'onLoad'}); } @override - Future onSkipToNext() async { - //If repeating allowed - if (repeatType == 2) { - await _skip(0); - return null; - } - _skip(1); - } + Future onSkipToQueueItem(String mediaId) async { + _lastPosition = null; - @override - Future onSkipToPrevious() => _skip(-1); + //Calculate new index + final newIndex = _queue.indexWhere((i) => i.id == mediaId); + if (newIndex == -1) return; - Future _skip(int offset) async { - int newPos = _queueIndex + offset; - //Out of bounds - if (newPos >= _queue.length || newPos < 0) return; - //First song - if (_playing == null) { - _playing = true; - } else if (_playing) { - await _audioPlayer.stop(); - } - //Update position, album art source, queue source text - _queueIndex = newPos; - //Get uri - String uri = await _getTrackUri(mediaItem); - //Modify extras - Map extras = mediaItem.extras; - extras.addAll({"qualityString": await _getQualityString(uri, mediaItem.duration)}); - _queue[_queueIndex] = mediaItem.copyWith( - artUri: await _getArtUri(mediaItem.artUri), - extras: extras - ); - //Play - AudioServiceBackground.setMediaItem(mediaItem); - _skipState = offset > 0 ? AudioProcessingState.skippingToNext:AudioProcessingState.skippingToPrevious; - //Load - await _audioPlayer.setUrl(uri); + //Update buffering state + _skipState = newIndex > _queueIndex + ? 
AudioProcessingState.skippingToNext + : AudioProcessingState.skippingToPrevious; + + //Skip in player + await _player.seek(Duration.zero, index: newIndex); _skipState = null; - await _saveQueue(); - (_playing??false) ? onPlay() : _setState(AudioProcessingState.ready); - } - - @override - void onPlay() async { - //Start playing preloaded queue - if (AudioServiceBackground.state.processingState == AudioProcessingState.none && _queue.length > 0) { - if (_queueIndex < 0 || _queueIndex == null) { - await this._skip(1); - } else { - await this._skip(0); - } - //Restore position from saved queue - if (_lastPosition != null) { - onSeekTo(_lastPosition); - _lastPosition = null; - } - return; - } - if (_skipState == null) { - _playing = true; - _audioPlayer.play(); - } - } - - @override - void onPause() { - if (_skipState == null && _playing) { - _playing = false; - _audioPlayer.pause(); - } - } - - @override - void onSeekTo(Duration pos) { - _audioPlayer.seek(pos); - } - - @override - void onClick(MediaButton button) { - if (_playing) onPause(); onPlay(); } @override - Future onUpdateQueue(List q) async { - this._queue = q; - AudioServiceBackground.setQueue(_queue); - await _saveQueue(); + Future onPlay() { + _player.play(); + //Restore position on play + if (_lastPosition != null) { + onSeekTo(_lastPosition); + } } @override - void onPlayFromMediaId(String mediaId) async { - int pos = this._queue.indexWhere((mi) => mi.id == mediaId); - await _skip(pos - _queueIndex); - if (_playing == null || !_playing) onPlay(); - } + Future onPause() => _player.pause(); @override - Future onFastForward() async { - await _seekRelative(fastForwardInterval); - } + Future onSeekTo(Duration pos) => _player.seek(pos); @override - void onAddQueueItemAt(MediaItem mi, int index) { - _queue.insert(index, mi); - AudioServiceBackground.setQueue(_queue); - _saveQueue(); - } + Future onFastForward() => _seekRelative(fastForwardInterval); @override - void onAddQueueItem(MediaItem mi) { - 
_queue.add(mi); - AudioServiceBackground.setQueue(_queue); - _saveQueue(); - } + Future onRewind() => _seekRelative(-rewindInterval); @override - Future onRewind() async { - await _seekRelative(rewindInterval); + Future onSeekForward(bool begin) async => _seekContinuously(begin, 1); + + @override + Future onSeekBackward(bool begin) async => _seekContinuously(begin, -1); + + //While seeking, jump 10s every 1s + void _seekContinuously(bool begin, int direction) { + _seeker?.stop(); + if (begin) { + _seeker = Seeker(_player, Duration(seconds: 10 * direction), Duration(seconds: 1), mediaItem)..start(); + } } + //Relative seek Future _seekRelative(Duration offset) async { - Duration newPos = _audioPlayer.playbackEvent.position + offset; + Duration newPos = _player.position + offset; + //Out of bounds check if (newPos < Duration.zero) newPos = Duration.zero; if (newPos > mediaItem.duration) newPos = mediaItem.duration; - onSeekTo(_audioPlayer.playbackEvent.position + offset); + + await _player.seek(newPos); } + //Update state on all clients + Future _broadcastState() async { + await AudioServiceBackground.setState( + controls: [ + MediaControl.skipToPrevious, + if (_player.playing) MediaControl.pause else MediaControl.play, + MediaControl.skipToNext, + //MediaControl.stop + ], + systemActions: [ + MediaAction.seekTo, + MediaAction.seekForward, + MediaAction.seekBackward + ], + processingState: _getProcessingState(), + playing: _player.playing, + position: _player.position, + bufferedPosition: _player.bufferedPosition, + speed: _player.speed + ); + } + + //just_audio state -> audio_service state. 
If skipping, use _skipState + AudioProcessingState _getProcessingState() { + if (_skipState != null) return _skipState; + //SRC: audio_service example + switch (_player.processingState) { + case ProcessingState.none: + return AudioProcessingState.stopped; + case ProcessingState.loading: + return AudioProcessingState.connecting; + case ProcessingState.buffering: + return AudioProcessingState.buffering; + case ProcessingState.ready: + return AudioProcessingState.ready; + case ProcessingState.completed: + return AudioProcessingState.completed; + default: + throw Exception("Invalid state: ${_player.processingState}"); + } + } + + //Replace current queue @override - Future onUpdateMediaItem(MediaItem mediaItem) async { - _queue[_queueIndex] = mediaItem; + Future onUpdateQueue(List q) async { + //just_audio + _player.stop(); + if (_audioSource != null) _audioSource.clear(); + //audio_service + this._queue = q; + AudioServiceBackground.setQueue(_queue); + //Load + await _loadQueue(); + await _player.seek(Duration.zero, index: 0); + } + + //Load queue to just_audio + Future _loadQueue() async { + List sources = []; + for(int i=0; i<_queue.length; i++) { + sources.add(await _mediaItemToAudioSource(_queue[i])); + } + + _audioSource = ConcatenatingAudioSource(children: sources); + //Load in just_audio + try { + await _player.load(_audioSource); + } catch (e) { + //Error loading tracks + } AudioServiceBackground.setMediaItem(mediaItem); } - //Audio interruptions - @override - void onAudioFocusLost(AudioInterruption interruption) { - if (_playing) _interrupted = true; - switch (interruption) { - case AudioInterruption.pause: - case AudioInterruption.temporaryPause: - case AudioInterruption.unknownPause: - if (_playing) onPause(); - break; - case AudioInterruption.temporaryDuck: - _audioPlayer.setVolume(0.5); - break; + Future _mediaItemToAudioSource(MediaItem mi) async { + String url = await _getTrackUrl(mi); + if (url.startsWith('http')) return 
ProgressiveAudioSource(Uri.parse(url)); + return AudioSource.uri(Uri.parse(url)); + } + + Future _getTrackUrl(MediaItem mediaItem, {int quality}) async { + //Check if offline + String _offlinePath = p.join((await getExternalStorageDirectory()).path, 'offline/'); + File f = File(p.join(_offlinePath, mediaItem.id)); + if (await f.exists()) { + return f.path; } + + //Due to current limitations of just_audio, quality fallback moved to DeezerDataSource in ExoPlayer + //This just returns fake url that contains metadata + List playbackDetails = jsonDecode(mediaItem.extras['playbackDetails']); + //Quality + ConnectivityResult conn = await Connectivity().checkConnectivity(); + quality = mobileQuality; + if (conn == ConnectivityResult.wifi) quality = wifiQuality; + + String url = 'https://dzcdn.net/?md5=${playbackDetails[0]}&mv=${playbackDetails[1]}&q=${quality.toString()}#${mediaItem.id}'; + return url; } - @override - void onAudioFocusGained(AudioInterruption interruption) { - switch (interruption) { - case AudioInterruption.temporaryPause: - if (!_playing && _interrupted) onPlay(); - break; - case AudioInterruption.temporaryDuck: - _audioPlayer.setVolume(1.0); - break; - default: - break; - } - _interrupted = false; - } - - @override - void onAudioBecomingNoisy() { - onPause(); - } - - + //Custom actions @override Future onCustomAction(String name, dynamic args) async { if (name == 'updateQuality') { @@ -457,228 +410,178 @@ class AudioPlayerTask extends BackgroundAudioTask { this.wifiQuality = args['wifiQuality']; this.mobileQuality = args['mobileQuality']; } - if (name == 'saveQueue') { - await this._saveQueue(); - } - //Load queue, called after start - if (name == 'load') { - await _loadQueue(); - } //Change queue source if (name == 'queueSource') { this.queueSource = QueueSource.fromJson(Map.from(args)); } - //Shuffle - if (name == 'shuffleQueue') { - MediaItem mi = mediaItem; - shuffle(this._queue); - _queueIndex = _queue.indexOf(mi); - 
AudioServiceBackground.setQueue(this._queue); - } - //Repeating + //Looping if (name == 'repeatType') { - this.repeatType = args; + _player.setLoopMode(LoopMode.values[args]); } + if (name == 'saveQueue') await this._saveQueue(); + //Load queue after some initialization in frontend + if (name == 'load') await this._loadQueueFile(); + //Shuffle + if (name == 'shuffle') await _player.setShuffleModeEnabled(args); + return true; } - Future _getArtUri(String url) async { - //Load from cache - if (url.startsWith('http')) { - //Prepare db - if (imagesDB == null) { - imagesDB = ImagesDatabase(); - await imagesDB.init(); - } - - String path = await imagesDB.getImage(url); - return 'file://$path'; + //Audio interruptions + @override + Future onAudioFocusLost(AudioInterruption interruption) { + if (_player.playing) _interrupted = true; + switch (interruption) { + case AudioInterruption.pause: + case AudioInterruption.temporaryPause: + case AudioInterruption.unknownPause: + if (_player.playing) onPause(); + break; + case AudioInterruption.temporaryDuck: + _player.setVolume(0.5); + break; } - //If file - if (url.startsWith('/')) return 'file://' + url; - return url; - } - - Future _getTrackUri(MediaItem mi, {int quality}) async { - String prefix = 'DEEZER|${mi.id}|'; - - //Check if song is available offline - String _offlinePath = p.join((await getExternalStorageDirectory()).path, 'offline/'); - File f = File(p.join(_offlinePath, mi.id)); - if (await f.exists()) return f.path; - - //Get online url - Track t = Track( - id: mi.id, - playbackDetails: jsonDecode(mi.extras['playbackDetails']) //JSON Because of audio_service bug - ); - - //Check connection - if (quality == null) { - ConnectivityResult conn = await Connectivity().checkConnectivity(); - quality = mobileQuality; - if (conn == ConnectivityResult.wifi) quality = wifiQuality; - } - String url = t.getUrl(quality); - - //Quality fallback - Dio dio = Dio(); - try { - await dio.head(url); - return prefix + url; - } catch (e) { 
- if (quality == 9) return _getTrackUri(mi, quality: 3); - if (quality == 3) return _getTrackUri(mi, quality: 1); - throw Exception('No available quality!'); - } - } - - - Future _getQualityString(String uri, Duration duration) async { - //Get url/path - String url = uri; - List split = uri.split('|'); - if (split.length >= 3) url = split[2]; - - int size; - String format; - String source; - - //Local file - if (url.startsWith('/')) { - //Read first 4 bytes of file, get format - File f = File(url); - Stream> reader = f.openRead(0, 4); - List magic = await reader.first; - format = _magicToFormat(magic); - size = await f.length(); - source = 'Offline'; - } - - //URL - if (url.startsWith('http')) { - Dio dio = Dio(); - Response response = await dio.head(url); - size = int.parse(response.headers['Content-Length'][0]); - //Parse format - format = response.headers['Content-Type'][0]; - if (format.trim() == 'audio/mpeg') format = 'MP3'; - if (format.trim() == 'audio/flac') format = 'FLAC'; - source = 'Stream'; - } - //Calculate - return '$format ${_bitrateString(size, duration.inSeconds)} ($source)'; - } - - String _bitrateString(int size, int duration) { - int bitrate = ((size / 125) / duration).floor(); - //Prettify - if (bitrate > 315 && bitrate < 325) return '320kbps'; - if (bitrate > 125 && bitrate < 135) return '128kbps'; - return '${bitrate}kbps'; - } - - //Magic number to string, source: https://en.wikipedia.org/wiki/List_of_file_signatures - String _magicToFormat(List magic) { - Function eq = const ListEquality().equals; - if (eq(magic.sublist(0, 4), [0x66, 0x4c, 0x61, 0x43])) return 'FLAC'; - //MP3 With ID3 - if (eq(magic.sublist(0, 3), [0x49, 0x44, 0x33])) return 'MP3'; - //MP3 - List m = magic.sublist(0, 2); - if (eq(m, [0xff, 0xfb]) ||eq(m, [0xff, 0xf3]) || eq(m, [0xff, 0xf2])) return 'MP3'; - //Unknown - return 'UNK'; } @override - void onTaskRemoved() async { + Future onAudioFocusGained(AudioInterruption interruption) { + switch (interruption) { + case 
AudioInterruption.temporaryPause: + if (!_player.playing && _interrupted) onPlay(); + break; + case AudioInterruption.temporaryDuck: + _player.setVolume(1.0); + break; + default: + break; + } + _interrupted = false; + } + + @override + Future onAudioBecomingNoisy() { + onPause(); + } + + @override + Future onTaskRemoved() async { await onStop(); } @override + Future onClose() async { + await onStop(); + } + Future onStop() async { - _audioPlayer.stop(); - if (_playerStateSub != null) _playerStateSub.cancel(); - if (_eventSub != null) _eventSub.cancel(); await _saveQueue(); + _player.stop(); + if (_eventSub != null) _eventSub.cancel(); - await super.onStop(); + super.onStop(); } - @override - void onClose() async { - //await _saveQueue(); - //Gets saved in onStop() - await onStop(); - } - - //Update state - void _setState(AudioProcessingState state, {Duration pos}) { - AudioServiceBackground.setState( - controls: _getControls(), - systemActions: (_playing == null) ? [] : [MediaAction.seekTo], - processingState: state ?? AudioServiceBackground.state.processingState, - playing: _playing ?? false, - position: pos ?? _audioPlayer.playbackEvent.position, - bufferedPosition: pos ?? _audioPlayer.playbackEvent.position, - speed: _audioPlayer.speed - ); - } - - List _getControls() { - if (_playing == null || !_playing) { - //Paused / not-started - return [ - previousControl, - playControl, - nextControl - ]; - } - //Playing - return [ - previousControl, - pauseControl, - nextControl - ]; - } - - //Get queue saved file path + //Get queue save file path Future _getQueuePath() async { Directory dir = await getApplicationDocumentsDirectory(); - return p.join(dir.path, 'offline.json'); + return p.join(dir.path, 'playback.json'); } //Export queue to JSON Future _saveQueue() async { - print('save'); - File f = File(await _getQueuePath()); - await f.writeAsString(jsonEncode({ + String path = await _getQueuePath(); + File f = File(path); + //Create if doesnt exist + if (! 
await File(path).exists()) { + f = await f.create(); + } + + Map data = { 'index': _queueIndex, 'queue': _queue.map>((mi) => mi.toJson()).toList(), - 'position': _audioPlayer.playbackEvent.position.inMilliseconds, + 'position': _player.position.inMilliseconds, 'queueSource': (queueSource??QueueSource()).toJson(), - })); + }; + await f.writeAsString(jsonEncode(data)); } - Future _loadQueue() async { + //Restore queue & playback info from path + Future _loadQueueFile() async { File f = File(await _getQueuePath()); if (await f.exists()) { Map json = jsonDecode(await f.readAsString()); this._queue = (json['queue']??[]).map((mi) => MediaItem.fromJson(mi)).toList(); - this._queueIndex = json['index'] ?? -1; + this._queueIndex = json['index'] ?? 0; this._lastPosition = Duration(milliseconds: json['position']??0); this.queueSource = QueueSource.fromJson(json['queueSource']??{}); + //Restore queue if (_queue != null) { - AudioServiceBackground.setQueue(_queue); + await AudioServiceBackground.setQueue(_queue); + await _loadQueue(); AudioServiceBackground.setMediaItem(mediaItem); - //Update state to allow play button in notification - this._setState(AudioProcessingState.none, pos: _lastPosition); } - //Send restored queue source to ui - AudioServiceBackground.sendCustomEvent({'action': 'onRestore', 'queueSource': (queueSource??QueueSource()).toJson()}); - return true; + } + //Send restored queue source to ui + AudioServiceBackground.sendCustomEvent({ + 'action': 'onRestore', + 'queueSource': (queueSource??QueueSource()).toJson() + }); + return true; + } + + @override + Future onAddQueueItemAt(MediaItem mi, int index) async { + //-1 == play next + if (index == -1) index = _queueIndex + 1; + + + _queue.insert(index, mi); + await AudioServiceBackground.setQueue(_queue); + await _audioSource.insert(index, await _mediaItemToAudioSource(mi)); + + _saveQueue(); + } + + //Add at end of queue + @override + Future onAddQueueItem(MediaItem mi) async { + _queue.add(mi); + await 
AudioServiceBackground.setQueue(_queue); + await _audioSource.add(await _mediaItemToAudioSource(mi)); + _saveQueue(); + } + + @override + Future onPlayFromMediaId(String mediaId) async { + //Does the same thing + await this.onSkipToQueueItem(mediaId); + } + +} + +//Seeker from audio_service example (why reinvent the wheel?) +//While holding seek button, will continuously seek +class Seeker { + final AudioPlayer player; + final Duration positionInterval; + final Duration stepInterval; + final MediaItem mediaItem; + bool _running = false; + + Seeker(this.player, this.positionInterval, this.stepInterval, this.mediaItem); + + Future start() async { + _running = true; + while (_running) { + Duration newPosition = player.position + positionInterval; + if (newPosition < Duration.zero) newPosition = Duration.zero; + if (newPosition > mediaItem.duration) newPosition = mediaItem.duration; + player.seek(newPosition); + await Future.delayed(stepInterval); } } + void stop() { + _running = false; + } } \ No newline at end of file diff --git a/lib/main.dart b/lib/main.dart index 8b147a9..de73e8d 100644 --- a/lib/main.dart +++ b/lib/main.dart @@ -27,7 +27,7 @@ void main() async { //Initialize globals settings = await Settings().loadSettings(); - await imagesDatabase.init(); + //await imagesDatabase.init(); await downloadManager.init(); runApp(FreezerApp()); @@ -44,9 +44,6 @@ class _FreezerAppState extends State { //Make update theme global updateTheme = _updateTheme; - //Precache placeholder - precacheImage(imagesDatabase.placeholderThumb, context); - super.initState(); } diff --git a/lib/ui/cached_image.dart b/lib/ui/cached_image.dart index 1dda7c5..b8daf75 100644 --- a/lib/ui/cached_image.dart +++ b/lib/ui/cached_image.dart @@ -1,209 +1,66 @@ +import 'package:flutter/cupertino.dart'; import 'package:flutter/material.dart'; import 'package:palette_generator/palette_generator.dart'; -import 'package:path/path.dart' as p; -import 'package:path_provider/path_provider.dart'; -import 
'package:sqflite/sqflite.dart'; -import 'package:crypto/crypto.dart'; -import 'package:dio/dio.dart'; - -import 'dart:io'; -import 'dart:convert'; +import 'package:cached_network_image/cached_network_image.dart'; ImagesDatabase imagesDatabase = ImagesDatabase(); + class ImagesDatabase { /* - images.db: - Table: images - Fields: - id - id - name - md5 hash of url. also filename - url - url - permanent - 0/1 - if image is cached or offline + !!! Using the wrappers so i don't have to rewrite most of the code, because of migration to cached network image */ - - Database db; - String imagesPath; - - ImageProvider placeholderThumb = new AssetImage('assets/cover_thumb.jpg'); - - //Prepare database - Future init() async { - String dir = await getDatabasesPath(); - String path = p.join(dir, 'images.db'); - db = await openDatabase( - path, - version: 1, - singleInstance: false, - onCreate: (Database db, int version) async { - //Create table on db created - await db.execute('CREATE TABLE images (id INTEGER PRIMARY KEY, name TEXT, url TEXT, permanent INTEGER)'); - } - ); - //Prepare folders - imagesPath = p.join((await getApplicationDocumentsDirectory()).path, 'images/'); - Directory imagesDir = Directory(imagesPath); - await imagesDir.create(recursive: true); + void saveImage(String url) { + CachedNetworkImageProvider(url); } - String getPath(String name) { - return p.join(imagesPath, name); + Future getPaletteGenerator(String url) { + return PaletteGenerator.fromImageProvider(CachedNetworkImageProvider(url)); } - //Get image url/path, cache it - Future getImage(String url, {bool permanent = false}) async { - //Already file - if (!url.startsWith('http')) { - url = url.replaceFirst('file://', ''); - if (!permanent) return url; - //Update in db to permanent - String name = p.basenameWithoutExtension(url); - await db.rawUpdate('UPDATE images SET permanent == 1 WHERE name == ?', [name]); - } - //Filename = md5 hash - String hash = md5.convert(utf8.encode(url)).toString(); - List 
results = await db.rawQuery('SELECT * FROM images WHERE name == ?', [hash]); - String path = getPath(hash); - if (results.length > 0) { - //Image in database - return path; - } - //Save image - Dio dio = Dio(); - try { - await dio.download(url, path); - await db.insert('images', {'url': url, 'name': hash, 'permanent': permanent?1:0}); - return path; - } catch (e) { - return null; - } - } - - Future getPaletteGenerator(String url) async { - String path = await getImage(url); - //Get image provider - ImageProvider provider = placeholderThumb; - if (path != null) { - provider = FileImage(File(path)); - } - PaletteGenerator paletteGenerator = await PaletteGenerator.fromImageProvider(provider); - return paletteGenerator; - } - - //Get primary color from album art Future getPrimaryColor(String url) async { PaletteGenerator paletteGenerator = await getPaletteGenerator(url); return paletteGenerator.colors.first; } - //Check if is dark Future isDark(String url) async { PaletteGenerator paletteGenerator = await getPaletteGenerator(url); return paletteGenerator.colors.first.computeLuminance() > 0.5 ? 
false : true; } - } class CachedImage extends StatefulWidget { - final String url; final double width; final double height; final bool circular; + final bool fullThumb; - const CachedImage({Key key, this.url, this.height, this.width, this.circular = false}): super(key: key); + const CachedImage({Key key, this.url, this.height, this.width, this.circular = false, this.fullThumb = false}): super(key: key); @override _CachedImageState createState() => _CachedImageState(); } class _CachedImageState extends State { - - ImageProvider _image = imagesDatabase.placeholderThumb; - double _opacity = 0.0; - bool _disposed = false; - String _prevUrl; - - Future _getImage() async { - //Image already path - if (!widget.url.startsWith('http')) { - //Remove file://, if used in audio_service - if (widget.url.startsWith('/')) return FileImage(File(widget.url)); - return FileImage(File(widget.url.replaceFirst('file://', ''))); - } - //Load image from db - String path = await imagesDatabase.getImage(widget.url); - if (path == null) return imagesDatabase.placeholderThumb; - return FileImage(File(path)); - } - - //Load image and fade - void _load() async { - if (_prevUrl == widget.url) return; - - ImageProvider image = await _getImage(); - if (_disposed) return; - setState(() { - _image = image; - _opacity = 1.0; - }); - _prevUrl = widget.url; - } - - @override - void dispose() { - _disposed = true; - super.dispose(); - } - - @override - void initState() { - _load(); - super.initState(); - } - - @override - void didUpdateWidget(CachedImage oldWidget) { - _load(); - super.didUpdateWidget(oldWidget); - } - @override Widget build(BuildContext context) { - return Stack( - children: [ - widget.circular ? 
- CircleAvatar( - radius: (widget.width??widget.height), - backgroundImage: imagesDatabase.placeholderThumb, - ): - Image( - image: imagesDatabase.placeholderThumb, - height: widget.height, - width: widget.width, - ), + if (widget.circular) return ClipOval( + child: CachedImage(url: widget.url, height: widget.height, width: widget.width, circular: false) + ); - AnimatedOpacity( - duration: Duration(milliseconds: 250), - opacity: _opacity, - child: widget.circular ? - CircleAvatar( - radius: (widget.width??widget.height), - backgroundImage: _image, - ): - Image( - image: _image, - height: widget.height, - width: widget.width, - ), - ) - ], + return CachedNetworkImage( + imageUrl: widget.url, + width: widget.width, + height: widget.height, + placeholder: (context, url) { + if (widget.fullThumb) return Image.asset('assets/cover.jpg', width: widget.width, height: widget.height,); + return Image.asset('assets/cover_thumb.jpg', width: widget.width, height: widget.height); + }, + errorWidget: (context, url, error) => Image.asset('assets/cover_thumb.jpg', width: widget.width, height: widget.height), ); } - } - - diff --git a/lib/ui/details_screens.dart b/lib/ui/details_screens.dart index b08e06d..0c16fda 100644 --- a/lib/ui/details_screens.dart +++ b/lib/ui/details_screens.dart @@ -394,7 +394,19 @@ class ArtistDetails extends StatelessWidget { fontSize: 22.0 ), ), - ...List.generate(artist.albums.length, (i) { + ...List.generate(artist.albums.length > 10 ? 
11 : artist.albums.length + 1, (i) { + //Show discography + if (i == 10 || i == artist.albums.length) { + return ListTile( + title: Text('Show all albums'), + onTap: () { + Navigator.of(context).push( + MaterialPageRoute(builder: (context) => DiscographyScreen(artist: artist,)) + ); + } + ); + } + //Top albums Album a = artist.albums[i]; return AlbumTile( a, @@ -419,6 +431,103 @@ class ArtistDetails extends StatelessWidget { } } +class DiscographyScreen extends StatefulWidget { + + Artist artist; + DiscographyScreen({@required this.artist, Key key}): super(key: key); + + @override + _DiscographyScreenState createState() => _DiscographyScreenState(); +} + +class _DiscographyScreenState extends State { + + Artist artist; + bool _loading = false; + bool _error = false; + ScrollController _scrollController = ScrollController(); + + Future _load() async { + if (artist.albums.length >= artist.albumCount || _loading) return; + setState(() => _loading = true); + + //Fetch data + List data; + try { + data = await deezerAPI.discographyPage(artist.id, start: artist.albums.length); + } catch (e) { + setState(() { + _error = true; + _loading = false; + }); + return; + } + + //Save + setState(() { + artist.albums.addAll(data); + _loading = false; + }); + + } + + @override + void initState() { + artist = widget.artist; + + //Lazy loading scroll + _scrollController.addListener(() { + double off = _scrollController.position.maxScrollExtent * 0.90; + if (_scrollController.position.pixels > off) { + _load(); + } + }); + + super.initState(); + } + + @override + Widget build(BuildContext context) { + return Scaffold( + appBar: AppBar(title: Text('Discography'),), + body: ListView.builder( + controller: _scrollController, + itemCount: artist.albums.length + 1, + itemBuilder: (context, i) { + //Loading + if (i == artist.albums.length) { + if (_loading) + return Row( + mainAxisAlignment: MainAxisAlignment.center, + children: [CircularProgressIndicator()], + ); + //Error + if (_error) + 
return ErrorScreen(); + //Success + return Container(width: 0, height: 0,); + } + + Album a = artist.albums[i]; + return AlbumTile( + a, + onTap: () { + Navigator.of(context).push( + MaterialPageRoute(builder: (context) => AlbumDetails(a)) + ); + }, + onHold: () { + MenuSheet m = MenuSheet(context); + m.defaultAlbumMenu(a); + }, + ); + }, + ), + ); + } +} + + class PlaylistDetails extends StatefulWidget { diff --git a/lib/ui/menu.dart b/lib/ui/menu.dart index dc8cd63..82f57bb 100644 --- a/lib/ui/menu.dart +++ b/lib/ui/menu.dart @@ -140,15 +140,8 @@ class MenuSheet { title: Text('Play next'), leading: Icon(Icons.playlist_play), onTap: () async { - if (playerHelper.queueIndex == -1) { - //First track - await AudioService.addQueueItem(t.toMediaItem()); - await AudioService.play(); - } else { - //Normal - await AudioService.addQueueItemAt( - t.toMediaItem(), playerHelper.queueIndex + 1); - } + //-1 = next + await AudioService.addQueueItemAt(t.toMediaItem(), -1); _close(); }); diff --git a/lib/ui/player_bar.dart b/lib/ui/player_bar.dart index 6c9d4b3..ac6eea2 100644 --- a/lib/ui/player_bar.dart +++ b/lib/ui/player_bar.dart @@ -32,7 +32,7 @@ class PlayerBar extends StatelessWidget { leading: CachedImage( width: 50, height: 50, - url: AudioService.currentMediaItem.artUri, + url: AudioService.currentMediaItem.extras['thumb'] ?? 
AudioService.currentMediaItem.artUri, ), title: Text( AudioService.currentMediaItem.displayTitle, diff --git a/lib/ui/player_screen.dart b/lib/ui/player_screen.dart index 9af003a..bd36530 100644 --- a/lib/ui/player_screen.dart +++ b/lib/ui/player_screen.dart @@ -6,10 +6,12 @@ import 'package:audio_service/audio_service.dart'; import 'package:flutter_screenutil/screenutil.dart'; import 'package:freezer/api/deezer.dart'; import 'package:freezer/api/player.dart'; +import 'package:freezer/settings.dart'; import 'package:freezer/ui/menu.dart'; import 'package:freezer/ui/settings_screen.dart'; import 'package:freezer/ui/tiles.dart'; import 'package:async/async.dart'; +import 'package:just_audio/just_audio.dart'; import 'package:marquee/marquee.dart'; import 'cached_image.dart'; @@ -84,9 +86,10 @@ class _PlayerScreenHorizontalState extends State { children: [ CachedImage( url: AudioService.currentMediaItem.artUri, + fullThumb: true, ), if (_lyrics) LyricsWidget( - artUri: AudioService.currentMediaItem.artUri, + artUri: AudioService.currentMediaItem.extras['thumb'], trackId: AudioService.currentMediaItem.id, lyrics: Track.fromMediaItem(AudioService.currentMediaItem).lyrics, height: ScreenUtil().setWidth(500), @@ -188,7 +191,7 @@ class _PlayerScreenHorizontalState extends State { MaterialPageRoute(builder: (context) => QualitySettings()) ), child: Text( - AudioService.currentMediaItem.extras['qualityString'], + AudioService.currentMediaItem.extras['qualityString'] ?? 
'', style: TextStyle(fontSize: ScreenUtil().setSp(24)), ), ), @@ -242,9 +245,10 @@ class _PlayerScreenVerticalState extends State { children: [ CachedImage( url: AudioService.currentMediaItem.artUri, + fullThumb: true, ), if (_lyrics) LyricsWidget( - artUri: AudioService.currentMediaItem.artUri, + artUri: AudioService.currentMediaItem.extras['thumb'], trackId: AudioService.currentMediaItem.id, lyrics: Track.fromMediaItem(AudioService.currentMediaItem).lyrics, height: ScreenUtil().setHeight(1050), @@ -322,7 +326,7 @@ class _PlayerScreenVerticalState extends State { MaterialPageRoute(builder: (context) => QualitySettings()) ), child: Text( - AudioService.currentMediaItem.extras['qualityString'], + AudioService.currentMediaItem.extras['qualityString'] ?? '', style: TextStyle( fontSize: ScreenUtil().setSp(32), ), @@ -574,15 +578,15 @@ class _RepeatButtonState extends State { Icon get icon { switch (playerHelper.repeatType) { - case RepeatType.NONE: + case LoopMode.off: return Icon(Icons.repeat, size: widget.size??_size); - case RepeatType.LIST: + case LoopMode.all: return Icon( Icons.repeat, color: Theme.of(context).primaryColor, size: widget.size??_size ); - case RepeatType.TRACK: + case LoopMode.one: return Icon( Icons.repeat_one, color: Theme.of(context).primaryColor, @@ -708,6 +712,18 @@ class QueueScreen extends StatefulWidget { } class _QueueScreenState extends State { + + //Get proper icon color by theme + Color get shuffleIconColor { + Color og = Theme.of(context).primaryColor; + if (og.computeLuminance() > 0.5) { + if (playerHelper.shuffle) return Theme.of(context).primaryColorLight; + return Colors.black; + } + if (playerHelper.shuffle) return Theme.of(context).primaryColorDark; + return Colors.white; + } + @override Widget build(BuildContext context) { return Scaffold( @@ -715,10 +731,13 @@ class _QueueScreenState extends State { title: Text('Queue'), actions: [ IconButton( - icon: Icon(Icons.shuffle), + icon: Icon( + Icons.shuffle, + color: shuffleIconColor 
+ ), onPressed: () async { - await AudioService.customAction('shuffleQueue'); - setState(() => {}); + await playerHelper.toggleShuffle(); + setState(() {}); }, ) ], diff --git a/lib/ui/search.dart b/lib/ui/search.dart index 7ba12de..ae534b5 100644 --- a/lib/ui/search.dart +++ b/lib/ui/search.dart @@ -1,3 +1,4 @@ +import 'package:connectivity/connectivity.dart'; import 'package:flutter/material.dart'; import 'package:freezer/api/download.dart'; import 'package:freezer/api/player.dart'; @@ -7,7 +8,6 @@ import 'package:freezer/ui/menu.dart'; import 'tiles.dart'; import '../api/deezer.dart'; import '../api/definitions.dart'; -import '../settings.dart'; import 'error.dart'; class SearchScreen extends StatefulWidget { @@ -18,7 +18,7 @@ class SearchScreen extends StatefulWidget { class _SearchScreenState extends State { String _query; - bool _offline = settings.offlineMode; + bool _offline = false; void _submit(BuildContext context, {String query}) { if (query != null) _query = query; @@ -27,6 +27,19 @@ class _SearchScreenState extends State { ); } + @override + void initState() { + //Check for connectivity and enable offline mode + Connectivity().checkConnectivity().then((res) { + if (res == ConnectivityResult.none) setState(() { + _offline = true; + }); + }); + + + super.initState(); + } + @override Widget build(BuildContext context) { return Scaffold( @@ -59,11 +72,7 @@ class _SearchScreenState extends State { leading: Switch( value: _offline, onChanged: (v) { - if (settings.offlineMode) { - setState(() => _offline = true); - } else { - setState(() => _offline = v); - } + setState(() => _offline = !_offline); }, ), ) diff --git a/pubspec.lock b/pubspec.lock deleted file mode 100644 index ed3238f..0000000 --- a/pubspec.lock +++ /dev/null @@ -1,826 +0,0 @@ -# Generated by pub -# See https://dart.dev/tools/pub/glossary#lockfile -packages: - _fe_analyzer_shared: - dependency: transitive - description: - name: _fe_analyzer_shared - url: "https://pub.dartlang.org" - source: 
hosted - version: "4.0.0" - analyzer: - dependency: transitive - description: - name: analyzer - url: "https://pub.dartlang.org" - source: hosted - version: "0.39.10" - archive: - dependency: transitive - description: - name: archive - url: "https://pub.dartlang.org" - source: hosted - version: "2.0.13" - args: - dependency: transitive - description: - name: args - url: "https://pub.dartlang.org" - source: hosted - version: "1.6.0" - async: - dependency: "direct main" - description: - name: async - url: "https://pub.dartlang.org" - source: hosted - version: "2.4.1" - audio_service: - dependency: "direct main" - description: - name: audio_service - url: "https://pub.dartlang.org" - source: hosted - version: "0.11.0" - boolean_selector: - dependency: transitive - description: - name: boolean_selector - url: "https://pub.dartlang.org" - source: hosted - version: "2.0.0" - build: - dependency: transitive - description: - name: build - url: "https://pub.dartlang.org" - source: hosted - version: "1.3.0" - build_config: - dependency: transitive - description: - name: build_config - url: "https://pub.dartlang.org" - source: hosted - version: "0.4.2" - build_daemon: - dependency: transitive - description: - name: build_daemon - url: "https://pub.dartlang.org" - source: hosted - version: "2.1.4" - build_resolvers: - dependency: transitive - description: - name: build_resolvers - url: "https://pub.dartlang.org" - source: hosted - version: "1.3.9" - build_runner: - dependency: "direct dev" - description: - name: build_runner - url: "https://pub.dartlang.org" - source: hosted - version: "1.10.0" - build_runner_core: - dependency: transitive - description: - name: build_runner_core - url: "https://pub.dartlang.org" - source: hosted - version: "5.2.0" - built_collection: - dependency: transitive - description: - name: built_collection - url: "https://pub.dartlang.org" - source: hosted - version: "4.3.2" - built_value: - dependency: transitive - description: - name: built_value - 
url: "https://pub.dartlang.org" - source: hosted - version: "7.1.0" - charcode: - dependency: transitive - description: - name: charcode - url: "https://pub.dartlang.org" - source: hosted - version: "1.1.3" - checked_yaml: - dependency: transitive - description: - name: checked_yaml - url: "https://pub.dartlang.org" - source: hosted - version: "1.0.2" - clock: - dependency: transitive - description: - name: clock - url: "https://pub.dartlang.org" - source: hosted - version: "1.0.1" - code_builder: - dependency: transitive - description: - name: code_builder - url: "https://pub.dartlang.org" - source: hosted - version: "3.3.0" - collection: - dependency: "direct main" - description: - name: collection - url: "https://pub.dartlang.org" - source: hosted - version: "1.14.12" - connectivity: - dependency: "direct main" - description: - name: connectivity - url: "https://pub.dartlang.org" - source: hosted - version: "0.4.8+6" - connectivity_macos: - dependency: transitive - description: - name: connectivity_macos - url: "https://pub.dartlang.org" - source: hosted - version: "0.1.0+3" - connectivity_platform_interface: - dependency: transitive - description: - name: connectivity_platform_interface - url: "https://pub.dartlang.org" - source: hosted - version: "1.0.6" - convert: - dependency: transitive - description: - name: convert - url: "https://pub.dartlang.org" - source: hosted - version: "2.1.1" - cookie_jar: - dependency: "direct main" - description: - name: cookie_jar - url: "https://pub.dartlang.org" - source: hosted - version: "1.0.1" - country_pickers: - dependency: "direct main" - description: - name: country_pickers - url: "https://pub.dartlang.org" - source: hosted - version: "1.3.0" - crypto: - dependency: "direct main" - description: - name: crypto - url: "https://pub.dartlang.org" - source: hosted - version: "2.1.4" - csslib: - dependency: transitive - description: - name: csslib - url: "https://pub.dartlang.org" - source: hosted - version: "0.16.1" - 
custom_navigator: - dependency: "direct main" - description: - name: custom_navigator - url: "https://pub.dartlang.org" - source: hosted - version: "0.3.0" - dart_style: - dependency: transitive - description: - name: dart_style - url: "https://pub.dartlang.org" - source: hosted - version: "1.3.6" - dio: - dependency: "direct main" - description: - name: dio - url: "https://pub.dartlang.org" - source: hosted - version: "3.0.9" - dio_cookie_manager: - dependency: "direct main" - description: - name: dio_cookie_manager - url: "https://pub.dartlang.org" - source: hosted - version: "1.0.0" - disk_space: - dependency: "direct main" - description: - name: disk_space - url: "https://pub.dartlang.org" - source: hosted - version: "0.0.3" - ext_storage: - dependency: "direct main" - description: - name: ext_storage - url: "https://pub.dartlang.org" - source: hosted - version: "1.0.3" - fading_edge_scrollview: - dependency: transitive - description: - name: fading_edge_scrollview - url: "https://pub.dartlang.org" - source: hosted - version: "1.1.4" - file: - dependency: transitive - description: - name: file - url: "https://pub.dartlang.org" - source: hosted - version: "5.1.0" - filesize: - dependency: "direct main" - description: - name: filesize - url: "https://pub.dartlang.org" - source: hosted - version: "1.0.4" - fixnum: - dependency: transitive - description: - name: fixnum - url: "https://pub.dartlang.org" - source: hosted - version: "0.10.11" - flutter: - dependency: "direct main" - description: flutter - source: sdk - version: "0.0.0" - flutter_cache_manager: - dependency: transitive - description: - name: flutter_cache_manager - url: "https://pub.dartlang.org" - source: hosted - version: "1.4.0" - flutter_inappwebview: - dependency: "direct main" - description: - name: flutter_inappwebview - url: "https://pub.dartlang.org" - source: hosted - version: "3.3.0+3" - flutter_isolate: - dependency: transitive - description: - name: flutter_isolate - url: 
"https://pub.dartlang.org" - source: hosted - version: "1.0.0+14" - flutter_local_notifications: - dependency: "direct main" - description: - name: flutter_local_notifications - url: "https://pub.dartlang.org" - source: hosted - version: "1.4.4+1" - flutter_local_notifications_platform_interface: - dependency: transitive - description: - name: flutter_local_notifications_platform_interface - url: "https://pub.dartlang.org" - source: hosted - version: "1.0.1" - flutter_material_color_picker: - dependency: "direct main" - description: - name: flutter_material_color_picker - url: "https://pub.dartlang.org" - source: hosted - version: "1.0.5" - flutter_screenutil: - dependency: "direct main" - description: - name: flutter_screenutil - url: "https://pub.dartlang.org" - source: hosted - version: "2.3.0" - flutter_test: - dependency: "direct dev" - description: flutter - source: sdk - version: "0.0.0" - flutter_web_plugins: - dependency: transitive - description: flutter - source: sdk - version: "0.0.0" - fluttertoast: - dependency: "direct main" - description: - name: fluttertoast - url: "https://pub.dartlang.org" - source: hosted - version: "4.0.1" - glob: - dependency: transitive - description: - name: glob - url: "https://pub.dartlang.org" - source: hosted - version: "1.2.0" - graphs: - dependency: transitive - description: - name: graphs - url: "https://pub.dartlang.org" - source: hosted - version: "0.2.0" - hex: - dependency: "direct main" - description: - name: hex - url: "https://pub.dartlang.org" - source: hosted - version: "0.1.2" - html: - dependency: "direct main" - description: - name: html - url: "https://pub.dartlang.org" - source: hosted - version: "0.14.0+3" - http: - dependency: transitive - description: - name: http - url: "https://pub.dartlang.org" - source: hosted - version: "0.12.1" - http_multi_server: - dependency: transitive - description: - name: http_multi_server - url: "https://pub.dartlang.org" - source: hosted - version: "2.2.0" - 
http_parser: - dependency: transitive - description: - name: http_parser - url: "https://pub.dartlang.org" - source: hosted - version: "3.1.4" - image: - dependency: transitive - description: - name: image - url: "https://pub.dartlang.org" - source: hosted - version: "2.1.12" - intl: - dependency: "direct main" - description: - name: intl - url: "https://pub.dartlang.org" - source: hosted - version: "0.16.1" - io: - dependency: transitive - description: - name: io - url: "https://pub.dartlang.org" - source: hosted - version: "0.3.4" - js: - dependency: transitive - description: - name: js - url: "https://pub.dartlang.org" - source: hosted - version: "0.6.2" - json_annotation: - dependency: "direct main" - description: - name: json_annotation - url: "https://pub.dartlang.org" - source: hosted - version: "3.0.1" - json_serializable: - dependency: "direct dev" - description: - name: json_serializable - url: "https://pub.dartlang.org" - source: hosted - version: "3.3.0" - just_audio: - dependency: "direct main" - description: - path: "." 
- ref: HEAD - resolved-ref: "70392a52590c95bd4b1ca35c7e92d30793c7c4d3" - url: "https://notabug.org/exttex/just_audio.git" - source: git - version: "0.1.10" - language_pickers: - dependency: "direct main" - description: - name: language_pickers - url: "https://pub.dartlang.org" - source: hosted - version: "0.2.0+1" - logging: - dependency: transitive - description: - name: logging - url: "https://pub.dartlang.org" - source: hosted - version: "0.11.4" - marquee: - dependency: "direct main" - description: - name: marquee - url: "https://pub.dartlang.org" - source: hosted - version: "1.5.2" - matcher: - dependency: transitive - description: - name: matcher - url: "https://pub.dartlang.org" - source: hosted - version: "0.12.6" - meta: - dependency: transitive - description: - name: meta - url: "https://pub.dartlang.org" - source: hosted - version: "1.1.8" - mime: - dependency: transitive - description: - name: mime - url: "https://pub.dartlang.org" - source: hosted - version: "0.9.6+3" - move_to_background: - dependency: "direct main" - description: - name: move_to_background - url: "https://pub.dartlang.org" - source: hosted - version: "1.0.1" - node_interop: - dependency: transitive - description: - name: node_interop - url: "https://pub.dartlang.org" - source: hosted - version: "1.1.1" - node_io: - dependency: transitive - description: - name: node_io - url: "https://pub.dartlang.org" - source: hosted - version: "1.1.1" - package_config: - dependency: transitive - description: - name: package_config - url: "https://pub.dartlang.org" - source: hosted - version: "1.9.3" - package_info: - dependency: "direct main" - description: - name: package_info - url: "https://pub.dartlang.org" - source: hosted - version: "0.4.1" - palette_generator: - dependency: "direct main" - description: - name: palette_generator - url: "https://pub.dartlang.org" - source: hosted - version: "0.2.3" - path: - dependency: "direct main" - description: - name: path - url: 
"https://pub.dartlang.org" - source: hosted - version: "1.6.4" - path_provider: - dependency: "direct main" - description: - name: path_provider - url: "https://pub.dartlang.org" - source: hosted - version: "1.6.10" - path_provider_ex: - dependency: "direct main" - description: - name: path_provider_ex - url: "https://pub.dartlang.org" - source: hosted - version: "1.0.1" - path_provider_linux: - dependency: transitive - description: - name: path_provider_linux - url: "https://pub.dartlang.org" - source: hosted - version: "0.0.1+1" - path_provider_macos: - dependency: transitive - description: - name: path_provider_macos - url: "https://pub.dartlang.org" - source: hosted - version: "0.0.4+3" - path_provider_platform_interface: - dependency: transitive - description: - name: path_provider_platform_interface - url: "https://pub.dartlang.org" - source: hosted - version: "1.0.2" - pedantic: - dependency: transitive - description: - name: pedantic - url: "https://pub.dartlang.org" - source: hosted - version: "1.9.0" - permission_handler: - dependency: "direct main" - description: - name: permission_handler - url: "https://pub.dartlang.org" - source: hosted - version: "5.0.1" - permission_handler_platform_interface: - dependency: transitive - description: - name: permission_handler_platform_interface - url: "https://pub.dartlang.org" - source: hosted - version: "2.0.1" - petitparser: - dependency: transitive - description: - name: petitparser - url: "https://pub.dartlang.org" - source: hosted - version: "2.4.0" - platform: - dependency: transitive - description: - name: platform - url: "https://pub.dartlang.org" - source: hosted - version: "2.2.1" - plugin_platform_interface: - dependency: transitive - description: - name: plugin_platform_interface - url: "https://pub.dartlang.org" - source: hosted - version: "1.0.2" - pointycastle: - dependency: "direct main" - description: - name: pointycastle - url: "https://pub.dartlang.org" - source: hosted - version: "1.0.2" - pool: 
- dependency: transitive - description: - name: pool - url: "https://pub.dartlang.org" - source: hosted - version: "1.4.0" - process: - dependency: transitive - description: - name: process - url: "https://pub.dartlang.org" - source: hosted - version: "3.0.13" - pub_semver: - dependency: transitive - description: - name: pub_semver - url: "https://pub.dartlang.org" - source: hosted - version: "1.4.4" - pubspec_parse: - dependency: transitive - description: - name: pubspec_parse - url: "https://pub.dartlang.org" - source: hosted - version: "0.1.5" - quiver: - dependency: transitive - description: - name: quiver - url: "https://pub.dartlang.org" - source: hosted - version: "2.1.3" - random_string: - dependency: "direct main" - description: - name: random_string - url: "https://pub.dartlang.org" - source: hosted - version: "2.0.1" - rxdart: - dependency: transitive - description: - name: rxdart - url: "https://pub.dartlang.org" - source: hosted - version: "0.24.1" - shelf: - dependency: transitive - description: - name: shelf - url: "https://pub.dartlang.org" - source: hosted - version: "0.7.5" - shelf_web_socket: - dependency: transitive - description: - name: shelf_web_socket - url: "https://pub.dartlang.org" - source: hosted - version: "0.2.3" - sky_engine: - dependency: transitive - description: flutter - source: sdk - version: "0.0.99" - source_gen: - dependency: transitive - description: - name: source_gen - url: "https://pub.dartlang.org" - source: hosted - version: "0.9.5" - source_span: - dependency: transitive - description: - name: source_span - url: "https://pub.dartlang.org" - source: hosted - version: "1.7.0" - sqflite: - dependency: "direct main" - description: - name: sqflite - url: "https://pub.dartlang.org" - source: hosted - version: "1.3.1" - sqflite_common: - dependency: transitive - description: - name: sqflite_common - url: "https://pub.dartlang.org" - source: hosted - version: "1.0.1" - stack_trace: - dependency: transitive - description: - 
name: stack_trace - url: "https://pub.dartlang.org" - source: hosted - version: "1.9.3" - stream_channel: - dependency: transitive - description: - name: stream_channel - url: "https://pub.dartlang.org" - source: hosted - version: "2.0.0" - stream_transform: - dependency: transitive - description: - name: stream_transform - url: "https://pub.dartlang.org" - source: hosted - version: "1.2.0" - string_scanner: - dependency: transitive - description: - name: string_scanner - url: "https://pub.dartlang.org" - source: hosted - version: "1.0.5" - synchronized: - dependency: transitive - description: - name: synchronized - url: "https://pub.dartlang.org" - source: hosted - version: "2.2.0" - term_glyph: - dependency: transitive - description: - name: term_glyph - url: "https://pub.dartlang.org" - source: hosted - version: "1.1.0" - test_api: - dependency: transitive - description: - name: test_api - url: "https://pub.dartlang.org" - source: hosted - version: "0.2.15" - timing: - dependency: transitive - description: - name: timing - url: "https://pub.dartlang.org" - source: hosted - version: "0.1.1+2" - typed_data: - dependency: transitive - description: - name: typed_data - url: "https://pub.dartlang.org" - source: hosted - version: "1.1.6" - uuid: - dependency: transitive - description: - name: uuid - url: "https://pub.dartlang.org" - source: hosted - version: "2.0.4" - vector_math: - dependency: transitive - description: - name: vector_math - url: "https://pub.dartlang.org" - source: hosted - version: "2.0.8" - watcher: - dependency: transitive - description: - name: watcher - url: "https://pub.dartlang.org" - source: hosted - version: "0.9.7+15" - web_socket_channel: - dependency: transitive - description: - name: web_socket_channel - url: "https://pub.dartlang.org" - source: hosted - version: "1.1.0" - xdg_directories: - dependency: transitive - description: - name: xdg_directories - url: "https://pub.dartlang.org" - source: hosted - version: "0.1.0" - xml: - 
dependency: transitive - description: - name: xml - url: "https://pub.dartlang.org" - source: hosted - version: "3.6.1" - yaml: - dependency: transitive - description: - name: yaml - url: "https://pub.dartlang.org" - source: hosted - version: "2.2.1" -sdks: - dart: ">=2.7.0 <3.0.0" - flutter: ">=1.15.21 <2.0.0" diff --git a/pubspec.yaml b/pubspec.yaml index 96ebdb3..be6513f 100644 --- a/pubspec.yaml +++ b/pubspec.yaml @@ -39,10 +39,10 @@ dependencies: connectivity: ^0.4.8+6 intl: ^0.16.1 filesize: ^1.0.4 - fluttertoast: ^4.0.1 + fluttertoast: ^7.0.2 palette_generator: ^0.2.3 flutter_material_color_picker: ^1.0.5 - flutter_inappwebview: ^3.3.0+3 + flutter_inappwebview: ^4.0.0 custom_navigator: ^0.3.0 language_pickers: ^0.2.0+1 country_pickers: ^1.3.0 @@ -51,16 +51,18 @@ dependencies: flutter_local_notifications: ^1.4.4+1 collection: ^1.14.12 disk_space: ^0.0.3 - audio_service: ^0.11.0 path_provider_ex: ^1.0.1 random_string: ^2.0.1 async: ^2.4.1 html: ^0.14.0+3 flutter_screenutil: ^2.3.0 marquee: ^1.5.2 + flutter_cache_manager: ^1.4.1 + cached_network_image: ^2.2.0+1 + audio_service: ^0.13.0 just_audio: - git: https://notabug.org/exttex/just_audio.git + path: ./just_audio # cupertino_icons: ^0.1.3