Remove debug print statements (#216)

* Remove prints

* Add proper gitignore

* Remove .idea

* Delete .gitignore

* Create .gitignore
commit 7949f92b90 (parent ef3a081b0d)
Author: creativecreatorormaybenot
Date:   2020-10-20 10:09:07 +00:00, committed by GitHub
6 changed files with 54 additions and 63 deletions

just_audio/CHANGELOG.md

@@ -1,3 +1,7 @@
+## 0.5.4+2
+
+* Removed `print` statements from plugin code.
+
 ## 0.5.4+1
 
 * Add web dependency.

just_audio/lib/just_audio.dart

@@ -127,30 +127,24 @@ class AudioPlayer {
         .handleError((err, stack) {/* noop */}));
     _platform.then((platform) {
       platform.playbackEventMessageStream.listen((message) {
-        try {
-          final playbackEvent = PlaybackEvent(
-            processingState:
-                ProcessingState.values[message.processingState.index],
-            updateTime: message.updateTime,
-            updatePosition: message.updatePosition,
-            bufferedPosition: message.bufferedPosition,
-            duration: message.duration,
-            icyMetadata: message.icyMetadata == null
-                ? null
-                : IcyMetadata._fromMessage(message.icyMetadata),
-            currentIndex: message.currentIndex,
-            androidAudioSessionId: message.androidAudioSessionId,
-          );
-          _durationFuture = Future.value(playbackEvent.duration);
-          if (playbackEvent.duration != _playbackEvent.duration) {
-            _durationSubject.add(playbackEvent.duration);
-          }
-          _playbackEventSubject.add(_playbackEvent = playbackEvent);
-        } catch (e, stacktrace) {
-          print("Error parsing event: $e");
-          print("$stacktrace");
-          rethrow;
+        final playbackEvent = PlaybackEvent(
+          processingState:
+              ProcessingState.values[message.processingState.index],
+          updateTime: message.updateTime,
+          updatePosition: message.updatePosition,
+          bufferedPosition: message.bufferedPosition,
+          duration: message.duration,
+          icyMetadata: message.icyMetadata == null
+              ? null
+              : IcyMetadata._fromMessage(message.icyMetadata),
+          currentIndex: message.currentIndex,
+          androidAudioSessionId: message.androidAudioSessionId,
+        );
+        _durationFuture = Future.value(playbackEvent.duration);
+        if (playbackEvent.duration != _playbackEvent.duration) {
+          _durationSubject.add(playbackEvent.duration);
         }
+        _playbackEventSubject.add(_playbackEvent = playbackEvent);
       }, onError: _playbackEventSubject.addError);
     });
     _sequenceSubject.add(null);
@@ -700,7 +694,6 @@ class AudioPlayer {
       await JustAudioPlatform.instance
           .disposePlayer(DisposePlayerRequest(id: _id));
     } catch (e) {
-      print("disposePlayer() not implemented. Falling back to dispose()");
       await (await _platform).dispose(DisposeRequest());
     }
     _audioSource = null;
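
Note: with the try/catch and its print calls removed above, the plugin no longer logs event-parsing problems itself; errors emitted by the platform event stream are still forwarded to the player's event stream via onError: _playbackEventSubject.addError. A minimal sketch of app-level logging that replaces the old prints, assuming the public just_audio 0.5.x API (AudioPlayer, playbackEventStream, setUrl) and a placeholder URL:

import 'package:just_audio/just_audio.dart';

Future<void> main() async {
  final player = AudioPlayer();

  // Platform event errors surface on the stream's error channel rather than
  // being printed inside the plugin, so attach a handler and log them here.
  player.playbackEventStream.listen(
    (event) {
      // React to processing state / position updates as usual.
    },
    onError: (Object e, StackTrace st) {
      // Hypothetical app-level logging; swap in your preferred logger.
      print('playback event error: $e\n$st');
    },
  );

  await player.setUrl('https://example.com/audio.mp3'); // placeholder URL
  player.play();
}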

just_audio/pubspec.yaml

@@ -1,6 +1,6 @@
 name: just_audio
 description: Flutter plugin to play audio from streams, files, assets, DASH/HLS streams and playlists. Works with audio_service to play audio in the background.
-version: 0.5.4+1
+version: 0.5.4+2
 homepage: https://github.com/ryanheise/just_audio/tree/master/just_audio
 
 environment:

just_audio_web/CHANGELOG.md

@@ -1,3 +1,7 @@
+## 0.1.0+1
+
+* Removed `print` statements from plugin code.
+
 ## 0.1.0
 
 * Update to use platform interface 1.1.0.

just_audio_web/lib/just_audio_web.dart

@@ -13,7 +13,6 @@ class JustAudioPlugin extends JustAudioPlatform {
   final Map<String, JustAudioPlayer> players = {};
 
   static void registerWith(Registrar registrar) {
-    print("registerWith setting instance");
     JustAudioPlatform.instance = JustAudioPlugin();
   }
 
@@ -179,7 +178,6 @@ class Html5AudioPlayer extends JustAudioPlayer {
 
   @override
   Future<LoadResponse> load(LoadRequest request) async {
-    print("web load");
     _currentAudioSourcePlayer?.pause();
     _audioSourcePlayer = getAudioSource(request.audioSourceMessage);
     _index = request.initialIndex ?? 0;
@@ -198,7 +196,6 @@ class Html5AudioPlayer extends JustAudioPlayer {
   }
 
   Future<Duration> loadUri(final Uri uri) async {
-    print("loadUri $uri");
     transition(ProcessingStateMessage.loading);
     final src = uri.toString();
     if (src != _audioElement.src) {
@@ -217,7 +214,6 @@ class Html5AudioPlayer extends JustAudioPlayer {
     }
     transition(ProcessingStateMessage.ready);
     final seconds = _audioElement.duration;
-    print("loadUri returning");
     return seconds.isFinite
         ? Duration(milliseconds: (seconds * 1000).toInt())
         : null;
@@ -370,7 +366,6 @@ class Html5AudioPlayer extends JustAudioPlayer {
 
   @override
   Future<void> release() async {
-    print("web release");
     _currentAudioSourcePlayer?.pause();
     _audioElement.removeAttribute('src');
     _audioElement.load();
@@ -392,38 +387,33 @@
   }
 
   AudioSourcePlayer decodeAudioSource(AudioSourceMessage audioSourceMessage) {
-    try {
-      if (audioSourceMessage is ProgressiveAudioSourceMessage) {
-        return ProgressiveAudioSourcePlayer(this, audioSourceMessage.id,
-            Uri.parse(audioSourceMessage.uri), audioSourceMessage.headers);
-      } else if (audioSourceMessage is DashAudioSourceMessage) {
-        return DashAudioSourcePlayer(this, audioSourceMessage.id,
-            Uri.parse(audioSourceMessage.uri), audioSourceMessage.headers);
-      } else if (audioSourceMessage is HlsAudioSourceMessage) {
-        return HlsAudioSourcePlayer(this, audioSourceMessage.id,
-            Uri.parse(audioSourceMessage.uri), audioSourceMessage.headers);
-      } else if (audioSourceMessage is ConcatenatingAudioSourceMessage) {
-        return ConcatenatingAudioSourcePlayer(
-            this,
-            audioSourceMessage.id,
-            getAudioSources(audioSourceMessage.children),
-            audioSourceMessage.useLazyPreparation);
-      } else if (audioSourceMessage is ClippingAudioSourceMessage) {
-        return ClippingAudioSourcePlayer(
-            this,
-            audioSourceMessage.id,
-            getAudioSource(audioSourceMessage.child),
-            audioSourceMessage.start,
-            audioSourceMessage.end);
-      } else if (audioSourceMessage is LoopingAudioSourceMessage) {
-        return LoopingAudioSourcePlayer(this, audioSourceMessage.id,
-            getAudioSource(audioSourceMessage.child), audioSourceMessage.count);
-      } else {
-        throw Exception("Unknown AudioSource type: $audioSourceMessage");
-      }
-    } catch (e, stacktrace) {
-      print("$stacktrace");
-      rethrow;
+    if (audioSourceMessage is ProgressiveAudioSourceMessage) {
+      return ProgressiveAudioSourcePlayer(this, audioSourceMessage.id,
+          Uri.parse(audioSourceMessage.uri), audioSourceMessage.headers);
+    } else if (audioSourceMessage is DashAudioSourceMessage) {
+      return DashAudioSourcePlayer(this, audioSourceMessage.id,
+          Uri.parse(audioSourceMessage.uri), audioSourceMessage.headers);
+    } else if (audioSourceMessage is HlsAudioSourceMessage) {
+      return HlsAudioSourcePlayer(this, audioSourceMessage.id,
+          Uri.parse(audioSourceMessage.uri), audioSourceMessage.headers);
+    } else if (audioSourceMessage is ConcatenatingAudioSourceMessage) {
+      return ConcatenatingAudioSourcePlayer(
+          this,
+          audioSourceMessage.id,
+          getAudioSources(audioSourceMessage.children),
+          audioSourceMessage.useLazyPreparation);
+    } else if (audioSourceMessage is ClippingAudioSourceMessage) {
+      return ClippingAudioSourcePlayer(
+          this,
+          audioSourceMessage.id,
+          getAudioSource(audioSourceMessage.child),
+          audioSourceMessage.start,
+          audioSourceMessage.end);
+    } else if (audioSourceMessage is LoopingAudioSourceMessage) {
+      return LoopingAudioSourcePlayer(this, audioSourceMessage.id,
+          getAudioSource(audioSourceMessage.child), audioSourceMessage.count);
+    } else {
+      throw Exception("Unknown AudioSource type: $audioSourceMessage");
     }
   }
 }
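
Note: the web implementation's "web load", "loadUri", and "web release" markers are gone as well, and an unsupported audio source still surfaces as the Exception thrown above. A hedged sketch of handling a failed load at the application level instead of relying on plugin prints (loadTrack is a hypothetical helper; setUrl is assumed to complete with an error when the underlying load throws):

import 'package:just_audio/just_audio.dart';

/// Hypothetical helper: loads [url] and reports failures at the app level,
/// since the web plugin no longer prints its own load diagnostics.
Future<Duration> loadTrack(AudioPlayer player, String url) async {
  try {
    final duration = await player.setUrl(url);
    return duration;
  } catch (e, st) {
    // Replace with real logging/reporting as appropriate for the app.
    print('failed to load $url: $e\n$st');
    rethrow;
  }
}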

just_audio_web/pubspec.yaml

@@ -1,7 +1,7 @@
 name: just_audio_web
 description: Web platform implementation of just_audio
 homepage: https://github.com/ryanheise/just_audio/tree/master/just_audio_web
-version: 0.1.0
+version: 0.1.0+1
 
 flutter:
   plugin: