Remove debug print statements (#216)

* Remove prints

* Add proper gitignore

* Remove .idea

* Delete .gitignore

* Create .gitignore
creativecreatorormaybenot 2020-10-20 10:09:07 +00:00 committed by GitHub
parent ef3a081b0d
commit 7949f92b90
6 changed files with 54 additions and 63 deletions
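
Since the plugin no longer prints load or parse failures itself, applications that want visibility into errors should observe them directly. A minimal usage sketch, not part of this diff, assuming the package's public playbackEventStream, setUrl and play API at this version:

import 'package:just_audio/just_audio.dart';

Future<void> playWithErrorHandling() async {
  final player = AudioPlayer();
  // Errors forwarded by the plugin surface on the event stream's error channel.
  player.playbackEventStream.listen(
    (event) {/* react to playback state changes */},
    onError: (Object e, StackTrace st) {
      // Log or report here instead of relying on the plugin to print.
    },
  );
  try {
    await player.setUrl('https://example.com/audio.mp3');
    await player.play();
  } catch (e) {
    // Load failures now reach the caller rather than the console.
  }
}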


@@ -1,3 +1,7 @@
+## 0.5.4+2
+* Removed `print` statements from plugin code.
 ## 0.5.4+1
 * Add web dependency.


@@ -127,30 +127,24 @@ class AudioPlayer {
         .handleError((err, stack) {/* noop */}));
     _platform.then((platform) {
       platform.playbackEventMessageStream.listen((message) {
-        try {
-          final playbackEvent = PlaybackEvent(
-            processingState:
-                ProcessingState.values[message.processingState.index],
-            updateTime: message.updateTime,
-            updatePosition: message.updatePosition,
-            bufferedPosition: message.bufferedPosition,
-            duration: message.duration,
-            icyMetadata: message.icyMetadata == null
-                ? null
-                : IcyMetadata._fromMessage(message.icyMetadata),
-            currentIndex: message.currentIndex,
-            androidAudioSessionId: message.androidAudioSessionId,
-          );
-          _durationFuture = Future.value(playbackEvent.duration);
-          if (playbackEvent.duration != _playbackEvent.duration) {
-            _durationSubject.add(playbackEvent.duration);
-          }
-          _playbackEventSubject.add(_playbackEvent = playbackEvent);
-        } catch (e, stacktrace) {
-          print("Error parsing event: $e");
-          print("$stacktrace");
-          rethrow;
-        }
+        final playbackEvent = PlaybackEvent(
+          processingState:
+              ProcessingState.values[message.processingState.index],
+          updateTime: message.updateTime,
+          updatePosition: message.updatePosition,
+          bufferedPosition: message.bufferedPosition,
+          duration: message.duration,
+          icyMetadata: message.icyMetadata == null
+              ? null
+              : IcyMetadata._fromMessage(message.icyMetadata),
+          currentIndex: message.currentIndex,
+          androidAudioSessionId: message.androidAudioSessionId,
+        );
+        _durationFuture = Future.value(playbackEvent.duration);
+        if (playbackEvent.duration != _playbackEvent.duration) {
+          _durationSubject.add(playbackEvent.duration);
+        }
+        _playbackEventSubject.add(_playbackEvent = playbackEvent);
       }, onError: _playbackEventSubject.addError);
     });
     _sequenceSubject.add(null);
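
The hunk above drops the print-and-rethrow wrapper while keeping the existing pattern of forwarding platform stream errors into the event subject via onError: _playbackEventSubject.addError. A stripped-down sketch of that pattern with hypothetical names, assuming rxdart's BehaviorSubject (which just_audio's subjects appear to use):

import 'package:rxdart/rxdart.dart';

/// Hypothetical event type standing in for PlaybackEvent.
class Event {
  final int state;
  Event(this.state);
}

/// Maps raw platform messages into typed events; stream errors are pushed
/// to listeners via addError rather than printed inside the plugin.
class EventBridge {
  final _subject = BehaviorSubject<Event>();

  Stream<Event> get events => _subject.stream;

  void bind(Stream<Map<String, dynamic>> platformMessages) {
    platformMessages.listen(
      (message) => _subject.add(Event(message['state'] as int)),
      onError: _subject.addError,
    );
  }

  Future<void> dispose() => _subject.close();
}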
@@ -700,7 +694,6 @@ class AudioPlayer {
       await JustAudioPlatform.instance
           .disposePlayer(DisposePlayerRequest(id: _id));
     } catch (e) {
-      print("disposePlayer() not implemented. Falling back to dispose()");
       await (await _platform).dispose(DisposeRequest());
     }
     _audioSource = null;
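
The second hunk keeps the fallback itself and removes only the console message: when a platform implementation does not support disposePlayer, the call throws and the player quietly falls back to the older dispose call. A generic sketch of that fallback pattern with hypothetical types, not the plugin's actual classes:

/// Hypothetical platform interface with a newer per-player dispose call
/// and an older whole-platform one.
abstract class Platform {
  Future<void> disposePlayer(String id); // may throw if unimplemented
  Future<void> dispose();
}

Future<void> releasePlayer(Platform platform, String id) async {
  try {
    // Prefer the newer call when the platform implements it.
    await platform.disposePlayer(id);
  } catch (e) {
    // Fall back silently; no debug print is needed here.
    await platform.dispose();
  }
}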


@@ -1,6 +1,6 @@
 name: just_audio
 description: Flutter plugin to play audio from streams, files, assets, DASH/HLS streams and playlists. Works with audio_service to play audio in the background.
-version: 0.5.4+1
+version: 0.5.4+2
 homepage: https://github.com/ryanheise/just_audio/tree/master/just_audio
 environment:


@@ -1,3 +1,7 @@
+## 0.1.0+1
+* Removed `print` statements from plugin code.
 ## 0.1.0
 * Update to use platform interface 1.1.0.


@@ -13,7 +13,6 @@ class JustAudioPlugin extends JustAudioPlatform {
   final Map<String, JustAudioPlayer> players = {};
   static void registerWith(Registrar registrar) {
-    print("registerWith setting instance");
     JustAudioPlatform.instance = JustAudioPlugin();
   }
@@ -179,7 +178,6 @@ class Html5AudioPlayer extends JustAudioPlayer {
   @override
   Future<LoadResponse> load(LoadRequest request) async {
-    print("web load");
     _currentAudioSourcePlayer?.pause();
     _audioSourcePlayer = getAudioSource(request.audioSourceMessage);
     _index = request.initialIndex ?? 0;
@@ -198,7 +196,6 @@ class Html5AudioPlayer extends JustAudioPlayer {
   }
   Future<Duration> loadUri(final Uri uri) async {
-    print("loadUri $uri");
     transition(ProcessingStateMessage.loading);
     final src = uri.toString();
     if (src != _audioElement.src) {
@@ -217,7 +214,6 @@ class Html5AudioPlayer extends JustAudioPlayer {
     }
     transition(ProcessingStateMessage.ready);
     final seconds = _audioElement.duration;
-    print("loadUri returning");
     return seconds.isFinite
         ? Duration(milliseconds: (seconds * 1000).toInt())
         : null;
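
Beyond the removed print, this hunk shows how the web player turns the audio element's duration into a Dart Duration: HTMLMediaElement.duration is a double in seconds and can be NaN (no media yet) or Infinity (e.g. live streams), so the isFinite guard returns null instead of a meaningless value. A self-contained sketch of the same conversion using a hypothetical helper:

/// Convert a media element's duration (seconds as a double) into a
/// nullable Duration, mirroring the guard in the hunk above.
Duration? secondsToDuration(double seconds) {
  // NaN and Infinity are not finite, so report "unknown duration".
  if (!seconds.isFinite) return null;
  return Duration(milliseconds: (seconds * 1000).toInt());
}

void main() {
  print(secondsToDuration(3.5)); // 0:00:03.500000
  print(secondsToDuration(double.infinity)); // null
}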
@@ -370,7 +366,6 @@ class Html5AudioPlayer extends JustAudioPlayer {
   @override
   Future<void> release() async {
-    print("web release");
     _currentAudioSourcePlayer?.pause();
     _audioElement.removeAttribute('src');
     _audioElement.load();
@@ -392,38 +387,33 @@ class Html5AudioPlayer extends JustAudioPlayer {
   }
   AudioSourcePlayer decodeAudioSource(AudioSourceMessage audioSourceMessage) {
-    try {
-      if (audioSourceMessage is ProgressiveAudioSourceMessage) {
-        return ProgressiveAudioSourcePlayer(this, audioSourceMessage.id,
-            Uri.parse(audioSourceMessage.uri), audioSourceMessage.headers);
-      } else if (audioSourceMessage is DashAudioSourceMessage) {
-        return DashAudioSourcePlayer(this, audioSourceMessage.id,
-            Uri.parse(audioSourceMessage.uri), audioSourceMessage.headers);
-      } else if (audioSourceMessage is HlsAudioSourceMessage) {
-        return HlsAudioSourcePlayer(this, audioSourceMessage.id,
-            Uri.parse(audioSourceMessage.uri), audioSourceMessage.headers);
-      } else if (audioSourceMessage is ConcatenatingAudioSourceMessage) {
-        return ConcatenatingAudioSourcePlayer(
-            this,
-            audioSourceMessage.id,
-            getAudioSources(audioSourceMessage.children),
-            audioSourceMessage.useLazyPreparation);
-      } else if (audioSourceMessage is ClippingAudioSourceMessage) {
-        return ClippingAudioSourcePlayer(
-            this,
-            audioSourceMessage.id,
-            getAudioSource(audioSourceMessage.child),
-            audioSourceMessage.start,
-            audioSourceMessage.end);
-      } else if (audioSourceMessage is LoopingAudioSourceMessage) {
-        return LoopingAudioSourcePlayer(this, audioSourceMessage.id,
-            getAudioSource(audioSourceMessage.child), audioSourceMessage.count);
-      } else {
-        throw Exception("Unknown AudioSource type: $audioSourceMessage");
-      }
-    } catch (e, stacktrace) {
-      print("$stacktrace");
-      rethrow;
-    }
+    if (audioSourceMessage is ProgressiveAudioSourceMessage) {
+      return ProgressiveAudioSourcePlayer(this, audioSourceMessage.id,
+          Uri.parse(audioSourceMessage.uri), audioSourceMessage.headers);
+    } else if (audioSourceMessage is DashAudioSourceMessage) {
+      return DashAudioSourcePlayer(this, audioSourceMessage.id,
+          Uri.parse(audioSourceMessage.uri), audioSourceMessage.headers);
+    } else if (audioSourceMessage is HlsAudioSourceMessage) {
+      return HlsAudioSourcePlayer(this, audioSourceMessage.id,
+          Uri.parse(audioSourceMessage.uri), audioSourceMessage.headers);
+    } else if (audioSourceMessage is ConcatenatingAudioSourceMessage) {
+      return ConcatenatingAudioSourcePlayer(
+          this,
+          audioSourceMessage.id,
+          getAudioSources(audioSourceMessage.children),
+          audioSourceMessage.useLazyPreparation);
+    } else if (audioSourceMessage is ClippingAudioSourceMessage) {
+      return ClippingAudioSourcePlayer(
+          this,
+          audioSourceMessage.id,
+          getAudioSource(audioSourceMessage.child),
+          audioSourceMessage.start,
+          audioSourceMessage.end);
+    } else if (audioSourceMessage is LoopingAudioSourceMessage) {
+      return LoopingAudioSourcePlayer(this, audioSourceMessage.id,
+          getAudioSource(audioSourceMessage.child), audioSourceMessage.count);
+    } else {
+      throw Exception("Unknown AudioSource type: $audioSourceMessage");
+    }
   }
 }


@@ -1,7 +1,7 @@
 name: just_audio_web
 description: Web platform implementation of just_audio
 homepage: https://github.com/ryanheise/just_audio/tree/master/just_audio_web
-version: 0.1.0
+version: 0.1.0+1
 flutter:
   plugin: