// just_audio/lib/just_audio.dart

import 'dart:async';
import 'dart:io';

import 'package:flutter/foundation.dart';
import 'package:flutter/services.dart';
import 'package:flutter/widgets.dart';
import 'package:path/path.dart' as p;
import 'package:path_provider/path_provider.dart';
import 'package:rxdart/rxdart.dart';

/// An object to manage playing audio from a URL, a local file or an asset.
///
/// ```
/// final player = AudioPlayer();
/// await player.setUrl('https://foo.com/bar.mp3');
/// player.play();
/// player.pause();
/// player.play();
/// await player.stop();
/// await player.setClip(start: Duration(seconds: 10), end: Duration(seconds: 20));
/// await player.play();
/// await player.setUrl('https://foo.com/baz.mp3');
/// await player.seek(Duration(minutes: 5));
/// player.play();
/// await player.stop();
/// await player.dispose();
/// ```
///
/// You must call [dispose] to release the resources used by this player,
/// including any temporary files created to cache assets.
///
/// The [AudioPlayer] instance transitions through different states as follows:
///
/// * [AudioPlaybackState.none]: immediately after instantiation and [dispose].
/// * [AudioPlaybackState.stopped]: eventually after [setUrl], [setFilePath],
///   [setAsset] or [setClip] completes, and immediately after [stop].
/// * [AudioPlaybackState.paused]: after [pause].
/// * [AudioPlaybackState.playing]: after [play].
/// * [AudioPlaybackState.connecting]: immediately after [setUrl],
///   [setFilePath] and [setAsset] while waiting for the media to load.
/// * [AudioPlaybackState.completed]: immediately after playback reaches the
///   end of the media or the end of the clip.
///
/// Additionally, after a [seek] request completes, the state will return to
/// whatever state the player was in prior to the seek request.
class AudioPlayer {
  static final _mainChannel = MethodChannel('com.ryanheise.just_audio.methods');

  static Future<MethodChannel> _init(int id) async {
    await _mainChannel.invokeMethod('init', ['$id']);
    return MethodChannel('com.ryanheise.just_audio.methods.$id');
  }

  /// Configures the audio session category on iOS. This method should be
  /// called before playing any audio. It has no effect on Android or Flutter
  /// for web.
  ///
  /// Note that the default category on iOS is [IosCategory.soloAmbient], but
  /// for a typical media app, Apple recommends setting this to
  /// [IosCategory.playback]. If you don't call this method, `just_audio` will
  /// respect any category already set on your app's audio session and will
  /// leave it alone. If no category has been set previously, it will remain
  /// [IosCategory.soloAmbient]. Likewise, if another audio plugin in your app
  /// has configured a particular category, that too will be left alone.
  ///
  /// Note: If you use other audio plugins in conjunction with this one, each
  /// of those plugins may override the setting you choose here. (You may
  /// consider asking the developers of the other plugins to provide similar
  /// configurability so that you have complete control over the overall
  /// category for your app.)
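  ///
  /// For example, a typical media app might call this once at startup, before
  /// creating any players (a minimal sketch):
  ///
  /// ```
  /// await AudioPlayer.setIosCategory(IosCategory.playback);
  /// final player = AudioPlayer();
  /// ```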
  static Future<void> setIosCategory(IosCategory category) async {
    await _mainChannel.invokeMethod('setIosCategory', category.index);
  }

  final Future<MethodChannel> _channel;
  _ProxyHttpServer _proxy;
  final int _id;
  Future<Duration> _durationFuture;
  final _durationSubject = BehaviorSubject<Duration>();

  // TODO: also broadcast this event on instantiation.
  AudioPlaybackEvent _audioPlaybackEvent = AudioPlaybackEvent(
    state: AudioPlaybackState.none,
    buffering: false,
    updatePosition: Duration.zero,
    updateTime: Duration.zero,
    bufferedPosition: Duration.zero,
    speed: 1.0,
    duration: Duration.zero,
    icyMetadata: IcyMetadata(
        info: IcyInfo(title: null, url: null),
        headers: IcyHeaders(
            bitrate: null,
            genre: null,
            name: null,
            metadataInterval: null,
            url: null,
            isPublic: null)),
  );

  Stream<AudioPlaybackEvent> _eventChannelStream;
  StreamSubscription<AudioPlaybackEvent> _eventChannelStreamSubscription;
  final _playbackEventSubject = BehaviorSubject<AudioPlaybackEvent>();
  final _playbackStateSubject = BehaviorSubject<AudioPlaybackState>();
  final _bufferingSubject = BehaviorSubject<bool>();
  final _bufferedPositionSubject = BehaviorSubject<Duration>();
  final _icyMetadataSubject = BehaviorSubject<IcyMetadata>();
  final _fullPlaybackStateSubject = BehaviorSubject<FullAudioPlaybackState>();
  double _volume = 1.0;
  double _speed = 1.0;
  bool _automaticallyWaitsToMinimizeStalling = true;
  File _cacheFile;

  /// Creates an [AudioPlayer].
  factory AudioPlayer() =>
      AudioPlayer._internal(DateTime.now().microsecondsSinceEpoch);

  AudioPlayer._internal(this._id) : _channel = _init(_id) {
    _eventChannelStream = EventChannel('com.ryanheise.just_audio.events.$_id')
        .receiveBroadcastStream()
        .map((data) {
      // Each platform event is a list: [state, buffering, updatePosition,
      // updateTime, bufferedPosition, icyMetadata?, duration?].
      final duration =
          Duration(milliseconds: data.length < 7 || data[6] < 0 ? -1 : data[6]);
      _durationFuture = Future.value(duration);
      _durationSubject.add(duration);
      return _audioPlaybackEvent = AudioPlaybackEvent(
        state: AudioPlaybackState.values[data[0]],
        buffering: data[1],
        updatePosition: Duration(milliseconds: data[2]),
        updateTime: Duration(milliseconds: data[3]),
        bufferedPosition: Duration(milliseconds: data[4]),
        speed: _speed,
        duration: duration,
        icyMetadata: data.length < 6 || data[5] == null
            ? null
            : IcyMetadata(
                info: IcyInfo(title: data[5][0][0], url: data[5][0][1]),
                headers: IcyHeaders(
                    bitrate: data[5][1][0],
                    genre: data[5][1][1],
                    name: data[5][1][2],
                    metadataInterval: data[5][1][3],
                    url: data[5][1][4],
                    isPublic: data[5][1][5])),
      );
    });
    _eventChannelStreamSubscription = _eventChannelStream.listen(
        _playbackEventSubject.add,
        onError: _playbackEventSubject.addError);
    _playbackStateSubject.addStream(playbackEventStream
        .map((state) => state.state)
        .distinct()
        .handleError((err, stack) {/* noop */}));
    _bufferingSubject.addStream(playbackEventStream
        .map((state) => state.buffering)
        .distinct()
        .handleError((err, stack) {/* noop */}));
    _bufferedPositionSubject.addStream(playbackEventStream
        .map((state) => state.bufferedPosition)
        .distinct()
        .handleError((err, stack) {/* noop */}));
    _icyMetadataSubject.addStream(playbackEventStream
        .map((state) => state.icyMetadata)
        .distinct()
        .handleError((err, stack) {/* noop */}));
    _fullPlaybackStateSubject.addStream(Rx.combineLatest3<AudioPlaybackState,
            bool, IcyMetadata, FullAudioPlaybackState>(
        playbackStateStream,
        bufferingStream,
        icyMetadataStream,
        (state, buffering, icyMetadata) =>
            FullAudioPlaybackState(state, buffering, icyMetadata)));
  }

  /// The duration of any media set via [setUrl], [setFilePath] or [setAsset],
  /// or null otherwise.
  Future<Duration> get durationFuture => _durationFuture;

  /// The duration of any media set via [setUrl], [setFilePath] or [setAsset].
  Stream<Duration> get durationStream => _durationSubject.stream;

  /// The latest [AudioPlaybackEvent].
  AudioPlaybackEvent get playbackEvent => _audioPlaybackEvent;

  /// A stream of [AudioPlaybackEvent]s.
  Stream<AudioPlaybackEvent> get playbackEventStream =>
      _playbackEventSubject.stream;

  /// The current [AudioPlaybackState].
  AudioPlaybackState get playbackState => _audioPlaybackEvent.state;

  /// A stream of [AudioPlaybackState]s.
  Stream<AudioPlaybackState> get playbackStateStream =>
      _playbackStateSubject.stream;

  /// Whether the player is buffering.
  bool get buffering => _audioPlaybackEvent.buffering;

  /// The current position of the player.
  Duration get position => _audioPlaybackEvent.position;

  /// The latest ICY metadata received from the audio stream, if any.
  IcyMetadata get icyMetadata => _audioPlaybackEvent.icyMetadata;

  /// A stream of buffering state changes.
  Stream<bool> get bufferingStream => _bufferingSubject.stream;

  /// A stream of ICY metadata received from the audio stream.
  Stream<IcyMetadata> get icyMetadataStream => _icyMetadataSubject.stream;

  /// A stream of buffered positions.
  Stream<Duration> get bufferedPositionStream =>
      _bufferedPositionSubject.stream;

  /// A stream of [FullAudioPlaybackState]s.
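  ///
  /// For example, to decide which icon to show on a play/pause button (a
  /// minimal sketch):
  ///
  /// ```
  /// player.fullPlaybackStateStream.listen((fullState) {
  ///   final playing =
  ///       fullState.state == AudioPlaybackState.playing && !fullState.buffering;
  ///   // Show a pause icon if playing, a play icon otherwise.
  /// });
  /// ```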
  Stream<FullAudioPlaybackState> get fullPlaybackStateStream =>
      _fullPlaybackStateSubject.stream;

  /// A stream periodically tracking the current position of this player.
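  ///
  /// For example, to drive a position display (a minimal sketch using
  /// Flutter's [StreamBuilder]):
  ///
  /// ```
  /// StreamBuilder<Duration>(
  ///   stream: player.getPositionStream(),
  ///   builder: (context, snapshot) =>
  ///       Text('${snapshot.data ?? Duration.zero}'),
  /// )
  /// ```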
  Stream<Duration> getPositionStream(
          [final Duration period = const Duration(milliseconds: 200)]) =>
      Rx.combineLatest2<AudioPlaybackEvent, void, Duration>(
          playbackEventStream,
          // TODO: emit periodically only in playing state.
          Stream.periodic(period),
          (state, _) => state.position).distinct();

  /// The current volume of the player.
  double get volume => _volume;

  /// The current speed of the player.
  double get speed => _speed;

  /// Whether the player should automatically delay playback in order to
  /// minimize stalling. (iOS 10.0 or later only.)
  bool get automaticallyWaitsToMinimizeStalling =>
      _automaticallyWaitsToMinimizeStalling;

  /// Loads audio media from a URL and completes with the duration of that
  /// audio, or a [PlatformException] if this call was interrupted by another
  /// call to [setUrl], [setFilePath], [setAsset] or [stop].
  ///
  /// On all platforms except the web, the supplied [headers] will be passed
  /// with the request. Currently, headers are not recursively applied to
  /// items within playlist files such as m3u8.
  ///
  /// On Android, DASH and HLS streams are detected only when the URL's path
  /// has an "mpd" or "m3u8" extension. If the URL does not have such an
  /// extension, you have no control over the server, and you know the type
  /// of the stream in advance, you may, as a workaround, supply the extension
  /// as a URL fragment, e.g.
  /// https://somewhere.com/somestream?x=etc#.m3u8
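  ///
  /// For example (a minimal sketch; the header name and value are
  /// placeholders):
  ///
  /// ```
  /// final duration = await player.setUrl(
  ///   'https://somewhere.com/somestream?x=etc#.m3u8',
  ///   headers: {'Authorization': 'Bearer <token>'},
  /// );
  /// ```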
  Future<Duration> setUrl(String url, {Map<String, String> headers}) async {
    try {
      if (!kIsWeb && headers != null) {
        if (_proxy == null) {
          _proxy = _ProxyHttpServer();
          await _proxy.start();
        }
        url = _proxy.addUrl(url, headers);
      }
      _durationFuture = _invokeMethod('setUrl', [url]).then((ms) =>
          (ms == null || ms < 0)
              ? const Duration(milliseconds: -1)
              : Duration(milliseconds: ms));
      final duration = await _durationFuture;
      _durationSubject.add(duration);
      return duration;
    } on PlatformException catch (e) {
      return Future.error(e.message);
    }
  }

  /// Loads audio media from a file and completes with the duration of that
  /// audio, or null if this call was interrupted by another call to [setUrl],
  /// [setFilePath] or [setAsset].
  Future<Duration> setFilePath(final String filePath) => setUrl(
      Platform.isAndroid ? File(filePath).uri.toString() : 'file://$filePath');

  /// Loads audio media from an asset and completes with the duration of that
  /// audio, or null if this call was interrupted by another call to [setUrl],
  /// [setFilePath] or [setAsset].
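  ///
  /// For example (a minimal sketch; the asset path is a placeholder and must
  /// be declared in your pubspec.yaml):
  ///
  /// ```
  /// await player.setAsset('assets/audio/intro.mp3');
  /// ```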
  Future<Duration> setAsset(final String assetPath) async {
    final file = await _getCacheFile(assetPath);
    this._cacheFile = file;
    if (!file.existsSync()) {
      await file.create(recursive: true);
    }
    await file
        .writeAsBytes((await rootBundle.load(assetPath)).buffer.asUint8List());
    return await setFilePath(file.path);
  }

  /// Gets the file for caching asset media, preserving the asset's file
  /// extension.
  Future<File> _getCacheFile(final String assetPath) async => File(p.join(
      (await getTemporaryDirectory()).path,
      'just_audio_asset_cache',
      '$_id${p.extension(assetPath)}'));

  /// Clips the audio to the given [start] and [end] timestamps. This method
  /// cannot be called from the [AudioPlaybackState.none] state.
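  ///
  /// For example, to play only the section between 10 and 20 seconds (a
  /// minimal sketch):
  ///
  /// ```
  /// await player.setUrl('https://foo.com/bar.mp3');
  /// await player.setClip(
  ///     start: Duration(seconds: 10), end: Duration(seconds: 20));
  /// await player.play();
  /// ```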
  Future<Duration> setClip({Duration start, Duration end}) async {
    _durationFuture =
        _invokeMethod('setClip', [start?.inMilliseconds, end?.inMilliseconds])
            .then((ms) => (ms == null || ms < 0)
                ? const Duration(milliseconds: -1)
                : Duration(milliseconds: ms));
    final duration = await _durationFuture;
    _durationSubject.add(duration);
    return duration;
  }

  /// Plays the currently loaded media from the current position. The [Future]
  /// returned by this method completes when playback completes, or when it is
  /// paused or stopped. This method can be called from any state except for:
  ///
  /// * [AudioPlaybackState.connecting]
  /// * [AudioPlaybackState.none]
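  ///
  /// For example (a minimal sketch):
  ///
  /// ```
  /// await player.setUrl('https://foo.com/bar.mp3');
  /// await player.play(); // Resolves once playback pauses, stops or completes.
  /// ```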
  Future<void> play() async {
    switch (playbackState) {
      case AudioPlaybackState.playing:
      case AudioPlaybackState.stopped:
      case AudioPlaybackState.completed:
      case AudioPlaybackState.paused:
        // Update local state immediately so that queries aren't surprised.
        _audioPlaybackEvent = _audioPlaybackEvent.copyWith(
          state: AudioPlaybackState.playing,
        );
        StreamSubscription subscription;
        Completer completer = Completer();
        bool startedPlaying = false;
        subscription = playbackStateStream.listen((state) {
          // TODO: It will be more reliable to let the platform
          // side wait for completion since events on the flutter
          // side can lag behind the platform side.
          if (startedPlaying &&
              (state == AudioPlaybackState.paused ||
                  state == AudioPlaybackState.stopped ||
                  state == AudioPlaybackState.completed)) {
            subscription.cancel();
            completer.complete();
          } else if (state == AudioPlaybackState.playing) {
            startedPlaying = true;
          }
        });
        await _invokeMethod('play');
        await completer.future;
        break;
      default:
        throw Exception(
            "Cannot call play from connecting/none states ($playbackState)");
    }
  }

  /// Pauses the currently playing media. It is legal to invoke this method
  /// only from the [AudioPlaybackState.playing] state.
  Future<void> pause() async {
    switch (playbackState) {
      case AudioPlaybackState.paused:
        break;
      case AudioPlaybackState.playing:
        // Update local state immediately so that queries aren't surprised.
        _audioPlaybackEvent = _audioPlaybackEvent.copyWith(
          state: AudioPlaybackState.paused,
        );
        // TODO: For pause, perhaps modify platform side to ensure new state
        // is broadcast before this method returns.
        await _invokeMethod('pause');
        break;
      default:
        throw Exception(
            "Can call pause only from the playing state ($playbackState)");
    }
  }

  /// Stops the currently playing media such that the next [play] invocation
  /// will start from position 0. It is legal to invoke this method only from
  /// the following states:
  ///
  /// * [AudioPlaybackState.playing]
  /// * [AudioPlaybackState.paused]
  /// * [AudioPlaybackState.completed]
  Future<void> stop() async {
    switch (playbackState) {
      case AudioPlaybackState.stopped:
        break;
      case AudioPlaybackState.connecting:
      case AudioPlaybackState.completed:
      case AudioPlaybackState.playing:
      case AudioPlaybackState.paused:
        // Update local state immediately so that queries aren't surprised.
        // NOTE: Android implementation already handles this.
        // TODO: Do the same for iOS so the line below becomes unnecessary.
        _audioPlaybackEvent = _audioPlaybackEvent.copyWith(
          state: AudioPlaybackState.stopped,
        );
        await _invokeMethod('stop');
        break;
      default:
        throw Exception("Cannot call stop from the none state");
    }
  }

  /// Sets the volume of this player, where 1.0 is normal volume.
  Future<void> setVolume(final double volume) async {
    _volume = volume;
    await _invokeMethod('setVolume', [volume]);
  }

  /// Sets the playback speed of this player, where 1.0 is normal speed.
  Future<void> setSpeed(final double speed) async {
    _speed = speed;
    await _invokeMethod('setSpeed', [speed]);
  }

  /// Sets automaticallyWaitsToMinimizeStalling on AVPlayer (iOS 10.0 or
  /// later). Defaults to true. Has no effect on Android.
  Future<void> setAutomaticallyWaitsToMinimizeStalling(
      final bool automaticallyWaitsToMinimizeStalling) async {
    _automaticallyWaitsToMinimizeStalling =
        automaticallyWaitsToMinimizeStalling;
    await _invokeMethod('setAutomaticallyWaitsToMinimizeStalling',
        [automaticallyWaitsToMinimizeStalling]);
  }

  /// Seeks to a particular position. Pass `null` to seek to the end of a
  /// live stream. It is legal to invoke this method from any state except
  /// for [AudioPlaybackState.none] and [AudioPlaybackState.connecting].
  Future<void> seek(final Duration position) async {
    // The platform side interprets -2 as a request to seek to the end of a
    // live stream.
    await _invokeMethod(
        'seek', [position != null ? position.inMilliseconds : -2]);
  }

  /// Releases all resources associated with this player. You must invoke
  /// this after you are done with the player. This method can be invoked
  /// from any state except for:
  ///
  /// * [AudioPlaybackState.none]
  /// * [AudioPlaybackState.connecting]
  Future<void> dispose() async {
    await _invokeMethod('dispose');
    if (_cacheFile?.existsSync() == true) {
      _cacheFile?.deleteSync();
    }
    _proxy?.stop();
    await _durationSubject.close();
    await _eventChannelStreamSubscription.cancel();
    await _playbackEventSubject.close();
  }

  Future<dynamic> _invokeMethod(String method, [dynamic args]) async =>
      (await _channel).invokeMethod(method, args);
}

/// Encapsulates the playback state and current position of the player.
class AudioPlaybackEvent {
  /// The current playback state.
  final AudioPlaybackState state;

  /// Whether the player is buffering.
  final bool buffering;

  /// The time at which the last position discontinuity happened, measured as
  /// time since the epoch.
  final Duration updateTime;

  /// The position at [updateTime].
  final Duration updatePosition;

  /// The buffered position.
  final Duration bufferedPosition;

  /// The playback speed.
  final double speed;

  /// The media duration.
  final Duration duration;

  /// The latest ICY metadata, if any.
  final IcyMetadata icyMetadata;

  AudioPlaybackEvent({
    @required this.state,
    @required this.buffering,
    @required this.updateTime,
    @required this.updatePosition,
    @required this.bufferedPosition,
    @required this.speed,
    @required this.duration,
    @required this.icyMetadata,
  });

  AudioPlaybackEvent copyWith({
    AudioPlaybackState state,
    bool buffering,
    Duration updateTime,
    Duration updatePosition,
    Duration bufferedPosition,
    double speed,
    Duration duration,
    IcyMetadata icyMetadata,
  }) =>
      AudioPlaybackEvent(
        state: state ?? this.state,
        buffering: buffering ?? this.buffering,
        updateTime: updateTime ?? this.updateTime,
        updatePosition: updatePosition ?? this.updatePosition,
        bufferedPosition: bufferedPosition ?? this.bufferedPosition,
        speed: speed ?? this.speed,
        duration: duration ?? this.duration,
        icyMetadata: icyMetadata ?? this.icyMetadata,
      );

  /// The current position of the player. While playing and not buffering,
  /// this extrapolates from [updatePosition] at the current [speed] for the
  /// time elapsed since [updateTime], clamped to [duration].
  Duration get position {
    if (state == AudioPlaybackState.playing && !buffering) {
      final result = updatePosition +
          (Duration(milliseconds: DateTime.now().millisecondsSinceEpoch) -
                  updateTime) *
              speed;
      return result <= duration ? result : duration;
    } else {
      return updatePosition;
    }
  }

  @override
  String toString() =>
      "{state=$state, updateTime=$updateTime, updatePosition=$updatePosition, speed=$speed}";
}

/// Enumerates the different playback states of a player.
///
/// If you also need access to the buffering state, use
/// [FullAudioPlaybackState].
enum AudioPlaybackState {
  none,
  stopped,
  paused,
  playing,
  connecting,
  completed,
}

/// Encapsulates the playback state and the buffering state.
///
/// These two states vary orthogonally, and so if [buffering] is true, you can
/// check [state] to determine whether this buffering is occurring during the
/// playing state or the paused state.
class FullAudioPlaybackState {
  final AudioPlaybackState state;
  final bool buffering;
  final IcyMetadata icyMetadata;

  FullAudioPlaybackState(this.state, this.buffering, this.icyMetadata);
}

class IcyInfo {
  final String title;
  final String url;

  IcyInfo({@required this.title, @required this.url});
}

class IcyHeaders {
  final int bitrate;
  final String genre;
  final String name;
  final int metadataInterval;
  final String url;
  final bool isPublic;

  IcyHeaders(
      {@required this.bitrate,
      @required this.genre,
      @required this.name,
      @required this.metadataInterval,
      @required this.url,
      @required this.isPublic});
}

class IcyMetadata {
  final IcyInfo info;
  final IcyHeaders headers;

  IcyMetadata({@required this.info, @required this.headers});
}

/// The audio session categories on iOS, to be used with
/// [AudioPlayer.setIosCategory].
///
/// The order of these values must match the order expected by the platform
/// side, since [AudioPlayer.setIosCategory] transmits the enum's index.
enum IosCategory {
  ambient,
  soloAmbient,
  playback,
  record,
  playAndRecord,
  multiRoute,
}

/// A local proxy HTTP server for making remote GET requests with headers.
///
/// TODO: Recursively attach headers to items in playlists like m3u8.
class _ProxyHttpServer {
  HttpServer _server;

  /// Maps request keys to [_ProxyRequest]s.
  final Map<String, _ProxyRequest> _uriMap = {};

  /// The port this server is bound to on localhost. This is set only after
  /// [start] has completed.
  int get port => _server.port;

  /// Associates headers with a URL. This may be called only after [start]
  /// has completed.
  String addUrl(String url, Map<String, String> headers) {
    final uri = Uri.parse(url);
    final path = _requestKey(uri);
    _uriMap[path] = _ProxyRequest(uri, headers);
    return uri
        .replace(
          scheme: 'http',
          host: InternetAddress.loopbackIPv4.address,
          port: port,
        )
        .toString();
  }

  /// A unique key for each request that can be processed by this proxy,
  /// made up of the URL path and query string. It is not possible to
  /// simultaneously track requests that have the same URL path and query
  /// but differ in other respects such as the port or headers.
  String _requestKey(Uri uri) => '${uri.path}?${uri.query}';

  /// Starts the server.
  Future start() async {
    _server = await HttpServer.bind(InternetAddress.loopbackIPv4, 0);
    _server.listen((request) async {
      if (request.method == 'GET') {
        // Look up the original URL and headers, then forward the request.
        final path = _requestKey(request.uri);
        final proxyRequest = _uriMap[path];
        final originRequest = await HttpClient().getUrl(proxyRequest.uri);
        for (var name in proxyRequest.headers.keys) {
          originRequest.headers.add(name, proxyRequest.headers[name]);
        }
        final originResponse = await originRequest.close();
        await originResponse.pipe(request.response);
      }
    });
  }

  /// Stops the server.
  Future stop() => _server.close();
}

/// A request for a URL and headers made by a [_ProxyHttpServer].
class _ProxyRequest {
  final Uri uri;
  final Map<String, String> headers;

  _ProxyRequest(this.uri, this.headers);
}