Calling `stop` directly before `dispose` throws `Cannot complete a future with itself` error
Which API doesn't behave as documented, and how does it misbehave?
Calling AudioPlayer.stop without awaiting it before AudioPlayer.dispose throws an error.
Minimal reproduction project
Run the following three unit tests; the last one will fail.
mock.dart
import 'dart:async';
import 'package:flutter/services.dart';
import 'package:flutter_test/flutter_test.dart';
import 'package:just_audio/just_audio.dart';
import 'package:just_audio_platform_interface/just_audio_platform_interface.dart';
import 'package:plugin_platform_interface/plugin_platform_interface.dart';
/// A mock implementation of [JustAudioPlatform] that hands out
/// [MockAudioPlayer] instances instead of talking to a real platform.
class MockJustAudio
    with MockPlatformInterfaceMixin
    implements JustAudioPlatform {
  /// The player most recently created via [init], exposed for test inspection.
  MockAudioPlayer? mostRecentPlayer;

  /// Active players keyed by their platform id.
  final _players = <String, MockAudioPlayer>{};

  /// The test binding, used by players to detect whether a test is running.
  final TestWidgetsFlutterBinding binding;

  MockJustAudio(this.binding);

  @override
  Future<AudioPlayerPlatform> init(InitRequest request) async {
    if (_players.containsKey(request.id)) {
      throw PlatformException(
          code: "error",
          message: "Platform player ${request.id} already exists");
    }
    final player = MockAudioPlayer(request, binding);
    _players[request.id] = player;
    mostRecentPlayer = player;
    return player;
  }

  @override
  Future<DisposePlayerResponse> disposePlayer(
      DisposePlayerRequest request) async {
    // Remove before disposing so a re-entrant call for the same id cannot
    // double-dispose, and report an unknown id with a descriptive error
    // (consistent with [init]) rather than crashing on a null assertion.
    final player = _players.remove(request.id);
    if (player == null) {
      throw PlatformException(
          code: "error",
          message: "Platform player ${request.id} doesn't exist");
    }
    // Await the dispose so errors surface to the caller instead of being
    // dropped as an unawaited future.
    await player.dispose(DisposeRequest());
    return DisposePlayerResponse();
  }

  @override
  Future<DisposeAllPlayersResponse> disposeAllPlayers(
      DisposeAllPlayersRequest request) async {
    // Snapshot and clear first, then await each dispose. The original used
    // forEach with an unawaited async call, silently dropping any errors.
    final players = _players.values.toList();
    _players.clear();
    for (final player in players) {
      await player.dispose(DisposeRequest());
    }
    return DisposeAllPlayersResponse();
  }
}
/// The fixed duration the mock reports for every loaded audio source.
const audioSourceDuration = Duration(minutes: 2);

/// Client-side icy metadata fixture (the plugin-facing counterpart of
/// [icyMetadataMessage]) for use in test expectations.
final icyMetadata = IcyMetadata(
  headers: IcyHeaders(
    url: 'url',
    genre: 'Genre',
    metadataInterval: 3,
    bitrate: 100,
    isPublic: true,
    name: 'name',
  ),
  info: IcyInfo(
    title: 'title',
    url: 'url',
  ),
);

/// Platform-message icy metadata fixture mirroring [icyMetadata], matching
/// what [MockAudioPlayer] emits in its playback events.
final icyMetadataMessage = IcyMetadataMessage(
  headers: IcyHeadersMessage(
    url: 'url',
    genre: 'Genre',
    metadataInterval: 3,
    bitrate: 100,
    isPublic: true,
    name: 'name',
  ),
  info: IcyInfoMessage(
    title: 'title',
    url: 'url',
  ),
);
extension on Completer {
  /// Completes this completer unless it has already completed, making it
  /// safe to call more than once (e.g. pause after the play timer fired).
  void completeIfPending() {
    if (isCompleted) return;
    complete();
  }
}
/// A mock platform player that simulates loading delays and playback timing
/// entirely in memory, without touching any real platform channels.
class MockAudioPlayer implements AudioPlayerPlatform {
  final String _id;

  /// Emits playback events to [playbackEventMessageStream] listeners.
  final eventController = StreamController<PlaybackEventMessage>();

  /// Backs [playerDataMessageStream]. A single broadcast controller is used
  /// so repeated accesses of the getter can all listen and the controller
  /// can be closed in [dispose]. (Previously a fresh controller was created
  /// and leaked on every access of the getter.)
  final _playerDataController =
      StreamController<PlayerDataMessage>.broadcast();

  /// The load configuration passed at init time (not used by the mock).
  final AudioLoadConfigurationMessage? audioLoadConfiguration;

  AudioSourceMessage? _audioSource;
  ProcessingStateMessage _processingState = ProcessingStateMessage.idle;

  // Position bookkeeping: the playback position as of [_updateTime]; the
  // live position is extrapolated from this anchor in [_position].
  Duration _updatePosition = Duration.zero;
  DateTime _updateTime = DateTime.now();

  Duration? _duration;
  int? _index;
  var _playing = false;
  var _speed = 1.0;

  /// Completes when simulated playback finishes, is paused, or the player
  /// is disposed; [play] awaits this before returning.
  Completer<dynamic>? _playCompleter;

  /// Fires when the simulated source would reach its end.
  Timer? _playTimer;

  final TestWidgetsFlutterBinding binding;

  MockAudioPlayer(InitRequest request, this.binding)
      : _id = request.id,
        audioLoadConfiguration = request.audioLoadConfiguration;

  @override
  Stream<PlayerDataMessage> get playerDataMessageStream =>
      _playerDataController.stream;

  @override
  String get id => _id;

  @override
  Stream<PlaybackEventMessage> get playbackEventMessageStream =>
      eventController.stream;

  /// Simulates loading [LoadRequest.audioSourceMessage], including a small
  /// artificial delay when running inside a test.
  @override
  Future<LoadResponse> load(LoadRequest request) async {
    final audioSource = request.audioSourceMessage;
    _processingState = ProcessingStateMessage.loading;
    _broadcastPlaybackEvent();
    if (audioSource is UriAudioSourceMessage) {
      // Special URIs trigger canned load failures for error-path tests.
      if (audioSource.uri.contains('abort')) {
        throw PlatformException(code: 'abort', message: 'Failed to load URL');
      } else if (audioSource.uri.contains('404')) {
        throw PlatformException(code: '404', message: 'Not found');
      } else if (audioSource.uri.contains('error')) {
        throw PlatformException(code: 'error', message: 'Unknown error');
      }
      _duration = audioSourceDuration;
    } else if (audioSource is ClippingAudioSourceMessage) {
      _duration = (audioSource.end ?? audioSourceDuration) -
          (audioSource.start ?? Duration.zero);
    } else {
      // TODO: pull the sequence out of the audio source and return the
      // duration of the first item in the sequence.
      _duration = audioSourceDuration;
    }
    _audioSource = audioSource;
    _index = request.initialIndex ?? 0;
    if (binding.inTest) {
      // Simulate loading time.
      await Future<void>.delayed(const Duration(milliseconds: 100));
    }
    _setPosition(request.initialPosition ?? Duration.zero);
    _processingState = ProcessingStateMessage.ready;
    _broadcastPlaybackEvent();
    if (_playing) {
      // play() was called while we were still loading; start the clock now.
      _startTimer();
    }
    return LoadResponse(duration: _duration);
  }

  /// Starts simulated playback. The returned future completes when playback
  /// finishes, is paused, or the player is disposed.
  @override
  Future<PlayResponse> play(PlayRequest request) async {
    if (_playing) {
      return PlayResponse();
    }
    _playing = true;
    if (_duration != null) {
      _startTimer();
    }
    _playCompleter = Completer<dynamic>();
    await _playCompleter!.future;
    return PlayResponse();
  }

  /// Schedules the "playback completed" event for when the remaining
  /// (speed-adjusted) duration elapses.
  void _startTimer() {
    // Cancel any previous timer so at most one completion timer is active
    // at a time (e.g. if a new source is loaded mid-playback).
    _playTimer?.cancel();
    _playTimer = Timer(_remaining, () {
      _setPosition(_position);
      _processingState = ProcessingStateMessage.completed;
      _broadcastPlaybackEvent();
      _playCompleter?.completeIfPending();
    });
  }

  @override
  Future<PauseResponse> pause(PauseRequest request) async {
    if (!_playing) {
      return PauseResponse();
    }
    _playing = false;
    _playTimer?.cancel();
    // Release any caller currently awaiting play().
    _playCompleter?.completeIfPending();
    _setPosition(_position);
    _broadcastPlaybackEvent();
    return PauseResponse();
  }

  @override
  Future<SeekResponse> seek(SeekRequest request) async {
    _setPosition(request.position ?? Duration.zero);
    _index = request.index ?? 0;
    _broadcastPlaybackEvent();
    return SeekResponse();
  }

  // The platform calls below are not simulated; they simply return their
  // empty success responses.

  @override
  Future<SetAndroidAudioAttributesResponse> setAndroidAudioAttributes(
      SetAndroidAudioAttributesRequest request) async {
    return SetAndroidAudioAttributesResponse();
  }

  @override
  Future<SetAutomaticallyWaitsToMinimizeStallingResponse>
      setAutomaticallyWaitsToMinimizeStalling(
          SetAutomaticallyWaitsToMinimizeStallingRequest request) async {
    return SetAutomaticallyWaitsToMinimizeStallingResponse();
  }

  @override
  Future<SetLoopModeResponse> setLoopMode(SetLoopModeRequest request) async {
    return SetLoopModeResponse();
  }

  @override
  Future<SetShuffleModeResponse> setShuffleMode(
      SetShuffleModeRequest request) async {
    return SetShuffleModeResponse();
  }

  @override
  Future<SetShuffleOrderResponse> setShuffleOrder(
      SetShuffleOrderRequest request) async {
    return SetShuffleOrderResponse();
  }

  /// Applies the new speed and re-anchors the position extrapolation so the
  /// time already elapsed is not retroactively re-scaled.
  @override
  Future<SetSpeedResponse> setSpeed(SetSpeedRequest request) async {
    _speed = request.speed;
    _setPosition(_position);
    return SetSpeedResponse();
  }

  @override
  Future<SetPitchResponse> setPitch(SetPitchRequest request) async {
    return SetPitchResponse();
  }

  @override
  Future<SetSkipSilenceResponse> setSkipSilence(
      SetSkipSilenceRequest request) async {
    return SetSkipSilenceResponse();
  }

  @override
  Future<SetVolumeResponse> setVolume(SetVolumeRequest request) async {
    return SetVolumeResponse();
  }

  /// Tears the mock player down: broadcasts a final idle event, stops the
  /// completion timer, releases any pending [play] call, and closes both
  /// stream controllers. (Previously the controllers were leaked and a
  /// pending play() future was left forever incomplete.)
  @override
  Future<DisposeResponse> dispose(DisposeRequest request) async {
    _processingState = ProcessingStateMessage.idle;
    _broadcastPlaybackEvent();
    _playTimer?.cancel();
    _playCompleter?.completeIfPending();
    // close() is idempotent, so an accidental double dispose is harmless.
    await eventController.close();
    await _playerDataController.close();
    return DisposeResponse();
  }

  @override
  Future<ConcatenatingInsertAllResponse> concatenatingInsertAll(
      ConcatenatingInsertAllRequest request) async {
    // TODO
    return ConcatenatingInsertAllResponse();
  }

  @override
  Future<ConcatenatingMoveResponse> concatenatingMove(
      ConcatenatingMoveRequest request) async {
    // TODO
    return ConcatenatingMoveResponse();
  }

  @override
  Future<ConcatenatingRemoveRangeResponse> concatenatingRemoveRange(
      ConcatenatingRemoveRangeRequest request) async {
    // TODO
    return ConcatenatingRemoveRangeResponse();
  }

  /// Emits a [PlaybackEventMessage] reflecting the current mock state, with
  /// fixed icy metadata derived from the source URL (if any).
  void _broadcastPlaybackEvent() {
    String? url;
    if (_audioSource is UriAudioSourceMessage) {
      // Not sure why this cast is necessary...
      url = (_audioSource! as UriAudioSourceMessage).uri.toString();
    }
    eventController.add(PlaybackEventMessage(
      processingState: _processingState,
      updatePosition: _updatePosition,
      updateTime: _updateTime,
      bufferedPosition: _position,
      icyMetadata: IcyMetadataMessage(
        headers: IcyHeadersMessage(
          url: url,
          genre: 'Genre',
          metadataInterval: 3,
          bitrate: 100,
          isPublic: true,
          name: 'name',
        ),
        info: IcyInfoMessage(
          title: 'title',
          url: url,
        ),
      ),
      duration: _duration,
      currentIndex: _index,
      androidAudioSessionId: null,
    ));
  }

  /// The current playback position, extrapolated from the last anchor point
  /// while playing and clamped to the source duration.
  Duration get _position {
    if (_playing && _processingState == ProcessingStateMessage.ready) {
      final result =
          _updatePosition + (DateTime.now().difference(_updateTime)) * _speed;
      // _duration is set before the state becomes ready in load().
      return result <= _duration! ? result : _duration!;
    } else {
      return _updatePosition;
    }
  }

  /// Wall-clock time until the simulated source ends at the current speed.
  Duration get _remaining => (_duration! - _position) * (1 / _speed);

  /// Re-anchors the position extrapolation at [position] as of now.
  void _setPosition(Duration position) {
    _updatePosition = position;
    _updateTime = DateTime.now();
  }

  @override
  Future<SetCanUseNetworkResourcesForLiveStreamingWhilePausedResponse>
      setCanUseNetworkResourcesForLiveStreamingWhilePaused(
          SetCanUseNetworkResourcesForLiveStreamingWhilePausedRequest
              request) async {
    return SetCanUseNetworkResourcesForLiveStreamingWhilePausedResponse();
  }

  @override
  Future<SetPreferredPeakBitRateResponse> setPreferredPeakBitRate(
      SetPreferredPeakBitRateRequest request) async {
    return SetPreferredPeakBitRateResponse();
  }

  @override
  Future<AudioEffectSetEnabledResponse> audioEffectSetEnabled(
      AudioEffectSetEnabledRequest request) async {
    return AudioEffectSetEnabledResponse();
  }

  @override
  Future<AndroidLoudnessEnhancerSetTargetGainResponse>
      androidLoudnessEnhancerSetTargetGain(
          AndroidLoudnessEnhancerSetTargetGainRequest request) async {
    return AndroidLoudnessEnhancerSetTargetGainResponse();
  }

  /// Returns a fixed five-band equalizer configuration.
  @override
  Future<AndroidEqualizerGetParametersResponse> androidEqualizerGetParameters(
      AndroidEqualizerGetParametersRequest request) async {
    return AndroidEqualizerGetParametersResponse(
      parameters: AndroidEqualizerParametersMessage(
        minDecibels: 0.0,
        maxDecibels: 10.0,
        bands: [
          for (var i = 0; i < 5; i++)
            AndroidEqualizerBandMessage(
              index: i,
              lowerFrequency: i * 1000,
              upperFrequency: (i + 1) * 1000,
              centerFrequency: (i + 0.5) * 1000,
              gain: i * 0.1,
            ),
        ],
      ),
    );
  }

  @override
  Future<AndroidEqualizerBandSetGainResponse> androidEqualizerBandSetGain(
      AndroidEqualizerBandSetGainRequest request) async {
    return AndroidEqualizerBandSetGainResponse();
  }

  @override
  Future<SetAllowsExternalPlaybackResponse> setAllowsExternalPlayback(
      SetAllowsExternalPlaybackRequest request) {
    // TODO: implement setAllowsExternalPlayback
    throw UnimplementedError();
  }

  @override
  Future<SetWebCrossOriginResponse> setWebCrossOrigin(
      SetWebCrossOriginRequest request) {
    // TODO: implement setWebCrossOrigin
    throw UnimplementedError();
  }

  @override
  Future<SetWebSinkIdResponse> setWebSinkId(SetWebSinkIdRequest request) {
    // TODO: implement setWebSinkId
    throw UnimplementedError();
  }
}
final byteRangeData = List.generate(200, (i) => i);
/// A [StreamAudioSource] that serves byte-range slices of [byteRangeData].
class TestStreamAudioSource extends StreamAudioSource {
  TestStreamAudioSource({super.tag});

  /// Serves the half-open range [start, end) of the fixture data, defaulting
  /// to the whole payload when bounds are omitted.
  @override
  Future<StreamAudioResponse> request([int? start, int? end]) async {
    final from = start ?? 0;
    final to = end ?? byteRangeData.length;
    final slice = byteRangeData.sublist(from, end);
    return StreamAudioResponse(
      contentType: 'audio/mock',
      stream: Stream.value(slice),
      contentLength: to - from,
      offset: from,
      sourceLength: byteRangeData.length,
    );
  }
}
import 'package:flutter_test/flutter_test.dart';
import 'package:just_audio/just_audio.dart';
import 'package:just_audio_platform_interface/just_audio_platform_interface.dart';
import 'mock.dart';
void main() {
  // Shared setup: install a fresh mock platform and create a player on it.
  AudioPlayer newPlayer() {
    JustAudioPlatform.instance =
        MockJustAudio(TestWidgetsFlutterBinding.ensureInitialized());
    return AudioPlayer();
  }

  const url =
      "https://s3.amazonaws.com/scifri-episodes/scifri20181123-episode.mp3";

  test("Demonstrate that only disposing does not throw an error", () async {
    final player = newPlayer();
    await player.setAudioSource(AudioSource.uri(Uri.parse(url)));
    await player.dispose();
  });

  test(
      "Demonstrate that stopping and awaiting before dispose does not throw an error",
      () async {
    final player = newPlayer();
    await player.setAudioSource(AudioSource.uri(Uri.parse(url)));
    await player.stop();
    await player.dispose();
  });

  test(
      "Demonstrate that stopping without awaiting before dispose throws an error",
      () async {
    final player = newPlayer();
    await player.setAudioSource(AudioSource.uri(Uri.parse(url)));
    // Deliberately not awaited: racing stop() against dispose() is the
    // exact condition this test reproduces.
    player.stop();
    await player.dispose();
  });
}
To Reproduce (i.e. user steps, not code) I haven't actually gotten the app to compile with the new Flutter version, only got the tests to compile, but I imagine the steps to reproduce the behavior from a user's perspective are:
- Open the app.
- Close the app (so dispose is called).
- See the error in the logs.
Error messages
Cannot complete a future with itself: Instance of 'Future<AudioPlayerPlatform>'
Expected behavior No error should be thrown.
Desktop (please complete the following information):
- OS: NA (happens during unit testing)
- Browser: NA
Smartphone (please complete the following information):
- Device: NA
- OS: NA
Flutter SDK version
Channel stable, 3.29.2
Additional context
I have subclassed AudioPlayer and overridden the dispose method to call stop without awaiting it before super.dispose. Please let me know if that is not an intended or supported use case, but this has worked well before. However, now that I've updated from Flutter 3.22 to 3.29, I'm getting the error described above.
I dug around and found that this is caused by a new check that was added to Dart here in May 2024.
The issue is that the call to stop sets the _platform future here:
https://github.com/ryanheise/just_audio/blob/66ae4d39cc75b4d9a3e18aa9bc974510dfd6ad43/just_audio/lib/just_audio.dart#L1548
But if during the initialization it detects that the player is being disposed, it aborts and returns itself:
https://github.com/ryanheise/just_audio/blob/66ae4d39cc75b4d9a3e18aa9bc974510dfd6ad43/just_audio/lib/just_audio.dart#L1421
Regardless of whether not awaiting stop before calling dispose is a supported use case, I think this behaviour of returning the future as a result of itself is a bug. Instead of returning itself, maybe it should throw an explicit error instead.
Ensuring that all of the methods can be invoked without await is a broader task, but in this specific case of calling stop before dispose, I wouldn't consider this a particularly urgent case. After all, you can just call dispose() without calling stop() first to achieve the same effect. Or, you can even call stop() without dispose() and that is probably good enough since stop() releases all of the native platform resources anyway.
Ensuring that all of the methods can be invoked without await is a broader task,
Yeah, I'm not even sure that is something really worth supporting. I'm only asking if we could fix the behaviour of the setPlatform function in _setPlatformActive, because I don't think this function was ever intended to depend on (await) its own future.
In my opinion, the cleanest solution here is to throw an error in this case, but try to not let it bubble up as an uncaught async error. I can try to provide a PR if you want.
After all, you can just call dispose() without calling stop() first to achieve the same effect.
That is very good to know, I'll do that.
Neither approach seems to be a high priority, but in the long run, all methods should ideally be usable without awaiting their results, so that approach would be my preference.
Neither approach seems to be a high priority
Sorry, I'm not quite following, I only proposed one approach.
In any case, I opened #1429 which implements the proposed solution by throwing an error but ignoring it for the caller of stop and dispose.
As I said above, my preferred approach is:
Ensuring that all of the methods can be invoked without
await
So the two approaches are (1) address the issue specifically for stop/dispose only, and (2) address the issue across all methods.
Neither approach is a high priority, so I am happy to wait until I have time to address (2) properly.
Ok, but isn't it still worth fixing the current invalid code, even if it does not address any potential problems with other methods that might or might not exist? Especially since the fix is only two lines plus a comment.
If you still don't think it's worth fixing please let me know or just close #1429, I prefer not to have a PR open if it doesn't fit what you are looking for.
Small changes like this can have unexpected consequences, and given that this "fix" is motivated by a questionable use case that probably should not be done in the first place, it would be safer to wait until I have time to fix this properly, and in the meantime, modify your app to await the call to stop before disposing. E.g.:
extension AudioPlayerExtension on AudioPlayer {
  /// Stops the player and then disposes it, awaiting each step so that
  /// dispose never races an in-flight stop.
  Future<void> stopAndDispose() async {
    await stop();
    await dispose();
  }
}
Yep, I completely agree with that reasoning, thanks for letting me know. I'll keep this issue open so you can track addressing the issue across all methods, but I will close the PR since it only addresses this specific and (I agree) invalid use case.
Thanks for taking the time to look at this and for the awesome library!