Mirror of https://github.com/flame-engine/flame.git (synced 2025-11-03 04:18:25 +08:00)
feat(audio): Set audio context AudioContextConfigFocus.mixWithOthers by default (#3483)
# Description

`audioplayers` used to mix its sounds with other audio playing on the device. Because mixing is not the recommended default experience on mobile, that behavior was changed upstream, but the change was not actually enforced due to a bug. For games, however, mixing is exactly what we want, since multiple sounds should be able to play simultaneously, so this PR restores the old behavior by making `AudioContextConfigFocus.mixWithOthers` the default audio context in `flame_audio`.

## Checklist

- [x] I have followed the [Contributor Guide] when preparing my PR.
- [ ] I have updated/added tests for ALL new/updated/fixed functionality.
- [ ] I have updated/added relevant documentation in `docs` and added dartdoc comments with `///`.
- [x] I have updated/added relevant examples in `examples` or `docs`.

Note: I'm not sure this can be tested properly, since audio focus is applied at the system level; at most the tests could verify that the configured values are set.

## Breaking Change?

I don't believe this is breaking: `audioplayers` used to behave like `mixWithOthers` due to a bug, but no longer does, and this PR restores that behavior for `flame_audio`.

- [ ] Yes, this PR is a breaking change.
- [x] No, this PR is not a breaking change.

---------

Co-authored-by: Lukas Klingsbo <me@lukas.fyi>
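For context, a minimal sketch of what the new default means for game code (the asset paths other than `music/bg_music.ogg` are placeholders, and this assumes a `flame_audio` version that contains this change): background music and one-shot effects now mix with whatever else the device is playing, unless a custom `AudioContext` is passed explicitly.

```dart
import 'package:flame_audio/flame_audio.dart';

Future<void> startAudio() async {
  // With this change flame_audio defaults to
  // AudioContextConfigFocus.mixWithOthers, so none of these calls
  // steal audio focus from other apps on the device.
  await FlameAudio.bgm.initialize();
  await FlameAudio.bgm.play('music/bg_music.ogg');
  await FlameAudio.play('sfx/explosion.ogg');
}
```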
.github/.cspell/people_usernames.txt:
@@ -7,6 +7,7 @@ erickzanardo # github.com/erickzanardo
 feroult # github.com/feroult
 fröber # github.com/Brixto
 gnarhard # github.com/gnarhard
+gustl # github.com/gustl22
 kenney # kenney.nl
 Klingsbo # github.com/spydon
 luanpotter # github.com/luanpotter
@@ -39,9 +39,9 @@ class AudioGame extends FlameGame with TapDetector {
 
   Rect get button => Rect.fromLTWH(20, size.y - 300, size.x - 40, 200);
 
-  void startBgmMusic() {
-    FlameAudio.bgm.initialize();
-    FlameAudio.bgm.play('music/bg_music.ogg');
+  Future<void> startBgmMusic() async {
+    await FlameAudio.bgm.initialize();
+    await FlameAudio.bgm.play('music/bg_music.ogg');
   }
 
   void fireOne() {
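Because `startBgmMusic` is now asynchronous, callers should await it. A hedged sketch of how surrounding game code might do that (the class name and the `onLoad` call site are assumptions, not the example's actual code):

```dart
import 'package:flame/game.dart';
import 'package:flame_audio/flame_audio.dart';

// Hypothetical game class used only to illustrate awaiting the
// now-async startBgmMusic from onLoad.
class MyAudioGame extends FlameGame {
  Future<void> startBgmMusic() async {
    await FlameAudio.bgm.initialize();
    await FlameAudio.bgm.play('music/bg_music.ogg');
  }

  @override
  Future<void> onLoad() async {
    await super.onLoad();
    // startBgmMusic now returns a Future, so await it to be sure the
    // Bgm player (and its AudioContext) is set up before playback.
    await startBgmMusic();
  }
}
```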
@@ -27,17 +27,24 @@ class Bgm extends WidgetsBindingObserver {
   /// Registers a [WidgetsBinding] observer.
   ///
   /// This must be called for auto-pause and resume to work properly.
-  void initialize() {
+  Future<void> initialize({AudioContext? audioContext}) async {
     if (_isRegistered) {
       return;
     }
     _isRegistered = true;
+
+    // Avoid requesting audio focus
+    audioContext ??= AudioContextConfig(
+      focus: AudioContextConfigFocus.mixWithOthers,
+    ).build();
+    await audioPlayer.setAudioContext(audioContext);
+
     WidgetsBinding.instance.addObserver(this);
   }
 
   /// Dispose the [WidgetsBinding] observer.
-  void dispose() {
-    audioPlayer.dispose();
+  Future<void> dispose() async {
+    await audioPlayer.dispose();
     if (!_isRegistered) {
       return;
     }
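If a game wants its background music to claim audio focus instead of mixing, the new optional parameter lets it override the default. A minimal sketch, assuming audioplayers' plain `AudioContextConfig()` defaults are the desired alternative:

```dart
import 'package:flame_audio/flame_audio.dart';

Future<void> initBgmWithFocus() async {
  // Passing an explicit AudioContext skips the mixWithOthers default;
  // AudioContextConfig() builds audioplayers' regular context instead.
  await FlameAudio.bgm.initialize(
    audioContext: AudioContextConfig().build(),
  );
  await FlameAudio.bgm.play('music/bg_music.ogg');
}
```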
@@ -1,5 +1,4 @@
 import 'package:audioplayers/audioplayers.dart';
-
 import 'package:flame_audio/bgm.dart';
 
 export 'package:audioplayers/audioplayers.dart';
@@ -49,9 +48,12 @@ class FlameAudio {
     String file,
     double volume,
     ReleaseMode releaseMode,
-    PlayerMode playerMode,
-  ) async {
+    PlayerMode playerMode, {
+    required AudioContext? audioContext,
+  }) async {
     final player = AudioPlayer()..audioCache = audioCache;
+    audioContext ??= _defaultAudioContext;
+    await player.setAudioContext(audioContext);
     await player.setReleaseMode(releaseMode);
     await player.play(
       AssetSource(file),
@@ -62,33 +64,48 @@ class FlameAudio {
   }
 
   /// Plays a single run of the given [file], with a given [volume].
-  static Future<AudioPlayer> play(String file, {double volume = 1.0}) {
+  static Future<AudioPlayer> play(
+    String file, {
+    double volume = 1.0,
+    AudioContext? audioContext,
+  }) {
     return _preparePlayer(
       file,
       volume,
       ReleaseMode.release,
       PlayerMode.lowLatency,
+      audioContext: audioContext,
     );
   }
 
   /// Plays, and keeps looping the given [file].
-  static Future<AudioPlayer> loop(String file, {double volume = 1.0}) {
+  static Future<AudioPlayer> loop(
+    String file, {
+    double volume = 1.0,
+    AudioContext? audioContext,
+  }) {
     return _preparePlayer(
       file,
       volume,
       ReleaseMode.loop,
       PlayerMode.lowLatency,
+      audioContext: audioContext,
     );
   }
 
   /// Plays a single run of the given file [file]
   /// This method supports long audio files
-  static Future<AudioPlayer> playLongAudio(String file, {double volume = 1.0}) {
+  static Future<AudioPlayer> playLongAudio(
+    String file, {
+    double volume = 1.0,
+    AudioContext? audioContext,
+  }) {
     return _preparePlayer(
       file,
       volume,
       ReleaseMode.release,
       PlayerMode.mediaPlayer,
+      audioContext: audioContext,
     );
   }
 
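The same override is available per call on the one-shot and looping helpers. A usage sketch with made-up asset names:

```dart
import 'package:flame_audio/flame_audio.dart';

Future<void> playSounds() async {
  // Uses the new mixWithOthers default.
  await FlameAudio.play('sfx/laser.ogg');

  // Overrides the default audio context for this one call.
  await FlameAudio.loop(
    'sfx/engine.ogg',
    volume: 0.5,
    audioContext: AudioContextConfig().build(),
  );
}
```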
@@ -98,12 +115,17 @@ class FlameAudio {
   /// NOTE: Length audio files on Android have an audio gap between loop
   /// iterations, this happens due to limitations on Android's native media
   /// player features. If you need a gapless loop, prefer the loop method.
-  static Future<AudioPlayer> loopLongAudio(String file, {double volume = 1.0}) {
+  static Future<AudioPlayer> loopLongAudio(
+    String file, {
+    double volume = 1.0,
+    AudioContext? audioContext,
+  }) {
     return _preparePlayer(
       file,
       volume,
       ReleaseMode.loop,
       PlayerMode.mediaPlayer,
+      audioContext: audioContext,
     );
   }
 
@@ -112,7 +134,12 @@ class FlameAudio {
     String sound, {
     required int maxPlayers,
     int minPlayers = 1,
-  }) {
+    AudioContext? audioContext,
+  }) async {
+    audioContext ??= _defaultAudioContext;
+    // TODO(gustl22): Probably set context for each player individually,
+    // as soon as functionality is supported.
+    await AudioPlayer.global.setAudioContext(audioContext);
     return AudioPool.create(
       source: AssetSource(sound),
       audioCache: audioCache,
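For pools, the passed context is currently applied through `AudioPlayer.global` (see the TODO above), so it affects all players rather than just the pool. A hedged sketch of a caller (the asset name, player count, and the `pool.start()` call are assumptions based on flame_audio's `AudioPool` API):

```dart
import 'package:flame_audio/flame_audio.dart';

Future<void> setUpSfxPool() async {
  // The audioContext passed here is currently applied via
  // AudioPlayer.global, i.e. it affects all players, not just the pool.
  final pool = await FlameAudio.createPool(
    'sfx/fire.ogg',
    maxPlayers: 4,
    audioContext: AudioContextConfig(
      focus: AudioContextConfigFocus.mixWithOthers,
    ).build(),
  );
  await pool.start();
}
```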
@@ -120,4 +147,8 @@ class FlameAudio {
       maxPlayers: maxPlayers,
     );
   }
+
+  static final AudioContext _defaultAudioContext = AudioContextConfig(
+    focus: AudioContextConfigFocus.mixWithOthers,
+  ).build();
 }
@@ -17,7 +17,7 @@ environment:
   flutter: ">=3.27.1"
 
 dependencies:
-  audioplayers: ^6.0.0
+  audioplayers: ^6.1.2
   flame: ^1.24.0
   flutter:
     sdk: flutter