feat: add mute sound functionality (#379)

* feat: add mute sound functionality

* fix footer layout while resizing

* remove mute button on mobile

Co-authored-by: Oscar <martinm.oscar@gmail.com>
Author: Kirpal Demian
Date: 2023-01-20 06:07:48 -05:00
Committed by: GitHub
Parent: 0a0364acb7
Commit: d17b4d2df0
40 changed files with 1089 additions and 577 deletions

Binary file not shown.

View File

@ -39,7 +39,7 @@ class AnimojiIntroView extends StatelessWidget {
},
),
),
const FullFooter(),
FullFooter(),
],
),
),

View File

@ -4,6 +4,7 @@ import 'package:convert_repository/convert_repository.dart';
import 'package:download_repository/download_repository.dart';
import 'package:flutter/material.dart';
import 'package:flutter_bloc/flutter_bloc.dart';
import 'package:holobooth/audio_player/audio_player.dart';
import 'package:holobooth/l10n/l10n.dart';
import 'package:holobooth/landing/landing.dart';
import 'package:holobooth_ui/holobooth_ui.dart';
@ -31,11 +32,14 @@ class App extends StatelessWidget {
RepositoryProvider.value(value: convertRepository),
RepositoryProvider.value(value: downloadRepository),
],
child: AnimatedFadeIn(
child: ResponsiveLayoutBuilder(
small: (_, __) => _AppView(theme: HoloboothTheme.small),
medium: (_, __) => _AppView(theme: HoloboothTheme.medium),
large: (_, __) => _AppView(theme: HoloboothTheme.standard),
child: BlocProvider(
create: (context) => MuteSoundBloc(),
child: AnimatedFadeIn(
child: ResponsiveLayoutBuilder(
small: (_, __) => _AppView(theme: HoloboothTheme.small),
medium: (_, __) => _AppView(theme: HoloboothTheme.medium),
large: (_, __) => _AppView(theme: HoloboothTheme.standard),
),
),
),
);

View File

@ -102,9 +102,6 @@ class $AssetsAudioGen {
/// File path: assets/audio/loading.mp3
String get loading => 'assets/audio/loading.mp3';
/// File path: assets/audio/loading_finished.mp3
String get loadingFinished => 'assets/audio/loading_finished.mp3';
/// File path: assets/audio/tab_click.mp3
String get tabClick => 'assets/audio/tab_click.mp3';
@ -115,7 +112,6 @@ class $AssetsAudioGen {
experienceAmbient,
faceNotDetected,
loading,
loadingFinished,
tabClick
];
}

View File

@ -1 +1,2 @@
export 'audio_player_mixin.dart';
export 'bloc/mute_sound_bloc.dart';
export 'widgets/widgets.dart';

View File

@ -1,80 +0,0 @@
import 'dart:async';
import 'package:audio_session/audio_session.dart';
import 'package:flutter/material.dart';
import 'package:just_audio/just_audio.dart';
/// A mixin that provides an internal [AudioPlayer] instance and methods to play
/// audio.
mixin AudioPlayerMixin<T extends StatefulWidget> on State<T> {
String get audioAssetPath;
static AudioPlayer? _audioPlayerOverride;
static set audioPlayerOverride(AudioPlayer? audioPlayer) =>
_audioPlayerOverride = audioPlayer;
// A future that indicates that the audio player is playing or about to play.
Future<void> _playing = Future.value();
late final AudioPlayer _audioPlayer = _audioPlayerOverride ?? AudioPlayer();
Future<void> loadAudio() async {
final audioSession = await AudioSession.instance;
// Inform the operating system of our app's audio attributes etc.
// We pick a reasonable default for an app that plays speech.
try {
await audioSession.configure(const AudioSessionConfiguration.speech());
await _audioPlayer.setAsset(audioAssetPath);
} catch (_) {}
}
Future<void> playAudio({bool loop = false}) async {
final completer = Completer<void>();
_playing = completer.future;
if (loop && _audioPlayer.loopMode != LoopMode.all) {
await _audioPlayer.setLoopMode(LoopMode.all);
}
try {
// Restarts the audio track.
await _audioPlayer.pause();
await _audioPlayer.seek(Duration.zero);
await _audioPlayer.play();
} catch (_) {
// If an error occurs, stop the audio.
await _audioPlayer.stop();
} finally {
completer.complete();
}
}
Future<void> stopAudio() async {
await _audioPlayer.stop();
}
@override
void dispose() {
_playing.then((_) => _audioPlayer.dispose());
super.dispose();
}
}
@visibleForTesting
class TestWidgetWithAudioPlayer extends StatefulWidget {
const TestWidgetWithAudioPlayer({super.key});
@override
State<StatefulWidget> createState() => TestStateWithAudioPlayer();
}
@visibleForTesting
class TestStateWithAudioPlayer extends State<TestWidgetWithAudioPlayer>
with AudioPlayerMixin {
@override
String get audioAssetPath => 'audioAssetPath';
@override
Widget build(BuildContext context) {
return Container();
}
}

View File

@ -0,0 +1,15 @@
import 'package:bloc/bloc.dart';
import 'package:equatable/equatable.dart';
part 'mute_sound_event.dart';
part 'mute_sound_state.dart';
class MuteSoundBloc extends Bloc<MuteSoundEvent, MuteSoundState> {
MuteSoundBloc() : super(const MuteSoundState(isMuted: false)) {
on<MuteSoundToggled>(
(event, emit) {
emit(MuteSoundState(isMuted: !state.isMuted));
},
);
}
}
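For orientation, a minimal standalone sketch of driving the bloc above, using only the exports already shown in this commit; the main function is illustrative and not part of the change:

import 'package:holobooth/audio_player/audio_player.dart';

Future<void> main() async {
  final bloc = MuteSoundBloc();
  assert(!bloc.state.isMuted); // starts unmuted

  final next = bloc.stream.first; // wait for the next emitted state
  bloc.add(MuteSoundToggled()); // each toggle flips isMuted
  final toggled = await next;
  assert(toggled.isMuted);

  await bloc.close();
}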

View File

@ -0,0 +1,10 @@
part of 'mute_sound_bloc.dart';
abstract class MuteSoundEvent extends Equatable {
const MuteSoundEvent();
@override
List<Object> get props => [];
}
class MuteSoundToggled extends MuteSoundEvent {}

View File

@ -0,0 +1,10 @@
part of 'mute_sound_bloc.dart';
class MuteSoundState extends Equatable {
const MuteSoundState({required this.isMuted});
final bool isMuted;
@override
List<Object> get props => [isMuted];
}

View File

@ -0,0 +1,131 @@
import 'dart:async';
import 'package:audio_session/audio_session.dart';
import 'package:flutter/material.dart';
import 'package:flutter_bloc/flutter_bloc.dart';
import 'package:holobooth/audio_player/audio_player.dart';
import 'package:just_audio/just_audio.dart' as just_audio;
class AudioPlayerController {
AudioPlayerController();
_AudioPlayerState? _state;
Future<void> playAudio() async => _state?.playAudio();
Future<void> stopAudio() async => _state?.stopAudio();
}
class AudioPlayer extends StatefulWidget {
const AudioPlayer({
super.key,
required this.audioAssetPath,
required this.child,
this.controller,
this.loop = false,
this.autoplay = false,
this.onAudioFinished,
});
final String audioAssetPath;
final Widget child;
final AudioPlayerController? controller;
final bool autoplay;
final bool loop;
final VoidCallback? onAudioFinished;
@override
State<AudioPlayer> createState() => _AudioPlayerState();
static just_audio.AudioPlayer? _audioPlayerOverride;
@visibleForTesting
// ignore: avoid_setters_without_getters
static set audioPlayerOverride(just_audio.AudioPlayer? audioPlayer) =>
_audioPlayerOverride = audioPlayer;
}
class _AudioPlayerState extends State<AudioPlayer> {
// A future that indicates that the audio player is playing or about to play.
Future<void> _playing = Future.value();
late final just_audio.AudioPlayer _audioPlayer =
AudioPlayer._audioPlayerOverride ?? just_audio.AudioPlayer();
@override
void initState() {
super.initState();
if (widget.controller != null) {
widget.controller!._state = this;
}
_init();
}
Future<void> _init() async {
if (context.read<MuteSoundBloc>().state.isMuted) {
await _audioPlayer.setVolume(0);
}
await loadAudio();
if (widget.autoplay) {
await playAudio();
}
}
Future<void> loadAudio() async {
final audioSession = await AudioSession.instance;
// Inform the operating system of our app's audio attributes etc.
// We pick a reasonable default for an app that plays speech.
try {
await audioSession.configure(const AudioSessionConfiguration.speech());
await _audioPlayer.setAsset(widget.audioAssetPath);
} catch (_) {}
if (widget.loop && _audioPlayer.loopMode != just_audio.LoopMode.all) {
await _audioPlayer.setLoopMode(just_audio.LoopMode.all);
}
}
Future<void> playAudio() async {
final completer = Completer<void>();
_playing = completer.future;
try {
// Restarts the audio track.
await _audioPlayer.pause();
await _audioPlayer.seek(Duration.zero);
await _audioPlayer.play();
} catch (_) {
// If an error occurs, stop the audio.
await _audioPlayer.stop();
} finally {
completer.complete();
widget.onAudioFinished?.call();
}
}
Future<void> stopAudio() async {
await _audioPlayer.stop();
}
@override
void dispose() {
_playing.then((_) => _audioPlayer.dispose());
super.dispose();
}
@override
Widget build(BuildContext context) {
return BlocListener<MuteSoundBloc, MuteSoundState>(
listenWhen: (previous, current) => previous.isMuted != current.isMuted,
listener: (context, state) {
if (state.isMuted) {
_audioPlayer.setVolume(0);
} else {
_audioPlayer.setVolume(1);
}
},
child: widget.child,
);
}
}
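A usage sketch of the widget above, mirroring how NextButton and RecordingButton adopt it further down; ClickSoundButton is an illustrative name and assumes a MuteSoundBloc ancestor, which this commit provides at the app root:

import 'package:flutter/material.dart';
import 'package:holobooth/assets/assets.dart';
import 'package:holobooth/audio_player/audio_player.dart';

// Illustrative widget, not part of the commit: plays a click sound when
// its button is pressed and mutes automatically via MuteSoundBloc.
class ClickSoundButton extends StatefulWidget {
  const ClickSoundButton({super.key});

  @override
  State<ClickSoundButton> createState() => _ClickSoundButtonState();
}

class _ClickSoundButtonState extends State<ClickSoundButton> {
  final _controller = AudioPlayerController();

  @override
  Widget build(BuildContext context) {
    return AudioPlayer(
      // Any asset from the generated Assets class works here.
      audioAssetPath: Assets.audio.buttonPress,
      controller: _controller,
      child: ElevatedButton(
        onPressed: _controller.playAudio,
        child: const Text('Play'),
      ),
    );
  }
}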

View File

@ -0,0 +1,28 @@
import 'package:flutter/material.dart';
import 'package:flutter_bloc/flutter_bloc.dart';
import 'package:holobooth/audio_player/audio_player.dart';
import 'package:holobooth_ui/holobooth_ui.dart';
class MuteButton extends StatelessWidget {
const MuteButton({super.key});
@override
Widget build(BuildContext context) {
return BlocBuilder<MuteSoundBloc, MuteSoundState>(
builder: (context, state) => OutlinedButton(
style: OutlinedButton.styleFrom(
shape: const CircleBorder(),
tapTargetSize: MaterialTapTargetSize.shrinkWrap,
minimumSize: const Size.square(40),
side: BorderSide(
color: HoloBoothColors.white.withOpacity(0.32),
),
),
onPressed: () => context.read<MuteSoundBloc>().add(MuteSoundToggled()),
child: state.isMuted
? const Icon(Icons.volume_off, size: 16)
: const Icon(Icons.volume_up, size: 16),
),
);
}
}
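A hedged sketch of placing the button on a screen, assuming the MuteSoundBloc is provided as in App above; the wrapper below is illustrative only and mirrors the PhotoboothBody placement later in this diff:

import 'package:flutter/material.dart';
import 'package:flutter_bloc/flutter_bloc.dart';
import 'package:holobooth/audio_player/audio_player.dart';

// Illustrative only: MuteButton just needs a MuteSoundBloc ancestor.
Widget buildMutedCorner() {
  return BlocProvider(
    create: (_) => MuteSoundBloc(),
    child: Scaffold(
      body: const Align(
        alignment: Alignment.topRight,
        child: Padding(
          padding: EdgeInsets.all(8),
          child: MuteButton(),
        ),
      ),
    ),
  );
}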

View File

@ -0,0 +1,2 @@
export 'audio_player.dart';
export 'mute_button.dart';

View File

@ -76,8 +76,8 @@ class _ConvertViewState extends State<ConvertView> {
),
Positioned.fill(
child: Column(
children: const [
Expanded(child: ConvertBody()),
children: [
const Expanded(child: ConvertBody()),
FullFooter(),
],
),

View File

@ -3,7 +3,7 @@ import 'package:holobooth/assets/assets.dart';
import 'package:holobooth/audio_player/audio_player.dart';
import 'package:holobooth_ui/holobooth_ui.dart';
class ConvertLoadingAnimation extends StatefulWidget {
class ConvertLoadingAnimation extends StatelessWidget {
const ConvertLoadingAnimation({
super.key,
required this.dimension,
@ -11,56 +11,37 @@ class ConvertLoadingAnimation extends StatefulWidget {
final double dimension;
@override
State<ConvertLoadingAnimation> createState() =>
_ConvertLoadingAnimationState();
}
class _ConvertLoadingAnimationState extends State<ConvertLoadingAnimation>
with AudioPlayerMixin {
@override
String get audioAssetPath => Assets.audio.loading;
@override
void initState() {
super.initState();
_init();
}
Future<void> _init() async {
// Try to load audio from a source and catch any errors.
try {
await loadAudio();
await playAudio(loop: true);
} catch (_) {}
}
@override
Widget build(BuildContext context) {
return BlurryContainer(
blur: 24,
child: Center(
child: SizedBox.square(
dimension: widget.dimension,
child: Stack(
children: [
SizedBox.square(
dimension: widget.dimension,
child: Assets.icons.loadingCircle.image(
key: const Key('LoadingOverlay_LoadingIndicator'),
fit: BoxFit.cover,
),
),
Padding(
padding: EdgeInsets.all(widget.dimension * 0.015),
child: SizedBox.square(
dimension: widget.dimension,
child: const CircularProgressIndicator(
color: HoloBoothColors.convertLoading,
return AudioPlayer(
audioAssetPath: Assets.audio.loading,
autoplay: true,
loop: true,
child: BlurryContainer(
blur: 24,
child: Center(
child: SizedBox.square(
dimension: dimension,
child: Stack(
children: [
SizedBox.square(
dimension: dimension,
child: Assets.icons.loadingCircle.image(
key: const Key('LoadingOverlay_LoadingIndicator'),
fit: BoxFit.cover,
),
),
),
],
Padding(
padding: EdgeInsets.all(dimension * 0.015),
child: SizedBox.square(
dimension: dimension,
child: const CircularProgressIndicator(
color: HoloBoothColors.convertLoading,
),
),
),
],
),
),
),
),

View File

@ -1,15 +1,19 @@
import 'package:flutter/material.dart';
import 'package:holobooth/audio_player/audio_player.dart';
import 'package:holobooth/footer/footer.dart';
import 'package:holobooth/widgets/widgets.dart';
import 'package:holobooth_ui/holobooth_ui.dart';
import 'package:platform_helper/platform_helper.dart';
class FullFooter extends StatelessWidget {
const FullFooter({
FullFooter({
super.key,
this.showIconsForSmall = true,
});
PlatformHelper? platformHelper,
}) : _platformHelper = platformHelper ?? PlatformHelper();
final bool showIconsForSmall;
final PlatformHelper _platformHelper;
@override
Widget build(BuildContext context) {
@ -41,17 +45,23 @@ class FullFooter extends StatelessWidget {
color: HoloBoothColors.scrim,
borderRadius: BorderRadius.circular(16),
),
child: showIconsForSmall
? Row(
mainAxisAlignment: MainAxisAlignment.center,
children: [
for (final icon in icons) ...[
icon,
if (icon != icons.last) gap,
],
],
)
: child,
child: Row(
mainAxisSize: MainAxisSize.min,
children: [
if (showIconsForSmall)
for (final icon in icons) ...[
icon,
if (icon != icons.last) gap,
]
else ...[
if (child != null) Flexible(child: child),
if (!_platformHelper.isMobile) ...[
gap,
const MuteButton(),
],
],
],
),
),
);
},
@ -62,24 +72,28 @@ class FullFooter extends StatelessWidget {
vertical: 24,
),
child: Row(
crossAxisAlignment: CrossAxisAlignment.end,
children: [
for (final icon in icons) ...[
icon,
if (icon != icons.last) gap,
gap,
],
Expanded(
child: Align(
alignment: Alignment.bottomRight,
alignment: Alignment.centerRight,
child: child,
),
)
),
if (!_platformHelper.isMobile) ...[
gap,
const MuteButton(),
],
],
),
);
},
child: Wrap(
alignment: WrapAlignment.center,
crossAxisAlignment: WrapCrossAlignment.center,
runSpacing: 8,
children: const [
FlutterForwardFooterLink(),

View File

@ -1,9 +1,14 @@
import 'package:flutter/material.dart';
import 'package:holobooth/audio_player/audio_player.dart';
import 'package:holobooth/widgets/widgets.dart';
import 'package:holobooth_ui/holobooth_ui.dart';
import 'package:platform_helper/platform_helper.dart';
class SimplifiedFooter extends StatelessWidget {
const SimplifiedFooter({super.key});
SimplifiedFooter({super.key, PlatformHelper? platformHelper})
: _platformHelper = platformHelper ?? PlatformHelper();
final PlatformHelper _platformHelper;
static const _separationSize = 22.0;
@ -30,14 +35,18 @@ class SimplifiedFooter extends StatelessWidget {
large: (_, __) {
return Container(
alignment: Alignment.bottomLeft,
padding: const EdgeInsets.only(left: 48, bottom: 48),
padding: const EdgeInsets.only(left: 48, bottom: 24, right: 24),
child: Row(
children: const [
FlutterIconLink(),
SizedBox(width: _separationSize),
FirebaseIconLink(),
SizedBox(width: _separationSize),
TensorflowIconLink()
children: [
const FlutterIconLink(),
const SizedBox(width: _separationSize),
const FirebaseIconLink(),
const SizedBox(width: _separationSize),
const TensorflowIconLink(),
if (!_platformHelper.isMobile) ...[
const Spacer(),
const MuteButton(),
],
],
),
);

View File

@ -13,26 +13,22 @@ class NextButton extends StatefulWidget {
State<NextButton> createState() => _NextButtonState();
}
class _NextButtonState extends State<NextButton> with AudioPlayerMixin {
@override
String get audioAssetPath => Assets.audio.buttonPress;
@override
void initState() {
super.initState();
loadAudio();
}
class _NextButtonState extends State<NextButton> {
final _audioPlayerController = AudioPlayerController();
@override
Widget build(BuildContext context) {
final l10n = context.l10n;
return GradientElevatedButton(
onPressed: () {
playAudio();
widget.onNextPressed();
},
child: Text(l10n.nextButtonText),
return AudioPlayer(
audioAssetPath: Assets.audio.buttonPress,
controller: _audioPlayerController,
child: GradientElevatedButton(
onPressed: () {
_audioPlayerController.playAudio();
widget.onNextPressed();
},
child: Text(l10n.nextButtonText),
),
);
}
}

View File

@ -24,13 +24,11 @@ class PrimarySelectionView extends StatefulWidget {
}
class _PrimarySelectionViewState extends State<PrimarySelectionView>
with TickerProviderStateMixin, AudioPlayerMixin {
with TickerProviderStateMixin {
final _audioPlayerController = AudioPlayerController();
late final TabController _tabController;
late int _indexSelected;
@override
String get audioAssetPath => Assets.audio.tabClick;
@override
void initState() {
super.initState();
@ -48,7 +46,6 @@ class _PrimarySelectionViewState extends State<PrimarySelectionView>
_indexSelected = _tabController.index;
});
});
loadAudio();
}
@override
@ -62,74 +59,79 @@ class _PrimarySelectionViewState extends State<PrimarySelectionView>
final isSmall =
MediaQuery.of(context).size.width <= HoloboothBreakpoints.small;
const buttonPadding = EdgeInsets.only(left: 15, right: 15, bottom: 15);
return Column(
crossAxisAlignment: CrossAxisAlignment.stretch,
children: [
Padding(
padding: const EdgeInsets.all(12).copyWith(bottom: isSmall ? 12 : 24),
child: TabBar(
onTap: (_) {
playAudio();
},
controller: _tabController,
tabs: const [
PrimarySelectionTab(
iconData: Icons.face,
),
PrimarySelectionTab(
iconData: Icons.wallpaper,
),
PrimarySelectionTab(
iconData: Icons.color_lens,
),
],
),
),
if (!widget.collapsed)
Expanded(
child: TabBarView(
key: PrimarySelectionView.primaryTabBarViewKey,
return AudioPlayer(
audioAssetPath: Assets.audio.tabClick,
controller: _audioPlayerController,
child: Column(
crossAxisAlignment: CrossAxisAlignment.stretch,
children: [
Padding(
padding:
const EdgeInsets.all(12).copyWith(bottom: isSmall ? 12 : 24),
child: TabBar(
onTap: (_) {
_audioPlayerController.playAudio();
},
controller: _tabController,
physics: const NeverScrollableScrollPhysics(),
children: const [
CharacterSelectionTabBarView(),
BackgroundSelectionTabBarView(),
PropsSelectionTabBarView(),
tabs: const [
PrimarySelectionTab(
iconData: Icons.face,
),
PrimarySelectionTab(
iconData: Icons.wallpaper,
),
PrimarySelectionTab(
iconData: Icons.color_lens,
),
],
),
),
if (_indexSelected == 0)
Padding(
padding: buttonPadding,
child: NextButton(
key: const Key('primarySelection_nextButton_character'),
onNextPressed: () {
_tabController.animateTo(1);
},
if (!widget.collapsed)
Expanded(
child: TabBarView(
key: PrimarySelectionView.primaryTabBarViewKey,
controller: _tabController,
physics: const NeverScrollableScrollPhysics(),
children: const [
CharacterSelectionTabBarView(),
BackgroundSelectionTabBarView(),
PropsSelectionTabBarView(),
],
),
),
),
if (_indexSelected == 1)
Padding(
padding: buttonPadding,
child: NextButton(
key: const Key('primarySelection_nextButton_background'),
onNextPressed: () {
_tabController.animateTo(2);
},
if (_indexSelected == 0)
Padding(
padding: buttonPadding,
child: NextButton(
key: const Key('primarySelection_nextButton_character'),
onNextPressed: () {
_tabController.animateTo(1);
},
),
),
),
if (_indexSelected == 2)
Padding(
padding: buttonPadding,
child: RecordingButton(
onRecordingPressed: () {
context
.read<PhotoBoothBloc>()
.add(const PhotoBoothGetReadyStarted());
},
if (_indexSelected == 1)
Padding(
padding: buttonPadding,
child: NextButton(
key: const Key('primarySelection_nextButton_background'),
onNextPressed: () {
_tabController.animateTo(2);
},
),
),
),
],
if (_indexSelected == 2)
Padding(
padding: buttonPadding,
child: RecordingButton(
onRecordingPressed: () {
context
.read<PhotoBoothBloc>()
.add(const PhotoBoothGetReadyStarted());
},
),
),
],
),
);
}
}

View File

@ -13,26 +13,22 @@ class RecordingButton extends StatefulWidget {
State<RecordingButton> createState() => _RecordingButtonState();
}
class _RecordingButtonState extends State<RecordingButton>
with AudioPlayerMixin {
@override
String get audioAssetPath => Assets.audio.counting3Seconds;
@override
void initState() {
super.initState();
loadAudio();
}
class _RecordingButtonState extends State<RecordingButton> {
final _audioPlayerController = AudioPlayerController();
@override
Widget build(BuildContext context) {
final l10n = context.l10n;
return GradientElevatedButton(
onPressed: () {
playAudio();
widget.onRecordingPressed();
},
child: Text(l10n.recordButtonText),
return AudioPlayer(
audioAssetPath: Assets.audio.counting3Seconds,
controller: _audioPlayerController,
child: GradientElevatedButton(
onPressed: () {
_audioPlayerController.playAudio();
widget.onRecordingPressed();
},
child: Text(l10n.recordButtonText),
),
);
}
}

View File

@ -27,8 +27,8 @@ class LandingView extends StatelessWidget {
),
Positioned.fill(
child: Column(
children: const [
Expanded(child: LandingBody()),
children: [
const Expanded(child: LandingBody()),
FullFooter(showIconsForSmall: false),
],
),

View File

@ -13,32 +13,27 @@ class LandingTakePhotoButton extends StatefulWidget {
State<LandingTakePhotoButton> createState() => _LandingTakePhotoButtonState();
}
class _LandingTakePhotoButtonState extends State<LandingTakePhotoButton>
with AudioPlayerMixin {
@override
String get audioAssetPath => Assets.audio.buttonPress;
@override
void initState() {
super.initState();
loadAudio();
}
class _LandingTakePhotoButtonState extends State<LandingTakePhotoButton> {
final _audioPlayerController = AudioPlayerController();
@override
Widget build(BuildContext context) {
final l10n = context.l10n;
return GradientElevatedButton(
onPressed: () {
playAudio();
trackEvent(
category: 'button',
action: 'click-start-photobooth',
label: 'start-photobooth',
);
Navigator.of(context).push<void>(AnimojiIntroPage.route());
},
child: Text(l10n.landingPageTakePhotoButtonText),
return AudioPlayer(
audioAssetPath: Assets.audio.buttonPress,
controller: _audioPlayerController,
child: GradientElevatedButton(
onPressed: () {
_audioPlayerController.playAudio();
trackEvent(
category: 'button',
action: 'click-start-photobooth',
label: 'start-photobooth',
);
Navigator.of(context).push<void>(AnimojiIntroPage.route());
},
child: Text(l10n.landingPageTakePhotoButtonText),
),
);
}
}

View File

@ -39,35 +39,28 @@ class PhotoBoothView extends StatefulWidget {
State<PhotoBoothView> createState() => _PhotoBoothViewState();
}
class _PhotoBoothViewState extends State<PhotoBoothView> with AudioPlayerMixin {
@override
String get audioAssetPath => Assets.audio.experienceAmbient;
@override
void initState() {
super.initState();
_loadAndPlayAudio();
}
Future<void> _loadAndPlayAudio() async {
await loadAudio();
await playAudio(loop: true);
}
class _PhotoBoothViewState extends State<PhotoBoothView> {
final _audioPlayerController = AudioPlayerController();
@override
Widget build(BuildContext context) {
return BlocListener<PhotoBoothBloc, PhotoBoothState>(
listener: (context, state) {
if (state.isFinished) {
stopAudio();
return AudioPlayer(
audioAssetPath: Assets.audio.experienceAmbient,
controller: _audioPlayerController,
autoplay: true,
loop: true,
child: BlocListener<PhotoBoothBloc, PhotoBoothState>(
listener: (context, state) {
if (state.isFinished) {
_audioPlayerController.stopAudio();
Navigator.of(context).pushReplacement(
ConvertPage.route(state.frames),
);
}
},
child: const Scaffold(body: PhotoboothBody()),
Navigator.of(context).pushReplacement(
ConvertPage.route(state.frames),
);
}
},
child: Scaffold(body: PhotoboothBody()),
),
);
}
}

View File

@ -4,56 +4,38 @@ import 'package:holobooth/audio_player/audio_player.dart';
import 'package:holobooth/l10n/l10n.dart';
import 'package:holobooth_ui/holobooth_ui.dart';
class HoloBoothCharacterError extends StatefulWidget {
class HoloBoothCharacterError extends StatelessWidget {
const HoloBoothCharacterError({super.key});
@override
State<HoloBoothCharacterError> createState() =>
_HoloBoothCharacterErrorState();
}
class _HoloBoothCharacterErrorState extends State<HoloBoothCharacterError>
with AudioPlayerMixin {
@override
String get audioAssetPath => Assets.audio.faceNotDetected;
@override
void initState() {
super.initState();
_loadAndPlayAudio();
}
Future<void> _loadAndPlayAudio() async {
await loadAudio();
await playAudio();
}
@override
Widget build(BuildContext context) {
final l10n = context.l10n;
return BlurryContainer(
color: HoloBoothColors.blurrySurface,
blur: 7.5,
borderRadius: BorderRadius.circular(38),
padding: const EdgeInsets.symmetric(horizontal: 32, vertical: 16),
child: Row(
mainAxisSize: MainAxisSize.min,
children: [
const Icon(
Icons.error,
color: HoloBoothColors.red,
),
const SizedBox(width: 12),
Text(
l10n.faceNotDetected,
style: Theme.of(context)
.textTheme
.bodyLarge
?.copyWith(color: HoloBoothColors.white),
),
],
return AudioPlayer(
audioAssetPath: Assets.audio.faceNotDetected,
autoplay: true,
child: BlurryContainer(
color: HoloBoothColors.blurrySurface,
blur: 7.5,
borderRadius: BorderRadius.circular(38),
padding: const EdgeInsets.symmetric(horizontal: 32, vertical: 16),
child: Row(
mainAxisSize: MainAxisSize.min,
children: [
const Icon(
Icons.error,
color: HoloBoothColors.red,
),
const SizedBox(width: 12),
Text(
l10n.faceNotDetected,
style: Theme.of(context)
.textTheme
.bodyLarge
?.copyWith(color: HoloBoothColors.white),
),
],
),
),
);
}

View File

@ -1,23 +1,28 @@
import 'package:camera/camera.dart';
import 'package:flutter/material.dart';
import 'package:flutter_bloc/flutter_bloc.dart';
import 'package:holobooth/audio_player/audio_player.dart';
import 'package:holobooth/avatar_detector/avatar_detector.dart';
import 'package:holobooth/camera/camera.dart';
import 'package:holobooth/footer/footer.dart';
import 'package:holobooth/in_experience_selection/in_experience_selection.dart';
import 'package:holobooth/photo_booth/photo_booth.dart';
import 'package:holobooth_ui/holobooth_ui.dart';
import 'package:platform_helper/platform_helper.dart';
import 'package:screen_recorder/screen_recorder.dart';
Exporter _getExporter() => Exporter();
class PhotoboothBody extends StatefulWidget {
const PhotoboothBody({
PhotoboothBody({
super.key,
ValueGetter<Exporter>? exporter,
}) : _exporter = exporter ?? _getExporter;
PlatformHelper? platformHelper,
}) : _exporter = exporter ?? _getExporter,
_platformHelper = platformHelper ?? PlatformHelper();
final ValueGetter<Exporter> _exporter;
final PlatformHelper _platformHelper;
@override
State<PhotoboothBody> createState() => _PhotoboothBodyState();
@ -94,10 +99,19 @@ class _PhotoboothBodyState extends State<PhotoboothBody> {
],
),
),
const Align(
Align(
alignment: Alignment.bottomCenter,
child: SimplifiedFooter(),
),
if (constraints.maxWidth <= HoloboothBreakpoints.small &&
!widget._platformHelper.isMobile)
const Align(
alignment: Alignment.topRight,
child: Padding(
padding: EdgeInsets.all(8),
child: MuteButton(),
),
),
Align(child: CameraView(onCameraReady: _onCameraReady)),
if (_isCameraAvailable)
CameraStreamListener(cameraController: _cameraController!),

View File

@ -45,8 +45,8 @@ class ShareView extends StatelessWidget {
const Positioned.fill(child: ShareBackground()),
Positioned.fill(
child: Column(
children: const [
Expanded(child: ShareBody()),
children: [
const Expanded(child: ShareBody()),
FullFooter(),
],
),

View File

@ -1,164 +0,0 @@
import 'dart:async';
import 'package:flutter/material.dart';
import 'package:flutter/services.dart';
import 'package:flutter_test/flutter_test.dart';
import 'package:holobooth/audio_player/audio_player.dart';
import 'package:just_audio/just_audio.dart';
import 'package:mocktail/mocktail.dart';
class _MockAudioPlayer extends Mock implements AudioPlayer {}
void main() {
TestWidgetsFlutterBinding.ensureInitialized();
setUpAll(() {
registerFallbackValue(LoopMode.all);
});
group('AudioPlayerMixin', () {
late AudioPlayer audioPlayer;
setUp(() {
audioPlayer = _MockAudioPlayer();
when(audioPlayer.pause).thenAnswer((_) async {});
when(audioPlayer.play).thenAnswer((_) async {});
when(audioPlayer.stop).thenAnswer((_) async {});
when(audioPlayer.dispose).thenAnswer((_) async {});
when(() => audioPlayer.setLoopMode(any())).thenAnswer((_) async {});
when(() => audioPlayer.loopMode).thenReturn(LoopMode.off);
when(() => audioPlayer.seek(any())).thenAnswer((_) async {});
when(() => audioPlayer.setAsset(any()))
.thenAnswer((_) async => Duration.zero);
AudioPlayerMixin.audioPlayerOverride = audioPlayer;
const MethodChannel('com.ryanheise.audio_session')
.setMockMethodCallHandler((call) async {
if (call.method == 'getConfiguration') {
return {};
}
});
});
tearDown(() {
AudioPlayerMixin.audioPlayerOverride = null;
});
group('loadAudio', () {
test('sets the audio asset', () async {
final state = TestStateWithAudioPlayer();
await state.loadAudio();
verify(() => audioPlayer.setAsset('audioAssetPath')).called(1);
});
});
group('playAudio', () {
test(
'sets the loop mode when loop is true and not already set',
() async {
final state = TestStateWithAudioPlayer();
await state.playAudio(loop: true);
verify(() => audioPlayer.setLoopMode(LoopMode.all)).called(1);
},
);
test('calls stop on the audio player if playing fails', () async {
when(audioPlayer.play).thenThrow(Exception());
final state = TestStateWithAudioPlayer();
await state.playAudio();
verify(() => audioPlayer.stop()).called(1);
});
test(
'does not set the loop mode when loop is true and already set',
() async {
when(() => audioPlayer.loopMode).thenReturn(LoopMode.all);
final state = TestStateWithAudioPlayer();
await state.playAudio(loop: true);
verifyNever(() => audioPlayer.setLoopMode(LoopMode.all));
},
);
test('pauses, seeks, and plays the audio track', () async {
final state = TestStateWithAudioPlayer();
await state.playAudio();
final verificationResults = verifyInOrder([
() => audioPlayer.pause(),
() => audioPlayer.seek(Duration.zero),
() => audioPlayer.play(),
]);
for (final f in verificationResults) {
f.called(1);
}
});
});
group('stopAudio', () {
test('calls stop on the audio player', () async {
final state = TestStateWithAudioPlayer();
await state.stopAudio();
verify(() => audioPlayer.stop()).called(1);
});
});
group('dispose', () {
testWidgets('calls dispose on the audio player', (tester) async {
await tester.pumpWidget(TestWidgetWithAudioPlayer());
await tester.pumpAndSettle();
await tester.pumpWidget(Container());
verify(() => audioPlayer.dispose()).called(1);
});
testWidgets('waits for playing to end before calling dispose',
(tester) async {
final completer = Completer<void>();
when(audioPlayer.play).thenAnswer((_) => completer.future);
await tester.pumpWidget(TestWidgetWithAudioPlayer());
await tester.pumpAndSettle();
final playFuture = tester
.state<TestStateWithAudioPlayer>(
find.byType(TestWidgetWithAudioPlayer),
)
.playAudio();
await tester.pumpWidget(Container());
verifyNever(() => audioPlayer.dispose());
completer.complete();
await playFuture;
await tester.pumpAndSettle();
verify(() => audioPlayer.dispose()).called(1);
});
testWidgets('still disposes even when playing fails', (tester) async {
when(audioPlayer.play).thenThrow(Exception());
await tester.pumpWidget(TestWidgetWithAudioPlayer());
await tester.pumpAndSettle();
final playFuture = tester
.state<TestStateWithAudioPlayer>(
find.byType(TestWidgetWithAudioPlayer),
)
.playAudio();
await tester.pumpWidget(Container());
await playFuture;
await tester.pumpAndSettle();
verify(() => audioPlayer.dispose()).called(1);
});
});
});
}

View File

@ -0,0 +1,30 @@
import 'package:bloc_test/bloc_test.dart';
import 'package:flutter_test/flutter_test.dart';
import 'package:holobooth/audio_player/audio_player.dart';
void main() {
group('MuteSoundBloc', () {
test('initial state is unmuted', () {
expect(MuteSoundBloc().state, MuteSoundState(isMuted: false));
});
blocTest<MuteSoundBloc, MuteSoundState>(
'emits muted state when mute is toggled on',
build: MuteSoundBloc.new,
act: (bloc) => bloc.add(MuteSoundToggled()),
expect: () => [
MuteSoundState(isMuted: true),
],
);
blocTest<MuteSoundBloc, MuteSoundState>(
'emits unmuted state when mute is toggled off',
build: MuteSoundBloc.new,
seed: () => MuteSoundState(isMuted: true),
act: (bloc) => bloc.add(MuteSoundToggled()),
expect: () => [
MuteSoundState(isMuted: false),
],
);
});
}

View File

@ -0,0 +1,13 @@
import 'package:flutter_test/flutter_test.dart';
import 'package:holobooth/audio_player/audio_player.dart';
void main() {
group('MuteSoundEvent', () {
test('uses value equality', () {
final a = MuteSoundToggled();
final b = MuteSoundToggled();
expect(a, equals(b));
});
});
}

View File

@ -0,0 +1,15 @@
import 'package:flutter_test/flutter_test.dart';
import 'package:holobooth/audio_player/audio_player.dart';
void main() {
group('MuteSoundState', () {
test('uses value equality', () {
final a = MuteSoundState(isMuted: true);
final b = MuteSoundState(isMuted: true);
final c = MuteSoundState(isMuted: false);
expect(a, equals(b));
expect(a, isNot(equals(c)));
});
});
}

View File

@ -0,0 +1,309 @@
import 'dart:async';
import 'package:bloc_test/bloc_test.dart';
import 'package:flutter/material.dart';
import 'package:flutter/services.dart';
import 'package:flutter_test/flutter_test.dart';
import 'package:holobooth/audio_player/audio_player.dart';
import 'package:just_audio/just_audio.dart' as just_audio;
import 'package:mocktail/mocktail.dart';
import '../../helpers/helpers.dart';
class _MockAudioPlayer extends Mock implements just_audio.AudioPlayer {}
class _MockMuteSoundBloc extends MockBloc<MuteSoundEvent, MuteSoundState>
implements MuteSoundBloc {}
void main() {
TestWidgetsFlutterBinding.ensureInitialized();
setUpAll(() {
registerFallbackValue(just_audio.LoopMode.all);
});
group('AudioPlayer', () {
const child = SizedBox(key: Key('child'));
late just_audio.AudioPlayer audioPlayer;
setUp(() {
audioPlayer = _MockAudioPlayer();
when(audioPlayer.pause).thenAnswer((_) async {});
when(audioPlayer.play).thenAnswer((_) async {});
when(audioPlayer.stop).thenAnswer((_) async {});
when(audioPlayer.dispose).thenAnswer((_) async {});
when(() => audioPlayer.setLoopMode(any())).thenAnswer((_) async {});
when(() => audioPlayer.setVolume(any())).thenAnswer((_) async {});
when(() => audioPlayer.loopMode).thenReturn(just_audio.LoopMode.off);
when(() => audioPlayer.seek(any())).thenAnswer((_) async {});
when(() => audioPlayer.setAsset(any()))
.thenAnswer((_) async => Duration.zero);
AudioPlayer.audioPlayerOverride = audioPlayer;
const MethodChannel('com.ryanheise.audio_session')
.setMockMethodCallHandler((call) async {
if (call.method == 'getConfiguration') {
return {};
}
});
});
tearDown(() {
AudioPlayer.audioPlayerOverride = null;
});
testWidgets('sets the audio asset', (tester) async {
await tester.pumpApp(
AudioPlayer(
audioAssetPath: 'audioAssetPath',
child: child,
),
);
verify(() => audioPlayer.setAsset('audioAssetPath')).called(1);
});
testWidgets('plays the audio when autoplay is true', (tester) async {
await tester.pumpApp(
AudioPlayer(
audioAssetPath: 'audioAssetPath',
autoplay: true,
child: child,
),
);
verify(audioPlayer.play).called(1);
});
testWidgets('renders the child', (tester) async {
await tester.pumpApp(
AudioPlayer(
audioAssetPath: 'audioAssetPath',
child: child,
),
);
expect(find.byWidget(child), findsOneWidget);
});
testWidgets(
'sets the loop mode when loop is true and not already set',
(tester) async {
await tester.pumpApp(
AudioPlayer(
audioAssetPath: 'audioAssetPath',
loop: true,
child: child,
),
);
verify(
() => audioPlayer.setLoopMode(just_audio.LoopMode.all),
).called(1);
},
);
testWidgets(
'does not set the loop mode when loop is true and already set',
(tester) async {
when(() => audioPlayer.loopMode).thenReturn(just_audio.LoopMode.all);
await tester.pumpApp(
AudioPlayer(
audioAssetPath: 'audioAssetPath',
loop: true,
child: child,
),
);
verifyNever(() => audioPlayer.setLoopMode(just_audio.LoopMode.all));
},
);
testWidgets('sets the volume when muted', (tester) async {
final muteSoundBloc = _MockMuteSoundBloc();
when(() => muteSoundBloc.state).thenReturn(MuteSoundState(isMuted: true));
await tester.pumpApp(
AudioPlayer(
audioAssetPath: 'audioAssetPath',
child: child,
),
muteSoundBloc: muteSoundBloc,
);
verify(() => audioPlayer.setVolume(0)).called(1);
});
group('playAudio', () {
testWidgets('calls stop on the audio player if playing fails',
(tester) async {
final controller = AudioPlayerController();
when(audioPlayer.play).thenThrow(Exception());
await tester.pumpApp(
AudioPlayer(
audioAssetPath: 'audioAssetPath',
controller: controller,
child: child,
),
);
await controller.playAudio();
verify(() => audioPlayer.stop()).called(1);
});
testWidgets('pauses, seeks, and plays the audio track', (tester) async {
final controller = AudioPlayerController();
await tester.pumpApp(
AudioPlayer(
audioAssetPath: 'audioAssetPath',
controller: controller,
child: child,
),
);
await controller.playAudio();
final verificationResults = verifyInOrder([
() => audioPlayer.pause(),
() => audioPlayer.seek(Duration.zero),
() => audioPlayer.play(),
]);
for (final f in verificationResults) {
f.called(1);
}
});
});
group('stopAudio', () {
testWidgets('calls stop on the audio player', (tester) async {
final controller = AudioPlayerController();
await tester.pumpApp(
AudioPlayer(
audioAssetPath: 'audioAssetPath',
controller: controller,
child: child,
),
);
await controller.stopAudio();
verify(() => audioPlayer.stop()).called(1);
});
});
group('dispose', () {
testWidgets('calls dispose on the audio player', (tester) async {
await tester.pumpApp(
AudioPlayer(
audioAssetPath: 'audioAssetPath',
child: child,
),
);
await tester.pumpAndSettle();
await tester.pumpWidget(Container());
verify(() => audioPlayer.dispose()).called(1);
});
testWidgets('waits for playing to end before calling dispose',
(tester) async {
final completer = Completer<void>();
final controller = AudioPlayerController();
when(audioPlayer.play).thenAnswer((_) => completer.future);
await tester.pumpApp(
AudioPlayer(
audioAssetPath: 'audioAssetPath',
controller: controller,
child: child,
),
);
await tester.pumpAndSettle();
final playFuture = controller.playAudio();
await tester.pumpWidget(Container());
verifyNever(() => audioPlayer.dispose());
completer.complete();
await playFuture;
await tester.pumpAndSettle();
verify(() => audioPlayer.dispose()).called(1);
});
testWidgets('still disposes even when playing fails', (tester) async {
final controller = AudioPlayerController();
when(audioPlayer.play).thenThrow(Exception());
await tester.pumpApp(
AudioPlayer(
audioAssetPath: 'audioAssetPath',
controller: controller,
child: child,
),
);
await tester.pumpAndSettle();
final playFuture = controller.playAudio();
await tester.pumpWidget(Container());
await playFuture;
await tester.pumpAndSettle();
verify(() => audioPlayer.dispose()).called(1);
});
});
group('listens to the mute sound bloc', () {
testWidgets('sets the volume to 0 when muted', (tester) async {
final muteSoundBloc = _MockMuteSoundBloc();
whenListen(
muteSoundBloc,
Stream.fromIterable([
MuteSoundState(isMuted: false),
MuteSoundState(isMuted: true),
MuteSoundState(isMuted: true),
]),
initialState: MuteSoundState(isMuted: false),
);
await tester.pumpApp(
AudioPlayer(
audioAssetPath: 'audioAssetPath',
child: child,
),
muteSoundBloc: muteSoundBloc,
);
await tester.pumpAndSettle();
verify(() => audioPlayer.setVolume(0)).called(1);
});
testWidgets('sets the volume to 1 when unmuted', (tester) async {
final muteSoundBloc = _MockMuteSoundBloc();
whenListen(
muteSoundBloc,
Stream.fromIterable([
MuteSoundState(isMuted: true),
MuteSoundState(isMuted: false),
MuteSoundState(isMuted: false),
]),
initialState: MuteSoundState(isMuted: true),
);
await tester.pumpApp(
AudioPlayer(
audioAssetPath: 'audioAssetPath',
child: child,
),
muteSoundBloc: muteSoundBloc,
);
await tester.pumpAndSettle();
verify(() => audioPlayer.setVolume(1)).called(1);
});
});
});
}

View File

@ -0,0 +1,59 @@
import 'package:bloc_test/bloc_test.dart';
import 'package:flutter/material.dart';
import 'package:flutter_test/flutter_test.dart';
import 'package:holobooth/audio_player/audio_player.dart';
import 'package:mocktail/mocktail.dart';
import '../../helpers/helpers.dart';
class _MockMuteSoundBloc extends MockBloc<MuteSoundEvent, MuteSoundState>
implements MuteSoundBloc {}
void main() {
group('MuteButton', () {
late MuteSoundBloc muteSoundBloc;
setUp(() {
muteSoundBloc = _MockMuteSoundBloc();
});
testWidgets('renders correct icon when muted', (tester) async {
when(() => muteSoundBloc.state).thenReturn(MuteSoundState(isMuted: true));
await tester.pumpApp(
MuteButton(),
muteSoundBloc: muteSoundBloc,
);
expect(
find.widgetWithIcon(OutlinedButton, Icons.volume_off),
findsOneWidget,
);
});
testWidgets('renders correct icon when unmuted', (tester) async {
when(() => muteSoundBloc.state).thenReturn(
MuteSoundState(
isMuted: false,
),
);
await tester.pumpApp(
MuteButton(),
muteSoundBloc: muteSoundBloc,
);
expect(
find.widgetWithIcon(OutlinedButton, Icons.volume_up),
findsOneWidget,
);
});
testWidgets('adds event to bloc when pressed', (tester) async {
when(() => muteSoundBloc.state).thenReturn(MuteSoundState(isMuted: true));
await tester.pumpApp(
MuteButton(),
muteSoundBloc: muteSoundBloc,
);
await tester.tap(find.byType(MuteButton));
verify(() => muteSoundBloc.add(MuteSoundToggled())).called(1);
});
});
}

View File

@ -1,25 +1,25 @@
import 'package:flutter/material.dart';
import 'package:flutter/services.dart';
import 'package:flutter_test/flutter_test.dart';
import 'package:holobooth/audio_player/audio_player.dart';
import 'package:holobooth/convert/convert.dart';
import 'package:just_audio/just_audio.dart';
import 'package:just_audio/just_audio.dart' as just_audio;
import 'package:mocktail/mocktail.dart';
import '../../helpers/helpers.dart';
class _MockAudioPlayer extends Mock implements AudioPlayer {}
class _MockAudioPlayer extends Mock implements just_audio.AudioPlayer {}
void main() {
TestWidgetsFlutterBinding.ensureInitialized();
late AudioPlayer audioPlayer;
late just_audio.AudioPlayer audioPlayer;
setUpAll(() {
registerFallbackValue(LoopMode.off);
registerFallbackValue(just_audio.LoopMode.off);
});
setUp(() {
audioPlayer = _MockAudioPlayer();
when(() => audioPlayer.loopMode).thenReturn(just_audio.LoopMode.off);
when(() => audioPlayer.setAsset(any())).thenAnswer((_) async => null);
when(() => audioPlayer.setLoopMode(any())).thenAnswer((_) async {});
when(() => audioPlayer.play()).thenAnswer((_) async {});
@ -28,11 +28,11 @@ void main() {
when(() => audioPlayer.playerStateStream).thenAnswer(
(_) => Stream.fromIterable(
[
PlayerState(true, ProcessingState.ready),
just_audio.PlayerState(true, just_audio.ProcessingState.ready),
],
),
);
AudioPlayerMixin.audioPlayerOverride = audioPlayer;
AudioPlayer.audioPlayerOverride = audioPlayer;
const MethodChannel('com.ryanheise.audio_session')
.setMockMethodCallHandler((call) async {
@ -43,7 +43,7 @@ void main() {
});
tearDown(() {
AudioPlayerMixin.audioPlayerOverride = null;
AudioPlayer.audioPlayerOverride = null;
});
group('ConvertLoadingView', () {
@ -61,11 +61,9 @@ void main() {
);
testWidgets('renders correctly', (tester) async {
await tester.pumpWidget(
const MaterialApp(
home: ConvertLoadingAnimation(
dimension: 300,
),
await tester.pumpApp(
ConvertLoadingAnimation(
dimension: 300,
),
);

View File

@ -1,18 +1,33 @@
import 'package:flutter/material.dart';
import 'package:flutter_test/flutter_test.dart';
import 'package:holobooth/audio_player/audio_player.dart';
import 'package:holobooth/footer/footer.dart';
import 'package:holobooth/widgets/widgets.dart';
import 'package:holobooth_ui/holobooth_ui.dart';
import 'package:platform_helper/platform_helper.dart';
import '../../helpers/helpers.dart';
class _FakePlatformHelper extends Fake implements PlatformHelper {
_FakePlatformHelper({required this.isMobile});
@override
final bool isMobile;
}
void main() {
group('FullFooter', () {
testWidgets(
'renders elements on small screen',
(tester) async {
tester.setSmallDisplaySize();
await tester.pumpApp(FullFooter());
await tester.pumpApp(
FullFooter(
platformHelper: _FakePlatformHelper(
isMobile: false,
),
),
);
expect(find.byType(FlutterIconLink), findsOneWidget);
expect(find.byType(FirebaseIconLink), findsOneWidget);
expect(find.byType(TensorflowIconLink), findsOneWidget);
@ -24,11 +39,36 @@ void main() {
'renders elements on small screen when showIconsForSmall is false',
(tester) async {
tester.setSmallDisplaySize();
await tester.pumpApp(FullFooter(showIconsForSmall: false));
await tester.pumpApp(
FullFooter(
showIconsForSmall: false,
platformHelper: _FakePlatformHelper(
isMobile: false,
),
),
);
expect(find.byType(FlutterForwardFooterLink), findsOneWidget);
expect(find.byType(HowItsMadeFooterLink), findsOneWidget);
expect(find.byType(FooterTermsOfServiceLink), findsOneWidget);
expect(find.byType(FooterPrivacyPolicyLink), findsOneWidget);
expect(find.byType(MuteButton), findsOneWidget);
},
);
testWidgets(
'does not render mute button on mobile for small screen size',
(tester) async {
tester.setSmallDisplaySize();
await tester.pumpApp(
FullFooter(
showIconsForSmall: false,
platformHelper: _FakePlatformHelper(
isMobile: true,
),
),
);
expect(find.byType(MuteButton), findsNothing);
},
);
@ -36,7 +76,13 @@ void main() {
'renders elements on large screen',
(tester) async {
tester.setDisplaySize(const Size(HoloboothBreakpoints.large, 800));
await tester.pumpApp(FullFooter());
await tester.pumpApp(
FullFooter(
platformHelper: _FakePlatformHelper(
isMobile: false,
),
),
);
expect(find.byType(FlutterIconLink), findsOneWidget);
expect(find.byType(FirebaseIconLink), findsOneWidget);
expect(find.byType(TensorflowIconLink), findsOneWidget);
@ -45,6 +91,25 @@ void main() {
expect(find.byType(HowItsMadeFooterLink), findsOneWidget);
expect(find.byType(FooterTermsOfServiceLink), findsOneWidget);
expect(find.byType(FooterPrivacyPolicyLink), findsOneWidget);
expect(find.byType(MuteButton), findsOneWidget);
},
);
testWidgets(
'does not render mute button on mobile for large screen size',
(tester) async {
tester.setDisplaySize(const Size(HoloboothBreakpoints.large, 800));
await tester.pumpApp(FullFooter());
await tester.pumpApp(
FullFooter(
platformHelper: _FakePlatformHelper(
isMobile: true,
),
),
);
expect(find.byType(MuteButton), findsNothing);
},
);
});

View File

@ -1,11 +1,20 @@
import 'package:flutter/material.dart';
import 'package:flutter_test/flutter_test.dart';
import 'package:holobooth/audio_player/audio_player.dart';
import 'package:holobooth/footer/footer.dart';
import 'package:holobooth/widgets/widgets.dart';
import 'package:holobooth_ui/holobooth_ui.dart';
import 'package:platform_helper/platform_helper.dart';
import '../../helpers/helpers.dart';
class _FakePlatformHelper extends Fake implements PlatformHelper {
_FakePlatformHelper({required this.isMobile});
@override
final bool isMobile;
}
void main() {
group('SimplifiedFooter', () {
testWidgets(
@ -23,10 +32,33 @@ void main() {
'render elements on large screen size',
(WidgetTester tester) async {
tester.setDisplaySize(const Size(HoloboothBreakpoints.large, 800));
await tester.pumpApp(SimplifiedFooter());
await tester.pumpApp(
SimplifiedFooter(
platformHelper: _FakePlatformHelper(
isMobile: false,
),
),
);
expect(find.byType(FlutterIconLink), findsOneWidget);
expect(find.byType(FirebaseIconLink), findsOneWidget);
expect(find.byType(TensorflowIconLink), findsOneWidget);
expect(find.byType(MuteButton), findsOneWidget);
},
);
testWidgets(
'does not render mute button on mobile with large screen size',
(WidgetTester tester) async {
tester.setDisplaySize(const Size(HoloboothBreakpoints.large, 800));
await tester.pumpApp(
SimplifiedFooter(
platformHelper: _FakePlatformHelper(
isMobile: true,
),
),
);
expect(find.byType(MuteButton), findsNothing);
},
);
});

View File

@ -1,12 +1,14 @@
import 'dart:typed_data';
import 'package:avatar_detector_repository/avatar_detector_repository.dart';
import 'package:bloc_test/bloc_test.dart';
import 'package:convert_repository/convert_repository.dart';
import 'package:download_repository/download_repository.dart';
import 'package:flutter/material.dart';
import 'package:flutter_bloc/flutter_bloc.dart';
import 'package:flutter_localizations/flutter_localizations.dart';
import 'package:flutter_test/flutter_test.dart';
import 'package:holobooth/audio_player/audio_player.dart';
import 'package:holobooth/l10n/l10n.dart';
import 'package:mocktail/mocktail.dart';
import 'package:mocktail_image_network/mocktail_image_network.dart';
@ -29,12 +31,20 @@ class _MockAvatarDetectorRepository extends Mock
}
}
class _MockMuteSoundBloc extends MockBloc<MuteSoundEvent, MuteSoundState>
implements MuteSoundBloc {
_MockMuteSoundBloc() {
when(() => state).thenReturn(MuteSoundState(isMuted: false));
}
}
extension PumpApp on WidgetTester {
Future<void> pumpApp(
Widget widget, {
AvatarDetectorRepository? avatarDetectorRepository,
ConvertRepository? convertRepository,
DownloadRepository? downloadRepository,
MuteSoundBloc? muteSoundBloc,
}) async {
return mockNetworkImages(() async {
return pumpWidget(
@ -51,13 +61,16 @@ extension PumpApp on WidgetTester {
value: downloadRepository ?? _MockDownloadRepository(),
),
],
child: MaterialApp(
localizationsDelegates: const [
AppLocalizations.delegate,
GlobalMaterialLocalizations.delegate,
],
supportedLocales: AppLocalizations.supportedLocales,
home: widget,
child: BlocProvider(
create: (context) => muteSoundBloc ?? _MockMuteSoundBloc(),
child: MaterialApp(
localizationsDelegates: const [
AppLocalizations.delegate,
GlobalMaterialLocalizations.delegate,
],
supportedLocales: AppLocalizations.supportedLocales,
home: widget,
),
),
),
);

View File

@ -7,7 +7,7 @@ import 'package:holobooth/assets/assets.dart';
import 'package:holobooth/audio_player/audio_player.dart';
import 'package:holobooth/in_experience_selection/in_experience_selection.dart';
import 'package:holobooth/photo_booth/photo_booth.dart';
import 'package:just_audio/just_audio.dart';
import 'package:just_audio/just_audio.dart' as just_audio;
import 'package:mocktail/mocktail.dart';
import '../../helpers/helpers.dart';
@ -19,17 +19,17 @@ class _MockInExperienceSelectionBloc
class _MockPhotoBoothBloc extends MockBloc<PhotoBoothEvent, PhotoBoothState>
implements PhotoBoothBloc {}
class _MockAudioPlayer extends Mock implements AudioPlayer {}
class _MockAudioPlayer extends Mock implements just_audio.AudioPlayer {}
void main() {
setUpAll(() {
registerFallbackValue(LoopMode.all);
registerFallbackValue(just_audio.LoopMode.all);
});
group('PropsSelectionTabBarView', () {
late InExperienceSelectionBloc inExperienceSelectionBloc;
late PhotoBoothBloc photoBoothBloc;
late AudioPlayer audioPlayer;
late just_audio.AudioPlayer audioPlayer;
setUp(() {
inExperienceSelectionBloc = _MockInExperienceSelectionBloc();
@ -45,12 +45,12 @@ void main() {
when(audioPlayer.play).thenAnswer((_) async {});
when(audioPlayer.dispose).thenAnswer((_) async {});
when(() => audioPlayer.setLoopMode(any())).thenAnswer((_) async {});
when(() => audioPlayer.loopMode).thenReturn(LoopMode.off);
when(() => audioPlayer.loopMode).thenReturn(just_audio.LoopMode.off);
when(() => audioPlayer.seek(any())).thenAnswer((_) async {});
when(() => audioPlayer.setAsset(any()))
.thenAnswer((_) async => Duration.zero);
AudioPlayerMixin.audioPlayerOverride = audioPlayer;
AudioPlayer.audioPlayerOverride = audioPlayer;
const MethodChannel('com.ryanheise.audio_session')
.setMockMethodCallHandler((call) async {
@ -61,7 +61,7 @@ void main() {
});
tearDown(() {
AudioPlayerMixin.audioPlayerOverride = null;
AudioPlayer.audioPlayerOverride = null;
});
testWidgets(

View File

@ -7,16 +7,16 @@ import 'package:holobooth/audio_player/audio_player.dart';
import 'package:holobooth/l10n/l10n.dart';
import 'package:holobooth/landing/landing.dart';
import 'package:holobooth_ui/holobooth_ui.dart';
import 'package:just_audio/just_audio.dart';
import 'package:just_audio/just_audio.dart' as just_audio;
import 'package:mocktail/mocktail.dart';
import '../../helpers/helpers.dart';
class _MockAudioPlayer extends Mock implements AudioPlayer {}
class _MockAudioPlayer extends Mock implements just_audio.AudioPlayer {}
void main() {
setUpAll(() {
registerFallbackValue(LoopMode.all);
registerFallbackValue(just_audio.LoopMode.all);
});
group('LandingPage', () {
@ -27,7 +27,7 @@ void main() {
});
group('LandingView', () {
late AudioPlayer audioPlayer;
late just_audio.AudioPlayer audioPlayer;
setUp(() {
audioPlayer = _MockAudioPlayer();
@ -36,12 +36,12 @@ void main() {
when(audioPlayer.play).thenAnswer((_) async {});
when(audioPlayer.dispose).thenAnswer((_) async {});
when(() => audioPlayer.setLoopMode(any())).thenAnswer((_) async {});
when(() => audioPlayer.loopMode).thenReturn(LoopMode.off);
when(() => audioPlayer.loopMode).thenReturn(just_audio.LoopMode.off);
when(() => audioPlayer.seek(any())).thenAnswer((_) async {});
when(() => audioPlayer.setAsset(any()))
.thenAnswer((_) async => Duration.zero);
AudioPlayerMixin.audioPlayerOverride = audioPlayer;
AudioPlayer.audioPlayerOverride = audioPlayer;
const MethodChannel('com.ryanheise.audio_session')
.setMockMethodCallHandler((call) async {
@ -52,7 +52,7 @@ void main() {
});
tearDown(() {
AudioPlayerMixin.audioPlayerOverride = null;
AudioPlayer.audioPlayerOverride = null;
});
testWidgets('renders background', (tester) async {

View File

@ -14,7 +14,7 @@ import 'package:holobooth/avatar_detector/avatar_detector.dart';
import 'package:holobooth/convert/convert.dart';
import 'package:holobooth/in_experience_selection/in_experience_selection.dart';
import 'package:holobooth/photo_booth/photo_booth.dart';
import 'package:just_audio/just_audio.dart';
import 'package:just_audio/just_audio.dart' as just_audio;
import 'package:mocktail/mocktail.dart';
import 'package:plugin_platform_interface/plugin_platform_interface.dart';
import 'package:screen_recorder/screen_recorder.dart';
@ -42,7 +42,7 @@ class _MockAvatarDetectorBloc
class _MockImage extends Mock implements ui.Image {}
class _MockAudioPlayer extends Mock implements AudioPlayer {}
class _MockAudioPlayer extends Mock implements just_audio.AudioPlayer {}
class _MockConvertRepository extends Mock implements ConvertRepository {}
@ -50,13 +50,13 @@ void main() {
TestWidgetsFlutterBinding.ensureInitialized();
setUpAll(() {
registerFallbackValue(LoopMode.all);
registerFallbackValue(just_audio.LoopMode.all);
});
const cameraId = 1;
late CameraPlatform cameraPlatform;
late XFile xfile;
late AudioPlayer audioPlayer;
late just_audio.AudioPlayer audioPlayer;
late ConvertRepository convertRepository;
setUp(() async {
@ -114,12 +114,12 @@ void main() {
when(audioPlayer.stop).thenAnswer((_) async {});
when(audioPlayer.dispose).thenAnswer((_) async {});
when(() => audioPlayer.setLoopMode(any())).thenAnswer((_) async {});
when(() => audioPlayer.loopMode).thenReturn(LoopMode.off);
when(() => audioPlayer.loopMode).thenReturn(just_audio.LoopMode.off);
when(() => audioPlayer.seek(any())).thenAnswer((_) async {});
when(() => audioPlayer.setAsset(any()))
.thenAnswer((_) async => Duration.zero);
AudioPlayerMixin.audioPlayerOverride = audioPlayer;
AudioPlayer.audioPlayerOverride = audioPlayer;
const MethodChannel('com.ryanheise.audio_session')
.setMockMethodCallHandler((call) async {
@ -132,7 +132,7 @@ void main() {
tearDown(() {
CameraPlatform.instance = _MockCameraPlatform();
AudioPlayerMixin.audioPlayerOverride = null;
AudioPlayer.audioPlayerOverride = null;
});
group('PhotoBoothPage', () {

View File

@ -3,20 +3,20 @@ import 'package:flutter_test/flutter_test.dart';
import 'package:holobooth/assets/assets.dart';
import 'package:holobooth/audio_player/audio_player.dart';
import 'package:holobooth/photo_booth/widgets/widgets.dart';
import 'package:just_audio/just_audio.dart';
import 'package:just_audio/just_audio.dart' as just_audio;
import 'package:mocktail/mocktail.dart';
import '../../helpers/helpers.dart';
class _MockAudioPlayer extends Mock implements AudioPlayer {}
class _MockAudioPlayer extends Mock implements just_audio.AudioPlayer {}
void main() {
setUpAll(() {
registerFallbackValue(LoopMode.all);
registerFallbackValue(just_audio.LoopMode.all);
});
group('HoloBoothCharacterError', () {
late AudioPlayer audioPlayer;
late just_audio.AudioPlayer audioPlayer;
setUp(() {
audioPlayer = _MockAudioPlayer();
@ -25,12 +25,12 @@ void main() {
when(audioPlayer.play).thenAnswer((_) async {});
when(audioPlayer.dispose).thenAnswer((_) async {});
when(() => audioPlayer.setLoopMode(any())).thenAnswer((_) async {});
when(() => audioPlayer.loopMode).thenReturn(LoopMode.off);
when(() => audioPlayer.loopMode).thenReturn(just_audio.LoopMode.off);
when(() => audioPlayer.seek(any())).thenAnswer((_) async {});
when(() => audioPlayer.setAsset(any()))
.thenAnswer((_) async => Duration.zero);
AudioPlayerMixin.audioPlayerOverride = audioPlayer;
AudioPlayer.audioPlayerOverride = audioPlayer;
const MethodChannel('com.ryanheise.audio_session')
.setMockMethodCallHandler((call) async {
@ -41,7 +41,7 @@ void main() {
});
tearDown(() {
AudioPlayerMixin.audioPlayerOverride = null;
AudioPlayer.audioPlayerOverride = null;
});
test('can be instantiated', () {

View File

@ -6,12 +6,14 @@ import 'package:camera_platform_interface/camera_platform_interface.dart';
import 'package:flutter/material.dart';
import 'package:flutter_bloc/flutter_bloc.dart';
import 'package:flutter_test/flutter_test.dart';
import 'package:holobooth/audio_player/audio_player.dart';
import 'package:holobooth/avatar_detector/avatar_detector.dart';
import 'package:holobooth/camera/camera.dart';
import 'package:holobooth/in_experience_selection/in_experience_selection.dart';
import 'package:holobooth/photo_booth/photo_booth.dart';
import 'package:holobooth_ui/holobooth_ui.dart';
import 'package:mocktail/mocktail.dart';
import 'package:platform_helper/platform_helper.dart';
import 'package:plugin_platform_interface/plugin_platform_interface.dart';
import 'package:screen_recorder/screen_recorder.dart';
@ -40,6 +42,13 @@ class _MockExporter extends Mock implements Exporter {}
class _MockImage extends Mock implements ui.Image {}
class _FakePlatformHelper extends Fake implements PlatformHelper {
_FakePlatformHelper({required this.isMobile});
@override
final bool isMobile;
}
void main() {
group('PhotoboothBody', () {
late AvatarDetectorBloc avatarDetectorBloc;
@ -230,6 +239,40 @@ void main() {
},
);
testWidgets(
'renders MuteButton on a small display size',
(WidgetTester tester) async {
tester.setDisplaySize(Size(HoloboothBreakpoints.small, 800));
await tester.pumpSubject(
PhotoboothBody(
platformHelper: _FakePlatformHelper(isMobile: false),
),
photoBoothBloc: photoBoothBloc,
inExperienceSelectionBloc: inExperienceSelectionBloc,
avatarDetectorBloc: avatarDetectorBloc,
);
expect(find.byType(MuteButton), findsOneWidget);
},
);
testWidgets(
'does not render MuteButton on mobile for a small display size',
(WidgetTester tester) async {
tester.setDisplaySize(Size(HoloboothBreakpoints.small, 800));
await tester.pumpSubject(
PhotoboothBody(
platformHelper: _FakePlatformHelper(isMobile: true),
),
photoBoothBloc: photoBoothBloc,
inExperienceSelectionBloc: inExperienceSelectionBloc,
avatarDetectorBloc: avatarDetectorBloc,
);
expect(find.byType(MuteButton), findsNothing);
},
);
testWidgets(
'renders SelectionLayer if not [PhotoBoothState.isRecording, '
'PhotoBoothState.gettingReady] and avatarStatus.hasLoadedModel',