[camera_avfoundation] Set highest available resolution for ResolutionPreset.Max (#5245)

The current implementation of the camera plugin for iOS iterates through all other resolution presets when set to FLTResolutionPresetMax. This results in a resolution of 3840x2160 at most, while the native camera is able to produce 4032x3024.

This change should partially address these issues at least.
- https://github.com/flutter/flutter/issues/58163
- https://github.com/flutter/flutter/issues/78247
- https://github.com/flutter/flutter/issues/45906

P.S. I'm not really sure about tests - it seems that resolution presets are not covered by any. Any feedback is appreciated!
This commit is contained in:
Sergei
2024-03-01 18:11:14 +03:00
committed by GitHub
parent a9c68b8335
commit 06c1fa4311
9 changed files with 244 additions and 6 deletions

View File

@ -1,3 +1,7 @@
## 0.9.14+1
* Fixes bug where max resolution preset does not produce highest available resolution on iOS.
## 0.9.14
* Adds support to HEIF format.

View File

@ -22,6 +22,7 @@
97C146FC1CF9000F007C117D /* Main.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 97C146FA1CF9000F007C117D /* Main.storyboard */; };
97C146FE1CF9000F007C117D /* Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = 97C146FD1CF9000F007C117D /* Assets.xcassets */; };
97C147011CF9000F007C117D /* LaunchScreen.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 97C146FF1CF9000F007C117D /* LaunchScreen.storyboard */; };
CEF6611A2B5E36A500D33FD4 /* CameraSessionPresetsTests.m in Sources */ = {isa = PBXBuildFile; fileRef = CEF661192B5E36A500D33FD4 /* CameraSessionPresetsTests.m */; };
E01EE4A82799F3A5008C1950 /* QueueUtilsTests.m in Sources */ = {isa = PBXBuildFile; fileRef = E01EE4A72799F3A5008C1950 /* QueueUtilsTests.m */; };
E032F250279F5E94009E9028 /* CameraCaptureSessionQueueRaceConditionTests.m in Sources */ = {isa = PBXBuildFile; fileRef = E032F24F279F5E94009E9028 /* CameraCaptureSessionQueueRaceConditionTests.m */; };
E04F108627A87CA600573D0C /* FLTSavePhotoDelegateTests.m in Sources */ = {isa = PBXBuildFile; fileRef = E04F108527A87CA600573D0C /* FLTSavePhotoDelegateTests.m */; };
@ -89,6 +90,7 @@
97C147021CF9000F007C117D /* Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist.xml; path = Info.plist; sourceTree = "<group>"; };
9C5CC6CAD53AD388B2694F3A /* Pods-RunnerTests.debug.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-RunnerTests.debug.xcconfig"; path = "Target Support Files/Pods-RunnerTests/Pods-RunnerTests.debug.xcconfig"; sourceTree = "<group>"; };
A24F9E418BA48BCC7409B117 /* Pods-RunnerTests.release.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-RunnerTests.release.xcconfig"; path = "Target Support Files/Pods-RunnerTests/Pods-RunnerTests.release.xcconfig"; sourceTree = "<group>"; };
CEF661192B5E36A500D33FD4 /* CameraSessionPresetsTests.m */ = {isa = PBXFileReference; indentWidth = 2; lastKnownFileType = sourcecode.c.objc; path = CameraSessionPresetsTests.m; sourceTree = "<group>"; };
E01EE4A72799F3A5008C1950 /* QueueUtilsTests.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = QueueUtilsTests.m; sourceTree = "<group>"; };
E032F24F279F5E94009E9028 /* CameraCaptureSessionQueueRaceConditionTests.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = CameraCaptureSessionQueueRaceConditionTests.m; sourceTree = "<group>"; };
E04F108527A87CA600573D0C /* FLTSavePhotoDelegateTests.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = FLTSavePhotoDelegateTests.m; sourceTree = "<group>"; };
@ -151,6 +153,7 @@
E0F95E3C27A32AB900699390 /* CameraPropertiesTests.m */,
788A065927B0E02900533D74 /* StreamingTest.m */,
43ED1536282570DE00EB00DE /* AvailableCamerasTest.m */,
CEF661192B5E36A500D33FD4 /* CameraSessionPresetsTests.m */,
);
path = RunnerTests;
sourceTree = "<group>";
@ -451,6 +454,7 @@
F6EE622F2710A6FC00905E4A /* MockFLTThreadSafeFlutterResult.m in Sources */,
E0CDBAC227CD9729002561D9 /* CameraTestUtils.m in Sources */,
334733EA2668111C00DCC49E /* CameraOrientationTests.m in Sources */,
CEF6611A2B5E36A500D33FD4 /* CameraSessionPresetsTests.m in Sources */,
E032F250279F5E94009E9028 /* CameraCaptureSessionQueueRaceConditionTests.m in Sources */,
788A065A27B0E02900533D74 /* StreamingTest.m in Sources */,
E0C6E2022770F01A00EA6AA3 /* ThreadSafeEventChannelTests.m in Sources */,

View File

@ -0,0 +1,78 @@
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
@import camera_avfoundation;
@import camera_avfoundation.Test;
@import AVFoundation;
@import XCTest;
#import <OCMock/OCMock.h>
#import "CameraTestUtils.h"
/// Includes test cases related to resolution presets setting operations for FLTCam class.
@interface FLTCamSessionPresetsTest : XCTestCase
@end
@implementation FLTCamSessionPresetsTest
- (void)testResolutionPresetWithBestFormat_mustUpdateCaptureSessionPreset {
  // When a best format is found for "max", the session must switch to input-priority
  // mode so the device's activeFormat drives the resolution.
  NSString *expectedPreset = AVCaptureSessionPresetInputPriority;

  id sessionMock = OCMClassMock([AVCaptureSession class]);
  OCMStub([sessionMock addInputWithNoConnections:[OCMArg any]]);

  // A device exposing exactly one format, which must be selected as the best one.
  id formatMock = OCMClassMock([AVCaptureDeviceFormat class]);
  id deviceMock = OCMClassMock([AVCaptureDevice class]);
  OCMStub([deviceMock formats]).andReturn(@[ formatMock ]);
  OCMExpect([deviceMock activeFormat]).andReturn(formatMock);
  OCMExpect([deviceMock lockForConfiguration:NULL]).andReturn(YES);
  OCMExpect([sessionMock setSessionPreset:expectedPreset]);

  FLTCreateCamWithVideoDimensionsForFormat(sessionMock, @"max", deviceMock,
                                           ^CMVideoDimensions(AVCaptureDeviceFormat *format) {
                                             // Deterministic dimensions for the single format.
                                             CMVideoDimensions dimensions;
                                             dimensions.width = 1;
                                             dimensions.height = 1;
                                             return dimensions;
                                           });

  OCMVerifyAll(deviceMock);
  OCMVerifyAll(sessionMock);
}
- (void)testResolutionPresetWithCanSetSessionPresetMax_mustUpdateCaptureSessionPreset {
  // With "max" and a session that accepts any preset, 3840x2160 is the expected choice.
  NSString *expectedPreset = AVCaptureSessionPreset3840x2160;

  id sessionMock = OCMClassMock([AVCaptureSession class]);
  OCMStub([sessionMock addInputWithNoConnections:[OCMArg any]]);
  // Simulate a session for which every preset is settable.
  OCMStub([sessionMock canSetSessionPreset:[OCMArg any]]).andReturn(YES);
  OCMExpect([sessionMock setSessionPreset:expectedPreset]);

  FLTCreateCamWithVideoCaptureSession(sessionMock, @"max");

  OCMVerifyAll(sessionMock);
}
- (void)testResolutionPresetWithCanSetSessionPresetUltraHigh_mustUpdateCaptureSessionPreset {
  // "ultraHigh" maps directly to the 3840x2160 session preset.
  NSString *expectedPreset = AVCaptureSessionPreset3840x2160;

  id sessionMock = OCMClassMock([AVCaptureSession class]);
  OCMStub([sessionMock addInputWithNoConnections:[OCMArg any]]);
  // Simulate a session for which every preset is settable.
  OCMStub([sessionMock canSetSessionPreset:[OCMArg any]]).andReturn(YES);
  // Expect that requesting "ultraHigh" updates the video capture session accordingly.
  OCMExpect([sessionMock setSessionPreset:expectedPreset]);

  FLTCreateCamWithVideoCaptureSession(sessionMock, @"ultraHigh");

  OCMVerifyAll(sessionMock);
}
@end

View File

@ -11,6 +11,24 @@ NS_ASSUME_NONNULL_BEGIN
/// @return an FLTCam object.
extern FLTCam *FLTCreateCamWithCaptureSessionQueue(dispatch_queue_t captureSessionQueue);
/// Creates an `FLTCam` with a given captureSession and resolutionPreset.
/// @param captureSession AVCaptureSession for video
/// @param resolutionPreset preset for camera's captureSession resolution
/// @return an FLTCam object.
extern FLTCam *FLTCreateCamWithVideoCaptureSession(AVCaptureSession *captureSession,
NSString *resolutionPreset);
/// Creates an `FLTCam` with a given captureSession and resolutionPreset.
/// Allows to inject a capture device and a block to compute the video dimensions.
/// @param captureSession AVCaptureSession for video
/// @param resolutionPreset preset for camera's captureSession resolution
/// @param captureDevice AVCaptureDevice to be used
/// @param videoDimensionsForFormat custom code to determine video dimensions
/// @return an FLTCam object.
extern FLTCam *FLTCreateCamWithVideoDimensionsForFormat(
AVCaptureSession *captureSession, NSString *resolutionPreset, AVCaptureDevice *captureDevice,
VideoDimensionsForFormat videoDimensionsForFormat);
/// Creates a test sample buffer.
/// @return a test sample buffer.
extern CMSampleBufferRef FLTCreateTestSampleBuffer(void);

View File

@ -12,11 +12,11 @@ FLTCam *FLTCreateCamWithCaptureSessionQueue(dispatch_queue_t captureSessionQueue
.andReturn(inputMock);
id videoSessionMock = OCMClassMock([AVCaptureSession class]);
OCMStub([videoSessionMock addInputWithNoConnections:[OCMArg any]]); // no-op
OCMStub([videoSessionMock addInputWithNoConnections:[OCMArg any]]);
OCMStub([videoSessionMock canSetSessionPreset:[OCMArg any]]).andReturn(YES);
id audioSessionMock = OCMClassMock([AVCaptureSession class]);
OCMStub([audioSessionMock addInputWithNoConnections:[OCMArg any]]); // no-op
OCMStub([audioSessionMock addInputWithNoConnections:[OCMArg any]]);
OCMStub([audioSessionMock canSetSessionPreset:[OCMArg any]]).andReturn(YES);
return [[FLTCam alloc] initWithCameraName:@"camera"
@ -29,6 +29,51 @@ FLTCam *FLTCreateCamWithCaptureSessionQueue(dispatch_queue_t captureSessionQueue
error:nil];
}
FLTCam *FLTCreateCamWithVideoCaptureSession(AVCaptureSession *captureSession,
                                            NSString *resolutionPreset) {
  // Stub device-input creation so FLTCam's initializer does not touch real hardware.
  id deviceInputMock = OCMClassMock([AVCaptureDeviceInput class]);
  OCMStub([deviceInputMock deviceInputWithDevice:[OCMArg any] error:[OCMArg setTo:nil]])
      .andReturn(deviceInputMock);

  // Audio session mock that accepts any preset and ignores added inputs.
  id audioSessionMock = OCMClassMock([AVCaptureSession class]);
  OCMStub([audioSessionMock addInputWithNoConnections:[OCMArg any]]);
  OCMStub([audioSessionMock canSetSessionPreset:[OCMArg any]]).andReturn(YES);

  dispatch_queue_t sessionQueue = dispatch_queue_create("capture_session_queue", NULL);
  return [[FLTCam alloc] initWithCameraName:@"camera"
                           resolutionPreset:resolutionPreset
                                enableAudio:true
                                orientation:UIDeviceOrientationPortrait
                        videoCaptureSession:captureSession
                        audioCaptureSession:audioSessionMock
                        captureSessionQueue:sessionQueue
                                      error:nil];
}
FLTCam *FLTCreateCamWithVideoDimensionsForFormat(
    AVCaptureSession *captureSession, NSString *resolutionPreset, AVCaptureDevice *captureDevice,
    VideoDimensionsForFormat videoDimensionsForFormat) {
  // Stub device-input creation so FLTCam's initializer does not touch real hardware.
  id deviceInputMock = OCMClassMock([AVCaptureDeviceInput class]);
  OCMStub([deviceInputMock deviceInputWithDevice:[OCMArg any] error:[OCMArg setTo:nil]])
      .andReturn(deviceInputMock);

  // Audio session mock that accepts any preset and ignores added inputs.
  id audioSessionMock = OCMClassMock([AVCaptureSession class]);
  OCMStub([audioSessionMock addInputWithNoConnections:[OCMArg any]]);
  OCMStub([audioSessionMock canSetSessionPreset:[OCMArg any]]).andReturn(YES);

  // Inject the provided device and dimension block so tests can fully control
  // resolution-selection behavior.
  dispatch_queue_t sessionQueue = dispatch_queue_create("capture_session_queue", NULL);
  CaptureDeviceFactory deviceFactory = ^AVCaptureDevice *(void) {
    return captureDevice;
  };
  return [[FLTCam alloc] initWithResolutionPreset:resolutionPreset
                                      enableAudio:true
                                      orientation:UIDeviceOrientationPortrait
                              videoCaptureSession:captureSession
                              audioCaptureSession:audioSessionMock
                              captureSessionQueue:sessionQueue
                             captureDeviceFactory:deviceFactory
                         videoDimensionsForFormat:videoDimensionsForFormat
                                            error:nil];
}
CMSampleBufferRef FLTCreateTestSampleBuffer(void) {
CVPixelBufferRef pixelBuffer;
CVPixelBufferCreate(kCFAllocatorDefault, 100, 100, kCVPixelFormatType_32BGRA, NULL, &pixelBuffer);

View File

@ -43,6 +43,7 @@ NS_ASSUME_NONNULL_BEGIN
orientation:(UIDeviceOrientation)orientation
captureSessionQueue:(dispatch_queue_t)captureSessionQueue
error:(NSError **)error;
- (void)start;
- (void)stop;
- (void)setDeviceOrientation:(UIDeviceOrientation)orientation;

View File

@ -86,6 +86,11 @@
/// Videos are written to disk by `videoAdaptor` on an internal queue managed by AVFoundation.
@property(strong, nonatomic) dispatch_queue_t photoIOQueue;
@property(assign, nonatomic) UIDeviceOrientation deviceOrientation;
/// A wrapper for CMVideoFormatDescriptionGetDimensions.
/// Allows for alternate implementations in tests.
@property(nonatomic, copy) VideoDimensionsForFormat videoDimensionsForFormat;
/// A wrapper for AVCaptureDevice creation to allow for dependency injection in tests.
@property(nonatomic, copy) CaptureDeviceFactory captureDeviceFactory;
@end
@implementation FLTCam
@ -116,6 +121,30 @@ NSString *const errorMethod = @"error";
audioCaptureSession:(AVCaptureSession *)audioCaptureSession
captureSessionQueue:(dispatch_queue_t)captureSessionQueue
error:(NSError **)error {
return [self initWithResolutionPreset:resolutionPreset
enableAudio:enableAudio
orientation:orientation
videoCaptureSession:videoCaptureSession
audioCaptureSession:videoCaptureSession
captureSessionQueue:captureSessionQueue
captureDeviceFactory:^AVCaptureDevice *(void) {
return [AVCaptureDevice deviceWithUniqueID:cameraName];
}
videoDimensionsForFormat:^CMVideoDimensions(AVCaptureDeviceFormat *format) {
return CMVideoFormatDescriptionGetDimensions(format.formatDescription);
}
error:error];
}
- (instancetype)initWithResolutionPreset:(NSString *)resolutionPreset
enableAudio:(BOOL)enableAudio
orientation:(UIDeviceOrientation)orientation
videoCaptureSession:(AVCaptureSession *)videoCaptureSession
audioCaptureSession:(AVCaptureSession *)audioCaptureSession
captureSessionQueue:(dispatch_queue_t)captureSessionQueue
captureDeviceFactory:(CaptureDeviceFactory)captureDeviceFactory
videoDimensionsForFormat:(VideoDimensionsForFormat)videoDimensionsForFormat
error:(NSError **)error {
self = [super init];
NSAssert(self, @"super init cannot be nil");
_resolutionPreset = FLTGetFLTResolutionPresetForString(resolutionPreset);
@ -136,7 +165,9 @@ NSString *const errorMethod = @"error";
_photoIOQueue = dispatch_queue_create("io.flutter.camera.photoIOQueue", NULL);
_videoCaptureSession = videoCaptureSession;
_audioCaptureSession = audioCaptureSession;
_captureDevice = [AVCaptureDevice deviceWithUniqueID:cameraName];
_captureDeviceFactory = captureDeviceFactory;
_captureDevice = captureDeviceFactory();
_videoDimensionsForFormat = videoDimensionsForFormat;
_flashMode = _captureDevice.hasFlash ? FLTFlashModeAuto : FLTFlashModeOff;
_exposureMode = FLTExposureModeAuto;
_focusMode = FLTFocusModeAuto;
@ -366,7 +397,24 @@ NSString *const errorMethod = @"error";
- (BOOL)setCaptureSessionPreset:(FLTResolutionPreset)resolutionPreset withError:(NSError **)error {
switch (resolutionPreset) {
case FLTResolutionPresetMax:
case FLTResolutionPresetMax: {
AVCaptureDeviceFormat *bestFormat =
[self highestResolutionFormatForCaptureDevice:_captureDevice];
if (bestFormat) {
_videoCaptureSession.sessionPreset = AVCaptureSessionPresetInputPriority;
if ([_captureDevice lockForConfiguration:NULL]) {
// Set the best device format found and finish the device configuration.
_captureDevice.activeFormat = bestFormat;
[_captureDevice unlockForConfiguration];
// Set the preview size based on values from the current capture device.
_previewSize =
CGSizeMake(_captureDevice.activeFormat.highResolutionStillImageDimensions.width,
_captureDevice.activeFormat.highResolutionStillImageDimensions.height);
break;
}
}
}
case FLTResolutionPresetUltraHigh:
if ([_videoCaptureSession canSetSessionPreset:AVCaptureSessionPreset3840x2160]) {
_videoCaptureSession.sessionPreset = AVCaptureSessionPreset3840x2160;
@ -422,6 +470,24 @@ NSString *const errorMethod = @"error";
return YES;
}
/// Finds the format with the highest resolution (by total pixel count,
/// width * height) among the formats reported by the given capture device.
///
/// Dimensions are obtained through the injected `videoDimensionsForFormat`
/// block so tests can substitute deterministic values.
///
/// @param captureDevice the device whose formats are searched.
/// @return the best format found, or nil if the device reports no formats.
- (AVCaptureDeviceFormat *)highestResolutionFormatForCaptureDevice:
    (AVCaptureDevice *)captureDevice {
  AVCaptureDeviceFormat *bestFormat = nil;
  NSUInteger maxPixelCount = 0;
  // Fix: iterate the formats of the `captureDevice` parameter. The previous
  // code read the `_captureDevice` ivar, silently ignoring the argument and
  // making the method incorrect for any other device.
  for (AVCaptureDeviceFormat *format in captureDevice.formats) {
    CMVideoDimensions res = self.videoDimensionsForFormat(format);
    // Widen before multiplying to avoid 32-bit overflow on large dimensions.
    NSUInteger pixelCount = (NSUInteger)res.width * (NSUInteger)res.height;
    if (pixelCount > maxPixelCount) {
      maxPixelCount = pixelCount;
      bestFormat = format;
    }
  }
  return bestFormat;
}
- (void)captureOutput:(AVCaptureOutput *)output
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
fromConnection:(AVCaptureConnection *)connection {
@ -935,7 +1001,7 @@ NSString *const errorMethod = @"error";
return;
}
_captureDevice = [AVCaptureDevice deviceWithUniqueID:cameraName];
_captureDevice = self.captureDeviceFactory();
AVCaptureConnection *oldConnection =
[_captureVideoOutput connectionWithMediaType:AVMediaTypeVideo];

View File

@ -5,6 +5,14 @@
#import "FLTCam.h"
#import "FLTSavePhotoDelegate.h"
/// Determines the video dimensions (width and height) for a given capture device format.
/// Used in tests to mock CMVideoFormatDescriptionGetDimensions.
typedef CMVideoDimensions (^VideoDimensionsForFormat)(AVCaptureDeviceFormat *);
/// Factory block returning an AVCaptureDevice.
/// Used in tests to inject a device into FLTCam.
typedef AVCaptureDevice * (^CaptureDeviceFactory)(void);
@interface FLTImageStreamHandler : NSObject <FlutterStreamHandler>
/// The queue on which `eventSink` property should be accessed.
@ -55,6 +63,19 @@
captureSessionQueue:(dispatch_queue_t)captureSessionQueue
error:(NSError **)error;
/// Initializes a camera instance.
/// Allows for testing with specified resolution, audio preference, orientation,
/// and direct access to capture sessions and blocks.
- (instancetype)initWithResolutionPreset:(NSString *)resolutionPreset
enableAudio:(BOOL)enableAudio
orientation:(UIDeviceOrientation)orientation
videoCaptureSession:(AVCaptureSession *)videoCaptureSession
audioCaptureSession:(AVCaptureSession *)audioCaptureSession
captureSessionQueue:(dispatch_queue_t)captureSessionQueue
captureDeviceFactory:(CaptureDeviceFactory)captureDeviceFactory
videoDimensionsForFormat:(VideoDimensionsForFormat)videoDimensionsForFormat
error:(NSError **)error;
/// Start streaming images.
- (void)startImageStreamWithMessenger:(NSObject<FlutterBinaryMessenger> *)messenger
imageStreamHandler:(FLTImageStreamHandler *)imageStreamHandler;

View File

@ -2,7 +2,8 @@ name: camera_avfoundation
description: iOS implementation of the camera plugin.
repository: https://github.com/flutter/packages/tree/main/packages/camera/camera_avfoundation
issue_tracker: https://github.com/flutter/flutter/issues?q=is%3Aissue+is%3Aopen+label%3A%22p%3A+camera%22
version: 0.9.14
version: 0.9.14+1
environment:
sdk: ^3.2.3