diff --git a/packages/camera/camera/CHANGELOG.md b/packages/camera/camera/CHANGELOG.md
index d0d58be6ca92..f37c6f60c5f2 100644
--- a/packages/camera/camera/CHANGELOG.md
+++ b/packages/camera/camera/CHANGELOG.md
@@ -1,3 +1,7 @@
+## 0.11.1+1
+
+* Fixes delivery of errors from `onCameraError`.
+
 ## 0.11.1
 
 * Adds API support query for image streaming.
diff --git a/packages/camera/camera/lib/src/camera_controller.dart b/packages/camera/camera/lib/src/camera_controller.dart
index 3f39d3202b8f..8fbe65fdb1f0 100644
--- a/packages/camera/camera/lib/src/camera_controller.dart
+++ b/packages/camera/camera/lib/src/camera_controller.dart
@@ -174,7 +174,7 @@ class CameraValue {
   }) {
     return CameraValue(
       isInitialized: isInitialized ?? this.isInitialized,
-      errorDescription: errorDescription,
+      errorDescription: errorDescription ?? this.errorDescription,
       previewSize: previewSize ?? this.previewSize,
       isRecordingVideo: isRecordingVideo ?? this.isRecordingVideo,
       isTakingPicture: isTakingPicture ?? this.isTakingPicture,
@@ -353,6 +353,15 @@ class CameraController extends ValueNotifier<CameraValue> {
         initializeCompleter.complete(event);
       }));
 
+      _unawaited(CameraPlatform.instance
+          .onCameraError(_cameraId)
+          .first
+          .then((CameraErrorEvent event) {
+        value = value.copyWith(
+          errorDescription: event.description,
+        );
+      }));
+
       await CameraPlatform.instance.initializeCamera(
         _cameraId,
         imageFormatGroup: imageFormatGroup ?? ImageFormatGroup.unknown,
diff --git a/packages/camera/camera/pubspec.yaml b/packages/camera/camera/pubspec.yaml
index 09899dfa5796..ccdd49602587 100644
--- a/packages/camera/camera/pubspec.yaml
+++ b/packages/camera/camera/pubspec.yaml
@@ -4,7 +4,7 @@ description: A Flutter plugin for controlling the camera. Supports previewing
   Dart.
 repository: https://github.com/flutter/packages/tree/main/packages/camera/camera
 issue_tracker: https://github.com/flutter/flutter/issues?q=is%3Aissue+is%3Aopen+label%3A%22p%3A+camera%22
-version: 0.11.1
+version: 0.11.1+1

 environment:
   sdk: ^3.6.0
diff --git a/packages/camera/camera/test/camera_test.dart b/packages/camera/camera/test/camera_test.dart
index 0c6a319397e0..0667247685b2 100644
--- a/packages/camera/camera/test/camera_test.dart
+++ b/packages/camera/camera/test/camera_test.dart
@@ -1441,6 +1441,20 @@ void main() {
             'This is a test error message',
           )));
     });
+
+    test('error from onCameraError is received', () async {
+      final CameraController cameraController = CameraController(
+          const CameraDescription(
+              name: 'cam',
+              lensDirection: CameraLensDirection.back,
+              sensorOrientation: 90),
+          ResolutionPreset.max);
+      await cameraController.initialize();
+
+      expect(cameraController.value.hasError, isTrue);
+      expect(cameraController.value.errorDescription,
+          mockOnCameraErrorEvent.description);
+    });
   });
 }
diff --git a/packages/camera/camera_avfoundation/CHANGELOG.md b/packages/camera/camera_avfoundation/CHANGELOG.md
index c0dad2343bc0..368a5ea96206 100644
--- a/packages/camera/camera_avfoundation/CHANGELOG.md
+++ b/packages/camera/camera_avfoundation/CHANGELOG.md
@@ -1,3 +1,8 @@
+## 0.9.18+14
+
+* Handles video and audio interruptions and errors.
+* Uses a single time offset for both video and audio.
+
 ## 0.9.18+13
 
 * Migrates test utils and mocks to Swift.
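The `copyWith` fix above matters because `CameraValue` is rebuilt via `copyWith` on every state update; previously a non-null `errorDescription` was silently dropped unless explicitly re-passed. Combined with the new `onCameraError` subscription, platform errors now persist on `controller.value`. A minimal sketch of how an app might observe them — the helper function below is illustrative, not part of this patch:

```dart
import 'package:camera/camera.dart';
import 'package:flutter/foundation.dart';

/// Illustrative helper: logs camera errors surfaced on the controller.
void watchForCameraErrors(CameraController controller) {
  controller.addListener(() {
    // hasError becomes true once errorDescription is set, e.g. by a
    // CameraErrorEvent delivered through CameraPlatform.onCameraError.
    if (controller.value.hasError) {
      debugPrint('Camera error: ${controller.value.errorDescription}');
    }
  });
}
```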
diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraTestUtils.swift b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraTestUtils.swift
index 1c978db8edd5..d6d43293aca9 100644
--- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraTestUtils.swift
+++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraTestUtils.swift
@@ -80,7 +80,9 @@ enum CameraTestUtils {

   /// Creates a test sample buffer.
   /// @return a test sample buffer.
-  static func createTestSampleBuffer() -> CMSampleBuffer {
+  static func createTestSampleBuffer(
+    timestamp: CMTime = CMTime.zero, duration: CMTime = CMTimeMake(value: 1, timescale: 44100)
+  ) -> CMSampleBuffer {
     var pixelBuffer: CVPixelBuffer?
     CVPixelBufferCreate(kCFAllocatorDefault, 100, 100, kCVPixelFormatType_32BGRA, nil, &pixelBuffer)
@@ -91,9 +93,9 @@ enum CameraTestUtils {
       formatDescriptionOut: &formatDescription)

     var timingInfo = CMSampleTimingInfo(
-      duration: CMTimeMake(value: 1, timescale: 44100),
-      presentationTimeStamp: CMTime.zero,
-      decodeTimeStamp: CMTime.invalid)
+      duration: duration,
+      presentationTimeStamp: timestamp,
+      decodeTimeStamp: .invalid)

     var sampleBuffer: CMSampleBuffer?
     CMSampleBufferCreateReadyWithImageBuffer(
@@ -108,24 +110,24 @@ enum CameraTestUtils {

   /// Creates a test audio sample buffer.
   /// @return a test audio sample buffer.
-  static func createTestAudioSampleBuffer() -> CMSampleBuffer? {
+  static func createTestAudioSampleBuffer(
+    timestamp: CMTime = .zero, duration: CMTime = CMTimeMake(value: 1, timescale: 44100)
+  ) -> CMSampleBuffer {
     var blockBuffer: CMBlockBuffer?
     CMBlockBufferCreateWithMemoryBlock(
       allocator: kCFAllocatorDefault,
       memoryBlock: nil,
-      blockLength: 100,
+      blockLength: Int(duration.value),
       blockAllocator: kCFAllocatorDefault,
       customBlockSource: nil,
       offsetToData: 0,
-      dataLength: 100,
+      dataLength: Int(duration.value),
       flags: kCMBlockBufferAssureMemoryNowFlag,
       blockBufferOut: &blockBuffer)

-    guard let blockBuffer = blockBuffer else { return nil }
-
     var formatDescription: CMFormatDescription?
     var basicDescription = AudioStreamBasicDescription(
-      mSampleRate: 44100,
+      mSampleRate: Float64(duration.timescale),
       mFormatID: kAudioFormatLinearPCM,
       mFormatFlags: 0,
       mBytesPerPacket: 1,
@@ -148,13 +150,13 @@ enum CameraTestUtils {
     var sampleBuffer: CMSampleBuffer?
     CMAudioSampleBufferCreateReadyWithPacketDescriptions(
       allocator: kCFAllocatorDefault,
-      dataBuffer: blockBuffer,
+      dataBuffer: blockBuffer!,
       formatDescription: formatDescription!,
-      sampleCount: 1,
-      presentationTimeStamp: .zero,
+      sampleCount: CMItemCount(duration.value),
+      presentationTimeStamp: timestamp,
       packetDescriptions: nil,
       sampleBufferOut: &sampleBuffer)

-    return sampleBuffer
+    return sampleBuffer!
   }
 }
diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockCaptureSession.swift b/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockCaptureSession.swift
index b249f08ea125..0283f4f9de09 100644
--- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockCaptureSession.swift
+++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockCaptureSession.swift
@@ -12,10 +12,12 @@ final class MockCaptureSession: NSObject, FLTCaptureSession {
   var stopRunningStub: (() -> Void)?
   var canSetSessionPresetStub: ((AVCaptureSession.Preset) -> Bool)?

+  var captureSession = AVCaptureSession()
   var _sessionPreset = AVCaptureSession.Preset.high
   var inputs = [AVCaptureInput]()
   var outputs = [AVCaptureOutput]()
   var automaticallyConfiguresApplicationAudioSession = false
+  var running = true

   var sessionPreset: AVCaptureSession.Preset {
     get {
diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/SampleBufferTests.swift b/packages/camera/camera_avfoundation/example/ios/RunnerTests/SampleBufferTests.swift
index 932f93ee5d5b..3485104add32 100644
--- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/SampleBufferTests.swift
+++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/SampleBufferTests.swift
@@ -319,4 +319,146 @@ final class CameraSampleBufferTests: XCTestCase {
       AVAudioSession.sharedInstance().category == .playAndRecord,
       "Category should be PlayAndRecord.")
   }
+
+  func testDidOutputSampleBufferMustUseSingleOffsetForVideoAndAudio() {
+    let (camera, writerMock, adaptorMock, inputMock, connectionMock) = createCamera()
+
+    var status = AVAssetWriter.Status.unknown
+    writerMock.startWritingStub = {
+      status = .writing
+      return true
+    }
+    writerMock.statusStub = {
+      return status
+    }
+
+    var appendedTime = CMTime.invalid
+
+    adaptorMock.appendStub = { buffer, time in
+      appendedTime = time
+      return true
+    }
+
+    inputMock.readyForMoreMediaData = true
+    inputMock.appendStub = { buffer in
+      appendedTime = CMSampleBufferGetPresentationTimeStamp(buffer)
+      return true
+    }
+
+    camera.startVideoRecording(completion: { error in }, messengerForStreaming: nil)
+
+    let appendVideoSample = { (time: Int64) in
+      camera.captureOutput(
+        camera.captureVideoOutput.avOutput,
+        didOutputSampleBuffer: CameraTestUtils.createTestSampleBuffer(
+          timestamp: CMTimeMake(value: time, timescale: 1),
+          duration: .invalid),
+        from: connectionMock)
+    }
+
+    let appendAudioSample = { (time: Int64, duration: Int64) in
+      camera.captureOutput(
+        nil,
+        didOutputSampleBuffer: CameraTestUtils.createTestAudioSampleBuffer(
+          timestamp: CMTimeMake(value: time, timescale: 1),
+          duration: CMTimeMake(value: duration, timescale: 1)),
+        from: connectionMock)
+    }
+
+    appendedTime = .invalid
+    camera.pauseVideoRecording()
+    camera.resumeVideoRecording()
+    appendVideoSample(1)
+    XCTAssertEqual(appendedTime, CMTimeMake(value: 1, timescale: 1))
+
+    appendedTime = .invalid
+    camera.pauseVideoRecording()
+    camera.resumeVideoRecording()
+    appendVideoSample(11)
+    XCTAssertEqual(appendedTime, .invalid)
+    appendVideoSample(12)
+    XCTAssertEqual(appendedTime, CMTimeMake(value: 2, timescale: 1))
+
+    appendedTime = .invalid
+    camera.pauseVideoRecording()
+    camera.resumeVideoRecording()
+    appendAudioSample(20, 2)
+    XCTAssertEqual(appendedTime, .invalid)
+    appendVideoSample(23)
+    XCTAssertEqual(appendedTime, CMTimeMake(value: 3, timescale: 1))
+
+    appendedTime = .invalid
+    camera.pauseVideoRecording()
+    camera.resumeVideoRecording()
+    appendVideoSample(28)
+    XCTAssertEqual(appendedTime, .invalid)
+    appendAudioSample(30, 2)
+    XCTAssertEqual(appendedTime, .invalid)
+    appendVideoSample(33)
+    XCTAssertEqual(appendedTime, .invalid)
+    appendAudioSample(32, 2)
+    XCTAssertEqual(appendedTime, CMTimeMake(value: 2, timescale: 1))
+  }
+
+  func testDidOutputSampleBufferMustConnectVideoAfterSessionInterruption() {
+    let (camera, writerMock, adaptorMock, inputMock, connectionMock) = createCamera()
+
+    var status = AVAssetWriter.Status.unknown
+    writerMock.startWritingStub = {
+      status = .writing
+      return true
+    }
+    writerMock.statusStub = {
+      return status
+    }
+
+    var appendedTime = CMTime.invalid
+
+    adaptorMock.appendStub = { buffer, time in
+      appendedTime = time
+      return true
+    }
+
+    inputMock.readyForMoreMediaData = true
+    inputMock.appendStub = { buffer in
+      appendedTime = CMSampleBufferGetPresentationTimeStamp(buffer)
+      return true
+    }
+
+    camera.startVideoRecording(completion: { error in }, messengerForStreaming: nil)
+
+    let appendVideoSample = { (time: Int64) in
+      camera.captureOutput(
+        camera.captureVideoOutput.avOutput,
+        didOutputSampleBuffer: CameraTestUtils.createTestSampleBuffer(
+          timestamp: CMTimeMake(value: time, timescale: 1),
+          duration: .invalid),
+        from: connectionMock)
+    }
+
+    let appendAudioSample = { (time: Int64, duration: Int64) in
+      camera.captureOutput(
+        nil,
+        didOutputSampleBuffer: CameraTestUtils.createTestAudioSampleBuffer(
+          timestamp: CMTimeMake(value: time, timescale: 1),
+          duration: CMTimeMake(value: duration, timescale: 1)),
+        from: connectionMock)
+    }
+
+    appendVideoSample(1)
+    appendAudioSample(1, 1)
+
+    NotificationCenter.default.post(
+      name: AVCaptureSession.wasInterruptedNotification,
+      object: camera.audioCaptureSession.captureSession)
+
+    appendedTime = .invalid
+    appendAudioSample(11, 1)
+    XCTAssertEqual(appendedTime, .invalid)
+    appendVideoSample(12)
+    XCTAssertEqual(appendedTime, CMTimeMake(value: 2, timescale: 1))
+    appendedTime = .invalid
+    appendAudioSample(12, 1)
+    XCTAssertEqual(appendedTime, CMTimeMake(value: 2, timescale: 1))
+  }
 }
diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTCam.m b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTCam.m
index e22b1f0dfa7d..4035bed4cfae 100644
--- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTCam.m
+++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTCam.m
@@ -58,8 +58,6 @@ @interface FLTCam () <AVCaptureVideoDataOutputSampleBufferDelegate,
-@property(readonly, nonatomic) NSObject<FLTCaptureSession> *videoCaptureSession;
-@property(readonly, nonatomic) NSObject<FLTCaptureSession> *audioCaptureSession;
 @property(readonly, nonatomic) NSObject<FLTCaptureInput> *captureVideoInput;

 /// Tracks the latest pixel buffer sent from AVFoundation's sample buffer delegate callback.
@@ -77,8 +75,7 @@ @interface FLTCam () <AVCaptureVideoDataOutputSampleBufferDelegate,
-@property(assign, nonatomic) CMTime lastVideoSampleTime;
-@property(assign, nonatomic) CMTime lastAudioSampleTime;
-@property(assign, nonatomic) CMTime videoTimeOffset;
-@property(assign, nonatomic) CMTime audioTimeOffset;
-@property(assign, nonatomic) BOOL videoIsDisconnected;
-@property(assign, nonatomic) BOOL audioIsDisconnected;
+@property(assign, nonatomic) CMTime lastSampleEndTime;
+@property(assign, nonatomic) CMTime recordingTimeOffset;
+@property(assign, nonatomic) BOOL isRecordingDisconnected;
+@property(weak, nonatomic) AVCaptureOutput *outputForOffsetAdjusting;
+@property(assign, nonatomic) CMTime lastAppendedVideoSampleTime;
 @property(readonly, nonatomic) NSObject<FLTAssetWriterInputPixelBufferAdaptor> *videoAdaptor;

 /// All FLTCam's state access and capture session related operations should be run on this queue.
@@ -278,9 +275,33 @@ - (instancetype)initWithConfiguration:(nonnull FLTCamConfiguration *)configurati

   [self updateOrientation];

+  // Handle video and audio interruptions and errors.
+  // https://github.com/flutter/flutter/issues/151253
+  for (NSObject<FLTCaptureSession> *session in @[ _videoCaptureSession, _audioCaptureSession ]) {
+    [NSNotificationCenter.defaultCenter addObserver:self
+                                           selector:@selector(captureSessionWasInterrupted:)
+                                               name:AVCaptureSessionWasInterruptedNotification
+                                             object:session.captureSession];
+
+    [NSNotificationCenter.defaultCenter addObserver:self
+                                           selector:@selector(captureSessionRuntimeError:)
+                                               name:AVCaptureSessionRuntimeErrorNotification
+                                             object:session.captureSession];
+  }
+
   return self;
 }

+- (void)captureSessionWasInterrupted:(NSNotification *)notification {
+  _isRecordingDisconnected = YES;
+}
+
+- (void)captureSessionRuntimeError:(NSNotification *)notification {
+  [self reportErrorMessage:[NSString
+                               stringWithFormat:@"%@",
+                                                notification.userInfo[AVCaptureSessionErrorKey]]];
+}
+
 - (AVCaptureConnection *)createConnection:(NSError **)error {
   // Setup video capture input.
   _captureVideoInput = [_captureDeviceInputFactory deviceInputWithDevice:_captureDevice
@@ -685,78 +706,72 @@ - (void)captureOutput:(AVCaptureOutput *)output
       });
     }
   }
-  if (_isRecording && !_isRecordingPaused) {
+  if (_isRecording && !_isRecordingPaused && _videoCaptureSession.running &&
+      _audioCaptureSession.running) {
     if (_videoWriter.status == AVAssetWriterStatusFailed) {
       [self reportErrorMessage:[NSString stringWithFormat:@"%@", _videoWriter.error]];
       return;
     }

-    // ignore audio samples until the first video sample arrives to avoid black frames
-    // https://github.com/flutter/flutter/issues/57831
-    if (_isFirstVideoSample && output != _captureVideoOutput.avOutput) {
-      return;
+    // do not append sample buffer when readyForMoreMediaData is NO to avoid crash
+    // https://github.com/flutter/flutter/issues/132073
+    if (output == _captureVideoOutput.avOutput) {
+      if (!_videoWriterInput.readyForMoreMediaData) {
+        return;
+      }
+    } else {
+      // ignore audio samples until the first video sample arrives to avoid black frames
+      // https://github.com/flutter/flutter/issues/57831
+      if (_isFirstVideoSample || !_audioWriterInput.readyForMoreMediaData) {
+        return;
+      }
+      _outputForOffsetAdjusting = output;
     }

-    CMTime currentSampleTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
+    CMTime sampleTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);

     if (_isFirstVideoSample) {
-      [_videoWriter startSessionAtSourceTime:currentSampleTime];
+      [_videoWriter startSessionAtSourceTime:sampleTime];
       // fix sample times not being numeric when pause/resume happens before first sample buffer
       // arrives
       // https://github.com/flutter/flutter/issues/132014
-      _lastVideoSampleTime = currentSampleTime;
-      _lastAudioSampleTime = currentSampleTime;
+      _isRecordingDisconnected = NO;
       _isFirstVideoSample = NO;
     }

-    if (output == _captureVideoOutput.avOutput) {
-      if (_videoIsDisconnected) {
-        _videoIsDisconnected = NO;
-
-        if (_videoTimeOffset.value == 0) {
-          _videoTimeOffset = CMTimeSubtract(currentSampleTime, _lastVideoSampleTime);
-        } else {
-          CMTime offset = CMTimeSubtract(currentSampleTime, _lastVideoSampleTime);
-          _videoTimeOffset = CMTimeAdd(_videoTimeOffset, offset);
-        }
+    CMTime currentSampleEndTime = sampleTime;
+    CMTime dur = CMSampleBufferGetDuration(sampleBuffer);
+    if (CMTIME_IS_NUMERIC(dur)) {
+      currentSampleEndTime = CMTimeAdd(currentSampleEndTime, dur);
+    }

-        return;
+    // Use a single time offset for both video and audio.
+    // https://github.com/flutter/flutter/issues/149978
+    if (_isRecordingDisconnected) {
+      if (output == _outputForOffsetAdjusting) {
+        CMTime offset = CMTimeSubtract(currentSampleEndTime, _lastSampleEndTime);
+        _recordingTimeOffset = CMTimeAdd(_recordingTimeOffset, offset);
+        _lastSampleEndTime = currentSampleEndTime;
+        _isRecordingDisconnected = NO;
       }
+      return;
+    }

-      _lastVideoSampleTime = currentSampleTime;
+    if (output == _outputForOffsetAdjusting) {
+      _lastSampleEndTime = currentSampleEndTime;
+    }

+    if (output == _captureVideoOutput.avOutput) {
       CVPixelBufferRef nextBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
-      CMTime nextSampleTime = CMTimeSubtract(_lastVideoSampleTime, _videoTimeOffset);
-      // do not append sample buffer when readyForMoreMediaData is NO to avoid crash
-      // https://github.com/flutter/flutter/issues/132073
-      if (_videoWriterInput.readyForMoreMediaData) {
+      CMTime nextSampleTime = CMTimeSubtract(sampleTime, _recordingTimeOffset);
+      if (CMTIME_COMPARE_INLINE(nextSampleTime, >, _lastAppendedVideoSampleTime)) {
         [_videoAdaptor appendPixelBuffer:nextBuffer withPresentationTime:nextSampleTime];
+        _lastAppendedVideoSampleTime = nextSampleTime;
       }
     } else {
-      CMTime dur = CMSampleBufferGetDuration(sampleBuffer);
-
-      if (dur.value > 0) {
-        currentSampleTime = CMTimeAdd(currentSampleTime, dur);
-      }
-
-      if (_audioIsDisconnected) {
-        _audioIsDisconnected = NO;
-
-        if (_audioTimeOffset.value == 0) {
-          _audioTimeOffset = CMTimeSubtract(currentSampleTime, _lastAudioSampleTime);
-        } else {
-          CMTime offset = CMTimeSubtract(currentSampleTime, _lastAudioSampleTime);
-          _audioTimeOffset = CMTimeAdd(_audioTimeOffset, offset);
-        }
-
-        return;
-      }
-
-      _lastAudioSampleTime = currentSampleTime;
-
-      if (_audioTimeOffset.value != 0) {
+      if (_recordingTimeOffset.value != 0) {
         CMSampleBufferRef adjustedSampleBuffer =
-            [self copySampleBufferWithAdjustedTime:sampleBuffer by:_audioTimeOffset];
+            [self copySampleBufferWithAdjustedTime:sampleBuffer by:_recordingTimeOffset];
         [self newAudioSample:adjustedSampleBuffer];
         CFRelease(adjustedSampleBuffer);
       } else {
@@ -788,10 +803,8 @@ - (void)newVideoSample:(CMSampleBufferRef)sampleBuffer {
     }
     return;
   }
-  if (_videoWriterInput.readyForMoreMediaData) {
-    if (![_videoWriterInput appendSampleBuffer:sampleBuffer]) {
-      [self reportErrorMessage:@"Unable to write to video input"];
-    }
+  if (![_videoWriterInput appendSampleBuffer:sampleBuffer]) {
+    [self reportErrorMessage:@"Unable to write to video input"];
   }
 }
@@ -802,10 +815,8 @@ - (void)newAudioSample:(CMSampleBufferRef)sampleBuffer {
     }
     return;
   }
-  if (_audioWriterInput.readyForMoreMediaData) {
-    if (![_audioWriterInput appendSampleBuffer:sampleBuffer]) {
-      [self reportErrorMessage:@"Unable to write to audio input"];
-    }
+  if (![_audioWriterInput appendSampleBuffer:sampleBuffer]) {
+    [self reportErrorMessage:@"Unable to write to audio input"];
   }
 }
@@ -830,6 +841,7 @@ - (void)dealloc {
     CFRelease(_latestPixelBuffer);
   }
   [_motionManager stopAccelerometerUpdates];
+  [NSNotificationCenter.defaultCenter removeObserver:self];
 }

 - (CVPixelBufferRef)copyPixelBuffer {
@@ -875,10 +887,10 @@ - (void)startVideoRecordingWithCompletion:(void (^)(FlutterError *_Nullable))com
     _isFirstVideoSample = YES;
     _isRecording = YES;
     _isRecordingPaused = NO;
-    _videoTimeOffset = CMTimeMake(0, 1);
-    _audioTimeOffset = CMTimeMake(0, 1);
-    _videoIsDisconnected = NO;
-    _audioIsDisconnected = NO;
+    _isRecordingDisconnected = NO;
+    _recordingTimeOffset = CMTimeMake(0, 1);
+    _outputForOffsetAdjusting = _captureVideoOutput.avOutput;
+    _lastAppendedVideoSampleTime = kCMTimeNegativeInfinity;
     completion(nil);
   } else {
     completion([FlutterError errorWithCode:@"Error"
@@ -918,8 +930,7 @@ - (void)stopVideoRecordingWithCompletion:(void (^)(NSString *_Nullable,

 - (void)pauseVideoRecording {
   _isRecordingPaused = YES;
-  _videoIsDisconnected = YES;
-  _audioIsDisconnected = YES;
+  _isRecordingDisconnected = YES;
 }

 - (void)resumeVideoRecording {
diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTCaptureSession.m b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTCaptureSession.m
index 4812d883476d..a8860f71aa93 100644
--- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTCaptureSession.m
+++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTCaptureSession.m
@@ -34,6 +34,10 @@ - (void)stopRunning {
   [_captureSession stopRunning];
 }

+- (BOOL)running {
+  return _captureSession.running;
+}
+
 - (BOOL)automaticallyConfiguresApplicationAudioSession {
   return _captureSession.automaticallyConfiguresApplicationAudioSession;
 }
diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/FLTCam_Test.h b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/FLTCam_Test.h
index 3e3a44922dd6..7a567e3d2b01 100644
--- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/FLTCam_Test.h
+++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/FLTCam_Test.h
@@ -35,6 +35,9 @@
 /// True when images from the camera are being streamed.
 @property(assign, nonatomic) BOOL isStreamingImages;

+@property(readonly, nonatomic) NSObject<FLTCaptureSession> *videoCaptureSession;
+@property(readonly, nonatomic) NSObject<FLTCaptureSession> *audioCaptureSession;
+
 /// A dictionary to retain all in-progress FLTSavePhotoDelegates. The key of the dictionary is the
 /// AVCapturePhotoSettings's uniqueID for each photo capture operation, and the value is the
 /// FLTSavePhotoDelegate that handles the result of each photo capture operation. Note that photo
diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/FLTCaptureSession.h b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/FLTCaptureSession.h
index 473f1a2ef0aa..dfa3dcab1e38 100644
--- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/FLTCaptureSession.h
+++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/FLTCaptureSession.h
@@ -12,10 +12,12 @@ NS_ASSUME_NONNULL_BEGIN
 /// It exists to allow replacing AVCaptureSession in tests.
 @protocol FLTCaptureSession <NSObject>

+@property(nonatomic, readonly) AVCaptureSession *captureSession;
 @property(nonatomic, copy) AVCaptureSessionPreset sessionPreset;
 @property(nonatomic, readonly) NSArray<AVCaptureInput *> *inputs;
 @property(nonatomic, readonly) NSArray<AVCaptureOutput *> *outputs;
 @property(nonatomic, assign) BOOL automaticallyConfiguresApplicationAudioSession;
+@property(nonatomic, readonly) BOOL running;

 - (void)beginConfiguration;
 - (void)commitConfiguration;
diff --git a/packages/camera/camera_avfoundation/pubspec.yaml b/packages/camera/camera_avfoundation/pubspec.yaml
index 66dffba0df09..1d40fd04c635 100644
--- a/packages/camera/camera_avfoundation/pubspec.yaml
+++ b/packages/camera/camera_avfoundation/pubspec.yaml
@@ -2,7 +2,7 @@ name: camera_avfoundation
 description: iOS implementation of the camera plugin.
 repository: https://github.com/flutter/packages/tree/main/packages/camera/camera_avfoundation
 issue_tracker: https://github.com/flutter/flutter/issues?q=is%3Aissue+is%3Aopen+label%3A%22p%3A+camera%22
-version: 0.9.18+13
+version: 0.9.18+14

 environment:
   sdk: ^3.4.0
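The core of the FLTCam.m change is that pause/resume and session interruptions now set a single `_isRecordingDisconnected` flag, and the next sample from `_outputForOffsetAdjusting` (audio once audio is flowing, video otherwise) grows one shared `_recordingTimeOffset` by the gap since the last accepted sample; every subsequent video and audio timestamp is shifted by that same offset, so the two streams can no longer drift apart. The sketch below models that bookkeeping with plain integers standing in for CMTime; Dart is used only because it is the plugin's front-end language, and the class is illustrative, not part of the plugin:

```dart
// Minimal model of the single-offset bookkeeping; names mirror the FLTCam
// ivars but this is a sketch, not the plugin's implementation.
class RecordingClock {
  bool disconnected = false; // set on pause or session interruption
  int timeOffset = 0; // single offset shared by video and audio
  int lastSampleEndTime = 0;

  /// Returns the adjusted timestamp to append, or null while samples are
  /// dropped waiting to re-anchor after a disconnect.
  int? adjust(int sampleTime, int duration, {required bool isAnchorOutput}) {
    final int sampleEndTime = sampleTime + duration;
    if (disconnected) {
      if (isAnchorOutput) {
        // Grow the shared offset by the gap since the last accepted sample,
        // so both streams skip the paused/interrupted interval together.
        timeOffset += sampleEndTime - lastSampleEndTime;
        lastSampleEndTime = sampleEndTime;
        disconnected = false;
      }
      return null;
    }
    if (isAnchorOutput) {
      lastSampleEndTime = sampleEndTime;
    }
    return sampleTime - timeOffset;
  }
}

void main() {
  final RecordingClock clock = RecordingClock();
  print(clock.adjust(1, 0, isAnchorOutput: true)); // 1: recording starts
  clock.disconnected = true; // pause/resume, or an interruption notification
  print(clock.adjust(11, 0, isAnchorOutput: true)); // null: re-anchor, offset = 10
  print(clock.adjust(12, 0, isAnchorOutput: true)); // 2: 12 - 10
}
```

The `main` sequence reproduces the second scenario of `testDidOutputSampleBufferMustUseSingleOffsetForVideoAndAudio`: the first sample after a pause is dropped while it re-anchors the offset, and the next one lands exactly one tick after the last pre-pause timestamp.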