diff --git a/packages/camera/camera/CHANGELOG.md b/packages/camera/camera/CHANGELOG.md
index 97a41bea6c8..4bbc7f7c202 100644
--- a/packages/camera/camera/CHANGELOG.md
+++ b/packages/camera/camera/CHANGELOG.md
@@ -1,5 +1,6 @@
-## NEXT
+## 0.11.1+1
 
+* Fixes delivering errors from onCameraError.
 * Fixes overflowed toggles in the camera example.
 
 ## 0.11.1
diff --git a/packages/camera/camera/lib/src/camera_controller.dart b/packages/camera/camera/lib/src/camera_controller.dart
index 3f39d3202b8..8fbe65fdb1f 100644
--- a/packages/camera/camera/lib/src/camera_controller.dart
+++ b/packages/camera/camera/lib/src/camera_controller.dart
@@ -174,7 +174,7 @@ class CameraValue {
   }) {
     return CameraValue(
       isInitialized: isInitialized ?? this.isInitialized,
-      errorDescription: errorDescription,
+      errorDescription: errorDescription ?? this.errorDescription,
       previewSize: previewSize ?? this.previewSize,
       isRecordingVideo: isRecordingVideo ?? this.isRecordingVideo,
       isTakingPicture: isTakingPicture ?? this.isTakingPicture,
@@ -353,6 +353,15 @@ class CameraController extends ValueNotifier<CameraValue> {
         initializeCompleter.complete(event);
       }));
 
+      _unawaited(CameraPlatform.instance
+          .onCameraError(_cameraId)
+          .first
+          .then((CameraErrorEvent event) {
+        value = value.copyWith(
+          errorDescription: event.description,
+        );
+      }));
+
       await CameraPlatform.instance.initializeCamera(
         _cameraId,
         imageFormatGroup: imageFormatGroup ?? ImageFormatGroup.unknown,
diff --git a/packages/camera/camera/pubspec.yaml b/packages/camera/camera/pubspec.yaml
index 09899dfa579..ccdd4960258 100644
--- a/packages/camera/camera/pubspec.yaml
+++ b/packages/camera/camera/pubspec.yaml
@@ -4,7 +4,7 @@ description: A Flutter plugin for controlling the camera. Supports previewing
   Dart.
 repository: https://github.com/flutter/packages/tree/main/packages/camera/camera
 issue_tracker: https://github.com/flutter/flutter/issues?q=is%3Aissue+is%3Aopen+label%3A%22p%3A+camera%22
-version: 0.11.1
+version: 0.11.1+1
 
 environment:
   sdk: ^3.6.0
diff --git a/packages/camera/camera/test/camera_test.dart b/packages/camera/camera/test/camera_test.dart
index 0c6a319397e..0667247685b 100644
--- a/packages/camera/camera/test/camera_test.dart
+++ b/packages/camera/camera/test/camera_test.dart
@@ -1441,6 +1441,20 @@ void main() {
           'This is a test error message',
         )));
   });
+
+  test('error from onCameraError is received', () async {
+    final CameraController cameraController = CameraController(
+        const CameraDescription(
+            name: 'cam',
+            lensDirection: CameraLensDirection.back,
+            sensorOrientation: 90),
+        ResolutionPreset.max);
+    await cameraController.initialize();
+
+    expect(cameraController.value.hasError, isTrue);
+    expect(cameraController.value.errorDescription,
+        mockOnCameraErrorEvent.description);
+  });
 });
}
diff --git a/packages/camera/camera_avfoundation/CHANGELOG.md b/packages/camera/camera_avfoundation/CHANGELOG.md
index abc280db2ad..2b76174535e 100644
--- a/packages/camera/camera_avfoundation/CHANGELOG.md
+++ b/packages/camera/camera_avfoundation/CHANGELOG.md
@@ -1,3 +1,8 @@
+## 0.9.20+2
+
+* Handles video and audio interruptions and errors.
+* Uses a single time offset for both video and audio.
+
 ## 0.9.20+1
 
 * Migrates lifecycle methods (`start`, `stop`, `close`) to Swift.
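Editor's note: the sketch below is an illustration, not part of the patch. It condenses the single-offset bookkeeping that the `DefaultCamera.swift` hunk further down introduces (shipping as the `0.9.20+2` changelog entry above). The field names `recordingTimeOffset`, `lastSampleEndTime`, and `isRecordingDisconnected` come from the patch; the `RecordingClock` wrapper and its `adjustedTime` method are hypothetical.

```swift
import CoreMedia

/// Hypothetical wrapper; mirrors the patched DefaultCamera bookkeeping.
struct RecordingClock {
  /// Sum of all pause/interruption gaps so far (patch: recordingTimeOffset).
  var recordingTimeOffset = CMTime.zero
  /// End time (timestamp + duration) of the last accepted sample
  /// (patch: lastSampleEndTime).
  var lastSampleEndTime = CMTime.zero
  /// Set on pause or interruption; cleared once the next sample measures the gap.
  var isRecordingDisconnected = false

  /// Returns the presentation time shifted by the shared offset, or nil when the
  /// sample only re-synchronizes the clock after a disconnect and must be dropped.
  mutating func adjustedTime(sampleTime: CMTime, duration: CMTime) -> CMTime? {
    var sampleEndTime = sampleTime
    if CMTIME_IS_NUMERIC(duration) {
      sampleEndTime = CMTimeAdd(sampleEndTime, duration)
    }
    if isRecordingDisconnected {
      // Grow the single offset, shared by video and audio, by the measured gap.
      let gap = CMTimeSubtract(sampleEndTime, lastSampleEndTime)
      recordingTimeOffset = CMTimeAdd(recordingTimeOffset, gap)
      lastSampleEndTime = sampleEndTime
      isRecordingDisconnected = false
      return nil
    }
    lastSampleEndTime = sampleEndTime
    return CMTimeSubtract(sampleTime, recordingTimeOffset)
  }
}
```

Because both tracks share one clock, a pause measured against a video sample shifts subsequent audio samples by the same amount, which is what `testDidOutputSampleBufferMustUseSingleOffsetForVideoAndAudio` below asserts.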
diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraTestUtils.swift b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraTestUtils.swift
index e21497d1a1e..cad92c07fa0 100644
--- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraTestUtils.swift
+++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraTestUtils.swift
@@ -99,7 +99,10 @@ enum CameraTestUtils {
 
   /// Creates a test sample buffer.
   /// @return a test sample buffer.
-  static func createTestSampleBuffer() -> CMSampleBuffer {
+  static func createTestSampleBuffer(
+    timestamp: CMTime = .zero,
+    duration: CMTime = CMTimeMake(value: 1, timescale: 44100)
+  ) -> CMSampleBuffer {
     var pixelBuffer: CVPixelBuffer?
     CVPixelBufferCreate(kCFAllocatorDefault, 100, 100, kCVPixelFormatType_32BGRA, nil, &pixelBuffer)
 
@@ -110,9 +113,9 @@ enum CameraTestUtils {
       formatDescriptionOut: &formatDescription)
 
     var timingInfo = CMSampleTimingInfo(
-      duration: CMTimeMake(value: 1, timescale: 44100),
-      presentationTimeStamp: CMTime.zero,
-      decodeTimeStamp: CMTime.invalid)
+      duration: duration,
+      presentationTimeStamp: timestamp,
+      decodeTimeStamp: .invalid)
 
     var sampleBuffer: CMSampleBuffer?
     CMSampleBufferCreateReadyWithImageBuffer(
@@ -127,22 +130,25 @@ enum CameraTestUtils {
 
   /// Creates a test audio sample buffer.
   /// @return a test audio sample buffer.
-  static func createTestAudioSampleBuffer() -> CMSampleBuffer {
+  static func createTestAudioSampleBuffer(
+    timestamp: CMTime = .zero,
+    duration: CMTime = CMTimeMake(value: 1, timescale: 44100)
+  ) -> CMSampleBuffer {
     var blockBuffer: CMBlockBuffer?
     CMBlockBufferCreateWithMemoryBlock(
       allocator: kCFAllocatorDefault,
       memoryBlock: nil,
-      blockLength: 100,
+      blockLength: Int(duration.value),
       blockAllocator: kCFAllocatorDefault,
       customBlockSource: nil,
       offsetToData: 0,
-      dataLength: 100,
+      dataLength: Int(duration.value),
       flags: kCMBlockBufferAssureMemoryNowFlag,
       blockBufferOut: &blockBuffer)
 
     var formatDescription: CMFormatDescription?
     var basicDescription = AudioStreamBasicDescription(
-      mSampleRate: 44100,
+      mSampleRate: Float64(duration.timescale),
       mFormatID: kAudioFormatLinearPCM,
       mFormatFlags: 0,
       mBytesPerPacket: 1,
@@ -167,8 +173,8 @@ enum CameraTestUtils {
       allocator: kCFAllocatorDefault,
       dataBuffer: blockBuffer!,
       formatDescription: formatDescription!,
-      sampleCount: 1,
-      presentationTimeStamp: .zero,
+      sampleCount: CMItemCount(duration.value),
+      presentationTimeStamp: timestamp,
       packetDescriptions: nil,
       sampleBufferOut: &sampleBuffer)
 
diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockCaptureSession.swift b/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockCaptureSession.swift
index 9218fa8dd89..9ee467a0cf8 100644
--- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockCaptureSession.swift
+++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockCaptureSession.swift
@@ -19,10 +19,12 @@ final class MockCaptureSession: NSObject, FLTCaptureSession {
   var stopRunningStub: (() -> Void)?
   var canSetSessionPresetStub: ((AVCaptureSession.Preset) -> Bool)?
 
+  var captureSession = AVCaptureSession()
   var _sessionPreset = AVCaptureSession.Preset.high
   var inputs = [AVCaptureInput]()
   var outputs = [AVCaptureOutput]()
   var automaticallyConfiguresApplicationAudioSession = false
+  var running = true
 
   var sessionPreset: AVCaptureSession.Preset {
     get {
diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/SampleBufferTests.swift b/packages/camera/camera_avfoundation/example/ios/RunnerTests/SampleBufferTests.swift
index eeef97b292a..99cc71a7a8b 100644
--- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/SampleBufferTests.swift
+++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/SampleBufferTests.swift
@@ -359,4 +359,156 @@ final class CameraSampleBufferTests: XCTestCase {
       AVAudioSession.sharedInstance().category == .playAndRecord,
       "Category should be PlayAndRecord.")
   }
+
+  func testDidOutputSampleBufferMustUseSingleOffsetForVideoAndAudio() {
+    let (camera, writerMock, adaptorMock, inputMock) = createCamera()
+
+    let testVideoConnection = CameraTestUtils.createTestConnection(
+      camera.captureVideoOutput.avOutput)
+    let testAudioOutput = CameraTestUtils.createTestAudioOutput()
+    let testAudioConnection = CameraTestUtils.createTestConnection(testAudioOutput)
+
+    var status = AVAssetWriter.Status.unknown
+    writerMock.startWritingStub = {
+      status = .writing
+      return true
+    }
+    writerMock.statusStub = {
+      return status
+    }
+
+    var appendedTime = CMTime.invalid
+
+    adaptorMock.appendStub = { buffer, time in
+      appendedTime = time
+      return true
+    }
+
+    inputMock.readyForMoreMediaData = true
+    inputMock.appendStub = { buffer in
+      appendedTime = CMSampleBufferGetPresentationTimeStamp(buffer)
+      return true
+    }
+
+    camera.startVideoRecording(completion: { error in }, messengerForStreaming: nil)
+
+    let appendVideoSample = { (time: Int64) in
+      camera.captureOutput(
+        camera.captureVideoOutput.avOutput,
+        didOutput: CameraTestUtils.createTestSampleBuffer(
+          timestamp: CMTimeMake(value: time, timescale: 1),
+          duration: .invalid),
+        from: testVideoConnection)
+    }
+
+    let appendAudioSample = { (time: Int64, duration: Int64) in
+      camera.captureOutput(
+        testAudioOutput,
+        didOutput: CameraTestUtils.createTestAudioSampleBuffer(
+          timestamp: CMTimeMake(value: time, timescale: 1),
+          duration: CMTimeMake(value: duration, timescale: 1)),
+        from: testAudioConnection)
+    }
+
+    appendedTime = .invalid
+    camera.pauseVideoRecording()
+    camera.resumeVideoRecording()
+    appendVideoSample(1)
+    XCTAssertEqual(appendedTime, CMTimeMake(value: 1, timescale: 1))
+
+    appendedTime = .invalid
+    camera.pauseVideoRecording()
+    camera.resumeVideoRecording()
+    appendVideoSample(11)
+    XCTAssertEqual(appendedTime, .invalid)
+    appendVideoSample(12)
+    XCTAssertEqual(appendedTime, CMTimeMake(value: 2, timescale: 1))
+
+    appendedTime = .invalid
+    camera.pauseVideoRecording()
+    camera.resumeVideoRecording()
+    appendAudioSample(20, 2)
+    XCTAssertEqual(appendedTime, .invalid)
+    appendVideoSample(23)
+    XCTAssertEqual(appendedTime, CMTimeMake(value: 3, timescale: 1))
+
+    appendedTime = .invalid
+    camera.pauseVideoRecording()
+    camera.resumeVideoRecording()
+    appendVideoSample(28)
+    XCTAssertEqual(appendedTime, .invalid)
+    appendAudioSample(30, 2)
+    XCTAssertEqual(appendedTime, .invalid)
+    appendVideoSample(33)
+    XCTAssertEqual(appendedTime, .invalid)
+    appendAudioSample(32, 2)
+    XCTAssertEqual(appendedTime, CMTimeMake(value: 2, timescale: 1))
+  }
+
+  func testDidOutputSampleBufferMustConnectVideoAfterSessionInterruption() {
+    let (camera, writerMock, adaptorMock, inputMock) = createCamera()
+
+    let testVideoConnection = CameraTestUtils.createTestConnection(
+      camera.captureVideoOutput.avOutput)
+    let testAudioOutput = CameraTestUtils.createTestAudioOutput()
+    let testAudioConnection = CameraTestUtils.createTestConnection(testAudioOutput)
+
+    var status = AVAssetWriter.Status.unknown
+    writerMock.startWritingStub = {
+      status = .writing
+      return true
+    }
+    writerMock.statusStub = {
+      return status
+    }
+
+    var appendedTime = CMTime.invalid
+
+    adaptorMock.appendStub = { buffer, time in
+      appendedTime = time
+      return true
+    }
+
+    inputMock.readyForMoreMediaData = true
+    inputMock.appendStub = { buffer in
+      appendedTime = CMSampleBufferGetPresentationTimeStamp(buffer)
+      return true
+    }
+
+    camera.startVideoRecording(completion: { error in }, messengerForStreaming: nil)
+
+    let appendVideoSample = { (time: Int64) in
+      camera.captureOutput(
+        camera.captureVideoOutput.avOutput,
+        didOutput: CameraTestUtils.createTestSampleBuffer(
+          timestamp: CMTimeMake(value: time, timescale: 1),
+          duration: .invalid),
+        from: testVideoConnection)
+    }
+
+    let appendAudioSample = { (time: Int64, duration: Int64) in
+      camera.captureOutput(
+        testAudioOutput,
+        didOutput: CameraTestUtils.createTestAudioSampleBuffer(
+          timestamp: CMTimeMake(value: time, timescale: 1),
+          duration: CMTimeMake(value: duration, timescale: 1)),
+        from: testAudioConnection)
+    }
+
+    appendVideoSample(1)
+    appendAudioSample(1, 1)
+
+    NotificationCenter.default.post(
+      name: AVCaptureSession.wasInterruptedNotification,
+      object: camera.audioCaptureSession.captureSession)
+
+    appendedTime = .invalid
+    appendAudioSample(11, 1)
+    XCTAssertEqual(appendedTime, .invalid)
+    appendVideoSample(12)
+    XCTAssertEqual(appendedTime, CMTimeMake(value: 2, timescale: 1))
+    appendedTime = .invalid
+    appendAudioSample(12, 1)
+    XCTAssertEqual(appendedTime, CMTimeMake(value: 2, timescale: 1))
+  }
 }
diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/DefaultCamera.swift b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/DefaultCamera.swift
index 109a7f76814..6921f1c4ea9 100644
--- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/DefaultCamera.swift
+++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/DefaultCamera.swift
@@ -18,8 +18,8 @@ final class DefaultCamera: FLTCam, Camera {
   /// Tracks the latest pixel buffer sent from AVFoundation's sample buffer delegate callback.
   /// Used to deliver the latest pixel buffer to the flutter engine via the `copyPixelBuffer` API.
   private var latestPixelBuffer: CVPixelBuffer?
-  private var lastVideoSampleTime = CMTime.zero
-  private var lastAudioSampleTime = CMTime.zero
+  /// Time of the end of the last sample.
+  private var lastSampleEndTime = CMTime.zero
 
   /// Maximum number of frames pending processing.
   /// To limit memory consumption, limit the number of frames pending processing.
@@ -288,75 +288,74 @@ final class DefaultCamera: FLTCam, Camera {
       }
     }
 
-    if isRecording && !isRecordingPaused {
+    if isRecording && !isRecordingPaused && videoCaptureSession.running
+      && audioCaptureSession.running
+    {
       if videoWriter?.status == .failed, let error = videoWriter?.error {
         reportErrorMessage("\(error)")
         return
       }
 
-      // ignore audio samples until the first video sample arrives to avoid black frames
-      // https://github.com/flutter/flutter/issues/57831
-      if isFirstVideoSample && output != captureVideoOutput.avOutput {
-        return
+      // do not append sample buffer when readyForMoreMediaData is NO to avoid crash
+      // https://github.com/flutter/flutter/issues/132073
+      if output == captureVideoOutput.avOutput {
+        if !(videoWriterInput?.readyForMoreMediaData ?? false) {
+          return
+        }
+      } else {
+        // ignore audio samples until the first video sample arrives to avoid black frames
+        // https://github.com/flutter/flutter/issues/57831
+        if isFirstVideoSample || !(audioWriterInput?.readyForMoreMediaData ?? false) {
+          return
+        }
+        outputForOffsetAdjusting = output
       }
 
-      var currentSampleTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer)
+      let sampleTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer)
 
       if isFirstVideoSample {
-        videoWriter?.startSession(atSourceTime: currentSampleTime)
+        videoWriter?.startSession(atSourceTime: sampleTime)
         // fix sample times not being numeric when pause/resume happens before first sample buffer
         // arrives
         // https://github.com/flutter/flutter/issues/132014
-        lastVideoSampleTime = currentSampleTime
-        lastAudioSampleTime = currentSampleTime
+        isRecordingDisconnected = false
         isFirstVideoSample = false
       }
 
-      if output == captureVideoOutput.avOutput {
-        if videoIsDisconnected {
-          videoIsDisconnected = false
-
-          videoTimeOffset =
-            videoTimeOffset.value == 0
-            ? CMTimeSubtract(currentSampleTime, lastVideoSampleTime)
-            : CMTimeAdd(videoTimeOffset, CMTimeSubtract(currentSampleTime, lastVideoSampleTime))
+      var currentSampleEndTime = sampleTime
+      let dur = CMSampleBufferGetDuration(sampleBuffer)
+      if CMTIME_IS_NUMERIC(dur) {
+        currentSampleEndTime = CMTimeAdd(currentSampleEndTime, dur)
+      }
 
-          return
+      // Use a single time offset for both video and audio.
+      // https://github.com/flutter/flutter/issues/149978
+      if isRecordingDisconnected {
+        if output == outputForOffsetAdjusting {
+          let offset = CMTimeSubtract(currentSampleEndTime, lastSampleEndTime)
+          recordingTimeOffset = CMTimeAdd(recordingTimeOffset, offset)
+          lastSampleEndTime = currentSampleEndTime
+          isRecordingDisconnected = false
         }
+        return
+      }
 
-        lastVideoSampleTime = currentSampleTime
+      if output == outputForOffsetAdjusting {
+        lastSampleEndTime = currentSampleEndTime
+      }
 
+      if output == captureVideoOutput.avOutput {
         let nextBuffer = CMSampleBufferGetImageBuffer(sampleBuffer)
-        let nextSampleTime = CMTimeSubtract(lastVideoSampleTime, videoTimeOffset)
-        // do not append sample buffer when readyForMoreMediaData is NO to avoid crash
-        // https://github.com/flutter/flutter/issues/132073
-        if videoWriterInput?.readyForMoreMediaData ?? false {
+        let nextSampleTime = CMTimeSubtract(sampleTime, recordingTimeOffset)
+        if nextSampleTime > lastAppendedVideoSampleTime {
           videoAdaptor?.append(nextBuffer!, withPresentationTime: nextSampleTime)
+          lastAppendedVideoSampleTime = nextSampleTime
         }
       } else {
-        let dur = CMSampleBufferGetDuration(sampleBuffer)
-
-        if dur.value > 0 {
-          currentSampleTime = CMTimeAdd(currentSampleTime, dur)
-        }
-
-        if audioIsDisconnected {
-          audioIsDisconnected = false
-
-          audioTimeOffset =
-            audioTimeOffset.value == 0
-            ? CMTimeSubtract(currentSampleTime, lastAudioSampleTime)
-            : CMTimeAdd(audioTimeOffset, CMTimeSubtract(currentSampleTime, lastAudioSampleTime))
-
-          return
-        }
-
-        lastAudioSampleTime = currentSampleTime
-
-        if audioTimeOffset.value != 0 {
+        if recordingTimeOffset.value != 0 {
           if let adjustedSampleBuffer = copySampleBufferWithAdjustedTime(
             sampleBuffer,
-            by: audioTimeOffset)
+            by: recordingTimeOffset)
           {
             newAudioSample(adjustedSampleBuffer)
           }
@@ -405,10 +404,8 @@ final class DefaultCamera: FLTCam, Camera {
       }
       return
     }
-    if audioWriterInput?.readyForMoreMediaData ?? false {
-      if !(audioWriterInput?.append(sampleBuffer) ?? false) {
-        reportErrorMessage("Unable to write to audio input")
-      }
+    if !(audioWriterInput?.append(sampleBuffer) ?? false) {
+      reportErrorMessage("Unable to write to audio input")
     }
   }
diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation_objc/FLTCam.m b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation_objc/FLTCam.m
index ce7ec49118b..500d4a869f0 100644
--- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation_objc/FLTCam.m
+++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation_objc/FLTCam.m
@@ -161,9 +161,35 @@ - (instancetype)initWithConfiguration:(nonnull FLTCamConfiguration *)configurati
 
   [self updateOrientation];
 
+  // Handle video and audio interruptions and errors. Interruption can happen for example by
+  // an incoming call during video recording. Error can happen for example when recording starts
+  // during an incoming call.
+  // https://github.com/flutter/flutter/issues/151253
+  for (NSObject<FLTCaptureSession> *session in @[ _videoCaptureSession, _audioCaptureSession ]) {
+    [NSNotificationCenter.defaultCenter addObserver:self
+                                           selector:@selector(captureSessionWasInterrupted:)
+                                               name:AVCaptureSessionWasInterruptedNotification
+                                             object:session.captureSession];
+
+    [NSNotificationCenter.defaultCenter addObserver:self
+                                           selector:@selector(captureSessionRuntimeError:)
+                                               name:AVCaptureSessionRuntimeErrorNotification
+                                             object:session.captureSession];
+  }
+
   return self;
 }
 
+- (void)captureSessionWasInterrupted:(NSNotification *)notification {
+  _isRecordingDisconnected = YES;
+}
+
+- (void)captureSessionRuntimeError:(NSNotification *)notification {
+  [self reportErrorMessage:[NSString
+                               stringWithFormat:@"%@",
+                                                notification.userInfo[AVCaptureSessionErrorKey]]];
+}
+
 - (AVCaptureConnection *)createConnection:(NSError **)error {
   // Setup video capture input.
   _captureVideoInput = [_captureDeviceInputFactory deviceInputWithDevice:_captureDevice
@@ -444,6 +470,7 @@ - (BOOL)setCaptureSessionPreset:(FCPPlatformResolutionPreset)resolutionPreset
 
 - (void)dealloc {
   [_motionManager stopAccelerometerUpdates];
+  [NSNotificationCenter.defaultCenter removeObserver:self];
 }
 
 /// Main logic to setup the video recording.
@@ -471,10 +498,10 @@ - (void)setUpVideoRecordingWithCompletion:(void (^)(FlutterError *_Nullable))com
   _isFirstVideoSample = YES;
   _isRecording = YES;
   _isRecordingPaused = NO;
-  _videoTimeOffset = CMTimeMake(0, 1);
-  _audioTimeOffset = CMTimeMake(0, 1);
-  _videoIsDisconnected = NO;
-  _audioIsDisconnected = NO;
+  _isRecordingDisconnected = NO;
+  _recordingTimeOffset = kCMTimeZero;
+  _outputForOffsetAdjusting = _captureVideoOutput.avOutput;
+  _lastAppendedVideoSampleTime = kCMTimeNegativeInfinity;
 
   completion(nil);
 }
@@ -528,8 +555,7 @@ - (void)stopVideoRecordingWithCompletion:(void (^)(NSString *_Nullable,
 
 - (void)pauseVideoRecording {
   _isRecordingPaused = YES;
-  _videoIsDisconnected = YES;
-  _audioIsDisconnected = YES;
+  _isRecordingDisconnected = YES;
 }
 
 - (void)resumeVideoRecording {
diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation_objc/FLTCaptureSession.m b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation_objc/FLTCaptureSession.m
index 4812d883476..a8860f71aa9 100644
--- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation_objc/FLTCaptureSession.m
+++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation_objc/FLTCaptureSession.m
@@ -34,6 +34,10 @@ - (void)stopRunning {
   [_captureSession stopRunning];
 }
 
+- (BOOL)running {
+  return _captureSession.running;
+}
+
 - (BOOL)automaticallyConfiguresApplicationAudioSession {
   return _captureSession.automaticallyConfiguresApplicationAudioSession;
 }
diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation_objc/include/camera_avfoundation/FLTCam.h b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation_objc/include/camera_avfoundation/FLTCam.h
index 12b22d615f8..36ad2b22d09 100644
--- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation_objc/include/camera_avfoundation/FLTCam.h
+++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation_objc/include/camera_avfoundation/FLTCam.h
@@ -42,10 +42,14 @@ NS_ASSUME_NONNULL_BEGIN
 @property(assign, nonatomic) BOOL isRecording;
 @property(assign, nonatomic) BOOL isRecordingPaused;
 @property(strong, nonatomic, nullable) NSObject<FLTAssetWriter> *videoWriter;
-@property(assign, nonatomic) BOOL videoIsDisconnected;
-@property(assign, nonatomic) BOOL audioIsDisconnected;
-@property(assign, nonatomic) CMTime videoTimeOffset;
-@property(assign, nonatomic) CMTime audioTimeOffset;
+/// Whether the recording is disconnected.
+@property(assign, nonatomic) BOOL isRecordingDisconnected;
+/// Represents sum of all pauses/interruptions during recording.
+@property(assign, nonatomic) CMTime recordingTimeOffset;
+/// Output to use for adjusting of recording time offset.
+@property(nonatomic) AVCaptureOutput *outputForOffsetAdjusting;
+/// Time of the last appended video sample.
+@property(assign, nonatomic) CMTime lastAppendedVideoSampleTime;
 @property(strong, nonatomic, nullable) NSObject<FLTAssetWriterInput> *videoWriterInput;
 @property(strong, nonatomic, nullable) NSObject<FLTAssetWriterInput> *audioWriterInput;
 @property(nullable) NSObject<FLTAssetWriterInputPixelBufferAdaptor> *videoAdaptor;
diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation_objc/include/camera_avfoundation/FLTCaptureSession.h b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation_objc/include/camera_avfoundation/FLTCaptureSession.h
index 473f1a2ef0a..dfa3dcab1e3 100644
--- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation_objc/include/camera_avfoundation/FLTCaptureSession.h
+++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation_objc/include/camera_avfoundation/FLTCaptureSession.h
@@ -12,10 +12,12 @@ NS_ASSUME_NONNULL_BEGIN
 /// It exists to allow replacing AVCaptureSession in tests.
 @protocol FLTCaptureSession <NSObject>
 
+@property(nonatomic, readonly) AVCaptureSession *captureSession;
 @property(nonatomic, copy) AVCaptureSessionPreset sessionPreset;
 @property(nonatomic, readonly) NSArray<AVCaptureInput *> *inputs;
 @property(nonatomic, readonly) NSArray<AVCaptureOutput *> *outputs;
 @property(nonatomic, assign) BOOL automaticallyConfiguresApplicationAudioSession;
+@property(nonatomic, readonly) BOOL running;
 
 - (void)beginConfiguration;
 - (void)commitConfiguration;
diff --git a/packages/camera/camera_avfoundation/pubspec.yaml b/packages/camera/camera_avfoundation/pubspec.yaml
index c4a77a5a9a8..779bc7fb09f 100644
--- a/packages/camera/camera_avfoundation/pubspec.yaml
+++ b/packages/camera/camera_avfoundation/pubspec.yaml
@@ -2,7 +2,7 @@ name: camera_avfoundation
 description: iOS implementation of the camera plugin.
 repository: https://github.com/flutter/packages/tree/main/packages/camera/camera_avfoundation
 issue_tracker: https://github.com/flutter/flutter/issues?q=is%3Aissue+is%3Aopen+label%3A%22p%3A+camera%22
-version: 0.9.20+1
+version: 0.9.20+2
 
 environment:
   sdk: ^3.6.0
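Editor's note: the patch registers its interruption and runtime-error observers in Objective-C (`FLTCam.m` above). The sketch below is a hedged Swift rendering of the same wiring, not code from the patch; `InterruptionObserver`, `onDisconnect`, and `onError` are illustrative stand-ins for FLTCam's `isRecordingDisconnected` flag and `reportErrorMessage:`.

```swift
import AVFoundation

/// Illustrative stand-in for FLTCam's notification handling.
final class InterruptionObserver: NSObject {
  /// Stand-in for setting isRecordingDisconnected = YES.
  var onDisconnect: () -> Void = {}
  /// Stand-in for reportErrorMessage:.
  var onError: (String) -> Void = { _ in }

  init(sessions: [AVCaptureSession]) {
    super.init()
    // Observe both the video and the audio session, as the patch does.
    for session in sessions {
      NotificationCenter.default.addObserver(
        self, selector: #selector(wasInterrupted(_:)),
        name: AVCaptureSession.wasInterruptedNotification, object: session)
      NotificationCenter.default.addObserver(
        self, selector: #selector(runtimeError(_:)),
        name: AVCaptureSession.runtimeErrorNotification, object: session)
    }
  }

  deinit {
    // Mirrors the removeObserver call the patch adds to -dealloc.
    NotificationCenter.default.removeObserver(self)
  }

  @objc private func wasInterrupted(_ notification: Notification) {
    // An interruption (for example an incoming call) disconnects the recording,
    // so the next sample re-measures the shared time offset.
    onDisconnect()
  }

  @objc private func runtimeError(_ notification: Notification) {
    onError("\(String(describing: notification.userInfo?[AVCaptureSessionErrorKey]))")
  }
}
```

The `SampleBufferTests` above exercise exactly this path by posting `AVCaptureSession.wasInterruptedNotification` against the mock's underlying `captureSession`, which is why the mock and the `FLTCaptureSession` protocol now expose that property.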