From 46890fc0b43a7a188e500ff27d0df98a54930edd Mon Sep 17 00:00:00 2001 From: Thierry Date: Wed, 16 Mar 2022 23:53:34 -0400 Subject: [PATCH 01/10] [fix] Leak in coreMotionManager.startDeviceMotionUpdates block --- Sources/CameraManager.swift | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Sources/CameraManager.swift b/Sources/CameraManager.swift index a568cea..b035317 100644 --- a/Sources/CameraManager.swift +++ b/Sources/CameraManager.swift @@ -1484,8 +1484,8 @@ open class CameraManager: NSObject, AVCaptureFileOutputRecordingDelegate, UIGest coreMotionManager = CMMotionManager() coreMotionManager.deviceMotionUpdateInterval = 1 / 30.0 if coreMotionManager.isDeviceMotionAvailable { - coreMotionManager.startDeviceMotionUpdates(to: OperationQueue()) { motion, _ in - guard let motion = motion else { return } + coreMotionManager.startDeviceMotionUpdates(to: OperationQueue()) { [weak self] motion, _ in + guard let motion = motion, let self = self else { return } let x = motion.gravity.x let y = motion.gravity.y let previousOrientation = self.deviceOrientation From 30aef8e9a630d191d0372f15f6c951da4819165c Mon Sep 17 00:00:00 2001 From: Thierry Date: Wed, 16 Mar 2022 23:58:49 -0400 Subject: [PATCH 02/10] Added uWide camera --- Sources/CameraManager.swift | 333 ++++++++++++++++++++---------------- 1 file changed, 181 insertions(+), 152 deletions(-) diff --git a/Sources/CameraManager.swift b/Sources/CameraManager.swift index b035317..1fa71eb 100644 --- a/Sources/CameraManager.swift +++ b/Sources/CameraManager.swift @@ -21,7 +21,7 @@ public enum CameraState { } public enum CameraDevice { - case front, back + case front, back, uWide } public enum CameraFlashMode: Int { @@ -66,22 +66,22 @@ public enum CaptureContent { extension CaptureContent { public var asImage: UIImage? 
{ switch self { - case let .image(image): return image - case let .imageData(data): return UIImage(data: data) - case let .asset(asset): - if let data = getImageData(fromAsset: asset) { - return UIImage(data: data) - } else { - return nil + case let .image(image): return image + case let .imageData(data): return UIImage(data: data) + case let .asset(asset): + if let data = getImageData(fromAsset: asset) { + return UIImage(data: data) + } else { + return nil } } } public var asData: Data? { switch self { - case let .image(image): return image.jpegData(compressionQuality: 1.0) - case let .imageData(data): return data - case let .asset(asset): return getImageData(fromAsset: asset) + case let .image(image): return image.jpegData(compressionQuality: 1.0) + case let .imageData(data): return data + case let .asset(asset): return getImageData(fromAsset: asset) } } @@ -108,6 +108,7 @@ public enum CaptureError: Error { } /// Class for handling iDevices custom camera usage +@available(iOS 13.0, *) open class CameraManager: NSObject, AVCaptureFileOutputRecordingDelegate, UIGestureRecognizerDelegate { // MARK: - Public properties @@ -220,6 +221,12 @@ open class CameraManager: NSObject, AVCaptureFileOutputRecordingDelegate, UIGest return !frontDevices.isEmpty }() + /// Property to determine if current device has Ultra Wide camera. + open var hasuWideCamera: Bool = { + let frontDevices = AVCaptureDevice.default(.builtInUltraWideCamera, for: .video, position: .back) + return (frontDevices != nil) + }() + /// Property to determine if current device has flash. open var hasFlash: Bool = { let hasFlashDevices = AVCaptureDevice.videoDevices.filter { $0.hasFlash } @@ -351,6 +358,10 @@ open class CameraManager: NSObject, AVCaptureFileOutputRecordingDelegate, UIGest AVCaptureDevice.videoDevices.filter { $0.position == .back }.first }() + fileprivate lazy var uWideCameraDevice: AVCaptureDevice? 
= { + AVCaptureDevice.default(.builtInUltraWideCamera, for: .video, position: .back) + }() + fileprivate lazy var mic: AVCaptureDevice? = { AVCaptureDevice.default(for: AVMediaType.audio) }() @@ -512,10 +523,10 @@ open class CameraManager: NSObject, AVCaptureFileOutputRecordingDelegate, UIGest open func capturePictureWithCompletion(_ imageCompletion: @escaping (UIImage?, NSError?) -> Void) { func completion(_ result: CaptureResult) { switch result { - case let .success(content): - imageCompletion(content.asImage, nil) - case .failure: - imageCompletion(nil, NSError()) + case let .success(content): + imageCompletion(content.asImage, nil) + case .failure: + imageCompletion(nil, NSError()) } } @@ -663,10 +674,10 @@ open class CameraManager: NSObject, AVCaptureFileOutputRecordingDelegate, UIGest open func capturePictureDataWithCompletion(_ imageCompletion: @escaping (Data?, NSError?) -> Void) { func completion(_ result: CaptureResult) { switch result { - case let .success(content): - imageCompletion(content.asData, nil) - case .failure: - imageCompletion(nil, NSError()) + case let .success(content): + imageCompletion(content.asData, nil) + case .failure: + imageCompletion(nil, NSError()) } } capturePictureDataWithCompletion(completion) @@ -693,9 +704,9 @@ open class CameraManager: NSObject, AVCaptureFileOutputRecordingDelegate, UIGest sessionQueue.async { let stillImageOutput = self._getStillImageOutput() if let connection = stillImageOutput.connection(with: AVMediaType.video), - connection.isEnabled { + connection.isEnabled { if self.cameraDevice == CameraDevice.front, connection.isVideoMirroringSupported, - self.shouldFlipFrontCameraImage { + self.shouldFlipFrontCameraImage { connection.isVideoMirrored = true } if connection.isVideoOrientationSupported { @@ -726,12 +737,12 @@ open class CameraManager: NSObject, AVCaptureFileOutputRecordingDelegate, UIGest fileprivate func _imageOrientation(forDeviceOrientation deviceOrientation: UIDeviceOrientation, isMirrored: Bool) 
-> UIImage.Orientation { switch deviceOrientation { - case .landscapeLeft: - return isMirrored ? .upMirrored : .up - case .landscapeRight: - return isMirrored ? .downMirrored : .down - default: - break + case .landscapeLeft: + return isMirrored ? .upMirrored : .up + case .landscapeRight: + return isMirrored ? .downMirrored : .down + default: + break } return isMirrored ? .leftMirrored : .right @@ -745,7 +756,7 @@ open class CameraManager: NSObject, AVCaptureFileOutputRecordingDelegate, UIGest _show(NSLocalizedString("Capture session output still image", comment: ""), message: NSLocalizedString("I can only take pictures", comment: "")) return } - + let videoOutput = _getMovieOutput() if shouldUseLocationServices { @@ -769,7 +780,7 @@ open class CameraManager: NSObject, AVCaptureFileOutputRecordingDelegate, UIGest captureSession.addConnection(AVCaptureConnection(inputPorts: [inputPort], output: videoOutput)) } - + _updateIlluminationMode(flashMode) videoOutput.startRecording(to: _tempFilePath(), recordingDelegate: self) @@ -780,7 +791,7 @@ open class CameraManager: NSObject, AVCaptureFileOutputRecordingDelegate, UIGest */ open func stopVideoRecording(_ completion: ((_ videoURL: URL?, _ error: NSError?) -> Void)?) 
{ if let runningMovieOutput = movieOutput, - runningMovieOutput.isRecording { + runningMovieOutput.isRecording { videoCompletion = completion runningMovieOutput.stopRecording() } @@ -797,12 +808,12 @@ open class CameraManager: NSObject, AVCaptureFileOutputRecordingDelegate, UIGest */ open func startQRCodeDetection(_ handler: @escaping QRCodeDetectionHandler) { guard let captureSession = self.captureSession - else { return } + else { return } let output = AVCaptureMetadataOutput() guard captureSession.canAddOutput(output) - else { return } + else { return } qrCodeDetectionHandler = handler captureSession.addOutput(output) @@ -871,6 +882,8 @@ open class CameraManager: NSObject, AVCaptureFileOutputRecordingDelegate, UIGest return device.hasFlash } else if device.position == .front, cameraDevice == .front { return device.hasFlash + } else if device.position == .back, cameraDevice == .uWide { + return device.hasFlash } } return false @@ -938,8 +951,8 @@ open class CameraManager: NSObject, AVCaptureFileOutputRecordingDelegate, UIGest @objc fileprivate func _zoomStart(_ recognizer: UIPinchGestureRecognizer) { guard let view = embeddingView, - let previewLayer = previewLayer - else { return } + let previewLayer = previewLayer + else { return } var allTouchesOnPreviewLayer = true let numTouch = recognizer.numberOfTouches @@ -961,10 +974,12 @@ open class CameraManager: NSObject, AVCaptureFileOutputRecordingDelegate, UIGest let device: AVCaptureDevice? switch cameraDevice { - case .back: - device = backCameraDevice - case .front: - device = frontCameraDevice + case .back: + device = backCameraDevice + case .front: + device = frontCameraDevice + case .uWide: + device = uWideCameraDevice } do { @@ -1008,10 +1023,12 @@ open class CameraManager: NSObject, AVCaptureFileOutputRecordingDelegate, UIGest let device: AVCaptureDevice? 
switch cameraDevice { - case .back: - device = backCameraDevice - case .front: - device = frontCameraDevice + case .back: + device = backCameraDevice + case .front: + device = frontCameraDevice + case .uWide: + device = uWideCameraDevice } _changeExposureMode(mode: .continuousAutoExposure) @@ -1019,8 +1036,8 @@ open class CameraManager: NSObject, AVCaptureFileOutputRecordingDelegate, UIGest exposureValue = 0.5 if let validDevice = device, - let validPreviewLayer = previewLayer, - let view = recognizer.view { + let validPreviewLayer = previewLayer, + let view = recognizer.view { let pointInPreviewLayer = view.layer.convert(recognizer.location(in: view), to: validPreviewLayer) let pointOfInterest = validPreviewLayer.captureDevicePointConverted(fromLayerPoint: pointInPreviewLayer) @@ -1173,10 +1190,12 @@ open class CameraManager: NSObject, AVCaptureFileOutputRecordingDelegate, UIGest let device: AVCaptureDevice? switch cameraDevice { - case .back: - device = backCameraDevice - case .front: - device = frontCameraDevice + case .back: + device = backCameraDevice + case .front: + device = frontCameraDevice + case .uWide: + device = uWideCameraDevice } if device?.exposureMode == mode { return @@ -1200,10 +1219,12 @@ open class CameraManager: NSObject, AVCaptureFileOutputRecordingDelegate, UIGest let device: AVCaptureDevice? 
switch cameraDevice { - case .back: - device = backCameraDevice - case .front: - device = frontCameraDevice + case .back: + device = backCameraDevice + case .front: + device = frontCameraDevice + case .uWide: + device = uWideCameraDevice } guard let videoDevice = device else { @@ -1251,7 +1272,7 @@ open class CameraManager: NSObject, AVCaptureFileOutputRecordingDelegate, UIGest let newMovieOutput = AVCaptureMovieFileOutput() newMovieOutput.movieFragmentInterval = CMTime.invalid - + movieOutput = newMovieOutput _setupVideoConnection() @@ -1273,7 +1294,7 @@ open class CameraManager: NSObject, AVCaptureFileOutputRecordingDelegate, UIGest if videoConnection.isVideoMirroringSupported { videoConnection.isVideoMirrored = (cameraDevice == CameraDevice.front && shouldFlipFrontCameraImage) } - + if videoConnection.isVideoStabilizationSupported { videoConnection.preferredVideoStabilizationMode = videoStabilisationMode } @@ -1282,16 +1303,16 @@ open class CameraManager: NSObject, AVCaptureFileOutputRecordingDelegate, UIGest } } } - + fileprivate func _getStillImageOutput() -> AVCaptureStillImageOutput { if let stillImageOutput = stillImageOutput, let connection = stillImageOutput.connection(with: AVMediaType.video), - connection.isActive { + connection.isActive { return stillImageOutput } let newStillImageOutput = AVCaptureStillImageOutput() stillImageOutput = newStillImageOutput if let captureSession = captureSession, - captureSession.canAddOutput(newStillImageOutput) { + captureSession.canAddOutput(newStillImageOutput) { captureSession.beginConfiguration() captureSession.addOutput(newStillImageOutput) captureSession.commitConfiguration() @@ -1303,24 +1324,24 @@ open class CameraManager: NSObject, AVCaptureFileOutputRecordingDelegate, UIGest var currentConnection: AVCaptureConnection? 
switch cameraOutputMode { - case .stillImage: - currentConnection = stillImageOutput?.connection(with: AVMediaType.video) - case .videoOnly, .videoWithMic: - currentConnection = _getMovieOutput().connection(with: AVMediaType.video) - if let location = locationManager?.latestLocation { - _setVideoWithGPS(forLocation: location) + case .stillImage: + currentConnection = stillImageOutput?.connection(with: AVMediaType.video) + case .videoOnly, .videoWithMic: + currentConnection = _getMovieOutput().connection(with: AVMediaType.video) + if let location = locationManager?.latestLocation { + _setVideoWithGPS(forLocation: location) } } if let validPreviewLayer = previewLayer { if !shouldKeepViewAtOrientationChanges { if let validPreviewLayerConnection = validPreviewLayer.connection, - validPreviewLayerConnection.isVideoOrientationSupported { + validPreviewLayerConnection.isVideoOrientationSupported { validPreviewLayerConnection.videoOrientation = _currentPreviewVideoOrientation() } } if let validOutputLayerConnection = currentConnection, - validOutputLayerConnection.isVideoOrientationSupported { + validOutputLayerConnection.isVideoOrientationSupported { validOutputLayerConnection.videoOrientation = _currentCaptureVideoOrientation() } if !shouldKeepViewAtOrientationChanges && cameraIsObservingDeviceOrientation { @@ -1365,38 +1386,38 @@ open class CameraManager: NSObject, AVCaptureFileOutputRecordingDelegate, UIGest fileprivate func _videoOrientation(forDeviceOrientation deviceOrientation: UIDeviceOrientation) -> AVCaptureVideoOrientation { switch deviceOrientation { - case .landscapeLeft: - return .landscapeRight - case .landscapeRight: - return .landscapeLeft - case .portraitUpsideDown: - return .portraitUpsideDown - case .faceUp: - /* - Attempt to keep the existing orientation. 
If the device was landscape, then face up - getting the orientation from the stats bar would fail every other time forcing it - to default to portrait which would introduce flicker into the preview layer. This - would not happen if it was in portrait then face up - */ - if let validPreviewLayer = previewLayer, let connection = validPreviewLayer.connection { - return connection.videoOrientation // Keep the existing orientation - } - // Could not get existing orientation, try to get it from stats bar - return _videoOrientationFromStatusBarOrientation() - case .faceDown: - /* - Attempt to keep the existing orientation. If the device was landscape, then face down - getting the orientation from the stats bar would fail every other time forcing it - to default to portrait which would introduce flicker into the preview layer. This - would not happen if it was in portrait then face down - */ - if let validPreviewLayer = previewLayer, let connection = validPreviewLayer.connection { - return connection.videoOrientation // Keep the existing orientation - } - // Could not get existing orientation, try to get it from stats bar - return _videoOrientationFromStatusBarOrientation() - default: - return .portrait + case .landscapeLeft: + return .landscapeRight + case .landscapeRight: + return .landscapeLeft + case .portraitUpsideDown: + return .portraitUpsideDown + case .faceUp: + /* + Attempt to keep the existing orientation. If the device was landscape, then face up + getting the orientation from the stats bar would fail every other time forcing it + to default to portrait which would introduce flicker into the preview layer. 
This + would not happen if it was in portrait then face up + */ + if let validPreviewLayer = previewLayer, let connection = validPreviewLayer.connection { + return connection.videoOrientation // Keep the existing orientation + } + // Could not get existing orientation, try to get it from stats bar + return _videoOrientationFromStatusBarOrientation() + case .faceDown: + /* + Attempt to keep the existing orientation. If the device was landscape, then face down + getting the orientation from the stats bar would fail every other time forcing it + to default to portrait which would introduce flicker into the preview layer. This + would not happen if it was in portrait then face down + */ + if let validPreviewLayer = previewLayer, let connection = validPreviewLayer.connection { + return connection.videoOrientation // Keep the existing orientation + } + // Could not get existing orientation, try to get it from stats bar + return _videoOrientationFromStatusBarOrientation() + default: + return .portrait } } @@ -1416,16 +1437,16 @@ open class CameraManager: NSObject, AVCaptureFileOutputRecordingDelegate, UIGest } switch statusBarOrientation { - case .landscapeLeft: - return .landscapeLeft - case .landscapeRight: - return .landscapeRight - case .portrait: - return .portrait - case .portraitUpsideDown: - return .portraitUpsideDown - default: - return .portrait + case .landscapeLeft: + return .landscapeLeft + case .landscapeRight: + return .landscapeRight + case .portrait: + return .portrait + case .portraitUpsideDown: + return .portraitUpsideDown + default: + return .portrait } } @@ -1547,6 +1568,8 @@ open class CameraManager: NSObject, AVCaptureFileOutputRecordingDelegate, UIGest maxZoom = backCameraDevice.activeFormat.videoMaxZoomFactor } else if cameraDevice == .front, let frontCameraDevice = frontCameraDevice { maxZoom = frontCameraDevice.activeFormat.videoMaxZoomFactor + } else if cameraDevice == .uWide, let uWideCameraDevice = uWideCameraDevice { + maxZoom = 
uWideCameraDevice.activeFormat.videoMaxZoomFactor } maxZoomScale = maxZoom @@ -1577,16 +1600,16 @@ open class CameraManager: NSObject, AVCaptureFileOutputRecordingDelegate, UIGest if let cameraOutputToRemove = oldCameraOutputMode { // remove current setting switch cameraOutputToRemove { - case .stillImage: - if let validStillImageOutput = stillImageOutput { - captureSession?.removeOutput(validStillImageOutput) + case .stillImage: + if let validStillImageOutput = stillImageOutput { + captureSession?.removeOutput(validStillImageOutput) } - case .videoOnly, .videoWithMic: - if let validMovieOutput = movieOutput { - captureSession?.removeOutput(validMovieOutput) - } - if cameraOutputToRemove == .videoWithMic { - _removeMicInput() + case .videoOnly, .videoWithMic: + if let validMovieOutput = movieOutput { + captureSession?.removeOutput(validMovieOutput) + } + if cameraOutputToRemove == .videoWithMic { + _removeMicInput() } } } @@ -1595,22 +1618,22 @@ open class CameraManager: NSObject, AVCaptureFileOutputRecordingDelegate, UIGest // configure new devices switch newCameraOutputMode { - case .stillImage: - let validStillImageOutput = _getStillImageOutput() - if let captureSession = captureSession, - captureSession.canAddOutput(validStillImageOutput) { - captureSession.addOutput(validStillImageOutput) + case .stillImage: + let validStillImageOutput = _getStillImageOutput() + if let captureSession = captureSession, + captureSession.canAddOutput(validStillImageOutput) { + captureSession.addOutput(validStillImageOutput) + } + case .videoOnly, .videoWithMic: + let videoMovieOutput = _getMovieOutput() + if let captureSession = captureSession, + captureSession.canAddOutput(videoMovieOutput) { + captureSession.addOutput(videoMovieOutput) } - case .videoOnly, .videoWithMic: - let videoMovieOutput = _getMovieOutput() - if let captureSession = captureSession, - captureSession.canAddOutput(videoMovieOutput) { - captureSession.addOutput(videoMovieOutput) - } - - if newCameraOutputMode 
== .videoWithMic, - let validMic = _deviceInputFromDevice(mic) { - captureSession?.addInput(validMic) + + if newCameraOutputMode == .videoWithMic, + let validMic = _deviceInputFromDevice(mic) { + captureSession?.addInput(validMic) } } captureSession?.commitConfiguration() @@ -1650,7 +1673,7 @@ open class CameraManager: NSObject, AVCaptureFileOutputRecordingDelegate, UIGest } if let validEmbeddingView = embeddingView, - let validPreviewLayer = previewLayer { + let validPreviewLayer = previewLayer { var tempView = UIView() if CameraManager._blurSupported() { @@ -1764,7 +1787,7 @@ open class CameraManager: NSObject, AVCaptureFileOutputRecordingDelegate, UIGest options: UIView.AnimationOptions.transitionFlipFromLeft, animations: nil, completion: { (_) -> Void in - self._removeCameraTransistionView() + self._removeCameraTransistionView() }) } } @@ -1777,9 +1800,9 @@ open class CameraManager: NSObject, AVCaptureFileOutputRecordingDelegate, UIGest UIView.animate(withDuration: 0.5, animations: { () -> Void in - - cameraTransitionView.alpha = 0.0 - + + cameraTransitionView.alpha = 0.0 + }, completion: { (_) -> Void in self.transitionAnimating = false @@ -1803,17 +1826,22 @@ open class CameraManager: NSObject, AVCaptureFileOutputRecordingDelegate, UIGest } switch cameraDevice { - case .front: - if hasFrontCamera { - if let validFrontDevice = _deviceInputFromDevice(frontCameraDevice), - !inputs.contains(validFrontDevice) { - validCaptureSession.addInput(validFrontDevice) - } + case .front: + if hasFrontCamera { + if let validFrontDevice = _deviceInputFromDevice(frontCameraDevice), + !inputs.contains(validFrontDevice) { + validCaptureSession.addInput(validFrontDevice) + } + } + case .back: + if let validBackDevice = _deviceInputFromDevice(backCameraDevice), + !inputs.contains(validBackDevice) { + validCaptureSession.addInput(validBackDevice) } - case .back: - if let validBackDevice = _deviceInputFromDevice(backCameraDevice), - !inputs.contains(validBackDevice) { - 
validCaptureSession.addInput(validBackDevice) + case .uWide: + if let validuWideDevice = _deviceInputFromDevice(uWideCameraDevice), + !inputs.contains(validuWideDevice) { + validCaptureSession.addInput(validuWideDevice) } } } @@ -1918,7 +1946,7 @@ open class CameraManager: NSObject, AVCaptureFileOutputRecordingDelegate, UIGest for input in inputs { if let deviceInput = input as? AVCaptureDeviceInput, - deviceInput.device == mic { + deviceInput.device == mic { captureSession?.removeInput(deviceInput) break } @@ -2044,7 +2072,7 @@ extension PHPhotoLibrary { createAssetRequest.location = location if let album = album { guard let albumChangeRequest = PHAssetCollectionChangeRequest(for: album), - let photoPlaceholder = createAssetRequest.placeholderForCreatedAsset else { return } + let photoPlaceholder = createAssetRequest.placeholderForCreatedAsset else { return } placeholder = photoPlaceholder let fastEnumeration = NSArray(array: [photoPlaceholder] as [PHObjectPlaceholder]) albumChangeRequest.addAssets(fastEnumeration) @@ -2070,7 +2098,7 @@ extension PHPhotoLibrary { createAssetRequest.location = location if let album = album { guard let albumChangeRequest = PHAssetCollectionChangeRequest(for: album), - let photoPlaceholder = createAssetRequest.placeholderForCreatedAsset else { return } + let photoPlaceholder = createAssetRequest.placeholderForCreatedAsset else { return } placeholder = photoPlaceholder let fastEnumeration = NSArray(array: [photoPlaceholder] as [PHObjectPlaceholder]) albumChangeRequest.addAssets(fastEnumeration) @@ -2097,7 +2125,7 @@ extension PHPhotoLibrary { let createAssetRequest = PHAssetChangeRequest.creationRequestForAsset(from: image) createAssetRequest.creationDate = Date() guard let albumChangeRequest = PHAssetCollectionChangeRequest(for: album), - let photoPlaceholder = createAssetRequest.placeholderForCreatedAsset else { return } + let photoPlaceholder = createAssetRequest.placeholderForCreatedAsset else { return } placeholder = 
photoPlaceholder let fastEnumeration = NSArray(array: [photoPlaceholder] as [PHObjectPlaceholder]) albumChangeRequest.addAssets(fastEnumeration) @@ -2117,6 +2145,7 @@ extension PHPhotoLibrary { } } +@available(iOS 13.0, *) extension CameraManager: AVCaptureMetadataOutputObjectsDelegate { /** Called when a QR code is detected. @@ -2124,7 +2153,7 @@ extension CameraManager: AVCaptureMetadataOutputObjectsDelegate { public func metadataOutput(_: AVCaptureMetadataOutput, didOutput metadataObjects: [AVMetadataObject], from _: AVCaptureConnection) { // Check if there is a registered handler. guard let handler = qrCodeDetectionHandler - else { return } + else { return } // Get the detection result. let stringValues = metadataObjects @@ -2132,7 +2161,7 @@ extension CameraManager: AVCaptureMetadataOutputObjectsDelegate { .compactMap { $0.stringValue } guard let stringValue = stringValues.first - else { return } + else { return } handler(.success(stringValue)) } From 38d2b6cb410032c5a0b7fc246d3a495f83060786 Mon Sep 17 00:00:00 2001 From: Thierry Date: Sun, 1 Jan 2023 16:39:20 -0500 Subject: [PATCH 03/10] [fix] previewLayer is now public --- Sources/CameraManager.swift | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Sources/CameraManager.swift b/Sources/CameraManager.swift index 1fa71eb..5389840 100644 --- a/Sources/CameraManager.swift +++ b/Sources/CameraManager.swift @@ -368,7 +368,7 @@ open class CameraManager: NSObject, AVCaptureFileOutputRecordingDelegate, UIGest fileprivate var stillImageOutput: AVCaptureStillImageOutput? fileprivate var movieOutput: AVCaptureMovieFileOutput? - fileprivate var previewLayer: AVCaptureVideoPreviewLayer? + open private(set) var previewLayer: AVCaptureVideoPreviewLayer? fileprivate var library: PHPhotoLibrary? 
fileprivate var cameraIsSetup = false From 8d25a00b74b842f8e98d80ded1f54cec868cef49 Mon Sep 17 00:00:00 2001 From: Thierry Date: Mon, 20 Mar 2023 09:33:21 -0400 Subject: [PATCH 04/10] [fix] Crash in MotionManager orientation updates --- Sources/CameraManager.swift | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/Sources/CameraManager.swift b/Sources/CameraManager.swift index 5389840..da56b88 100644 --- a/Sources/CameraManager.swift +++ b/Sources/CameraManager.swift @@ -1505,8 +1505,10 @@ open class CameraManager: NSObject, AVCaptureFileOutputRecordingDelegate, UIGest coreMotionManager = CMMotionManager() coreMotionManager.deviceMotionUpdateInterval = 1 / 30.0 if coreMotionManager.isDeviceMotionAvailable { - coreMotionManager.startDeviceMotionUpdates(to: OperationQueue()) { [weak self] motion, _ in + coreMotionManager.startDeviceMotionUpdates(to: OperationQueue()) { [weak self] motion, error in guard let motion = motion, let self = self else { return } + guard error == nil else { return } + let x = motion.gravity.x let y = motion.gravity.y let previousOrientation = self.deviceOrientation From f3725b07f94ad28715d777a9faa83627ce7c2561 Mon Sep 17 00:00:00 2001 From: Thierry Date: Sat, 8 Apr 2023 11:23:26 -0400 Subject: [PATCH 05/10] [fix] Leaks --- Sources/CameraManager.swift | 122 +++++++++++++++++++----------------- 1 file changed, 66 insertions(+), 56 deletions(-) diff --git a/Sources/CameraManager.swift b/Sources/CameraManager.swift index da56b88..2ad89a1 100644 --- a/Sources/CameraManager.swift +++ b/Sources/CameraManager.swift @@ -475,9 +475,9 @@ open class CameraManager: NSObject, AVCaptureFileOutputRecordingDelegate, UIGest open func resumeCaptureSession() { if let validCaptureSession = captureSession { if !validCaptureSession.isRunning, cameraIsSetup { - sessionQueue.async { + sessionQueue.async { [weak self] in validCaptureSession.startRunning() - self._startFollowingDeviceOrientation() + self?._startFollowingDeviceOrientation() } } } else 
{ @@ -539,7 +539,7 @@ open class CameraManager: NSObject, AVCaptureFileOutputRecordingDelegate, UIGest :param: imageCompletion Completion block containing the captured UIImage */ open func capturePictureWithCompletion(_ imageCompletion: @escaping (CaptureResult) -> Void) { - capturePictureDataWithCompletion { result in + capturePictureDataWithCompletion { [weak self] result in guard let imageData = result.imageData else { if case let .failure(error) = result { @@ -551,12 +551,14 @@ open class CameraManager: NSObject, AVCaptureFileOutputRecordingDelegate, UIGest return } - if self.animateShutter { - self._performShutterAnimation { + if let self = self { + if self.animateShutter { + self._performShutterAnimation { + self._capturePicture(imageData, imageCompletion) + } + } else { self._capturePicture(imageData, imageCompletion) } - } else { - self._capturePicture(imageData, imageCompletion) } } } @@ -701,36 +703,38 @@ open class CameraManager: NSObject, AVCaptureFileOutputRecordingDelegate, UIGest _updateIlluminationMode(flashMode) - sessionQueue.async { - let stillImageOutput = self._getStillImageOutput() - if let connection = stillImageOutput.connection(with: AVMediaType.video), - connection.isEnabled { - if self.cameraDevice == CameraDevice.front, connection.isVideoMirroringSupported, - self.shouldFlipFrontCameraImage { - connection.isVideoMirrored = true - } - if connection.isVideoOrientationSupported { - connection.videoOrientation = self._currentCaptureVideoOrientation() - } - - stillImageOutput.captureStillImageAsynchronously(from: connection, completionHandler: { [weak self] sample, error in - - if let error = error { - self?._show(NSLocalizedString("Error", comment: ""), message: error.localizedDescription) - imageCompletion(.failure(error)) - return + sessionQueue.async { [weak self] in + if let self = self { + let stillImageOutput = self._getStillImageOutput() + if let connection = stillImageOutput.connection(with: AVMediaType.video), + connection.isEnabled 
{ + if self.cameraDevice == CameraDevice.front, connection.isVideoMirroringSupported, + self.shouldFlipFrontCameraImage { + connection.isVideoMirrored = true } - - guard let sample = sample else { imageCompletion(.failure(CaptureError.noSampleBuffer)); return } - if let imageData = AVCaptureStillImageOutput.jpegStillImageNSDataRepresentation(sample) { - imageCompletion(CaptureResult(imageData)) - } else { - imageCompletion(.failure(CaptureError.noImageData)) + if connection.isVideoOrientationSupported { + connection.videoOrientation = self._currentCaptureVideoOrientation() } - }) - } else { - imageCompletion(.failure(CaptureError.noVideoConnection)) + stillImageOutput.captureStillImageAsynchronously(from: connection, completionHandler: { [weak self] sample, error in + + if let error = error { + self?._show(NSLocalizedString("Error", comment: ""), message: error.localizedDescription) + imageCompletion(.failure(error)) + return + } + + guard let sample = sample else { imageCompletion(.failure(CaptureError.noSampleBuffer)); return } + if let imageData = AVCaptureStillImageOutput.jpegStillImageNSDataRepresentation(sample) { + imageCompletion(CaptureResult(imageData)) + } else { + imageCompletion(.failure(CaptureError.noImageData)) + } + + }) + } else { + imageCompletion(.failure(CaptureError.noVideoConnection)) + } } } } @@ -934,10 +938,12 @@ open class CameraManager: NSObject, AVCaptureFileOutputRecordingDelegate, UIGest fileprivate lazy var zoomGesture = UIPinchGestureRecognizer() fileprivate func attachZoom(_ view: UIView) { - DispatchQueue.main.async { - self.zoomGesture.addTarget(self, action: #selector(CameraManager._zoomStart(_:))) - view.addGestureRecognizer(self.zoomGesture) - self.zoomGesture.delegate = self + DispatchQueue.main.async { [weak self] in + if let self = self { + self.zoomGesture.addTarget(self, action: #selector(CameraManager._zoomStart(_:))) + view.addGestureRecognizer(self.zoomGesture) + self.zoomGesture.delegate = self + } } } @@ -1002,20 
+1008,24 @@ open class CameraManager: NSObject, AVCaptureFileOutputRecordingDelegate, UIGest fileprivate lazy var focusGesture = UITapGestureRecognizer() fileprivate func attachFocus(_ view: UIView) { - DispatchQueue.main.async { - self.focusGesture.addTarget(self, action: #selector(CameraManager._focusStart(_:))) - view.addGestureRecognizer(self.focusGesture) - self.focusGesture.delegate = self + DispatchQueue.main.async { [weak self] in + if let self = self { + self.focusGesture.addTarget(self, action: #selector(CameraManager._focusStart(_:))) + view.addGestureRecognizer(self.focusGesture) + self.focusGesture.delegate = self + } } } fileprivate lazy var exposureGesture = UIPanGestureRecognizer() fileprivate func attachExposure(_ view: UIView) { - DispatchQueue.main.async { - self.exposureGesture.addTarget(self, action: #selector(CameraManager._exposureStart(_:))) - view.addGestureRecognizer(self.exposureGesture) - self.exposureGesture.delegate = self + DispatchQueue.main.async { [weak self] in + if let self = self { + self.exposureGesture.addTarget(self, action: #selector(CameraManager._exposureStart(_:))) + view.addGestureRecognizer(self.exposureGesture) + self.exposureGesture.delegate = self + } } } @@ -1345,8 +1355,8 @@ open class CameraManager: NSObject, AVCaptureFileOutputRecordingDelegate, UIGest validOutputLayerConnection.videoOrientation = _currentCaptureVideoOrientation() } if !shouldKeepViewAtOrientationChanges && cameraIsObservingDeviceOrientation { - DispatchQueue.main.async { () -> Void in - if let validEmbeddingView = self.embeddingView { + DispatchQueue.main.async { [weak self] () -> Void in + if let self = self, let validEmbeddingView = self.embeddingView { validPreviewLayer.frame = validEmbeddingView.bounds } } @@ -1479,8 +1489,8 @@ open class CameraManager: NSObject, AVCaptureFileOutputRecordingDelegate, UIGest fileprivate func _setupCamera(_ completion: @escaping () -> Void) { captureSession = AVCaptureSession() - sessionQueue.async { - if let 
validCaptureSession = self.captureSession { + sessionQueue.async { [weak self] in + if let self = self, let validCaptureSession = self.captureSession { validCaptureSession.beginConfiguration() validCaptureSession.sessionPreset = AVCaptureSession.Preset.high self._updateCameraDevice(self.cameraDevice) @@ -1554,8 +1564,8 @@ open class CameraManager: NSObject, AVCaptureFileOutputRecordingDelegate, UIGest attachFocus(view) attachExposure(view) - DispatchQueue.main.async { () -> Void in - guard let previewLayer = self.previewLayer else { return } + DispatchQueue.main.async { [weak self] () -> Void in + guard let self = self, let previewLayer = self.previewLayer else { return } previewLayer.frame = view.layer.bounds view.clipsToBounds = true view.layer.addSublayer(previewLayer) @@ -1700,8 +1710,8 @@ open class CameraManager: NSObject, AVCaptureFileOutputRecordingDelegate, UIGest validPreviewLayer.opacity = 0.0 - DispatchQueue.main.async { - self._flipCameraTransitionView() + DispatchQueue.main.async { [weak self] in + self?._flipCameraTransitionView() } } } @@ -1957,8 +1967,8 @@ open class CameraManager: NSObject, AVCaptureFileOutputRecordingDelegate, UIGest fileprivate func _show(_ title: String, message: String) { if showErrorsToUsers { - DispatchQueue.main.async { () -> Void in - self.showErrorBlock(title, message) + DispatchQueue.main.async { [weak self] () -> Void in + self?.showErrorBlock(title, message) } } } From 0e58f982bb9d2e4e396ff97902c7d93210e3f0b6 Mon Sep 17 00:00:00 2001 From: Thierry Date: Tue, 26 Sep 2023 17:59:48 -0400 Subject: [PATCH 06/10] [fix] iPhone 15 (AVCapturePhotoOutput) [fix] Managing other lenses. Using ultraWide and wide lenses. 
--- Sources/CameraManager.swift | 361 +++++++++++++++++++++++------------- 1 file changed, 235 insertions(+), 126 deletions(-) diff --git a/Sources/CameraManager.swift b/Sources/CameraManager.swift index 2ad89a1..f174208 100644 --- a/Sources/CameraManager.swift +++ b/Sources/CameraManager.swift @@ -21,7 +21,7 @@ public enum CameraState { } public enum CameraDevice { - case front, back, uWide + case front, telephoto, wideAngle, ultraWideAngle, dual, dualWideAngle, triple } public enum CameraFlashMode: Int { @@ -109,9 +109,10 @@ public enum CaptureError: Error { /// Class for handling iDevices custom camera usage @available(iOS 13.0, *) -open class CameraManager: NSObject, AVCaptureFileOutputRecordingDelegate, UIGestureRecognizerDelegate { +open class CameraManager: NSObject, AVCapturePhotoCaptureDelegate, AVCaptureFileOutputRecordingDelegate, UIGestureRecognizerDelegate { // MARK: - Public properties + // Property for custom image album name. open var imageAlbumName: String? @@ -217,14 +218,44 @@ open class CameraManager: NSObject, AVCaptureFileOutputRecordingDelegate, UIGest /// Property to determine if current device has front camera. open var hasFrontCamera: Bool = { - let frontDevices = AVCaptureDevice.videoDevices.filter { $0.position == .front } - return !frontDevices.isEmpty + let frontCamera = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .front) + return (frontCamera != nil) + }() + + /// Property to determine if current device has Telephoto camera. + open var hasTelephotoCamera: Bool = { + let telephotoCamera = AVCaptureDevice.default(.builtInTelephotoCamera, for: .video, position: .back) + return (telephotoCamera != nil) + }() + + /// Property to determine if current device has Wide Angle camera. 
+ open var hasWideAngleCamera: Bool = { + let wideAngleCamera = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .back) + return (wideAngleCamera != nil) }() /// Property to determine if current device has Ultra Wide camera. - open var hasuWideCamera: Bool = { - let frontDevices = AVCaptureDevice.default(.builtInUltraWideCamera, for: .video, position: .back) - return (frontDevices != nil) + open var hasUltraWideAngleCamera: Bool = { + let ultraWideAngleCamera = AVCaptureDevice.default(.builtInUltraWideCamera, for: .video, position: .back) + return (ultraWideAngleCamera != nil) + }() + + /// Property to determine if current device has Dual camera. + open var hasDualCamera: Bool = { + let dualCamera = AVCaptureDevice.default(.builtInDualCamera, for: .video, position: .back) + return (dualCamera != nil) + }() + + /// Property to determine if current device has Dual Wide Angle camera. + open var hasDualWideAngleCamera: Bool = { + let dualWideAngleCamera = AVCaptureDevice.default(.builtInDualWideCamera, for: .video, position: .back) + return (dualWideAngleCamera != nil) + }() + + /// Property to determine if current device has Triple camera. + open var hasTripleCamera: Bool = { + let tripleCamera = AVCaptureDevice.default(.builtInTripleCamera, for: .video, position: .back) + return (tripleCamera != nil) }() /// Property to determine if current device has flash. @@ -258,7 +289,7 @@ open class CameraManager: NSObject, AVCaptureFileOutputRecordingDelegate, UIGest } /// Property to change camera device between front and back. 
- open var cameraDevice: CameraDevice = .back { + open var cameraDevice: CameraDevice = .wideAngle { didSet { if cameraIsSetup, cameraDevice != oldValue { if animateCameraDeviceChange { @@ -282,8 +313,11 @@ open class CameraManager: NSObject, AVCaptureFileOutputRecordingDelegate, UIGest } } + /// Property to change camera quality prioritization mode + open var photoQualityPrioritization = AVCapturePhotoOutput.QualityPrioritization.speed + /// Property to change camera output quality. - open var cameraOutputQuality: AVCaptureSession.Preset = .high { + open var cameraOutputQuality: AVCaptureSession.Preset = .photo { didSet { if cameraIsSetup && cameraOutputQuality != oldValue { _updateCameraQualityMode(cameraOutputQuality) @@ -341,6 +375,8 @@ open class CameraManager: NSObject, AVCaptureFileOutputRecordingDelegate, UIGest return .off } + private var imageCompletionHandler: ((CaptureResult) -> Void)? + // MARK: - Private properties fileprivate var locationManager: CameraLocationManager? @@ -351,22 +387,38 @@ open class CameraManager: NSObject, AVCaptureFileOutputRecordingDelegate, UIGest fileprivate var sessionQueue: DispatchQueue = DispatchQueue(label: "CameraSessionQueue", attributes: []) fileprivate lazy var frontCameraDevice: AVCaptureDevice? = { - AVCaptureDevice.videoDevices.filter { $0.position == .front }.first + AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .front) + }() + + fileprivate lazy var telephotoCameraDevice: AVCaptureDevice? = { + AVCaptureDevice.default(.builtInTelephotoCamera, for: .video, position: .back) }() - fileprivate lazy var backCameraDevice: AVCaptureDevice? = { - AVCaptureDevice.videoDevices.filter { $0.position == .back }.first + fileprivate lazy var wideAngleCameraDevice: AVCaptureDevice? = { + AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .back) }() - fileprivate lazy var uWideCameraDevice: AVCaptureDevice? = { + fileprivate lazy var ultraWideCameraDevice: AVCaptureDevice? 
= { AVCaptureDevice.default(.builtInUltraWideCamera, for: .video, position: .back) }() + + fileprivate lazy var dualCameraDevice: AVCaptureDevice? = { + AVCaptureDevice.default(.builtInDualCamera, for: .video, position: .back) + }() + + fileprivate lazy var dualWideAngleCameraDevice: AVCaptureDevice? = { + AVCaptureDevice.default(.builtInDualWideCamera, for: .video, position: .back) + }() + + fileprivate lazy var tripleCameraDevice: AVCaptureDevice? = { + AVCaptureDevice.default(.builtInTripleCamera, for: .video, position: .back) + }() fileprivate lazy var mic: AVCaptureDevice? = { AVCaptureDevice.default(for: AVMediaType.audio) }() - fileprivate var stillImageOutput: AVCaptureStillImageOutput? + fileprivate var stillImageOutput: AVCapturePhotoOutput? fileprivate var movieOutput: AVCaptureMovieFileOutput? open private(set) var previewLayer: AVCaptureVideoPreviewLayer? fileprivate var library: PHPhotoLibrary? @@ -502,12 +554,12 @@ open class CameraManager: NSObject, AVCaptureFileOutputRecordingDelegate, UIGest stopCaptureSession() let oldAnimationValue = animateCameraDeviceChange animateCameraDeviceChange = false - cameraDevice = .back + cameraDevice = .wideAngle cameraIsSetup = false previewLayer = nil captureSession = nil frontCameraDevice = nil - backCameraDevice = nil + wideAngleCameraDevice = nil mic = nil stillImageOutput = nil movieOutput = nil @@ -667,24 +719,6 @@ open class CameraManager: NSObject, AVCaptureFileOutputRecordingDelegate, UIGest } } - /** - Captures still image from currently running capture session. - - :param: imageCompletion Completion block containing the captured imageData - */ - @available(*, deprecated) - open func capturePictureDataWithCompletion(_ imageCompletion: @escaping (Data?, NSError?) 
-> Void) { - func completion(_ result: CaptureResult) { - switch result { - case let .success(content): - imageCompletion(content.asData, nil) - case .failure: - imageCompletion(nil, NSError()) - } - } - capturePictureDataWithCompletion(completion) - } - /** Captures still image from currently running capture session. @@ -701,44 +735,65 @@ open class CameraManager: NSObject, AVCaptureFileOutputRecordingDelegate, UIGest return } + self.imageCompletionHandler = imageCompletion + _updateIlluminationMode(flashMode) sessionQueue.async { [weak self] in - if let self = self { - let stillImageOutput = self._getStillImageOutput() - if let connection = stillImageOutput.connection(with: AVMediaType.video), - connection.isEnabled { - if self.cameraDevice == CameraDevice.front, connection.isVideoMirroringSupported, - self.shouldFlipFrontCameraImage { - connection.isVideoMirrored = true - } - if connection.isVideoOrientationSupported { - connection.videoOrientation = self._currentCaptureVideoOrientation() - } - - stillImageOutput.captureStillImageAsynchronously(from: connection, completionHandler: { [weak self] sample, error in - - if let error = error { - self?._show(NSLocalizedString("Error", comment: ""), message: error.localizedDescription) - imageCompletion(.failure(error)) - return - } - - guard let sample = sample else { imageCompletion(.failure(CaptureError.noSampleBuffer)); return } - if let imageData = AVCaptureStillImageOutput.jpegStillImageNSDataRepresentation(sample) { - imageCompletion(CaptureResult(imageData)) - } else { - imageCompletion(.failure(CaptureError.noImageData)) - } - - }) - } else { - imageCompletion(.failure(CaptureError.noVideoConnection)) + guard let self = self else { return } + + let photoOutput = self._getStillImageOutput() + + if let connection = photoOutput.connection(with: AVMediaType.video), connection.isEnabled { + + if self.cameraDevice == CameraDevice.front, connection.isVideoMirroringSupported, self.shouldFlipFrontCameraImage { + 
connection.isVideoMirrored = true + } + + if connection.isVideoOrientationSupported { + connection.videoOrientation = self._currentCaptureVideoOrientation() } + + photoOutput.capturePhoto(with: _getPhotoSettings(), delegate: self) + + } else { + imageCompletion(.failure(CaptureError.noVideoConnection)) } } } + fileprivate func _getPhotoSettings() -> AVCapturePhotoSettings { + let photoSettings = AVCapturePhotoSettings() + photoSettings.photoQualityPrioritization = self.photoQualityPrioritization + + switch flashMode { + case .off: + photoSettings.flashMode = .off + case .on: + photoSettings.flashMode = .on + case .auto: + photoSettings.flashMode = .auto + } + + return photoSettings + } + + public func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photo: AVCapturePhoto, error: Error?) { + + if let error = error { + self._show(NSLocalizedString("Error", comment: ""), message: error.localizedDescription) + imageCompletionHandler?(.failure(error)) + return + } + + guard let imageData = photo.fileDataRepresentation() else { + imageCompletionHandler?(.failure(CaptureError.noImageData)) + return + } + + imageCompletionHandler?(CaptureResult(imageData)) + } + fileprivate func _imageOrientation(forDeviceOrientation deviceOrientation: UIDeviceOrientation, isMirrored: Bool) -> UIImage.Orientation { switch deviceOrientation { case .landscapeLeft: @@ -882,11 +937,19 @@ open class CameraManager: NSObject, AVCaptureFileOutputRecordingDelegate, UIGest open func hasFlash(for cameraDevice: CameraDevice) -> Bool { let devices = AVCaptureDevice.videoDevices for device in devices { - if device.position == .back, cameraDevice == .back { + if device.position == .front, cameraDevice == .front { return device.hasFlash - } else if device.position == .front, cameraDevice == .front { + } else if device.position == .back, cameraDevice == .telephoto { return device.hasFlash - } else if device.position == .back, cameraDevice == .uWide { + } else if device.position == 
.back, cameraDevice == .wideAngle { + return device.hasFlash + } else if device.position == .back, cameraDevice == .ultraWideAngle { + return device.hasFlash + } else if device.position == .back, cameraDevice == .dual { + return device.hasFlash + } else if device.position == .back, cameraDevice == .dualWideAngle { + return device.hasFlash + } else if device.position == .back, cameraDevice == .triple { return device.hasFlash } } @@ -980,12 +1043,20 @@ open class CameraManager: NSObject, AVCaptureFileOutputRecordingDelegate, UIGest let device: AVCaptureDevice? switch cameraDevice { - case .back: - device = backCameraDevice case .front: device = frontCameraDevice - case .uWide: - device = uWideCameraDevice + case .telephoto: + device = telephotoCameraDevice + case .wideAngle: + device = wideAngleCameraDevice + case .ultraWideAngle: + device = ultraWideCameraDevice + case .dual: + device = dualCameraDevice + case .dualWideAngle: + device = dualWideAngleCameraDevice + case .triple: + device = tripleCameraDevice } do { @@ -1033,12 +1104,20 @@ open class CameraManager: NSObject, AVCaptureFileOutputRecordingDelegate, UIGest let device: AVCaptureDevice? switch cameraDevice { - case .back: - device = backCameraDevice case .front: device = frontCameraDevice - case .uWide: - device = uWideCameraDevice + case .telephoto: + device = telephotoCameraDevice + case .wideAngle: + device = wideAngleCameraDevice + case .ultraWideAngle: + device = ultraWideCameraDevice + case .dual: + device = dualCameraDevice + case .dualWideAngle: + device = dualWideAngleCameraDevice + case .triple: + device = tripleCameraDevice } _changeExposureMode(mode: .continuousAutoExposure) @@ -1200,12 +1279,20 @@ open class CameraManager: NSObject, AVCaptureFileOutputRecordingDelegate, UIGest let device: AVCaptureDevice? 
switch cameraDevice { - case .back: - device = backCameraDevice case .front: device = frontCameraDevice - case .uWide: - device = uWideCameraDevice + case .telephoto: + device = telephotoCameraDevice + case .wideAngle: + device = wideAngleCameraDevice + case .ultraWideAngle: + device = ultraWideCameraDevice + case .dual: + device = dualCameraDevice + case .dualWideAngle: + device = dualWideAngleCameraDevice + case .triple: + device = tripleCameraDevice } if device?.exposureMode == mode { return @@ -1229,12 +1316,20 @@ open class CameraManager: NSObject, AVCaptureFileOutputRecordingDelegate, UIGest let device: AVCaptureDevice? switch cameraDevice { - case .back: - device = backCameraDevice case .front: device = frontCameraDevice - case .uWide: - device = uWideCameraDevice + case .telephoto: + device = telephotoCameraDevice + case .wideAngle: + device = wideAngleCameraDevice + case .ultraWideAngle: + device = ultraWideCameraDevice + case .dual: + device = dualCameraDevice + case .dualWideAngle: + device = dualWideAngleCameraDevice + case .triple: + device = tripleCameraDevice } guard let videoDevice = device else { @@ -1314,12 +1409,12 @@ open class CameraManager: NSObject, AVCaptureFileOutputRecordingDelegate, UIGest } } - fileprivate func _getStillImageOutput() -> AVCaptureStillImageOutput { + fileprivate func _getStillImageOutput() -> AVCapturePhotoOutput { if let stillImageOutput = stillImageOutput, let connection = stillImageOutput.connection(with: AVMediaType.video), connection.isActive { return stillImageOutput } - let newStillImageOutput = AVCaptureStillImageOutput() + let newStillImageOutput = AVCapturePhotoOutput() stillImageOutput = newStillImageOutput if let captureSession = captureSession, captureSession.canAddOutput(newStillImageOutput) { @@ -1492,7 +1587,7 @@ open class CameraManager: NSObject, AVCaptureFileOutputRecordingDelegate, UIGest sessionQueue.async { [weak self] in if let self = self, let validCaptureSession = self.captureSession { 
validCaptureSession.beginConfiguration() - validCaptureSession.sessionPreset = AVCaptureSession.Preset.high + validCaptureSession.sessionPreset = AVCaptureSession.Preset.photo self._updateCameraDevice(self.cameraDevice) self._setupOutputs() self._setupOutputMode(self.cameraOutputMode, oldCameraOutputMode: nil) @@ -1576,12 +1671,20 @@ open class CameraManager: NSObject, AVCaptureFileOutputRecordingDelegate, UIGest var maxZoom = CGFloat(1.0) beginZoomScale = CGFloat(1.0) - if cameraDevice == .back, let backCameraDevice = backCameraDevice { - maxZoom = backCameraDevice.activeFormat.videoMaxZoomFactor - } else if cameraDevice == .front, let frontCameraDevice = frontCameraDevice { + if cameraDevice == .front, let frontCameraDevice = frontCameraDevice { maxZoom = frontCameraDevice.activeFormat.videoMaxZoomFactor - } else if cameraDevice == .uWide, let uWideCameraDevice = uWideCameraDevice { - maxZoom = uWideCameraDevice.activeFormat.videoMaxZoomFactor + } else if cameraDevice == .telephoto, let telephotoCameraDevice = telephotoCameraDevice { + maxZoom = telephotoCameraDevice.activeFormat.videoMaxZoomFactor + } else if cameraDevice == .wideAngle, let wideAngleCameraDevice = wideAngleCameraDevice { + maxZoom = wideAngleCameraDevice.activeFormat.videoMaxZoomFactor + } else if cameraDevice == .ultraWideAngle, let ultraWideAngleCameraDevice = ultraWideCameraDevice { + maxZoom = ultraWideAngleCameraDevice.activeFormat.videoMaxZoomFactor + } else if cameraDevice == .dual, let dualCameraDevice = dualCameraDevice { + maxZoom = dualCameraDevice.activeFormat.videoMaxZoomFactor + } else if cameraDevice == .dualWideAngle, let dualWideAngleCameraDevice = dualWideAngleCameraDevice { + maxZoom = dualWideAngleCameraDevice.activeFormat.videoMaxZoomFactor + } else if cameraDevice == .triple, let tripleCameraDevice = tripleCameraDevice { + maxZoom = tripleCameraDevice.activeFormat.videoMaxZoomFactor } maxZoomScale = maxZoom @@ -1655,7 +1758,7 @@ open class CameraManager: NSObject, 
AVCaptureFileOutputRecordingDelegate, UIGest fileprivate func _setupOutputs() { if stillImageOutput == nil { - stillImageOutput = AVCaptureStillImageOutput() + stillImageOutput = AVCapturePhotoOutput() } if movieOutput == nil { movieOutput = _getMovieOutput() @@ -1839,21 +1942,46 @@ open class CameraManager: NSObject, AVCaptureFileOutputRecordingDelegate, UIGest switch cameraDevice { case .front: - if hasFrontCamera { - if let validFrontDevice = _deviceInputFromDevice(frontCameraDevice), - !inputs.contains(validFrontDevice) { - validCaptureSession.addInput(validFrontDevice) - } + guard hasFrontCamera else { return } + if let validFrontDevice = _deviceInputFromDevice(frontCameraDevice), + !inputs.contains(validFrontDevice) { + validCaptureSession.addInput(validFrontDevice) + } + case .telephoto: + guard hasTelephotoCamera else { return } + if let validTelephotoDevice = _deviceInputFromDevice(telephotoCameraDevice), + !inputs.contains(validTelephotoDevice) { + validCaptureSession.addInput(validTelephotoDevice) } - case .back: - if let validBackDevice = _deviceInputFromDevice(backCameraDevice), - !inputs.contains(validBackDevice) { - validCaptureSession.addInput(validBackDevice) + case .wideAngle: + guard hasWideAngleCamera else { return } + if let validWideAngleDevice = _deviceInputFromDevice(wideAngleCameraDevice), + !inputs.contains(validWideAngleDevice) { + validCaptureSession.addInput(validWideAngleDevice) } - case .uWide: - if let validuWideDevice = _deviceInputFromDevice(uWideCameraDevice), - !inputs.contains(validuWideDevice) { - validCaptureSession.addInput(validuWideDevice) + case .ultraWideAngle: + guard hasUltraWideAngleCamera else { return } + if let validUltraWideAngleDevice = _deviceInputFromDevice(ultraWideCameraDevice), + !inputs.contains(validUltraWideAngleDevice) { + validCaptureSession.addInput(validUltraWideAngleDevice) + } + case .dual: + guard hasDualCamera else { return } + if let validDualDevice = _deviceInputFromDevice(dualCameraDevice), + 
!inputs.contains(validDualDevice) { + validCaptureSession.addInput(validDualDevice) + } + case .dualWideAngle: + guard hasDualWideAngleCamera else { return } + if let validDualWideAngleDevice = _deviceInputFromDevice(dualWideAngleCameraDevice), + !inputs.contains(validDualWideAngleDevice) { + validCaptureSession.addInput(validDualWideAngleDevice) + } + case .triple: + guard hasTripleCamera else { return } + if let validTripleDevice = _deviceInputFromDevice(tripleCameraDevice), + !inputs.contains(validTripleDevice) { + validCaptureSession.addInput(validTripleDevice) } } } @@ -1862,8 +1990,6 @@ open class CameraManager: NSObject, AVCaptureFileOutputRecordingDelegate, UIGest fileprivate func _updateIlluminationMode(_ mode: CameraFlashMode) { if cameraOutputMode != .stillImage { _updateTorch(mode) - } else { - _updateFlash(mode) } } @@ -1872,7 +1998,7 @@ open class CameraManager: NSObject, AVCaptureFileOutputRecordingDelegate, UIGest defer { captureSession?.commitConfiguration() } for captureDevice in AVCaptureDevice.videoDevices { guard let avTorchMode = AVCaptureDevice.TorchMode(rawValue: flashMode.rawValue) else { continue } - if captureDevice.isTorchModeSupported(avTorchMode), cameraDevice == .back { + if captureDevice.isTorchModeSupported(avTorchMode), cameraDevice == .wideAngle { do { try captureDevice.lockForConfiguration() @@ -1886,23 +2012,6 @@ open class CameraManager: NSObject, AVCaptureFileOutputRecordingDelegate, UIGest } } - fileprivate func _updateFlash(_ flashMode: CameraFlashMode) { - captureSession?.beginConfiguration() - defer { captureSession?.commitConfiguration() } - for captureDevice in AVCaptureDevice.videoDevices { - guard let avFlashMode = AVCaptureDevice.FlashMode(rawValue: flashMode.rawValue) else { continue } - if captureDevice.isFlashModeSupported(avFlashMode) { - do { - try captureDevice.lockForConfiguration() - captureDevice.flashMode = avFlashMode - captureDevice.unlockForConfiguration() - } catch { - return - } - } - } - } - 
fileprivate func _performShutterAnimation(_ completion: (() -> Void)?) { if let validPreviewLayer = previewLayer { DispatchQueue.main.async { @@ -1933,7 +2042,7 @@ open class CameraManager: NSObject, AVCaptureFileOutputRecordingDelegate, UIGest fileprivate func _updateCameraQualityMode(_ newCameraOutputQuality: AVCaptureSession.Preset) { if let validCaptureSession = captureSession { var sessionPreset = newCameraOutputQuality - if newCameraOutputQuality == .high { + if newCameraOutputQuality == .photo { if cameraOutputMode == .stillImage { sessionPreset = AVCaptureSession.Preset.photo } else { From fd5d23de595d53de31c79969d744cb1bbed43118 Mon Sep 17 00:00:00 2001 From: Thierry Date: Tue, 7 Nov 2023 12:52:24 -0500 Subject: [PATCH 07/10] [fix] Added isFlashSupported --- Sources/CameraManager.swift | 29 ++++++++++++++++++++++------- 1 file changed, 22 insertions(+), 7 deletions(-) diff --git a/Sources/CameraManager.swift b/Sources/CameraManager.swift index f174208..00ef63a 100644 --- a/Sources/CameraManager.swift +++ b/Sources/CameraManager.swift @@ -762,21 +762,36 @@ open class CameraManager: NSObject, AVCapturePhotoCaptureDelegate, AVCaptureFile } } + public func isFlashSupported() -> Bool { + let supportedFlashModes = AVCapturePhotoOutput().supportedFlashModes + return !supportedFlashModes.isEmpty + } + fileprivate func _getPhotoSettings() -> AVCapturePhotoSettings { let photoSettings = AVCapturePhotoSettings() photoSettings.photoQualityPrioritization = self.photoQualityPrioritization - switch flashMode { - case .off: - photoSettings.flashMode = .off - case .on: - photoSettings.flashMode = .on - case .auto: - photoSettings.flashMode = .auto + if let supportedFlashModes = stillImageOutput?.supportedFlashModes { + + switch flashMode { + case .off: + if supportedFlashModes.contains(.off) { + photoSettings.flashMode = .off + } + case .on: + if supportedFlashModes.contains(.on) { + photoSettings.flashMode = .on + } + case .auto: + if supportedFlashModes.contains(.auto) { 
+ photoSettings.flashMode = .auto + } + } } return photoSettings } + public func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photo: AVCapturePhoto, error: Error?) { From c8918c7be13f290a7e453c756a92c2f28b2c0b93 Mon Sep 17 00:00:00 2001 From: Thierry Date: Wed, 8 Nov 2023 09:22:51 -0500 Subject: [PATCH 08/10] [fix] Crash in stopAndRemoveCaptureSession --- Sources/CameraManager.swift | 1 - 1 file changed, 1 deletion(-) diff --git a/Sources/CameraManager.swift b/Sources/CameraManager.swift index 00ef63a..afba6fc 100644 --- a/Sources/CameraManager.swift +++ b/Sources/CameraManager.swift @@ -554,7 +554,6 @@ open class CameraManager: NSObject, AVCapturePhotoCaptureDelegate, AVCaptureFile stopCaptureSession() let oldAnimationValue = animateCameraDeviceChange animateCameraDeviceChange = false - cameraDevice = .wideAngle cameraIsSetup = false previewLayer = nil captureSession = nil From 8c5d1882e3a9a628f0f0a43d67071e6e43126009 Mon Sep 17 00:00:00 2001 From: Thierry Date: Thu, 25 Jan 2024 11:25:05 -0500 Subject: [PATCH 09/10] [Fix] Possible crash in _startFollowingDeviceOrientation: NSOperationQueue (QOS: UNSPECIFIED) EXC_BAD_ACCESS KERN_INVALID_ADDRESS 0x0000000000000118 --- Sources/CameraManager.swift | 25 ++++++++++++++++--------- 1 file changed, 16 insertions(+), 9 deletions(-) diff --git a/Sources/CameraManager.swift b/Sources/CameraManager.swift index afba6fc..bd122d3 100644 --- a/Sources/CameraManager.swift +++ b/Sources/CameraManager.swift @@ -379,6 +379,13 @@ open class CameraManager: NSObject, AVCapturePhotoCaptureDelegate, AVCaptureFile // MARK: - Private properties + fileprivate var deviceOrientationOperationQueue: OperationQueue = { + let queue = OperationQueue() + queue.name = "DeviceOrientationQueue" + queue.maxConcurrentOperationCount = 1 + return queue + }() + fileprivate var locationManager: CameraLocationManager? fileprivate weak var embeddingView: UIView? 
@@ -1624,28 +1631,28 @@ open class CameraManager: NSObject, AVCapturePhotoCaptureDelegate, AVCaptureFile coreMotionManager = CMMotionManager() coreMotionManager.deviceMotionUpdateInterval = 1 / 30.0 if coreMotionManager.isDeviceMotionAvailable { - coreMotionManager.startDeviceMotionUpdates(to: OperationQueue()) { [weak self] motion, error in - guard let motion = motion, let self = self else { return } + coreMotionManager.startDeviceMotionUpdates(to: deviceOrientationOperationQueue) { [weak self] motion, error in + guard let motion = motion, let strongSelf = self else { return } guard error == nil else { return } let x = motion.gravity.x let y = motion.gravity.y - let previousOrientation = self.deviceOrientation + let previousOrientation = strongSelf.deviceOrientation if fabs(y) >= fabs(x) { if y >= 0 { - self.deviceOrientation = .portraitUpsideDown + strongSelf.deviceOrientation = .portraitUpsideDown } else { - self.deviceOrientation = .portrait + strongSelf.deviceOrientation = .portrait } } else { if x >= 0 { - self.deviceOrientation = .landscapeRight + strongSelf.deviceOrientation = .landscapeRight } else { - self.deviceOrientation = .landscapeLeft + strongSelf.deviceOrientation = .landscapeLeft } } - if previousOrientation != self.deviceOrientation { - self._orientationChanged() + if previousOrientation != strongSelf.deviceOrientation { + strongSelf._orientationChanged() } } From 96f6d5d6278e968ae9adff03fc93128ff9d4a997 Mon Sep 17 00:00:00 2001 From: Thierry Date: Fri, 19 Jul 2024 09:57:13 -0400 Subject: [PATCH 10/10] Fixed camera access message (typo) --- Sources/CameraManager.swift | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Sources/CameraManager.swift b/Sources/CameraManager.swift index bd122d3..f1d6449 100644 --- a/Sources/CameraManager.swift +++ b/Sources/CameraManager.swift @@ -1721,7 +1721,7 @@ open class CameraManager: NSObject, AVCapturePhotoCaptureDelegate, AVCaptureFile } else if authorizationStatus == 
AVAuthorizationStatus.notDetermined { return .notDetermined } else { - _show(NSLocalizedString("Camera access denied", comment: ""), message: NSLocalizedString("You need to go to settings app and grant acces to the camera device to use it.", comment: "")) + _show(NSLocalizedString("Camera access denied", comment: ""), message: NSLocalizedString("You need to go to settings app and grant access to the camera device to use it.", comment: "")) return .accessDenied } } else {