Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
16 changes: 15 additions & 1 deletion README.md
Original file line number Diff line number Diff line change
Expand Up @@ -88,13 +88,27 @@ cameraManager.stopVideoRecording({ (videoURL, recordError) -> Void in
})
```

To zoom in manually:
To zoom manually:

```swift
// Zoom in
cameraManager.zoom(CGFloat(2.0))

// Zoom out (uses ultra-wide camera on supported devices)
cameraManager.zoom(CGFloat(1.0))
```

The zoom range is automatically determined by the device's camera capabilities using `minAvailableVideoZoomFactor` and `maxAvailableVideoZoomFactor`.

**Ultra-Wide Camera Support:**
The library automatically detects and uses multi-camera virtual devices (iPhone 11+) that include ultra-wide lenses:
- iPhone 11 Pro/12 Pro/13 Pro/14 Pro: `.builtInTripleCamera` (ultra-wide + wide + telephoto)
- iPhone 11/12/13/14 (non-Pro): `.builtInDualWideCamera` (ultra-wide + wide)

On these devices, setting zoom to 1.0 automatically uses the ultra-wide lens. iOS handles the camera switching seamlessly as you zoom in and out.

### Properties

You can set input device to front or back camera. `(Default: .Back)`
Expand Down
185 changes: 110 additions & 75 deletions Sources/CameraManager.swift
Original file line number Diff line number Diff line change
Expand Up @@ -260,7 +260,7 @@ open class CameraManager: NSObject, AVCaptureFileOutputRecordingDelegate, UIGest
_updateCameraDevice(cameraDevice)
_updateIlluminationMode(flashMode)
_setupMaxZoomScale()
_zoom(0)
_zoom(1)
_orientationChanged()
}
}
Expand Down Expand Up @@ -292,7 +292,7 @@ open class CameraManager: NSObject, AVCaptureFileOutputRecordingDelegate, UIGest
_setupOutputMode(cameraOutputMode, oldCameraOutputMode: oldValue)
}
_setupMaxZoomScale()
_zoom(0)
_zoom(1)
}
}
}
Expand Down Expand Up @@ -348,14 +348,34 @@ open class CameraManager: NSObject, AVCaptureFileOutputRecordingDelegate, UIGest
}()

/// Back-facing capture device, preferring multi-camera virtual devices so the
/// ultra-wide lens is available when zooming out. Falls back to the plain
/// wide-angle camera on older systems or single-lens hardware.
/// (This span previously contained a leftover pre-diff expression alongside
/// the new implementation; only the discovery-session version is kept.)
fileprivate lazy var backCameraDevice: AVCaptureDevice? = {
    var deviceTypes: [AVCaptureDevice.DeviceType] = [.builtInWideAngleCamera]

    if #available(iOS 13.0, *) {
        // Ordered most-capable first; DiscoverySession returns devices in the
        // order of `deviceTypes`, so `.first` picks the best available camera.
        deviceTypes = [
            .builtInTripleCamera,    // ultra-wide + wide + telephoto (iPhone 11 Pro and later Pro models)
            .builtInDualWideCamera,  // ultra-wide + wide (non-Pro iPhone 11 and later)
            .builtInDualCamera,      // wide + telephoto (iPhone 7 Plus, 8 Plus, X, XS)
            .builtInWideAngleCamera, // single-camera fallback
        ]
    }

    let discoverySession = AVCaptureDevice.DiscoverySession(
        deviceTypes: deviceTypes,
        mediaType: .video,
        position: .back
    )

    return discoverySession.devices.first
}()

/// Default microphone, used when recording video with audio.
fileprivate lazy var mic: AVCaptureDevice? = {
    AVCaptureDevice.default(for: .audio)
}()

fileprivate var stillImageOutput: AVCaptureStillImageOutput?
fileprivate var photoOutput: AVCapturePhotoOutput?
fileprivate var photoCaptureCompletion: ((CaptureResult) -> Void)?
fileprivate var movieOutput: AVCaptureMovieFileOutput?
fileprivate var previewLayer: AVCaptureVideoPreviewLayer?
fileprivate var library: PHPhotoLibrary?
Expand All @@ -366,6 +386,7 @@ open class CameraManager: NSObject, AVCaptureFileOutputRecordingDelegate, UIGest
fileprivate var zoomScale = CGFloat(1.0)
fileprivate var beginZoomScale = CGFloat(1.0)
fileprivate var maxZoomScale = CGFloat(1.0)
fileprivate var minZoomScale = CGFloat(1.0)

fileprivate func _tempFilePath() -> URL {
let tempURL = URL(fileURLWithPath: NSTemporaryDirectory()).appendingPathComponent("tempMovie\(Date().timeIntervalSince1970)").appendingPathExtension("mp4")
Expand Down Expand Up @@ -498,7 +519,8 @@ open class CameraManager: NSObject, AVCaptureFileOutputRecordingDelegate, UIGest
frontCameraDevice = nil
backCameraDevice = nil
mic = nil
stillImageOutput = nil
photoOutput = nil
photoCaptureCompletion = nil
movieOutput = nil
animateCameraDeviceChange = oldAnimationValue
}
Expand Down Expand Up @@ -688,39 +710,27 @@ open class CameraManager: NSObject, AVCaptureFileOutputRecordingDelegate, UIGest
return
}

_updateIlluminationMode(flashMode)

sessionQueue.async {
let stillImageOutput = self._getStillImageOutput()
if let connection = stillImageOutput.connection(with: AVMediaType.video),
connection.isEnabled {
if self.cameraDevice == CameraDevice.front, connection.isVideoMirroringSupported,
self.shouldFlipFrontCameraImage {
connection.isVideoMirrored = true
}
if connection.isVideoOrientationSupported {
connection.videoOrientation = self._currentCaptureVideoOrientation()
}

stillImageOutput.captureStillImageAsynchronously(from: connection, completionHandler: { [weak self] sample, error in

if let error = error {
self?._show(NSLocalizedString("Error", comment: ""), message: error.localizedDescription)
imageCompletion(.failure(error))
return
}

guard let sample = sample else { imageCompletion(.failure(CaptureError.noSampleBuffer)); return }
if let imageData = AVCaptureStillImageOutput.jpegStillImageNSDataRepresentation(sample) {
imageCompletion(CaptureResult(imageData))
} else {
imageCompletion(.failure(CaptureError.noImageData))
}

})
} else {
let photoOutput = self._getPhotoOutput()
guard let connection = photoOutput.connection(with: AVMediaType.video), connection.isEnabled else {
imageCompletion(.failure(CaptureError.noVideoConnection))
return
}
if self.cameraDevice == CameraDevice.front, connection.isVideoMirroringSupported,
self.shouldFlipFrontCameraImage {
connection.isVideoMirrored = true
}
if connection.isVideoOrientationSupported {
connection.videoOrientation = self._currentCaptureVideoOrientation()
}

let settings = AVCapturePhotoSettings()
if self.cameraDevice == .back, photoOutput.supportedFlashModes.contains(AVCaptureDevice.FlashMode(rawValue: self.flashMode.rawValue) ?? .off) {
settings.flashMode = AVCaptureDevice.FlashMode(rawValue: self.flashMode.rawValue) ?? .off
}

self.photoCaptureCompletion = imageCompletion
photoOutput.capturePhoto(with: settings, delegate: self)
}
}

Expand All @@ -740,7 +750,7 @@ open class CameraManager: NSObject, AVCaptureFileOutputRecordingDelegate, UIGest
/**
Starts recording a video with or without voice as in the session preset.
*/
open func startRecordingVideo() {
open func startRecordingVideo(toURL url: URL? = nil) {
guard cameraOutputMode != .stillImage else {
_show(NSLocalizedString("Capture session output still image", comment: ""), message: NSLocalizedString("I can only take pictures", comment: ""))
return
Expand Down Expand Up @@ -772,7 +782,7 @@ open class CameraManager: NSObject, AVCaptureFileOutputRecordingDelegate, UIGest

_updateIlluminationMode(flashMode)

videoOutput.startRecording(to: _tempFilePath(), recordingDelegate: self)
videoOutput.startRecording(to: url ?? _tempFilePath(), recordingDelegate: self)
}

/**
Expand Down Expand Up @@ -971,7 +981,7 @@ open class CameraManager: NSObject, AVCaptureFileOutputRecordingDelegate, UIGest
let captureDevice = device
try captureDevice?.lockForConfiguration()

zoomScale = max(1.0, min(beginZoomScale * scale, maxZoomScale))
zoomScale = max(minZoomScale, min(beginZoomScale * scale, maxZoomScale))

captureDevice?.videoZoomFactor = zoomScale

Expand Down Expand Up @@ -1283,28 +1293,28 @@ open class CameraManager: NSObject, AVCaptureFileOutputRecordingDelegate, UIGest
}
}

/// Returns the active `AVCapturePhotoOutput`, creating a new one and attaching
/// it to the capture session when the cached output is missing or its video
/// connection has gone inactive.
/// (This span previously contained interleaved pre-diff `_getStillImageOutput`
/// lines; only the `AVCapturePhotoOutput` version is kept.)
fileprivate func _getPhotoOutput() -> AVCapturePhotoOutput {
    if let photoOutput = photoOutput, let connection = photoOutput.connection(with: AVMediaType.video),
       connection.isActive {
        return photoOutput
    }
    let newPhotoOutput = AVCapturePhotoOutput()
    photoOutput = newPhotoOutput
    if let captureSession = captureSession,
       captureSession.canAddOutput(newPhotoOutput) {
        captureSession.beginConfiguration()
        captureSession.addOutput(newPhotoOutput)
        captureSession.commitConfiguration()
    }
    return newPhotoOutput
}

@objc fileprivate func _orientationChanged() {
var currentConnection: AVCaptureConnection?

switch cameraOutputMode {
case .stillImage:
currentConnection = stillImageOutput?.connection(with: AVMediaType.video)
currentConnection = photoOutput?.connection(with: AVMediaType.video)
case .videoOnly, .videoWithMic:
currentConnection = _getMovieOutput().connection(with: AVMediaType.video)
if let location = locationManager?.latestLocation {
Expand Down Expand Up @@ -1541,15 +1551,41 @@ open class CameraManager: NSObject, AVCaptureFileOutputRecordingDelegate, UIGest

/// Reads the zoom capabilities of the currently selected camera and resets the
/// cached zoom bounds (`minZoomScale`/`maxZoomScale`) plus the current zoom
/// (`zoomScale`/`beginZoomScale`) to that device's default factor.
/// Back- and front-camera branches previously duplicated the switch-over
/// logic; it now lives in `_zoomLimits(for:)`.
fileprivate func _setupMaxZoomScale() {
    var maxZoom = CGFloat(1.0)
    var minZoom = CGFloat(1.0)
    var defaultZoom = CGFloat(1.0)
    beginZoomScale = CGFloat(1.0)

    if cameraDevice == .back, let backCameraDevice = backCameraDevice {
        (minZoom, maxZoom, defaultZoom) = _zoomLimits(for: backCameraDevice)
    } else if cameraDevice == .front, let frontCameraDevice = frontCameraDevice {
        (minZoom, maxZoom, defaultZoom) = _zoomLimits(for: frontCameraDevice)
    }

    maxZoomScale = maxZoom
    minZoomScale = minZoom
    zoomScale = defaultZoom
    beginZoomScale = defaultZoom
}

/// Returns `(min, max, initial)` zoom factors for `device`.
/// On multi-camera virtual devices (iOS 13+) the initial factor is the first
/// switch-over point — the factor at which the device switches to the main
/// wide-angle lens, i.e. what the system Camera app labels "1x"; on
/// single-lens devices it is the minimum available zoom factor.
fileprivate func _zoomLimits(for device: AVCaptureDevice) -> (min: CGFloat, max: CGFloat, initial: CGFloat) {
    let maxZoom = device.activeFormat.videoMaxZoomFactor
    let minZoom = device.minAvailableVideoZoomFactor
    var defaultZoom = minZoom

    if #available(iOS 13.0, *),
       let switchOverFactors = device.virtualDeviceSwitchOverVideoZoomFactors as? [CGFloat],
       let firstSwitchOver = switchOverFactors.first {
        defaultZoom = firstSwitchOver
    }

    return (minZoom, maxZoom, defaultZoom)
}

fileprivate func _checkIfCameraIsAvailable() -> CameraState {
Expand Down Expand Up @@ -1578,8 +1614,8 @@ open class CameraManager: NSObject, AVCaptureFileOutputRecordingDelegate, UIGest
// remove current setting
switch cameraOutputToRemove {
case .stillImage:
if let validStillImageOutput = stillImageOutput {
captureSession?.removeOutput(validStillImageOutput)
if let validPhotoOutput = photoOutput {
captureSession?.removeOutput(validPhotoOutput)
}
case .videoOnly, .videoWithMic:
if let validMovieOutput = movieOutput {
Expand All @@ -1596,10 +1632,10 @@ open class CameraManager: NSObject, AVCaptureFileOutputRecordingDelegate, UIGest
// configure new devices
switch newCameraOutputMode {
case .stillImage:
let validStillImageOutput = _getStillImageOutput()
let validPhotoOutput = _getPhotoOutput()
if let captureSession = captureSession,
captureSession.canAddOutput(validStillImageOutput) {
captureSession.addOutput(validStillImageOutput)
captureSession.canAddOutput(validPhotoOutput) {
captureSession.addOutput(validPhotoOutput)
}
case .videoOnly, .videoWithMic:
let videoMovieOutput = _getMovieOutput()
Expand All @@ -1619,8 +1655,8 @@ open class CameraManager: NSObject, AVCaptureFileOutputRecordingDelegate, UIGest
}

fileprivate func _setupOutputs() {
if stillImageOutput == nil {
stillImageOutput = AVCaptureStillImageOutput()
if photoOutput == nil {
photoOutput = AVCapturePhotoOutput()
}
if movieOutput == nil {
movieOutput = _getMovieOutput()
Expand Down Expand Up @@ -1822,11 +1858,10 @@ open class CameraManager: NSObject, AVCaptureFileOutputRecordingDelegate, UIGest
/// Applies the current illumination mode to the capture pipeline.
/// Video modes drive the torch directly; for still images nothing is done
/// here because flash is applied via `AVCapturePhotoSettings` at capture time.
/// (This span previously retained a deleted `else` branch calling the removed
/// `_updateFlash(_:)`; only the torch path remains.)
fileprivate func _updateIlluminationMode(_ mode: CameraFlashMode) {
    if cameraOutputMode != .stillImage {
        _updateTorch(mode)
    }
}

fileprivate func _updateTorch(_: CameraFlashMode) {
captureSession?.beginConfiguration()
defer { captureSession?.commitConfiguration() }
Expand All @@ -1846,22 +1881,6 @@ open class CameraManager: NSObject, AVCaptureFileOutputRecordingDelegate, UIGest
}
}

fileprivate func _updateFlash(_ flashMode: CameraFlashMode) {
captureSession?.beginConfiguration()
defer { captureSession?.commitConfiguration() }
for captureDevice in AVCaptureDevice.videoDevices {
guard let avFlashMode = AVCaptureDevice.FlashMode(rawValue: flashMode.rawValue) else { continue }
if captureDevice.isFlashModeSupported(avFlashMode) {
do {
try captureDevice.lockForConfiguration()
captureDevice.flashMode = avFlashMode
captureDevice.unlockForConfiguration()
} catch {
return
}
}
}
}

fileprivate func _performShutterAnimation(_ completion: (() -> Void)?) {
if let validPreviewLayer = previewLayer {
Expand Down Expand Up @@ -2117,6 +2136,22 @@ extension PHPhotoLibrary {
}
}

// MARK: - AVCapturePhotoCaptureDelegate
extension CameraManager: AVCapturePhotoCaptureDelegate {
    /// Receives the processed photo from `AVCapturePhotoOutput` and forwards
    /// the result to the completion stored by the capture call, clearing the
    /// stored handler so it fires at most once per capture.
    public func photoOutput(_: AVCapturePhotoOutput, didFinishProcessingPhoto photo: AVCapturePhoto, error: Error?) {
        defer { photoCaptureCompletion = nil }
        guard let handler = photoCaptureCompletion else { return }

        if let error = error {
            handler(.failure(error))
        } else if let imageData = photo.fileDataRepresentation() {
            handler(CaptureResult(imageData))
        } else {
            handler(.failure(CaptureError.noImageData))
        }
    }
}

extension CameraManager: AVCaptureMetadataOutputObjectsDelegate {
/**
Called when a QR code is detected.
Expand Down