// Request permission to modify the photo library before attempting to save captures.
PHPhotoLibrary.requestAuthorization { status in
// Proceed only when fully authorized.
// NOTE(review): on iOS 14+ this treats `.limited` library access as a failure — confirm intended.
guard status == .authorized else { return }
// Use PHPhotoLibrary.shared().performChanges(...) to add assets.
}
/// Per-capture delegate for `AVCapturePhotoOutput`. A fresh instance is created
/// for each capture, and `completionHandler` runs once that capture finishes.
class PhotoCaptureProcessor: NSObject, AVCapturePhotoCaptureDelegate {
    /// Invoked when the capture has fully finished; defaults to a no-op.
    var completionHandler: () -> Void = {}

    /// The photo output calls this once the entire capture is complete.
    func photoOutput(_ output: AVCapturePhotoOutput, didFinishCaptureFor resolvedSettings: AVCaptureResolvedPhotoSettings, error: Error?) {
        completionHandler()
    }
    // ... other delegate methods to handle capture results...
}
// Keep a set of in-progress capture delegates. AVCapturePhotoOutput does not
// retain its delegate, so each processor must be kept alive here until its
// capture finishes.
var capturesInProgress = Set<PhotoCaptureProcessor>()

/// Starts a photo capture, tracking a dedicated delegate until the capture completes.
func shootPhoto() {
    // Make a new capture delegate for each capture and add it to the set.
    let captureProcessor = PhotoCaptureProcessor()
    capturesInProgress.insert(captureProcessor)
    // Schedule for the capture delegate to be removed from the set after capture.
    // Capture the processor `unowned` to break a retain cycle: the processor
    // stores this closure, so a strong capture of the processor here would keep
    // the processor <-> closure pair alive forever (even after removal from the
    // set). The set guarantees the processor is still alive when this runs.
    captureProcessor.completionHandler = { [weak self, unowned captureProcessor] in
        self?.capturesInProgress.remove(captureProcessor)
    }
    self.photoOutput.capturePhoto(with: self.settingsForNextPhoto(), delegate: captureProcessor)
}
// Check camera authorization before configuring the capture session.
switch AVCaptureDevice.authorizationStatus(for: .video) {
case .authorized: // The user has previously granted access to the camera.
    self.setupCaptureSession()
case .notDetermined: // The user has not yet been asked for camera access.
    // NOTE(review): this completion handler runs on an arbitrary dispatch queue —
    // confirm setupCaptureSession() is safe to call from there.
    AVCaptureDevice.requestAccess(for: .video) { granted in
        if granted {
            self.setupCaptureSession()
        }
    }
case .denied, .restricted:
    // The user previously denied access, or access can't be granted due to
    // restrictions (e.g. parental controls); don't set up the session.
    return
@unknown default:
    // AVAuthorizationStatus is a non-frozen enum; treat any future case as no access.
    return
}
/// A UIView whose backing layer is an `AVCaptureVideoPreviewLayer`, used to
/// display the live camera feed.
class PreviewView: UIView {
// Overriding `layerClass` makes UIKit create this view's backing layer as an
// AVCaptureVideoPreviewLayer instead of a plain CALayer.
override class var layerClass: AnyClass {
return AVCaptureVideoPreviewLayer.self
}
/// Convenience wrapper to get layer as its statically known type.
var videoPreviewLayer: AVCaptureVideoPreviewLayer {
// Force cast is safe: `layerClass` above guarantees the backing layer's type.
return layer as! AVCaptureVideoPreviewLayer
}
}
// Remove the existing device input first, because AVCaptureSession doesn't support
// simultaneous use of the rear and front cameras.
self.session.removeInput(self.videoDeviceInput)
if self.session.canAddInput(videoDeviceInput) {
// Move the subject-area-change observation from the old device to the new one
// before committing the new input.
NotificationCenter.default.removeObserver(self, name: .AVCaptureDeviceSubjectAreaDidChange, object: currentVideoDevice)
NotificationCenter.default.addObserver(self, selector: #selector(self.subjectAreaDidChange), name: .AVCaptureDeviceSubjectAreaDidChange, object: videoDeviceInput.device)
self.session.addInput(videoDeviceInput)
self.videoDeviceInput = videoDeviceInput
} else {
// The new input couldn't be added; restore the previous input so the session
// keeps running with the old camera.
self.session.addInput(self.videoDeviceInput)
}
// React to the session-interruption reason: offer a resume button when another
// client holds the device, show a label in multi-app scenarios, and just log
// system-pressure shutdowns. Other reasons are intentionally ignored.
switch reason {
case .audioDeviceInUseByAnotherClient, .videoDeviceInUseByAnotherClient:
    showResumeButton = true
case .videoDeviceNotAvailableWithMultipleForegroundApps:
    // Fade-in a label to inform the user that the camera is unavailable.
    cameraUnavailableLabel.alpha = 0
    cameraUnavailableLabel.isHidden = false
    UIView.animate(withDuration: 0.25) {
        self.cameraUnavailableLabel.alpha = 1
    }
case .videoDeviceNotAvailableDueToSystemPressure:
    print("Session stopped running due to shutdown system pressure level.")
default:
    break
}
// Flash the screen to signal that AVCam took a photo.
DispatchQueue.main.async {
// Drop the preview layer's opacity to 0, then animate it back to full.
// NOTE(review): `opacity` is a CALayer property, so the restore animates via the
// layer's implicit animation rather than UIView.animate — confirm intended.
self.previewView.videoPreviewLayer.opacity = 0
UIView.animate(withDuration: 0.25) {
self.previewView.videoPreviewLayer.opacity = 1
}
}
// A portrait effects matte gets generated only if AVFoundation detects a face.
if var portraitEffectsMatte = photo.portraitEffectsMatte {
// Rotate the matte to match the photo's Exif orientation, when present.
// NOTE(review): force-unwrap of CGImagePropertyOrientation(rawValue:) assumes the
// metadata value is always a valid orientation — confirm.
if let orientation = photo.metadata[ String(kCGImagePropertyOrientation) ] as? UInt32 {
portraitEffectsMatte = portraitEffectsMatte.applyingExifOrientation(CGImagePropertyOrientation(rawValue: orientation)!)
}
let portraitEffectsMattePixelBuffer = portraitEffectsMatte.mattingImage
// Wrap the matte's pixel buffer in a CIImage tagged as an auxiliary portrait effects matte.
let portraitEffectsMatteImage = CIImage( cvImageBuffer: portraitEffectsMattePixelBuffer, options: [ .auxiliaryPortraitEffectsMatte: true ] )
// NOTE(review): the block below duplicates the matte extraction above — this looks
// like two revisions of the same snippet pasted together; confirm and remove one.
if var portraitEffectsMatte = photo.portraitEffectsMatte {
if let orientation = photo.metadata[String(kCGImagePropertyOrientation)] as? UInt32 {
portraitEffectsMatte = portraitEffectsMatte.applyingExifOrientation(CGImagePropertyOrientation(rawValue: orientation)!)
}
let portraitEffectsMattePixelBuffer = portraitEffectsMatte.mattingImage
// Find the semantic segmentation matte image for the specified type.
guard var segmentationMatte = photo.semanticSegmentationMatte(for: ssmType) else { return }
// Retrieve the photo orientation and apply it to the matte image.
if let orientation = photo.metadata[String(kCGImagePropertyOrientation)] as? UInt32,
let exifOrientation = CGImagePropertyOrientation(rawValue: orientation) {
// Apply the Exif orientation to the matte image.
segmentationMatte = segmentationMatte.applyingExifOrientation(exifOrientation)
}
// NOTE(review): the implicitly unwrapped optional is safe only because every
// supported ssmType assigns it below and unsupported types return early —
// consider making this a non-optional set by the switch.
var imageOption: CIImageOption!
// Switch on the AVSemanticSegmentationMatteType value.
switch ssmType {
case .hair:
imageOption = .auxiliarySemanticSegmentationHairMatte
case .skin:
imageOption = .auxiliarySemanticSegmentationSkinMatte
case .teeth:
imageOption = .auxiliarySemanticSegmentationTeethMatte
default:
print("This semantic segmentation type is not supported!")
return
}
// sRGB is used as the working (perceptual) color space for the matte image.
guard let perceptualColorSpace = CGColorSpace(name: CGColorSpace.sRGB) else { return }
// Create a new CIImage from the matte's underlying CVPixelBuffer.
let ciImage = CIImage( cvImageBuffer: segmentationMatte.mattingImage,
options: [imageOption: true,
.colorSpace: perceptualColorSpace])
// Get the HEIF representation of this image.
guard let imageData = context.heifRepresentation(of: ciImage,
format: .RGBA8,
colorSpace: perceptualColorSpace,
options: [.depthImage: ciImage]) else { return }
// Add the image data to the SSM data array for writing to the photo library.
semanticSegmentationMatteDataArray.append(imageData)
// Choose the best available camera: prefer the rear dual camera, then the rear
// wide angle camera, then the front wide angle camera. If none is available,
// `defaultVideoDevice` is left unchanged.
let fallbackOrder: [(AVCaptureDevice.DeviceType, AVCaptureDevice.Position)] = [
    (.builtInDualCamera, .back),
    (.builtInWideAngleCamera, .back),
    (.builtInWideAngleCamera, .front)
]
for (deviceType, position) in fallbackOrder {
    if let device = AVCaptureDevice.default(deviceType, for: .video, position: position) {
        defaultVideoDevice = device
        break
    }
}
// Save the captured movie file to the user's photo library, then clean up.
PHPhotoLibrary.shared().performChanges({
    // Move (rather than copy) the movie file into the photo library.
    let resourceOptions = PHAssetResourceCreationOptions()
    resourceOptions.shouldMoveFile = true
    let creationRequest = PHAssetCreationRequest.forAsset()
    creationRequest.addResource(with: .video, fileURL: outputFileURL, options: resourceOptions)
}, completionHandler: { success, error in
    if !success {
        print("AVCam couldn't save the movie to your photo library: \(String(describing: error))")
    }
    // Remove temporary files and release resources regardless of the outcome.
    cleanup()
})