SwiftUI Camera Preview Example

//
//  ContentView.swift
//  SelfieADay
//
//  Created by Justin Allen on 6/11/24.
//
//  https://developer.apple.com/tutorials/sample-apps/capturingphotos-camerapreview

import SwiftUI
import SwiftData
import Photos
import AVFoundation
import os.log
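
/// Wraps the Photos framework authorization check so callers can `await` a simple Bool.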
class PhotoLibrary {
    static func checkAuthorization() async -> Bool {
        switch PHPhotoLibrary.authorizationStatus(for: .readWrite) {
        case .authorized:
            logger.debug("Photo library access authorized.")
            return true
        case .notDetermined:
            logger.debug("Photo library access not determined.")
            return await PHPhotoLibrary.requestAuthorization(for: .readWrite) == .authorized
        case .denied:
            logger.debug("Photo library access denied.")
            return false
        case .limited:
            logger.debug("Photo library access limited.")
            return false
        case .restricted:
            logger.debug("Photo library access restricted.")
            return false
        @unknown default:
            return false
        }
    }
}
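
/// Pairs the raw `CIImage` from the camera with a SwiftUI `Image` ready for display.
/// Note: the initializer force-unwraps the conversion, so it assumes the
/// CIImage-to-Image rendering never fails.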
struct ImageData {
    var ciImage: CIImage
    var image: Image

    init(ciImage: CIImage) {
        self.ciImage = ciImage
        self.image = ciImage.image!
    }
}
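
/// Observable model that owns the camera, publishes the latest preview frame,
/// and keeps the photos captured during this session.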
final class DataModel: ObservableObject {
    let camera = Camera()

    @Published var currentImage: ImageData?
    @Published var savedImages: [StoredImage] = []

    init() {
        Task {
            await handleCameraPreviews()
        }
    }

    func handleCameraPreviews() async {
        let imageStream = camera.previewStream
            .map { ImageData(ciImage: $0) }

        for await image in imageStream {
            Task { @MainActor in
                currentImage = image
            }
        }
    }

    func saveCameraPhoto() {
        if let currentImage {
            savedImages.append(StoredImage(image: currentImage.image))
        }
    }
}
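
/// Converts a `CIImage` into a SwiftUI `Image` by rendering it through a `CIContext`.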
fileprivate extension CIImage {
    var image: Image? {
        let ciContext = CIContext()
        guard let cgImage = ciContext.createCGImage(self, from: self.extent) else { return nil }
        return Image(decorative: cgImage, scale: 1, orientation: .up)
    }
}
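
/// Maps Core Graphics orientations to SwiftUI `Image.Orientation`.
/// Not referenced elsewhere in this file.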
fileprivate extension Image.Orientation {
    init(_ cgImageOrientation: CGImagePropertyOrientation) {
        switch cgImageOrientation {
        case .up: self = .up
        case .upMirrored: self = .upMirrored
        case .down: self = .down
        case .downMirrored: self = .downMirrored
        case .left: self = .left
        case .leftMirrored: self = .leftMirrored
        case .right: self = .right
        case .rightMirrored: self = .rightMirrored
        }
    }
}

fileprivate let logger = Logger(subsystem: "com.apple.swiftplaygroundscontent.capturingphotos", category: "DataModel")

struct StoredImage: Identifiable {
    var id = UUID()
    var image: Image
}
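
/// Main screen: a live viewfinder, a capture button, and a strip of captured photos.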
struct ContentView: View {
    @StateObject private var model = DataModel()

    var body: some View {
        VStack {
            ViewfinderView(image: $model.currentImage)

            Button("Capture") {
                model.saveCameraPhoto()
            }

            LazyHStack {
                ForEach(model.savedImages) { image in
                    image.image
                        .resizable()
                        .scaledToFill()
                        .frame(width: 100, height: 100)
                }
            }
        }
        .task {
            await model.camera.start()
            // await model.loadPhotos()
            // await model.loadThumbnail()
        }
    }
}

#Preview {
    // `Item` is a SwiftData model defined elsewhere in the app, not in this file.
    ContentView()
        .modelContainer(for: Item.self, inMemory: true)
}
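
/// Thin wrapper around `AVCaptureSession` that configures a video data output
/// and publishes cropped preview frames through `previewStream`.
/// Camera access requires an `NSCameraUsageDescription` entry in the app's Info.plist.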
class Camera: NSObject {
    private let captureSession = AVCaptureSession()
    private var isCaptureSessionConfigured = false
    private var deviceInput: AVCaptureDeviceInput?
    private var photoOutput: AVCapturePhotoOutput?
    private var videoOutput: AVCaptureVideoDataOutput?
    private var sessionQueue: DispatchQueue!

    private var allCaptureDevices: [AVCaptureDevice] {
        AVCaptureDevice.DiscoverySession(deviceTypes: [.builtInTrueDepthCamera, .builtInDualCamera, .builtInDualWideCamera, .builtInWideAngleCamera], mediaType: .video, position: .unspecified).devices
    }

    private var frontCaptureDevices: [AVCaptureDevice] {
        allCaptureDevices
            .filter { $0.position == .front }
    }

    private var backCaptureDevices: [AVCaptureDevice] {
        allCaptureDevices
            .filter { $0.position == .back }
    }

    private var captureDevices: [AVCaptureDevice] {
        var devices = [AVCaptureDevice]()
        #if os(macOS) || (os(iOS) && targetEnvironment(macCatalyst))
        devices += allCaptureDevices
        #else
        if let backDevice = backCaptureDevices.first {
            devices += [backDevice]
        }
        if let frontDevice = frontCaptureDevices.first {
            devices += [frontDevice]
        }
        #endif
        return devices
    }

    private var availableCaptureDevices: [AVCaptureDevice] {
        captureDevices
            .filter { $0.isConnected }
            .filter { !$0.isSuspended }
    }

    private var captureDevice: AVCaptureDevice? {
        didSet {
            guard let captureDevice = captureDevice else { return }
            logger.debug("Using capture device: \(captureDevice.localizedName)")
            sessionQueue.async {
                self.updateSessionForCaptureDevice(captureDevice)
            }
        }
    }

    var isRunning: Bool {
        captureSession.isRunning
    }

    var isUsingFrontCaptureDevice: Bool {
        guard let captureDevice = captureDevice else { return false }
        return frontCaptureDevices.contains(captureDevice)
    }

    var isUsingBackCaptureDevice: Bool {
        guard let captureDevice = captureDevice else { return false }
        return backCaptureDevices.contains(captureDevice)
    }
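
    // Bridges the sample-buffer delegate callbacks into an AsyncStream so SwiftUI
    // code can consume preview frames with `for await`.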
    // private var addToPhotoStream: ((AVCapturePhoto) -> Void)?
    private var addToPreviewStream: ((CIImage) -> Void)?

    var isPreviewPaused = false

    lazy var previewStream: AsyncStream<CIImage> = {
        AsyncStream { continuation in
            addToPreviewStream = { ciImage in
                if !self.isPreviewPaused {
                    continuation.yield(ciImage)
                }
            }
        }
    }()

    override init() {
        super.init()
        initialize()
    }

    private func initialize() {
        sessionQueue = DispatchQueue(label: "session queue")
        captureDevice = availableCaptureDevices.first ?? AVCaptureDevice.default(for: .video)
    }
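
    // Configures the session once: photo preset, the chosen device input, and a
    // video data output whose sample buffers feed the preview stream.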
    private func configureCaptureSession(completionHandler: (_ success: Bool) -> Void) {
        var success = false

        self.captureSession.beginConfiguration()

        defer {
            self.captureSession.commitConfiguration()
            completionHandler(success)
        }

        guard
            let captureDevice = captureDevice,
            let deviceInput = try? AVCaptureDeviceInput(device: captureDevice)
        else {
            logger.error("Failed to obtain video input.")
            return
        }

        captureSession.sessionPreset = AVCaptureSession.Preset.photo

        let videoOutput = AVCaptureVideoDataOutput()
        videoOutput.setSampleBufferDelegate(self, queue: DispatchQueue(label: "VideoDataOutputQueue"))

        guard captureSession.canAddInput(deviceInput) else {
            logger.error("Unable to add device input to capture session.")
            return
        }
        guard captureSession.canAddOutput(videoOutput) else {
            logger.error("Unable to add video output to capture session.")
            return
        }

        captureSession.addInput(deviceInput)
        captureSession.addOutput(videoOutput)

        self.deviceInput = deviceInput
        self.videoOutput = videoOutput

        updateVideoOutputConnection()

        isCaptureSessionConfigured = true
        success = true
    }

    private func checkAuthorization() async -> Bool {
        switch AVCaptureDevice.authorizationStatus(for: .video) {
        case .authorized:
            logger.debug("Camera access authorized.")
            return true
        case .notDetermined:
            logger.debug("Camera access not determined.")
            sessionQueue.suspend()
            let status = await AVCaptureDevice.requestAccess(for: .video)
            sessionQueue.resume()
            return status
        case .denied:
            logger.debug("Camera access denied.")
            return false
        case .restricted:
            logger.debug("Camera access restricted.")
            return false
        @unknown default:
            return false
        }
    }

    private func deviceInputFor(device: AVCaptureDevice?) -> AVCaptureDeviceInput? {
        guard let validDevice = device else { return nil }
        do {
            return try AVCaptureDeviceInput(device: validDevice)
        } catch let error {
            logger.error("Error getting capture device input: \(error.localizedDescription)")
            return nil
        }
    }

    private func updateSessionForCaptureDevice(_ captureDevice: AVCaptureDevice) {
        guard isCaptureSessionConfigured else { return }

        captureSession.beginConfiguration()
        defer { captureSession.commitConfiguration() }

        for input in captureSession.inputs {
            if let deviceInput = input as? AVCaptureDeviceInput {
                captureSession.removeInput(deviceInput)
            }
        }

        if let deviceInput = deviceInputFor(device: captureDevice) {
            if !captureSession.inputs.contains(deviceInput), captureSession.canAddInput(deviceInput) {
                captureSession.addInput(deviceInput)
            }
        }

        updateVideoOutputConnection()
    }

    private func updateVideoOutputConnection() {
        if let videoOutput = videoOutput, let videoOutputConnection = videoOutput.connection(with: .video) {
            if videoOutputConnection.isVideoMirroringSupported {
                videoOutputConnection.isVideoMirrored = isUsingFrontCaptureDevice
            }
        }
    }
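
    // Requests camera permission, configures the session on first use, and
    // starts it on the session queue.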
    func start() async {
        let authorized = await checkAuthorization()
        guard authorized else {
            logger.error("Camera access was not authorized.")
            return
        }

        if isCaptureSessionConfigured {
            if !captureSession.isRunning {
                sessionQueue.async { [self] in
                    self.captureSession.startRunning()
                }
            }
            return
        }

        sessionQueue.async { [self] in
            self.configureCaptureSession { success in
                guard success else { return }
                self.captureSession.startRunning()
            }
        }
    }

    func stop() {
        guard isCaptureSessionConfigured else { return }

        if captureSession.isRunning {
            sessionQueue.async {
                self.captureSession.stopRunning()
            }
        }
    }

    func switchCaptureDevice() {
        if let captureDevice = captureDevice, let index = availableCaptureDevices.firstIndex(of: captureDevice) {
            let nextIndex = (index + 1) % availableCaptureDevices.count
            self.captureDevice = availableCaptureDevices[nextIndex]
        } else {
            self.captureDevice = AVCaptureDevice.default(for: .video)
        }
    }
}
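
/// Receives frames from the video data output, rotates them upright, center-crops
/// them to a square, and forwards the result to the preview stream.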
extension Camera: AVCaptureVideoDataOutputSampleBufferDelegate {
    func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
        guard let pixelBuffer = sampleBuffer.imageBuffer else { return }

        connection.videoRotationAngle = 90

        // Create a CIImage from the pixel buffer
        let ciImage = CIImage(cvPixelBuffer: pixelBuffer)

        // Get the dimensions of the image
        let imageWidth = ciImage.extent.width
        let imageHeight = ciImage.extent.height

        // Calculate the size and origin for the square crop
        let cropSize = min(imageWidth, imageHeight)
        let cropOriginX = (imageWidth - cropSize) / 2
        let cropOriginY = (imageHeight - cropSize) / 2
        let cropRect = CGRect(x: cropOriginX, y: cropOriginY, width: cropSize, height: cropSize)

        // Crop the image to a square
        let croppedImage = ciImage.cropped(to: cropRect)

        // Add the cropped image to the preview stream
        addToPreviewStream?(croppedImage)
        // addToPreviewStream?(CIImage(cvPixelBuffer: pixelBuffer))
    }
}
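
/// Infers the current device orientation from the screen's coordinate space.
/// Not referenced elsewhere in this file.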
fileprivate extension UIScreen {
    var orientation: UIDeviceOrientation {
        let point = coordinateSpace.convert(CGPoint.zero, to: fixedCoordinateSpace)
        if point == CGPoint.zero {
            return .portrait
        } else if point.x != 0 && point.y != 0 {
            return .portraitUpsideDown
        } else if point.x == 0 && point.y != 0 {
            return .landscapeRight // .landscapeLeft
        } else if point.x != 0 && point.y == 0 {
            return .landscapeLeft // .landscapeRight
        } else {
            return .unknown
        }
    }
}
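
/// Displays a `UIImage` in a square frame sized to the available width.
/// Not referenced elsewhere in this file.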
struct SquareImage: View {
    var image: UIImage
    @State private var height: CGFloat = 10

    var body: some View {
        GeometryReader { geometry in
            let sideLength = geometry.size.width
            Image(uiImage: image)
                .resizable()
                .scaledToFit()
                .cornerRadius(8)
                .frame(width: sideLength, height: sideLength)
                .onAppear {
                    height = sideLength
                }
        }
        .frame(height: height)
    }
}
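
/// Shows the latest preview frame as a rounded square viewfinder.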
struct ViewfinderView: View {
    @Binding var image: ImageData?
    // @State var size: CGFloat = 0
    @State private var height: CGFloat = 10

    var body: some View {
        NavigationStack {
            Group {
                if let image = image?.image {
                    image
                        .resizable()
                        .aspectRatio(1, contentMode: .fill)
                        .cornerRadius(16)
                        .frame(maxWidth: .infinity)
                        .padding()
                }
            }
            .padding()
            .navigationTitle("Camera")
            .navigationBarTitleDisplayMode(.inline)
        }
    }
}