From 7a2e48d38d0f1c806e3746e92bc7c12fba21dae6 Mon Sep 17 00:00:00 2001 From: Liam Ronan Date: Mon, 3 Nov 2025 12:17:28 -0800 Subject: [PATCH 1/7] initial stream-based photo capture --- Sources/DualCameraKit/CameraRenderer.swift | 27 +- .../DualCameraKit/DualCameraController.swift | 88 +++- .../DualCameraKit/DualCameraEnvironment.swift | 2 +- .../DualCameraStreamPhotoCapturer.swift | 413 ++++++++++++++++++ .../Screen/DualCameraViewModel.swift | 19 +- 5 files changed, 532 insertions(+), 17 deletions(-) create mode 100644 Sources/DualCameraKit/DualCameraStreamPhotoCapturer.swift diff --git a/Sources/DualCameraKit/CameraRenderer.swift b/Sources/DualCameraKit/CameraRenderer.swift index 7cad47a..5a0ebeb 100644 --- a/Sources/DualCameraKit/CameraRenderer.swift +++ b/Sources/DualCameraKit/CameraRenderer.swift @@ -7,9 +7,12 @@ import UIKit public protocol CameraRenderer: AnyObject { /// Update renderer with new camera frame. func update(with buffer: CVPixelBuffer) - + /// Capture current frame as UIImage. func captureCurrentFrame() async throws -> UIImage + + /// Capture current frame as raw pixel buffer (for high-quality composition). + func captureCurrentBuffer() async throws -> CVPixelBuffer } enum MetalRendererError: Error { @@ -32,6 +35,10 @@ public final class MetalCameraRenderer: MTKView, CameraRenderer, MTKViewDelegate private var textureCache: CVMetalTextureCache? private var renderPipelineState: MTLRenderPipelineState? private var currentTexture: MTLTexture? + + // MARK: - Buffer Storage + /// Stores the most recent pixel buffer for high-quality capture + private var currentBuffer: CVPixelBuffer? // MARK: - Initialization public required init(coder: NSCoder) { @@ -160,6 +167,9 @@ public final class MetalCameraRenderer: MTKView, CameraRenderer, MTKViewDelegate extension MetalCameraRenderer { public func update(with buffer: CVPixelBuffer) { + // Store the buffer for high-quality capture + self.currentBuffer = buffer + let bufferWrapper = PixelBufferWrapper(buffer: buffer) createAndUpdateTexture(from: bufferWrapper) } @@ -247,11 +257,20 @@ extension MetalCameraRenderer { free(rawData) // Create and return the UIImage - // TODO: should this scale be dynamic? + // TODO: should this scale be dynamic? return UIImage(cgImage: cgImage, scale: 1.0, orientation: .up) } - - + + /// Captures the current frame as a raw pixel buffer at native camera resolution. + /// This is preferred over captureCurrentFrame() for high-quality photo/video composition. + public func captureCurrentBuffer() async throws -> CVPixelBuffer { + guard let buffer = self.currentBuffer else { + throw DualCameraError.captureFailure(.noFrameAvailable) + } + return buffer + } + + // MARK: - Private Helpers /// Creates a texture from the given buffer and updates the view. 
diff --git a/Sources/DualCameraKit/DualCameraController.swift b/Sources/DualCameraKit/DualCameraController.swift index 6be70ab..2c0a11f 100644 --- a/Sources/DualCameraKit/DualCameraController.swift +++ b/Sources/DualCameraKit/DualCameraController.swift @@ -13,6 +13,10 @@ public protocol DualCameraControlling { var photoCapturer: any DualCameraPhotoCapturing { get } func captureRawPhotos() async throws -> (front: UIImage, back: UIImage) func captureCurrentScreen(mode: DualCameraPhotoCaptureMode) async throws -> UIImage + + // NEW: Stream-based capture with layout composition + func captureComposedPhoto(layout: DualCameraLayout, mode: DualCameraPhotoCaptureMode) async throws -> UIImage + // Ideally we could remove the need for `photoCapturer` and `videoRecorder` to be public. // We only are accessing them from inside this file - one videoRecorder type requires access to the `photoCapturer` which we do in the extension. // Probably more decoupling would help but not focused on that atm. @@ -64,22 +68,32 @@ public final class DualCameraController: DualCameraControlling { // a) this controller may just be used to capture photos AND // b) this allows dynamic VideoRecorder creation at start of video capture (see startVideoRecording(recorderType:) public var videoRecorder: (any DualCameraVideoRecording)? - + var renderers: [DualCameraSource: CameraRenderer] = [:] - + private let streamSource = DualCameraCameraStreamSource() - + // Internal storage for renderers and their stream tasks. private var streamTasks: [DualCameraSource: Task] = [:] - + + // MARK: - Stream-based Capture + private let streamPhotoCapturer: DualCameraStreamPhotoCapturer + public let useStreamCapture: Bool + // MARK: - Video Recording Properties - + private var assetWriter: AVAssetWriter? private var assetWriterVideoInput: AVAssetWriterInput? private var pixelBufferAdaptor: AVAssetWriterInputPixelBufferAdaptor? 
- - public init(photoCapturer: any DualCameraPhotoCapturing = DualCameraPhotoCapturer()) { + + public init( + photoCapturer: any DualCameraPhotoCapturing = DualCameraPhotoCapturer(), + useStreamCapture: Bool = false, // Default to legacy for backward compatibility + photoStyle: DualCameraPhotoStyle = .dualCameraScreen // Match SwiftUI appearance by default + ) { self.photoCapturer = photoCapturer + self.useStreamCapture = useStreamCapture + self.streamPhotoCapturer = DualCameraStreamPhotoCapturer(style: photoStyle) } nonisolated public var frontCameraStream: AsyncStream { @@ -109,6 +123,42 @@ public final class DualCameraController: DualCameraControlling { try streamSource.setTorchMode(mode, for: camera) } + /// Captures a composed photo using stream-based composition at native camera resolution + public func captureComposedPhoto(layout: DualCameraLayout, mode: DualCameraPhotoCaptureMode) async throws -> UIImage { + let frontRenderer = getRenderer(for: .front) + let backRenderer = getRenderer(for: .back) + + // Calculate output size based on mode + let outputSize = calculateOutputSize(for: mode) + + return try await streamPhotoCapturer.captureComposedPhoto( + frontRenderer: frontRenderer, + backRenderer: backRenderer, + layout: layout, + outputSize: outputSize + ) + } + + /// Helper to calculate output size from capture mode + private func calculateOutputSize(for mode: DualCameraPhotoCaptureMode) -> CGSize { + switch mode { + case .fullScreen: + // Use screen dimensions at native scale + let screen = UIScreen.main + return CGSize( + width: screen.bounds.width * screen.scale, + height: screen.bounds.height * screen.scale + ) + case .containerFrame(let frame): + // Use container dimensions at native scale + let screen = UIScreen.main + return CGSize( + width: frame.width * screen.scale, + height: frame.height * screen.scale + ) + } + } + /// Creates a renderer (using MetalCameraRenderer by default). public func createRenderer() -> CameraRenderer { return MetalCameraRenderer() @@ -161,10 +211,28 @@ public final class DualCameraController: DualCameraControlling { /// and we can remove the need for this "fake" behavior, i.e., make things more consistent. public final class DualCameraMockController: DualCameraControlling { public func setTorchMode(_ mode: AVCaptureDevice.TorchMode, for camera: DualCameraSource) throws { - + // Mock implementation - no-op } - - + + public func captureComposedPhoto(layout: DualCameraLayout, mode: DualCameraPhotoCaptureMode) async throws -> UIImage { + // Mock implementation - return a placeholder image + let size = CGSize(width: 1080, height: 1920) + let renderer = UIGraphicsImageRenderer(size: size) + return renderer.image { context in + // Draw purple for back camera, yellow for front camera in PiP + UIColor.purple.setFill() + context.fill(CGRect(origin: .zero, size: size)) + + // Add mini camera overlay for PiP layouts + if case .piP = layout { + UIColor.yellow.setFill() + let miniSize = CGSize(width: size.width * 0.25, height: size.height * 0.25) + context.fill(CGRect(origin: CGPoint(x: 20, y: 20), size: miniSize)) + } + } + } + + public init() { // for now, unmocked - we'll probably revisit this for testing. // this works just fine for simulator purposes though. 
diff --git a/Sources/DualCameraKit/DualCameraEnvironment.swift b/Sources/DualCameraKit/DualCameraEnvironment.swift index a37635d..6a76772 100644 --- a/Sources/DualCameraKit/DualCameraEnvironment.swift +++ b/Sources/DualCameraKit/DualCameraEnvironment.swift @@ -16,7 +16,7 @@ public struct DualCameraEnvironment: Sendable { #if targetEnvironment(simulator) return DualCameraMockController() #else - return DualCameraController() + return DualCameraController(useStreamCapture: true) #endif } } diff --git a/Sources/DualCameraKit/DualCameraStreamPhotoCapturer.swift b/Sources/DualCameraKit/DualCameraStreamPhotoCapturer.swift new file mode 100644 index 0000000..fb8699d --- /dev/null +++ b/Sources/DualCameraKit/DualCameraStreamPhotoCapturer.swift @@ -0,0 +1,413 @@ +import AVFoundation +import CoreImage +import UIKit + +/// Configuration for visual styling of composed photos (rounded corners, shadows, etc.) +public struct DualCameraPhotoStyle { + /// Corner radius for mini camera in PiP layout (in points) + public let miniCameraCornerRadius: CGFloat + /// Shadow properties for mini camera + public let miniCameraShadow: ShadowStyle? + /// Border properties for mini camera + public let miniCameraBorder: BorderStyle? + + public struct ShadowStyle { + public let color: UIColor + public let radius: CGFloat + public let opacity: Float + public let offset: CGSize + + public init(color: UIColor = .black, radius: CGFloat = 10, opacity: Float = 0.5, offset: CGSize = CGSize(width: 0, height: 4)) { + self.color = color + self.radius = radius + self.opacity = opacity + self.offset = offset + } + } + + public struct BorderStyle { + public let color: UIColor + public let width: CGFloat + + public init(color: UIColor = .white, width: CGFloat = 2) { + self.color = color + self.width = width + } + } + + /// Default style matching DualCameraScreen's SwiftUI appearance + public static let dualCameraScreen = DualCameraPhotoStyle( + miniCameraCornerRadius: 12, + miniCameraShadow: ShadowStyle(color: .black, radius: 10, opacity: 0.3, offset: CGSize(width: 0, height: 4)), + miniCameraBorder: BorderStyle(color: .white.withAlphaComponent(0.3), width: 2) + ) + + /// Minimal style with no effects + public static let minimal = DualCameraPhotoStyle( + miniCameraCornerRadius: 0, + miniCameraShadow: nil, + miniCameraBorder: nil + ) + + public init(miniCameraCornerRadius: CGFloat = 0, miniCameraShadow: ShadowStyle? = nil, miniCameraBorder: BorderStyle? = nil) { + self.miniCameraCornerRadius = miniCameraCornerRadius + self.miniCameraShadow = miniCameraShadow + self.miniCameraBorder = miniCameraBorder + } +} + +/// Captures and composes high-resolution photos from independent camera streams. +/// This approach provides native camera resolution instead of screen-limited screenshot quality. +@MainActor +public class DualCameraStreamPhotoCapturer { + + private let ciContext: CIContext + public var style: DualCameraPhotoStyle + + public init(style: DualCameraPhotoStyle = .dualCameraScreen) { + // Create GPU-accelerated Core Image context + self.ciContext = CIContext(options: [.useSoftwareRenderer: false]) + self.style = style + } + + /// Captures synchronized frames from both cameras and composes them according to layout + public func captureComposedPhoto( + frontRenderer: CameraRenderer, + backRenderer: CameraRenderer, + layout: DualCameraLayout, + outputSize: CGSize + ) async throws -> UIImage { + // 1. 
Capture raw buffers from both cameras at native resolution + let frontBuffer = try await frontRenderer.captureCurrentBuffer() + let backBuffer = try await backRenderer.captureCurrentBuffer() + + // 2. Compose based on layout + let composedImage = try composeImages( + frontBuffer: frontBuffer, + backBuffer: backBuffer, + layout: layout, + outputSize: outputSize + ) + + // 3. Convert to UIImage + return try createUIImage(from: composedImage, outputSize: outputSize) + } + + // MARK: - Composition + + private func composeImages( + frontBuffer: CVPixelBuffer, + backBuffer: CVPixelBuffer, + layout: DualCameraLayout, + outputSize: CGSize + ) throws -> CIImage { + let frontImage = CIImage(cvPixelBuffer: frontBuffer) + let backImage = CIImage(cvPixelBuffer: backBuffer) + + switch layout { + case .piP(let miniCamera, let position): + return try composePictureInPicture( + primary: miniCamera == .front ? frontImage : backImage, + mini: miniCamera == .front ? frontImage : backImage, + position: position, + outputSize: outputSize + ) + + case .sideBySide: + return try composeSideBySide( + front: frontImage, + back: backImage, + outputSize: outputSize + ) + + case .stackedVertical: + return try composeStackedVertical( + front: frontImage, + back: backImage, + outputSize: outputSize + ) + } + } + + // MARK: - Layout Implementations + + /// Composes picture-in-picture layout with one camera as primary and one as mini overlay + private func composePictureInPicture( + primary: CIImage, + mini: CIImage, + position: DualCameraLayout.MiniCameraPosition, + outputSize: CGSize + ) throws -> CIImage { + // Scale primary to fill output size (aspect fill) + let primaryScaled = scaleImageToFill(primary, targetSize: outputSize) + + // Scale mini to 1/4 size (25% of output) + let miniSize = CGSize( + width: outputSize.width * 0.25, + height: outputSize.height * 0.25 + ) + let miniScaled = scaleImageToFit(mini, targetSize: miniSize) + + // Apply styling effects to mini camera + let miniStyled = applyMiniCameraEffects(to: miniScaled, size: miniSize) + + // Calculate position for mini camera + let padding: CGFloat = 16 * (outputSize.width / 390) // Scale padding with output size + let miniPosition: CGPoint + switch position { + case .topLeading: + miniPosition = CGPoint(x: padding, y: outputSize.height - miniSize.height - padding) + case .topTrailing: + miniPosition = CGPoint(x: outputSize.width - miniSize.width - padding, y: outputSize.height - miniSize.height - padding) + case .bottomLeading: + miniPosition = CGPoint(x: padding, y: padding) + case .bottomTrailing: + miniPosition = CGPoint(x: outputSize.width - miniSize.width - padding, y: padding) + } + + // Position the styled mini image + let miniPositioned = miniStyled.transformed(by: CGAffineTransform(translationX: miniPosition.x, y: miniPosition.y)) + + // Composite mini over primary + let composed = miniPositioned.composited(over: primaryScaled) + + return composed + } + + /// Applies UI effects (rounded corners, shadows, borders) to the mini camera image + private func applyMiniCameraEffects(to image: CIImage, size: CGSize) -> CIImage { + var result = image + + // 1. Apply rounded corners + if style.miniCameraCornerRadius > 0 { + result = applyRoundedCorners(to: result, radius: style.miniCameraCornerRadius, size: size) + } + + // 2. Apply border + if let border = style.miniCameraBorder { + result = applyBorder(to: result, border: border, size: size) + } +// +// // 3. 
Apply shadow +// if let shadow = style.miniCameraShadow { +// result = applyShadow(to: result, shadow: shadow, size: size) +// } + + return result + } + + /// Applies rounded corners to an image using Core Image + private func applyRoundedCorners(to image: CIImage, radius: CGFloat, size: CGSize) -> CIImage { + // Create a rounded rect mask at high resolution (match image scale) + let scale: CGFloat = 3.0 // Use retina scale for crisp edges + let scaledSize = CGSize(width: size.width * scale, height: size.height * scale) + let scaledRadius = radius * scale + + let renderer = UIGraphicsImageRenderer(size: scaledSize) + let maskImage = renderer.image { context in + let rect = CGRect(origin: .zero, size: scaledSize) + let path = UIBezierPath(roundedRect: rect, cornerRadius: scaledRadius) + UIColor.white.setFill() + path.fill() + } + + guard var maskCIImage = CIImage(image: maskImage) else { return image } + + // Scale mask back down to match image size + let scaleTransform = CGAffineTransform(scaleX: 1.0 / scale, y: 1.0 / scale) + maskCIImage = maskCIImage.transformed(by: scaleTransform) + + // Apply mask using blend mode + let filter = CIFilter(name: "CIBlendWithMask") + filter?.setValue(image, forKey: kCIInputImageKey) + filter?.setValue(CIImage(color: .clear).cropped(to: image.extent), forKey: kCIInputBackgroundImageKey) + filter?.setValue(maskCIImage, forKey: kCIInputMaskImageKey) + + return filter?.outputImage ?? image + } + + /// Applies a border to an image **without** resampling the image. + /// We render a transparent stroke-only layer in the CIImage's pixel space + /// and composite it over the original image to avoid any scaling blur. + /// `size` is the intended point-size of the mini view; we only use it to + /// correctly convert point-based style values (radius/lineWidth) into pixels. + private func applyBorder(to image: CIImage, border: DualCameraPhotoStyle.BorderStyle, size: CGSize) -> CIImage { + // Work in the CIImage's pixel space to avoid stretching/scaling. + let extent = image.extent.integral + let pixelSize = extent.size + + // Guard against empty inputs + guard pixelSize.width > 0, pixelSize.height > 0, size.width > 0, size.height > 0 else { + return image + } + + // Convert point-based style to pixel units to keep visual parity at any output scale. + // `size` represents the points we asked the mini camera to occupy; the CI image may be higher-res. + // We map points -> pixels by measuring how many pixels correspond to one point. + let scaleX = pixelSize.width / size.width + let scaleY = pixelSize.height / size.height + let scale = max(scaleX, scaleY) // conservative (match aspectFill used earlier) + + let borderWidthPx = max(1.0, border.width * scale) + let cornerRadiusPx = max(0.0, style.miniCameraCornerRadius * scale) + + // Render only the border stroke on a transparent background, in pixel space. + let format = UIGraphicsImageRendererFormat() + format.opaque = false + format.scale = 1.0 // IMPORTANT: pixel-accurate canvas (no UIKit scaling) + + let renderer = UIGraphicsImageRenderer(size: pixelSize, format: format) + let borderLayer = renderer.image { _ in + // Stroke a rounded-rect that matches the masked mini image. 
+ let rect = CGRect(origin: .zero, size: pixelSize).insetBy(dx: borderWidthPx / 2, dy: borderWidthPx / 2) + let path = UIBezierPath(roundedRect: rect, cornerRadius: cornerRadiusPx) + path.lineWidth = borderWidthPx + border.color.setStroke() + path.stroke() + } + + guard let borderCI = CIImage(image: borderLayer) else { + return image + } + + // Composite border over the original without touching the original pixels. + return borderCI.composited(over: image) + } + + /// Applies a shadow effect to an image + private func applyShadow(to image: CIImage, shadow: DualCameraPhotoStyle.ShadowStyle, size: CGSize) -> CIImage { + // Expand canvas to accommodate shadow + let shadowPadding: CGFloat = shadow.radius * 2 + let expandedSize = CGSize( + width: size.width + shadowPadding * 2, + height: size.height + shadowPadding * 2 + ) + + // Use high-resolution rendering + let format = UIGraphicsImageRendererFormat() + format.scale = 3.0 + + let renderer = UIGraphicsImageRenderer(size: expandedSize, format: format) + let shadowedImage = renderer.image { context in + let cgContext = context.cgContext + + // Configure shadow + cgContext.setShadow( + offset: shadow.offset, + blur: shadow.radius, + color: shadow.color.withAlphaComponent(CGFloat(shadow.opacity)).cgColor + ) + + // Draw the image with shadow at full quality + if let cgImage = ciContext.createCGImage(image, from: image.extent) { + let imageRect = CGRect( + x: shadowPadding, + y: shadowPadding, + width: size.width, + height: size.height + ) + let uiImage = UIImage(cgImage: cgImage, scale: 1.0, orientation: .up) + uiImage.draw(in: imageRect) + } + } + + return CIImage(image: shadowedImage)?.transformed(by: CGAffineTransform(translationX: -shadowPadding, y: -shadowPadding)) ?? image + } + + /// Composes side-by-side layout with both cameras at equal size + private func composeSideBySide( + front: CIImage, + back: CIImage, + outputSize: CGSize + ) throws -> CIImage { + // Each camera gets half the width + let halfWidth = outputSize.width / 2 + let cameraSize = CGSize(width: halfWidth, height: outputSize.height) + + // Scale both cameras to fit their half + let frontScaled = scaleImageToFill(front, targetSize: cameraSize) + let backScaled = scaleImageToFill(back, targetSize: cameraSize) + + // Position back camera on the left, front on the right + let backPositioned = backScaled // Already at origin + let frontPositioned = frontScaled.transformed(by: CGAffineTransform(translationX: halfWidth, y: 0)) + + // Composite + let composed = frontPositioned.composited(over: backPositioned) + + return composed + } + + /// Composes stacked vertical layout with both cameras at equal size + private func composeStackedVertical( + front: CIImage, + back: CIImage, + outputSize: CGSize + ) throws -> CIImage { + // Each camera gets half the height + let halfHeight = outputSize.height / 2 + let cameraSize = CGSize(width: outputSize.width, height: halfHeight) + + // Scale both cameras to fit their half + let frontScaled = scaleImageToFill(front, targetSize: cameraSize) + let backScaled = scaleImageToFill(back, targetSize: cameraSize) + + // Position back camera on top, front on bottom + let backPositioned = backScaled.transformed(by: CGAffineTransform(translationX: 0, y: halfHeight)) + let frontPositioned = frontScaled // Already at origin + + // Composite + let composed = backPositioned.composited(over: frontPositioned) + + return composed + } + + // MARK: - Scaling Helpers + + /// Scales image to fill target size (aspect fill - may crop) + private func 
scaleImageToFill(_ image: CIImage, targetSize: CGSize) -> CIImage { + let imageSize = image.extent.size + let scaleX = targetSize.width / imageSize.width + let scaleY = targetSize.height / imageSize.height + let scale = max(scaleX, scaleY) // Use larger scale to fill + + let scaledImage = image.transformed(by: CGAffineTransform(scaleX: scale, y: scale)) + + // Crop to target size if needed + let cropRect = CGRect( + x: (scaledImage.extent.width - targetSize.width) / 2, + y: (scaledImage.extent.height - targetSize.height) / 2, + width: targetSize.width, + height: targetSize.height + ) + + return scaledImage.cropped(to: cropRect) + .transformed(by: CGAffineTransform(translationX: -cropRect.origin.x, y: -cropRect.origin.y)) + } + + /// Scales image to fit target size (aspect fit - no crop, may have letterboxing) + private func scaleImageToFit(_ image: CIImage, targetSize: CGSize) -> CIImage { + let imageSize = image.extent.size + let scaleX = targetSize.width / imageSize.width + let scaleY = targetSize.height / imageSize.height + let scale = min(scaleX, scaleY) // Use smaller scale to fit + + return image.transformed(by: CGAffineTransform(scaleX: scale, y: scale)) + } + + // MARK: - Image Conversion + + /// Converts CIImage to UIImage + private func createUIImage(from ciImage: CIImage, outputSize: CGSize) throws -> UIImage { + // Define the bounds for rendering + let bounds = CGRect(origin: .zero, size: outputSize) + + // Render CIImage to CGImage + guard let cgImage = ciContext.createCGImage(ciImage, from: bounds) else { + throw DualCameraError.captureFailure(.imageCreationFailed) + } + + // Create UIImage with proper orientation + return UIImage(cgImage: cgImage, scale: 1.0, orientation: .up) + } +} diff --git a/Sources/DualCameraKit/Screen/DualCameraViewModel.swift b/Sources/DualCameraKit/Screen/DualCameraViewModel.swift index 34871e5..e134c32 100644 --- a/Sources/DualCameraKit/Screen/DualCameraViewModel.swift +++ b/Sources/DualCameraKit/Screen/DualCameraViewModel.swift @@ -101,7 +101,7 @@ public final class DualCameraViewModel { saveToLibrary: Bool = false, mediaLibraryService: MediaLibraryService = CurrentDualCameraEnvironment.mediaLibraryService, showSettingsButton: Bool = false, - showCameraFlashButton: Bool = true, + showCameraFlashButton: Bool = true ) { self.controller = dualCameraController self.cameraLayout = layout @@ -202,7 +202,22 @@ public final class DualCameraViewModel { } try await Task.sleep(for: .seconds(0.25)) - let image = try await controller.captureCurrentScreen(mode: selectedCaptureScope.toPhotoCaptureMode(using: containerFrame)) + + // Use stream-based capture if controller supports it, otherwise fall back to screenshot + let image: UIImage + if let dualCameraController = controller as? 
DualCameraController, dualCameraController.useStreamCapture { + // NEW: High-quality stream composition + image = try await controller.captureComposedPhoto( + layout: cameraLayout, + mode: selectedCaptureScope.toPhotoCaptureMode(using: containerFrame) + ) + } else { + // LEGACY: Screenshot-based capture + image = try await controller.captureCurrentScreen( + mode: selectedCaptureScope.toPhotoCaptureMode(using: containerFrame) + ) + } + viewState = .ready // Turn off torch after capture From d1449ee61ed3b0eac92b52afb69f965ac907e023 Mon Sep 17 00:00:00 2001 From: Liam Ronan Date: Mon, 3 Nov 2025 12:41:29 -0800 Subject: [PATCH 2/7] add pinch + zoom to captured demo image --- .../DualCameraDemo.xcodeproj/project.pbxproj | 20 +++++++++++++++++++ .../CapturePreviewOverlay.swift | 5 +++++ 2 files changed, 25 insertions(+) diff --git a/DualCameraDemo/DualCameraDemo.xcodeproj/project.pbxproj b/DualCameraDemo/DualCameraDemo.xcodeproj/project.pbxproj index 9efa2f1..8a70be5 100644 --- a/DualCameraDemo/DualCameraDemo.xcodeproj/project.pbxproj +++ b/DualCameraDemo/DualCameraDemo.xcodeproj/project.pbxproj @@ -7,6 +7,7 @@ objects = { /* Begin PBXBuildFile section */ + 4F4460DE2EB94A7C00685CD7 /* Zoomable in Frameworks */ = {isa = PBXBuildFile; productRef = 4F4460DD2EB94A7C00685CD7 /* Zoomable */; }; 4F7E4C7F2D6D2B0A006F5609 /* DualCameraKit in Frameworks */ = {isa = PBXBuildFile; productRef = 4F7E4C7E2D6D2B0A006F5609 /* DualCameraKit */; }; /* End PBXBuildFile section */ @@ -70,6 +71,7 @@ buildActionMask = 2147483647; files = ( 4F7E4C7F2D6D2B0A006F5609 /* DualCameraKit in Frameworks */, + 4F4460DE2EB94A7C00685CD7 /* Zoomable in Frameworks */, ); runOnlyForDeploymentPostprocessing = 0; }; @@ -132,6 +134,7 @@ name = DualCameraDemo; packageProductDependencies = ( 4F7E4C7E2D6D2B0A006F5609 /* DualCameraKit */, + 4F4460DD2EB94A7C00685CD7 /* Zoomable */, ); productName = DualCameraDemo; productReference = 4F4D659B2D66D02C00F40490 /* DualCameraDemo.app */; @@ -217,6 +220,7 @@ minimizedProjectReferenceProxies = 1; packageReferences = ( 4F7E4C7D2D6D2B0A006F5609 /* XCLocalSwiftPackageReference "../../DualCameraKit" */, + 4F4460DC2EB94A7C00685CD7 /* XCRemoteSwiftPackageReference "Zoomable" */, ); preferredProjectObjectVersion = 77; productRefGroup = 4F4D659C2D66D02C00F40490 /* Products */; @@ -620,7 +624,23 @@ }; /* End XCLocalSwiftPackageReference section */ +/* Begin XCRemoteSwiftPackageReference section */ + 4F4460DC2EB94A7C00685CD7 /* XCRemoteSwiftPackageReference "Zoomable" */ = { + isa = XCRemoteSwiftPackageReference; + repositoryURL = "https://github.com/ryohey/Zoomable"; + requirement = { + branch = main; + kind = branch; + }; + }; +/* End XCRemoteSwiftPackageReference section */ + /* Begin XCSwiftPackageProductDependency section */ + 4F4460DD2EB94A7C00685CD7 /* Zoomable */ = { + isa = XCSwiftPackageProductDependency; + package = 4F4460DC2EB94A7C00685CD7 /* XCRemoteSwiftPackageReference "Zoomable" */; + productName = Zoomable; + }; 4F7E4C7E2D6D2B0A006F5609 /* DualCameraKit */ = { isa = XCSwiftPackageProductDependency; productName = DualCameraKit; diff --git a/DualCameraDemo/DualCameraDemo/CapturePreviewOverlay.swift b/DualCameraDemo/DualCameraDemo/CapturePreviewOverlay.swift index ec6e8fb..a6bddcf 100644 --- a/DualCameraDemo/DualCameraDemo/CapturePreviewOverlay.swift +++ b/DualCameraDemo/DualCameraDemo/CapturePreviewOverlay.swift @@ -1,9 +1,11 @@ import SwiftUI +import Zoomable struct CapturePreviewOverlay: View { let image: UIImage let onDismiss: () -> Void let onConfirm: () -> Void + @GestureState private 
var magnifyBy = 1.0 var body: some View { ZStack { @@ -16,6 +18,7 @@ struct CapturePreviewOverlay: View { Image(systemName: "xmark.circle.fill") .font(.system(size: 24)) .foregroundColor(.white) + } Spacer() @@ -38,6 +41,7 @@ struct CapturePreviewOverlay: View { .resizable() .scaledToFit() .cornerRadius(12) + .zoomable() .padding() Spacer() @@ -45,4 +49,5 @@ struct CapturePreviewOverlay: View { .padding() } } + } From 07328ad70f2f150410505119f79a3850a2c35401 Mon Sep 17 00:00:00 2001 From: Liam Ronan Date: Mon, 3 Nov 2025 13:53:40 -0800 Subject: [PATCH 3/7] initial PiP effects composing for high-quality --- .../DualCameraStreamPhotoCapturer.swift | 267 +++++++++--------- 1 file changed, 141 insertions(+), 126 deletions(-) diff --git a/Sources/DualCameraKit/DualCameraStreamPhotoCapturer.swift b/Sources/DualCameraKit/DualCameraStreamPhotoCapturer.swift index fb8699d..fd2eacf 100644 --- a/Sources/DualCameraKit/DualCameraStreamPhotoCapturer.swift +++ b/Sources/DualCameraKit/DualCameraStreamPhotoCapturer.swift @@ -10,13 +10,13 @@ public struct DualCameraPhotoStyle { public let miniCameraShadow: ShadowStyle? /// Border properties for mini camera public let miniCameraBorder: BorderStyle? - + public struct ShadowStyle { public let color: UIColor public let radius: CGFloat public let opacity: Float public let offset: CGSize - + public init(color: UIColor = .black, radius: CGFloat = 10, opacity: Float = 0.5, offset: CGSize = CGSize(width: 0, height: 4)) { self.color = color self.radius = radius @@ -24,31 +24,31 @@ public struct DualCameraPhotoStyle { self.offset = offset } } - + public struct BorderStyle { public let color: UIColor public let width: CGFloat - + public init(color: UIColor = .white, width: CGFloat = 2) { self.color = color self.width = width } } - + /// Default style matching DualCameraScreen's SwiftUI appearance public static let dualCameraScreen = DualCameraPhotoStyle( miniCameraCornerRadius: 12, miniCameraShadow: ShadowStyle(color: .black, radius: 10, opacity: 0.3, offset: CGSize(width: 0, height: 4)), miniCameraBorder: BorderStyle(color: .white.withAlphaComponent(0.3), width: 2) ) - + /// Minimal style with no effects public static let minimal = DualCameraPhotoStyle( miniCameraCornerRadius: 0, miniCameraShadow: nil, miniCameraBorder: nil ) - + public init(miniCameraCornerRadius: CGFloat = 0, miniCameraShadow: ShadowStyle? = nil, miniCameraBorder: BorderStyle? = nil) { self.miniCameraCornerRadius = miniCameraCornerRadius self.miniCameraShadow = miniCameraShadow @@ -60,16 +60,16 @@ public struct DualCameraPhotoStyle { /// This approach provides native camera resolution instead of screen-limited screenshot quality. @MainActor public class DualCameraStreamPhotoCapturer { - + private let ciContext: CIContext public var style: DualCameraPhotoStyle - + public init(style: DualCameraPhotoStyle = .dualCameraScreen) { // Create GPU-accelerated Core Image context self.ciContext = CIContext(options: [.useSoftwareRenderer: false]) self.style = style } - + /// Captures synchronized frames from both cameras and composes them according to layout public func captureComposedPhoto( frontRenderer: CameraRenderer, @@ -80,7 +80,7 @@ public class DualCameraStreamPhotoCapturer { // 1. Capture raw buffers from both cameras at native resolution let frontBuffer = try await frontRenderer.captureCurrentBuffer() let backBuffer = try await backRenderer.captureCurrentBuffer() - + // 2. 
Compose based on layout let composedImage = try composeImages( frontBuffer: frontBuffer, @@ -88,13 +88,13 @@ public class DualCameraStreamPhotoCapturer { layout: layout, outputSize: outputSize ) - + // 3. Convert to UIImage return try createUIImage(from: composedImage, outputSize: outputSize) } - + // MARK: - Composition - + private func composeImages( frontBuffer: CVPixelBuffer, backBuffer: CVPixelBuffer, @@ -103,23 +103,23 @@ public class DualCameraStreamPhotoCapturer { ) throws -> CIImage { let frontImage = CIImage(cvPixelBuffer: frontBuffer) let backImage = CIImage(cvPixelBuffer: backBuffer) - + switch layout { case .piP(let miniCamera, let position): return try composePictureInPicture( - primary: miniCamera == .front ? frontImage : backImage, + primary: miniCamera == .front ? backImage : frontImage, mini: miniCamera == .front ? frontImage : backImage, position: position, outputSize: outputSize ) - + case .sideBySide: return try composeSideBySide( front: frontImage, back: backImage, outputSize: outputSize ) - + case .stackedVertical: return try composeStackedVertical( front: frontImage, @@ -128,9 +128,9 @@ public class DualCameraStreamPhotoCapturer { ) } } - + // MARK: - Layout Implementations - + /// Composes picture-in-picture layout with one camera as primary and one as mini overlay private func composePictureInPicture( primary: CIImage, @@ -140,17 +140,17 @@ public class DualCameraStreamPhotoCapturer { ) throws -> CIImage { // Scale primary to fill output size (aspect fill) let primaryScaled = scaleImageToFill(primary, targetSize: outputSize) - + // Scale mini to 1/4 size (25% of output) let miniSize = CGSize( width: outputSize.width * 0.25, height: outputSize.height * 0.25 ) let miniScaled = scaleImageToFit(mini, targetSize: miniSize) - + // Apply styling effects to mini camera let miniStyled = applyMiniCameraEffects(to: miniScaled, size: miniSize) - + // Calculate position for mini camera let padding: CGFloat = 16 * (outputSize.width / 390) // Scale padding with output size let miniPosition: CGPoint @@ -164,68 +164,76 @@ public class DualCameraStreamPhotoCapturer { case .bottomTrailing: miniPosition = CGPoint(x: outputSize.width - miniSize.width - padding, y: padding) } - + // Position the styled mini image let miniPositioned = miniStyled.transformed(by: CGAffineTransform(translationX: miniPosition.x, y: miniPosition.y)) - + // Composite mini over primary let composed = miniPositioned.composited(over: primaryScaled) - + return composed } - + /// Applies UI effects (rounded corners, shadows, borders) to the mini camera image private func applyMiniCameraEffects(to image: CIImage, size: CGSize) -> CIImage { var result = image - + // 1. Apply rounded corners if style.miniCameraCornerRadius > 0 { result = applyRoundedCorners(to: result, radius: style.miniCameraCornerRadius, size: size) } - + // 2. Apply border if let border = style.miniCameraBorder { result = applyBorder(to: result, border: border, size: size) } -// -// // 3. Apply shadow -// if let shadow = style.miniCameraShadow { -// result = applyShadow(to: result, shadow: shadow, size: size) -// } - + + // 3. Apply shadow + if let shadow = style.miniCameraShadow { + result = applyShadow(to: result, shadow: shadow, size: size) + } + return result } - + /// Applies rounded corners to an image using Core Image + /// Applies rounded corners using a CI-generated rounded-rectangle mask aligned to the image's extent. + /// This avoids UIKit's top-left origin and any scale/flip issues that can misplace corners. 
private func applyRoundedCorners(to image: CIImage, radius: CGFloat, size: CGSize) -> CIImage { - // Create a rounded rect mask at high resolution (match image scale) - let scale: CGFloat = 3.0 // Use retina scale for crisp edges - let scaledSize = CGSize(width: size.width * scale, height: size.height * scale) - let scaledRadius = radius * scale - - let renderer = UIGraphicsImageRenderer(size: scaledSize) - let maskImage = renderer.image { context in - let rect = CGRect(origin: .zero, size: scaledSize) - let path = UIBezierPath(roundedRect: rect, cornerRadius: scaledRadius) - UIColor.white.setFill() - path.fill() + let extent = image.extent.integral + let pixelSize = extent.size + guard pixelSize.width > 0, pixelSize.height > 0, size.width > 0, size.height > 0 else { + return image } - - guard var maskCIImage = CIImage(image: maskImage) else { return image } - - // Scale mask back down to match image size - let scaleTransform = CGAffineTransform(scaleX: 1.0 / scale, y: 1.0 / scale) - maskCIImage = maskCIImage.transformed(by: scaleTransform) - - // Apply mask using blend mode - let filter = CIFilter(name: "CIBlendWithMask") - filter?.setValue(image, forKey: kCIInputImageKey) - filter?.setValue(CIImage(color: .clear).cropped(to: image.extent), forKey: kCIInputBackgroundImageKey) - filter?.setValue(maskCIImage, forKey: kCIInputMaskImageKey) - - return filter?.outputImage ?? image + + // Convert point-based corner radius (SwiftUI style) to pixel units to match the scaled mini image. + let scaleX = pixelSize.width / size.width + let scaleY = pixelSize.height / size.height + let scale = max(scaleX, scaleY) // match aspectFill logic elsewhere + let cornerRadiusPx = max(0.0, radius * scale) + + // Generate a rounded-rectangle mask directly in CI coordinate space (origin: bottom-left), + // with the SAME extent as the target image to avoid any translation mismatches. + guard let roundedGen = CIFilter(name: "CIRoundedRectangleGenerator") else { + return image + } + roundedGen.setValue(CIVector(cgRect: extent), forKey: "inputExtent") + roundedGen.setValue(cornerRadiusPx, forKey: "inputRadius") + + guard let rawMask = roundedGen.outputImage?.cropped(to: extent) else { + return image + } + + // Blend the image over a transparent background using the rounded mask. + let clearBG = CIImage(color: .clear).cropped(to: extent) + let blend = CIFilter(name: "CIBlendWithMask") + blend?.setValue(image, forKey: kCIInputImageKey) + blend?.setValue(clearBG, forKey: kCIInputBackgroundImageKey) + blend?.setValue(rawMask, forKey: kCIInputMaskImageKey) + + return blend?.outputImage ?? image } - + /// Applies a border to an image **without** resampling the image. /// We render a transparent stroke-only layer in the CIImage's pixel space /// and composite it over the original image to avoid any scaling blur. @@ -235,27 +243,27 @@ public class DualCameraStreamPhotoCapturer { // Work in the CIImage's pixel space to avoid stretching/scaling. let extent = image.extent.integral let pixelSize = extent.size - + // Guard against empty inputs guard pixelSize.width > 0, pixelSize.height > 0, size.width > 0, size.height > 0 else { return image } - + // Convert point-based style to pixel units to keep visual parity at any output scale. // `size` represents the points we asked the mini camera to occupy; the CI image may be higher-res. // We map points -> pixels by measuring how many pixels correspond to one point. 
let scaleX = pixelSize.width / size.width let scaleY = pixelSize.height / size.height let scale = max(scaleX, scaleY) // conservative (match aspectFill used earlier) - + let borderWidthPx = max(1.0, border.width * scale) let cornerRadiusPx = max(0.0, style.miniCameraCornerRadius * scale) - + // Render only the border stroke on a transparent background, in pixel space. let format = UIGraphicsImageRendererFormat() format.opaque = false format.scale = 1.0 // IMPORTANT: pixel-accurate canvas (no UIKit scaling) - + let renderer = UIGraphicsImageRenderer(size: pixelSize, format: format) let borderLayer = renderer.image { _ in // Stroke a rounded-rect that matches the masked mini image. @@ -265,55 +273,62 @@ public class DualCameraStreamPhotoCapturer { border.color.setStroke() path.stroke() } - + guard let borderCI = CIImage(image: borderLayer) else { return image } - + // Composite border over the original without touching the original pixels. return borderCI.composited(over: image) } - - /// Applies a shadow effect to an image + + /// Applies a drop shadow entirely in Core Image space to avoid UIKit y-flip/resampling. + /// Shadow is created from the image's alpha, blurred, translated, tinted, then composited under the image. private func applyShadow(to image: CIImage, shadow: DualCameraPhotoStyle.ShadowStyle, size: CGSize) -> CIImage { - // Expand canvas to accommodate shadow - let shadowPadding: CGFloat = shadow.radius * 2 - let expandedSize = CGSize( - width: size.width + shadowPadding * 2, - height: size.height + shadowPadding * 2 - ) - - // Use high-resolution rendering - let format = UIGraphicsImageRendererFormat() - format.scale = 3.0 - - let renderer = UIGraphicsImageRenderer(size: expandedSize, format: format) - let shadowedImage = renderer.image { context in - let cgContext = context.cgContext - - // Configure shadow - cgContext.setShadow( - offset: shadow.offset, - blur: shadow.radius, - color: shadow.color.withAlphaComponent(CGFloat(shadow.opacity)).cgColor - ) - - // Draw the image with shadow at full quality - if let cgImage = ciContext.createCGImage(image, from: image.extent) { - let imageRect = CGRect( - x: shadowPadding, - y: shadowPadding, - width: size.width, - height: size.height - ) - let uiImage = UIImage(cgImage: cgImage, scale: 1.0, orientation: .up) - uiImage.draw(in: imageRect) - } + let extent = image.extent.integral + let pixelSize = extent.size + guard pixelSize.width > 0, pixelSize.height > 0, size.width > 0, size.height > 0 else { + return image } - return CIImage(image: shadowedImage)?.transformed(by: CGAffineTransform(translationX: -shadowPadding, y: -shadowPadding)) ?? image + // Convert point-based values (SwiftUI) to pixels for the current mini size. + let scaleX = pixelSize.width / size.width + let scaleY = pixelSize.height / size.height + let scale = max(scaleX, scaleY) + + let blurPx = max(0, shadow.radius * scale) + let offsetPx = CGSize(width: shadow.offset.width * scale, height: shadow.offset.height * scale) + let opacity = CGFloat(shadow.opacity) + + // 1) Build an alpha mask of the current image (transparent corners already respected). + // If the image already has a proper alpha, CIMaskToAlpha will produce a luminance-based alpha. + let alphaMask = CIFilter(name: "CIMaskToAlpha", parameters: [kCIInputImageKey: image])?.outputImage? + .cropped(to: extent) ?? image + + // 2) Blur the alpha to make the shadow soft. 
+ let blurredMask = CIFilter(name: "CIGaussianBlur", parameters: [ + kCIInputImageKey: alphaMask, + kCIInputRadiusKey: blurPx + ])?.outputImage?.cropped(to: extent) ?? alphaMask + + // 3) Offset the blurred mask by the shadow's offset. + let translatedMask = blurredMask.transformed(by: CGAffineTransform(translationX: offsetPx.width, y: offsetPx.height)) + + // 4) Create a solid color image for the shadow and apply the mask as alpha. + let color = CIColor(color: shadow.color.withAlphaComponent(opacity)) + let colorImage = CIImage(color: color).cropped(to: translatedMask.extent) + let shadowLayer = CIFilter(name: "CIBlendWithAlphaMask", parameters: [ + kCIInputImageKey: colorImage, + kCIInputBackgroundImageKey: CIImage(color: .clear).cropped(to: translatedMask.extent), + kCIInputMaskImageKey: translatedMask + ])?.outputImage ?? colorImage + + // 5) Composite shadow layer under the original image. + let withShadow = shadowLayer.composited(over: image) + + return withShadow } - + /// Composes side-by-side layout with both cameras at equal size private func composeSideBySide( front: CIImage, @@ -323,21 +338,21 @@ public class DualCameraStreamPhotoCapturer { // Each camera gets half the width let halfWidth = outputSize.width / 2 let cameraSize = CGSize(width: halfWidth, height: outputSize.height) - + // Scale both cameras to fit their half let frontScaled = scaleImageToFill(front, targetSize: cameraSize) let backScaled = scaleImageToFill(back, targetSize: cameraSize) - + // Position back camera on the left, front on the right let backPositioned = backScaled // Already at origin let frontPositioned = frontScaled.transformed(by: CGAffineTransform(translationX: halfWidth, y: 0)) - + // Composite let composed = frontPositioned.composited(over: backPositioned) - + return composed } - + /// Composes stacked vertical layout with both cameras at equal size private func composeStackedVertical( front: CIImage, @@ -347,32 +362,32 @@ public class DualCameraStreamPhotoCapturer { // Each camera gets half the height let halfHeight = outputSize.height / 2 let cameraSize = CGSize(width: outputSize.width, height: halfHeight) - + // Scale both cameras to fit their half let frontScaled = scaleImageToFill(front, targetSize: cameraSize) let backScaled = scaleImageToFill(back, targetSize: cameraSize) - + // Position back camera on top, front on bottom let backPositioned = backScaled.transformed(by: CGAffineTransform(translationX: 0, y: halfHeight)) let frontPositioned = frontScaled // Already at origin - + // Composite let composed = backPositioned.composited(over: frontPositioned) - + return composed } - + // MARK: - Scaling Helpers - + /// Scales image to fill target size (aspect fill - may crop) private func scaleImageToFill(_ image: CIImage, targetSize: CGSize) -> CIImage { let imageSize = image.extent.size let scaleX = targetSize.width / imageSize.width let scaleY = targetSize.height / imageSize.height let scale = max(scaleX, scaleY) // Use larger scale to fill - + let scaledImage = image.transformed(by: CGAffineTransform(scaleX: scale, y: scale)) - + // Crop to target size if needed let cropRect = CGRect( x: (scaledImage.extent.width - targetSize.width) / 2, @@ -380,33 +395,33 @@ public class DualCameraStreamPhotoCapturer { width: targetSize.width, height: targetSize.height ) - + return scaledImage.cropped(to: cropRect) .transformed(by: CGAffineTransform(translationX: -cropRect.origin.x, y: -cropRect.origin.y)) } - + /// Scales image to fit target size (aspect fit - no crop, may have letterboxing) private func 
scaleImageToFit(_ image: CIImage, targetSize: CGSize) -> CIImage { let imageSize = image.extent.size let scaleX = targetSize.width / imageSize.width let scaleY = targetSize.height / imageSize.height let scale = min(scaleX, scaleY) // Use smaller scale to fit - + return image.transformed(by: CGAffineTransform(scaleX: scale, y: scale)) } - + // MARK: - Image Conversion - + /// Converts CIImage to UIImage private func createUIImage(from ciImage: CIImage, outputSize: CGSize) throws -> UIImage { // Define the bounds for rendering let bounds = CGRect(origin: .zero, size: outputSize) - + // Render CIImage to CGImage guard let cgImage = ciContext.createCGImage(ciImage, from: bounds) else { throw DualCameraError.captureFailure(.imageCreationFailed) } - + // Create UIImage with proper orientation return UIImage(cgImage: cgImage, scale: 1.0, orientation: .up) } From 1131c8632691a5223db076d3bcf67064a1d3a916 Mon Sep 17 00:00:00 2001 From: Liam Ronan Date: Mon, 3 Nov 2025 13:54:14 -0800 Subject: [PATCH 4/7] cleanup unused --- Sources/DualCameraKit/DualCameraPhotoCapturing.swift | 5 ----- 1 file changed, 5 deletions(-) diff --git a/Sources/DualCameraKit/DualCameraPhotoCapturing.swift b/Sources/DualCameraKit/DualCameraPhotoCapturing.swift index 48684f9..503b32a 100644 --- a/Sources/DualCameraKit/DualCameraPhotoCapturing.swift +++ b/Sources/DualCameraKit/DualCameraPhotoCapturing.swift @@ -16,11 +16,6 @@ public enum DualCameraPhotoCaptureMode: Sendable, Equatable { /// The frame's origin determines the top-left corner to start capturing from, /// and the size determines the dimensions of the captured area. case containerFrame(CGRect) - - @available(*, deprecated, message: "Use containerFrame instead") - public static func containerSize(_ size: CGSize) -> DualCameraPhotoCaptureMode { - .containerFrame(CGRect(origin: .zero, size: size)) - } } public class DualCameraPhotoCapturer: DualCameraPhotoCapturing { From 4488a586146dab1de2cb4a8d7538b1efd57bc0ea Mon Sep 17 00:00:00 2001 From: Liam Ronan Date: Mon, 3 Nov 2025 17:37:50 -0800 Subject: [PATCH 5/7] remove DualCameraMockController; move to DI --- .../DualCameraDemo/ContainerExample.swift | 2 +- .../DualCameraKit/DualCameraController.swift | 111 +----------------- .../DualCameraKit/DualCameraDisplayView.swift | 12 +- .../DualCameraKit/DualCameraEnvironment.swift | 11 +- .../Screen/DualCameraConfigView.swift | 2 +- 5 files changed, 21 insertions(+), 117 deletions(-) diff --git a/DualCameraDemo/DualCameraDemo/ContainerExample.swift b/DualCameraDemo/DualCameraDemo/ContainerExample.swift index 0bbf4be..39b6cd3 100644 --- a/DualCameraDemo/DualCameraDemo/ContainerExample.swift +++ b/DualCameraDemo/DualCameraDemo/ContainerExample.swift @@ -77,7 +77,7 @@ private struct AppTabView: View { .transition(.scale.combined(with: .opacity)) } } - .background(.primary) + .background(.black) .onChange(of: vm.capturedPhoto) { oldValue, newValue in if let image = newValue { captureReviewState.showPreview(image) diff --git a/Sources/DualCameraKit/DualCameraController.swift b/Sources/DualCameraKit/DualCameraController.swift index 2c0a11f..2e94826 100644 --- a/Sources/DualCameraKit/DualCameraController.swift +++ b/Sources/DualCameraKit/DualCameraController.swift @@ -71,8 +71,8 @@ public final class DualCameraController: DualCameraControlling { var renderers: [DualCameraSource: CameraRenderer] = [:] - private let streamSource = DualCameraCameraStreamSource() - + private let streamSource: DualCameraCameraStreamSourcing + // Internal storage for renderers and their stream tasks. 
private var streamTasks: [DualCameraSource: Task] = [:] @@ -89,11 +89,13 @@ public final class DualCameraController: DualCameraControlling { public init( photoCapturer: any DualCameraPhotoCapturing = DualCameraPhotoCapturer(), useStreamCapture: Bool = false, // Default to legacy for backward compatibility - photoStyle: DualCameraPhotoStyle = .dualCameraScreen // Match SwiftUI appearance by default + photoStyle: DualCameraPhotoStyle = .dualCameraScreen, + streamSource: DualCameraCameraStreamSourcing ) { self.photoCapturer = photoCapturer self.useStreamCapture = useStreamCapture self.streamPhotoCapturer = DualCameraStreamPhotoCapturer(style: photoStyle) + self.streamSource = streamSource } nonisolated public var frontCameraStream: AsyncStream { @@ -206,107 +208,4 @@ public final class DualCameraController: DualCameraControlling { } } -/// currently, this mock controller is a "fake" DualCameraController and shadows some functionality -/// via mocks. I think we'll evolve this here such that the DualCameraController can take mocked implementations -/// and we can remove the need for this "fake" behavior, i.e., make things more consistent. -public final class DualCameraMockController: DualCameraControlling { - public func setTorchMode(_ mode: AVCaptureDevice.TorchMode, for camera: DualCameraSource) throws { - // Mock implementation - no-op - } - - public func captureComposedPhoto(layout: DualCameraLayout, mode: DualCameraPhotoCaptureMode) async throws -> UIImage { - // Mock implementation - return a placeholder image - let size = CGSize(width: 1080, height: 1920) - let renderer = UIGraphicsImageRenderer(size: size) - return renderer.image { context in - // Draw purple for back camera, yellow for front camera in PiP - UIColor.purple.setFill() - context.fill(CGRect(origin: .zero, size: size)) - - // Add mini camera overlay for PiP layouts - if case .piP = layout { - UIColor.yellow.setFill() - let miniSize = CGSize(width: size.width * 0.25, height: size.height * 0.25) - context.fill(CGRect(origin: CGPoint(x: 20, y: 20), size: miniSize)) - } - } - } - - public init() { - // for now, unmocked - we'll probably revisit this for testing. - // this works just fine for simulator purposes though. - self.photoCapturer = DualCameraPhotoCapturer() - } - - private var streamSource: DualCameraCameraStreamSourcing = DualCameraMockCameraStreamSource() - private var renderers: [DualCameraSource: CameraRenderer] = [:] - - public var frontCameraStream: AsyncStream { - streamSource.frontCameraStream - } - - public var backCameraStream: AsyncStream { - streamSource.backCameraStream - } - - /// Creates a renderer (using MetalCameraRenderer by default). - public func createRenderer() -> CameraRenderer { - return MetalCameraRenderer() - } - - /// Returns a renderer for the specified camera source. - /// If one does not exist yet, it is created and connected to its stream. 
- public func getRenderer(for source: DualCameraSource) -> CameraRenderer { - if let renderer = renderers[source] { - return renderer - } - - let newRenderer = createRenderer() - renderers[source] = newRenderer - connectStream(for: source, renderer: newRenderer) - return newRenderer - } - - public func startSession() async throws { - try await streamSource.startSession() - - // Auto-initialize renderers - _ = getRenderer(for: .front) - _ = getRenderer(for: .back) - } - - public func stopSession() { - streamSource.stopSession() - cancelRendererTasks() - // Clear renderers so they're recreated with fresh stream connections on next startSession() - renderers.removeAll() - } - - public var photoCapturer: any DualCameraPhotoCapturing - - public var videoRecorder: (any DualCameraVideoRecording)? - - public func setVideoRecorder(_ recorder: any DualCameraVideoRecording) async throws {} - - private func connectStream(for source: DualCameraSource, renderer: CameraRenderer) { - let stream: AsyncStream = source == .front ? frontCameraStream : backCameraStream - // Create a task that forwards frames from the stream to the renderer. - let task = Task { - for await buffer in stream { - if Task.isCancelled { break } - renderer.update(with: buffer.buffer) - } - } - streamTasks[source] = task - } - - private var streamTasks: [DualCameraSource: Task] = [:] - - private func cancelRendererTasks() { - for task in streamTasks.values { - task.cancel() - } - streamTasks.removeAll() - } -} diff --git a/Sources/DualCameraKit/DualCameraDisplayView.swift b/Sources/DualCameraKit/DualCameraDisplayView.swift index 46e0d41..7d90478 100644 --- a/Sources/DualCameraKit/DualCameraDisplayView.swift +++ b/Sources/DualCameraKit/DualCameraDisplayView.swift @@ -73,42 +73,42 @@ public struct DualCameraDisplayView: View { #Preview("PiP - Bottom Trailing") { DualCameraDisplayView( - controller: DualCameraMockController(), + controller: DualCameraEnvironment.getDefaultCameraController(), layout: .piP(miniCamera: .front, miniCameraPosition: .bottomTrailing) ) } #Preview("PiP - Bottom Leading") { DualCameraDisplayView( - controller: DualCameraMockController(), + controller: DualCameraEnvironment.getDefaultCameraController(), layout: .piP(miniCamera: .front, miniCameraPosition: .bottomLeading) ) } #Preview("PiP - Top Trailing") { DualCameraDisplayView( - controller: DualCameraMockController(), + controller: DualCameraEnvironment.getDefaultCameraController(), layout: .piP(miniCamera: .front, miniCameraPosition: .topTrailing) ) } #Preview("PiP - Top Leading") { DualCameraDisplayView( - controller: DualCameraMockController(), + controller: DualCameraEnvironment.getDefaultCameraController(), layout: .piP(miniCamera: .front, miniCameraPosition: .topLeading) ) } #Preview("Stacked Vertical") { DualCameraDisplayView( - controller: DualCameraMockController(), + controller: DualCameraEnvironment.getDefaultCameraController(), layout: .stackedVertical ) } #Preview("Side by Side") { DualCameraDisplayView( - controller: DualCameraMockController(), + controller: DualCameraEnvironment.getDefaultCameraController(), layout: .sideBySide ) } diff --git a/Sources/DualCameraKit/DualCameraEnvironment.swift b/Sources/DualCameraKit/DualCameraEnvironment.swift index 6a76772..dc79461 100644 --- a/Sources/DualCameraKit/DualCameraEnvironment.swift +++ b/Sources/DualCameraKit/DualCameraEnvironment.swift @@ -13,11 +13,16 @@ public struct DualCameraEnvironment: Sendable { @MainActor static func getDefaultCameraController() -> DualCameraControlling { + var 
streamSource: DualCameraCameraStreamSourcing #if targetEnvironment(simulator) - return DualCameraMockController() + streamSource = DualCameraMockCameraStreamSource() #else - return DualCameraController(useStreamCapture: true) -#endif + streamSource = DualCameraCameraStreamSource() +#endif + return DualCameraController( + useStreamCapture: true, + streamSource: streamSource + ) } } diff --git a/Sources/DualCameraKit/Screen/DualCameraConfigView.swift b/Sources/DualCameraKit/Screen/DualCameraConfigView.swift index 875bcf0..273c783 100644 --- a/Sources/DualCameraKit/Screen/DualCameraConfigView.swift +++ b/Sources/DualCameraKit/Screen/DualCameraConfigView.swift @@ -93,7 +93,7 @@ struct DualCameraConfigView: View { #Preview { DualCameraConfigView( viewModel: DualCameraViewModel( - dualCameraController: DualCameraMockController(), + dualCameraController: DualCameraEnvironment.getDefaultCameraController(), saveToLibrary: false ) ) From b477b10988ced7de46a560974327383bd8c94f5b Mon Sep 17 00:00:00 2001 From: Liam Ronan Date: Mon, 3 Nov 2025 20:10:46 -0800 Subject: [PATCH 6/7] give buffer mock stable size --- Sources/DualCameraKit/DualCameraCameraStreamSource.swift | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/Sources/DualCameraKit/DualCameraCameraStreamSource.swift b/Sources/DualCameraKit/DualCameraCameraStreamSource.swift index aa53a6f..b9d363c 100644 --- a/Sources/DualCameraKit/DualCameraCameraStreamSource.swift +++ b/Sources/DualCameraKit/DualCameraCameraStreamSource.swift @@ -252,10 +252,11 @@ public final class DualCameraMockCameraStreamSource: DualCameraCameraStreamSourc public init() { } public func startSession() async throws { - let purpleBuffer: CVPixelBuffer = UIColor.purple.asImage().pixelBuffer()! + let mockSize = CGSize(width: 1080, height: 1920) + let purpleBuffer: CVPixelBuffer = UIColor.purple.asImage(mockSize).pixelBuffer()! let purpleBufferWrapper = PixelBufferWrapper(buffer: purpleBuffer) - let yellowBuffer = UIColor.yellow.asImage().pixelBuffer()! + let yellowBuffer = UIColor.yellow.asImage(mockSize).pixelBuffer()! let yellowBufferWrapper = PixelBufferWrapper(buffer: yellowBuffer) await frontBroadcaster.broadcast(yellowBufferWrapper) await backBroadcaster.broadcast(purpleBufferWrapper) From 45f7181e3a4463e9b5583ea3acb6d8ab1cbd290c Mon Sep 17 00:00:00 2001 From: Liam Ronan Date: Mon, 3 Nov 2025 20:31:49 -0800 Subject: [PATCH 7/7] standardize logging api --- Sources/DualCameraKit/CameraRenderer.swift | 15 +++++------ .../FileSaving/MediaLibraryService.swift | 2 +- Sources/DualCameraKit/Log.swift | 25 ++++++++++++++----- .../DualCameraCPUVideoRecorderManager.swift | 9 +++---- .../DualCameraReplayKitVideoRecorder.swift | 5 ++-- 5 files changed, 34 insertions(+), 22 deletions(-) diff --git a/Sources/DualCameraKit/CameraRenderer.swift b/Sources/DualCameraKit/CameraRenderer.swift index 5a0ebeb..714319f 100644 --- a/Sources/DualCameraKit/CameraRenderer.swift +++ b/Sources/DualCameraKit/CameraRenderer.swift @@ -76,7 +76,7 @@ public final class MetalCameraRenderer: MTKView, CameraRenderer, MTKViewDelegate /// Initializes Metal components and sets up the render pipeline. 
private func initializeMetal() throws { guard let device = self.device else { - DualCameraLogger.errors.error("❌ Metal not supported on this device") + DualCameraLogger.log("❌ Metal not supported on this device", category: .errors, level: .error) throw MetalRendererError.metalNotSupported } @@ -84,7 +84,7 @@ public final class MetalCameraRenderer: MTKView, CameraRenderer, MTKViewDelegate let status = CVMetalTextureCacheCreate(kCFAllocatorDefault, nil, device, nil, &textureCache) if status != kCVReturnSuccess { - DualCameraLogger.errors.error("❌ Failed to create Metal texture cache") + DualCameraLogger.log("❌ Failed to create Metal texture cache", category: .errors, level: .error) throw MetalRendererError.textureCreationFailed } @@ -106,7 +106,7 @@ public final class MetalCameraRenderer: MTKView, CameraRenderer, MTKViewDelegate /// Sets up the Metal render pipeline. private func setupRenderPipeline() throws { guard let device = device else { - DualCameraLogger.errors.error("❌ No metal device found") + DualCameraLogger.log("❌ No metal device found", category: .errors, level: .error) throw MetalRendererError.metalLibraryLoadFailed } @@ -116,13 +116,13 @@ public final class MetalCameraRenderer: MTKView, CameraRenderer, MTKViewDelegate do { library = try device.makeDefaultLibrary(bundle: spmBundle) } catch { - DualCameraLogger.errors.error("❌ Failed to load Metal library: \(error.localizedDescription)") + DualCameraLogger.log("❌ Failed to load Metal library: \(error.localizedDescription)", category: .errors, level: .error) throw MetalRendererError.metalLibraryLoadFailed } guard let vertexFunction = library.makeFunction(name: MetalLibFunctionName.vertexShader), let fragmentFunction = library.makeFunction(name: MetalLibFunctionName.fragmentShader) else { - DualCameraLogger.errors.error("❌ Metal functions not found in library") + DualCameraLogger.log("❌ Metal functions not found in library", category: .errors, level: .error) throw MetalRendererError.metalFunctionNotFound } @@ -133,9 +133,10 @@ public final class MetalCameraRenderer: MTKView, CameraRenderer, MTKViewDelegate do { renderPipelineState = try device.makeRenderPipelineState(descriptor: pipelineDescriptor) - DualCameraLogger.session.info("✅ Metal render pipeline initialized successfully") + DualCameraLogger.log("✅ Metal render pipeline initialized successfully", category: .session) + } catch { - DualCameraLogger.errors.error("❌ Failed to create render pipeline: \(error.localizedDescription)") + DualCameraLogger.log("❌ Failed to create render pipeline: \(error.localizedDescription)", category: .errors, level: .error) throw MetalRendererError.renderPipelineCreationFailed(error) } } diff --git a/Sources/DualCameraKit/FileSaving/MediaLibraryService.swift b/Sources/DualCameraKit/FileSaving/MediaLibraryService.swift index 36fac0e..356f6c4 100644 --- a/Sources/DualCameraKit/FileSaving/MediaLibraryService.swift +++ b/Sources/DualCameraKit/FileSaving/MediaLibraryService.swift @@ -44,7 +44,7 @@ public extension MediaLibraryService { do { try await removeItem(url) } catch { - DualCameraLogger.errors.error("Failed to remove video at path: \(url.absoluteString, privacy: .public). Error: \(error.localizedDescription, privacy: .public)") + DualCameraLogger.log("Failed to remove video at path: \(url.absoluteString). 
Error: \(error.localizedDescription)", category: .errors, level: .error) } } ) diff --git a/Sources/DualCameraKit/Log.swift b/Sources/DualCameraKit/Log.swift index b14dd17..6c14be5 100644 --- a/Sources/DualCameraKit/Log.swift +++ b/Sources/DualCameraKit/Log.swift @@ -1,10 +1,23 @@ import os -/// Centralized logging for DualCameraKit -internal struct DualCameraLogger { +public enum LogCategory: String { + case camera, session, errors, general +} + +public struct DualCameraLogger { private static let subsystem = "DualCameraKit" - - static let camera = Logger(subsystem: subsystem, category: "Camera") - static let session = Logger(subsystem: subsystem, category: "Session") - static let errors = Logger(subsystem: subsystem, category: "Errors") + + // Unified access + public static func log( + _ message: String, + category: LogCategory = .general, + level: OSLogType = .debug + ) { + let logger = Logger(subsystem: subsystem, category: category.rawValue) + #if targetEnvironment(simulator) + // Always echo to console for fast feedback in Simulator + print("[\(category.rawValue.uppercased())] \(message)") + #endif + logger.log(level: level, "\(message, privacy: .public)") + } } diff --git a/Sources/DualCameraKit/VideoRecording/DualCameraCPUVideoRecorderManager.swift b/Sources/DualCameraKit/VideoRecording/DualCameraCPUVideoRecorderManager.swift index ac6abe1..631ad74 100644 --- a/Sources/DualCameraKit/VideoRecording/DualCameraCPUVideoRecorderManager.swift +++ b/Sources/DualCameraKit/VideoRecording/DualCameraCPUVideoRecorderManager.swift @@ -125,7 +125,7 @@ public actor DualCameraCPUVideoRecorderManager: DualCameraVideoRecording { setupDisplayLink(frameRate: frameRate) state = .active(outputURL: outputURL, quality: quality) - DualCameraLogger.session.debug("📹 Screen recording started with DualCameraCPUVideoRecorderManager") + DualCameraLogger.log("📹 Screen recording started with DualCameraCPUVideoRecorderManager", category: .session) } public func stopVideoRecording() async throws -> URL { @@ -155,8 +155,7 @@ public actor DualCameraCPUVideoRecorderManager: DualCameraVideoRecording { } } } - - DualCameraLogger.session.debug("Recording completed with \(self.frameCount) frames using DualCameraCPUVideoRecorderManager") + DualCameraLogger.log("Recording completed with \(self.frameCount) frames using DualCameraCPUVideoRecorderManager", category: .session) resetRecordingState() @@ -250,7 +249,7 @@ public actor DualCameraCPUVideoRecorderManager: DualCameraVideoRecording { let runLoop = RunLoop.main self.displayLink?.add(to: runLoop, forMode: .common) self.displayLink?.add(to: runLoop, forMode: .tracking) - DualCameraLogger.session.info("Standard display link in use") + DualCameraLogger.log("Standard display link in use", category: .session) } /// Create an optimized pixel buffer pool for more efficient memory use and better performance @@ -280,7 +279,7 @@ public actor DualCameraCPUVideoRecorderManager: DualCameraVideoRecording { // Check status and throw error if pool creation failed guard status == kCVReturnSuccess, let pool = pixelBufferPool else { - DualCameraLogger.errors.error("Failed to create pixel buffer pool: \(status)") + DualCameraLogger.log("Failed to create pixel buffer pool: \(status)", category: .errors, level: .error) throw DualCameraError.recordingFailed(.pixelBufferPoolCreationFailed) } } diff --git a/Sources/DualCameraKit/VideoRecording/DualCameraReplayKitVideoRecorder.swift b/Sources/DualCameraKit/VideoRecording/DualCameraReplayKitVideoRecorder.swift index 79c1ed4..eb0b582 100644 --- 
a/Sources/DualCameraKit/VideoRecording/DualCameraReplayKitVideoRecorder.swift +++ b/Sources/DualCameraKit/VideoRecording/DualCameraReplayKitVideoRecorder.swift @@ -42,14 +42,13 @@ public actor DualCameraReplayKitVideoRecorder: DualCameraVideoRecording { recorder.startRecording() state = .active(outputURL: outputURL) - DualCameraLogger.session.debug("📹 Screen recording started with ReplayKit") + DualCameraLogger.log("📹 Screen recording started with ReplayKit", category: .session) } /// Stops an ongoing video recording and returns the URL of the recorded file public func stopVideoRecording() async throws -> URL { let recorder = RPScreenRecorder.shared() - - DualCameraLogger.session.debug("📹 Screen recording stopped with ReplayKit") + DualCameraLogger.log("📹 Screen recording stopped with ReplayKit", category: .session) guard case .active(let outputURL) = state else { throw DualCameraError.noRecordingInProgress
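
A minimal usage sketch of the unified logging API introduced in PATCH 7/7 (only `DualCameraLogger.log`, `LogCategory`, and `OSLogType` from the hunks above are assumed; the call sites and messages below are illustrative and not part of the diff):

    import os

    // category selects the os.Logger category; level defaults to .debug.
    DualCameraLogger.log("📹 Screen recording started", category: .session)

    // Pass an explicit OSLogType where the old per-category loggers used .info or .error,
    // otherwise those messages are downgraded to .debug by the new default.
    DualCameraLogger.log("Render pipeline ready", category: .session, level: .info)
    DualCameraLogger.log("Failed to create pixel buffer pool", category: .errors, level: .error)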