@p-larson
Last active August 29, 2022 05:42

Revisions

  1. p-larson revised this gist Aug 29, 2022. 1 changed file with 13 additions and 30 deletions.
    43 changes: 13 additions & 30 deletions ComposableCameraApp.swift
    @@ -5,21 +5,6 @@ import AVFoundation
     import VideoToolbox
     import ComposableArchitecture

    -struct ComposableCameraApp: App {
    -    var body: some Scene {
    -        WindowGroup {
    -            ContentView(
    -                store: .init(
    -                    initialState: CameraState(feed: nil, isRecording: false),
    -                    reducer: reducer.debug(),
    -                    environment: CameraEnvironment(cameraClient: .live)
    -                )
    -            )
    -        }
    -    }
    -}
    -
    -
     struct ContentView: View {
         let store: Store<CameraState, CameraAction>
    @@ -47,7 +32,6 @@ struct ContentView: View {
     }

     public struct CameraState: Equatable {
    -    var numberOfFrames = 0
         var feed: CGImage?
         var isRecording: Bool
     }
    @@ -67,24 +51,19 @@ struct CameraClient {
     private final class Delegate: NSObject, AVCaptureVideoDataOutputSampleBufferDelegate {
         let continuation: AsyncStream<CGImage>.Continuation

    -    private var feed: CGImage!
    -
         init(continuation: AsyncStream<CGImage>.Continuation) {
             self.continuation = continuation
         }

         func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
             if let imageBuffer = sampleBuffer.imageBuffer {
    -            VTCreateCGImageFromCVPixelBuffer(imageBuffer, options: nil, imageOut: &feed)
    -            self.continuation.yield(feed)
    +            var image: CGImage?
    +            VTCreateCGImageFromCVPixelBuffer(imageBuffer, options: nil, imageOut: &image)
    +            if let image = image {
    +                self.continuation.yield(image)
    +            }
             }
         }

    -    func captureOutput(_ output: AVCaptureOutput, didDrop sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
    -        let reason = CMGetAttachment(sampleBuffer, key: kCMSampleBufferAttachmentKey_DroppedFrameReason, attachmentModeOut: nil)
    -        print(String(describing: reason))
    -    }
     }

     private final actor Camera {
    @@ -99,7 +78,7 @@ private final actor Camera {
                 session.commitConfiguration()
             }

    -        return AsyncStream<CGImage> { continuation in
    +        return AsyncStream<CGImage>(bufferingPolicy: .bufferingNewest(1)) { continuation in
                 self.delegate = Delegate(continuation: continuation)
             }
         }
    @@ -122,7 +101,7 @@ extension CameraClient {
     struct CameraEnvironment {
         var cameraClient: CameraClient
         var session = AVCaptureSession()
    -    var sessionQueue = DispatchQueue(label: "com.demo.camera", qos: .userInitiated, attributes: .concurrent, autoreleaseFrequency: .workItem)
    +    var sessionQueue = DispatchQueue(label: "com.demo.camera", qos: .userInitiated, autoreleaseFrequency: .workItem)
         var videoOutput = AVCaptureVideoDataOutput()
     }

    @@ -137,7 +116,6 @@ let reducer = Reducer<CameraState, CameraAction, CameraEnvironment> {
             await send(.authorizationResponse(status))

             guard status == .authorized else {
    -            print(status)
                 return
             }

    @@ -164,6 +142,7 @@ let reducer = Reducer<CameraState, CameraAction, CameraEnvironment> {
                 )

                 guard let camera = device else {
    +                // TODO: Handle error
                     fatalError()
                 }

    @@ -173,9 +152,11 @@ let reducer = Reducer<CameraState, CameraAction, CameraEnvironment> {
                     if environment.session.canAddInput(cameraInput) {
                         environment.session.addInput(cameraInput)
                     } else {
    +                    // TODO: Handle error
                         fatalError()
                     }
                 } catch {
    +                // TODO: Handle error
                     fatalError()
                 }

    @@ -190,6 +171,7 @@ let reducer = Reducer<CameraState, CameraAction, CameraEnvironment> {

                     videoConnection?.videoOrientation = .portrait
                 } else {
    +                // TODO: Handle error
                     fatalError()
                 }
             }
    @@ -200,9 +182,10 @@ let reducer = Reducer<CameraState, CameraAction, CameraEnvironment> {
             return .none

         case .authorizationResponse(let status):
    +        // TODO: Handle response
             switch status {
             default:
                 return .none
             }
         }
     }
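
    Taken together, the substantive edits in this revision address the "// Buffer is not being released." note in the original file: each frame is now converted into a locally scoped CGImage instead of being accumulated into a stored CGImage! property, the stream keeps only the newest frame, and the delegate queue loses its .concurrent attribute (sample-buffer delegates must be called on a serial queue so frames arrive in order). A minimal standalone sketch of the resulting pattern; FrameDelegate and CameraFeed are names invented here for illustration, not part of the gist:

        import AVFoundation
        import CoreGraphics
        import VideoToolbox

        private final class FrameDelegate: NSObject, AVCaptureVideoDataOutputSampleBufferDelegate {
            let continuation: AsyncStream<CGImage>.Continuation

            init(continuation: AsyncStream<CGImage>.Continuation) {
                self.continuation = continuation
            }

            func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
                guard let pixelBuffer = sampleBuffer.imageBuffer else { return }
                // A fresh image per frame; nothing outlives this callback, so the
                // CVPixelBuffer can be returned to the capture session's pool.
                var image: CGImage?
                VTCreateCGImageFromCVPixelBuffer(pixelBuffer, options: nil, imageOut: &image)
                if let image = image {
                    continuation.yield(image)
                }
            }
        }

        final class CameraFeed {
            // Held strongly for the life of the stream, mirroring the gist's Camera actor.
            private var delegate: FrameDelegate?

            func frames(from output: AVCaptureVideoDataOutput) -> AsyncStream<CGImage> {
                // Sample-buffer delegates require a serial queue, hence no `.concurrent` here.
                let queue = DispatchQueue(label: "camera.frames", qos: .userInitiated)
                // Keep only the latest frame when the consumer falls behind the
                // camera's frame rate, rather than queueing frames unboundedly.
                return AsyncStream(bufferingPolicy: .bufferingNewest(1)) { continuation in
                    let delegate = FrameDelegate(continuation: continuation)
                    self.delegate = delegate
                    output.setSampleBufferDelegate(delegate, queue: queue)
                }
            }
        }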
  2. p-larson renamed this gist Aug 29, 2022. 1 changed file with 0 additions and 0 deletions.
    File renamed without changes.
  3. p-larson revised this gist Aug 29, 2022. 1 changed file with 0 additions and 1 deletion.
    1 change: 0 additions & 1 deletion ContentView.swift
    @@ -143,7 +143,6 @@ let reducer = Reducer<CameraState, CameraAction, CameraEnvironment> {

             await send(.start)

    -        //
             for await frame in await environment.cameraClient.startFeed(environment.session, environment.videoOutput, environment.sessionQueue) {
                 await send(.receive(frame))
             }
  4. p-larson revised this gist Aug 29, 2022. 1 changed file with 15 additions and 0 deletions.
    15 changes: 15 additions & 0 deletions ContentView.swift
    @@ -5,6 +5,21 @@ import AVFoundation
     import VideoToolbox
     import ComposableArchitecture

    +struct ComposableCameraApp: App {
    +    var body: some Scene {
    +        WindowGroup {
    +            ContentView(
    +                store: .init(
    +                    initialState: CameraState(feed: nil, isRecording: false),
    +                    reducer: reducer.debug(),
    +                    environment: CameraEnvironment(cameraClient: .live)
    +                )
    +            )
    +        }
    +    }
    +}
    +
    +
     struct ContentView: View {
         let store: Store<CameraState, CameraAction>

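    Because every live dependency is funneled through CameraClient, the same wiring shown above can be pointed at a stub for SwiftUI previews. A hypothetical example; the .noop client is sketched at the bottom of this page and is not part of the gist:

        struct ContentView_Previews: PreviewProvider {
            static var previews: some View {
                ContentView(
                    store: .init(
                        initialState: CameraState(feed: nil, isRecording: false),
                        reducer: reducer,
                        environment: CameraEnvironment(cameraClient: .noop)
                    )
                )
            }
        }
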
  5. p-larson created this gist Aug 29, 2022.
    194 changes: 194 additions & 0 deletions ContentView.swift
    @@ -0,0 +1,194 @@
    import SwiftUI
    import Foundation
    import CoreGraphics
    import AVFoundation
    import VideoToolbox
    import ComposableArchitecture

    struct ContentView: View {
        let store: Store<CameraState, CameraAction>

        var body: some View {
            WithViewStore(self.store) { viewStore in
                if let image = viewStore.feed {
                    GeometryReader { geometry in
                        Image(decorative: image, scale: 1.0, orientation: .upMirrored)
                            .resizable()
                            .scaledToFill()
                            .frame(
                                width: geometry.size.width,
                                height: geometry.size.height,
                                alignment: .center
                            )
                            .clipped()
                    }
                } else {
                    Color.black.onAppear(perform: {
                        viewStore.send(.open)
                    })
                }
            }
        }
    }

    public struct CameraState: Equatable {
        var numberOfFrames = 0
        var feed: CGImage?
        var isRecording: Bool
    }

    enum CameraAction: Equatable {
        case open
        case start
        case receive(CGImage)
        case authorizationResponse(AVAuthorizationStatus)
    }

    struct CameraClient {
        var requestAuthorization: @Sendable () async -> AVAuthorizationStatus
        var startFeed: @Sendable (AVCaptureSession, AVCaptureVideoDataOutput, DispatchQueue) async -> AsyncStream<CGImage>
    }

    private final class Delegate: NSObject, AVCaptureVideoDataOutputSampleBufferDelegate {
        let continuation: AsyncStream<CGImage>.Continuation

        private var feed: CGImage!

        init(continuation: AsyncStream<CGImage>.Continuation) {
            self.continuation = continuation
        }

        func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
            if let imageBuffer = sampleBuffer.imageBuffer {
                VTCreateCGImageFromCVPixelBuffer(imageBuffer, options: nil, imageOut: &feed)
                self.continuation.yield(feed)
            }
        }

        func captureOutput(_ output: AVCaptureOutput, didDrop sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
            let reason = CMGetAttachment(sampleBuffer, key: kCMSampleBufferAttachmentKey_DroppedFrameReason, attachmentModeOut: nil)
            print(String(describing: reason))
        }
    }

    private final actor Camera {
        var delegate: Delegate?

        func startFeed(_ session: AVCaptureSession, _ output: AVCaptureVideoDataOutput, _ queue: DispatchQueue) async -> AsyncStream<CGImage> {
            defer {
                session.beginConfiguration()

                output.setSampleBufferDelegate(self.delegate, queue: queue)

                session.commitConfiguration()
            }

            return AsyncStream<CGImage> { continuation in
                self.delegate = Delegate(continuation: continuation)
            }
        }
    }

    extension CameraClient {
        static var live: Self {
            let camera = Camera()

            return Self(
                requestAuthorization: {
                    return AVCaptureDevice.authorizationStatus(for: .video)
                }, startFeed: { session, output, queue in
                    await camera.startFeed(session, output, queue)
                }
            )
        }
    }

    struct CameraEnvironment {
        var cameraClient: CameraClient
        var session = AVCaptureSession()
        var sessionQueue = DispatchQueue(label: "com.demo.camera", qos: .userInitiated, attributes: .concurrent, autoreleaseFrequency: .workItem)
        var videoOutput = AVCaptureVideoDataOutput()
    }

    let reducer = Reducer<CameraState, CameraAction, CameraEnvironment> {
        state, action, environment in

        switch action {
        case .open:
            return .run { send in
                let status = await environment.cameraClient.requestAuthorization()

                await send(.authorizationResponse(status))

                guard status == .authorized else {
                    print(status)
                    return
                }

                await send(.start)

                //
                for await frame in await environment.cameraClient.startFeed(environment.session, environment.videoOutput, environment.sessionQueue) {
                    await send(.receive(frame))
                }
            }
        case .start:
            return .fireAndForget {
                environment.sessionQueue.async {
                    environment.session.beginConfiguration()

                    defer {
                        environment.session.commitConfiguration()
                        environment.session.startRunning()
                    }

                    let device = AVCaptureDevice.default(
                        .builtInWideAngleCamera,
                        for: .video,
                        position: .back
                    )

                    guard let camera = device else {
                        fatalError()
                    }

                    do {
                        let cameraInput = try AVCaptureDeviceInput(device: camera)

                        if environment.session.canAddInput(cameraInput) {
                            environment.session.addInput(cameraInput)
                        } else {
                            fatalError()
                        }
                    } catch {
                        fatalError()
                    }

                    if environment.session.canAddOutput(environment.videoOutput) {
                        environment.session.addOutput(environment.videoOutput)

                        environment.videoOutput.videoSettings = [
                            kCVPixelBufferPixelFormatTypeKey as String : kCVPixelFormatType_32BGRA
                        ]

                        let videoConnection = environment.videoOutput.connection(with: .video)

                        videoConnection?.videoOrientation = .portrait
                    } else {
                        fatalError()
                    }
                }
            }
        case .receive(let live):
            state.feed = live
            // Buffer is not being released.
            return .none

        case .authorizationResponse(let status):
            switch status {
            default:
                return .none
            }
        }
    }
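
    Two details of this first version are worth flagging beyond the "// Buffer is not being released." note, which revision 1 at the top of this page resolves. First, the live requestAuthorization only reads the cached status, so on a fresh install (status .notDetermined) the system permission prompt never appears. A sketch of a variant that also requests access, placed in the same file so it can reach the private Camera actor; livePrompting is an invented name, not the gist's code:

        extension CameraClient {
            // Sketch, not the gist's code: prompts for access when the
            // authorization status is still .notDetermined.
            static var livePrompting: Self {
                let camera = Camera()

                return Self(
                    requestAuthorization: {
                        switch AVCaptureDevice.authorizationStatus(for: .video) {
                        case .notDetermined:
                            return await AVCaptureDevice.requestAccess(for: .video) ? .authorized : .denied
                        case let status:
                            return status
                        }
                    },
                    startFeed: { session, output, queue in
                        await camera.startFeed(session, output, queue)
                    }
                )
            }
        }

    Second, the CameraClient interface makes a preview/test stub a single initializer call, e.g. the hypothetical .noop client referenced in the preview sketch above:

        extension CameraClient {
            // Hypothetical stub: reports authorized and yields no frames.
            static let noop = Self(
                requestAuthorization: { .authorized },
                startFeed: { _, _, _ in AsyncStream { $0.finish() } }
            )
        }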