// Gist by @himanshunaidu, created January 17, 2025.
import SwiftUI
import AVFoundation
import CoreImage
import Vision

// Adopted by objects that want to receive synchronized camera and depth
// frames from CameraController.
protocol CaptureDataReceiver: AnyObject {
    func onNewData(cameraImage: CGImage, depthPixelBuffer: CVPixelBuffer)
}
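// A minimal sketch of a conforming receiver, assuming you want to publish the
// latest frame to SwiftUI and sample the depth at the center of the depth map.
// The class name and the center-sampling logic are illustrative additions,
// not part of the original gist.
class FrameReceiver: ObservableObject, CaptureDataReceiver {
    @Published var latestFrame: CGImage?
    @Published var centerDepth: Float = 0

    func onNewData(cameraImage: CGImage, depthPixelBuffer: CVPixelBuffer) {
        // Read the Float32 value at the center of the depth map.
        CVPixelBufferLockBaseAddress(depthPixelBuffer, .readOnly)
        let width = CVPixelBufferGetWidth(depthPixelBuffer)
        let height = CVPixelBufferGetHeight(depthPixelBuffer)
        let rowBytes = CVPixelBufferGetBytesPerRow(depthPixelBuffer)
        var depth: Float = 0
        if let base = CVPixelBufferGetBaseAddress(depthPixelBuffer) {
            let rowStart = base.advanced(by: (height / 2) * rowBytes)
            depth = rowStart.assumingMemoryBound(to: Float32.self)[width / 2]
        }
        CVPixelBufferUnlockBaseAddress(depthPixelBuffer, .readOnly)

        // Publish on the main queue; onNewData arrives on the video queue.
        DispatchQueue.main.async {
            self.latestFrame = cameraImage
            self.centerDepth = depth
        }
    }
}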
class CameraController: NSObject, ObservableObject {
    private let videoDataOutputQueue = DispatchQueue(label: "videoQueue", qos: .userInitiated, attributes: [], autoreleaseFrequency: .workItem)

    private(set) var captureSession: AVCaptureSession!
    private(set) var captureDevice: AVCaptureDevice!
    private var depthDataOutput: AVCaptureDepthDataOutput!
    private var videoDataOutput: AVCaptureVideoDataOutput!
    private var outputVideoSync: AVCaptureDataOutputSynchronizer!

    // Applies smoothing to the depth data when enabled; read during output setup.
    var isFilteringEnabled = true

    weak var delegate: CaptureDataReceiver?

    override init() {
        super.init()
        do {
            try setupSession()
        } catch {
            fatalError("Unable to configure the capture session.")
        }
    }
    // Initialize the captureSession and set its configuration.
    private func setupSession() throws {
        captureSession = AVCaptureSession()

        // Configure the capture session.
        captureSession.beginConfiguration()
        captureSession.sessionPreset = .inputPriority

        try setupCaptureInput()
        setupCaptureOutputs()

        // Finalize the capture session configuration.
        captureSession.commitConfiguration()
    }
    // Add the LiDAR depth camera input to the capture session.
    private func setupCaptureInput() throws {
        // Store the device on the property so startStream can configure its
        // frame rate later. Force-unwrapping requires a LiDAR-equipped device.
        captureDevice = AVCaptureDevice.default(.builtInLiDARDepthCamera, for: .video, position: .back)!
        let deviceInput = try AVCaptureDeviceInput(device: captureDevice)
        captureSession.addInput(deviceInput)
    }
    private func setupCaptureOutputs() {
        var dataOutputs: [AVCaptureOutput] = []

        // Create an object to output video sample buffers.
        videoDataOutput = AVCaptureVideoDataOutput()
        captureSession.addOutput(videoDataOutput)
        dataOutputs.append(videoDataOutput)

        // Create an object to output depth data, with smoothing applied when enabled.
        depthDataOutput = AVCaptureDepthDataOutput()
        depthDataOutput.isFilteringEnabled = isFilteringEnabled
        captureSession.addOutput(depthDataOutput)
        dataOutputs.append(depthDataOutput)

        // Create an object to synchronize the delivery of depth and video data.
        outputVideoSync = AVCaptureDataOutputSynchronizer(dataOutputs: dataOutputs)
        outputVideoSync.setDelegate(self, queue: videoDataOutputQueue)
    }
    func startStream() {
        DispatchQueue.global(qos: .userInitiated).async {
            self.captureSession.startRunning()
            // configureDesiredFrameRate is a custom AVCaptureDevice extension,
            // not part of AVFoundation; a sketch follows the class below.
            self.captureDevice.configureDesiredFrameRate(5)
        }
    }

    func stopStream() {
        captureSession.stopRunning()
    }
}
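// A minimal sketch of the configureDesiredFrameRate helper that startStream
// calls above. The gist does not include it, so this body is an assumption
// about its intent: clamp the device to the requested frame rate if the
// active format supports it.
extension AVCaptureDevice {
    func configureDesiredFrameRate(_ fps: Int) {
        // Only apply a rate the active format actually supports.
        guard let range = activeFormat.videoSupportedFrameRateRanges.first,
              Double(fps) >= range.minFrameRate, Double(fps) <= range.maxFrameRate else { return }
        do {
            try lockForConfiguration()
            let duration = CMTime(value: 1, timescale: CMTimeScale(fps))
            activeVideoMinFrameDuration = duration
            activeVideoMaxFrameDuration = duration
            unlockForConfiguration()
        } catch {
            print("Unable to lock device for frame rate configuration: \(error)")
        }
    }
}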
// MARK: Output Synchronizer Delegate
extension CameraController: AVCaptureDataOutputSynchronizerDelegate {
    // Reuse one CIContext across frames; creating a new context per frame is costly.
    private static let ciContext = CIContext()

    func dataOutputSynchronizer(_ synchronizer: AVCaptureDataOutputSynchronizer,
                                didOutput synchronizedDataCollection: AVCaptureSynchronizedDataCollection) {
        // Retrieve the synchronized video sample buffer container object.
        guard let syncedVideoData = synchronizedDataCollection.synchronizedData(for: videoDataOutput) as? AVCaptureSynchronizedSampleBufferData else { return }
        guard let pixelBuffer = syncedVideoData.sampleBuffer.imageBuffer else { return }

        // Render the frame to a CGImage, as the CaptureDataReceiver protocol expects.
        let ciImage = CIImage(cvPixelBuffer: pixelBuffer)
        guard let cameraImage = Self.ciContext.createCGImage(ciImage, from: ciImage.extent) else { return }

        // Retrieve the synchronized depth data and convert it to 32-bit float.
        guard let syncedDepthData = synchronizedDataCollection.synchronizedData(for: depthDataOutput) as? AVCaptureSynchronizedDepthData else { return }
        let depthData = syncedDepthData.depthData
        let depthPixelBuffer = depthData.converting(toDepthDataType: kCVPixelFormatType_DepthFloat32).depthDataMap

        delegate?.onNewData(cameraImage: cameraImage, depthPixelBuffer: depthPixelBuffer)
    }
}
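// A short usage sketch, assuming the FrameReceiver illustrated above: wire the
// controller's delegate and start the stream when the view appears. The view
// name is illustrative, not part of the gist.
struct CameraDepthView: View {
    @StateObject private var controller = CameraController()
    @StateObject private var receiver = FrameReceiver()

    var body: some View {
        VStack {
            if let frame = receiver.latestFrame {
                Image(decorative: frame, scale: 1.0, orientation: .up)
                    .resizable()
                    .scaledToFit()
            }
            Text(String(format: "Center depth: %.2f m", receiver.centerDepth))
        }
        .onAppear {
            controller.delegate = receiver
            controller.startStream()
        }
        .onDisappear {
            controller.stopStream()
        }
    }
}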