Video Recorder + Liveness Detection
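Note: capturing video and audio requires camera and microphone permission, so the host app's Info.plist needs NSCameraUsageDescription and NSMicrophoneUsageDescription entries; a hedged permission-request sketch is included after the helper below.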
import AVFoundation
import AVKit
import Combine
import CoreImage
import Foundation
import UIKit
// MARK: - VideoRecorderHelper

class VideoRecorderHelper: NSObject {
    // MARK: Lifecycle

    init(containerView: UIView) {
        self.containerView = containerView
        super.init()
        self.initializeCaptureSession()
    }

    deinit {
        self.deinitializeHelper()
        debugPrint("Video Recorder deinitialized")
    }

    // MARK: Internal

    var previewLayer: AVCaptureVideoPreviewLayer?
    var containerView: UIView
    var error = CurrentValueSubject<Error?, Never>(nil)
    var disposeBag = Set<AnyCancellable>()
    var recordedVideoOutputURL = CurrentValueSubject<URL?, Never>(nil)

    // MARK: Private

    private var captureSession: AVCaptureSession?
    private var cameraDevice: AVCaptureDevice?
    private var videoOutput: AVCaptureMovieFileOutput?
    private var faceDetector: CIDetector?
    private lazy var rightEyeBlinks: [Bool] = .init()
    private lazy var leftEyeBlinks: [Bool] = .init()
    private lazy var mouthPositionArray: [CGPoint] = .init()
}
extension VideoRecorderHelper {
    // MARK: - Initialize Session

    func initializeCaptureSession() {
        self.captureSession = .init()
        self.videoOutput = .init()
        self.captureSession?.sessionPreset = .high
        let videoDeviceDiscovery = AVCaptureDevice.DiscoverySession(deviceTypes: [.builtInWideAngleCamera], mediaType: .video, position: .unspecified)
        self.cameraDevice = videoDeviceDiscovery.devices.first(where: { $0.position == .front })
        guard let cameraDevice = cameraDevice, let captureSession = captureSession, let videoOutput = videoOutput else {
            return
        }
        guard let audioDevice = AVCaptureDevice.default(for: .audio) else {
            return
        }
        self.addInputsAndOutputs(cameraDevice: cameraDevice, captureSession: captureSession, videoOutput: videoOutput, audioDevice: audioDevice)
        self.initializeFaceDetector(captureSession: captureSession)
        self.setupPreviewLayer(session: captureSession)
    }
    // MARK: - Add inputs and outputs

    private func addInputsAndOutputs(cameraDevice: AVCaptureDevice, captureSession: AVCaptureSession, videoOutput: AVCaptureMovieFileOutput, audioDevice: AVCaptureDevice) {
        do {
            let videoInput = try AVCaptureDeviceInput(device: cameraDevice)
            if captureSession.canAddInput(videoInput) {
                captureSession.addInput(videoInput)
            }
        } catch {
            self.error.send(error)
        }
        do {
            let audioInput = try AVCaptureDeviceInput(device: audioDevice)
            if captureSession.canAddInput(audioInput) {
                captureSession.addInput(audioInput)
            }
        } catch {
            self.error.send(error)
        }
        if captureSession.canAddOutput(videoOutput) {
            captureSession.addOutput(videoOutput)
        }
        // Mirror the recording when the front camera is in use.
        if let connection = videoOutput.connection(with: .video), connection.isVideoMirroringSupported {
            connection.automaticallyAdjustsVideoMirroring = false
            connection.isVideoMirrored = cameraDevice.position == .front
        }
    }
    // MARK: - Initialize face detector

    // Initializes the CIDetector used to determine the user's liveness.
    private func initializeFaceDetector(captureSession: AVCaptureSession) {
        captureSession.beginConfiguration()
        let videoBufferOutput = AVCaptureVideoDataOutput()
        videoBufferOutput.videoSettings = [kCVPixelBufferPixelFormatTypeKey as String: Int(kCVPixelFormatType_32BGRA)]
        videoBufferOutput.alwaysDiscardsLateVideoFrames = true
        let videoBufferOutputQueue = DispatchQueue(label: "videoBufferOutput")
        videoBufferOutput.setSampleBufferDelegate(self, queue: videoBufferOutputQueue)
        if captureSession.canAddOutput(videoBufferOutput) {
            captureSession.addOutput(videoBufferOutput)
        }
        let configurationOptions: [String: AnyObject] = [CIDetectorAccuracy: CIDetectorAccuracyHigh as AnyObject, CIDetectorTracking: true as AnyObject, CIDetectorNumberOfAngles: 11 as AnyObject]
        self.faceDetector = CIDetector(ofType: CIDetectorTypeFace, context: nil, options: configurationOptions)
        captureSession.commitConfiguration()
    }
    // MARK: - Deinitialize

    private func deinitializeHelper() {
        self.captureSession?.stopRunning()
        self.captureSession = nil
        self.videoOutput = nil
        self.cameraDevice = nil
        self.previewLayer = nil
        self.leftEyeBlinks.removeAll()
        self.rightEyeBlinks.removeAll()
        self.mouthPositionArray.removeAll()
    }
    // MARK: - Creating Preview Layer

    private func setupPreviewLayer(session: AVCaptureSession) {
        let previewLayer = AVCaptureVideoPreviewLayer(session: session)
        previewLayer.videoGravity = .resizeAspectFill
        previewLayer.masksToBounds = true
        self.previewLayer = previewLayer
        self.containerView.layer.addSublayer(previewLayer)
        DispatchQueue.main.async {
            previewLayer.frame = self.containerView.bounds
        }
    }
    // MARK: - Start recording video

    func startRecording() {
        self.rightEyeBlinks.removeAll()
        self.leftEyeBlinks.removeAll()
        self.mouthPositionArray.removeAll()
        guard let captureSession = captureSession, let videoOutput = videoOutput else {
            return
        }
        DispatchQueue.global(qos: .background).async {
            captureSession.startRunning()
            guard let url = self.createRecordedVideoOutputURL() else {
                return
            }
            videoOutput.startRecording(to: url, recordingDelegate: self)
        }
    }

    // MARK: - Stop video recorder

    func stopRecording() {
        self.videoOutput?.stopRecording()
        self.captureSession?.stopRunning()
    }
    // MARK: - Create output file path

    private func createRecordedVideoOutputURL() -> URL? {
        let dirPaths = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)
        let filePath = dirPaths[0].appendingPathComponent("recording-video.mp4")
        if FileManager.default.fileExists(atPath: filePath.path) {
            do {
                try FileManager.default.removeItem(at: filePath)
            } catch {
                self.error.send(error)
            }
        }
        return filePath
    }
    // MARK: - Check liveness

    /// This validation is intentionally simple: we just check the number of eye
    /// blinks and distinct mouth positions to verify that the user is alive.
    /// Another approach is to combine the current solution with the face angle
    /// and yaw (see the sketch after this extension).
    private func isAlive() -> Bool {
        guard self.rightEyeBlinks.count > 50, self.leftEyeBlinks.count > 50, Set(self.mouthPositionArray).count > 50 else {
            return false
        }
        return true
    }
}
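// MARK: - Stricter liveness sketch

// A hedged sketch, not part of the original helper: as the comment above
// suggests, the blink/mouth checks can be combined with the face angle that
// CIFaceFeature reports via `hasFaceAngle`/`faceAngle`. `faceAngles` here is a
// hypothetical array the sample-buffer delegate would fill with
// `face.faceAngle` whenever `face.hasFaceAngle` is true.
extension VideoRecorderHelper {
    func isAlive(faceAngles: [Float]) -> Bool {
        // Require the basic blink/mouth evidence first.
        guard self.isAlive() else {
            return false
        }
        // Then require some head rotation across the clip; the 10-degree
        // threshold is an assumption, not a tuned value.
        guard let minAngle = faceAngles.min(), let maxAngle = faceAngles.max() else {
            return false
        }
        return maxAngle - minAngle > 10
    }
}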
// MARK: AVCaptureFileOutputRecordingDelegate

extension VideoRecorderHelper: AVCaptureFileOutputRecordingDelegate {
    func fileOutput(_ output: AVCaptureFileOutput, didFinishRecordingTo outputFileURL: URL, from connections: [AVCaptureConnection], error: Error?) {
        // Forward any recording error instead of silently ignoring it.
        if let error = error {
            self.error.send(error)
            return
        }
        if self.isAlive() {
            self.recordedVideoOutputURL.send(outputFileURL)
        } else {
            let livenessError = NSError(domain: "VideoRecorderHelper", code: 8888, userInfo: [NSLocalizedDescriptionKey: "Liveness Not Detected"])
            debugPrint(livenessError)
            self.error.send(livenessError)
        }
    }
}
// MARK: AVCaptureVideoDataOutputSampleBufferDelegate

extension VideoRecorderHelper: AVCaptureVideoDataOutputSampleBufferDelegate {
    func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
        guard let imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else {
            return
        }
        let inputImage = CIImage(cvImageBuffer: imageBuffer)
        let detectorOptions: [String: AnyObject] = [CIDetectorSmile: true as AnyObject, CIDetectorEyeBlink: true as AnyObject, CIDetectorImageOrientation: 6 as AnyObject]
        guard let faces = self.faceDetector?.features(in: inputImage, options: detectorOptions) else {
            return
        }
        for face in faces.compactMap({ $0 as? CIFaceFeature }) {
            // Hop to the main queue so the liveness arrays are only mutated from one queue.
            DispatchQueue.main.async {
                if face.rightEyeClosed {
                    self.rightEyeBlinks.append(face.rightEyeClosed)
                }
                if face.leftEyeClosed {
                    self.leftEyeBlinks.append(face.leftEyeClosed)
                }
                // Round the mouth position so per-frame jitter doesn't count as movement.
                let newMouthPosition = CGPoint(x: face.mouthPosition.x.rounded(), y: face.mouthPosition.y.rounded())
                self.mouthPositionArray.append(newMouthPosition)
            }
        }
    }
}
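// MARK: - Vision-based detection sketch

// A hedged alternative, not part of the original helper: CIDetector is an older
// API, and Apple's Vision framework offers VNDetectFaceLandmarksRequest for the
// same per-frame face analysis. A minimal sketch of processing one pixel buffer;
// `detectFaceWithVision(in:)` is a hypothetical helper, and real blink logic
// would still need to be built on top of the returned landmarks.
import Vision

extension VideoRecorderHelper {
    func detectFaceWithVision(in imageBuffer: CVPixelBuffer) {
        let request = VNDetectFaceLandmarksRequest { request, _ in
            guard let faces = request.results as? [VNFaceObservation] else {
                return
            }
            for face in faces {
                // landmarks?.leftEye / rightEye expose eye outlines; a blink
                // heuristic could track how the eye regions' heights change
                // over time.
                _ = face.landmarks?.leftEye
                _ = face.landmarks?.rightEye
            }
        }
        let handler = VNImageRequestHandler(cvPixelBuffer: imageBuffer, options: [:])
        try? handler.perform([request])
    }
}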
// MARK: - CGPoint+Hashable

// CGPoint is not Hashable out of the box; this conformance lets us deduplicate
// mouth positions with a Set in isAlive().
extension CGPoint: Hashable {
    public func hash(into hasher: inout Hasher) {
        hasher.combine(x)
        hasher.combine(y)
    }
}
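// MARK: - Permission sketch

// A minimal sketch, assuming the host app wants to gate recording on the
// camera/microphone permissions mentioned at the top. It only uses the real
// AVCaptureDevice.requestAccess(for:) API; `requestPermissions(_:)` itself is
// a hypothetical helper, not part of the original gist.
extension VideoRecorderHelper {
    static func requestPermissions(_ completion: @escaping (Bool) -> Void) {
        AVCaptureDevice.requestAccess(for: .video) { videoGranted in
            AVCaptureDevice.requestAccess(for: .audio) { audioGranted in
                // Hop back to the main queue before touching UI.
                DispatchQueue.main.async {
                    completion(videoGranted && audioGranted)
                }
            }
        }
    }
}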
import Combine
import UIKit

// MARK: - ViewController

class ViewController: UIViewController {
    // Our video recorder container view
    @IBOutlet var videoView: UIView!

    var videoRecorderHelper: VideoRecorderHelper?

    override func viewDidLoad() {
        super.viewDidLoad()
        self.initializeVideoHelper()
        self.bindObservables()
        // Do any additional setup after loading the view.
    }

    override func viewDidLayoutSubviews() {
        super.viewDidLayoutSubviews()
        self.videoView.layer.cornerRadius = self.videoView.frame.height / 2
        self.videoView.layer.masksToBounds = true
        self.videoView.layer.borderColor = UIColor.red.cgColor
        self.videoView.layer.borderWidth = 5
    }

    @IBAction func start(_ sender: Any) {
        self.videoRecorderHelper?.startRecording()
    }

    @IBAction func stop(_ sender: Any) {
        self.videoRecorderHelper?.stopRecording()
    }

    @IBAction func back(_ sender: Any) {
        self.navigationController?.popViewController(animated: true)
    }
}
extension ViewController {
    // MARK: Private

    private func initializeVideoHelper() {
        self.videoRecorderHelper = VideoRecorderHelper(containerView: self.videoView)
    }

    private func bindObservables() {
        self.bindRecordedVideoOutputURL()
        self.bindError()
    }

    private func bindRecordedVideoOutputURL() {
        if let videoRecorderHelper {
            // receive(on:), not subscribe(on:), is what delivers values on the main queue.
            videoRecorderHelper.recordedVideoOutputURL
                .receive(on: DispatchQueue.main)
                .sink(receiveValue: { [weak self] outputURL in
                    guard let self = self, let outputURL = outputURL else {
                        return
                    }
                    // Open video player or etc. (see the playback sketch below)
                })
                .store(in: &videoRecorderHelper.disposeBag)
        }
    }

    private func bindError() {
        if let videoRecorderHelper {
            videoRecorderHelper.error
                .receive(on: DispatchQueue.main)
                .sink { [weak self] error in
                    guard let self = self, let error = error else {
                        return
                    }
                    // Show error
                }
                .store(in: &videoRecorderHelper.disposeBag)
        }
    }
}
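// MARK: - Playback sketch

// A hedged usage example, not part of the original gist: once
// recordedVideoOutputURL emits a URL, the clip could be shown with AVKit's
// AVPlayerViewController, e.g. from bindRecordedVideoOutputURL's sink.
// `playRecordedVideo(at:)` is a hypothetical helper.
import AVKit

extension ViewController {
    func playRecordedVideo(at url: URL) {
        let playerController = AVPlayerViewController()
        playerController.player = AVPlayer(url: url)
        self.present(playerController, animated: true) {
            playerController.player?.play()
        }
    }
}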