me
me copied to clipboard
学习MacOS App (Part 6: Camera Preview)
通过AV Foundation来实现摄像头的preview, 下面的代码直接用了当前View对应的layer,也可以加入一个Custom关联上去,一个道理。PreviewLayer只需要有session就行,所以这里就没有关联output,后面我们会来介绍。
/// Configures the capture session and attaches a live camera preview
/// to the view.
///
/// Creates an `AVCaptureSession`, wires the default video camera in as
/// its input, and installs an `AVCaptureVideoPreviewLayer` as the
/// view's backing layer so frames render directly — a preview layer
/// only needs the session, no data output. Logs and returns early if
/// the camera cannot be opened or added to the session.
func setup() {
    // Create a capture session.
    session = AVCaptureSession()
    session.sessionPreset = .hd1280x720

    // Get a reference to the default camera and build a device input.
    // Binding the device inside the guard avoids the force unwrap that
    // would crash on machines with no camera (or when access is denied).
    guard
        let videoDevice = AVCaptureDevice.default(for: .video),
        let videoInput = try? AVCaptureDeviceInput(device: videoDevice),
        session.canAddInput(videoInput)
    else {
        print("### input FAILED")
        return
    }

    // Connect the input to the session.
    session.addInput(videoInput)

    // Replace the view's layer with the preview layer so the camera
    // feed fills the view.
    let previewLayer = AVCaptureVideoPreviewLayer(session: session)
    previewLayer.videoGravity = .resizeAspectFill
    self.view.layer = previewLayer

    session.startRunning()
    print("### setup OK")
}
参考:
- AV Foundation 秘籍
- Setting Up a Capture Session | Apple Developer Documentation
- iOS - 视频采集详解 | LinXunFeng's Blog
- ObjC 中国 - iOS 上的相机捕捉
MacOS提供的AVCaptureScreenInput可以很方便的采集屏幕,
// Enumerate the attached screens so a display ID can be picked for capture.
// NOTE(review): `displayID` is not a stock NSScreen property — it presumably
// comes from an extension defined elsewhere in the project; the force unwrap
// here will crash if it returns nil. Verify against that extension.
for each in NSScreen.screens {
print("\(each.localizedName), \(each.displayID!)")
}
// Create a capture session
session = AVCaptureSession()
session.sessionPreset = .hd1280x720
// Screen-recording input for the main display; falls back to raw display
// ID 1 when `NSScreen.main` is nil — TODO confirm 1 is a valid fallback
// (CGMainDisplayID() would be the canonical choice).
let screenInput = AVCaptureScreenInput(displayID: NSScreen.main?.displayID ?? 1)
AVCaptureVideoDataOutput
前面通过AVCaptureVideoPreviewLayer来渲染直接预览,但如果我们需要对buffer做拦截,比如灰度或者美颜,这时就需要用到data output.
/// Renders camera frames through `AVSampleBufferDisplayLayer` so each
/// sample buffer can be intercepted (e.g. for a grayscale or beauty
/// filter) before display, instead of letting
/// `AVCaptureVideoPreviewLayer` draw the session directly.
class ViewController: NSViewController, AVCaptureVideoDataOutputSampleBufferDelegate {

    var session: AVCaptureSession!
    var bufferDisplayLayer: AVSampleBufferDisplayLayer!

    override func viewDidLoad() {
        super.viewDidLoad()
        bufferDisplayLayer = AVSampleBufferDisplayLayer()
        setupCamera()
    }

    /// Builds the pipeline: default camera → video data output (delegate
    /// callbacks on a background queue) → display layer installed as the
    /// view's backing layer. Logs and returns early if the camera is
    /// unavailable.
    func setupCamera() {
        // Create a capture session.
        session = AVCaptureSession()
        session.sessionPreset = .hd1280x720

        // Get a reference to the default camera and build a device input.
        // Binding the device in the guard avoids force-unwrapping a nil
        // device on machines without a camera (or with access denied).
        guard
            let videoDevice = AVCaptureDevice.default(for: .video),
            let videoInput = try? AVCaptureDeviceInput(device: videoDevice),
            session.canAddInput(videoInput)
        else {
            print("### input FAILED")
            return
        }

        // Connect the input to the session.
        session.addInput(videoInput)

        // Route frames to this controller; the delegate queue is a global
        // background queue so per-frame work stays off the main thread.
        let dataOutput = AVCaptureVideoDataOutput()
        dataOutput.setSampleBufferDelegate(self, queue: DispatchQueue.global())
        if session.canAddOutput(dataOutput) {
            session.addOutput(dataOutput)
        }

        // Link layer: the display layer shows whatever buffers we enqueue.
        bufferDisplayLayer.videoGravity = .resizeAspectFill
        self.view.layer = bufferDisplayLayer

        session.startRunning()
        print("### setup OK")
    }

    /// Called for every captured frame. The buffer is handed straight to
    /// the display layer; any filtering would hook in here before `enqueue`.
    func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
        bufferDisplayLayer.enqueue(sampleBuffer)
    }
}
后面我们继续看滤镜效果。