Replace this paragraph with a short description of the incorrect behavior.
func startRecording() throws {
let sessionID = UUID()
let url = FileManager.default.temporaryDirectory.appendingPathComponent("\(sessionID.uuidString).mp4")
// record audio when permission is given
let hasAudio = self.camera.audioDataOutput != nil
let recorder = try MovieRecorder(url: url, configuration: MovieRecorder.Configuration(hasAudio: hasAudio))
state.isRecording = true
queue.async {
self.recorder = recorder
}
}
Video inputs: not ready for media data, dropping sample buffer (t: 135065.272566061).
Video inputs: not ready for media data, dropping sample buffer (t: 135065.339290207).
Video inputs: not ready for media data, dropping sample buffer (t: 135065.572860042).
Video inputs: not ready for media data, dropping sample buffer (t: 135065.639584103).
The consequence is that the first second of the recorded video is "glitchy" as some of the frames have been dropped. This by itself is ok, but sometimes ( 1 out of 20 times), the error: Video inputs: not ready for media data, dropping sample buffer
is repeated for the entire duration of the recording, the result is that no video has been recorded at all.
Describe what you expect to happen.
Consistently no error on start recording.
Describe or copy/paste the behavior you observe.
Behavior: fail to record all frames on first second on every record invocation. Moreover, once every 20 invocations or so, the camera fails to record at all.
The entirety of the class I use to record video is reproduced below. It's a loose refactor of CapturePipeline
found in the sample project.
import Foundation
import SwiftUI
import MetalPetal
import VideoIO
import VideoToolbox
import AVKit
//MARK:- pipeline for rendering effect in video
class MetalPipeline: NSObject, ObservableObject, AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAudioDataOutputSampleBufferDelegate {
// depth of cache
var cacheDepth : Int = 3
// the rendered image with effect layered in
@Published var previewImage: CGImage?
// buffer to store recent images
private var imageBuffer : [CGImage] = []
private var cachedImage : CGImage?
// default backward facing camera pose
private var cameraPose : AVCaptureDevice.Position = .back
struct Face {
var bounds: CGRect
}
struct State {
var isRecording: Bool = false
var isVideoMirrored: Bool = false
}
@Published private var _state: State = State()
private let stateLock = MTILockCreate()
private(set) var state: State {
get {
stateLock.lock()
defer {
stateLock.unlock()
}
return _state
}
set {
stateLock.lock()
defer {
stateLock.unlock()
}
_state = newValue
}
}
private let renderContext = try! MTIContext(device: MTLCreateSystemDefaultDevice()!)
private let queue: DispatchQueue = DispatchQueue(label: "org.metalpetal.capture")
private let camera: Camera = {
var configurator = Camera.Configurator()
configurator.videoConnectionConfigurator = { camera, connection in
#if os(iOS)
switch UIApplication.shared.windows.first(where: { $0.windowScene != nil })?.windowScene?.interfaceOrientation {
case .landscapeLeft:
connection.videoOrientation = .landscapeLeft
case .landscapeRight:
connection.videoOrientation = .landscapeRight
case .portraitUpsideDown:
connection.videoOrientation = .portraitUpsideDown
default:
connection.videoOrientation = .portrait
}
#else
connection.videoOrientation = .portrait
#endif
}
// @TODO: make sure you're able to change session cam default cam position
let session_cam = Camera(captureSessionPreset: .hd1280x720, defaultCameraPosition: .back, configurator: configurator)
return session_cam
}()
private let imageRenderer = PixelBufferPoolBackedImageRenderer()
private var isMetadataOutputEnabled: Bool = false
private var recorder: MovieRecorder?
//MARK:- effects
// filter effects
enum Effect: String, Identifiable, CaseIterable {
case polaroidA = "polaroidA"
var id: String { rawValue }
typealias Filter = (MTIImage, [Face]) -> MTIImage
func makeFilter() -> Filter {
let filter = MTICoreImageUnaryFilter()
filter.filter = CIFilter(name: "CIPhotoEffectInstant")
return { image, faces in
filter.inputImage = image
return filter.outputImage!
}
// return { image, faces in image }
}
}
private var filter: Effect.Filter = { image, faces in image }
@Published var effect: Effect = .polaroidA {
didSet {
let filter = effect.makeFilter()
queue.async {
self.filter = filter
}
}
}
private var faces: [Face] = []
//MARK:- end effect
override init() {
super.init()
try? self.camera.enableVideoDataOutput(on: queue, delegate: self)
try? self.camera.enableAudioDataOutput(on: queue, delegate: self)
self.camera.videoDataOutput?.videoSettings = [kCVPixelBufferPixelFormatTypeKey as String: kCVPixelFormatType_420YpCbCr8BiPlanarFullRange]
}
//MARK:- API
func startRunningCaptureSession() {
queue.async {
self.camera.startRunningCaptureSession()
}
}
func stopRunningCaptureSession() {
queue.async {
self.camera.stopRunningCaptureSession()
}
}
func startRecording() throws {
let sessionID = UUID()
let url = FileManager.default.temporaryDirectory.appendingPathComponent("\(sessionID.uuidString).mp4")
// record audio when permission is given
let hasAudio = self.camera.audioDataOutput != nil
let recorder = try MovieRecorder(url: url, configuration: MovieRecorder.Configuration(hasAudio: hasAudio))
state.isRecording = true
queue.async {
self.recorder = recorder
}
}
func stopRecording(completion: @escaping (Result<URL, Error>) -> Void) {
if let recorder = recorder {
recorder.stopRecording(completion: { error in
self.state.isRecording = false
if let error = error {
completion(.failure(error))
} else {
completion(.success(recorder.url))
}
})
queue.async {
self.recorder = nil
}
}
}
// @use: flip the camera
func flipCamera(){
switch cameraPose {
case .front:
do {
try self.camera.switchToVideoCaptureDevice(with: .back)
self.cameraPose = .back
} catch {
return
}
default:
do {
try self.camera.switchToVideoCaptureDevice(with: .front)
self.cameraPose = .front
} catch {
return
}
}
}
//@use: Take picture
public func snapImage() -> CGImage? {
if let im = self.previewImage {
return im
} else {
return self.previewImage
}
}
// @Use: cache the previous frame
private func cachePreviousImg( _ img: CGImage? ){
self.cachedImage = img;
}
//@use: cache multiple images in buffer
private func cacheInBuffer(){
if let m = self.previewImage {
imageBuffer.append(m)
}
}
//MARK:- render filtered image delegate
// @note: this is a delegate fn that gets called. and is outputting rendered image
func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
guard let formatDescription = sampleBuffer.formatDescription else {
return
}
switch formatDescription.mediaType {
case .audio:
do {
try self.recorder?.appendSampleBuffer(sampleBuffer)
} catch {
print("captureOutput audio error: ", error)
}
case .video:
guard let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { return }
do {
let image = MTIImage(cvPixelBuffer: pixelBuffer, alphaType: .alphaIsOne)
let filterOutputImage = self.filter(image, faces)
let outputImage = self.state.isVideoMirrored ? filterOutputImage.oriented(.upMirrored) : filterOutputImage
let renderOutput = try self.imageRenderer.render(outputImage, using: renderContext)
try self.recorder?.appendSampleBuffer(SampleBufferUtilities.makeSampleBufferByReplacingImageBuffer(of: sampleBuffer, with: renderOutput.pixelBuffer)!)
DispatchQueue.main.async {
// output rendered image and cache image in buffer
self.cachedImage = self.previewImage
self.previewImage = renderOutput.cgImage
}
} catch {
print("captureOutput video error: ", error)
}
default:
break
}
}
}
//MARK:- ios delegates
#if os(iOS)
extension MetalPipeline: AVCaptureMetadataOutputObjectsDelegate {
func metadataOutput(_ output: AVCaptureMetadataOutput, didOutput metadataObjects: [AVMetadataObject], from connection: AVCaptureConnection) {
var faces = [Face]()
for faceMetadataObject in metadataObjects.compactMap({ $0 as? AVMetadataFaceObject}) {
if let rect = self.camera.videoDataOutput?.outputRectConverted(fromMetadataOutputRect: faceMetadataObject.bounds) {
faces.append(Face(bounds: rect.insetBy(dx: -rect.width/4, dy: -rect.height/4)))
}
}
self.faces = faces
}
}
#endif