iOS: Convert UIImage to CMSampleBuffer

Several questions cover how to convert a CMSampleBuffer to a UIImage, but there are no answers on how to do the reverse, i.e. convert a UIImage to a CMSampleBuffer.

This question differs from similar ones because the code below provides a starting point that converts a UIImage to a CVPixelBuffer; hopefully someone with more AVFoundation expertise can help extend it to produce a CMSampleBuffer.

func convertImageToBuffer(from image: UIImage) -> CVPixelBuffer? {
    let attrs = [
        String(kCVPixelBufferCGImageCompatibilityKey) : kCFBooleanTrue,
        String(kCVPixelBufferCGBitmapContextCompatibilityKey) : kCFBooleanTrue
        ] as [String : Any]
    var buffer : CVPixelBuffer?
    let status = CVPixelBufferCreate(kCFAllocatorDefault, Int(image.size.width), Int(image.size.height), kCVPixelFormatType_32ARGB, attrs as CFDictionary, &buffer)
    guard (status == kCVReturnSuccess) else {
        return nil
    }

    CVPixelBufferLockBaseAddress(buffer!, CVPixelBufferLockFlags(rawValue: 0))
    let pixelData = CVPixelBufferGetBaseAddress(buffer!)

    let rgbColorSpace = CGColorSpaceCreateDeviceRGB()
    let context = CGContext(data: pixelData, width: Int(image.size.width), height: Int(image.size.height), bitsPerComponent: 8, bytesPerRow: CVPixelBufferGetBytesPerRow(buffer!), space: rgbColorSpace, bitmapInfo: CGImageAlphaInfo.noneSkipFirst.rawValue)

    context?.translateBy(x: 0, y: image.size.height)
    context?.scaleBy(x: 1.0, y: -1.0)

    UIGraphicsPushContext(context!)
    image.draw(in: CGRect(x: 0, y: 0, width: image.size.width, height: image.size.height))
    UIGraphicsPopContext()
    CVPixelBufferUnlockBaseAddress(buffer!, CVPixelBufferLockFlags(rawValue: 0))

    return buffer
}

@Spads No, I hadn't seen it! But that approach seems to have issues? Thanks for finding it, and thanks so much for your help! Any other ideas? I'll also dig into AVAssetWriterInputPixelBufferAdaptor separately (see the sketch after these comments). - Crashalot
That's the best I could find, sorry. - adamfowlerphoto
@Spads No problem, it's better than nothing. Thanks again. :) - Crashalot
That method only converts to a CVPixelBuffer; please update the code. - famfamfam
Did you ever end up solving this? - Sdanson
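
As an aside on the AVAssetWriterInputPixelBufferAdaptor route mentioned in the comments, a rough sketch of that path might look like the following. This is only an illustration and not part of the original question: the writer input, output settings, and pixel format are placeholder assumptions, and this route consumes the CVPixelBuffer from convertImageToBuffer(from:) directly, so no CMSampleBuffer is needed.

import AVFoundation

// Hypothetical writer input; codec, dimensions, and pixel format are placeholder values.
let writerInput = AVAssetWriterInput(mediaType: .video, outputSettings: [
    AVVideoCodecKey: AVVideoCodecType.h264,
    AVVideoWidthKey: 1920,
    AVVideoHeightKey: 1080
])
let adaptor = AVAssetWriterInputPixelBufferAdaptor(
    assetWriterInput: writerInput,
    sourcePixelBufferAttributes: [
        kCVPixelBufferPixelFormatTypeKey as String: kCVPixelFormatType_32ARGB
    ])

// Once the asset writer's session has started and the input is ready for data,
// a pixel buffer produced by convertImageToBuffer(from:) can be appended with an
// explicit presentation time, for example:
// adaptor.append(pixelBuffer, withPresentationTime: CMTime(value: 0, timescale: 30))
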
2 Answers

You're halfway there. Just convert the CVPixelBuffer into a CMSampleBuffer:

import AVFoundation
import UIKit

extension UIImage {
    
    var cvPixelBuffer: CVPixelBuffer? {
        let attrs = [
            String(kCVPixelBufferCGImageCompatibilityKey): kCFBooleanTrue,
            String(kCVPixelBufferCGBitmapContextCompatibilityKey): kCFBooleanTrue
            ] as [String: Any]
        var buffer: CVPixelBuffer?
        let status = CVPixelBufferCreate(kCFAllocatorDefault, Int(self.size.width), Int(self.size.height), kCVPixelFormatType_32ARGB, attrs as CFDictionary, &buffer)
        guard status == kCVReturnSuccess else {
            return nil
        }

        CVPixelBufferLockBaseAddress(buffer!, CVPixelBufferLockFlags(rawValue: 0))
        let pixelData = CVPixelBufferGetBaseAddress(buffer!)

        let rgbColorSpace = CGColorSpaceCreateDeviceRGB()
        let context = CGContext(data: pixelData, width: Int(self.size.width), height: Int(self.size.height), bitsPerComponent: 8, bytesPerRow: CVPixelBufferGetBytesPerRow(buffer!), space: rgbColorSpace, bitmapInfo: CGImageAlphaInfo.noneSkipFirst.rawValue)

        context?.translateBy(x: 0, y: self.size.height)
        context?.scaleBy(x: 1.0, y: -1.0)

        UIGraphicsPushContext(context!)
        self.draw(in: CGRect(x: 0, y: 0, width: self.size.width, height: self.size.height))
        UIGraphicsPopContext()
        CVPixelBufferUnlockBaseAddress(buffer!, CVPixelBufferLockFlags(rawValue: 0))

        return buffer
    }
       
    func createCMSampleBuffer() -> CMSampleBuffer? {
        guard let pixelBuffer = cvPixelBuffer else {
            return nil
        }
        var newSampleBuffer: CMSampleBuffer?
        // Use a fully initialized timing struct; passing an uninitialized optional here crashes.
        var timingInfo = CMSampleTimingInfo(duration: .invalid,
                                            presentationTimeStamp: .invalid,
                                            decodeTimeStamp: .invalid)
        var videoInfo: CMVideoFormatDescription?
        CMVideoFormatDescriptionCreateForImageBuffer(allocator: nil,
                                                     imageBuffer: pixelBuffer,
                                                     formatDescriptionOut: &videoInfo)
        guard let formatDescription = videoInfo else {
            return nil
        }
        CMSampleBufferCreateForImageBuffer(allocator: kCFAllocatorDefault,
                                           imageBuffer: pixelBuffer,
                                           dataReady: true,
                                           makeDataReadyCallback: nil,
                                           refcon: nil,
                                           formatDescription: formatDescription,
                                           sampleTiming: &timingInfo,
                                           sampleBufferOut: &newSampleBuffer)
        return newSampleBuffer
    }
    
}
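
For reference, usage of the extension above might look like this (the asset name is a placeholder):

// "someImage" is a placeholder asset name used only for illustration.
if let image = UIImage(named: "someImage"),
   let sampleBuffer = image.createCMSampleBuffer() {
    // Hand the CMSampleBuffer to AVFoundation as needed.
    print(CMSampleBufferIsValid(sampleBuffer))
}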

I'm getting an error on "timimgInfo" in "sampleTiming": "Unexpectedly found nil while unwrapping an Optional value". How can I fix it? - Vishnu Hari
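
Regarding the crash mentioned in the comment above: one way to avoid it, offered here only as a sketch, is to pass a fully initialized, non-optional CMSampleTimingInfo instead of a nil optional. The frame duration and timestamp below are arbitrary example values:

import CoreMedia
import QuartzCore

// Example timing: a 1/30 s frame presented at the current host time; adjust as needed.
var timingInfo = CMSampleTimingInfo(
    duration: CMTime(value: 1, timescale: 30),
    presentationTimeStamp: CMTime(seconds: CACurrentMediaTime(), preferredTimescale: 600),
    decodeTimeStamp: .invalid)
// Pass &timingInfo as the sampleTiming argument of CMSampleBufferCreateForImageBuffer.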

Use this Swift code:
import Foundation
import AVFoundation
import UIKit


private func freeBlock(_ refCon: UnsafeMutableRawPointer?, doomedMemoryBlock: UnsafeMutableRawPointer, sizeInBytes: Int) -> Void {
    let unmanagedData = Unmanaged<NSData>.fromOpaque(refCon!)
    unmanagedData.release()
}

enum CMEncodingError: Error {
    case cmBlockCreationFailed
}

extension Data {

    func toCMBlockBuffer() throws -> CMBlockBuffer {
        // This block source is a manually retained pointer to our data instance.
        // The passed FreeBlock function manually releases it when the block buffer gets deallocated.
        let data = NSMutableData(data: self)
        var source = CMBlockBufferCustomBlockSource()
        source.refCon = Unmanaged.passRetained(data).toOpaque()
        source.FreeBlock = freeBlock

        var blockBuffer: CMBlockBuffer?
        let result = CMBlockBufferCreateWithMemoryBlock(
            allocator: kCFAllocatorDefault,
            memoryBlock: data.mutableBytes,
            blockLength: data.length,
            blockAllocator: kCFAllocatorNull,
            customBlockSource: &source,
            offsetToData: 0,
            dataLength: data.length,
            flags: 0,
            blockBufferOut: &blockBuffer)
        if OSStatus(result) != kCMBlockBufferNoErr {
            throw CMEncodingError.cmBlockCreationFailed
        }

        guard let buffer = blockBuffer else {
            throw CMEncodingError.cmBlockCreationFailed
        }

        assert(CMBlockBufferGetDataLength(buffer) == data.length)
        return buffer
    }
}

extension UIImage {
    
    func sampleBuffer() -> CMSampleBuffer? {
        
        guard let jpegData = self.jpegData(compressionQuality: 1) else {
            return nil
        }
        
        let rawPixelSize = CGSize(width: size.width, height: size.height)
        var format: CMFormatDescription? = nil
        let _ = CMVideoFormatDescriptionCreate(allocator: kCFAllocatorDefault, codecType: kCMVideoCodecType_JPEG, width: Int32(rawPixelSize.width), height: Int32(rawPixelSize.height), extensions: nil, formatDescriptionOut: &format)

        do {
            let cmBlockBuffer = try jpegData.toCMBlockBuffer()

            var size = jpegData.count

            var sampleBuffer: CMSampleBuffer? = nil
            let nowTime = CMTime(seconds: CACurrentMediaTime(), preferredTimescale: 60)
            let _1_60_s = CMTime(value: 1, timescale: 60) //CMTime(seconds: 1.0, preferredTimescale: 30)
            var timingInfo: CMSampleTimingInfo = CMSampleTimingInfo(duration: _1_60_s, presentationTimeStamp: nowTime, decodeTimeStamp: .invalid)

            let _ = CMSampleBufferCreateReady(allocator: kCFAllocatorDefault, dataBuffer: cmBlockBuffer, formatDescription: format, sampleCount: 1, sampleTimingEntryCount: 1, sampleTimingArray: &timingInfo, sampleSizeEntryCount: 1, sampleSizeArray: &size, sampleBufferOut: &sampleBuffer)
            if sampleBuffer != nil {
                //print("sending buffer to displayBufferLayer")
                //self.bufferDisplayLayer.enqueue(sampleBuffer!)
                return sampleBuffer
            } else {
                print("sampleBuffer is nil")
                return nil
            }
        } catch {
            print("error ugh ", error)
            return nil
        }
    }
}

How to use it:

let image = UIImage(named: "YourImageName")
let sampleBuffer = image?.sampleBuffer()
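
As the commented-out line in the answer hints, one possible destination for the JPEG sample buffer is an AVSampleBufferDisplayLayer. A minimal sketch, assuming bufferDisplayLayer has already been created and added to a view's layer hierarchy:

import AVFoundation
import UIKit

// bufferDisplayLayer is an assumption for illustration; attach it to a view before use.
let bufferDisplayLayer = AVSampleBufferDisplayLayer()

if let sampleBuffer = UIImage(named: "YourImageName")?.sampleBuffer(),
   bufferDisplayLayer.isReadyForMoreMediaData {
    bufferDisplayLayer.enqueue(sampleBuffer)
}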
