import AVFoundation
import CoreMedia

func processAudioData(pcmBuffer: AVAudioPCMBuffer) -> CMSampleBuffer? {
    let audioBufferList = pcmBuffer.mutableAudioBufferList
    let asbd = pcmBuffer.format.streamDescription

    var sampleBuffer: CMSampleBuffer? = nil
    var format: CMFormatDescription? = nil

    // Build a CMFormatDescription from the buffer's AudioStreamBasicDescription.
    var status = CMAudioFormatDescriptionCreate(allocator: kCFAllocatorDefault,
                                                asbd: asbd,
                                                layoutSize: 0,
                                                layout: nil,
                                                magicCookieSize: 0,
                                                magicCookie: nil,
                                                extensions: nil,
                                                formatDescriptionOut: &format)
    if status != noErr { return nil }

    // Each sample lasts 1/sampleRate seconds; stamp the buffer with the current host time.
    var timing = CMSampleTimingInfo(duration: CMTime(value: 1, timescale: Int32(asbd.pointee.mSampleRate)),
                                    presentationTimeStamp: CMClockGetTime(CMClockGetHostTimeClock()),
                                    decodeTimeStamp: .invalid)

    // Create an empty sample buffer first; the PCM data is attached in the next step.
    status = CMSampleBufferCreate(allocator: kCFAllocatorDefault,
                                  dataBuffer: nil,
                                  dataReady: false,
                                  makeDataReadyCallback: nil,
                                  refcon: nil,
                                  formatDescription: format,
                                  sampleCount: CMItemCount(pcmBuffer.frameLength),
                                  sampleTimingEntryCount: 1,
                                  sampleTimingArray: &timing,
                                  sampleSizeEntryCount: 0,
                                  sampleSizeArray: nil,
                                  sampleBufferOut: &sampleBuffer)
    if status != noErr {
        NSLog("CMSampleBufferCreate returned error: \(status)")
        return nil
    }

    // Copy the PCM data from the AudioBufferList into the sample buffer.
    status = CMSampleBufferSetDataBufferFromAudioBufferList(sampleBuffer!,
                                                            blockBufferAllocator: kCFAllocatorDefault,
                                                            blockBufferMemoryAllocator: kCFAllocatorDefault,
                                                            flags: 0,
                                                            bufferList: audioBufferList)
    if status != noErr {
        NSLog("CMSampleBufferSetDataBufferFromAudioBufferList returned error: \(status)")
        return nil
    }

    return sampleBuffer
}
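A CMSampleBuffer produced this way can be handed to APIs that only accept Core Media types. As one illustration (not from the original post), here is a minimal sketch of appending it to an AVAssetWriterInput in pass-through mode; writerInput is a hypothetical name, and format/sampleBuffer are the values created above:

// Hypothetical consumer: an AVAssetWriterInput in pass-through mode
// (outputSettings: nil requires a source format hint).
let writerInput = AVAssetWriterInput(mediaType: .audio,
                                     outputSettings: nil,
                                     sourceFormatHint: format)
writerInput.expectsMediaDataInRealTime = true
// ... add writerInput to an AVAssetWriter and start a session, then:
if writerInput.isReadyForMoreMediaData {
    writerInput.append(sampleBuffer)
}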

[Usage]

var streamDesc = AudioStreamBasicDescription()

let bytesPerSample: UInt32 = UInt32(MemoryLayout<Int16>.size)  // 2 bytes per Int16 sample
streamDesc.mFormatID = kAudioFormatLinearPCM
streamDesc.mFormatFlags = kLinearPCMFormatFlagIsSignedInteger | kAudioFormatFlagIsPacked
streamDesc.mBytesPerPacket = bytesPerSample
streamDesc.mFramesPerPacket = 1
streamDesc.mBytesPerFrame = bytesPerSample
streamDesc.mChannelsPerFrame = 1  // mono
streamDesc.mBitsPerChannel = 8 * bytesPerSample
streamDesc.mSampleRate = 48000
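For reference, the same interleaved mono 16-bit 48 kHz format can also be expressed with AVAudioFormat, which wraps an equivalent ASBD (a sketch, not part of the original post):

// Equivalent AVAudioFormat: interleaved mono 16-bit PCM at 48 kHz.
let avFormat = AVAudioFormat(commonFormat: .pcmFormatInt16,
                             sampleRate: 48000,
                             channels: 1,
                             interleaved: true)!
// avFormat.streamDescription.pointee matches streamDesc above.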

subscribe { [weak self] chunkBuffer in
    guard let self = self else { return }

    let pcmBuffer = self.bytesToAudioBuffer(chunkBuffer.samples, withFormat: &streamDesc)
    // Convert the AVAudioPCMBuffer to a CMSampleBuffer.
    if let sampleBuffer = self.audioMixer.processAudioData(pcmBuffer: pcmBuffer) {
        // ..... omitted .....
    }
}
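The bytesToAudioBuffer helper used above is not shown in the original post. Here is a minimal sketch of what it might look like, assuming chunkBuffer.samples is a Data of interleaved Int16 samples matching streamDesc:

// Hypothetical implementation of the helper used above: wraps raw
// interleaved Int16 bytes in an AVAudioPCMBuffer described by `asbd`.
func bytesToAudioBuffer(_ bytes: Data, withFormat asbd: inout AudioStreamBasicDescription) -> AVAudioPCMBuffer {
    let format = AVAudioFormat(streamDescription: &asbd)!
    let frameCount = UInt32(bytes.count) / asbd.mBytesPerFrame
    let pcmBuffer = AVAudioPCMBuffer(pcmFormat: format, frameCapacity: frameCount)!
    pcmBuffer.frameLength = frameCount  // also updates mDataByteSize below
    // Copy the raw bytes into the buffer's single interleaved channel.
    let dst = pcmBuffer.mutableAudioBufferList.pointee.mBuffers
    bytes.withUnsafeBytes { src in
        _ = memcpy(dst.mData, src.baseAddress, min(bytes.count, Int(dst.mDataByteSize)))
    }
    return pcmBuffer
}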
