-
-
Gist: aibo-cora/c57d1a4125e145e586ecb61ebecff47c — converting an `AVAudioPCMBuffer` to a `CMSampleBuffer`.
| import Foundation | |
| import AVFoundation | |
| import CoreMedia | |
| class Converter { | |
| static func configureSampleBuffer(pcmBuffer: AVAudioPCMBuffer) -> CMSampleBuffer? { | |
| let audioBufferList = pcmBuffer.mutableAudioBufferList | |
| let asbd = pcmBuffer.format.streamDescription | |
| var sampleBuffer: CMSampleBuffer? = nil | |
| var format: CMFormatDescription? = nil | |
| var status = CMAudioFormatDescriptionCreate(allocator: kCFAllocatorDefault, | |
| asbd: asbd, | |
| layoutSize: 0, | |
| layout: nil, | |
| magicCookieSize: 0, | |
| magicCookie: nil, | |
| extensions: nil, | |
| formatDescriptionOut: &format); | |
| if (status != noErr) { return nil; } | |
| var timing: CMSampleTimingInfo = CMSampleTimingInfo(duration: CMTime(value: 1, timescale: Int32(asbd.pointee.mSampleRate)), | |
| presentationTimeStamp: CMClockGetTime(CMClockGetHostTimeClock()), | |
| decodeTimeStamp: CMTime.invalid) | |
| status = CMSampleBufferCreate(allocator: kCFAllocatorDefault, | |
| dataBuffer: nil, | |
| dataReady: false, | |
| makeDataReadyCallback: nil, | |
| refcon: nil, | |
| formatDescription: format, | |
| sampleCount: CMItemCount(pcmBuffer.frameLength), | |
| sampleTimingEntryCount: 1, | |
| sampleTimingArray: &timing, | |
| sampleSizeEntryCount: 0, | |
| sampleSizeArray: nil, | |
| sampleBufferOut: &sampleBuffer); | |
| if (status != noErr) { NSLog("CMSampleBufferCreate returned error: \(status)"); return nil } | |
| status = CMSampleBufferSetDataBufferFromAudioBufferList(sampleBuffer!, | |
| blockBufferAllocator: kCFAllocatorDefault, | |
| blockBufferMemoryAllocator: kCFAllocatorDefault, | |
| flags: 0, | |
| bufferList: audioBufferList); | |
| if (status != noErr) { NSLog("CMSampleBufferSetDataBufferFromAudioBufferList returned error: \(status)"); return nil; } | |
| return sampleBuffer | |
| } | |
| } |
Apple docs for `CMSampleBufferCreate` state: "Example of usage for uncompressed (non-)interleaved audio: ... sampleTimingArray: one entry = {duration = 1/48000, presentationTimeStamp = 0/48000, decodeTimeStamp = invalid} ..." So I think the definition of the `timing` parameter should be:

```swift
var timing = CMSampleTimingInfo(
    duration: CMTime(value: 1, timescale: Int32(asbd.pointee.mSampleRate)),
    presentationTimeStamp: CMTime(value: 0, timescale: Int32(asbd.pointee.mSampleRate)), // instead of CMClockGetTime(CMClockGetHostTimeClock())
    decodeTimeStamp: CMTime.invalid
)
```
@rlaguilar Feel free to adapt this to your needs. This implementation is used in a live app as is.
Apple docs for `CMSampleBufferCreate` state: "Example of usage for uncompressed (non-)interleaved audio: ... sampleTimingArray: one entry = {duration = 1/48000, presentationTimeStamp = 0/48000, decodeTimeStamp = invalid} ..." So I think the definition of the `timing` parameter should be:

```swift
var timing = CMSampleTimingInfo(
    duration: CMTime(value: 1, timescale: Int32(asbd.pointee.mSampleRate)),
    presentationTimeStamp: CMTime(value: 0, timescale: Int32(asbd.pointee.mSampleRate)), // instead of CMClockGetTime(CMClockGetHostTimeClock())
    decodeTimeStamp: CMTime.invalid
)
```
That doesn't seem correct as each buffer will then have 0 as their timestamp causing issues when writing to disk.
Apple docs for `CMSampleBufferCreate` suggest a zero-based presentation timestamp, so I think the definition of the `timing` parameter should be:

```swift
var timing = CMSampleTimingInfo(
    duration: CMTime(value: 1, timescale: Int32(asbd.pointee.mSampleRate)),
    presentationTimeStamp: CMTime(value: 0, timescale: Int32(asbd.pointee.mSampleRate)), // instead of CMClockGetTime(CMClockGetHostTimeClock())
    decodeTimeStamp: CMTime.invalid
)
```