Skip to content

Commit

Permalink
Use renderContext of the updated API.
Browse files Browse the repository at this point in the history
  • Loading branch information
mstyura committed Aug 24, 2022
1 parent ca95fc7 commit 53b2e3f
Show file tree
Hide file tree
Showing 2 changed files with 10 additions and 5 deletions.
4 changes: 2 additions & 2 deletions CustomRTCAudioDevice/AUAudioUnitRTCAudioDevice.swift
Original file line number Diff line number Diff line change
Expand Up @@ -154,11 +154,11 @@ final class AUAudioUnitRTCAudioDevice: NSObject {
measureTime(label: "AVAudioUnit define inputHandler") {
let deliverRecordedData = delegate.deliverRecordedData
let renderBlock = audioUnit.renderBlock
let customRenderBlock = { actionFlags, timestamp, inputBusNumber, frameCount, abl in
let customRenderBlock: RTCAudioDeviceRenderRecordedDataBlock = { actionFlags, timestamp, inputBusNumber, frameCount, abl, renderContext in
return renderBlock(actionFlags, timestamp, frameCount, inputBusNumber, abl, nil)
}
audioUnit.inputHandler = { actionFlags, timestamp, frameCount, inputBusNumber in
let status = deliverRecordedData(actionFlags, timestamp, inputBusNumber, frameCount, nil, customRenderBlock)
let status = deliverRecordedData(actionFlags, timestamp, inputBusNumber, frameCount, nil, nil, customRenderBlock)
if status != noErr {
print("Failed to deliver audio data: \(status)")
}
Expand Down
11 changes: 8 additions & 3 deletions CustomRTCAudioDevice/AVAudioEngineRTCAudioDevice.swift
Original file line number Diff line number Diff line change
Expand Up @@ -216,11 +216,16 @@ final class AVAudioEngineRTCAudioDevice: NSObject {
// NOTE: AVAudioSinkNode provides audio data with HW sample rate in 32-bit float format,
// WebRTC requires 16-bit int format, so do the conversion
let converter = SimpleAudioConverter(from: inputFormat, to: rtcRecordFormat)!

let customRenderBlock: RTCAudioDeviceRenderRecordedDataBlock = { actionFlags, timestamp, inputBusNumber, frameCount, abl, renderContext in
let (converter, inputData) = renderContext!.assumingMemoryBound(to: (Unmanaged<SimpleAudioConverter>, UnsafeMutablePointer<AudioBufferList>).self).pointee
return converter.takeUnretainedValue().convert(framesCount: frameCount, from: inputData, to: abl)
}

let audioSink = AVAudioSinkNode(receiverBlock: { (timestamp, framesCount, inputData) -> OSStatus in
var flags: AudioUnitRenderActionFlags = []
return deliverRecordedData(&flags, timestamp, 1, framesCount, nil, { actionFlags, timestamp, inputBusNumber, frameCount, abl in
return converter.convert(framesCount: framesCount, from: inputData, to: abl)
})
var renderContext = (Unmanaged.passUnretained(converter), inputData)
return deliverRecordedData(&flags, timestamp, 1, framesCount, nil, &renderContext, customRenderBlock)
})

measureTime(label: "Attach AVAudioSinkNode") {
Expand Down

0 comments on commit 53b2e3f

Please sign in to comment.