I would most definitely look at the new AVAudioEngine API for doing the kinds of things you are trying to do.
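For example, something roughly like this covers your buffer -> low-pass -> capture chain with AVAudioEngine. It is only an untested sketch: it assumes your samples are already in an AVAudioPCMBuffer, the method name and 1 kHz cutoff are just placeholders, and in real code the engine and player should live in ivars so they outlive the method.

#import <AVFoundation/AVFoundation.h>

- (void)processBuffer:(AVAudioPCMBuffer *)buffer {
    // In a real app, keep engine/player/eq in ivars so playback
    // isn't torn down when this method returns.
    AVAudioEngine *engine = [[AVAudioEngine alloc] init];
    AVAudioPlayerNode *player = [[AVAudioPlayerNode alloc] init];

    // One-band EQ configured as a low-pass filter.
    AVAudioUnitEQ *eq = [[AVAudioUnitEQ alloc] initWithNumberOfBands:1];
    AVAudioUnitEQFilterParameters *band = eq.bands[0];
    band.filterType = AVAudioUnitEQFilterTypeLowPass;
    band.frequency = 1000.0;   // placeholder cutoff in Hz
    band.bypass = NO;

    [engine attachNode:player];
    [engine attachNode:eq];

    AVAudioFormat *format = buffer.format;
    [engine connect:player to:eq format:format];
    [engine connect:eq to:engine.mainMixerNode format:format];

    // Tap the EQ's output to get the filtered samples back for further
    // processing, writing to an AVAudioFile, etc.
    [eq installTapOnBus:0
             bufferSize:4096
                 format:format
                  block:^(AVAudioPCMBuffer *filtered, AVAudioTime *when) {
                      // filtered.floatChannelData is the low-passed audio
                  }];

    NSError *error = nil;
    if (![engine startAndReturnError:&error]) {
        NSLog(@"engine failed to start: %@", error);
        return;
    }

    [player scheduleBuffer:buffer completionHandler:nil];
    [player play];
}

The tap hands you the filtered audio as AVAudioPCMBuffers, so you can copy it into your own buffer or append it to a file from the block. The class reference is here: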
https://developer.apple.com/library/ios/documentation/AVFoundation/Reference/AVAudioEngine_Class/

-DS

> On Mar 16, 2015, at 1:07 AM, Patrick J. Collins <[email protected]> wrote:
>
> Hi everyone,
>
> I am trying to implement a simple audio unit graph that goes:
>
> buffer of samples -> low pass filter -> generic output
>
> Where the generic output would be copied into a new buffer that could then be
> processed further, saved to disk, etc.
>
> All of the examples I can find online having to do with setting up an audio
> unit graph involve using a generator with kAudioUnitSubType_AudioFilePlayer
> as the input source... I am dealing with a buffer of samples already acquired,
> so those examples do not help... Based on looking around in the
> AudioUnitProperties.h file, it looks like what I should be using is
> kAudioUnitSubType_ScheduledSoundPlayer?
>
> I can't seem to find much documentation on how to hook this up, so I am quite
> stuck and am hoping someone here can help me out.
>
> To simplify things, I just started out by trying to get my buffer of samples
> to go straight to the system output. I get no errors, but I also get no audio.
> Here is my code:
>
> #import "EffectMachine.h"
> #import <AudioToolbox/AudioToolbox.h>
> #import "AudioHelpers.h"
> #import "Buffer.h"
>
> @interface EffectMachine ()
> @property (nonatomic, strong) Buffer *buffer;
> @end
>
> typedef struct EffectMachineGraph {
>     AUGraph   graph;
>     AudioUnit input;
>     AudioUnit lowpass;
>     AudioUnit output;
> } EffectMachineGraph;
>
> @implementation EffectMachine {
>     EffectMachineGraph machine;
> }
>
> -(instancetype)initWithBuffer:(Buffer *)buffer {
>     if (self = [super init]) {
>         self.buffer = buffer;
>
>         // buffer is a simple wrapper object that holds two properties:
>         // a pointer to the array of samples (as doubles) and the size
>         // (number of samples)
>     }
>     return self;
> }
>
> -(void)process {
>     struct EffectMachineGraph initialized = {0};
>     machine = initialized;
>
>     CheckError(NewAUGraph(&machine.graph),
>                "NewAUGraph failed");
>
>     AudioComponentDescription outputCD = {0};
>     outputCD.componentType = kAudioUnitType_Output;
>     outputCD.componentSubType = kAudioUnitSubType_DefaultOutput;
>     outputCD.componentManufacturer = kAudioUnitManufacturer_Apple;
>
>     AUNode outputNode;
>     CheckError(AUGraphAddNode(machine.graph,
>                               &outputCD,
>                               &outputNode),
>                "AUGraphAddNode[kAudioUnitSubType_GenericOutput] failed");
>
>     AudioComponentDescription inputCD = {0};
>     inputCD.componentType = kAudioUnitType_Generator;
>     inputCD.componentSubType = kAudioUnitSubType_ScheduledSoundPlayer;
>     inputCD.componentManufacturer = kAudioUnitManufacturer_Apple;
>
>     AUNode inputNode;
>     CheckError(AUGraphAddNode(machine.graph,
>                               &inputCD,
>                               &inputNode),
>                "AUGraphAddNode[kAudioUnitSubType_ScheduledSoundPlayer] failed");
>
>     CheckError(AUGraphOpen(machine.graph),
>                "AUGraphOpen failed");
>
>     CheckError(AUGraphNodeInfo(machine.graph,
>                                inputNode,
>                                NULL,
>                                &machine.input),
>                "AUGraphNodeInfo failed");
>
>     CheckError(AUGraphConnectNodeInput(machine.graph,
>                                        inputNode,
>                                        0,
>                                        outputNode,
>                                        0),
>                "AUGraphConnectNodeInput");
>
>     CheckError(AUGraphInitialize(machine.graph),
>                "AUGraphInitialize failed");
>
>     // prepare input
>
>     AudioBufferList ioData = {0};
>     ioData.mNumberBuffers = 1;
>     ioData.mBuffers[0].mNumberChannels = 1;
>     ioData.mBuffers[0].mDataByteSize = (UInt32)(2 * self.buffer.size);
>     ioData.mBuffers[0].mData = self.buffer.samples;
>
>     ScheduledAudioSlice slice = {0};
>     AudioTimeStamp timeStamp = {0};
>
>     slice.mTimeStamp = timeStamp;
>     slice.mNumberFrames = (UInt32)self.buffer.size;
>     slice.mBufferList = &ioData;
>
>     CheckError(AudioUnitSetProperty(machine.input,
>                                     kAudioUnitProperty_ScheduleAudioSlice,
>                                     kAudioUnitScope_Global,
>                                     0,
>                                     &slice,
>                                     sizeof(slice)),
>                "AudioUnitSetProperty[kAudioUnitProperty_ScheduleStartTimeStamp] failed");
>
>     AudioTimeStamp startTimeStamp = {0};
>     timeStamp.mFlags = kAudioTimeStampSampleTimeValid;
>     timeStamp.mSampleTime = -1;
>
>     CheckError(AudioUnitSetProperty(machine.input,
>                                     kAudioUnitProperty_ScheduleStartTimeStamp,
>                                     kAudioUnitScope_Global,
>                                     0,
>                                     &startTimeStamp,
>                                     sizeof(startTimeStamp)),
>                "AudioUnitSetProperty[kAudioUnitProperty_ScheduleStartTimeStamp] failed");
>
>     CheckError(AUGraphStart(machine.graph),
>                "AUGraphStart failed");
>
>     // AUGraphStop(machine.graph);  <-- commented out to make sure it wasn't
>     //                                  stopping before playing.
>     // AUGraphUninitialize(machine.graph);
>     // AUGraphClose(machine.graph);
> }
>
> Does anyone know what I am doing wrong here?
>
> Patrick J. Collins
> http://collinatorstudios.com
