This article collects typical usage examples of the C++ function AudioUnitSetProperty. If you have been wondering what AudioUnitSetProperty does, how to call it, or what real-world usage looks like, the hand-picked code samples below should help.
The following shows 15 code examples of AudioUnitSetProperty, sorted by popularity by default. You can upvote the examples you like or find useful; your ratings help the system recommend better C++ code examples.
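Before the examples, a quick reminder of the call pattern: AudioUnitSetProperty(unit, propertyID, scope, element, data, dataSize) writes dataSize bytes of property data onto the given scope and element (bus) of an Audio Unit and returns an OSStatus, with noErr indicating success. The minimal sketch below assumes an already-instantiated output unit (here called outputUnit, a placeholder name) and sets a 16-bit stereo linear-PCM stream format on its input scope, which is the basic pattern most of the examples that follow are built around.

#include <AudioUnit/AudioUnit.h>

// Sketch: describe 44.1 kHz, 16-bit, interleaved stereo LPCM and apply it to
// the input scope of element (bus) 0 on an existing output unit.
static OSStatus setStereoOutputFormat(AudioUnit outputUnit) // outputUnit: assumed, already created
{
    AudioStreamBasicDescription fmt = {};
    fmt.mSampleRate       = 44100;
    fmt.mFormatID         = kAudioFormatLinearPCM;
    fmt.mFormatFlags      = kLinearPCMFormatFlagIsSignedInteger | kLinearPCMFormatFlagIsPacked;
    fmt.mChannelsPerFrame = 2;
    fmt.mBitsPerChannel   = 16;
    fmt.mFramesPerPacket  = 1;
    fmt.mBytesPerFrame    = (fmt.mBitsPerChannel / 8) * fmt.mChannelsPerFrame;
    fmt.mBytesPerPacket   = fmt.mBytesPerFrame * fmt.mFramesPerPacket;

    // AudioUnitSetProperty(unit, propertyID, scope, element, data, dataSize)
    return AudioUnitSetProperty(outputUnit,
                                kAudioUnitProperty_StreamFormat,
                                kAudioUnitScope_Input,
                                0,              // element (bus) 0
                                &fmt,
                                sizeof(fmt));
}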
Example 1: zeromem
Error AudioDriverIphone::init() {

    active = false;
    channels = 2;

    AudioStreamBasicDescription strdesc;
    strdesc.mFormatID = kAudioFormatLinearPCM;
    strdesc.mFormatFlags = kLinearPCMFormatFlagIsSignedInteger | kLinearPCMFormatFlagIsPacked;
    strdesc.mChannelsPerFrame = channels;
    strdesc.mSampleRate = 44100;
    strdesc.mFramesPerPacket = 1;
    strdesc.mBitsPerChannel = 16;
    strdesc.mBytesPerFrame = strdesc.mBitsPerChannel * strdesc.mChannelsPerFrame / 8;
    strdesc.mBytesPerPacket = strdesc.mBytesPerFrame * strdesc.mFramesPerPacket;

    OSStatus result = noErr;
    AURenderCallbackStruct callback;
    AudioComponentDescription desc;
    AudioComponent comp = NULL;
    const AudioUnitElement output_bus = 0;
    const AudioUnitElement bus = output_bus;
    const AudioUnitScope scope = kAudioUnitScope_Input;

    // Locate and instantiate the RemoteIO output unit.
    zeromem(&desc, sizeof(desc));
    desc.componentType = kAudioUnitType_Output;
    desc.componentSubType = kAudioUnitSubType_RemoteIO; /* !!! FIXME: ? */
    desc.componentManufacturer = kAudioUnitManufacturer_Apple;
    comp = AudioComponentFindNext(NULL, &desc);
    ERR_FAIL_COND_V(comp == NULL, FAILED);

    result = AudioComponentInstanceNew(comp, &audio_unit);
    ERR_FAIL_COND_V(result != noErr, FAILED);

    // Set the stream format on the output unit's input scope (bus 0).
    result = AudioUnitSetProperty(audio_unit,
            kAudioUnitProperty_StreamFormat,
            scope, bus, &strdesc, sizeof(strdesc));
    ERR_FAIL_COND_V(result != noErr, FAILED);

    // Register the render callback that supplies audio data.
    zeromem(&callback, sizeof(AURenderCallbackStruct));
    callback.inputProc = &AudioDriverIphone::output_callback;
    callback.inputProcRefCon = this;
    result = AudioUnitSetProperty(audio_unit,
            kAudioUnitProperty_SetRenderCallback,
            scope, bus, &callback, sizeof(callback));
    ERR_FAIL_COND_V(result != noErr, FAILED);

    result = AudioUnitInitialize(audio_unit);
    ERR_FAIL_COND_V(result != noErr, FAILED);

    result = AudioOutputUnitStart(audio_unit);
    ERR_FAIL_COND_V(result != noErr, FAILED);

    const int samples = 1024;
    samples_in = memnew_arr(int32_t, samples); // whatever
    buffer_frames = samples / channels;

    return OK;
};
Example 2: AUGraphAddNode
OSStatus FCoreAudioSoundSource::CreateAudioUnit( OSType Type, OSType SubType, OSType Manufacturer, AudioStreamBasicDescription* InputFormat, AudioStreamBasicDescription* OutputFormat, AUNode* OutNode, AudioUnit* OutUnit )
{
    AudioComponentDescription Desc;
    Desc.componentFlags = 0;
    Desc.componentFlagsMask = 0;
    Desc.componentType = Type;
    Desc.componentSubType = SubType;
    Desc.componentManufacturer = Manufacturer;

    OSStatus Status = AUGraphAddNode( AudioDevice->GetAudioUnitGraph(), &Desc, OutNode );
    if( Status == noErr )
    {
        Status = AUGraphNodeInfo( AudioDevice->GetAudioUnitGraph(), *OutNode, NULL, OutUnit );
    }

    if( Status == noErr )
    {
        if( InputFormat )
        {
            Status = AudioUnitSetProperty( *OutUnit, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Input, 0, InputFormat, sizeof( AudioStreamBasicDescription ) );
        }

        if( Status == noErr )
        {
            if( OutputFormat )
            {
                Status = AudioUnitSetProperty( *OutUnit, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Output, 0, OutputFormat, sizeof( AudioStreamBasicDescription ) );
            }
        }
    }

    return Status;
}
Example 3: CFStringCreateWithCString
// ----------------------------------------------------------
void ofxAudioUnitNetReceive::connectToHost(const std::string &address, unsigned long port)
// ----------------------------------------------------------
{
    stringstream ss;
    ss << address << ":" << port;

    CFStringRef hostName = CFStringCreateWithCString(kCFAllocatorDefault,
                                                     ss.str().c_str(),
                                                     kCFStringEncodingUTF8);

    OFXAU_PRINT(AudioUnitSetProperty(*_unit,
                                     kAUNetReceiveProperty_Hostname,
                                     kAudioUnitScope_Global,
                                     0,
                                     &hostName,
                                     sizeof(hostName)),
                "setting net receive host name");

    // setting net send disconnect to 0 to connect net receive because that makes sense
    UInt32 connect = 0;
    OFXAU_PRINT(AudioUnitSetProperty(*_unit,
                                     kAUNetSendProperty_Disconnect,
                                     kAudioUnitScope_Global,
                                     0,
                                     &connect,
                                     sizeof(connect)),
                "connecting net receive");

    CFRelease(hostName);
}
Example 4: audiounits_start
static int audiounits_start(void *usr) {
    au_instance_t *ap = (au_instance_t*) usr;
    OSStatus err;
    if (ap->kind == AI_RECORDER) {
#if defined(MAC_OS_X_VERSION_10_5) && (MAC_OS_X_VERSION_MIN_REQUIRED>=MAC_OS_X_VERSION_10_5)
        err = AudioDeviceStart(ap->inDev, ap->inIOProcID);
#else
        err = AudioDeviceStart(ap->inDev, inputRenderProc);
#endif
        if (err) Rf_error("unable to start recording (%08x)", err);
    } else {
        AURenderCallbackStruct renderCallback = { outputRenderProc, usr };
        ap->done = NO;
        /* set format */
        ap->fmtOut.mSampleRate = ap->sample_rate;
        err = AudioUnitSetProperty(ap->outUnit, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Input, 0, &ap->fmtOut, sizeof(ap->fmtOut));
        if (err) Rf_error("unable to set output audio format (%08x)", err);
        /* set callback */
        err = AudioUnitSetProperty(ap->outUnit, kAudioUnitProperty_SetRenderCallback, kAudioUnitScope_Input, 0, &renderCallback, sizeof(renderCallback));
        if (err) Rf_error("unable to register audio callback (%08x)", err);
        /* start audio */
        err = AudioOutputUnitStart(ap->outUnit);
        if (err) Rf_error("unable to start playback (%08x)", err);
    }
    return 1;
}
Example 5: AudioUnitSetProperty
void AudioDestinationIOS::configure()
{
    // Set render callback
    AURenderCallbackStruct input;
    input.inputProc = inputProc;
    input.inputProcRefCon = this;
    OSStatus result = AudioUnitSetProperty(m_outputUnit, kAudioUnitProperty_SetRenderCallback, kAudioUnitScope_Input, 0, &input, sizeof(input));
    ASSERT(!result);

    // Set stream format
    AudioStreamBasicDescription streamFormat;
    UInt32 size = sizeof(AudioStreamBasicDescription);
    result = AudioUnitGetProperty(m_outputUnit, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Output, 0, (void*)&streamFormat, &size);
    ASSERT(!result);

    const int bytesPerFloat = sizeof(Float32);
    const int bitsPerByte = 8;
    streamFormat.mSampleRate = m_sampleRate;
    streamFormat.mFormatID = kAudioFormatLinearPCM;
    streamFormat.mFormatFlags = kAudioFormatFlagsNativeFloatPacked | kAudioFormatFlagIsNonInterleaved;
    streamFormat.mBytesPerPacket = bytesPerFloat;
    streamFormat.mFramesPerPacket = 1;
    streamFormat.mBytesPerFrame = bytesPerFloat;
    streamFormat.mChannelsPerFrame = 2;
    streamFormat.mBitsPerChannel = bitsPerByte * bytesPerFloat;

    result = AudioUnitSetProperty(m_outputUnit, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Input, 0, (void*)&streamFormat, sizeof(AudioStreamBasicDescription));
    ASSERT(!result);

    AudioSession::sharedSession().setPreferredBufferSize(kPreferredBufferSize);
}
Example 6: AudioUnitSetProperty
OSStatus CAPlayThrough::EnableIO()
{
    OSStatus err = noErr;
    UInt32 enableIO;

    ///////////////
    // ENABLE IO (INPUT)
    // You must enable the Audio Unit (AUHAL) for input and disable output
    // BEFORE setting the AUHAL's current device.

    // Enable input on the AUHAL
    enableIO = 1;
    err = AudioUnitSetProperty(mInputUnit,
                               kAudioOutputUnitProperty_EnableIO,
                               kAudioUnitScope_Input,
                               1, // input element
                               &enableIO,
                               sizeof(enableIO));
    checkErr(err);

    // Disable output on the AUHAL
    enableIO = 0;
    err = AudioUnitSetProperty(mInputUnit,
                               kAudioOutputUnitProperty_EnableIO,
                               kAudioUnitScope_Output,
                               0, // output element
                               &enableIO,
                               sizeof(enableIO));
    return err;
}
Example 7: AudioUnitGetProperty
bool CAUOutputDevice::EnableInputOuput()
{
    if (!m_audioUnit)
        return false;

    OSStatus ret;
    UInt32 enable;
    UInt32 hasio;
    UInt32 size = sizeof(UInt32);

    ret = AudioUnitGetProperty(m_audioUnit, kAudioOutputUnitProperty_HasIO, kAudioUnitScope_Input, 1, &hasio, &size);

    if (hasio)
    {
        // Enable input on the input element (bus 1).
        enable = 1;
        ret = AudioUnitSetProperty(m_audioUnit, kAudioOutputUnitProperty_EnableIO, kAudioUnitScope_Input, kInputBus, &enable, sizeof(enable));
        if (ret)
        {
            CLog::Log(LOGERROR, "CAUOutputDevice::EnableInputOuput:: Unable to enable input on bus 1. Error = %s", GetError(ret).c_str());
            return false;
        }

        // Enable output on the output element (bus 0).
        enable = 1;
        ret = AudioUnitSetProperty(m_audioUnit, kAudioOutputUnitProperty_EnableIO, kAudioUnitScope_Output, kOutputBus, &enable, sizeof(enable));
        if (ret)
        {
            CLog::Log(LOGERROR, "CAUOutputDevice::EnableInputOuput:: Unable to enable output on bus 0. Error = %s", GetError(ret).c_str());
            return false;
        }
    }

    return true;
}
Example 8: AudioUnitSetProperty
void AudioDestinationMac::configure()
{
    // Set render callback
    AURenderCallbackStruct input;
    input.inputProc = inputProc;
    input.inputProcRefCon = this;
    OSStatus result = AudioUnitSetProperty(m_outputUnit, kAudioUnitProperty_SetRenderCallback, kAudioUnitScope_Global, 0, &input, sizeof(input));
    ASSERT(!result);

    // Set stream format
    AudioStreamBasicDescription streamFormat;
    streamFormat.mSampleRate = m_sampleRate;
    streamFormat.mFormatID = kAudioFormatLinearPCM;
    streamFormat.mFormatFlags = kAudioFormatFlagsNativeFloatPacked | kAudioFormatFlagIsNonInterleaved;
    streamFormat.mBitsPerChannel = 8 * sizeof(Float32);
    streamFormat.mChannelsPerFrame = 2;
    streamFormat.mFramesPerPacket = 1;
    streamFormat.mBytesPerPacket = sizeof(Float32);
    streamFormat.mBytesPerFrame = sizeof(Float32);

    result = AudioUnitSetProperty(m_outputUnit, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Input, 0, (void*)&streamFormat, sizeof(AudioStreamBasicDescription));
    ASSERT(!result);

    // Set the buffer frame size.
    UInt32 bufferSize = kBufferSize;
    result = AudioUnitSetProperty(m_outputUnit, kAudioDevicePropertyBufferFrameSize, kAudioUnitScope_Output, 0, (void*)&bufferSize, sizeof(bufferSize));
    ASSERT(!result);

    m_input->configure(streamFormat, bufferSize);
}
Example 9: RegionForEntireFile
void ofxAudioUnitFilePlayer::prime() {
    _region = RegionForEntireFile(_fileID[0]);

    if(_seekSampleTime) {
        _region.mStartFrame = _seekSampleTime;
        _pauseTimeAccumulator += _seekSampleTime;
    } else if(_pauseTimeStamp.mSampleTime > 0) {
        _region.mStartFrame = _pauseTimeStamp.mSampleTime + _pauseTimeAccumulator;
        _pauseTimeAccumulator += _pauseTimeStamp.mSampleTime;
    } else {
        _pauseTimeAccumulator = 0;
    }

    if(_loopCount > 0) {
        _region.mLoopCount = _loopCount;
    }

    // resetting time-tracking members
    memset(&_pauseTimeStamp, 0, sizeof(_pauseTimeStamp));
    _loopCount = 0;
    _seekSampleTime = 0;

    if(!(_region.mTimeStamp.mFlags & kAudioTimeStampHostTimeValid)) {
        cout << "ofxAudioUnitFilePlayer has no file to play" << endl;
        return;
    }

    OFXAU_RETURN(AudioUnitSetProperty(*_unit,
                                      kAudioUnitProperty_ScheduledFileIDs,
                                      kAudioUnitScope_Global,
                                      0,
                                      _fileID,
                                      sizeof(_fileID)),
                 "setting file player's file ID");

    OFXAU_RETURN(AudioUnitSetProperty(*_unit,
                                      kAudioUnitProperty_ScheduledFileRegion,
                                      kAudioUnitScope_Global,
                                      0,
                                      &_region,
                                      sizeof(_region)),
                 "setting file player region");

    UInt32 framesToPrime = 0; // 0 = "use the default"
    OFXAU_RETURN(AudioUnitSetProperty(*_unit,
                                      kAudioUnitProperty_ScheduledFilePrime,
                                      kAudioUnitScope_Global,
                                      0,
                                      &framesToPrime,
                                      sizeof(framesToPrime)),
                 "priming file player");

    _primed = true;
}
Example 10: SetupRemoteIO
int SetupRemoteIO (AudioUnit& inRemoteIOUnit, Float64 sampleRate, AURenderCallbackStruct inRenderProc, CAStreamBasicDescription& outFormat)
{
    try {
        // Open the output unit
        AudioComponentDescription desc;
        desc.componentType = kAudioUnitType_Output;
        desc.componentSubType = kAudioUnitSubType_RemoteIO;
        desc.componentManufacturer = kAudioUnitManufacturer_Apple;
        desc.componentFlags = 0;
        desc.componentFlagsMask = 0;

        AudioComponent comp = AudioComponentFindNext(NULL, &desc);
        XThrowIfError(AudioComponentInstanceNew(comp, &inRemoteIOUnit), "couldn't open the remote I/O unit");

        UInt32 zero = 0;
        UInt32 one = 1;

        // enable input
        XThrowIfError(AudioUnitSetProperty(inRemoteIOUnit, kAudioOutputUnitProperty_EnableIO, kAudioUnitScope_Input, 1, &one, sizeof(one)), "couldn't enable input on the remote I/O unit");
        // disable output
        XThrowIfError(AudioUnitSetProperty(inRemoteIOUnit, kAudioOutputUnitProperty_EnableIO, kAudioUnitScope_Output, 0, &zero, sizeof(zero)), "couldn't disable output ");
        // set input callback
        XThrowIfError(AudioUnitSetProperty(inRemoteIOUnit, kAudioOutputUnitProperty_SetInputCallback, kAudioUnitScope_Global, 1, &inRenderProc, sizeof(inRenderProc)), "couldn't set remote i/o input callback");

        // set our required format - LPCM non-interleaved 32 bit floating point
        // (filled into the caller's outFormat so the render code knows what to expect)
        outFormat.mSampleRate = sampleRate;
        outFormat.mFormatID = kAudioFormatLinearPCM;
        outFormat.mFormatFlags = kAudioFormatFlagsNativeEndian | kAudioFormatFlagIsPacked | kAudioFormatFlagIsFloat | kAudioFormatFlagIsNonInterleaved;
        outFormat.mFramesPerPacket = 1;
        outFormat.mBytesPerPacket = 4;
        outFormat.mChannelsPerFrame = 1;
        outFormat.mBitsPerChannel = 32;
        outFormat.mBytesPerFrame = 4;

        XThrowIfError(AudioUnitSetProperty(inRemoteIOUnit, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Output, 1, &outFormat, sizeof(outFormat)), "couldn't set the remote I/O unit's output client format");

        XThrowIfError(AudioUnitInitialize(inRemoteIOUnit), "couldn't initialize the remote I/O unit");
    }
    catch (CAXException &e) {
        char buf[256];
        fprintf(stderr, "Error: %s (%s)\n", e.mOperation, e.FormatError(buf));
        return 1;
    }
    catch (...) {
        fprintf(stderr, "An unknown error occurred\n");
        return 1;
    }
    return 0;
}
Example 11: AudioUnitSetProperty
OSStatus CAAudioUnit::SetPresentPreset (AUPreset &inData)
{
    OSStatus result = AudioUnitSetProperty (AU(), kAudioUnitProperty_PresentPreset,
                                            kAudioUnitScope_Global, 0,
                                            &inData, sizeof (AUPreset));
    if (result == kAudioUnitErr_InvalidProperty) {
        result = AudioUnitSetProperty (AU(), kAudioUnitProperty_CurrentPreset,
                                       kAudioUnitScope_Global, 0,
                                       &inData, sizeof (AUPreset));
    }
    return result;
}
Example 12: notification
static OSStatus notification(AudioDeviceID inDevice,
                             UInt32 inChannel,
                             Boolean isInput,
                             AudioDevicePropertyID inPropertyID,
                             void* inClientData)
{
    coreaudio_driver_t* driver = (coreaudio_driver_t*)inClientData;
    switch (inPropertyID) {

        case kAudioDeviceProcessorOverload:
            driver->xrun_detected = 1;
            break;

        case kAudioDevicePropertyNominalSampleRate: {
            UInt32 outSize = sizeof(Float64);
            Float64 sampleRate;
            AudioStreamBasicDescription srcFormat, dstFormat;

            OSStatus err = AudioDeviceGetProperty(driver->device_id, 0, kAudioDeviceSectionGlobal, kAudioDevicePropertyNominalSampleRate, &outSize, &sampleRate);
            if (err != noErr) {
                jack_error("Cannot get current sample rate");
                return kAudioHardwareUnsupportedOperationError;
            }
            JCALog("JackCoreAudioDriver::NotificationCallback kAudioDevicePropertyNominalSampleRate %ld\n", (long)sampleRate);

            outSize = sizeof(AudioStreamBasicDescription);

            // Update SR for input
            err = AudioUnitGetProperty(driver->au_hal, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Input, 0, &srcFormat, &outSize);
            if (err != noErr) {
                jack_error("Error calling AudioUnitGetProperty - kAudioUnitProperty_StreamFormat kAudioUnitScope_Input");
            }
            srcFormat.mSampleRate = sampleRate;
            err = AudioUnitSetProperty(driver->au_hal, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Input, 0, &srcFormat, outSize);
            if (err != noErr) {
                jack_error("Error calling AudioUnitSetProperty - kAudioUnitProperty_StreamFormat kAudioUnitScope_Input");
            }

            // Update SR for output
            err = AudioUnitGetProperty(driver->au_hal, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Output, 1, &dstFormat, &outSize);
            if (err != noErr) {
                jack_error("Error calling AudioUnitGetProperty - kAudioUnitProperty_StreamFormat kAudioUnitScope_Output");
            }
            dstFormat.mSampleRate = sampleRate;
            err = AudioUnitSetProperty(driver->au_hal, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Output, 1, &dstFormat, outSize);
            if (err != noErr) {
                jack_error("Error calling AudioUnitSetProperty - kAudioUnitProperty_StreamFormat kAudioUnitScope_Output");
            }
            break;
        }
    }
    return noErr;
}
Example 13: sizeof
// ----------------------------------------------------------
ofxAudioUnit& ofxAudioUnitInput::connectTo(ofxAudioUnit &otherUnit, int destinationBus, int sourceBus)
// ----------------------------------------------------------
{
    AudioStreamBasicDescription ASBD;
    UInt32 ASBDSize = sizeof(ASBD);

    OFXAU_PRINT(AudioUnitGetProperty(otherUnit,
                                     kAudioUnitProperty_StreamFormat,
                                     kAudioUnitScope_Input,
                                     destinationBus,
                                     &ASBD,
                                     &ASBDSize),
                "getting hardware input destination's format");

    OFXAU_PRINT(AudioUnitSetProperty(*_unit,
                                     kAudioUnitProperty_StreamFormat,
                                     kAudioUnitScope_Output,
                                     1,
                                     &ASBD,
                                     sizeof(ASBD)),
                "setting hardware input's output format");

    AURenderCallbackStruct callback = {PullCallback, &_impl->ctx};
    otherUnit.setRenderCallback(callback, destinationBus);
    return otherUnit;
}
Example 14: sizeof
OSStatus CAPlayThrough::SetInputDeviceAsCurrent(AudioDeviceID in)
{
    UInt32 size = sizeof(AudioDeviceID);
    OSStatus err = noErr;

    if(in == kAudioDeviceUnknown) // get the default input device if device is unknown
    {
        err = AudioHardwareGetProperty(kAudioHardwarePropertyDefaultInputDevice,
                                       &size,
                                       &in);
        checkErr(err);
    }

    mInputDevice.Init(in, true);

    // Set the Current Device to the AUHAL.
    // This should be done only after IO has been enabled on the AUHAL.
    err = AudioUnitSetProperty(mInputUnit,
                               kAudioOutputUnitProperty_CurrentDevice,
                               kAudioUnitScope_Global,
                               0,
                               &mInputDevice.mID,
                               sizeof(mInputDevice.mID));
    checkErr(err);
    return err;
}
Example 15: Core_CloseAudio
void Core_CloseAudio(_THIS)
{
    OSStatus result;
    struct AURenderCallbackStruct callback;

    /* stop processing the audio unit */
    result = AudioOutputUnitStop (outputAudioUnit);
    if (result != noErr) {
        SDL_SetError("Core_CloseAudio: AudioOutputUnitStop");
        return;
    }

    /* Remove the input callback */
    callback.inputProc = 0;
    callback.inputProcRefCon = 0;
    result = AudioUnitSetProperty (outputAudioUnit,
                                   kAudioUnitProperty_SetRenderCallback,
                                   kAudioUnitScope_Input,
                                   0,
                                   &callback,
                                   sizeof(callback));
    if (result != noErr) {
        SDL_SetError("Core_CloseAudio: AudioUnitSetProperty (kAudioUnitProperty_SetRenderCallback)");
        return;
    }

    result = CloseComponent(outputAudioUnit);
    if (result != noErr) {
        SDL_SetError("Core_CloseAudio: CloseComponent");
        return;
    }

    SDL_free(buffer);
}