About Core Audio
Core Audio is Apple's low-level audio framework and the foundation of all audio functionality on iOS. It provides direct access to the audio hardware and supports advanced, real-time processing, making it suitable for professional audio applications.
Key Features
- Low-latency audio processing
- Hardware-level access
- Multi-channel audio support
- Audio Units architecture (see the sketch after this list)
- Audio processing graphs
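The Audio Units architecture noted above is reached through the AudioComponent APIs in AudioToolbox. As a rough sketch only (assuming the iOS RemoteIO output unit; a render callback would still have to be attached before any audio is produced), an output unit can be located and instantiated like this:
#import <AudioToolbox/AudioToolbox.h>
// Describe the unit we want: Apple's hardware I/O ("RemoteIO") output unit on iOS
AudioComponentDescription description = {0};
description.componentType = kAudioUnitType_Output;
description.componentSubType = kAudioUnitSubType_RemoteIO;
description.componentManufacturer = kAudioUnitManufacturer_Apple;
// Find a matching component and create an instance of it
AudioComponent component = AudioComponentFindNext(NULL, &description);
AudioUnit outputUnit;
OSStatus result = AudioComponentInstanceNew(component, &outputUnit);
if (result == noErr) {
    AudioUnitInitialize(outputUnit); // allocate resources; the unit can now be configured and started
}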
Code Example
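The example below uses Audio Queue Services (part of AudioToolbox) to set up a stereo, 16-bit linear PCM playback queue, prime it with three buffers, and start playback. The buffer-filling steps are left as placeholders.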
// CoreAudio example with Audio Queue Services
#import <AudioToolbox/AudioToolbox.h>
// Define the audio data format: 16-bit signed integer, interleaved (packed) stereo PCM
AudioStreamBasicDescription audioFormat = {0}; // zero-initialize so mReserved and unused fields are 0
audioFormat.mSampleRate = 44100.0;
audioFormat.mFormatID = kAudioFormatLinearPCM;
audioFormat.mFormatFlags = kAudioFormatFlagIsSignedInteger | kAudioFormatFlagIsPacked;
audioFormat.mFramesPerPacket = 1; // one frame per packet for linear PCM
audioFormat.mChannelsPerFrame = 2; // Stereo
audioFormat.mBitsPerChannel = 16;
audioFormat.mBytesPerFrame = 4; // 2 bytes per sample * 2 channels
audioFormat.mBytesPerPacket = 4; // mBytesPerFrame * mFramesPerPacket
// Audio queue reference
AudioQueueRef queue;
// Playback callback: invoked by the queue whenever a buffer needs refilling
static void AudioQueueCallback(void *inUserData, AudioQueueRef inQueue, AudioQueueBufferRef inBuffer) {
    // Fill inBuffer->mAudioData with audio data and set inBuffer->mAudioDataByteSize
    // ...
    // Hand the buffer back to the queue
    AudioQueueEnqueueBuffer(inQueue, inBuffer, 0, NULL);
}
// Create an audio queue for playback
OSStatus status = AudioQueueNewOutput(
    &audioFormat,
    AudioQueueCallback,
    NULL,                   // User data passed to the callback
    CFRunLoopGetCurrent(),  // Run loop on which the callback is invoked
    kCFRunLoopCommonModes,  // Run loop mode
    0,                      // Flags (reserved; must be 0)
    &queue
);
// status should be checked against noErr before continuing
// Allocate and prime three buffers before starting playback
for (int i = 0; i < 3; i++) {
    AudioQueueBufferRef buffer;
    AudioQueueAllocateBuffer(queue, 4096, &buffer); // 4096-byte buffer
    // Initial fill of the buffer (set buffer->mAudioDataByteSize to the number of valid bytes)
    // ...
    // Enqueue the buffer so the queue has data when it starts
    AudioQueueEnqueueBuffer(queue, buffer, 0, NULL);
}
// Start playback (NULL start time = as soon as possible)
AudioQueueStart(queue, NULL);
// When done: stop immediately, then release the queue and its buffers
AudioQueueStop(queue, true);
AudioQueueDispose(queue, true);
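The buffer-filling steps above are intentionally left as placeholders. As one illustration only (not part of the original example, and assuming the 16-bit stereo format defined earlier), a callback body that outputs silence could look like this:
#include <string.h> // for memset
// Hypothetical callback that fills each buffer with silence (zeroed samples)
static void SilenceCallback(void *inUserData, AudioQueueRef inQueue, AudioQueueBufferRef inBuffer) {
    memset(inBuffer->mAudioData, 0, inBuffer->mAudioDataBytesCapacity);
    inBuffer->mAudioDataByteSize = inBuffer->mAudioDataBytesCapacity; // number of valid bytes in the buffer
    AudioQueueEnqueueBuffer(inQueue, inBuffer, 0, NULL); // hand the buffer back to the queue
}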