iPhone网络音频的播放示例(摘自《iPhone Cool Projects》)

AudioPlayerAppDelegate.h

 

//
//  AudioPlayerAppDelegate.h
//

#import <UIKit/UIKit.h>
#import "AudioPlayer.h"

// Application delegate for the streaming-audio demo.  Owns the main window,
// the URL entry field, and the play/pause controls, and drives a single
// AudioPlayer built from the URL the user types in.
@interface AudioPlayerAppDelegate : NSObject <UIApplicationDelegate, UITextFieldDelegate, AudioPlayerDelegate> {
    UIWindow *window;
	IBOutlet UITextField *urlTextField;                       // where the user types the audio URL
	IBOutlet UIActivityIndicatorView *activityIndicatorView;  // spins while the stream is loading
	IBOutlet UIButton *pauseButton;                           // shown only while audio is playing
	IBOutlet UIButton *playButton;                            // shown only while audio is paused
	
	AudioPlayer *audioPlayer;                                 // player for the current URL, if any (retained)
}

@property (nonatomic, retain) IBOutlet UIWindow *window;

/*
 * Loads and plays the audio URL input by the user.
 */
- (IBAction)load;

/*
 * Pauses the currently playing audio.
 */
- (IBAction)pause;

/*
 * Resumes playing the current audio after pause.
 */
- (IBAction)play;

@end

AudioPlayerAppDelegate.m

 

//
//  AudioPlayerAppDelegate.m
//

#import "AudioPlayerAppDelegate.h"

@implementation AudioPlayerAppDelegate

@synthesize window;

// Forward declaration of the C interruption callback registered with the
// audio session below; defined at the bottom of this file.
void interruptionListener (void *inClientData, UInt32 inInterruptionState);

- (void)applicationDidFinishLaunching:(UIApplication *)application {
	// We need to initiate an audio session to play nice with Core Audio.
	// This will prevent the device from sleeping when locked, and allows
	// us to handle incoming phone calls and text messages gracefully.
	AudioSessionInitialize (
							NULL,                  // use the default (main) run loop
							NULL,                  // use the default run loop mode
							interruptionListener,  // a reference to your interruption callback
							self                   // userData
							);
	
	// MediaPlayback keeps audio running when the screen locks and ignores
	// the ring/silent switch -- what a music player wants.
	UInt32 sessionCategory = kAudioSessionCategory_MediaPlayback;
	AudioSessionSetProperty (kAudioSessionProperty_AudioCategory, sizeof (sessionCategory), &sessionCategory);
		
	// Hitting "Go" on the keyboard triggers -load, same as tapping Load.
	[urlTextField addTarget:self action:@selector(load) forControlEvents:UIControlEventEditingDidEndOnExit];
	urlTextField.text = @"http://neilmix.com/book/etude.mp3";
	[urlTextField becomeFirstResponder];
	[window makeKeyAndVisible];
}


- (void)dealloc {
    [window release];
	[urlTextField release];
	[activityIndicatorView release];
	[pauseButton release];
	[playButton release];
	[audioPlayer release];
    [super dealloc];
}

// The four show* methods animate the controls into one of the player's
// visual states: stopped, loading, playing, or paused.

- (void)showStopped {
	[UIView beginAnimations:nil context:nil];
	[activityIndicatorView stopAnimating];
	pauseButton.alpha = 0;
	playButton.alpha = 0;
	[UIView commitAnimations];
}

- (void)showLoading {
	[UIView beginAnimations:nil context:nil];
	[activityIndicatorView startAnimating];
	pauseButton.alpha = 0;
	playButton.alpha = 0;
	[UIView commitAnimations];
}

- (void)showPlaying {
	[UIView beginAnimations:nil context:nil];
	[activityIndicatorView stopAnimating];
	pauseButton.alpha = 1;
	playButton.alpha = 0;
	[UIView commitAnimations];
}

- (void)showPaused {
	[UIView beginAnimations:nil context:nil];
	[activityIndicatorView stopAnimating];
	pauseButton.alpha = 0;
	playButton.alpha = 1;
	[UIView commitAnimations];
}

- (BOOL)textFieldShouldReturn:(UITextField *)textField {
	return YES;
}

/*
 * Tears down any current player and starts streaming the URL in the
 * text field.  Malformed input is reported as a failed download rather
 * than being handed to the audio pipeline.
 */
- (IBAction)load {
	// NSURL returns nil for strings it cannot parse; bail out before
	// touching the audio session or the existing player.
	NSURL *url = [NSURL URLWithString:urlTextField.text];
	if (url == nil) {
		[self audioPlayerDownloadFailed:nil];
		return;
	}
	
	AudioSessionSetActive(YES);
	[audioPlayer cancel];
	[audioPlayer release];
	audioPlayer = [[AudioPlayer alloc] initPlayerWithURL:url delegate:self];
	[urlTextField resignFirstResponder];
	[self showLoading];
}

- (IBAction)pause {
	audioPlayer.paused = YES;
	[self showPaused];
}

- (IBAction)play {
	audioPlayer.paused = NO;
	[self showPlaying];
}

// AudioPlayerDelegate: the download could not be played.
- (void)audioPlayerDownloadFailed:(AudioPlayer *)audioPlayer {
	UIAlertView *alert = [[UIAlertView alloc] initWithTitle:nil message:@"Audio download failed" delegate:nil cancelButtonTitle:@"OK" otherButtonTitles:nil];
	[alert show];
	[alert autorelease];
	[self showStopped];
}

// AudioPlayerDelegate: audio is now audible.
- (void)audioPlayerPlaybackStarted:(AudioPlayer *)audioPlayer {
	[self showPlaying];
}

// AudioPlayerDelegate: playback ran to completion; give up the session.
- (void)audioPlayerPlaybackFinished:(AudioPlayer *)audioPlayer {
	AudioSessionSetActive(NO);
	[self showStopped];
}

/*
 * Audio session interruption callback (phone call, alarm, ...).  Pauses
 * playback when the interruption begins; re-activates the session when it
 * ends, leaving the player paused so the user can resume explicitly.
 */
void interruptionListener(void *userData, UInt32  interruptionState) {
	AudioPlayerAppDelegate *self = userData;
	if (interruptionState == kAudioSessionBeginInterruption) {
		[self pause];
		AudioSessionSetActive(NO);
	} else if (interruptionState == kAudioSessionEndInterruption) {
		AudioSessionSetActive(YES);
	}
}

@end

AudioPlayer.h

 

//
//  AudioPlayer.h
//

#import <Foundation/Foundation.h>

#import "AudioRequest.h"
#import "AudioFileStream.h"
#import "AudioQueue.h"

@protocol AudioPlayerDelegate;

// Coordinates the three stages of streaming playback: AudioRequest
// (network download), AudioFileStream (packet parsing) and AudioQueue
// (playback).  Acts as the delegate of all three and reports overall
// progress to its own AudioPlayerDelegate.
@interface AudioPlayer : NSObject <
	AudioRequestDelegate,
	AudioFileStreamDelegate,
	AudioQueueDelegate
> {
	id<AudioPlayerDelegate> delegate;   // not retained (standard delegate pattern)
	AudioRequest *request;              // downloads the raw bytes (retained)
	AudioFileStream *fileStream;        // parses bytes into packets (retained)
	AudioQueue *queue;                  // plays the packets (retained)
	BOOL audioIsReadyToPlay;            // set once the stream's format is known
	BOOL paused;                        // user-requested pause state
}

// Setting this pauses/resumes playback (or defers it until ready).
@property (nonatomic) BOOL paused;

// Designated initializer; the download starts immediately.
- (id)initPlayerWithURL:(NSURL *)url delegate:(id<AudioPlayerDelegate>) aDelegate;

/*
 * Cancels an audio request, guaranteeing that no further delegate messages are sent.
 */
- (void)cancel;
@end



// Callbacks an AudioPlayer sends to report the lifecycle of one request.
@protocol AudioPlayerDelegate<NSObject>
/*
 * Notifies the delegate that the requested file was not playable.
 */
- (void)audioPlayerDownloadFailed:(AudioPlayer *)audioPlayer;

/*
 * Notifies the delegate that playback of the requested file has begun.
 */
- (void)audioPlayerPlaybackStarted:(AudioPlayer *)audioPlayer;

/*
 * Notifies the delegate that playback of the requested file is complete.
 */
- (void)audioPlayerPlaybackFinished:(AudioPlayer *)audioPlayer;
@end

AudioPlayer.m

 

//
//  AudioPlayer.m
//

#import "AudioPlayer.h"


@implementation AudioPlayer

/*
 * Designated initializer.  Wires together the three collaborators --
 * AudioRequest (download), AudioFileStream (parsing), AudioQueue
 * (playback) -- with this object acting as the delegate of all three.
 * The download starts immediately.
 */
- (id)initPlayerWithURL:(NSURL *)url delegate:(id<AudioPlayerDelegate>) aDelegate {
	if ((self = [super init])) {
		// Plain assignment: delegates are not retained, which avoids a
		// retain cycle with the object owning this player.
		delegate = aDelegate;
		
		queue = [[AudioQueue alloc] initQueueWithDelegate:self];
		
		fileStream = [[AudioFileStream alloc] initFileStreamWithDelegate:self];
		[fileStream open];
		
		// Creating the request kicks off the download right away.
		request = [[AudioRequest alloc] initRequestWithURL:url delegate:self];
	}
	return self;
}

/*
 * Central failure path: tear everything down, then tell the delegate
 * the download (or playback setup) failed.
 */
- (void)error {
	[self cancel];
	[delegate audioPlayerDownloadFailed:self];
}

#pragma mark - AudioRequestDelegate

// Network bytes arrived; feed them to the parser.  A parse error is fatal.
- (void)audioRequest:(AudioRequest *)aRequest didReceiveData:(NSData *)data {
	if ([fileStream parseBytes:data] != noErr) {
		[self error];
	}
}

// Download finished.  If playback never became possible, the bytes were
// not playable audio.
- (void)audioRequestDidFinish:(AudioRequest *)aRequest {
	if (!audioIsReadyToPlay) {
		[self error];
	}
}

#pragma mark - AudioFileStreamDelegate

- (void)audioFileStream:(AudioFileStream *)stream foundMagicCookie:(NSData *)cookie {
	// if an error happens here, it may be recoverable so we let it slide...
	[queue setMagicCookie:cookie];
}

- (void)audioFileStream:(AudioFileStream *)stream isReadyToProducePacketsWithASBD:(AudioStreamBasicDescription)absd {
	// The stream now knows its format, so the queue can be configured.
	if ([queue setAudioStreamBasicDescription:absd] == noErr) {
		audioIsReadyToPlay = YES;
		// Honor a pause that was requested before the audio became ready.
		if (!paused) {
			[queue start];
		}
	} else {
		[self error];
	}
}

// Parsed packets arrived; copy them into a queue buffer and enqueue it.
- (void)audioFileStream:(AudioFileStream *)stream 
	  didProducePackets:(NSData *)packetData 
			  withCount:(UInt32)packetCount 
		andDescriptions:(AudioStreamPacketDescription *)packetDescriptions
{
	AudioQueueBufferRef bufferRef;
	OSStatus status = [queue allocateBufferWithData:packetData 
										packetCount:packetCount 
								 packetDescriptions:packetDescriptions 
									   outBufferRef:&bufferRef];
	if (status == noErr) {
		[queue enqueueBuffer:bufferRef];
	} else {
		[self error];
	}
}

#pragma mark - AudioQueueDelegate

- (void)audioQueue:(AudioQueue *)audioQueue 
  isDoneWithBuffer:(AudioQueueBufferRef)bufferRef
{
	// nothing to do -- the queue disposes of its buffers when it is disposed
}

- (void)audioQueuePlaybackIsStarting:(AudioQueue *)audioQueue {
	[delegate audioPlayerPlaybackStarted:self];
}

- (void)audioQueuePlaybackIsComplete:(AudioQueue *)audioQueue {
	[delegate audioPlayerPlaybackFinished:self];
}

#pragma mark - Pause handling

- (BOOL)paused {
	return paused;
}

- (void)setPaused:(BOOL)b {
	if (b == paused) {
		return;
	}

	paused = b;
	// If playback hasn't started yet, just remember the flag; it is
	// honored in -audioFileStream:isReadyToProducePacketsWithASBD:.
	if (!audioIsReadyToPlay) {
		return;
	}
	
	if (paused) {
		[queue pause];
	} else {
		[queue start];
	}
}

/*
 * Detaches this object as delegate of all collaborators (so no callbacks
 * arrive during or after teardown), then releases them.
 */
- (void)cancel {
	request.delegate = nil;
	[request cancel];
	[request release];
	
	fileStream.delegate = nil;
	[fileStream release];
	
	queue.delegate = nil;
	[queue release];

	// nil out our references so that any further operations
	// (such as cancel during dealloc) don't cause errors.
	request = nil;
	fileStream = nil;
	queue = nil;
}

- (void)dealloc {
	[self cancel];
	[super dealloc];
}

@end

AudioQueue.h

 

//
//  AudioQueue.h
//

#import <Foundation/Foundation.h>
#import <AudioToolbox/AudioToolbox.h>

@protocol AudioQueueDelegate;
// Thin Objective-C wrapper around the Audio Queue Services C API for
// playback of parsed audio packets.
@interface AudioQueue : NSObject {
	AudioQueueRef audioQueue;           // created lazily in -setAudioStreamBasicDescription:
	id<AudioQueueDelegate> delegate;
	AudioQueueBufferRef buffersHead;    // NOTE(review): head/tail appear unused in this file -- confirm
	AudioQueueBufferRef buffersTail;
}

// NOTE(review): declared retain, so setting AudioPlayer as delegate forms a
// retain cycle (AudioPlayer retains the queue; the queue retains AudioPlayer).
// The cycle is only broken by -[AudioPlayer cancel] nilling the delegate --
// confirm this is intentional before changing it.
@property (nonatomic,retain) id<AudioQueueDelegate> delegate;

// Designated initializer; the underlying AudioQueueRef is not created here.
- (id)initQueueWithDelegate:(id<AudioQueueDelegate>)delegate;

/*
 * Sets the ASBD for this AudioQueue.  See Core Audio documentation
 * for AudioQueueNewOutput and AudioQueueSetProperty for possible
 * return values.
 */
- (OSStatus)setAudioStreamBasicDescription:(AudioStreamBasicDescription)absd;

/*
 * Sets the magic cookie for this audio queue.  See Core Audio documentation
 * for AudioQueueSetProperty for possible return values.
 */
- (OSStatus)setMagicCookie:(NSData *)magicCookie;

/*
 * Starts playback of the audio queue.  See Core Audio documentation
 * for AudioQueueStart for possible return values.
 */
- (OSStatus)start;

/*
 * Pauses playback of the audio queue.  See Core Audio documentation
 * for AudioQueuePause for possible return values.
 */
- (OSStatus)pause;

/*
 * Allocates a buffer for future queueing on this AudioQueue.
 * See Core Audio documentation for AudioQueueAllocateBufferWithPacketDescriptions
 * for possible return values;
 */
- (OSStatus)allocateBufferWithData:(NSData *)data 
					   packetCount:(UInt32)packetCount 
				packetDescriptions:(AudioStreamPacketDescription *)packetDescriptions
					  outBufferRef:(AudioQueueBufferRef *)outBufferRef;

/*
 * Enqueues a buffer for future playback.  See Core Audio documentation for 
 * AudioQueueEnqueueBuffer for possible return values;
 */
- (OSStatus)enqueueBuffer:(AudioQueueBufferRef)bufferRef;

/*
 * Notifies this AudioQueue that no more audio will be queued, and that
 * the AudioQueue should stop once the last currently queued buffer 
 * is complete. See Core Audio documentation for AudioQueueFlush and
 * AudioStop for possible return values;
 */
- (OSStatus)endOfStream;
@end



// Callbacks an AudioQueue sends about buffer lifecycle and playback state.
@protocol AudioQueueDelegate<NSObject>
/*
 * Notifies the delegate that the AudioQueue is done enqueueing a
 * buffer and the buffer may now be released.
 */
- (void)audioQueue:(AudioQueue *)audioQueue isDoneWithBuffer:(AudioQueueBufferRef)bufferRef;

/*
 * Notifies the delegate that audio is now playing on this AudioQueue.
 */
- (void)audioQueuePlaybackIsStarting:(AudioQueue *)audioQueue;

/*
 * Notifies the delegate that audio playback has finished
 * on this AudioQueue.
 */
- (void)audioQueuePlaybackIsComplete:(AudioQueue *)audioQueue;
@end

 

AudioQueue.m

 

//
//  AudioQueue.m
//

#import "AudioQueue.h"
#import "AudioPlayerUtil.h"

@implementation AudioQueue

@synthesize delegate;

// C callbacks registered with the underlying AudioQueueRef; both simply
// forward to the AudioQueue object passed as user data.  Defined below.
void audioQueueOutputCallback (void *inUserData, AudioQueueRef inAQ, AudioQueueBufferRef inBuffer);
void propertyChangeIsRunning(void *data, AudioQueueRef inAQ, AudioQueuePropertyID inID);

/*
 * Designated initializer.  The underlying Core Audio queue is created
 * later, in -setAudioStreamBasicDescription:, because AudioQueueNewOutput
 * requires the stream's format.
 */
- (id)initQueueWithDelegate:(id<AudioQueueDelegate>)aDelegate {
	if ((self = [super init])) {
		self.delegate = aDelegate;
	}
	return self;
}

/*
 * Creates the output queue for the given format on the current run loop
 * and registers for is-running notifications (used to detect start/stop).
 */
- (OSStatus)setAudioStreamBasicDescription:(AudioStreamBasicDescription)absd {
	OSStatus status = AudioQueueNewOutput(&absd, 
										  audioQueueOutputCallback, 
										  self, 
										  CFRunLoopGetCurrent(), 
										  kCFRunLoopCommonModes, 
										  0, 
										  &audioQueue);
	if (!VERIFY_STATUS(status)) {
		return status;
	}
	status = AudioQueueAddPropertyListener (audioQueue, kAudioQueueProperty_IsRunning, propertyChangeIsRunning, self);
	VERIFY_STATUS(status);
	return status;
}

- (OSStatus)setMagicCookie:(NSData *)magicCookie {
	// NOTE(review): if this runs before -setAudioStreamBasicDescription:,
	// audioQueue is still NULL and the cookie is dropped; the caller treats
	// that as recoverable.
	return AudioQueueSetProperty(audioQueue, kAudioQueueProperty_MagicCookie, magicCookie.bytes, (UInt32)magicCookie.length);
}

- (OSStatus)start {
	return AudioQueueStart(audioQueue, NULL);
}

- (OSStatus)pause {
	return AudioQueuePause(audioQueue);
}

/*
 * Allocates a Core Audio buffer sized for the given packet data and copies
 * the bytes plus their packet descriptions into it.
 */
- (OSStatus)allocateBufferWithData:(NSData *)data 
					   packetCount:(UInt32)packetCount 
				packetDescriptions:(AudioStreamPacketDescription *)packetDescriptions
					  outBufferRef:(AudioQueueBufferRef *)outBufferRefPtr
{
	// Explicit (UInt32) casts: NSData.length is NSUInteger (64-bit), but the
	// queue APIs and buffer fields are UInt32.
	OSStatus status = AudioQueueAllocateBufferWithPacketDescriptions(self->audioQueue, (UInt32)data.length, packetCount, outBufferRefPtr);

	if (VERIFY_STATUS(status)) {
		AudioQueueBufferRef outBufferRef = *outBufferRefPtr;
		memcpy(outBufferRef->mAudioData, data.bytes, data.length);
		outBufferRef->mAudioDataByteSize = (UInt32)data.length;
		memcpy(outBufferRef->mPacketDescriptions, packetDescriptions, sizeof(AudioStreamPacketDescription) * packetCount);	
		outBufferRef->mPacketDescriptionCount = packetCount;
	}

	return status;
}

- (OSStatus)enqueueBuffer:(AudioQueueBufferRef)bufferRef {
	OSStatus status = AudioQueueEnqueueBuffer(audioQueue, bufferRef, 0, NULL);	
	VERIFY_STATUS(status);
	return status;
}

// Called by Core Audio when a buffer has been consumed; forward to delegate.
void audioQueueOutputCallback (void *inUserData, AudioQueueRef inAQ, AudioQueueBufferRef inBuffer) {
	AudioQueue *self = (AudioQueue *)inUserData;
	[self->delegate audioQueue:self isDoneWithBuffer:inBuffer];
}

/*
 * Flushes any buffered audio, then stops asynchronously (inImmediate=false)
 * so the last buffers still play out.
 */
- (OSStatus)endOfStream {
	OSStatus status = AudioQueueFlush(audioQueue);
	if (VERIFY_STATUS(status)) {
		status = AudioQueueStop(audioQueue, false);
		VERIFY_STATUS(status);
	}
	return status;
}

// Called by Core Audio when kAudioQueueProperty_IsRunning changes; a value
// of 0 means playback has fully stopped, anything else means it started.
void propertyChangeIsRunning(void *data, AudioQueueRef inAQ, AudioQueuePropertyID inID) {
	AudioQueue *self = (AudioQueue *)data;
	
	UInt32 result = 0;
	UInt32 size = sizeof(UInt32);
	OSStatus status = AudioQueueGetProperty (self->audioQueue, kAudioQueueProperty_IsRunning, &result, &size);
	if (VERIFY_STATUS(status) && result == 0) {
		[self->delegate audioQueuePlaybackIsComplete:self];
	} else {
		[self->delegate audioQueuePlaybackIsStarting:self];
	}
}

- (void)dealloc {
	if (audioQueue != NULL) {
		// true = dispose immediately, also freeing all queue-owned buffers.
		VERIFY_STATUS(AudioQueueDispose(audioQueue, true));
	}
	[delegate release];
	[super dealloc];
}

@end

AudioRequest.h

 

//
//  AudioRequest.h
//

#import <Foundation/Foundation.h>

@protocol AudioRequestDelegate;
// Downloads raw audio bytes over HTTP via NSURLConnection and streams
// them to its delegate as they arrive.
@interface AudioRequest : NSObject {
	NSURLConnection *connection;        // active download; nil once cancelled/finished
	id<AudioRequestDelegate> delegate;
}

// NOTE(review): declared retain; the owning AudioPlayer sets itself as
// delegate, so the cycle is only broken by -cancel / delegate=nil -- confirm.
@property (nonatomic, retain) id<AudioRequestDelegate> delegate;

// Designated initializer; the download starts immediately.
- (id)initRequestWithURL:(NSURL *)url delegate:(id<AudioRequestDelegate>)delegate;

/*
 * Cancels the request, guaranteeing that no further delegate messages will be sent.
 */
- (void)cancel;
@end

// Callbacks an AudioRequest sends as download data arrives and completes.
@protocol AudioRequestDelegate<NSObject>
/*
 * Notifies the delegate when we've received bytes from the network.
 */
- (void)audioRequest:(AudioRequest *)request didReceiveData:(NSData *)data;

/*
 * Notifies the delegate when there are no more bytes to deliver.
 */
- (void)audioRequestDidFinish:(AudioRequest *)request;
@end

AudioRequest.m

 

//
//  AudioRequest.m
//

#import "AudioRequest.h"


@implementation AudioRequest
@synthesize delegate;

// How long (in seconds) to wait for the server before giving up.
static CFTimeInterval kTimeoutInterval = 15;

/*
 * Designated initializer.  Starts an asynchronous NSURLConnection for the
 * given URL immediately; delegate callbacks arrive on the current run loop.
 */
- (id)initRequestWithURL:(NSURL *)url delegate:(id<AudioRequestDelegate>)aDelegate {
	if ((self = [super init])) {
		self.delegate = aDelegate;
		NSURLRequest *request = [NSURLRequest requestWithURL:url
												 cachePolicy:NSURLRequestUseProtocolCachePolicy
											 timeoutInterval:kTimeoutInterval];
		connection = [[NSURLConnection connectionWithRequest:request delegate:self] retain];
	}
	return self;
}

// NSURLConnection delegate: inspect the HTTP status before accepting data.
- (void)connection:(NSURLConnection *)aConnection didReceiveResponse:(NSURLResponse *)aResponse {
	if ([aResponse isKindOfClass:[NSHTTPURLResponse class]]) {
		NSHTTPURLResponse *response = (NSHTTPURLResponse *)aResponse;
		if (response.statusCode >= 400) {
			// statusCode is NSInteger: %ld with a (long) cast is correct on
			// both 32- and 64-bit builds (%i was wrong on 64-bit).
			NSLog(@"AudioRequest received error HTTP status code: %ld", (long)response.statusCode);
			[delegate audioRequestDidFinish:self];

			// prevent the delivery of any more bytes, which would not be audio bytes and
			// therefore could harm the audio subsystems.
			[self cancel];
		}
	}
}

// NSURLConnection delegate: forward each chunk of bytes to our delegate.
- (void)connection:(NSURLConnection *)aConnection didReceiveData:(NSData *)fromData {
	[delegate audioRequest:self didReceiveData:fromData];
}

// NSURLConnection delegate: network failure ends the request.
- (void)connection:(NSURLConnection *)aConnection didFailWithError:(NSError *)error {
	// NSError.code is NSInteger; see the format-specifier note above.
	NSLog(@"AudioRequest received NSError: %ld", (long)error.code);
	[self cancel];
	[delegate audioRequestDidFinish:self];
}

// NSURLConnection delegate: all bytes delivered successfully.
- (void)connectionDidFinishLoading:(NSURLConnection *)aConnection {
	[self cancel];
	[delegate audioRequestDidFinish:self];
}

/*
 * Stops the download (if any) and drops our reference to the connection.
 * Safe to call repeatedly; messaging nil is a no-op.
 */
- (void)cancel {
	[connection cancel];
	[connection release];
	connection = nil;
}

- (void)dealloc {
	if (connection) {
		[self cancel];
	}
	[delegate release];
	[super dealloc];
}

@end

AudioFileStream.h

 

//
//  AudioFileStream.h
//
// This is a very simple Objective-C wrapper around the
// AudioFileStream C API.
//

#import <Foundation/Foundation.h>
#import <AudioToolbox/AudioToolbox.h>

@protocol AudioFileStreamDelegate;
// Objective-C wrapper around the AudioFileStream C API: feed it raw bytes,
// and it reports magic cookies, the stream format, and parsed packets to
// its delegate.
@interface AudioFileStream : NSObject {
	AudioFileStreamID streamID;          // opaque parser handle from AudioFileStreamOpen
	id<AudioFileStreamDelegate> delegate;
	OSStatus callbackStatus;             // first error raised inside a parse callback
}

// NOTE(review): declared retain; the owning AudioPlayer sets itself as
// delegate, so the cycle is only broken by -cancel / delegate=nil -- confirm.
@property (nonatomic,retain) id<AudioFileStreamDelegate> delegate;

// Designated initializer; call -open before parsing.
- (id)initFileStreamWithDelegate:(id<AudioFileStreamDelegate>)delegate;

/*
 * Opens this file stream for parsing.
 * Returns an error code if an error occurs.  See documentation for 
 * AudioFileStreamOpen for possible errors.
 */
- (OSStatus)open;

/*
 * Parses bytes from this audio stream.
 * Delegate will be notified asynchronously of any magic cookie, 
 * AudioStreamBasicDescription, or packet data resulting from this
 * parsing call.  All asynchronous notifications will happen before
 * this method returns.
 *
 * Returns an error code if an error occurs.  See documentation for
 * AudioFileStreamParseBytes, AudioFileStreamGetPropertyInfo, or
 * AudioFileStreamGetProperty for possible errors.  Errors from this 
 * method are generally unexpected.  If one occurs, it is probably
 * best not to continue parsing with this AudioFileStream object.
 */
- (OSStatus)parseBytes:(NSData *)data;
@end

// Callbacks an AudioFileStream sends as parsing discovers stream metadata
// and audio packets.
@protocol AudioFileStreamDelegate<NSObject>
/*
 * Some audio formats have "magic cookies" which contain special
 * metadata about the stream that is specific to the audio format in
 * a non-generalizable way.  AudioQueue requires the magic cookie
 * for such audio formats to work correctly.  This method notifies
 * a delegate if a magic cookie is found when parsing an AudioFileStream.
 */
- (void)audioFileStream:(AudioFileStream *)stream foundMagicCookie:(NSData *)cookie;

/*
 * This method notifies a delegate when an AudioFileStream is about to
 * begin sending packets.  It is a signal to the delegate that the stream
 * is valid, and the delegate should now create an AudioQueue and prepare
 * it for queueing and playback.
 */
- (void)audioFileStream:(AudioFileStream *)stream isReadyToProducePacketsWithASBD:(AudioStreamBasicDescription)absd;

/*
 * This method notifies a delegate that new packets have been parsed from
 * the stream and are ready for queuing in an AudioQueue.
 */
- (void)audioFileStream:(AudioFileStream *)stream 
	  didProducePackets:(NSData *)packetData 
			  withCount:(UInt32)packetCount 
		andDescriptions:(AudioStreamPacketDescription *)packetDescriptions;
@end 

 AudioFileStream.m

 

//
//  AudioFileStream.m
//

#import "AudioFileStream.h"
#import "AudioPlayerUtil.h"

@implementation AudioFileStream

@synthesize delegate;

// These are declarations for callbacks used by our AudioFileStream.  In both cases,
// they simply forward the call onto the self object that created the file stream.
void propertyCallback(void *clientData, AudioFileStreamID stream, AudioFileStreamPropertyID property, UInt32 *ioFlags);
void packetCallback(void *clientData, UInt32 byteCount, UInt32 packetCount, const void *inputData, AudioStreamPacketDescription *packetDescriptions);

// Designated initializer; -open must be called before parsing.
- (id)initFileStreamWithDelegate:(id<AudioFileStreamDelegate>)aDelegate {
	if ((self = [super init])) {
		self.delegate = aDelegate;
	}
	return self;
}

- (OSStatus) open {
	// Open our file stream.  Our callback functions are implemented below.
	// We pass our self as clientData so that our callbacks simply message 
	// us when called, thus providing a simple Objective-C wrapper around
	// the C API.
	// We pass 0 as a fileTypeHint because CoreAudio is pretty good at
	// determining the fileType for us (and may ignore our hint anyway.)
	return AudioFileStreamOpen(self, 
							   propertyCallback, 
							   packetCallback, 
							   0, 
							   &streamID);
}

- (OSStatus)parseBytes:(NSData *)data {
	// Callbacks resulting from the parse may have errors,
	// so we'll use callbackStatus to keep track of those
	// errors and return as appropriate from this call.
	callbackStatus = noErr;

	// Our property and packet callbacks will
	// be called as a result of parsing bytes here.
	// (data.length is NSUInteger; the C API takes UInt32.)
	OSStatus status = AudioFileStreamParseBytes(streamID, 
												(UInt32)data.length, 
												data.bytes, 
												0);

	if (!VERIFY_STATUS(status)) {
		return status;
	}
	
	// Parsing happens synchronously, so any callbacks for this parse have 
	// already been called.  Rather than call our delegate back 
	// asynchronously with any errors, we'll instead return the error here.
	return callbackStatus;
}

- (void)notifyDelegateWithMagicCookie {	
	// This method is called when our propertyCallback is called
	// as a result of parsing bytes.
	
	// First, we need to find out how big our magic cookie is.
	UInt32 size;
	Boolean writeable;
	callbackStatus = AudioFileStreamGetPropertyInfo(streamID, 
													kAudioFileStreamProperty_MagicCookieData, 
													&size, 
													&writeable);
	if (!VERIFY_STATUS(callbackStatus)) {
		return;
	}
	
	// Now we get the actual magic cookie data and send it to our delegate.
	NSMutableData *data = [NSMutableData dataWithLength:size];
	callbackStatus = AudioFileStreamGetProperty(streamID, 
												kAudioFileStreamProperty_MagicCookieData, 
												&size, 
												data.mutableBytes);
	if (!VERIFY_STATUS(callbackStatus)) {
		return;
	}
	
	[delegate audioFileStream:self foundMagicCookie:data];
}

- (void)notifyDelegateWithASBD {
	// For AAC-PLUS, the returned audio may actually have multiple supported formats, support for which may
	// vary based on the device.  Therefore we need to do some juggling here to figure out what we support.
	
	// We want to get a list of formats this audio stream supports.
	// Before we can do that, we need to find the size of data we're trying to get.
	UInt32 formatListSize = 0;
	Boolean writeable;
	OSStatus status = AudioFileStreamGetPropertyInfo(streamID, 
													 kAudioFileStreamProperty_FormatList, 
													 &formatListSize, 
													 &writeable);
	
	// Only fetch the list if the size query succeeded; otherwise
	// formatListSize is garbage and we fall back to the DataFormat below.
	NSMutableData *listData = nil;
	AudioFormatListItem *formatList = NULL;
	if (status == noErr) {
		listData = [NSMutableData dataWithLength:formatListSize];
		status = AudioFileStreamGetProperty(streamID, 
											kAudioFileStreamProperty_FormatList, 
											&formatListSize, 
											[listData mutableBytes]);
		formatList = [listData mutableBytes];
	}
	
	AudioStreamBasicDescription asbd;
	// The formatList property isn't always supported, so an error isn't unexpected here.
	// Therefore, we won't call VERIFY_STATUS on this status code.
	if (status == noErr) {
		// now see which format this device supports best
		UInt32 chosen;
		UInt32 chosenSize = sizeof(UInt32);
		int formatCount = formatListSize/sizeof(AudioFormatListItem);
		// kAudioFormatProperty_FirstPlayableFormatFromList ('fpfl') returns
		// the index of the first format in the list this device can play.
		status = AudioFormatGetProperty (kAudioFormatProperty_FirstPlayableFormatFromList, 
										 formatListSize, 
										 formatList, 
										 &chosenSize, 
										 &chosen);
		if (VERIFY_STATUS(status)) {
			asbd = formatList[chosen].mASBD;
		} else {
			// the docs tell us to grab the last in the list because it's most compatible
			asbd = formatList[formatCount - 1].mASBD;
		}
	} else {
		// fall back to the stream's DataFormat
		UInt32 descriptionSize = sizeof(AudioStreamBasicDescription);
		callbackStatus = AudioFileStreamGetProperty(streamID, 
													kAudioFileStreamProperty_DataFormat, 
													&descriptionSize, 
													&asbd);
		if (!VERIFY_STATUS(callbackStatus)) {
			return;
		}
	}
	
	[delegate audioFileStream:self isReadyToProducePacketsWithASBD:asbd];
}

- (void)propertyDidChange:(AudioFileStreamPropertyID)property {
	if (callbackStatus != noErr) {
		// We had a previous error during the current parse.  We should
		// stop processing this parse.
		return;
	}
	
	// This method is called by our propertyCallback
	switch (property) {
		case kAudioFileStreamProperty_MagicCookieData:
			// Our stream contains a "magic cookie".  Magic cookies contain special
			// metadata about the stream that is specific to the audio format in
			// a non-generalizable way.  AudioQueue requires the magic cookie
			// for some audio formats to work correctly.
			[self notifyDelegateWithMagicCookie];
			break;
		case kAudioFileStreamProperty_ReadyToProducePackets:
			// Enough data has been read from our stream to know the audio format
			// and begin sending audio data to an audio queue.  Notify our delegate
			// of the AudioStreamBasicDescription of this stream so that our delegate
			// can create and stream to an AudioQueue.
			[self notifyDelegateWithASBD];
			break;
		default:
			break;
	}
}

- (void)didProducePackets:(AudioStreamPacketDescription *)packetDescriptions 
		withPacketCount:(UInt32)packetCount 
			   fromData:(const void *)inputData
		   andByteCount:(UInt32)byteCount 
{
	// We've received packets from our file stream.  Forward them onto our
	// delegate for queuing and playback.
	[delegate audioFileStream:self 
			didProducePackets:[NSData dataWithBytes:inputData length:byteCount]
					withCount:packetCount
			  andDescriptions:packetDescriptions];
}	

void propertyCallback(void *clientData,
					  AudioFileStreamID stream,
					  AudioFileStreamPropertyID property,
					  UInt32 *ioFlags)
{
	// forward the call onto the self object that created the file stream
	AudioFileStream *self = (AudioFileStream *)clientData;
	[self propertyDidChange:property];
}

void packetCallback(void *clientData,
					UInt32 byteCount,
					UInt32 packetCount,
					const void *inputData,
					AudioStreamPacketDescription *packetDescriptions)
{
	// forward the call onto the self object that created the file stream
	AudioFileStream *self = (AudioFileStream *)clientData;
	[self didProducePackets:packetDescriptions 
			withPacketCount:packetCount 
				   fromData:inputData 
			   andByteCount:byteCount];
}

- (void)dealloc {
	if (streamID != NULL) {
		OSStatus status = AudioFileStreamClose(streamID);
		VERIFY_STATUS(status);
	}
	[delegate release];
	[super dealloc];
}
@end

AudioPlayerUtil.h

 

/*
 *  AudioPlayerUtil.h
 */

#import <AudioToolbox/AudioToolbox.h>

// Wraps AudioPlayerVerifyStatus so each call site automatically logs its
// own file and line on failure.  Evaluates to YES when status == noErr.
#define VERIFY_STATUS(status) AudioPlayerVerifyStatus(status, __FILE__, __LINE__)

// Logs a Core Audio error (numeric value plus four-character code) with its
// source location; returns YES when status is noErr, NO otherwise.
BOOL AudioPlayerVerifyStatus(OSStatus status, char *file, int line);

AudioPlayerUtil.c

 

/*
 *  AudioPlayerUtil.c
 */

#import <Foundation/Foundation.h>
#import "AudioPlayerUtil.h"

/*
 * Checks a Core Audio status code.  On failure, logs the numeric value,
 * its four-character-code spelling, and the call site; returns whether
 * the status was noErr.
 */
BOOL AudioPlayerVerifyStatus(OSStatus status, char *file, int line) {
	BOOL succeeded = (status == noErr);
	if (!succeeded) {
		// Many OSStatus values are four-character codes; reading the bytes
		// high-to-low prints them in natural order on little-endian devices.
		char *code = (char *)&status;
		NSLog(@"error number: %i error code: %c%c%c%c at %s:%i", status, code[3], code[2], code[1], code[0], file, line);
	}
	// Logging every success is quite prolific, but useful when debugging.
	// It's off by default; if you hit a problem, uncomment the line below
	// to trace the path of execution.
	//NSLog(@"success at %s:%i", file, line);
	return succeeded;
}

 

 

 

 

 

 

 

posted @ 2011-01-25 23:57  andyzhshg  阅读(284)  评论(0)    收藏  举报