Camera: video stream data, preview, still capture, and zoom
Reposted from http://www.cnblogs.com/iCodePhone/p/3785283.html
What this implements:
1. Raw video stream frame data.
2. Zoom for both the preview and the captured photo, so what you see is what you get.
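The pipeline used below: an AVCaptureSession takes the back camera as an AVCaptureDeviceInput; an AVCaptureVideoDataOutput delivers each frame to a delegate, an AVCaptureVideoPreviewLayer shows the live preview, and an AVCaptureStillImageOutput takes the photo. Zoom is a 5x scale on the preview layer, optionally matched on the still-image connection via videoScaleAndCropFactor.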
Environment:
1. Xcode 5.1.1
2. Real device (iPhone 5, iOS 6.1.4)
//
// MCViewController.h
// MyCamera
#import <UIKit/UIKit.h>
#import <AVFoundation/AVFoundation.h> // AVFoundation for the capture session / video stream
@interface MCViewController : UIViewController<AVCaptureVideoDataOutputSampleBufferDelegate>
@property (strong, nonatomic) AVCaptureSession * captureSession; // the capture session
@property (strong, nonatomic) AVCaptureDeviceInput * videoInput; // holds the video (camera) input
@property (strong, nonatomic) AVCaptureStillImageOutput * stillImageOutput; // holds the still image output
@end
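The controller adopts AVCaptureVideoDataOutputSampleBufferDelegate so it can receive every captured video frame, and keeps strong references to the session, camera input, and still image output so they live as long as the view controller.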
//
// MCViewController.m
// MyCamera
#import "MCViewController.h"
@interface MCViewController ()
@end
@implementation MCViewController
#pragma mark - life cycle
- (void)viewWillAppear:(BOOL)animated
{
[super viewWillAppear:animated];
//Start the capture session
[self.captureSession startRunning];
}
- (void)viewWillDisappear:(BOOL)animated
{
[super viewWillDisappear:animated];
//Stop the capture session
[self.captureSession stopRunning];
}
- (void)viewDidLoad
{
[super viewDidLoad];
//Initialize the capture pipeline (video stream)
[self initAv];
//Add the capture (shutter) button
[self addCaptureButton];
}
#pragma mark - Capture pipeline setup
- (void) initAv
{
//1.1 Create the AVCaptureSession.
self.captureSession = [[AVCaptureSession alloc] init];
//1.2 Pick the input device; the default video device is the back camera.
AVCaptureDevice * device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
//1.3 Create an AVCaptureDeviceInput for that device and add it as the session's input.
//    Check that the input was created successfully before adding it to the session.
NSError * error = nil;
self.videoInput = [AVCaptureDeviceInput deviceInputWithDevice:device error:&error];
if (self.videoInput) {
[self.captureSession addInput:self.videoInput];
} else {
NSLog(@"input error : %@", error);
}
//4. Video frame data output: deliver each captured frame to the sample buffer delegate.
AVCaptureVideoDataOutput * output = [[AVCaptureVideoDataOutput alloc] init];
[self.captureSession addOutput:output];
dispatch_queue_t queue = dispatch_queue_create("myQueue", NULL);
[output setSampleBufferDelegate:self queue:queue];
//dispatch_release(queue);
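// Note (not in the original post): with an iOS 6+ deployment target, dispatch objects are
// managed by ARC, so dispatch_release(queue) is neither needed nor allowed here.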
// output.videoSettings = [NSDictionary dictionaryWithObject:
// [NSNumber numberWithInt:kCVPixelFormatType_32BGRA]
// forKey:(id)kCVPixelBufferPixelFormatTypeKey];
output.videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithInt:kCVPixelFormatType_32BGRA], (id)kCVPixelBufferPixelFormatTypeKey,
[NSNumber numberWithInt: 320], (id)kCVPixelBufferWidthKey,
[NSNumber numberWithInt: 240], (id)kCVPixelBufferHeightKey,
nil];
//output.minFrameDuration = CMTimeMake(1, 15);
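// Note (not in the original post): minFrameDuration on AVCaptureVideoDataOutput was
// deprecated in iOS 5; to cap the frame rate, set videoMinFrameDuration on the output's
// AVCaptureConnection instead.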
//2. Create the preview layer and attach it to the view.
AVCaptureVideoPreviewLayer * previewLayer = [AVCaptureVideoPreviewLayer layerWithSession:self.captureSession];
UIView * aView = self.view;
previewLayer.frame = CGRectMake(0, 0, self.view.frame.size.width, self.view.frame.size.height);
previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
[aView.layer addSublayer:previewLayer];
[previewLayer setAffineTransform:CGAffineTransformMakeScale(5.0, 5.0)]; // preview zoom: magnify the preview 5x
//3. Still image capture output
self.stillImageOutput = [[AVCaptureStillImageOutput alloc] init];
NSDictionary * stillImageOutputSettings = [[NSDictionary alloc] initWithObjectsAndKeys:AVVideoCodecJPEG,AVVideoCodecKey, nil];
[self.stillImageOutput setOutputSettings:stillImageOutputSettings];
[self.captureSession addOutput:self.stillImageOutput];
// //4. Still-capture zoom: set a matching scale/crop on the still image connection so the
// //   saved photo matches the magnified preview (see the helper sketch after this method).
// AVCaptureConnection * stillImageConnection = [self connectionWithMediaType:AVMediaTypeVideo fromConnections:[self.stillImageOutput connections]];
//
// [stillImageConnection setVideoScaleAndCropFactor:5.0];
}
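// A minimal sketch (not from the original post) of applying the still-capture zoom that is
// commented out above. It reuses connectionWithMediaType:fromConnections: below and clamps
// the factor to the connection's videoMaxScaleAndCropFactor. applyStillCaptureZoom: is a
// hypothetical helper; call it at the end of initAv, e.g. [self applyStillCaptureZoom:5.0],
// if you want the photo to match the 5x preview.
- (void)applyStillCaptureZoom:(CGFloat)desiredFactor
{
    // Find the video connection feeding the still image output.
    AVCaptureConnection * stillImageConnection = [self connectionWithMediaType:AVMediaTypeVideo fromConnections:[self.stillImageOutput connections]];
    // Never exceed the maximum factor the connection supports.
    CGFloat factor = MIN(desiredFactor, stillImageConnection.videoMaxScaleAndCropFactor);
    [stillImageConnection setVideoScaleAndCropFactor:factor];
}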
#pragma mark -
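// Helper: walk each connection's input ports and return the first connection that carries
// the given media type (the same pattern used in Apple's AVCam sample code).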
- (AVCaptureConnection *)connectionWithMediaType:(NSString *)mediaType fromConnections:(NSArray *)connections
{
for ( AVCaptureConnection *connection in connections )
{
for ( AVCaptureInputPort *port in [connection inputPorts] )
{
if ( [[port mediaType] isEqual:mediaType] )
{
return connection;
}
}
}
return nil;
}
#pragma mark - delegate - AVCaptureVideoDataOutputSampleBufferDelegate
- (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
fromConnection:(AVCaptureConnection *)connection
{
//One frame of the video stream as a UIImage
UIImage * image = [self imageFromSampleBuffer:sampleBuffer];
NSLog(@"video frame, width: %f height: %f", image.size.width, image.size.height);
//iPhone 4: 720 x 1280
//iPhone 5: 1080 x 1920
//Or: one frame of the video stream as NSData
//NSData * imageData = UIImageJPEGRepresentation(image, 0.5);
//Code...
}
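// Note (not in the original post): this delegate callback runs on the "myQueue" dispatch
// queue configured in initAv, not on the main thread. Any UIKit work with the frame must be
// dispatched back to the main queue, for example (someImageView is hypothetical):
// dispatch_async(dispatch_get_main_queue(), ^{
//     self.someImageView.image = image;
// });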
// Create a UIImage from sample buffer data
- (UIImage *) imageFromSampleBuffer:(CMSampleBufferRef) sampleBuffer
{
// Get a CMSampleBuffer's Core Video image buffer for the media data
CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
// Lock the base address of the pixel buffer
CVPixelBufferLockBaseAddress(imageBuffer, 0);
// Get the base address of the pixel buffer
void *baseAddress = CVPixelBufferGetBaseAddress(imageBuffer);
// Get the number of bytes per row for the pixel buffer
size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
// Get the pixel buffer width and height
size_t width = CVPixelBufferGetWidth(imageBuffer);
size_t height = CVPixelBufferGetHeight(imageBuffer);
// Create a device-dependent RGB color space
CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
// Create a bitmap graphics context with the sample buffer data
CGContextRef context = CGBitmapContextCreate(baseAddress, width, height, 8,
bytesPerRow, colorSpace, kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
// Create a Quartz image from the pixel data in the bitmap graphics context
CGImageRef quartzImage = CGBitmapContextCreateImage(context);
// Unlock the pixel buffer
CVPixelBufferUnlockBaseAddress(imageBuffer,0);
// Free up the context and color space
CGContextRelease(context);
CGColorSpaceRelease(colorSpace);
// Create an image object from the Quartz image
//UIImage *image = [UIImage imageWithCGImage:quartzImage];
UIImage *image = [UIImage imageWithCGImage:quartzImage scale:1.0f orientation:UIImageOrientationRight];
// Release the Quartz image
CGImageRelease(quartzImage);
return (image);
}
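// Note (not in the original post): the camera delivers buffers in the sensor's native
// landscape orientation, so when the device is held in portrait the raw bitmap is rotated
// 90 degrees. Wrapping it with UIImageOrientationRight (instead of the commented-out
// plain imageWithCGImage:) makes the resulting UIImage display upright in portrait.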
#pragma mark - Capture button and tap handler
- (void)addCaptureButton
{
CGRect frame = CGRectMake(0, 0, 100, 100);
UIButton * btn = [UIButton buttonWithType:UIButtonTypeRoundedRect];
btn.frame = frame;
[btn setTitle:@"拍照" forState:UIControlStateNormal];
btn.backgroundColor = [UIColor clearColor];
btn.tag = 1111;
[btn addTarget:self action:@selector(onClickCaptureButton:) forControlEvents:UIControlEventTouchUpInside];
[self.view addSubview:btn];
}
-(IBAction)onClickCaptureButton:(id)sender
{
[self takePicture];
}
#pragma mark - Save the image to the photo album
- (void)saveImageToPhotos:(UIImage*)savedImage
{
UIImageWriteToSavedPhotosAlbum(savedImage, self, @selector(image:didFinishSavingWithError:contextInfo:), NULL);
}
// Completion callback for UIImageWriteToSavedPhotosAlbum; the selector must have exactly this signature.
- (void)image: (UIImage *) image didFinishSavingWithError: (NSError *) error contextInfo: (void *) contextInfo
{
NSString * msg = nil;
if (error != nil)
{
msg = @"Failed to save the image";
}
else
{
msg = @"Image saved successfully";
}
UIAlertView * alert = [[UIAlertView alloc] initWithTitle:@"Save Result"
message:msg
delegate:nil
cancelButtonTitle:@"OK"
otherButtonTitles:nil];
[alert show];
}
#pragma mark - Taking the photo
//Capture a still image
- (void) takePicture
{
//1. Get the still image connection and force portrait orientation if supported.
AVCaptureConnection * stillImageConnection = [self.stillImageOutput.connections objectAtIndex:0];
if ([stillImageConnection isVideoOrientationSupported]) {
[stillImageConnection setVideoOrientation:AVCaptureVideoOrientationPortrait];
}
//2. Capture asynchronously; the completion handler receives a JPEG sample buffer.
[self.stillImageOutput captureStillImageAsynchronouslyFromConnection:stillImageConnection completionHandler:^(CMSampleBufferRef imageDataSampleBuffer, NSError *error) {
if (imageDataSampleBuffer != NULL)
{
//Convert the sample buffer to JPEG data, then to a UIImage
NSData * imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageDataSampleBuffer];
UIImage * image = [[UIImage alloc] initWithData:imageData];
//Save the image to the photo album
[self saveImageToPhotos:image];
}
else
{
NSLog(@"Error capturing still image %@", error);
}
}];
}
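// Note (not in the original post): AVCaptureStillImageOutput was deprecated in iOS 10 in
// favor of AVCapturePhotoOutput; the code above targets the iOS 6/7-era API used in the post.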
@end