
Opening the camera and capturing the video stream on iOS 4 without private APIs

Step 1: Initialize the AVCaptureSession and add the input and output sources
#import <AVFoundation/AVFoundation.h>

// Create and configure a capture session and start it running
- (void)setupCaptureSession 
{
    NSError *error = nil;

    // Create the session
    AVCaptureSession *session = [[AVCaptureSession alloc] init];

    // Configure the session to produce lower resolution video frames, if your 
    // processing algorithm can cope. We'll specify medium quality for the
    // chosen device.
    session.sessionPreset = AVCaptureSessionPresetMedium;

    // Find a suitable AVCaptureDevice
    AVCaptureDevice *device = [AVCaptureDevice
                             defaultDeviceWithMediaType:AVMediaTypeVideo];

    // Create a device input with the device and add it to the session.
    AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:device 
                                                                    error:&error];
    if (!input) {
        // Handle the error appropriately; without an input there is nothing to capture.
        NSLog(@"deviceInputWithDevice:error: failed: %@", error);
        return;
    }
    [session addInput:input];

    // Create a VideoDataOutput and add it to the session
    AVCaptureVideoDataOutput *output = [[[AVCaptureVideoDataOutput alloc] init] autorelease];
    [session addOutput:output];

    // Configure your output.
    dispatch_queue_t queue = dispatch_queue_create("myQueue", NULL);
    [output setSampleBufferDelegate:self queue:queue];
    dispatch_release(queue);

    // Specify the pixel format
    output.videoSettings = 
                [NSDictionary dictionaryWithObject:
                    [NSNumber numberWithInt:kCVPixelFormatType_32BGRA] 
                    forKey:(id)kCVPixelBufferPixelFormatTypeKey];


    // If you wish to cap the frame rate to a known value, such as 15 fps, set 
    // minFrameDuration. (This property was deprecated after iOS 4; later SDKs
    // use AVCaptureConnection's videoMinFrameDuration instead.)
    output.minFrameDuration = CMTimeMake(1, 15);

    // Start the session running to start the flow of data
    [session startRunning];

    // Assign session to an ivar. The retaining setter now owns the session,
    // so balance the alloc above.
    [self setSession:session];
    [session release];
}
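
The code above assumes the surrounding class keeps the session in a retained property and adopts the sample-buffer delegate protocol. A minimal sketch of such a declaration, including teardown (the class name CameraViewController is a placeholder, not from the original post):

@interface CameraViewController : UIViewController
    <AVCaptureVideoDataOutputSampleBufferDelegate>
{
    AVCaptureSession *session;
}

// Retained so that [self setSession:] keeps the session alive
@property (nonatomic, retain) AVCaptureSession *session;

@end

@implementation CameraViewController

@synthesize session;

- (void)dealloc
{
    // Stop the capture pipeline before letting go of it
    [session stopRunning];
    [session release];
    [super dealloc];
}

@end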

Step 2: Implement the AVCaptureVideoDataOutputSampleBufferDelegate protocol method
// Delegate routine that is called when a sample buffer was written
- (void)captureOutput:(AVCaptureOutput *)captureOutput 
         didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer 
         fromConnection:(AVCaptureConnection *)connection
{ 
    // Note: this callback runs on the dispatch queue passed to
    // setSampleBufferDelegate:queue:, not on the main thread.
    // Create a UIImage from the sample buffer data
    UIImage *image = [self imageFromSampleBuffer:sampleBuffer];

    // < Add your code here that uses the image >

}
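
As a concrete example of what might go where the placeholder sits: since UIKit must only be touched on the main thread and this delegate runs on "myQueue", a typical use hands the frame to the UI via GCD. A hedged sketch, where imageView is an assumed UIImageView ivar, not part of the original sample:

// Hypothetical use of the captured frame; imageView is an assumed ivar.
// The block retains the autoreleased image until the main thread runs it.
dispatch_async(dispatch_get_main_queue(), ^{
    imageView.image = image;
});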

// Create a UIImage from sample buffer data
- (UIImage *) imageFromSampleBuffer:(CMSampleBufferRef) sampleBuffer 
{
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    // Lock the base address of the pixel buffer
    CVPixelBufferLockBaseAddress(imageBuffer,0);

    // Get the number of bytes per row for the pixel buffer
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer); 
    // Get the pixel buffer width and height
    size_t width = CVPixelBufferGetWidth(imageBuffer); 
    size_t height = CVPixelBufferGetHeight(imageBuffer); 

    // Create a device-dependent RGB color space
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB(); 
    if (!colorSpace) 
    {
        NSLog(@"CGColorSpaceCreateDeviceRGB failure");
        // Balance the lock taken above before bailing out
        CVPixelBufferUnlockBaseAddress(imageBuffer, 0);
        return nil;
    }

    // Get the base address of the pixel buffer
    void *baseAddress = CVPixelBufferGetBaseAddress(imageBuffer);
    // Get the data size for contiguous planes of the pixel buffer.
    size_t bufferSize = CVPixelBufferGetDataSize(imageBuffer); 

    // Create a Quartz direct-access data provider that uses data we supply.
    // The pixel data is NOT copied, so the resulting image is only safe to
    // use while the pixel buffer's memory is valid; draw or copy it promptly.
    CGDataProviderRef provider = CGDataProviderCreateWithData(NULL, baseAddress, bufferSize, 
                                                            NULL);
    // Create a bitmap image from data supplied by our data provider
    CGImageRef cgImage = 
        CGImageCreate(width,
                        height,
                        8,
                        32,
                        bytesPerRow,
                        colorSpace,
                        kCGImageAlphaNoneSkipFirst | kCGBitmapByteOrder32Little,
                        provider,
                        NULL,
                        true,
                        kCGRenderingIntentDefault);
    CGDataProviderRelease(provider);
    CGColorSpaceRelease(colorSpace);

    // Create and return an image object representing the specified Quartz image
    UIImage *image = [UIImage imageWithCGImage:cgImage];
    CGImageRelease(cgImage);

    CVPixelBufferUnlockBaseAddress(imageBuffer, 0);

    return image;
}
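
Because imageFromSampleBuffer: hard-codes the 32BGRA layout chosen in step one, a cheap defensive check at the top of the method (right after fetching imageBuffer) guards against a mismatched session configuration. A small sketch of such a guard, my own addition rather than part of the original sample:

    // Verify the buffer really is 32BGRA before interpreting its bytes
    OSType format = CVPixelBufferGetPixelFormatType(imageBuffer);
    if (format != kCVPixelFormatType_32BGRA) {
        NSLog(@"Unexpected pixel format: %u", (unsigned int)format);
        return nil;
    }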

That's it. You can now freely display and analyze the captured UIImage, with no need for private APIs.
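
If all you need is to show the live feed, converting every frame to a UIImage is unnecessary: AVCaptureVideoPreviewLayer can render the session directly. A brief sketch, assuming the session ivar from step one and a view controller's self.view:

// Display the live camera feed without per-frame conversion
AVCaptureVideoPreviewLayer *previewLayer =
    [AVCaptureVideoPreviewLayer layerWithSession:session];
previewLayer.frame = self.view.bounds;
previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
[self.view.layer addSublayer:previewLayer];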
Comments
#3  cwh643  2011-02-15
lanter206 wrote:
Friend, this code is the same as the one in the official AVCaptureSession class documentation, but it didn't work when I debugged it. Have you actually tested it? My current project involves this area.

I tried it here and had no problems; it works.
#2  lanter206  2011-01-13
Friend, this code is the same as the one in the official AVCaptureSession class documentation, but it didn't work when I debugged it. Have you actually tested it? My current project involves this area.
#1  handy.wang  2010-08-12
Hi dude. Can you share with me how to catch the stream from camera with private API in iOS3. THX.
