第一步:初始化 AVCaptureSession,添加输入、输出源
#import <AVFoundation/AVFoundation.h>
// Create and configure a capture session and start it running
// Create and configure a capture session and start it running.
// The session is stored via -setSession: so it outlives this method.
- (void)setupCaptureSession
{
    NSError *error = nil;

    // Create the session.
    AVCaptureSession *session = [[AVCaptureSession alloc] init];

    // Configure the session to produce lower resolution video frames, if your
    // processing algorithm can cope. We'll specify medium quality for the
    // chosen device.
    session.sessionPreset = AVCaptureSessionPresetMedium;

    // Find a suitable AVCaptureDevice (the system default video camera).
    AVCaptureDevice *device =
        [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];

    // Create a device input with the device and add it to the session.
    AVCaptureDeviceInput *input =
        [AVCaptureDeviceInput deviceInputWithDevice:device error:&error];
    if (!input) {
        // Bail out instead of falling through: passing nil to -addInput:
        // throws NSInvalidArgumentException.
        NSLog(@"Failed to create capture device input: %@", error);
        return;
    }
    if ([session canAddInput:input]) {
        [session addInput:input];
    }

    // Create a VideoDataOutput and add it to the session.
    AVCaptureVideoDataOutput *output =
        [[[AVCaptureVideoDataOutput alloc] init] autorelease];
    if ([session canAddOutput:output]) {
        [session addOutput:output];
    }

    // Deliver sample buffers on a private serial queue (the delegate callback
    // therefore runs OFF the main thread).
    dispatch_queue_t queue = dispatch_queue_create("myQueue", NULL);
    [output setSampleBufferDelegate:self queue:queue];
    dispatch_release(queue); // the output retains the queue (pre-iOS 6 / MRC)

    // Specify the pixel format: BGRA so frames can be handed straight to
    // CoreGraphics in -imageFromSampleBuffer:.
    output.videoSettings =
        [NSDictionary dictionaryWithObject:
                          [NSNumber numberWithInt:kCVPixelFormatType_32BGRA]
                                    forKey:(id)kCVPixelBufferPixelFormatTypeKey];

    // Cap the frame rate to 15 fps.
    // NOTE(review): minFrameDuration was deprecated in iOS 5; on modern SDKs
    // use AVCaptureConnection.videoMinFrameDuration or the device's
    // activeVideoMinFrameDuration instead. Kept for this MRC-era sample.
    output.minFrameDuration = CMTimeMake(1, 15);

    // Start the session running to start the flow of data.
    [session startRunning];

    // Assign session to an ivar.
    [self setSession:session];
}
第二步:实现AVCaptureVideoDataOutputSampleBufferDelegate协议方法
// Delegate routine that is called when a sample buffer was written
// Delegate routine that is called whenever a new video frame is captured.
// Runs on the queue passed to -setSampleBufferDelegate:queue:, NOT the main
// thread — dispatch to the main queue before touching UIKit.
- (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
       fromConnection:(AVCaptureConnection *)connection
{
    // Create a UIImage from the sample buffer data.
    UIImage *image = [self imageFromSampleBuffer:sampleBuffer];

    // TODO: add your code here that uses the image.
    // (The original sample left a bare "< Add your code here >" placeholder,
    // which does not compile.)
    (void)image;
}
// Create a UIImage from sample buffer data
// Create a UIImage from sample buffer data.
// Expects the buffer to be kCVPixelFormatType_32BGRA (as configured in
// -setupCaptureSession). Returns an autoreleased image, or nil on failure.
//
// Fixes over the original sample:
//  1. The early-return path no longer leaves the pixel buffer locked.
//  2. The image is rendered via CGBitmapContextCreateImage, which COPIES the
//     pixels. The original wrapped the buffer's base address in a
//     CGDataProvider, so the returned UIImage referenced memory that was
//     unlocked (and recyclable) before it was ever drawn (see Apple QA1702).
- (UIImage *) imageFromSampleBuffer:(CMSampleBufferRef) sampleBuffer
{
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);

    // Lock the base address of the pixel buffer while we read it.
    CVPixelBufferLockBaseAddress(imageBuffer, 0);

    // Geometry of the pixel buffer.
    void *baseAddress = CVPixelBufferGetBaseAddress(imageBuffer);
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
    size_t width = CVPixelBufferGetWidth(imageBuffer);
    size_t height = CVPixelBufferGetHeight(imageBuffer);

    // Create a device-dependent RGB color space.
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    if (!colorSpace)
    {
        NSLog(@"CGColorSpaceCreateDeviceRGB failure");
        // Must balance the lock taken above before bailing out.
        CVPixelBufferUnlockBaseAddress(imageBuffer, 0);
        return nil;
    }

    // Wrap the BGRA pixel data in a bitmap context...
    CGContextRef context =
        CGBitmapContextCreate(baseAddress,
                              width,
                              height,
                              8,               // bits per component
                              bytesPerRow,
                              colorSpace,
                              kCGImageAlphaNoneSkipFirst | kCGBitmapByteOrder32Little);
    CGColorSpaceRelease(colorSpace);
    if (!context)
    {
        NSLog(@"CGBitmapContextCreate failure");
        CVPixelBufferUnlockBaseAddress(imageBuffer, 0);
        return nil;
    }

    // ...and snapshot it. CGBitmapContextCreateImage copies the pixels, so
    // the CGImage stays valid after the buffer is unlocked.
    CGImageRef cgImage = CGBitmapContextCreateImage(context);
    CGContextRelease(context);

    // Safe to unlock now that the pixels have been copied.
    CVPixelBufferUnlockBaseAddress(imageBuffer, 0);

    // Create and return an image object representing the Quartz image.
    UIImage *image = [UIImage imageWithCGImage:cgImage];
    CGImageRelease(cgImage);

    return image;
}
好了,现在就可以自由的显示和分析获取的UIImage了,再也不用使用私有API了
分享到:
相关推荐
检查iOS源代码是否包含私有API工具,在提交AppStore审核前用这个工具检查一下十分必要,减少被拒的概率。
IOS7最新私有API,object-c全
这是最新的ios 6私有api头文件 ,里边包含了各个privateframework的类
ios应用源码之获取本地视频库和获取摄像头视频流 videoupload
使用私有API实现蓝牙功能 实现打开关闭,给出提示,如果需要搜索周边蓝牙等可以跟到程序里看相关API。 不过是私有API,请慎用噢,使用了私有API的IOS APP是不能通过APP store审核的。
IOS应用源码之获取本地视频库和获取摄像头视频流 VideoUpload .zip
IOS 私有API
ios的私有的api,希望大家多看看,ios的私有的api。
基于IOS7 下的 私有API 。从网上收集的。
如何从XCode安装目录中导出私有Api的头文件 mac 10.9.1上执行成功
通过H5 打开 android IOS 摄像头, 显示, 拍照,
获取本地视频库和获取摄像头视频流 VideoUpload.zip
iOS 摄像头捕获视频 将视频转化为帧 形成images 进行连续捕获
NULL 博文链接:https://sinaier.iteye.com/blog/1273110
苹果虚拟摄像头|无人直播|虚拟相机deb|硬改手机摄像头|搬运去重|tiktok抖音搬运直播ios无人直播虚拟视频虚拟相机deb文件ios虚拟视频无人直播-appel6键deb最新版摄像头模块,不限时长,高清不卡顿!ios虚拟视频无人...
unity使用webcam调用移动端摄像头适配安卓和ios 源码和安卓apk
iOS 开发中使用 UIImagePickerController 调用 摄像头,相机的Demo;
IOS应用源码——获取本地视频库和获取摄像头视频流 VideoUpload.zip
RuntimeBrowser查看iOS系统私有API-附件资源
FFmpeg-X264-Encode-for-iOS, 利用FFmpeg x264将iOS摄像头实时视频流编码为h264文件