#import "ViewController.h"
#import <AVFoundation/AVFoundation.h>
@interface ViewController ()<AVCaptureVideoDataOutputSampleBufferDelegate>{
AVCaptureSession *captureSession;
AVCaptureDevice *captureDevice;
AVCaptureVideoPreviewLayer *previewLayer;
UIImage *resultImage;
UIImageView *previewIV; // referenced in captureOutput below
BOOL isStart;
}
@end
@implementation ViewController
- (void)viewDidLoad {
[super viewDidLoad];
isStart = NO;
[self isStartTrue];
captureSession = [[AVCaptureSession alloc]init];
captureDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
captureSession.sessionPreset = AVCaptureSessionPresetLow;
NSArray *devices = [AVCaptureDevice devices];
for (AVCaptureDevice *device in devices) {
if ([device hasMediaType:AVMediaTypeVideo]) {
if (device.position == AVCaptureDevicePositionFront) {
captureDevice = device;
if (captureDevice != nil) {
NSLog(@"Capture Device found");
[self beginSession];
}
}
}
}
// Do any additional setup after loading the view, typically from a nib.
}
-(void) isStartTrue {
isStart = YES;
}
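// Attach the camera input and a BGRA video-data output, add the preview layer, and start the session.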
-(void)beginSession {
AVCaptureDeviceInput *captureDeviceInput = [[AVCaptureDeviceInput alloc]initWithDevice:captureDevice error:nil];
[captureSession addInput:captureDeviceInput];
AVCaptureVideoDataOutput *output = [[AVCaptureVideoDataOutput alloc]init];
dispatch_queue_t cameraQueue;
cameraQueue = dispatch_queue_create("cameraQueue", DISPATCH_QUEUE_SERIAL);
[output setSampleBufferDelegate:self queue:cameraQueue];
NSDictionary *videoSettings = [[NSDictionary alloc] initWithObjectsAndKeys:[NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA],(__bridge id)kCVPixelBufferPixelFormatTypeKey, nil]; // ARC requires a bridged cast for the CF key
output.videoSettings = videoSettings;
[captureSession addOutput:output];
previewLayer = [[AVCaptureVideoPreviewLayer alloc]initWithSession:captureSession];
previewLayer.videoGravity = AVLayerVideoGravityResizeAspect;
previewLayer.frame = self.view.bounds;
[self.view.layer addSublayer:previewLayer];
[captureSession startRunning];
}
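// Sample-buffer delegate callback; runs on cameraQueue for every captured frame.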
-(void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection{
if (isStart) {
resultImage = [self sampleBufferToImage:sampleBuffer];
CIContext *context = [CIContext contextWithOptions:[NSDictionary dictionaryWithObject:[NSNumber numberWithBool:YES] forKey:kCIContextUseSoftwareRenderer]];
CIDetector *detector = [CIDetector detectorOfType:CIDetectorTypeFace context:context options:[NSDictionary dictionaryWithObject:CIDetectorAccuracyHigh forKey:CIDetectorAccuracy]];
CIImage *ciImage = [CIImage imageWithCGImage:resultImage.CGImage];
dispatch_async(dispatch_get_main_queue(), ^{
previewIV.image = resultImage;
});
NSArray *results = [detector featuresInImage:ciImage options:[NSDictionary dictionaryWithObject:[NSNumber numberWithInt:6] forKey:CIDetectorImageOrientation]]; // 6 = kCGImagePropertyOrientationRight, matching the UIImageOrientationRight frames
for (CIFaceFeature *face in results) {
CGImageRef faceRef = [context createCGImage:ciImage fromRect:face.bounds];
UIImage *faceImage = [UIImage imageWithCGImage:faceRef scale:1.0 orientation:UIImageOrientationRight];
CGImageRelease(faceRef); // createCGImage follows the Create rule, so release it
NSLog(@" ====%@", NSStringFromCGRect(face.bounds));
}
}
}
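// Convert the BGRA pixel buffer to a UIImage by wrapping its base address in a CGBitmapContext.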
-(UIImage *)sampleBufferToImage:(CMSampleBufferRef)sampleBuffer{
CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
CVPixelBufferLockBaseAddress(imageBuffer, kCVPixelBufferLock_ReadOnly); // the base address is only valid while the buffer is locked
void *baseAddress = CVPixelBufferGetBaseAddress(imageBuffer);
size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
size_t width = CVPixelBufferGetWidth(imageBuffer);
size_t height = CVPixelBufferGetHeight(imageBuffer);
CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
int bitsPerComponent = 8;
CGContextRef context = CGBitmapContextCreate(baseAddress, width, height, bitsPerComponent, bytesPerRow, colorSpace, (kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst));
CGImageRef imageRef = CGBitmapContextCreateImage(context);
CVPixelBufferUnlockBaseAddress(imageBuffer, kCVPixelBufferLock_ReadOnly);
UIImage *result = [[UIImage alloc]initWithCGImage:imageRef scale:1.0 orientation:UIImageOrientationRight];
CGImageRelease(imageRef);
CGContextRelease(context);
CGColorSpaceRelease(colorSpace);
return result;
}
- (void)didReceiveMemoryWarning {
[super didReceiveMemoryWarning];
[captureSession stopRunning];
// Dispose of any resources that can be recreated.
}
@end
怪我咯 2017-04-17 17:40:34
I asked for help in various communities and finally got it working. Here is the complete code; the parts you don't need can be deleted. The key pieces are still the sampleBufferToImage method and the captureOutput method. (A note on camera permissions follows the code.)
#import "ViewController.h"
#import <AVFoundation/AVFoundation.h>
#import "FaceppLocalDetectorSDK/FaceppLocalDetector.h"
#import "FaceppSDK_ARC/FaceppAPI.h"
@interface ViewController ()<AVCaptureVideoDataOutputSampleBufferDelegate>{
AVCaptureSession *captureSession;
AVCaptureDevice *captureDevice;
AVCaptureVideoPreviewLayer *previewLayer;
FaceppLocalDetector *detector;
UIImage *resultImage;
UIImage *sample;
BOOL isStart;
UIImageView *screenShot;
UIButton *photo;
UIImageView *ok;
UIImageView *circle;
UIImageView *person;
BOOL getSample;
}
@end
@implementation ViewController
- (void)viewDidLoad {
[super viewDidLoad];
getSample = NO;
sample = [[UIImage alloc]init];
[self initLocalDetector];
isStart = NO;
[self isStartTrue];
captureSession = [[AVCaptureSession alloc]init];
captureDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
captureSession.sessionPreset = AVCaptureSessionPresetMedium;
NSArray *devices = [AVCaptureDevice devices];
for (AVCaptureDevice *device in devices) {
if ([device hasMediaType:AVMediaTypeVideo]) {
if (device.position == AVCaptureDevicePositionFront) {
captureDevice = device;
if (captureDevice != nil) {
NSLog(@"Capture Device found");
[self beginSession];
}
}
}
}
[self initUI];
// Do any additional setup after loading the view, typically from a nib.
}
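// Configure the Face++ local detector: tracking off, minimum face size 20, high accuracy.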
-(void)initLocalDetector {
NSDictionary *option = [NSDictionary dictionaryWithObjects:[NSArray arrayWithObjects:[NSNumber numberWithBool:NO],[NSNumber numberWithInt:20],FaceppDetectorAccuracyHigh, nil] forKeys:[NSArray arrayWithObjects:FaceppDetectorTracking,FaceppDetectorMinFaceSize,FaceppDetectorAccuracy, nil]];
detector = [FaceppLocalDetector detectorOfOptions:option andAPIKey:@"8577edf0b2176bd9964fdfa8c9203659"];
}
-(void) isStartTrue {
isStart = YES;
}
-(void)beginSession {
AVCaptureDeviceInput *captureDeviceInput = [[AVCaptureDeviceInput alloc]initWithDevice:captureDevice error:nil];
[captureSession addInput:captureDeviceInput];
AVCaptureVideoDataOutput *output = [[AVCaptureVideoDataOutput alloc]init];
dispatch_queue_t cameraQueue;
cameraQueue = dispatch_queue_create("cameraQueue", DISPATCH_QUEUE_SERIAL);
[output setSampleBufferDelegate:self queue:cameraQueue];
NSDictionary *videoSettings = [[NSDictionary alloc] initWithObjectsAndKeys:[NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA],(__bridge id)kCVPixelBufferPixelFormatTypeKey, nil]; // ARC requires a bridged cast for the CF key
output.videoSettings = videoSettings;
[captureSession addOutput:output];
previewLayer = [[AVCaptureVideoPreviewLayer alloc]initWithSession:captureSession];
previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
previewLayer.frame = self.view.bounds;
[self.view.layer addSublayer:previewLayer];
[captureSession startRunning];
}
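// Build the overlay UI: screenshot thumbnail, OK badge, person outline, status circle, and the shutter button (disabled until a face is found).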
-(void)initUI {
screenShot = [[UIImageView alloc]initWithFrame:CGRectMake(0, 0, 110, 177.5)];
ok = [[UIImageView alloc]initWithFrame:CGRectMake(self.view.bounds.size.width/2 +23, self.view.bounds.size.height-73, 20, 20)];
person = [[UIImageView alloc]initWithFrame:CGRectMake(-70, self.view.bounds.size.height/2-180, 460, 540)];
person.image = [UIImage imageNamed:@"redperson"];
ok.image = [UIImage imageNamed:@"ok"];
ok.hidden = YES;
circle = [[UIImageView alloc]initWithFrame:CGRectMake(self.view.bounds.size.width/2 - 40, self.view.bounds.size.height-73, 80, 80)];
circle.image = [UIImage imageNamed:@"redcircle"];
photo = [[UIButton alloc]initWithFrame:CGRectMake(self.view.bounds.size.width/2 - 25, self.view.bounds.size.height-55, 50, 45)];
[photo setBackgroundImage:[UIImage imageNamed:@"redphoto"] forState:UIControlStateNormal];
[photo addTarget:self action:@selector(takePicture) forControlEvents:UIControlEventTouchUpInside];
photo.enabled = NO;
[self.view addSubview:photo];
[self.view addSubview:ok];
[self.view bringSubviewToFront:ok];
[self.view bringSubviewToFront:photo];
[self.view addSubview:screenShot];
[self.view addSubview:person];
[self.view bringSubviewToFront:person];
[self.view bringSubviewToFront:screenShot];
[self.view addSubview:circle];
[self.view bringSubviewToFront:circle];
}
-(void)takePicture{
getSample = YES;
}
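// Per-frame pipeline: convert the sample buffer to a UIImage, bake the orientation into the pixels, run the local detector, then switch the UI between the red (not ready) and blue (ready) states.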
-(void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection{
if (isStart) {
UIImage *image = [self sampleBufferToImage:sampleBuffer];
resultImage = [self fixOrientation:image];
FaceppLocalResult *result = [detector detectWithImage:resultImage];
for (FaceppLocalFace *face in result.faces) {
// NSLog(@"=========%@", NSStringFromCGRect(face.bounds));
}
if (result.faces.count == 1) {
if (getSample) {
sample = resultImage;
getSample = NO; // consume the one-shot flag set by takePicture
}
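// Exactly one face in frame: enable the shutter and switch the overlay to the blue state on the main thread.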
dispatch_sync(dispatch_get_main_queue(), ^{
photo.enabled = YES;
ok.hidden = NO;
screenShot.image = sample;
circle.image = [UIImage imageNamed:@"bluecircle"];
person.image = [UIImage imageNamed:@"blueperson"];
[photo setBackgroundImage:[UIImage imageNamed:@"bluephoto"] forState:UIControlStateNormal];
});
}
else{
dispatch_sync(dispatch_get_main_queue(), ^{
photo.enabled = NO;
ok.hidden = YES;
circle.image = [UIImage imageNamed:@"redcircle"];
person.image = [UIImage imageNamed:@"redperson"];
[photo setBackgroundImage:[UIImage imageNamed:@"redphoto"] forState:UIControlStateNormal];
});
}
}
}
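// Key method #1: CMSampleBufferRef -> CIImage -> CGImage -> UIImage, tagged UIImageOrientationLeftMirrored to match the front camera's sensor orientation.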
-(UIImage *)sampleBufferToImage:(CMSampleBufferRef)sampleBuffer{
CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
CIImage *ciImage = [CIImage imageWithCVPixelBuffer:imageBuffer];
CIContext *temporaryContext = [CIContext contextWithOptions:nil];
CGImageRef videoImage = [temporaryContext createCGImage:ciImage fromRect:CGRectMake(0, 0, CVPixelBufferGetWidth(imageBuffer), CVPixelBufferGetHeight(imageBuffer))];
UIImage *result = [[UIImage alloc] initWithCGImage:videoImage scale:1.0 orientation:UIImageOrientationLeftMirrored];
CGImageRelease(videoImage);
return result;
}
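// Key method #2: redraw the image so the pixels themselves are upright; the orientation flag is only metadata, which a detector reading raw pixels ignores.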
- (UIImage *)fixOrientation:(UIImage *)aImage {
// No-op if the orientation is already correct
if (aImage.imageOrientation == UIImageOrientationUp)
return aImage;
// We need to calculate the proper transformation to make the image upright.
// We do it in 2 steps: Rotate if Left/Right/Down, and then flip if Mirrored.
CGAffineTransform transform = CGAffineTransformIdentity;
switch (aImage.imageOrientation) {
case UIImageOrientationDown:
case UIImageOrientationDownMirrored:
transform = CGAffineTransformTranslate(transform, aImage.size.width, aImage.size.height);
transform = CGAffineTransformRotate(transform, M_PI);
break;
case UIImageOrientationLeft:
case UIImageOrientationLeftMirrored:
transform = CGAffineTransformTranslate(transform, aImage.size.width, 0);
transform = CGAffineTransformRotate(transform, M_PI_2);
break;
case UIImageOrientationRight:
case UIImageOrientationRightMirrored:
transform = CGAffineTransformTranslate(transform, 0, aImage.size.height);
transform = CGAffineTransformRotate(transform, -M_PI_2);
break;
default:
break;
}
switch (aImage.imageOrientation) {
case UIImageOrientationUpMirrored:
case UIImageOrientationDownMirrored:
transform = CGAffineTransformTranslate(transform, aImage.size.width, 0);
transform = CGAffineTransformScale(transform, -1, 1);
break;
case UIImageOrientationLeftMirrored:
case UIImageOrientationRightMirrored:
transform = CGAffineTransformTranslate(transform, aImage.size.height, 0);
transform = CGAffineTransformScale(transform, -1, 1);
break;
default:
break;
}
// Now we draw the underlying CGImage into a new context, applying the transform
// calculated above.
CGContextRef ctx = CGBitmapContextCreate(NULL, aImage.size.width, aImage.size.height,
CGImageGetBitsPerComponent(aImage.CGImage), 0,
CGImageGetColorSpace(aImage.CGImage),
CGImageGetBitmapInfo(aImage.CGImage));
CGContextConcatCTM(ctx, transform);
switch (aImage.imageOrientation) {
case UIImageOrientationLeft:
case UIImageOrientationLeftMirrored:
case UIImageOrientationRight:
case UIImageOrientationRightMirrored:
// Rotated cases draw into a rect with width and height swapped.
CGContextDrawImage(ctx, CGRectMake(0,0,aImage.size.height,aImage.size.width), aImage.CGImage);
break;
default:
CGContextDrawImage(ctx, CGRectMake(0,0,aImage.size.width,aImage.size.height), aImage.CGImage);
break;
}
// And now we just create a new UIImage from the drawing context
CGImageRef cgimg = CGBitmapContextCreateImage(ctx);
UIImage *img = [UIImage imageWithCGImage:cgimg];
CGContextRelease(ctx);
CGImageRelease(cgimg);
return img;
}
- (void)didReceiveMemoryWarning {
[super didReceiveMemoryWarning];
[captureSession stopRunning];
// Dispose of any resources that can be recreated.
}
@end
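Note: neither version of the code checks camera permission. The capture session silently produces no frames if access has been denied, and from iOS 10 on the app must also declare NSCameraUsageDescription in its Info.plist. The snippet below is a minimal sketch, not part of the original answer, showing how the session setup could be gated on the standard AVCaptureDevice authorization API; it reuses the beginSession method defined above.
// Minimal sketch (assumption, not from the original answer): request camera
// access before building the session. iOS 10+ also requires an
// NSCameraUsageDescription entry in Info.plist.
[AVCaptureDevice requestAccessForMediaType:AVMediaTypeVideo completionHandler:^(BOOL granted) {
dispatch_async(dispatch_get_main_queue(), ^{
if (granted) {
[self beginSession]; // the preview layer uses self.view, so configure on the main thread
} else {
NSLog(@"Camera access denied");
}
});
}];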