试图通过 Apple 的人脸检测 API 获得一个简单的概念证明。我看过其他几个例子,包括 Apple 的 SquareCam,还有这个 https://github.com/jeroentrappers/FaceDetectionPOC
基于这些,我似乎在遵循正确的模式来运行 API,但我被卡住了。无论我做什么,我的人脸检测器的 CIDetector 总是为零!!!
如能提供任何帮助、线索、提示或建议,我将不胜感激!
// Configures the AVCaptureSession with the front camera and a 32BGRA video
// data output whose frames are delivered to this object on a private serial
// queue, then embeds the preview layer and starts the session.
// NOTE(review): assumes `session`, `videoDataOutput`, `videoDataOutputQueue`
// and `isUsingFrontFacingCamera` are ivars declared elsewhere in this class.
- (void)initCamera {
    session = [[AVCaptureSession alloc] init];

    // Front camera is hard-coded for this proof of concept; the original
    // front/back fallback logic was commented out by the author.
    AVCaptureDevice *device = [self frontCamera];
    isUsingFrontFacingCamera = YES;

    NSError *error = nil;
    AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:device
                                                                        error:&error];
    if (input && [session canAddInput:input]) {
        [session addInput:input];
    } else {
        // TODO(review): surface this failure to the caller (or use a debug-only
        // log macro) instead of silently continuing with a broken session.
        NSLog(@"Error %@", error);
    }

    videoDataOutput = [[AVCaptureVideoDataOutput alloc] init];
    // 32BGRA is the pixel format Core Image face detection expects.
    // FIX: the scraped original read `forKeyid)` — the `:(` of the
    // `forKey:(id)` keyword had been stripped; rebuilt as a dictionary literal.
    NSDictionary *rgbOutputSettings =
        @{(id)kCVPixelBufferPixelFormatTypeKey : @(kCMPixelFormat_32BGRA)};
    [videoDataOutput setVideoSettings:rgbOutputSettings];
    // Drop late frames rather than queueing them; face detection need not see
    // every frame and backlogs would grow memory use.
    [videoDataOutput setAlwaysDiscardsLateVideoFrames:YES];

    videoDataOutputQueue = dispatch_queue_create("VideoDataOutputQueue", DISPATCH_QUEUE_SERIAL);
    [videoDataOutput setSampleBufferDelegate:self queue:videoDataOutputQueue];
    [[videoDataOutput connectionWithMediaType:AVMediaTypeVideo] setEnabled:YES];

    if ([session canAddOutput:videoDataOutput]) {
        [session addOutput:videoDataOutput];
    }

    [self embedPreviewInView:self.theImageView];
    [session startRunning];
}
// AVCaptureVideoDataOutputSampleBufferDelegate callback: runs Core Image face
// detection on each delivered frame (invoked on `videoDataOutputQueue`).
// FIX (the question's actual bug): the detector type must be
// CIDetectorTypeFace, not CIFeatureTypeFace. Passing the feature-type
// constant to +detectorOfType:context:options: makes it return nil — the
// root cause of "CIDetector is always nil".
- (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
       fromConnection:(AVCaptureConnection *)connection {
    CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    // Propagate buffer attachments (color space etc.) into the CIImage.
    CFDictionaryRef attachments = CMCopyDictionaryOfAttachments(kCFAllocatorDefault,
                                                                sampleBuffer,
                                                                kCMAttachmentMode_ShouldPropagate);
    CIImage *ciImage = [[CIImage alloc] initWithCVPixelBuffer:pixelBuffer
                                                      options:(__bridge NSDictionary *)attachments];
    if (attachments) {
        CFRelease(attachments);  // Copy rule: we own the dictionary from CMCopy…
    }

    UIDeviceOrientation curDeviceOrientation = [[UIDevice currentDevice] orientation];
    NSDictionary *imageOptions =
        @{CIDetectorImageOrientation : [self exifOrientation:curDeviceOrientation]};

    // TODO(review): creating a CIDetector per frame is expensive — hoist it
    // into a lazily-initialized property created once.
    NSDictionary *detectorOptions = @{CIDetectorAccuracy : CIDetectorAccuracyLow};
    CIDetector *faceDetector = [CIDetector detectorOfType:CIDetectorTypeFace
                                                  context:nil
                                                  options:detectorOptions];
    NSArray *faceFeatures = [faceDetector featuresInImage:ciImage options:imageOptions];
    if ([faceFeatures count] > 0) {
        NSLog(@"GOT a face!");
        NSLog(@"%@", faceFeatures);
    }

    // UI updates must happen on the main queue; detection ran on the
    // video data queue.
    dispatch_async(dispatch_get_main_queue(), ^(void) {
        //NSLog(@"updating main thread");
    });
}
// Answer snippet: detect a smiling face in `image` and save it to the photo
// album, otherwise prompt the user. NOTE(review): this fragment is not inside
// a visible method — `context`, `image` and `self.label` must exist in the
// enclosing scope. Rebuilt the scrape-garbled syntax: `options:@{…}`,
// `CIDetectorSmile: @YES`, and the missing trailing semicolon.
CIDetector *smileDetector = [CIDetector detectorOfType:CIDetectorTypeFace
                                               context:context
                                               options:@{CIDetectorTracking : @YES,
                                                         CIDetectorAccuracy : CIDetectorAccuracyLow}];
// CIDetectorSmile asks the detector to populate each CIFaceFeature's hasSmile.
NSArray *features = [smileDetector featuresInImage:image
                                           options:@{CIDetectorSmile : @YES}];
if (([features count] > 0) && (((CIFaceFeature *)features[0]).hasSmile)) {
    // FIX: contextInfo is a void * — under ARC the NSArray must be bridged.
    // NOTE(review): UIImageWriteToSavedPhotosAlbum requires the completion
    // selector to have the signature
    // image:didFinishSavingWithError:contextInfo: — confirm
    // `didFinishWritingImage` matches, or the callback will not fire correctly.
    UIImageWriteToSavedPhotosAlbum(image, self,
                                   @selector(didFinishWritingImage),
                                   (__bridge void *)features);
} else {
    self.label.text = @"Say Cheese!";
}
关于 iOS / Objective-C「No Matter what I do CIDetector is always nil」,我们在 Stack Overflow 上找到了一个类似的问题: https://stackoverflow.com/questions/19055760/
欢迎光临 OStack程序员社区-中国程序员成长平台 (https://ostack.cn/) | Powered by Discuz! X3.4 |