CameraViewController.mm
//
//  CameraViewController.mm
//  lpr
//
//  Created by baotim on 2018/10/26.
//  Copyright © 2018 lprSample. All rights reserved.
//

#import "CameraViewController.h"
#import "Utility.h"
#import "Pipeline.h"
// AVFoundation and AudioToolbox are needed for the capture APIs and the vibration call
// used below (they may already be pulled in via CameraViewController.h).
#import <AVFoundation/AVFoundation.h>
#import <AudioToolbox/AudioToolbox.h>
// Screen width and height
#define kScreenWidth  [UIScreen mainScreen].bounds.size.width
#define kScreenHeight [UIScreen mainScreen].bounds.size.height

@interface CameraViewController () <UIAlertViewDelegate, AVCaptureVideoDataOutputSampleBufferDelegate>
{
    AVCaptureSession *_session;
    AVCaptureDeviceInput *_captureInput;
    AVCaptureStillImageOutput *_captureOutput;
    AVCaptureVideoPreviewLayer *_preview;
    AVCaptureDevice *_device;
    NSTimer *_timer;        // timer that re-triggers autofocus on contrast-AF devices
    BOOL _on;               // torch (flash) state
    BOOL _capture;          // whether the navigation-bar transition animation has finished
    BOOL _isFoucePixel;     // whether the device supports phase-detection autofocus
    CGRect _imgRect;        // crop rect for the captured photo
    int _count;             // frame counter: recognize once every _MaxFR frames
    CGFloat _isLensChanged; // last observed lens position
    /* Lens position under phase-detection autofocus; the value keeps changing while the lens moves. */
    CGFloat _isIOS8AndFoucePixelLensPosition;
    /*
     Controls the recognition rate; the minimum value is 1, and larger values mean recognition runs less often.
     Camera initialization sets the default to 1 (do not change it); if the device uses phase-detection
     autofocus it is set to 2 (you may raise it, minimum 1, larger is slower).
     Its purpose is to throttle recognition under phase-detection autofocus, where frames would otherwise
     be processed too quickly.
     It is set during camera initialization and read in the capture delegate; there is no need to change it
     unless you have special requirements.
     */
    int _MaxFR;
    cv::Mat source_image;
}
@property (assign, nonatomic) BOOL adjustingFocus;
@property (nonatomic, retain) CALayer *customLayer;
@property (nonatomic, assign) BOOL isProcessingImage;
@property (nonatomic, strong) UIImage *image;
@end
@implementation CameraViewController

- (void)viewDidLoad {
    [super viewDidLoad];
    // Do any additional setup after loading the view.
    self.view.backgroundColor = [UIColor clearColor];
    // Initialize the camera
    [self initialize];
    // Create the camera UI controls
    [self createCameraView];
}
- (void)viewWillAppear:(BOOL)animated {
    [super viewWillAppear:animated];
    _capture = NO;
    [self performSelector:@selector(changeCapture) withObject:nil afterDelay:0.4];
    // If phase-detection autofocus is not available (it is supported on iPhone 6 and later),
    // start a timer that keeps re-triggering continuous autofocus.
    if (!_isFoucePixel) {
        _timer = [NSTimer scheduledTimerWithTimeInterval:1.3 target:self selector:@selector(fouceMode) userInfo:nil repeats:YES];
    }
    AVCaptureDevice *camDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    NSKeyValueObservingOptions flags = NSKeyValueObservingOptionNew;
    // Register KVO observers
    [camDevice addObserver:self forKeyPath:@"adjustingFocus" options:flags context:nil];
    if (_isFoucePixel) {
        [camDevice addObserver:self forKeyPath:@"lensPosition" options:flags context:nil];
    }
    [_session startRunning];
}
- (void)viewWillDisappear:(BOOL)animated {
    [super viewWillDisappear:animated];
    self.navigationController.navigationBarHidden = NO;
}

- (void)viewDidDisappear:(BOOL)animated {
    [super viewDidDisappear:animated];
    if (!_isFoucePixel) {
        [_timer invalidate];
        _timer = nil;
    }
    AVCaptureDevice *camDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    [camDevice removeObserver:self forKeyPath:@"adjustingFocus"];
    if (_isFoucePixel) {
        [camDevice removeObserver:self forKeyPath:@"lensPosition"];
    }
    [_session stopRunning];
    _capture = NO;
}

- (void)changeCapture {
    _capture = YES;
}
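
// The private methods below build the AVFoundation capture pipeline:
// session -> back-camera input -> video data output (live BGRA frames for recognition)
// -> still image output (tap-to-capture JPEG) -> preview layer, plus the overlay UI.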
#pragma mark - Private Methods

// Initialize the camera
- (void)initialize {
    // Check camera authorization
    AVAuthorizationStatus authStatus = [AVCaptureDevice authorizationStatusForMediaType:AVMediaTypeVideo];
    if (authStatus == AVAuthorizationStatusRestricted || authStatus == AVAuthorizationStatusDenied) {
        self.view.backgroundColor = [UIColor blackColor];
        UIAlertView *alt = [[UIAlertView alloc] initWithTitle:@"未获得授权使用摄像头" message:@"请在'设置-隐私-相机'打开" delegate:self cancelButtonTitle:nil otherButtonTitles:@"OK", nil];
        [alt show];
        return;
    }
    _MaxFR = 1;
    // 1. Create the capture session
    _session = [[AVCaptureSession alloc] init];
    [_session setSessionPreset:AVCaptureSessionPreset1920x1080];
    // 2. Create and configure the input device (back camera)
    NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
    for (AVCaptureDevice *device in devices) {
        if (device.position == AVCaptureDevicePositionBack) {
            _device = device;
            _captureInput = [AVCaptureDeviceInput deviceInputWithDevice:device error:nil];
        }
    }
    [_session addInput:_captureInput];
    // 3. Create the video data output for live frames
    AVCaptureVideoDataOutput *captureOutput = [[AVCaptureVideoDataOutput alloc] init];
    captureOutput.alwaysDiscardsLateVideoFrames = YES;
    dispatch_queue_t queue = dispatch_queue_create("cameraQueue", NULL);
    [captureOutput setSampleBufferDelegate:self queue:queue];
    NSString *key = (NSString *)kCVPixelBufferPixelFormatTypeKey;
    NSNumber *value = [NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA];
    NSDictionary *videoSettings = [NSDictionary dictionaryWithObject:value forKey:key];
    [captureOutput setVideoSettings:videoSettings];
    [_session addOutput:captureOutput];
    // 4. Create and configure the still image output
    _captureOutput = [[AVCaptureStillImageOutput alloc] init];
    NSDictionary *outputSettings = [[NSDictionary alloc] initWithObjectsAndKeys:AVVideoCodecJPEG, AVVideoCodecKey, nil];
    [_captureOutput setOutputSettings:outputSettings];
    [_session addOutput:_captureOutput];
    // 5. Preview layer
    _preview = [AVCaptureVideoPreviewLayer layerWithSession:_session];
    _preview.frame = CGRectMake(0, 0, self.view.frame.size.width, self.view.frame.size.height);
    _preview.videoGravity = AVLayerVideoGravityResizeAspectFill;
    [self.view.layer addSublayer:_preview];
    // Detect phase-detection autofocus support
    if ([[[UIDevice currentDevice] systemVersion] floatValue] >= 8.0) {
        AVCaptureDeviceFormat *deviceFormat = _device.activeFormat;
        if (deviceFormat.autoFocusSystem == AVCaptureAutoFocusSystemPhaseDetection) {
            _isFoucePixel = YES;
            _MaxFR = 2;
        }
    }
}
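
// Note: the authorization check in -initialize only handles Restricted/Denied. On first launch
// the status is AVAuthorizationStatusNotDetermined; a hedged sketch of how that case could be
// handled with the standard AVFoundation API (not part of the original sample):
//
//     if (authStatus == AVAuthorizationStatusNotDetermined) {
//         [AVCaptureDevice requestAccessForMediaType:AVMediaTypeVideo
//                                  completionHandler:^(BOOL granted) {
//             // re-run the camera setup on the main thread if access was granted
//         }];
//     }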
- (void)createCameraView
{
    // Overlay mask: an even-odd filled path that dims everything outside the scan window
    CAShapeLayer *maskWithHole = [CAShapeLayer layer];
    // Both frames are defined in the same coordinate system
    CGRect biggerRect = self.view.bounds;
    CGFloat offset = 1.0f;
    if ([[UIScreen mainScreen] scale] >= 2) {
        offset = 0.5;
    }
    CGRect smallFrame = CGRectMake(45, 100, 300, 500);
    CGRect smallerRect = CGRectInset(smallFrame, -offset, -offset);
    UIBezierPath *maskPath = [UIBezierPath bezierPath];
    [maskPath moveToPoint:CGPointMake(CGRectGetMinX(biggerRect), CGRectGetMinY(biggerRect))];
    [maskPath addLineToPoint:CGPointMake(CGRectGetMinX(biggerRect), CGRectGetMaxY(biggerRect))];
    [maskPath addLineToPoint:CGPointMake(CGRectGetMaxX(biggerRect), CGRectGetMaxY(biggerRect))];
    [maskPath addLineToPoint:CGPointMake(CGRectGetMaxX(biggerRect), CGRectGetMinY(biggerRect))];
    [maskPath addLineToPoint:CGPointMake(CGRectGetMinX(biggerRect), CGRectGetMinY(biggerRect))];
    [maskPath moveToPoint:CGPointMake(CGRectGetMinX(smallerRect), CGRectGetMinY(smallerRect))];
    [maskPath addLineToPoint:CGPointMake(CGRectGetMinX(smallerRect), CGRectGetMaxY(smallerRect))];
    [maskPath addLineToPoint:CGPointMake(CGRectGetMaxX(smallerRect), CGRectGetMaxY(smallerRect))];
    [maskPath addLineToPoint:CGPointMake(CGRectGetMaxX(smallerRect), CGRectGetMinY(smallerRect))];
    [maskPath addLineToPoint:CGPointMake(CGRectGetMinX(smallerRect), CGRectGetMinY(smallerRect))];
    [maskWithHole setPath:[maskPath CGPath]];
    [maskWithHole setFillRule:kCAFillRuleEvenOdd];
    [maskWithHole setFillColor:[[UIColor colorWithWhite:0 alpha:0.35] CGColor]];
    [self.view.layer addSublayer:maskWithHole];
    [self.view.layer setMasksToBounds:YES];
    /* The camera buttons below are laid out for both iPhone and iPad; adjust the UI to your own needs. */
    // Back and flash buttons
    CGFloat backWidth = 35;
    if (kScreenHeight >= 1024) {
        backWidth = 50;
    }
    CGFloat s = 80;
    CGFloat s1 = 0;
    if (kScreenHeight == 480) {
        s = 60;
        s1 = 10;
    }
    UIButton *backBtn = [[UIButton alloc] initWithFrame:CGRectMake(kScreenWidth/16, kScreenWidth/16-s1, backWidth, backWidth)];
    [backBtn addTarget:self action:@selector(backAction) forControlEvents:UIControlEventTouchUpInside];
    [backBtn setImage:[UIImage imageNamed:@"back_camera_btn"] forState:UIControlStateNormal];
    backBtn.titleLabel.textAlignment = NSTextAlignmentLeft;
    [self.view addSubview:backBtn];
    UIButton *flashBtn = [[UIButton alloc] initWithFrame:CGRectMake(kScreenWidth-kScreenWidth/16-backWidth, kScreenWidth/16-s1, backWidth, backWidth)];
    [flashBtn setImage:[UIImage imageNamed:@"flash_camera_btn"] forState:UIControlStateNormal];
    [flashBtn addTarget:self action:@selector(modeBtn) forControlEvents:UIControlEventTouchUpInside];
    [self.view addSubview:flashBtn];
    // Photo controls: pull-up button and shutter button
    UIButton *upBtn = [[UIButton alloc] initWithFrame:CGRectMake(kScreenWidth/2-60, kScreenHeight-20, 120, 20)];
    upBtn.tag = 1001;
    [upBtn addTarget:self action:@selector(upBtn:) forControlEvents:UIControlEventTouchUpInside];
    [upBtn setImage:[UIImage imageNamed:@"locker_btn_def"] forState:UIControlStateNormal];
    [self.view addSubview:upBtn];
    UIButton *photoBtn = [[UIButton alloc] initWithFrame:CGRectMake(kScreenWidth/2-30, kScreenHeight-s, 60, 60)];
    photoBtn.tag = 1000;
    photoBtn.hidden = YES;
    [photoBtn setImage:[UIImage imageNamed:@"take_pic_btn"] forState:UIControlStateNormal];
    [photoBtn addTarget:self action:@selector(photoBtn) forControlEvents:UIControlEventTouchUpInside];
    [photoBtn setTitleColor:[UIColor grayColor] forState:UIControlStateHighlighted];
    [self.view addSubview:photoBtn];
    [self.view bringSubviewToFront:photoBtn];
}
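
// Note that the even-odd mask only dims the area outside the scan window visually; the frames
// handed to the recognizer are not cropped to that window (the _imgRect ivar is declared but
// never used in this file).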
#pragma mark - UIAlertViewDelegate

- (void)alertView:(UIAlertView *)alertView clickedButtonAtIndex:(NSInteger)buttonIndex {
    [self.navigationController popViewControllerAnimated:YES];
}

// Status bar: default style, hidden while the camera is shown
- (UIStatusBarStyle)preferredStatusBarStyle {
    return UIStatusBarStyleDefault;
}

- (BOOL)prefersStatusBarHidden {
    return YES;
}
// Trigger autofocus at the center of the view (called by the timer on contrast-AF devices)
- (void)fouceMode
{
    NSError *error;
    AVCaptureDevice *device = [self cameraWithPosition:AVCaptureDevicePositionBack];
    if ([device isFocusModeSupported:AVCaptureFocusModeAutoFocus]) {
        if ([device lockForConfiguration:&error]) {
            CGPoint cameraPoint = [_preview captureDevicePointOfInterestForPoint:self.view.center];
            [device setFocusPointOfInterest:cameraPoint];
            [device setFocusMode:AVCaptureFocusModeAutoFocus];
            [device unlockForConfiguration];
        } else {
            //NSLog(@"Error: %@", error);
        }
    }
}

- (AVCaptureDevice *)cameraWithPosition:(AVCaptureDevicePosition)position
{
    NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
    for (AVCaptureDevice *device in devices) {
        if (device.position == position) {
            return device;
        }
    }
    return nil;
}
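
// The recognizer below builds a plate-recognition pipeline from files bundled with the app:
// a cascade plate detector (cascade.xml) plus fine-mapping, segmentation, and character-recognition
// Caffe models. It runs the pipeline over the input Mat and keeps plates whose confidence
// exceeds 0.7, joining them with commas.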
- (NSString *)getPath:(NSString *)fileName
{
    NSString *bundlePath = [NSBundle mainBundle].bundlePath;
    NSString *path = [bundlePath stringByAppendingPathComponent:fileName];
    return path;
}

- (NSString *)simpleRecognition:(cv::Mat &)src
{
    NSString *path_1 = [self getPath:@"cascade.xml"];
    NSString *path_2 = [self getPath:@"HorizonalFinemapping.prototxt"];
    NSString *path_3 = [self getPath:@"HorizonalFinemapping.caffemodel"];
    NSString *path_4 = [self getPath:@"Segmentation.prototxt"];
    NSString *path_5 = [self getPath:@"Segmentation.caffemodel"];
    NSString *path_6 = [self getPath:@"CharacterRecognization.prototxt"];
    NSString *path_7 = [self getPath:@"CharacterRecognization.caffemodel"];
    // Use stack std::string values (the original heap-allocated copies were never freed).
    std::string cpath_1([path_1 UTF8String]);
    std::string cpath_2([path_2 UTF8String]);
    std::string cpath_3([path_3 UTF8String]);
    std::string cpath_4([path_4 UTF8String]);
    std::string cpath_5([path_5 UTF8String]);
    std::string cpath_6([path_6 UTF8String]);
    std::string cpath_7([path_7 UTF8String]);
    pr::PipelinePR pr2 = pr::PipelinePR(cpath_1, cpath_2, cpath_3, cpath_4, cpath_5, cpath_6, cpath_7);
    std::vector<pr::PlateInfo> list_res = pr2.RunPiplineAsImage(src);
    std::string concat_results = "";
    for (auto one : list_res) {
        if (one.confidence > 0.7) {
            concat_results += one.getPlateName() + ",";
        }
    }
    NSString *str = [NSString stringWithCString:concat_results.c_str() encoding:NSUTF8StringEncoding];
    if (str.length > 0) {
        // Drop the trailing comma
        str = [str substringToIndex:str.length - 1];
    } else {
        str = @"未识别成功";
    }
    NSLog(@"===> 识别结果 = %@", str);
    return str;
}
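
// Performance note: the pipeline (and all of its Caffe models) is rebuilt on every call, which is
// costly when invoked per frame. A minimal caching sketch, assuming pr::PipelinePR can safely be
// kept alive and reused between calls (not verified against the library):
//
//     static pr::PipelinePR *sharedPR = nullptr;
//     if (sharedPR == nullptr) {
//         sharedPR = new pr::PipelinePR(cpath_1, cpath_2, cpath_3, cpath_4,
//                                       cpath_5, cpath_6, cpath_7);
//     }
//     std::vector<pr::PlateInfo> list_res = sharedPR->RunPiplineAsImage(src);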
#pragma mark - Actions

// Back button tap handler
- (void)backAction
{
    [self dismissViewControllerAnimated:YES completion:nil];
}

// Flash (torch) button tap handler
- (void)modeBtn
{
    if (![_device hasTorch]) {
        //NSLog(@"no torch");
    } else {
        [_device lockForConfiguration:nil];
        if (!_on) {
            [_device setTorchMode:AVCaptureTorchModeOn];
            _on = YES;
        } else {
            [_device setTorchMode:AVCaptureTorchModeOff];
            _on = NO;
        }
        [_device unlockForConfiguration];
    }
}

// Pull-up button tap handler: reveal the shutter button
- (void)upBtn:(UIButton *)upBtn
{
    UIButton *photoBtn = (UIButton *)[self.view viewWithTag:1000];
    photoBtn.hidden = NO;
    upBtn.hidden = YES;
}
// Shutter button tap handler
- (void)photoBtn
{
    self.isProcessingImage = YES;
    // Find the video connection on the still image output
    AVCaptureConnection *videoConnection = nil;
    for (AVCaptureConnection *connection in _captureOutput.connections) {
        for (AVCaptureInputPort *port in [connection inputPorts]) {
            if ([[port mediaType] isEqual:AVMediaTypeVideo]) {
                videoConnection = connection;
                break;
            }
        }
        if (videoConnection) { break; }
    }
    // Capture a still image, run recognition on it, and hand back the result
    [_captureOutput captureStillImageAsynchronouslyFromConnection:videoConnection completionHandler:
     ^(CMSampleBufferRef imageSampleBuffer, NSError *error) {
         if (imageSampleBuffer != NULL) {
             // Stop the capture session
             [_session stopRunning];
             NSData *imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageSampleBuffer];
             UIImage *tempImage = [[UIImage alloc] initWithData:imageData];
             UIImage *temp_image = [Utility scaleAndRotateImageBackCamera:tempImage];
             source_image = [Utility cvMatFromUIImage:temp_image];
             NSString *text = [self simpleRecognition:source_image];
             NSMutableDictionary *resultDict = [NSMutableDictionary new];
             resultDict[@"image"] = temp_image;
             resultDict[@"text"] = text;
             [self performSelectorOnMainThread:@selector(readyToGetImage:) withObject:resultDict waitUntilDone:NO];
             self.isProcessingImage = NO;
         }
     }];
}
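
// Live recognition path. Each BGRA frame from the video data output (delivered on the background
// "cameraQueue") is converted to a UIImage; recognition only runs once the presentation animation
// has finished (_capture), no tapped photo is being processed, the lens is not actively focusing
// and its position is stable, and the _count/_MaxFR throttle has elapsed. A 7-character result is
// treated as a successful plate read, and the result is marshalled back to the main thread.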
#pragma mark - AVCaptureSession delegate
// Receives frames from the camera buffer
- (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
       fromConnection:(AVCaptureConnection *)connection
{
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    CVPixelBufferLockBaseAddress(imageBuffer, 0);
    uint8_t *baseAddress = (uint8_t *)CVPixelBufferGetBaseAddress(imageBuffer);
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
    size_t width = CVPixelBufferGetWidth(imageBuffer);
    size_t height = CVPixelBufferGetHeight(imageBuffer);
    /* Create a CGImageRef from the CVImageBufferRef */
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef newContext = CGBitmapContextCreate(baseAddress, width, height, 8, bytesPerRow, colorSpace, kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
    CGImageRef newImage = CGBitmapContextCreateImage(newContext);
    /* Release the drawing components and unlock the pixel buffer once, after the pixels have been
       copied into the CGImage (the original unlocked it both before using the base address and
       again at the end of the method). */
    CGContextRelease(newContext);
    CGColorSpaceRelease(colorSpace);
    CVPixelBufferUnlockBaseAddress(imageBuffer, 0);
    /* Keep the frame; its orientation is corrected later so the image matches the displayed video. */
    self.image = [UIImage imageWithCGImage:newImage scale:1.0 orientation:UIImageOrientationUp];
    /* Release the CGImageRef */
    CGImageRelease(newImage);
    // Frame-by-frame recognition
    if (_capture == YES) {                      // navigation-bar animation finished
        if (self.isProcessingImage == NO) {     // skip while a tapped photo is being processed
            if (!self.adjustingFocus) {         // contrast AF: not currently focusing (this flag never changes under phase-detection AF)
                if (_isLensChanged == _isIOS8AndFoucePixelLensPosition) {
                    _count++;
                    if (_count >= _MaxFR) {
                        // Recognize the current frame
                        UIImage *temp_image = [Utility scaleAndRotateImageBackCamera:self.image];
                        source_image = [Utility cvMatFromUIImage:temp_image];
                        NSString *text = [self simpleRecognition:source_image];
                        if (text.length == 7) { // recognition succeeded
                            _count = 0;
                            // Stop the capture session
                            [_session stopRunning];
                            // Vibrate as feedback
                            AudioServicesPlaySystemSound(kSystemSoundID_Vibrate);
                            NSMutableDictionary *resultDict = [NSMutableDictionary new];
                            resultDict[@"image"] = temp_image;
                            resultDict[@"text"] = text;
                            [self performSelectorOnMainThread:@selector(readyToGetImage:) withObject:resultDict waitUntilDone:NO];
                        }
                    }
                } else {
                    // Lens moved since the last frame: remember the new position and reset the throttle
                    _isLensChanged = _isIOS8AndFoucePixelLensPosition;
                    _count = 0;
                }
            }
        }
    }
}
// Recognition (or a tapped capture) succeeded: dismiss the camera and hand the result back
- (void)readyToGetImage:(NSDictionary *)resultDict
{
    [self dismissViewControllerAnimated:NO completion:^{
    }];
    if (self.resultCB) {
        self.resultCB(resultDict[@"text"], resultDict[@"image"]);
    }
}

- (void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(NSDictionary *)change context:(void *)context {
    /* Contrast-detection autofocus: track whether the camera is currently focusing */
    if ([keyPath isEqualToString:@"adjustingFocus"]) {
        self.adjustingFocus = [[change objectForKey:NSKeyValueChangeNewKey] isEqualToNumber:[NSNumber numberWithInt:1]];
    }
    /* Phase-detection autofocus: track the lens position */
    if ([keyPath isEqualToString:@"lensPosition"]) {
        _isIOS8AndFoucePixelLensPosition = [[change objectForKey:NSKeyValueChangeNewKey] floatValue];
        //NSLog(@"observed _isIOS8AndFoucePixelLensPosition == %f", _isIOS8AndFoucePixelLensPosition);
    }
}

@end
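
// Minimal usage sketch for the presenting side (assumption: CameraViewController.h declares a
// block property along the lines of
//     @property (nonatomic, copy) void (^resultCB)(NSString *text, UIImage *image);
// the exact declaration lives in the header and is not shown in this file):
//
//     CameraViewController *camera = [[CameraViewController alloc] init];
//     camera.resultCB = ^(NSString *text, UIImage *image) {
//         NSLog(@"recognized plate: %@", text);
//     };
//     [self presentViewController:camera animated:YES completion:nil];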