// LBXScanNative.m
  1. #import "LBXScanNative.h"
// Class extension: private AVFoundation state for the native code scanner.
@interface LBXScanNative()<AVCaptureMetadataOutputObjectsDelegate>
{
    // NO while a recognized result is being processed, so further delegate
    // callbacks are dropped until the scanner is re-armed (see -startScan).
    BOOL bNeedScanResult;
}
// NOTE(review): `assign` on an object-typed property; `strong` is conventional —
// confirm this is intentional (the session/input keep the device alive here).
@property (assign,nonatomic)AVCaptureDevice * device;
@property (strong,nonatomic)AVCaptureDeviceInput * input;
@property (strong,nonatomic)AVCaptureMetadataOutput * output;
@property (strong,nonatomic)AVCaptureSession * session;
@property (strong,nonatomic)AVCaptureVideoPreviewLayer * preview;
// Still-photo capture (AVCaptureStillImageOutput is deprecated since iOS 10).
@property(nonatomic,strong) AVCaptureStillImageOutput *stillImageOutput;
// When YES, a still frame is captured and attached to the scan results.
@property(nonatomic,assign)BOOL isNeedCaputureImage;
// Accumulated scan results for the current frame.
@property (nonatomic, strong) NSMutableArray<LBXScanResult*> *arrayResult;
// Metadata object types to recognize.
@property (nonatomic, strong) NSArray* arrayBarCodeType;
/**
 @brief View hosting the video preview layer.
 */
@property (nonatomic,weak)UIView *videoPreView;
/*!
 * Callback invoked with the scan results.
 */
@property(nonatomic,copy)void (^blockScanResult)(NSArray<LBXScanResult*> *array);
@end
  26. @implementation LBXScanNative
  27. - (void)setNeedCaptureImage:(BOOL)isNeedCaputureImg
  28. {
  29. _isNeedCaputureImage = isNeedCaputureImg;
  30. }
  31. - (instancetype)initWithPreView:(UIView*)preView ObjectType:(NSArray*)objType cropRect:(CGRect)cropRect success:(void(^)(NSArray<LBXScanResult*> *array))block
  32. {
  33. if (self = [super init]) {
  34. [self initParaWithPreView:preView ObjectType:objType cropRect:cropRect success:block];
  35. }
  36. return self;
  37. }
  38. - (instancetype)initWithPreView:(UIView*)preView ObjectType:(NSArray*)objType success:(void(^)(NSArray<LBXScanResult*> *array))block
  39. {
  40. if (self = [super init]) {
  41. [self initParaWithPreView:preView ObjectType:objType cropRect:CGRectZero success:block];
  42. }
  43. return self;
  44. }
  45. - (void)initParaWithPreView:(UIView*)videoPreView ObjectType:(NSArray*)objType cropRect:(CGRect)cropRect success:(void(^)(NSArray<LBXScanResult*> *array))block
  46. {
  47. self.arrayBarCodeType = objType;
  48. self.blockScanResult = block;
  49. self.videoPreView = videoPreView;
  50. _device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
  51. if (!_device) {
  52. return;
  53. }
  54. // Input
  55. _input = [AVCaptureDeviceInput deviceInputWithDevice:self.device error:nil];
  56. if ( !_input )
  57. return ;
  58. bNeedScanResult = YES;
  59. // Output
  60. _output = [[AVCaptureMetadataOutput alloc]init];
  61. [_output setMetadataObjectsDelegate:self queue:dispatch_get_main_queue()];
  62. if ( !CGRectEqualToRect(cropRect,CGRectZero) )
  63. {
  64. _output.rectOfInterest = cropRect;
  65. }
  66. /*
  67. // Setup the still image file output
  68. */
  69. // AVCapturePhotoOutput
  70. _stillImageOutput = [[AVCaptureStillImageOutput alloc] init];
  71. NSDictionary *outputSettings = [[NSDictionary alloc] initWithObjectsAndKeys:
  72. AVVideoCodecJPEG, AVVideoCodecKey,
  73. nil];
  74. [_stillImageOutput setOutputSettings:outputSettings];
  75. // Session
  76. _session = [[AVCaptureSession alloc]init];
  77. [_session setSessionPreset:AVCaptureSessionPresetHigh];
  78. // _session.
  79. // videoScaleAndCropFactor
  80. if ([_session canAddInput:_input])
  81. {
  82. [_session addInput:_input];
  83. }
  84. if ([_session canAddOutput:_output])
  85. {
  86. [_session addOutput:_output];
  87. }
  88. if ([_session canAddOutput:_stillImageOutput])
  89. {
  90. [_session addOutput:_stillImageOutput];
  91. }
  92. // 条码类型 AVMetadataObjectTypeQRCode
  93. // _output.metadataObjectTypes =@[AVMetadataObjectTypeQRCode];
  94. if (!objType) {
  95. objType = [self defaultMetaDataObjectTypes];
  96. }
  97. _output.metadataObjectTypes = objType;
  98. // Preview
  99. _preview =[AVCaptureVideoPreviewLayer layerWithSession:_session];
  100. _preview.videoGravity = AVLayerVideoGravityResizeAspectFill;
  101. //_preview.frame =CGRectMake(20,110,280,280);
  102. CGRect frame = videoPreView.frame;
  103. frame.origin = CGPointZero;
  104. _preview.frame = frame;
  105. [videoPreView.layer insertSublayer:self.preview atIndex:0];
  106. AVCaptureConnection *videoConnection = [self connectionWithMediaType:AVMediaTypeVideo fromConnections:[[self stillImageOutput] connections]];
  107. // CGFloat maxScale = videoConnection.videoMaxScaleAndCropFactor;
  108. CGFloat scale = videoConnection.videoScaleAndCropFactor;
  109. NSLog(@"%f",scale);
  110. // CGFloat zoom = maxScale / 50;
  111. // if (zoom < 1.0f || zoom > maxScale)
  112. // {
  113. // return;
  114. // }
  115. // videoConnection.videoScaleAndCropFactor += zoom;
  116. // CGAffineTransform transform = videoPreView.transform;
  117. // videoPreView.transform = CGAffineTransformScale(transform, zoom, zoom);
  118. //先进行判断是否支持控制对焦,不开启自动对焦功能,很难识别二维码。
  119. if (_device.isFocusPointOfInterestSupported &&[_device isFocusModeSupported:AVCaptureFocusModeAutoFocus])
  120. {
  121. [_input.device lockForConfiguration:nil];
  122. [_input.device setFocusMode:AVCaptureFocusModeContinuousAutoFocus];
  123. [_input.device unlockForConfiguration];
  124. }
  125. }
  126. - (CGFloat)getVideoMaxScale
  127. {
  128. [_input.device lockForConfiguration:nil];
  129. AVCaptureConnection *videoConnection = [self connectionWithMediaType:AVMediaTypeVideo fromConnections:[[self stillImageOutput] connections]];
  130. CGFloat maxScale = videoConnection.videoMaxScaleAndCropFactor;
  131. [_input.device unlockForConfiguration];
  132. return maxScale;
  133. }
  134. - (void)setVideoScale:(CGFloat)scale
  135. {
  136. [_input.device lockForConfiguration:nil];
  137. AVCaptureConnection *videoConnection = [self connectionWithMediaType:AVMediaTypeVideo fromConnections:[[self stillImageOutput] connections]];
  138. CGFloat zoom = scale / videoConnection.videoScaleAndCropFactor;
  139. videoConnection.videoScaleAndCropFactor = scale;
  140. [_input.device unlockForConfiguration];
  141. CGAffineTransform transform = _videoPreView.transform;
  142. _videoPreView.transform = CGAffineTransformScale(transform, zoom, zoom);
  143. }
  144. - (void)setScanRect:(CGRect)scanRect
  145. {
  146. //识别区域设置
  147. if (_output) {
  148. _output.rectOfInterest = [self.preview metadataOutputRectOfInterestForRect:scanRect];
  149. }
  150. }
  151. - (void)changeScanType:(NSArray*)objType
  152. {
  153. _output.metadataObjectTypes = objType;
  154. }
  155. - (void)startScan
  156. {
  157. if ( _input && !_session.isRunning )
  158. {
  159. [_session startRunning];
  160. bNeedScanResult = YES;
  161. [_videoPreView.layer insertSublayer:self.preview atIndex:0];
  162. // [_input.device addObserver:self forKeyPath:@"torchMode" options:0 context:nil];
  163. }
  164. bNeedScanResult = YES;
  165. }
// KVO callback. The only registration in this file (observing the device's
// torchMode in -startScan) is commented out, so this is currently dormant.
// NOTE(review): unhandled notifications are not forwarded to super — harmless
// while nothing is registered, but revisit if KVO is re-enabled.
- (void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(NSDictionary *)change context:(void *)context
{
    if ( object == _input.device ) {
        NSLog(@"flash change");
    }
}
  172. - (void)stopScan
  173. {
  174. bNeedScanResult = NO;
  175. if ( _input && _session.isRunning )
  176. {
  177. bNeedScanResult = NO;
  178. [_session stopRunning];
  179. // [self.preview removeFromSuperlayer];
  180. }
  181. }
  182. - (void)setTorch:(BOOL)torch {
  183. [self.input.device lockForConfiguration:nil];
  184. self.input.device.torchMode = torch ? AVCaptureTorchModeOn : AVCaptureTorchModeOff;
  185. [self.input.device unlockForConfiguration];
  186. }
  187. - (void)changeTorch
  188. {
  189. AVCaptureTorchMode torch = self.input.device.torchMode;
  190. switch (_input.device.torchMode) {
  191. case AVCaptureTorchModeAuto:
  192. break;
  193. case AVCaptureTorchModeOff:
  194. torch = AVCaptureTorchModeOn;
  195. break;
  196. case AVCaptureTorchModeOn:
  197. torch = AVCaptureTorchModeOff;
  198. break;
  199. default:
  200. break;
  201. }
  202. [_input.device lockForConfiguration:nil];
  203. _input.device.torchMode = torch;
  204. [_input.device unlockForConfiguration];
  205. }
  206. -(UIImage *)getImageFromLayer:(CALayer *)layer size:(CGSize)size
  207. {
  208. UIGraphicsBeginImageContextWithOptions(size, YES, [[UIScreen mainScreen]scale]);
  209. [layer renderInContext:UIGraphicsGetCurrentContext()];
  210. UIImage *image = UIGraphicsGetImageFromCurrentImageContext();
  211. UIGraphicsEndImageContext();
  212. return image;
  213. }
  214. - (AVCaptureConnection *)connectionWithMediaType:(NSString *)mediaType fromConnections:(NSArray *)connections
  215. {
  216. for ( AVCaptureConnection *connection in connections ) {
  217. for ( AVCaptureInputPort *port in [connection inputPorts] ) {
  218. if ( [[port mediaType] isEqual:mediaType] ) {
  219. return connection;
  220. }
  221. }
  222. }
  223. return nil;
  224. }
// Captures one still frame asynchronously, attaches it to every pending scan
// result, stops scanning, then fires the result callback.
// NOTE: the completion block captures self strongly (via direct ivar access);
// the capture is transient — held only until the handler runs, so no cycle.
- (void)captureImage
{
    AVCaptureConnection *stillImageConnection = [self connectionWithMediaType:AVMediaTypeVideo fromConnections:[[self stillImageOutput] connections]];
    [[self stillImageOutput] captureStillImageAsynchronouslyFromConnection:stillImageConnection
    completionHandler:^(CMSampleBufferRef imageDataSampleBuffer, NSError *error)
    {
        [self stopScan];
        if (imageDataSampleBuffer)
        {
            NSData *imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageDataSampleBuffer];
            UIImage *img = [UIImage imageWithData:imageData];
            // Attach the captured frame to every result collected by the delegate.
            for (LBXScanResult* result in _arrayResult) {
                result.imgScanned = img;
            }
        }
        // Best-effort: the callback fires even when the frame capture failed
        // (results then carry no image).
        if (_blockScanResult)
        {
            _blockScanResult(_arrayResult);
        }
    }];
}
  246. #pragma mark AVCaptureMetadataOutputObjectsDelegate
// Experimental/debug variant of the metadata delegate callback — the trailing
// "2" keeps it off the AVCaptureMetadataOutputObjectsDelegate selector, so
// AVFoundation never invokes it. It only logs each recognized code.
// NOTE(review): appears to be leftover test code; consider removing.
- (void)captureOutput2:(AVCaptureOutput *)captureOutput didOutputMetadataObjects:(NSArray *)metadataObjects fromConnection:(AVCaptureConnection *)connection
{
    // Log type and payload of every machine-readable code in the frame.
    for (AVMetadataObject *current in metadataObjects)
    {
        if ([current isKindOfClass:[AVMetadataMachineReadableCodeObject class]] )
        {
            NSString *scannedResult = [(AVMetadataMachineReadableCodeObject *) current stringValue];
            NSLog(@"type:%@",current.type);
            NSLog(@"result:%@",scannedResult);
            // (demonstrates that several codes can be recognized in one frame)
        }
    }
}
// AVCaptureMetadataOutputObjectsDelegate (main queue, see setup): collects
// every machine-readable code in the frame into arrayResult, then either
// triggers a still-image capture or stops scanning and fires the callback.
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputMetadataObjects:(NSArray *)metadataObjects fromConnection:(AVCaptureConnection *)connection
{
    // Drop frames while a previous result is still being processed.
    if (!bNeedScanResult) {
        return;
    }
    bNeedScanResult = NO;
    // Lazily create / reset the result accumulator.
    if (!_arrayResult) {
        self.arrayResult = [NSMutableArray arrayWithCapacity:1];
    }
    else
    {
        [_arrayResult removeAllObjects];
    }
    // Collect every recognized machine-readable code in this frame.
    for (AVMetadataObject *current in metadataObjects)
    {
        if ([current isKindOfClass:[AVMetadataMachineReadableCodeObject class]] )
        {
            bNeedScanResult = NO;
            NSLog(@"type:%@",current.type);
            NSString *scannedResult = [(AVMetadataMachineReadableCodeObject *) current stringValue];
            // Skip empty payloads.
            if (scannedResult && ![scannedResult isEqualToString:@""])
            {
                LBXScanResult *result = [LBXScanResult new];
                result.strScanned = scannedResult;
                result.strBarCodeType = current.type;
                [_arrayResult addObject:result];
            }
            // (several codes can be recognized in a single frame)
        }
    }
    // Nothing usable this frame: re-arm and wait for the next one.
    if (_arrayResult.count < 1)
    {
        bNeedScanResult = YES;
        return;
    }
    if (_isNeedCaputureImage)
    {
        // captureImage stops the scan and fires the callback once the frame arrives.
        [self captureImage];
    }
    else
    {
        [self stopScan];
        if (_blockScanResult) {
            _blockScanResult(_arrayResult);
        }
    }
}
  309. /**
  310. @brief 默认支持码的类别
  311. @return 支持类别 数组
  312. */
  313. - (NSArray *)defaultMetaDataObjectTypes
  314. {
  315. NSMutableArray *types = [@[AVMetadataObjectTypeQRCode,
  316. AVMetadataObjectTypeUPCECode,
  317. AVMetadataObjectTypeCode39Code,
  318. AVMetadataObjectTypeCode39Mod43Code,
  319. AVMetadataObjectTypeEAN13Code,
  320. AVMetadataObjectTypeEAN8Code,
  321. AVMetadataObjectTypeCode93Code,
  322. AVMetadataObjectTypeCode128Code,
  323. AVMetadataObjectTypePDF417Code,
  324. AVMetadataObjectTypeAztecCode] mutableCopy];
  325. if (floor(NSFoundationVersionNumber) > NSFoundationVersionNumber_iOS_8_0)
  326. {
  327. [types addObjectsFromArray:@[
  328. AVMetadataObjectTypeInterleaved2of5Code,
  329. AVMetadataObjectTypeITF14Code,
  330. AVMetadataObjectTypeDataMatrixCode
  331. ]];
  332. }
  333. return types;
  334. }
  335. #pragma mark --识别条码图片
  336. + (void)recognizeImage:(UIImage*)image success:(void(^)(NSArray<LBXScanResult*> *array))block;
  337. {
  338. if ([[[UIDevice currentDevice]systemVersion]floatValue] < 8.0 )
  339. {
  340. if (block) {
  341. LBXScanResult *result = [[LBXScanResult alloc]init];
  342. result.strScanned = @"只支持ios8.0之后系统";
  343. block(@[result]);
  344. }
  345. return;
  346. }
  347. CIDetector*detector = [CIDetector detectorOfType:CIDetectorTypeQRCode context:nil options:@{ CIDetectorAccuracy : CIDetectorAccuracyHigh }];
  348. NSArray *features = [detector featuresInImage:[CIImage imageWithCGImage:image.CGImage]];
  349. NSMutableArray<LBXScanResult*> *mutableArray = [[NSMutableArray alloc]initWithCapacity:1];
  350. for (int index = 0; index < [features count]; index ++)
  351. {
  352. CIQRCodeFeature *feature = [features objectAtIndex:index];
  353. NSString *scannedResult = feature.messageString;
  354. NSLog(@"result:%@",scannedResult);
  355. LBXScanResult *item = [[LBXScanResult alloc]init];
  356. item.strScanned = scannedResult;
  357. item.strBarCodeType = CIDetectorTypeQRCode;
  358. item.imgScanned = image;
  359. [mutableArray addObject:item];
  360. }
  361. if (block) {
  362. block(mutableArray);
  363. }
  364. }
  365. #pragma mark --生成条码
  366. //下面引用自 https://github.com/yourtion/Demo_CustomQRCode
  367. #pragma mark - InterpolatedUIImage
// Renders `image` into a grayscale bitmap of at most `size`×`size` pixels with
// interpolation disabled, so QR modules stay crisp instead of being blurred.
// CG/CF objects created here are released explicitly (CF is not under ARC).
+ (UIImage *)createNonInterpolatedUIImageFormCIImage:(CIImage *)image withSize:(CGFloat) size {
    CGRect extent = CGRectIntegral(image.extent);
    // Uniform scale that fits the image extent into `size` pixels.
    CGFloat scale = MIN(size/CGRectGetWidth(extent), size/CGRectGetHeight(extent));
    // Create the grayscale bitmap context.
    size_t width = CGRectGetWidth(extent) * scale;
    size_t height = CGRectGetHeight(extent) * scale;
    CGColorSpaceRef cs = CGColorSpaceCreateDeviceGray();
    CGContextRef bitmapRef = CGBitmapContextCreate(nil, width, height, 8, 0, cs, (CGBitmapInfo)kCGImageAlphaNone);
    CGColorSpaceRelease(cs);
    CIContext *context = [CIContext contextWithOptions:nil];
    CGImageRef bitmapImage = [context createCGImage:image fromRect:extent];
    CGContextSetInterpolationQuality(bitmapRef, kCGInterpolationNone); // nearest-neighbor
    CGContextScaleCTM(bitmapRef, scale, scale);
    CGContextDrawImage(bitmapRef, extent, bitmapImage);
    // Snapshot the bitmap into an image, then release all CF objects created above.
    CGImageRef scaledImage = CGBitmapContextCreateImage(bitmapRef);
    CGContextRelease(bitmapRef);
    CGImageRelease(bitmapImage);
    UIImage *aImage = [UIImage imageWithCGImage:scaledImage];
    CGImageRelease(scaledImage);
    return aImage;
}
  390. #pragma mark - QRCodeGenerator
  391. + (CIImage *)createQRForString:(NSString *)qrString {
  392. NSData *stringData = [qrString dataUsingEncoding:NSUTF8StringEncoding];
  393. // 创建filter
  394. CIFilter *qrFilter = [CIFilter filterWithName:@"CIQRCodeGenerator"];
  395. // 设置内容和纠错级别
  396. [qrFilter setValue:stringData forKey:@"inputMessage"];
  397. [qrFilter setValue:@"H" forKey:@"inputCorrectionLevel"];
  398. // 返回CIImage
  399. return qrFilter.outputImage;
  400. }
  401. #pragma mark - 生成二维码,背景色及二维码颜色设置
  402. + (UIImage*)createQRWithString:(NSString*)text QRSize:(CGSize)size
  403. {
  404. NSData *stringData = [text dataUsingEncoding: NSUTF8StringEncoding];
  405. //生成
  406. CIFilter *qrFilter = [CIFilter filterWithName:@"CIQRCodeGenerator"];
  407. [qrFilter setValue:stringData forKey:@"inputMessage"];
  408. [qrFilter setValue:@"H" forKey:@"inputCorrectionLevel"];
  409. CIImage *qrImage = qrFilter.outputImage;
  410. //绘制
  411. CGImageRef cgImage = [[CIContext contextWithOptions:nil] createCGImage:qrImage fromRect:qrImage.extent];
  412. UIGraphicsBeginImageContext(size);
  413. CGContextRef context = UIGraphicsGetCurrentContext();
  414. CGContextSetInterpolationQuality(context, kCGInterpolationNone);
  415. CGContextScaleCTM(context, 1.0, -1.0);
  416. CGContextDrawImage(context, CGContextGetClipBoundingBox(context), cgImage);
  417. UIImage *codeImage = UIGraphicsGetImageFromCurrentImageContext();
  418. UIGraphicsEndImageContext();
  419. CGImageRelease(cgImage);
  420. return codeImage;
  421. }
  422. //引用自:http://www.jianshu.com/p/e8f7a257b612
  423. + (UIImage*)createQRWithString:(NSString*)text QRSize:(CGSize)size QRColor:(UIColor*)qrColor bkColor:(UIColor*)bkColor
  424. {
  425. NSData *stringData = [text dataUsingEncoding: NSUTF8StringEncoding];
  426. //生成
  427. CIFilter *qrFilter = [CIFilter filterWithName:@"CIQRCodeGenerator"];
  428. [qrFilter setValue:stringData forKey:@"inputMessage"];
  429. [qrFilter setValue:@"H" forKey:@"inputCorrectionLevel"];
  430. //上色
  431. CIFilter *colorFilter = [CIFilter filterWithName:@"CIFalseColor"
  432. keysAndValues:
  433. @"inputImage",qrFilter.outputImage,
  434. @"inputColor0",[CIColor colorWithCGColor:qrColor.CGColor],
  435. @"inputColor1",[CIColor colorWithCGColor:bkColor.CGColor],
  436. nil];
  437. CIImage *qrImage = colorFilter.outputImage;
  438. //绘制
  439. CGImageRef cgImage = [[CIContext contextWithOptions:nil] createCGImage:qrImage fromRect:qrImage.extent];
  440. UIGraphicsBeginImageContext(size);
  441. CGContextRef context = UIGraphicsGetCurrentContext();
  442. CGContextSetInterpolationQuality(context, kCGInterpolationNone);
  443. CGContextScaleCTM(context, 1.0, -1.0);
  444. CGContextDrawImage(context, CGContextGetClipBoundingBox(context), cgImage);
  445. UIImage *codeImage = UIGraphicsGetImageFromCurrentImageContext();
  446. UIGraphicsEndImageContext();
  447. CGImageRelease(cgImage);
  448. return codeImage;
  449. }
  450. + (UIImage*)createBarCodeWithString:(NSString*)text QRSize:(CGSize)size
  451. {
  452. NSData *data = [text dataUsingEncoding:NSUTF8StringEncoding allowLossyConversion:false];
  453. CIFilter *filter = [CIFilter filterWithName:@"CICode128BarcodeGenerator"];
  454. [filter setValue:data forKey:@"inputMessage"];
  455. CIImage *barcodeImage = [filter outputImage];
  456. // 消除模糊
  457. CGFloat scaleX = size.width / barcodeImage.extent.size.width; // extent 返回图片的frame
  458. CGFloat scaleY = size.height / barcodeImage.extent.size.height;
  459. CIImage *transformedImage = [barcodeImage imageByApplyingTransform:CGAffineTransformScale(CGAffineTransformIdentity, scaleX, scaleY)];
  460. return [UIImage imageWithCIImage:transformedImage];
  461. }
  462. @end