diff --git a/LBXScan/LBXNative/LBXScanNative.h b/LBXScan/LBXNative/LBXScanNative.h index 5c06101..a4a3b74 100644 --- a/LBXScan/LBXNative/LBXScanNative.h +++ b/LBXScan/LBXNative/LBXScanNative.h @@ -23,6 +23,9 @@ @interface LBXScanNative : NSObject +/// 是否需要条码位置信息,默认NO,位置信息在LBXScanResult中返回 +@property (nonatomic, assign) BOOL needCodePosion; + #pragma mark --初始化 /** @brief 初始化采集相机 diff --git a/LBXScan/LBXNative/LBXScanNative.m b/LBXScan/LBXNative/LBXScanNative.m index f2ee17a..12ec559 100644 --- a/LBXScan/LBXNative/LBXScanNative.m +++ b/LBXScan/LBXNative/LBXScanNative.m @@ -110,6 +110,7 @@ - (void)initParaWithPreView:(UIView*)videoPreView success:(void(^)(NSArray *array))success { + self.needCodePosion = NO; self.blockvideoMaxScale = blockvideoMaxScale; self.arrayBarCodeType = objType; @@ -155,9 +156,7 @@ - (void)initParaWithPreView:(UIView*)videoPreView _session = [[AVCaptureSession alloc]init]; [_session setSessionPreset:AVCaptureSessionPresetHigh]; - // _session. - - // videoScaleAndCropFactor +// [_session setSessionPreset:AVCaptureSessionPreset1280x720]; if ([_session canAddInput:_input]) { @@ -174,9 +173,6 @@ - (void)initParaWithPreView:(UIView*)videoPreView [_session addOutput:_stillImageOutput]; } - - - // 条码类型 AVMetadataObjectTypeQRCode // _output.metadataObjectTypes =@[AVMetadataObjectTypeQRCode]; @@ -190,12 +186,15 @@ - (void)initParaWithPreView:(UIView*)videoPreView _preview =[AVCaptureVideoPreviewLayer layerWithSession:_session]; _preview.videoGravity = AVLayerVideoGravityResizeAspectFill; +// _preview + //_preview.frame =CGRectMake(20,110,280,280); CGRect frame = videoPreView.frame; frame.origin = CGPointZero; _preview.frame = frame; + [videoPreView.layer insertSublayer:self.preview atIndex:0]; @@ -208,19 +207,8 @@ - (void)initParaWithPreView:(UIView*)videoPreView _blockvideoMaxScale(maxScale); } - - -// CGFloat zoom = maxScale / 50; -// if (zoom < 1.0f || zoom > maxScale) -// { -// return; -// } -// videoConnection.videoScaleAndCropFactor += zoom; -// CGAffineTransform transform = videoPreView.transform; -// videoPreView.transform = CGAffineTransformScale(transform, zoom, zoom); - //先进行判断是否支持控制对焦,不开启自动对焦功能,很难识别二维码。 if (_device.isFocusPointOfInterestSupported &&[_device isFocusModeSupported:AVCaptureFocusModeAutoFocus]) { @@ -315,8 +303,6 @@ - (void)stopScan { bNeedScanResult = NO; [_session stopRunning]; - - // [self.preview removeFromSuperlayer]; } } @@ -447,6 +433,12 @@ - (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputMetadataObjects: [_arrayResult removeAllObjects]; } + if (_needCodePosion) { + [self stopScan]; + } + + metadataObjects = [self transformedCodesFromCodes:metadataObjects]; + //识别扫码类型 for(AVMetadataObject *current in metadataObjects) { @@ -458,61 +450,17 @@ - (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputMetadataObjects: NSString *scannedResult = [(AVMetadataMachineReadableCodeObject *) current stringValue]; NSArray *corners = ((AVMetadataMachineReadableCodeObject *) current).corners; - - NSLog(@"corners:%@",corners); - - CGFloat totalX = 0; - CGFloat totalY = 0; - for (NSDictionary* dic in corners) { - - NSNumber *numX = dic[@"X"]; - NSNumber *numY = dic[@"Y"]; - - totalX += numX.floatValue; - totalY += numY.floatValue; - } - - NSLog(@"aver X:%f Y:%f",totalX / corners.count,totalY / corners.count); - - - dispatch_async(dispatch_get_main_queue(), ^{ - // 更新界面 - [self videoNearCode:totalX / corners.count averY:totalY / corners.count]; - }); - - - [self stopScan]; - - // CGFloat y = 0; - // y = y + zoom > 1 ? 
zoom : -zoom; - // //移动 - // _videoPreView.transform = CGAffineTransformTranslate(_videoPreView.transform, 0, y); - - //y从下往上 -// ( -// { -// X = "0.3534338575269208"; -// Y = "0.7310508255651641"; -// }, -// { -// X = "0.4657595844499312"; -// Y = "0.7077938333219527"; -// }, -// { -// X = "0.4562285844779192"; -// Y = "0.509763749366999"; -// }, -// { -// X = "0.343985141844123"; -// Y = "0.5347846218594517"; -// } -// ) + CGRect bounds = ((AVMetadataMachineReadableCodeObject *) current).bounds; + + NSLog(@"corners:%@ bounds:%@",corners,NSStringFromCGRect( bounds )); if (scannedResult && ![scannedResult isEqualToString:@""]) { LBXScanResult *result = [LBXScanResult new]; result.strScanned = scannedResult; result.strBarCodeType = current.type; + result.corners = corners; + result.bounds = bounds; [_arrayResult addObject:result]; } @@ -523,13 +471,11 @@ - (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputMetadataObjects: if (_arrayResult.count < 1) { bNeedScanResult = YES; + [self startScan]; return; } - - return; - - if (_isNeedCaputureImage) + if (!_needCodePosion && _isNeedCaputureImage) { [self captureImage]; } @@ -543,8 +489,67 @@ - (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputMetadataObjects: } } -- (void)videoNearCode:(CGFloat)averX averY:(CGFloat)averY + + +- (NSArray *)transformedCodesFromCodes:(NSArray *)codes { + NSMutableArray *transformedCodes = [NSMutableArray array]; + [codes enumerateObjectsUsingBlock:^(id _Nonnull obj, NSUInteger idx, BOOL * _Nonnull stop) { + AVMetadataObject *transformedCode = [self.preview transformedMetadataObjectForMetadataObject:obj]; + [transformedCodes addObject:transformedCode]; + }]; + return [transformedCodes copy]; +} + +- (CGPoint)pointForCorner:(NSDictionary *)corner { + CGPoint point; + CGPointMakeWithDictionaryRepresentation((CFDictionaryRef)corner, &point); + return point; +} +- (void)handCorners:(NSArray *)corners bounds:(CGRect)bounds +{ + CGFloat totalX = 0; + CGFloat totalY = 0; + + for (NSDictionary *dic in corners) { + CGPoint pt = [self pointForCorner:dic]; + NSLog(@"pt:%@",NSStringFromCGPoint(pt)); + totalX += pt.x; + totalY += pt.y; + } + + CGFloat averX = totalX / corners.count; + CGFloat averY = totalY / corners.count; + + + + CGFloat minSize = MIN(bounds.size.width , bounds.size.height); + + NSLog(@"averx:%f,avery:%f minsize:%f",averX,averY,minSize); + + dispatch_async(dispatch_get_main_queue(), ^{ + + [self signCodeWithCenterX:averX centerY:averY]; + + }); +} + +- (void)signCodeWithCenterX:(CGFloat)centerX centerY:(CGFloat)centerY +{ + UIView *signView = [[UIView alloc]initWithFrame:CGRectMake(centerX-10, centerY-10, 20, 20)]; + + [self.videoPreView addSubview:signView]; + signView.backgroundColor = [UIColor redColor]; + +} + + +/// 条码可以放到到指定位置 (条码在边缘位置,放大及平移后,导致边缘是黑色 +/// @param averX averX descriptio +/// @param averY averY description +/// @param bounds bounds description +- (void)videoNearCode:(CGFloat)averX averY:(CGFloat)averY bounds:(CGRect)bounds { + CGFloat minSize = MIN(bounds.size.width , bounds.size.height); // CGFloat y = 0; // y = y + zoom > 1 ? 
zoom : -zoom; // //移动 @@ -556,70 +561,47 @@ - (void)videoNearCode:(CGFloat)averX averY:(CGFloat)averY CGFloat centerX = width / 2; CGFloat centerY = height / 2; - CGFloat diffX = centerX - averX * width; - CGFloat diffY = centerY - averY * height; + CGFloat diffX = centerX - averX; + CGFloat diffY = centerY - averY; //计算二维码尺寸,然后计算放大比例 - CGFloat scale = 1.2; - -// diffX = diffX /scale; - diffY = diffY /scale; - - NSLog(@"diffX:%f,diffY:%f",diffX,diffY); + CGFloat scale = 100 / minSize * 1.1; -// self.videoPreView.layer.anchorPoint = CGPointMake(width * averX, height * averY); - - - - -// [_input.device lockForConfiguration:nil]; -// -// -// AVCaptureConnection *videoConnection = [self connectionWithMediaType:AVMediaTypeVideo fromConnections:[[self stillImageOutput] connections]]; -// -// -// if (scale < 1 || scale > videoConnection.videoMaxScaleAndCropFactor ) { -// return; -// } -// -// CGFloat zoom = scale / videoConnection.videoScaleAndCropFactor; -// // NSLog(@"max :%f",videoConnection.videoMaxScaleAndCropFactor); -// -// -// videoConnection.videoScaleAndCropFactor = scale; -// -// [_input.device unlockForConfiguration]; -// -// -// self.videoPreView.layer.anchorPoint = CGPointMake(0.0f, 0.0f); -// -// CGAffineTransform transform = _videoPreView.transform; -// -// [UIView animateWithDuration:0.3 animations:^{ -// -// self.videoPreView.transform = CGAffineTransformScale(transform, zoom, zoom); -// }]; - - - - -// [UIView animateWithDuration:0.3 animations:^{ -// -// -// self.videoPreView.transform = CGAffineTransformMakeTranslation(diffX, diffY); -// }]; + NSLog(@"diffX:%f,diffY:%f,scale:%f",diffX,diffY,scale); + diffX = diffX / MAX(1, scale * 0.8); + diffY = diffY / MAX(1, scale * 0.8); - [UIView animateWithDuration:0.3 animations:^{ - - - self.videoPreView.transform = CGAffineTransformTranslate(self.videoPreView.transform,diffX , 0); - }]; -// - - - + if (scale > 1) { + + [_input.device lockForConfiguration:nil]; + + AVCaptureConnection *videoConnection = [self connectionWithMediaType:AVMediaTypeVideo fromConnections:[[self stillImageOutput] connections]]; + + + if (scale < 1 || scale > videoConnection.videoMaxScaleAndCropFactor ) { + return; + } + + CGFloat zoom = scale / videoConnection.videoScaleAndCropFactor; + + videoConnection.videoScaleAndCropFactor = scale; + + [_input.device unlockForConfiguration]; + + CGAffineTransform transform = _videoPreView.transform; + + [UIView animateWithDuration:0.3 animations:^{ + + self.videoPreView.transform = CGAffineTransformScale(transform, zoom, zoom); + }]; + + [UIView animateWithDuration:0.3 animations:^{ + + self.videoPreView.transform = CGAffineTransformTranslate(self.videoPreView.transform,diffX , diffY); + }]; + } } diff --git a/LBXScan/LBXScanTypes.h b/LBXScan/LBXScanTypes.h index 9b9d063..4b983d1 100644 --- a/LBXScan/LBXScanTypes.h +++ b/LBXScan/LBXScanTypes.h @@ -32,5 +32,11 @@ */ @property (nonatomic, copy) NSString* strBarCodeType; +//条码4个角 +@property (nonatomic, copy) NSArray *corners; + +//没有corners精确 +@property (nonatomic, assign) CGRect bounds; + @end diff --git a/LBXScan/LBXScanTypes.m b/LBXScan/LBXScanTypes.m index 2760fc6..07e4ed4 100644 --- a/LBXScan/LBXScanTypes.m +++ b/LBXScan/LBXScanTypes.m @@ -12,6 +12,7 @@ - (instancetype)initWithScanString:(NSString*)str imgScan:(UIImage*)img barCodeT self.strScanned = str; self.imgScanned = img; self.strBarCodeType = type; + self.bounds = CGRectZero; } return self; diff --git a/LBXScan/LBXZXing/LBXZXCapture.m b/LBXScan/LBXZXing/LBXZXCapture.m index 085d185..49c407b 100755 --- 
a/LBXScan/LBXZXing/LBXZXCapture.m +++ b/LBXScan/LBXZXing/LBXZXCapture.m @@ -94,86 +94,86 @@ - (void)dealloc { #pragma mark - Property Getters - (CALayer *)layer { - AVCaptureVideoPreviewLayer *layer = (AVCaptureVideoPreviewLayer *)_layer; - if (!_layer) { - layer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:self.session]; - layer.affineTransform = self.transform; - layer.delegate = self; - layer.videoGravity = AVLayerVideoGravityResizeAspect; - layer.videoGravity = AVLayerVideoGravityResizeAspectFill; - - _layer = layer; - } - return layer; + AVCaptureVideoPreviewLayer *layer = (AVCaptureVideoPreviewLayer *)_layer; + if (!_layer) { + layer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:self.session]; + layer.affineTransform = self.transform; + layer.delegate = self; + layer.videoGravity = AVLayerVideoGravityResizeAspect; + layer.videoGravity = AVLayerVideoGravityResizeAspectFill; + + _layer = layer; + } + return layer; } - (AVCaptureVideoDataOutput *)output { - if (!_output) { - _output = [[AVCaptureVideoDataOutput alloc] init]; - [_output setVideoSettings:@{ - (NSString *)kCVPixelBufferPixelFormatTypeKey : [NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA] - }]; - [_output setAlwaysDiscardsLateVideoFrames:YES]; - [_output setSampleBufferDelegate:self queue:_captureQueue]; - - [self.session addOutput:_output]; - } - - return _output; + if (!_output) { + _output = [[AVCaptureVideoDataOutput alloc] init]; + [_output setVideoSettings:@{ + (NSString *)kCVPixelBufferPixelFormatTypeKey : [NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA] + }]; + [_output setAlwaysDiscardsLateVideoFrames:YES]; + [_output setSampleBufferDelegate:self queue:_captureQueue]; + + [self.session addOutput:_output]; + } + + return _output; } #pragma mark - Property Setters - (void)setCamera:(int)camera { - if (_camera != camera) { - _camera = camera; - self.captureDeviceIndex = -1; - self.captureDevice = nil; - [self replaceInput]; - } + if (_camera != camera) { + _camera = camera; + self.captureDeviceIndex = -1; + self.captureDevice = nil; + [self replaceInput]; + } } - (void)setDelegate:(id)delegate { - _delegate = delegate; - -// if (delegate) { -// self.hardStop = NO; -// } -// [self startStop]; + _delegate = delegate; + + // if (delegate) { + // self.hardStop = NO; + // } + // [self startStop]; } - (void)setFocusMode:(AVCaptureFocusMode)focusMode { - if ([self.input.device isFocusModeSupported:focusMode] && self.input.device.focusMode != focusMode) { - _focusMode = focusMode; - - [self.input.device lockForConfiguration:nil]; - self.input.device.focusMode = focusMode; - [self.input.device unlockForConfiguration]; - } + if ([self.input.device isFocusModeSupported:focusMode] && self.input.device.focusMode != focusMode) { + _focusMode = focusMode; + + [self.input.device lockForConfiguration:nil]; + self.input.device.focusMode = focusMode; + [self.input.device unlockForConfiguration]; + } } - (void)setLastScannedImage:(CGImageRef)lastScannedImage { - if (_lastScannedImage) { - CGImageRelease(_lastScannedImage); - } - - if (lastScannedImage) { - CGImageRetain(lastScannedImage); - } - - _lastScannedImage = lastScannedImage; + if (_lastScannedImage) { + CGImageRelease(_lastScannedImage); + } + + if (lastScannedImage) { + CGImageRetain(lastScannedImage); + } + + _lastScannedImage = lastScannedImage; } - (void)setMirror:(BOOL)mirror { - if (_mirror != mirror) { - _mirror = mirror; - if (self.layer) { - CGAffineTransform transform = self.transform; - transform.a = - transform.a; - self.transform 
= transform; - [self.layer setAffineTransform:self.transform]; + if (_mirror != mirror) { + _mirror = mirror; + if (self.layer) { + CGAffineTransform transform = self.transform; + transform.a = - transform.a; + self.transform = transform; + [self.layer setAffineTransform:self.transform]; + } } - } } - (void)setTorch:(BOOL)torch { @@ -210,79 +210,79 @@ - (void)changeTorch - (void)setTransform:(CGAffineTransform)transform { - _transform = transform; - [self.layer setAffineTransform:transform]; + _transform = transform; + [self.layer setAffineTransform:transform]; } #pragma mark - Back, Front, Torch - (int)back { - return 1; + return 1; } - (int)front { - return 0; + return 0; } - (BOOL)hasFront { - NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]; - return [devices count] > 1; + NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]; + return [devices count] > 1; } - (BOOL)hasBack { - NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]; - return [devices count] > 0; + NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]; + return [devices count] > 0; } - (BOOL)hasTorch { - if ([self device]) { - return [self device].hasTorch; - } else { - return NO; - } + if ([self device]) { + return [self device].hasTorch; + } else { + return NO; + } } #pragma mark - Binary - (CALayer *)binary { - return self.binaryLayer; + return self.binaryLayer; } - (void)setBinary:(BOOL)on { - if (on && !self.binaryLayer) { - self.binaryLayer = [CALayer layer]; - } else if (!on && self.binaryLayer) { - self.binaryLayer = nil; - } + if (on && !self.binaryLayer) { + self.binaryLayer = [CALayer layer]; + } else if (!on && self.binaryLayer) { + self.binaryLayer = nil; + } } #pragma mark - Luminance - (CALayer *)luminance { - return self.luminanceLayer; + return self.luminanceLayer; } - (void)setLuminance:(BOOL)on { - if (on && !self.luminanceLayer) { - self.luminanceLayer = [CALayer layer]; - } else if (!on && self.luminanceLayer) { - self.luminanceLayer = nil; - } + if (on && !self.luminanceLayer) { + self.luminanceLayer = [CALayer layer]; + } else if (!on && self.luminanceLayer) { + self.luminanceLayer = nil; + } } #pragma mark - Start, Stop - (void)hard_stop { - self.hardStop = YES; - - if (self.running) { - [self stop]; - } + self.hardStop = YES; + + if (self.running) { + [self stop]; + } } - (void)order_skip { - self.orderInSkip = 1; - self.orderOutSkip = 1; + self.orderInSkip = 1; + self.orderOutSkip = 1; } - (void)start { @@ -312,15 +312,15 @@ - (void)start { - (void)stop { - if (!self.running) { - return; - } - - if (self.session.running) { - // [self.layer removeFromSuperlayer]; - [self.session stopRunning]; - } - self.running = NO; + if (!self.running) { + return; + } + + if (self.session.running) { + // [self.layer removeFromSuperlayer]; + [self.session stopRunning]; + } + self.running = NO; } @@ -328,33 +328,33 @@ - (void)stop #pragma mark - CAAction - (id)actionForLayer:(CALayer *)_layer forKey:(NSString *)event { - [CATransaction setValue:[NSNumber numberWithFloat:0.0f] forKey:kCATransactionAnimationDuration]; - - if ([event isEqualToString:kCAOnOrderIn] || [event isEqualToString:kCAOnOrderOut]) { - return self; - } - - return nil; + [CATransaction setValue:[NSNumber numberWithFloat:0.0f] forKey:kCATransactionAnimationDuration]; + + if ([event isEqualToString:kCAOnOrderIn] || [event isEqualToString:kCAOnOrderOut]) { + return self; + } + + return nil; } - (void)runActionForKey:(NSString *)key object:(id)anObject 
arguments:(NSDictionary *)dict { - if ([key isEqualToString:kCAOnOrderIn]) { - if (self.orderInSkip) { - self.orderInSkip--; - return; - } - - self.onScreen = YES; -// [self startStop]; - } else if ([key isEqualToString:kCAOnOrderOut]) { - if (self.orderOutSkip) { - self.orderOutSkip--; - return; + if ([key isEqualToString:kCAOnOrderIn]) { + if (self.orderInSkip) { + self.orderInSkip--; + return; + } + + self.onScreen = YES; + // [self startStop]; + } else if ([key isEqualToString:kCAOnOrderOut]) { + if (self.orderOutSkip) { + self.orderOutSkip--; + return; + } + + self.onScreen = NO; + // [self startStop]; } - - self.onScreen = NO; -// [self startStop]; - } } #pragma mark - AVCaptureVideoDataOutputSampleBufferDelegate @@ -364,99 +364,102 @@ - (void)captureOutput:(AVCaptureOutput *)captureOutput fromConnection:(AVCaptureConnection *)connection { - @autoreleasepool { - if (!self.cameraIsReady) { - self.cameraIsReady = YES; - if ([self.delegate respondsToSelector:@selector(captureCameraIsReady:)]) { - dispatch_async(dispatch_get_main_queue(), ^{ - [self.delegate LBXCaptureCameraIsReady:self]; - }); - } - } - - if (!self.captureToFilename && !self.luminanceLayer && !self.binaryLayer && !self.delegate) { - return; - } - - CVImageBufferRef videoFrame = CMSampleBufferGetImageBuffer(sampleBuffer); - - CGImageRef videoFrameImage = [ZXCGImageLuminanceSource createImageFromBuffer:videoFrame]; - CGImageRef rotatedImage = [self createRotatedImage:videoFrameImage degrees:self.rotation]; - CGImageRelease(videoFrameImage); - - // If scanRect is set, crop the current image to include only the desired rect - if (!CGRectIsEmpty(self.scanRect)) { - CGImageRef croppedImage = CGImageCreateWithImageInRect(rotatedImage, self.scanRect); - CFRelease(rotatedImage); - rotatedImage = croppedImage; - } - - self.lastScannedImage = rotatedImage; - - if (self.captureToFilename) { - NSURL *url = [NSURL fileURLWithPath:self.captureToFilename]; - CGImageDestinationRef dest = CGImageDestinationCreateWithURL((__bridge CFURLRef)url, (__bridge CFStringRef)@"public.png", 1, nil); - CGImageDestinationAddImage(dest, rotatedImage, nil); - CGImageDestinationFinalize(dest); - CFRelease(dest); - self.captureToFilename = nil; - } - - ZXCGImageLuminanceSource *source = [[ZXCGImageLuminanceSource alloc] initWithCGImage:rotatedImage]; - CGImageRelease(rotatedImage); - - if (self.luminanceLayer) { - CGImageRef image = source.image; - CGImageRetain(image); - dispatch_after(dispatch_time(DISPATCH_TIME_NOW, 0), dispatch_get_main_queue(), ^{ - self.luminanceLayer.contents = (__bridge id)image; - CGImageRelease(image); - }); - } - - - if (self.binaryLayer || self.delegate) - { - ZXHybridBinarizer *binarizer = [[ZXHybridBinarizer alloc] initWithSource:self.invert ? 
[source invert] : source]; - - if (self.binaryLayer) - { - CGImageRef image = [binarizer createImage]; - dispatch_after(dispatch_time(DISPATCH_TIME_NOW, 0), dispatch_get_main_queue(), ^{ - self.binaryLayer.contents = (__bridge id)image; - CGImageRelease(image); - }); - - } - - if (self.delegate) - { - static NSInteger sampleBufferNums = 0; - - sampleBufferNums++; - - if (sampleBufferNums != 2) { - return; - } - - NSLog(@"sampleBufferNums"); - sampleBufferNums = 0; - - ZXBinaryBitmap *bitmap = [[ZXBinaryBitmap alloc] initWithBinarizer:binarizer]; - - NSError *error; - ZXResult *result = [self.reader decode:bitmap hints:self.hints error:&error]; - if (result) { -// CGImageRef iOffscreen = CGBitmapContextCreateImage(context); - UIImage* image = [UIImage imageWithCGImage: _lastScannedImage]; + @autoreleasepool { + if (!self.cameraIsReady) { + self.cameraIsReady = YES; + if ([self.delegate respondsToSelector:@selector(captureCameraIsReady:)]) { + dispatch_async(dispatch_get_main_queue(), ^{ + [self.delegate LBXCaptureCameraIsReady:self]; + }); + } + } + + if (!self.captureToFilename && !self.luminanceLayer && !self.binaryLayer && !self.delegate) { + return; + } + + CVImageBufferRef videoFrame = CMSampleBufferGetImageBuffer(sampleBuffer); + + CGImageRef videoFrameImage = [ZXCGImageLuminanceSource createImageFromBuffer:videoFrame]; + CGImageRef rotatedImage = [self createRotatedImage:videoFrameImage degrees:self.rotation]; + CGImageRelease(videoFrameImage); + + // If scanRect is set, crop the current image to include only the desired rect + if (!CGRectIsEmpty(self.scanRect)) { + CGImageRef croppedImage = CGImageCreateWithImageInRect(rotatedImage, self.scanRect); + CFRelease(rotatedImage); + rotatedImage = croppedImage; + } + + self.lastScannedImage = rotatedImage; + + if (self.captureToFilename) { + NSURL *url = [NSURL fileURLWithPath:self.captureToFilename]; + CGImageDestinationRef dest = CGImageDestinationCreateWithURL((__bridge CFURLRef)url, (__bridge CFStringRef)@"public.png", 1, nil); + CGImageDestinationAddImage(dest, rotatedImage, nil); + CGImageDestinationFinalize(dest); + CFRelease(dest); + self.captureToFilename = nil; + } + + ZXCGImageLuminanceSource *source = [[ZXCGImageLuminanceSource alloc] initWithCGImage:rotatedImage]; + CGImageRelease(rotatedImage); + + if (self.luminanceLayer) { + CGImageRef image = source.image; + CGImageRetain(image); + dispatch_after(dispatch_time(DISPATCH_TIME_NOW, 0), dispatch_get_main_queue(), ^{ + self.luminanceLayer.contents = (__bridge id)image; + CGImageRelease(image); + }); + } + + + if (self.binaryLayer || self.delegate) + { + ZXHybridBinarizer *binarizer = [[ZXHybridBinarizer alloc] initWithSource:self.invert ? 
[source invert] : source]; - dispatch_async(dispatch_get_main_queue(), ^{ - [self.delegate captureResult:self result:result scanImage:image]; - }); + if (self.binaryLayer) + { + CGImageRef image = [binarizer createImage]; + dispatch_after(dispatch_time(DISPATCH_TIME_NOW, 0), dispatch_get_main_queue(), ^{ + self.binaryLayer.contents = (__bridge id)image; + CGImageRelease(image); + }); + + } + + if (self.delegate) + { + static NSInteger sampleBufferNums = 0; + + sampleBufferNums++; + + if (sampleBufferNums != 2) { + return; + } + + NSLog(@"sampleBufferNums"); + sampleBufferNums = 0; + + ZXBinaryBitmap *bitmap = [[ZXBinaryBitmap alloc] initWithBinarizer:binarizer]; + + NSError *error; + ZXResult *result = [self.reader decode:bitmap hints:self.hints error:&error]; + if (result) { + // CGImageRef iOffscreen = CGBitmapContextCreateImage(context); + + + // NSLog(@"%@",result.resultPoints); + + dispatch_async(dispatch_get_main_queue(), ^{ + UIImage* image = [UIImage imageWithCGImage: self.lastScannedImage]; + [self.delegate captureResult:self result:result scanImage:image]; + }); + } + } } - } } - } } //+ (ZXResult *)regocnizeImage:(UIImage*)image @@ -486,136 +489,136 @@ - (void)captureOutput:(AVCaptureOutput *)captureOutput // Adapted from http://blog.coriolis.ch/2009/09/04/arbitrary-rotation-of-a-cgimage/ and https://github.com/JanX2/CreateRotateWriteCGImage - (CGImageRef)createRotatedImage:(CGImageRef)original degrees:(float)degrees CF_RETURNS_RETAINED { - if (degrees == 0.0f) { - CGImageRetain(original); - return original; - } else { - double radians = degrees * M_PI / 180; - + if (degrees == 0.0f) { + CGImageRetain(original); + return original; + } else { + double radians = degrees * M_PI / 180; + #if TARGET_OS_EMBEDDED || TARGET_IPHONE_SIMULATOR - radians = -1 * radians; + radians = -1 * radians; #endif - - size_t _width = CGImageGetWidth(original); - size_t _height = CGImageGetHeight(original); - - CGRect imgRect = CGRectMake(0, 0, _width, _height); - CGAffineTransform __transform = CGAffineTransformMakeRotation(radians); - CGRect rotatedRect = CGRectApplyAffineTransform(imgRect, __transform); - - CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB(); - CGContextRef context = CGBitmapContextCreate(NULL, - rotatedRect.size.width, - rotatedRect.size.height, - CGImageGetBitsPerComponent(original), - 0, - colorSpace, - kCGBitmapAlphaInfoMask & kCGImageAlphaPremultipliedFirst); - CGContextSetAllowsAntialiasing(context, FALSE); - CGContextSetInterpolationQuality(context, kCGInterpolationNone); - CGColorSpaceRelease(colorSpace); - - CGContextTranslateCTM(context, - +(rotatedRect.size.width/2), - +(rotatedRect.size.height/2)); - CGContextRotateCTM(context, radians); - - CGContextDrawImage(context, CGRectMake(-imgRect.size.width/2, - -imgRect.size.height/2, - imgRect.size.width, - imgRect.size.height), - original); - - CGImageRef rotatedImage = CGBitmapContextCreateImage(context); - CFRelease(context); - - return rotatedImage; - } + + size_t _width = CGImageGetWidth(original); + size_t _height = CGImageGetHeight(original); + + CGRect imgRect = CGRectMake(0, 0, _width, _height); + CGAffineTransform __transform = CGAffineTransformMakeRotation(radians); + CGRect rotatedRect = CGRectApplyAffineTransform(imgRect, __transform); + + CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB(); + CGContextRef context = CGBitmapContextCreate(NULL, + rotatedRect.size.width, + rotatedRect.size.height, + CGImageGetBitsPerComponent(original), + 0, + colorSpace, + kCGBitmapAlphaInfoMask & 
kCGImageAlphaPremultipliedFirst); + CGContextSetAllowsAntialiasing(context, FALSE); + CGContextSetInterpolationQuality(context, kCGInterpolationNone); + CGColorSpaceRelease(colorSpace); + + CGContextTranslateCTM(context, + +(rotatedRect.size.width/2), + +(rotatedRect.size.height/2)); + CGContextRotateCTM(context, radians); + + CGContextDrawImage(context, CGRectMake(-imgRect.size.width/2, + -imgRect.size.height/2, + imgRect.size.width, + imgRect.size.height), + original); + + CGImageRef rotatedImage = CGBitmapContextCreateImage(context); + CFRelease(context); + + return rotatedImage; + } } - (AVCaptureDevice *)device { - if (self.captureDevice) { - return self.captureDevice; - } - - AVCaptureDevice *zxd = nil; - - NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]; - - if ([devices count] > 0) { - if (self.captureDeviceIndex == -1) { - AVCaptureDevicePosition position = AVCaptureDevicePositionBack; - if (self.camera == self.front) { - position = AVCaptureDevicePositionFront; - } - - for (unsigned int i = 0; i < [devices count]; ++i) { - AVCaptureDevice *dev = [devices objectAtIndex:i]; - if (dev.position == position) { - self.captureDeviceIndex = i; - zxd = dev; - break; + if (self.captureDevice) { + return self.captureDevice; + } + + AVCaptureDevice *zxd = nil; + + NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]; + + if ([devices count] > 0) { + if (self.captureDeviceIndex == -1) { + AVCaptureDevicePosition position = AVCaptureDevicePositionBack; + if (self.camera == self.front) { + position = AVCaptureDevicePositionFront; + } + + for (unsigned int i = 0; i < [devices count]; ++i) { + AVCaptureDevice *dev = [devices objectAtIndex:i]; + if (dev.position == position) { + self.captureDeviceIndex = i; + zxd = dev; + break; + } + } + } + + if (!zxd && self.captureDeviceIndex != -1) { + zxd = [devices objectAtIndex:self.captureDeviceIndex]; } - } } - - if (!zxd && self.captureDeviceIndex != -1) { - zxd = [devices objectAtIndex:self.captureDeviceIndex]; + + if (!zxd) { + zxd = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo]; } - } - - if (!zxd) { - zxd = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo]; - } - - self.captureDevice = zxd; - - return zxd; + + self.captureDevice = zxd; + + return zxd; } - (void)replaceInput { - [self.session beginConfiguration]; - - if (self.session && self.input) { - [self.session removeInput:self.input]; - self.input = nil; - } - - AVCaptureDevice *zxd = [self device]; - - if (zxd) { - self.input = [AVCaptureDeviceInput deviceInputWithDevice:zxd error:nil]; - self.focusMode = self.focusMode; - } - - if (self.input) { - self.session.sessionPreset = self.sessionPreset; - [self.session addInput:self.input]; - } - - [self.session commitConfiguration]; + [self.session beginConfiguration]; + + if (self.session && self.input) { + [self.session removeInput:self.input]; + self.input = nil; + } + + AVCaptureDevice *zxd = [self device]; + + if (zxd) { + self.input = [AVCaptureDeviceInput deviceInputWithDevice:zxd error:nil]; + self.focusMode = self.focusMode; + } + + if (self.input) { + self.session.sessionPreset = self.sessionPreset; + [self.session addInput:self.input]; + } + + [self.session commitConfiguration]; } - (AVCaptureSession *)session { - if (!_session) { - _session = [[AVCaptureSession alloc] init]; - [self replaceInput]; - } - - return _session; + if (!_session) { + _session = [[AVCaptureSession alloc] init]; + [self replaceInput]; + } + + return _session; } - (void)startStop { - if 
((!self.running && (self.delegate || self.onScreen)) || - (!self.output && - (self.delegate || - (self.onScreen && (self.luminanceLayer || self.binaryLayer))))) { - [self start]; - } - - if (self.running && !self.delegate && !self.onScreen) { - [self stop]; - } + if ((!self.running && (self.delegate || self.onScreen)) || + (!self.output && + (self.delegate || + (self.onScreen && (self.luminanceLayer || self.binaryLayer))))) { + [self start]; + } + + if (self.running && !self.delegate && !self.onScreen) { + [self stop]; + } } @end diff --git a/LBXScan/LBXZXing/LBXZXCaptureDelegate.h b/LBXScan/LBXZXing/LBXZXCaptureDelegate.h index db66882..c43d948 100755 --- a/LBXScan/LBXZXing/LBXZXCaptureDelegate.h +++ b/LBXScan/LBXZXing/LBXZXCaptureDelegate.h @@ -21,6 +21,7 @@ - (void)captureResult:(LBXZXCapture *)capture result:(ZXResult *)result scanImage:(UIImage*)img; + @optional - (void)LBXCaptureSize:(LBXZXCapture *)capture width:(NSNumber *)width diff --git a/LBXScan/LBXZXing/ZXingWrapper.h b/LBXScan/LBXZXing/ZXingWrapper.h index e8c1648..c355e8e 100644 --- a/LBXScan/LBXZXing/ZXingWrapper.h +++ b/LBXScan/LBXZXing/ZXingWrapper.h @@ -31,6 +31,16 @@ +/** +初始化ZXing + +@param preView 视频预览视图 +@param success 返回识别结果,resultPoints 表示条码在图像scanImg上的坐标 +@return 返回封装对象 +*/ +- (id)initWithPreView:(UIView*)preView success:(void(^)(ZXBarcodeFormat barcodeFormat,NSString *str,UIImage *scanImg,NSArray* resultPoints))success; + + /** 设置识别区域,不设置默认全屏识别 diff --git a/LBXScan/LBXZXing/ZXingWrapper.m b/LBXScan/LBXZXing/ZXingWrapper.m index 7a39672..8768ad4 100644 --- a/LBXScan/LBXZXing/ZXingWrapper.m +++ b/LBXScan/LBXZXing/ZXingWrapper.m @@ -12,12 +12,12 @@ #import "LBXZXCapture.h" -typedef void(^blockScan)(ZXBarcodeFormat barcodeFormat,NSString *str,UIImage *scanImg); - @interface ZXingWrapper() @property (nonatomic, strong) LBXZXCapture *capture; -@property (nonatomic,copy)blockScan block; +@property (nonatomic, copy) void (^success)(ZXBarcodeFormat barcodeFormat,NSString *str,UIImage *scanImg); + +@property (nonatomic, copy) void (^onSuccess)(ZXBarcodeFormat barcodeFormat,NSString *str,UIImage *scanImg,NSArray* resultPoints); @property (nonatomic, assign) BOOL bNeedScanResult; @@ -51,7 +51,32 @@ - (id)initWithPreView:(UIView*)preView block:(void(^)(ZXBarcodeFormat barcodeFor self.capture.delegate = self; - self.block = block; + self.success = block; + + CGRect rect = preView.frame; + rect.origin = CGPointZero; + + self.capture.layer.frame = rect; + //[preView.layer addSublayer:self.capture.layer]; + + [preView.layer insertSublayer:self.capture.layer atIndex:0]; + + } + return self; +} + +- (id)initWithPreView:(UIView*)preView success:(void(^)(ZXBarcodeFormat barcodeFormat,NSString *str,UIImage *scanImg,NSArray* resultPoints))success +{ + if (self = [super init]) { + + self.capture = [[LBXZXCapture alloc] init]; + self.capture.camera = self.capture.back; + self.capture.focusMode = AVCaptureFocusModeContinuousAutoFocus; + self.capture.rotation = 90.0f; + + self.capture.delegate = self; + + self.onSuccess = success; CGRect rect = preView.frame; rect.origin = CGPointZero; @@ -103,12 +128,15 @@ - (void)captureResult:(ZXCapture *)capture result:(ZXResult *)result scanImage:( return; } + + [self stop]; - if ( _block ) + if (_onSuccess) { + _onSuccess(result.barcodeFormat,result.text,img,result.resultPoints); + } + else if ( _success ) { - [self stop]; - - _block(result.barcodeFormat,result.text,img); + _success(result.barcodeFormat,result.text,img); } } diff --git a/LBXScanDemo/DIYScanViewController/LBXScanBaseViewController.h 
b/LBXScanDemo/DIYScanViewController/LBXScanBaseViewController.h index f48a63d..1221c6c 100644 --- a/LBXScanDemo/DIYScanViewController/LBXScanBaseViewController.h +++ b/LBXScanDemo/DIYScanViewController/LBXScanBaseViewController.h @@ -42,6 +42,10 @@ */ @property (nonatomic,strong) LBXScanView* qRScanView; +//条码识别位置标示 +@property (nonatomic, strong) UIView *codeFlagView; +@property (nonatomic, strong) NSArray *layers; + /** @brief 扫码存储的当前图片 */ @@ -53,7 +57,8 @@ */ @property(nonatomic,assign)BOOL isOpenFlash; - +//相机预览 +@property (nonatomic, strong) UIView *cameraPreView; @@ -62,6 +67,8 @@ - (void)scanResultWithArray:(NSArray*)array; +- (void)resetCodeFlagView; + - (void)requestCameraPemissionWithResult:(void(^)( BOOL granted))completion; + (void)authorizePhotoPermissionWithCompletion:(void(^)(BOOL granted,BOOL firstTime))completion; @end diff --git a/LBXScanDemo/DIYScanViewController/LBXScanBaseViewController.m b/LBXScanDemo/DIYScanViewController/LBXScanBaseViewController.m index 1490861..a05bd4c 100644 --- a/LBXScanDemo/DIYScanViewController/LBXScanBaseViewController.m +++ b/LBXScanDemo/DIYScanViewController/LBXScanBaseViewController.m @@ -20,6 +20,10 @@ @implementation LBXScanBaseViewController - (void)viewDidLoad { [super viewDidLoad]; // Do any additional setup after loading the view. + if ([self respondsToSelector:@selector(setEdgesForExtendedLayout:)]) { + + self.edgesForExtendedLayout = UIRectEdgeNone; + } } @@ -52,22 +56,109 @@ - (void)scanResultWithArray:(NSArray*)array return; } + [self.qRScanView stopScanAnimation]; + self.scanImage = scanResult.imgScanned; - + //TODO: 这里可以根据需要添加震动或播放成功提醒音等提示相关代码 //... - [self showNextVCWithScanResult:scanResult]; + //TODO:表示二维码位置 + //ZXing在开启区域识别后,当前计算方式不准确 + if (!self.isOpenInterestRect && self.cameraPreView && !CGRectEqualToRect(CGRectZero, scanResult.bounds) ) { + + CGFloat centerX = scanResult.bounds.origin.x + scanResult.bounds.size.width / 2; + CGFloat centerY = scanResult.bounds.origin.y + scanResult.bounds.size.height / 2; + + [self signCodeWithCenterX:centerX centerY:centerY]; + + [self didDetectCodes:scanResult.bounds corner:scanResult.corners]; + + dispatch_after(dispatch_time(DISPATCH_TIME_NOW, (int64_t)(2 * NSEC_PER_SEC)), dispatch_get_main_queue(), ^{ + dispatch_async(dispatch_get_main_queue(), ^{ + [self showNextVCWithScanResult:scanResult]; + }); + }); + } + else + { + [self showNextVCWithScanResult:scanResult]; + } + +} + + +- (CGPoint)pointForCorner:(NSDictionary *)corner { + CGPoint point; + CGPointMakeWithDictionaryRepresentation((CFDictionaryRef)corner, &point); + return point; } + +- (void)handCorners:(NSArray *)corners bounds:(CGRect)bounds +{ + CGFloat totalX = 0; + CGFloat totalY = 0; + + for (NSDictionary *dic in corners) { + CGPoint pt = [self pointForCorner:dic]; + NSLog(@"pt:%@",NSStringFromCGPoint(pt)); + totalX += pt.x; + totalY += pt.y; + } + + CGFloat averX = totalX / corners.count; + CGFloat averY = totalY / corners.count; + + + + CGFloat minSize = MIN(bounds.size.width , bounds.size.height); + + NSLog(@"averx:%f,avery:%f minsize:%f",averX,averY,minSize); + + dispatch_async(dispatch_get_main_queue(), ^{ + + [self signCodeWithCenterX:averX centerY:averY]; + + }); +} + +- (void)signCodeWithCenterX:(CGFloat)centerX centerY:(CGFloat)centerY +{ + UIView *signView = [[UIView alloc]initWithFrame:CGRectMake(centerX-10, centerY-10, 20, 20)]; + + [self.cameraPreView addSubview:signView]; + signView.backgroundColor = [UIColor redColor]; + + self.codeFlagView = signView; +} + + + //继承者实现 - (void)reStartDevice { } +- 
(void)resetCodeFlagView +{ + if (_codeFlagView) { + [_codeFlagView removeFromSuperview]; + self.codeFlagView = nil; + } + if (self.layers) { + + for (CALayer *layer in self.layers) { + [layer removeFromSuperlayer]; + } + + self.layers = nil; + } +} + - (void)showNextVCWithScanResult:(LBXScanResult*)strResult { @@ -79,8 +170,88 @@ - (void)showNextVCWithScanResult:(LBXScanResult*)strResult vc.strCodeType = strResult.strBarCodeType; [self.navigationController pushViewController:vc animated:YES]; + + [self resetCodeFlagView]; +} + + +#pragma mark- 绘制二维码区域标志 +- (void)didDetectCodes:(CGRect)bounds corner:(NSArray*)corners +{ + AVCaptureVideoPreviewLayer * preview = nil; + + for (CALayer *layer in [self.cameraPreView.layer sublayers]) { + + if ( [layer isKindOfClass:[AVCaptureVideoPreviewLayer class]]) { + + preview = (AVCaptureVideoPreviewLayer*)layer; + } + } + + NSArray *layers = nil; + if (!layers) { + layers = @[[self makeBoundsLayer],[self makeCornersLayer]]; + [preview addSublayer:layers[0]]; + [preview addSublayer:layers[1]]; + } + + CAShapeLayer *boundsLayer = layers[0]; + boundsLayer.path = [self bezierPathForBounds:bounds].CGPath; + //得到一个CGPathRef赋给图层的path属性 + + if (corners) { + CAShapeLayer *cornersLayer = layers[1]; + cornersLayer.path = [self bezierPathForCorners:corners].CGPath; + //对于cornersLayer,基于元数据对象创建一个CGPath + } + + self.layers = layers; + +} + + +- (UIBezierPath *)bezierPathForBounds:(CGRect)bounds { + // 图层边界,创建一个和对象的bounds关联的UIBezierPath + return [UIBezierPath bezierPathWithRect:bounds]; } +- (CAShapeLayer *)makeBoundsLayer { + //CAShapeLayer 是具体化的CALayer子类,用于绘制Bezier路径 + CAShapeLayer *shapeLayer = [CAShapeLayer layer]; + shapeLayer.strokeColor = [UIColor colorWithRed:0.96f green:0.75f blue:0.06f alpha:1.0f].CGColor; + shapeLayer.fillColor = nil; + shapeLayer.lineWidth = 4.0f; + + return shapeLayer; +} + +- (CAShapeLayer *)makeCornersLayer { + + CAShapeLayer *cornersLayer = [CAShapeLayer layer]; + cornersLayer.lineWidth = 2.0f; + cornersLayer.strokeColor = [UIColor colorWithRed:0.172 green:0.671 blue:0.428 alpha:1.0].CGColor; + cornersLayer.fillColor = [UIColor colorWithRed:0.190 green:0.753 blue:0.489 alpha:0.5].CGColor; + + return cornersLayer;; +} + +- (UIBezierPath *)bezierPathForCorners:(NSArray *)corners { + + UIBezierPath *path = [UIBezierPath bezierPath]; + for (int i = 0; i < corners.count; i ++) { + CGPoint point = [self pointForCorner:corners[i]]; + //遍历每个条目,为每个条目创建一个CGPoint + if (i == 0) { + [path moveToPoint:point]; + } else { + [path addLineToPoint:point]; + } + } + [path closePath]; + return path; +} + + #pragma mark- 相册 //继承者实现 diff --git a/LBXScanDemo/DIYScanViewController/Native/LBXScanNativeViewController.m b/LBXScanDemo/DIYScanViewController/Native/LBXScanNativeViewController.m index 4ba042e..9d41865 100644 --- a/LBXScanDemo/DIYScanViewController/Native/LBXScanNativeViewController.m +++ b/LBXScanDemo/DIYScanViewController/Native/LBXScanNativeViewController.m @@ -11,7 +11,7 @@ @interface LBXScanNativeViewController () -@property (nonatomic, strong) UIView *videoView; +//@property (nonatomic, strong) UIView *videoView; @end @implementation LBXScanNativeViewController @@ -38,18 +38,23 @@ - (void)viewDidAppear:(BOOL)animated [self drawScanView]; + + + [self requestCameraPemissionWithResult:^(BOOL granted) { - + if (granted) { - + //不延时,可能会导致界面黑屏并卡住一会 [self performSelector:@selector(startScan) withObject:nil afterDelay:0.3]; - + }else{ - + [self.qRScanView stopDeviceReadying]; } }]; + + } @@ -77,19 +82,26 @@ - (void)drawScanView - (void)reStartDevice { + 
[self resetCodeFlagView]; + + [self.qRScanView stopScanAnimation]; + [self.qRScanView startScanAnimation]; + + [_scanObj startScan]; } //启动设备 - (void)startScan { - UIView *videoView = [[UIView alloc]initWithFrame:CGRectMake(0, 0, CGRectGetWidth(self.view.frame), CGRectGetHeight(self.view.frame))]; - videoView.backgroundColor = [UIColor clearColor]; - [self.view insertSubview:videoView atIndex:0]; - __weak __typeof(self) weakSelf = self; - - self.videoView = videoView; - + if (!self.cameraPreView) { + + UIView *videoView = [[UIView alloc]initWithFrame:CGRectMake(0, 0, CGRectGetWidth(self.view.frame), CGRectGetHeight(self.view.frame))]; + videoView.backgroundColor = [UIColor clearColor]; + [self.view insertSubview:videoView atIndex:0]; + + self.cameraPreView = videoView; + } if (!_scanObj ) { @@ -101,15 +113,18 @@ - (void)startScan cropRect = [LBXScanView getScanRectWithPreView:self.view style:self.style]; } - // AVMetadataObjectTypeITF14Code 扫码效果不行,另外只能输入一个码制,虽然接口是可以输入多个码制 - self.scanObj = [[LBXScanNative alloc]initWithPreView:videoView ObjectType:self.listScanTypes cropRect:cropRect videoMaxScale:^(CGFloat maxScale) { - + __weak __typeof(self) weakSelf = self; + + self.scanObj = [[LBXScanNative alloc]initWithPreView:self.cameraPreView ObjectType:self.listScanTypes cropRect:cropRect videoMaxScale:^(CGFloat maxScale) { [weakSelf setVideoMaxScale:maxScale]; + } success:^(NSArray *array) { - [weakSelf scanResultWithArray:array]; + [weakSelf handScanNative:array]; }]; [_scanObj setNeedCaptureImage:self.isNeedScanImage]; + //是否需要返回条码坐标 + _scanObj.needCodePosion = YES; } [_scanObj startScan]; @@ -120,6 +135,11 @@ - (void)startScan self.view.backgroundColor = [UIColor clearColor]; } +- (void)handScanNative:(NSArray *)array +{ + [self scanResultWithArray:array]; +} + - (void)setVideoMaxScale:(CGFloat)maxScale { @@ -186,14 +206,7 @@ - (void)recognizeImageWithImage:(UIImage*)image } } -- (void)viewWillLayoutSubviews -{ - [super viewWillLayoutSubviews]; - -// if (_videoView) { -// _videoView.frame = CGRectMake(0, 0, CGRectGetWidth(self.view.frame), CGRectGetHeight(self.view.frame)); -// } -} + @end diff --git a/LBXScanDemo/DIYScanViewController/Native/QQScanNativeViewController.m b/LBXScanDemo/DIYScanViewController/Native/QQScanNativeViewController.m index 2735eb3..50f8968 100644 --- a/LBXScanDemo/DIYScanViewController/Native/QQScanNativeViewController.m +++ b/LBXScanDemo/DIYScanViewController/Native/QQScanNativeViewController.m @@ -189,73 +189,6 @@ - (void)drawBottomItems } -- (void)showError:(NSString*)str -{ - [LBXAlertAction showAlertWithTitle:@"提示" msg:str buttonsStatement:@[@"知道了"] chooseBlock:nil]; -} - -- (void)scanResultWithArray:(NSArray*)array -{ - if (array.count < 1) - { - [self popAlertMsgWithScanResult:nil]; - - return; - } - - //经测试,可以同时识别2个二维码,不能同时识别二维码和条形码 - for (LBXScanResult *result in array) { - - NSLog(@"scanResult:%@",result.strScanned); - } - - LBXScanResult *scanResult = array[0]; - - NSString*strResult = scanResult.strScanned; - - self.scanImage = scanResult.imgScanned; - - if (!strResult) { - - [self popAlertMsgWithScanResult:nil]; - - return; - } - - //震动提醒 - // [LBXScanWrapper systemVibrate]; - //声音提醒 - //[LBXScanWrapper systemSound]; - - [self showNextVCWithScanResult:scanResult]; - -} - -- (void)popAlertMsgWithScanResult:(NSString*)strResult -{ - if (!strResult) { - - strResult = @"识别失败"; - } - - __weak __typeof(self) weakSelf = self; - [LBXAlertAction showAlertWithTitle:@"扫码内容" msg:strResult buttonsStatement:@[@"知道了"] chooseBlock:^(NSInteger buttonIdx) { - - [weakSelf 
reStartDevice]; - }]; -} - -- (void)showNextVCWithScanResult:(LBXScanResult*)strResult -{ - ScanResultViewController *vc = [ScanResultViewController new]; - vc.imgScan = strResult.imgScanned; - - vc.strScan = strResult.strScanned; - - vc.strCodeType = strResult.strBarCodeType; - - [self.navigationController pushViewController:vc animated:YES]; -} #pragma mark -底部功能项 diff --git a/LBXScanDemo/DIYScanViewController/ZBar/LBXScanZBarViewController.m b/LBXScanDemo/DIYScanViewController/ZBar/LBXScanZBarViewController.m index 12d4ddd..25a116a 100644 --- a/LBXScanDemo/DIYScanViewController/ZBar/LBXScanZBarViewController.m +++ b/LBXScanDemo/DIYScanViewController/ZBar/LBXScanZBarViewController.m @@ -71,6 +71,10 @@ - (void)drawScanView - (void)reStartDevice { + [self resetCodeFlagView]; + [self.qRScanView stopScanAnimation]; + [self.qRScanView startScanAnimation]; + [_zbarObj start]; } @@ -80,21 +84,15 @@ - (void)startScan UIView *videoView = [[UIView alloc]initWithFrame:CGRectMake(0, 0, CGRectGetWidth(self.view.frame), CGRectGetHeight(self.view.frame))]; videoView.backgroundColor = [UIColor clearColor]; [self.view insertSubview:videoView atIndex:0]; + + self.cameraPreView = videoView; __weak __typeof(self) weakSelf = self; if (!_zbarObj) { self.zbarObj = [[LBXZBarWrapper alloc]initWithPreView:videoView barCodeType:self.zbarType block:^(NSArray *result) { - //测试,只使用扫码结果第一项 - LBXZbarResult *firstObj = result[0]; - - LBXScanResult *scanResult = [[LBXScanResult alloc]init]; - scanResult.strScanned = firstObj.strScanned; - scanResult.imgScanned = firstObj.imgScanned; - scanResult.strBarCodeType = [LBXZBarWrapper convertFormat2String:firstObj.format]; - - [weakSelf scanResultWithArray:@[scanResult]]; + [weakSelf handZBarResult:result]; }]; } [_zbarObj start]; @@ -107,6 +105,45 @@ - (void)startScan self.view.backgroundColor = [UIColor clearColor]; } +- (void)handZBarResult:(NSArray *)result +{ + //测试,只使用扫码结果第一项 + LBXZbarResult *firstObj = result[0]; + + LBXScanResult *scanResult = [[LBXScanResult alloc]init]; + scanResult.strScanned = firstObj.strScanned; + scanResult.imgScanned = firstObj.imgScanned; + scanResult.strBarCodeType = [LBXZBarWrapper convertFormat2String:firstObj.format]; + + CGRect bounds = firstObj.bounds; + CGSize imgSize = firstObj.imgScanned.size; + CGSize preViewSize = self.cameraPreView.frame.size; +// CGFloat left = bounds.origin.x / imgSize.width * preViewSize.width; +// CGFloat top = bounds.origin.y / imgSize.height * preViewSize.height; + +// bounds.origin = CGPointMake(left, top); + + CGFloat minx = bounds.origin.x; + CGFloat miny= bounds.origin.y; + CGFloat maxx = bounds.origin.x + bounds.size.width; + CGFloat maxy= bounds.origin.y + bounds.size.height; + + minx = minx / imgSize.width * preViewSize.width; + maxx = maxx / imgSize.width * preViewSize.width; + miny = miny / imgSize.height * preViewSize.height; + maxy = maxy / imgSize.height * preViewSize.height; + + + CGFloat w = maxx - minx; + CGFloat h = maxy - miny; + + miny = minx; + minx = preViewSize.width - minx; + + scanResult.bounds = CGRectMake(minx, miny, w, h); + + [self scanResultWithArray:@[scanResult]]; +} - (void)viewWillDisappear:(BOOL)animated { @@ -117,7 +154,6 @@ - (void)viewWillDisappear:(BOOL)animated [self stopScan]; [self.qRScanView stopScanAnimation]; - } - (void)stopScan @@ -128,9 +164,7 @@ - (void)stopScan //开关闪光灯 - (void)openOrCloseFlash { - [_zbarObj openOrCloseFlash]; - self.isOpenFlash =!self.isOpenFlash; } diff --git a/LBXScanDemo/DIYScanViewController/ZBar/QQScanZBarViewController.m 
b/LBXScanDemo/DIYScanViewController/ZBar/QQScanZBarViewController.m index 7d4813d..8dfb524 100644 --- a/LBXScanDemo/DIYScanViewController/ZBar/QQScanZBarViewController.m +++ b/LBXScanDemo/DIYScanViewController/ZBar/QQScanZBarViewController.m @@ -8,7 +8,6 @@ #import "QQScanZBarViewController.h" #import "CreateBarCodeViewController.h" -#import "ScanResultViewController.h" #import "LBXPermission.h" #import "LBXPermissionSetting.h" @@ -115,73 +114,7 @@ - (void)drawBottomItems } -- (void)showError:(NSString*)str -{ - [LBXAlertAction showAlertWithTitle:@"提示" msg:str buttonsStatement:@[@"知道了"] chooseBlock:nil]; -} -- (void)scanResultWithArray:(NSArray*)array -{ - if (array.count < 1) - { - [self popAlertMsgWithScanResult:nil]; - - return; - } - - //经测试,可以同时识别2个二维码,不能同时识别二维码和条形码 - for (LBXScanResult *result in array) { - - NSLog(@"scanResult:%@",result.strScanned); - } - - LBXScanResult *scanResult = array[0]; - - NSString*strResult = scanResult.strScanned; - - self.scanImage = scanResult.imgScanned; - - if (!strResult) { - - [self popAlertMsgWithScanResult:nil]; - - return; - } - - //震动提醒 - // [LBXScanWrapper systemVibrate]; - //声音提醒 - //[LBXScanWrapper systemSound]; - - [self showNextVCWithScanResult:scanResult]; - -} - -- (void)popAlertMsgWithScanResult:(NSString*)strResult -{ - if (!strResult) { - - strResult = @"识别失败"; - } - - __weak __typeof(self) weakSelf = self; - [LBXAlertAction showAlertWithTitle:@"扫码内容" msg:strResult buttonsStatement:@[@"知道了"] chooseBlock:^(NSInteger buttonIdx) { - - [weakSelf reStartDevice]; - }]; -} - -- (void)showNextVCWithScanResult:(LBXScanResult*)strResult -{ - ScanResultViewController *vc = [ScanResultViewController new]; - vc.imgScan = strResult.imgScanned; - - vc.strScan = strResult.strScanned; - - vc.strCodeType = strResult.strBarCodeType; - - [self.navigationController pushViewController:vc animated:YES]; -} #pragma mark -底部功能项 diff --git a/LBXScanDemo/DIYScanViewController/ZXing/LBXScanZXingViewController.m b/LBXScanDemo/DIYScanViewController/ZXing/LBXScanZXingViewController.m index 3bd6f81..71f7ad2 100644 --- a/LBXScanDemo/DIYScanViewController/ZXing/LBXScanZXingViewController.m +++ b/LBXScanDemo/DIYScanViewController/ZXing/LBXScanZXingViewController.m @@ -7,6 +7,8 @@ // #import "LBXScanZXingViewController.h" +#import +//#import @interface LBXScanZXingViewController () @end @@ -25,6 +27,8 @@ - (void)viewDidLoad { self.view.backgroundColor = [UIColor blackColor]; self.title = @"ZXing"; + + self.isNeedScanImage = YES; } @@ -73,7 +77,10 @@ - (void)drawScanView - (void)reStartDevice { - + [self resetCodeFlagView]; + + [self.qRScanView stopScanAnimation]; + [self.qRScanView startScanAnimation]; [_zxingObj start]; } @@ -81,28 +88,26 @@ - (void)reStartDevice //启动设备 - (void)startScan { - UIView *videoView = [[UIView alloc]initWithFrame:CGRectMake(0, 0, CGRectGetWidth(self.view.frame), CGRectGetHeight(self.view.frame))]; - videoView.backgroundColor = [UIColor clearColor]; - [self.view insertSubview:videoView atIndex:0]; + if (!self.cameraPreView) { + UIView *videoView = [[UIView alloc]initWithFrame:CGRectMake(0, 0, CGRectGetWidth(self.view.frame), CGRectGetHeight(self.view.frame))]; + videoView.backgroundColor = [UIColor clearColor]; + [self.view insertSubview:videoView atIndex:0]; + + self.cameraPreView = videoView; + } if (!_zxingObj) { __weak __typeof(self) weakSelf = self; - self.zxingObj = [[ZXingWrapper alloc]initWithPreView:videoView block:^(ZXBarcodeFormat barcodeFormat, NSString *str, UIImage *scanImg) { - - LBXScanResult *result = [[LBXScanResult 
alloc]init]; - result.strScanned = str; - result.imgScanned = scanImg; - result.strBarCodeType = [weakSelf convertZXBarcodeFormat:barcodeFormat]; - - [weakSelf scanResultWithArray:@[result]]; - + + self.zxingObj = [[ZXingWrapper alloc]initWithPreView:self.cameraPreView success:^(ZXBarcodeFormat barcodeFormat, NSString *str, UIImage *scanImg, NSArray *resultPoints) { + [weakSelf handZXingResult:barcodeFormat barStr:str scanImg:scanImg resultPoints:resultPoints]; }]; if (self.isOpenInterestRect) { //设置只识别框内区域 - CGRect cropRect = [LBXScanView getZXingScanRectWithPreView:videoView style:self.style]; + CGRect cropRect = [LBXScanView getZXingScanRectWithPreView:self.cameraPreView style:self.style]; [_zxingObj setScanRect:cropRect]; } @@ -117,6 +122,61 @@ - (void)startScan self.view.backgroundColor = [UIColor clearColor]; } +- (void)handZXingResult:(ZXBarcodeFormat)barcodeFormat barStr:(NSString*)str scanImg:(UIImage*)scanImg resultPoints:(NSArray*)resultPoints +{ + LBXScanResult *result = [[LBXScanResult alloc]init]; + result.strScanned = str; + result.imgScanned = scanImg; + result.strBarCodeType = [self convertZXBarcodeFormat:barcodeFormat]; + + NSLog(@"ZXing pts:%@",resultPoints); + + if (self.cameraPreView && resultPoints && scanImg) { + + CGFloat minx = 100000; + CGFloat miny= 100000; + CGFloat maxx = 0; + CGFloat maxy= 0; + + for (ZXResultPoint *pt in resultPoints) { + + if (pt.x < minx) { + minx = pt.x; + } + if (pt.x > maxx) { + maxx = pt.x; + } + + if (pt.y < miny) { + miny = pt.y; + } + if (pt.y > maxy) { + maxy = pt.y; + } + } + +// CGFloat width = maxx - minx; +// CGFloat height = maxy - miny; + + CGSize imgSize = scanImg.size; + CGSize preViewSize = self.cameraPreView.frame.size; + minx = minx / imgSize.width * preViewSize.width; + maxx = maxx / imgSize.width * preViewSize.width; + miny = miny / imgSize.height * preViewSize.height; + maxy = maxy / imgSize.height * preViewSize.height; + + result.bounds = CGRectMake(minx, miny, maxx - minx,maxy - miny); + + NSLog(@"bounds:%@",NSStringFromCGRect(result.bounds)); + + [self scanResultWithArray:@[result]]; + } + else + { + [self scanResultWithArray:@[result]]; + } +} + - (void)viewWillDisappear:(BOOL)animated { diff --git a/LBXScanDemo/DIYScanViewController/ZXing/QQScanZXingViewController.m b/LBXScanDemo/DIYScanViewController/ZXing/QQScanZXingViewController.m index aa478ff..fbf1cce 100644 --- a/LBXScanDemo/DIYScanViewController/ZXing/QQScanZXingViewController.m +++ b/LBXScanDemo/DIYScanViewController/ZXing/QQScanZXingViewController.m @@ -8,9 +8,8 @@ #import "QQScanZXingViewController.h" #import "CreateBarCodeViewController.h" -#import "ScanResultViewController.h" #import "LBXPermission.h" -#import "LBXPermissionSetting.h" + @interface QQScanZXingViewController () @end @@ -29,18 +28,13 @@ - (void)viewDidLoad } self.view.backgroundColor = [UIColor blackColor]; - //设置扫码后需要扫码图像 - self.isNeedScanImage = YES; - } - (void)viewDidAppear:(BOOL)animated { [super viewDidAppear:animated]; - - - + [self drawBottomItems]; [self drawTitle]; [self.view bringSubviewToFront:_topTitle]; @@ -115,73 +109,7 @@ - (void)drawBottomItems } -- (void)showError:(NSString*)str -{ - [LBXAlertAction showAlertWithTitle:@"提示" msg:str buttonsStatement:@[@"知道了"] chooseBlock:nil]; -} - -- (void)scanResultWithArray:(NSArray*)array -{ - if (array.count < 1) - { - [self popAlertMsgWithScanResult:nil]; - - return; - } - - //经测试,可以同时识别2个二维码,不能同时识别二维码和条形码 - for (LBXScanResult *result in array) { - - NSLog(@"scanResult:%@",result.strScanned); - } - - LBXScanResult *scanResult = 
array[0]; - - NSString*strResult = scanResult.strScanned; - - self.scanImage = scanResult.imgScanned; - - if (!strResult) { - - [self popAlertMsgWithScanResult:nil]; - - return; - } - - //震动提醒 - // [LBXScanWrapper systemVibrate]; - //声音提醒 - //[LBXScanWrapper systemSound]; - - [self showNextVCWithScanResult:scanResult]; - -} - -- (void)popAlertMsgWithScanResult:(NSString*)strResult -{ - if (!strResult) { - - strResult = @"识别失败"; - } - - __weak __typeof(self) weakSelf = self; - [LBXAlertAction showAlertWithTitle:@"扫码内容" msg:strResult buttonsStatement:@[@"知道了"] chooseBlock:^(NSInteger buttonIdx) { - - [weakSelf reStartDevice]; - }]; -} -- (void)showNextVCWithScanResult:(LBXScanResult*)strResult -{ - ScanResultViewController *vc = [ScanResultViewController new]; - vc.imgScan = strResult.imgScanned; - - vc.strScan = strResult.strScanned; - - vc.strCodeType = strResult.strBarCodeType; - - [self.navigationController pushViewController:vc animated:YES]; -} #pragma mark -底部功能项 @@ -224,5 +152,4 @@ - (void)myQRCode } - @end diff --git a/LBXScanDemo/LBXScanDemo/DemoListTableViewController.m b/LBXScanDemo/LBXScanDemo/DemoListTableViewController.m index d5e2bb8..832b89b 100644 --- a/LBXScanDemo/LBXScanDemo/DemoListTableViewController.m +++ b/LBXScanDemo/LBXScanDemo/DemoListTableViewController.m @@ -212,8 +212,9 @@ - (void)startWithIndexPath:(NSIndexPath *)indexPath - (void)openScanVCWithStyle:(LBXScanViewStyle*)style { - LBXScanBaseViewController *vc = [self createScanVC]; vc.style = style; + LBXScanBaseViewController *vc = [self createScanVC]; + vc.style = style; [self.navigationController pushViewController:vc animated:YES]; } @@ -347,7 +348,7 @@ - (LBXScanBaseViewController*)createScanVC vc.cameraInvokeMsg = @"相机启动中"; //开启只识别框内,ZBar暂不支持 - vc.isOpenInterestRect = YES; + vc.isOpenInterestRect = NO; return vc;
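
The hunks above add a needCodePosion switch to LBXScanNative and corners/bounds fields to LBXScanResult. A minimal caller-side sketch of consuming the new fields (it assumes self.scanObj and self.cameraPreView are set up as in LBXScanNativeViewController above; the red marker view is illustrative and not part of the diff):

    // Sketch only: enable position reporting and mark the detected code.
    self.scanObj.needCodePosion = YES;   // scanner stops and reports position in LBXScanResult

    // Inside the success:^(NSArray *array) callback:
    LBXScanResult *first = array.firstObject;
    if (first && !CGRectEqualToRect(first.bounds, CGRectZero)) {
        // bounds/corners are already mapped to preview-layer coordinates
        // by transformedCodesFromCodes: in LBXScanNative.m.
        CGPoint center = CGPointMake(CGRectGetMidX(first.bounds), CGRectGetMidY(first.bounds));
        UIView *flag = [[UIView alloc] initWithFrame:CGRectMake(center.x - 10, center.y - 10, 20, 20)];
        flag.backgroundColor = [UIColor redColor];
        [self.cameraPreView addSubview:flag];   // hypothetical marker, mirrors signCodeWithCenterX:centerY:
    }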
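
The corners and bounds that AVFoundation reports in didOutputMetadataObjects: are normalized (0...1) in capture-device space, which is why the new transformedCodesFromCodes: helper runs every metadata object through the preview layer before the values are stored on LBXScanResult. A small sketch of that conversion (preview and metadataObjects stand for the existing _preview layer and delegate parameter; the log line is illustrative):

    // Sketch: device-space metadata object -> preview-layer coordinates.
    AVMetadataMachineReadableCodeObject *raw =
        (AVMetadataMachineReadableCodeObject *)metadataObjects.firstObject;
    AVMetadataMachineReadableCodeObject *onScreen =
        (AVMetadataMachineReadableCodeObject *)[preview transformedMetadataObjectForMetadataObject:raw];
    NSLog(@"normalized bounds:%@  on-screen bounds:%@",
          NSStringFromCGRect(raw.bounds), NSStringFromCGRect(onScreen.bounds));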
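
ZXingWrapper gains an initWithPreView:success: initializer whose callback also returns the ZXing resultPoints; those points are measured on scanImg, so they still need the image-to-preview scaling done in handZXingResult: above. A minimal usage sketch (it assumes self.cameraPreView and reStartDevice from the demo base class):

    // Sketch only: wiring up the new ZXing callback that includes result points.
    __weak __typeof(self) weakSelf = self;
    self.zxingObj = [[ZXingWrapper alloc] initWithPreView:self.cameraPreView
                                                  success:^(ZXBarcodeFormat barcodeFormat, NSString *str,
                                                            UIImage *scanImg, NSArray *resultPoints) {
        // resultPoints are ZXResultPoint objects in scanImg coordinates;
        // scale by previewSize / scanImg.size before drawing an overlay.
        NSLog(@"format:%d text:%@ points:%@", barcodeFormat, str, resultPoints);
        [weakSelf reStartDevice];   // e.g. resume scanning after handling the result
    }];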