From edde122964d8aa4d2de57762baac26a66b0f7348 Mon Sep 17 00:00:00 2001 From: Francois Nadeau Date: Wed, 31 Jul 2019 15:37:18 -0400 Subject: [PATCH] Update ALPRCameraManager.mm AVCaptureStillImageOutput is deprecated, so I switched to use the latest and greatest from Apple. --- ios/ALPRCameraManager.mm | 78 ++++++++++++++++++++-------------------- 1 file changed, 38 insertions(+), 40 deletions(-) diff --git a/ios/ALPRCameraManager.mm b/ios/ALPRCameraManager.mm index 9569d5a..b030825 100644 --- a/ios/ALPRCameraManager.mm +++ b/ios/ALPRCameraManager.mm @@ -38,12 +38,16 @@ void rot90(cv::Mat &matImage, int rotflag) { #pragma mark Implementation - -@interface ALPRCameraManager () { +@interface ALPRCameraManager () <AVCapturePhotoCaptureDelegate> { dispatch_queue_t videoDataOutputQueue; UIDeviceOrientation deviceOrientation; } @property (atomic) BOOL isProcessingFrame; -@property(nonatomic, strong) AVCaptureStillImageOutput *stillImageOutput; +@property(nonatomic, strong) AVCapturePhotoOutput *avCaptureOutput; +@property(nonatomic, strong) NSHashTable *takePictureParams; +@property(nonatomic, strong) NSDictionary *takePictureOptions; +@property(nonatomic, strong) RCTPromiseResolveBlock takePictureResolve; +@property(nonatomic, strong) RCTPromiseRejectBlock takePictureReject; @end @@ -195,27 +199,33 @@ - (id)init { RCT_EXPORT_METHOD(takePicture:(NSDictionary *)options resolve:(RCTPromiseResolveBlock)resolve reject:(RCTPromiseRejectBlock)reject) { - AVCaptureConnection *connection = [self.stillImageOutput connectionWithMediaType:AVMediaTypeVideo]; - [connection setVideoOrientation:self.previewLayer.connection.videoOrientation]; - [self.stillImageOutput captureStillImageAsynchronouslyFromConnection:connection completionHandler: ^(CMSampleBufferRef imageSampleBuffer, NSError *error) { - if (imageSampleBuffer && !error) { - NSData *imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageSampleBuffer]; - NSData* compressedImage = [ALPRCameraManager imageWithImage:imageData 
options:options]; - NSString *path = [ALPRCameraManager generatePathInDirectory:[[ALPRCameraManager cacheDirectoryPath] stringByAppendingPathComponent:@"Camera"] withExtension:@".jpg"]; - NSString *uri = [ALPRCameraManager writeImage:compressedImage toPath:path]; - resolve(uri); - } else { - reject(@"E_IMAGE_CAPTURE_FAILED", @"Image could not be captured", error); - } - }]; + self.takePictureOptions = options; + self.takePictureResolve = resolve; + self.takePictureReject = reject; + + AVCapturePhotoSettings *settings = [AVCapturePhotoSettings photoSettings]; + [self.avCaptureOutput capturePhotoWithSettings:settings delegate:self]; +} + +- (void)captureOutput:(AVCapturePhotoOutput *)output didFinishProcessingPhoto:(AVCapturePhoto *)photo error:(nullable NSError *)error +{ + if (!error) { + NSData *imageData = [photo fileDataRepresentation]; + NSData* compressedImage = [ALPRCameraManager imageWithImage:imageData options:self.takePictureOptions]; + NSString *path = [ALPRCameraManager generatePathInDirectory:[[ALPRCameraManager cacheDirectoryPath] stringByAppendingPathComponent:@"Camera"] withExtension:@".jpg"]; + NSString *uri = [ALPRCameraManager writeImage:compressedImage toPath:path]; + self.takePictureResolve(uri); + } else { + self.takePictureReject(@"E_IMAGE_CAPTURE_FAILED", @"Image could not be captured", error); + } } + (NSData *)imageWithImage:(NSData *)imageData options:(NSDictionary *)options { UIImage *image = [UIImage imageWithData:imageData]; // Calculate the image size. 
- int width = 0, height = 0; - float quality; + int width = image.size.width, height = image.size.height; + float quality, scale; if([options valueForKey:@"width"] != nil) { width = [options[@"width"] intValue]; @@ -224,19 +234,14 @@ + (NSData *)imageWithImage:(NSData *)imageData options:(NSDictionary *)options { height = [options[@"height"] intValue]; } - if(image.size.width > image.size.height) { - if(width == 0) { - width = image.size.width; // Default max width - } - height = width * image.size.height / image.size.width; - + float widthScale = image.size.width / width; + float heightScale = image.size.height / height; + + if(widthScale > heightScale) { + scale = heightScale; } else { - if(height == 0) { - height = image.size.height; // Default max height - } - width = height * image.size.width / image.size.height; + scale = widthScale; } - CGSize size = CGSizeMake(width,height); if([options valueForKey:@"quality"] != nil) { quality = [options[@"quality"] floatValue]; @@ -244,11 +249,7 @@ + (NSData *)imageWithImage:(NSData *)imageData options:(NSDictionary *)options { quality = 1.0; // Default quality } - UIGraphicsBeginImageContext(size); - [image drawInRect:CGRectMake(0, 0, size.width, size.height)]; - UIImage *destImage = UIGraphicsGetImageFromCurrentImageContext(); - UIGraphicsEndImageContext(); - + UIImage *destImage = [UIImage imageWithCGImage:[image CGImage] scale:scale orientation:UIImageOrientationUp]; NSData *destData = UIImageJPEGRepresentation(destImage, quality); return destData; } @@ -372,14 +373,11 @@ - (void)startSession { [self.session addOutput:videoDataOutput]; } - AVCaptureStillImageOutput *stillImageOutput = [[AVCaptureStillImageOutput alloc] init]; - if ([self.session canAddOutput:stillImageOutput]) { - stillImageOutput.outputSettings = @{AVVideoCodecKey : AVVideoCodecJPEG}; - [self.session addOutput:stillImageOutput]; - [stillImageOutput setHighResolutionStillImageOutputEnabled:YES]; - self.stillImageOutput = stillImageOutput; + 
self.avCaptureOutput = [[AVCapturePhotoOutput alloc] init]; + if([self.session canAddOutput:self.avCaptureOutput]) { + [self.session addOutput:self.avCaptureOutput]; } - + __weak ALPRCameraManager *weakSelf = self; [self setRuntimeErrorHandlingObserver:[NSNotificationCenter.defaultCenter addObserverForName:AVCaptureSessionRuntimeErrorNotification object:self.session queue:nil usingBlock:^(NSNotification *note) { ALPRCameraManager *strongSelf = weakSelf;