Skip to content

Commit

Permalink
Update ALPRCameraManager.mm
Browse files Browse the repository at this point in the history
AVCaptureStillImageOutput has been deprecated since iOS 10, so I switched to AVCapturePhotoOutput, Apple's current photo-capture API.
  • Loading branch information
Francois Nadeau authored Jul 31, 2019
1 parent a072fa2 commit edde122
Showing 1 changed file with 38 additions and 40 deletions.
78 changes: 38 additions & 40 deletions ios/ALPRCameraManager.mm
Original file line number Diff line number Diff line change
Expand Up @@ -38,12 +38,16 @@ void rot90(cv::Mat &matImage, int rotflag) {

#pragma mark Implementation -

// Class extension: private capture state plus the pending takePicture
// promise. Conforms to AVCapturePhotoCaptureDelegate so the photo-output
// completion callback is delivered to this manager.
@interface ALPRCameraManager () <AVCapturePhotoCaptureDelegate> {
dispatch_queue_t videoDataOutputQueue;
UIDeviceOrientation deviceOrientation;
}
@property (atomic) BOOL isProcessingFrame;
// Replacement for the deprecated AVCaptureStillImageOutput (iOS 10+).
@property(nonatomic, strong) AVCapturePhotoOutput *avCaptureOutput;
// NOTE(review): not referenced in the visible code — confirm it is still
// needed before removing.
@property(nonatomic, strong) NSHashTable *takePictureParams;
// State of the in-flight takePicture call, consumed (and cleared) in
// -captureOutput:didFinishProcessingPhoto:error:. `copy` on the dictionary
// and blocks documents ownership and guards against caller-side mutation.
@property(nonatomic, copy) NSDictionary *takePictureOptions;
@property(nonatomic, copy) RCTPromiseResolveBlock takePictureResolve;
@property(nonatomic, copy) RCTPromiseRejectBlock takePictureReject;

@end

Expand Down Expand Up @@ -195,27 +199,33 @@ - (id)init {
// Captures a still image and resolves the promise with a file URI of the
// saved JPEG. Uses AVCapturePhotoOutput (AVCaptureStillImageOutput is
// deprecated since iOS 10); the result is delivered asynchronously via
// -captureOutput:didFinishProcessingPhoto:error:.
RCT_EXPORT_METHOD(takePicture:(NSDictionary *)options
                  resolve:(RCTPromiseResolveBlock)resolve
                  reject:(RCTPromiseRejectBlock)reject) {
    // Reject a second request while one is in flight instead of silently
    // overwriting the stored callbacks, which would leak the first promise.
    if (self.takePictureResolve != nil || self.takePictureReject != nil) {
        reject(@"E_IMAGE_CAPTURE_FAILED", @"Image could not be captured", nil);
        return;
    }
    self.takePictureOptions = options;
    self.takePictureResolve = resolve;
    self.takePictureReject = reject;

    AVCapturePhotoSettings *settings = [AVCapturePhotoSettings photoSettings];
    [self.avCaptureOutput capturePhotoWithSettings:settings delegate:self];
}

// AVCapturePhotoCaptureDelegate: invoked once per capturePhotoWithSettings:.
// Settles the promise stored by takePicture, then clears the pending state so
// the blocks are released and a stale callback can never fire twice.
- (void)captureOutput:(AVCapturePhotoOutput *)output didFinishProcessingPhoto:(AVCapturePhoto *)photo error:(nullable NSError *)error
{
    RCTPromiseResolveBlock resolve = self.takePictureResolve;
    RCTPromiseRejectBlock reject = self.takePictureReject;
    NSDictionary *options = self.takePictureOptions;
    self.takePictureResolve = nil;
    self.takePictureReject = nil;
    self.takePictureOptions = nil;

    if (error) {
        if (reject) {
            reject(@"E_IMAGE_CAPTURE_FAILED", @"Image could not be captured", error);
        }
        return;
    }
    NSData *imageData = [photo fileDataRepresentation];
    NSData *compressedImage = [ALPRCameraManager imageWithImage:imageData options:options];
    NSString *path = [ALPRCameraManager generatePathInDirectory:[[ALPRCameraManager cacheDirectoryPath] stringByAppendingPathComponent:@"Camera"] withExtension:@".jpg"];
    NSString *uri = [ALPRCameraManager writeImage:compressedImage toPath:path];
    if (resolve) {
        resolve(uri);
    }
}

// Resizes and re-encodes captured JPEG data. `options` may contain "width"
// and "height" (maximum output dimensions; defaults: source size, i.e. no
// resize) and "quality" (JPEG quality 0.0-1.0; default 1.0).
// Returns the re-encoded JPEG data, or nil if `imageData` cannot be decoded.
+ (NSData *)imageWithImage:(NSData *)imageData options:(NSDictionary *)options {
    UIImage *image = [UIImage imageWithData:imageData];
    if (image == nil) {
        return nil;  // Undecodable input; let the caller surface the failure.
    }

    // Maximum output dimensions; default to the source size.
    CGFloat maxWidth = image.size.width, maxHeight = image.size.height;
    if ([options valueForKey:@"width"] != nil) {
        maxWidth = [options[@"width"] floatValue];
    }
    if ([options valueForKey:@"height"] != nil) {
        maxHeight = [options[@"height"] floatValue];
    }

    // Downscale factor that makes the image fit BOTH limits: divide by the
    // LARGER overshoot ratio (picking the smaller one, as before, left the
    // result exceeding one of the requested bounds). Clamp at 1 so we never
    // upscale, and guard against a zero limit.
    CGFloat widthScale = maxWidth > 0 ? image.size.width / maxWidth : 1.0;
    CGFloat heightScale = maxHeight > 0 ? image.size.height / maxHeight : 1.0;
    CGFloat scale = MAX(MAX(widthScale, heightScale), 1.0);

    float quality = 1.0;  // Default quality.
    if ([options valueForKey:@"quality"] != nil) {
        quality = [options[@"quality"] floatValue];
    }

    // Re-render at the target pixel size. UIImageJPEGRepresentation encodes
    // the backing CGImage, so only drawing into a smaller context — not the
    // UIImage `scale` property — actually reduces the encoded resolution.
    CGSize size = CGSizeMake(image.size.width / scale, image.size.height / scale);
    UIGraphicsBeginImageContext(size);
    [image drawInRect:CGRectMake(0, 0, size.width, size.height)];
    UIImage *destImage = UIGraphicsGetImageFromCurrentImageContext();
    UIGraphicsEndImageContext();

    return UIImageJPEGRepresentation(destImage, quality);
}
Expand Down Expand Up @@ -372,14 +373,11 @@ - (void)startSession {
[self.session addOutput:videoDataOutput];
}

AVCaptureStillImageOutput *stillImageOutput = [[AVCaptureStillImageOutput alloc] init];
if ([self.session canAddOutput:stillImageOutput]) {
stillImageOutput.outputSettings = @{AVVideoCodecKey : AVVideoCodecJPEG};
[self.session addOutput:stillImageOutput];
[stillImageOutput setHighResolutionStillImageOutputEnabled:YES];
self.stillImageOutput = stillImageOutput;
self.avCaptureOutput = [[AVCapturePhotoOutput alloc] init];
if([self.session canAddOutput:self.avCaptureOutput]) {
[self.session addOutput:self.avCaptureOutput];
}

__weak ALPRCameraManager *weakSelf = self;
[self setRuntimeErrorHandlingObserver:[NSNotificationCenter.defaultCenter addObserverForName:AVCaptureSessionRuntimeErrorNotification object:self.session queue:nil usingBlock:^(NSNotification *note) {
ALPRCameraManager *strongSelf = weakSelf;
Expand Down

0 comments on commit edde122

Please sign in to comment.