Add GPS metadata, stop stripping camera metadata
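For context, here is a minimal JavaScript-side sketch of how this module would be driven. It is an illustration, not part of the gist: it assumes React Native exposes the class below as NativeModules.CameraManager, and the shape of the options object mirrors what capture: reads on the native side (mode, target, and metadata.location containing coords plus a timestamp in seconds since the Unix epoch).

var CameraManager = require('react-native').NativeModules.CameraManager;

navigator.geolocation.getCurrentPosition(function (position) {
  CameraManager.capture({
    mode: CameraManager.CaptureMode.still,
    target: CameraManager.CaptureTarget.disk,
    metadata: {
      location: {
        // geolocation reports milliseconds; the native side expects seconds
        timestamp: position.timestamp / 1000,
        coords: position.coords // latitude, longitude, altitude, speed, heading
      }
    }
  }, function (error, path) {
    if (error) {
      console.error(error);
      return;
    }
    console.log('Saved JPEG with GPS EXIF at ' + path);
  });
});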
#import "RCTCameraManager.h" | |
#import "RCTCamera.h" | |
#import "RCTBridge.h" | |
#import "RCTEventDispatcher.h" | |
#import "RCTUtils.h" | |
#import "RCTLog.h" | |
#import "UIView+React.h" | |
#import <AssetsLibrary/ALAssetsLibrary.h> | |
#import <AVFoundation/AVFoundation.h> | |
#import <ImageIO/ImageIO.h> | |
@implementation RCTCameraManager

RCT_EXPORT_MODULE();

- (UIView *)view
{
  return [[RCTCamera alloc] initWithManager:self];
}

RCT_EXPORT_VIEW_PROPERTY(aspect, NSInteger);
RCT_EXPORT_VIEW_PROPERTY(type, NSInteger);
RCT_EXPORT_VIEW_PROPERTY(orientation, NSInteger);
RCT_EXPORT_VIEW_PROPERTY(flashMode, NSInteger);

- (NSDictionary *)constantsToExport
{
  return @{
    @"Aspect": @{
      @"stretch": @(RCTCameraAspectStretch),
      @"fit": @(RCTCameraAspectFit),
      @"fill": @(RCTCameraAspectFill)
    },
    @"Type": @{
      @"front": @(RCTCameraTypeFront),
      @"back": @(RCTCameraTypeBack)
    },
    @"CaptureMode": @{
      @"still": @(RCTCameraCaptureModeStill),
      @"video": @(RCTCameraCaptureModeVideo)
    },
    @"CaptureTarget": @{
      @"memory": @(RCTCameraCaptureTargetMemory),
      @"disk": @(RCTCameraCaptureTargetDisk),
      @"cameraRoll": @(RCTCameraCaptureTargetCameraRoll)
    },
    @"Orientation": @{
      @"auto": @(RCTCameraOrientationAuto),
      @"landscapeLeft": @(RCTCameraOrientationLandscapeLeft),
      @"landscapeRight": @(RCTCameraOrientationLandscapeRight),
      @"portrait": @(RCTCameraOrientationPortrait),
      @"portraitUpsideDown": @(RCTCameraOrientationPortraitUpsideDown)
    },
    @"FlashMode": @{
      @"off": @(RCTCameraFlashModeOff),
      @"on": @(RCTCameraFlashModeOn),
      @"auto": @(RCTCameraFlashModeAuto)
    }
  };
}

- (id)init {
  if ((self = [super init])) {
    self.session = [AVCaptureSession new];
    self.session.sessionPreset = AVCaptureSessionPresetHigh;

    self.previewLayer = [AVCaptureVideoPreviewLayer layerWithSession:self.session];
    self.previewLayer.needsDisplayOnBoundsChange = YES;

    self.sessionQueue = dispatch_queue_create("cameraManagerQueue", DISPATCH_QUEUE_SERIAL);

    dispatch_async(self.sessionQueue, ^{
      NSError *error = nil;

      if (self.presetCamera == AVCaptureDevicePositionUnspecified) {
        self.presetCamera = AVCaptureDevicePositionBack;
      }

      AVCaptureDevice *captureDevice = [self deviceWithMediaType:AVMediaTypeVideo preferringPosition:self.presetCamera];
      if (captureDevice != nil) {
        AVCaptureDeviceInput *captureDeviceInput = [AVCaptureDeviceInput deviceInputWithDevice:captureDevice error:&error];
        if (error)
        {
          NSLog(@"%@", error);
        }
        if ([self.session canAddInput:captureDeviceInput])
        {
          [self.session addInput:captureDeviceInput];
          self.captureDeviceInput = captureDeviceInput;
        }
      }

      AVCaptureStillImageOutput *stillImageOutput = [[AVCaptureStillImageOutput alloc] init];
      if ([self.session canAddOutput:stillImageOutput])
      {
        stillImageOutput.outputSettings = @{AVVideoCodecKey : AVVideoCodecJPEG};
        [self.session addOutput:stillImageOutput];
        self.stillImageOutput = stillImageOutput;
      }

      AVCaptureMetadataOutput *metadataOutput = [[AVCaptureMetadataOutput alloc] init];
      if ([self.session canAddOutput:metadataOutput]) {
        [metadataOutput setMetadataObjectsDelegate:self queue:self.sessionQueue];
        [self.session addOutput:metadataOutput];
        [metadataOutput setMetadataObjectTypes:metadataOutput.availableMetadataObjectTypes];
        self.metadataOutput = metadataOutput;
      }

      __weak RCTCameraManager *weakSelf = self;
      [self setRuntimeErrorHandlingObserver:[NSNotificationCenter.defaultCenter addObserverForName:AVCaptureSessionRuntimeErrorNotification object:self.session queue:nil usingBlock:^(NSNotification *note) {
        RCTCameraManager *strongSelf = weakSelf;
        dispatch_async(strongSelf.sessionQueue, ^{
          // Manually restarting the session since it must have been stopped due to an error.
          [strongSelf.session startRunning];
        });
      }]];

      [self.session startRunning];
    });
  }
  return self;
}

RCT_EXPORT_METHOD(checkDeviceAuthorizationStatus:(RCTResponseSenderBlock)callback)
{
  NSString *mediaType = AVMediaTypeVideo;
  [AVCaptureDevice requestAccessForMediaType:mediaType completionHandler:^(BOOL granted) {
    callback(@[[NSNull null], @(granted)]);
  }];
}

RCT_EXPORT_METHOD(changeFlashMode:(NSInteger)flashMode) {
  AVCaptureDevice *currentCaptureDevice = [self.captureDeviceInput device];
  [self setFlashMode:flashMode forDevice:currentCaptureDevice];
}

RCT_EXPORT_METHOD(changeCamera:(NSInteger)camera) {
  AVCaptureDevice *currentCaptureDevice = [self.captureDeviceInput device];
  AVCaptureDevicePosition position = (AVCaptureDevicePosition)camera;
  AVCaptureDevice *captureDevice = [self deviceWithMediaType:AVMediaTypeVideo preferringPosition:position];

  if (captureDevice == nil) {
    return;
  }

  NSError *error = nil;
  AVCaptureDeviceInput *captureDeviceInput = [AVCaptureDeviceInput deviceInputWithDevice:captureDevice error:&error];

  if (error)
  {
    NSLog(@"%@", error);
    return;
  }

  [self.session beginConfiguration];

  [self.session removeInput:self.captureDeviceInput];

  if ([self.session canAddInput:captureDeviceInput])
  {
    [NSNotificationCenter.defaultCenter removeObserver:self name:AVCaptureDeviceSubjectAreaDidChangeNotification object:currentCaptureDevice];
    [NSNotificationCenter.defaultCenter addObserver:self selector:@selector(subjectAreaDidChange:) name:AVCaptureDeviceSubjectAreaDidChangeNotification object:captureDevice];
    [self.session addInput:captureDeviceInput];
    self.captureDeviceInput = captureDeviceInput;
  }
  else
  {
    // Adding the new input failed; restore the previous one.
    [self.session addInput:self.captureDeviceInput];
  }

  [self.session commitConfiguration];
}

RCT_EXPORT_METHOD(changeAspect:(NSString *)aspect) {
  self.previewLayer.videoGravity = aspect;
}

RCT_EXPORT_METHOD(changeOrientation:(NSInteger)orientation) {
  self.previewLayer.connection.videoOrientation = (AVCaptureVideoOrientation)orientation;
}

RCT_EXPORT_METHOD(capture:(NSDictionary *)options callback:(RCTResponseSenderBlock)callback) {
  NSInteger captureMode = [[options valueForKey:@"mode"] intValue];
  NSInteger captureTarget = [[options valueForKey:@"target"] intValue];
  NSDictionary *metadata = [options valueForKey:@"metadata"];

  if (captureMode == RCTCameraCaptureModeStill) {
    [self captureStill:captureTarget metadata:metadata callback:callback];
  }
  else if (captureMode == RCTCameraCaptureModeVideo) {
    // waiting for incoming PRs
  }
}

-(void)captureStill:(NSInteger)target metadata:(NSDictionary *)metadata callback:(RCTResponseSenderBlock)callback {
  [[self.stillImageOutput connectionWithMediaType:AVMediaTypeVideo] setVideoOrientation:self.previewLayer.connection.videoOrientation];

  [self.stillImageOutput captureStillImageAsynchronouslyFromConnection:[self.stillImageOutput connectionWithMediaType:AVMediaTypeVideo] completionHandler:^(CMSampleBufferRef imageDataSampleBuffer, NSError *error) {
    if (imageDataSampleBuffer)
    {
      // Only touch the buffer's attachments once we know the capture succeeded.
      if (metadata) {
        // Copy the existing attachments (EXIF, TIFF, etc.) from imageDataSampleBuffer...
        CFDictionaryRef metaDict = CMCopyDictionaryOfAttachments(NULL, imageDataSampleBuffer, kCMAttachmentMode_ShouldPropagate);
        // ...make a mutable copy so the GPS dictionary can be added...
        CFMutableDictionaryRef mutable = metaDict
          ? CFDictionaryCreateMutableCopy(NULL, 0, metaDict)
          : CFDictionaryCreateMutable(NULL, 0, &kCFTypeDictionaryKeyCallBacks, &kCFTypeDictionaryValueCallBacks);
        NSMutableDictionary *location = [self getGPSDictionaryForLocation:[metadata objectForKey:@"location"]];
        CFDictionarySetValue(mutable, kCGImagePropertyGPSDictionary, (__bridge const void *)(location));
        // ...and set the augmented dictionary back on the buffer.
        CMSetAttachments(imageDataSampleBuffer, mutable, kCMAttachmentMode_ShouldPropagate);
        // CF objects created with Copy/Create functions are not managed by ARC; release them.
        if (metaDict) {
          CFRelease(metaDict);
        }
        CFRelease(mutable);
      }

      // jpegStillImageNSDataRepresentation preserves the buffer's attachments as EXIF,
      // so the camera metadata (and the GPS data added above) survives in the JPEG.
      NSData *imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageDataSampleBuffer];

      NSString *responseString;

      if (target == RCTCameraCaptureTargetMemory) {
        responseString = [imageData base64EncodedStringWithOptions:0];
      }
      else if (target == RCTCameraCaptureTargetDisk) {
        responseString = [self saveImage:imageData withName:[[NSUUID UUID] UUIDString]];
      }
      else if (target == RCTCameraCaptureTargetCameraRoll) {
        // The EXIF (including GPS) is already embedded in imageData, so no separate
        // metadata dictionary needs to be passed here.
        [[[ALAssetsLibrary alloc] init] writeImageDataToSavedPhotosAlbum:imageData metadata:nil completionBlock:^(NSURL *url, NSError *error) {
          if (error == nil) {
            callback(@[[NSNull null], [url absoluteString]]);
          }
          else {
            callback(@[RCTMakeError(error.description, nil, nil)]);
          }
        }];
        return;
      }
      callback(@[[NSNull null], responseString]);
    }
    else {
      callback(@[RCTMakeError(error.description, nil, nil)]);
    }
  }];
}

- (NSString *)saveImage:(NSData *)imageData withName:(NSString *)name {
  NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
  NSString *documentsDirectory = [paths firstObject];

  // Write the JPEG bytes straight to disk; re-encoding through
  // UIImageJPEGRepresentation would strip the EXIF metadata just attached.
  NSFileManager *fileManager = [NSFileManager defaultManager];
  NSString *fullPath = [documentsDirectory stringByAppendingPathComponent:name];

  [fileManager createFileAtPath:fullPath contents:imageData attributes:nil];
  return fullPath;
}

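// Illustrative example (not from the original source): a JS location of
//   { timestamp: 1433530000,
//     coords: { latitude: -33.87, longitude: 151.21, altitude: 20, speed: 1.5, heading: 90 } }
// maps to an EXIF GPS dictionary along the lines of:
//   GPSLatitudeRef = "S",  GPSLatitude = 33.87
//   GPSLongitudeRef = "E", GPSLongitude = 151.21
//   GPSAltitudeRef = "0",  GPSAltitude = 20
//   GPSSpeedRef = "K",     GPSSpeed = 5.4   (1.5 m/s * 3.6)
//   GPSTrackRef = "T",     GPSTrack = 90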
- (NSMutableDictionary *)getGPSDictionaryForLocation:(NSDictionary *)location {
  NSMutableDictionary *gps = [NSMutableDictionary dictionary];
  NSDictionary *coords = [location objectForKey:@"coords"];

  // GPS tag version
  [gps setObject:@"2.2.0.0" forKey:(NSString *)kCGImagePropertyGPSVersion];

  // Timestamp (expected in seconds since the Unix epoch)
  double timestamp = floor([[location objectForKey:@"timestamp"] doubleValue]);
  NSDate *date = [NSDate dateWithTimeIntervalSince1970:timestamp];
  NSDateFormatter *formatter = [[NSDateFormatter alloc] init];
  [formatter setDateFormat:@"HH:mm:ss.SSSSSS"];
  [formatter setTimeZone:[NSTimeZone timeZoneWithAbbreviation:@"UTC"]];
  [gps setObject:[formatter stringFromDate:date] forKey:(NSString *)kCGImagePropertyGPSTimeStamp];
  [formatter setDateFormat:@"yyyy:MM:dd"];
  [gps setObject:[formatter stringFromDate:date] forKey:(NSString *)kCGImagePropertyGPSDateStamp];

  // Latitude (stored as doubles throughout; floats lose precision at the meter scale)
  double latitude = [[coords objectForKey:@"latitude"] doubleValue];
  if (latitude < 0) {
    latitude = -latitude;
    [gps setObject:@"S" forKey:(NSString *)kCGImagePropertyGPSLatitudeRef];
  } else {
    [gps setObject:@"N" forKey:(NSString *)kCGImagePropertyGPSLatitudeRef];
  }
  [gps setObject:[NSNumber numberWithDouble:latitude] forKey:(NSString *)kCGImagePropertyGPSLatitude];

  // Longitude
  double longitude = [[coords objectForKey:@"longitude"] doubleValue];
  if (longitude < 0) {
    longitude = -longitude;
    [gps setObject:@"W" forKey:(NSString *)kCGImagePropertyGPSLongitudeRef];
  } else {
    [gps setObject:@"E" forKey:(NSString *)kCGImagePropertyGPSLongitudeRef];
  }
  [gps setObject:[NSNumber numberWithDouble:longitude] forKey:(NSString *)kCGImagePropertyGPSLongitude];

  // Altitude
  double altitude = [[coords objectForKey:@"altitude"] doubleValue];
  if (!isnan(altitude)) {
    if (altitude < 0) {
      altitude = -altitude;
      [gps setObject:@"1" forKey:(NSString *)kCGImagePropertyGPSAltitudeRef];
    } else {
      [gps setObject:@"0" forKey:(NSString *)kCGImagePropertyGPSAltitudeRef];
    }
    [gps setObject:[NSNumber numberWithDouble:altitude] forKey:(NSString *)kCGImagePropertyGPSAltitude];
  }

  // Speed, must be converted from m/s to km/h
  double speed = [[coords objectForKey:@"speed"] doubleValue];
  if (speed >= 0) {
    [gps setObject:@"K" forKey:(NSString *)kCGImagePropertyGPSSpeedRef];
    [gps setObject:[NSNumber numberWithDouble:speed * 3.6] forKey:(NSString *)kCGImagePropertyGPSSpeed];
  }

  // Heading
  double heading = [[coords objectForKey:@"heading"] doubleValue];
  if (heading >= 0) {
    [gps setObject:@"T" forKey:(NSString *)kCGImagePropertyGPSTrackRef];
    [gps setObject:[NSNumber numberWithDouble:heading] forKey:(NSString *)kCGImagePropertyGPSTrack];
  }

  return gps;
}

- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputMetadataObjects:(NSArray *)metadataObjects fromConnection:(AVCaptureConnection *)connection {
  NSArray *barcodeTypes = @[
    AVMetadataObjectTypeUPCECode,
    AVMetadataObjectTypeCode39Code,
    AVMetadataObjectTypeCode39Mod43Code,
    AVMetadataObjectTypeEAN13Code,
    AVMetadataObjectTypeEAN8Code,
    AVMetadataObjectTypeCode93Code,
    AVMetadataObjectTypeCode128Code,
    AVMetadataObjectTypePDF417Code,
    AVMetadataObjectTypeQRCode,
    AVMetadataObjectTypeAztecCode
  ];

  for (AVMetadataMachineReadableCodeObject *metadata in metadataObjects) {
    // The type constants are NSStrings, so compare values rather than pointers.
    if ([barcodeTypes containsObject:metadata.type]) {
      [self.bridge.eventDispatcher sendDeviceEventWithName:@"CameraBarCodeRead"
                                                      body:@{
                                                        @"data": metadata.stringValue,
                                                        @"bounds": @{
                                                          @"origin": @{
                                                            @"x": [NSString stringWithFormat:@"%f", metadata.bounds.origin.x],
                                                            @"y": [NSString stringWithFormat:@"%f", metadata.bounds.origin.y]
                                                          },
                                                          @"size": @{
                                                            @"height": [NSString stringWithFormat:@"%f", metadata.bounds.size.height],
                                                            @"width": [NSString stringWithFormat:@"%f", metadata.bounds.size.width]
                                                          }
                                                        }
                                                      }];
    }
  }
}

- (AVCaptureDevice *)deviceWithMediaType:(NSString *)mediaType preferringPosition:(AVCaptureDevicePosition)position
{
  NSArray *devices = [AVCaptureDevice devicesWithMediaType:mediaType];
  AVCaptureDevice *captureDevice = [devices firstObject];

  for (AVCaptureDevice *device in devices)
  {
    if ([device position] == position)
    {
      captureDevice = device;
      break;
    }
  }

  return captureDevice;
}

- (void)setFlashMode:(AVCaptureFlashMode)flashMode forDevice:(AVCaptureDevice *)device
{
  if (device.hasFlash && [device isFlashModeSupported:flashMode])
  {
    NSError *error = nil;
    if ([device lockForConfiguration:&error])
    {
      [device setFlashMode:flashMode];
      [device unlockForConfiguration];
    }
    else
    {
      NSLog(@"%@", error);
    }
  }
}

- (void)subjectAreaDidChange:(NSNotification *)notification
{
  CGPoint devicePoint = CGPointMake(.5, .5);
  [self focusWithMode:AVCaptureFocusModeContinuousAutoFocus exposeWithMode:AVCaptureExposureModeContinuousAutoExposure atDevicePoint:devicePoint monitorSubjectAreaChange:NO];
}

- (void)focusWithMode:(AVCaptureFocusMode)focusMode exposeWithMode:(AVCaptureExposureMode)exposureMode atDevicePoint:(CGPoint)point monitorSubjectAreaChange:(BOOL)monitorSubjectAreaChange
{
  dispatch_async([self sessionQueue], ^{
    AVCaptureDevice *device = [[self captureDeviceInput] device];
    NSError *error = nil;
    if ([device lockForConfiguration:&error])
    {
      if ([device isFocusPointOfInterestSupported] && [device isFocusModeSupported:focusMode])
      {
        [device setFocusMode:focusMode];
        [device setFocusPointOfInterest:point];
      }
      if ([device isExposurePointOfInterestSupported] && [device isExposureModeSupported:exposureMode])
      {
        [device setExposureMode:exposureMode];
        [device setExposurePointOfInterest:point];
      }
      [device setSubjectAreaChangeMonitoringEnabled:monitorSubjectAreaChange];
      [device unlockForConfiguration];
    }
    else
    {
      NSLog(@"%@", error);
    }
  });
}

@end