mirror of https://github.com/BlueWallet/BlueWallet.git (synced 2024-11-20 10:12:01 +01:00)
1720 lines
73 KiB
8a9
> #if !TARGET_OS_MACCATALYST
9a11
> #endif
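Note: the two one-line additions above (8a9, 9a11) bracket a header line with a Mac Catalyst guard. The guarded line itself is context that these hunks do not show; judging by the ALAssetsLibrary guards further down the diff, it is presumably the AssetsLibrary import. A minimal sketch of the assumed result:

    #if !TARGET_OS_MACCATALYST
    #import <AssetsLibrary/AssetsLibrary.h>  // assumed guarded line; AssetsLibrary does not exist on Mac Catalyst
    #endif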
33,34c35,36
< self.session = [AVCaptureSession new];
< #if !(TARGET_IPHONE_SIMULATOR)
---
> self.session = [AVCaptureSession new];
> #if !(TARGET_IPHONE_SIMULATOR)
37,42c39,44
< #endif
<
< if(!self.camera){
< self.camera = [[RCTCamera alloc] initWithManager:self bridge:self.bridge];
< }
< return self.camera;
---
> #endif
>
> if(!self.camera){
> self.camera = [[RCTCamera alloc] initWithManager:self bridge:self.bridge];
> }
> return self.camera;
47c49
< return NO;
---
> return NO;
52c54
<
---
>
55,66c57,68
< @"upce": AVMetadataObjectTypeUPCECode,
< @"code39": AVMetadataObjectTypeCode39Code,
< @"code39mod43": AVMetadataObjectTypeCode39Mod43Code,
< @"ean13": AVMetadataObjectTypeEAN13Code,
< @"ean8": AVMetadataObjectTypeEAN8Code,
< @"code93": AVMetadataObjectTypeCode93Code,
< @"code128": AVMetadataObjectTypeCode128Code,
< @"pdf417": AVMetadataObjectTypePDF417Code,
< @"qr": AVMetadataObjectTypeQRCode,
< @"aztec": AVMetadataObjectTypeAztecCode
< }];
<
---
> @"upce": AVMetadataObjectTypeUPCECode,
> @"code39": AVMetadataObjectTypeCode39Code,
> @"code39mod43": AVMetadataObjectTypeCode39Mod43Code,
> @"ean13": AVMetadataObjectTypeEAN13Code,
> @"ean8": AVMetadataObjectTypeEAN8Code,
> @"code93": AVMetadataObjectTypeCode93Code,
> @"code128": AVMetadataObjectTypeCode128Code,
> @"pdf417": AVMetadataObjectTypePDF417Code,
> @"qr": AVMetadataObjectTypeQRCode,
> @"aztec": AVMetadataObjectTypeAztecCode
> }];
>
70c72
<
---
>
74c76
<
---
>
78,136c80,138
<
<
< return @{
< @"Aspect": @{
< @"stretch": @(RCTCameraAspectStretch),
< @"fit": @(RCTCameraAspectFit),
< @"fill": @(RCTCameraAspectFill)
< },
< @"BarCodeType": runtimeBarcodeTypes,
< @"Type": @{
< @"front": @(RCTCameraTypeFront),
< @"back": @(RCTCameraTypeBack)
< },
< @"CaptureMode": @{
< @"still": @(RCTCameraCaptureModeStill),
< @"video": @(RCTCameraCaptureModeVideo)
< },
< @"CaptureQuality": @{
< @"low": @(RCTCameraCaptureSessionPresetLow),
< @"AVCaptureSessionPresetLow": @(RCTCameraCaptureSessionPresetLow),
< @"medium": @(RCTCameraCaptureSessionPresetMedium),
< @"AVCaptureSessionPresetMedium": @(RCTCameraCaptureSessionPresetMedium),
< @"high": @(RCTCameraCaptureSessionPresetHigh),
< @"AVCaptureSessionPresetHigh": @(RCTCameraCaptureSessionPresetHigh),
< @"photo": @(RCTCameraCaptureSessionPresetPhoto),
< @"AVCaptureSessionPresetPhoto": @(RCTCameraCaptureSessionPresetPhoto),
< @"480p": @(RCTCameraCaptureSessionPreset480p),
< @"AVCaptureSessionPreset640x480": @(RCTCameraCaptureSessionPreset480p),
< @"720p": @(RCTCameraCaptureSessionPreset720p),
< @"AVCaptureSessionPreset1280x720": @(RCTCameraCaptureSessionPreset720p),
< @"1080p": @(RCTCameraCaptureSessionPreset1080p),
< @"AVCaptureSessionPreset1920x1080": @(RCTCameraCaptureSessionPreset1080p),
< @"4k": @(RCTCameraCaptureSessionPreset4k),
< @"AVCaptureSessionPreset3840x2160": @(RCTCameraCaptureSessionPreset4k)
< },
< @"CaptureTarget": @{
< @"memory": @(RCTCameraCaptureTargetMemory),
< @"disk": @(RCTCameraCaptureTargetDisk),
< @"temp": @(RCTCameraCaptureTargetTemp),
< @"cameraRoll": @(RCTCameraCaptureTargetCameraRoll)
< },
< @"Orientation": @{
< @"auto": @(RCTCameraOrientationAuto),
< @"landscapeLeft": @(RCTCameraOrientationLandscapeLeft),
< @"landscapeRight": @(RCTCameraOrientationLandscapeRight),
< @"portrait": @(RCTCameraOrientationPortrait),
< @"portraitUpsideDown": @(RCTCameraOrientationPortraitUpsideDown)
< },
< @"FlashMode": @{
< @"off": @(RCTCameraFlashModeOff),
< @"on": @(RCTCameraFlashModeOn),
< @"auto": @(RCTCameraFlashModeAuto)
< },
< @"TorchMode": @{
< @"off": @(RCTCameraTorchModeOff),
< @"on": @(RCTCameraTorchModeOn),
< @"auto": @(RCTCameraTorchModeAuto)
< }
< };
---
>
>
> return @{
> @"Aspect": @{
> @"stretch": @(RCTCameraAspectStretch),
> @"fit": @(RCTCameraAspectFit),
> @"fill": @(RCTCameraAspectFill)
> },
> @"BarCodeType": runtimeBarcodeTypes,
> @"Type": @{
> @"front": @(RCTCameraTypeFront),
> @"back": @(RCTCameraTypeBack)
> },
> @"CaptureMode": @{
> @"still": @(RCTCameraCaptureModeStill),
> @"video": @(RCTCameraCaptureModeVideo)
> },
> @"CaptureQuality": @{
> @"low": @(RCTCameraCaptureSessionPresetLow),
> @"AVCaptureSessionPresetLow": @(RCTCameraCaptureSessionPresetLow),
> @"medium": @(RCTCameraCaptureSessionPresetMedium),
> @"AVCaptureSessionPresetMedium": @(RCTCameraCaptureSessionPresetMedium),
> @"high": @(RCTCameraCaptureSessionPresetHigh),
> @"AVCaptureSessionPresetHigh": @(RCTCameraCaptureSessionPresetHigh),
> @"photo": @(RCTCameraCaptureSessionPresetPhoto),
> @"AVCaptureSessionPresetPhoto": @(RCTCameraCaptureSessionPresetPhoto),
> @"480p": @(RCTCameraCaptureSessionPreset480p),
> @"AVCaptureSessionPreset640x480": @(RCTCameraCaptureSessionPreset480p),
> @"720p": @(RCTCameraCaptureSessionPreset720p),
> @"AVCaptureSessionPreset1280x720": @(RCTCameraCaptureSessionPreset720p),
> @"1080p": @(RCTCameraCaptureSessionPreset1080p),
> @"AVCaptureSessionPreset1920x1080": @(RCTCameraCaptureSessionPreset1080p),
> @"4k": @(RCTCameraCaptureSessionPreset4k),
> @"AVCaptureSessionPreset3840x2160": @(RCTCameraCaptureSessionPreset4k)
> },
> @"CaptureTarget": @{
> @"memory": @(RCTCameraCaptureTargetMemory),
> @"disk": @(RCTCameraCaptureTargetDisk),
> @"temp": @(RCTCameraCaptureTargetTemp),
> @"cameraRoll": @(RCTCameraCaptureTargetCameraRoll)
> },
> @"Orientation": @{
> @"auto": @(RCTCameraOrientationAuto),
> @"landscapeLeft": @(RCTCameraOrientationLandscapeLeft),
> @"landscapeRight": @(RCTCameraOrientationLandscapeRight),
> @"portrait": @(RCTCameraOrientationPortrait),
> @"portraitUpsideDown": @(RCTCameraOrientationPortraitUpsideDown)
> },
> @"FlashMode": @{
> @"off": @(RCTCameraFlashModeOff),
> @"on": @(RCTCameraFlashModeOn),
> @"auto": @(RCTCameraFlashModeAuto)
> },
> @"TorchMode": @{
> @"off": @(RCTCameraTorchModeOff),
> @"on": @(RCTCameraTorchModeOn),
> @"auto": @(RCTCameraTorchModeAuto)
> }
> };
145,175c147,177
< NSInteger quality = [RCTConvert NSInteger:json];
< NSString *qualityString;
< switch (quality) {
< default:
< case RCTCameraCaptureSessionPresetHigh:
< qualityString = AVCaptureSessionPresetHigh;
< break;
< case RCTCameraCaptureSessionPresetMedium:
< qualityString = AVCaptureSessionPresetMedium;
< break;
< case RCTCameraCaptureSessionPresetLow:
< qualityString = AVCaptureSessionPresetLow;
< break;
< case RCTCameraCaptureSessionPresetPhoto:
< qualityString = AVCaptureSessionPresetPhoto;
< break;
< case RCTCameraCaptureSessionPreset4k:
< qualityString = AVCaptureSessionPreset3840x2160;
< break;
< case RCTCameraCaptureSessionPreset1080p:
< qualityString = AVCaptureSessionPreset1920x1080;
< break;
< case RCTCameraCaptureSessionPreset720p:
< qualityString = AVCaptureSessionPreset1280x720;
< break;
< case RCTCameraCaptureSessionPreset480p:
< qualityString = AVCaptureSessionPreset640x480;
< break;
< }
<
< [self setCaptureQuality:qualityString];
---
> NSInteger quality = [RCTConvert NSInteger:json];
> NSString *qualityString;
> switch (quality) {
> default:
> case RCTCameraCaptureSessionPresetHigh:
> qualityString = AVCaptureSessionPresetHigh;
> break;
> case RCTCameraCaptureSessionPresetMedium:
> qualityString = AVCaptureSessionPresetMedium;
> break;
> case RCTCameraCaptureSessionPresetLow:
> qualityString = AVCaptureSessionPresetLow;
> break;
> case RCTCameraCaptureSessionPresetPhoto:
> qualityString = AVCaptureSessionPresetPhoto;
> break;
> case RCTCameraCaptureSessionPreset4k:
> qualityString = AVCaptureSessionPreset3840x2160;
> break;
> case RCTCameraCaptureSessionPreset1080p:
> qualityString = AVCaptureSessionPreset1920x1080;
> break;
> case RCTCameraCaptureSessionPreset720p:
> qualityString = AVCaptureSessionPreset1280x720;
> break;
> case RCTCameraCaptureSessionPreset480p:
> qualityString = AVCaptureSessionPreset640x480;
> break;
> }
>
> [self setCaptureQuality:qualityString];
179,194c181,196
< NSInteger aspect = [RCTConvert NSInteger:json];
< NSString *aspectString;
< switch (aspect) {
< default:
< case RCTCameraAspectFill:
< aspectString = AVLayerVideoGravityResizeAspectFill;
< break;
< case RCTCameraAspectFit:
< aspectString = AVLayerVideoGravityResizeAspect;
< break;
< case RCTCameraAspectStretch:
< aspectString = AVLayerVideoGravityResize;
< break;
< }
<
< self.previewLayer.videoGravity = aspectString;
---
> NSInteger aspect = [RCTConvert NSInteger:json];
> NSString *aspectString;
> switch (aspect) {
> default:
> case RCTCameraAspectFill:
> aspectString = AVLayerVideoGravityResizeAspectFill;
> break;
> case RCTCameraAspectFit:
> aspectString = AVLayerVideoGravityResizeAspect;
> break;
> case RCTCameraAspectStretch:
> aspectString = AVLayerVideoGravityResize;
> break;
> }
>
> self.previewLayer.videoGravity = aspectString;
198,243c200,245
< NSInteger type = [RCTConvert NSInteger:json];
<
< self.presetCamera = type;
< if (self.session.isRunning) {
< dispatch_async(self.sessionQueue, ^{
< AVCaptureDevice *currentCaptureDevice = [self.videoCaptureDeviceInput device];
< AVCaptureDevicePosition position = (AVCaptureDevicePosition)type;
< AVCaptureDevice *captureDevice = [self deviceWithMediaType:AVMediaTypeVideo preferringPosition:(AVCaptureDevicePosition)position];
<
< if (captureDevice == nil) {
< return;
< }
<
< self.presetCamera = type;
<
< NSError *error = nil;
< AVCaptureDeviceInput *captureDeviceInput = [AVCaptureDeviceInput deviceInputWithDevice:captureDevice error:&error];
<
< if (error || captureDeviceInput == nil)
< {
< NSLog(@"%@", error);
< return;
< }
<
< [self.session beginConfiguration];
<
< [self.session removeInput:self.videoCaptureDeviceInput];
<
< if ([self.session canAddInput:captureDeviceInput])
< {
< [self.session addInput:captureDeviceInput];
<
< [NSNotificationCenter.defaultCenter removeObserver:self name:AVCaptureDeviceSubjectAreaDidChangeNotification object:currentCaptureDevice];
< [NSNotificationCenter.defaultCenter addObserver:self selector:@selector(subjectAreaDidChange:) name:AVCaptureDeviceSubjectAreaDidChangeNotification object:captureDevice];
< self.videoCaptureDeviceInput = captureDeviceInput;
< [self setFlashMode];
< }
< else
< {
< [self.session addInput:self.videoCaptureDeviceInput];
< }
<
< [self.session commitConfiguration];
< });
< }
< [self initializeCaptureSessionInput:AVMediaTypeVideo];
---
> NSInteger type = [RCTConvert NSInteger:json];
>
> self.presetCamera = type;
> if (self.session.isRunning) {
> dispatch_async(self.sessionQueue, ^{
> AVCaptureDevice *currentCaptureDevice = [self.videoCaptureDeviceInput device];
> AVCaptureDevicePosition position = (AVCaptureDevicePosition)type;
> AVCaptureDevice *captureDevice = [self deviceWithMediaType:AVMediaTypeVideo preferringPosition:(AVCaptureDevicePosition)position];
>
> if (captureDevice == nil) {
> return;
> }
>
> self.presetCamera = type;
>
> NSError *error = nil;
> AVCaptureDeviceInput *captureDeviceInput = [AVCaptureDeviceInput deviceInputWithDevice:captureDevice error:&error];
>
> if (error || captureDeviceInput == nil)
> {
> NSLog(@"%@", error);
> return;
> }
>
> [self.session beginConfiguration];
>
> [self.session removeInput:self.videoCaptureDeviceInput];
>
> if ([self.session canAddInput:captureDeviceInput])
> {
> [self.session addInput:captureDeviceInput];
>
> [NSNotificationCenter.defaultCenter removeObserver:self name:AVCaptureDeviceSubjectAreaDidChangeNotification object:currentCaptureDevice];
> [NSNotificationCenter.defaultCenter addObserver:self selector:@selector(subjectAreaDidChange:) name:AVCaptureDeviceSubjectAreaDidChangeNotification object:captureDevice];
> self.videoCaptureDeviceInput = captureDeviceInput;
> [self setFlashMode];
> }
> else
> {
> [self.session addInput:self.videoCaptureDeviceInput];
> }
>
> [self.session commitConfiguration];
> });
> }
> [self initializeCaptureSessionInput:AVMediaTypeVideo];
254c256
<
---
>
277,289c279,291
< dispatch_async(self.sessionQueue, ^{
< NSInteger *torchMode = [RCTConvert NSInteger:json];
< AVCaptureDevice *device = [self.videoCaptureDeviceInput device];
< NSError *error = nil;
<
< if (![device hasTorch]) return;
< if (![device lockForConfiguration:&error]) {
< NSLog(@"%@", error);
< return;
< }
< [device setTorchMode: torchMode];
< [device unlockForConfiguration];
< });
---
> dispatch_async(self.sessionQueue, ^{
> NSInteger *torchMode = [RCTConvert NSInteger:json];
> AVCaptureDevice *device = [self.videoCaptureDeviceInput device];
> NSError *error = nil;
>
> if (![device hasTorch]) return;
> if (![device lockForConfiguration:&error]) {
> NSLog(@"%@", error);
> return;
> }
> [device setTorchMode: torchMode];
> [device unlockForConfiguration];
> });
293,294c295,296
< BOOL enabled = [RCTConvert BOOL:json];
< [UIApplication sharedApplication].idleTimerDisabled = enabled;
---
> BOOL enabled = [RCTConvert BOOL:json];
> [UIApplication sharedApplication].idleTimerDisabled = enabled;
298c300
< self.mirrorImage = [RCTConvert BOOL:json];
---
> self.mirrorImage = [RCTConvert BOOL:json];
310c312
< self.barCodeTypes = [RCTConvert NSArray:json];
---
> self.barCodeTypes = [RCTConvert NSArray:json];
314,318c316,320
< BOOL captureAudio = [RCTConvert BOOL:json];
< if (captureAudio) {
< RCTLog(@"capturing audio");
< [self initializeCaptureSessionInput:AVMediaTypeAudio];
< }
---
> BOOL captureAudio = [RCTConvert BOOL:json];
> if (captureAudio) {
> RCTLog(@"capturing audio");
> [self initializeCaptureSessionInput:AVMediaTypeAudio];
> }
324,325c326,327
< @"focusChanged",
< @"zoomChanged",
---
> @"focusChanged",
> @"zoomChanged",
330,338c332,340
< if ((self = [super init])) {
< self.mirrorImage = false;
< self.mirrorVideo = false;
<
< self.sessionQueue = dispatch_queue_create("cameraManagerQueue", DISPATCH_QUEUE_SERIAL);
<
< self.sensorOrientationChecker = [RCTSensorOrientationChecker new];
< }
< return self;
---
> if ((self = [super init])) {
> self.mirrorImage = false;
> self.mirrorVideo = false;
>
> self.sessionQueue = dispatch_queue_create("cameraManagerQueue", DISPATCH_QUEUE_SERIAL);
>
> self.sensorOrientationChecker = [RCTSensorOrientationChecker new];
> }
> return self;
343,355c345,357
< __block NSString *mediaType = AVMediaTypeVideo;
<
< [AVCaptureDevice requestAccessForMediaType:mediaType completionHandler:^(BOOL granted) {
< if (!granted) {
< resolve(@(granted));
< }
< else {
< mediaType = AVMediaTypeAudio;
< [AVCaptureDevice requestAccessForMediaType:mediaType completionHandler:^(BOOL granted) {
< resolve(@(granted));
< }];
< }
< }];
---
> __block NSString *mediaType = AVMediaTypeVideo;
>
> [AVCaptureDevice requestAccessForMediaType:mediaType completionHandler:^(BOOL granted) {
> if (!granted) {
> resolve(@(granted));
> }
> else {
> mediaType = AVMediaTypeAudio;
> [AVCaptureDevice requestAccessForMediaType:mediaType completionHandler:^(BOOL granted) {
> resolve(@(granted));
> }];
> }
> }];
362c364
<
---
>
371c373
<
---
>
378c380
< [self setOrientation:orientation];
---
> [self setOrientation:orientation];
384,392c386,394
< NSInteger captureMode = [[options valueForKey:@"mode"] intValue];
< NSInteger captureTarget = [[options valueForKey:@"target"] intValue];
<
< if (captureMode == RCTCameraCaptureModeStill) {
< [self captureStill:captureTarget options:options resolve:resolve reject:reject];
< }
< else if (captureMode == RCTCameraCaptureModeVideo) {
< [self captureVideo:captureTarget options:options resolve:resolve reject:reject];
< }
---
> NSInteger captureMode = [[options valueForKey:@"mode"] intValue];
> NSInteger captureTarget = [[options valueForKey:@"target"] intValue];
>
> if (captureMode == RCTCameraCaptureModeStill) {
> [self captureStill:captureTarget options:options resolve:resolve reject:reject];
> }
> else if (captureMode == RCTCameraCaptureModeVideo) {
> [self captureVideo:captureTarget options:options resolve:resolve reject:reject];
> }
418,420c420,422
< if (self.movieFileOutput.recording) {
< [self.movieFileOutput stopRecording];
< }
---
> if (self.movieFileOutput.recording) {
> [self.movieFileOutput stopRecording];
> }
424,452c426,454
< NSArray *devices = [AVCaptureDevice devices];
< AVCaptureDevice *frontCamera;
< AVCaptureDevice *backCamera;
< double frontFov = 0.0;
< double backFov = 0.0;
<
< for (AVCaptureDevice *device in devices) {
<
< NSLog(@"Device name: %@", [device localizedName]);
<
< if ([device hasMediaType:AVMediaTypeVideo]) {
<
< if ([device position] == AVCaptureDevicePositionBack) {
< NSLog(@"Device position : back");
< backCamera = device;
< backFov = backCamera.activeFormat.videoFieldOfView;
< }
< else {
< NSLog(@"Device position : front");
< frontCamera = device;
< frontFov = frontCamera.activeFormat.videoFieldOfView;
< }
< }
< }
<
< resolve(@{
< [NSNumber numberWithInt:RCTCameraTypeBack]: [NSNumber numberWithDouble: backFov],
< [NSNumber numberWithInt:RCTCameraTypeFront]: [NSNumber numberWithDouble: frontFov]
< });
---
> NSArray *devices = [AVCaptureDevice devices];
> AVCaptureDevice *frontCamera;
> AVCaptureDevice *backCamera;
> double frontFov = 0.0;
> double backFov = 0.0;
>
> for (AVCaptureDevice *device in devices) {
>
> NSLog(@"Device name: %@", [device localizedName]);
>
> if ([device hasMediaType:AVMediaTypeVideo]) {
>
> if ([device position] == AVCaptureDevicePositionBack) {
> NSLog(@"Device position : back");
> backCamera = device;
> backFov = backCamera.activeFormat.videoFieldOfView;
> }
> else {
> NSLog(@"Device position : front");
> frontCamera = device;
> frontFov = frontCamera.activeFormat.videoFieldOfView;
> }
> }
> }
>
> resolve(@{
> [NSNumber numberWithInt:RCTCameraTypeBack]: [NSNumber numberWithDouble: backFov],
> [NSNumber numberWithInt:RCTCameraTypeFront]: [NSNumber numberWithDouble: frontFov]
> });
476c478
< return;
---
> return;
478,516c480,518
< dispatch_async(self.sessionQueue, ^{
< if (self.presetCamera == AVCaptureDevicePositionUnspecified) {
< self.presetCamera = AVCaptureDevicePositionBack;
< }
<
< AVCaptureStillImageOutput *stillImageOutput = [[AVCaptureStillImageOutput alloc] init];
< if ([self.session canAddOutput:stillImageOutput])
< {
< stillImageOutput.outputSettings = @{AVVideoCodecKey : AVVideoCodecJPEG};
< [self.session addOutput:stillImageOutput];
< self.stillImageOutput = stillImageOutput;
< }
<
< AVCaptureMovieFileOutput *movieFileOutput = [[AVCaptureMovieFileOutput alloc] init];
< if ([self.session canAddOutput:movieFileOutput])
< {
< [self.session addOutput:movieFileOutput];
< self.movieFileOutput = movieFileOutput;
< }
<
< AVCaptureMetadataOutput *metadataOutput = [[AVCaptureMetadataOutput alloc] init];
< if ([self.session canAddOutput:metadataOutput]) {
< [metadataOutput setMetadataObjectsDelegate:self queue:self.sessionQueue];
< [self.session addOutput:metadataOutput];
< [metadataOutput setMetadataObjectTypes:self.barCodeTypes];
< self.metadataOutput = metadataOutput;
< }
<
< __weak RCTCameraManager *weakSelf = self;
< [self setRuntimeErrorHandlingObserver:[NSNotificationCenter.defaultCenter addObserverForName:AVCaptureSessionRuntimeErrorNotification object:self.session queue:nil usingBlock:^(NSNotification *note) {
< RCTCameraManager *strongSelf = weakSelf;
< dispatch_async(strongSelf.sessionQueue, ^{
< // Manually restarting the session since it must have been stopped due to an error.
< [strongSelf.session startRunning];
< });
< }]];
<
< [self.session startRunning];
< });
---
> dispatch_async(self.sessionQueue, ^{
> if (self.presetCamera == AVCaptureDevicePositionUnspecified) {
> self.presetCamera = AVCaptureDevicePositionBack;
> }
>
> AVCaptureStillImageOutput *stillImageOutput = [[AVCaptureStillImageOutput alloc] init];
> if ([self.session canAddOutput:stillImageOutput])
> {
> stillImageOutput.outputSettings = @{AVVideoCodecKey : AVVideoCodecJPEG};
> [self.session addOutput:stillImageOutput];
> self.stillImageOutput = stillImageOutput;
> }
>
> AVCaptureMovieFileOutput *movieFileOutput = [[AVCaptureMovieFileOutput alloc] init];
> if ([self.session canAddOutput:movieFileOutput])
> {
> [self.session addOutput:movieFileOutput];
> self.movieFileOutput = movieFileOutput;
> }
>
> AVCaptureMetadataOutput *metadataOutput = [[AVCaptureMetadataOutput alloc] init];
> if ([self.session canAddOutput:metadataOutput]) {
> [metadataOutput setMetadataObjectsDelegate:self queue:self.sessionQueue];
> [self.session addOutput:metadataOutput];
> [metadataOutput setMetadataObjectTypes:self.barCodeTypes];
> self.metadataOutput = metadataOutput;
> }
>
> __weak RCTCameraManager *weakSelf = self;
> [self setRuntimeErrorHandlingObserver:[NSNotificationCenter.defaultCenter addObserverForName:AVCaptureSessionRuntimeErrorNotification object:self.session queue:nil usingBlock:^(NSNotification *note) {
> RCTCameraManager *strongSelf = weakSelf;
> dispatch_async(strongSelf.sessionQueue, ^{
> // Manually restarting the session since it must have been stopped due to an error.
> [strongSelf.session startRunning];
> });
> }]];
>
> [self.session startRunning];
> });
521,524d522
< self.camera = nil;
< return;
< #endif
< dispatch_async(self.sessionQueue, ^{
526,536c524,538
< [self.previewLayer removeFromSuperlayer];
< [self.session commitConfiguration];
< [self.session stopRunning];
< for(AVCaptureInput *input in self.session.inputs) {
< [self.session removeInput:input];
< }
<
< for(AVCaptureOutput *output in self.session.outputs) {
< [self.session removeOutput:output];
< }
< });
---
> return;
> #endif
> dispatch_async(self.sessionQueue, ^{
> self.camera = nil;
> [self.previewLayer removeFromSuperlayer];
> [self.session commitConfiguration];
> [self.session stopRunning];
> for(AVCaptureInput *input in self.session.inputs) {
> [self.session removeInput:input];
> }
>
> for(AVCaptureOutput *output in self.session.outputs) {
> [self.session removeOutput:output];
> }
> });
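Note: across the two hunks above, self.camera = nil; now appears inside the dispatch_async block on self.sessionQueue, so the camera reference is released on the same serial queue that tears the session down rather than synchronously on the caller's thread. A sketch of the resulting method body (the method name and the simulator guard around the early return are reconstructed from context the hunks do not show):

    - (void)stopSession {
    #if TARGET_IPHONE_SIMULATOR
      return;
    #endif
      dispatch_async(self.sessionQueue, ^{
        self.camera = nil;  // released on the session queue, after any in-flight capture work
        [self.previewLayer removeFromSuperlayer];
        [self.session commitConfiguration];
        [self.session stopRunning];
        for (AVCaptureInput *input in self.session.inputs) {
          [self.session removeInput:input];
        }
        for (AVCaptureOutput *output in self.session.outputs) {
          [self.session removeOutput:output];
        }
      });
    }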
540,545c542,549
< dispatch_async(self.sessionQueue, ^{
< if (type == AVMediaTypeAudio) {
< for (AVCaptureDeviceInput* input in [self.session inputs]) {
< if ([input.device hasMediaType:AVMediaTypeAudio]) {
< // If an audio input has been configured we don't need to set it up again
< return;
---
> dispatch_async(self.sessionQueue, ^{
> if (type == AVMediaTypeAudio) {
> for (AVCaptureDeviceInput* input in [self.session inputs]) {
> if ([input.device hasMediaType:AVMediaTypeAudio]) {
> // If an audio input has been configured we don't need to set it up again
> return;
> }
> }
547,596c551,598
< }
< }
<
< [self.session beginConfiguration];
<
< NSError *error = nil;
<
< AVCaptureDevice *currentCaptureDevice = [self.videoCaptureDeviceInput device];
< AVCaptureDevice *captureDevice;
<
< if (type == AVMediaTypeAudio) {
< captureDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
< }
< else if (type == AVMediaTypeVideo) {
< captureDevice = [self deviceWithMediaType:AVMediaTypeVideo preferringPosition:self.presetCamera];
< }
<
< if (captureDevice == nil) {
< return;
< }
<
< AVCaptureDeviceInput *captureDeviceInput = [AVCaptureDeviceInput deviceInputWithDevice:captureDevice error:&error];
<
< if (error || captureDeviceInput == nil) {
< NSLog(@"%@", error);
< return;
< }
<
< if (type == AVMediaTypeVideo) {
< [self.session removeInput:self.videoCaptureDeviceInput];
< }
<
< if ([self.session canAddInput:captureDeviceInput]) {
< [self.session addInput:captureDeviceInput];
<
< if (type == AVMediaTypeAudio) {
< self.audioCaptureDeviceInput = captureDeviceInput;
< }
< else if (type == AVMediaTypeVideo) {
< [NSNotificationCenter.defaultCenter removeObserver:self name:AVCaptureDeviceSubjectAreaDidChangeNotification object:currentCaptureDevice];
< [NSNotificationCenter.defaultCenter addObserver:self selector:@selector(subjectAreaDidChange:) name:AVCaptureDeviceSubjectAreaDidChangeNotification object:captureDevice];
<
< self.videoCaptureDeviceInput = captureDeviceInput;
< [self setFlashMode];
< }
< [self.metadataOutput setMetadataObjectTypes:self.metadataOutput.availableMetadataObjectTypes];
< }
<
< [self.session commitConfiguration];
< });
---
>
> [self.session beginConfiguration];
>
> NSError *error = nil;
>
> AVCaptureDevice *currentCaptureDevice = [self.videoCaptureDeviceInput device];
> AVCaptureDevice *captureDevice;
>
> if (type == AVMediaTypeAudio) {
> captureDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
> }
> else if (type == AVMediaTypeVideo) {
> captureDevice = [self deviceWithMediaType:AVMediaTypeVideo preferringPosition:self.presetCamera];
> }
>
> if (captureDevice == nil) {
> return;
> }
>
> AVCaptureDeviceInput *captureDeviceInput = [AVCaptureDeviceInput deviceInputWithDevice:captureDevice error:&error];
>
> if (error || captureDeviceInput == nil) {
> NSLog(@"%@", error);
> return;
> }
>
> if (type == AVMediaTypeVideo) {
> [self.session removeInput:self.videoCaptureDeviceInput];
> }
>
> if ([self.session canAddInput:captureDeviceInput]) {
> [self.session addInput:captureDeviceInput];
>
> if (type == AVMediaTypeAudio) {
> self.audioCaptureDeviceInput = captureDeviceInput;
> }
> else if (type == AVMediaTypeVideo) {
> [NSNotificationCenter.defaultCenter removeObserver:self name:AVCaptureDeviceSubjectAreaDidChangeNotification object:currentCaptureDevice];
> [NSNotificationCenter.defaultCenter addObserver:self selector:@selector(subjectAreaDidChange:) name:AVCaptureDeviceSubjectAreaDidChangeNotification object:captureDevice];
>
> self.videoCaptureDeviceInput = captureDeviceInput;
> [self setFlashMode];
> }
> [self.metadataOutput setMetadataObjectTypes:self.metadataOutput.availableMetadataObjectTypes];
> }
>
> [self.session commitConfiguration];
> });
603,609c605,611
< #if TARGET_IPHONE_SIMULATOR
< [self captureStill:target options:options orientation:self.previewLayer.connection.videoOrientation resolve:resolve reject:reject];
< #else
< [self.sensorOrientationChecker getDeviceOrientationWithBlock:^(UIInterfaceOrientation orientation) {
< [self captureStill:target options:options orientation:[self.sensorOrientationChecker convertToAVCaptureVideoOrientation: orientation] resolve:resolve reject:reject];
< }];
< #endif
---
> #if TARGET_IPHONE_SIMULATOR
> [self captureStill:target options:options orientation:self.previewLayer.connection.videoOrientation resolve:resolve reject:reject];
> #else
> [self.sensorOrientationChecker getDeviceOrientationWithBlock:^(UIInterfaceOrientation orientation) {
> [self captureStill:target options:options orientation:[self.sensorOrientationChecker convertToAVCaptureVideoOrientation: orientation] resolve:resolve reject:reject];
> }];
> #endif
617c619
< dispatch_async(self.sessionQueue, ^{
---
> dispatch_async(self.sessionQueue, ^{
619,642c621,644
< CGSize size = CGSizeMake(720, 1280);
< UIGraphicsBeginImageContextWithOptions(size, YES, 0);
< // Thanks https://gist.github.com/kylefox/1689973
< CGFloat hue = ( arc4random() % 256 / 256.0 ); // 0.0 to 1.0
< CGFloat saturation = ( arc4random() % 128 / 256.0 ) + 0.5; // 0.5 to 1.0, away from white
< CGFloat brightness = ( arc4random() % 128 / 256.0 ) + 0.5; // 0.5 to 1.0, away from black
< UIColor *color = [UIColor colorWithHue:hue saturation:saturation brightness:brightness alpha:1];
< [color setFill];
< UIRectFill(CGRectMake(0, 0, size.width, size.height));
< NSDate *currentDate = [NSDate date];
< NSDateFormatter *dateFormatter = [[NSDateFormatter alloc] init];
< [dateFormatter setDateFormat:@"dd.MM.YY HH:mm:ss"];
< NSString *text = [dateFormatter stringFromDate:currentDate];
< UIFont *font = [UIFont systemFontOfSize:40.0];
< NSDictionary *attributes = [NSDictionary dictionaryWithObjects:
< @[font, [UIColor blackColor]]
< forKeys:
< @[NSFontAttributeName, NSForegroundColorAttributeName]];
< [text drawAtPoint:CGPointMake(size.width/3, size.height/2) withAttributes:attributes];
< UIImage *image = UIGraphicsGetImageFromCurrentImageContext();
< UIGraphicsEndImageContext();
<
< NSData *imageData = UIImageJPEGRepresentation(image, 1.0);
< [self saveImage:imageData imageSize:size target:target metadata:nil resolve:resolve reject:reject];
---
> CGSize size = CGSizeMake(720, 1280);
> UIGraphicsBeginImageContextWithOptions(size, YES, 0);
> // Thanks https://gist.github.com/kylefox/1689973
> CGFloat hue = ( arc4random() % 256 / 256.0 ); // 0.0 to 1.0
> CGFloat saturation = ( arc4random() % 128 / 256.0 ) + 0.5; // 0.5 to 1.0, away from white
> CGFloat brightness = ( arc4random() % 128 / 256.0 ) + 0.5; // 0.5 to 1.0, away from black
> UIColor *color = [UIColor colorWithHue:hue saturation:saturation brightness:brightness alpha:1];
> [color setFill];
> UIRectFill(CGRectMake(0, 0, size.width, size.height));
> NSDate *currentDate = [NSDate date];
> NSDateFormatter *dateFormatter = [[NSDateFormatter alloc] init];
> [dateFormatter setDateFormat:@"dd.MM.YY HH:mm:ss"];
> NSString *text = [dateFormatter stringFromDate:currentDate];
> UIFont *font = [UIFont systemFontOfSize:40.0];
> NSDictionary *attributes = [NSDictionary dictionaryWithObjects:
> @[font, [UIColor blackColor]]
> forKeys:
> @[NSFontAttributeName, NSForegroundColorAttributeName]];
> [text drawAtPoint:CGPointMake(size.width/3, size.height/2) withAttributes:attributes];
> UIImage *image = UIGraphicsGetImageFromCurrentImageContext();
> UIGraphicsEndImageContext();
>
> NSData *imageData = UIImageJPEGRepresentation(image, 1.0);
> [self saveImage:imageData imageSize:size target:target metadata:nil resolve:resolve reject:reject];
644,680c646,735
< [[self.stillImageOutput connectionWithMediaType:AVMediaTypeVideo] setVideoOrientation:orientation];
<
< [self.stillImageOutput captureStillImageAsynchronouslyFromConnection:[self.stillImageOutput connectionWithMediaType:AVMediaTypeVideo] completionHandler:^(CMSampleBufferRef imageDataSampleBuffer, NSError *error) {
<
< if (imageDataSampleBuffer) {
< NSData *imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageDataSampleBuffer];
<
< // Create image source
< CGImageSourceRef source = CGImageSourceCreateWithData((CFDataRef)imageData, NULL);
< //get all the metadata in the image
< NSMutableDictionary *imageMetadata = [(NSDictionary *) CFBridgingRelease(CGImageSourceCopyPropertiesAtIndex(source, 0, NULL)) mutableCopy];
<
< // create cgimage
< CGImageRef cgImage = CGImageSourceCreateImageAtIndex(source, 0, NULL);
<
< // setup viewport size before using
< CGSize viewportSize;
<
< // Rotate it
< CGImageRef rotatedCGImage;
< if ([options objectForKey:@"rotation"]) {
< float rotation = [[options objectForKey:@"rotation"] floatValue];
< rotatedCGImage = [self newCGImageRotatedByAngle:cgImage angle:rotation];
< } else if ([[options objectForKey:@"fixOrientation"] boolValue] == YES) {
< // Get metadata orientation
< int metadataOrientation = [[imageMetadata objectForKey:(NSString *)kCGImagePropertyOrientation] intValue];
<
< bool rotated = false;
< //see http://www.impulseadventure.com/photo/exif-orientation.html
< if (metadataOrientation == 6) {
< rotatedCGImage = [self newCGImageRotatedByAngle:cgImage angle:270];
< rotated = true;
< } else if (metadataOrientation == 3) {
< rotatedCGImage = [self newCGImageRotatedByAngle:cgImage angle:180];
< rotated = true;
< } else {
< rotatedCGImage = cgImage;
---
> [[self.stillImageOutput connectionWithMediaType:AVMediaTypeVideo] setVideoOrientation:orientation];
>
> [self.stillImageOutput captureStillImageAsynchronouslyFromConnection:[self.stillImageOutput connectionWithMediaType:AVMediaTypeVideo] completionHandler:^(CMSampleBufferRef imageDataSampleBuffer, NSError *error) {
>
> if (imageDataSampleBuffer) {
> NSData *imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageDataSampleBuffer];
>
> // Create image source
> CGImageSourceRef source = CGImageSourceCreateWithData((CFDataRef)imageData, NULL);
> //get all the metadata in the image
> NSMutableDictionary *imageMetadata = [(NSDictionary *) CFBridgingRelease(CGImageSourceCopyPropertiesAtIndex(source, 0, NULL)) mutableCopy];
>
> // create cgimage
> CGImageRef cgImage = CGImageSourceCreateImageAtIndex(source, 0, NULL);
>
> // setup viewport size before using
> CGSize viewportSize;
>
> // Rotate it
> CGImageRef rotatedCGImage;
> if ([options objectForKey:@"rotation"]) {
> float rotation = [[options objectForKey:@"rotation"] floatValue];
> rotatedCGImage = [self newCGImageRotatedByAngle:cgImage angle:rotation];
> } else if ([[options objectForKey:@"fixOrientation"] boolValue] == YES) {
> // Get metadata orientation
> int metadataOrientation = [[imageMetadata objectForKey:(NSString *)kCGImagePropertyOrientation] intValue];
>
> bool rotated = false;
> //see http://www.impulseadventure.com/photo/exif-orientation.html
> if (metadataOrientation == 6) {
> rotatedCGImage = [self newCGImageRotatedByAngle:cgImage angle:270];
> rotated = true;
> } else if (metadataOrientation == 3) {
> rotatedCGImage = [self newCGImageRotatedByAngle:cgImage angle:180];
> rotated = true;
> } else {
> rotatedCGImage = cgImage;
> }
>
> if(rotated) {
> [imageMetadata setObject:[NSNumber numberWithInteger:1] forKey:(NSString *)kCGImagePropertyOrientation];
> CGImageRelease(cgImage);
> }
> } else {
> rotatedCGImage = cgImage;
> }
>
> // Crop it
> if (self.cropToPreview) {
>
> if (UIInterfaceOrientationIsPortrait([[UIApplication sharedApplication] statusBarOrientation]))
> {
> viewportSize = CGSizeMake(self.previewLayer.frame.size.height, self.previewLayer.frame.size.width);
> } else {
> viewportSize = CGSizeMake(self.previewLayer.frame.size.width, self.previewLayer.frame.size.height);
> }
>
> CGRect captureRect = CGRectMake(0, 0, CGImageGetWidth(rotatedCGImage), CGImageGetHeight(rotatedCGImage));
> CGRect croppedSize = AVMakeRectWithAspectRatioInsideRect(viewportSize, captureRect);
> rotatedCGImage = CGImageCreateWithImageInRect(rotatedCGImage, croppedSize);
> }
>
> // Erase stupid TIFF stuff
> [imageMetadata removeObjectForKey:(NSString *)kCGImagePropertyTIFFDictionary];
>
> // Add input metadata
> [imageMetadata mergeMetadata:[options objectForKey:@"metadata"]];
>
> // Create destination thing
> NSMutableData *rotatedImageData = [NSMutableData data];
> CGImageDestinationRef destination = CGImageDestinationCreateWithData((CFMutableDataRef)rotatedImageData, CGImageSourceGetType(source), 1, NULL);
> CFRelease(source);
> // add the image to the destination, reattaching metadata
> CGImageDestinationAddImage(destination, rotatedCGImage, (CFDictionaryRef) imageMetadata);
> // And write
> CGImageDestinationFinalize(destination);
> CGSize frameSize;
> if (UIInterfaceOrientationIsPortrait([[UIApplication sharedApplication] statusBarOrientation]))
> {
> frameSize = CGSizeMake(CGImageGetHeight(rotatedCGImage),
> CGImageGetWidth(rotatedCGImage));
> } else {
> frameSize = CGSizeMake(CGImageGetWidth(rotatedCGImage),
> CGImageGetHeight(rotatedCGImage));
> }
> CFRelease(destination);
>
> [self saveImage:rotatedImageData imageSize:frameSize target:target metadata:imageMetadata resolve:resolve reject:reject];
>
> CGImageRelease(rotatedCGImage);
682,685c737,738
<
< if(rotated) {
< [imageMetadata setObject:[NSNumber numberWithInteger:1] forKey:(NSString *)kCGImagePropertyOrientation];
< CGImageRelease(cgImage);
---
> else {
> reject(RCTErrorUnspecified, nil, RCTErrorWithMessage(error.description));
687,738c740
< } else {
< rotatedCGImage = cgImage;
< }
<
< // Crop it
< if (self.cropToPreview) {
<
< if (UIInterfaceOrientationIsPortrait([[UIApplication sharedApplication] statusBarOrientation]))
< {
< viewportSize = CGSizeMake(self.previewLayer.frame.size.height, self.previewLayer.frame.size.width);
< } else {
< viewportSize = CGSizeMake(self.previewLayer.frame.size.width, self.previewLayer.frame.size.height);
< }
<
< CGRect captureRect = CGRectMake(0, 0, CGImageGetWidth(rotatedCGImage), CGImageGetHeight(rotatedCGImage));
< CGRect croppedSize = AVMakeRectWithAspectRatioInsideRect(viewportSize, captureRect);
< rotatedCGImage = CGImageCreateWithImageInRect(rotatedCGImage, croppedSize);
< }
<
< // Erase stupid TIFF stuff
< [imageMetadata removeObjectForKey:(NSString *)kCGImagePropertyTIFFDictionary];
<
< // Add input metadata
< [imageMetadata mergeMetadata:[options objectForKey:@"metadata"]];
<
< // Create destination thing
< NSMutableData *rotatedImageData = [NSMutableData data];
< CGImageDestinationRef destination = CGImageDestinationCreateWithData((CFMutableDataRef)rotatedImageData, CGImageSourceGetType(source), 1, NULL);
< CFRelease(source);
< // add the image to the destination, reattaching metadata
< CGImageDestinationAddImage(destination, rotatedCGImage, (CFDictionaryRef) imageMetadata);
< // And write
< CGImageDestinationFinalize(destination);
< CGSize frameSize;
< if (UIInterfaceOrientationIsPortrait([[UIApplication sharedApplication] statusBarOrientation]))
< {
< frameSize = CGSizeMake(CGImageGetHeight(rotatedCGImage),
< CGImageGetWidth(rotatedCGImage));
< } else {
< frameSize = CGSizeMake(CGImageGetWidth(rotatedCGImage),
< CGImageGetHeight(rotatedCGImage));
< }
< CFRelease(destination);
<
< [self saveImage:rotatedImageData imageSize:frameSize target:target metadata:imageMetadata resolve:resolve reject:reject];
<
< CGImageRelease(rotatedCGImage);
< }
< else {
< reject(RCTErrorUnspecified, nil, RCTErrorWithMessage(error.description));
< }
< }];
---
> }];
740c742
< });
---
> });
745,783c747,787
< NSString *responseString;
<
< if (target == RCTCameraCaptureTargetMemory) {
< resolve(@{@"data":[imageData base64EncodedStringWithOptions:0]});
< return;
< }
<
< else if (target == RCTCameraCaptureTargetDisk) {
< NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
< NSString *documentsDirectory = [paths firstObject];
<
< NSFileManager *fileManager = [NSFileManager defaultManager];
< NSString *fullPath = [[documentsDirectory stringByAppendingPathComponent:[[NSUUID UUID] UUIDString]] stringByAppendingPathExtension:@"jpg"];
<
< [fileManager createFileAtPath:fullPath contents:imageData attributes:nil];
< responseString = fullPath;
< }
<
< else if (target == RCTCameraCaptureTargetTemp) {
< NSString *fileName = [[NSProcessInfo processInfo] globallyUniqueString];
< NSString *fullPath = [NSString stringWithFormat:@"%@%@.jpg", NSTemporaryDirectory(), fileName];
<
< [imageData writeToFile:fullPath atomically:YES];
< responseString = fullPath;
< }
<
< else if (target == RCTCameraCaptureTargetCameraRoll) {
< [[[ALAssetsLibrary alloc] init] writeImageDataToSavedPhotosAlbum:imageData metadata:metadata completionBlock:^(NSURL* url, NSError* error) {
< if (error == nil) {
< //path isn't really applicable here (this is an asset uri), but left it in for backward comparability
< resolve(@{@"path":[url absoluteString], @"mediaUri":[url absoluteString]});
< }
< else {
< reject(RCTErrorUnspecified, nil, RCTErrorWithMessage(error.description));
< }
< }];
< return;
< }
< resolve(@{@"path":responseString, @"width":[NSNumber numberWithFloat:imageSize.width], @"height":[NSNumber numberWithFloat:imageSize.height]});
---
> NSString *responseString;
>
> if (target == RCTCameraCaptureTargetMemory) {
> resolve(@{@"data":[imageData base64EncodedStringWithOptions:0]});
> return;
> }
>
> else if (target == RCTCameraCaptureTargetDisk) {
> NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
> NSString *documentsDirectory = [paths firstObject];
>
> NSFileManager *fileManager = [NSFileManager defaultManager];
> NSString *fullPath = [[documentsDirectory stringByAppendingPathComponent:[[NSUUID UUID] UUIDString]] stringByAppendingPathExtension:@"jpg"];
>
> [fileManager createFileAtPath:fullPath contents:imageData attributes:nil];
> responseString = fullPath;
> }
>
> else if (target == RCTCameraCaptureTargetTemp) {
> NSString *fileName = [[NSProcessInfo processInfo] globallyUniqueString];
> NSString *fullPath = [NSString stringWithFormat:@"%@%@.jpg", NSTemporaryDirectory(), fileName];
>
> [imageData writeToFile:fullPath atomically:YES];
> responseString = fullPath;
> }
>
> else if (target == RCTCameraCaptureTargetCameraRoll) {
> #if !TARGET_OS_MACCATALYST
> [[[ALAssetsLibrary alloc] init] writeImageDataToSavedPhotosAlbum:imageData metadata:metadata completionBlock:^(NSURL* url, NSError* error) {
> if (error == nil) {
> //path isn't really applicable here (this is an asset uri), but left it in for backward comparability
> resolve(@{@"path":[url absoluteString], @"mediaUri":[url absoluteString]});
> }
> else {
> reject(RCTErrorUnspecified, nil, RCTErrorWithMessage(error.description));
> }
> }];
> #endif
> return;
> }
> resolve(@{@"path":responseString, @"width":[NSNumber numberWithFloat:imageSize.width], @"height":[NSNumber numberWithFloat:imageSize.height]});
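Note: the only substantive change in the saveImage hunk above is the #if !TARGET_OS_MACCATALYST / #endif pair around the ALAssetsLibrary write, since AssetsLibrary does not exist on Mac Catalyst. As guarded, the cameraRoll target returns on Catalyst without resolving or rejecting the promise. A Catalyst-compatible alternative would be PhotoKit; this is a hedged sketch of that approach, not what the diff does:

    #import <Photos/Photos.h>

    [[PHPhotoLibrary sharedPhotoLibrary] performChanges:^{
      // Create an asset directly from the JPEG data, keeping its embedded metadata.
      PHAssetCreationRequest *request = [PHAssetCreationRequest creationRequestForAsset];
      [request addResourceWithType:PHAssetResourceTypePhoto data:imageData options:nil];
    } completionHandler:^(BOOL success, NSError * _Nullable error) {
      if (success) {
        resolve(@{});  // the asset's local identifier would need to be captured inside the change block
      } else {
        reject(RCTErrorUnspecified, nil, RCTErrorWithMessage(error.description));
      }
    }];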
788,818c792,822
< CGFloat angleInRadians = angle * (M_PI / 180);
< CGFloat width = CGImageGetWidth(imgRef);
< CGFloat height = CGImageGetHeight(imgRef);
<
< CGRect imgRect = CGRectMake(0, 0, width, height);
< CGAffineTransform transform = CGAffineTransformMakeRotation(angleInRadians);
< CGRect rotatedRect = CGRectApplyAffineTransform(imgRect, transform);
<
< CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
< CGContextRef bmContext = CGBitmapContextCreate(NULL, rotatedRect.size.width, rotatedRect.size.height, 8, 0, colorSpace, (CGBitmapInfo) kCGImageAlphaPremultipliedFirst);
<
< if (self.mirrorImage) {
< CGAffineTransform transform = CGAffineTransformMakeTranslation(rotatedRect.size.width, 0.0);
< transform = CGAffineTransformScale(transform, -1.0, 1.0);
< CGContextConcatCTM(bmContext, transform);
< }
<
< CGContextSetAllowsAntialiasing(bmContext, TRUE);
< CGContextSetInterpolationQuality(bmContext, kCGInterpolationNone);
<
< CGColorSpaceRelease(colorSpace);
<
< CGContextTranslateCTM(bmContext, +(rotatedRect.size.width/2), +(rotatedRect.size.height/2));
< CGContextRotateCTM(bmContext, angleInRadians);
< CGContextTranslateCTM(bmContext, -(rotatedRect.size.width/2), -(rotatedRect.size.height/2));
<
< CGContextDrawImage(bmContext, CGRectMake((rotatedRect.size.width-width)/2.0f, (rotatedRect.size.height-height)/2.0f, width, height), imgRef);
<
< CGImageRef rotatedImage = CGBitmapContextCreateImage(bmContext);
< CFRelease(bmContext);
< return rotatedImage;
---
> CGFloat angleInRadians = angle * (M_PI / 180);
> CGFloat width = CGImageGetWidth(imgRef);
> CGFloat height = CGImageGetHeight(imgRef);
>
> CGRect imgRect = CGRectMake(0, 0, width, height);
> CGAffineTransform transform = CGAffineTransformMakeRotation(angleInRadians);
> CGRect rotatedRect = CGRectApplyAffineTransform(imgRect, transform);
>
> CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
> CGContextRef bmContext = CGBitmapContextCreate(NULL, rotatedRect.size.width, rotatedRect.size.height, 8, 0, colorSpace, (CGBitmapInfo) kCGImageAlphaPremultipliedFirst);
>
> if (self.mirrorImage) {
> CGAffineTransform transform = CGAffineTransformMakeTranslation(rotatedRect.size.width, 0.0);
> transform = CGAffineTransformScale(transform, -1.0, 1.0);
> CGContextConcatCTM(bmContext, transform);
> }
>
> CGContextSetAllowsAntialiasing(bmContext, TRUE);
> CGContextSetInterpolationQuality(bmContext, kCGInterpolationNone);
>
> CGColorSpaceRelease(colorSpace);
>
> CGContextTranslateCTM(bmContext, +(rotatedRect.size.width/2), +(rotatedRect.size.height/2));
> CGContextRotateCTM(bmContext, angleInRadians);
> CGContextTranslateCTM(bmContext, -(rotatedRect.size.width/2), -(rotatedRect.size.height/2));
>
> CGContextDrawImage(bmContext, CGRectMake((rotatedRect.size.width-width)/2.0f, (rotatedRect.size.height-height)/2.0f, width, height), imgRef);
>
> CGImageRef rotatedImage = CGBitmapContextCreateImage(bmContext);
> CFRelease(bmContext);
> return rotatedImage;
835,866c839,841
< if (self.movieFileOutput.recording) {
< reject(RCTErrorUnspecified, nil, RCTErrorWithMessage(@"Already recording"));
< return;
< }
<
< if ([[options valueForKey:@"audio"] boolValue]) {
< [self initializeCaptureSessionInput:AVMediaTypeAudio];
< }
<
< Float64 totalSeconds = [[options valueForKey:@"totalSeconds"] floatValue];
< if (totalSeconds > -1) {
< int32_t preferredTimeScale = [[options valueForKey:@"preferredTimeScale"] intValue];
< CMTime maxDuration = CMTimeMakeWithSeconds(totalSeconds, preferredTimeScale);
< self.movieFileOutput.maxRecordedDuration = maxDuration;
< }
<
< dispatch_async(self.sessionQueue, ^{
< [[self.movieFileOutput connectionWithMediaType:AVMediaTypeVideo] setVideoOrientation:orientation];
<
< if (self.mirrorVideo) {
< [[self.movieFileOutput connectionWithMediaType:AVMediaTypeVideo] setVideoMirrored:YES];
< }
< //Create temporary URL to record to
< NSString *outputPath = [[NSString alloc] initWithFormat:@"%@%@", NSTemporaryDirectory(), @"output.mov"];
< NSURL *outputURL = [[NSURL alloc] initFileURLWithPath:outputPath];
< NSFileManager *fileManager = [NSFileManager defaultManager];
< if ([fileManager fileExistsAtPath:outputPath]) {
< NSError *error;
< if ([fileManager removeItemAtPath:outputPath error:&error] == NO) {
< reject(RCTErrorUnspecified, nil, RCTErrorWithMessage(error.description));
< return;
< }
---
> if (self.movieFileOutput.recording) {
> reject(RCTErrorUnspecified, nil, RCTErrorWithMessage(@"Already recording"));
> return;
868,875c843,879
<
< //Start recording
< [self.movieFileOutput startRecordingToOutputFileURL:outputURL recordingDelegate:self];
<
< self.videoResolve = resolve;
< self.videoReject = reject;
< self.videoTarget = target;
< });
---
>
> if ([[options valueForKey:@"audio"] boolValue]) {
> [self initializeCaptureSessionInput:AVMediaTypeAudio];
> }
>
> Float64 totalSeconds = [[options valueForKey:@"totalSeconds"] floatValue];
> if (totalSeconds > -1) {
> int32_t preferredTimeScale = [[options valueForKey:@"preferredTimeScale"] intValue];
> CMTime maxDuration = CMTimeMakeWithSeconds(totalSeconds, preferredTimeScale);
> self.movieFileOutput.maxRecordedDuration = maxDuration;
> }
>
> dispatch_async(self.sessionQueue, ^{
> [[self.movieFileOutput connectionWithMediaType:AVMediaTypeVideo] setVideoOrientation:orientation];
>
> if (self.mirrorVideo) {
> [[self.movieFileOutput connectionWithMediaType:AVMediaTypeVideo] setVideoMirrored:YES];
> }
> //Create temporary URL to record to
> NSString *outputPath = [[NSString alloc] initWithFormat:@"%@%@", NSTemporaryDirectory(), @"output.mov"];
> NSURL *outputURL = [[NSURL alloc] initFileURLWithPath:outputPath];
> NSFileManager *fileManager = [NSFileManager defaultManager];
> if ([fileManager fileExistsAtPath:outputPath]) {
> NSError *error;
> if ([fileManager removeItemAtPath:outputPath error:&error] == NO) {
> reject(RCTErrorUnspecified, nil, RCTErrorWithMessage(error.description));
> return;
> }
> }
>
> //Start recording
> [self.movieFileOutput startRecordingToOutputFileURL:outputURL recordingDelegate:self];
>
> self.videoResolve = resolve;
> self.videoReject = reject;
> self.videoTarget = target;
> });
883,967c887,896
< BOOL recordSuccess = YES;
< if ([error code] != noErr) {
< // A problem occurred: Find out if the recording was successful.
< id value = [[error userInfo] objectForKey:AVErrorRecordingSuccessfullyFinishedKey];
< if (value) {
< recordSuccess = [value boolValue];
< }
< }
< if (!recordSuccess) {
< self.videoReject(RCTErrorUnspecified, nil, RCTErrorWithMessage(@"Error while recording"));
< return;
< }
<
< AVURLAsset* videoAsAsset = [AVURLAsset URLAssetWithURL:outputFileURL options:nil];
< AVAssetTrack* videoTrack = [[videoAsAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
< float videoWidth;
< float videoHeight;
<
< CGSize videoSize = [videoTrack naturalSize];
< CGAffineTransform txf = [videoTrack preferredTransform];
<
< if ((txf.tx == videoSize.width && txf.ty == videoSize.height) || (txf.tx == 0 && txf.ty == 0)) {
< // Video recorded in landscape orientation
< videoWidth = videoSize.width;
< videoHeight = videoSize.height;
< } else {
< // Video recorded in portrait orientation, so have to swap reported width/height
< videoWidth = videoSize.height;
< videoHeight = videoSize.width;
< }
<
< NSMutableDictionary *videoInfo = [NSMutableDictionary dictionaryWithDictionary:@{
< @"duration":[NSNumber numberWithFloat:CMTimeGetSeconds(videoAsAsset.duration)],
< @"width":[NSNumber numberWithFloat:videoWidth],
< @"height":[NSNumber numberWithFloat:videoHeight],
< @"size":[NSNumber numberWithLongLong:captureOutput.recordedFileSize],
< }];
<
< if (self.videoTarget == RCTCameraCaptureTargetCameraRoll) {
< ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];
< if ([library videoAtPathIsCompatibleWithSavedPhotosAlbum:outputFileURL]) {
< [library writeVideoAtPathToSavedPhotosAlbum:outputFileURL
< completionBlock:^(NSURL *assetURL, NSError *error) {
< if (error) {
< self.videoReject(RCTErrorUnspecified, nil, RCTErrorWithMessage(error.description));
< return;
< } else if (assetURL == nil) {
< //It's possible for writing to camera roll to fail,
< //without receiving an error message, but assetURL will be nil
< //Happens when disk is (almost) full
< self.videoReject(RCTErrorUnspecified, nil, RCTErrorWithMessage(@"Not enough storage"));
< return;
< }
<
< [videoInfo setObject:[assetURL absoluteString] forKey:@"path"];
< self.videoResolve(videoInfo);
< }];
< }
< }
< else if (self.videoTarget == RCTCameraCaptureTargetDisk) {
< NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
< NSString *documentsDirectory = [paths firstObject];
< NSString *fullPath = [[documentsDirectory stringByAppendingPathComponent:[[NSUUID UUID] UUIDString]] stringByAppendingPathExtension:@"mov"];
<
< NSFileManager * fileManager = [NSFileManager defaultManager];
< NSError * error = nil;
<
< //moving to destination
< if (!([fileManager moveItemAtPath:[outputFileURL path] toPath:fullPath error:&error])) {
< self.videoReject(RCTErrorUnspecified, nil, RCTErrorWithMessage(error.description));
< return;
< }
< [videoInfo setObject:fullPath forKey:@"path"];
< self.videoResolve(videoInfo);
< }
< else if (self.videoTarget == RCTCameraCaptureTargetTemp) {
< NSString *fileName = [[NSProcessInfo processInfo] globallyUniqueString];
< NSString *fullPath = [NSString stringWithFormat:@"%@%@.mov", NSTemporaryDirectory(), fileName];
<
< NSFileManager * fileManager = [NSFileManager defaultManager];
< NSError * error = nil;
<
< //moving to destination
< if (!([fileManager moveItemAtPath:[outputFileURL path] toPath:fullPath error:&error])) {
< self.videoReject(RCTErrorUnspecified, nil, RCTErrorWithMessage(error.description));
---
> BOOL recordSuccess = YES;
> if ([error code] != noErr) {
> // A problem occurred: Find out if the recording was successful.
> id value = [[error userInfo] objectForKey:AVErrorRecordingSuccessfullyFinishedKey];
> if (value) {
> recordSuccess = [value boolValue];
> }
> }
> if (!recordSuccess) {
> self.videoReject(RCTErrorUnspecified, nil, RCTErrorWithMessage(@"Error while recording"));
970,975c899,982
< [videoInfo setObject:fullPath forKey:@"path"];
< self.videoResolve(videoInfo);
< }
< else {
< self.videoReject(RCTErrorUnspecified, nil, RCTErrorWithMessage(@"Target not supported"));
< }
---
>
> AVURLAsset* videoAsAsset = [AVURLAsset URLAssetWithURL:outputFileURL options:nil];
> AVAssetTrack* videoTrack = [[videoAsAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
> float videoWidth;
> float videoHeight;
>
> CGSize videoSize = [videoTrack naturalSize];
> CGAffineTransform txf = [videoTrack preferredTransform];
>
> if ((txf.tx == videoSize.width && txf.ty == videoSize.height) || (txf.tx == 0 && txf.ty == 0)) {
> // Video recorded in landscape orientation
> videoWidth = videoSize.width;
> videoHeight = videoSize.height;
> } else {
> // Video recorded in portrait orientation, so have to swap reported width/height
> videoWidth = videoSize.height;
> videoHeight = videoSize.width;
> }
>
> NSMutableDictionary *videoInfo = [NSMutableDictionary dictionaryWithDictionary:@{
> @"duration":[NSNumber numberWithFloat:CMTimeGetSeconds(videoAsAsset.duration)],
> @"width":[NSNumber numberWithFloat:videoWidth],
> @"height":[NSNumber numberWithFloat:videoHeight],
> @"size":[NSNumber numberWithLongLong:captureOutput.recordedFileSize],
> }];
>
> if (self.videoTarget == RCTCameraCaptureTargetCameraRoll) {
> #if !TARGET_OS_MACCATALYST
> ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];
> if ([library videoAtPathIsCompatibleWithSavedPhotosAlbum:outputFileURL]) {
> [library writeVideoAtPathToSavedPhotosAlbum:outputFileURL
> completionBlock:^(NSURL *assetURL, NSError *error) {
> if (error) {
> self.videoReject(RCTErrorUnspecified, nil, RCTErrorWithMessage(error.description));
> return;
> } else if (assetURL == nil) {
> //It's possible for writing to camera roll to fail,
> //without receiving an error message, but assetURL will be nil
> //Happens when disk is (almost) full
> self.videoReject(RCTErrorUnspecified, nil, RCTErrorWithMessage(@"Not enough storage"));
> return;
> }
>
> [videoInfo setObject:[assetURL absoluteString] forKey:@"path"];
> self.videoResolve(videoInfo);
> }];
> }
>
> #endif
> }
> else if (self.videoTarget == RCTCameraCaptureTargetDisk) {
> NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
> NSString *documentsDirectory = [paths firstObject];
> NSString *fullPath = [[documentsDirectory stringByAppendingPathComponent:[[NSUUID UUID] UUIDString]] stringByAppendingPathExtension:@"mov"];
>
> NSFileManager * fileManager = [NSFileManager defaultManager];
> NSError * error = nil;
>
> //moving to destination
> if (!([fileManager moveItemAtPath:[outputFileURL path] toPath:fullPath error:&error])) {
> self.videoReject(RCTErrorUnspecified, nil, RCTErrorWithMessage(error.description));
> return;
> }
> [videoInfo setObject:fullPath forKey:@"path"];
> self.videoResolve(videoInfo);
> }
> else if (self.videoTarget == RCTCameraCaptureTargetTemp) {
> NSString *fileName = [[NSProcessInfo processInfo] globallyUniqueString];
> NSString *fullPath = [NSString stringWithFormat:@"%@%@.mov", NSTemporaryDirectory(), fileName];
>
> NSFileManager * fileManager = [NSFileManager defaultManager];
> NSError * error = nil;
>
> //moving to destination
> if (!([fileManager moveItemAtPath:[outputFileURL path] toPath:fullPath error:&error])) {
> self.videoReject(RCTErrorUnspecified, nil, RCTErrorWithMessage(error.description));
> return;
> }
> [videoInfo setObject:fullPath forKey:@"path"];
> self.videoResolve(videoInfo);
> }
> else {
> self.videoReject(RCTErrorUnspecified, nil, RCTErrorWithMessage(@"Target not supported"));
> }
979,996c986,1008
<
< for (AVMetadataMachineReadableCodeObject *metadata in metadataObjects) {
< for (id barcodeType in self.barCodeTypes) {
< if ([metadata.type isEqualToString:barcodeType] && metadata.stringValue) {
< // Transform the meta-data coordinates to screen coords
< AVMetadataMachineReadableCodeObject *transformed = (AVMetadataMachineReadableCodeObject *)[_previewLayer transformedMetadataObjectForMetadataObject:metadata];
<
< NSDictionary *event = @{
< @"type": metadata.type,
< @"data": metadata.stringValue,
< @"bounds": @{
< @"origin": @{
< @"x": [NSString stringWithFormat:@"%f", transformed.bounds.origin.x],
< @"y": [NSString stringWithFormat:@"%f", transformed.bounds.origin.y]
< },
< @"size": @{
< @"height": [NSString stringWithFormat:@"%f", transformed.bounds.size.height],
< @"width": [NSString stringWithFormat:@"%f", transformed.bounds.size.width],
---
>
> for (AVMetadataMachineReadableCodeObject *metadata in metadataObjects) {
> for (id barcodeType in self.barCodeTypes) {
> if ([metadata.type isEqualToString:barcodeType] && metadata.stringValue) {
> // Transform the meta-data coordinates to screen coords
> AVMetadataMachineReadableCodeObject *transformed = (AVMetadataMachineReadableCodeObject *)[_previewLayer transformedMetadataObjectForMetadataObject:metadata];
>
> NSDictionary *event = @{
> @"type": metadata.type,
> @"data": metadata.stringValue,
> @"bounds": @{
> @"origin": @{
> @"x": [NSString stringWithFormat:@"%f", transformed.bounds.origin.x],
> @"y": [NSString stringWithFormat:@"%f", transformed.bounds.origin.y]
> },
> @"size": @{
> @"height": [NSString stringWithFormat:@"%f", transformed.bounds.size.height],
> @"width": [NSString stringWithFormat:@"%f", transformed.bounds.size.width],
> }
> }
> };
>
> [self.bridge.eventDispatcher sendAppEventWithName:@"CameraBarCodeRead" body:event];
998,1002c1010
< }
< };
<
< [self.bridge.eventDispatcher sendAppEventWithName:@"CameraBarCodeRead" body:event];
< }
---
> }
1004d1011
< }
1010,1015c1017,1020
< NSArray *devices = [AVCaptureDevice devicesWithMediaType:mediaType];
< AVCaptureDevice *captureDevice = [devices firstObject];
<
< for (AVCaptureDevice *device in devices)
< {
< if ([device position] == position)
---
> NSArray *devices = [AVCaptureDevice devicesWithMediaType:mediaType];
> AVCaptureDevice *captureDevice = [devices firstObject];
>
> for (AVCaptureDevice *device in devices)
1017,1018c1022,1026
< captureDevice = device;
< break;
---
> if ([device position] == position)
> {
> captureDevice = device;
> break;
> }
1020,1022c1028,1029
< }
<
< return captureDevice;
---
>
> return captureDevice;
1027,1028c1034,1044
< CGPoint devicePoint = CGPointMake(.5, .5);
< [self focusWithMode:AVCaptureFocusModeContinuousAutoFocus exposeWithMode:AVCaptureExposureModeContinuousAutoExposure atDevicePoint:devicePoint monitorSubjectAreaChange:NO];
---
> CGPoint devicePoint = CGPointMake(.5, .5);
> [self focusWithMode:AVCaptureFocusModeContinuousAutoFocus exposeWithMode:AVCaptureExposureModeContinuousAutoExposure atDevicePoint:devicePoint monitorSubjectAreaChange:NO];
>
> if (self.camera.camFocus)
> {
> [self.camera.camFocus removeFromSuperview];
> }
> self.camera.camFocus = [[RCTCameraFocusSquare alloc]initWithFrame:CGRectMake([self.view center].x-80, [self.view center].y-80, 160, 160)];
> [self.camera.camFocus setBackgroundColor:[UIColor clearColor]];
> [self.view addSubview:self.camera.camFocus];
> [self.camera.camFocus setNeedsDisplay];
1030,1042c1046,1049
< if (self.camera.camFocus)
< {
< [self.camera.camFocus removeFromSuperview];
< }
< self.camera.camFocus = [[RCTCameraFocusSquare alloc]initWithFrame:CGRectMake([self.view center].x-80, [self.view center].y-80, 160, 160)];
< [self.camera.camFocus setBackgroundColor:[UIColor clearColor]];
< [self.view addSubview:self.camera.camFocus];
< [self.camera.camFocus setNeedsDisplay];
<
< [UIView beginAnimations:nil context:NULL];
< [UIView setAnimationDuration:1.0];
< [self.camera.camFocus setAlpha:0.0];
< [UIView commitAnimations];
---
> [UIView beginAnimations:nil context:NULL];
> [UIView setAnimationDuration:1.0];
> [self.camera.camFocus setAlpha:0.0];
> [UIView commitAnimations];
1047,1069c1054,1076
< dispatch_async([self sessionQueue], ^{
< AVCaptureDevice *device = [[self videoCaptureDeviceInput] device];
< NSError *error = nil;
< if ([device lockForConfiguration:&error])
< {
< if ([device isFocusPointOfInterestSupported] && [device isFocusModeSupported:focusMode])
< {
< [device setFocusMode:focusMode];
< [device setFocusPointOfInterest:point];
< }
< if ([device isExposurePointOfInterestSupported] && [device isExposureModeSupported:exposureMode])
< {
< [device setExposureMode:exposureMode];
< [device setExposurePointOfInterest:point];
< }
< [device setSubjectAreaChangeMonitoringEnabled:monitorSubjectAreaChange];
< [device unlockForConfiguration];
< }
< else
< {
< NSLog(@"%@", error);
< }
< });
---
> dispatch_async([self sessionQueue], ^{
> AVCaptureDevice *device = [[self videoCaptureDeviceInput] device];
> NSError *error = nil;
> if ([device lockForConfiguration:&error])
> {
> if ([device isFocusPointOfInterestSupported] && [device isFocusModeSupported:focusMode])
> {
> [device setFocusMode:focusMode];
> [device setFocusPointOfInterest:point];
> }
> if ([device isExposurePointOfInterestSupported] && [device isExposureModeSupported:exposureMode])
> {
> [device setExposureMode:exposureMode];
> [device setExposurePointOfInterest:point];
> }
> [device setSubjectAreaChangeMonitoringEnabled:monitorSubjectAreaChange];
> [device unlockForConfiguration];
> }
> else
> {
> NSLog(@"%@", error);
> }
> });
1115c1122
<
---
>
1117,1119c1124,1126
< @"target": reactTag,
< @"zoomFactor": [NSNumber numberWithDouble:zoomFactor],
< @"velocity": [NSNumber numberWithDouble:velocity]
---
> @"target": reactTag,
> @"zoomFactor": [NSNumber numberWithDouble:zoomFactor],
> @"velocity": [NSNumber numberWithDouble:velocity]
1121c1128
<
---
>
1123c1130
<
---
>
1133,1145c1140,1152
< #if !(TARGET_IPHONE_SIMULATOR)
< if (quality) {
< dispatch_async([self sessionQueue], ^{
< [self.session beginConfiguration];
< if ([self.session canSetSessionPreset:quality]) {
< self.session.sessionPreset = quality;
< } else {
< self.session.sessionPreset = AVCaptureSessionPresetHigh;
< }
< [self.session commitConfiguration];
< });
< }
< #endif
---
> #if !(TARGET_IPHONE_SIMULATOR)
> if (quality) {
> dispatch_async([self sessionQueue], ^{
> [self.session beginConfiguration];
> if ([self.session canSetSessionPreset:quality]) {
> self.session.sessionPreset = quality;
> } else {
> self.session.sessionPreset = AVCaptureSessionPresetHigh;
> }
> [self.session commitConfiguration];
> });
> }
> #endif