forked from hrydgard/ppsspp
-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathCameraHelper.mm
158 lines (132 loc) · 5.71 KB
/
CameraHelper.mm
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
#include <vector>
#include <string>
#include "Core/Config.h"
#import "CameraHelper.h"
#import <UIKit/UIKit.h>
// Class extension: private state for CameraHelper.
@interface CameraHelper() {
// Active capture session; created in startVideo, stopped in stopVideo.
AVCaptureSession *captureSession;
// Preview layer sized via setCameraSize:h:; created in startVideo
// (not attached to any view within this file).
AVCaptureVideoPreviewLayer *previewLayer;
// Requested output frame dimensions, set by setCameraSize:h: and used when
// re-drawing captured frames in imageFromSampleBuffer:.
int mWidth;
int mHeight;
}
@end
@implementation CameraHelper

// Enumerates all video-capture devices and returns their localized names.
// NOTE: devicesWithMediaType: has been deprecated since iOS 10
// (AVCaptureDeviceDiscoverySession replaces it); kept here to avoid raising
// the deployment target.
std::vector<std::string> __cameraGetDeviceList() {
    std::vector<std::string> deviceList;
    for (AVCaptureDevice *device in [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]) {
        deviceList.push_back([device.localizedName UTF8String]);
    }
    return deviceList;
}

// Returns the camera name stored in the global config, or nil.
// The stored name originates from -[NSString UTF8String] (see
// __cameraGetDeviceList), so it must be decoded as UTF-8; the previous use of
// defaultCStringEncoding could mangle non-ASCII device names and break the
// exact-match comparison in startVideo. A nil result (invalid UTF-8) simply
// makes startVideo fall back to the default device.
NSString *getSelectedCamera() {
    return [NSString stringWithUTF8String:g_Config.sCameraDevice.c_str()];
}

// Checks (and, if undetermined, requests) camera permission.
// Returns 0 when capture may proceed now, 1 otherwise. When the user grants
// permission asynchronously, startVideo is re-invoked on the main queue.
-(int) checkPermission {
    AVAuthorizationStatus status = [AVCaptureDevice authorizationStatusForMediaType:AVMediaTypeVideo];
    NSLog(@"CameraHelper::checkPermission %ld", (long)status);
    switch (status) {
        case AVAuthorizationStatusNotDetermined: {
            // First use: ask the user. The completion handler runs on an
            // arbitrary queue, so hop to main before touching the session.
            [AVCaptureDevice requestAccessForMediaType:AVMediaTypeVideo completionHandler:^(BOOL granted) {
                if (granted) {
                    NSLog(@"camera permission granted");
                    dispatch_async(dispatch_get_main_queue(), ^{
                        [self startVideo];
                    });
                } else {
                    NSLog(@"camera permission denied");
                }
            }];
            return 1;
        }
        case AVAuthorizationStatusRestricted:
        case AVAuthorizationStatusDenied: {
            NSLog(@"camera permission denied");
            return 1;
        }
        case AVAuthorizationStatusAuthorized: {
            return 0;
        }
    }
    // Defensive: unknown future authorization status -> treat as not allowed.
    // (Also fixes "control may reach end of non-void function".)
    return 1;
}

// Records the frame size that captured images will be rendered into.
-(void) setCameraSize: (int)width h:(int)height {
    NSLog(@"CameraHelper::setCameraSize %dx%d", width, height);
    mWidth = width;
    mHeight = height;
}

// Builds the capture pipeline and starts streaming frames to
// captureOutput:didOutputSampleBuffer:fromConnection:.
-(void) startVideo {
    NSLog(@"CameraHelper::startVideo");
    if ([self checkPermission]) {
        return;  // denied, or will be retried from the permission callback
    }
    dispatch_async(dispatch_get_main_queue(), ^{
        NSError *error = nil;
        captureSession = [[AVCaptureSession alloc] init];
        captureSession.sessionPreset = AVCaptureSessionPresetMedium;

        // Prefer the camera the user picked in the config; no match (or nil
        // selectedCamera) falls through to the system default device below.
        AVCaptureDeviceInput *videoInput = nil;
        NSString *selectedCamera = getSelectedCamera();
        for (AVCaptureDevice *device in [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]) {
            if ([device.localizedName isEqualToString:selectedCamera]) {
                videoInput = [AVCaptureDeviceInput deviceInputWithDevice:device error:&error];
                break;
            }
        }
        if (videoInput == nil || error) {
            NSLog(@"selectedCamera error; try default device");
            error = nil;
            AVCaptureDevice *videoDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
            if (videoDevice == nil) {
                NSLog(@"videoDevice error");
                return;
            }
            videoInput = [AVCaptureDeviceInput deviceInputWithDevice:videoDevice error:&error];
            if (videoInput == nil) {
                NSLog(@"videoInput error");
                return;
            }
        }
        // Guard with canAddInput/canAddOutput: addInput:/addOutput: raise an
        // NSInvalidArgumentException when the session cannot accept them.
        if (![captureSession canAddInput:videoInput]) {
            NSLog(@"cannot add videoInput to captureSession");
            return;
        }
        [captureSession addInput:videoInput];

        AVCaptureVideoDataOutput *videoOutput = [[AVCaptureVideoDataOutput alloc] init];
        // BGRA matches the CGBitmapContext settings in imageFromSampleBuffer:.
        videoOutput.videoSettings = @{ (id)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32BGRA) };
        if (![captureSession canAddOutput:videoOutput]) {
            NSLog(@"cannot add videoOutput to captureSession");
            return;
        }
        [captureSession addOutput:videoOutput];

        // Frames are delivered on this private serial queue.
        dispatch_queue_t queue = dispatch_queue_create("cameraQueue", NULL);
        [videoOutput setSampleBufferDelegate:self queue:queue];

        previewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:captureSession];
        previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
        [previewLayer setFrame:CGRectMake(0, 0, mWidth, mHeight)];

        [captureSession startRunning];
    });
}

// Stops the capture session (asynchronously, on the main queue, mirroring
// startVideo so start/stop are serialized on the same queue).
-(void) stopVideo {
    dispatch_async(dispatch_get_main_queue(), ^{
        [captureSession stopRunning];
    });
}

// AVCaptureVideoDataOutput delegate: invoked on "cameraQueue" for every
// captured frame. Converts the frame to JPEG and pushes the bytes upstream.
- (void) captureOutput:(AVCaptureOutput *)captureOutput
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
fromConnection:(AVCaptureConnection *)connection {
    CGImageRef cgImage = [self imageFromSampleBuffer:sampleBuffer];
    UIImage *theImage = [UIImage imageWithCGImage: cgImage];
    CGImageRelease(cgImage);
    // 0.6 quality keeps per-frame JPEGs small; imageData stays alive for the
    // duration of this call, so passing its interior pointer is safe.
    NSData *imageData = UIImageJPEGRepresentation(theImage, 0.6);
    [self.delegate PushCameraImageIOS:imageData.length buffer:(unsigned char*)imageData.bytes];
}

// Wraps the BGRA pixel buffer in a CGImage, then re-draws it into an
// mWidth x mHeight bitmap. Caller owns the returned image (CGImageRelease).
// NOTE(review): outRect uses the *source* size, so a source larger than
// mWidth x mHeight is cropped rather than scaled into the output context —
// presumably intended (changing it would alter what the emulated camera
// sees); confirm against upstream before "fixing".
- (CGImageRef) imageFromSampleBuffer:(CMSampleBufferRef) sampleBuffer {
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    CVPixelBufferLockBaseAddress(imageBuffer, 0);
    void* baseAddress = CVPixelBufferGetBaseAddressOfPlane(imageBuffer, 0);
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
    size_t width = CVPixelBufferGetWidth(imageBuffer);
    size_t height = CVPixelBufferGetHeight(imageBuffer);

    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    // BGRA little-endian == kCGBitmapByteOrder32Little + premultiplied-first.
    CGContextRef inContext = CGBitmapContextCreate(baseAddress, width, height, 8, bytesPerRow, colorSpace, kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
    CGImageRef inImage = CGBitmapContextCreateImage(inContext);
    CGContextRelease(inContext);

    CGRect outRect = CGRectMake(0, 0, width, height);
    CGContextRef outContext = CGBitmapContextCreate(nil, mWidth, mHeight, 8, mWidth * 4, colorSpace, kCGImageAlphaPremultipliedFirst);
    CGContextDrawImage(outContext, outRect, inImage);
    CGImageRelease(inImage);
    CGImageRef outImage = CGBitmapContextCreateImage(outContext);
    // BUG FIX: outContext was previously never released, leaking one full
    // bitmap context (mWidth * mHeight * 4 bytes) per captured frame.
    CGContextRelease(outContext);
    CGColorSpaceRelease(colorSpace);
    CVPixelBufferUnlockBaseAddress(imageBuffer, 0);
    return outImage;
}
@end