mirror of https://github.com/Tha14/toxic.git
Astyle everything and add an astyle options script to repo
src/osx_video.m (104 lines changed)
@@ -44,19 +44,19 @@
 static uint8_t rgb_to_y(int r, int g, int b)
 {
 int y = ((9798 * r + 19235 * g + 3736 * b) >> 15);
-return y>255? 255 : y<0 ? 0 : y;
+return y > 255 ? 255 : y < 0 ? 0 : y;
 }
 
 static uint8_t rgb_to_u(int r, int g, int b)
 {
 int u = ((-5538 * r + -10846 * g + 16351 * b) >> 15) + 128;
-return u>255? 255 : u<0 ? 0 : u;
+return u > 255 ? 255 : u < 0 ? 0 : u;
 }
 
 static uint8_t rgb_to_v(int r, int g, int b)
 {
 int v = ((16351 * r + -13697 * g + -2664 * b) >> 15) + 128;
-return v>255? 255 : v<0 ? 0 : v;
+return v > 255 ? 255 : v < 0 ? 0 : v;
 }
 
 void bgrxtoyuv420(uint8_t *plane_y, uint8_t *plane_u, uint8_t *plane_v, uint8_t *rgb, uint16_t width, uint16_t height)
@@ -65,9 +65,10 @@ void bgrxtoyuv420(uint8_t *plane_y, uint8_t *plane_u, uint8_t *plane_v, uint8_t
 uint8_t *p;
 uint8_t r, g, b;
 
-for(y = 0; y != height; y += 2) {
+for (y = 0; y != height; y += 2) {
 p = rgb;
-for(x = 0; x != width; x++) {
+
+for (x = 0; x != width; x++) {
 b = *rgb++;
 g = *rgb++;
 r = *rgb++;
@@ -76,7 +77,7 @@ void bgrxtoyuv420(uint8_t *plane_y, uint8_t *plane_u, uint8_t *plane_v, uint8_t
 *plane_y++ = rgb_to_y(r, g, b);
 }
 
-for(x = 0; x != width / 2; x++) {
+for (x = 0; x != width / 2; x++) {
 b = *rgb++;
 g = *rgb++;
 r = *rgb++;
@@ -91,9 +92,12 @@ void bgrxtoyuv420(uint8_t *plane_y, uint8_t *plane_u, uint8_t *plane_v, uint8_t
 
 *plane_y++ = rgb_to_y(r, g, b);
 
-b = ((int)b + (int)*(rgb - 8) + (int)*p + (int)*(p + 4) + 2) / 4; p++;
-g = ((int)g + (int)*(rgb - 7) + (int)*p + (int)*(p + 4) + 2) / 4; p++;
-r = ((int)r + (int)*(rgb - 6) + (int)*p + (int)*(p + 4) + 2) / 4; p++;
+b = ((int)b + (int) * (rgb - 8) + (int) * p + (int) * (p + 4) + 2) / 4;
+p++;
+g = ((int)g + (int) * (rgb - 7) + (int) * p + (int) * (p + 4) + 2) / 4;
+p++;
+r = ((int)r + (int) * (rgb - 6) + (int) * p + (int) * (p + 4) + 2) / 4;
+p++;
 p++;
 
 *plane_u++ = rgb_to_u(r, g, b);
@@ -122,22 +126,26 @@ void bgrxtoyuv420(uint8_t *plane_y, uint8_t *plane_u, uint8_t *plane_v, uint8_t
 BOOL _shouldMangleDimensions;
 }
 
-- (instancetype)initWithDeviceNames: (char **)device_names AmtDevices: (int *)size {
+- (instancetype)initWithDeviceNames:
+(char **)device_names AmtDevices:
+(int *)size {
 _session = [[AVCaptureSession alloc] init];
 
-NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
+NSArray *devices = [AVCaptureDevice devicesWithMediaType: AVMediaTypeVideo];
 int i;
 
 for (i = 0; i < [devices count]; ++i) {
-AVCaptureDevice *device = [devices objectAtIndex:i];
+AVCaptureDevice *device = [devices objectAtIndex: i];
 char *video_input_name;
 NSString *localizedName = [device localizedName];
-video_input_name = (char*)malloc(strlen([localizedName cStringUsingEncoding:NSUTF8StringEncoding]) + 1);
-strcpy(video_input_name, (char*)[localizedName cStringUsingEncoding:NSUTF8StringEncoding]);
+video_input_name = (char *)malloc(strlen([localizedName cStringUsingEncoding: NSUTF8StringEncoding]) + 1);
+strcpy(video_input_name, (char *)[localizedName cStringUsingEncoding: NSUTF8StringEncoding]);
 device_names[i] = video_input_name;
 }
 
 if ( i <= 0 )
 return nil;
 
 *size = i;
 
 return self;
@@ -151,14 +159,17 @@ void bgrxtoyuv420(uint8_t *plane_y, uint8_t *plane_u, uint8_t *plane_v, uint8_t
 [super dealloc];
 }
 
-- (int)openVideoDeviceIndex: (uint32_t)device_idx Width: (uint16_t *)width Height: (uint16_t *)height {
+- (int)openVideoDeviceIndex:
+(uint32_t)device_idx Width:
+(uint16_t *)width Height:
+(uint16_t *)height {
 pthread_mutex_init(&_frameLock, NULL);
 pthread_mutex_lock(&_frameLock);
 _processingQueue = dispatch_queue_create("Toxic processing queue", DISPATCH_QUEUE_SERIAL);
-NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
-AVCaptureDevice *device = [devices objectAtIndex:device_idx];
+NSArray *devices = [AVCaptureDevice devicesWithMediaType: AVMediaTypeVideo];
+AVCaptureDevice *device = [devices objectAtIndex: device_idx];
 NSError *error = NULL;
-AVCaptureInput *input = [[AVCaptureDeviceInput alloc] initWithDevice:device error:&error];
+AVCaptureInput *input = [[AVCaptureDeviceInput alloc] initWithDevice: device error: &error];
 
 if ( error != NULL ) {
 [input release];
@@ -166,7 +177,7 @@ void bgrxtoyuv420(uint8_t *plane_y, uint8_t *plane_u, uint8_t *plane_v, uint8_t
 }
 
 [_session beginConfiguration];
-[_session addInput:input];
+[_session addInput: input];
 //_session.sessionPreset = AVCaptureSessionPreset640x480;
 //*width = 640;
 //*height = 480;
@@ -176,8 +187,9 @@ void bgrxtoyuv420(uint8_t *plane_y, uint8_t *plane_u, uint8_t *plane_v, uint8_t
 [device release];
 
 /* Obtain device resolution */
-AVCaptureInputPort *port = [input.ports objectAtIndex:0];
+AVCaptureInputPort *port = [input.ports objectAtIndex: 0];
 CMFormatDescriptionRef format_description = port.formatDescription;
 
 if ( format_description ) {
 CMVideoDimensions dimensions = CMVideoFormatDescriptionGetDimensions(format_description);
 *width = dimensions.width;
@@ -188,36 +200,44 @@ void bgrxtoyuv420(uint8_t *plane_y, uint8_t *plane_u, uint8_t *plane_v, uint8_t
 }
 
 _linkerVideo = [[AVCaptureVideoDataOutput alloc] init];
-[_linkerVideo setSampleBufferDelegate:self queue:_processingQueue];
+[_linkerVideo setSampleBufferDelegate: self queue: _processingQueue];
 
 // TODO possibly get a better pixel format
 if (_shouldMangleDimensions) {
-[_linkerVideo setVideoSettings:@{(id)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_32BGRA),
-(id)kCVPixelBufferWidthKey: @640,
-(id)kCVPixelBufferHeightKey: @480}];
+[_linkerVideo setVideoSettings: @ {
+(id)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_32BGRA),
+(id)kCVPixelBufferWidthKey: @640,
+(id)kCVPixelBufferHeightKey: @480
+}];
 } else {
-[_linkerVideo setVideoSettings:@{(id)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_32BGRA)}];
+[_linkerVideo setVideoSettings: @{(id)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_32BGRA)}];
 }
-[_session addOutput:_linkerVideo];
+[_session addOutput: _linkerVideo];
 [_session startRunning];
 
 pthread_mutex_unlock(&_frameLock);
 return 0;
 }
 
-- (void)closeVideoDeviceIndex: (uint32_t)device_idx {
-NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
-AVCaptureDevice *device = [devices objectAtIndex:device_idx];
+- (void)closeVideoDeviceIndex:
+(uint32_t)device_idx {
+NSArray *devices = [AVCaptureDevice devicesWithMediaType: AVMediaTypeVideo];
+AVCaptureDevice *device = [devices objectAtIndex: device_idx];
 NSError *error = NULL;
-AVCaptureInput *input = [[AVCaptureDeviceInput alloc] initWithDevice:device error:&error];
+AVCaptureInput *input = [[AVCaptureDeviceInput alloc] initWithDevice: device error: &error];
 [_session stopRunning];
-[_session removeOutput:_linkerVideo];
-[_session removeInput:input];
+[_session removeOutput: _linkerVideo];
+[_session removeInput: input];
 [_linkerVideo release];
 }
 
-- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection {
+- (void)captureOutput:
+(AVCaptureOutput *)captureOutput didOutputSampleBuffer:
+(CMSampleBufferRef)sampleBuffer fromConnection:
+(AVCaptureConnection *)connection {
 pthread_mutex_lock(&_frameLock);
 CVImageBufferRef img = CMSampleBufferGetImageBuffer(sampleBuffer);
 
 if (!img) {
 NSLog(@"Toxic WARNING: Bad sampleBuffer from AVfoundation!");
 } else {
@@ -231,7 +251,12 @@ void bgrxtoyuv420(uint8_t *plane_y, uint8_t *plane_u, uint8_t *plane_v, uint8_t
 pthread_mutex_unlock(&_frameLock);
 }
 
-- (int)getVideoFrameY: (uint8_t *)y U: (uint8_t *)u V: (uint8_t *)v Width: (uint16_t *)width Height: (uint16_t *)height {
+- (int)getVideoFrameY:
+(uint8_t *)y U:
+(uint8_t *)u V:
+(uint8_t *)v Width:
+(uint16_t *)width Height:
+(uint16_t *)height {
 if (!_currentFrame) {
 return -1;
 }
@@ -240,6 +265,7 @@ void bgrxtoyuv420(uint8_t *plane_y, uint8_t *plane_u, uint8_t *plane_v, uint8_t
 CFRetain(_currentFrame);
 
 CFTypeID imageType = CFGetTypeID(_currentFrame);
+
 if (imageType == CVPixelBufferGetTypeID()) {
 // TODO maybe handle other formats
 bgrxtoyuv420(y, u, v, CVPixelBufferGetBaseAddress(_currentFrame), *width, *height);
@@ -263,11 +289,11 @@ void bgrxtoyuv420(uint8_t *plane_y, uint8_t *plane_u, uint8_t *plane_v, uint8_t
 /*
 * C-interface for OSXVideo
 */
-static OSXVideo* _OSXVideo = nil;
+static OSXVideo *_OSXVideo = nil;
 
 int osx_video_init(char **device_names, int *size)
 {
-_OSXVideo = [[OSXVideo alloc] initWithDeviceNames: device_names AmtDevices: size];
+_OSXVideo = [[OSXVideo alloc] initWithDeviceNames: device_names AmtDevices: size];
 
 if ( _OSXVideo == nil )
 return -1;
@@ -286,12 +312,12 @@ int osx_video_open_device(uint32_t selection, uint16_t *width, uint16_t *height)
 if ( _OSXVideo == nil )
 return -1;
 
-return [_OSXVideo openVideoDeviceIndex: selection Width: width Height: height];
+return [_OSXVideo openVideoDeviceIndex: selection Width: width Height: height];
 }
 
 void osx_video_close_device(uint32_t device_idx)
 {
-[_OSXVideo closeVideoDeviceIndex: device_idx];
+[_OSXVideo closeVideoDeviceIndex: device_idx];
 }
 
 int osx_video_read_device(uint8_t *y, uint8_t *u, uint8_t *v, uint16_t *width, uint16_t *height)
@@ -299,7 +325,7 @@ int osx_video_read_device(uint8_t *y, uint8_t *u, uint8_t *v, uint16_t *width, u
 if ( _OSXVideo == nil )
 return -1;
 
-return [_OSXVideo getVideoFrameY: y U: u V: v Width: width Height: height];
+return [_OSXVideo getVideoFrameY: y U: u V: v Width: width Height: height];
 }
 /*
 * End of C-interface for OSXVideo
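
For reference, the fixed-point coefficients in rgb_to_y, rgb_to_u and rgb_to_v (first hunk above) are approximately the BT.601 full-range RGB-to-YCbCr weights scaled by 2^15 (for example 9798 / 32768 is roughly 0.299 and 19235 / 32768 is roughly 0.587), with the result clamped to 0..255. A minimal floating-point sketch of what the integer helpers compute; the names are illustrative and this code is not part of the commit:

    #include <stdint.h>

    /* Illustrative only: floating-point equivalent of the ">> 15" fixed-point math. */
    static uint8_t clamp_u8(double x)
    {
        return x > 255.0 ? 255 : x < 0.0 ? 0 : (uint8_t)(x + 0.5);
    }

    static uint8_t rgb_to_y_ref(int r, int g, int b)
    {
        return clamp_u8(0.299 * r + 0.587 * g + 0.114 * b);
    }

    static uint8_t rgb_to_u_ref(int r, int g, int b)
    {
        return clamp_u8(-0.169 * r - 0.331 * g + 0.499 * b + 128.0);
    }

    static uint8_t rgb_to_v_ref(int r, int g, int b)
    {
        return clamp_u8(0.499 * r - 0.418 * g - 0.081 * b + 128.0);
    }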
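
The averaging lines reformatted in the bgrxtoyuv420 hunks implement 4:2:0 chroma subsampling: one U and one V sample is produced per 2x2 block of pixels, and the blue, green and red values of the block are averaged (the "+ 2" rounds to nearest) before being passed to rgb_to_u and rgb_to_v. A minimal sketch of that rounding average, with an illustrative helper name that does not appear in the commit:

    #include <stdint.h>

    /* Illustrative helper: round-to-nearest average of the four samples of a
     * 2x2 block, matching the "+ 2) / 4" pattern in the diff above. */
    static uint8_t avg4_round(int a, int b, int c, int d)
    {
        return (uint8_t)((a + b + c + d + 2) / 4);
    }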
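
The C interface at the bottom of the file (osx_video_init, osx_video_open_device, osx_video_read_device, osx_video_close_device) is what the rest of toxic calls. A hedged usage sketch follows; the prototypes are copied from the diff, while MAX_DEVICES, the plane sizes (standard YUV420 layout) and the error handling are illustrative assumptions, not code from this repository:

    #include <stdint.h>
    #include <stdlib.h>

    /* Prototypes as shown in the diff (normally provided by a header). */
    int  osx_video_init(char **device_names, int *size);
    int  osx_video_open_device(uint32_t selection, uint16_t *width, uint16_t *height);
    void osx_video_close_device(uint32_t device_idx);
    int  osx_video_read_device(uint8_t *y, uint8_t *u, uint8_t *v, uint16_t *width, uint16_t *height);

    #define MAX_DEVICES 32  /* assumption: caller-chosen upper bound for device names */

    int capture_one_frame(void)
    {
        char *device_names[MAX_DEVICES];
        int n_devices = 0;

        if (osx_video_init(device_names, &n_devices) < 0 || n_devices <= 0)
            return -1;

        uint16_t width = 0, height = 0;

        if (osx_video_open_device(0, &width, &height) != 0)
            return -1;

        /* YUV420: full-resolution Y plane, quarter-resolution U and V planes. */
        uint8_t *y = malloc((size_t)width * height);
        uint8_t *u = malloc((size_t)width * height / 4);
        uint8_t *v = malloc((size_t)width * height / 4);

        /* getVideoFrameY returns -1 until the capture callback has delivered a frame. */
        int ret = (y && u && v) ? osx_video_read_device(y, u, v, &width, &height) : -1;

        osx_video_close_device(0);
        free(y);
        free(u);
        free(v);
        return ret;
    }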