Add possibility to save QR code in PNG file format
@@ -127,19 +127,20 @@ void bgrxtoyuv420(uint8_t *plane_y, uint8_t *plane_u, uint8_t *plane_v, uint8_t
 }

-- (instancetype)initWithDeviceNames: (char **)device_names AmtDevices: (int *)size {
+- (instancetype)initWithDeviceNames: (char **)device_names AmtDevices: (int *)size
+{
     _session = [[AVCaptureSession alloc] init];

     NSArray *devices = [AVCaptureDevice devicesWithMediaType: AVMediaTypeVideo];
     int i;

     for (i = 0; i < [devices count]; ++i) {
         AVCaptureDevice *device = [devices objectAtIndex: i];
         char *video_input_name;
         NSString *localizedName = [device localizedName];
         video_input_name = (char *)malloc(strlen([localizedName cStringUsingEncoding: NSUTF8StringEncoding]) + 1);
         strcpy(video_input_name, (char *)[localizedName cStringUsingEncoding: NSUTF8StringEncoding]);
         device_names[i] = video_input_name;
     }
@@ -151,7 +152,8 @@ strcpy(video_input_name, (char *)[localizedName cStringUsingEncoding: NSUTF8Stri
     return self;
 }

-- (void)dealloc {
+- (void)dealloc
+{
     pthread_mutex_destroy(&_frameLock);
     [_session release];
     [_linkerVideo release];
@@ -160,16 +162,17 @@ strcpy(video_input_name, (char *)[localizedName cStringUsingEncoding: NSUTF8Stri
 }

-- (int)openVideoDeviceIndex: (uint32_t)device_idx Width: (uint16_t *)width Height: (uint16_t *)height {
+- (int)openVideoDeviceIndex: (uint32_t)device_idx Width: (uint16_t *)width Height: (uint16_t *)height
+{
     pthread_mutex_init(&_frameLock, NULL);
     pthread_mutex_lock(&_frameLock);
     _processingQueue = dispatch_queue_create("Toxic processing queue", DISPATCH_QUEUE_SERIAL);
     NSArray *devices = [AVCaptureDevice devicesWithMediaType: AVMediaTypeVideo];
     AVCaptureDevice *device = [devices objectAtIndex: device_idx];
     NSError *error = NULL;
     AVCaptureInput *input = [[AVCaptureDeviceInput alloc] initWithDevice: device error: &error];

     if ( error != NULL ) {
         [input release];
@@ -177,7 +180,7 @@ AVCaptureInput *input = [[AVCaptureDeviceInput alloc] initWithDevice: device err
     }

     [_session beginConfiguration];
     [_session addInput: input];
     //_session.sessionPreset = AVCaptureSessionPreset640x480;
     //*width = 640;
     //*height = 480;
@@ -200,19 +203,20 @@ AVCaptureInput *input = [[AVCaptureDeviceInput alloc] initWithDevice: device err
     }

     _linkerVideo = [[AVCaptureVideoDataOutput alloc] init];
     [_linkerVideo setSampleBufferDelegate: self queue: _processingQueue];

     // TODO possibly get a better pixel format
     if (_shouldMangleDimensions) {
         [_linkerVideo setVideoSettings: @ {
             (id)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_32BGRA),
             (id)kCVPixelBufferWidthKey: @640,
             (id)kCVPixelBufferHeightKey: @480
         }];
     } else {
-        [_linkerVideo setVideoSettings: @{(id)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_32BGRA)}];
+        [_linkerVideo setVideoSettings: @ {(id)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_32BGRA)}];
     }

     [_session addOutput: _linkerVideo];
     [_session startRunning];

     pthread_mutex_unlock(&_frameLock);
@@ -220,21 +224,23 @@ AVCaptureInput *input = [[AVCaptureDeviceInput alloc] initWithDevice: device err
 }

-- (void)closeVideoDeviceIndex: (uint32_t)device_idx {
+- (void)closeVideoDeviceIndex: (uint32_t)device_idx
+{
     NSArray *devices = [AVCaptureDevice devicesWithMediaType: AVMediaTypeVideo];
     AVCaptureDevice *device = [devices objectAtIndex: device_idx];
     NSError *error = NULL;
     AVCaptureInput *input = [[AVCaptureDeviceInput alloc] initWithDevice: device error: &error];
     [_session stopRunning];
     [_session removeOutput: _linkerVideo];
     [_session removeInput: input];
     [_linkerVideo release];
 }

-- (void)captureOutput: (AVCaptureOutput *)captureOutput didOutputSampleBuffer: (CMSampleBufferRef)sampleBuffer fromConnection: (AVCaptureConnection *)connection {
+- (void)captureOutput: (AVCaptureOutput *)captureOutput didOutputSampleBuffer: (CMSampleBufferRef)sampleBuffer fromConnection: (AVCaptureConnection *)connection
+{
     pthread_mutex_lock(&_frameLock);
     CVImageBufferRef img = CMSampleBufferGetImageBuffer(sampleBuffer);
@@ -248,15 +254,17 @@ AVCaptureInput *input = [[AVCaptureDeviceInput alloc] initWithDevice: device err
         // we're not going to do anything to it, so it's safe to lock it always
         CVPixelBufferLockBaseAddress(_currentFrame, kCVPixelBufferLock_ReadOnly);
     }

     pthread_mutex_unlock(&_frameLock);
 }

-- (int)getVideoFrameY: (uint8_t *)y U: (uint8_t *)u V: (uint8_t *)v Width: (uint16_t *)width Height: (uint16_t *)height {
+- (int)getVideoFrameY: (uint8_t *)y U: (uint8_t *)u V: (uint8_t *)v Width: (uint16_t *)width Height: (uint16_t *)height
+{
     if (!_currentFrame) {
         return -1;
     }
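The capture output above is configured for kCVPixelFormatType_32BGRA, while getVideoFrameY:U:V:Width:Height: hands out separate Y/U/V planes, so the bgrxtoyuv420() helper named in the first hunk header presumably performs the BGRA-to-YUV420 conversion. Below is a minimal sketch of such a conversion, assuming the common fixed-point BT.601 coefficients and 2x2 chroma subsampling; the coefficients actually used in osx_video.m are not visible in this diff.

#include <stdint.h>

/* Hypothetical stand-in for bgrxtoyuv420(); the real implementation may differ. */
static void bgrx_to_yuv420_sketch(uint8_t *plane_y, uint8_t *plane_u, uint8_t *plane_v,
                                  const uint8_t *bgrx, uint16_t width, uint16_t height)
{
    for (uint32_t row = 0; row < height; ++row) {
        for (uint32_t col = 0; col < width; ++col) {
            const uint8_t *px = bgrx + 4 * (row * width + col);   /* bytes are B, G, R, X */
            int b = px[0], g = px[1], r = px[2];

            /* Fixed-point BT.601 luma. */
            plane_y[row * width + col] = (uint8_t)(((66 * r + 129 * g + 25 * b + 128) >> 8) + 16);

            /* One chroma sample per 2x2 block, taken from its top-left pixel. */
            if ((row % 2 == 0) && (col % 2 == 0)) {
                uint32_t ci = (row / 2) * (width / 2) + (col / 2);
                plane_u[ci] = (uint8_t)(((-38 * r - 74 * g + 112 * b + 128) >> 8) + 128);
                plane_v[ci] = (uint8_t)(((112 * r - 94 * g - 18 * b + 128) >> 8) + 128);
            }
        }
    }
}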
@@ -293,7 +301,7 @@ static OSXVideo *_OSXVideo = nil;

 int osx_video_init(char **device_names, int *size)
 {
     _OSXVideo = [[OSXVideo alloc] initWithDeviceNames: device_names AmtDevices: size];

     if ( _OSXVideo == nil )
         return -1;
@@ -312,12 +320,12 @@ int osx_video_open_device(uint32_t selection, uint16_t *width, uint16_t *height)
     if ( _OSXVideo == nil )
         return -1;

     return [_OSXVideo openVideoDeviceIndex: selection Width: width Height: height];
 }

 void osx_video_close_device(uint32_t device_idx)
 {
     [_OSXVideo closeVideoDeviceIndex: device_idx];
 }

 int osx_video_read_device(uint8_t *y, uint8_t *u, uint8_t *v, uint16_t *width, uint16_t *height)
@@ -325,7 +333,7 @@ int osx_video_read_device(uint8_t *y, uint8_t *u, uint8_t *v, uint16_t *width, u
     if ( _OSXVideo == nil )
         return -1;

     return [_OSXVideo getVideoFrameY: y U: u V: v Width: width Height: height];
 }
 /*
  * End of C-interface for OSXVideo
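Taken together, the C wrappers above give callers an enumerate / open / read / close lifecycle around the OSXVideo singleton. The following is a minimal sketch of how they might be driven from plain C, assuming only the prototypes shown in this diff; the device-name array size of 16 and the YUV420 plane sizing are illustrative assumptions, not values taken from the source.

#include <stdint.h>
#include <stdlib.h>

/* Prototypes as they appear in the diff (normally pulled in from a header). */
int  osx_video_init(char **device_names, int *size);
int  osx_video_open_device(uint32_t selection, uint16_t *width, uint16_t *height);
int  osx_video_read_device(uint8_t *y, uint8_t *u, uint8_t *v, uint16_t *width, uint16_t *height);
void osx_video_close_device(uint32_t device_idx);

int capture_one_frame(void)
{
    char *device_names[16];          /* assumed upper bound on device count */
    int amt_devices = 0;

    if (osx_video_init(device_names, &amt_devices) != 0 || amt_devices == 0)
        return -1;

    uint16_t width = 0, height = 0;

    if (osx_video_open_device(0, &width, &height) != 0)
        return -1;

    /* Assumed YUV420 layout: full-resolution Y plane, quarter-resolution U and V planes. */
    uint8_t *y = malloc((size_t)width * height);
    uint8_t *u = malloc((size_t)width * height / 4);
    uint8_t *v = malloc((size_t)width * height / 4);

    int ret = osx_video_read_device(y, u, v, &width, &height);

    osx_video_close_device(0);
    free(y);
    free(u);
    free(v);
    return ret;
}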