toxic (mirror of https://github.com/Tha14/toxic.git)

Implemented video calls for OSX

commit 1606d01158, parent c8a9ac21f3
@@ -11,7 +11,7 @@ CFLAGS += -I/usr/local/opt/freealut/include/AL -I/usr/local/opt/glib/include/gli
 OSX_LIBRARIES = -lobjc -lresolv
 OSX_FRAMEWORKS = -framework Foundation -framework CoreFoundation -framework AVFoundation \
-                 -framework QuartzCore
+                 -framework QuartzCore -framework CoreMedia
 OSX_VIDEO = osx_video.m
 
 LDFLAGS += $(OSX_LIBRARIES) $(OSX_FRAMEWORKS)
@@ -408,6 +408,7 @@ void callback_peer_timeout(uint32_t friend_number)
     CB_BODY(friend_number, onPeerTimeout);
 
     callback_video_end(friend_number);
+    callback_recv_video_end(friend_number);
     stop_transmission(&CallControl.calls[friend_number], friend_number);
     /* Call is stopped manually since there might be some other
      * actions that one can possibly take on timeout
@@ -25,26 +25,30 @@
 
 #include <netinet/in.h>
 
-void bgrtoyuv420(uint8_t *plane_y, uint8_t *plane_u, uint8_t *plane_v, uint8_t *rgb, uint16_t width, uint16_t height);
-
 #ifdef __OBJC__
 #import <Foundation/Foundation.h>
 #import <AVFoundation/AVFoundation.h>
+#endif /* __OBJC__ */
+
+#define RELEASE_CHK(func, obj) if ((obj))\
+                                   func((obj));
+
+void bgrtoyuv420(uint8_t *plane_y, uint8_t *plane_u, uint8_t *plane_v, uint8_t *rgb, uint16_t width, uint16_t height);
 
+#ifdef __OBJC__
 @interface OSXVideo : NSObject <AVCaptureVideoDataOutputSampleBufferDelegate>
 - (instancetype)initWithDeviceNames:(char **)device_names AmtDevices:(int *)size;
 @end
 
 #endif /* __OBJC__ */
 
 int osx_video_init(char **device_names, int *size);
 void osx_video_release();
 /* Start device */
-int osx_video_open_device(uint32_t device_idx, uint16_t *width, uint16_t *height);
+int osx_video_open_device(uint32_t selection, uint16_t *width, uint16_t *height);
 /* Stop device */
 void osx_video_close_device(uint32_t device_idx);
 /* Read data from device */
-void osx_video_read_device(uint8_t *y, uint8_t *u, uint8_t *v, uint16_t *width, uint16_t *height);
+int osx_video_read_device(uint8_t *y, uint8_t *u, uint8_t *v, uint16_t *width, uint16_t *height);
 
 
 #endif /* OSX_VIDEO_H */
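For orientation, here is a minimal sketch of how a caller could drive the C interface declared above. It is hypothetical usage, not code from the commit: the fixed-size names array, device index 0 and the capture_one_frame wrapper are assumptions, and in toxic itself these calls are made from the video device layer shown further down.

#include <stdint.h>
#include <stdlib.h>

#include "osx_video.h"

/* Hypothetical driver: enumerate devices, open the first one, grab a single
 * YUV420 frame, then tear the capture session down again. */
static int capture_one_frame(void)
{
    char *names[16];          /* assumed upper bound on the device count */
    int n_devices = 0;

    if (osx_video_init(names, &n_devices) != 0 || n_devices <= 0)
        return -1;

    uint16_t width = 0, height = 0;

    if (osx_video_open_device(0, &width, &height) != 0)
        return -1;

    /* YUV420: full-resolution Y plane, quarter-size U and V planes. */
    uint8_t *y = malloc((size_t)width * height);
    uint8_t *u = malloc((size_t)width * height / 4);
    uint8_t *v = malloc((size_t)width * height / 4);

    /* Returns -1 while the session has not delivered a frame yet. */
    int rc = osx_video_read_device(y, u, v, &width, &height);

    osx_video_close_device(0);
    osx_video_release();

    free(y);
    free(u);
    free(v);
    /* The device-name strings placed in names[] are leaked here for brevity. */

    return rc;
}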
src/osx_video.m (127 lines changed)
@@ -59,7 +59,7 @@ static uint8_t rgb_to_v(int r, int g, int b)
     return v>255? 255 : v<0 ? 0 : v;
 }
 
-void bgrtoyuv420(uint8_t *plane_y, uint8_t *plane_u, uint8_t *plane_v, uint8_t *rgb, uint16_t width, uint16_t height)
+void bgrxtoyuv420(uint8_t *plane_y, uint8_t *plane_u, uint8_t *plane_v, uint8_t *rgb, uint16_t width, uint16_t height)
 {
     uint16_t x, y;
     uint8_t *p;
@@ -71,6 +71,8 @@ void bgrtoyuv420(uint8_t *plane_y, uint8_t *plane_u, uint8_t *plane_v, uint8_t *
             b = *rgb++;
             g = *rgb++;
             r = *rgb++;
+            rgb++;
 
             *plane_y++ = rgb_to_y(r, g, b);
         }
 
@@ -78,21 +80,26 @@ void bgrtoyuv420(uint8_t *plane_y, uint8_t *plane_u, uint8_t *plane_v, uint8_t *
             b = *rgb++;
             g = *rgb++;
             r = *rgb++;
+            rgb++;
 
             *plane_y++ = rgb_to_y(r, g, b);
 
             b = *rgb++;
             g = *rgb++;
             r = *rgb++;
+            rgb++;
 
             *plane_y++ = rgb_to_y(r, g, b);
 
-            b = ((int)b + (int)*(rgb - 6) + (int)*p + (int)*(p + 3) + 2) / 4; p++;
-            g = ((int)g + (int)*(rgb - 5) + (int)*p + (int)*(p + 3) + 2) / 4; p++;
-            r = ((int)r + (int)*(rgb - 4) + (int)*p + (int)*(p + 3) + 2) / 4; p++;
+            b = ((int)b + (int)*(rgb - 8) + (int)*p + (int)*(p + 4) + 2) / 4; p++;
+            g = ((int)g + (int)*(rgb - 7) + (int)*p + (int)*(p + 4) + 2) / 4; p++;
+            r = ((int)r + (int)*(rgb - 6) + (int)*p + (int)*(p + 4) + 2) / 4; p++;
+            p++;
 
             *plane_u++ = rgb_to_u(r, g, b);
             *plane_v++ = rgb_to_v(r, g, b);
 
-            p += 3;
+            p += 4;
         }
     }
 }
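One note on the offset changes in the hunk above: later in this commit the capture output is configured for kCVPixelFormatType_32BGRA, so each pixel occupies four bytes (B, G, R plus an unused fourth byte) rather than the three bytes the old packed-BGR loop assumed. The extra rgb++ and p++ steps skip that fourth byte, the look-back offsets grow from rgb - 6/-5/-4 to rgb - 8/-7/-6, and the next-row stride changes from p + 3 to p + 4. The helper below only illustrates that assumed layout and is not part of the commit:

#include <stdint.h>

/* Illustration of the 32BGRA layout assumed above (not code from the commit):
 * with a 4-byte stride, the first pixel of a horizontal pair sits 8 bytes
 * behind a cursor that has consumed both pixels, which is where the new
 * rgb - 8 / -7 / -6 offsets come from; packed 24-bit BGR put it at
 * rgb - 6 / -5 / -4. */
static void read_bgra_pair(const uint8_t *row, uint16_t x,
                           uint8_t first_bgr[3], uint8_t second_bgr[3])
{
    const uint8_t *px = row + 4 * x;   /* 4 bytes per pixel: B G R X */

    first_bgr[0]  = px[0];             /* B of pixel x              */
    first_bgr[1]  = px[1];             /* G                         */
    first_bgr[2]  = px[2];             /* R (px[3] is the unused X) */
    second_bgr[0] = px[4];             /* B of pixel x + 1          */
    second_bgr[1] = px[5];             /* G                         */
    second_bgr[2] = px[6];             /* R                         */
}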
@@ -112,21 +119,23 @@ void bgrtoyuv420(uint8_t *plane_y, uint8_t *plane_u, uint8_t *plane_v, uint8_t *
 
     CVImageBufferRef _currentFrame;
     pthread_mutex_t _frameLock;
+    BOOL _shouldMangleDimensions;
 }
 
 - (instancetype)initWithDeviceNames: (char **)device_names AmtDevices: (int *)size {
     _session = [[AVCaptureSession alloc] init];
 
     NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
-    int i = 0;
-    for (AVCaptureDevice *device in devices) {
+    int i;
+    for (i = 0; i < [devices count]; ++i) {
+        AVCaptureDevice *device = [devices objectAtIndex:i];
         char *video_input_name;
         NSString *localizedName = [device localizedName];
         video_input_name = (char*)malloc(strlen([localizedName cStringUsingEncoding:NSUTF8StringEncoding]) + 1);
         strcpy(video_input_name, (char*)[localizedName cStringUsingEncoding:NSUTF8StringEncoding]);
         device_names[i] = video_input_name;
-        ++i;
     }
 
     if ( i <= 0 )
         return nil;
     *size = i;
@@ -134,26 +143,40 @@ void bgrtoyuv420(uint8_t *plane_y, uint8_t *plane_u, uint8_t *plane_v, uint8_t *
     return self;
 }
 
-- (void)release {
+- (void)dealloc {
+    pthread_mutex_destroy(&_frameLock);
     [_session release];
-    [super release];
+    [_linkerVideo release];
+    dispatch_release(_processingQueue);
+    [super dealloc];
 }
 
 - (int)openVideoDeviceIndex: (uint32_t)device_idx Width: (uint16_t *)width Height: (uint16_t *)height {
     pthread_mutex_init(&_frameLock, NULL);
+    pthread_mutex_lock(&_frameLock);
     _processingQueue = dispatch_queue_create("Toxic processing queue", DISPATCH_QUEUE_SERIAL);
     NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
-    int i = 0;
-    for (AVCaptureDevice *device in devices) {
-        if ( i == device_idx ) {
+    AVCaptureDevice *device = [devices objectAtIndex:device_idx];
     NSError *error = NULL;
     AVCaptureInput *input = [[AVCaptureDeviceInput alloc] initWithDevice:device error:&error];
 
-    if ( error != NULL )
+    if ( error != NULL ) {
+        [input release];
         return -1;
+    }
 
+    [_session beginConfiguration];
+    [_session addInput:input];
+    //_session.sessionPreset = AVCaptureSessionPreset640x480;
+    //*width = 640;
+    //*height = 480;
+    _shouldMangleDimensions = YES;
+    [_session commitConfiguration];
+    [input release];
+    [device release];
 
     /* Obtain device resolution */
-    /*AVCaptureInputPort *port = [input.ports objectAtIndex:0];
+    AVCaptureInputPort *port = [input.ports objectAtIndex:0];
     CMFormatDescriptionRef format_description = port.formatDescription;
     if ( format_description ) {
         CMVideoDimensions dimensions = CMVideoFormatDescriptionGetDimensions(format_description);
@@ -162,52 +185,73 @@ void bgrtoyuv420(uint8_t *plane_y, uint8_t *plane_u, uint8_t *plane_v, uint8_t *
     } else {
         *width = 0;
         *height = 0;
-    }*/
-
-    [_session beginConfiguration];
-    [_session addInput:input];
-    //session.sessionPreset = AVCaptureSessionPreset640x480;
-    [_session commitConfiguration];
-    [input release];
-
-    break;
-    } else {
-        ++i;
-    }
     }
 
     _linkerVideo = [[AVCaptureVideoDataOutput alloc] init];
     [_linkerVideo setSampleBufferDelegate:self queue:_processingQueue];
     // TODO possibly get a better pixel format
+    if (_shouldMangleDimensions) {
+        [_linkerVideo setVideoSettings:@{(id)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_32BGRA),
+                                         (id)kCVPixelBufferWidthKey: @640,
+                                         (id)kCVPixelBufferHeightKey: @480}];
+    } else {
         [_linkerVideo setVideoSettings:@{(id)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_32BGRA)}];
+    }
     [_session addOutput:_linkerVideo];
 
     [_session startRunning];
 
+    pthread_mutex_unlock(&_frameLock);
     return 0;
 }
 
 - (void)closeVideoDeviceIndex: (uint32_t)device_idx {
+    NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
+    AVCaptureDevice *device = [devices objectAtIndex:device_idx];
+    NSError *error = NULL;
+    AVCaptureInput *input = [[AVCaptureDeviceInput alloc] initWithDevice:device error:&error];
     [_session stopRunning];
+    [_session removeOutput:_linkerVideo];
+    [_session removeInput:input];
     [_linkerVideo release];
 }
 
-- (void)getVideoFrameY: (uint8_t *)y U: (uint8_t *)u V: (uint8_t *)v Width: (uint16_t *)width Height: (uint16_t *)height {
-    /*CVImageBufferRef currentFrame = NULL;
+- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection {
+    pthread_mutex_lock(&_frameLock);
+    CVImageBufferRef img = CMSampleBufferGetImageBuffer(sampleBuffer);
+    if (!img) {
+        NSLog(@"Toxic WARNING: Bad sampleBuffer from AVfoundation!");
+    } else {
+        CVPixelBufferUnlockBaseAddress(_currentFrame, kCVPixelBufferLock_ReadOnly);
+        RELEASE_CHK(CFRelease, _currentFrame);
 
-    CFRetain(currentFrame);
+        _currentFrame = (CVImageBufferRef)CFRetain(img);
+        // we're not going to do anything to it, so it's safe to lock it always
+        CVPixelBufferLockBaseAddress(_currentFrame, kCVPixelBufferLock_ReadOnly);
+    }
+    pthread_mutex_unlock(&_frameLock);
+}
 
-    CFTypeID imageType = CFGetTypeID(currentFrame);
+- (int)getVideoFrameY: (uint8_t *)y U: (uint8_t *)u V: (uint8_t *)v Width: (uint16_t *)width Height: (uint16_t *)height {
+    if (!_currentFrame) {
+        return -1;
+    }
+
+    pthread_mutex_lock(&_frameLock);
+    CFRetain(_currentFrame);
+
+    CFTypeID imageType = CFGetTypeID(_currentFrame);
     if (imageType == CVPixelBufferGetTypeID()) {
         // TODO maybe handle other formats
-        bgrxtoyuv420(y, u, v, CVPixelBufferGetBaseAddress(currentFrame), width, height);
+        bgrxtoyuv420(y, u, v, CVPixelBufferGetBaseAddress(_currentFrame), *width, *height);
     } else if (imageType == CVOpenGLBufferGetTypeID()) {
         // OpenGL pbuffer
     } else if (imageType == CVOpenGLTextureGetTypeID()) {
         // OpenGL Texture (Do we need to handle these?)
     }
 
-    CVPixelBufferRelease(currentFrame);*/
+    CVPixelBufferRelease(_currentFrame);
+    pthread_mutex_unlock(&_frameLock);
+    return 0;
 }
 
 @end
@@ -234,11 +278,15 @@ int osx_video_init(char **device_names, int *size)
 void osx_video_release()
 {
     [_OSXVideo release];
+    _OSXVideo = nil;
 }
 
-int osx_video_open_device(uint32_t device_idx, uint16_t *width, uint16_t *height)
+int osx_video_open_device(uint32_t selection, uint16_t *width, uint16_t *height)
 {
-    return [_OSXVideo openVideoDeviceIndex: device_idx Width: width Height: height];
+    if ( _OSXVideo == nil )
+        return -1;
+
+    return [_OSXVideo openVideoDeviceIndex: selection Width: width Height: height];
 }
 
 void osx_video_close_device(uint32_t device_idx)
@@ -246,9 +294,12 @@ void osx_video_close_device(uint32_t device_idx)
     [_OSXVideo closeVideoDeviceIndex: device_idx];
 }
 
-void osx_video_read_device(uint8_t *y, uint8_t *u, uint8_t *v, uint16_t *width, uint16_t *height)
+int osx_video_read_device(uint8_t *y, uint8_t *u, uint8_t *v, uint16_t *width, uint16_t *height)
 {
-    [_OSXVideo getVideoFrameY: y U: u V: v Width: width Height: height];
+    if ( _OSXVideo == nil )
+        return -1;
+
+    return [_OSXVideo getVideoFrameY: y U: u V: v Width: width Height: height];
 }
 /*
  * End of C-interface for OSXVideo
@@ -71,8 +71,6 @@ typedef struct VideoDevice {
     struct v4l2_format fmt;
     struct VideoBuffer *buffers;
     uint32_t n_buffers;
-#else /* __OSX__ */
-
 #endif
 
     uint32_t ref_count;
@@ -109,17 +107,6 @@ bool video_thread_running = true,
 
 void* video_thread_poll(void*);
 
-static int xioctl(int fh, unsigned long request, void *arg)
-{
-    int r;
-
-    do {
-        r = ioctl(fh, request, arg);
-    } while (-1 == r && EINTR == errno);
-
-    return r;
-}
-
 static void yuv420tobgr(uint16_t width, uint16_t height, const uint8_t *y,
                         const uint8_t *u, const uint8_t *v, unsigned int ystride,
                         unsigned int ustride, unsigned int vstride, uint8_t *out)
@@ -145,6 +132,7 @@ static void yuv420tobgr(uint16_t width, uint16_t height, const uint8_t *y,
     }
 }
 
+#ifdef __linux__
 static void yuv422to420(uint8_t *plane_y, uint8_t *plane_u, uint8_t *plane_v,
                         uint8_t *input, uint16_t width, uint16_t height)
 {
@@ -168,6 +156,19 @@ static void yuv422to420(uint8_t *plane_y, uint8_t *plane_u, uint8_t *plane_v,
     }
 }
 
+static int xioctl(int fh, unsigned long request, void *arg)
+{
+    int r;
+
+    do {
+        r = ioctl(fh, request, arg);
+    } while (-1 == r && EINTR == errno);
+
+    return r;
+}
+
+#endif /* __linux__ */
+
 /* Meet devices */
 #ifdef VIDEO
 VideoDeviceError init_video_devices(ToxAV* av_)
@@ -210,11 +211,10 @@ VideoDeviceError init_video_devices()
 }
 
 #else /* __OSX__ */
-    if( osx_video_init(video_devices_names[vdt_input], &size[vdt_input]) != 0 )
+    if( osx_video_init((char**)video_devices_names[vdt_input], &size[vdt_input]) != 0 )
         return vde_InternalError;
 #endif
 
-
     size[vdt_output] = 1;
     char* video_output_name = "Toxic Video Receiver";
     video_devices_names[vdt_output][0] = video_output_name;
@@ -249,7 +249,7 @@ VideoDeviceError terminate_video_devices()
         return (VideoDeviceError) vde_InternalError;
 
 #ifdef __OSX__
-    void osx_video_release();
+    osx_video_release();
 #endif /* __OSX__ */
 
     return (VideoDeviceError) vde_None;
@@ -449,7 +449,11 @@ VideoDeviceError open_video_device(VideoDeviceType type, int32_t selection, uint
     }
 
 #else /* __OSX__ */
-    osx_video_open_device(*device_idx, device->video_width, device->video_height);
+    if ( osx_video_open_device(selection, &device->video_width, &device->video_height) != 0 ) {
+        free(device);
+
+        return vde_FailedStart;
+    }
 #endif
 
     /* Create X11 window associated to device */
@@ -612,10 +616,11 @@ void* video_thread_poll (void* arg) // TODO: maybe use thread for every input so
         {
             /* Obtain frame image data from device buffers */
             VideoDevice* device = video_devices_running[vdt_input][i];
-            void *data;
-            uint16_t video_width;
-            uint16_t video_height;
-            uint8_t *y, *u, *v;
+            uint16_t video_width = device->video_width;
+            uint16_t video_height = device->video_height;
+            uint8_t *y = device->input.planes[0];
+            uint8_t *u = device->input.planes[1];
+            uint8_t *v = device->input.planes[2];
 
 #ifdef __linux__
             struct v4l2_buffer buf;
@@ -629,19 +634,18 @@ void* video_thread_poll (void* arg) // TODO: maybe use thread for every input so
                 continue;
             }
 
-            data = (void*)device->buffers[buf.index].start;
-            video_width = device->video_width;
-            video_height = device->video_height;
-            y = device->input.planes[0];
-            u = device->input.planes[1];
-            v = device->input.planes[2];
-#else /* __OSX__*/
-
-#endif
+            void *data = (void*)device->buffers[buf.index].start;
 
             /* Convert frame image data to YUV420 for ToxAV */
             yuv422to420(y, u, v, data, video_width, video_height);
 
+#else /* __OSX__*/
+            if ( osx_video_read_device(y, u, v, &video_width, &video_height) != 0 ) {
+                unlock;
+                continue;
+            }
+#endif
 
             /* Send frame data to friend through ToxAV */
             if ( device->cb )
                 device->cb(video_width, video_height, y, u, v, device->cb_data);
@@ -681,8 +685,8 @@ void* video_thread_poll (void* arg) // TODO: maybe use thread for every input so
                 unlock;
                 continue;
             }
-#else /* __OSX__ */
-#endif
+#endif /* __linux__ */
+
         }
         unlock;
     }
@@ -725,7 +729,7 @@ VideoDeviceError close_video_device(VideoDeviceType type, uint32_t device_idx)
 #else /* __OSX__ */
         osx_video_close_device(device_idx);
 #endif
+        vpx_img_free(&device->input);
         XDestroyWindow(device->x_display, device->x_window);
         XFlush(device->x_display);
         XCloseDisplay(device->x_display);
@@ -733,8 +737,8 @@ VideoDeviceError close_video_device(VideoDeviceType type, uint32_t device_idx)
 
 #ifdef __linux__
         free(device->buffers);
-#else /* __OSX__ */
-#endif
+#endif /* __linux__ */
+
         free(device);
     } else {
         vpx_img_free(&device->input);