Initial commit

Tha_14
2024-02-22 21:43:11 +02:00
commit 1b96a031d2
1108 changed files with 157706 additions and 0 deletions


OCTPixelBufferPool.h
@@ -0,0 +1,23 @@
// This Source Code Form is subject to the terms of the Mozilla Public
// License, v. 2.0. If a copy of the MPL was not distributed with this
// file, You can obtain one at http://mozilla.org/MPL/2.0/.
#import <Foundation/Foundation.h>
#import "OCTToxAVConstants.h"
@import CoreVideo;
/**
* This class helps with allocating and reusing CVPixelBuffers from an internal CVPixelBufferPool.
*/
@interface OCTPixelBufferPool : NSObject
- (instancetype)initWithFormat:(OSType)format;
/**
* Grab a pixel buffer from the pool.
* @param bufferRef Out parameter that receives the newly created pixel buffer. The caller is responsible for releasing it.
* @param width Width of the requested buffer in pixels.
* @param height Height of the requested buffer in pixels.
* @return YES on success, NO otherwise.
*/
- (BOOL)createPixelBuffer:(CVPixelBufferRef *)bufferRef width:(OCTToxAVVideoWidth)width height:(OCTToxAVVideoHeight)height;
@end
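
A minimal usage sketch (illustrative only, not part of this commit), assuming the bi-planar NV12 pixel format used elsewhere in this changeset and an arbitrary 640x480 frame:

OCTPixelBufferPool *pool =
    [[OCTPixelBufferPool alloc] initWithFormat:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange];

CVPixelBufferRef buffer = NULL;
if ([pool createPixelBuffer:&buffer width:640 height:480]) {
    // ... fill or read the buffer ...
    CVPixelBufferRelease(buffer); // the caller owns the buffer returned by the pool
}

Requesting a different width or height later simply rebuilds the internal CVPixelBufferPool, as the implementation below shows.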


OCTPixelBufferPool.m
@@ -0,0 +1,103 @@
// This Source Code Form is subject to the terms of the Mozilla Public
// License, v. 2.0. If a copy of the MPL was not distributed with this
// file, You can obtain one at http://mozilla.org/MPL/2.0/.
#import "OCTPixelBufferPool.h"
#import "OCTLogging.h"
@interface OCTPixelBufferPool ()
@property (nonatomic, assign) CVPixelBufferPoolRef pool;
@property (nonatomic, assign) OSType formatType;
@property (nonatomic, assign) OCTToxAVVideoWidth width;
@property (nonatomic, assign) OCTToxAVVideoHeight height;
@end
@implementation OCTPixelBufferPool
#pragma mark - Lifecycle
- (instancetype)initWithFormat:(OSType)format
{
self = [super init];
if (! self) {
return nil;
}
_formatType = format;
return self;
}
- (void)dealloc
{
if (self.pool) {
CFRelease(self.pool);
}
}
#pragma mark - Public
- (BOOL)createPixelBuffer:(CVPixelBufferRef *)bufferRef width:(OCTToxAVVideoWidth)width height:(OCTToxAVVideoHeight)height
{
BOOL success = YES;
if (! self.pool) {
success = [self createPoolWithWidth:width height:height format:self.formatType];
}
if ((self.width != width) || (self.height != height)) {
success = [self createPoolWithWidth:width height:height format:self.formatType];
}
if (! success) {
return NO;
}
return [self createPixelBuffer:bufferRef];
}
#pragma mark - Private
- (BOOL)createPoolWithWidth:(OCTToxAVVideoWidth)width height:(OCTToxAVVideoHeight)height format:(OSType)format
{
if (self.pool) {
CFRelease(self.pool);
}
self.width = width;
self.height = height;
NSDictionary *pixelBufferAttributes = @{(id)kCVPixelBufferIOSurfacePropertiesKey : @{},
(id)kCVPixelBufferHeightKey : @(height),
(id)kCVPixelBufferWidthKey : @(width),
(id)kCVPixelBufferPixelFormatTypeKey : @(format)};
CVReturn success = CVPixelBufferPoolCreate(kCFAllocatorDefault,
NULL,
(__bridge CFDictionaryRef)(pixelBufferAttributes),
&_pool);
if (success != kCVReturnSuccess) {
OCTLogWarn(@"failed to create CVPixelBufferPool error:%d", success);
}
return (success == kCVReturnSuccess);
}
- (BOOL)createPixelBuffer:(CVPixelBufferRef *)bufferRef
{
CVReturn success = CVPixelBufferPoolCreatePixelBuffer(kCFAllocatorDefault,
self.pool,
bufferRef);
if (success != kCVReturnSuccess) {
OCTLogWarn(@"Failed to create pixelBuffer error:%d", success);
}
return (success == kCVReturnSuccess);
}
@end


OCTVideoEngine.h
@@ -0,0 +1,118 @@
// This Source Code Form is subject to the terms of the Mozilla Public
// License, v. 2.0. If a copy of the MPL was not distributed with this
// file, You can obtain one at http://mozilla.org/MPL/2.0/.
#import <Foundation/Foundation.h>
#import "OCTView.h"
#import "OCTToxAV.h"
@interface OCTVideoEngine : NSObject
@property (weak, nonatomic) OCTToxAV *toxav;
/**
* The friend number that the video engine should
* send video to and receive video from.
*/
@property (nonatomic, assign) OCTToxFriendNumber friendNumber;
/**
* This must be called prior to using the video session.
* @param error Pointer to error object.
* @return YES if successful, otherwise NO.
*/
- (BOOL)setupAndReturnError:(NSError **)error;
/**
* Start sending video data.
* This will also set processIncomingVideo to YES.
*/
- (void)startSendingVideo;
/**
* Stop sending video data.
* This will also set processIncomingVideo to NO.
*/
- (void)stopSendingVideo;
/**
* Indicates if the video engine is sending video.
* @return YES if running, NO otherwise.
*/
- (BOOL)isSendingVideo;
/**
* Generate an OCTView that displays the current incoming video feed.
*/
- (OCTView *)videoFeed;
/**
* Layer of the local camera preview.
* The layer will be nil if the video session is not running.
* @param completionBlock Block responsible for using the layer. This
* must not be nil.
*/
- (void)getVideoCallPreview:(void (^)(CALayer *layer))completionBlock;
/**
* Provide video frames to video engine to process.
* @param width Width of the frame in pixels.
* @param height Height of the frame in pixels.
* @param yPlane
* @param uPlane
* @param vPlane Plane data.
* The size of plane data is derived from width and height where
* Y = MAX(width, abs(ystride)) * height,
* U = MAX(width/2, abs(ustride)) * (height/2) and
* V = MAX(width/2, abs(vstride)) * (height/2).
* @param yStride
* @param uStride
* @param vStride Stride data. Strides represent padding for each plane
* that may or may not be present. You must handle strides in
* your image processing code. Strides are negative if the
* image is bottom-up, which is why you MUST abs() them when
* calculating the plane buffer sizes.
* @param friendNumber The friend number of the friend who sent the video frame.
*
*/
- (void)receiveVideoFrameWithWidth:(OCTToxAVVideoWidth)width
height:(OCTToxAVVideoHeight)height
yPlane:(OCTToxAVPlaneData *)yPlane
uPlane:(OCTToxAVPlaneData *)uPlane
vPlane:(OCTToxAVPlaneData *)vPlane
yStride:(OCTToxAVStrideData)yStride
uStride:(OCTToxAVStrideData)uStride
vStride:(OCTToxAVStrideData)vStride
friendNumber:(OCTToxFriendNumber)friendNumber;
@end
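
As a worked example of the sizing rule above (illustrative, not from the original documentation): for a 640x480 frame with no row padding, ystride is 640 and ustride/vstride are 320, so the expected plane sizes are Y = MAX(640, 640) * 480 = 307200 bytes and U = V = MAX(320, 320) * 240 = 76800 bytes each.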
#if ! TARGET_OS_IPHONE
@interface OCTVideoEngine (MacDevice)
/**
* Use a different camera for input.
* @param camera The camera's AVFoundation unique device ID, or nil to use the default video device.
* @param error Pointer to error object.
* @return YES on success, otherwise NO.
*/
- (BOOL)switchToCamera:(NSString *)camera error:(NSError **)error;
@end
#else
@interface OCTVideoEngine (iOSDevice)
/**
* Switch between the front and back camera.
* @param front YES to use the front camera, NO to use the back camera.
* @param error Pointer to error object.
* @return YES on success, otherwise NO.
*/
- (BOOL)useFrontCamera:(BOOL)front error:(NSError **)error;
@end
#endif
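
An illustrative call sequence for the engine (a sketch, not part of this commit; toxav and friendNumber are assumed to come from an already established call):

OCTVideoEngine *engine = [OCTVideoEngine new];
engine.toxav = toxav;               // assumed: an existing OCTToxAV instance
engine.friendNumber = friendNumber; // assumed: the friend in the current call

NSError *error = nil;
if ([engine setupAndReturnError:&error]) {
    [engine startSendingVideo];

    OCTView *remoteFeed = [engine videoFeed]; // displays incoming frames
    [engine getVideoCallPreview:^(CALayer *layer) {
        // attach the local camera preview layer to the call UI here
    }];
}
else {
    NSLog(@"video setup failed: %@", error);
}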


OCTVideoEngine.m
@@ -0,0 +1,489 @@
// This Source Code Form is subject to the terms of the Mozilla Public
// License, v. 2.0. If a copy of the MPL was not distributed with this
// file, You can obtain one at http://mozilla.org/MPL/2.0/.
#import "OCTVideoEngine.h"
#import "OCTVideoView.h"
#import "OCTPixelBufferPool.h"
#import "OCTManagerConstants.h"
#import "OCTLogging.h"
@import AVFoundation;
static const OSType kPixelFormat = kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange;
@interface OCTVideoEngine () <AVCaptureVideoDataOutputSampleBufferDelegate>
@property (nonatomic, strong) AVCaptureSession *captureSession;
@property (nonatomic, strong) AVCaptureVideoDataOutput *dataOutput;
@property (nonatomic, strong) dispatch_queue_t processingQueue;
@property (nonatomic, strong) OCTVideoView *videoView;
@property (nonatomic, weak) AVCaptureVideoPreviewLayer *previewLayer;
@property (nonatomic, assign) uint8_t *reusableUChromaPlane;
@property (nonatomic, assign) uint8_t *reusableVChromaPlane;
@property (nonatomic, assign) uint8_t *reusableYChromaPlane;
@property (strong, nonatomic) OCTPixelBufferPool *pixelPool;
@property (nonatomic, assign) NSUInteger sizeOfChromaPlanes;
@property (nonatomic, assign) NSUInteger sizeOfYPlane;
@end
@implementation OCTVideoEngine
#pragma mark - Life cycle
- (instancetype)init
{
self = [super init];
if (! self) {
return nil;
}
OCTLogVerbose(@"init");
// Disabling captureSession for simulator due to bug in iOS 10.
// See https://forums.developer.apple.com/thread/62230
#if ! TARGET_OS_SIMULATOR
_captureSession = [AVCaptureSession new];
_captureSession.sessionPreset = AVCaptureSessionPresetMedium;
if ([_captureSession canSetSessionPreset:AVCaptureSessionPreset640x480]) {
_captureSession.sessionPreset = AVCaptureSessionPreset640x480;
}
#endif
_dataOutput = [AVCaptureVideoDataOutput new];
_processingQueue = dispatch_queue_create("me.dvor.objcTox.OCTVideoEngineQueue", NULL);
_pixelPool = [[OCTPixelBufferPool alloc] initWithFormat:kPixelFormat];
return self;
}
- (void)dealloc
{
if (self.reusableUChromaPlane) {
free(self.reusableUChromaPlane);
}
if (self.reusableVChromaPlane) {
free(self.reusableVChromaPlane);
}
if (self.reusableYChromaPlane) {
free(self.reusableYChromaPlane);
}
[[NSNotificationCenter defaultCenter] removeObserver:self];
}
#pragma mark - Public
- (BOOL)setupAndReturnError:(NSError **)error
{
OCTLogVerbose(@"setupAndReturnError");
#if TARGET_OS_IPHONE
AVCaptureDevice *videoCaptureDevice = [self getDeviceForPosition:AVCaptureDevicePositionFront];
#else
AVCaptureDevice *videoCaptureDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
#endif
AVCaptureDeviceInput *videoInput = [AVCaptureDeviceInput deviceInputWithDevice:videoCaptureDevice error:error];
if (! videoInput) {
return NO;
}
if ([self.captureSession canAddInput:videoInput]) {
[self.captureSession addInput:videoInput];
}
self.dataOutput.alwaysDiscardsLateVideoFrames = YES;
self.dataOutput.videoSettings = @{
(NSString *)kCVPixelBufferPixelFormatTypeKey : @(kPixelFormat),
};
[self.dataOutput setSampleBufferDelegate:self queue:self.processingQueue];
[self.captureSession addOutput:self.dataOutput];
AVCaptureConnection *conn = [self.dataOutput connectionWithMediaType:AVMediaTypeVideo];
if (conn.supportsVideoOrientation) {
[self registerOrientationNotification];
[self orientationChanged];
}
return YES;
}
#if ! TARGET_OS_IPHONE
- (BOOL)switchToCamera:(NSString *)camera error:(NSError **)error
{
AVCaptureDevice *dev = nil;
if (! camera) {
dev = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
}
else {
dev = [AVCaptureDevice deviceWithUniqueID:camera];
}
return [self actuallySetCamera:dev error:error];
}
#else
- (BOOL)useFrontCamera:(BOOL)front error:(NSError **)error
{
AVCaptureDevicePosition position = front ? AVCaptureDevicePositionFront : AVCaptureDevicePositionBack;
AVCaptureDevice *dev = [self getDeviceForPosition:position];
return [self actuallySetCamera:dev error:error];
}
#endif
- (BOOL)actuallySetCamera:(AVCaptureDevice *)dev error:(NSError **)error
{
OCTLogVerbose(@"actuallySetCamera: %@", dev);
NSArray *inputs = [self.captureSession inputs];
AVCaptureInput *current = [inputs firstObject];
if ([current isKindOfClass:[AVCaptureDeviceInput class]]) {
AVCaptureDeviceInput *inputDevice = (AVCaptureDeviceInput *)current;
if ([inputDevice.device.uniqueID isEqualToString:dev.uniqueID]) {
return YES;
}
}
for (AVCaptureInput *input in inputs) {
[self.captureSession removeInput:input];
}
AVCaptureDeviceInput *videoInput = [AVCaptureDeviceInput deviceInputWithDevice:dev error:error];
if (! videoInput) {
return NO;
}
if (! [self.captureSession canAddInput:videoInput]) {
return NO;
}
[self.captureSession addInput:videoInput];
[self orientationChanged];
return YES;
}
- (void)startSendingVideo
{
OCTLogVerbose(@"startSendingVideo");
dispatch_async(self.processingQueue, ^{
if ([self isSendingVideo]) {
return;
}
[self.captureSession startRunning];
});
}
- (void)stopSendingVideo
{
OCTLogVerbose(@"stopSendingVideo");
dispatch_async(self.processingQueue, ^{
if (! [self isSendingVideo]) {
return;
}
[self.captureSession stopRunning];
});
}
- (BOOL)isSendingVideo
{
OCTLogVerbose(@"isSendingVideo");
return self.captureSession.isRunning;
}
- (void)getVideoCallPreview:(void (^)(CALayer *))completionBlock
{
NSParameterAssert(completionBlock);
OCTLogVerbose(@"videoCallPreview");
dispatch_async(self.processingQueue, ^{
AVCaptureVideoPreviewLayer *previewLayer = self.previewLayer;
if (! self.previewLayer) {
previewLayer = [AVCaptureVideoPreviewLayer layerWithSession:self.captureSession];
}
dispatch_async(dispatch_get_main_queue(), ^{
completionBlock(previewLayer);
});
self.previewLayer = previewLayer;
});
}
- (OCTView *)videoFeed
{
OCTLogVerbose(@"videoFeed");
OCTVideoView *feed = self.videoView;
if (! feed) {
feed = [OCTVideoView view];
self.videoView = feed;
}
return feed;
}
- (void)receiveVideoFrameWithWidth:(OCTToxAVVideoWidth)width
height:(OCTToxAVVideoHeight)height
yPlane:(OCTToxAVPlaneData *)yPlane
uPlane:(OCTToxAVPlaneData *)uPlane
vPlane:(OCTToxAVPlaneData *)vPlane
yStride:(OCTToxAVStrideData)yStride
uStride:(OCTToxAVStrideData)uStride
vStride:(OCTToxAVStrideData)vStride
friendNumber:(OCTToxFriendNumber)friendNumber
{
if (! self.videoView) {
return;
}
if (! yPlane) {
return;
}
if (! uPlane) {
return;
}
if (! vPlane) {
return;
}
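/* toxav delivers three separate Y/U/V planes (4:2:0 planar), while the pixel
 * buffer created below uses the bi-planar kPixelFormat layout: plane 0 holds Y,
 * plane 1 holds interleaved Cb/Cr. The copy loops below repack the data. */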
size_t yBytesPerRow = MIN(width, abs(yStride));
size_t uvBytesPerRow = MIN(width / 2, abs(uStride));
/**
* Create pixel buffers and copy YUV planes over
*/
CVPixelBufferRef bufferRef = NULL;
if (! [self.pixelPool createPixelBuffer:&bufferRef width:width height:height]) {
return;
}
CVPixelBufferLockBaseAddress(bufferRef, 0);
OCTToxAVPlaneData *ySource = yPlane;
// if stride is negative, start reading from the left of the last row
if (yStride < 0) {
ySource = ySource + ((-yStride) * (height - 1));
}
uint8_t *yDestinationPlane = CVPixelBufferGetBaseAddressOfPlane(bufferRef, 0);
size_t yDestinationStride = CVPixelBufferGetBytesPerRowOfPlane(bufferRef, 0);
/* Copy yPlane data */
for (size_t yHeight = 0; yHeight < height; yHeight++) {
memcpy(yDestinationPlane, ySource, yBytesPerRow);
ySource += yStride;
yDestinationPlane += yDestinationStride;
}
/* Interleave U and V into the bi-planar chroma plane */
uint8_t *uvDestinationPlane = CVPixelBufferGetBaseAddressOfPlane(bufferRef, 1);
size_t uvDestinationStride = CVPixelBufferGetBytesPerRowOfPlane(bufferRef, 1);
OCTToxAVPlaneData *uSource = uPlane;
if (uStride < 0) {
uSource = uSource + ((-uStride) * ((height / 2) - 1));
}
OCTToxAVPlaneData *vSource = vPlane;
if (vStride < 0) {
vSource = vSource + ((-vStride) * ((height / 2) - 1));
}
for (size_t yHeight = 0; yHeight < height / 2; yHeight++) {
for (size_t index = 0; index < uvBytesPerRow; index++) {
uvDestinationPlane[index * 2] = uSource[index];
uvDestinationPlane[(index * 2) + 1] = vSource[index];
}
uvDestinationPlane += uvDestinationStride;
uSource += uStride;
vSource += vStride;
}
CVPixelBufferUnlockBaseAddress(bufferRef, 0);
dispatch_async(self.processingQueue, ^{
/* Create Core Image */
CIImage *coreImage = [CIImage imageWithCVPixelBuffer:bufferRef];
CVPixelBufferRelease(bufferRef);
self.videoView.image = coreImage;
});
}
#pragma mark - Buffer Delegate
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
{
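/* The camera delivers frames in the bi-planar format requested in setupAndReturnError
 * (plane 0 = Y, plane 1 = interleaved Cb/Cr), while toxav expects three separate
 * planes, so the chroma plane is split into U and V below. */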
CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
if (! imageBuffer) {
return;
}
CVPixelBufferLockBaseAddress(imageBuffer, kCVPixelBufferLock_ReadOnly);
size_t yHeight = CVPixelBufferGetHeightOfPlane(imageBuffer, 0);
size_t yBytesPerRow = CVPixelBufferGetBytesPerRowOfPlane(imageBuffer, 0);
size_t yStride = MAX(CVPixelBufferGetWidthOfPlane(imageBuffer, 0), yBytesPerRow);
size_t uvHeight = CVPixelBufferGetHeightOfPlane(imageBuffer, 1);
size_t uvBytesPerRow = CVPixelBufferGetBytesPerRowOfPlane(imageBuffer, 1);
size_t uvStride = MAX(CVPixelBufferGetWidthOfPlane(imageBuffer, 1), uvBytesPerRow);
size_t ySize = yBytesPerRow * yHeight;
size_t numberOfElementsForChroma = uvBytesPerRow * uvHeight / 2;
/**
* Recreate the buffers if the original ones are too small
*/
if (numberOfElementsForChroma > self.sizeOfChromaPlanes) {
if (self.reusableUChromaPlane) {
free(self.reusableUChromaPlane);
}
if (self.reusableVChromaPlane) {
free(self.reusableVChromaPlane);
}
self.reusableUChromaPlane = malloc(numberOfElementsForChroma * sizeof(OCTToxAVPlaneData));
self.reusableVChromaPlane = malloc(numberOfElementsForChroma * sizeof(OCTToxAVPlaneData));
self.sizeOfChromaPlanes = numberOfElementsForChroma;
}
if (ySize > self.sizeOfYPlane) {
if (self.reusableYChromaPlane) {
free(self.reusableYChromaPlane);
}
self.reusableYChromaPlane = malloc(ySize * sizeof(OCTToxAVPlaneData));
self.sizeOfYPlane = ySize;
}
/**
* Copy the Y plane data while skipping stride
*/
OCTToxAVPlaneData *yPlane = CVPixelBufferGetBaseAddressOfPlane(imageBuffer, 0);
uint8_t *yDestination = self.reusableYChromaPlane;
for (size_t i = 0; i < yHeight; i++) {
memcpy(yDestination, yPlane, yBytesPerRow);
yPlane += yStride;
yDestination += yBytesPerRow;
}
/**
* Deinterleave the UV [uvuvuvuv] plane and place the U and V data into the reusable arrays
*/
OCTToxAVPlaneData *uvPlane = CVPixelBufferGetBaseAddressOfPlane(imageBuffer, 1);
uint8_t *uDestination = self.reusableUChromaPlane;
uint8_t *vDestination = self.reusableVChromaPlane;
for (size_t height = 0; height < uvHeight; height++) {
for (size_t i = 0; i < uvBytesPerRow; i += 2) {
uDestination[i / 2] = uvPlane[i];
vDestination[i / 2] = uvPlane[i + 1];
}
uvPlane += uvStride;
uDestination += uvBytesPerRow / 2;
vDestination += uvBytesPerRow / 2;
}
CVPixelBufferUnlockBaseAddress(imageBuffer, kCVPixelBufferLock_ReadOnly);
uDestination = nil;
vDestination = nil;
NSError *error;
if (! [self.toxav sendVideoFrametoFriend:self.friendNumber
width:(OCTToxAVVideoWidth)yBytesPerRow
height:(OCTToxAVVideoHeight)yHeight
yPlane:self.reusableYChromaPlane
uPlane:self.reusableUChromaPlane
vPlane:self.reusableVChromaPlane
error:&error]) {
OCTLogWarn(@"error:%@ width:%zu height:%zu", error, yBytesPerRow, yHeight);
}
}
#pragma mark - Private
- (AVCaptureDevice *)getDeviceForPosition:(AVCaptureDevicePosition)position
{
OCTLogVerbose(@"getDeviceForPosition");
NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
for (AVCaptureDevice *device in devices) {
if ([device position] == position) {
return device;
}
}
return nil;
}
- (void)registerOrientationNotification
{
#if TARGET_OS_IPHONE
[[NSNotificationCenter defaultCenter] addObserver:self
selector:@selector(orientationChanged)
name:UIDeviceOrientationDidChangeNotification
object:nil];
#endif
}
- (void)orientationChanged
{
#if TARGET_OS_IPHONE
UIDeviceOrientation deviceOrientation = [UIDevice currentDevice].orientation;
AVCaptureConnection *conn = [self.dataOutput connectionWithMediaType:AVMediaTypeVideo];
AVCaptureVideoOrientation orientation;
switch (deviceOrientation) {
case UIDeviceOrientationPortraitUpsideDown:
orientation = AVCaptureVideoOrientationPortraitUpsideDown;
break;
case UIDeviceOrientationPortrait:
orientation = AVCaptureVideoOrientationPortrait;
break;
/* Landscape device and capture orientations are mirrored: device landscape-left maps to capture landscape-right and vice versa, otherwise the video appears upside down */
case UIDeviceOrientationLandscapeLeft:
orientation = AVCaptureVideoOrientationLandscapeRight;
break;
case UIDeviceOrientationLandscapeRight:
orientation = AVCaptureVideoOrientationLandscapeLeft;
break;
default:
return;
}
conn.videoOrientation = orientation;
self.previewLayer.connection.videoOrientation = orientation;
#endif
}
@end


OCTVideoView.h
@@ -0,0 +1,22 @@
// This Source Code Form is subject to the terms of the Mozilla Public
// License, v. 2.0. If a copy of the MPL was not distributed with this
// file, You can obtain one at http://mozilla.org/MPL/2.0/.
#import "OCTView.h"
@import GLKit;
#if TARGET_OS_IPHONE
@interface OCTVideoView : GLKView
#else
@interface OCTVideoView : NSOpenGLView
#endif
@property (strong, nonatomic) CIImage *image;
/**
* Allocates the view and runs the platform-specific initializers.
*/
+ (instancetype)view;
@end
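
A brief sketch of how this view is used (illustrative; the CIImage comes from whatever produces decoded frames, as OCTVideoEngine does in this commit):

OCTVideoView *videoView = [OCTVideoView view]; // platform-specific GLKView / NSOpenGLView init
videoView.image = decodedFrame;                // assumed CIImage; setting it triggers a redraw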


OCTVideoView.m
@@ -0,0 +1,100 @@
// This Source Code Form is subject to the terms of the Mozilla Public
// License, v. 2.0. If a copy of the MPL was not distributed with this
// file, You can obtain one at http://mozilla.org/MPL/2.0/.
#import "OCTVideoView.h"
#import "OCTManagerConstants.h"
#import "OCTLogging.h"
@import Foundation;
@import AVFoundation;
@interface OCTVideoView ()
@property (strong, nonatomic) CIContext *coreImageContext;
@end
@implementation OCTVideoView
+ (instancetype)view
{
#if TARGET_OS_IPHONE
OCTVideoView *videoView = [[self alloc] initWithFrame:CGRectZero];
#else
OCTVideoView *videoView = [[self alloc] initWithFrame:CGRectZero pixelFormat:[self defaultPixelFormat]];
#endif
[videoView finishInitializing];
return videoView;
}
- (void)dealloc
{
OCTLogVerbose(@"dealloc");
}
- (void)finishInitializing
{
#if TARGET_OS_IPHONE
__weak OCTVideoView *weakSelf = self;
dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_BACKGROUND, 0), ^{
OCTVideoView *strongSelf = weakSelf;
strongSelf.context = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES2];
strongSelf.coreImageContext = [CIContext contextWithEAGLContext:strongSelf.context];
});
self.enableSetNeedsDisplay = NO;
#endif
}
- (void)setImage:(CIImage *)image
{
_image = image;
#if TARGET_OS_IPHONE
[self display];
#else
[self setNeedsDisplay:YES];
#endif
}
#if ! TARGET_OS_IPHONE
// OS X: we need to correct the viewport when the view size changes
- (void)reshape
{
glViewport(0, 0, self.bounds.size.width, self.bounds.size.height);
}
#endif
- (void)drawRect:(CGRect)rect
{
#if TARGET_OS_IPHONE
if (self.image) {
glClearColor(0, 0.0, 0.0, 1.0);
glClear(GL_COLOR_BUFFER_BIT);
CGRect destRect = AVMakeRectWithAspectRatioInsideRect(self.image.extent.size, rect);
float screenscale = self.window.screen.scale;
destRect = CGRectApplyAffineTransform(destRect, CGAffineTransformMakeScale(screenscale, screenscale));
[self.coreImageContext drawImage:self.image inRect:destRect fromRect:self.image.extent];
}
#else
[self.openGLContext makeCurrentContext];
if (self.image) {
CIContext *ctx = [CIContext contextWithCGLContext:self.openGLContext.CGLContextObj pixelFormat:self.openGLContext.pixelFormat.CGLPixelFormatObj colorSpace:nil options:nil];
// The default GL coordinate system goes from -1 to 1 on both axes.
// No projection matrix was set, so draw into that range instead of using the view bounds.
[ctx drawImage:self.image inRect:(CGRect) {-1, -1, 2, 2} fromRect:self.image.extent];
}
else {
glClearColor(0.0, 0.0, 0.0, 1.0);
glClear(GL_COLOR_BUFFER_BIT);
}
glFlush();
#endif
}
@end