// ARCALView.h
#import <UIKit/UIKit.h>
#import <QuartzCore/QuartzCore.h>
#import <AVFoundation/AVFoundation.h>
#import <QCAR/UIGLViewProtocol.h>
#import <GLKit/GLKit.h>
@class QCARutils;
// This class wraps the CAEAGLLayer from CoreAnimation into a convenient UIView
// subclass. The view content is basically an EAGL surface you render your
// OpenGL scene into. Note that setting the view non-opaque will only work if
// the EAGL surface has an alpha channel.
@interface ARCALView : UIView <UIGLViewProtocol>
{
@public
NSMutableArray *textureList; // list of textures to load
@protected
EAGLContext *context_; // GL ES 2 context; the QCAR render thread draws with it
GLuint framebuffer_; // lazily created on the main thread in -createFrameBuffer
GLuint renderbuffer_; // color renderbuffer backed by this view's CAEAGLLayer
QCARutils *qUtils; // QCAR utils class
NSMutableArray* textures; // loaded textures (assigned via -useTextures:, not owned)
CALayer *cameraLayer; // displays the latest camera frame as its contents
AVPlayer *player; // plays the overlay movie (owned)
AVPlayerLayer *playerLayer; // movie layer, transformed to track the target (owned)
BOOL videoInitialized; // YES once the asset's "tracks" key has loaded
CATransform3D transform; // pose computed on the render thread, applied on main
int frameCount; // frames rendered so far; the first few are skipped as warm-up
BOOL hideVideo; // YES while no trackable is visible
}
@property (nonatomic, retain) NSMutableArray *textureList;
// Latest camera frame as a CGImageRef (stored as id so the retain property
// manages it). Written on the render thread, read on the main thread.
@property (nonatomic, retain) id cameraImage;
- (void) useTextures:(NSMutableArray *)theTextures;
@end
// ARCALView.m
#import "ARCALView.h"
#import "Texture.h"
#import <QCAR/QCAR.h>
#import <QCAR/Renderer.h>
#import <QCAR/Image.h>
#import "QCARutils.h"
#ifndef USE_OPENGL1
#import "ShaderUtils.h"
#define MAKESTRING(x) #x
#endif
// Private helper declarations, kept out of the public header.
@interface ARCALView (PrivateMethods)
// NOTE(review): declared but not defined anywhere in this file — confirm an
// implementation exists (e.g. in a category or subclass) before calling it.
- (int)loadTextures;
@end
@implementation ARCALView

@synthesize textureList;
@synthesize cameraImage;

// Back this UIView with a CAEAGLLayer so it can host an EAGL rendering surface.
+ (Class)layerClass
{
    return [CAEAGLLayer class];
}

// Copy a 16-float OpenGL matrix into a CATransform3D. Both store the same
// 4x4 layout in memory, so this is a straight element-by-element copy.
- (CATransform3D)GLtoCATransform3D:(QCAR::Matrix44F)m
{
    CATransform3D t = CATransform3DIdentity;
    t.m11 = m.data[0];  t.m12 = m.data[1];  t.m13 = m.data[2];  t.m14 = m.data[3];
    t.m21 = m.data[4];  t.m22 = m.data[5];  t.m23 = m.data[6];  t.m24 = m.data[7];
    t.m31 = m.data[8];  t.m32 = m.data[9];  t.m33 = m.data[10]; t.m34 = m.data[11];
    t.m41 = m.data[12]; t.m42 = m.data[13]; t.m43 = m.data[14]; t.m44 = m.data[15];
    return t;
}

// Wrap a QCAR RGB888 camera frame in an autoreleased CGImage.
// The data provider is created with no copy and no release callback, so the
// returned image aliases QCAR's pixel buffer; it is only valid while that
// frame's buffer is alive. NOTE(review): if the image must outlive the frame,
// the pixels should be copied — confirm against QCAR's frame lifetime rules.
- (CGImageRef)createCGImage:(const QCAR::Image *)qcarImage
{
    int width = qcarImage->getWidth();
    int height = qcarImage->getHeight();
    int bitsPerComponent = 8;
    int bitsPerPixel = QCAR::getBitsPerPixel(QCAR::RGB888);
    // Stride uses the buffer width, which may be wider than the visible width.
    int bytesPerRow = qcarImage->getBufferWidth() * bitsPerPixel / bitsPerComponent;
    CGColorSpaceRef colorSpaceRef = CGColorSpaceCreateDeviceRGB();
    CGBitmapInfo bitmapInfo = kCGBitmapByteOrderDefault | kCGImageAlphaNone;
    CGColorRenderingIntent renderingIntent = kCGRenderingIntentDefault;
    CGDataProviderRef provider = CGDataProviderCreateWithData(NULL, qcarImage->getPixels(), QCAR::getBufferSize(width, height, QCAR::RGB888), NULL);
    CGImageRef imageRef = CGImageCreate(width, height, bitsPerComponent, bitsPerPixel, bytesPerRow, colorSpaceRef, bitmapInfo, provider, NULL, NO, renderingIntent);
    CGDataProviderRelease(provider);
    CGColorSpaceRelease(colorSpaceRef);
    // CF objects respond to NSObject messages at runtime; this hands ownership
    // of the +1 CGImage to the current autorelease pool (pre-ARC idiom).
    return (CGImageRef)[(id)imageRef autorelease];
}

// Lazily create the framebuffer/renderbuffer pair backed by our CAEAGLLayer.
// Must run on the main thread because -renderbufferStorage:fromDrawable:
// touches the layer. Idempotent: does nothing once framebuffer_ exists.
- (void)createFrameBuffer {
    // BUG FIX: the original `if` had no braces, so only setCurrentContext was
    // guarded and the GL calls ran unconditionally — before any context was
    // current on the first invocation. The whole body belongs inside the guard.
    if (context_ && !framebuffer_) {
        [EAGLContext setCurrentContext:context_];
        glGenFramebuffers(1, &framebuffer_);
        glGenRenderbuffers(1, &renderbuffer_);
        glBindFramebuffer(GL_FRAMEBUFFER, framebuffer_);
        glBindRenderbuffer(GL_RENDERBUFFER, renderbuffer_);
        [context_ renderbufferStorage:GL_RENDERBUFFER fromDrawable:(CAEAGLLayer *)self.layer];
        glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_RENDERBUFFER, renderbuffer_);
    }
}

// Runs on the QCAR render thread. Grabs the RGB camera frame, decides whether
// the overlay video should play, and computes the CATransform3D that maps the
// video layer onto the tracked target. Results are stored in ivars that
// -renderFrameQCAR later pushes to the layers on the main thread.
- (void)render {
    // Ask QCAR to deliver camera frames in RGB888 so we can wrap them in a CGImage.
    QCAR::setFrameFormat(QCAR::RGB888, true);
    QCAR::State state = QCAR::Renderer::getInstance().begin();
    QCAR::Frame ff = state.getFrame();
    if (ff.getNumImages() <= 0) {
        QCAR::Renderer::getInstance().end();
        return;
    }
    // Find the RGB888 image among the frame's images and publish it.
    for (int i = 0; i < ff.getNumImages(); i++) {
        const QCAR::Image *qcarImage = ff.getImage(i);
        if (qcarImage->getFormat() == QCAR::RGB888) {
            self.cameraImage = (id)[self createCGImage:qcarImage];
            break;
        }
    }
    // No target visible (or video not ready yet): pause and hide the overlay.
    if (state.getNumActiveTrackables() == 0 || !videoInitialized) {
        [player pause];
        hideVideo = YES;
        QCAR::Renderer::getInstance().end();
        return;
    }
    hideVideo = NO;
    if (player.rate == 0) {
        [player play];
    }
    // Get the trackable (guard against a stale/empty slot just in case).
    const QCAR::Trackable *trackable = state.getActiveTrackable(0);
    if (trackable == NULL) {
        QCAR::Renderer::getInstance().end();
        return;
    }
    QCAR::Matrix44F modelViewMatrix = QCAR::Tool::convertPose2GLMatrix(trackable->getPose());
    CGFloat ScreenScale = [[UIScreen mainScreen] scale];
    // Half the viewport in points: maps GL clip space [-1,1] to layer points.
    float xscl = qUtils.viewport.sizeX / ScreenScale / 2;
    float yscl = qUtils.viewport.sizeY / ScreenScale / 2;
    QCAR::Matrix44F scalingMatrix = {xscl, 0,    0, 0,
                                     0,    yscl, 0, 0,
                                     0,    0,    1, 0,
                                     0,    0,    0, 1};
    // GL's Y axis points up, CoreAnimation's points down; flip on both sides.
    QCAR::Matrix44F flipY = {1,  0, 0, 0,
                             0, -1, 0, 0,
                             0,  0, 1, 0,
                             0,  0, 0, 1};
    // Compose: flipY * scaling * projection * flipY * (pose translated +3 in Z).
    ShaderUtils::translatePoseMatrix(0.0f, 0.0f, 3, &modelViewMatrix.data[0]);
    ShaderUtils::multiplyMatrix(&modelViewMatrix.data[0], &flipY.data[0], &modelViewMatrix.data[0]);
    ShaderUtils::multiplyMatrix(&qUtils.projectionMatrix.data[0], &modelViewMatrix.data[0], &modelViewMatrix.data[0]);
    ShaderUtils::multiplyMatrix(&scalingMatrix.data[0], &modelViewMatrix.data[0], &modelViewMatrix.data[0]);
    ShaderUtils::multiplyMatrix(&flipY.data[0], &modelViewMatrix.data[0], &modelViewMatrix.data[0]);
    transform = [self GLtoCATransform3D:modelViewMatrix];
    QCAR::Renderer::getInstance().end();
}

// Set up the AVPlayer/AVPlayerLayer for the bundled overlay movie and load its
// tracks asynchronously; videoInitialized flips to YES on the main queue once
// the "tracks" key is ready.
- (void)initVideo {
    NSString *path = [[NSBundle mainBundle] pathForResource:@"video.mp4" ofType:nil];
    if (path == nil) {
        // BUG FIX: fileURLWithPath: throws on nil; bail out gracefully instead.
        NSLog(@"video.mp4 not found in the main bundle");
        return;
    }
    NSURL *url = [NSURL fileURLWithPath:path];
    AVURLAsset *avasset = [[AVURLAsset alloc] initWithURL:url options:nil];
    AVPlayerItem *item = [[AVPlayerItem alloc] initWithAsset:avasset];
    player = [[AVPlayer alloc] initWithPlayerItem:item];
    playerLayer = [[AVPlayerLayer playerLayerWithPlayer:player] retain];
    CGSize size = self.bounds.size;
    // Center a 374x250 pt video layer in the view.
    float x = size.width / 2.0 - 187.0;
    float y = size.height / 2.0 - 125.0;
    playerLayer.frame = CGRectMake(x, y, 374, 250);
    playerLayer.backgroundColor = [UIColor blackColor].CGColor;
    [cameraLayer addSublayer:playerLayer];
    playerLayer.hidden = hideVideo;
    transform = CATransform3DIdentity;
    NSString *tracksKey = @"tracks";
    [avasset loadValuesAsynchronouslyForKeys:[NSArray arrayWithObject:tracksKey] completionHandler:
     ^{
         dispatch_async(dispatch_get_main_queue(),
                        ^{
                            NSError *error = nil;
                            AVKeyValueStatus status = [avasset statusOfValueForKey:tracksKey error:&error];
                            if (status == AVKeyValueStatusLoaded) {
                                videoInitialized = YES;
                            }
                            else {
                                // You should deal with the error appropriately.
                                NSLog(@"The asset's tracks were not loaded:\n%@", [error localizedDescription]);
                            }
                        });
     }];
    // BUG FIX (MRC leak): item is retained by the player, avasset by the item
    // (and by the copied completion block), so drop our +1 references.
    [item release];
    [avasset release];
}

// Test to see if the screen has hi-res mode (Retina: scale factor of 2.0 and
// an OS new enough to support CADisplayLink).
- (BOOL)isRetinaEnabled
{
    return ([[UIScreen mainScreen] respondsToSelector:@selector(displayLinkWithTarget:selector:)]
            &&
            ([UIScreen mainScreen].scale == 2.0));
}

// Use to allow this view to access loaded textures. The array is assigned,
// not retained — the caller keeps ownership.
- (void)useTextures:(NSMutableArray *)theTextures
{
    textures = theTextures;
}

#pragma mark ---- view lifecycle ----

// Designated initializer: configure the EAGL layer, create the GL ES 2
// context, add the camera layer, and kick off video setup.
- (id)initWithFrame:(CGRect)frame
{
    self = [super initWithFrame:frame];
    if (self) {
        qUtils = [QCARutils getInstance];
        textureList = [[NSMutableArray alloc] initWithCapacity:2];
        // Switch on hi-res mode if available.
        if ([self isRetinaEnabled]) {
            self.contentScaleFactor = 2.0f;
            qUtils.contentScalingFactor = self.contentScaleFactor;
        }
        qUtils.QCARFlags = QCAR::GL_20;
        CAEAGLLayer *layer = (CAEAGLLayer *)self.layer;
        layer.drawableProperties = [NSDictionary dictionaryWithObjectsAndKeys:
                                    kEAGLColorFormatRGBA8, kEAGLDrawablePropertyColorFormat,
                                    nil];
        context_ = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES2];
        frameCount = 0;
        cameraLayer = [CALayer layer]; // retained by the layer hierarchy below
        cameraLayer.contentsGravity = kCAGravityResizeAspectFill;
        cameraLayer.frame = self.layer.bounds;
        [self.layer addSublayer:cameraLayer];
        hideVideo = YES;
        [self initVideo];
        NSLog(@"QCAR OpenGL flag: %d", qUtils.QCARFlags);
    }
    return self;
}

- (void)dealloc {
    if (framebuffer_) {
        glDeleteFramebuffers(1, &framebuffer_);
        // BUG FIX: renderbuffers must be deleted with glDeleteRenderbuffers,
        // not glDeleteFramebuffers.
        glDeleteRenderbuffers(1, &renderbuffer_);
    }
    if ([EAGLContext currentContext] == context_) {
        [EAGLContext setCurrentContext:nil];
    }
    [context_ release];
    [textureList release];
    [cameraImage release];
    // BUG FIX (MRC leaks): player and playerLayer are owned (+1) by this view.
    [playerLayer release];
    [player release];
    [super dealloc];
}

- (void)postInitQCAR {
    // These two calls to setHint tell QCAR to split work over multiple
    // frames. Depending on your requirements you can opt to omit these.
    QCAR::setHint(QCAR::HINT_IMAGE_TARGET_MULTI_FRAME_ENABLED, 1);
    QCAR::setHint(QCAR::HINT_IMAGE_TARGET_MILLISECONDS_PER_MULTI_FRAME, 40);
    // Here we could also make a QCAR::setHint call to set the maximum
    // number of simultaneous targets
    // QCAR::setHint(QCAR::HINT_MAX_SIMULTANEOUS_IMAGE_TARGETS, 2);
}

////////////////////////////////////////////////////////////////////////////////
// Draw the current frame using OpenGL
//
// *** QCAR calls this on a single background thread. *** The GL framebuffer is
// created lazily on the main thread, the tracking work happens here, and the
// resulting image/transform/visibility are pushed to the layers on the main
// queue inside a non-animating CATransaction.
- (void)renderFrameQCAR {
    if (!framebuffer_) {
        // Buffer creation must touch the CAEAGLLayer, so hop to main and wait.
        [self performSelectorOnMainThread:@selector(createFrameBuffer) withObject:nil waitUntilDone:YES];
    }
    [EAGLContext setCurrentContext:context_];
    glBindFramebuffer(GL_FRAMEBUFFER, framebuffer_);
    glBindRenderbuffer(GL_RENDERBUFFER, renderbuffer_);
    // Skip the first few frames as warm-up while the camera/tracker settles.
    if (frameCount < 5) {
        frameCount++;
        return;
    }
    [self render];
    dispatch_async(dispatch_get_main_queue(), ^{
        // Disable implicit animations so the overlay snaps to the new pose.
        [CATransaction begin];
        [CATransaction setValue:(id)kCFBooleanTrue
                         forKey:kCATransactionDisableActions];
        cameraLayer.contents = cameraImage;
        playerLayer.transform = transform;
        playerLayer.hidden = hideVideo;
        [CATransaction commit];
    });
}

@end