//=============================================================================================================================
//
// Copyright (c) 2015-2019 VisionStar Information Technology (Shanghai) Co., Ltd. All Rights Reserved.
// EasyAR is the registered trademark or trademark of VisionStar Information Technology (Shanghai) Co., Ltd in China
// and other countries for the augmented reality technology developed by VisionStar Information Technology (Shanghai) Co., Ltd.
//
//=============================================================================================================================

#import "helloar.h"

@import CoreImage;

#import "RecorderRenderer.h"
#import "BGRenderer.h"
// EasyAR framework and OpenGL ES imports go here (the angle-bracket targets were elided in this listing)
#import "VideoRenderer.h"
#import "ARVideo.h"

easyar_DelayedCallbackScheduler * scheduler;
easyar_CameraDevice * camera;
BGRenderer * bgRenderer;
RecorderRenderer * recorder_renderer;
easyar_Recorder * recorder;
int view_size[] = {0, 0};
bool recording_started = false;
easyar_InputFrameThrottler * throttler;
easyar_InputFrameToOutputFrameAdapter * i2OAdapter;
easyar_OutputFrameBuffer * outputFrameBuffer;
int previousInputFrameIndex = -1;
easyar_FeedbackFrameFork * feedBackFrameFork;
NSMutableArray * trackers = nil;
NSMutableArray * video_renderers = nil;
VideoRenderer * current_video_renderer = nil;
int tracked_target = 0;
int active_target = 0;
ARVideo * video = nil;
easyar_InputFrameFork * inputFrameFork;
easyar_OutputFrameJoin * join;
easyar_InputFrameToFeedbackFrameAdapter * i2FAdapter;
easyar_OutputFrameFork * outputFrameFork;

void createScheduler()
{
    scheduler = [easyar_DelayedCallbackScheduler create];
}

easyar_DelayedCallbackScheduler * getScheduler()
{
    return scheduler;
}

void recreate_context()
{
    if (active_target != 0) {
        [video onLost];
        video = nil;
        tracked_target = 0;
        active_target = 0;
    }
    bgRenderer = nil;
    recorder_renderer = nil;
    video_renderers = nil;
    current_video_renderer = nil;
    previousInputFrameIndex = -1;
    bgRenderer = [BGRenderer create];
    recorder_renderer = [RecorderRenderer create];
    video_renderers = [[NSMutableArray alloc] init];
    for (int k = 0; k < 3; k += 1) {
        VideoRenderer * video_renderer = [[VideoRenderer alloc] init];
        [video_renderer init_];
        [video_renderers addObject:video_renderer];
    }
}

void loadFromImage(easyar_ImageTracker * tracker, NSString * path, NSString * name)
{
    easyar_ImageTarget * target = [easyar_ImageTarget createFromImageFile:path storageType:easyar_StorageType_Assets name:name uid:@"" meta:@"" scale:1];
    [tracker loadTarget:target callbackScheduler:scheduler callback:^(easyar_Target * target, bool status) {
        NSLog(@"load target (%d): %@ (%d)", status, [target name], [target runtimeID]);
    }];
}

void initialize()
{
    recreate_context();

    throttler = [easyar_InputFrameThrottler create];
    inputFrameFork = [easyar_InputFrameFork create:2];
    outputFrameBuffer = [easyar_OutputFrameBuffer create];
    i2OAdapter = [easyar_InputFrameToOutputFrameAdapter create];
    i2FAdapter = [easyar_InputFrameToFeedbackFrameAdapter create];
    outputFrameFork = [easyar_OutputFrameFork create:2];

    camera = [easyar_CameraDeviceSelector createCameraDevice:easyar_CameraDevicePreference_PreferObjectSensing];
    if (![camera openWithType:easyar_CameraDeviceType_Default]) { return; }
    [camera setFocusMode:easyar_CameraDeviceFocusMode_Continousauto];
    [camera setSize:[easyar_Vec2I create:@[@1280, @720]]];

    easyar_ImageTracker * tracker = [easyar_ImageTracker create];
    loadFromImage(tracker, @"namecard.jpg", @"namecard");
    loadFromImage(tracker, @"argame00.jpg", @"argame");
    loadFromImage(tracker, @"idback.jpg", @"idback");
    trackers = [[NSMutableArray alloc] init];
    [trackers addObject:tracker];
    feedBackFrameFork = [easyar_FeedbackFrameFork create:(int)[trackers count]];
    join = [easyar_OutputFrameJoin create:(int)(trackers.count + 1)];

    [[camera inputFrameSource] connect:[throttler input]];
    [[throttler output] connect:[inputFrameFork input]];
    [[inputFrameFork output:0] connect:[i2OAdapter input]];
    [[i2OAdapter output] connect:[join input:0]];
    [[inputFrameFork output:1] connect:[i2FAdapter input]];
    [[i2FAdapter output] connect:[feedBackFrameFork input]];
    for (int i = 0; i < trackers.count; i++) {
        [[feedBackFrameFork output:i] connect:[trackers[i] feedbackFrameSink]];
        [[trackers[i] outputFrameSource] connect:[join input:i + 1]];
    }
    [[join output] connect:[outputFrameFork input]];
    [[outputFrameFork output:0] connect:[outputFrameBuffer input]];
    [[outputFrameFork output:1] connect:[i2FAdapter sideInput]];
    [[outputFrameBuffer signalOutput] connect:[throttler signalInput]];
}
void finalize()
{
    recorder = nil;
    recorder_renderer = nil;
    video = nil;
    tracked_target = 0;
    active_target = 0;
    [trackers removeAllObjects];
    [video_renderers removeAllObjects];
    current_video_renderer = nil;
    throttler = nil;
    bgRenderer = nil;
    scheduler = nil;
    camera = nil;
    inputFrameFork = nil;
    outputFrameBuffer = nil;
    i2OAdapter = nil;
    i2FAdapter = nil;
    outputFrameFork = nil;
}

BOOL start()
{
    BOOL status = YES;
    if (camera != nil) {
        status &= [camera start];
    } else {
        status = NO;
    }
    for (easyar_ImageTracker * tracker in trackers) {
        status &= [tracker start];
    }
    printf("start() status = %d\n", status);
    return status;
}

void stop()
{
    for (easyar_ImageTracker * tracker in trackers) {
        [tracker stop];
    }
    if (camera != nil) {
        [camera stop];
    }
}

// Marker tracking part: draw the video attached to the first tracked target of each tracker result
// and keep the global playback state (video, tracked_target, active_target) in sync.
// Shared by the recording and preview paths in render().
static void renderTrackedTargets(easyar_OutputFrame * oFrame, easyar_Matrix44F * projection)
{
    NSArray * results = [oFrame results];
    for (int i = 0; i < [results count]; i++) {
        easyar_FrameFilterResult * _result = results[i];
        easyar_ImageTrackerResult * result = [_result isEqual:[NSNull null]] ? nil : (easyar_ImageTrackerResult *)_result;
        if (result == nil) { continue; }
        NSArray * targetInstances = [result targetInstances];
        if ([targetInstances count] > 0) {
            easyar_TargetInstance * targetInstance = [targetInstances objectAtIndex:0];
            easyar_Target * target = [targetInstance target];
            int status = [targetInstance status];
            if (status == easyar_TargetStatus_Tracked) {
                int runtimeID = [target runtimeID];
                if (active_target != 0 && active_target != runtimeID) {
                    [video onLost];
                    video = nil;
                    tracked_target = 0;
                    active_target = 0;
                }
                if (tracked_target == 0) {
                    if (video == nil && [video_renderers count] > 0) {
                        NSString * target_name = [target name];
                        if ([target_name isEqualToString:@"argame"] && [[video_renderers objectAtIndex:0] texid] != 0) {
                            video = [[ARVideo alloc] init];
                            [video openVideoFile:@"video.mp4" texid:[[video_renderers objectAtIndex:0] texid] scheduler:scheduler];
                            current_video_renderer = [video_renderers objectAtIndex:0];
                        } else if ([target_name isEqualToString:@"namecard"] && [[video_renderers objectAtIndex:1] texid] != 0) {
                            video = [[ARVideo alloc] init];
                            [video openTransparentVideoFile:@"transparentvideo.mp4" texid:[[video_renderers objectAtIndex:1] texid] scheduler:scheduler];
                            current_video_renderer = [video_renderers objectAtIndex:1];
                        } else if ([target_name isEqualToString:@"idback"] && [[video_renderers objectAtIndex:2] texid] != 0) {
                            video = [[ARVideo alloc] init];
                            [video openStreamingVideo:@"https://sightpvideo-cdn.sightp.com/sdkvideo/EasyARSDKShow201520.mp4" texid:[[video_renderers objectAtIndex:2] texid] scheduler:scheduler];
                            current_video_renderer = [video_renderers objectAtIndex:2];
                        }
                    }
                    if (video != nil) {
                        [video onFound];
                        tracked_target = runtimeID;
                        active_target = runtimeID;
                    }
                }
                easyar_ImageTarget * imagetarget = [target isKindOfClass:[easyar_ImageTarget class]] ? (easyar_ImageTarget *)target : nil;
                if (imagetarget != nil && current_video_renderer != nil) {
                    [video update];
                    NSArray * images = imagetarget.images;
                    if (images.count > 0) {
                        // Scale the video quad to the physical size and aspect ratio of the target image
                        easyar_Image * targetImg = images[0];
                        int width = [targetImg width];
                        int height = [targetImg height];
                        float targetScale = [imagetarget scale];
                        easyar_Vec2F * scale;
                        if (width < height) {
                            scale = [easyar_Vec2F create:@[@(targetScale * width / height), @(targetScale)]];
                        } else {
                            scale = [easyar_Vec2F create:@[@(targetScale), @(targetScale * height / width)]];
                        }
                        if ([video isRenderTextureAvailable]) {
                            [current_video_renderer render:projection cameraview:[targetInstance pose] size:scale];
                        }
                    }
                }
            }
        } else {
            if (tracked_target != 0) {
                [video onLost];
                tracked_target = 0;
            }
        }
    }
}

void render(int width, int height, int screenRotation, void (^rebindFrameBuffer)())
{
    view_size[0] = width;
    view_size[1] = height;
    while ([scheduler runOne]) {
    }

    glViewport(0, 0, width, height);
    glClearColor(0.f, 0.f, 0.f, 1.f);

    easyar_OutputFrame * oFrame = [outputFrameBuffer peek];
    if (oFrame == nil) { return; }
    easyar_InputFrame * iFrame = [oFrame inputFrame];
    if (iFrame == nil) { return; }
    if (![iFrame hasCameraParameters]) { return; }

    easyar_CameraParameters * cameraParameters = [iFrame cameraParameters];
    float viewport_aspect_ratio = (float)width / (float)height;
    easyar_Matrix44F * projection = [cameraParameters projection:0.01f farPlane:1000.f viewportAspectRatio:viewport_aspect_ratio screenRotation:screenRotation combiningFlip:true manualHorizontalFlip:false];
    easyar_Matrix44F * imageProjection = [cameraParameters imageProjection:viewport_aspect_ratio screenRotation:screenRotation combiningFlip:true manualHorizontalFlip:false];
    easyar_Image * image = [iFrame image];
    if ([iFrame index] != previousInputFrameIndex) {
        [bgRenderer upload:[image format] width:[image width] height:[image height] bufferData:[[image buffer] data]];
        previousInputFrameIndex = [iFrame index];
    }

    if (recording_started) {
        // Switch to the render texture in which recording will be performed
        [recorder_renderer preRender];
        // Render camera background
        [bgRenderer render:imageProjection];
        // Marker tracking part
        renderTrackedTargets(oFrame, projection);
        // Switch back to the on-screen framebuffer and pass the recorded pixels to the video codec
        rebindFrameBuffer();
        [recorder_renderer postRender];
        [recorder updateFrame:[easyar_TextureId fromInt:[recorder_renderer getTextureId]] width:view_size[0] height:view_size[1]];
    } else {
        // Render camera background
        [bgRenderer render:imageProjection];
        // Marker tracking part
        renderTrackedTargets(oFrame, projection);
    }
}
void requestPermissions(void (^callback)(easyar_PermissionStatus status, NSString * msg))
{
    [easyar_Recorder requestPermissions:[easyar_ImmediateCallbackScheduler getDefault] permissionCallback:callback];
}

void startRecording(NSString * path, void (^callback)(easyar_RecordStatus status, NSString * value))
{
    if (recording_started) { return; }
    easyar_RecorderConfiguration * conf = [easyar_RecorderConfiguration create];
    [conf setOutputFile:path];
    [conf setZoomMode:easyar_RecordZoomMode_ZoomInWithAllContent];
    [conf setProfile:easyar_RecordProfile_Quality_720P_Middle];
    easyar_RecordVideoOrientation mode = view_size[0] < view_size[1] ? easyar_RecordVideoOrientation_Portrait : easyar_RecordVideoOrientation_Landscape;
    [conf setVideoOrientation:mode];
    recorder = [easyar_Recorder create:conf callbackScheduler:scheduler statusCallback:^(easyar_RecordStatus status, NSString * value) {
        if (status == easyar_RecordStatus_OnStopped) {
            recording_started = false;
        }
        callback(status, value);
    }];
    printf("startRecording view_size: (%dx%d)", view_size[0], view_size[1]);
    [recorder_renderer resize:view_size[0] height:view_size[1]];
    [recorder start];
    recording_started = true;
}
void stopRecording()
{
    if (!recording_started) { return; }
    [recorder stop];
    recorder = nil;
    recording_started = false;
}
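
// Usage sketch (editorial example, not part of the original sample; the GLKView-based host and the
// exact call sites are assumptions): the C entry points above are expected to be driven by the
// app's view controller roughly in this order.
//
//   createScheduler();
//   initialize();
//   start();                                                                        // when the view appears
//
//   render(viewWidth, viewHeight, screenRotation, ^{ [glkView bindDrawable]; });    // every GL frame
//
//   requestPermissions(^(easyar_PermissionStatus status, NSString * msg) { /* ... */ });
//   startRecording(outputFilePath, ^(easyar_RecordStatus status, NSString * value) { /* ... */ });
//   stopRecording();
//
//   stop();                                                                         // when the view disappears
//   finalize();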