Advertisement
Alexander_Fedoseev

helloar+video.m

Nov 6th, 2019
326
0
Never
Not a member of Pastebin yet? Sign Up, it unlocks many cool features!
  1. //=============================================================================================================================
  2. //
  3. // Copyright (c) 2015-2019 VisionStar Information Technology (Shanghai) Co., Ltd. All Rights Reserved.
  4. // EasyAR is the registered trademark or trademark of VisionStar Information Technology (Shanghai) Co., Ltd in China
  5. // and other countries for the augmented reality technology developed by VisionStar Information Technology (Shanghai) Co., Ltd.
  6. //
  7. //=============================================================================================================================
  8.  
  9. #import "helloar.h"
  10. @import CoreImage;
  11. #import "RecorderRenderer.h"
  12. #import "BGRenderer.h"
  13.  
  14. #import <easyar/types.oc.h>
  15. #import <easyar/buffer.oc.h>
  16. #import <easyar/camera.oc.h>
  17. #import <easyar/frame.oc.h>
  18. #import <easyar/texture.oc.h>
  19. #import <easyar/recorder.oc.h>
  20. #import <easyar/recorder_configuration.oc.h>
  21. #import <easyar/callbackscheduler.oc.h>
  22. #import <easyar/dataflow.oc.h>
  23. #import <easyar/cameraparameters.oc.h>
  24.  
  25. #include <OpenGLES/ES2/gl.h>
  26.  
  27.  
  28. #import "VideoRenderer.h"
  29. #import "ARVideo.h"
  30. #import <easyar/imagetracker.oc.h>
  31. #import <easyar/imagetarget.oc.h>
  32.  
// ---- File-scope pipeline state (shared by all functions below) ----

// Marshals SDK callbacks (target loads, recorder status, video events)
// onto the render thread; drained by runOne in render().
easyar_DelayedCallbackScheduler * scheduler;
// Physical camera feeding the dataflow pipeline.
easyar_CameraDevice * camera;
// Draws the camera image as the scene background.
BGRenderer * bgRenderer;
// Offscreen render target used while recording (preRender/postRender).
RecorderRenderer * recorder_renderer;
// Video encoder; non-nil only between startRecording() and stopRecording().
easyar_Recorder * recorder;
// Current view size in pixels: {width, height}; updated every render().
int view_size[] = {0, 0};
// True while a recording session is active.
bool recording_started = false;
// Back-pressure valve: holds camera frames until the buffer signals readiness.
easyar_InputFrameThrottler * throttler;
// Passes raw input frames straight through to the output join (branch 0).
easyar_InputFrameToOutputFrameAdapter * i2OAdapter;
// Holds the latest joined output frame for render() to peek.
easyar_OutputFrameBuffer * outputFrameBuffer;
// Index of the last camera frame uploaded to the background renderer;
// -1 forces a re-upload after a context rebuild.
int previousInputFrameIndex = -1;
// Fans feedback frames out to each tracker.
easyar_FeedbackFrameFork * feedBackFrameFork;

// One image tracker per entry (sample uses a single tracker with 3 targets).
NSMutableArray<easyar_ImageTracker *> * trackers = nil;
// One renderer per known target; index matches the target branch in render().
NSMutableArray<VideoRenderer *> * video_renderers = nil;
// Renderer bound to the video currently playing, if any.
VideoRenderer * current_video_renderer = nil;
// runtimeID of the target currently tracked (0 = none).
int tracked_target = 0;
// runtimeID of the target whose video is loaded (survives brief tracking loss).
int active_target = 0;
// Video player attached to the active target, if any.
ARVideo * video = nil;

// Splits input frames between the output-adapter and feedback-adapter branches.
easyar_InputFrameFork * inputFrameFork;
// Joins the raw frame with each tracker's results into one output frame.
easyar_OutputFrameJoin * join;
// Combines the previous output with new input into feedback frames.
easyar_InputFrameToFeedbackFrameAdapter * i2FAdapter;
// Splits joined output between the buffer and the feedback side input.
easyar_OutputFrameFork * outputFrameFork;
  57.  
  58. void createScheduler()
  59. {
  60.     scheduler = [easyar_DelayedCallbackScheduler create];
  61. }
  62. easyar_DelayedCallbackScheduler * getScheduler()
  63. {
  64.     return scheduler;
  65. }
  66.  
  67. void recreate_context()
  68. {
  69.     if (active_target != 0) {
  70.         [video onLost];
  71.         video = nil;
  72.         tracked_target = 0;
  73.         active_target = 0;
  74.     }
  75.    
  76.     bgRenderer = nil;
  77.     video_renderers = nil;
  78.     current_video_renderer = nil;
  79.     previousInputFrameIndex = -1;
  80.     bgRenderer = [BGRenderer create];
  81.     recorder_renderer = [RecorderRenderer create];
  82.     video_renderers = [[NSMutableArray<VideoRenderer *> alloc] init];
  83.     for (int k = 0; k < 3; k += 1) {
  84.         VideoRenderer * video_renderer = [[VideoRenderer alloc] init];
  85.         [video_renderer init_];
  86.         [video_renderers addObject:video_renderer];
  87.     }
  88.    
  89.     recorder_renderer = nil;
  90. }
  91.  
  92. void loadFromImage(easyar_ImageTracker * tracker, NSString * path, NSString * name)
  93. {
  94.     easyar_ImageTarget * target = [easyar_ImageTarget createFromImageFile:path storageType:easyar_StorageType_Assets name:name uid:@"" meta:@"" scale:1];
  95.     [tracker loadTarget:target callbackScheduler:scheduler callback:^(easyar_Target * target, bool status) {
  96.         NSLog(@"load target (%d): %@ (%d)", status, [target name], [target runtimeID]);
  97.     }];
  98. }
  99.  
  100.  
/// Builds the capture/track pipeline once at startup. Dataflow graph:
///
///   camera -> throttler -> inputFrameFork
///     fork[0] -> i2OAdapter -----------------------------> join[0]
///     fork[1] -> i2FAdapter -> feedBackFrameFork[i] -> trackers[i] -> join[i+1]
///   join -> outputFrameFork
///     fork[0] -> outputFrameBuffer   (peeked by render())
///     fork[1] -> i2FAdapter side input (feeds last results back as feedback)
///   outputFrameBuffer signal -> throttler signal (back-pressure)
///
/// The connect order below mirrors the graph top-to-bottom; keep it intact.
void initialize()
{
    recreate_context();

    throttler = [easyar_InputFrameThrottler create];
    inputFrameFork = [easyar_InputFrameFork create:2];
    outputFrameBuffer = [easyar_OutputFrameBuffer create];
    i2OAdapter = [easyar_InputFrameToOutputFrameAdapter create];
    i2FAdapter = [easyar_InputFrameToFeedbackFrameAdapter create];
    outputFrameFork = [easyar_OutputFrameFork create:2];

    camera = [easyar_CameraDeviceSelector createCameraDevice: easyar_CameraDevicePreference_PreferObjectSensing];
    // If the camera cannot be opened, leave the graph unwired; start() will
    // then report failure because camera start is never attempted successfully.
    if (![camera openWithType:easyar_CameraDeviceType_Default]) { return; }
    [camera setFocusMode:easyar_CameraDeviceFocusMode_Continousauto];
    [camera setSize:[easyar_Vec2I create:@[@1280, @720]]];

    easyar_ImageTracker * tracker = [easyar_ImageTracker create];
    // Targets load asynchronously from the asset bundle (see loadFromImage).
    loadFromImage(tracker, @"namecard.jpg", @"namecard");
    loadFromImage(tracker, @"argame00.jpg", @"argame");
    loadFromImage(tracker, @"idback.jpg", @"idback");
    trackers = [[NSMutableArray<easyar_ImageTracker *> alloc] init];
    [trackers addObject:tracker];

    // One feedback branch per tracker; the join has one extra input (index 0)
    // for the raw pass-through frame.
    feedBackFrameFork = [easyar_FeedbackFrameFork create:(int)[trackers count]];
    join = [easyar_OutputFrameJoin create:(int)(trackers.count + 1)];
    [[camera inputFrameSource] connect:[throttler input]];
    [[throttler output] connect:[inputFrameFork input]];
    [[inputFrameFork output:0] connect:[i2OAdapter input]];
    [[i2OAdapter output] connect:[join input:0]];
    [[inputFrameFork output:1] connect:[i2FAdapter input]];
    [[i2FAdapter output] connect:[feedBackFrameFork input]];

    for (int i = 0; i < trackers.count; i++) {
        [[feedBackFrameFork output:i] connect:trackers[i].feedbackFrameSink];
        [[trackers[i] outputFrameSource] connect:[join input:i+1]];
    }

    [[join output] connect:[outputFrameFork input]];
    [[outputFrameFork output:0] connect:[outputFrameBuffer input]];
    // Feed results back so the trackers can use the previous frame's output.
    [[outputFrameFork output:1] connect:[i2FAdapter sideInput]];
    // Back-pressure: only let new camera frames through once the buffer is read.
    [[outputFrameBuffer signalOutput] connect:[throttler signalInput]];
}
  143.  
/// Tears down the entire pipeline. Under ARC, nil-ing each global releases
/// the component; the ordering (recorder first, camera and dataflow nodes
/// last) mirrors the reverse of initialize()'s construction order.
void finalize()
{
    recorder = nil;
    recorder_renderer = nil;

    video = nil;
    tracked_target = 0;
    active_target = 0;

    [trackers removeAllObjects];
    [video_renderers removeAllObjects];
    current_video_renderer = nil;
    throttler = nil;
    bgRenderer = nil;
    scheduler = nil;
    camera = nil;
    inputFrameFork = nil;
    outputFrameBuffer = nil;
    i2OAdapter = nil;
    i2FAdapter = nil;
    outputFrameFork = nil;
}
  166.  
  167. BOOL start()
  168. {
  169.     bool status = true;
  170.     if (camera != nil) {
  171.         status &= [camera start];
  172.     } else {
  173.         status = NO;
  174.     }
  175.     for (easyar_ImageTracker * tracker in trackers) {
  176.         status &= [tracker start];
  177.     }
  178.    
  179.     printf("start() status = %d\n", status);
  180.     return status;
  181. }
  182.  
  183. void stop()
  184. {
  185.     for (easyar_ImageTracker * tracker in trackers) {
  186.         [tracker stop];
  187.     }
  188.     if (camera != nil) {
  189.         [camera stop];
  190.     }
  191. }
  192.  
  193. void render(int width, int height, int screenRotation, void (^rebindFrameBuffer)())
  194. {
  195.     view_size[0] = width;
  196.     view_size[1] = height;
  197.  
  198.     while ([scheduler runOne])
  199.     {
  200.     }
  201.  
  202.     glViewport(0, 0, width, height);
  203.     glClearColor(0.f, 0.f, 0.f, 1.f);
  204.  
  205.     easyar_OutputFrame * oFrame = [outputFrameBuffer peek];
  206.     if (oFrame == nil){ return; }
  207.     easyar_InputFrame * iFrame = [oFrame inputFrame];
  208.     if (iFrame == nil) { return; }
  209.     if (![iFrame hasCameraParameters]) { return; }
  210.     easyar_CameraParameters * cameraParameters = [iFrame cameraParameters];
  211.     float viewport_aspect_ratio = (float)width / (float)height;
  212.     easyar_Matrix44F * projection = [cameraParameters projection:0.01f farPlane:1000.f viewportAspectRatio:viewport_aspect_ratio screenRotation:screenRotation combiningFlip:true manualHorizontalFlip:false];
  213.     easyar_Matrix44F * imageProjection = [cameraParameters imageProjection:viewport_aspect_ratio screenRotation:screenRotation combiningFlip:true manualHorizontalFlip:false];
  214.     easyar_Image * image = [iFrame image];
  215.  
  216.    
  217.    
  218.     if ([iFrame index] != previousInputFrameIndex) {
  219.         [bgRenderer upload:[image format] width:[image width] height:[image height] bufferData:[[image buffer] data]];
  220.         previousInputFrameIndex = [iFrame index];
  221.     }
  222.  
  223.     //Recording
  224.     if (recording_started)
  225.     {
  226.         //Switch to render texture in wich recording will be performed
  227.         [recorder_renderer preRender];
  228.        
  229.         //Render camera background
  230.         [bgRenderer render:imageProjection];
  231.        
  232.        
  233.         //Marker tracking part
  234.         NSArray<easyar_FrameFilterResult *> * results = [oFrame results];
  235.         for (int i = 0; i < [results count]; i++) {
  236.             easyar_ImageTrackerResult * result = nil;
  237.             easyar_FrameFilterResult * _result = results[i];
  238.             result = ([_result isEqual:[NSNull null]])? nil: (easyar_ImageTrackerResult * )_result;
  239.             if(result == nil){
  240.                 continue;
  241.             }
  242.             NSArray<easyar_TargetInstance *> * targetInstances = [result targetInstances];
  243.             if ([targetInstances count] > 0) {
  244.                 easyar_TargetInstance * targetInstance = [targetInstances objectAtIndex:0];
  245.                 easyar_Target * target = [targetInstance target];
  246.                 int status = [targetInstance status];
  247.                 if (status == easyar_TargetStatus_Tracked) {
  248.                     int runtimeID = [target runtimeID];
  249.                     if (active_target != 0 && active_target != runtimeID) {
  250.                         [video onLost];
  251.                         video = nil;
  252.                         tracked_target = 0;
  253.                         active_target = 0;
  254.                     }
  255.                     if (tracked_target == 0) {
  256.                         if (video == nil && [video_renderers count] > 0) {
  257.                             NSString * target_name = [target name];
  258.                             if ([target_name isEqualToString:@"argame"] && [[video_renderers objectAtIndex:0] texid] != 0) {
  259.                                 video = [[ARVideo alloc] init];
  260.                                 [video openVideoFile:@"video.mp4" texid:[[video_renderers objectAtIndex:0] texid] scheduler:scheduler];
  261.                                 current_video_renderer = [video_renderers objectAtIndex:0];
  262.                             } else if ([target_name isEqualToString:@"namecard"] && [[video_renderers objectAtIndex:1]     texid] != 0) {
  263.                                 video = [[ARVideo alloc] init];
  264.                                 [video openTransparentVideoFile:@"transparentvideo.mp4" texid:[[video_renderers objectAtIndex:1] texid] scheduler:scheduler];
  265.                                 current_video_renderer = [video_renderers objectAtIndex:1];
  266.                             } else if ([target_name isEqualToString:@"idback"] && [[video_renderers objectAtIndex:2]     texid] != 0) {
  267.                                 video = [[ARVideo alloc] init];
  268.                                 [video openStreamingVideo:@"https://sightpvideo-cdn.sightp.com/sdkvideo/EasyARSDKShow201520.mp4" texid:[[video_renderers objectAtIndex:2] texid]     scheduler:scheduler];
  269.                                 current_video_renderer = [video_renderers objectAtIndex:2];
  270.                             }
  271.                         }
  272.                         if (video != nil) {
  273.                             [video onFound];
  274.                             tracked_target = runtimeID;
  275.                             active_target = runtimeID;
  276.                         }
  277.                     }
  278.                     easyar_ImageTarget * imagetarget = [target isKindOfClass:[easyar_ImageTarget class]] ?     (easyar_ImageTarget *)target : nil;
  279.                     if (imagetarget != nil) {
  280.                         if (current_video_renderer != nil) {
  281.                             [video update];
  282.                            
  283.                             NSArray<easyar_Image *> * images = ((easyar_ImageTarget *) target).images;
  284.                             if(images.count > 0)
  285.                             {
  286.                                 easyar_Image * targetImg = images[0];
  287.                                 int width = [targetImg width];
  288.                                 int height = [targetImg height];
  289.                                 float targtScale = [imagetarget scale];
  290.                                 easyar_Vec2F * scale = [easyar_Vec2F create:@[@(targtScale),@(targtScale)]];
  291.                                 if(width < height)
  292.                                 {
  293.                                     scale = [easyar_Vec2F create:@[@(targtScale * width/height),@(targtScale)]];
  294.                                 }else
  295.                                 {
  296.                                     scale = [easyar_Vec2F create:@[@(targtScale),@(targtScale * height/width)]];
  297.                                 }
  298.                                 if ([video isRenderTextureAvailable]) {
  299.                                     [current_video_renderer render:projection cameraview:[targetInstance pose] size:  scale];
  300.                                 }
  301.                             }
  302.                         }
  303.                     }
  304.                 }
  305.             } else {
  306.                 if (tracked_target != 0) {
  307.                     [video onLost];
  308.                     tracked_target = 0;
  309.                 }
  310.             }
  311.         }
  312.  
  313.        
  314.         rebindFrameBuffer();
  315.        
  316.         [recorder_renderer postRender];
  317.        
  318.        
  319.         //Pass pixels to videoCodec
  320.         [recorder updateFrame:[easyar_TextureId fromInt:[recorder_renderer getTextureId]] width:view_size[0] height:view_size[1]];
  321.        
  322.     } else {
  323.         [bgRenderer render:imageProjection];
  324.        
  325.         NSArray<easyar_FrameFilterResult *> * results = [oFrame results];
  326.         for (int i = 0; i < [results count]; i++) {
  327.             easyar_ImageTrackerResult * result = nil;
  328.             easyar_FrameFilterResult * _result = results[i];
  329.             result = ([_result isEqual:[NSNull null]])? nil: (easyar_ImageTrackerResult * )_result;
  330.             if(result == nil){
  331.                 continue;
  332.             }
  333.             NSArray<easyar_TargetInstance *> * targetInstances = [result targetInstances];
  334.             if ([targetInstances count] > 0) {
  335.                 easyar_TargetInstance * targetInstance = [targetInstances objectAtIndex:0];
  336.                 easyar_Target * target = [targetInstance target];
  337.                 int status = [targetInstance status];
  338.                 if (status == easyar_TargetStatus_Tracked) {
  339.                     int runtimeID = [target runtimeID];
  340.                     if (active_target != 0 && active_target != runtimeID) {
  341.                         [video onLost];
  342.                         video = nil;
  343.                         tracked_target = 0;
  344.                         active_target = 0;
  345.                     }
  346.                     if (tracked_target == 0) {
  347.                         if (video == nil && [video_renderers count] > 0) {
  348.                             NSString * target_name = [target name];
  349.                             if ([target_name isEqualToString:@"argame"] && [[video_renderers objectAtIndex:0] texid] != 0) {
  350.                                 video = [[ARVideo alloc] init];
  351.                                 [video openVideoFile:@"video.mp4" texid:[[video_renderers objectAtIndex:0] texid] scheduler:scheduler];
  352.                                 current_video_renderer = [video_renderers objectAtIndex:0];
  353.                             } else if ([target_name isEqualToString:@"namecard"] && [[video_renderers objectAtIndex:1]     texid] != 0) {
  354.                                 video = [[ARVideo alloc] init];
  355.                                 [video openTransparentVideoFile:@"transparentvideo.mp4" texid:[[video_renderers objectAtIndex:1] texid] scheduler:scheduler];
  356.                                 current_video_renderer = [video_renderers objectAtIndex:1];
  357.                             } else if ([target_name isEqualToString:@"idback"] && [[video_renderers objectAtIndex:2]     texid] != 0) {
  358.                                 video = [[ARVideo alloc] init];
  359.                                 [video openStreamingVideo:@"https://sightpvideo-cdn.sightp.com/sdkvideo/EasyARSDKShow201520.mp4" texid:[[video_renderers objectAtIndex:2] texid]     scheduler:scheduler];
  360.                                 current_video_renderer = [video_renderers objectAtIndex:2];
  361.                             }
  362.                         }
  363.                         if (video != nil) {
  364.                             [video onFound];
  365.                             tracked_target = runtimeID;
  366.                             active_target = runtimeID;
  367.                         }
  368.                     }
  369.                     easyar_ImageTarget * imagetarget = [target isKindOfClass:[easyar_ImageTarget class]] ?     (easyar_ImageTarget *)target : nil;
  370.                     if (imagetarget != nil) {
  371.                         if (current_video_renderer != nil) {
  372.                             [video update];
  373.                            
  374.                             NSArray<easyar_Image *> * images = ((easyar_ImageTarget *) target).images;
  375.                             if(images.count > 0)
  376.                             {
  377.                                 easyar_Image * targetImg = images[0];
  378.                                 int width = [targetImg width];
  379.                                 int height = [targetImg height];
  380.                                 float targtScale = [imagetarget scale];
  381.                                 easyar_Vec2F * scale = [easyar_Vec2F create:@[@(targtScale),@(targtScale)]];
  382.                                 if(width < height)
  383.                                 {
  384.                                     scale = [easyar_Vec2F create:@[@(targtScale * width/height),@(targtScale)]];
  385.                                 }else
  386.                                 {
  387.                                     scale = [easyar_Vec2F create:@[@(targtScale),@(targtScale * height/width)]];
  388.                                 }
  389.                                 if ([video isRenderTextureAvailable]) {
  390.                                     [current_video_renderer render:projection cameraview:[targetInstance pose] size:  scale];
  391.                                 }
  392.                             }
  393.                         }
  394.                     }
  395.                 }
  396.             } else {
  397.                 if (tracked_target != 0) {
  398.                     [video onLost];
  399.                     tracked_target = 0;
  400.                 }
  401.             }
  402.         }
  403.     }
  404.    
  405.    
  406. }
  407.  
  408. void requestPermissions(void(^ callback)(easyar_PermissionStatus status, NSString* msg))
  409. {
  410.     [easyar_Recorder requestPermissions:[easyar_ImmediateCallbackScheduler getDefault] permissionCallback:callback];
  411. }
  412.  
  413. void startRecording(NSString * path, void(^ callback)(easyar_RecordStatus status, NSString *value))
  414. {
  415.     if (recording_started) { return; }
  416.     easyar_RecorderConfiguration * conf = [easyar_RecorderConfiguration create];
  417.  
  418.     [conf setOutputFile:path];
  419.     [conf setZoomMode:easyar_RecordZoomMode_ZoomInWithAllContent];
  420.     [conf setProfile:easyar_RecordProfile_Quality_720P_Middle];
  421.     easyar_RecordVideoOrientation mode = view_size[0] < view_size[1]? easyar_RecordVideoOrientation_Portrait :easyar_RecordVideoOrientation_Landscape;
  422.     [conf setVideoOrientation:(mode)];
  423.  
  424.     recorder = [easyar_Recorder create:conf callbackScheduler:scheduler statusCallback:^(easyar_RecordStatus status, NSString *value) {
  425.         if (status == easyar_RecordStatus_OnStopped) {
  426.             recording_started = false;
  427.         }
  428.         callback(status, value);
  429.     }];
  430.    
  431.     printf("startRecording view_size: (%dx%d)", view_size[0], view_size[1]);
  432.    
  433.     [recorder_renderer resize:view_size[0] height:view_size[1]];
  434.     [recorder start];
  435.     recording_started = true;
  436. }
  437.  
  438. void stopRecording()
  439. {
  440.     if (!recording_started) { return; }
  441.     [recorder stop];
  442.     recorder = nil;
  443.     recording_started = false;
  444. }
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement