Guest User

OculusRoomTiny DX11 (updated)

a guest
May 28th, 2016
360
0
Never
Not a member of Pastebin yet? Sign Up, it unlocks many cool features!
C++ 15.67 KB | None | 0 0
  1. /// Use WASD keys to move around, and cursor keys to look around & up/down.
  2. /// Dismiss the health and safety warning by tapping the headset or pressing any key.
  3.  
  4.  
  5.  
  6. // DirectX
  7. #include "../../OculusRoomTiny_Advanced/Common/Win32_DirectXAppUtil.h"
  8.  
  9. // Oculus SDK
  10. #pragma comment(lib, "Z:\\dev\\OculusSDK\\LibOVR\\Lib\\Windows\\Win32\\Release\\VS2015\\LibOVR.lib")
  11. #include "Z:\dev\OculusSDK\LibOVR\Include\OVR_CAPI_D3D.h"
  12.  
  13.  
  14. //------------------------------------------------------------
  15. // ovrSwapTextureSet wrapper class that also maintains the render target views
  16. // needed for D3D11 rendering.
  17. struct OculusTexture
  18. {
  19.    ovrSession               m_session;
  20.    ovrTextureSwapChain      m_texture_chain;
  21.    std::vector<ID3D11RenderTargetView*> m_tex_rtv;
  22.  
  23.    OculusTexture() : m_session(nullptr), m_texture_chain(nullptr)
  24.    {
  25.    }
  26.  
  27.    bool Init(ovrSession session, int sizeW, int sizeH)
  28.    {
  29.       m_session = session;
  30.  
  31.       ovrTextureSwapChainDesc desc = {};
  32.       desc.Type = ovrTexture_2D;
  33.       desc.ArraySize = 1;
  34.       desc.Format = OVR_FORMAT_R8G8B8A8_UNORM_SRGB;
  35.       desc.Width = sizeW;
  36.       desc.Height = sizeH;
  37.       desc.MipLevels = 1;
  38.       desc.SampleCount = 1;
  39.       desc.MiscFlags = ovrTextureMisc_DX_Typeless;
  40.       desc.BindFlags = ovrTextureBind_DX_RenderTarget;
  41.       desc.StaticImage = ovrFalse;
  42.  
  43.       ovrResult result = ovr_CreateTextureSwapChainDX(session, DIRECTX.Device, &desc, &m_texture_chain);
  44.  
  45.       if (!OVR_SUCCESS(result))
  46.          return false;
  47.  
  48.       int texture_count = 0;
  49.       ovr_GetTextureSwapChainLength(m_session, m_texture_chain, &texture_count);
  50.  
  51.       for (int i = 0; i < texture_count; ++i)
  52.       {
  53.          ID3D11Texture2D* tex = nullptr;
  54.          ovr_GetTextureSwapChainBufferDX(m_session, m_texture_chain, i, IID_PPV_ARGS(&tex));
  55.  
  56.          D3D11_RENDER_TARGET_VIEW_DESC rtvd = {};
  57.          rtvd.Format = DXGI_FORMAT_R8G8B8A8_UNORM;
  58.          rtvd.ViewDimension = D3D11_RTV_DIMENSION_TEXTURE2D;
  59.  
  60.          ID3D11RenderTargetView* rtv;
  61.          DIRECTX.Device->CreateRenderTargetView(tex, &rtvd, &rtv);
  62.          m_tex_rtv.push_back(rtv);
  63.          tex->Release();
  64.       }
  65.  
  66.       return true;
  67.    }
  68.  
  69.    ~OculusTexture()
  70.    {
  71.       for (int i = 0; i < (int)m_tex_rtv.size(); ++i)
  72.       {
  73.          Release(m_tex_rtv[i]);
  74.       }
  75.  
  76.       if (m_texture_chain)
  77.       {
  78.          ovr_DestroyTextureSwapChain(m_session, m_texture_chain);
  79.       }
  80.    }
  81.  
  82.    ID3D11RenderTargetView* GetRTV()
  83.    {
  84.       int index = 0;
  85.       ovr_GetTextureSwapChainCurrentIndex(m_session, m_texture_chain, &index);
  86.       return m_tex_rtv[index];
  87.    }
  88.  
  89.    // Commit changes
  90.    void Commit()
  91.    {
  92.       ovr_CommitTextureSwapChain(m_session, m_texture_chain);
  93.    }
  94. };
  95.  
  96.  
  97. class HMD
  98. {
  99. public:
  100.    HMD() :
  101.       m_mirror_texture(nullptr),
  102.       m_mirror_desc{},
  103.       m_hmd_desc{},
  104.       m_viewport_eye{},
  105.       m_is_visible(true),
  106.       m_frame_index(0)
  107.    {
  108.       m_eye_texture[0] = nullptr;
  109.       m_eye_texture[1] = nullptr;
  110.  
  111.       m_eye_depth_buffer[0] = nullptr;
  112.       m_eye_depth_buffer[1] = nullptr;
  113.    }
  114.  
  115.    bool Create();
  116.    void Release();
  117.    void Update();
  118.    bool Present();
  119.  
  120.    void PrepareViewport(int eye);
  121.    XMMATRIX GetProjection(int eye, const XMVECTOR &observor_rotation_q, const XMVECTOR &observor_position, bool coord_sys_left);
  122.  
  123.    OculusTexture  *m_eye_texture[2];
  124.    DepthBuffer    *m_eye_depth_buffer[2];
  125.  
  126.    ovrMirrorTextureDesc m_mirror_desc;
  127.    ovrMirrorTexture m_mirror_texture;
  128.  
  129.    bool m_is_visible;
  130.    long long m_frame_index;
  131.    ovrSession m_session;
  132.    ovrGraphicsLuid m_luid;
  133.    ovrHmdDesc m_hmd_desc;
  134.    ovrRecti m_viewport_eye[2];
  135.    ovrPosef m_eye_render_pose[2];
  136.    ovrEyeRenderDesc m_eye_render_desc[2];
  137.    double m_sensor_sample_time;
  138. };
  139.  
  140.  
  141. class Zone
  142. {
  143. public:
  144.  
  145.    Zone()
  146.    {
  147.       m_scene = new Scene(false);
  148.       m_camera = new Camera(&XMVectorSet(0.0f, 0.0f, 5.0f, 0), &XMQuaternionIdentity());
  149.    }
  150.  
  151.    void Release()
  152.    {
  153.       if (m_camera)
  154.          delete m_camera;
  155.  
  156.       if (m_scene)
  157.          delete m_scene;
  158.    }
  159.  
  160.    Scene  *m_scene;
  161.    Camera *m_camera;
  162. };
  163.  
  164.  
  165.  
  166. // TODO: Support Retry on ovrError_DisplayLost
  167. // if (result == ovrError_DisplayLost) ...
  168.  
/// Creates the OVR session, the D3D11 device + mirror window, per-eye
/// render targets and depth buffers, and the monitor mirror texture.
/// Returns false on any failure; resources created before the failure
/// are cleaned up later by HMD::Release().
bool HMD::Create()
{
   ovrResult result = ovr_Create(&m_session, &m_luid);

   if (!OVR_SUCCESS(result))
      return false;

   m_hmd_desc = ovr_GetHmdDesc(m_session);

   // Setup device and graphics
   // Note: the mirror window can be any size, for this sample we use 1/2 the HMD resolution
   if (!DIRECTX.InitDevice(m_hmd_desc.Resolution.w / 2, m_hmd_desc.Resolution.h / 2, reinterpret_cast<LUID*>(&m_luid)))
      return false;

   // Make the eye render buffers (caution if actual size < requested due to HW limits).

   for (int eye = 0; eye < 2; ++eye)
   {
      // Ask the SDK for the recommended texture size at 1.0 pixel density.
      ovrSizei ideal_size = ovr_GetFovTextureSize(m_session, (ovrEyeType)eye, m_hmd_desc.DefaultEyeFov[eye], 1.0f);
      m_eye_texture[eye] = new OculusTexture();

      if (!m_eye_texture[eye]->Init(m_session, ideal_size.w, ideal_size.h))
      {
         VALIDATE(false, "Failed to create eye texture.");
         return false;
      }

      m_eye_depth_buffer[eye] = new DepthBuffer(DIRECTX.Device, ideal_size.w, ideal_size.h);
      m_viewport_eye[eye].Pos.x = 0;
      m_viewport_eye[eye].Pos.y = 0;
      m_viewport_eye[eye].Size = ideal_size;

      // NOTE(review): this check looks redundant — Init() above already
      // returns false when the swap chain could not be created.
      if (!m_eye_texture[eye]->m_texture_chain)
      {
         VALIDATE(false, "Failed to create texture.");
         return false;
      }
   }

   // Create a mirror to see on the monitor.
   m_mirror_desc.Format = OVR_FORMAT_R8G8B8A8_UNORM_SRGB;
   m_mirror_desc.Width = DIRECTX.WinSizeW;
   m_mirror_desc.Height = DIRECTX.WinSizeH;
   result = ovr_CreateMirrorTextureDX(m_session, DIRECTX.Device, &m_mirror_desc, &m_mirror_texture);

   if (!OVR_SUCCESS(result))
   {
      VALIDATE(false, "Failed to create mirror texture.");
      return false;
   }

   // FloorLevel will give tracking poses where the floor height is 0
   ovr_SetTrackingOriginType(m_session, ovrTrackingOrigin_FloorLevel);

   return true;
}
  225.  
  226.  
/// Accessor for the mirror window handle owned by the DIRECTX singleton.
HWND Rift_GetWindow()
{
   return DIRECTX.Window;
}
  231.  
/// Accessor for the D3D11 device owned by the DIRECTX singleton.
ID3D11Device *Rift_GetDevice()
{
   return DIRECTX.Device;
}
  236.  
/// Accessor for the immediate device context owned by the DIRECTX singleton.
ID3D11DeviceContext *Rift_GetDeviceContext()
{
   return DIRECTX.Context;
}
  241.  
  242.  
  243. void Input_Processing(Zone &z)
  244. {
  245.    XMVECTOR forward = XMVector3Rotate(XMVectorSet(0, 0, -0.05f, 0), z.m_camera->Rot);
  246.    XMVECTOR right = XMVector3Rotate(XMVectorSet(0.05f, 0, 0, 0), z.m_camera->Rot);
  247.  
  248.    if (DIRECTX.Key['W'])
  249.       z.m_camera->Pos = XMVectorAdd(z.m_camera->Pos, forward);
  250.  
  251.    if (DIRECTX.Key['S'])
  252.       z.m_camera->Pos = XMVectorSubtract(z.m_camera->Pos, forward);
  253.  
  254.    if (DIRECTX.Key['D'])
  255.       z.m_camera->Pos = XMVectorAdd(z.m_camera->Pos, right);
  256.  
  257.    if (DIRECTX.Key['A'])
  258.       z.m_camera->Pos = XMVectorSubtract(z.m_camera->Pos, right);
  259.  
  260.    static float yaw = 0;
  261.    static float pitch = 0;
  262.  
  263.    if (DIRECTX.Key[VK_LEFT])
  264.       yaw += 0.02f;
  265.  
  266.    if (DIRECTX.Key[VK_RIGHT])
  267.       yaw -= 0.02f;
  268.    
  269.    if (DIRECTX.Key[VK_UP])
  270.    {
  271.       pitch += 0.02f;
  272.      
  273.       // Clamp
  274.       if (pitch > 0.8f)
  275.          pitch = 0.4f;
  276.    }
  277.  
  278.    if (DIRECTX.Key[VK_DOWN])
  279.    {
  280.       pitch -= 0.02f;
  281.  
  282.       // Clamp
  283.       if (pitch < -0.8f)
  284.          pitch = -0.4f;
  285.    }
  286.      
  287.    z.m_camera->Rot = XMQuaternionRotationRollPitchYaw(pitch, yaw, 0);
  288. }
  289.  
  290.  
  291. void Animate(Scene *s)
  292. {
  293.    // Animate the cube
  294.    static float cube_clock = 0;
  295.    s->Models[0]->Pos = XMFLOAT3(9 * sin(cube_clock), 3, 9 * cos(cube_clock += 0.015f));
  296. }
  297.  
  298.  
/// Samples head tracking for the current frame, filling m_eye_render_desc,
/// m_eye_render_pose and m_sensor_sample_time for use by GetProjection()
/// and Present().
void HMD::Update()
{
   // Call ovr_GetRenderDesc each frame to get the ovrEyeRenderDesc,
   // as the returned values (e.g. hmd_to_eye_offset) may change at runtime.
   m_eye_render_desc[0] = ovr_GetRenderDesc(m_session, ovrEye_Left, m_hmd_desc.DefaultEyeFov[0]);
   m_eye_render_desc[1] = ovr_GetRenderDesc(m_session, ovrEye_Right, m_hmd_desc.DefaultEyeFov[1]);

   // Get both eye poses simultaneously, with IPD offset already included.
   ovrVector3f      hmd_to_eye_offset[2] = { m_eye_render_desc[0].HmdToEyeOffset,
      m_eye_render_desc[1].HmdToEyeOffset };

   ovr_GetEyePoses(m_session, m_frame_index, ovrTrue, hmd_to_eye_offset, m_eye_render_pose, &m_sensor_sample_time);

   // sensor_sample_time is fed into the layer later (in HMD::Present).
}
  314.  
  315.  
/// Binds and clears the given eye's render target + depth buffer and sets
/// the rasterizer viewport to that eye's rectangle.
void HMD::PrepareViewport(int eye)
{
   // Clear and set up rendertarget
   DIRECTX.SetAndClearRenderTarget(m_eye_texture[eye]->GetRTV(), m_eye_depth_buffer[eye]);

   DIRECTX.SetViewport((float)m_viewport_eye[eye].Pos.x, (float)m_viewport_eye[eye].Pos.y,
      (float)m_viewport_eye[eye].Size.w, (float)m_viewport_eye[eye].Size.h);
}
  324.  
  325.  
  326. OVR_PUBLIC_FUNCTION(void) __ovrPosef_FlipHandedness(const ovrPosef* inPose, ovrPosef* outPose)
  327. {
  328.    outPose->Orientation.x = -inPose->Orientation.x;
  329.    outPose->Orientation.y = -inPose->Orientation.y;
  330.    outPose->Orientation.z = inPose->Orientation.z;
  331.    outPose->Orientation.w = inPose->Orientation.w;
  332.  
  333.    outPose->Position.x = -inPose->Position.x;
  334.    outPose->Position.y = inPose->Position.y;
  335.    outPose->Position.z = inPose->Position.z;
  336. }
  337.  
  338.  
/// Builds the combined view * projection matrix for one eye, composing the
/// tracked eye pose (from HMD::Update) with the observer (player body)
/// position/rotation supplied by the caller.
/// @param coord_sys_left  true = flip the pose handedness and build a
///                        left-handed projection; false = SDK-native RH.
XMMATRIX HMD::GetProjection(int eye, const XMVECTOR &observor_rotation_q, const XMVECTOR &observor_position, bool coord_sys_left)
{
   ovrPosef eye_pose;

   if (coord_sys_left)
   {
      // Current rift SDK flip function appears to have a bug, resulting in up/down reversed.
      // Hence the above, which resolves the problem.
      __ovrPosef_FlipHandedness(&(m_eye_render_pose[eye]), &eye_pose);
   }
   else
      eye_pose = m_eye_render_pose[eye];

   // Get the pose information in XM format

   XMVECTOR eye_quat = XMVectorSet(eye_pose.Orientation.x, eye_pose.Orientation.y,
      eye_pose.Orientation.z, eye_pose.Orientation.w);

   XMVECTOR eye_pos = XMVectorSet(eye_pose.Position.x, eye_pose.Position.y, eye_pose.Position.z, 0);

   // Get view and projection matrices for the Rift camera
   XMVECTOR combined_pos = XMVectorAdd(observor_position, XMVector3Rotate(eye_pos, observor_rotation_q));

   // BUG: Look up/down rolls rather than pitches when combined here.
   // TODO: What's the correct means of combining these two quats, please.
   // NOTE(review): XMQuaternionMultiply(Q1, Q2) returns the rotation that
   // applies Q1 first, then Q2 — the reversed argument order may be the
   // cause of the roll-vs-pitch symptom; verify before changing.
   XMVECTOR combined_rot = XMQuaternionMultiply(eye_quat, observor_rotation_q);

   // DBG: use this to verify observor rotation is correct.
   //XMVECTOR combined_rot = observor_rotation_q;

   Camera final_cam(&combined_pos, &combined_rot);

   XMMATRIX view = final_cam.GetViewMatrix();

   ovrMatrix4f p = ovrMatrix4f_Projection(m_eye_render_desc[eye].Fov, 0.2f, 1000.0f, coord_sys_left ? ovrProjection_LeftHanded : ovrProjection_None);

   // ovrMatrix4f is row-major; transpose while copying into the
   // column-order XMMatrixSet arguments.
   XMMATRIX proj = XMMatrixSet(p.M[0][0], p.M[1][0], p.M[2][0], p.M[3][0],
      p.M[0][1], p.M[1][1], p.M[2][1], p.M[3][1],
      p.M[0][2], p.M[1][2], p.M[2][2], p.M[3][2],
      p.M[0][3], p.M[1][3], p.M[2][3], p.M[3][3]);

   return XMMatrixMultiply(view, proj);
}
  382.  
  383.  
  384. bool HMD::Present()
  385. {
  386.    for (int eye = 0; eye < 2; eye++)
  387.       m_eye_texture[eye]->Commit();
  388.  
  389.    // Initialize our single full screen Fov layer.
  390.    ovrLayerEyeFov ld = {};
  391.    ld.Header.Type = ovrLayerType_EyeFov;
  392.    ld.Header.Flags = 0;
  393.  
  394.    for (int eye = 0; eye < 2; ++eye)
  395.    {
  396.       ld.ColorTexture[eye] = m_eye_texture[eye]->m_texture_chain;
  397.       ld.Viewport[eye] = m_viewport_eye[eye];
  398.       ld.Fov[eye] = m_hmd_desc.DefaultEyeFov[eye];
  399.       ld.RenderPose[eye] = m_eye_render_pose[eye];
  400.       ld.SensorSampleTime = m_sensor_sample_time;
  401.    }
  402.  
  403.    ovrLayerHeader* layers = &ld.Header;
  404.    ovrResult result = ovr_SubmitFrame(m_session, m_frame_index, nullptr, &layers, 1);
  405.  
  406.    // Exit the rendering loop if submit returns an error. TODO: retry on ovrError_DisplayLost
  407.    if (!OVR_SUCCESS(result))
  408.       return false;
  409.  
  410.    m_is_visible = (result == ovrSuccess);
  411.  
  412.    ovrSessionStatus session_status;
  413.    ovr_GetSessionStatus(m_session, &session_status);
  414.  
  415.    if (session_status.ShouldQuit)
  416.       return false;
  417.  
  418.    if (session_status.ShouldRecenter)
  419.       ovr_RecenterTrackingOrigin(m_session);
  420.  
  421.    // Render mirror
  422.    ID3D11Texture2D* tex = nullptr;
  423.    ovr_GetMirrorTextureBufferDX(m_session, m_mirror_texture, IID_PPV_ARGS(&tex));
  424.    DIRECTX.Context->CopyResource(DIRECTX.BackBuffer, tex);
  425.    tex->Release();
  426.    DIRECTX.SwapChain->Present(0, 0);
  427.  
  428.    m_frame_index++;
  429.  
  430.    return true;
  431. }
  432.  
// Application singletons: the HMD/session wrapper and the demo scene+camera.
// NOTE(review): identifiers beginning with a double underscore are reserved
// for the implementation in C++; consider renaming.
HMD __hmd;
Zone *__zone = nullptr;
  435.  
  436.  
/// Initializes LibOVR, opens the mirror window, and creates the HMD
/// session + render resources. Returns false if HMD creation fails.
/// (VALIDATE is assumed to abort/report on failure — from the app util header.)
bool Rift_Init(HINSTANCE hinst)
{
   // Initializes LibOVR, and the Rift
   ovrResult result = ovr_Initialize(nullptr);
   VALIDATE(OVR_SUCCESS(result), "Failed to initialize libOVR.");

   VALIDATE(DIRECTX.InitWindow(hinst, L"<><> Infinity for Oculus"), "Failed to open window.");

   return __hmd.Create();
}
  447.  
  448.  
/// Exposes the eye's RTV and depth-stencil view as opaque pointers for
/// callers that do not include the D3D headers. Non-owning: callers must
/// not Release() these.
void Rift_GetRenderTarget(void *&render_target, void *&depth, int eye)
{
   render_target = (void*)__hmd.m_eye_texture[eye]->GetRTV();
   depth = (void*)__hmd.m_eye_depth_buffer[eye]->TexDsv;
}
  454.  
  455.  
  456. void Rift_GetViewport(float &x, float &y, float &w, float &h, int eye)
  457. {
  458.    x = (float)__hmd.m_viewport_eye[eye].Pos.x;
  459.    y = (float)__hmd.m_viewport_eye[eye].Pos.y;
  460.    w = (float)__hmd.m_viewport_eye[eye].Size.w;
  461.    h = (float)__hmd.m_viewport_eye[eye].Size.h;
  462. }
  463.  
  464.  
/// Submits the frame to the compositor and mirrors to the monitor.
/// NOTE(review): HMD::Present()'s bool (quit/submit-failure signal) is
/// discarded here; callers of this wrapper cannot observe it.
void Rift_Present()
{
   __hmd.Present();
}
  469.  
  470.  
/// Samples head tracking for the upcoming frame (wrapper over HMD::Update).
void Rift_Update()
{
   __hmd.Update();
}
  475.  
  476.  
  477. XMMATRIX Rift_Get_Projection(int eye, const XMFLOAT4 &rot, const XMFLOAT3 &pos, bool projection_lh)
  478. {
  479.    XMVECTOR xpos = XMVectorSet(pos.x, pos.y, pos.z, 0);
  480.    XMVECTOR xrot = XMVectorSet(rot.x, rot.y, rot.z, rot.w);
  481.  
  482.    return __hmd.GetProjection(eye, xrot, xpos, projection_lh);
  483. }
  484.  
  485.  
  486. #if 1
  487. bool Perform_Processing(HMD &hmd, Zone &z)
  488. {
  489.    Input_Processing(z);
  490.  
  491.    Animate(z.m_scene);
  492.  
  493.    hmd.Update();
  494.  
  495.    // Render scene to the eye buffers ...
  496.  
  497.    if (hmd.m_is_visible)
  498.    {
  499.       for (int eye = 0; eye < 2; ++eye)
  500.       {
  501.          hmd.PrepareViewport(eye);
  502.  
  503.          const XMVECTOR &observor_rotation = z.m_camera->Rot;
  504.          const XMVECTOR &observor_position = z.m_camera->Pos;
  505.  
  506.          // Oculus sample scene uses right handed coord system
  507.          XMMATRIX cam_to_clip = hmd.GetProjection(eye, observor_rotation, observor_position, false); // false);
  508.          z.m_scene->Render(&cam_to_clip, 1, 1, 1, 1, true);
  509.       }
  510.    }
  511.  
  512.    hmd.Present();
  513.  
  514.    return true;
  515. }
  516. #else
  517. // Absolute minimal implementation
// Absolute minimal implementation (disabled #else branch): clears the left
// eye green and the right eye blue, then presents — a smoke test that the
// swap chains, RTVs and frame submission work without any scene rendering.
bool Perform_Processing(HMD &hmd, Zone &z)
{
   Rift_Update();

   float green[4] = { 0, 1, 0, 1.0f };
   float blue[4] = { 0, 0, 1, 1.0f };

   ID3D11DeviceContext *ctx = Rift_GetDeviceContext();

   if (!ctx)
      return false;

   // Left
   void *rtv = nullptr, *depth = nullptr;
   Rift_GetRenderTarget(rtv, depth, 0);

   if (rtv)
      ctx->ClearRenderTargetView((ID3D11RenderTargetView *)rtv, green);

   // Right
   rtv = nullptr; depth = nullptr;
   Rift_GetRenderTarget(rtv, depth, 1);

   if (rtv)
      ctx->ClearRenderTargetView((ID3D11RenderTargetView *)rtv, blue);

   Rift_Present();

   return true;
}
  548. #endif
  549.  
  550.  
  551. void HMD::Release()
  552. {
  553.    if (m_mirror_texture)
  554.       ovr_DestroyMirrorTexture(m_session, m_mirror_texture);
  555.  
  556.    for (int eye = 0; eye < 2; ++eye)
  557.    {
  558.       delete m_eye_texture[eye];
  559.       delete m_eye_depth_buffer[eye];
  560.    }
  561.  
  562.    DIRECTX.ReleaseDevice();
  563.    ovr_Destroy(m_session);
  564. }
  565.  
  566.  
  567.  
  568.  
  569.  
  570.  
  571. //-------------------------------------------------------------------------------------
//-------------------------------------------------------------------------------------
// Entry point: initializes LibOVR + the HMD, runs the message/render loop,
// then tears everything down in reverse order.
int WINAPI WinMain(HINSTANCE hinst, HINSTANCE, LPSTR, int)
{
   bool ok = Rift_Init(hinst);

   // NOTE(review): the zone is allocated even when Rift_Init failed; it is
   // still released below, so this only costs an unused allocation.
   __zone = new Zone;

   if (ok)
   {
      // TODO: need to reconnect ovrError_DisplayLost functionality.

      while (DIRECTX.HandleMessages())
      {
         if (!Perform_Processing(__hmd, *__zone))
            break;
      }
   }

   if (__zone)
   {
      __zone->Release();
      delete __zone;
   }

   __hmd.Release();

   ovr_Shutdown();

   return 0;
}
Advertisement
Add Comment
Please, Sign In to add comment