Remove Immersive Mode demo
This commit is contained in:
parent d09692664e
commit 42f2c472f7
25 changed files with 0 additions and 2664 deletions
@@ -6,12 +6,10 @@
#include "logs.h"
#include "BrowserPage.h"
#include "BrowserPage.g.cpp"
#include "ImmersiveView.h"

using namespace winrt::Windows::UI::Xaml;
using namespace winrt::Windows::UI::Core;
using namespace winrt::Windows::UI::ViewManagement;
using namespace winrt::Windows::Graphics::Holographic;
using namespace winrt::Windows::ApplicationModel::Core;

namespace winrt::ServoApp::implementation {

@@ -74,20 +72,4 @@ void BrowserPage::OnURLEdited(IInspectable const &sender,
  }
}

void BrowserPage::OnImmersiveButtonClicked(IInspectable const &,
                                           RoutedEventArgs const &) {
  if (HolographicSpace::IsAvailable()) {
    log("Holographic space available");
    auto v = CoreApplication::CreateNewView(mImmersiveViewSource);
    auto parentId = ApplicationView::GetForCurrentView().Id();
    v.Dispatcher().RunAsync(CoreDispatcherPriority::Normal, [=] {
      auto winId = ApplicationView::GetForCurrentView().Id();
      ApplicationViewSwitcher::SwitchAsync(winId, parentId);
      log("Immersive view started");
    });
  } else {
    log("Holographic space not available");
  }
}

} // namespace winrt::ServoApp::implementation
@@ -5,18 +5,14 @@
#pragma once

#include "BrowserPage.g.h"
#include "ImmersiveView.h"
#include "ServoControl\ServoControl.h"


namespace winrt::ServoApp::implementation {

struct BrowserPage : BrowserPageT<BrowserPage> {
public:
  BrowserPage();

  void OnImmersiveButtonClicked(Windows::Foundation::IInspectable const &,
                                Windows::UI::Xaml::RoutedEventArgs const &);
  void OnForwardButtonClicked(Windows::Foundation::IInspectable const &,
                              Windows::UI::Xaml::RoutedEventArgs const &);
  void OnBackButtonClicked(Windows::Foundation::IInspectable const &,

@@ -30,7 +26,6 @@ public:
  void Shutdown();

private:
  winrt::ServoApp::ImmersiveViewSource mImmersiveViewSource;
  void BindServoEvents();
};
} // namespace winrt::ServoApp::implementation
@@ -26,9 +26,6 @@
        <Button x:Name="stopButton" IsTabStop="true" IsEnabled="false" Content="stop" Click="OnStopButtonClicked"/>
      </StackPanel>
      <TextBox Text="" IsTabStop="true" InputScope="Url" PlaceholderText="Type a URL" x:Name="urlTextbox" Grid.Column="1" KeyUp="OnURLEdited"/>
      <StackPanel Orientation="Horizontal" Grid.Column="2">
        <Button x:Name="immersiveButton" Click="OnImmersiveButtonClicked">Run Immersive</Button>
      </StackPanel>
    </Grid>
    <local:ServoControl TabIndex="0" x:Name="servoControl" Grid.Row="1"/>
  </Grid>
@@ -1,234 +0,0 @@
#include "pch.h"

#include "CameraResources.h"
#include "Common/DirectXHelper.h"
#include "DeviceResources.h"

using namespace DirectX;
using namespace Microsoft::WRL;
using namespace winrt::Windows::Foundation::Numerics;
using namespace winrt::Windows::Graphics::DirectX::Direct3D11;
using namespace winrt::Windows::Graphics::Holographic;
using namespace winrt::Windows::Perception::Spatial;

DX::CameraResources::CameraResources(HolographicCamera const &camera)
    : m_holographicCamera(camera), m_isStereo(camera.IsStereo()),
      m_d3dRenderTargetSize(camera.RenderTargetSize()) {
  m_d3dViewport = CD3D11_VIEWPORT(0.f, 0.f, m_d3dRenderTargetSize.Width,
                                  m_d3dRenderTargetSize.Height);
};

// Updates resources associated with a holographic camera's swap chain.
// The app does not access the swap chain directly, but it does create
// resource views for the back buffer.
void DX::CameraResources::CreateResourcesForBackBuffer(
    DX::DeviceResources *pDeviceResources,
    HolographicCameraRenderingParameters const &cameraParameters) {
  ID3D11Device *device = pDeviceResources->GetD3DDevice();

  // Get the WinRT object representing the holographic camera's back buffer.
  IDirect3DSurface surface = cameraParameters.Direct3D11BackBuffer();

  // Get the holographic camera's back buffer.
  // Holographic apps do not create a swap chain themselves; instead, buffers
  // are owned by the system. The Direct3D back buffer resources are provided to
  // the app using WinRT interop APIs.
  ComPtr<ID3D11Texture2D> cameraBackBuffer;
  winrt::check_hresult(surface
                           .as<::Windows::Graphics::DirectX::Direct3D11::
                                   IDirect3DDxgiInterfaceAccess>()
                           ->GetInterface(IID_PPV_ARGS(&cameraBackBuffer)));

  // Determine if the back buffer has changed. If so, ensure that the render
  // target view is for the current back buffer.
  if (m_d3dBackBuffer.Get() != cameraBackBuffer.Get()) {
    // This can change every frame as the system moves to the next buffer in the
    // swap chain. This mode of operation will occur when certain rendering
    // modes are activated.
    m_d3dBackBuffer = cameraBackBuffer;

    // Create a render target view of the back buffer.
    // Creating this resource is inexpensive, and is better than keeping track
    // of the back buffers in order to pre-allocate render target views for each
    // one.
    winrt::check_hresult(device->CreateRenderTargetView(
        m_d3dBackBuffer.Get(), nullptr, &m_d3dRenderTargetView));

    // Get the DXGI format for the back buffer.
    // This information can be accessed by the app using
    // CameraResources::GetBackBufferDXGIFormat().
    D3D11_TEXTURE2D_DESC backBufferDesc;
    m_d3dBackBuffer->GetDesc(&backBufferDesc);
    m_dxgiFormat = backBufferDesc.Format;

    // Check for render target size changes.
    winrt::Windows::Foundation::Size currentSize =
        m_holographicCamera.RenderTargetSize();
    if (m_d3dRenderTargetSize != currentSize) {
      // Set render target size.
      m_d3dRenderTargetSize = currentSize;

      // A new depth stencil view is also needed.
      m_d3dDepthStencilView.Reset();
    }
  }

  // Refresh depth stencil resources, if needed.
  if (m_d3dDepthStencilView == nullptr) {
    // Create a depth stencil view for use with 3D rendering if needed.
    CD3D11_TEXTURE2D_DESC depthStencilDesc(
        DXGI_FORMAT_R16_TYPELESS,
        static_cast<UINT>(m_d3dRenderTargetSize.Width),
        static_cast<UINT>(m_d3dRenderTargetSize.Height),
        m_isStereo ? 2 : 1, // Create two textures when rendering in stereo.
        1,                  // Use a single mipmap level.
        D3D11_BIND_DEPTH_STENCIL | D3D11_BIND_SHADER_RESOURCE);

    winrt::check_hresult(device->CreateTexture2D(&depthStencilDesc, nullptr,
                                                 &m_d3dDepthStencil));

    CD3D11_DEPTH_STENCIL_VIEW_DESC depthStencilViewDesc(
        m_isStereo ? D3D11_DSV_DIMENSION_TEXTURE2DARRAY
                   : D3D11_DSV_DIMENSION_TEXTURE2D,
        DXGI_FORMAT_D16_UNORM);
    winrt::check_hresult(device->CreateDepthStencilView(
        m_d3dDepthStencil.Get(), &depthStencilViewDesc,
        &m_d3dDepthStencilView));
  }

  // Create the constant buffer, if needed.
  if (m_viewProjectionConstantBuffer == nullptr) {
    // Create a constant buffer to store view and projection matrices for the
    // camera.
    CD3D11_BUFFER_DESC constantBufferDesc(sizeof(ViewProjectionConstantBuffer),
                                          D3D11_BIND_CONSTANT_BUFFER);
    winrt::check_hresult(device->CreateBuffer(&constantBufferDesc, nullptr,
                                              &m_viewProjectionConstantBuffer));
  }
}

// Releases resources associated with a back buffer.
void DX::CameraResources::ReleaseResourcesForBackBuffer(
    DX::DeviceResources *pDeviceResources) {
  ID3D11DeviceContext *context = pDeviceResources->GetD3DDeviceContext();

  // Release camera-specific resources.
  m_d3dBackBuffer.Reset();
  m_d3dDepthStencil.Reset();
  m_d3dRenderTargetView.Reset();
  m_d3dDepthStencilView.Reset();
  m_viewProjectionConstantBuffer.Reset();

  // Ensure system references to the back buffer are released by clearing the
  // render target from the graphics pipeline state, and then flushing the
  // Direct3D context.
  ID3D11RenderTargetView *nullViews[D3D11_SIMULTANEOUS_RENDER_TARGET_COUNT] = {
      nullptr};
  context->OMSetRenderTargets(ARRAYSIZE(nullViews), nullViews, nullptr);
  context->Flush();
}

// Updates the view/projection constant buffer for a holographic camera.
void DX::CameraResources::UpdateViewProjectionBuffer(
    std::shared_ptr<DX::DeviceResources> deviceResources,
    HolographicCameraPose const &cameraPose,
    SpatialCoordinateSystem const &coordinateSystem) {
  // The system changes the viewport on a per-frame basis for system
  // optimizations.
  auto viewport = cameraPose.Viewport();
  m_d3dViewport =
      CD3D11_VIEWPORT(viewport.X, viewport.Y, viewport.Width, viewport.Height);

  // The projection transform for each frame is provided by the
  // HolographicCameraPose.
  HolographicStereoTransform cameraProjectionTransform =
      cameraPose.ProjectionTransform();

  // Get a container object with the view and projection matrices for the given
  // pose in the given coordinate system.
  auto viewTransformContainer =
      cameraPose.TryGetViewTransform(coordinateSystem);

  // If TryGetViewTransform returns a null pointer, that means the pose and
  // coordinate system cannot be understood relative to one another; content
  // cannot be rendered in this coordinate system for the duration of the
  // current frame. This usually means that positional tracking is not active
  // for the current frame, in which case it is possible to use a
  // SpatialLocatorAttachedFrameOfReference to render content that is not
  // world-locked instead.
  DX::ViewProjectionConstantBuffer viewProjectionConstantBufferData;
  bool viewTransformAcquired = viewTransformContainer != nullptr;
  if (viewTransformAcquired) {
    // Otherwise, the set of view transforms can be retrieved.
    HolographicStereoTransform viewCoordinateSystemTransform =
        viewTransformContainer.Value();

    // Update the view matrices. Holographic cameras (such as Microsoft
    // HoloLens) are constantly moving relative to the world. The view matrices
    // need to be updated every frame.
    XMStoreFloat4x4(
        &viewProjectionConstantBufferData.viewProjection[0],
        XMMatrixTranspose(XMLoadFloat4x4(&viewCoordinateSystemTransform.Left) *
                          XMLoadFloat4x4(&cameraProjectionTransform.Left)));
    XMStoreFloat4x4(
        &viewProjectionConstantBufferData.viewProjection[1],
        XMMatrixTranspose(XMLoadFloat4x4(&viewCoordinateSystemTransform.Right) *
                          XMLoadFloat4x4(&cameraProjectionTransform.Right)));
  }

  // Use the D3D device context to update Direct3D device-based resources.
  ID3D11DeviceContext *context = deviceResources->GetD3DDeviceContext();

  // Loading is asynchronous. Resources must be created before they can be
  // updated.
  if (context == nullptr || m_viewProjectionConstantBuffer == nullptr ||
      !viewTransformAcquired) {
    m_framePending = false;
  } else {
    // Update the view and projection matrices.
    context->UpdateSubresource(m_viewProjectionConstantBuffer.Get(), 0, nullptr,
                               &viewProjectionConstantBufferData, 0, 0);

    m_framePending = true;
  }
}

// Gets the view-projection constant buffer for the HolographicCamera and
// attaches it to the shader pipeline.
bool DX::CameraResources::AttachViewProjectionBuffer(
    std::shared_ptr<DX::DeviceResources> &deviceResources) {
  // This method uses Direct3D device-based resources.
  ID3D11DeviceContext *context = deviceResources->GetD3DDeviceContext();

  // Loading is asynchronous. Resources must be created before they can be
  // updated. Cameras can also be added asynchronously, in which case they must
  // be initialized before they can be used.
  if (context == nullptr || m_viewProjectionConstantBuffer == nullptr ||
      m_framePending == false) {
    return false;
  }

  // Set the viewport for this camera.
  context->RSSetViewports(1, &m_d3dViewport);

  // Send the constant buffer to the vertex shader.
  context->VSSetConstantBuffers(1, 1,
                                m_viewProjectionConstantBuffer.GetAddressOf());

  // The template includes a pass-through geometry shader that is used by
  // default on systems that don't support the D3D11_FEATURE_D3D11_OPTIONS3::
  // VPAndRTArrayIndexFromAnyShaderFeedingRasterizer extension. The shader
  // will be enabled at run-time on systems that require it.
  // If your app will also use the geometry shader for other tasks and those
  // tasks require the view/projection matrix, uncomment the following line
  // of code to send the constant buffer to the geometry shader as well.
  /*context->GSSetConstantBuffers(
      1,
      1,
      m_viewProjectionConstantBuffer.GetAddressOf()
  );*/

  m_framePending = false;

  return true;
}
@@ -1,93 +0,0 @@
#pragma once

namespace DX {
class DeviceResources;

// Constant buffer used to send the view-projection matrices to the shader
// pipeline.
struct ViewProjectionConstantBuffer {
  DirectX::XMFLOAT4X4 viewProjection[2];
};

// Assert that the constant buffer remains 16-byte aligned (best practice).
static_assert((sizeof(ViewProjectionConstantBuffer) % (sizeof(float) * 4)) == 0,
              "ViewProjection constant buffer size must be 16-byte aligned (16 "
              "bytes is the length of four floats).");

// Manages DirectX device resources that are specific to a holographic camera,
// such as the back buffer, ViewProjection constant buffer, and viewport.
class CameraResources {
public:
  CameraResources(winrt::Windows::Graphics::Holographic::HolographicCamera const
                      &holographicCamera);

  void CreateResourcesForBackBuffer(
      DX::DeviceResources *pDeviceResources,
      winrt::Windows::Graphics::Holographic::
          HolographicCameraRenderingParameters const &cameraParameters);
  void ReleaseResourcesForBackBuffer(DX::DeviceResources *pDeviceResources);

  void UpdateViewProjectionBuffer(
      std::shared_ptr<DX::DeviceResources> deviceResources,
      winrt::Windows::Graphics::Holographic::HolographicCameraPose const
          &cameraPose,
      winrt::Windows::Perception::Spatial::SpatialCoordinateSystem const
          &coordinateSystem);

  bool AttachViewProjectionBuffer(
      std::shared_ptr<DX::DeviceResources> &deviceResources);

  // Direct3D device resources.
  ID3D11RenderTargetView *GetBackBufferRenderTargetView() const {
    return m_d3dRenderTargetView.Get();
  }
  ID3D11DepthStencilView *GetDepthStencilView() const {
    return m_d3dDepthStencilView.Get();
  }
  ID3D11Texture2D *GetBackBufferTexture2D() const {
    return m_d3dBackBuffer.Get();
  }
  ID3D11Texture2D *GetDepthStencilTexture2D() const {
    return m_d3dDepthStencil.Get();
  }
  D3D11_VIEWPORT GetViewport() const { return m_d3dViewport; }
  DXGI_FORMAT GetBackBufferDXGIFormat() const { return m_dxgiFormat; }

  // Render target properties.
  winrt::Windows::Foundation::Size GetRenderTargetSize() const & {
    return m_d3dRenderTargetSize;
  }
  bool IsRenderingStereoscopic() const { return m_isStereo; }

  // The holographic camera these resources are for.
  winrt::Windows::Graphics::Holographic::HolographicCamera const &
  GetHolographicCamera() const {
    return m_holographicCamera;
  }

private:
  // Direct3D rendering objects. Required for 3D.
  Microsoft::WRL::ComPtr<ID3D11RenderTargetView> m_d3dRenderTargetView;
  Microsoft::WRL::ComPtr<ID3D11DepthStencilView> m_d3dDepthStencilView;
  Microsoft::WRL::ComPtr<ID3D11Texture2D> m_d3dBackBuffer;
  Microsoft::WRL::ComPtr<ID3D11Texture2D> m_d3dDepthStencil;

  // Device resource to store view and projection matrices.
  Microsoft::WRL::ComPtr<ID3D11Buffer> m_viewProjectionConstantBuffer;

  // Direct3D rendering properties.
  DXGI_FORMAT m_dxgiFormat;
  winrt::Windows::Foundation::Size m_d3dRenderTargetSize;
  D3D11_VIEWPORT m_d3dViewport;

  // Indicates whether the camera supports stereoscopic rendering.
  bool m_isStereo = false;

  // Indicates whether this camera has a pending frame.
  bool m_framePending = false;

  // Pointer to the holographic camera these resources are for.
  winrt::Windows::Graphics::Holographic::HolographicCamera m_holographicCamera =
      nullptr;
};
} // namespace DX
@@ -1,280 +0,0 @@

#include "pch.h"
#include "DeviceResources.h"
#include "DirectXHelper.h"

using namespace D2D1;
using namespace Microsoft::WRL;
using namespace winrt::Windows::Graphics::DirectX::Direct3D11;
using namespace winrt::Windows::Graphics::Display;
using namespace winrt::Windows::Graphics::Holographic;

// Constructor for DeviceResources.
DX::DeviceResources::DeviceResources() { CreateDeviceIndependentResources(); }

// Configures resources that don't depend on the Direct3D device.
void DX::DeviceResources::CreateDeviceIndependentResources() {
  // Initialize Direct2D resources.
  D2D1_FACTORY_OPTIONS options{};

#if defined(_DEBUG)
  // If the project is in a debug build, enable Direct2D debugging via SDK
  // Layers.
  options.debugLevel = D2D1_DEBUG_LEVEL_INFORMATION;
#endif

  // Initialize the Direct2D Factory.
  winrt::check_hresult(D2D1CreateFactory(D2D1_FACTORY_TYPE_SINGLE_THREADED,
                                         __uuidof(ID2D1Factory2), &options,
                                         &m_d2dFactory));

  // Initialize the DirectWrite Factory.
  winrt::check_hresult(DWriteCreateFactory(
      DWRITE_FACTORY_TYPE_SHARED, __uuidof(IDWriteFactory2), &m_dwriteFactory));

  // Initialize the Windows Imaging Component (WIC) Factory.
  winrt::check_hresult(CoCreateInstance(CLSID_WICImagingFactory2, nullptr,
                                        CLSCTX_INPROC_SERVER,
                                        IID_PPV_ARGS(&m_wicFactory)));
}

void DX::DeviceResources::SetHolographicSpace(
    HolographicSpace holographicSpace) {
  // Cache the holographic space. Used to re-initalize during device-lost
  // scenarios.
  m_holographicSpace = holographicSpace;

  InitializeUsingHolographicSpace();
}

void DX::DeviceResources::InitializeUsingHolographicSpace() {
  // The holographic space might need to determine which adapter supports
  // holograms, in which case it will specify a non-zero PrimaryAdapterId.
  LUID id = {m_holographicSpace.PrimaryAdapterId().LowPart,
             m_holographicSpace.PrimaryAdapterId().HighPart};

  // When a primary adapter ID is given to the app, the app should find
  // the corresponding DXGI adapter and use it to create Direct3D devices
  // and device contexts. Otherwise, there is no restriction on the DXGI
  // adapter the app can use.
  if ((id.HighPart != 0) || (id.LowPart != 0)) {
    UINT createFlags = 0;
#ifdef DEBUG
    if (DX::SdkLayersAvailable()) {
      createFlags |= DXGI_CREATE_FACTORY_DEBUG;
    }
#endif
    // Create the DXGI factory.
    ComPtr<IDXGIFactory1> dxgiFactory;
    winrt::check_hresult(
        CreateDXGIFactory2(createFlags, IID_PPV_ARGS(&dxgiFactory)));
    ComPtr<IDXGIFactory4> dxgiFactory4;
    winrt::check_hresult(dxgiFactory.As(&dxgiFactory4));

    // Retrieve the adapter specified by the holographic space.
    winrt::check_hresult(
        dxgiFactory4->EnumAdapterByLuid(id, IID_PPV_ARGS(&m_dxgiAdapter)));
  } else {
    m_dxgiAdapter.Reset();
  }

  CreateDeviceResources();

  m_holographicSpace.SetDirect3D11Device(m_d3dInteropDevice);
}

// Configures the Direct3D device, and stores handles to it and the device
// context.
void DX::DeviceResources::CreateDeviceResources() {
  // This flag adds support for surfaces with a different color channel ordering
  // than the API default. It is required for compatibility with Direct2D.
  UINT creationFlags = D3D11_CREATE_DEVICE_BGRA_SUPPORT;

#if defined(_DEBUG)
  if (DX::SdkLayersAvailable()) {
    // If the project is in a debug build, enable debugging via SDK Layers with
    // this flag.
    creationFlags |= D3D11_CREATE_DEVICE_DEBUG;
  }
#endif

  // This array defines the set of DirectX hardware feature levels this app will
  // support. Note the ordering should be preserved. Note that HoloLens supports
  // feature level 11.1. The HoloLens emulator is also capable of running on
  // graphics cards starting with feature level 10.0.
  D3D_FEATURE_LEVEL featureLevels[] = {
      D3D_FEATURE_LEVEL_12_1, D3D_FEATURE_LEVEL_12_0, D3D_FEATURE_LEVEL_11_1,
      D3D_FEATURE_LEVEL_11_0, D3D_FEATURE_LEVEL_10_1, D3D_FEATURE_LEVEL_10_0};

  // Create the Direct3D 11 API device object and a corresponding context.
  ComPtr<ID3D11Device> device;
  ComPtr<ID3D11DeviceContext> context;

  const D3D_DRIVER_TYPE driverType = m_dxgiAdapter == nullptr
                                         ? D3D_DRIVER_TYPE_HARDWARE
                                         : D3D_DRIVER_TYPE_UNKNOWN;
  const HRESULT hr = D3D11CreateDevice(
      m_dxgiAdapter.Get(), // Either nullptr, or the primary adapter determined
                           // by Windows Holographic.
      driverType,          // Create a device using the hardware graphics driver.
      0,                   // Should be 0 unless the driver is D3D_DRIVER_TYPE_SOFTWARE.
      creationFlags,       // Set debug and Direct2D compatibility flags.
      featureLevels,       // List of feature levels this app can support.
      ARRAYSIZE(featureLevels), // Size of the list above.
      D3D11_SDK_VERSION,  // Always set this to D3D11_SDK_VERSION for Windows
                          // Runtime apps.
      &device,            // Returns the Direct3D device created.
      &m_d3dFeatureLevel, // Returns feature level of device created.
      &context            // Returns the device immediate context.
  );

  if (FAILED(hr)) {
    // If the initialization fails, fall back to the WARP device.
    // For more information on WARP, see:
    // http://go.microsoft.com/fwlink/?LinkId=286690
    winrt::check_hresult(D3D11CreateDevice(
        nullptr,              // Use the default DXGI adapter for WARP.
        D3D_DRIVER_TYPE_WARP, // Create a WARP device instead of a hardware
                              // device.
        0, creationFlags, featureLevels, ARRAYSIZE(featureLevels),
        D3D11_SDK_VERSION, &device, &m_d3dFeatureLevel, &context));
  }

  // Store pointers to the Direct3D device and immediate context.
  winrt::check_hresult(device.As(&m_d3dDevice));
  winrt::check_hresult(context.As(&m_d3dContext));

  // Acquire the DXGI interface for the Direct3D device.
  ComPtr<IDXGIDevice3> dxgiDevice;
  winrt::check_hresult(m_d3dDevice.As(&dxgiDevice));

  // Wrap the native device using a WinRT interop object.
  winrt::com_ptr<::IInspectable> object;
  winrt::check_hresult(CreateDirect3D11DeviceFromDXGIDevice(
      dxgiDevice.Get(),
      reinterpret_cast<IInspectable **>(winrt::put_abi(object))));
  m_d3dInteropDevice = object.as<IDirect3DDevice>();

  // Cache the DXGI adapter.
  // This is for the case of no preferred DXGI adapter, or fallback to WARP.
  ComPtr<IDXGIAdapter> dxgiAdapter;
  winrt::check_hresult(dxgiDevice->GetAdapter(&dxgiAdapter));
  winrt::check_hresult(dxgiAdapter.As(&m_dxgiAdapter));

  // Check for device support for the optional feature that allows setting the
  // render target array index from the vertex shader stage.
  D3D11_FEATURE_DATA_D3D11_OPTIONS3 options;
  m_d3dDevice->CheckFeatureSupport(D3D11_FEATURE_D3D11_OPTIONS3, &options,
                                   sizeof(options));
  if (options.VPAndRTArrayIndexFromAnyShaderFeedingRasterizer) {
    m_supportsVprt = true;
  }
}

// Validates the back buffer for each HolographicCamera and recreates
// resources for back buffers that have changed.
// Locks the set of holographic camera resources until the function exits.
void DX::DeviceResources::EnsureCameraResources(
    HolographicFrame frame, HolographicFramePrediction prediction) {
  UseHolographicCameraResources<void>(
      [this, frame,
       prediction](std::map<UINT32, std::unique_ptr<CameraResources>>
                       &cameraResourceMap) {
        for (HolographicCameraPose const &cameraPose :
             prediction.CameraPoses()) {
          HolographicCameraRenderingParameters renderingParameters =
              frame.GetRenderingParameters(cameraPose);
          CameraResources *pCameraResources =
              cameraResourceMap[cameraPose.HolographicCamera().Id()].get();

          pCameraResources->CreateResourcesForBackBuffer(this,
                                                         renderingParameters);
        }
      });
}

// Prepares to allocate resources and adds resource views for a camera.
// Locks the set of holographic camera resources until the function exits.
void DX::DeviceResources::AddHolographicCamera(HolographicCamera camera) {
  UseHolographicCameraResources<void>(
      [this, camera](std::map<UINT32, std::unique_ptr<CameraResources>>
                         &cameraResourceMap) {
        cameraResourceMap[camera.Id()] =
            std::make_unique<CameraResources>(camera);
      });
}

// Deallocates resources for a camera and removes the camera from the set.
// Locks the set of holographic camera resources until the function exits.
void DX::DeviceResources::RemoveHolographicCamera(HolographicCamera camera) {
  UseHolographicCameraResources<void>(
      [this, camera](std::map<UINT32, std::unique_ptr<CameraResources>>
                         &cameraResourceMap) {
        CameraResources *pCameraResources =
            cameraResourceMap[camera.Id()].get();

        if (pCameraResources != nullptr) {
          pCameraResources->ReleaseResourcesForBackBuffer(this);
          cameraResourceMap.erase(camera.Id());
        }
      });
}

// Recreate all device resources and set them back to the current state.
// Locks the set of holographic camera resources until the function exits.
void DX::DeviceResources::HandleDeviceLost() {
  if (m_deviceNotify != nullptr) {
    m_deviceNotify->OnDeviceLost();
  }

  UseHolographicCameraResources<void>(
      [this](std::map<UINT32, std::unique_ptr<CameraResources>>
                 &cameraResourceMap) {
        for (auto &pair : cameraResourceMap) {
          CameraResources *pCameraResources = pair.second.get();
          pCameraResources->ReleaseResourcesForBackBuffer(this);
        }
      });

  InitializeUsingHolographicSpace();

  if (m_deviceNotify != nullptr) {
    m_deviceNotify->OnDeviceRestored();
  }
}

// Register our DeviceNotify to be informed on device lost and creation.
void DX::DeviceResources::RegisterDeviceNotify(
    DX::IDeviceNotify *deviceNotify) {
  m_deviceNotify = deviceNotify;
}

// Call this method when the app suspends. It provides a hint to the driver that
// the app is entering an idle state and that temporary buffers can be reclaimed
// for use by other apps.
void DX::DeviceResources::Trim() {
  m_d3dContext->ClearState();

  ComPtr<IDXGIDevice3> dxgiDevice;
  winrt::check_hresult(m_d3dDevice.As(&dxgiDevice));
  dxgiDevice->Trim();
}

// Present the contents of the swap chain to the screen.
// Locks the set of holographic camera resources until the function exits.
void DX::DeviceResources::Present(HolographicFrame frame) {
  // By default, this API waits for the frame to finish before it returns.
  // Holographic apps should wait for the previous frame to finish before
  // starting work on a new frame. This allows for better results from
  // holographic frame predictions.
  HolographicFramePresentResult presentResult =
      frame.PresentUsingCurrentPrediction();

  // The PresentUsingCurrentPrediction API will detect when the graphics device
  // changes or becomes invalid. When this happens, it is considered a Direct3D
  // device lost scenario.
  if (presentResult == HolographicFramePresentResult::DeviceRemoved) {
    // The Direct3D device, context, and resources should be recreated.
    HandleDeviceLost();
  }
}
@@ -1,118 +0,0 @@
#pragma once

#include "CameraResources.h"

namespace DX {
// Provides an interface for an application that owns DeviceResources to be
// notified of the device being lost or created.
interface IDeviceNotify {
  virtual void OnDeviceLost() = 0;
  virtual void OnDeviceRestored() = 0;
};

// Creates and manages a Direct3D device and immediate context, Direct2D device
// and context (for debug), and the holographic swap chain.
class DeviceResources {
public:
  DeviceResources();

  // Public methods related to Direct3D devices.
  void HandleDeviceLost();
  void RegisterDeviceNotify(IDeviceNotify *deviceNotify);
  void Trim();
  void Present(winrt::Windows::Graphics::Holographic::HolographicFrame frame);

  // Public methods related to holographic devices.
  void SetHolographicSpace(
      winrt::Windows::Graphics::Holographic::HolographicSpace space);
  void EnsureCameraResources(
      winrt::Windows::Graphics::Holographic::HolographicFrame frame,
      winrt::Windows::Graphics::Holographic::HolographicFramePrediction
          prediction);

  void AddHolographicCamera(
      winrt::Windows::Graphics::Holographic::HolographicCamera camera);
  void RemoveHolographicCamera(
      winrt::Windows::Graphics::Holographic::HolographicCamera camera);

  // Holographic accessors.
  template <typename RetType, typename LCallback>
  RetType UseHolographicCameraResources(LCallback const &callback);

  winrt::Windows::Graphics::DirectX::Direct3D11::IDirect3DDevice
  GetD3DInteropDevice() const {
    return m_d3dInteropDevice;
  }

  // D3D accessors.
  ID3D11Device4 *GetD3DDevice() const { return m_d3dDevice.Get(); }
  ID3D11DeviceContext3 *GetD3DDeviceContext() const {
    return m_d3dContext.Get();
  }
  D3D_FEATURE_LEVEL GetDeviceFeatureLevel() const { return m_d3dFeatureLevel; }
  bool GetDeviceSupportsVprt() const { return m_supportsVprt; }

  // DXGI acessors.
  IDXGIAdapter3 *GetDXGIAdapter() const { return m_dxgiAdapter.Get(); }

  // D2D accessors.
  ID2D1Factory2 *GetD2DFactory() const { return m_d2dFactory.Get(); }
  IDWriteFactory2 *GetDWriteFactory() const { return m_dwriteFactory.Get(); }
  IWICImagingFactory2 *GetWicImagingFactory() const {
    return m_wicFactory.Get();
  }

private:
  // Private methods related to the Direct3D device, and resources based on that
  // device.
  void CreateDeviceIndependentResources();
  void InitializeUsingHolographicSpace();
  void CreateDeviceResources();

  // Direct3D objects.
  Microsoft::WRL::ComPtr<ID3D11Device4> m_d3dDevice;
  Microsoft::WRL::ComPtr<ID3D11DeviceContext3> m_d3dContext;
  Microsoft::WRL::ComPtr<IDXGIAdapter3> m_dxgiAdapter;

  // Direct3D interop objects.
  winrt::Windows::Graphics::DirectX::Direct3D11::IDirect3DDevice
      m_d3dInteropDevice;

  // Direct2D factories.
  Microsoft::WRL::ComPtr<ID2D1Factory2> m_d2dFactory;
  Microsoft::WRL::ComPtr<IDWriteFactory2> m_dwriteFactory;
  Microsoft::WRL::ComPtr<IWICImagingFactory2> m_wicFactory;

  // The holographic space provides a preferred DXGI adapter ID.
  winrt::Windows::Graphics::Holographic::HolographicSpace m_holographicSpace =
      nullptr;

  // Properties of the Direct3D device currently in use.
  D3D_FEATURE_LEVEL m_d3dFeatureLevel = D3D_FEATURE_LEVEL_10_0;

  // The IDeviceNotify can be held directly as it owns the DeviceResources.
  IDeviceNotify *m_deviceNotify = nullptr;

  // Whether or not the current Direct3D device supports the optional feature
  // for setting the render target array index from the vertex shader stage.
  bool m_supportsVprt = false;

  // Back buffer resources, etc. for attached holographic cameras.
  std::map<UINT32, std::unique_ptr<CameraResources>> m_cameraResources;
  std::mutex m_cameraResourcesLock;
};
} // namespace DX

// Device-based resources for holographic cameras are stored in a std::map.
// Access this list by providing a callback to this function, and the std::map
// will be guarded from add and remove events until the callback returns. The
// callback is processed immediately and must not contain any nested calls to
// UseHolographicCameraResources. The callback takes a parameter of type
// std::map<UINT32, std::unique_ptr<DX::CameraResources>>& through which the
// list of cameras will be accessed.
template <typename RetType, typename LCallback>
RetType
DX::DeviceResources::UseHolographicCameraResources(LCallback const &callback) {
  std::lock_guard<std::mutex> guard(m_cameraResourcesLock);
  return callback(m_cameraResources);
}
@@ -1,65 +0,0 @@
#pragma once

namespace DX {
// Function that reads from a binary file asynchronously.
inline std::future<std::vector<byte>>
ReadDataAsync(const std::wstring_view &filename) {
  using namespace winrt::Windows::Storage;
  using namespace winrt::Windows::Storage::Streams;

  IBuffer fileBuffer = co_await PathIO::ReadBufferAsync(filename);

  std::vector<byte> returnBuffer;
  returnBuffer.resize(fileBuffer.Length());
  DataReader::FromBuffer(fileBuffer)
      .ReadBytes(winrt::array_view<uint8_t>(returnBuffer));
  return returnBuffer;
}

// Converts a length in device-independent pixels (DIPs) to a length in physical
// pixels.
inline float ConvertDipsToPixels(float dips, float dpi) {
  constexpr float dipsPerInch = 96.0f;
  return floorf(dips * dpi / dipsPerInch + 0.5f); // Round to nearest integer.
}

inline winrt::Windows::Graphics::DirectX::Direct3D11::IDirect3DSurface
CreateDepthTextureInteropObject(
    const Microsoft::WRL::ComPtr<ID3D11Texture2D> spTexture2D) {
  // Direct3D interop APIs are used to provide the buffer to the WinRT API.
  Microsoft::WRL::ComPtr<IDXGIResource1> depthStencilResource;
  winrt::check_hresult(spTexture2D.As(&depthStencilResource));
  Microsoft::WRL::ComPtr<IDXGISurface2> depthDxgiSurface;
  winrt::check_hresult(
      depthStencilResource->CreateSubresourceSurface(0, &depthDxgiSurface));
  winrt::com_ptr<::IInspectable> inspectableSurface;
  winrt::check_hresult(CreateDirect3D11SurfaceFromDXGISurface(
      depthDxgiSurface.Get(),
      reinterpret_cast<IInspectable **>(winrt::put_abi(inspectableSurface))));

  return inspectableSurface
      .as<winrt::Windows::Graphics::DirectX::Direct3D11::IDirect3DSurface>();
}

#if defined(_DEBUG)
// Check for SDK Layer support.
inline bool SdkLayersAvailable() {
  HRESULT hr = D3D11CreateDevice(
      nullptr,
      D3D_DRIVER_TYPE_NULL, // There is no need to create a real hardware
                            // device.
      0,
      D3D11_CREATE_DEVICE_DEBUG, // Check for the SDK layers.
      nullptr,                   // Any feature level will do.
      0,
      D3D11_SDK_VERSION, // Always set this to D3D11_SDK_VERSION for Windows
                         // Runtime apps.
      nullptr,           // No need to keep the D3D device reference.
      nullptr,           // No need to know the feature level.
      nullptr            // No need to keep the D3D device context reference.
  );

  return SUCCEEDED(hr);
}
#endif
} // namespace DX
@@ -1,179 +0,0 @@
#pragma once

namespace DX {
// Helper class for animation and simulation timing.
class StepTimer {
public:
  StepTimer()
      : m_elapsedTicks(0), m_totalTicks(0), m_leftOverTicks(0), m_frameCount(0),
        m_framesPerSecond(0), m_framesThisSecond(0), m_qpcSecondCounter(0),
        m_isFixedTimeStep(false), m_targetElapsedTicks(TicksPerSecond / 60) {
    m_qpcFrequency = GetPerformanceFrequency();

    // Initialize max delta to 1/10 of a second.
    m_qpcMaxDelta = m_qpcFrequency / 10;
  }

  // Get elapsed time since the previous Update call.
  uint64_t GetElapsedTicks() const { return m_elapsedTicks; }
  double GetElapsedSeconds() const { return TicksToSeconds(m_elapsedTicks); }

  // Get total time since the start of the program.
  uint64_t GetTotalTicks() const { return m_totalTicks; }
  double GetTotalSeconds() const { return TicksToSeconds(m_totalTicks); }

  // Get total number of updates since start of the program.
  uint32_t GetFrameCount() const { return m_frameCount; }

  // Get the current framerate.
  uint32_t GetFramesPerSecond() const { return m_framesPerSecond; }

  // Set whether to use fixed or variable timestep mode.
  void SetFixedTimeStep(bool isFixedTimestep) {
    m_isFixedTimeStep = isFixedTimestep;
  }

  // Set how often to call Update when in fixed timestep mode.
  void SetTargetElapsedTicks(uint64_t targetElapsed) {
    m_targetElapsedTicks = targetElapsed;
  }
  void SetTargetElapsedSeconds(double targetElapsed) {
    m_targetElapsedTicks = SecondsToTicks(targetElapsed);
  }

  // Integer format represents time using 10,000,000 ticks per second.
  static const uint64_t TicksPerSecond = 10'000'000;

  static double TicksToSeconds(uint64_t ticks) {
    return static_cast<double>(ticks) / TicksPerSecond;
  }
  static uint64_t SecondsToTicks(double seconds) {
    return static_cast<uint64_t>(seconds * TicksPerSecond);
  }

  // Convenient wrapper for QueryPerformanceFrequency. Throws an exception if
  // the call to QueryPerformanceFrequency fails.
  static inline uint64_t GetPerformanceFrequency() {
    LARGE_INTEGER freq;
    if (!QueryPerformanceFrequency(&freq)) {
      winrt::throw_last_error();
    }
    return freq.QuadPart;
  }

  // Gets the current number of ticks from QueryPerformanceCounter. Throws an
  // exception if the call to QueryPerformanceCounter fails.
  static inline int64_t GetTicks() {
    LARGE_INTEGER ticks;
    if (!QueryPerformanceCounter(&ticks)) {
      winrt::throw_last_error();
    }
    return ticks.QuadPart;
  }

  // After an intentional timing discontinuity (for instance a blocking IO
  // operation) call this to avoid having the fixed timestep logic attempt a set
  // of catch-up Update calls.

  void ResetElapsedTime() {
    m_qpcLastTime = GetTicks();

    m_leftOverTicks = 0;
    m_framesPerSecond = 0;
    m_framesThisSecond = 0;
    m_qpcSecondCounter = 0;
  }

  // Update timer state, calling the specified Update function the appropriate
  // number of times.
  template <typename TUpdate> void Tick(const TUpdate &update) {
    // Query the current time.
    uint64_t currentTime = GetTicks();
    uint64_t timeDelta = currentTime - m_qpcLastTime;

    m_qpcLastTime = currentTime;
    m_qpcSecondCounter += timeDelta;

    // Clamp excessively large time deltas (e.g. after paused in the debugger).
    if (timeDelta > m_qpcMaxDelta) {
      timeDelta = m_qpcMaxDelta;
    }

    // Convert QPC units into a canonical tick format. This cannot overflow due
    // to the previous clamp.
    timeDelta *= TicksPerSecond;
    timeDelta /= m_qpcFrequency;

    uint32_t lastFrameCount = m_frameCount;

    if (m_isFixedTimeStep) {
      // Fixed timestep update logic

      // If the app is running very close to the target elapsed time (within 1/4
      // of a millisecond) just clamp the clock to exactly match the target
      // value. This prevents tiny and irrelevant errors from accumulating over
      // time. Without this clamping, a game that requested a 60 fps fixed
      // update, running with vsync enabled on a 59.94 NTSC display, would
      // eventually accumulate enough tiny errors that it would drop a frame. It
      // is better to just round small deviations down to zero to leave things
      // running smoothly.

      if (abs(static_cast<int64_t>(timeDelta - m_targetElapsedTicks)) <
          TicksPerSecond / 4000) {
        timeDelta = m_targetElapsedTicks;
      }

      m_leftOverTicks += timeDelta;

      while (m_leftOverTicks >= m_targetElapsedTicks) {
        m_elapsedTicks = m_targetElapsedTicks;
        m_totalTicks += m_targetElapsedTicks;
        m_leftOverTicks -= m_targetElapsedTicks;
        m_frameCount++;

        update();
      }
    } else {
      // Variable timestep update logic.
      m_elapsedTicks = timeDelta;
      m_totalTicks += timeDelta;
      m_leftOverTicks = 0;
      m_frameCount++;

      update();
    }

    // Track the current framerate.
    if (m_frameCount != lastFrameCount) {
      m_framesThisSecond++;
    }

    if (m_qpcSecondCounter >= static_cast<uint64_t>(m_qpcFrequency)) {
      m_framesPerSecond = m_framesThisSecond;
      m_framesThisSecond = 0;
      m_qpcSecondCounter %= m_qpcFrequency;
    }
  }

private:
  // Source timing data uses QPC units.
  uint64_t m_qpcFrequency;
  uint64_t m_qpcLastTime;
  uint64_t m_qpcMaxDelta;

  // Derived timing data uses a canonical tick format.
  uint64_t m_elapsedTicks;
  uint64_t m_totalTicks;
  uint64_t m_leftOverTicks;

  // Members for tracking the framerate.
  uint32_t m_frameCount;
  uint32_t m_framesPerSecond;
  uint32_t m_framesThisSecond;
  uint64_t m_qpcSecondCounter;

  // Members for configuring fixed timestep mode.
  bool m_isFixedTimeStep;
  uint64_t m_targetElapsedTicks;
};
} // namespace DX
@@ -1,31 +0,0 @@
// Per-vertex data from the vertex shader.
struct GeometryShaderInput
{
    min16float4 pos   : SV_POSITION;
    min16float3 color : COLOR0;
    uint instId       : TEXCOORD0;
};

// Per-vertex data passed to the rasterizer.
struct GeometryShaderOutput
{
    min16float4 pos   : SV_POSITION;
    min16float3 color : COLOR0;
    uint rtvId        : SV_RenderTargetArrayIndex;
};

// This geometry shader is a pass-through that leaves the geometry unmodified
// and sets the render target array index.
[maxvertexcount(3)]
void main(triangle GeometryShaderInput input[3], inout TriangleStream<GeometryShaderOutput> outStream)
{
    GeometryShaderOutput output;
    [unroll(3)]
    for (int i = 0; i < 3; ++i)
    {
        output.pos = input[i].pos;
        output.color = input[i].color;
        output.rtvId = input[i].instId;
        outStream.Append(output);
    }
}
@@ -1,13 +0,0 @@
// Per-pixel color data passed through the pixel shader.
struct PixelShaderInput
{
    min16float4 pos   : SV_POSITION;
    min16float3 color : COLOR0;
};

// The pixel shader passes through the color data. The color data from
// is interpolated and assigned to a pixel at the rasterization step.
min16float4 main(PixelShaderInput input) : SV_TARGET
{
    return min16float4(input.color, 1.0f);
}
@@ -1,20 +0,0 @@
#pragma once

namespace Immersive {
// Constant buffer used to send hologram position transform to the shader
// pipeline.
struct ModelConstantBuffer {
  DirectX::XMFLOAT4X4 model;
};

// Assert that the constant buffer remains 16-byte aligned (best practice).
static_assert((sizeof(ModelConstantBuffer) % (sizeof(float) * 4)) == 0,
              "Model constant buffer size must be 16-byte aligned (16 bytes is "
              "the length of four floats).");

// Used to send per-vertex data to the vertex shader.
struct VertexPositionColor {
  DirectX::XMFLOAT3 pos;
  DirectX::XMFLOAT3 color;
};
} // namespace Immersive
@@ -1,51 +0,0 @@
#include "pch.h"
#include "SpatialInputHandler.h"
#include <functional>

using namespace Immersive;

using namespace std::placeholders;
using namespace winrt::Windows::Foundation;
using namespace winrt::Windows::UI::Input::Spatial;

// Creates and initializes a GestureRecognizer that listens to a Person.
SpatialInputHandler::SpatialInputHandler() {
  // The interaction manager provides an event that informs the app when
  // spatial interactions are detected.
  m_interactionManager = SpatialInteractionManager::GetForCurrentView();

  // Bind a handler to the SourcePressed event.
  m_sourcePressedEventToken = m_interactionManager.SourcePressed(
      bind(&SpatialInputHandler::OnSourcePressed, this, _1, _2));

  //
  // TODO: Expand this class to use other gesture-based input events as
  // applicable to
  //       your app.
  //
}

SpatialInputHandler::~SpatialInputHandler() {
  // Unregister our handler for the OnSourcePressed event.
  m_interactionManager.SourcePressed(m_sourcePressedEventToken);
}

// Checks if the user performed an input gesture since the last call to this
// method. Allows the main update loop to check for asynchronous changes to the
// user input state.
SpatialInteractionSourceState SpatialInputHandler::CheckForInput() {
  SpatialInteractionSourceState sourceState = m_sourceState;
  m_sourceState = nullptr;
  return sourceState;
}

void SpatialInputHandler::OnSourcePressed(
    SpatialInteractionManager const &,
    SpatialInteractionSourceEventArgs const &args) {
  m_sourceState = args.State();

  //
  // TODO: In your app or game engine, rewrite this method to queue
  //       input events in your input class or event handler.
  //
}
@@ -1,34 +0,0 @@
#pragma once

namespace Immersive {
// Sample gesture handler.
// Hooks up events to recognize a tap gesture, and keeps track of input using a
// boolean value.
class SpatialInputHandler {
public:
  SpatialInputHandler();
  ~SpatialInputHandler();

  winrt::Windows::UI::Input::Spatial::SpatialInteractionSourceState
  CheckForInput();

private:
  // Interaction event handler.
  void OnSourcePressed(
      winrt::Windows::UI::Input::Spatial::SpatialInteractionManager const
          &sender,
      winrt::Windows::UI::Input::Spatial::
          SpatialInteractionSourceEventArgs const &args);

  // API objects used to process gesture input, and generate gesture events.
  winrt::Windows::UI::Input::Spatial::SpatialInteractionManager
      m_interactionManager = nullptr;

  // Event registration token.
  winrt::event_token m_sourcePressedEventToken;

  // Used to indicate that a Pressed input event was received this frame.
  winrt::Windows::UI::Input::Spatial::SpatialInteractionSourceState
      m_sourceState = nullptr;
};
} // namespace Immersive
@@ -1,266 +0,0 @@
#include "pch.h"
#include "SpinningCubeRenderer.h"
#include "Common/DirectXHelper.h"

using namespace Immersive;
using namespace DirectX;
using namespace winrt::Windows::Foundation::Numerics;
using namespace winrt::Windows::UI::Input::Spatial;

// Loads vertex and pixel shaders from files and instantiates the cube geometry.
SpinningCubeRenderer::SpinningCubeRenderer(
    std::shared_ptr<DX::DeviceResources> const &deviceResources)
    : m_deviceResources(deviceResources) {
  CreateDeviceDependentResources();
}

// This function uses a SpatialPointerPose to position the world-locked hologram
// two meters in front of the user's heading.
void SpinningCubeRenderer::PositionHologram(
    SpatialPointerPose const &pointerPose) {
  if (pointerPose != nullptr) {
    // Get the gaze direction relative to the given coordinate system.
    const float3 headPosition = pointerPose.Head().Position();
    const float3 headDirection = pointerPose.Head().ForwardDirection();

    // The hologram is positioned two meters along the user's gaze direction.
    constexpr float distanceFromUser = 2.0f; // meters
    const float3 gazeAtTwoMeters =
        headPosition + (distanceFromUser * headDirection);

    // This will be used as the translation component of the hologram's
    // model transform.
    SetPosition(gazeAtTwoMeters);
  }
}

// Called once per frame. Rotates the cube, and calculates and sets the model
// matrix relative to the position transform indicated by
// hologramPositionTransform.
void SpinningCubeRenderer::Update(DX::StepTimer const &timer) {
  // Rotate the cube.
  // Convert degrees to radians, then convert seconds to rotation angle.
  const float radiansPerSecond = XMConvertToRadians(m_degreesPerSecond);
  const double totalRotation = timer.GetTotalSeconds() * radiansPerSecond;
  const float radians = static_cast<float>(fmod(totalRotation, XM_2PI));
  const XMMATRIX modelRotation = XMMatrixRotationY(-radians);

  // Position the cube.
  const XMMATRIX modelTranslation =
      XMMatrixTranslationFromVector(XMLoadFloat3(&m_position));

  // Multiply to get the transform matrix.
  // Note that this transform does not enforce a particular coordinate system.
  // The calling class is responsible for rendering this content in a consistent
  // manner.
  const XMMATRIX modelTransform =
      XMMatrixMultiply(modelRotation, modelTranslation);

  // The view and projection matrices are provided by the system; they are
  // associated with holographic cameras, and updated on a per-camera basis.
  // Here, we provide the model transform for the sample hologram. The model
  // transform matrix is transposed to prepare it for the shader.
  XMStoreFloat4x4(&m_modelConstantBufferData.model,
                  XMMatrixTranspose(modelTransform));

  // Loading is asynchronous. Resources must be created before they can be
  // updated.
  if (!m_loadingComplete) {
    return;
  }

  // Use the D3D device context to update Direct3D device-based resources.
  const auto context = m_deviceResources->GetD3DDeviceContext();

  // Update the model transform buffer for the hologram.
  context->UpdateSubresource(m_modelConstantBuffer.Get(), 0, nullptr,
                             &m_modelConstantBufferData, 0, 0);
}

// Renders one frame using the vertex and pixel shaders.
// On devices that do not support the D3D11_FEATURE_D3D11_OPTIONS3::
// VPAndRTArrayIndexFromAnyShaderFeedingRasterizer optional feature,
// a pass-through geometry shader is also used to set the render
// target array index.
void SpinningCubeRenderer::Render() {
  // Loading is asynchronous. Resources must be created before drawing can
  // occur.
  if (!m_loadingComplete) {
    return;
  }

  const auto context = m_deviceResources->GetD3DDeviceContext();

  // Each vertex is one instance of the VertexPositionColor struct.
  const UINT stride = sizeof(VertexPositionColor);
  const UINT offset = 0;
  context->IASetVertexBuffers(0, 1, m_vertexBuffer.GetAddressOf(), &stride,
                              &offset);
  context->IASetIndexBuffer(m_indexBuffer.Get(),
                            DXGI_FORMAT_R16_UINT, // Each index is one 16-bit
                                                  // unsigned integer (short).
                            0);
  context->IASetPrimitiveTopology(D3D11_PRIMITIVE_TOPOLOGY_TRIANGLELIST);
  context->IASetInputLayout(m_inputLayout.Get());

  // Attach the vertex shader.
  context->VSSetShader(m_vertexShader.Get(), nullptr, 0);
  // Apply the model constant buffer to the vertex shader.
  context->VSSetConstantBuffers(0, 1, m_modelConstantBuffer.GetAddressOf());

  if (!m_usingVprtShaders) {
    // On devices that do not support the D3D11_FEATURE_D3D11_OPTIONS3::
    // VPAndRTArrayIndexFromAnyShaderFeedingRasterizer optional feature,
    // a pass-through geometry shader is used to set the render target
    // array index.
    context->GSSetShader(m_geometryShader.Get(), nullptr, 0);
  }

  // Attach the pixel shader.
  context->PSSetShader(m_pixelShader.Get(), nullptr, 0);

  // Draw the objects.
  context->DrawIndexedInstanced(m_indexCount, // Index count per instance.
                                2,            // Instance count.
                                0,            // Start index location.
                                0,            // Base vertex location.
                                0             // Start instance location.
  );
}

std::future<void> SpinningCubeRenderer::CreateDeviceDependentResources() {
  m_usingVprtShaders = m_deviceResources->GetDeviceSupportsVprt();

  // On devices that do support the D3D11_FEATURE_D3D11_OPTIONS3::
  // VPAndRTArrayIndexFromAnyShaderFeedingRasterizer optional feature
  // we can avoid using a pass-through geometry shader to set the render
  // target array index, thus avoiding any overhead that would be
  // incurred by setting the geometry shader stage.
  std::wstring vertexShaderFileName = m_usingVprtShaders
                                          ? L"ms-appx:///VprtVertexShader.cso"
                                          : L"ms-appx:///VertexShader.cso";

  // Shaders will be loaded asynchronously.

  // After the vertex shader file is loaded, create the shader and input layout.
  std::vector<byte> vertexShaderFileData =
      co_await DX::ReadDataAsync(vertexShaderFileName);
  winrt::check_hresult(m_deviceResources->GetD3DDevice()->CreateVertexShader(
      vertexShaderFileData.data(), vertexShaderFileData.size(), nullptr,
      &m_vertexShader));

  constexpr std::array<D3D11_INPUT_ELEMENT_DESC, 2> vertexDesc = {{
      {"POSITION", 0, DXGI_FORMAT_R32G32B32_FLOAT, 0, 0,
       D3D11_INPUT_PER_VERTEX_DATA, 0},
      {"COLOR", 0, DXGI_FORMAT_R32G32B32_FLOAT, 0, 12,
       D3D11_INPUT_PER_VERTEX_DATA, 0},
  }};

  winrt::check_hresult(m_deviceResources->GetD3DDevice()->CreateInputLayout(
      vertexDesc.data(), static_cast<UINT>(vertexDesc.size()),
      vertexShaderFileData.data(),
      static_cast<UINT>(vertexShaderFileData.size()), &m_inputLayout));

  // After the pixel shader file is loaded, create the shader and constant
  // buffer.
  std::vector<byte> pixelShaderFileData =
      co_await DX::ReadDataAsync(L"ms-appx:///PixelShader.cso");
  winrt::check_hresult(m_deviceResources->GetD3DDevice()->CreatePixelShader(
      pixelShaderFileData.data(), pixelShaderFileData.size(), nullptr,
      &m_pixelShader));

  const CD3D11_BUFFER_DESC constantBufferDesc(sizeof(ModelConstantBuffer),
                                              D3D11_BIND_CONSTANT_BUFFER);
  winrt::check_hresult(m_deviceResources->GetD3DDevice()->CreateBuffer(
      &constantBufferDesc, nullptr, &m_modelConstantBuffer));

  if (!m_usingVprtShaders) {
    // Load the pass-through geometry shader.
    std::vector<byte> geometryShaderFileData =
        co_await DX::ReadDataAsync(L"ms-appx:///GeometryShader.cso");

    // After the pass-through geometry shader file is loaded, create the shader.
    winrt::check_hresult(
        m_deviceResources->GetD3DDevice()->CreateGeometryShader(
            geometryShaderFileData.data(), geometryShaderFileData.size(),
            nullptr, &m_geometryShader));
  }

  // Load mesh vertices. Each vertex has a position and a color.
  // Note that the cube size has changed from the default DirectX app
  // template. Windows Holographic is scaled in meters, so to draw the
  // cube at a comfortable size we made the cube width 0.2 m (20 cm).
  static const std::array<VertexPositionColor, 8> cubeVertices = {{
      {XMFLOAT3(-0.1f, -0.1f, -0.1f), XMFLOAT3(0.0f, 0.0f, 0.0f)},
      {XMFLOAT3(-0.1f, -0.1f, 0.1f), XMFLOAT3(0.0f, 0.0f, 1.0f)},
      {XMFLOAT3(-0.1f, 0.1f, -0.1f), XMFLOAT3(0.0f, 1.0f, 0.0f)},
      {XMFLOAT3(-0.1f, 0.1f, 0.1f), XMFLOAT3(0.0f, 1.0f, 1.0f)},
      {XMFLOAT3(0.1f, -0.1f, -0.1f), XMFLOAT3(1.0f, 0.0f, 0.0f)},
      {XMFLOAT3(0.1f, -0.1f, 0.1f), XMFLOAT3(1.0f, 0.0f, 1.0f)},
      {XMFLOAT3(0.1f, 0.1f, -0.1f), XMFLOAT3(1.0f, 1.0f, 0.0f)},
      {XMFLOAT3(0.1f, 0.1f, 0.1f), XMFLOAT3(1.0f, 1.0f, 1.0f)},
  }};

  D3D11_SUBRESOURCE_DATA vertexBufferData = {0};
  vertexBufferData.pSysMem = cubeVertices.data();
  vertexBufferData.SysMemPitch = 0;
  vertexBufferData.SysMemSlicePitch = 0;
  const CD3D11_BUFFER_DESC vertexBufferDesc(
      sizeof(VertexPositionColor) * static_cast<UINT>(cubeVertices.size()),
      D3D11_BIND_VERTEX_BUFFER);
  winrt::check_hresult(m_deviceResources->GetD3DDevice()->CreateBuffer(
      &vertexBufferDesc, &vertexBufferData, &m_vertexBuffer));

  // Load mesh indices. Each trio of indices represents
  // a triangle to be rendered on the screen.
  // For example: 2,1,0 means that the vertices with indexes
  // 2, 1, and 0 from the vertex buffer compose the
  // first triangle of this mesh.
  // Note that the winding order is clockwise by default.
  constexpr std::array<unsigned short, 36> cubeIndices = {{
      2, 1, 0, // -x
      2, 3, 1,

      6, 4, 5, // +x
      6, 5, 7,

      0, 1, 5, // -y
      0, 5, 4,

      2, 6, 7, // +y
      2, 7, 3,

      0, 4, 6, // -z
      0, 6, 2,

      1, 3, 7, // +z
      1, 7, 5,
  }};

  m_indexCount = static_cast<unsigned int>(cubeIndices.size());
|
||||
|
||||
D3D11_SUBRESOURCE_DATA indexBufferData = {0};
|
||||
indexBufferData.pSysMem = cubeIndices.data();
|
||||
indexBufferData.SysMemPitch = 0;
|
||||
indexBufferData.SysMemSlicePitch = 0;
|
||||
CD3D11_BUFFER_DESC indexBufferDesc(sizeof(unsigned short) *
|
||||
static_cast<UINT>(cubeIndices.size()),
|
||||
D3D11_BIND_INDEX_BUFFER);
|
||||
winrt::check_hresult(m_deviceResources->GetD3DDevice()->CreateBuffer(
|
||||
&indexBufferDesc, &indexBufferData, &m_indexBuffer));
|
||||
|
||||
// Once the cube is loaded, the object is ready to be rendered.
|
||||
m_loadingComplete = true;
|
||||
};
|
||||
|
||||
void SpinningCubeRenderer::ReleaseDeviceDependentResources() {
|
||||
m_loadingComplete = false;
|
||||
m_usingVprtShaders = false;
|
||||
m_vertexShader.Reset();
|
||||
m_inputLayout.Reset();
|
||||
m_pixelShader.Reset();
|
||||
m_geometryShader.Reset();
|
||||
m_modelConstantBuffer.Reset();
|
||||
m_vertexBuffer.Reset();
|
||||
m_indexBuffer.Reset();
|
||||
}
|
|
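Editor's note: the m_usingVprtShaders flag above comes from DeviceResources::GetDeviceSupportsVprt() in the removed Common/ sources. For readers without that file at hand, the capability probe behind such a flag is, in essence, a D3D11 feature query. A minimal sketch, assuming a helper function name of my own (this is not the removed file's exact code):

#include <d3d11_3.h>

// Returns true when the device lets a vertex shader write
// SV_RenderTargetArrayIndex directly, so the pass-through geometry
// shader stage used above can be skipped (the "VPRT" path).
bool DeviceSupportsVprt(ID3D11Device *device) {
  D3D11_FEATURE_DATA_D3D11_OPTIONS3 options = {};
  HRESULT hr = device->CheckFeatureSupport(
      D3D11_FEATURE_D3D11_OPTIONS3, &options, sizeof(options));
  return SUCCEEDED(hr) &&
         options.VPAndRTArrayIndexFromAnyShaderFeedingRasterizer;
}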

@@ -1,57 +0,0 @@
#pragma once

#include "../Common/DeviceResources.h"
#include "../Common/StepTimer.h"
#include "ShaderStructures.h"

namespace Immersive {
// This sample renderer instantiates a basic rendering pipeline.
class SpinningCubeRenderer {
public:
  SpinningCubeRenderer(
      std::shared_ptr<DX::DeviceResources> const &deviceResources);
  std::future<void> CreateDeviceDependentResources();
  void ReleaseDeviceDependentResources();
  void Update(DX::StepTimer const &timer);
  void Render();

  // Repositions the sample hologram.
  void
  PositionHologram(winrt::Windows::UI::Input::Spatial::SpatialPointerPose const
                       &pointerPose);

  // Property accessors.
  void SetPosition(winrt::Windows::Foundation::Numerics::float3 const &pos) {
    m_position = pos;
  }
  winrt::Windows::Foundation::Numerics::float3 const &GetPosition() {
    return m_position;
  }

private:
  // Cached pointer to device resources.
  std::shared_ptr<DX::DeviceResources> m_deviceResources;

  // Direct3D resources for cube geometry.
  Microsoft::WRL::ComPtr<ID3D11InputLayout> m_inputLayout;
  Microsoft::WRL::ComPtr<ID3D11Buffer> m_vertexBuffer;
  Microsoft::WRL::ComPtr<ID3D11Buffer> m_indexBuffer;
  Microsoft::WRL::ComPtr<ID3D11VertexShader> m_vertexShader;
  Microsoft::WRL::ComPtr<ID3D11GeometryShader> m_geometryShader;
  Microsoft::WRL::ComPtr<ID3D11PixelShader> m_pixelShader;
  Microsoft::WRL::ComPtr<ID3D11Buffer> m_modelConstantBuffer;

  // System resources for cube geometry.
  ModelConstantBuffer m_modelConstantBufferData;
  uint32_t m_indexCount = 0;

  // Variables used with the rendering loop.
  bool m_loadingComplete = false;
  float m_degreesPerSecond = 45.f;
  winrt::Windows::Foundation::Numerics::float3 m_position = {0.f, 0.f, -2.f};

  // If the current D3D Device supports VPRT, we can avoid using a geometry
  // shader just to set the render target array index.
  bool m_usingVprtShaders = false;
};
} // namespace Immersive

@@ -1,11 +0,0 @@
// Per-vertex data passed to the geometry shader.
struct VertexShaderOutput
{
    min16float4 pos : SV_POSITION;
    min16float3 color : COLOR0;

    // The render target array index is set here in the vertex shader.
    uint viewId : SV_RenderTargetArrayIndex;
};

#include "VertexShaderShared.hlsl"

@@ -1,11 +0,0 @@
// Per-vertex data passed to the geometry shader.
struct VertexShaderOutput
{
    min16float4 pos : SV_POSITION;
    min16float3 color : COLOR0;

    // The render target array index will be set by the geometry shader.
    uint viewId : TEXCOORD0;
};

#include "VertexShaderShared.hlsl"

@@ -1,47 +0,0 @@
// A constant buffer that stores the model transform.
cbuffer ModelConstantBuffer : register(b0)
{
    float4x4 model;
};

// A constant buffer that stores each set of view and projection matrices in column-major format.
cbuffer ViewProjectionConstantBuffer : register(b1)
{
    float4x4 viewProjection[2];
};

// Per-vertex data used as input to the vertex shader.
struct VertexShaderInput
{
    min16float3 pos : POSITION;
    min16float3 color : COLOR0;
    uint instId : SV_InstanceID;
};

// Simple shader to do vertex processing on the GPU.
VertexShaderOutput main(VertexShaderInput input)
{
    VertexShaderOutput output;
    float4 pos = float4(input.pos, 1.0f);

    // Note which view this vertex has been sent to. Used for matrix lookup.
    // Taking the modulo of the instance ID allows geometry instancing to be used
    // along with stereo instanced drawing; in that case, two copies of each
    // instance would be drawn, one for left and one for right.
    int idx = input.instId % 2;

    // Transform the vertex position into world space.
    pos = mul(pos, model);

    // Correct for perspective and project the vertex position onto the screen.
    pos = mul(pos, viewProjection[idx]);
    output.pos = (min16float4)pos;

    // Pass the color through without modification.
    output.color = input.color;

    // Set the render target array index.
    output.viewId = idx;

    return output;
}
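Editor's note: the two cbuffers declared in the shared vertex shader are filled from the CPU side by plain structs; the removed project keeps the real definitions in Content/ShaderStructures.h. A minimal sketch of what those host-side mirrors look like, with the layout inferred from the shader rather than copied from the removed header:

#include <DirectXMath.h>

// Mirrors cbuffer ModelConstantBuffer : register(b0).
struct ModelConstantBuffer {
  DirectX::XMFLOAT4X4 model;
};

// Mirrors cbuffer ViewProjectionConstantBuffer : register(b1);
// one view-projection matrix per eye for instanced stereo rendering.
struct ViewProjectionConstantBuffer {
  DirectX::XMFLOAT4X4 viewProjection[2];
};

// Matches VertexShaderInput: SV_InstanceID is generated by the input
// assembler, so only position and color live in the vertex buffer.
struct VertexPositionColor {
  DirectX::XMFLOAT3 pos;
  DirectX::XMFLOAT3 color;
};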

@@ -1,557 +0,0 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at https://mozilla.org/MPL/2.0/. */

#include "pch.h"
#include "ImmersiveMain.h"
#include "Common/DirectXHelper.h"

#include <windows.graphics.directx.direct3d11.interop.h>

using namespace Immersive;
using namespace concurrency;
using namespace Microsoft::WRL;
using namespace std::placeholders;
using namespace winrt::Windows::Foundation::Numerics;
using namespace winrt::Windows::Gaming::Input;
using namespace winrt::Windows::Graphics::Holographic;
using namespace winrt::Windows::Graphics::DirectX::Direct3D11;
using namespace winrt::Windows::Perception::Spatial;
using namespace winrt::Windows::UI::Input::Spatial;

// Loads and initializes application assets when the application is loaded.
ImmersiveMain::ImmersiveMain(
    std::shared_ptr<DX::DeviceResources> const &deviceResources)
    : m_deviceResources(deviceResources) {
  // Register to be notified if the device is lost or recreated.
  m_deviceResources->RegisterDeviceNotify(this);

  // If connected, a game controller can also be used for input.
  m_gamepadAddedEventToken =
      Gamepad::GamepadAdded(bind(&ImmersiveMain::OnGamepadAdded, this, _1, _2));
  m_gamepadRemovedEventToken = Gamepad::GamepadRemoved(
      bind(&ImmersiveMain::OnGamepadRemoved, this, _1, _2));

  for (Gamepad const &gamepad : Gamepad::Gamepads()) {
    OnGamepadAdded(nullptr, gamepad);
  }

  m_canGetHolographicDisplayForCamera =
      winrt::Windows::Foundation::Metadata::ApiInformation::IsPropertyPresent(
          L"Windows.Graphics.Holographic.HolographicCamera", L"Display");
  m_canGetDefaultHolographicDisplay =
      winrt::Windows::Foundation::Metadata::ApiInformation::IsMethodPresent(
          L"Windows.Graphics.Holographic.HolographicDisplay", L"GetDefault");
  m_canCommitDirect3D11DepthBuffer =
      winrt::Windows::Foundation::Metadata::ApiInformation::IsMethodPresent(
          L"Windows.Graphics.Holographic.HolographicCameraRenderingParameters",
          L"CommitDirect3D11DepthBuffer");

  if (m_canGetDefaultHolographicDisplay) {
    // Subscribe for notifications about changes to the state of the default
    // HolographicDisplay and its SpatialLocator.
    m_holographicDisplayIsAvailableChangedEventToken =
        HolographicSpace::IsAvailableChanged(
            bind(&ImmersiveMain::OnHolographicDisplayIsAvailableChanged, this,
                 _1, _2));
  }

  // Acquire the current state of the default HolographicDisplay and its
  // SpatialLocator.
  OnHolographicDisplayIsAvailableChanged(nullptr, nullptr);
}

void ImmersiveMain::SetHolographicSpace(
    HolographicSpace const &holographicSpace) {
  UnregisterHolographicEventHandlers();

  m_holographicSpace = holographicSpace;

  //
  // TODO: Add code here to initialize your holographic content.
  //

#ifdef DRAW_SAMPLE_CONTENT
  // Initialize the sample hologram.
  m_spinningCubeRenderer =
      std::make_unique<SpinningCubeRenderer>(m_deviceResources);
  m_spatialInputHandler = std::make_unique<SpatialInputHandler>();
#endif

  // Respond to camera added events by creating any resources that are specific
  // to that camera, such as the back buffer render target view.
  // When we add an event handler for CameraAdded, the API layer will avoid
  // putting the new camera in new HolographicFrames until we complete the
  // deferral we created for that handler, or return from the handler without
  // creating a deferral. This allows the app to take more than one frame to
  // finish creating resources and loading assets for the new holographic
  // camera. This function should be registered before the app creates any
  // HolographicFrames.
  m_cameraAddedToken = m_holographicSpace.CameraAdded(
      std::bind(&ImmersiveMain::OnCameraAdded, this, _1, _2));

  // Respond to camera removed events by releasing resources that were created
  // for that camera. When the app receives a CameraRemoved event, it releases
  // all references to the back buffer right away. This includes render target
  // views, Direct2D target bitmaps, and so on. The app must also ensure that
  // the back buffer is not attached as a render target, as shown in
  // DeviceResources::ReleaseResourcesForBackBuffer.
  m_cameraRemovedToken = m_holographicSpace.CameraRemoved(
      std::bind(&ImmersiveMain::OnCameraRemoved, this, _1, _2));

  // Notes on spatial tracking APIs:
  // * Stationary reference frames are designed to provide a best-fit position
  //   relative to the overall space. Individual positions within that
  //   reference frame are allowed to drift slightly as the device learns more
  //   about the environment.
  // * When precise placement of individual holograms is required, a
  //   SpatialAnchor should be used to anchor the individual hologram to a
  //   position in the real world - for example, a point the user indicates to
  //   be of special interest. Anchor positions do not drift, but can be
  //   corrected; the anchor will use the corrected position starting in the
  //   next frame after the correction has occurred.
}

void ImmersiveMain::UnregisterHolographicEventHandlers() {
  if (m_holographicSpace != nullptr) {
    // Clear previous event registrations.
    m_holographicSpace.CameraAdded(m_cameraAddedToken);
    m_cameraAddedToken = {};
    m_holographicSpace.CameraRemoved(m_cameraRemovedToken);
    m_cameraRemovedToken = {};
  }

  if (m_spatialLocator != nullptr) {
    m_spatialLocator.LocatabilityChanged(m_locatabilityChangedToken);
  }
}

ImmersiveMain::~ImmersiveMain() {
  // Deregister device notification.
  m_deviceResources->RegisterDeviceNotify(nullptr);

  UnregisterHolographicEventHandlers();

  Gamepad::GamepadAdded(m_gamepadAddedEventToken);
  Gamepad::GamepadRemoved(m_gamepadRemovedEventToken);
  HolographicSpace::IsAvailableChanged(
      m_holographicDisplayIsAvailableChangedEventToken);
}

// Updates the application state once per frame.
HolographicFrame ImmersiveMain::Update() {
  // Before doing the timer update, there is some work to do per-frame
  // to maintain holographic rendering. First, we will get information
  // about the current frame.

  // The HolographicFrame has information that the app needs in order
  // to update and render the current frame. The app begins each new
  // frame by calling CreateNextFrame.
  HolographicFrame holographicFrame = m_holographicSpace.CreateNextFrame();

  // Get a prediction of where holographic cameras will be when this frame
  // is presented.
  HolographicFramePrediction prediction = holographicFrame.CurrentPrediction();

  // Back buffers can change from frame to frame. Validate each buffer, and
  // recreate resource views and depth buffers as needed.
  m_deviceResources->EnsureCameraResources(holographicFrame, prediction);

#ifdef DRAW_SAMPLE_CONTENT
  if (m_stationaryReferenceFrame != nullptr) {
    // Check for new input state since the last frame.
    for (GamepadWithButtonState &gamepadWithButtonState : m_gamepads) {
      bool buttonDownThisUpdate =
          ((gamepadWithButtonState.gamepad.GetCurrentReading().Buttons &
            GamepadButtons::A) == GamepadButtons::A);
      if (buttonDownThisUpdate &&
          !gamepadWithButtonState.buttonAWasPressedLastFrame) {
        m_pointerPressed = true;
      }
      gamepadWithButtonState.buttonAWasPressedLastFrame = buttonDownThisUpdate;
    }

    SpatialInteractionSourceState pointerState =
        m_spatialInputHandler->CheckForInput();
    SpatialPointerPose pose = nullptr;
    if (pointerState != nullptr) {
      pose = pointerState.TryGetPointerPose(
          m_stationaryReferenceFrame.CoordinateSystem());
    } else if (m_pointerPressed) {
      pose = SpatialPointerPose::TryGetAtTimestamp(
          m_stationaryReferenceFrame.CoordinateSystem(),
          prediction.Timestamp());
    }
    m_pointerPressed = false;

    // When a Pressed gesture is detected, the sample hologram will be
    // repositioned two meters in front of the user.
    m_spinningCubeRenderer->PositionHologram(pose);
  }
#endif

  m_timer.Tick([this]() {
    //
    // TODO: Update scene objects.
    //
    // Put time-based updates here. By default this code will run once per frame,
    // but if you change the StepTimer to use a fixed time step this code will
    // run as many times as needed to get to the current step.
    //

#ifdef DRAW_SAMPLE_CONTENT
    m_spinningCubeRenderer->Update(m_timer);
#endif
  });

  if (!m_canCommitDirect3D11DepthBuffer) {
    // On versions of the platform that do not support the
    // CommitDirect3D11DepthBuffer API, we can control image stabilization by
    // setting a focus point with optional plane normal and velocity.
    for (HolographicCameraPose const &cameraPose : prediction.CameraPoses()) {
#ifdef DRAW_SAMPLE_CONTENT
      // The HolographicCameraRenderingParameters class provides access to set
      // the image stabilization parameters.
      HolographicCameraRenderingParameters renderingParameters =
          holographicFrame.GetRenderingParameters(cameraPose);

      // SetFocusPoint informs the system about a specific point in your scene
      // to prioritize for image stabilization. The focus point is set
      // independently for each holographic camera. When setting the focus
      // point, put it on or near content that the user is looking at. In this
      // example, we put the focus point at the center of the sample hologram.
      // You can also set the relative velocity and facing of the stabilization
      // plane using overloads of this method.
      if (m_stationaryReferenceFrame != nullptr) {
        renderingParameters.SetFocusPoint(
            m_stationaryReferenceFrame.CoordinateSystem(),
            m_spinningCubeRenderer->GetPosition());
      }
#endif
    }
  }

  // The holographic frame will be used to get up-to-date view and projection
  // matrices and to present the swap chain.
  return holographicFrame;
}

// Renders the current frame to each holographic camera, according to the
// current application and spatial positioning state. Returns true if the
// frame was rendered to at least one camera.
bool ImmersiveMain::Render(HolographicFrame const &holographicFrame) {
  // Don't try to render anything before the first Update.
  if (m_timer.GetFrameCount() == 0) {
    return false;
  }

  //
  // TODO: Add code for pre-pass rendering here.
  //
  // Take care of any tasks that are not specific to an individual holographic
  // camera. This includes anything that doesn't need the final view or
  // projection matrix, such as lighting maps.
  //

  // Lock the set of holographic camera resources, then draw to each camera
  // in this frame.
  return m_deviceResources->UseHolographicCameraResources<bool>(
      [this,
       holographicFrame](std::map<UINT32, std::unique_ptr<DX::CameraResources>>
                             &cameraResourceMap) {
        // Up-to-date frame predictions enhance the effectiveness of image
        // stabilization and allow more accurate positioning of holograms.
        holographicFrame.UpdateCurrentPrediction();
        HolographicFramePrediction prediction =
            holographicFrame.CurrentPrediction();

        bool atLeastOneCameraRendered = false;
        for (HolographicCameraPose const &cameraPose :
             prediction.CameraPoses()) {
          // This represents the device-based resources for a HolographicCamera.
          DX::CameraResources *pCameraResources =
              cameraResourceMap[cameraPose.HolographicCamera().Id()].get();

          // Get the device context.
          const auto context = m_deviceResources->GetD3DDeviceContext();
          const auto depthStencilView = pCameraResources->GetDepthStencilView();

          // Set render targets to the current holographic camera.
          ID3D11RenderTargetView *const targets[1] = {
              pCameraResources->GetBackBufferRenderTargetView()};
          context->OMSetRenderTargets(1, targets, depthStencilView);

          // Clear the back buffer and depth stencil view.
          if (m_canGetHolographicDisplayForCamera &&
              cameraPose.HolographicCamera().Display().IsOpaque()) {
            context->ClearRenderTargetView(targets[0],
                                           DirectX::Colors::CornflowerBlue);
          } else {
            context->ClearRenderTargetView(targets[0],
                                           DirectX::Colors::Transparent);
          }
          context->ClearDepthStencilView(
              depthStencilView, D3D11_CLEAR_DEPTH | D3D11_CLEAR_STENCIL, 1.0f,
              0);

          //
          // TODO: Replace the sample content with your own content.
          //
          // Notes regarding holographic content:
          // * For drawing, remember that you have the potential to fill
          //   twice as many pixels in a stereoscopic render target as compared
          //   to a non-stereoscopic render target of the same resolution.
          //   Avoid unnecessary or repeated writes to the same pixel, and only
          //   draw holograms that the user can see.
          // * To help occlude hologram geometry, you can create a depth map
          //   using geometry data obtained via the surface mapping APIs. You
          //   can use this depth map to avoid rendering holograms that are
          //   intended to be hidden behind tables, walls, monitors, and so on.
          // * On HolographicDisplays that are transparent, black pixels will
          //   appear transparent to the user. On such devices, you should
          //   clear the screen to Transparent as shown above. You should still
          //   use alpha blending to draw semitransparent holograms.
          //

          // The view and projection matrices for each holographic camera will
          // change every frame. This function refreshes the data in the
          // constant buffer for the holographic camera indicated by cameraPose.
          if (m_stationaryReferenceFrame) {
            pCameraResources->UpdateViewProjectionBuffer(
                m_deviceResources, cameraPose,
                m_stationaryReferenceFrame.CoordinateSystem());
          }

          // Attach the view/projection constant buffer for this camera to the
          // graphics pipeline.
          bool cameraActive =
              pCameraResources->AttachViewProjectionBuffer(m_deviceResources);

#ifdef DRAW_SAMPLE_CONTENT
          // Only render world-locked content when positional tracking is
          // active.
          if (cameraActive) {
            // Draw the sample hologram.
            m_spinningCubeRenderer->Render();
            if (m_canCommitDirect3D11DepthBuffer) {
              // On versions of the platform that support the
              // CommitDirect3D11DepthBuffer API, we can provide the depth
              // buffer to the system, and it will use depth information to
              // stabilize the image at a per-pixel level.
              HolographicCameraRenderingParameters renderingParameters =
                  holographicFrame.GetRenderingParameters(cameraPose);

              IDirect3DSurface interopSurface =
                  DX::CreateDepthTextureInteropObject(
                      pCameraResources->GetDepthStencilTexture2D());

              // Calling CommitDirect3D11DepthBuffer causes the system to queue
              // Direct3D commands to read the depth buffer. It will then use
              // that information to stabilize the image as the HolographicFrame
              // is presented.
              renderingParameters.CommitDirect3D11DepthBuffer(interopSurface);
            }
          }
#endif
          atLeastOneCameraRendered = true;
        }

        return atLeastOneCameraRendered;
      });
}

void ImmersiveMain::SaveAppState() {
  //
  // TODO: Insert code here to save your app state.
  // This method is called when the app is about to suspend.
  //
  // For example, store information in the SpatialAnchorStore.
  //
}

void ImmersiveMain::LoadAppState() {
  //
  // TODO: Insert code here to load your app state.
  // This method is called when the app resumes.
  //
  // For example, load information from the SpatialAnchorStore.
  //
}

void ImmersiveMain::OnPointerPressed() { m_pointerPressed = true; }

// Notifies classes that use Direct3D device resources that the device resources
// need to be released before this method returns.
void ImmersiveMain::OnDeviceLost() {
#ifdef DRAW_SAMPLE_CONTENT
  m_spinningCubeRenderer->ReleaseDeviceDependentResources();
#endif
}

// Notifies classes that use Direct3D device resources that the device resources
// may now be recreated.
void ImmersiveMain::OnDeviceRestored() {
#ifdef DRAW_SAMPLE_CONTENT
  m_spinningCubeRenderer->CreateDeviceDependentResources();
#endif
}

void ImmersiveMain::OnLocatabilityChanged(
    SpatialLocator const &sender,
    winrt::Windows::Foundation::IInspectable const &) {
  switch (sender.Locatability()) {
  case SpatialLocatability::Unavailable:
    // Holograms cannot be rendered.
    {
      winrt::hstring message(L"Warning! Positional tracking is " +
                             std::to_wstring(int(sender.Locatability())) +
                             L".\n");
      OutputDebugStringW(message.data());
    }
    break;

  // In the following three cases, it is still possible to place holograms using
  // a SpatialLocatorAttachedFrameOfReference.
  case SpatialLocatability::PositionalTrackingActivating:
    // The system is preparing to use positional tracking.

  case SpatialLocatability::OrientationOnly:
    // Positional tracking has not been activated.

  case SpatialLocatability::PositionalTrackingInhibited:
    // Positional tracking is temporarily inhibited. User action may be required
    // in order to restore positional tracking.
    break;

  case SpatialLocatability::PositionalTrackingActive:
    // Positional tracking is active. World-locked content can be rendered.
    break;
  }
}

void ImmersiveMain::OnCameraAdded(
    HolographicSpace const &,
    HolographicSpaceCameraAddedEventArgs const &args) {
  winrt::Windows::Foundation::Deferral deferral = args.GetDeferral();
  HolographicCamera holographicCamera = args.Camera();
  create_task([this, deferral, holographicCamera]() {
    //
    // TODO: Allocate resources for the new camera and load any content specific
    // to that camera. Note that the render target size (in pixels) is a
    // property of the HolographicCamera object, and can be used to create
    // off-screen render targets that match the resolution of the
    // HolographicCamera.
    //

    // Create device-based resources for the holographic camera and add it to
    // the list of cameras used for updates and rendering. Notes:
    // * Since this function may be called at any time, the
    //   AddHolographicCamera function waits until it can get a lock on the set
    //   of holographic camera resources before adding the new camera. At 60
    //   frames per second this wait should not take long.
    // * A subsequent Update will take the back buffer from the
    //   RenderingParameters of this camera's CameraPose and use it to create
    //   the ID3D11RenderTargetView for this camera. Content can then be
    //   rendered for the HolographicCamera.
    m_deviceResources->AddHolographicCamera(holographicCamera);

    // Holographic frame predictions will not include any information about this
    // camera until the deferral is completed.
    deferral.Complete();
  });
}

void ImmersiveMain::OnCameraRemoved(
    HolographicSpace const &,
    HolographicSpaceCameraRemovedEventArgs const &args) {
  create_task([this]() {
    //
    // TODO: Asynchronously unload or deactivate content resources (not back
    // buffer resources) that are specific only to the camera that was removed.
    //
  });

  // Before letting this callback return, ensure that all references to the back
  // buffer are released. Since this function may be called at any time, the
  // RemoveHolographicCamera function waits until it can get a lock on the set
  // of holographic camera resources before deallocating resources for this
  // camera. At 60 frames per second this wait should not take long.
  m_deviceResources->RemoveHolographicCamera(args.Camera());
}

void ImmersiveMain::OnGamepadAdded(winrt::Windows::Foundation::IInspectable,
                                   Gamepad const &args) {
  for (GamepadWithButtonState const &gamepadWithButtonState : m_gamepads) {
    if (args == gamepadWithButtonState.gamepad) {
      // This gamepad is already in the list.
      return;
    }
  }

  GamepadWithButtonState newGamepad = {args, false};
  m_gamepads.push_back(newGamepad);
}

void ImmersiveMain::OnGamepadRemoved(winrt::Windows::Foundation::IInspectable,
                                     Gamepad const &args) {
  m_gamepads.erase(
      std::remove_if(m_gamepads.begin(), m_gamepads.end(),
                     [&](GamepadWithButtonState &gamepadWithState) {
                       return gamepadWithState.gamepad == args;
                     }),
      m_gamepads.end());
}

void ImmersiveMain::OnHolographicDisplayIsAvailableChanged(
    winrt::Windows::Foundation::IInspectable,
    winrt::Windows::Foundation::IInspectable) {
  // Get the spatial locator for the default HolographicDisplay, if one is
  // available.
  SpatialLocator spatialLocator = nullptr;
  if (m_canGetDefaultHolographicDisplay) {
    HolographicDisplay defaultHolographicDisplay =
        HolographicDisplay::GetDefault();
    if (defaultHolographicDisplay) {
      spatialLocator = defaultHolographicDisplay.SpatialLocator();
    }
  } else {
    spatialLocator = SpatialLocator::GetDefault();
  }

  if (m_spatialLocator != spatialLocator) {
    // If the spatial locator is disconnected or replaced, we should discard all
    // state that was based on it.
    if (m_spatialLocator != nullptr) {
      m_spatialLocator.LocatabilityChanged(m_locatabilityChangedToken);
      m_spatialLocator = nullptr;
    }

    m_stationaryReferenceFrame = nullptr;

    if (spatialLocator != nullptr) {
      // Use the SpatialLocator from the default HolographicDisplay to track the
      // motion of the device.
      m_spatialLocator = spatialLocator;

      // Respond to changes in the positional tracking state.
      m_locatabilityChangedToken = m_spatialLocator.LocatabilityChanged(
          std::bind(&ImmersiveMain::OnLocatabilityChanged, this, _1, _2));

      // The simplest way to render world-locked holograms is to create a
      // stationary reference frame based on a SpatialLocator. This is roughly
      // analogous to creating a "world" coordinate system with the origin
      // placed at the device's position as the app is launched.
      m_stationaryReferenceFrame =
          m_spatialLocator.CreateStationaryFrameOfReferenceAtCurrentLocation();
    }
  }
}
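Editor's note: ImmersiveMain registers and revokes every handler through winrt::event_token values, which is easy to miss in the bulk above. The pattern in isolation, as a sketch using the same HolographicSpace API (the lambda body is a placeholder):

#include <winrt/Windows.Graphics.Holographic.h>

using winrt::Windows::Graphics::Holographic::HolographicSpace;
using winrt::Windows::Graphics::Holographic::HolographicSpaceCameraAddedEventArgs;

void TokenPatternExample(HolographicSpace const &space) {
  // Subscribing returns a token...
  winrt::event_token token = space.CameraAdded(
      [](HolographicSpace const &,
         HolographicSpaceCameraAddedEventArgs const &) {
        // allocate per-camera resources here
      });

  // ...and passing the token back to the same event accessor revokes the
  // handler, which is exactly what UnregisterHolographicEventHandlers and
  // the destructor above do.
  space.CameraAdded(token);
}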

@@ -1,151 +0,0 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at https://mozilla.org/MPL/2.0/. */

#pragma once

//
// Comment out this preprocessor definition to disable all of the
// sample content.
//
// To remove the content after disabling it:
// * Remove the unused code from your app's Main class.
// * Delete the Content folder provided with this template.
//
#define DRAW_SAMPLE_CONTENT

#include "Common/DeviceResources.h"
#include "Common/StepTimer.h"

#ifdef DRAW_SAMPLE_CONTENT
#include "Content/SpinningCubeRenderer.h"
#include "Content/SpatialInputHandler.h"
#endif

// Updates, renders, and presents holographic content using Direct3D.
namespace Immersive {
class ImmersiveMain : public DX::IDeviceNotify {
public:
  ImmersiveMain(std::shared_ptr<DX::DeviceResources> const &deviceResources);
  ~ImmersiveMain();

  // Sets the holographic space. This is our closest analogue to setting a new
  // window for the app.
  void SetHolographicSpace(
      winrt::Windows::Graphics::Holographic::HolographicSpace const
          &holographicSpace);

  // Starts the holographic frame and updates the content.
  winrt::Windows::Graphics::Holographic::HolographicFrame Update();

  // Renders holograms, including world-locked content.
  bool Render(winrt::Windows::Graphics::Holographic::HolographicFrame const
                  &holographicFrame);

  // Handle saving and loading of app state owned by AppMain.
  void SaveAppState();
  void LoadAppState();

  // Handle mouse input.
  void OnPointerPressed();

  // IDeviceNotify
  void OnDeviceLost() override;
  void OnDeviceRestored() override;

private:
  // Asynchronously creates resources for new holographic cameras.
  void OnCameraAdded(
      winrt::Windows::Graphics::Holographic::HolographicSpace const &sender,
      winrt::Windows::Graphics::Holographic::
          HolographicSpaceCameraAddedEventArgs const &args);

  // Synchronously releases resources for holographic cameras that are no longer
  // attached to the system.
  void OnCameraRemoved(
      winrt::Windows::Graphics::Holographic::HolographicSpace const &sender,
      winrt::Windows::Graphics::Holographic::
          HolographicSpaceCameraRemovedEventArgs const &args);

  // Used to notify the app when the positional tracking state changes.
  void OnLocatabilityChanged(
      winrt::Windows::Perception::Spatial::SpatialLocator const &sender,
      winrt::Windows::Foundation::IInspectable const &args);

  // Used to be aware of gamepads that are plugged in after the app starts.
  void OnGamepadAdded(winrt::Windows::Foundation::IInspectable,
                      winrt::Windows::Gaming::Input::Gamepad const &args);

  // Used to stop looking for gamepads that are removed while the app is
  // running.
  void OnGamepadRemoved(winrt::Windows::Foundation::IInspectable,
                        winrt::Windows::Gaming::Input::Gamepad const &args);

  // Used to respond to changes to the default spatial locator.
  void OnHolographicDisplayIsAvailableChanged(
      winrt::Windows::Foundation::IInspectable,
      winrt::Windows::Foundation::IInspectable);

  // Clears event registration state. Used when changing to a new
  // HolographicSpace and when tearing down AppMain.
  void UnregisterHolographicEventHandlers();

#ifdef DRAW_SAMPLE_CONTENT
  // Renders a colorful holographic cube that's 20 centimeters wide. This sample
  // content is used to demonstrate world-locked rendering.
  std::unique_ptr<SpinningCubeRenderer> m_spinningCubeRenderer;

  // Listens for the Pressed spatial input event.
  std::shared_ptr<SpatialInputHandler> m_spatialInputHandler;
#endif

  // Cached pointer to device resources.
  std::shared_ptr<DX::DeviceResources> m_deviceResources;

  // Render loop timer.
  DX::StepTimer m_timer;

  // Represents the holographic space around the user.
  winrt::Windows::Graphics::Holographic::HolographicSpace m_holographicSpace =
      nullptr;

  // SpatialLocator that is attached to the default HolographicDisplay.
  winrt::Windows::Perception::Spatial::SpatialLocator m_spatialLocator =
      nullptr;

  // A stationary reference frame based on m_spatialLocator.
  winrt::Windows::Perception::Spatial::SpatialStationaryFrameOfReference
      m_stationaryReferenceFrame = nullptr;

  // Event registration tokens.
  winrt::event_token m_cameraAddedToken;
  winrt::event_token m_cameraRemovedToken;
  winrt::event_token m_locatabilityChangedToken;
  winrt::event_token m_gamepadAddedEventToken;
  winrt::event_token m_gamepadRemovedEventToken;
  winrt::event_token m_holographicDisplayIsAvailableChangedEventToken;

  // Keep track of gamepads.
  struct GamepadWithButtonState {
    winrt::Windows::Gaming::Input::Gamepad gamepad;
    bool buttonAWasPressedLastFrame = false;
  };
  std::vector<GamepadWithButtonState> m_gamepads;

  // Keep track of mouse input.
  bool m_pointerPressed = false;

  // Cache whether or not the HolographicCamera.Display property can be
  // accessed.
  bool m_canGetHolographicDisplayForCamera = false;

  // Cache whether or not the HolographicDisplay.GetDefault() method can be
  // called.
  bool m_canGetDefaultHolographicDisplay = false;

  // Cache whether or not the
  // HolographicCameraRenderingParameters.CommitDirect3D11DepthBuffer() method
  // can be called.
  bool m_canCommitDirect3D11DepthBuffer = false;
};
} // namespace Immersive

@@ -1,236 +0,0 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at https://mozilla.org/MPL/2.0/. */

#include "pch.h"
#include "logs.h"
#include "ImmersiveView.h"
#include "ImmersiveMain.h"

using namespace winrt::ServoApp;

using namespace concurrency;
using namespace std::placeholders;
using namespace winrt::Windows::ApplicationModel;
using namespace winrt::Windows::ApplicationModel::Activation;
using namespace winrt::Windows::ApplicationModel::Core;
using namespace winrt::Windows::Foundation;
using namespace winrt::Windows::Graphics::Holographic;
using namespace winrt::Windows::UI::Core;

// Immediately start immersive mode:
// int __stdcall wWinMain(HINSTANCE, HINSTANCE, PWSTR, int)
//{
//  winrt::init_apartment();
//  CoreApplication::Run(ImmersiveViewSource());
//  return 0;
//}

// IFrameworkViewSource methods

IFrameworkView ImmersiveViewSource::CreateView() { return holographicView; }

// IFrameworkView methods

// The first method called when the IFrameworkView is being created.
// Use this method to subscribe for Windows shell events and to initialize your
// app.
void ImmersiveView::Initialize(CoreApplicationView const &applicationView) {
  applicationView.Activated(
      std::bind(&ImmersiveView::OnViewActivated, this, _1, _2));

  // Register event handlers for app lifecycle.
  m_suspendingEventToken = CoreApplication::Suspending(
      bind(&ImmersiveView::OnSuspending, this, _1, _2));
  m_resumingEventToken =
      CoreApplication::Resuming(bind(&ImmersiveView::OnResuming, this, _1, _2));

  // At this point we have access to the device and we can create
  // device-dependent resources.
  m_deviceResources = std::make_shared<DX::DeviceResources>();

  m_main = std::make_unique<Immersive::ImmersiveMain>(m_deviceResources);
}

// Called when the CoreWindow object is created (or re-created).
void ImmersiveView::SetWindow(CoreWindow const &window) {

  if (m_main == nullptr) {
    winrt::hstring message(L"main program not initialized.\n");
    OutputDebugStringW(message.data());
    return;
  }

  // Register for keypress notifications.
  m_keyDownEventToken =
      window.KeyDown(bind(&ImmersiveView::OnKeyPressed, this, _1, _2));

  // Register for pointer pressed notifications.
  m_pointerPressedEventToken = window.PointerPressed(
      bind(&ImmersiveView::OnPointerPressed, this, _1, _2));

  // Register for notification that the app window is being closed.
  m_windowClosedEventToken =
      window.Closed(bind(&ImmersiveView::OnWindowClosed, this, _1, _2));

  // Register for notifications that the app window is losing focus.
  m_visibilityChangedEventToken = window.VisibilityChanged(
      bind(&ImmersiveView::OnVisibilityChanged, this, _1, _2));

  // Create a holographic space for the core window for the current view.
  // Presenting holographic frames that are created by this holographic space
  // will put the app into exclusive mode.
  m_holographicSpace = HolographicSpace::CreateForCoreWindow(window);

  // The DeviceResources class uses the preferred DXGI adapter ID from the
  // holographic space (when available) to create a Direct3D device. The
  // HolographicSpace uses this ID3D11Device to create and manage device-based
  // resources such as swap chains.
  m_deviceResources->SetHolographicSpace(m_holographicSpace);

  // The main class uses the holographic space for updates and rendering.
  m_main->SetHolographicSpace(m_holographicSpace);
}

// The Load method can be used to initialize scene resources or to load a
// previously saved app state.
void ImmersiveView::Load(winrt::hstring const &) {}

// This method is called after the window becomes active. It oversees the
// update, draw, and present loop, and it also oversees window message
// processing.
void ImmersiveView::Run() {

  if (m_main == nullptr) {
    winrt::hstring message(L"main program not initialized.\n");
    OutputDebugStringW(message.data());
    return;
  }

  CoreWindow::GetForCurrentThread().Activate();

  while (!m_windowClosed) {
    if (m_windowVisible && (m_holographicSpace != nullptr)) {
      CoreWindow::GetForCurrentThread().Dispatcher().ProcessEvents(
          CoreProcessEventsOption::ProcessAllIfPresent);

      HolographicFrame holographicFrame = m_main->Update();

      if (m_main->Render(holographicFrame)) {
        // The holographic frame has an API that presents the swap chain for
        // each holographic camera.
        m_deviceResources->Present(holographicFrame);
      }
    } else {
      CoreWindow::GetForCurrentThread().Dispatcher().ProcessEvents(
          CoreProcessEventsOption::ProcessOneAndAllPending);
    }
  }
}

// Terminate events do not cause Uninitialize to be called. It will be called if
// your IFrameworkView class is torn down while the app is in the foreground,
// for example if the Run method exits.
void ImmersiveView::Uninitialize() {
  m_main.reset();
  m_deviceResources.reset();

  CoreApplication::Suspending(m_suspendingEventToken);
  CoreApplication::Resuming(m_resumingEventToken);

  auto const &window = CoreWindow::GetForCurrentThread();
  window.KeyDown(m_keyDownEventToken);
  window.PointerPressed(m_pointerPressedEventToken);
  window.Closed(m_windowClosedEventToken);
  window.VisibilityChanged(m_visibilityChangedEventToken);
}

// Application lifecycle event handlers

// Called when the app is prelaunched. Use this method to load resources ahead
// of time and enable faster launch times.
void ImmersiveView::OnLaunched(LaunchActivatedEventArgs const &args) {
  if (args.PrelaunchActivated()) {
    //
    // TODO: Insert code to preload resources here.
    //
  }
}

// Called when the app view is activated. Activates the app's CoreWindow.
void ImmersiveView::OnViewActivated(CoreApplicationView const &sender,
                                    IActivatedEventArgs const &) {
  // Run() won't start until the CoreWindow is activated.
  sender.CoreWindow().Activate();
}

void ImmersiveView::OnSuspending(
    winrt::Windows::Foundation::IInspectable const &,
    SuspendingEventArgs const &args) {
  // Save app state asynchronously after requesting a deferral. Holding a
  // deferral indicates that the application is busy performing suspending
  // operations. Be aware that a deferral may not be held indefinitely; after
  // about five seconds, the app will be forced to exit.
  SuspendingDeferral deferral = args.SuspendingOperation().GetDeferral();

  create_task([this, deferral]() {
    m_deviceResources->Trim();

    if (m_main != nullptr) {
      m_main->SaveAppState();
    }

    //
    // TODO: Insert code here to save your app state.
    //

    deferral.Complete();
  });
}

void ImmersiveView::OnResuming(
    winrt::Windows::Foundation::IInspectable const &,
    winrt::Windows::Foundation::IInspectable const &) {
  // Restore any data or state that was unloaded on suspend. By default, data
  // and state are persisted when resuming from suspend. Note that this event
  // does not occur if the app was previously terminated.

  if (m_main != nullptr) {
    m_main->LoadAppState();
  }

  //
  // TODO: Insert code here to load your app state.
  //
}

// Window event handlers

void ImmersiveView::OnVisibilityChanged(
    CoreWindow const &, VisibilityChangedEventArgs const &args) {
  m_windowVisible = args.Visible();
}

void ImmersiveView::OnWindowClosed(CoreWindow const &,
                                   CoreWindowEventArgs const &) {
  m_windowClosed = true;
}

// Input event handlers

void ImmersiveView::OnKeyPressed(CoreWindow const &, KeyEventArgs const &) {
  //
  // TODO: Bluetooth keyboards are supported by HoloLens. You can use this
  // method for keyboard input if you want to support it as an optional input
  // method for your holographic app.
  //
}

void ImmersiveView::OnPointerPressed(CoreWindow const &,
                                     PointerEventArgs const &) {
  // Allow the user to interact with the holographic world using the mouse.
  if (m_main != nullptr) {
    m_main->OnPointerPressed();
  }
}
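Editor's note: the commented-out wWinMain near the top of that file shows how the immersive view could be launched directly instead of from BrowserPage. Spelled out as a self-contained sketch (the includes and the fully qualified names are assumptions; this is not code from the commit):

#include "pch.h"
#include "ImmersiveView.h"

using namespace winrt::Windows::ApplicationModel::Core;

// Boots the app straight into the holographic IFrameworkView instead of the
// XAML browser window, mirroring the commented-out entry point above.
int __stdcall wWinMain(HINSTANCE, HINSTANCE, PWSTR, int) {
  winrt::init_apartment();
  CoreApplication::Run(winrt::ServoApp::ImmersiveViewSource());
  return 0;
}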

@@ -1,90 +0,0 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at https://mozilla.org/MPL/2.0/. */

#pragma once

#include "Common/DeviceResources.h"
#include "ImmersiveMain.h"

namespace winrt::ServoApp {
// IFrameworkView class. Connects the app with the Windows shell and handles
// application lifecycle events.
class ImmersiveView sealed
    : public winrt::implements<
          ImmersiveView,
          winrt::Windows::ApplicationModel::Core::IFrameworkView> {
public:
  // IFrameworkView methods.
  void
  Initialize(winrt::Windows::ApplicationModel::Core::CoreApplicationView const
                 &applicationView);
  void SetWindow(winrt::Windows::UI::Core::CoreWindow const &window);
  void Load(winrt::hstring const &entryPoint);
  void Run();
  void Uninitialize();

protected:
  // Application lifecycle event handlers.
  void OnLaunched(winrt::Windows::ApplicationModel::Activation::
                      LaunchActivatedEventArgs const &args);
  void OnViewActivated(
      winrt::Windows::ApplicationModel::Core::CoreApplicationView const &sender,
      winrt::Windows::ApplicationModel::Activation::IActivatedEventArgs const
          &args);
  void OnSuspending(
      winrt::Windows::Foundation::IInspectable const &sender,
      winrt::Windows::ApplicationModel::SuspendingEventArgs const &args);
  void OnResuming(winrt::Windows::Foundation::IInspectable const &sender,
                  winrt::Windows::Foundation::IInspectable const &args);

  // Window event handlers.
  void OnVisibilityChanged(
      winrt::Windows::UI::Core::CoreWindow const &sender,
      winrt::Windows::UI::Core::VisibilityChangedEventArgs const &args);
  void
  OnWindowClosed(winrt::Windows::UI::Core::CoreWindow const &sender,
                 winrt::Windows::UI::Core::CoreWindowEventArgs const &args);

  // CoreWindow input event handlers.
  void OnKeyPressed(winrt::Windows::UI::Core::CoreWindow const &sender,
                    winrt::Windows::UI::Core::KeyEventArgs const &args);
  void OnPointerPressed(winrt::Windows::UI::Core::CoreWindow const &sender,
                        winrt::Windows::UI::Core::PointerEventArgs const &args);

private:
  std::unique_ptr<Immersive::ImmersiveMain> m_main = nullptr;

  std::shared_ptr<DX::DeviceResources> m_deviceResources = nullptr;
  bool m_windowClosed = false;
  bool m_windowVisible = true;

  // Event registration tokens.
  winrt::event_token m_suspendingEventToken;
  winrt::event_token m_resumingEventToken;
  winrt::event_token m_keyDownEventToken;
  winrt::event_token m_pointerPressedEventToken;
  winrt::event_token m_windowClosedEventToken;
  winrt::event_token m_visibilityChangedEventToken;

  // The holographic space the app will use for rendering.
  winrt::Windows::Graphics::Holographic::HolographicSpace m_holographicSpace =
      nullptr;

  // FIXME: initialization is done twice: here and in BrowserPage. Share it.
  // OpenGLES mOpenGLES;
  EGLSurface mRenderSurface{EGL_NO_SURFACE};
};

class ImmersiveViewSource sealed
    : public winrt::implements<
          ImmersiveViewSource,
          winrt::Windows::ApplicationModel::Core::IFrameworkViewSource> {
public:
  // IFrameworkViewSource method.
  winrt::Windows::ApplicationModel::Core::IFrameworkView CreateView();

private:
  ImmersiveView holographicView;
};
} // namespace winrt::ServoApp

@@ -141,15 +141,6 @@
    </Link>
  </ItemDefinitionGroup>
  <ItemGroup>
    <ClInclude Include="Common\CameraResources.h" />
    <ClInclude Include="Common\DeviceResources.h" />
    <ClInclude Include="Common\DirectXHelper.h" />
    <ClInclude Include="Common\StepTimer.h" />
    <ClInclude Include="Content\ShaderStructures.h" />
    <ClInclude Include="Content\SpatialInputHandler.h" />
    <ClInclude Include="Content\SpinningCubeRenderer.h" />
    <ClInclude Include="ImmersiveMain.h" />
    <ClInclude Include="ImmersiveView.h" />
    <ClInclude Include="logs.h" />
    <ClInclude Include="pch.h" />
    <ClInclude Include="App.h">

@@ -292,12 +283,6 @@
    <Image Include="Assets\Wide310x150Logo.scale-200.png" />
  </ItemGroup>
  <ItemGroup>
    <ClCompile Include="Common\CameraResources.cpp" />
    <ClCompile Include="Common\DeviceResources.cpp" />
    <ClCompile Include="Content\SpatialInputHandler.cpp" />
    <ClCompile Include="Content\SpinningCubeRenderer.cpp" />
    <ClCompile Include="ImmersiveMain.cpp" />
    <ClCompile Include="ImmersiveView.cpp" />
    <ClCompile Include="pch.cpp">
      <PrecompiledHeader>Create</PrecompiledHeader>
    </ClCompile>

@@ -326,25 +311,6 @@
    <None Include="packages.config" />
    <None Include="PropertySheet.props" />
  </ItemGroup>
  <ItemGroup>
    <FxCompile Include="Content\PixelShader.hlsl">
      <ShaderType>Pixel</ShaderType>
      <ShaderModel>5.0</ShaderModel>
    </FxCompile>
    <None Include="Content\VertexShaderShared.hlsl" />
    <FxCompile Include="Content\VertexShader.hlsl">
      <ShaderType>Vertex</ShaderType>
      <ShaderModel>5.0</ShaderModel>
    </FxCompile>
    <FxCompile Include="Content\VPRTVertexShader.hlsl">
      <ShaderType>Vertex</ShaderType>
      <ShaderModel>5.0</ShaderModel>
    </FxCompile>
    <FxCompile Include="Content\GeometryShader.hlsl">
      <ShaderType>Geometry</ShaderType>
      <ShaderModel>5.0</ShaderModel>
    </FxCompile>
  </ItemGroup>
  <Import Project="$(VCTargetsPath)\Microsoft.Cpp.targets" />
  <ImportGroup Label="ExtensionTargets">
    <Import Project="..\packages\Microsoft.Windows.CppWinRT.2.0.190620.2\build\native\Microsoft.Windows.CppWinRT.targets" Condition="Exists('..\packages\Microsoft.Windows.CppWinRT.2.0.190620.2\build\native\Microsoft.Windows.CppWinRT.targets')" />

@@ -11,22 +11,8 @@
    <ClCompile Include="pch.cpp" />
    <ClCompile Include="$(GeneratedFilesDir)module.g.cpp" />
    <ClCompile Include="logs.cpp" />
    <ClCompile Include="ImmersiveView.cpp" />
    <ClCompile Include="BrowserPage.cpp" />
    <ClCompile Include="App.cpp" />
    <ClCompile Include="Common\CameraResources.cpp">
      <Filter>Common</Filter>
    </ClCompile>
    <ClCompile Include="Common\DeviceResources.cpp">
      <Filter>Common</Filter>
    </ClCompile>
    <ClCompile Include="Content\SpatialInputHandler.cpp">
      <Filter>Content</Filter>
    </ClCompile>
    <ClCompile Include="Content\SpinningCubeRenderer.cpp">
      <Filter>Content</Filter>
    </ClCompile>
    <ClCompile Include="ImmersiveMain.cpp" />
    <ClCompile Include="ServoControl\OpenGLES.cpp">
      <Filter>ServoControl</Filter>
    </ClCompile>

@@ -40,31 +26,8 @@
  <ItemGroup>
    <ClInclude Include="pch.h" />
    <ClInclude Include="logs.h" />
    <ClInclude Include="ImmersiveView.h" />
    <ClInclude Include="BrowserPage.h" />
    <ClInclude Include="App.h" />
    <ClInclude Include="Common\CameraResources.h">
      <Filter>Common</Filter>
    </ClInclude>
    <ClInclude Include="Common\DeviceResources.h">
      <Filter>Common</Filter>
    </ClInclude>
    <ClInclude Include="Common\DirectXHelper.h">
      <Filter>Common</Filter>
    </ClInclude>
    <ClInclude Include="Common\StepTimer.h">
      <Filter>Common</Filter>
    </ClInclude>
    <ClInclude Include="Content\ShaderStructures.h">
      <Filter>Content</Filter>
    </ClInclude>
    <ClInclude Include="Content\SpatialInputHandler.h">
      <Filter>Content</Filter>
    </ClInclude>
    <ClInclude Include="Content\SpinningCubeRenderer.h">
      <Filter>Content</Filter>
    </ClInclude>
    <ClInclude Include="ImmersiveMain.h" />
    <ClInclude Include="ServoControl\OpenGLES.h">
      <Filter>ServoControl</Filter>
    </ClInclude>

@@ -104,9 +67,6 @@
  <ItemGroup>
    <None Include="ServoApp_TemporaryKey.pfx" />
    <None Include="packages.config" />
    <None Include="Content\VertexShaderShared.hlsl">
      <Filter>Content</Filter>
    </None>
    <None Include="..\..\..\target\debug\openxr_loader.dll">
      <Filter>DebugServoDLLs</Filter>
    </None>

@@ -196,12 +156,6 @@
    <Filter Include="ReleaseServoDLLs">
      <UniqueIdentifier>{663f48bc-21ec-4f00-9db5-cd0a5fa87983}</UniqueIdentifier>
    </Filter>
    <Filter Include="Common">
      <UniqueIdentifier>{e6c0d90c-587b-446e-a6a7-037217d03006}</UniqueIdentifier>
    </Filter>
    <Filter Include="Content">
      <UniqueIdentifier>{83d6e7af-b929-4869-804e-571256af2969}</UniqueIdentifier>
    </Filter>
    <Filter Include="DebugARM64ServoDLLs">
      <UniqueIdentifier>{e372e8ac-7cab-47de-80a5-020370a51fd4}</UniqueIdentifier>
    </Filter>

@@ -221,20 +175,6 @@
      <Filter>ServoControl</Filter>
    </Page>
  </ItemGroup>
  <ItemGroup>
    <FxCompile Include="Content\GeometryShader.hlsl">
      <Filter>Content</Filter>
    </FxCompile>
    <FxCompile Include="Content\PixelShader.hlsl">
      <Filter>Content</Filter>
    </FxCompile>
    <FxCompile Include="Content\VertexShader.hlsl">
      <Filter>Content</Filter>
    </FxCompile>
    <FxCompile Include="Content\VPRTVertexShader.hlsl">
      <Filter>Content</Filter>
    </FxCompile>
  </ItemGroup>
  <ItemGroup>
    <ApplicationDefinition Include="App.xaml" />
  </ItemGroup>