diff --git a/README.md b/README.md
index b4a00578dd..cbfd8e6c37 100644
--- a/README.md
+++ b/README.md
@@ -93,35 +93,36 @@ For additional Windows samples, see [Windows on GitHub](http://microsoft.github.
Camera resolution
+ Camera stream coordinate mapper
Camera stream correlation
- DASH streaming
+ DASH streaming
Direct2D photo adjustment
Media editing
- Media import
+ Media import
Media transport controls
MIDI
- Playlists
+ Playlists
PlayReady
Processing frames with OpenCV
- Simple imaging
+ Simple imaging
Spatial audio
System media transport controls
- Transcoding media
+ Transcoding media
Video playback
Video playback synchronization
- Video stabilization
+ Video stabilization
Windows audio session (WASAPI)
@@ -284,6 +285,7 @@ For additional Windows samples, see [Windows on GitHub](http://microsoft.github.
Sharing content source app
Sharing content target app
+ User activities
diff --git a/Samples/360VideoPlayback/cpp/Common/DeviceResources.cpp b/Samples/360VideoPlayback/cpp/Common/DeviceResources.cpp
index 08df6b75fd..53b2fe6893 100644
--- a/Samples/360VideoPlayback/cpp/Common/DeviceResources.cpp
+++ b/Samples/360VideoPlayback/cpp/Common/DeviceResources.cpp
@@ -361,25 +361,6 @@ void DX::DeviceResources::Present(HolographicFrame^ frame)
// holographic frame predictions.
HolographicFramePresentResult presentResult = frame->PresentUsingCurrentPrediction();
- HolographicFramePrediction^ prediction = frame->CurrentPrediction;
- UseHolographicCameraResources([this, prediction](std::map>& cameraResourceMap)
- {
- for (auto cameraPose : prediction->CameraPoses)
- {
- // This represents the device-based resources for a HolographicCamera.
- DX::CameraResources* pCameraResources = cameraResourceMap[cameraPose->HolographicCamera->Id].get();
-
- // Discard the contents of the render target.
- // This is a valid operation only when the existing contents will be
- // entirely overwritten. If dirty or scroll rects are used, this call
- // should be removed.
- m_d3dContext->DiscardView(pCameraResources->GetBackBufferRenderTargetView());
-
- // Discard the contents of the depth stencil.
- m_d3dContext->DiscardView(pCameraResources->GetDepthStencilView());
- }
- });
-
// The PresentUsingCurrentPrediction API will detect when the graphics device
// changes or becomes invalid. When this happens, it is considered a Direct3D
// device lost scenario.
diff --git a/Samples/360VideoPlayback/cs/Common/DeviceResources.cs b/Samples/360VideoPlayback/cs/Common/DeviceResources.cs
index cf5fb773c9..192522df2d 100644
--- a/Samples/360VideoPlayback/cs/Common/DeviceResources.cs
+++ b/Samples/360VideoPlayback/cs/Common/DeviceResources.cs
@@ -363,25 +363,6 @@ public void Present(ref HolographicFrame frame)
HolographicFramePresentWaitBehavior.WaitForFrameToFinish
);
- HolographicFramePrediction prediction = frame.CurrentPrediction;
- UseHolographicCameraResources((Dictionary cameraResourcesDictionary) =>
- {
- foreach (var cameraPose in prediction.CameraPoses)
- {
- // This represents the device-based resources for a HolographicCamera.
- CameraResources cameraResources = cameraResourcesDictionary[cameraPose.HolographicCamera.Id];
-
- // Discard the contents of the render target.
- // This is a valid operation only when the existing contents will be
- // entirely overwritten. If dirty or scroll rects are used, this call
- // should be removed.
- d3dContext.DiscardView(cameraResources.BackBufferRenderTargetView);
-
- // Discard the contents of the depth stencil.
- d3dContext.DiscardView(cameraResources.DepthStencilView);
- }
- });
-
// The PresentUsingCurrentPrediction API will detect when the graphics device
// changes or becomes invalid. When this happens, it is considered a Direct3D
// device lost scenario.
diff --git a/Samples/360VideoPlayback/cs/ms.fxcompile.targets b/Samples/360VideoPlayback/cs/ms.fxcompile.targets
index 1b2bc1c8b1..3a932b6df0 100644
--- a/Samples/360VideoPlayback/cs/ms.fxcompile.targets
+++ b/Samples/360VideoPlayback/cs/ms.fxcompile.targets
@@ -154,7 +154,8 @@
- $(MSBuildProgramFiles32)\Windows Kits\10\bin\x86
+ $(MSBuildProgramFiles32)\Windows Kits\10\bin\$(TargetPlatformVersion)\x86
+ $(MSBuildProgramFiles32)\Windows Kits\10\bin\x86
diff --git a/Samples/Advertising/README.md b/Samples/Advertising/README.md
index 958ac4d409..e0f1a29503 100644
--- a/Samples/Advertising/README.md
+++ b/Samples/Advertising/README.md
@@ -22,8 +22,9 @@ Specifically, this sample shows how to:
## Prerequisites
-This sample requires the [Microsoft Advertising SDK Libraries for XAML or JavaScript](http://go.microsoft.com/fwlink/?LinkID=619694).
-[Download it here](http://go.microsoft.com/fwlink/p/?LinkId=518026).
+This sample requires the [Microsoft Advertising SDK Libraries for XAML or JavaScript](http://go.microsoft.com/fwlink/?LinkID=619694),
+which is obtained via a NuGet package,
+or you can [download it here](http://go.microsoft.com/fwlink/p/?LinkId=518026).
It also requires the [internet client capability](https://msdn.microsoft.com/library/windows/apps/mt270968#general-use_capabilities) to be added to the manifest.
When you incorporate this sample into your own app,
diff --git a/Samples/Advertising/cpp/Advertising.vcxproj b/Samples/Advertising/cpp/Advertising.vcxproj
index 09e36c62ba..2860b58f4b 100644
--- a/Samples/Advertising/cpp/Advertising.vcxproj
+++ b/Samples/Advertising/cpp/Advertising.vcxproj
@@ -222,7 +222,17 @@
+
+
+
+
+
+
+ This project references NuGet package(s) that are missing on this computer. Use NuGet Package Restore to download them. For more information, see http://go.microsoft.com/fwlink/?LinkID=322105. The missing file is {0}.
+
+
+
\ No newline at end of file
diff --git a/Samples/Advertising/cpp/Advertising.vcxproj.filters b/Samples/Advertising/cpp/Advertising.vcxproj.filters
index 2e538e7b56..a1c8c01ad4 100644
--- a/Samples/Advertising/cpp/Advertising.vcxproj.filters
+++ b/Samples/Advertising/cpp/Advertising.vcxproj.filters
@@ -65,4 +65,7 @@
Assets
+
+
+
\ No newline at end of file
diff --git a/Samples/Advertising/cpp/packages.config b/Samples/Advertising/cpp/packages.config
new file mode 100644
index 0000000000..f32e2866a7
--- /dev/null
+++ b/Samples/Advertising/cpp/packages.config
@@ -0,0 +1,4 @@
+
+
+
+
\ No newline at end of file
diff --git a/Samples/Advertising/cs/project.json b/Samples/Advertising/cs/project.json
index c594939270..6dd4dc2ef9 100644
--- a/Samples/Advertising/cs/project.json
+++ b/Samples/Advertising/cs/project.json
@@ -1,9 +1,10 @@
{
"dependencies": {
+ "Microsoft.Advertising.XAML": "10.1705.16001",
"Microsoft.NETCore.UniversalWindowsPlatform": "5.0.0"
},
"frameworks": {
- "uap10.0": {}
+ "uap10.0.16299": {}
},
"runtimes": {
"win10-arm": {},
diff --git a/Samples/Advertising/js/Advertising.jsproj b/Samples/Advertising/js/Advertising.jsproj
index 06e68193f2..66034ca609 100644
--- a/Samples/Advertising/js/Advertising.jsproj
+++ b/Samples/Advertising/js/Advertising.jsproj
@@ -1,10 +1,6 @@
-
- Debug
- AnyCPU
-
Debug
ARM
@@ -17,10 +13,6 @@
Debug
x86
-
- Release
- AnyCPU
-
Release
ARM
@@ -127,12 +119,20 @@
sample-utils\scenario-select.html
+
+
+
+
+ This project references NuGet package(s) that are missing on this computer. Use NuGet Package Restore to download them. For more information, see http://go.microsoft.com/fwlink/?LinkID=322105. The missing file is {0}.
+
+
+
+
+# Camera stream coordinate mapper sample
+
+Shows how to use spatially correlated color and depth cameras and the depth frames to map image pixels from one camera to another with [CameraIntrinsics](https://docs.microsoft.com/uwp/api/windows.media.devices.core.cameraintrinsics) and [SpatialCoordinateSystem](https://docs.microsoft.com/uwp/api/windows.perception.spatial.spatialcoordinatesystem), using DirectX and shaders for real-time applications.
+
+> **Note:** This sample is part of a large collection of UWP feature samples.
+> If you are unfamiliar with Git and GitHub, you can download the entire collection as a
+> [ZIP file](https://github.com/Microsoft/Windows-universal-samples/archive/master.zip), but be
+> sure to unzip everything to access shared dependencies. For more info on working with the ZIP file,
+> the samples collection, and GitHub, see [Get the UWP samples from GitHub](https://aka.ms/ovu2uq).
+> For more samples, see the [Samples portal](https://aka.ms/winsamples) on the Windows Dev Center.
+
+This sample demonstrates how to:
+
+- Find cameras which support color and depth formats.
+- Create MediaFrameReaders to read frames from multiple sources concurrently.
+- Map depth camera's pixels onto a correlated color camera using DirectX shaders.
+
+### Correlation of multiple capture sources
+
+This sample is intended to demonstrate how to map entire images from one camera to another using depth information from the depth camera, [CameraIntrinsics](https://docs.microsoft.com/uwp/api/windows.media.devices.core.cameraintrinsics), and [SpatialCoordinateSystem](https://docs.microsoft.com/uwp/api/windows.perception.spatial.spatialcoordinatesystem). This is similar to the functionality of the [DepthCorrelatedCoordinateMapper](https://docs.microsoft.com/uwp/api/windows.media.devices.core.depthcorrelatedcoordinatemapper) except this sample is designed for processing an entire image using DirectX and shaders. The [DepthCorrelatedCoordinateMapper](https://docs.microsoft.com/uwp/api/windows.media.devices.core.depthcorrelatedcoordinatemapper) has a member function "MapPoints" which will map an array of pixels from one camera to another using the depth data for correlation, but was not performant on lower-end hardware for a full image of pixels. The performance of the GPU algorithm in this sample, running on an Xbox One as a UWP app, is < 2ms per frame. Using a desktop PC with a GTX 1060 resulted in < 0.25ms per frame.
+
+There are multiple ways to interpret the result of the image mapping. This sample demonstrates how to visualize the resulting data as a 3D point cloud from different perspectives. This sample also demonstrates how to project the correlated 3D data back onto one of the cameras to correlate the results in 2D (i.e. use the depth data to discard pixels that are too far away from the camera).
+
+**Note** The Windows universal samples for Windows 10 require Visual Studio 2017 Update 2 and Windows SDK version 15063 or above to build.
+
+To obtain information about Windows 10 development, go to the [Windows Dev Center](https://dev.windows.com).
+
+## See also
+
+### Samples
+
+* [CameraFrames](/Samples/CameraFrames)
+* [CameraStreamCorrelation](/Samples/CameraStreamCorrelation)
+
+### Reference
+
+* [Windows.Media.Capture.Frames namespace](https://docs.microsoft.com/uwp/api/windows.media.capture.frames)
+* [Windows.Media.Devices.Core.CameraIntrinsics](https://docs.microsoft.com/uwp/api/windows.media.devices.core.cameraintrinsics)
+* [Windows.Perception.Spatial.SpatialCoordinateSystem](https://docs.microsoft.com/uwp/api/windows.perception.spatial.spatialcoordinatesystem)
+
+## System requirements
+
+**Client:** Windows 10 build 15063
+
+**Camera:** Correlated color and depth camera (e.g. Kinect V2 sensor)
+
+## Build the sample
+
+1. If you download the samples ZIP, be sure to unzip the entire archive, not just the folder with
+ the sample you want to build.
+2. Start Microsoft Visual Studio 2017 and select **File** \> **Open** \> **Project/Solution**.
+3. Starting in the folder where you unzipped the samples, go to the Samples subfolder, then the
+ subfolder for this specific sample, then the subfolder for your preferred language (C++, C#, or
+ JavaScript). Double-click the Visual Studio Solution (.sln) file.
+4. Press Ctrl+Shift+B, or select **Build** \> **Build Solution**.
+
+## Run the sample
+
+The next steps depend on whether you just want to deploy the sample or you want to both deploy and
+run it.
+
+### Deploying and running the sample
+
+- To debug the sample and then run it, press F5 or select **Debug** \> **Start Debugging**.
+  To run the sample without debugging, press Ctrl+F5 or select
+  **Debug** \> **Start Without Debugging**.
diff --git a/Samples/CameraStreamCoordinateMapper/cpp/App.cpp b/Samples/CameraStreamCoordinateMapper/cpp/App.cpp
new file mode 100644
index 0000000000..3f245d1303
--- /dev/null
+++ b/Samples/CameraStreamCoordinateMapper/cpp/App.cpp
@@ -0,0 +1,207 @@
+//*********************************************************
+//
+// Copyright (c) Microsoft. All rights reserved.
+// This code is licensed under the MIT License (MIT).
+// THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF
+// ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY
+// IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR
+// PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT.
+//
+//*********************************************************
+
+#include "pch.h"
+#include "App.h"
+
+#include "CameraStreamCoordinateMapperMain.h"
+
+using namespace CameraStreamCoordinateMapper;
+
+using namespace concurrency;
+using namespace Windows::ApplicationModel;
+using namespace Windows::ApplicationModel::Core;
+using namespace Windows::ApplicationModel::Activation;
+using namespace Windows::UI::Core;
+using namespace Windows::UI::Input;
+using namespace Windows::System;
+using namespace Windows::Foundation;
+using namespace Windows::Graphics::Display;
+
+// The main function is only used to initialize our IFrameworkView class.
+[Platform::MTAThread]
+int main(Platform::Array^)
+{
+ auto direct3DApplicationSource = ref new Direct3DApplicationSource();
+ CoreApplication::Run(direct3DApplicationSource);
+ return 0;
+}
+
+IFrameworkView^ Direct3DApplicationSource::CreateView()
+{
+ return ref new App();
+}
+
+App::App() :
+ m_windowClosed(false),
+ m_windowVisible(true)
+{
+}
+
+// The first method called when the IFrameworkView is being created.
+void App::Initialize(CoreApplicationView^ applicationView)
+{
+ // Register event handlers for app lifecycle. This example includes Activated, so that we
+ // can make the CoreWindow active and start rendering on the window.
+ applicationView->Activated +=
+ ref new TypedEventHandler(this, &App::OnActivated);
+
+ CoreApplication::Suspending +=
+ ref new EventHandler(this, &App::OnSuspending);
+
+ CoreApplication::Resuming +=
+ ref new EventHandler(this, &App::OnResuming);
+
+ // At this point we have access to the device.
+ // We can create the device-dependent resources.
+ m_deviceResources = std::make_shared();
+}
+
+// Called when the CoreWindow object is created (or re-created).
+void App::SetWindow(CoreWindow^ window)
+{
+ window->SizeChanged +=
+ ref new TypedEventHandler(this, &App::OnWindowSizeChanged);
+
+ window->VisibilityChanged +=
+ ref new TypedEventHandler(this, &App::OnVisibilityChanged);
+
+ window->Closed +=
+ ref new TypedEventHandler(this, &App::OnWindowClosed);
+
+ DisplayInformation^ currentDisplayInformation = DisplayInformation::GetForCurrentView();
+
+ currentDisplayInformation->DpiChanged +=
+ ref new TypedEventHandler(this, &App::OnDpiChanged);
+
+ currentDisplayInformation->OrientationChanged +=
+ ref new TypedEventHandler(this, &App::OnOrientationChanged);
+
+ DisplayInformation::DisplayContentsInvalidated +=
+ ref new TypedEventHandler(this, &App::OnDisplayContentsInvalidated);
+
+ m_deviceResources->SetWindow(window);
+}
+
+// Initializes scene resources, or loads a previously saved app state.
+void App::Load(Platform::String^ entryPoint)
+{
+ if (m_main == nullptr)
+ {
+ m_main = std::make_unique(m_deviceResources);
+ }
+}
+
+// This method is called after the window becomes active.
+void App::Run()
+{
+ while (!m_windowClosed)
+ {
+ if (m_windowVisible)
+ {
+ CoreWindow::GetForCurrentThread()->Dispatcher->ProcessEvents(CoreProcessEventsOption::ProcessAllIfPresent);
+
+ m_main->Update();
+
+ if (m_main->Render())
+ {
+ m_deviceResources->Present();
+ }
+ }
+ else
+ {
+ CoreWindow::GetForCurrentThread()->Dispatcher->ProcessEvents(CoreProcessEventsOption::ProcessOneAndAllPending);
+ }
+ }
+}
+
+// Required for IFrameworkView.
+// Terminate events do not cause Uninitialize to be called. It will be called if your IFrameworkView
+// class is torn down while the app is in the foreground.
+void App::Uninitialize()
+{
+}
+
+// Application lifecycle event handlers.
+
+void App::OnActivated(CoreApplicationView^ applicationView, IActivatedEventArgs^ args)
+{
+ // Run() won't start until the CoreWindow is activated.
+ CoreWindow::GetForCurrentThread()->Activate();
+}
+
+void App::OnSuspending(Platform::Object^ sender, SuspendingEventArgs^ args)
+{
+ // Save app state asynchronously after requesting a deferral. Holding a deferral
+ // indicates that the application is busy performing suspending operations. Be
+ // aware that a deferral may not be held indefinitely. After about five seconds,
+ // the app will be forced to exit.
+ SuspendingDeferral^ deferral = args->SuspendingOperation->GetDeferral();
+
+ create_task([this, deferral]
+ {
+ m_deviceResources->Trim();
+
+ return m_main->OnSuspendingAsync().then([deferral] {
+ deferral->Complete();
+ });
+ });
+}
+
+void App::OnResuming(Platform::Object^ sender, Platform::Object^ args)
+{
+ // Restore any data or state that was unloaded on suspend. By default, data
+ // and state are persisted when resuming from suspend. Note that this event
+ // does not occur if the app was previously terminated.
+
+ m_main->OnResuming();
+}
+
+// Window event handlers.
+
+void App::OnWindowSizeChanged(CoreWindow^ sender, WindowSizeChangedEventArgs^ args)
+{
+ m_deviceResources->SetLogicalSize(Size(sender->Bounds.Width, sender->Bounds.Height));
+ m_main->CreateWindowSizeDependentResources();
+}
+
+void App::OnVisibilityChanged(CoreWindow^ sender, VisibilityChangedEventArgs^ args)
+{
+ m_windowVisible = args->Visible;
+}
+
+void App::OnWindowClosed(CoreWindow^ sender, CoreWindowEventArgs^ args)
+{
+ m_windowClosed = true;
+}
+
+// DisplayInformation event handlers.
+
+void App::OnDpiChanged(DisplayInformation^ sender, Object^ args)
+{
+ // Note: The value for LogicalDpi retrieved here may not match the effective DPI of the app
+ // if it is being scaled for high resolution devices. Once the DPI is set on DeviceResources,
+ // you should always retrieve it using the GetDpi method.
+ // See DeviceResources.cpp for more details.
+ m_deviceResources->SetDpi(sender->LogicalDpi);
+ m_main->CreateWindowSizeDependentResources();
+}
+
+void App::OnOrientationChanged(DisplayInformation^ sender, Object^ args)
+{
+ m_deviceResources->SetCurrentOrientation(sender->CurrentOrientation);
+ m_main->CreateWindowSizeDependentResources();
+}
+
+void App::OnDisplayContentsInvalidated(DisplayInformation^ sender, Object^ args)
+{
+ m_deviceResources->ValidateDevice();
+}
\ No newline at end of file
diff --git a/Samples/CameraStreamCoordinateMapper/cpp/App.h b/Samples/CameraStreamCoordinateMapper/cpp/App.h
new file mode 100644
index 0000000000..115c1f8f47
--- /dev/null
+++ b/Samples/CameraStreamCoordinateMapper/cpp/App.h
@@ -0,0 +1,61 @@
+//*********************************************************
+//
+// Copyright (c) Microsoft. All rights reserved.
+// This code is licensed under the MIT License (MIT).
+// THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF
+// ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY
+// IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR
+// PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT.
+//
+//*********************************************************
+
+#pragma once
+
+#include "Common\DeviceResources.h"
+
+namespace CameraStreamCoordinateMapper
+{
+ class CameraStreamCoordinateMapperMain;
+
+ // Main entry point for our app. Connects the app with the Windows shell and handles application lifecycle events.
+ ref class App sealed : public Windows::ApplicationModel::Core::IFrameworkView
+ {
+ public:
+ App();
+
+ // IFrameworkView Methods.
+ virtual void Initialize(Windows::ApplicationModel::Core::CoreApplicationView^ applicationView);
+ virtual void SetWindow(Windows::UI::Core::CoreWindow^ window);
+ virtual void Load(Platform::String^ entryPoint);
+ virtual void Run();
+ virtual void Uninitialize();
+
+ protected:
+ // Application lifecycle event handlers.
+ void OnActivated(Windows::ApplicationModel::Core::CoreApplicationView^ applicationView, Windows::ApplicationModel::Activation::IActivatedEventArgs^ args);
+ void OnSuspending(Platform::Object^ sender, Windows::ApplicationModel::SuspendingEventArgs^ args);
+ void OnResuming(Platform::Object^ sender, Platform::Object^ args);
+
+ // Window event handlers.
+ void OnWindowSizeChanged(Windows::UI::Core::CoreWindow^ sender, Windows::UI::Core::WindowSizeChangedEventArgs^ args);
+ void OnVisibilityChanged(Windows::UI::Core::CoreWindow^ sender, Windows::UI::Core::VisibilityChangedEventArgs^ args);
+ void OnWindowClosed(Windows::UI::Core::CoreWindow^ sender, Windows::UI::Core::CoreWindowEventArgs^ args);
+
+ // DisplayInformation event handlers.
+ void OnDpiChanged(Windows::Graphics::Display::DisplayInformation^ sender, Platform::Object^ args);
+ void OnOrientationChanged(Windows::Graphics::Display::DisplayInformation^ sender, Platform::Object^ args);
+ void OnDisplayContentsInvalidated(Windows::Graphics::Display::DisplayInformation^ sender, Platform::Object^ args);
+
+ private:
+ std::shared_ptr m_deviceResources;
+ std::unique_ptr m_main;
+ bool m_windowClosed;
+ bool m_windowVisible;
+ };
+}
+
+ref class Direct3DApplicationSource sealed : Windows::ApplicationModel::Core::IFrameworkViewSource
+{
+public:
+ virtual Windows::ApplicationModel::Core::IFrameworkView^ CreateView();
+};
diff --git a/Samples/CameraStreamCoordinateMapper/cpp/CameraStreamCoordinateMapper.sln b/Samples/CameraStreamCoordinateMapper/cpp/CameraStreamCoordinateMapper.sln
new file mode 100644
index 0000000000..fb517f0625
--- /dev/null
+++ b/Samples/CameraStreamCoordinateMapper/cpp/CameraStreamCoordinateMapper.sln
@@ -0,0 +1,43 @@
+
+Microsoft Visual Studio Solution File, Format Version 12.00
+# Visual Studio 15
+VisualStudioVersion = 15.0.27004.2010
+MinimumVisualStudioVersion = 10.0.40219.1
+Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "CameraStreamCoordinateMapper", "CameraStreamCoordinateMapper.vcxproj", "{ED480BBB-C2CE-4C66-BB05-2C1C3DDB965D}"
+EndProject
+Global
+ GlobalSection(SolutionConfigurationPlatforms) = preSolution
+ Debug|ARM = Debug|ARM
+ Debug|x64 = Debug|x64
+ Debug|x86 = Debug|x86
+ Release|ARM = Release|ARM
+ Release|x64 = Release|x64
+ Release|x86 = Release|x86
+ EndGlobalSection
+ GlobalSection(ProjectConfigurationPlatforms) = postSolution
+ {ED480BBB-C2CE-4C66-BB05-2C1C3DDB965D}.Debug|ARM.ActiveCfg = Debug|ARM
+ {ED480BBB-C2CE-4C66-BB05-2C1C3DDB965D}.Debug|ARM.Build.0 = Debug|ARM
+ {ED480BBB-C2CE-4C66-BB05-2C1C3DDB965D}.Debug|ARM.Deploy.0 = Debug|ARM
+ {ED480BBB-C2CE-4C66-BB05-2C1C3DDB965D}.Debug|x64.ActiveCfg = Debug|x64
+ {ED480BBB-C2CE-4C66-BB05-2C1C3DDB965D}.Debug|x64.Build.0 = Debug|x64
+ {ED480BBB-C2CE-4C66-BB05-2C1C3DDB965D}.Debug|x64.Deploy.0 = Debug|x64
+ {ED480BBB-C2CE-4C66-BB05-2C1C3DDB965D}.Debug|x86.ActiveCfg = Debug|Win32
+ {ED480BBB-C2CE-4C66-BB05-2C1C3DDB965D}.Debug|x86.Build.0 = Debug|Win32
+ {ED480BBB-C2CE-4C66-BB05-2C1C3DDB965D}.Debug|x86.Deploy.0 = Debug|Win32
+ {ED480BBB-C2CE-4C66-BB05-2C1C3DDB965D}.Release|ARM.ActiveCfg = Release|ARM
+ {ED480BBB-C2CE-4C66-BB05-2C1C3DDB965D}.Release|ARM.Build.0 = Release|ARM
+ {ED480BBB-C2CE-4C66-BB05-2C1C3DDB965D}.Release|ARM.Deploy.0 = Release|ARM
+ {ED480BBB-C2CE-4C66-BB05-2C1C3DDB965D}.Release|x64.ActiveCfg = Release|x64
+ {ED480BBB-C2CE-4C66-BB05-2C1C3DDB965D}.Release|x64.Build.0 = Release|x64
+ {ED480BBB-C2CE-4C66-BB05-2C1C3DDB965D}.Release|x64.Deploy.0 = Release|x64
+ {ED480BBB-C2CE-4C66-BB05-2C1C3DDB965D}.Release|x86.ActiveCfg = Release|Win32
+ {ED480BBB-C2CE-4C66-BB05-2C1C3DDB965D}.Release|x86.Build.0 = Release|Win32
+ {ED480BBB-C2CE-4C66-BB05-2C1C3DDB965D}.Release|x86.Deploy.0 = Release|Win32
+ EndGlobalSection
+ GlobalSection(SolutionProperties) = preSolution
+ HideSolutionNode = FALSE
+ EndGlobalSection
+ GlobalSection(ExtensibilityGlobals) = postSolution
+ SolutionGuid = {023D1E34-87AC-4D01-869A-2938AEAC18B5}
+ EndGlobalSection
+EndGlobal
diff --git a/Samples/CameraStreamCoordinateMapper/cpp/CameraStreamCoordinateMapper.vcxproj b/Samples/CameraStreamCoordinateMapper/cpp/CameraStreamCoordinateMapper.vcxproj
new file mode 100644
index 0000000000..d3f516a307
--- /dev/null
+++ b/Samples/CameraStreamCoordinateMapper/cpp/CameraStreamCoordinateMapper.vcxproj
@@ -0,0 +1,322 @@
+
+
+
+ {ED480BBB-C2CE-4C66-BB05-2C1C3DDB965D}
+ DirectXApp
+ CameraStreamCoordinateMapper
+ en-US
+ 14.0
+ true
+ Windows Store
+ 10.0
+ 10.0.15063.0
+ 10.0.15063.0
+
+
+
+ Debug
+ Win32
+
+
+ Release
+ Win32
+
+
+ Debug
+ x64
+
+
+ Release
+ x64
+
+
+ Debug
+ ARM
+
+
+ Release
+ ARM
+
+
+
+
+ Application
+ true
+ v141
+
+
+ Application
+ true
+ v141
+
+
+ Application
+ true
+ v141
+
+
+ Application
+ false
+ true
+ v141
+ true
+
+
+ Application
+ false
+ true
+ v141
+ true
+
+
+ Application
+ false
+ true
+ v141
+ true
+
+
+
+
+
+
+
+
+
+
+
+
+ $(VC_IncludePath);$(UniversalCRT_IncludePath);$(WindowsSDK_IncludePath);..\..\..\SharedContent\cpp
+
+
+
+ d2d1.lib; d3d11.lib; dxgi.lib; windowscodecs.lib; dwrite.lib; %(AdditionalDependencies)
+ %(AdditionalLibraryDirectories); $(VCInstallDir)\lib\store\arm; $(VCInstallDir)\lib\arm
+
+
+ pch.h
+ $(IntDir)pch.pch
+ $(ProjectDir);$(IntermediateOutputPath);$(OutDir);%(AdditionalIncludeDirectories)
+ /bigobj /await
+ 4453;28204
+ _DEBUG;%(PreprocessorDefinitions)
+ stdcpp17
+
+
+
+
+ d2d1.lib; d3d11.lib; dxgi.lib; windowscodecs.lib; dwrite.lib; %(AdditionalDependencies)
+ %(AdditionalLibraryDirectories); $(VCInstallDir)\lib\store\arm; $(VCInstallDir)\lib\arm
+
+
+ pch.h
+ $(IntDir)pch.pch
+ $(ProjectDir);$(IntermediateOutputPath);$(OutDir);%(AdditionalIncludeDirectories)
+ /bigobj /await
+ 4453;28204
+ NDEBUG;%(PreprocessorDefinitions)
+ stdcpp17
+
+
+
+
+ d2d1.lib; d3d11.lib; dxgi.lib; windowscodecs.lib; dwrite.lib; %(AdditionalDependencies)
+ %(AdditionalLibraryDirectories); $(VCInstallDir)\lib\store; $(VCInstallDir)\lib
+
+
+ pch.h
+ $(IntDir)pch.pch
+ $(ProjectDir);$(IntermediateOutputPath);$(OutDir);%(AdditionalIncludeDirectories)
+ /bigobj /await
+ 4453;28204
+ _DEBUG;%(PreprocessorDefinitions)
+ stdcpp17
+
+
+
+
+ d2d1.lib; d3d11.lib; dxgi.lib; windowscodecs.lib; dwrite.lib; %(AdditionalDependencies)
+ %(AdditionalLibraryDirectories); $(VCInstallDir)\lib\store; $(VCInstallDir)\lib
+
+
+ pch.h
+ $(IntDir)pch.pch
+ $(ProjectDir);$(IntermediateOutputPath);$(OutDir);%(AdditionalIncludeDirectories)
+ /bigobj /await
+ 4453;28204
+ NDEBUG;%(PreprocessorDefinitions)
+ stdcpp17
+
+
+
+
+ d2d1.lib; d3d11.lib; dxgi.lib; windowscodecs.lib; dwrite.lib; %(AdditionalDependencies)
+ %(AdditionalLibraryDirectories); $(VCInstallDir)\lib\store\amd64; $(VCInstallDir)\lib\amd64
+
+
+ pch.h
+ $(IntDir)pch.pch
+ $(ProjectDir);$(IntermediateOutputPath);$(OutDir);%(AdditionalIncludeDirectories)
+ /bigobj /await
+ 4453;28204
+ _DEBUG;%(PreprocessorDefinitions)
+ stdcpp17
+
+
+
+
+
+
+
+
+
+
+ d2d1.lib; d3d11.lib; dxgi.lib; windowscodecs.lib; dwrite.lib; %(AdditionalDependencies)
+ %(AdditionalLibraryDirectories); $(VCInstallDir)\lib\store\amd64; $(VCInstallDir)\lib\amd64
+
+
+ pch.h
+ $(IntDir)pch.pch
+ $(ProjectDir);$(IntermediateOutputPath);$(OutDir);%(AdditionalIncludeDirectories)
+ /bigobj /await
+ 4453;28204
+ NDEBUG;%(PreprocessorDefinitions)
+ stdcpp17
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Create
+
+
+
+
+
+ Designer
+
+
+ Document
+
+
+ Vertex
+ 4.0
+ false
+ true
+ true
+ g_%(Filename)
+ $(OutDir)Shaders\%(Filename).h
+
+
+
+
+ Pixel
+ 4.0
+ false
+ true
+ true
+ g_%(Filename)
+ $(OutDir)Shaders\%(Filename).h
+
+
+
+
+ Vertex
+ 4.0
+ false
+ true
+ true
+ g_%(Filename)
+ $(OutDir)Shaders\%(Filename).h
+
+
+
+
+
+
+ Pixel
+ 4.0
+ false
+ true
+ true
+ g_%(Filename)
+ $(OutDir)Shaders\%(Filename).h
+
+
+
+
+ Pixel
+ 4.0
+ false
+ true
+ true
+ g_%(Filename)
+ $(OutDir)Shaders\%(Filename).h
+
+
+
+
+ Vertex
+ 4.0
+ false
+ true
+ true
+ g_%(Filename)
+ $(OutDir)Shaders\%(Filename).h
+
+
+
+
+
+
+ Assets\microsoft-sdk.png
+
+
+ Assets\smalltile-sdk.png
+
+
+ Assets\splash-sdk.png
+
+
+ Assets\squaretile-sdk.png
+
+
+ Assets\storelogo-sdk.png
+
+
+ Assets\tile-sdk.png
+
+
+ Assets\windows-sdk.png
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/Samples/CameraStreamCoordinateMapper/cpp/CameraStreamCoordinateMapper.vcxproj.filters b/Samples/CameraStreamCoordinateMapper/cpp/CameraStreamCoordinateMapper.vcxproj.filters
new file mode 100644
index 0000000000..6c3b5ecdc3
--- /dev/null
+++ b/Samples/CameraStreamCoordinateMapper/cpp/CameraStreamCoordinateMapper.vcxproj.filters
@@ -0,0 +1,131 @@
+
+
+
+
+ 80bfd669-aa83-4537-9611-027cffe0d8af
+ bmp;fbx;gif;jpg;jpeg;tga;tiff;tif;png
+
+
+ {ff4f51b5-d7e2-4469-8646-cfb32072fcf9}
+
+
+ {7008d2da-1e1e-4cab-ab53-4ed72f8f381f}
+
+
+ {a8106968-2fbf-47cb-a14f-33ce9794d315}
+
+
+
+
+
+
+
+ Common
+
+
+ Common
+
+
+ Common
+
+
+ Content
+
+
+ Content
+
+
+ Content
+
+
+ Content
+
+
+ Content
+
+
+
+
+
+
+
+ Common
+
+
+ Common
+
+
+ Common
+
+
+ Common
+
+
+ Shaders
+
+
+ Content
+
+
+ Content
+
+
+ Content
+
+
+ Content
+
+
+ Shaders
+
+
+ Content
+
+
+
+
+
+
+
+ Assets
+
+
+ Assets
+
+
+ Assets
+
+
+ Assets
+
+
+ Assets
+
+
+ Assets
+
+
+ Assets
+
+
+
+
+ Shaders
+
+
+ Shaders
+
+
+ Shaders
+
+
+ Shaders
+
+
+ Shaders
+
+
+ Shaders
+
+
+
\ No newline at end of file
diff --git a/Samples/CameraStreamCoordinateMapper/cpp/CameraStreamCoordinateMapperMain.cpp b/Samples/CameraStreamCoordinateMapper/cpp/CameraStreamCoordinateMapperMain.cpp
new file mode 100644
index 0000000000..9bcf293dc1
--- /dev/null
+++ b/Samples/CameraStreamCoordinateMapper/cpp/CameraStreamCoordinateMapperMain.cpp
@@ -0,0 +1,262 @@
+//*********************************************************
+//
+// Copyright (c) Microsoft. All rights reserved.
+// This code is licensed under the MIT License (MIT).
+// THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF
+// ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY
+// IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR
+// PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT.
+//
+//*********************************************************
+
+#include "pch.h"
+#include "CameraStreamCoordinateMapperMain.h"
+
+#include "Common\DirectXHelper.h"
+#include "Common\GraphicsCamera.h"
+
+#include "Content\QuadRenderer.h"
+#include "Content\TextRenderer.h"
+#include "Content\CameraCoordinateMapper.h"
+
+using namespace CameraStreamCoordinateMapper;
+
+using namespace Microsoft::WRL;
+using namespace Microsoft::WRL::Wrappers;
+
+using namespace Windows::Foundation;
+using namespace Windows::Foundation::Numerics;
+using namespace Windows::Storage;
+using namespace Windows::System::Threading;
+
+using namespace Windows::UI;
+using namespace Windows::UI::Core;
+using namespace Windows::UI::Xaml;
+
+using namespace concurrency;
+using namespace DirectX;
+
+CameraStreamCoordinateMapperMain::CameraStreamCoordinateMapperMain(const std::shared_ptr<DX::DeviceResources>& deviceResources) :
+ m_deviceResources(deviceResources)
+{
+ m_camera = ref new FirstPersonCamera();
+
+ m_gpuPerformanceTimer = std::make_unique<DX::GpuPerformanceTimer>(m_deviceResources);
+ m_quadRenderer = std::make_unique<QuadRenderer>(m_deviceResources);
+ m_statisticsTextRenderer = std::make_unique<TextRenderer>(m_deviceResources);
+ m_helpTextRenderer = std::make_unique<TextRenderer>(m_deviceResources);
+
+ static constexpr const wchar_t* c_HelpTextDisabledStr =
+ L"'F1' or 'Gamepad Menu' to toggle help \n";
+
+ static constexpr const wchar_t* c_HelpTextEnabledStr =
+ L"'F1' or 'Gamepad Menu' to toggle help \n"
+ L"'Space' or 'Gamepad A' to toggle streaming \n"
+ L"'R' or 'Left Thumbstick' to reset the camera \n"
+ L"'Y' or 'Gamepad Y' to toggle camera mode \n"
+ L"'W/A/S/D/Q/E' to translate the free-roam camera \n"
+ L"'Left thumbstick' to translate the free-roam camera \n"
+ L"'Left mouse button' to rotate the free-roam camera \n"
+ L"'Right thumbstick' to rotate the free-roam camera \n";
+
+ m_helpTextRenderer->Update(c_HelpTextDisabledStr);
+
+ CoreWindow::GetForCurrentThread()->KeyDown +=
+ ref new TypedEventHandler<CoreWindow^, KeyEventArgs^>([this](CoreWindow^ window, KeyEventArgs^ args)
+ {
+ if (args->VirtualKey == Windows::System::VirtualKey::Space ||
+ args->VirtualKey == Windows::System::VirtualKey::GamepadA)
+ {
+ m_streamingEnabled = !m_streamingEnabled;
+ }
+
+ if (args->VirtualKey == Windows::System::VirtualKey::R ||
+ args->VirtualKey == Windows::System::VirtualKey::GamepadLeftThumbstickButton)
+ {
+ m_camera->Reset();
+ }
+
+ if (args->VirtualKey == Windows::System::VirtualKey::Y ||
+ args->VirtualKey == Windows::System::VirtualKey::GamepadY)
+ {
+ m_freeRoamCameraEnabled = !m_freeRoamCameraEnabled;
+ m_camera->Reset();
+ }
+
+ if (args->VirtualKey == Windows::System::VirtualKey::F1 ||
+ args->VirtualKey == Windows::System::VirtualKey::GamepadMenu)
+ {
+ m_showHelpText = !m_showHelpText;
+
+ if (m_showHelpText)
+ {
+ m_helpTextRenderer->Update(c_HelpTextEnabledStr);
+ }
+ else
+ {
+ m_helpTextRenderer->Update(c_HelpTextDisabledStr);
+ }
+ }
+ });
+
+ CreateResourcesAsync().then([this]
+ {
+ m_readyToRender = true;
+ });
+
+ // Register to be notified if the Device is lost or recreated
+ m_deviceResources->RegisterDeviceNotify(this);
+}
+
+task<void> CameraStreamCoordinateMapper::CameraStreamCoordinateMapperMain::CreateResourcesAsync()
+{
+ m_cameraCoordinateMapper = co_await CameraCoordinateMapper::CreateAndStartAsync(m_deviceResources);
+}
+
+CameraStreamCoordinateMapperMain::~CameraStreamCoordinateMapperMain()
+{
+ // Deregister device notification
+ m_deviceResources->RegisterDeviceNotify(nullptr);
+}
+
+// Updates application state when the window size changes (e.g. device orientation change)
+void CameraStreamCoordinateMapperMain::CreateWindowSizeDependentResources()
+{
+ m_quadRenderer->CreateWindowSizeDependentResources();
+}
+
+// Updates the application state once per frame.
+void CameraStreamCoordinateMapperMain::Update()
+{
+ if (!m_readyToRender)
+ {
+ return;
+ }
+
+ // Update scene objects.
+ m_timer.Tick([&]()
+ {
+ m_camera->Update(static_cast<float>(m_timer.GetElapsedSeconds()));
+
+ if (m_cameraCoordinateMapper->AreCamerasStreaming())
+ {
+ static wchar_t buffer[256] = {};
+
+ const wchar_t* cameraModeStr = m_freeRoamCameraEnabled ? L"[Free-roam camera]" : L"[Target camera]";
+ const wchar_t* streamingModeStr = m_streamingEnabled ? L"[Streaming]" : L"[Paused]";
+ const float gpuTime = m_gpuPerformanceTimer->GetAvgFrameTime();
+
+ swprintf_s(buffer, L"%s %s [Gpu time: %.3fms] [FPS: %.4u]", streamingModeStr, cameraModeStr, gpuTime, m_timer.GetFramesPerSecond());
+
+ m_statisticsTextRenderer->Update(buffer);
+ }
+ else
+ {
+ m_statisticsTextRenderer->Update(L"No compatible/accessible camera was found\nEnsure the application has permission to access the camera");
+ }
+ });
+
+ if (!m_streamingEnabled)
+ {
+ return;
+ }
+
+ if (m_cameraCoordinateMapper->TryAcquireLatestFrameData())
+ {
+ m_gpuPerformanceTimer->StartTimerForFrame();
+
+ m_cameraCoordinateMapper->RunCoordinateMapping();
+
+ m_gpuPerformanceTimer->EndTimerForFrame();
+ }
+}
+
+// Renders the current frame according to the current application state.
+// Returns true if the frame was rendered and is ready to be displayed.
+bool CameraStreamCoordinateMapperMain::Render()
+{
+ if (!m_readyToRender)
+ {
+ return false;
+ }
+
+ if (m_freeRoamCameraEnabled && m_cameraCoordinateMapper->AreCamerasStreaming())
+ {
+ m_cameraCoordinateMapper->VisualizeCoordinateMapping(
+ m_deviceResources->GetBackBufferRenderTargetView(),
+ m_deviceResources->GetDepthStencilView(),
+ m_deviceResources->GetScreenViewport(),
+ m_camera->GetWorldToCamera());
+ }
+ else
+ {
+ ID3D11DeviceContext* context = m_deviceResources->GetD3DDeviceContext();
+
+ // Reset the viewport to target the whole screen.
+ const D3D11_VIEWPORT viewport = m_deviceResources->GetScreenViewport();
+ context->RSSetViewports(1, &viewport);
+
+ // Reset render targets to the screen.
+ ID3D11RenderTargetView *const targets[1] = { m_deviceResources->GetBackBufferRenderTargetView() };
+ context->OMSetRenderTargets(1, targets, m_deviceResources->GetDepthStencilView());
+
+ // Clear the back imageBuffer and depth stencil view.
+ context->ClearRenderTargetView(m_deviceResources->GetBackBufferRenderTargetView(), DirectX::Colors::CornflowerBlue);
+ context->ClearDepthStencilView(m_deviceResources->GetDepthStencilView(), D3D11_CLEAR_DEPTH | D3D11_CLEAR_STENCIL, 1.0f, 0);
+
+ if (m_cameraCoordinateMapper->AreCamerasStreaming())
+ {
+ ID3D11ShaderResourceView* targetTexture = m_cameraCoordinateMapper->GetTargetShaderResourceView();
+ const float targetAspectRatio = m_cameraCoordinateMapper->GetTargetAspectRatio();
+
+ // Render the resulting target image as a quad
+ if (targetTexture)
+ {
+ m_quadRenderer->Render(targetTexture, targetAspectRatio);
+ }
+ }
+ }
+
+ m_statisticsTextRenderer->Render(TextRenderer::TextAlignment::BottomRight);
+ m_helpTextRenderer->Render(TextRenderer::TextAlignment::TopLeft);
+
+ return true;
+}
+
+task<void> CameraStreamCoordinateMapperMain::OnSuspendingAsync()
+{
+ m_readyToRender = false;
+ return m_cameraCoordinateMapper->StopAsync();
+}
+
+void CameraStreamCoordinateMapperMain::OnResuming()
+{
+ m_cameraCoordinateMapper->StartAsync().then([this]
+ {
+ m_readyToRender = true;
+ });
+}
+
+// Notifies renderers that device resources need to be released.
+void CameraStreamCoordinateMapperMain::OnDeviceLost()
+{
+ m_readyToRender = false;
+
+ m_quadRenderer->ReleaseDeviceDependentResources();
+ m_statisticsTextRenderer->ReleaseDeviceDependentResources();
+ m_helpTextRenderer->ReleaseDeviceDependentResources();
+ m_cameraCoordinateMapper->ReleaseDeviceDependentResources();
+}
+
+// Notifies renderers that device resources may now be recreated.
+void CameraStreamCoordinateMapperMain::OnDeviceRestored()
+{
+ m_cameraCoordinateMapper->CreateDeviceDependentResources();
+ m_helpTextRenderer->CreateDeviceDependentResources();
+ m_statisticsTextRenderer->CreateDeviceDependentResources();
+ m_quadRenderer->CreateDeviceDependentResources();
+
+ CreateWindowSizeDependentResources();
+
+ m_readyToRender = true;
+}
diff --git a/Samples/CameraStreamCoordinateMapper/cpp/CameraStreamCoordinateMapperMain.h b/Samples/CameraStreamCoordinateMapper/cpp/CameraStreamCoordinateMapperMain.h
new file mode 100644
index 0000000000..01e2fd9d75
--- /dev/null
+++ b/Samples/CameraStreamCoordinateMapper/cpp/CameraStreamCoordinateMapperMain.h
@@ -0,0 +1,69 @@
+//*********************************************************
+//
+// Copyright (c) Microsoft. All rights reserved.
+// This code is licensed under the MIT License (MIT).
+// THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF
+// ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY
+// IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR
+// PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT.
+//
+//*********************************************************
+
+#pragma once
+
+#include "Common\StepTimer.h"
+#include "Common\DeviceResources.h"
+
+#include "Content\GpuCoordinateMapper.h"
+
+namespace DX {
+ class GpuPerformanceTimer;
+}
+
+ref class FirstPersonCamera;
+
+// Renders Direct2D and 3D content on the screen.
+namespace CameraStreamCoordinateMapper
+{
+ class QuadRenderer;
+ class TextRenderer;
+ class CameraCoordinateMapper;
+
+ class CameraStreamCoordinateMapperMain : public DX::IDeviceNotify
+ {
+ public:
+ CameraStreamCoordinateMapperMain(const std::shared_ptr<DX::DeviceResources>& deviceResources);
+ concurrency::task<void> CreateResourcesAsync();
+
+ ~CameraStreamCoordinateMapperMain();
+ void CreateWindowSizeDependentResources();
+ void Update();
+ bool Render();
+
+ concurrency::task<void> OnSuspendingAsync();
+ void OnResuming();
+
+ // IDeviceNotify
+ virtual void OnDeviceLost();
+ virtual void OnDeviceRestored();
+
+ private:
+ std::shared_ptr<DX::DeviceResources> m_deviceResources;
+
+ DX::StepTimer m_timer;
+
+ FirstPersonCamera^ m_camera;
+
+ std::unique_ptr<QuadRenderer> m_quadRenderer;
+ std::unique_ptr<TextRenderer> m_statisticsTextRenderer;
+ std::unique_ptr<TextRenderer> m_helpTextRenderer;
+
+ std::unique_ptr<DX::GpuPerformanceTimer> m_gpuPerformanceTimer;
+ std::shared_ptr<CameraCoordinateMapper> m_cameraCoordinateMapper;
+
+ bool m_streamingEnabled = true;
+ bool m_freeRoamCameraEnabled = true;
+ bool m_showHelpText = false;
+ bool m_readyToRender = false;
+ };
+}
\ No newline at end of file
diff --git a/Samples/CameraStreamCoordinateMapper/cpp/Common/DeviceResources.cpp b/Samples/CameraStreamCoordinateMapper/cpp/Common/DeviceResources.cpp
new file mode 100644
index 0000000000..29d893da00
--- /dev/null
+++ b/Samples/CameraStreamCoordinateMapper/cpp/Common/DeviceResources.cpp
@@ -0,0 +1,715 @@
+//*********************************************************
+//
+// Copyright (c) Microsoft. All rights reserved.
+// This code is licensed under the MIT License (MIT).
+// THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF
+// ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY
+// IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR
+// PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT.
+//
+//*********************************************************
+
+#include "pch.h"
+#include "DeviceResources.h"
+#include "DirectXHelper.h"
+#include <windows.ui.xaml.media.dxinterop.h>
+
+using namespace D2D1;
+using namespace DirectX;
+using namespace Microsoft::WRL;
+using namespace Windows::Foundation;
+using namespace Windows::Graphics::Display;
+using namespace Windows::UI::Core;
+using namespace Windows::UI::Xaml::Controls;
+using namespace Platform;
+
+namespace DisplayMetrics
+{
+ // High resolution displays can require a lot of GPU and battery power to render.
+ // High resolution phones, for example, may suffer from poor battery life if
+ // games attempt to render at 60 frames per second at full fidelity.
+ // The decision to render at full fidelity across all platforms and form factors
+ // should be deliberate.
+ static const bool SupportHighResolutions = false;
+
+ // The default thresholds that define a "high resolution" display. If the thresholds
+ // are exceeded and SupportHighResolutions is false, the dimensions will be scaled
+ // by 50%.
+ static const float DpiThreshold = 192.0f; // 200% of standard desktop display.
+ static const float WidthThreshold = 1920.0f; // 1080p width.
+ static const float HeightThreshold = 1080.0f; // 1080p height.
+};
+
+// Constants used to calculate screen rotations
+namespace ScreenRotation
+{
+ // 0-degree Z-rotation
+ static const XMFLOAT4X4 Rotation0(
+ 1.0f, 0.0f, 0.0f, 0.0f,
+ 0.0f, 1.0f, 0.0f, 0.0f,
+ 0.0f, 0.0f, 1.0f, 0.0f,
+ 0.0f, 0.0f, 0.0f, 1.0f
+ );
+
+ // 90-degree Z-rotation
+ static const XMFLOAT4X4 Rotation90(
+ 0.0f, 1.0f, 0.0f, 0.0f,
+ -1.0f, 0.0f, 0.0f, 0.0f,
+ 0.0f, 0.0f, 1.0f, 0.0f,
+ 0.0f, 0.0f, 0.0f, 1.0f
+ );
+
+ // 180-degree Z-rotation
+ static const XMFLOAT4X4 Rotation180(
+ -1.0f, 0.0f, 0.0f, 0.0f,
+ 0.0f, -1.0f, 0.0f, 0.0f,
+ 0.0f, 0.0f, 1.0f, 0.0f,
+ 0.0f, 0.0f, 0.0f, 1.0f
+ );
+
+ // 270-degree Z-rotation
+ static const XMFLOAT4X4 Rotation270(
+ 0.0f, -1.0f, 0.0f, 0.0f,
+ 1.0f, 0.0f, 0.0f, 0.0f,
+ 0.0f, 0.0f, 1.0f, 0.0f,
+ 0.0f, 0.0f, 0.0f, 1.0f
+ );
+};
+
+// Constructor for DeviceResources.
+DX::DeviceResources::DeviceResources() :
+ m_screenViewport(),
+ m_d3dFeatureLevel(D3D_FEATURE_LEVEL_9_1),
+ m_d3dRenderTargetSize(),
+ m_outputSize(),
+ m_logicalSize(),
+ m_nativeOrientation(DisplayOrientations::None),
+ m_currentOrientation(DisplayOrientations::None),
+ m_dpi(-1.0f),
+ m_effectiveDpi(-1.0f),
+ m_deviceNotify(nullptr)
+{
+ CreateDeviceIndependentResources();
+ CreateDeviceResources();
+}
+
+// Configures resources that don't depend on the Direct3D device.
+void DX::DeviceResources::CreateDeviceIndependentResources()
+{
+ // Initialize Direct2D resources.
+ D2D1_FACTORY_OPTIONS options;
+ ZeroMemory(&options, sizeof(D2D1_FACTORY_OPTIONS));
+
+#if defined(_DEBUG)
+ // If the project is in a debug build, enable Direct2D debugging via SDK Layers.
+ options.debugLevel = D2D1_DEBUG_LEVEL_INFORMATION;
+#endif
+
+ // Initialize the Direct2D Factory.
+ DX::ThrowIfFailed(
+ D2D1CreateFactory(
+ D2D1_FACTORY_TYPE_SINGLE_THREADED,
+ __uuidof(ID2D1Factory3),
+ &options,
+ &m_d2dFactory
+ )
+ );
+
+ // Initialize the DirectWrite Factory.
+ DX::ThrowIfFailed(
+ DWriteCreateFactory(
+ DWRITE_FACTORY_TYPE_SHARED,
+ __uuidof(IDWriteFactory3),
+ &m_dwriteFactory
+ )
+ );
+
+ // Initialize the Windows Imaging Component (WIC) Factory.
+ DX::ThrowIfFailed(
+ CoCreateInstance(
+ CLSID_WICImagingFactory2,
+ nullptr,
+ CLSCTX_INPROC_SERVER,
+ IID_PPV_ARGS(&m_wicFactory)
+ )
+ );
+}
+
+// Configures the Direct3D device, and stores handles to it and the device context.
+void DX::DeviceResources::CreateDeviceResources()
+{
+ // This flag adds support for surfaces with a different color channel ordering
+ // than the API default. It is required for compatibility with Direct2D.
+ UINT creationFlags = D3D11_CREATE_DEVICE_BGRA_SUPPORT;
+
+#if defined(_DEBUG)
+ if (DX::SdkLayersAvailable())
+ {
+ // If the project is in a debug build, enable debugging via SDK Layers with this flag.
+ creationFlags |= D3D11_CREATE_DEVICE_DEBUG;
+ }
+#endif
+
+ // This array defines the set of DirectX hardware feature levels this app will support.
+ // Note the ordering should be preserved.
+ // Don't forget to declare your application's minimum required feature level in its
+ // description. All applications are assumed to support 9.1 unless otherwise stated.
+ D3D_FEATURE_LEVEL featureLevels[] =
+ {
+ D3D_FEATURE_LEVEL_10_0, // Explicitly make 10.0 feature level the highest we support
+ D3D_FEATURE_LEVEL_9_3,
+ D3D_FEATURE_LEVEL_9_2,
+ D3D_FEATURE_LEVEL_9_1
+ };
+
+ // Create the Direct3D 11 API device object and a corresponding context.
+ ComPtr<ID3D11Device> device;
+ ComPtr<ID3D11DeviceContext> context;
+
+ HRESULT hr = D3D11CreateDevice(
+ nullptr, // Specify nullptr to use the default adapter.
+ D3D_DRIVER_TYPE_HARDWARE, // Create a device using the hardware graphics driver.
+ 0, // Should be 0 unless the driver is D3D_DRIVER_TYPE_SOFTWARE.
+ creationFlags, // Set debug and Direct2D compatibility flags.
+ featureLevels, // List of feature levels this app can support.
+ ARRAYSIZE(featureLevels), // Size of the list above.
+ D3D11_SDK_VERSION, // Always set this to D3D11_SDK_VERSION for Windows Runtime apps.
+ &device, // Returns the Direct3D device created.
+ &m_d3dFeatureLevel, // Returns feature level of device created.
+ &context // Returns the device immediate context.
+ );
+
+ if (FAILED(hr))
+ {
+ // If the initialization fails, fall back to the WARP device.
+ // For more information on WARP, see:
+ // http://go.microsoft.com/fwlink/?LinkId=286690
+ DX::ThrowIfFailed(
+ D3D11CreateDevice(
+ nullptr,
+ D3D_DRIVER_TYPE_WARP, // Create a WARP device instead of a hardware device.
+ 0,
+ creationFlags,
+ featureLevels,
+ ARRAYSIZE(featureLevels),
+ D3D11_SDK_VERSION,
+ &device,
+ &m_d3dFeatureLevel,
+ &context
+ )
+ );
+ }
+
+ // Store pointers to the Direct3D 11.3 API device and immediate context.
+ DX::ThrowIfFailed(
+ device.As(&m_d3dDevice)
+ );
+
+ D3D11_FEATURE_DATA_D3D10_X_HARDWARE_OPTIONS options;
+ m_d3dDevice->CheckFeatureSupport(D3D11_FEATURE_D3D10_X_HARDWARE_OPTIONS, &options, sizeof(options));
+
+ DX::ThrowIfFailed(
+ context.As(&m_d3dContext)
+ );
+
+ // Create the Direct2D device object and a corresponding context.
+ ComPtr<IDXGIDevice3> dxgiDevice;
+ DX::ThrowIfFailed(
+ m_d3dDevice.As(&dxgiDevice)
+ );
+
+ DX::ThrowIfFailed(
+ m_d2dFactory->CreateDevice(dxgiDevice.Get(), &m_d2dDevice)
+ );
+
+ DX::ThrowIfFailed(
+ m_d2dDevice->CreateDeviceContext(
+ D2D1_DEVICE_CONTEXT_OPTIONS_NONE,
+ &m_d2dContext
+ )
+ );
+}
+
+// These resources need to be recreated every time the window size is changed.
+void DX::DeviceResources::CreateWindowSizeDependentResources()
+{
+ // Clear the previous window size specific context.
+ ID3D11RenderTargetView* nullViews[] = {nullptr};
+ m_d3dContext->OMSetRenderTargets(ARRAYSIZE(nullViews), nullViews, nullptr);
+ m_d3dRenderTargetView = nullptr;
+ m_d2dContext->SetTarget(nullptr);
+ m_d2dTargetBitmap = nullptr;
+ m_d3dDepthStencilView = nullptr;
+
+ m_d3dContext->ClearState();
+ m_d3dContext->Flush1(D3D11_CONTEXT_TYPE_ALL, nullptr);
+
+ UpdateRenderTargetSize();
+
+ // The width and height of the swap chain must be based on the window's
+ // natively-oriented width and height. If the window is not in the native
+ // orientation, the dimensions must be reversed.
+ DXGI_MODE_ROTATION displayRotation = ComputeDisplayRotation();
+
+ bool swapDimensions = displayRotation == DXGI_MODE_ROTATION_ROTATE90 || displayRotation == DXGI_MODE_ROTATION_ROTATE270;
+ m_d3dRenderTargetSize.Width = swapDimensions ? m_outputSize.Height : m_outputSize.Width;
+ m_d3dRenderTargetSize.Height = swapDimensions ? m_outputSize.Width : m_outputSize.Height;
+
+ if (m_swapChain != nullptr)
+ {
+ // If the swap chain already exists, resize it.
+ HRESULT hr = m_swapChain->ResizeBuffers(
+ 2, // Double-buffered swap chain.
+ lround(m_d3dRenderTargetSize.Width),
+ lround(m_d3dRenderTargetSize.Height),
+ DXGI_FORMAT_B8G8R8A8_UNORM,
+ 0
+ );
+
+ if (hr == DXGI_ERROR_DEVICE_REMOVED || hr == DXGI_ERROR_DEVICE_RESET)
+ {
+ // If the device was removed for any reason, a new device and swap chain will need to be created.
+ HandleDeviceLost();
+
+ // Everything is set up now. Do not continue execution of this method. HandleDeviceLost will reenter this method
+ // and correctly set up the new device.
+ return;
+ }
+ else
+ {
+ DX::ThrowIfFailed(hr);
+ }
+ }
+ else
+ {
+ // Otherwise, create a new one using the same adapter as the existing Direct3D device.
+ DXGI_SCALING scaling = DisplayMetrics::SupportHighResolutions ? DXGI_SCALING_NONE : DXGI_SCALING_STRETCH;
+ DXGI_SWAP_CHAIN_DESC1 swapChainDesc = {0};
+
+ swapChainDesc.Width = lround(m_d3dRenderTargetSize.Width); // Match the size of the window.
+ swapChainDesc.Height = lround(m_d3dRenderTargetSize.Height);
+ swapChainDesc.Format = DXGI_FORMAT_B8G8R8A8_UNORM; // This is the most common swap chain format.
+ swapChainDesc.Stereo = false;
+ swapChainDesc.SampleDesc.Count = 1; // Don't use multi-sampling.
+ swapChainDesc.SampleDesc.Quality = 0;
+ swapChainDesc.BufferUsage = DXGI_USAGE_RENDER_TARGET_OUTPUT;
+ swapChainDesc.BufferCount = 2; // Use double-buffering to minimize latency.
+ swapChainDesc.SwapEffect = DXGI_SWAP_EFFECT_FLIP_SEQUENTIAL; // All Windows Store apps must use this SwapEffect.
+ swapChainDesc.Flags = 0;
+ swapChainDesc.Scaling = scaling;
+ swapChainDesc.AlphaMode = DXGI_ALPHA_MODE_IGNORE;
+
+ // This sequence obtains the DXGI factory that was used to create the Direct3D device above.
+ ComPtr<IDXGIDevice3> dxgiDevice;
+ DX::ThrowIfFailed(
+ m_d3dDevice.As(&dxgiDevice)
+ );
+
+ ComPtr<IDXGIAdapter> dxgiAdapter;
+ DX::ThrowIfFailed(
+ dxgiDevice->GetAdapter(&dxgiAdapter)
+ );
+
+ ComPtr<IDXGIFactory4> dxgiFactory;
+ DX::ThrowIfFailed(
+ dxgiAdapter->GetParent(IID_PPV_ARGS(&dxgiFactory))
+ );
+
+ ComPtr<IDXGISwapChain1> swapChain;
+ DX::ThrowIfFailed(
+ dxgiFactory->CreateSwapChainForCoreWindow(
+ m_d3dDevice.Get(),
+ reinterpret_cast<IUnknown*>(m_window.Get()),
+ &swapChainDesc,
+ nullptr,
+ &swapChain
+ )
+ );
+ DX::ThrowIfFailed(
+ swapChain.As(&m_swapChain)
+ );
+
+ // Ensure that DXGI does not queue more than one frame at a time. This both reduces latency and
+ // ensures that the application will only render after each VSync, minimizing power consumption.
+ DX::ThrowIfFailed(
+ dxgiDevice->SetMaximumFrameLatency(1)
+ );
+ }
+
+ // Set the proper orientation for the swap chain, and generate 2D and
+ // 3D matrix transformations for rendering to the rotated swap chain.
+ // Note the rotation angle for the 2D and 3D transforms are different.
+ // This is due to the difference in coordinate spaces. Additionally,
+ // the 3D matrix is specified explicitly to avoid rounding errors.
+
+ switch (displayRotation)
+ {
+ case DXGI_MODE_ROTATION_IDENTITY:
+ m_orientationTransform2D = Matrix3x2F::Identity();
+ m_orientationTransform3D = ScreenRotation::Rotation0;
+ break;
+
+ case DXGI_MODE_ROTATION_ROTATE90:
+ m_orientationTransform2D =
+ Matrix3x2F::Rotation(90.0f) *
+ Matrix3x2F::Translation(m_logicalSize.Height, 0.0f);
+ m_orientationTransform3D = ScreenRotation::Rotation270;
+ break;
+
+ case DXGI_MODE_ROTATION_ROTATE180:
+ m_orientationTransform2D =
+ Matrix3x2F::Rotation(180.0f) *
+ Matrix3x2F::Translation(m_logicalSize.Width, m_logicalSize.Height);
+ m_orientationTransform3D = ScreenRotation::Rotation180;
+ break;
+
+ case DXGI_MODE_ROTATION_ROTATE270:
+ m_orientationTransform2D =
+ Matrix3x2F::Rotation(270.0f) *
+ Matrix3x2F::Translation(0.0f, m_logicalSize.Width);
+ m_orientationTransform3D = ScreenRotation::Rotation90;
+ break;
+
+ default:
+ throw ref new FailureException();
+ }
+
+ DX::ThrowIfFailed(
+ m_swapChain->SetRotation(displayRotation)
+ );
+
+ // Create a render target view of the swap chain back buffer.
+ ComPtr<ID3D11Texture2D1> backBuffer;
+ DX::ThrowIfFailed(
+ m_swapChain->GetBuffer(0, IID_PPV_ARGS(&backBuffer))
+ );
+
+ DX::ThrowIfFailed(
+ m_d3dDevice->CreateRenderTargetView1(
+ backBuffer.Get(),
+ nullptr,
+ &m_d3dRenderTargetView
+ )
+ );
+
+ // Create a depth stencil view for use with 3D rendering if needed.
+ CD3D11_TEXTURE2D_DESC1 depthStencilDesc(
+ DXGI_FORMAT_D24_UNORM_S8_UINT,
+ lround(m_d3dRenderTargetSize.Width),
+ lround(m_d3dRenderTargetSize.Height),
+ 1, // This depth stencil view has only one texture.
+ 1, // Use a single mipmap level.
+ D3D11_BIND_DEPTH_STENCIL
+ );
+
+ ComPtr<ID3D11Texture2D1> depthStencil;
+ DX::ThrowIfFailed(
+ m_d3dDevice->CreateTexture2D1(
+ &depthStencilDesc,
+ nullptr,
+ &depthStencil
+ )
+ );
+
+ CD3D11_DEPTH_STENCIL_VIEW_DESC depthStencilViewDesc(D3D11_DSV_DIMENSION_TEXTURE2D);
+ DX::ThrowIfFailed(
+ m_d3dDevice->CreateDepthStencilView(
+ depthStencil.Get(),
+ &depthStencilViewDesc,
+ &m_d3dDepthStencilView
+ )
+ );
+
+ // Set the 3D rendering viewport to target the entire window.
+ m_screenViewport = CD3D11_VIEWPORT(
+ 0.0f,
+ 0.0f,
+ m_d3dRenderTargetSize.Width,
+ m_d3dRenderTargetSize.Height
+ );
+
+ m_d3dContext->RSSetViewports(1, &m_screenViewport);
+
+ // Create a Direct2D target bitmap associated with the
+ // swap chain back buffer and set it as the current target.
+ D2D1_BITMAP_PROPERTIES1 bitmapProperties =
+ D2D1::BitmapProperties1(
+ D2D1_BITMAP_OPTIONS_TARGET | D2D1_BITMAP_OPTIONS_CANNOT_DRAW,
+ D2D1::PixelFormat(DXGI_FORMAT_B8G8R8A8_UNORM, D2D1_ALPHA_MODE_PREMULTIPLIED),
+ m_dpi,
+ m_dpi
+ );
+
+ ComPtr<IDXGISurface2> dxgiBackBuffer;
+ DX::ThrowIfFailed(
+ m_swapChain->GetBuffer(0, IID_PPV_ARGS(&dxgiBackBuffer))
+ );
+
+ DX::ThrowIfFailed(
+ m_d2dContext->CreateBitmapFromDxgiSurface(
+ dxgiBackBuffer.Get(),
+ &bitmapProperties,
+ &m_d2dTargetBitmap
+ )
+ );
+
+ m_d2dContext->SetTarget(m_d2dTargetBitmap.Get());
+ m_d2dContext->SetDpi(m_effectiveDpi, m_effectiveDpi);
+
+ // Grayscale text anti-aliasing is recommended for all Windows Store apps.
+ m_d2dContext->SetTextAntialiasMode(D2D1_TEXT_ANTIALIAS_MODE_GRAYSCALE);
+}
+
+// Determine the dimensions of the render target and whether it will be scaled down.
+void DX::DeviceResources::UpdateRenderTargetSize()
+{
+ m_effectiveDpi = m_dpi;
+
+ // To improve battery life on high resolution devices, render to a smaller render target
+ // and allow the GPU to scale the output when it is presented.
+ if (!DisplayMetrics::SupportHighResolutions && m_dpi > DisplayMetrics::DpiThreshold)
+ {
+ float width = DX::ConvertDipsToPixels(m_logicalSize.Width, m_dpi);
+ float height = DX::ConvertDipsToPixels(m_logicalSize.Height, m_dpi);
+
+ // When the device is in portrait orientation, height > width. Compare the
+ // larger dimension against the width threshold and the smaller dimension
+ // against the height threshold.
+ if (std::max(width, height) > DisplayMetrics::WidthThreshold && std::min(width, height) > DisplayMetrics::HeightThreshold)
+ {
+ // To scale the app we change the effective DPI. Logical size does not change.
+ m_effectiveDpi /= 2.0f;
+ }
+ }
+
+ // Calculate the necessary render target size in pixels.
+ m_outputSize.Width = DX::ConvertDipsToPixels(m_logicalSize.Width, m_effectiveDpi);
+ m_outputSize.Height = DX::ConvertDipsToPixels(m_logicalSize.Height, m_effectiveDpi);
+
+ // Prevent zero size DirectX content from being created.
+ m_outputSize.Width = std::max(m_outputSize.Width, 1.0f);
+ m_outputSize.Height = std::max(m_outputSize.Height, 1.0f);
+}
+
+// This method is called when the CoreWindow is created (or re-created).
+void DX::DeviceResources::SetWindow(CoreWindow^ window)
+{
+ DisplayInformation^ currentDisplayInformation = DisplayInformation::GetForCurrentView();
+
+ m_window = window;
+ m_logicalSize = Windows::Foundation::Size(window->Bounds.Width, window->Bounds.Height);
+ m_nativeOrientation = currentDisplayInformation->NativeOrientation;
+ m_currentOrientation = currentDisplayInformation->CurrentOrientation;
+ m_dpi = currentDisplayInformation->LogicalDpi;
+ m_d2dContext->SetDpi(m_dpi, m_dpi);
+
+ CreateWindowSizeDependentResources();
+}
+
+// This method is called in the event handler for the SizeChanged event.
+void DX::DeviceResources::SetLogicalSize(Windows::Foundation::Size logicalSize)
+{
+ if (m_logicalSize != logicalSize)
+ {
+ m_logicalSize = logicalSize;
+ CreateWindowSizeDependentResources();
+ }
+}
+
+// This method is called in the event handler for the DpiChanged event.
+void DX::DeviceResources::SetDpi(float dpi)
+{
+ if (dpi != m_dpi)
+ {
+ m_dpi = dpi;
+
+ // When the display DPI changes, the logical size of the window (measured in Dips) also changes and needs to be updated.
+ m_logicalSize = Windows::Foundation::Size(m_window->Bounds.Width, m_window->Bounds.Height);
+
+ m_d2dContext->SetDpi(m_dpi, m_dpi);
+ CreateWindowSizeDependentResources();
+ }
+}
+
+// This method is called in the event handler for the OrientationChanged event.
+void DX::DeviceResources::SetCurrentOrientation(DisplayOrientations currentOrientation)
+{
+ if (m_currentOrientation != currentOrientation)
+ {
+ m_currentOrientation = currentOrientation;
+ CreateWindowSizeDependentResources();
+ }
+}
+
+// This method is called in the event handler for the DisplayContentsInvalidated event.
+void DX::DeviceResources::ValidateDevice()
+{
+ // The D3D Device is no longer valid if the default adapter changed since the device
+ // was created or if the device has been removed.
+
+ // First, get the information for the default adapter from when the device was created.
+
+ ComPtr<IDXGIDevice3> dxgiDevice;
+ DX::ThrowIfFailed(m_d3dDevice.As(&dxgiDevice));
+
+ ComPtr<IDXGIAdapter> deviceAdapter;
+ DX::ThrowIfFailed(dxgiDevice->GetAdapter(&deviceAdapter));
+
+ ComPtr<IDXGIFactory4> deviceFactory;
+ DX::ThrowIfFailed(deviceAdapter->GetParent(IID_PPV_ARGS(&deviceFactory)));
+
+ ComPtr<IDXGIAdapter1> previousDefaultAdapter;
+ DX::ThrowIfFailed(deviceFactory->EnumAdapters1(0, &previousDefaultAdapter));
+
+ DXGI_ADAPTER_DESC1 previousDesc;
+ DX::ThrowIfFailed(previousDefaultAdapter->GetDesc1(&previousDesc));
+
+ // Next, get the information for the current default adapter.
+
+ ComPtr<IDXGIFactory4> currentFactory;
+ DX::ThrowIfFailed(CreateDXGIFactory1(IID_PPV_ARGS(&currentFactory)));
+
+ ComPtr<IDXGIAdapter1> currentDefaultAdapter;
+ DX::ThrowIfFailed(currentFactory->EnumAdapters1(0, &currentDefaultAdapter));
+
+ DXGI_ADAPTER_DESC1 currentDesc;
+ DX::ThrowIfFailed(currentDefaultAdapter->GetDesc1(&currentDesc));
+
+ // If the adapter LUIDs don't match, or if the device reports that it has been removed,
+ // a new D3D device must be created.
+
+ if (previousDesc.AdapterLuid.LowPart != currentDesc.AdapterLuid.LowPart ||
+ previousDesc.AdapterLuid.HighPart != currentDesc.AdapterLuid.HighPart ||
+ FAILED(m_d3dDevice->GetDeviceRemovedReason()))
+ {
+ // Release references to resources related to the old device.
+ dxgiDevice = nullptr;
+ deviceAdapter = nullptr;
+ deviceFactory = nullptr;
+ previousDefaultAdapter = nullptr;
+
+ // Create a new device and swap chain.
+ HandleDeviceLost();
+ }
+}
+
+// Recreate all device resources and set them back to the current state.
+void DX::DeviceResources::HandleDeviceLost()
+{
+ m_swapChain = nullptr;
+
+ if (m_deviceNotify != nullptr)
+ {
+ m_deviceNotify->OnDeviceLost();
+ }
+
+ CreateDeviceResources();
+ m_d2dContext->SetDpi(m_dpi, m_dpi);
+ CreateWindowSizeDependentResources();
+
+ if (m_deviceNotify != nullptr)
+ {
+ m_deviceNotify->OnDeviceRestored();
+ }
+}
+
+// Register our DeviceNotify to be informed on device lost and creation.
+void DX::DeviceResources::RegisterDeviceNotify(DX::IDeviceNotify* deviceNotify)
+{
+ m_deviceNotify = deviceNotify;
+}
+
+// Call this method when the app suspends. It provides a hint to the driver that the app
+// is entering an idle state and that temporary buffers can be reclaimed for use by other apps.
+void DX::DeviceResources::Trim()
+{
+ ComPtr<IDXGIDevice3> dxgiDevice;
+ m_d3dDevice.As(&dxgiDevice);
+
+ dxgiDevice->Trim();
+}
+
+// Present the contents of the swap chain to the screen.
+void DX::DeviceResources::Present()
+{
+ // The first argument instructs DXGI to block until VSync, putting the application
+ // to sleep until the next VSync. This ensures we don't waste any cycles rendering
+ // frames that will never be displayed to the screen.
+ DXGI_PRESENT_PARAMETERS parameters = { 0 };
+ HRESULT hr = m_swapChain->Present1(0, 0, &parameters);
+
+ // Discard the contents of the render target.
+ // This is a valid operation only when the existing contents will be entirely
+ // overwritten. If dirty or scroll rects are used, this call should be removed.
+ m_d3dContext->DiscardView1(m_d3dRenderTargetView.Get(), nullptr, 0);
+
+ // Discard the contents of the depth stencil.
+ m_d3dContext->DiscardView1(m_d3dDepthStencilView.Get(), nullptr, 0);
+
+ // If the device was removed either by a disconnection or a driver upgrade, we
+ // must recreate all device resources.
+
+ if (hr == DXGI_ERROR_DEVICE_REMOVED || hr == DXGI_ERROR_DEVICE_RESET)
+ {
+ HandleDeviceLost();
+ }
+ else
+ {
+ DX::ThrowIfFailed(hr);
+ }
+}
+
+// This method determines the rotation between the display device's native orientation and the
+// current display orientation.
+DXGI_MODE_ROTATION DX::DeviceResources::ComputeDisplayRotation()
+{
+ DXGI_MODE_ROTATION rotation = DXGI_MODE_ROTATION_UNSPECIFIED;
+
+ // Note: NativeOrientation can only be Landscape or Portrait even though
+ // the DisplayOrientations enum has other values.
+ switch (m_nativeOrientation)
+ {
+ case DisplayOrientations::Landscape:
+ switch (m_currentOrientation)
+ {
+ case DisplayOrientations::Landscape:
+ rotation = DXGI_MODE_ROTATION_IDENTITY;
+ break;
+
+ case DisplayOrientations::Portrait:
+ rotation = DXGI_MODE_ROTATION_ROTATE270;
+ break;
+
+ case DisplayOrientations::LandscapeFlipped:
+ rotation = DXGI_MODE_ROTATION_ROTATE180;
+ break;
+
+ case DisplayOrientations::PortraitFlipped:
+ rotation = DXGI_MODE_ROTATION_ROTATE90;
+ break;
+ }
+ break;
+
+ case DisplayOrientations::Portrait:
+ switch (m_currentOrientation)
+ {
+ case DisplayOrientations::Landscape:
+ rotation = DXGI_MODE_ROTATION_ROTATE90;
+ break;
+
+ case DisplayOrientations::Portrait:
+ rotation = DXGI_MODE_ROTATION_IDENTITY;
+ break;
+
+ case DisplayOrientations::LandscapeFlipped:
+ rotation = DXGI_MODE_ROTATION_ROTATE270;
+ break;
+
+ case DisplayOrientations::PortraitFlipped:
+ rotation = DXGI_MODE_ROTATION_ROTATE180;
+ break;
+ }
+ break;
+ }
+ return rotation;
+}
\ No newline at end of file
diff --git a/Samples/CameraStreamCoordinateMapper/cpp/Common/DeviceResources.h b/Samples/CameraStreamCoordinateMapper/cpp/Common/DeviceResources.h
new file mode 100644
index 0000000000..b40be8f31f
--- /dev/null
+++ b/Samples/CameraStreamCoordinateMapper/cpp/Common/DeviceResources.h
@@ -0,0 +1,113 @@
+//*********************************************************
+//
+// Copyright (c) Microsoft. All rights reserved.
+// This code is licensed under the MIT License (MIT).
+// THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF
+// ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY
+// IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR
+// PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT.
+//
+//*********************************************************
+
+#pragma once
+
+namespace DX
+{
+ // Provides an interface for an application that owns DeviceResources to be notified of the device being lost or created.
+ interface IDeviceNotify
+ {
+ virtual void OnDeviceLost() = 0;
+ virtual void OnDeviceRestored() = 0;
+ };
+
+ // Controls all the DirectX device resources.
+ class DeviceResources
+ {
+ public:
+ DeviceResources();
+ void SetWindow(Windows::UI::Core::CoreWindow^ window);
+ void SetLogicalSize(Windows::Foundation::Size logicalSize);
+ void SetCurrentOrientation(Windows::Graphics::Display::DisplayOrientations currentOrientation);
+ void SetDpi(float dpi);
+ void ValidateDevice();
+ void HandleDeviceLost();
+ void RegisterDeviceNotify(IDeviceNotify* deviceNotify);
+ void Trim();
+ void Present();
+
+ // The size of the render target, in pixels.
+ Windows::Foundation::Size GetOutputSize() const { return m_outputSize; }
+
+ // The size of the render target, in dips.
+ Windows::Foundation::Size GetLogicalSize() const { return m_logicalSize; }
+ float GetDpi() const { return m_effectiveDpi; }
+
+ // D3D Accessors.
+ ID3D11Device3* GetD3DDevice() const { return m_d3dDevice.Get(); }
+ ID3D11DeviceContext3* GetD3DDeviceContext() const { return m_d3dContext.Get(); }
+ IDXGISwapChain3* GetSwapChain() const { return m_swapChain.Get(); }
+ D3D_FEATURE_LEVEL GetDeviceFeatureLevel() const { return m_d3dFeatureLevel; }
+ ID3D11RenderTargetView1* GetBackBufferRenderTargetView() const { return m_d3dRenderTargetView.Get(); }
+ ID3D11DepthStencilView* GetDepthStencilView() const { return m_d3dDepthStencilView.Get(); }
+ D3D11_VIEWPORT GetScreenViewport() const { return m_screenViewport; }
+ DirectX::XMFLOAT4X4 GetOrientationTransform3D() const { return m_orientationTransform3D; }
+
+ // D2D Accessors.
+ ID2D1Factory3* GetD2DFactory() const { return m_d2dFactory.Get(); }
+ ID2D1Device2* GetD2DDevice() const { return m_d2dDevice.Get(); }
+ ID2D1DeviceContext2* GetD2DDeviceContext() const { return m_d2dContext.Get(); }
+ ID2D1Bitmap1* GetD2DTargetBitmap() const { return m_d2dTargetBitmap.Get(); }
+ IDWriteFactory3* GetDWriteFactory() const { return m_dwriteFactory.Get(); }
+ IWICImagingFactory2* GetWicImagingFactory() const { return m_wicFactory.Get(); }
+ D2D1::Matrix3x2F GetOrientationTransform2D() const { return m_orientationTransform2D; }
+
+ private:
+ void CreateDeviceIndependentResources();
+ void CreateDeviceResources();
+ void CreateWindowSizeDependentResources();
+ void UpdateRenderTargetSize();
+ DXGI_MODE_ROTATION ComputeDisplayRotation();
+
+ // Direct3D objects.
+ Microsoft::WRL::ComPtr<ID3D11Device3> m_d3dDevice;
+ Microsoft::WRL::ComPtr<ID3D11DeviceContext3> m_d3dContext;
+ Microsoft::WRL::ComPtr<IDXGISwapChain3> m_swapChain;
+
+ // Direct3D rendering objects. Required for 3D.
+ Microsoft::WRL::ComPtr<ID3D11RenderTargetView1> m_d3dRenderTargetView;
+ Microsoft::WRL::ComPtr<ID3D11DepthStencilView> m_d3dDepthStencilView;
+ D3D11_VIEWPORT m_screenViewport;
+
+ // Direct2D drawing components.
+ Microsoft::WRL::ComPtr<ID2D1Factory3> m_d2dFactory;
+ Microsoft::WRL::ComPtr<ID2D1Device2> m_d2dDevice;
+ Microsoft::WRL::ComPtr<ID2D1DeviceContext2> m_d2dContext;
+ Microsoft::WRL::ComPtr<ID2D1Bitmap1> m_d2dTargetBitmap;
+
+ // DirectWrite drawing components.
+ Microsoft::WRL::ComPtr<IDWriteFactory3> m_dwriteFactory;
+ Microsoft::WRL::ComPtr<IWICImagingFactory2> m_wicFactory;
+
+ // Cached reference to the Window.
+ Platform::Agile<Windows::UI::Core::CoreWindow> m_window;
+
+ // Cached device properties.
+ D3D_FEATURE_LEVEL m_d3dFeatureLevel;
+ Windows::Foundation::Size m_d3dRenderTargetSize;
+ Windows::Foundation::Size m_outputSize;
+ Windows::Foundation::Size m_logicalSize;
+ Windows::Graphics::Display::DisplayOrientations m_nativeOrientation;
+ Windows::Graphics::Display::DisplayOrientations m_currentOrientation;
+ float m_dpi;
+
+ // This is the DPI that will be reported back to the app. It takes into account whether the app supports high resolution screens or not.
+ float m_effectiveDpi;
+
+ // Transforms used for display orientation.
+ D2D1::Matrix3x2F m_orientationTransform2D;
+ DirectX::XMFLOAT4X4 m_orientationTransform3D;
+
+ // The IDeviceNotify can be held directly as it owns the DeviceResources.
+ IDeviceNotify* m_deviceNotify;
+ };
+}
diff --git a/Samples/CameraStreamCoordinateMapper/cpp/Common/DirectXHelper.cpp b/Samples/CameraStreamCoordinateMapper/cpp/Common/DirectXHelper.cpp
new file mode 100644
index 0000000000..85e13b02ed
--- /dev/null
+++ b/Samples/CameraStreamCoordinateMapper/cpp/Common/DirectXHelper.cpp
@@ -0,0 +1,301 @@
+#include "pch.h"
+#include "Common\DirectXHelper.h"
+#include "Common\DeviceResources.h"
+
+using namespace Microsoft::WRL;
+
+ComPtr<ID3D11VertexShader> DX::CreateVertexShader(
+ ID3D11Device* device,
+ const void* data,
+ size_t dataSize)
+{
+ if (dataSize > std::numeric_limits<UINT>::max())
+ {
+ throw std::invalid_argument("size is larger than UINT");
+ }
+
+ ComPtr<ID3D11VertexShader> result;
+ DX::ThrowIfFailed(device->CreateVertexShader(
+ data,
+ static_cast<UINT>(dataSize),
+ nullptr,
+ &result));
+
+ return result;
+}
+
+ComPtr<ID3D11PixelShader> DX::CreatePixelShader(
+ ID3D11Device* device,
+ const void* data,
+ size_t dataSize)
+{
+ if (dataSize > std::numeric_limits<UINT>::max())
+ {
+ throw std::invalid_argument("size is larger than UINT");
+ }
+
+ ComPtr<ID3D11PixelShader> result;
+ DX::ThrowIfFailed(device->CreatePixelShader(
+ data,
+ static_cast<UINT>(dataSize),
+ nullptr,
+ &result));
+
+ return result;
+}
+
+ComPtr<ID3D11InputLayout> DX::CreateInputLayout(
+ ID3D11Device* device,
+ const D3D11_INPUT_ELEMENT_DESC* vertexDesc,
+ size_t vertexDescCount,
+ const void* vertexShaderData,
+ size_t vertexShaderDataSize)
+{
+ if (vertexShaderDataSize > std::numeric_limits<UINT>::max() ||
+ vertexDescCount > std::numeric_limits<UINT>::max())
+ {
+ throw std::invalid_argument("size is larger than UINT");
+ }
+
+ ComPtr<ID3D11InputLayout> result;
+ DX::ThrowIfFailed(device->CreateInputLayout(
+ vertexDesc,
+ static_cast<UINT>(vertexDescCount),
+ vertexShaderData,
+ static_cast<UINT>(vertexShaderDataSize),
+ &result));
+
+ return result;
+}
+
+ComPtr<ID3D11SamplerState> DX::CreateSamplerState(
+ ID3D11Device* device,
+ const D3D11_SAMPLER_DESC& samplerDesc)
+{
+ ComPtr<ID3D11SamplerState> result;
+
+ DX::ThrowIfFailed(device->CreateSamplerState(
+ &samplerDesc,
+ &result));
+
+ return result;
+}
+
+ComPtr<ID3D11Texture2D> DX::CreateTexture(
+ ID3D11Device* device,
+ UINT width,
+ UINT height,
+ DXGI_FORMAT format,
+ std::optional<TextureData> textureData,
+ std::optional<UINT> bindFlags,
+ std::optional<D3D11_USAGE> usage,
+ std::optional<UINT> cpuFlags)
+{
+ const CD3D11_TEXTURE2D_DESC desc{
+ format,
+ width,
+ height,
+ 1u,
+ 1u,
+ bindFlags.value_or(D3D11_BIND_SHADER_RESOURCE),
+ usage.value_or(D3D11_USAGE_DEFAULT),
+ cpuFlags.value_or(0x0)
+ };
+
+ D3D11_SUBRESOURCE_DATA data = {};
+
+ if (textureData)
+ {
+ data.pSysMem = textureData->buffer;
+ data.SysMemPitch = textureData->bytesPerPixel * width;
+ }
+
+ const D3D11_SUBRESOURCE_DATA* pData = (textureData) ? &data : nullptr;
+
+ ComPtr<ID3D11Texture2D> result;
+ DX::ThrowIfFailed(device->CreateTexture2D(&desc, pData, &result));
+ return result;
+}
+
+ComPtr<ID3D11Buffer> DX::CreateBuffer(
+ ID3D11Device* device,
+ const size_t size,
+ const void* buffer,
+ std::optional<UINT> bindFlags,
+ std::optional<D3D11_USAGE> usage,
+ std::optional<UINT> cpuFlags)
+{
+ if (size > std::numeric_limits<UINT>::max())
+ {
+ throw std::invalid_argument("size is larger than UINT");
+ }
+
+ const CD3D11_BUFFER_DESC desc{
+ static_cast<UINT>(size),
+ bindFlags.value_or(D3D11_BIND_CONSTANT_BUFFER),
+ usage.value_or(D3D11_USAGE_DEFAULT),
+ cpuFlags.value_or(0x0),
+ };
+
+ const D3D11_SUBRESOURCE_DATA data{
+ buffer,
+ 0u,
+ 0u
+ };
+
+ const D3D11_SUBRESOURCE_DATA* pData = (buffer) ? &data : nullptr;
+
+ ComPtr<ID3D11Buffer> result;
+ DX::ThrowIfFailed(device->CreateBuffer(&desc, pData, &result));
+ return result;
+}
+
+ComPtr<ID3D11ShaderResourceView> DX::CreateShaderResourceView(
+ ID3D11Texture2D* texture,
+ std::optional<DXGI_FORMAT> format)
+{
+ ComPtr<ID3D11Device> device;
+ texture->GetDevice(&device);
+
+ const CD3D11_SHADER_RESOURCE_VIEW_DESC desc{
+ texture,
+ D3D11_SRV_DIMENSION_TEXTURE2D,
+ format.value_or(DXGI_FORMAT_UNKNOWN)
+ };
+
+ ComPtr<ID3D11ShaderResourceView> result;
+ DX::ThrowIfFailed(device->CreateShaderResourceView(texture, &desc, &result));
+ return result;
+}
+
+ComPtr<ID3D11RenderTargetView> DX::CreateRenderTargetView(
+ ID3D11Texture2D* texture)
+{
+ ComPtr<ID3D11Device> device;
+ texture->GetDevice(&device);
+
+ const CD3D11_RENDER_TARGET_VIEW_DESC desc{
+ texture,
+ D3D11_RTV_DIMENSION_TEXTURE2D
+ };
+
+ ComPtr<ID3D11RenderTargetView> result;
+ DX::ThrowIfFailed(device->CreateRenderTargetView(texture, &desc, &result));
+ return result;
+}
+
+ComPtr<ID3D11DepthStencilView> DX::CreateDepthStencilView(
+ ID3D11Texture2D* texture,
+ std::optional<DXGI_FORMAT> format)
+{
+ ComPtr<ID3D11Device> device;
+ texture->GetDevice(&device);
+
+ const CD3D11_DEPTH_STENCIL_VIEW_DESC desc{
+ texture,
+ D3D11_DSV_DIMENSION_TEXTURE2D,
+ format.value_or(DXGI_FORMAT_UNKNOWN)
+ };
+
+ ComPtr<ID3D11DepthStencilView> result;
+ DX::ThrowIfFailed(device->CreateDepthStencilView(texture, &desc, &result));
+ return result;
+}
+
+DX::GpuPerformanceTimer::GpuPerformanceTimer(
+ const std::shared_ptr<DX::DeviceResources>& deviceResources) :
+ m_deviceResources(deviceResources)
+{
+ ID3D11Device* device = m_deviceResources->GetD3DDevice();
+
+ const D3D11_QUERY_DESC timestampQuery = CD3D11_QUERY_DESC(D3D11_QUERY_TIMESTAMP);
+ const D3D11_QUERY_DESC disjointQuery = CD3D11_QUERY_DESC(D3D11_QUERY_TIMESTAMP_DISJOINT);
+
+ for (uint32_t i = 0; i < QueryCount; ++i)
+ {
+ device->CreateQuery(&timestampQuery, m_startTimestampQuery[i].ReleaseAndGetAddressOf());
+ device->CreateQuery(&timestampQuery, m_endTimestampQuery[i].ReleaseAndGetAddressOf());
+ device->CreateQuery(&disjointQuery, m_disjointQuery[i].ReleaseAndGetAddressOf());
+ }
+}
+
+void DX::GpuPerformanceTimer::StartTimerForFrame()
+{
+ ID3D11DeviceContext* context = m_deviceResources->GetD3DDeviceContext();
+
+ // Just some book-keeping to see how long this algorithm is running
+ m_currentQuery = (m_currentQuery + 1) % QueryCount;
+ const uint32_t ReadQuery = (m_currentQuery + 1) % QueryCount;
+
+ if (m_currentFrameIndex >= QueryCount)
+ {
+ D3D11_QUERY_DATA_TIMESTAMP_DISJOINT disjointData;
+ while (context->GetData(m_disjointQuery[ReadQuery].Get(), &disjointData, sizeof(disjointData), 0) != S_OK);
+
+ if (disjointData.Disjoint == 0)
+ {
+ uint64_t start;
+ while (context->GetData(m_startTimestampQuery[ReadQuery].Get(), &start, sizeof(start), 0) != S_OK);
+
+ uint64_t end;
+ while (context->GetData(m_endTimestampQuery[ReadQuery].Get(), &end, sizeof(end), 0) != S_OK);
+
+ constexpr float secondsToMilliseconds = 1000.0f;
+ const float ticksToSeconds = 1.0f / static_cast<float>(disjointData.Frequency);
+ const uint64_t ticksElapsed = end - start;
+
+ m_processingTimeMs = static_cast<float>(ticksElapsed) * ticksToSeconds * secondsToMilliseconds;
+
+ // Reset every ~10 seconds so the min/max has a chance to update
+ if (m_currentFrameIndex % 300 == 0)
+ {
+ m_processingTimeMinMs = +std::numeric_limits<float>::infinity();
+ m_processingTimeMaxMs = -std::numeric_limits<float>::infinity();
+ }
+
+ m_processingTimeMinMs = std::min(m_processingTimeMinMs, m_processingTimeMs);
+ m_processingTimeMaxMs = std::max(m_processingTimeMaxMs, m_processingTimeMs);
+ }
+ else
+ {
+ m_processingTimeMs = std::numeric_limits<float>::quiet_NaN();
+ }
+
+ m_processingTimeHistory[m_processingTimeHistoryIndex] = m_processingTimeMs;
+ m_processingTimeHistoryIndex = (m_processingTimeHistoryIndex + 1) % TimeHistoryCount;
+
+ m_processingTimeAvgMs = std::accumulate(m_processingTimeHistory, m_processingTimeHistory + TimeHistoryCount, 0.0f) / static_cast<float>(TimeHistoryCount);
+ }
+
+ ++m_currentFrameIndex;
+
+ context->Begin(m_disjointQuery[m_currentQuery].Get());
+ context->End(m_startTimestampQuery[m_currentQuery].Get());
+}
+
+void DX::GpuPerformanceTimer::EndTimerForFrame()
+{
+ ID3D11DeviceContext* context = m_deviceResources->GetD3DDeviceContext();
+ context->End(m_endTimestampQuery[m_currentQuery].Get());
+ context->End(m_disjointQuery[m_currentQuery].Get());
+}
+
+float DX::GpuPerformanceTimer::GetFrameTime() const
+{
+ return m_processingTimeMs;
+}
+
+float DX::GpuPerformanceTimer::GetAvgFrameTime() const
+{
+ return m_processingTimeAvgMs;
+}
+
+float DX::GpuPerformanceTimer::GetMinFrameTime() const
+{
+ return m_processingTimeMinMs;
+}
+
+float DX::GpuPerformanceTimer::GetMaxFrameTime() const
+{
+ return m_processingTimeMaxMs;
+}
diff --git a/Samples/CameraStreamCoordinateMapper/cpp/Common/DirectXHelper.h b/Samples/CameraStreamCoordinateMapper/cpp/Common/DirectXHelper.h
new file mode 100644
index 0000000000..3b30b0800a
--- /dev/null
+++ b/Samples/CameraStreamCoordinateMapper/cpp/Common/DirectXHelper.h
@@ -0,0 +1,168 @@
+//*********************************************************
+//
+// Copyright (c) Microsoft. All rights reserved.
+// This code is licensed under the MIT License (MIT).
+// THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF
+// ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY
+// IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR
+// PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT.
+//
+//*********************************************************
+
+#pragma once
+
+namespace DX
+{
+ inline void ThrowIfFailed(HRESULT hr)
+ {
+ if (FAILED(hr))
+ {
+ // Set a breakpoint on this line to catch Win32 API errors.
+ throw Platform::Exception::CreateException(hr);
+ }
+ }
+
+ // Function that reads from a binary file asynchronously.
+ inline Concurrency::task<std::vector<byte>> ReadDataAsync(const std::wstring& filename)
+ {
+ using namespace Windows::Storage;
+ using namespace Concurrency;
+
+ Windows::Storage::StorageFolder^ folder = Windows::ApplicationModel::Package::Current->InstalledLocation;
+
+ return create_task(folder->GetFileAsync(Platform::StringReference(filename.c_str()))).then([] (StorageFile^ file)
+ {
+ return FileIO::ReadBufferAsync(file);
+ }).then([] (Streams::IBuffer^ fileBuffer) -> std::vector<byte>
+ {
+ std::vector<byte> returnBuffer;
+ returnBuffer.resize(fileBuffer->Length);
+ Streams::DataReader::FromBuffer(fileBuffer)->ReadBytes(Platform::ArrayReference<byte>(returnBuffer.data(), fileBuffer->Length));
+ return returnBuffer;
+ });
+ }
+
+ // Converts a length in device-independent pixels (DIPs) to a length in physical pixels.
+ inline float ConvertDipsToPixels(float dips, float dpi)
+ {
+ static const float dipsPerInch = 96.0f;
+ return floorf(dips * dpi / dipsPerInch + 0.5f); // Round to nearest integer.
+ }
+
+#if defined(_DEBUG)
+ // Check for SDK Layer support.
+ inline bool SdkLayersAvailable()
+ {
+ HRESULT hr = D3D11CreateDevice(
+ nullptr,
+ D3D_DRIVER_TYPE_NULL, // There is no need to create a real hardware device.
+ 0,
+ D3D11_CREATE_DEVICE_DEBUG, // Check for the SDK layers.
+ nullptr, // Any feature level will do.
+ 0,
+ D3D11_SDK_VERSION, // Always set this to D3D11_SDK_VERSION for Windows Store apps.
+ nullptr, // No need to keep the D3D device reference.
+ nullptr, // No need to know the feature level.
+ nullptr // No need to keep the D3D device context reference.
+ );
+
+ return SUCCEEDED(hr);
+ }
+#endif
+
+ struct TextureData
+ {
+ const void* buffer;
+ const UINT bytesPerPixel;
+ };
+
+ Microsoft::WRL::ComPtr<ID3D11VertexShader> CreateVertexShader(
+ ID3D11Device* device,
+ const void* data,
+ size_t dataSize);
+
+ Microsoft::WRL::ComPtr<ID3D11PixelShader> CreatePixelShader(
+ ID3D11Device* device,
+ const void* data,
+ size_t dataSize);
+
+ Microsoft::WRL::ComPtr<ID3D11InputLayout> CreateInputLayout(
+ ID3D11Device* device,
+ const D3D11_INPUT_ELEMENT_DESC* vertexDesc,
+ size_t vertexDescCount,
+ const void* vertexShaderData,
+ size_t vertexShaderDataSize);
+
+ Microsoft::WRL::ComPtr<ID3D11SamplerState> CreateSamplerState(
+ ID3D11Device* device,
+ const D3D11_SAMPLER_DESC& samplerDesc = CD3D11_SAMPLER_DESC{ CD3D11_DEFAULT{} });
+
+ Microsoft::WRL::ComPtr<ID3D11Texture2D> CreateTexture(
+ ID3D11Device* device,
+ UINT width,
+ UINT height,
+ DXGI_FORMAT format,
+ std::optional<TextureData> textureData = std::nullopt,
+ std::optional<UINT> bindFlags = std::nullopt,
+ std::optional<D3D11_USAGE> usage = std::nullopt,
+ std::optional<UINT> cpuFlags = std::nullopt);
+
+ Microsoft::WRL::ComPtr<ID3D11Buffer> CreateBuffer(
+ ID3D11Device* device,
+ const size_t size,
+ const void* buffer = nullptr,
+ std::optional<UINT> bindFlags = std::nullopt,
+ std::optional<D3D11_USAGE> usage = std::nullopt,
+ std::optional<UINT> cpuFlags = std::nullopt);
+
+ Microsoft::WRL::ComPtr<ID3D11ShaderResourceView> CreateShaderResourceView(
+ ID3D11Texture2D* texture,
+ std::optional<DXGI_FORMAT> format = std::nullopt);
+
+ Microsoft::WRL::ComPtr<ID3D11RenderTargetView> CreateRenderTargetView(
+ ID3D11Texture2D* texture);
+
+ Microsoft::WRL::ComPtr<ID3D11DepthStencilView> CreateDepthStencilView(
+ ID3D11Texture2D* texture,
+ std::optional<DXGI_FORMAT> format = std::nullopt);
+
+ class DeviceResources;
+
+ // Used to track time spent executing Gpu work
+ class GpuPerformanceTimer
+ {
+ public:
+ GpuPerformanceTimer(
+ const std::shared_ptr<DeviceResources>& deviceResources);
+
+ void StartTimerForFrame();
+ void EndTimerForFrame();
+
+ float GetFrameTime() const;
+ float GetAvgFrameTime() const;
+ float GetMinFrameTime() const;
+ float GetMaxFrameTime() const;
+
+ private:
+ std::shared_ptr<DeviceResources> m_deviceResources;
+
+ static constexpr uint32_t QueryCount = 5u;
+
+ Microsoft::WRL::ComPtr<ID3D11Query> m_startTimestampQuery[QueryCount];
+ Microsoft::WRL::ComPtr<ID3D11Query> m_endTimestampQuery[QueryCount];
+ Microsoft::WRL::ComPtr<ID3D11Query> m_disjointQuery[QueryCount];
+
+ uint32_t m_currentQuery = 0u;
+ uint32_t m_currentFrameIndex = 0u;
+
+ static constexpr uint32_t TimeHistoryCount = 64u;
+
+ uint32_t m_processingTimeHistoryIndex = 0u;
+ float m_processingTimeHistory[TimeHistoryCount] = {};
+
+ float m_processingTimeMs = 0.0f;
+ float m_processingTimeAvgMs = 0.0f;
+ float m_processingTimeMinMs = +std::numeric_limits<float>::infinity();
+ float m_processingTimeMaxMs = -std::numeric_limits<float>::infinity();
+ };
+}
diff --git a/Samples/CameraStreamCoordinateMapper/cpp/Common/GraphicsCamera.cpp b/Samples/CameraStreamCoordinateMapper/cpp/Common/GraphicsCamera.cpp
new file mode 100644
index 0000000000..b9ecbc7c3e
--- /dev/null
+++ b/Samples/CameraStreamCoordinateMapper/cpp/Common/GraphicsCamera.cpp
@@ -0,0 +1,172 @@
+//*********************************************************
+//
+// Copyright (c) Microsoft. All rights reserved.
+// This code is licensed under the MIT License (MIT).
+// THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF
+// ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY
+// IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR
+// PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT.
+//
+//*********************************************************
+
+#include "pch.h"
+#include "Common\GraphicsCamera.h"
+
+using namespace Windows::System;
+using namespace Windows::Foundation;
+using namespace Windows::Foundation::Numerics;
+using namespace Windows::UI::Core;
+
+FirstPersonCamera::FirstPersonCamera()
+{
+ CoreWindow::GetForCurrentThread()->PointerMoved +=
+ ref new TypedEventHandler<CoreWindow^, PointerEventArgs^>(this, &FirstPersonCamera::OnPointerMoved);
+}
+
+void FirstPersonCamera::OnPointerMoved(
+ CoreWindow^ window,
+ PointerEventArgs^ args)
+{
+ if (args->CurrentPoint->PointerDevice->PointerDeviceType == Windows::Devices::Input::PointerDeviceType::Mouse)
+ {
+ const Point currCursor = args->CurrentPoint->Position;
+
+ // We should ignore the first mouse event as we have no previous to compare against.
+ const bool validPrevious = !std::isnan(m_prevCursor.X) && !std::isnan(m_prevCursor.Y);
+
+ ////////////////////////////////////////////////////////////////////////////////
+ // Rotation by Mouse
+ if (args->CurrentPoint->Properties->IsLeftButtonPressed && validPrevious)
+ {
+ const float deltaX = currCursor.X - m_prevCursor.X;
+ const float deltaY = currCursor.Y - m_prevCursor.Y;
+
+ RotateYaw(deltaX * cPixelsToRadiansPerSecond);
+ RotatePitch(deltaY * cPixelsToRadiansPerSecond);
+ }
+
+ m_prevCursor = currCursor;
+ }
+}
+
+void FirstPersonCamera::Update(float dt)
+{
+ auto isKeyHeldDown = [](VirtualKey key) {
+ return (CoreWindow::GetForCurrentThread()->GetKeyState(key) & CoreVirtualKeyStates::Down) == CoreVirtualKeyStates::Down;
+ };
+
+ ////////////////////////////////////////////////////////////////////////////////
+ // Movement by Gamepad or Keyboard
+
+ if (isKeyHeldDown(VirtualKey::W) ||
+ isKeyHeldDown(VirtualKey::GamepadLeftThumbstickUp))
+ {
+ MoveAlongZ(-cMovementMetersPerSecond * dt);
+ }
+
+ if (isKeyHeldDown(VirtualKey::S) ||
+ isKeyHeldDown(VirtualKey::GamepadLeftThumbstickDown))
+ {
+ MoveAlongZ(+cMovementMetersPerSecond * dt);
+ }
+
+ if (isKeyHeldDown(VirtualKey::A) ||
+ isKeyHeldDown(VirtualKey::GamepadLeftThumbstickLeft))
+ {
+ MoveAlongX(-cMovementMetersPerSecond * dt);
+ }
+
+ if (isKeyHeldDown(VirtualKey::D) ||
+ isKeyHeldDown(VirtualKey::GamepadLeftThumbstickRight))
+ {
+ MoveAlongX(+cMovementMetersPerSecond * dt);
+ }
+
+ if (isKeyHeldDown(VirtualKey::Q) ||
+ isKeyHeldDown(VirtualKey::GamepadLeftTrigger))
+ {
+ MoveAlongY(-cMovementMetersPerSecond * dt);
+ }
+
+ if (isKeyHeldDown(VirtualKey::E) ||
+ isKeyHeldDown(VirtualKey::GamepadRightTrigger))
+ {
+ MoveAlongY(+cMovementMetersPerSecond * dt);
+ }
+
+ ////////////////////////////////////////////////////////////////////////////////
+ // Rotation by Gamepad
+
+ if (isKeyHeldDown(VirtualKey::GamepadRightThumbstickLeft))
+ {
+ RotateYaw(-cRotationRadiansPerSecond * dt);
+ }
+
+ if (isKeyHeldDown(VirtualKey::GamepadRightThumbstickRight))
+ {
+ RotateYaw(+cRotationRadiansPerSecond * dt);
+ }
+
+ if (isKeyHeldDown(VirtualKey::GamepadRightThumbstickUp))
+ {
+ RotatePitch(-cRotationRadiansPerSecond * dt);
+ }
+
+ if (isKeyHeldDown(VirtualKey::GamepadRightThumbstickDown))
+ {
+ RotatePitch(+cRotationRadiansPerSecond * dt);
+ }
+}
+
+void FirstPersonCamera::Reset()
+{
+ m_position = Windows::Foundation::Numerics::float3::zero();
+ m_prevCursor = {
+ std::numeric_limits<float>::quiet_NaN(),
+ std::numeric_limits<float>::quiet_NaN()
+ };
+
+ m_yawAngle = 0.0f;
+ m_pitchAngle = 0.0f;
+}
+
+float4x4 FirstPersonCamera::GetWorldToCamera()
+{
+ return make_float4x4_translation(-m_position) * make_float4x4_rotation_y(m_yawAngle) * make_float4x4_rotation_x(m_pitchAngle);
+}
+
+float4x4 FirstPersonCamera::GetCameraToWorld()
+{
+ return make_float4x4_rotation_x(-m_pitchAngle) * make_float4x4_rotation_y(-m_yawAngle) * make_float4x4_translation(m_position);
+}
+
+void FirstPersonCamera::RotateYaw(float radians)
+{
+ m_yawAngle += radians;
+}
+
+void FirstPersonCamera::RotatePitch(float radians)
+{
+ m_pitchAngle += radians;
+}
+
+void FirstPersonCamera::MoveAlongX(float distance)
+{
+ const float4x4 cameraToWorld = GetCameraToWorld();
+ const float3 rightVec = { cameraToWorld.m11, cameraToWorld.m12, cameraToWorld.m13 };
+ m_position += rightVec * distance;
+}
+
+void FirstPersonCamera::MoveAlongY(float distance)
+{
+ const float4x4 cameraToWorld = GetCameraToWorld();
+ const float3 upVec = { cameraToWorld.m21, cameraToWorld.m22, cameraToWorld.m23 };
+ m_position += upVec * distance;
+}
+
+void FirstPersonCamera::MoveAlongZ(float distance)
+{
+ const float4x4 cameraToWorld = GetCameraToWorld();
+ const float3 forwardVec = { cameraToWorld.m31, cameraToWorld.m32, cameraToWorld.m33 };
+ m_position += forwardVec * distance;
+}
diff --git a/Samples/CameraStreamCoordinateMapper/cpp/Common/GraphicsCamera.h b/Samples/CameraStreamCoordinateMapper/cpp/Common/GraphicsCamera.h
new file mode 100644
index 0000000000..298ecd1b62
--- /dev/null
+++ b/Samples/CameraStreamCoordinateMapper/cpp/Common/GraphicsCamera.h
@@ -0,0 +1,50 @@
+//*********************************************************
+//
+// Copyright (c) Microsoft. All rights reserved.
+// This code is licensed under the MIT License (MIT).
+// THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF
+// ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY
+// IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR
+// PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT.
+//
+//*********************************************************
+
+#pragma once
+
+ref class FirstPersonCamera sealed
+{
+public:
+ FirstPersonCamera();
+
+ void Update(float dt);
+ void Reset();
+
+ Windows::Foundation::Numerics::float4x4 GetWorldToCamera();
+ Windows::Foundation::Numerics::float4x4 GetCameraToWorld();
+
+private:
+ void OnPointerMoved(
+ Windows::UI::Core::CoreWindow^ window,
+ Windows::UI::Core::PointerEventArgs^ args);
+
+ void RotateYaw(float radians);
+ void RotatePitch(float radians);
+
+ void MoveAlongX(float distance);
+ void MoveAlongY(float distance);
+ void MoveAlongZ(float distance);
+
+ // Parameters for configuring the camera movement speed
+ static constexpr float cPixelsToRadians = 0.00125f;
+ static constexpr float cPixelsToMeters = 0.005f;
+ static constexpr float cMovementMetersPerSecond = 1.0f;
+ static constexpr float cRotationRadiansPerSecond = 1.0f;
+ static constexpr float cPixelsToRadiansPerSecond = cPixelsToRadians * cRotationRadiansPerSecond;
+ static constexpr float cPixelsToMetersPerSecond = cPixelsToMeters * cMovementMetersPerSecond;
+
+ Windows::Foundation::Numerics::float3 m_position;
+ Windows::Foundation::Point m_prevCursor;
+
+ float m_yawAngle;
+ float m_pitchAngle;
+};
diff --git a/Samples/CameraStreamCoordinateMapper/cpp/Common/StepTimer.h b/Samples/CameraStreamCoordinateMapper/cpp/Common/StepTimer.h
new file mode 100644
index 0000000000..6dd56123b2
--- /dev/null
+++ b/Samples/CameraStreamCoordinateMapper/cpp/Common/StepTimer.h
@@ -0,0 +1,192 @@
+//*********************************************************
+//
+// Copyright (c) Microsoft. All rights reserved.
+// This code is licensed under the MIT License (MIT).
+// THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF
+// ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY
+// IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR
+// PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT.
+//
+//*********************************************************
+
+#pragma once
+
+namespace DX
+{
+ // Helper class for animation and simulation timing.
+ class StepTimer
+ {
+ public:
+ StepTimer() :
+ m_elapsedTicks(0),
+ m_totalTicks(0),
+ m_leftOverTicks(0),
+ m_frameCount(0),
+ m_framesPerSecond(0),
+ m_framesThisSecond(0),
+ m_qpcSecondCounter(0),
+ m_isFixedTimeStep(false),
+ m_targetElapsedTicks(TicksPerSecond / 60)
+ {
+ if (!QueryPerformanceFrequency(&m_qpcFrequency))
+ {
+ throw ref new Platform::FailureException();
+ }
+
+ if (!QueryPerformanceCounter(&m_qpcLastTime))
+ {
+ throw ref new Platform::FailureException();
+ }
+
+ // Initialize max delta to 1/10 of a second.
+ m_qpcMaxDelta = m_qpcFrequency.QuadPart / 10;
+ }
+
+ // Get elapsed time since the previous Update call.
+ uint64 GetElapsedTicks() const { return m_elapsedTicks; }
+ double GetElapsedSeconds() const { return TicksToSeconds(m_elapsedTicks); }
+
+ // Get total time since the start of the program.
+ uint64 GetTotalTicks() const { return m_totalTicks; }
+ double GetTotalSeconds() const { return TicksToSeconds(m_totalTicks); }
+
+ // Get total number of updates since start of the program.
+ uint32 GetFrameCount() const { return m_frameCount; }
+
+ // Get the current framerate.
+ uint32 GetFramesPerSecond() const { return m_framesPerSecond; }
+
+ // Set whether to use fixed or variable timestep mode.
+ void SetFixedTimeStep(bool isFixedTimestep) { m_isFixedTimeStep = isFixedTimestep; }
+
+ // Set how often to call Update when in fixed timestep mode.
+ void SetTargetElapsedTicks(uint64 targetElapsed) { m_targetElapsedTicks = targetElapsed; }
+ void SetTargetElapsedSeconds(double targetElapsed) { m_targetElapsedTicks = SecondsToTicks(targetElapsed); }
+
+ // Integer format represents time using 10,000,000 ticks per second.
+ static const uint64 TicksPerSecond = 10000000;
+
+ static double TicksToSeconds(uint64 ticks) { return static_cast<double>(ticks) / TicksPerSecond; }
+ static uint64 SecondsToTicks(double seconds) { return static_cast<uint64>(seconds * TicksPerSecond); }
+
+ // After an intentional timing discontinuity (for instance a blocking IO operation)
+ // call this to avoid having the fixed timestep logic attempt a set of catch-up
+ // Update calls.
+
+ void ResetElapsedTime()
+ {
+ if (!QueryPerformanceCounter(&m_qpcLastTime))
+ {
+ throw ref new Platform::FailureException();
+ }
+
+ m_leftOverTicks = 0;
+ m_framesPerSecond = 0;
+ m_framesThisSecond = 0;
+ m_qpcSecondCounter = 0;
+ }
+
+ // Update timer state, calling the specified Update function the appropriate number of times.
+ template<typename TUpdate>
+ void Tick(const TUpdate& update)
+ {
+ // Query the current time.
+ LARGE_INTEGER currentTime;
+
+ if (!QueryPerformanceCounter(&currentTime))
+ {
+ throw ref new Platform::FailureException();
+ }
+
+ uint64 timeDelta = currentTime.QuadPart - m_qpcLastTime.QuadPart;
+
+ m_qpcLastTime = currentTime;
+ m_qpcSecondCounter += timeDelta;
+
+ // Clamp excessively large time deltas (e.g. after paused in the debugger).
+ if (timeDelta > m_qpcMaxDelta)
+ {
+ timeDelta = m_qpcMaxDelta;
+ }
+
+ // Convert QPC units into a canonical tick format. This cannot overflow due to the previous clamp.
+ timeDelta *= TicksPerSecond;
+ timeDelta /= m_qpcFrequency.QuadPart;
+
+ uint32 lastFrameCount = m_frameCount;
+
+ if (m_isFixedTimeStep)
+ {
+ // Fixed timestep update logic
+
+ // If the app is running very close to the target elapsed time (within 1/4 of a millisecond) just clamp
+ // the clock to exactly match the target value. This prevents tiny and irrelevant errors
+ // from accumulating over time. Without this clamping, a game that requested a 60 fps
+ // fixed update, running with vsync enabled on a 59.94 NTSC display, would eventually
+ // accumulate enough tiny errors that it would drop a frame. It is better to just round
+ // small deviations down to zero to leave things running smoothly.
+
+ if (abs(static_cast<int64>(timeDelta - m_targetElapsedTicks)) < TicksPerSecond / 4000)
+ {
+ timeDelta = m_targetElapsedTicks;
+ }
+
+ m_leftOverTicks += timeDelta;
+
+ while (m_leftOverTicks >= m_targetElapsedTicks)
+ {
+ m_elapsedTicks = m_targetElapsedTicks;
+ m_totalTicks += m_targetElapsedTicks;
+ m_leftOverTicks -= m_targetElapsedTicks;
+ m_frameCount++;
+
+ update();
+ }
+ }
+ else
+ {
+ // Variable timestep update logic.
+ m_elapsedTicks = timeDelta;
+ m_totalTicks += timeDelta;
+ m_leftOverTicks = 0;
+ m_frameCount++;
+
+ update();
+ }
+
+ // Track the current framerate.
+ if (m_frameCount != lastFrameCount)
+ {
+ m_framesThisSecond++;
+ }
+
+ if (m_qpcSecondCounter >= static_cast<uint64>(m_qpcFrequency.QuadPart))
+ {
+ m_framesPerSecond = m_framesThisSecond;
+ m_framesThisSecond = 0;
+ m_qpcSecondCounter %= m_qpcFrequency.QuadPart;
+ }
+ }
+
+ private:
+ // Source timing data uses QPC units.
+ LARGE_INTEGER m_qpcFrequency;
+ LARGE_INTEGER m_qpcLastTime;
+ uint64 m_qpcMaxDelta;
+
+ // Derived timing data uses a canonical tick format.
+ uint64 m_elapsedTicks;
+ uint64 m_totalTicks;
+ uint64 m_leftOverTicks;
+
+ // Members for tracking the framerate.
+ uint32 m_frameCount;
+ uint32 m_framesPerSecond;
+ uint32 m_framesThisSecond;
+ uint64 m_qpcSecondCounter;
+
+ // Members for configuring fixed timestep mode.
+ bool m_isFixedTimeStep;
+ uint64 m_targetElapsedTicks;
+ };
+}
diff --git a/Samples/CameraStreamCoordinateMapper/cpp/Content/CameraCoordinateMapper.cpp b/Samples/CameraStreamCoordinateMapper/cpp/Content/CameraCoordinateMapper.cpp
new file mode 100644
index 0000000000..1a74e85028
--- /dev/null
+++ b/Samples/CameraStreamCoordinateMapper/cpp/Content/CameraCoordinateMapper.cpp
@@ -0,0 +1,462 @@
+//*********************************************************
+//
+// Copyright (c) Microsoft. All rights reserved.
+// This code is licensed under the MIT License (MIT).
+// THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF
+// ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY
+// IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR
+// PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT.
+//
+//*********************************************************
+
+#include "pch.h"
+
+#include "Common\DirectXHelper.h"
+
+#include "CameraCoordinateMapper.h"
+#include "CameraStreamProcessor.h"
+#include "GpuCoordinateMapper.h"
+
+#include <MemoryBuffer.h>
+
+using namespace CameraStreamCoordinateMapper;
+using namespace concurrency;
+
+using namespace Windows::Foundation;
+using namespace Windows::Foundation::Numerics;
+using namespace Windows::Graphics::Imaging;
+using namespace Windows::Media::Capture;
+using namespace Windows::Media::Capture::Frames;
+using namespace Windows::Media::Devices::Core;
+using namespace Windows::Media::MediaProperties;
+
+using namespace Microsoft::WRL;
+
+namespace
+{
+ template <typename Func>
+ void AccessSoftwareBitmap(SoftwareBitmap^ bitmap, Func&& func)
+ {
+ if (BitmapBuffer^ bitmapBuffer = bitmap->LockBuffer(BitmapBufferAccessMode::Read))
+ {
+ if (IMemoryBufferReference^ bitmapRef = bitmapBuffer->CreateReference())
+ {
+ ComPtr<IMemoryBufferByteAccess> memoryBufferByteAccess;
+ const HRESULT hr = reinterpret_cast<IUnknown*>(bitmapRef)->QueryInterface(IID_PPV_ARGS(&memoryBufferByteAccess));
+
+ if (SUCCEEDED(hr) && memoryBufferByteAccess)
+ {
+ byte* pSourceBuffer;
+ UINT32 capacity;
+ if (SUCCEEDED(memoryBufferByteAccess->GetBuffer(&pSourceBuffer, &capacity)) && pSourceBuffer)
+ {
+ func(pSourceBuffer, capacity);
+ }
+ }
+ }
+ }
+ }
+
+ void CopySoftwareBitmapToDirect3DTexture(SoftwareBitmap^ softwareBitmap, ID3D11DeviceContext* context, ID3D11Texture2D* texture)
+ {
+ AccessSoftwareBitmap(softwareBitmap, [&](byte* buffer, UINT32 capacity)
+ {
+ D3D11_MAPPED_SUBRESOURCE subresource;
+ if (SUCCEEDED(context->Map(texture, 0, D3D11_MAP_WRITE_DISCARD, 0, &subresource)))
+ {
+ std::memcpy(subresource.pData, buffer, capacity);
+ context->Unmap(texture, 0);
+ }
+ });
+ };
+
+ ////////////////////////////////////////////////////////////////////////////////
+ // This is used to generate look-up tables based on the camera intrinsics
+ //
+ // For every point on the CameraIntrinsics from [0, imageWidth] and [0, imageHeight]
+ // "func" will be called, and the result stored in the corresponding location in the
+ // gpu texture. This allows us to utilize the camera intrinsics in shaders.
+ template <typename Func>
+ ComPtr<ID3D11Texture2D> GenerateIntrinsicsMapping(ID3D11Device* device, CameraIntrinsics^ cameraIntrinsics, Func&& func)
+ {
+ using OutputType = decltype(func(cameraIntrinsics, Point{}));
+ static_assert(sizeof(OutputType) == sizeof(float2), "Func should return the equivalent of 2 floats");
+
+ const uint32_t imageWidth = cameraIntrinsics->ImageWidth;
+ const uint32_t imageHeight = cameraIntrinsics->ImageHeight;
+
+ const size_t numElements = imageWidth * imageHeight;
+
+ auto spMap = std::make_unique<OutputType[]>(numElements);
+
+ for (uint32_t y = 0; y < imageHeight; ++y)
+ {
+ for (uint32_t x = 0; x < imageWidth; ++x)
+ {
+ spMap[y * imageWidth + x] = func(cameraIntrinsics, Point(float(x), float(y)));
+ }
+ }
+
+ DX::TextureData data{ spMap.get(), sizeof(OutputType) };
+ return DX::CreateTexture(
+ device,
+ imageWidth,
+ imageHeight,
+ DXGI_FORMAT_R32G32_FLOAT,
+ data,
+ D3D11_BIND_SHADER_RESOURCE,
+ D3D11_USAGE_IMMUTABLE);
+ }
+
+ bool IsValidColorFrame(MediaFrameReference^ frame)
+ {
+ // These properties are required for our coordinate mapping algorithm
+ return frame &&
+ frame->CoordinateSystem &&
+ frame->VideoMediaFrame &&
+ frame->VideoMediaFrame->CameraIntrinsics &&
+ frame->VideoMediaFrame->SoftwareBitmap;
+ }
+
+ bool IsValidDepthFrame(MediaFrameReference^ frame)
+ {
+ // A valid depth frame has the same properties of a color frame, as well as a valid DepthMediaFrame property
+ return IsValidColorFrame(frame) && frame->VideoMediaFrame->DepthMediaFrame;
+ }
+}
+
+task<std::shared_ptr<CameraCoordinateMapper>> CameraCoordinateMapper::CreateAndStartAsync(std::shared_ptr<DX::DeviceResources> deviceResources)
+{
+ // Future work: Support more color formats
+ std::shared_ptr<CameraStreamProcessor> colorStreamProcessor;
+ std::shared_ptr<CameraStreamProcessor> depthStreamProcessor;
+
+ // Creating the camera streams is not required to succeed, if there are failures we display an error message to the user
+ try
+ {
+ colorStreamProcessor = co_await CameraStreamProcessor::CreateAndStartAsync(MediaFrameSourceKind::Color, MediaEncodingSubtypes::Bgra8);
+ depthStreamProcessor = co_await CameraStreamProcessor::CreateAndStartAsync(MediaFrameSourceKind::Depth, MediaEncodingSubtypes::D16);
+ }
+ catch (Platform::Exception^ exception)
+ {
+ OutputDebugString(exception->Message->Data());
+ }
+
+ return std::make_shared<CameraCoordinateMapper>(
+ std::move(deviceResources),
+ std::move(colorStreamProcessor),
+ std::move(depthStreamProcessor));
+}
+
+CameraCoordinateMapper::CameraCoordinateMapper(std::shared_ptr<DX::DeviceResources> deviceResources, std::shared_ptr<CameraStreamProcessor> colorStreamProcessor, std::shared_ptr<CameraStreamProcessor> depthStreamProcessor) :
+ m_deviceResources(std::move(deviceResources)),
+ m_colorCameraStreamProcessor(std::move(colorStreamProcessor)),
+ m_depthCameraStreamProcessor(std::move(depthStreamProcessor))
+{
+ m_gpuCoordinateMapper = std::make_unique<GpuCoordinateMapper>(m_deviceResources);
+}
+
+void CameraCoordinateMapper::CreateDeviceDependentResources()
+{
+ m_gpuCoordinateMapper->CreateDeviceDependentResources();
+}
+
+void CameraCoordinateMapper::ReleaseDeviceDependentResources()
+{
+ m_depthTexture = nullptr;
+ m_depthTextureSRV = nullptr;
+
+ m_colorTexture = nullptr;
+ m_colorTextureSRV = nullptr;
+
+ m_targetTexture = nullptr;
+ m_targetTextureSRV = nullptr;
+ m_targetTextureRTV = nullptr;
+
+ m_targetRasterizedDepthTexture = nullptr;
+ m_targetRasterizedDepthTextureDSV = nullptr;
+ m_targetRasterizedDepthTextureSRV = nullptr;
+
+ m_depthUnprojectionMapTexture = nullptr;
+ m_depthUnprojectionMapTextureSRV = nullptr;
+
+ m_colorDistortionMapTexture = nullptr;
+ m_colorDistortionMapTextureSRV = nullptr;
+
+ m_rasterizeFrameBlob = {};
+ m_visualizeFrameBlob = {};
+
+ m_depthIntrinsics = {};
+ m_colorIntrinsics = {};
+
+ m_depthFrameIndex = {};
+ m_colorFrameIndex = {};
+
+ m_gpuCoordinateMapper->ReleaseDeviceDependentResources();
+}
+
+task<void> CameraCoordinateMapper::StartAsync()
+{
+ if (m_colorCameraStreamProcessor)
+ {
+ co_await m_colorCameraStreamProcessor->StartAsync();
+ }
+
+ if (m_depthCameraStreamProcessor)
+ {
+ co_await m_depthCameraStreamProcessor->StartAsync();
+ }
+}
+
+task<void> CameraCoordinateMapper::StopAsync()
+{
+ if (m_colorCameraStreamProcessor)
+ {
+ co_await m_colorCameraStreamProcessor->StopAsync();
+ }
+
+ if (m_depthCameraStreamProcessor)
+ {
+ co_await m_depthCameraStreamProcessor->StopAsync();
+ }
+}
+
+bool CameraCoordinateMapper::TryAcquireLatestFrameData() try {
+
+ if (!m_colorCameraStreamProcessor || !m_depthCameraStreamProcessor)
+ {
+ return false;
+ }
+
+ CameraStreamProcessor::FrameData colorFrameBlob = m_colorCameraStreamProcessor->GetLatestFrame();
+ CameraStreamProcessor::FrameData depthFrameBlob = m_depthCameraStreamProcessor->GetLatestFrame();
+
+ MediaFrameReference^ colorFrame = colorFrameBlob.frame;
+ MediaFrameReference^ depthFrame = depthFrameBlob.frame;
+
+ // Invalid color or depth frame
+ if (!IsValidColorFrame(colorFrame) || !IsValidDepthFrame(depthFrame))
+ {
+ return false;
+ }
+
+ const bool sameColorFrame = m_colorFrameIndex.has_value() && colorFrameBlob.index == m_colorFrameIndex.value();
+ const bool sameDepthFrame = m_depthFrameIndex.has_value() && depthFrameBlob.index == m_depthFrameIndex.value();
+
+ // Already processed this frame
+ if (sameColorFrame && sameDepthFrame)
+ {
+ return false;
+ }
+
+ ID3D11DeviceContext* context = m_deviceResources->GetD3DDeviceContext();
+
+ if (!sameColorFrame)
+ {
+ UpdateColorResources(colorFrame);
+ CopySoftwareBitmapToDirect3DTexture(colorFrame->VideoMediaFrame->SoftwareBitmap, context, m_colorTexture.Get());
+
+ m_rasterizeFrameBlob.sourceIntrinsics = m_colorIntrinsics;
+ m_rasterizeFrameBlob.targetIntrinsics = m_colorIntrinsics;
+
+ const float4x4 colorProjectionMatrix = colorFrame->VideoMediaFrame->CameraIntrinsics->UndistortedProjectionTransform;
+
+ m_rasterizeFrameBlob.sourceProjectionMatrix = colorProjectionMatrix;
+ m_rasterizeFrameBlob.targetProjectionMatrix = colorProjectionMatrix;
+ m_rasterizeFrameBlob.sourceImage = m_colorTextureSRV;
+ m_rasterizeFrameBlob.sourceDistortionMap = m_colorDistortionMapTextureSRV;
+ m_rasterizeFrameBlob.targetImage = m_targetTextureRTV;
+ m_rasterizeFrameBlob.targetRasterizedDepth = m_targetRasterizedDepthTextureDSV;
+ m_rasterizeFrameBlob.targetDistortionMap = m_colorDistortionMapTextureSRV;
+
+ m_colorFrameIndex = colorFrameBlob.index;
+ }
+
+ if (!sameDepthFrame)
+ {
+ UpdateDepthResources(depthFrame);
+ CopySoftwareBitmapToDirect3DTexture(depthFrame->VideoMediaFrame->SoftwareBitmap, context, m_depthTexture.Get());
+
+ const float depthScaleInMeters = static_cast<float>(depthFrame->VideoMediaFrame->DepthMediaFrame->DepthFormat->DepthScaleInMeters);
+
+ m_rasterizeFrameBlob.depthIntrinsics = m_depthIntrinsics;
+ m_rasterizeFrameBlob.depthScaleInMeters = depthScaleInMeters;
+ m_rasterizeFrameBlob.depthRangeMinimumInMeters = static_cast<float>(depthFrame->VideoMediaFrame->DepthMediaFrame->MinReliableDepth) * depthScaleInMeters;
+ m_rasterizeFrameBlob.depthRangeMaximumInMeters = static_cast<float>(depthFrame->VideoMediaFrame->DepthMediaFrame->MaxReliableDepth) * depthScaleInMeters;
+ m_rasterizeFrameBlob.depthImage = m_depthTextureSRV;
+ m_rasterizeFrameBlob.depthUnprojectionMap = m_depthUnprojectionMapTextureSRV;
+
+ m_depthFrameIndex = depthFrameBlob.index;
+ }
+
+ Platform::IBox<float4x4>^ depthToColorRef = depthFrame->CoordinateSystem->TryGetTransformTo(colorFrame->CoordinateSystem);
+
+ if (depthToColorRef)
+ {
+ m_rasterizeFrameBlob.depthToSource = depthToColorRef->Value;
+ m_rasterizeFrameBlob.depthToTarget = depthToColorRef->Value;
+ }
+ else
+ {
+ throw std::exception("These camera frames can't be correlated");
+ }
+
+ return true;
+}
+// If we're too slow while processing frames (i.e. running under a debugger), then we may access a VideoMediaFrame that was already closed.
+catch (Platform::ObjectDisposedException^)
+{
+ return false;
+}
+
+void CameraCoordinateMapper::RunCoordinateMapping()
+{
+ m_gpuCoordinateMapper->RunCoordinateMapping(m_rasterizeFrameBlob);
+}
+
+void CameraCoordinateMapper::VisualizeCoordinateMapping(ID3D11RenderTargetView * renderTarget, ID3D11DepthStencilView * depthBuffer, const D3D11_VIEWPORT & viewport, const Windows::Foundation::Numerics::float4x4 & worldToView)
+{
+ const float aspectRatio = float(viewport.Width) / float(viewport.Height);
+
+ const float4x4 viewToProj = make_float4x4_perspective_field_of_view(DirectX::XMConvertToRadians(90.0f), aspectRatio, 0.001f, 100.0f);
+
+ m_visualizeFrameBlob.renderTarget = renderTarget;
+ m_visualizeFrameBlob.renderTargetDepthBuffer = depthBuffer;
+ m_visualizeFrameBlob.worldToView = worldToView;
+ m_visualizeFrameBlob.viewToProj = viewToProj;
+ m_visualizeFrameBlob.viewport = viewport;
+
+ m_gpuCoordinateMapper->VisualizeCoordinateMapping(m_rasterizeFrameBlob, m_visualizeFrameBlob);
+
+ m_visualizeFrameBlob.renderTargetDepthBuffer = nullptr;
+ m_visualizeFrameBlob.renderTarget = nullptr;
+}
+
+bool CameraCoordinateMapper::AreCamerasStreaming() const
+{
+ const bool colorIsStreaming = m_colorCameraStreamProcessor && m_colorCameraStreamProcessor->IsStreaming();
+ const bool depthIsStreaming = m_depthCameraStreamProcessor && m_depthCameraStreamProcessor->IsStreaming();
+
+ return colorIsStreaming && depthIsStreaming;
+}
+
+ID3D11ShaderResourceView* CameraCoordinateMapper::GetTargetShaderResourceView() const
+{
+ return m_targetTextureSRV.Get();
+}
+
+float CameraCoordinateMapper::GetTargetAspectRatio() const
+{
+ return static_cast<float>(m_colorIntrinsics.imageWidth) / static_cast<float>(m_colorIntrinsics.imageHeight);
+}
+
+void CameraCoordinateMapper::UpdateDepthResources(Windows::Media::Capture::Frames::MediaFrameReference ^ depthFrame)
+{
+ ID3D11Device* device = m_deviceResources->GetD3DDevice();
+ ID3D11DeviceContext* context = m_deviceResources->GetD3DDeviceContext();
+
+ CameraIntrinsics^ depthIntrinsicsRt = depthFrame->VideoMediaFrame->CameraIntrinsics;
+ const GpuCoordinateMapper::CameraIntrinsics depthIntrinsics(depthIntrinsicsRt);
+
+ // Only need to update the internal data if the intrinsics have changed
+ if (depthIntrinsics != m_depthIntrinsics)
+ {
+ if (depthFrame->VideoMediaFrame->VideoFormat->MediaFrameFormat->Subtype != Windows::Media::MediaProperties::MediaEncodingSubtypes::D16)
+ {
+ throw std::invalid_argument("This depth format is not yet supported");
+ }
+
+ m_depthTexture = DX::CreateTexture(
+ device,
+ depthIntrinsics.imageWidth,
+ depthIntrinsics.imageHeight,
+ DXGI_FORMAT_R16_UNORM,
+ std::nullopt,
+ D3D11_BIND_SHADER_RESOURCE,
+ D3D11_USAGE_DYNAMIC,
+ D3D11_CPU_ACCESS_WRITE);
+
+ m_depthTextureSRV = DX::CreateShaderResourceView(m_depthTexture.Get());
+
+ m_depthUnprojectionMapTexture = GenerateIntrinsicsMapping(device, depthIntrinsicsRt,
+ [](CameraIntrinsics^ intrinsics, Point point)
+ {
+ return intrinsics->UnprojectAtUnitDepth(point);
+ });
+
+ m_depthUnprojectionMapTextureSRV = DX::CreateShaderResourceView(m_depthUnprojectionMapTexture.Get());
+
+ m_depthIntrinsics = depthIntrinsics;
+ }
+}
+
+void CameraCoordinateMapper::UpdateColorResources(Windows::Media::Capture::Frames::MediaFrameReference ^ colorFrame)
+{
+ ID3D11Device* device = m_deviceResources->GetD3DDevice();
+ ID3D11DeviceContext* context = m_deviceResources->GetD3DDeviceContext();
+
+ CameraIntrinsics^ colorIntrinsicsRt = colorFrame->VideoMediaFrame->CameraIntrinsics;
+ const GpuCoordinateMapper::CameraIntrinsics colorIntrinsics(colorIntrinsicsRt);
+
+ // Only need to update the internal data if the intrinsics have changed
+ if (colorIntrinsics != m_colorIntrinsics)
+ {
+ // Future work: Support more color formats
+ if (colorFrame->VideoMediaFrame->VideoFormat->MediaFrameFormat->Subtype != L"ARGB32")
+ {
+ throw std::invalid_argument("This video format is not yet supported");
+ }
+
+ m_colorTexture = DX::CreateTexture(
+ device,
+ colorIntrinsics.imageWidth,
+ colorIntrinsics.imageHeight,
+ DXGI_FORMAT_B8G8R8A8_UNORM,
+ std::nullopt,
+ D3D11_BIND_SHADER_RESOURCE,
+ D3D11_USAGE_DYNAMIC,
+ D3D11_CPU_ACCESS_WRITE);
+
+ m_colorTextureSRV = DX::CreateShaderResourceView(m_colorTexture.Get());
+
+ m_colorDistortionMapTexture = GenerateIntrinsicsMapping(device, colorIntrinsicsRt,
+ [](CameraIntrinsics^ intrinsics, Point point)
+ {
+ return intrinsics->DistortPoint(point);
+ });
+
+ m_colorDistortionMapTextureSRV = DX::CreateShaderResourceView(m_colorDistortionMapTexture.Get());
+
+ m_targetTexture = DX::CreateTexture(
+ device,
+ colorIntrinsics.imageWidth,
+ colorIntrinsics.imageHeight,
+ DXGI_FORMAT_B8G8R8A8_UNORM,
+ std::nullopt,
+ D3D11_BIND_SHADER_RESOURCE | D3D11_BIND_RENDER_TARGET);
+
+ m_targetTextureSRV = DX::CreateShaderResourceView(m_targetTexture.Get());
+ m_targetTextureRTV = DX::CreateRenderTargetView(m_targetTexture.Get());
+
+ m_targetRasterizedDepthTexture = DX::CreateTexture(
+ device,
+ colorIntrinsics.imageWidth,
+ colorIntrinsics.imageHeight,
+ DXGI_FORMAT_R24G8_TYPELESS,
+ std::nullopt,
+ D3D11_BIND_SHADER_RESOURCE | D3D11_BIND_DEPTH_STENCIL);
+
+ m_targetRasterizedDepthTextureDSV = DX::CreateDepthStencilView(
+ m_targetRasterizedDepthTexture.Get(),
+ DXGI_FORMAT_D24_UNORM_S8_UINT);
+
+ m_targetRasterizedDepthTextureSRV = DX::CreateShaderResourceView(
+ m_targetRasterizedDepthTexture.Get(),
+ DXGI_FORMAT_R24_UNORM_X8_TYPELESS);
+
+ m_visualizeFrameBlob.targetImage = m_targetTextureSRV;
+ m_visualizeFrameBlob.targetImageDepthBuffer = m_targetRasterizedDepthTextureSRV;
+
+ m_colorIntrinsics = colorIntrinsics;
+ }
+}
diff --git a/Samples/CameraStreamCoordinateMapper/cpp/Content/CameraCoordinateMapper.h b/Samples/CameraStreamCoordinateMapper/cpp/Content/CameraCoordinateMapper.h
new file mode 100644
index 0000000000..ad12874918
--- /dev/null
+++ b/Samples/CameraStreamCoordinateMapper/cpp/Content/CameraCoordinateMapper.h
@@ -0,0 +1,99 @@
+//*********************************************************
+//
+// Copyright (c) Microsoft. All rights reserved.
+// This code is licensed under the MIT License (MIT).
+// THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF
+// ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY
+// IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR
+// PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT.
+//
+//*********************************************************
+
+#pragma once
+
+#include "Common\DeviceResources.h"
+#include "GpuCoordinateMapper.h"
+
+namespace CameraStreamCoordinateMapper {
+
+ class CameraStreamProcessor;
+
+ ////////////////////////////////////////////////////////////////////////////////
+ // CameraCoordinateMapper
+ //
+ // This gathers frames from the CameraStreamProcessor and feeds them into the GpuCoordinateMapper
+ //
+ // The core algorithm resides in GpuCoordinateMapper and its respective shaders, CameraCoordinateMapper
+ // is meant to manage the resources and recreate them if the camera's intrinsics or other properties change
+
+ class CameraCoordinateMapper
+ {
+ public:
+ static concurrency::task<std::shared_ptr<CameraCoordinateMapper>> CreateAndStartAsync(std::shared_ptr<DX::DeviceResources> deviceResources);
+
+ CameraCoordinateMapper(
+ std::shared_ptr<DX::DeviceResources> deviceResources,
+ std::shared_ptr<CameraStreamProcessor> colorStreamProcessor,
+ std::shared_ptr<CameraStreamProcessor> depthStreamProcessor);
+
+ void CreateDeviceDependentResources();
+ void ReleaseDeviceDependentResources();
+
+ concurrency::task StartAsync();
+ concurrency::task StopAsync();
+
+ bool TryAcquireLatestFrameData();
+
+ void RunCoordinateMapping();
+
+ void VisualizeCoordinateMapping(
+ ID3D11RenderTargetView* renderTarget,
+ ID3D11DepthStencilView* depthBuffer,
+ const D3D11_VIEWPORT& viewport,
+ const Windows::Foundation::Numerics::float4x4& worldToView);
+
+ bool AreCamerasStreaming() const;
+ ID3D11ShaderResourceView* GetTargetShaderResourceView() const;
+ float GetTargetAspectRatio() const;
+
+ private:
+ void UpdateDepthResources(Windows::Media::Capture::Frames::MediaFrameReference^ depthFrame);
+ void UpdateColorResources(Windows::Media::Capture::Frames::MediaFrameReference^ colorFrame);
+
+ std::shared_ptr<DX::DeviceResources> m_deviceResources;
+
+ std::shared_ptr<CameraStreamProcessor> m_colorCameraStreamProcessor;
+ std::shared_ptr<CameraStreamProcessor> m_depthCameraStreamProcessor;
+ std::unique_ptr<GpuCoordinateMapper> m_gpuCoordinateMapper;
+
+ Microsoft::WRL::ComPtr<ID3D11Texture2D> m_depthTexture;
+ Microsoft::WRL::ComPtr<ID3D11ShaderResourceView> m_depthTextureSRV;
+
+ Microsoft::WRL::ComPtr<ID3D11Texture2D> m_colorTexture;
+ Microsoft::WRL::ComPtr<ID3D11ShaderResourceView> m_colorTextureSRV;
+
+ Microsoft::WRL::ComPtr<ID3D11Texture2D> m_targetTexture;
+ Microsoft::WRL::ComPtr<ID3D11ShaderResourceView> m_targetTextureSRV;
+ Microsoft::WRL::ComPtr<ID3D11RenderTargetView> m_targetTextureRTV;
+
+ Microsoft::WRL::ComPtr<ID3D11Texture2D> m_targetRasterizedDepthTexture;
+ Microsoft::WRL::ComPtr<ID3D11DepthStencilView> m_targetRasterizedDepthTextureDSV;
+ Microsoft::WRL::ComPtr<ID3D11ShaderResourceView> m_targetRasterizedDepthTextureSRV;
+
+ Microsoft::WRL::ComPtr<ID3D11Texture2D> m_depthUnprojectionMapTexture;
+ Microsoft::WRL::ComPtr<ID3D11ShaderResourceView> m_depthUnprojectionMapTextureSRV;
+
+ Microsoft::WRL::ComPtr<ID3D11Texture2D> m_colorDistortionMapTexture;
+ Microsoft::WRL::ComPtr<ID3D11ShaderResourceView> m_colorDistortionMapTextureSRV;
+
+ GpuCoordinateMapper::RasterizedFrameBlob m_rasterizeFrameBlob = {};
+ GpuCoordinateMapper::VisualizeFrameBlob m_visualizeFrameBlob = {};
+
+ GpuCoordinateMapper::CameraIntrinsics m_depthIntrinsics = {};
+ GpuCoordinateMapper::CameraIntrinsics m_colorIntrinsics = {};
+
+ std::optional<uint32_t> m_depthFrameIndex = {};
+ std::optional<uint32_t> m_colorFrameIndex = {};
+ };
+}
+
diff --git a/Samples/CameraStreamCoordinateMapper/cpp/Content/CameraStreamProcessor.cpp b/Samples/CameraStreamCoordinateMapper/cpp/Content/CameraStreamProcessor.cpp
new file mode 100644
index 0000000000..24a1d65992
--- /dev/null
+++ b/Samples/CameraStreamCoordinateMapper/cpp/Content/CameraStreamProcessor.cpp
@@ -0,0 +1,158 @@
+//*********************************************************
+//
+// Copyright (c) Microsoft. All rights reserved.
+// This code is licensed under the MIT License (MIT).
+// THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF
+// ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY
+// IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR
+// PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT.
+//
+//*********************************************************
+
+#include "pch.h"
+#include "CameraStreamProcessor.h"
+
+using namespace CameraStreamCoordinateMapper;
+using namespace Windows::Foundation;
+using namespace Windows::Foundation::Collections;
+using namespace Windows::Foundation::Numerics;
+
+using namespace Windows::Media::Capture;
+using namespace Windows::Media::Capture::Frames;
+
+using namespace concurrency;
+using namespace Platform;
+
+using namespace std::placeholders;
+
+task<std::shared_ptr<CameraStreamProcessor>> CameraStreamProcessor::CreateAndStartAsync(
+ MediaFrameSourceKind sourceKind,
+ Platform::String^ sourceFormat,
+ MediaCaptureMemoryPreference memoryPreference)
+{
+ auto streamProcessor = std::make_shared<CameraStreamProcessor>(sourceKind, sourceFormat, memoryPreference);
+
+ co_await streamProcessor->StartAsync();
+
+ return streamProcessor;
+}
+
+CameraStreamProcessor::CameraStreamProcessor(
+ MediaFrameSourceKind sourceKind,
+ Platform::String^ format,
+ MediaCaptureMemoryPreference memoryPreference) :
+ m_sourceKind(sourceKind),
+ m_sourceFormat(std::move(format)),
+ m_memoryPreference(memoryPreference)
+{}
+
+CameraStreamProcessor::~CameraStreamProcessor()
+{
+ std::lock_guard lock(m_lock);
+ m_mediaFrameReader->FrameArrived -= m_frameArrivedToken;
+}
+
+CameraStreamProcessor::FrameData CameraStreamProcessor::GetLatestFrame(void) const
+{
+ auto lock = std::shared_lock(m_lock);
+ return m_latestFrameData;
+}
+
+task<void> CameraStreamProcessor::StartAsync()
+{
+ IVectorView<MediaFrameSourceGroup^>^ groups = co_await MediaFrameSourceGroup::FindAllAsync();
+
+ MediaFrameSourceGroup^ selectedGroup;
+ MediaFrameSourceInfo^ selectedSourceInfo;
+
+ // Pick the first group and first matching source from that group
+ for (MediaFrameSourceGroup^ group : groups)
+ {
+ for (MediaFrameSourceInfo^ sourceInfo : group->SourceInfos)
+ {
+ if (sourceInfo->SourceKind == m_sourceKind)
+ {
+ selectedSourceInfo = sourceInfo;
+ break;
+ }
+ }
+
+ if (selectedSourceInfo != nullptr)
+ {
+ selectedGroup = group;
+ break;
+ }
+ }
+
+ if (selectedGroup == nullptr || selectedSourceInfo == nullptr)
+ {
+ OutputDebugString(L"Warning: No source found that matched the requested parameters\n");
+ co_return;
+ }
+
+ MediaCaptureInitializationSettings^ settings = ref new MediaCaptureInitializationSettings();
+ settings->MemoryPreference = m_memoryPreference;
+ settings->StreamingCaptureMode = StreamingCaptureMode::Video;
+ settings->SourceGroup = selectedGroup;
+ settings->SharingMode = MediaCaptureSharingMode::SharedReadOnly;
+
+ Platform::Agile<MediaCapture> mediaCapture(ref new MediaCapture());
+
+ co_await mediaCapture->InitializeAsync(settings);
+
+ MediaFrameSource^ selectedSource = mediaCapture->FrameSources->Lookup(selectedSourceInfo->Id);
+
+ MediaFrameReader^ reader = co_await mediaCapture->CreateFrameReaderAsync(selectedSource, m_sourceFormat);
+
+ MediaFrameReaderStartStatus status = co_await reader->StartAsync();
+
+ if (status == MediaFrameReaderStartStatus::Success)
+ {
+ std::lock_guard lock(m_lock);
+
+ m_mediaCapture = std::move(mediaCapture);
+ m_mediaFrameReader = std::move(reader);
+
+ m_frameArrivedToken = m_mediaFrameReader->FrameArrived +=
+ ref new TypedEventHandler<MediaFrameReader^, MediaFrameArrivedEventArgs^>(
+ std::bind(&CameraStreamProcessor::OnFrameArrived, this, _1, _2));
+
+ m_isStreaming = true;
+ }
+ else
+ {
+ OutputDebugString(L"Warning: Reader failed to start\n");
+ }
+}
+
+task<void> CameraStreamProcessor::StopAsync()
+{
+ m_isStreaming = false;
+
+ std::lock_guard lock(m_lock);
+
+ m_mediaFrameReader->FrameArrived -= m_frameArrivedToken;
+ co_await m_mediaFrameReader->StopAsync();
+
+ m_mediaFrameReader = nullptr;
+ m_mediaCapture = nullptr;
+ m_latestFrameData = {};
+ m_frameArrivedToken = {};
+ m_frameCount = 0u;
+}
+
+bool CameraStreamCoordinateMapper::CameraStreamProcessor::IsStreaming() const
+{
+ return m_isStreaming;
+}
+
+void CameraStreamProcessor::OnFrameArrived(MediaFrameReader^ sender, MediaFrameArrivedEventArgs^ args)
+{
+ if (MediaFrameReference^ frame = sender->TryAcquireLatestFrame())
+ {
+ std::lock_guard lock(m_lock);
+
+ m_latestFrameData.frame = frame;
+ m_latestFrameData.index = m_frameCount++;
+ }
+}
diff --git a/Samples/CameraStreamCoordinateMapper/cpp/Content/CameraStreamProcessor.h b/Samples/CameraStreamCoordinateMapper/cpp/Content/CameraStreamProcessor.h
new file mode 100644
index 0000000000..40901d42be
--- /dev/null
+++ b/Samples/CameraStreamCoordinateMapper/cpp/Content/CameraStreamProcessor.h
@@ -0,0 +1,76 @@
+//*********************************************************
+//
+// Copyright (c) Microsoft. All rights reserved.
+// This code is licensed under the MIT License (MIT).
+// THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF
+// ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY
+// IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR
+// PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT.
+//
+//*********************************************************
+
+#pragma once
+
+namespace CameraStreamCoordinateMapper
+{
+ ////////////////////////////////////////////////////////////////////////////////
+ // CameraStreamProcessor
+ //
+ // This class is mostly a helper for accessing the MediaFrameReader class
+ // and change the frame arrival events into a polling pattern
+ //
+ // So instead of listening for an event "FrameArrived", you can just query
+ // with "GetLatestFrame" and check the index to see if you have seen it yet
+
+ class CameraStreamProcessor
+ {
+ public:
+ struct FrameData
+ {
+ Windows::Media::Capture::Frames::MediaFrameReference^ frame;
+ uint32_t index;
+ };
+
+ static Concurrency::task<std::shared_ptr<CameraStreamProcessor>> CreateAndStartAsync(
+ Windows::Media::Capture::Frames::MediaFrameSourceKind sourceKind,
+ Platform::String^ format,
+ Windows::Media::Capture::MediaCaptureMemoryPreference memoryPreference = Windows::Media::Capture::MediaCaptureMemoryPreference::Cpu);
+
+ CameraStreamProcessor(
+ Windows::Media::Capture::Frames::MediaFrameSourceKind sourceKind,
+ Platform::String^ format,
+ Windows::Media::Capture::MediaCaptureMemoryPreference memoryPreference);
+
+ ~CameraStreamProcessor();
+
+ FrameData GetLatestFrame() const;
+
+ concurrency::task<void> StartAsync();
+ concurrency::task<void> StopAsync();
+
+ bool IsStreaming() const;
+
+ void OnFrameArrived(
+ Windows::Media::Capture::Frames::MediaFrameReader^ sender,
+ Windows::Media::Capture::Frames::MediaFrameArrivedEventArgs^ args);
+
+ private:
+ // Media Capture
+ Windows::Media::Capture::Frames::MediaFrameSourceKind m_sourceKind;
+ Platform::String^ m_sourceFormat;
+ Windows::Media::Capture::MediaCaptureMemoryPreference m_memoryPreference;
+
+ Platform::Agile<Windows::Media::Capture::MediaCapture> m_mediaCapture;
+ Windows::Media::Capture::Frames::MediaFrameReader^ m_mediaFrameReader;
+
+ mutable std::shared_mutex m_lock;
+
+ std::atomic<bool> m_isStreaming = false;
+
+ FrameData m_latestFrameData;
+
+ Windows::Foundation::EventRegistrationToken m_frameArrivedToken;
+
+ uint32_t m_frameCount = 0u;
+ };
+}
diff --git a/Samples/CameraStreamCoordinateMapper/cpp/Content/GpuCoordinateMapper.cpp b/Samples/CameraStreamCoordinateMapper/cpp/Content/GpuCoordinateMapper.cpp
new file mode 100644
index 0000000000..82527f3046
--- /dev/null
+++ b/Samples/CameraStreamCoordinateMapper/cpp/Content/GpuCoordinateMapper.cpp
@@ -0,0 +1,373 @@
+//*********************************************************
+//
+// Copyright (c) Microsoft. All rights reserved.
+// This code is licensed under the MIT License (MIT).
+// THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF
+// ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY
+// IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR
+// PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT.
+//
+//*********************************************************
+
+#include "pch.h"
+#include "GpuCoordinateMapper.h"
+#include "Common\DirectXHelper.h"
+
+#include "Shaders\RasterizeDepthMeshVS.h"
+#include "Shaders\RasterizeDepthMeshPS.h"
+
+#include "Shaders\VisualizeDepthMeshVS.h"
+#include "Shaders\VisualizeDepthMeshPS.h"
+
+using namespace CameraStreamCoordinateMapper;
+using namespace Windows::Foundation;
+using namespace Windows::UI::Core;
+using namespace Windows::Foundation::Numerics;
+
+namespace
+{
+ CameraIntrinsics_Shader GetCameraIntrinsicsConstantsForShader(const GpuCoordinateMapper::CameraIntrinsics& intrinsics)
+ {
+ CameraIntrinsics_Shader constants;
+
+ constants.imageWidth = intrinsics.imageWidth;
+ constants.imageHeight = intrinsics.imageHeight;
+
+ return constants;
+ }
+
+ RasterizeDepthMeshConstants_Shader GetRasterizeDepthMeshConstantsForShader(const GpuCoordinateMapper::RasterizedFrameBlob& frameBlob)
+ {
+ RasterizeDepthMeshConstants_Shader constants;
+
+ constants.depthIntrinsics = GetCameraIntrinsicsConstantsForShader(frameBlob.depthIntrinsics);
+ constants.targetIntrinsics = GetCameraIntrinsicsConstantsForShader(frameBlob.targetIntrinsics);
+ constants.sourceIntrinsics = GetCameraIntrinsicsConstantsForShader(frameBlob.sourceIntrinsics);
+ constants.depthToSource = frameBlob.depthToSource;
+ constants.depthToTarget = frameBlob.depthToTarget;
+ constants.sourceProjectionMatrix = frameBlob.sourceProjectionMatrix;
+ constants.targetProjectionMatrix = frameBlob.targetProjectionMatrix;
+
+ // DepthMaps are currently D16 format: [0,65535], depthScaleInMeters is how large each unit is in meters.
+ constants.depthMapUnitsToMeters = frameBlob.depthScaleInMeters * std::numeric_limits<uint16_t>::max();
+ constants.depthRangeMinimumInMeters = frameBlob.depthRangeMinimumInMeters;
+ constants.depthRangeMaximumInMeters = frameBlob.depthRangeMaximumInMeters;
+ constants.depthMapImageWidthMinusOne = frameBlob.depthIntrinsics.imageWidth - 1u;
+
+ return constants;
+ }
+
+ VisualizeDepthMeshConstants_Shader GetVisualizeDepthMeshConstantsForShader(const GpuCoordinateMapper::VisualizeFrameBlob& frameBlob)
+ {
+ VisualizeDepthMeshConstants_Shader constants;
+
+ constants.worldToProj = frameBlob.worldToView * frameBlob.viewToProj;
+
+ return constants;
+ }
+}
+
+GpuCoordinateMapper::GpuCoordinateMapper(const std::shared_ptr<DX::DeviceResources>& deviceResources) :
+ m_deviceResources(deviceResources)
+{
+ CreateDeviceDependentResources();
+}
+
+void CameraStreamCoordinateMapper::GpuCoordinateMapper::CreateDeviceDependentResources()
+{
+ ID3D11Device* device = m_deviceResources->GetD3DDevice();
+
+ static const D3D11_INPUT_ELEMENT_DESC vertexDesc[] =
+ {
+ { "POSITION", 0, DXGI_FORMAT_R32G32B32A32_FLOAT, 0, D3D11_APPEND_ALIGNED_ELEMENT, D3D11_INPUT_PER_VERTEX_DATA, 0 },
+ };
+
+ m_rasterizeDepthMeshVertexShader = DX::CreateVertexShader(
+ device,
+ g_RasterizeDepthMeshVS,
+ _countof(g_RasterizeDepthMeshVS));
+
+ m_rasterizeDepthMeshInputLayout = DX::CreateInputLayout(
+ device,
+ vertexDesc,
+ _countof(vertexDesc),
+ g_RasterizeDepthMeshVS,
+ _countof(g_RasterizeDepthMeshVS));
+
+ m_rasterizeDepthMeshPixelShader = DX::CreatePixelShader(
+ device,
+ g_RasterizeDepthMeshPS,
+ _countof(g_RasterizeDepthMeshPS));
+
+ m_visualizeDepthMeshVertexShader = DX::CreateVertexShader(
+ device,
+ g_VisualizeDepthMeshVS,
+ _countof(g_VisualizeDepthMeshVS));
+
+ m_visualizeDepthMeshInputLayout = DX::CreateInputLayout(
+ device,
+ vertexDesc,
+ _countof(vertexDesc),
+ g_VisualizeDepthMeshVS,
+ _countof(g_VisualizeDepthMeshVS));
+
+ m_visualizeDepthMeshPixelShader = DX::CreatePixelShader(
+ device,
+ g_VisualizeDepthMeshPS,
+ _countof(g_VisualizeDepthMeshPS));
+
+ m_rasterizeDepthMeshConstants = DX::CreateBuffer(
+ device,
+ sizeof(RasterizeDepthMeshConstants_Shader),
+ nullptr,
+ D3D11_BIND_CONSTANT_BUFFER);
+
+ m_visualizeDepthMeshConstants = DX::CreateBuffer(
+ device,
+ sizeof(VisualizeDepthMeshConstants_Shader),
+ nullptr,
+ D3D11_BIND_CONSTANT_BUFFER);
+
+ m_samplerState = DX::CreateSamplerState(device);
+
+ static const float4 quadVertices[] = {
+ float4(-1.0f, +1.0f, 0.0f, 1.0f), // top left
+ float4(+1.0f, +1.0f, 0.0f, 1.0f), // top right
+ float4(-1.0f, -1.0f, 0.0f, 1.0f), // bottom left
+
+ float4(+1.0f, +1.0f, 0.0f, 1.0f), // top right
+ float4(+1.0f, -1.0f, 0.0f, 1.0f), // bottom right
+ float4(-1.0f, -1.0f, 0.0f, 1.0f), // bottom left
+ };
+
+ m_vertexCount = _countof(quadVertices);
+
+ m_vertexBuffer = DX::CreateBuffer(
+ device,
+ sizeof(quadVertices),
+ quadVertices,
+ D3D11_BIND_VERTEX_BUFFER);
+}
+
+void CameraStreamCoordinateMapper::GpuCoordinateMapper::ReleaseDeviceDependentResources()
+{
+ m_rasterizeDepthMeshVertexShader = nullptr;
+ m_rasterizeDepthMeshInputLayout = nullptr;
+ m_rasterizeDepthMeshPixelShader = nullptr;
+
+ m_visualizeDepthMeshVertexShader = nullptr;
+ m_visualizeDepthMeshInputLayout = nullptr;
+ m_visualizeDepthMeshPixelShader = nullptr;
+
+ m_rasterizeDepthMeshConstants = nullptr;
+ m_visualizeDepthMeshConstants = nullptr;
+ m_vertexBuffer = nullptr;
+ m_samplerState = nullptr;
+}
+
+////////////////////////////////////////////////////////////////////////////////
+// RunCoordinateMapping - executes the correlation algorithm for this set of frame data
+//
+// The inputs come from the "depth*", "source*", and "target*" parameters, output is written to the "targetImage*" parameters
+//
+// The core algorithm:
+// 1.) Read each point from the depth image and undistort + unprojects it into 3D space (using depthUnprojectionMap)
+// 2.) Transforms that 3D point into "source" and "target" camera space (using depthToSource and depthToTarget matrices)
+// 3.) Projects + distorts it into source camera 2D space (using sourceProjectionMatrix and sourceDistortionMap)
+// to determine which pixel to sample from
+// 4.) Projects + distorts it into target camera 2D space (using targetProjectionMatrix and targetDistortionMap)
+// to determine where the final pixel is rendered on screen.
+//
+// The most common scenario has "source" and "target" as the same camera (Color), but
+// the algorithm is generic to allow other scenarios.
+
+void CameraStreamCoordinateMapper::GpuCoordinateMapper::RunCoordinateMapping(
+ const RasterizedFrameBlob& frameBlob)
+{
+ ID3D11DeviceContext* context = m_deviceResources->GetD3DDeviceContext();
+
+ const RasterizeDepthMeshConstants_Shader constants = GetRasterizeDepthMeshConstantsForShader(frameBlob);
+
+ context->UpdateSubresource(m_rasterizeDepthMeshConstants.Get(), 0, nullptr, &constants, 0, 0);
+
+ context->ClearRenderTargetView(frameBlob.targetImage.Get(), DirectX::Colors::Black);
+ context->ClearDepthStencilView(frameBlob.targetRasterizedDepth.Get(), D3D11_CLEAR_DEPTH, 1.0f, 0);
+
+ ////////////////////////////////////////////////////////////////////////////////
+ // Input Assembler + Rasterizer
+ const UINT stride = sizeof(float4);
+ const UINT offset = 0;
+ context->IASetVertexBuffers(0, 1, m_vertexBuffer.GetAddressOf(), &stride, &offset);
+ context->IASetPrimitiveTopology(D3D11_PRIMITIVE_TOPOLOGY_TRIANGLELIST);
+ context->IASetInputLayout(m_rasterizeDepthMeshInputLayout.Get());
+
+ const D3D11_VIEWPORT viewport = CD3D11_VIEWPORT(
+ 0.0f,
+ 0.0f,
+ static_cast<float>(frameBlob.targetIntrinsics.imageWidth),
+ static_cast<float>(frameBlob.targetIntrinsics.imageHeight)
+ );
+
+ context->RSSetViewports(1, &viewport);
+
+ ////////////////////////////////////////////////////////////////////////////////
+ // Vertex Shader
+ context->VSSetShader(m_rasterizeDepthMeshVertexShader.Get(), nullptr, 0);
+
+ ID3D11Buffer* vertexConstantBuffers[] = {
+ m_rasterizeDepthMeshConstants.Get()
+ };
+
+ context->VSSetConstantBuffers(0, 1, m_rasterizeDepthMeshConstants.GetAddressOf());
+
+ ID3D11ShaderResourceView* vertexShaderResources[] = {
+ frameBlob.depthImage.Get(), // Depth Texture
+ frameBlob.depthUnprojectionMap.Get(), // Depth Unprojection Map
+ frameBlob.sourceDistortionMap.Get(), // Source Distortion Map
+ frameBlob.targetDistortionMap.Get() // Target Distortion Map
+ };
+
+ context->VSSetShaderResources(0, _countof(vertexShaderResources), vertexShaderResources);
+ context->VSSetSamplers(0, 1, m_samplerState.GetAddressOf());
+
+ ////////////////////////////////////////////////////////////////////////////////
+ // Pixel Shader
+ context->PSSetShader(m_rasterizeDepthMeshPixelShader.Get(), nullptr, 0);
+
+ ID3D11ShaderResourceView* pixelShaderResources[] = {
+ frameBlob.sourceImage.Get() // Source Texture
+ };
+
+ context->PSSetShaderResources(0, _countof(pixelShaderResources), pixelShaderResources);
+ context->PSSetSamplers(0, 1, m_samplerState.GetAddressOf());
+
+ ////////////////////////////////////////////////////////////////////////////////
+ // Output Merger
+ ID3D11RenderTargetView* outputRenderTargets[] = {
+ frameBlob.targetImage.Get() // Target Texture
+ };
+
+ context->OMSetRenderTargets(_countof(outputRenderTargets), outputRenderTargets, frameBlob.targetRasterizedDepth.Get());
+
+ ////////////////////////////////////////////////////////////////////////////////
+ // Render
+ const uint32_t instanceCount = (frameBlob.depthIntrinsics.imageWidth - 1) * (frameBlob.depthIntrinsics.imageHeight - 1);
+
+ context->DrawInstanced(m_vertexCount, instanceCount, 0, 0);
+
+ ////////////////////////////////////////////////////////////////////////////////
+ // Cleanup: Unbind resources that might be used in later passes
+ decltype(outputRenderTargets) null_outputRenderTargets = { nullptr };
+ context->OMSetRenderTargets(_countof(null_outputRenderTargets), null_outputRenderTargets, nullptr);
+
+ decltype(pixelShaderResources) null_pixelShaderResources = { nullptr };
+ context->PSSetShaderResources(0, _countof(null_pixelShaderResources), null_pixelShaderResources);
+
+ decltype(vertexShaderResources) null_vertexShaderResources = { nullptr };
+ context->VSSetShaderResources(0, _countof(null_vertexShaderResources), null_vertexShaderResources);
+}
+
+////////////////////////////////////////////////////////////////////////////////
+// VisualizeCoordinateMapping - executes the correlation algorithm for this set of frame data
+// except visualized from another perspective, using depth information from the previous pass
+// in order to occlude pixels that weren't visible from the original target camera's perspective
+//
+// The inputs come from the "depth*" and "target*" parameters inside of the rasterizedFrameBlob.
+// The output is written to the visualizeFrameBlob "renderTarget*" parameters
+//
+// The core algorithm for visualizing from alternate perspectives:
+// 1.) Read each point from the depth image and undistort + unprojects it into 3D space (using depthUnprojectionMap)
+// 2.) Transforms that 3D point into "target" camera space (using depthToTarget matrices)
+// 3.) Projects + distorts it into target camera 2D space (using targetProjectionMatrix and targetDistortionMap)
+// to determine where the final pixel was rendered on screen (so we can sample the depth buffer in the pixel shader)
+// 4.) Compute the depth of this point in "target" camera space and store it in the vertex
+// 5.) Compute the position of the 3D point given the app provided camera/projection
+
+void GpuCoordinateMapper::VisualizeCoordinateMapping(
+ const RasterizedFrameBlob& rasterizedFrameBlob,
+ const VisualizeFrameBlob& visualizeFrameBlob)
+{
+ ID3D11DeviceContext* context = m_deviceResources->GetD3DDeviceContext();
+
+ const RasterizeDepthMeshConstants_Shader rasterizeConstants = GetRasterizeDepthMeshConstantsForShader(rasterizedFrameBlob);
+ context->UpdateSubresource(m_rasterizeDepthMeshConstants.Get(), 0, nullptr, &rasterizeConstants, 0, 0);
+
+ const VisualizeDepthMeshConstants_Shader visualizeConstants = GetVisualizeDepthMeshConstantsForShader(visualizeFrameBlob);
+ context->UpdateSubresource(m_visualizeDepthMeshConstants.Get(), 0, nullptr, &visualizeConstants, 0, 0);
+
+ context->ClearRenderTargetView(visualizeFrameBlob.renderTarget.Get(), DirectX::Colors::Black);
+ context->ClearDepthStencilView(visualizeFrameBlob.renderTargetDepthBuffer.Get(), D3D11_CLEAR_DEPTH, 1.0f, 0);
+
+ ////////////////////////////////////////////////////////////////////////////////
+ // Input Assembler + Rasterizer
+ const UINT stride = sizeof(float4);
+ const UINT offset = 0;
+ context->IASetVertexBuffers(0, 1, m_vertexBuffer.GetAddressOf(), &stride, &offset);
+ context->IASetPrimitiveTopology(D3D11_PRIMITIVE_TOPOLOGY_TRIANGLELIST);
+ context->IASetInputLayout(m_rasterizeDepthMeshInputLayout.Get());
+
+ context->RSSetViewports(1, &visualizeFrameBlob.viewport);
+
+ ////////////////////////////////////////////////////////////////////////////////
+ // Vertex Shader
+ context->VSSetShader(m_visualizeDepthMeshVertexShader.Get(), nullptr, 0);
+
+ ID3D11Buffer* const vertexConstantBuffers[] = {
+ m_rasterizeDepthMeshConstants.Get(),
+ m_visualizeDepthMeshConstants.Get()
+ };
+
+ context->VSSetConstantBuffers(0, _countof(vertexConstantBuffers), vertexConstantBuffers);
+
+ ID3D11ShaderResourceView* const vertexShaderResources[] = {
+ rasterizedFrameBlob.depthImage.Get(), // Depth Texture
+ rasterizedFrameBlob.depthUnprojectionMap.Get(), // Depth Unprojection Map
+ rasterizedFrameBlob.targetDistortionMap.Get() // Target Distortion Map
+ };
+
+ context->VSSetShaderResources(0, _countof(vertexShaderResources), vertexShaderResources);
+ context->VSSetSamplers(0, 1, m_samplerState.GetAddressOf());
+
+ ////////////////////////////////////////////////////////////////////////////////
+ // Pixel Shader
+ context->PSSetShader(m_visualizeDepthMeshPixelShader.Get(), nullptr, 0);
+
+ ID3D11Buffer* const pixelConstantBuffers[] = {
+ m_rasterizeDepthMeshConstants.Get()
+ };
+
+ context->PSSetConstantBuffers(0, _countof(pixelConstantBuffers), pixelConstantBuffers);
+
+ ID3D11ShaderResourceView* const pixelShaderResources[] = {
+ visualizeFrameBlob.targetImage.Get(), // Target Texture
+ visualizeFrameBlob.targetImageDepthBuffer.Get() // Target Depth Buffer
+ };
+
+ context->PSSetShaderResources(0, _countof(pixelShaderResources), pixelShaderResources);
+ context->PSSetSamplers(0, 1, m_samplerState.GetAddressOf());
+
+ ////////////////////////////////////////////////////////////////////////////////
+ // Output Merger
+ ID3D11RenderTargetView* const outputRenderTargets[] = {
+ visualizeFrameBlob.renderTarget.Get()
+ };
+
+ context->OMSetRenderTargets(_countof(outputRenderTargets), outputRenderTargets, visualizeFrameBlob.renderTargetDepthBuffer.Get());
+
+ ////////////////////////////////////////////////////////////////////////////////
+ // Render
+ const uint32_t instanceCount = (rasterizedFrameBlob.depthIntrinsics.imageWidth - 1) * (rasterizedFrameBlob.depthIntrinsics.imageHeight - 1);
+
+ context->DrawInstanced(m_vertexCount, instanceCount, 0, 0);
+
+ ////////////////////////////////////////////////////////////////////////////////
+ // Cleanup: Unbind resources that might be used in later passes
+ decltype(outputRenderTargets) null_outputRenderTargets = { nullptr };
+ context->OMSetRenderTargets(_countof(null_outputRenderTargets), null_outputRenderTargets, nullptr);
+
+ decltype(pixelShaderResources) null_pixelShaderResources = { nullptr };
+ context->PSSetShaderResources(0, _countof(null_pixelShaderResources), null_pixelShaderResources);
+
+ decltype(vertexShaderResources) null_vertexShaderResources = { nullptr };
+ context->VSSetShaderResources(0, _countof(null_vertexShaderResources), null_vertexShaderResources);
+}
diff --git a/Samples/CameraStreamCoordinateMapper/cpp/Content/GpuCoordinateMapper.h b/Samples/CameraStreamCoordinateMapper/cpp/Content/GpuCoordinateMapper.h
new file mode 100644
index 0000000000..1599052f59
--- /dev/null
+++ b/Samples/CameraStreamCoordinateMapper/cpp/Content/GpuCoordinateMapper.h
@@ -0,0 +1,134 @@
+//*********************************************************
+//
+// Copyright (c) Microsoft. All rights reserved.
+// This code is licensed under the MIT License (MIT).
+// THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF
+// ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY
+// IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR
+// PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT.
+//
+//*********************************************************
+
+#pragma once
+
+#include "Common\DeviceResources.h"
+#include "ShaderStructures.h"
+
+namespace CameraStreamCoordinateMapper
+{
+ ////////////////////////////////////////////////////////////////////////////////
+ // GpuCoordinateMapper
+ //
+ // This class is the core class for mapping image data between the correlated cameras
+ //
+ // It uses triangle rasterization on the gpu to increase performance of processing
+ // the depth image. This is particularly useful when the depth + color cameras do
+ // not have matching frame sizes. For example on Kinect V2 the depth camera is 512x424
+ // and the color camera is 1920x1080.
+ //
+ // This means there are almost 4 color pixels for each depth pixel (along the x-axis).
+ // We can use triangle rasterization to interpolate the missing values to give us the
+ // most accurate result.
+
+ class GpuCoordinateMapper
+ {
+ public:
+
+ // This is used to detect when the CameraIntrinsics from the VideoMediaFrame have
+ // changed. We store the underlying intrinsics values to compare against on a
+ // frame-by-frame basis. If they change, then we can recreate resources that were
+ // dependent on the intrinsics values (e.g. Distortion and Unprojection look up tables)
+ struct CameraIntrinsics
+ {
+ CameraIntrinsics() = default;
+
+ explicit CameraIntrinsics(Windows::Media::Devices::Core::CameraIntrinsics^ intrinsics) :
+ imageWidth(intrinsics->ImageWidth),
+ imageHeight(intrinsics->ImageHeight),
+ principalPoint(intrinsics->PrincipalPoint),
+ focalLength(intrinsics->FocalLength),
+ radialDistortion(intrinsics->RadialDistortion),
+ tangentialDistortion(intrinsics->TangentialDistortion)
+ {}
+
+ inline bool operator!=(const CameraIntrinsics& rhs) const
+ {
+ return std::memcmp(this, &rhs, sizeof(CameraIntrinsics)) != 0;
+ }
+
+ uint32_t imageWidth;
+ uint32_t imageHeight;
+ Windows::Foundation::Numerics::float2 principalPoint;
+ Windows::Foundation::Numerics::float2 focalLength;
+ Windows::Foundation::Numerics::float3 radialDistortion;
+ Windows::Foundation::Numerics::float2 tangentialDistortion;
+ };
+
+ struct RasterizedFrameBlob {
+ CameraIntrinsics depthIntrinsics;
+ CameraIntrinsics sourceIntrinsics;
+ CameraIntrinsics targetIntrinsics;
+ Windows::Foundation::Numerics::float4x4 depthToSource;
+ Windows::Foundation::Numerics::float4x4 depthToTarget;
+ Windows::Foundation::Numerics::float4x4 sourceProjectionMatrix;
+ Windows::Foundation::Numerics::float4x4 targetProjectionMatrix;
+ float depthScaleInMeters;
+ float depthRangeMinimumInMeters;
+ float depthRangeMaximumInMeters;
+ Microsoft::WRL::ComPtr<ID3D11ShaderResourceView> depthImage;
+ Microsoft::WRL::ComPtr<ID3D11ShaderResourceView> depthUnprojectionMap;
+ Microsoft::WRL::ComPtr<ID3D11ShaderResourceView> sourceImage;
+ Microsoft::WRL::ComPtr<ID3D11ShaderResourceView> sourceDistortionMap;
+ Microsoft::WRL::ComPtr<ID3D11RenderTargetView> targetImage;
+ Microsoft::WRL::ComPtr<ID3D11DepthStencilView> targetRasterizedDepth;
+ Microsoft::WRL::ComPtr<ID3D11ShaderResourceView> targetDistortionMap;
+ };
+
+ struct VisualizeFrameBlob {
+ Windows::Foundation::Numerics::float4x4 worldToView;
+ Windows::Foundation::Numerics::float4x4 viewToProj;
+ Microsoft::WRL::ComPtr<ID3D11ShaderResourceView> targetImage;
+ Microsoft::WRL::ComPtr<ID3D11ShaderResourceView> targetImageDepthBuffer;
+ Microsoft::WRL::ComPtr<ID3D11RenderTargetView> renderTarget;
+ Microsoft::WRL::ComPtr<ID3D11DepthStencilView> renderTargetDepthBuffer;
+ D3D11_VIEWPORT viewport;
+ };
+
+ GpuCoordinateMapper(
+ const std::shared_ptr<DX::DeviceResources>& deviceResources);
+
+ void CreateDeviceDependentResources();
+ void ReleaseDeviceDependentResources();
+
+ ////////////////////////////////////////////////////////////////////////////////
+ // RunCoordinateMapping - executes the correlation algorithm for this set of frame data
+ void RunCoordinateMapping(
+ const RasterizedFrameBlob& frameBlob);
+
+ ////////////////////////////////////////////////////////////////////////////////
+ // VisualizeCoordinateMapping - executes the correlation algorithm for this set of frame data
+ // except visualized from another perspective, using depth information from the previous pass
+ // in order to occlude pixels that weren't visible from the original target camera's perspective
+ void VisualizeCoordinateMapping(
+ const RasterizedFrameBlob& rasterizedFrameBlob,
+ const VisualizeFrameBlob& visualizeFrameBlob);
+
+ private:
+ std::shared_ptr<DX::DeviceResources> m_deviceResources;
+
+ Microsoft::WRL::ComPtr<ID3D11VertexShader> m_rasterizeDepthMeshVertexShader;
+ Microsoft::WRL::ComPtr<ID3D11InputLayout> m_rasterizeDepthMeshInputLayout;
+ Microsoft::WRL::ComPtr<ID3D11PixelShader> m_rasterizeDepthMeshPixelShader;
+
+ Microsoft::WRL::ComPtr<ID3D11VertexShader> m_visualizeDepthMeshVertexShader;
+ Microsoft::WRL::ComPtr<ID3D11InputLayout> m_visualizeDepthMeshInputLayout;
+ Microsoft::WRL::ComPtr<ID3D11PixelShader> m_visualizeDepthMeshPixelShader;
+
+ Microsoft::WRL::ComPtr<ID3D11Buffer> m_rasterizeDepthMeshConstants;
+ Microsoft::WRL::ComPtr<ID3D11Buffer> m_visualizeDepthMeshConstants;
+ Microsoft::WRL::ComPtr<ID3D11Buffer> m_vertexBuffer;
+ Microsoft::WRL::ComPtr<ID3D11SamplerState> m_samplerState;
+
+ uint32_t m_vertexCount;
+ };
+}
\ No newline at end of file
diff --git a/Samples/CameraStreamCoordinateMapper/cpp/Content/QuadRenderer.cpp b/Samples/CameraStreamCoordinateMapper/cpp/Content/QuadRenderer.cpp
new file mode 100644
index 0000000000..fd734dd97d
--- /dev/null
+++ b/Samples/CameraStreamCoordinateMapper/cpp/Content/QuadRenderer.cpp
@@ -0,0 +1,170 @@
+//*********************************************************
+//
+// Copyright (c) Microsoft. All rights reserved.
+// This code is licensed under the MIT License (MIT).
+// THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF
+// ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY
+// IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR
+// PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT.
+//
+//*********************************************************
+
+#include "pch.h"
+#include "QuadRenderer.h"
+
+#include "Common\DirectXHelper.h"
+
+#include "Shaders\QuadVertexShader.h"
+#include "Shaders\QuadPixelShader.h"
+
+using namespace CameraStreamCoordinateMapper;
+
+using namespace DirectX;
+using namespace Windows::Foundation;
+
+// Loads vertex and pixel shaders from files and instantiates the cube geometry.
+QuadRenderer::QuadRenderer(const std::shared_ptr<DX::DeviceResources>& deviceResources) :
+ m_deviceResources(deviceResources)
+{
+ CreateDeviceDependentResources();
+ CreateWindowSizeDependentResources();
+}
+
+// Initializes view parameters when the window size changes.
+void QuadRenderer::CreateWindowSizeDependentResources()
+{
+ const Size outputSize = m_deviceResources->GetOutputSize();
+ const float aspectRatio = outputSize.Width / outputSize.Height;
+
+ const XMMATRIX projectionMatrix = XMMatrixOrthographicRH(2.0f, 2.0f / aspectRatio, 0.01f, 100.0f);
+
+ XMStoreFloat4x4(&m_constantBufferData.projection, XMMatrixTranspose(projectionMatrix));
+
+ const XMMATRIX viewMatrix = XMMatrixTranslation(0.0f, 0.0f, -1.0f);
+
+ XMStoreFloat4x4(&m_constantBufferData.view, XMMatrixTranspose(viewMatrix));
+}
+
+// Renders one frame using the vertex and pixel shaders.
+void QuadRenderer::Render(ID3D11ShaderResourceView* texture, float aspectRatio)
+{
+ ID3D11DeviceContext* context = m_deviceResources->GetD3DDeviceContext();
+
+ XMStoreFloat4x4(&m_constantBufferData.model, XMMatrixTranspose(XMMatrixScaling(1.0f, 1.0f / aspectRatio, 1.0f)));
+
+ context->UpdateSubresource(m_constantBuffer.Get(), 0, nullptr, &m_constantBufferData, 0, 0);
+
+ const UINT stride = sizeof(VertexPositionTex);
+ const UINT offset = 0;
+ context->IASetVertexBuffers(0, 1, m_vertexBuffer.GetAddressOf(), &stride, &offset);
+ context->IASetIndexBuffer(m_indexBuffer.Get(), DXGI_FORMAT_R16_UINT, 0);
+ context->IASetPrimitiveTopology(D3D11_PRIMITIVE_TOPOLOGY_TRIANGLELIST);
+ context->IASetInputLayout(m_inputLayout.Get());
+
+ context->VSSetShader(m_vertexShader.Get(), nullptr, 0);
+
+ context->VSSetConstantBuffers(0, 1, m_constantBuffer.GetAddressOf());
+
+ context->PSSetShader(m_pixelShader.Get(), nullptr, 0);
+ context->PSSetSamplers(0, 1, m_samplerState.GetAddressOf());
+
+ const Size outputSize = m_deviceResources->GetOutputSize();
+
+ const D3D11_VIEWPORT viewport = CD3D11_VIEWPORT(
+ 0.0f,
+ 0.0f,
+ outputSize.Width,
+ outputSize.Height
+ );
+
+ context->RSSetViewports(1, &viewport);
+
+ ID3D11ShaderResourceView* textures[] = {
+ texture
+ };
+
+ context->PSSetShaderResources(0, _countof(textures), textures);
+
+ context->DrawIndexed(m_indexCount, 0, 0);
+
+ ID3D11ShaderResourceView* nullTextures[] = {
+ nullptr
+ };
+
+ context->PSSetShaderResources(0, _countof(nullTextures), nullTextures);
+}
+
+void QuadRenderer::CreateDeviceDependentResources()
+{
+ ID3D11Device* device = m_deviceResources->GetD3DDevice();
+
+ m_vertexShader = DX::CreateVertexShader(
+ device,
+ g_QuadVertexShader,
+ _countof(g_QuadVertexShader));
+
+ static const D3D11_INPUT_ELEMENT_DESC vertexDesc[] =
+ {
+ { "POSITION", 0, DXGI_FORMAT_R32G32B32_FLOAT, 0, D3D11_APPEND_ALIGNED_ELEMENT, D3D11_INPUT_PER_VERTEX_DATA, 0 },
+ { "TEXCOORD", 0, DXGI_FORMAT_R32G32_FLOAT, 0, D3D11_APPEND_ALIGNED_ELEMENT, D3D11_INPUT_PER_VERTEX_DATA, 0 },
+ };
+
+ m_inputLayout = DX::CreateInputLayout(
+ device,
+ vertexDesc,
+ _countof(vertexDesc),
+ g_QuadVertexShader,
+ _countof(g_QuadVertexShader));
+
+ m_pixelShader = DX::CreatePixelShader(
+ device,
+ g_QuadPixelShader,
+ _countof(g_QuadPixelShader));
+
+ m_constantBuffer = DX::CreateBuffer(
+ device,
+ sizeof(ModelViewProjectionConstantBuffer),
+ nullptr,
+ D3D11_BIND_CONSTANT_BUFFER);
+
+ m_samplerState = DX::CreateSamplerState(device);
+
+ static const VertexPositionTex quadVertices[] =
+ {
+ { XMFLOAT3(-1.0f, 1.0f, 0.0f), XMFLOAT2(0.0f, 0.0f) },
+ { XMFLOAT3( 1.0f, 1.0f, 0.0f), XMFLOAT2(1.0f, 0.0f) },
+ { XMFLOAT3( 1.0f, -1.0f, 0.0f), XMFLOAT2(1.0f, 1.0f) },
+ { XMFLOAT3(-1.0f, -1.0f, 0.0f), XMFLOAT2(0.0f, 1.0f) },
+ };
+
+ m_vertexBuffer = DX::CreateBuffer(
+ device,
+ sizeof(quadVertices),
+ quadVertices,
+ D3D11_BIND_VERTEX_BUFFER);
+
+ static const unsigned short quadIndices [] =
+ {
+ 0,2,3,
+ 0,1,2,
+ };
+
+ m_indexCount = _countof(quadIndices);
+
+ m_indexBuffer = DX::CreateBuffer(
+ device,
+ sizeof(quadIndices),
+ quadIndices,
+ D3D11_BIND_INDEX_BUFFER);
+}
+
+void QuadRenderer::ReleaseDeviceDependentResources()
+{
+ m_vertexShader = nullptr;
+ m_inputLayout = nullptr;
+ m_pixelShader = nullptr;
+ m_constantBuffer = nullptr;
+ m_vertexBuffer = nullptr;
+ m_indexBuffer = nullptr;
+ m_samplerState = nullptr;
+}
\ No newline at end of file
diff --git a/Samples/CameraStreamCoordinateMapper/cpp/Content/QuadRenderer.h b/Samples/CameraStreamCoordinateMapper/cpp/Content/QuadRenderer.h
new file mode 100644
index 0000000000..f61ce59729
--- /dev/null
+++ b/Samples/CameraStreamCoordinateMapper/cpp/Content/QuadRenderer.h
@@ -0,0 +1,50 @@
+//*********************************************************
+//
+// Copyright (c) Microsoft. All rights reserved.
+// This code is licensed under the MIT License (MIT).
+// THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF
+// ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY
+// IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR
+// PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT.
+//
+//*********************************************************
+
+#pragma once
+
+#include "Common\DeviceResources.h"
+#include "Common\StepTimer.h"
+#include "ShaderStructures.h"
+
+namespace CameraStreamCoordinateMapper
+{
+ // This sample renderer instantiates a basic rendering pipeline.
+ class QuadRenderer
+ {
+ public:
+ QuadRenderer(const std::shared_ptr<DX::DeviceResources>& deviceResources);
+
+ void CreateDeviceDependentResources();
+ void CreateWindowSizeDependentResources();
+ void ReleaseDeviceDependentResources();
+
+ void Render(ID3D11ShaderResourceView* texture, float aspectRatio);
+
+ private:
+ // Cached pointer to device resources.
+ std::shared_ptr<DX::DeviceResources> m_deviceResources;
+
+ // Direct3D resources for the geometry.
+ Microsoft::WRL::ComPtr<ID3D11InputLayout> m_inputLayout;
+ Microsoft::WRL::ComPtr<ID3D11Buffer> m_vertexBuffer;
+ Microsoft::WRL::ComPtr<ID3D11Buffer> m_indexBuffer;
+ Microsoft::WRL::ComPtr<ID3D11VertexShader> m_vertexShader;
+ Microsoft::WRL::ComPtr