DX11 Setting up DirectX issues...

Hi Guys,
I have been working through some DirectX tutorials and am having trouble with one step. I am setting up separate functions for each step of my program, and Visual Studio crashes when it reaches my Render() function. Using the debugger I can see that my backBufferTarget_ has a value of 0x00000000, so even though I think I am initializing it in InitD3D(), it is not being remembered or something. My code is below; if anyone has any advice it would be greatly appreciated. I am sure it is a simple thing I am missing.

#include<Windows.h>
#include<memory>
#include<xnamath.h>
#include<d3d11.h>
#include<d3dx11.h>
#include<DxErr.h>

#pragma comment(lib, "winmm.lib")
#pragma comment(lib, "d3d11.lib")
#pragma comment(lib, "d3dx11.lib")
#pragma comment(lib, "dxerr.lib")

LRESULT CALLBACK WndProc(HWND hwnd, UINT message, WPARAM wParam, LPARAM lParam);

HWND hwnd = NULL;
HINSTANCE hInstance = NULL;

ID3D11DeviceContext* d3dContext_ = NULL;
ID3D11Device* d3dDevice_ = NULL;
IDXGISwapChain* swapChain_ = NULL;
ID3D11RenderTargetView* backBufferTarget_ = NULL;

//********************
//Function prototypes*
//********************
void MessagePump(void);
bool GetFullScreen(void);
bool InitWindow(void);
void KillWindow(void);
bool InitD3D(void);
void Render(void);
bool InitScene(void);
bool InitObjects(void);

bool progFinished = FALSE;
bool progFullScreen = FALSE;

#define APP_NAME "Kenneth Game"

//************************
//Application entry point*
//************************
int WINAPI wWinMain(HINSTANCE hInstance, HINSTANCE prevInstance, LPWSTR cmdLine, int cmdShow)
{

if (!GetFullScreen())
{
OutputDebugString("User abort\n");
exit(5);
}

InitWindow();

InitD3D();

InitScene();

while (!progFinished)
{

MessagePump(); //Check for window messages

Render(); //Draw our graphics
}

return 0;
}

//****************************************************************************************
//Initialise a window (full-screen or otherwise) in which our graphics will be displayed.*
//****************************************************************************************
bool InitWindow(void)
{

//UNREFERENCED_PARAMETER(prevInstance);
//UNREFERENCED_PARAMETER(cmdLine);

WNDCLASSEX wndClass = { 0 };
wndClass.cbSize = sizeof(WNDCLASSEX);
wndClass.style = CS_HREDRAW | CS_VREDRAW;
wndClass.lpfnWndProc = WndProc;
wndClass.hInstance = hInstance;
wndClass.hbrBackground = (HBRUSH)(COLOR_WINDOW + 1);
wndClass.lpszClassName = "DX11BookWindowClass";

if (!RegisterClassEx(&wndClass))
return false;

RECT rc = { 0, 0, 640, 480 };

HWND hwnd = CreateWindowA("DX11BookWindowClass", "Blank Win32 Window", WS_OVERLAPPEDWINDOW, CW_USEDEFAULT, CW_USEDEFAULT, rc.right - rc.left, rc.bottom - rc.top, NULL, NULL, hInstance, NULL);

if (!hwnd)
return false;

ShowWindow(hwnd, SW_SHOW);
//UpdateWindow(hwnd);

return true;
}

//*************************************************
//Terminate the window that was previously opened.*
//*************************************************
void KillWindow(void)
{
MSG msg;

while (PeekMessage(&msg, NULL, 0, 0, PM_REMOVE))
{
DispatchMessage(&msg);
}

//ghAppWindow = NULL;
ShowCursor(true);
}

//***************************
//Windows message processor.*
//***************************
LRESULT CALLBACK WndProc(HWND hwnd, UINT message, WPARAM wParam, LPARAM lParam)
{
PAINTSTRUCT paintStruct;
HDC hDC;

switch (message)
{
case WM_PAINT:
hDC = BeginPaint(hwnd, &paintStruct);
EndPaint(hwnd, &paintStruct);
break;

case WM_DESTROY:
progFinished = true;
PostQuitMessage(0);
break;

default:
return DefWindowProc(hwnd, message, wParam, lParam);
}

return 0;
}

//**********************************************
//Process any messages that Windows has sent us*
//**********************************************
void MessagePump(void)
{
MSG msg;

if (PeekMessage(&msg, NULL, 0, 0, PM_REMOVE))
{
TranslateMessage(&msg);
DispatchMessage(&msg);
}

}

//*************************************
//Prompt selection of full screen mode*
//*************************************
bool GetFullScreen(void)
{
int iResult;
bool bRet = true;

iResult = MessageBox(NULL, "Run fullscreen?", APP_NAME, MB_YESNOCANCEL | MB_ICONQUESTION);
switch (iResult)
{
case IDCANCEL:
bRet = false;
break;
case IDNO:
progFullScreen = false;
break;
case IDYES:
progFullScreen = true;
break;
case 0:
OutputDebugString("Couldn't open MessageBox, closing");
exit(10);
break;
}

return bRet;
}

//**************************************
//Creates a hardware device in Direct3D*
//**************************************
bool InitD3D(void)
{
D3D_DRIVER_TYPE driverType_;
D3D_FEATURE_LEVEL featureLevel_;
RECT dimensions;
GetClientRect(hwnd, &dimensions);

unsigned int width = dimensions.right - dimensions.left;
unsigned int height = dimensions.bottom - dimensions.top;

D3D_DRIVER_TYPE driverTypes[] =
{
D3D_DRIVER_TYPE_HARDWARE, D3D_DRIVER_TYPE_WARP,
D3D_DRIVER_TYPE_REFERENCE, D3D_DRIVER_TYPE_SOFTWARE
};

unsigned int totalDriverTypes = ARRAYSIZE(driverTypes);

D3D_FEATURE_LEVEL featureLevels[] =
{
D3D_FEATURE_LEVEL_11_0,
D3D_FEATURE_LEVEL_10_1,
D3D_FEATURE_LEVEL_10_0
};

unsigned int totalFeatureLevels = ARRAYSIZE(featureLevels);

DXGI_SWAP_CHAIN_DESC swapChainDesc;
ZeroMemory(&swapChainDesc, sizeof(swapChainDesc));
swapChainDesc.BufferCount = 1;
swapChainDesc.BufferDesc.Width = width;
swapChainDesc.BufferDesc.Height = height;
swapChainDesc.BufferDesc.Format = DXGI_FORMAT_R8G8B8A8_UNORM;
swapChainDesc.BufferDesc.RefreshRate.Numerator = 60;
swapChainDesc.BufferDesc.RefreshRate.Denominator = 1;
swapChainDesc.BufferUsage = DXGI_USAGE_RENDER_TARGET_OUTPUT;
swapChainDesc.OutputWindow = hwnd;
swapChainDesc.Windowed = true;
swapChainDesc.SampleDesc.Count = 1;
swapChainDesc.SampleDesc.Quality = 0;

unsigned int creationFlags = 0;

#ifdef _DEBUG
creationFlags |= D3D11_CREATE_DEVICE_DEBUG;
#endif

HRESULT result;
unsigned int driver = 0;

for (driver = 0; driver < totalDriverTypes; ++driver)
{
result = D3D11CreateDeviceAndSwapChain(0, driverTypes[driver], 0, creationFlags, featureLevels, totalFeatureLevels, D3D11_SDK_VERSION, &swapChainDesc, &swapChain_,
&d3dDevice_, &featureLevel_, &d3dContext_);

if (SUCCEEDED(result))
{
driverType_ = driverTypes[driver];
break;
}
}

if (FAILED(result))
{
DXTRACE_MSG("Failed to create the Direct3D device!");
return false;
}

ID3D11Texture2D* backBufferTexture;

result = swapChain_->GetBuffer(0, __uuidof(ID3D11Texture2D), (LPVOID*)&backBufferTexture);

if (FAILED(result))
{
DXTRACE_MSG("Failed to get the swap chain back buffer!");
return false;
}

result = d3dDevice_->CreateRenderTargetView(backBufferTexture, 0, &backBufferTarget_);

if (backBufferTexture)
backBufferTexture->Release();

if (FAILED(result))
{
DXTRACE_MSG("Failed to create the render target view!");
return false;
}

d3dContext_->OMSetRenderTargets(1, &backBufferTarget_, 0);

D3D11_VIEWPORT viewport;
viewport.Width = static_cast<float>(width);
viewport.Height = static_cast<float>(height);
viewport.MinDepth = 0.0f;
viewport.MaxDepth = 1.0f;
viewport.TopLeftX = 0.0f;
viewport.TopLeftY = 0.0f;

return true;
}

//****************************************************
// Initialise DirectX ready for us to start rendering*
//****************************************************
bool InitScene(void)
{

return true;
}

//*******************************************
//Initialise the 3d objects we will be using*
//*******************************************
bool InitObjects(void)
{

return true;
}

void Render(void)
{

float clearColor[4] = { 0.0f, 0.0f, 0.25f, 1.0f };
d3dContext_->ClearRenderTargetView(backBufferTarget_, clearColor);

swapChain_->Present(0, 0);

}

Oh dude, thank you SO MUCH - that fixed it. I spent hours last night trying heaps of stuff. Still wrapping my head around this extra layer of complexity (after just finishing a C++ book).

Thanks!

Hi Again,

I come with more perils. I am now trying to add keyboard support, and like before, the program is crashing when it comes to the device I thought I had created. I am keeping my eye out for uninitialized global variables as you taught me before, but cannot find the problem. The added code is:

//**********************
//DirectInput variables*
//**********************
#define KEYDOWN( name, key ) ( name[key] & 0x80 )
LPDIRECTINPUT8 directInput_ = NULL;
LPDIRECTINPUTDEVICE8 keyboardDevice_ = NULL;
char keyboardKeys_[256];
char prevKeyboardKeys_[256];


For the initialization routine (which is called before Update() and Render()):

bool InitInput(void)
{
HRESULT result;

ZeroMemory(keyboardKeys_, sizeof(keyboardKeys_));
ZeroMemory(prevKeyboardKeys_, sizeof(prevKeyboardKeys_));

result = DirectInput8Create(hInstance, DIRECTINPUT_VERSION, IID_IDirectInput8, (void**)&directInput_, 0); //Initialize DirectInput8

if (FAILED(result))
{
return false;
}

result = directInput_->CreateDevice(GUID_SysKeyboard, &keyboardDevice_, 0);

if (FAILED(result))
{
return false;
}

result = keyboardDevice_->SetDataFormat(&c_dfDIKeyboard);

if (FAILED(result))
{
return false;
}

result = keyboardDevice_->SetCooperativeLevel(ghHwnd, DISCL_FOREGROUND | DISCL_NONEXCLUSIVE);

if (FAILED(result))
{
return false;
}

result = keyboardDevice_->Acquire();

if (FAILED(result))
{
return false;
}

return true;
}


And for the Update() routine:

//******************************
//Get current state of Keyboard*
//******************************
void Update(void)
{
float fElapsed;
float rotZ = 0;
float fY = 0.0f;

keyboardDevice_->GetDeviceState(sizeof(keyboardKeys_), (LPVOID)&keyboardKeys_);

if (GetAsyncKeyState(VK_ESCAPE))
{
PostQuitMessage(0);
}

// Button up event.
if (KEYDOWN(prevKeyboardKeys_, DIK_DOWN) && !KEYDOWN(keyboardKeys_, DIK_DOWN))
{
fY -= 0.1f;
}

if (KEYDOWN(prevKeyboardKeys_, DIK_UP) && !KEYDOWN(keyboardKeys_, DIK_UP))
{
fY += 0.1f;
}

memcpy(prevKeyboardKeys_, keyboardKeys_, sizeof(keyboardKeys_));

gfTimeScale = 0.001f;
fElapsed = GetElapsedTime();
rotZ += fElapsed;
}


It is crashing at keyboardDevice_->GetDeviceState(sizeof(keyboardKeys_), (LPVOID)&keyboardKeys_);

The debugger shows keyboardDevice_ is 0x00000000 (similar to before). Again, this is adapted from code that works when done the way the book shows.

Again any help is greatly appreciated.

Thanks

The debugger shows keyboardDevice_ is 0x00000000 (similar to before). Again, this is adapted from code that works when done the way the book shows.

After a quick glance at your code I'm not sure what's wrong, but I did notice one thing: you don't seem to ever check the result of your Init functions. In InitInput you return false if anything failed, but then you never check whether InitInput returns true or not (I'm assuming, based on the code in the original post).

You should check the return values of your Init functions, and at the very least log something if they're false. It's possible that keyboardDevice_ never initialized properly (and thus was always NULL), but you won't catch that until your update loop. You want to try and find errors as soon as possible! Crash early and crash often, as the saying goes.

EDIT: Looked at your code again. Looks like hInstance is uninitialized. You can get hInstance from WinMain.
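
For what it's worth, a minimal sketch of both points, assuming the globals and Init functions from the first post: the entry point's HINSTANCE parameter is renamed so it no longer shadows the global hInstance, copied into it before anything uses it, and every Init result is checked up front.

int WINAPI wWinMain(HINSTANCE hInst, HINSTANCE prevInstance, LPWSTR cmdLine, int cmdShow)
{
    hInstance = hInst; // store the parameter in the global before InitWindow/InitInput need it

    if (!GetFullScreen())
    {
        OutputDebugStringA("User abort\n");
        return 5;
    }

    // Bail out as early as possible if any initialisation step fails.
    if (!InitWindow()) { OutputDebugStringA("Window initialisation failed\n"); return 1; }
    if (!InitD3D())    { OutputDebugStringA("D3D initialisation failed\n");    return 1; }
    if (!InitScene())  { OutputDebugStringA("Scene initialisation failed\n");  return 1; }
    if (!InitInput())  { OutputDebugStringA("Input initialisation failed\n");  return 1; }

    while (!progFinished)
    {
        MessagePump(); // check for window messages
        Update();      // read input
        Render();      // draw our graphics
    }

    return 0;
}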

Thank you very much! In my main I changed it to:

	if (!InitInput())
{
OutputDebugString("  Input initialisation failed\n");
exit(5);
}


and could see it was failing here. You were spot on about hInstance. The way I got it was with

hInstance = GetModuleHandle(NULL);


Do you think that's okay? Or is there a better way to do it?

Thanks heaps!

Hi Guys..

Stuck yet again. This time I am simply trying to display my triangle slightly rotated.

VertexPos gVertices[] =
{
XMFLOAT3(0.5f, 0.5f, 0.5f),
XMFLOAT3(0.5f, -0.5f, 0.5f),
XMFLOAT3(-0.5f, -0.5f, 0.5f)
};


which come into play here in InitObjects(void)

	ZeroMemory(&resourceData, sizeof(resourceData));
resourceData.pSysMem = gVertices;
d3dResult = d3dDevice_->CreateBuffer(&vertexDesc, &resourceData, &vertexBuffer_);

D3D11_BUFFER_DESC constDesc;
ZeroMemory(&constDesc, sizeof(constDesc));
constDesc.BindFlags = D3D11_BIND_CONSTANT_BUFFER;
constDesc.ByteWidth = sizeof(XMMATRIX);
constDesc.Usage = D3D11_USAGE_DEFAULT;

d3dResult = d3dDevice_->CreateBuffer(&constDesc, 0, &mvpCB_);


with the final Render() function as

void Render(void)
{
if (d3dContext_ == 0) //Checks that the Direct3D context is valid.
return;

float clearColor[4] = { 0.0f, 0.0f, 0.25f, 1.0f };
d3dContext_->ClearRenderTargetView(backBufferTarget_, clearColor);
unsigned int stride = sizeof(VertexPos);
unsigned int offset = 0;

//Setting up the input assembly
d3dContext_->IASetInputLayout(inputLayout_);
d3dContext_->IASetVertexBuffers(0, 1, &vertexBuffer_, &stride, &offset);
d3dContext_->IASetPrimitiveTopology(D3D11_PRIMITIVE_TOPOLOGY_TRIANGLELIST);

XMMATRIX view = XMMatrixIdentity();
XMMATRIX projection = XMMatrixOrthographicOffCenterLH(0.0f, 800.0f, 0.0f, 600.0f, 0.1f, 100.0f); //1a. Creates an orthographic projection matrix using LHS. Return value is a XMMATRIX structure where the resulting projection matrix is placed.
XMMATRIX vpMatrix_ = XMMatrixMultiply(view, projection);

XMMATRIX translation = XMMatrixTranslation(10.0f, 10.0f, 10.0f);
XMMATRIX rotationZ = XMMatrixRotationZ(30.0f);
XMMATRIX scale = XMMatrixScaling(0.0f, 0.0f, 0.0f);
XMMATRIX TriangleWorld = translation * rotationZ;

XMMATRIX mvp = TriangleWorld*vpMatrix_*translation;
mvp = XMMatrixTranspose(mvp);

d3dContext_->UpdateSubresource(mvpCB_, 0, 0, &mvp, 0, 0);
d3dContext_->VSSetConstantBuffers(0, 1, &mvpCB_);

d3dContext_->Draw(3, 0);
swapChain_->Present(0, 0);
}


No matter what I change in XMMatrixRotationZ or XMMatrixTranslation, my triangle stays the same. It's as if, once the buffer is created, I am not altering it at all.

I have tried many combinations of matrix multiplication orders and large values, to no avail. It is as if the mvp matrix is having no effect on the final image. Thank you for your time - I have spent many hours trying to figure this out and only come here as a last resort. I find the best way to learn is trying to figure out broken code...

Not 100% sure what you mean, but my vertex shader is created here:

bool InitObjects(void)
{

#if defined( DEBUG ) || defined( _DEBUG )
#endif

ID3DBlob* errorBuffer = 0;
ID3DBlob* vsBuffer = 0;

//bool compileResult = CompileD3DShader("SolidGreenColor.fx", "VS_Main", "vs_4_0", &vsBuffer); //Loads vertex shader from the text file and compiles it into byte code.
//bool compileResult = D3DX11CompileFromFile("SolidGreenColor.fx", 0, 0, "VS_Main", "vs_4_0", shaderFlags, 0, 0, &vsBuffer, &errorBuffer, 0);

HRESULT result;
result =  D3DX11CompileFromFile("SolidGreenColor.fx", 0, 0, "VS_Main", "vs_4_0", shaderFlags, 0, 0, &vsBuffer, &errorBuffer, 0);

if (FAILED(result))
{
if (errorBuffer != 0)
{
OutputDebugStringA((char*)errorBuffer->GetBufferPointer());
errorBuffer->Release();
}

return false;
}

HRESULT d3dResult;

d3dResult = d3dDevice_->CreateVertexShader(vsBuffer->GetBufferPointer(), vsBuffer->GetBufferSize(), 0, &solidColorVS_);

if (FAILED(d3dResult))
{
if (vsBuffer)
vsBuffer->Release();

return false;
}

D3D11_INPUT_ELEMENT_DESC solidColorLayout[] = //Used to describe the vertex layout of a vertex structure. (msdn).
{
{ "POSITION", 0, DXGI_FORMAT_R32G32B32_FLOAT, 0, 0, D3D11_INPUT_PER_VERTEX_DATA, 0 } //3b
};

unsigned int totalLayoutElements = ARRAYSIZE(solidColorLayout);

d3dResult = d3dDevice_->CreateInputLayout(solidColorLayout, totalLayoutElements, //3c The input layout uses the type of ID3D11InputLayout. Created with a call to the D3D device function CreateInputLayout.
vsBuffer->GetBufferPointer(), vsBuffer->GetBufferSize(), &inputLayout_);

vsBuffer->Release();

if (FAILED(d3dResult))
{
return false;
}

ID3DBlob* psBuffer = 0;

result = D3DX11CompileFromFile("SolidGreenColor.fx", 0, 0, "PS_Main", "ps_4_0", shaderFlags, 0, 0, &psBuffer, &errorBuffer, 0);

if (FAILED(result))
{
if (errorBuffer != 0)
{
OutputDebugStringA((char*)errorBuffer->GetBufferPointer());
errorBuffer->Release();
}

return false;
}

d3dResult = d3dDevice_->CreatePixelShader(psBuffer->GetBufferPointer(), psBuffer->GetBufferSize(), 0, &solidColorPS_);

psBuffer->Release();

ZeroMemory(&vertexDesc, sizeof(vertexDesc));
vertexDesc.Usage = D3D11_USAGE_DEFAULT;
vertexDesc.BindFlags = D3D11_BIND_VERTEX_BUFFER;
vertexDesc.ByteWidth = sizeof(VertexPos)* 3;

ZeroMemory(&resourceData, sizeof(resourceData));
resourceData.pSysMem = gVertices;
d3dResult = d3dDevice_->CreateBuffer(&vertexDesc, &resourceData, &vertexBuffer_);

D3D11_BUFFER_DESC constDesc;
ZeroMemory(&constDesc, sizeof(constDesc));
constDesc.BindFlags = D3D11_BIND_CONSTANT_BUFFER;
constDesc.ByteWidth = sizeof(XMMATRIX);
constDesc.Usage = D3D11_USAGE_DEFAULT;

d3dResult = d3dDevice_->CreateBuffer(&constDesc, 0, &mvpCB_);

if (FAILED(d3dResult))
{
return false;
}

return true;

}


My goal is to modify the triangle in world space so the rotation or translation matrices have an effect on it. Thanks for your time.

Nope, I meant the HLSL, the source code of your vertex shader: VS_Main.

Oh sorry, I'm guessing you mean the .fx file that D3DX11CompileFromFile is grabbing?

It is:

float4 VS_Main( float4 pos : POSITION ) : SV_POSITION
{
return pos;
}

float4 PS_Main( float4 pos : SV_POSITION ) : SV_TARGET
{
return float4( 0.0f, 1.0f, 0.0f, 1.0f );
}


A solid green shader as far as I am aware.

Your vertex shader just passes the unchanged vertex positions. To apply any transformation, e.g. rotation, you need to multiply with the transformation matrix you assigned to the constant buffer.

Precisely. Yeah, that's a pass-through vertex shader; the position doesn't change at all.

It should look something like this:

cbuffer VSParameters : register(b0)  // deliberately assigning slot 0 !
{
matrix WVP;
};

float4 VS_Main( float4 pos : POSITION ) : SV_POSITION
{
return mul(pos, WVP);
}

Also, you probably want to rotate (and/or scale) first, then translate. Order of multiplication matters with matrices:

XMMATRIX TriangleWorld = rotationZ * translation;
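
Applied to the Render() posted earlier, the whole chain would then come out roughly like this (a sketch only, reusing the view, projection and mvpCB_ names from that code; note too that XMMatrixRotationZ expects radians, so 30.0f is several full turns rather than 30 degrees):

XMMATRIX rotationZ   = XMMatrixRotationZ(XMConvertToRadians(30.0f)); // degrees -> radians
XMMATRIX translation = XMMatrixTranslation(10.0f, 10.0f, 10.0f);
XMMATRIX world       = rotationZ * translation;   // rotate first, then translate

XMMATRIX mvp = world * view * projection;         // world * view * projection, in that order
mvp = XMMatrixTranspose(mvp);                     // HLSL constant buffers are column-major by default

d3dContext_->UpdateSubresource(mvpCB_, 0, 0, &mvp, 0, 0);
d3dContext_->VSSetConstantBuffers(0, 1, &mvpCB_);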

Okay, thanks guys. I am still slowly digging my way through the literature.

Is there a way to create a simple shader like this entirely in a struct or class of my own definition, so it does not need a separate .fx file? If so, what function do I use instead of D3DX11CompileFromFile?

I hope my question makes sense. Thanks. you guys are extremely helpful and awesome!

Not quite sure what you mean. You can have several shaders in the same file (like you already do for both the vertex and pixel shaders). And there are compile functions which take source code directly, without the detour of a file, e.g. D3DX11CompileFromMemory, or alternatively the newer compile function D3DCompile.

Put your HLSL source as a static string in your cpp file and feed it to one of those functions.

(Alternatively, one could even let the command-line compiler fxc spit out the compiled binaries as hex-array source code, with the option /Fh. This way no runtime compilation is needed.)
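
For example, a minimal sketch of the D3DCompile route (d3dcompiler.h / d3dcompiler.lib), with the HLSL kept as a string constant in the .cpp file. "embedded_shader" is only a label used in compiler error messages, and the blob that comes back is used exactly like the vsBuffer in the earlier InitObjects():

#include <d3dcompiler.h>
#pragma comment(lib, "d3dcompiler.lib")

// The same VS_Main as in the .fx file, embedded as a string.
static const char g_vsSource[] =
    "float4 VS_Main( float4 pos : POSITION ) : SV_POSITION \n"
    "{                                                      \n"
    "    return pos;                                         \n"
    "}                                                       \n";

ID3DBlob* vsBuffer = 0;
ID3DBlob* errorBuffer = 0;

HRESULT hr = D3DCompile(g_vsSource, sizeof(g_vsSource) - 1, "embedded_shader",
                        0, 0, "VS_Main", "vs_4_0", 0, 0, &vsBuffer, &errorBuffer);

if (FAILED(hr) && errorBuffer)
{
    OutputDebugStringA((char*)errorBuffer->GetBufferPointer());
    errorBuffer->Release();
}
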
Is there a way to create a simple shader like this entirely in a struct or class of my own definition, so it does not need a separate .fx file? If so, what function do I use instead of D3DX11CompileFromFile?

No, that doesn't make much sense.

Of course you could include the source of your HLSL file as a string constant in your code, but this is not very common.

If you don't want to compile your shaders at runtime, you can do it offline using the fxc.exe command line compiler. Take this:

http://msdn.microsoft.com/en-us/library/windows/desktop/bb509709%28v=vs.85%29.aspx

What I do is this: I let fxc.exe create a header file (command line option /Fh). This .h file contains a byte array with the compiled shader code that can be passed to e.g. ID3D11Device::CreateVertexShader.
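
As a rough illustration of that workflow (the header and array names below are placeholders; fxc's /Vn option controls the variable name it writes into the generated header):

// Offline, e.g. as a pre-build step:
//   fxc /T vs_4_0 /E VS_Main /Fh CompiledVS.h /Vn g_VS_Main SolidGreenColor.fx

#include "CompiledVS.h" // contains: const BYTE g_VS_Main[] = { ... };

// No runtime compilation needed: hand the byte array straight to the device.
HRESULT hr = d3dDevice_->CreateVertexShader(g_VS_Main, sizeof(g_VS_Main), 0, &solidColorVS_);

// The same bytecode replaces vsBuffer->GetBufferPointer() in CreateInputLayout, too.
hr = d3dDevice_->CreateInputLayout(solidColorLayout, totalLayoutElements,
                                   g_VS_Main, sizeof(g_VS_Main), &inputLayout_);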

Just out of interest, why do they have them in a separate file? I can't see the advantage of this over having them in a header file and just including it.

Thanks

I'm not sure if I get the idea behind your question. You have a project with mixed languages: C++ and HLSL. They have different compilers. Why would you mix the languages in a single file?

I'm not sure if I get the idea behind your question. You have a project with mixed languages: C++ and HLSL. They have different compilers. Why would you mix the languages in a single file?

That answers my question - it didn't click that it's a different language, lol. Thanks
