I am attempting to load pre-compiled shaders and create a PSO. The moment I go to create the PSO, I receive the following D3D12 Error:
D3D12 ERROR: ID3D12Device::CreateInputLayout: Encoded Signature size doesn't match specified size. [ STATE_CREATION ERROR #63: CREATEINPUTLAYOUT_UNPARSEABLEINPUTSIGNATURE].
I have the following shaders, which compile without errors in Visual Studio using Shader Model 5.1:
// VertexShader.hlsl
#include "PSInput.hlsli"

PSInput VSMain(float4 position : POSITION, float4 color : COLOR)
{
    PSInput result;
    result.position = position;
    result.color = color;
    return result;
}
//-------------------------------------------------
// PixelShader.hlsl
#include "PSInput.hlsli"

float4 PSMain(PSInput input) : SV_TARGET
{
    return input.color;
}
//-------------------------------------------------
// PSInput.hlsli
struct PSInput {
    float4 position : SV_POSITION;
    float4 color : COLOR;
};
Below is my code for reading in each shader's .cso file and binding the shader bytecode to the PSO descriptor. I've checked that both vertexShaderData and pixelShaderData are non-null and that each length is greater than zero. Interestingly, vertexShaderDataLength is 668 bytes while pixelShaderDataLength is 14368 bytes (much larger than I expected, so I wonder if that is something to worry about).
byte* vertexShaderData(nullptr);
uint vertexShaderDataLength(0);
ThrowIfFailed(
    ReadDataFromFile(
        GetAssetFullPath(L"VertexShader.cso").c_str(),
        &vertexShaderData,
        &vertexShaderDataLength
    )
);

byte* pixelShaderData(nullptr);
uint pixelShaderDataLength(0);
ThrowIfFailed(
    ReadDataFromFile(
        GetAssetFullPath(L"PixelShader.cso").c_str(),
        &pixelShaderData,
        &pixelShaderDataLength
    )
);
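As a sanity check on the loaded blobs (a minimal sketch, my own debugging addition: every FXC-compiled .cso container begins with the ASCII fourcc "DXBC"):

#include <cassert>
#include <cstring>

// Sanity check: a compiled shader container starts with the fourcc "DXBC".
// If this fires, the file that was read is not compiled shader bytecode
// (wrong path, stale build output, etc.).
assert(vertexShaderDataLength >= 4 &&
       memcmp(vertexShaderData, "DXBC", 4) == 0);
assert(pixelShaderDataLength >= 4 &&
       memcmp(pixelShaderData, "DXBC", 4) == 0);

Both asserts pass, so the reads themselves appear to be fine.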
D3D12_GRAPHICS_PIPELINE_STATE_DESC psoDesc = {};
psoDesc.InputLayout = { inputElementDescriptor, _countof(inputElementDescriptor) };
psoDesc.pRootSignature = rootSignature;
psoDesc.VS = CD3DX12_SHADER_BYTECODE(vertexShaderData, vertexShaderDataLength);
psoDesc.PS = CD3DX12_SHADER_BYTECODE(pixelShaderData, pixelShaderDataLength);
// ... fill in the remainder of psoDesc ...
device->CreateGraphicsPipelineState( // <--- Exception thrown here.
    &psoDesc,
    IID_PPV_ARGS(&pipelineState)
);
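The error text above is debug-layer output, so the debug layer is evidently enabled. Breaking in the debugger at the exact point the message is logged can be done through the device's info queue; a minimal sketch:

#include <d3d12sdklayers.h>  // ID3D12InfoQueue

// Ask the info queue to break into the debugger whenever an
// error-severity debug-layer message (like the one above) is logged.
ComPtr<ID3D12InfoQueue> infoQueue;
if (SUCCEEDED(device->QueryInterface(IID_PPV_ARGS(&infoQueue))))
{
    infoQueue->SetBreakOnSeverity(D3D12_MESSAGE_SEVERITY_ERROR, TRUE);
}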
Since the reported error mentions an unparseable input signature, I'll include my input element descriptor as well. I'm using interleaved vertex data with just positions and colors.
// Define the vertex input layout.
D3D12_INPUT_ELEMENT_DESC inputElementDescriptor[2];
// Positions
inputElementDescriptor[0].SemanticName = "POSITION";
inputElementDescriptor[0].SemanticIndex = 0;
inputElementDescriptor[0].Format = DXGI_FORMAT_R32G32B32_FLOAT;
inputElementDescriptor[0].InputSlot = 0;
inputElementDescriptor[0].AlignedByteOffset = 0;
inputElementDescriptor[0].InputSlotClass = D3D12_INPUT_CLASSIFICATION_PER_VERTEX_DATA;
inputElementDescriptor[0].InstanceDataStepRate = 0;
// Colors
inputElementDescriptor[1].SemanticName = "COLOR";
inputElementDescriptor[1].SemanticIndex = 0;
inputElementDescriptor[1].Format = DXGI_FORMAT_R32G32B32_FLOAT;
inputElementDescriptor[1].InputSlot = 0;
inputElementDescriptor[1].AlignedByteOffset = sizeof(float) * 3;
inputElementDescriptor[1].InputSlotClass = D3D12_INPUT_CLASSIFICATION_PER_VERTEX_DATA;
inputElementDescriptor[1].InstanceDataStepRate = 0;
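To see whether D3D can parse the vertex shader's encoded input signature at all (i.e., whether the problem is in the bytecode rather than in this descriptor array), the loaded blob can be reflected; a minimal sketch, assuming d3dcompiler.lib is linked:

#include <d3dcompiler.h>   // D3DReflect
#include <d3d12shader.h>   // ID3D12ShaderReflection

// If D3DReflect fails on the blob, the container itself is unparseable
// and the input layout array is not the culprit.
ComPtr<ID3D12ShaderReflection> vsReflection;
ThrowIfFailed(D3DReflect(vertexShaderData, vertexShaderDataLength,
                         IID_PPV_ARGS(&vsReflection)));

D3D12_SHADER_DESC shaderDesc = {};
ThrowIfFailed(vsReflection->GetDesc(&shaderDesc));
for (UINT i = 0; i < shaderDesc.InputParameters; ++i)
{
    D3D12_SIGNATURE_PARAMETER_DESC param = {};
    ThrowIfFailed(vsReflection->GetInputParameterDesc(i, &param));
    OutputDebugStringA(param.SemanticName);   // expect POSITION, then COLOR
    OutputDebugStringA("\n");
}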
The really odd part is that my program runs without error when I combine the vertex and pixel shaders into a single file, "shaders.hlsl", and compile them at runtime (note that this path targets vs_5_0/ps_5_0, whereas the precompiled .cso files were built with Shader Model 5.1):
ComPtr<ID3DBlob> vertexShader;
ComPtr<ID3DBlob> pixelShader;

// Compile vertex shader
ThrowIfFailed(
    D3DCompileFromFile(
        GetAssetFullPath(L"shaders.hlsl").c_str(),
        nullptr, nullptr, "VSMain", "vs_5_0",
        compileFlags, 0, &vertexShader, nullptr
    )
);

// Compile pixel shader
ThrowIfFailed(
    D3DCompileFromFile(
        GetAssetFullPath(L"shaders.hlsl").c_str(),
        nullptr, nullptr, "PSMain", "ps_5_0",
        compileFlags, 0, &pixelShader, nullptr
    )
);
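An aside on the runtime path: I pass nullptr for the error blob above; capturing it instead surfaces the compiler's diagnostics when a compile fails. The same vertex-shader call with the blob captured (sketch):

// Same call as above, but capturing diagnostics: on failure the error
// blob holds the compiler's message text.
ComPtr<ID3DBlob> errors;
HRESULT hr = D3DCompileFromFile(
    GetAssetFullPath(L"shaders.hlsl").c_str(),
    nullptr, nullptr, "VSMain", "vs_5_0",
    compileFlags, 0, &vertexShader, &errors
);
if (FAILED(hr) && errors)
{
    OutputDebugStringA(static_cast<const char*>(errors->GetBufferPointer()));
}
ThrowIfFailed(hr);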