Diffuse Light

Hello,

Recently I've continued working on my own framework and tried implementing a diffuse lighting shader.
First I created a shader that only takes a POSITION semantic, and after that one that takes POSITION and TEXCOORD semantics.
But now that I'm using POSITION, TEXCOORD and NORMAL, my cube turns into something like this.
I've looked at the vertex struct and the input layout, and there doesn't seem to be anything wrong with them.

//VERTEX STRUCT
//-------------------------------------------------------------------------------------
struct VertexPosTexNorm : public VertexPos
{
public:
    // (-1,-1,-1) / (-1,-1) act as "not set" markers for missing attributes.
    VertexPosTexNorm()
        : VertexPos()
        , TexCoord(0, 0)
        , Normal(-1, -1, -1)
    {
    }

    VertexPosTexNorm(const XMFLOAT3& p, const XMFLOAT3& n, const XMFLOAT2& uv)
        : VertexPos(p)
        , TexCoord(uv)
        , Normal(n)
    {
    }

    VertexPosTexNorm(float px, float py, float pz, float u, float v, float nx, float ny, float nz)
        : VertexPos(px, py, pz)
        , TexCoord(u, v)
        , Normal(nx, ny, nz)
    {
    }

    VertexPosTexNorm(const VertexPosTexNorm& v)
        : VertexPos(v.Position)
        , TexCoord(v.TexCoord)
        , Normal(v.Normal)
    {
    }

    // Promote a position-only vertex; texcoord and normal are left unset.
    VertexPosTexNorm(const VertexPos& v)
        : VertexPos(v.Position)
        , TexCoord(-1, -1)
        , Normal(-1, -1, -1)
    {
    }

    // Promote a position+texcoord vertex; the normal is left unset.
    VertexPosTexNorm(const VertexPosTex& v)
        : VertexPos(v)
        , TexCoord(v.TexCoord)
        , Normal(-1, -1, -1)
    {
    }

    bool operator==(const VertexPosTexNorm& rhs) const
    {
        return XMFloat3Compare(Position, rhs.Position)
            && XMFloat3Compare(Normal, rhs.Normal)
            && XMFloat2Compare(TexCoord, rhs.TexCoord);
    }

    // Member order must match the input layout: Position (base), TexCoord, Normal.
    XMFLOAT2 TexCoord;
    XMFLOAT3 Normal;
};
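(XMFloat3Compare and XMFloat2Compare aren't shown above; they're plain component-wise equality helpers, something like:)

// Assumed implementation of the comparison helpers used in operator==
inline bool XMFloat2Compare(const XMFLOAT2& a, const XMFLOAT2& b)
{
    return a.x == b.x && a.y == b.y;
}

inline bool XMFloat3Compare(const XMFLOAT3& a, const XMFLOAT3& b)
{
    return a.x == b.x && a.y == b.y && a.z == b.z;
}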
 
//VERTEX BUFFERS
//-------------------------------------------------------------------------------------
 
HRESULT hr = S_OK;
 
D3D11_BUFFER_DESC bdVertex;
ZeroMemory(&bdVertex, sizeof(bdVertex));
 
// Create vertex buffer
bdVertex.Usage = D3D11_USAGE_DEFAULT;
bdVertex.ByteWidth = static_cast<UINT>(sizeof(VertexPosTexNorm) * _vertexVec.size());
bdVertex.BindFlags = D3D11_BIND_VERTEX_BUFFER;
bdVertex.CPUAccessFlags = 0;
bdVertex.MiscFlags = 0;
bdVertex.StructureByteStride = 0;
D3D11_SUBRESOURCE_DATA initDataVertex;
ZeroMemory(&initDataVertex, sizeof(initDataVertex));
initDataVertex.pSysMem = _vertexVec.data();
 
hr = GRAPHICSDEVICEMANAGER->GetGraphicsDevice3D()->GetDevice()->CreateBuffer(
    &bdVertex, &initDataVertex, &package.vertexBuffer);
if (FAILED(hr))
    return hr;
 
D3D11_BUFFER_DESC bdIndex;
ZeroMemory(&bdIndex, sizeof(bdIndex));
 
bdIndex.Usage = D3D11_USAGE_DEFAULT;
bdIndex.ByteWidth = static_cast<UINT>(sizeof(DWORD) * _indexVec.size());
bdIndex.BindFlags = D3D11_BIND_INDEX_BUFFER;
bdIndex.CPUAccessFlags = 0;
bdIndex.MiscFlags = 0;
bdIndex.StructureByteStride = 0;
D3D11_SUBRESOURCE_DATA initDataIndex;
ZeroMemory(&initDataIndex, sizeof(initDataIndex));
initDataIndex.pSysMem = _indexVec.data();
 
hr = GRAPHICSDEVICEMANAGER->GetGraphicsDevice3D()->GetDevice()->CreateBuffer(
    &bdIndex, &initDataIndex, &package.indexBuffer);
if (FAILED(hr))
    return hr;
 
return hr;
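As a quick sanity check on the stride (assuming VertexPos only holds the XMFLOAT3 Position), the whole vertex should come out to exactly 32 bytes:

// Position (12) + TexCoord (8) + Normal (12) = 32 bytes, no padding expected.
// If this fires, the input layout offsets won't match the buffer contents.
static_assert(sizeof(VertexPosTexNorm) == 32, "unexpected vertex stride");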
 
//INPUT LAYOUT
//-------------------------------------------------------------------------------------

D3D11_INPUT_ELEMENT_DESC polygonLayout[3];
unsigned int numElements;
 
// Create the vertex input layout description.
// This setup needs to match the VertexType structure in the ModelClass and in the shader.
polygonLayout[0].SemanticName = "POSITION";
polygonLayout[0].SemanticIndex = 0;
polygonLayout[0].Format = DXGI_FORMAT_R32G32B32_FLOAT;
polygonLayout[0].InputSlot = 0;
polygonLayout[0].AlignedByteOffset = 0;
polygonLayout[0].InputSlotClass = D3D11_INPUT_PER_VERTEX_DATA;
polygonLayout[0].InstanceDataStepRate = 0;
 
polygonLayout[1].SemanticName = "TEXCOORD";
polygonLayout[1].SemanticIndex = 0;
polygonLayout[1].Format = DXGI_FORMAT_R32G32_FLOAT;
polygonLayout[1].InputSlot = 0;
polygonLayout[1].AlignedByteOffset = D3D11_APPEND_ALIGNED_ELEMENT;
polygonLayout[1].InputSlotClass = D3D11_INPUT_PER_VERTEX_DATA;
polygonLayout[1].InstanceDataStepRate = 0;
 
polygonLayout[2].SemanticName = "NORMAL";
polygonLayout[2].SemanticIndex = 0;
polygonLayout[2].Format = DXGI_FORMAT_R32G32B32_FLOAT;
polygonLayout[2].InputSlot = 0;
polygonLayout[2].AlignedByteOffset = D3D11_APPEND_ALIGNED_ELEMENT;
polygonLayout[2].InputSlotClass = D3D11_INPUT_PER_VERTEX_DATA;
polygonLayout[2].InstanceDataStepRate = 0;
 
// Get a count of the elements in the layout.
numElements = sizeof(polygonLayout) / sizeof(polygonLayout[0]);
 
// Create the input layout
hr = GRAPHICSDEVICEMANAGER->GetGraphicsDevice3D()->GetDevice()->CreateInputLayout(
    polygonLayout, numElements,
    this->m_MaterialData->pVSBlob->GetBufferPointer(),
    this->m_MaterialData->pVSBlob->GetBufferSize(),
    &this->m_MaterialData->pInputLayout);
if (FAILED(hr))
    return hr;
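For reference, the HLSL vertex input this layout has to line up with would be declared something like this (the struct name is just a placeholder; semantics and formats have to match element for element):

// Shader-side mirror of the input layout above (sketch)
struct VS_INPUT
{
    float3 Position : POSITION;
    float2 TexCoord : TEXCOORD;
    float3 Normal   : NORMAL;
};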

I've hardcoded pretty much everything for now to narrow down where the error could be, but I still can't find it.
Does anybody have any idea what could be going on?

You are going to have better luck getting an answer to this question if you post it in the D3D technical forum.

For your convenience, moving it there now.

I took a quick look, and I didn't notice anything immediately wrong with your code. Are you definitely binding the correct input layout when you're drawing the cube? It's probably worth capturing a frame in RenderDoc or the VS graphics debugger and making sure that all of your state is correctly set up at the time of your draw call.
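In particular, double-check the stride you pass when binding the vertex buffer; it has to be sizeof(VertexPosTexNorm) now, not the old position+texcoord size. Using the names from your snippets (pDeviceContext standing in for however you reach your immediate context), the draw-time state should look something like this:

UINT stride = sizeof(VertexPosTexNorm);  // 32 bytes with the normal added
UINT offset = 0;
pDeviceContext->IASetInputLayout(m_MaterialData->pInputLayout);
pDeviceContext->IASetVertexBuffers(0, 1, &package.vertexBuffer, &stride, &offset);
pDeviceContext->IASetPrimitiveTopology(D3D11_PRIMITIVE_TOPOLOGY_TRIANGLELIST);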

Posting the code where you render would help a lot. It could be because you don't set the index format to DXGI_FORMAT_R32_UINT when binding the index buffer to the pipeline (if it's bound as R16_UINT, your 32-bit indices will be read as 16-bit values), or something else...?

Edit: I based the UINT32 assumption on this line:

sizeof(DWORD) * (int)_indexVec.size()
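Since DWORD is 32 bits, the bind call should look something like this (pDeviceContext being your immediate context):

// 32-bit indices, so the format must be DXGI_FORMAT_R32_UINT
pDeviceContext->IASetIndexBuffer(package.indexBuffer, DXGI_FORMAT_R32_UINT, 0);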

.:vinterberg:.
