Jump to content

  • Log In with Google      Sign In   
  • Create Account


#ActualJoakim1234

Posted 27 July 2012 - 07:21 PM

I'm using a little helper function to create vertex buffers:
// Creates a vertex buffer on gD3DDevice and optionally uploads initial data.
//
// buffer          - receives the created ID3D11Buffer* (caller owns the reference).
// bufferSize      - total size in bytes (vertex count * sizeof(vertex)).
// vertices        - pointer to the initial vertex data, or null for an
//                   uninitialized buffer (null is invalid with D3D11_USAGE_IMMUTABLE).
// bufferUsage     - D3D11_USAGE_* value; note DYNAMIC requires CPU write access.
// processorAccess - D3D11_CPU_ACCESS_* flags; must be consistent with bufferUsage,
//                   otherwise CreateBuffer fails with E_INVALIDARG.
void AssetManager::CreateVertexBuffer(ID3D11Buffer** buffer, unsigned int bufferSize, void* vertices, D3D11_USAGE bufferUsage, D3D11_CPU_ACCESS_FLAG processorAccess)
{
	// Value-initialize so no field is left holding stack garbage.
	D3D11_BUFFER_DESC bufferDesc = {};
	bufferDesc.Usage = bufferUsage;
	bufferDesc.ByteWidth = bufferSize;
	bufferDesc.BindFlags = D3D11_BIND_VERTEX_BUFFER;
	bufferDesc.CPUAccessFlags = processorAccess;
	bufferDesc.MiscFlags = 0;
	bufferDesc.StructureByteStride = 0; // only meaningful for structured buffers

	D3D11_SUBRESOURCE_DATA subResData = {};
	subResData.pSysMem = vertices;
	subResData.SysMemPitch = 0;      // unused for buffers
	subResData.SysMemSlicePitch = 0; // unused for buffers

	// CreateBuffer rejects a D3D11_SUBRESOURCE_DATA whose pSysMem is null with
	// E_INVALIDARG, so only pass initial data when we actually have some.
	HR(gD3DDevice->CreateBuffer(&bufferDesc, vertices ? &subResData : 0, buffer));
}

And it causes an E_INVALIDARG error, but it doesn't give a clue about which argument is the invalid one. Here is how the function is called in the program:
vertexBuffer = 0;

VertexType* vertices = new VertexType[vertexCount];;

vertices[0].position = XMFLOAT3(-1.0f, -1.0f, 0.0f); //just random values to see if it debugs at all
vertices[0].texture = XMFLOAT2(0.0f, 1.0f);
vertices[1].position = XMFLOAT3(0.0f, 1.0f, 0.0f);
vertices[1].texture = XMFLOAT2(0.5f, 0.0f);

vertices[2].position = XMFLOAT3(1.0f, -1.0f, 0.0f);
vertices[2].texture = XMFLOAT2(1.0f, 1.0f);


AssetManager->CreateVertexBuffer(&vertexBuffer,sizeof(VertexType)*vertexCount, &vertices);

Could the problem be in "bufferDesc.StructureByteStride = 0;"? Am I supposed to leave it at 0? All the tutorial project files leave it at 0 and they compile just fine. But the problem can't be in the project setup itself — I linked all the needed libraries and set the paths to the SDK.

#1Joakim1234

Posted 27 July 2012 - 06:33 PM

I'm using a little helper function to create vertex buffers:
// Helper that wraps ID3D11Device::CreateBuffer for vertex buffers.
//
// buffer          - out-parameter receiving the new ID3D11Buffer*.
// bufferSize      - buffer size in bytes.
// vertices        - initial vertex data; may be null for an empty buffer
//                   (not allowed when bufferUsage is D3D11_USAGE_IMMUTABLE).
// bufferUsage     - requested D3D11_USAGE; DYNAMIC needs CPU write access.
// processorAccess - CPU access flags; an inconsistent usage/access pair makes
//                   CreateBuffer return E_INVALIDARG.
void AssetManager::CreateVertexBuffer(ID3D11Buffer** buffer, unsigned int bufferSize, void* vertices, D3D11_USAGE bufferUsage, D3D11_CPU_ACCESS_FLAG processorAccess)
{
    // Zero-init both descriptors so every field has a defined value.
    D3D11_BUFFER_DESC bufferDesc = {};
    bufferDesc.Usage = bufferUsage;
    bufferDesc.ByteWidth = bufferSize;
    bufferDesc.BindFlags = D3D11_BIND_VERTEX_BUFFER;
    bufferDesc.CPUAccessFlags = processorAccess;
    bufferDesc.MiscFlags = 0;
    bufferDesc.StructureByteStride = 0; // irrelevant unless the buffer is structured

    D3D11_SUBRESOURCE_DATA subResData = {};
    subResData.pSysMem = vertices;
    subResData.SysMemPitch = 0;      // pitch fields are ignored for buffers
    subResData.SysMemSlicePitch = 0;

    // A non-null pInitialData with a null pSysMem is rejected with
    // E_INVALIDARG, so skip the initial-data struct when there is no data.
    HR(gD3DDevice->CreateBuffer(&bufferDesc, vertices ? &subResData : 0, buffer));
}

And it causes an E_INVALIDARG error, but it doesn't give a clue about which argument is the invalid one. Here is how the function is called in the program:
vertexBuffer = 0;

VertexType* vertices = new VertexType[vertexCount];;

vertices[0].position = XMFLOAT3(-1.0f, -1.0f, 0.0f); //just random values to see if it debugs at all
vertices[0].texture = XMFLOAT2(0.0f, 1.0f);
vertices[1].position = XMFLOAT3(0.0f, 1.0f, 0.0f);
vertices[1].texture = XMFLOAT2(0.5f, 0.0f);

vertices[2].position = XMFLOAT3(1.0f, -1.0f, 0.0f);
vertices[2].texture = XMFLOAT2(1.0f, 1.0f);


AssetManager->CreateVertexBuffer(&vertexBuffer,sizeof(VertexType)*vertexCount, &vertices);

Could the problem be in "bufferDesc.StructureByteStride = 0;"? Am I supposed to leave it at 0? All the tutorial project files leave it at 0 and they compile just fine. But the problem can't be in the project setup itself — I linked all the needed libraries and set the paths to the SDK.

PARTNERS