Consume and append buffer

Started by
5 comments, last by Dingleberry 8 years, 3 months ago

I'm trying to build an unordered list with consume and append buffers, but I'm getting all zeros back.

My shader code:


// One record of the consume (input) buffer: a single float4 per element.
struct InputData
{
 float4 data; 
};




// One record of the append (output) buffer: the computed length.
struct OutputData
{
 float data;
};




// Consume/append pair: gInput starts full (its hidden counter must be
// initialized to the element count on the CPU side), gOutput starts empty
// (counter initialized to 0). Both require UAVs created with
// D3D11_BUFFER_UAV_FLAG_APPEND — see the CPU-side view creation.
ConsumeStructuredBuffer<InputData> gInput : register(u0);
AppendStructuredBuffer<OutputData> gOutput : register(u1);




// Each thread pops one element, computes the vector length, and pushes the
// result. NOTE(review): nothing bounds-checks the consume — this assumes the
// dispatch size (32 threads here) exactly matches the number of elements.
[numthreads(32, 1, 1)]
void CS(int3 dtid : SV_DispatchThreadID)
{
 OutputData output;
 InputData input = gInput.Consume(); 
 float len = length(input.data);
 output.data = len;
 gOutput.Append(output);
}

And my buffers being built


void VecAddApp::BuildBuffersAndViews()
{
	std::vector<Data> dataA(mNumElements);
	
	for(int i = 0; i < 32; i++)
	{
		dataA[i].v1 = XMFLOAT4(i, i, i,0);
		

		
	
	}

	// Create a buffer to be bound as a shader input (D3D11_BIND_SHADER_RESOURCE).
	D3D11_BUFFER_DESC inputDesc;
    inputDesc.Usage = D3D11_USAGE_DEFAULT;
    inputDesc.ByteWidth = sizeof(Data) * mNumElements;
    inputDesc.BindFlags = D3D11_BIND_SHADER_RESOURCE | D3D11_BIND_UNORDERED_ACCESS;
    inputDesc.CPUAccessFlags = 0;
	inputDesc.StructureByteStride = sizeof(Data);
    inputDesc.MiscFlags =0 ;

    D3D11_SUBRESOURCE_DATA vinitDataA;
    vinitDataA.pSysMem = &dataA[0];

	ID3D11Buffer* bufferA = 0;
    HR(md3dDevice->CreateBuffer(&inputDesc, &vinitDataA, &bufferA));




	// Create a read-write buffer the compute shader can write to (D3D11_BIND_UNORDERED_ACCESS).
	D3D11_BUFFER_DESC outputDesc;
    outputDesc.Usage = D3D11_USAGE_DEFAULT;
    outputDesc.ByteWidth = sizeof(Data) * mNumElements;
    outputDesc.BindFlags = D3D11_BIND_SHADER_RESOURCE | D3D11_BIND_UNORDERED_ACCESS;
    outputDesc.CPUAccessFlags = 0;
	outputDesc.StructureByteStride = 0;
    outputDesc.MiscFlags = 0;

    HR(md3dDevice->CreateBuffer(&outputDesc, 0, &mOutputBuffer));

	// Create a system memory version of the buffer to read the results back from.
	outputDesc.Usage = D3D11_USAGE_STAGING;
	outputDesc.BindFlags = 0;
	outputDesc.CPUAccessFlags = D3D11_CPU_ACCESS_READ;
	HR(md3dDevice->CreateBuffer(&outputDesc, 0, &mOutputDebugBuffer));


	



	D3D11_UNORDERED_ACCESS_VIEW_DESC uavDesc2;
	uavDesc2.Format =DXGI_FORMAT_R32G32B32A32_FLOAT;
	uavDesc2.ViewDimension = D3D11_UAV_DIMENSION_BUFFER;
	uavDesc2.Buffer.FirstElement = 0;
	uavDesc2.Buffer.Flags = 0;
	uavDesc2.Buffer.NumElements = mNumElements;

    md3dDevice->CreateUnorderedAccessView(bufferA, &uavDesc2, &mInputASRV);
	


	D3D11_UNORDERED_ACCESS_VIEW_DESC uavDesc;
	uavDesc.Format =DXGI_FORMAT_R32G32B32A32_FLOAT;
	uavDesc.ViewDimension = D3D11_UAV_DIMENSION_BUFFER;
	uavDesc.Buffer.FirstElement = 0;
	uavDesc.Buffer.Flags = 0;
	uavDesc.Buffer.NumElements = mNumElements;

	md3dDevice->CreateUnorderedAccessView(mOutputBuffer, &uavDesc, &mOutputUAV);


	// Views hold references to buffers, so we can release these.
	ReleaseCOM(bufferA);
	
}

Does anyone see any problems here?

Advertisement
Hey! I never used those structured buffers but just to make sure, have you followed restrictions stated in this link

https://msdn.microsoft.com/en-us/library/windows/desktop/ff471459(v=vs.85).aspx

There's no sign of D3D11_BUFFER_UAV_FLAG_APPEND or D3D11_BUFFER_UAV_FLAG_COUNTER in your C++, so it won't work in its current state.

Adam Miles - Principal Software Development Engineer - Microsoft Xbox Advanced Technology Group

OK, I've got D3D11_BUFFER_UAV_FLAG_APPEND in there now.


void VecAddApp::BuildBuffersAndViews()
{
	std::vector<Data> dataA(mNumElements);
	std::vector<Data> dataB(mNumElements);
	for(int i = 0; i < mNumElements; ++i)
	{
		dataA[i].v1 = XMFLOAT4(i, i, i,0);
		

		
	}

	// Create a buffer to be bound as a shader input (D3D11_BIND_SHADER_RESOURCE).
	// Create a buffer to be bound as a shader input
	D3D11_BUFFER_DESC inputDesc;
	inputDesc.Usage = D3D11_USAGE_DEFAULT;
	inputDesc.ByteWidth = sizeof(dataA) * mNumElements;
	inputDesc.BindFlags = D3D11_BIND_SHADER_RESOURCE | D3D11_BIND_UNORDERED_ACCESS;
	inputDesc.CPUAccessFlags = 0;
	inputDesc.StructureByteStride = sizeof(dataA);
	inputDesc.MiscFlags = D3D11_RESOURCE_MISC_BUFFER_STRUCTURED;

	D3D11_SUBRESOURCE_DATA vinitInputData;
	vinitInputData.pSysMem = &dataA[0];

        ID3D11Buffer* inputBuffer = 0;
	HR(md3dDevice->CreateBuffer(&inputDesc, &vinitInputData, &inputBuffer));

       // Create a buffer the compute shader can write to 
	D3D11_BUFFER_DESC outputDesc;
	outputDesc.Usage = D3D11_USAGE_DEFAULT;
	outputDesc.ByteWidth = sizeof(Data) * mNumElements;
	outputDesc.BindFlags = D3D11_BIND_SHADER_RESOURCE | D3D11_BIND_UNORDERED_ACCESS;
	outputDesc.CPUAccessFlags = 0;
	outputDesc.StructureByteStride = sizeof(Data);
	outputDesc.MiscFlags = D3D11_RESOURCE_MISC_BUFFER_STRUCTURED;

	HR(md3dDevice->CreateBuffer(&outputDesc, 0, &mOutputBuffer));

	// Create a system memory version of the buffer to read the results back from.
	outputDesc.Usage = D3D11_USAGE_STAGING;
	outputDesc.BindFlags = 0;
	outputDesc.CPUAccessFlags = D3D11_CPU_ACCESS_READ;
	HR(md3dDevice->CreateBuffer(&outputDesc, 0, &mOutputDebugBuffer));

        D3D11_UNORDERED_ACCESS_VIEW_DESC uavDesc;
	uavDesc.Format = DXGI_FORMAT_UNKNOWN;
	uavDesc.ViewDimension = D3D11_UAV_DIMENSION_BUFFER;
	uavDesc.Buffer.FirstElement = 0;
	uavDesc.Buffer.Flags =  D3D11_BUFFER_UAV_FLAG_APPEND;
	uavDesc.Buffer.NumElements = mNumElements;

		
	HR(md3dDevice->CreateUnorderedAccessView(inputBuffer, &uavDesc, &mInputAUAV));	
	HR(md3dDevice->CreateUnorderedAccessView(mOutputBuffer, &uavDesc, &mOutputUAV));
}

The shader simply takes an input vector and computes its length, but I get all zeros.

Here is the additional code:


void VecAddApp::DoComputeWork()
{
	D3DX11_TECHNIQUE_DESC techDesc;

	Effects::VecAddFX->SetInputA(mInputAUAV);
		Effects::VecAddFX->SetOutput(mOutputUAV);

	Effects::VecAddFX->VecAddTech->GetDesc( &techDesc );
	for(UINT p = 0; p < techDesc.Passes; ++p)
	{
		ID3DX11EffectPass* pass = Effects::VecAddFX->VecAddTech->GetPassByIndex(p);
		pass->Apply(0, md3dImmediateContext);

		md3dImmediateContext->Dispatch(1, 1, 1);
	}

	// Unbind the input textures from the CS for good housekeeping.
	ID3D11ShaderResourceView* nullSRV[1] = { 0 };
	md3dImmediateContext->CSSetShaderResources( 0, 1, nullSRV );

	// Unbind output from compute shader (we are going to use this output as an input in the next pass, 
	// and a resource cannot be both an output and input at the same time.
	UINT counters[2] = {mNumElements,0};
ID3D11UnorderedAccessView* uavs[2] = { mInputAUAV, mOutputUAV };
md3dImmediateContext->CSSetUnorderedAccessViews(0, 2, uavs, counters);

	// Disable compute shader.
	md3dImmediateContext->CSSetShader(0, 0, 0);

	std::ofstream fout("results.txt");

	// Copy the output buffer to system memory.
	md3dImmediateContext->CopyResource(mOutputDebugBuffer, mOutputBuffer);

	// Map the data for reading.
	D3D11_MAPPED_SUBRESOURCE mappedData; 
    md3dImmediateContext->Map(mOutputDebugBuffer, 0, D3D11_MAP_READ, 0, &mappedData);

	Data* dataView = reinterpret_cast<Data*>(mappedData.pData);

	for(int i = 0; i < mNumElements; ++i)
	{
		fout << dataView[i].v1.x << std::endl;
	}

    md3dImmediateContext->Unmap(mOutputDebugBuffer, 0);

	fout.close();
}

I also tried setting the counter in CSSetUnorderedAccessViews, and using CopyStructureCount.


What does RenderDoc or Visual Studio tell you?
What does the DirectX debug runtime say?


L. Spiro

I restore Nintendo 64 video-game OST’s into HD! https://www.youtube.com/channel/UCCtX_wedtZ5BoyQBXEhnVZw/playlists?view=1&sort=lad&flow=grid

The DirectX debug runtime would have caught the original flags problem -- definitely keep it on during development.

This topic is closed to new replies.

Advertisement