Hey again,
Thanks for the responses, I have created my quad class, along with a vertex shader to go with it. Here's my class:
// One vertex of the full-screen quad: a pre-transformed clip-space position
// (w = 1 so a pass-through vertex shader can emit it unchanged) plus a texture
// coordinate. Field order and sizes must match the D3DVERTEXELEMENT9 layout
// built in ScreenQuad::CreateResources (FLOAT4 at offset 0, FLOAT2 at offset 16).
struct ScreenQuadVertex
{
D3DXVECTOR4 pos; // clip-space position: x, y in [-1, 1], z = 0, w = 1
D3DXVECTOR2 texCoords; // texture coordinate — in Direct3D, (0, 0) is the TOP-left of the texture
};
class ScreenQuad
{
private:
LPDIRECT3DVERTEXBUFFER9 vertexBuffer;
IDirect3DVertexDeclaration9* vertexDecleration;
public:
ScreenQuad(){};
bool CreateResources()
{
ScreenQuadVertex vertices[] =
{
{D3DXVECTOR4(-1, 1, 0, 1), D3DXVECTOR2(0, 1)}, //Bottom Left
{D3DXVECTOR4(1, 1, 0, 1), D3DXVECTOR2(1, 1)}, //Bottom Right
{D3DXVECTOR4(-1, -1, 0, 1), D3DXVECTOR2(0, 0)}, //Top Left
{D3DXVECTOR4(1, -1, 0, 1), D3DXVECTOR2(1, 0)}, //Top Right
};
D3DVERTEXELEMENT9 elements[] =
{
{0, sizeof(float)*0, D3DDECLTYPE_FLOAT4, D3DDECLMETHOD_DEFAULT, D3DDECLUSAGE_POSITION, 0},
{0, sizeof(float)*4, D3DDECLTYPE_FLOAT2, D3DDECLMETHOD_DEFAULT, D3DDECLUSAGE_TEXCOORD, 0},
D3DDECL_END()
};
vertexDecleration = 0;
if (FAILED(d3ddev->CreateVertexDeclaration(elements, &vertexDecleration))){return false;}
if (FAILED(d3ddev->CreateVertexBuffer(4*sizeof(ScreenQuadVertex), 0, 0, D3DPOOL_MANAGED, &vertexBuffer, 0))){return false;}
void* pVoid;
if (FAILED(vertexBuffer->Lock(0, 0, (void**)&pVoid, 0))){return false;}
memcpy(pVoid, vertices, sizeof(vertices));
if (FAILED(vertexBuffer->Unlock())){return false;}
return true;
}
bool Render()
{
if (FAILED(d3ddev->SetVertexDeclaration(vertexDecleration))){return false;}
if (FAILED(d3ddev->SetStreamSource(0, vertexBuffer, 0, sizeof(ScreenQuadVertex)))){return false;}
if (FAILED(d3ddev->DrawPrimitive(D3DPT_TRIANGLESTRIP, 0, 2))){return false;}
return true;
}
void DeleteResources()
{
SAFE_RELEASE(&vertexBuffer);
SAFE_RELEASE(&vertexDecleration);
}
};
...and here's a simple effect file which is supposed to take a texture (which is the same dimensions as the program client area) and display it on screen:
// The scene texture to display; expected to match the back-buffer dimensions
// so that point sampling gives a 1:1 texel-to-pixel copy.
Texture ScreenTexture;

sampler ScreenTextureSampler = sampler_state
{
    texture = <ScreenTexture>;
    // Point (nearest) filtering: no blending between texels — correct for a
    // same-size copy once the quad is aligned on pixel centres.
    magfilter = POINT;
    minfilter = POINT;
    mipfilter = POINT;
    // Clamp instead of Mirror: a screen quad should never sample outside
    // [0, 1], but if rounding pushes an edge coordinate slightly out of range,
    // Clamp repeats the edge texel rather than reflecting the image.
    AddressU = Clamp;
    AddressV = Clamp;
};
// Pixel shader output: the final colour written to render target 0.
struct PixelColourOut
{
float4 Colour : COLOR0;
};
// Data interpolated from the vertex shader to the pixel shader:
// clip-space position (consumed by the rasteriser) and the quad's
// texture coordinate.
struct ScreenQuadVertexToPixel
{
float4 Position : POSITION;
float2 TexCoords : TEXCOORD0;
};
// Pass-through vertex shader: the quad's vertices are already in clip space,
// so position and texture coordinate are forwarded unchanged.
// NOTE(review): D3D9 needs a half-pixel correction for exact 1:1 texel/pixel
// mapping — either shift the quad's vertex positions by (-1/width, +1/height)
// in clip space on the CPU, or add a half-pixel uniform and apply it here.
ScreenQuadVertexToPixel ScreenQuadVertexShader(float4 inPos : POSITION, float2 inTexCoords : TEXCOORD0)
{
ScreenQuadVertexToPixel Output = (ScreenQuadVertexToPixel)0;
Output.Position = inPos;
Output.TexCoords = inTexCoords;
return Output;
}
// Samples the screen texture at the interpolated coordinate and returns it
// unmodified as the pixel colour.
PixelColourOut ScreenPixelShader(ScreenQuadVertexToPixel PSIn)
{
    PixelColourOut result;
    result.Colour = tex2D(ScreenTextureSampler, PSIn.TexCoords);
    return result;
}
// Single-pass technique: draw the screen texture onto whatever geometry is
// rendered (intended for the full-screen quad). Compiled against shader
// model 3.0 for both stages.
technique ShowTexture
{
pass Pass0
{
VertexShader = compile vs_3_0 ScreenQuadVertexShader();
PixelShader = compile ps_3_0 ScreenPixelShader();
}
}
It seems to work overall, i.e. when I send in this texture:
[attachment=29511:testtex.jpg]
I get this result when I run the program:
[attachment=29512:testtexrender.png]
There are two obvious issues. First, the image is flipped about the horizontal axis, which implies something is wrong with the texture coordinates I defined. Second, the texture looks very blocky when my program displays it. That shouldn't happen: even though the shader sampler uses no filtering, the client area and the texture have the same dimensions, so there should be a 1:1 correspondence between texels and pixels. Note that I used the AdjustWindowRect() function to make sure the client area of my window matches the texture dimensions (640x480 in this case).
Can anyone see why these things are happening from my code? I thought that HLSL used (0, 0) for the top left of the texture and (1, 1) for the bottom right, as I have coded into the four vertices of the ScreenQuad. Is this not the case?
Thanks for the help