LPVOID

Posted 08 September 2013 - 09:36 AM

Thanks for reading!

 

The G-buffer is defined like this:

R16       G16       B16       A16
colorR    colorG    colorB    alpha
norR      norG      norB      glow
posR      posG      posB      matID

 

The depth buffer (Z) format is DXGI_FORMAT_R32_TYPELESS.

Z is not computed manually; it is filled by the depth-stencil view bound together with the G-buffer: m_pDevice->OMSetRenderTargets(3, pGBuffer->ppRTV, pGBuffer->pZRT);
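
For reference, a minimal sketch of how these targets might be declared for reading in a later pass (the resource names are my own placeholders; I am assuming DXGI_FORMAT_R16G16B16A16_FLOAT for the three colour targets and an R32_FLOAT shader resource view over the typeless depth buffer):

Texture2D<float4> gG0;    // xyz = diffuse colour,      w = alpha
Texture2D<float4> gG1;    // xyz = view-space normal,   w = glow
Texture2D<float4> gG2;    // xyz = view-space position, w = material ID
Texture2D<float>  gDepth; // R32_FLOAT SRV over the R32_TYPELESS depth buffer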

 

 

The static-mesh rendering code is here:

// This code shows how a static mesh is rendered to the G-buffer.
// Rendering terrain is similar.

float4x4 gV:VIEW;
float4x4 gWV:WORLDVIEW;
float4x4 gWVP:WORLDVIEWPROJ;

Texture2D diffTex:DIFFUSETEX;
Texture2D specTex:SPECULARTEX;//not used
Texture2D glowTex:GLOWTEX;//not used
Texture2D norTex: NORTEX;//not used

// Render states
BlendState noBlend
{
    BlendEnable[0] = TRUE;   // note: despite the name, this state enables blending
};

BlendState NoBlend
{
    BlendEnable[0] = FALSE;  // blending disabled
};

DepthStencilState dsState
{
   DepthFunc = LESS_EQUAL;
   DepthEnable = TRUE;
   DepthWriteMask = 1;       // 1 = ALL (depth writes enabled)
};

RasterizerState rState
{
   FillMode = Solid;
   CullMode = 3;             // 3 = BACK
};


SamplerState defaultSam
{
    Filter = MIN_MAG_MIP_LINEAR;
    AddressU = Wrap;
    AddressV = Wrap;
};

struct VS_IN
{
   float4 posL: POSITION;
   float3 nor: NORMAL;
   float2 texCoor:TEXCOORD;
};
struct PS_IN
{
   float4 pos:SV_POSITION;
   float3 nor:NORMAL;
   float2 texCoor:TEXCOORD0;
   float4 posView:TEXCOORD1;
};

struct PS_OUTPUT
{
	float4 G0 : SV_Target0; // xyz = diffuse, w = alpha
	float4 G1 : SV_Target1; // xyz = normal in view space, w = glow
	float4 G2 : SV_Target2; // xyz = position in view space, w = material ID
};

PS_IN VS(VS_IN vIn)
{
  PS_IN pIn;
  pIn.pos=mul(vIn.posL,gWVP);
  pIn.posView=mul(vIn.posL,gWV);
// When rendering terrain I use mul(vIn.nor, (float3x3)gV) instead of gWV,
// because the terrain height comes from vertex-texture displacement
// and using gV avoids the normal being scaled.
  pIn.nor=normalize(mul(vIn.nor,(float3x3)gWV));
  pIn.texCoor=vIn.texCoor;
  return pIn;
}
PS_OUTPUT PS(PS_IN pIn)
{
   PS_OUTPUT pOut =(PS_OUTPUT) 0.0;
   float4 colorSurface = diffTex.Sample(defaultSam, pIn.texCoor);
   if (colorSurface.w < 0.5f)
   {
        clip(-1); // alpha test: discard this pixel
   }
   else
   {
       pOut.G0 = colorSurface;
       pOut.G1 = float4(pIn.nor, 0); // view-space normal
       pOut.G1.w = 0;                // glow
       pOut.G2 = pIn.posView;        // view-space position
       pOut.G2.w = 0;                // material ID
   }
   }
   return pOut;
}

technique10 DEFFERD_RENDER
{
  pass P0
 {
    SetVertexShader( CompileShader( vs_4_0, VS() ) );
    SetGeometryShader( NULL );
    SetPixelShader( CompileShader( ps_4_0, PS() ) );

    SetDepthStencilState(dsState, 0);
    SetRasterizerState(rState);
    SetBlendState(noBlend, float4( 0.0f, 0.0f, 0.0f, 0.0f ), 0xFFFFFFFF );
  }
}

In the G-buffer stage I use the .fx effect above.

In the stage of "G to backBuffer" ,I'm not using fx but the individual vertexShader and pixelShader.

 

In the second stage I found that when I use mul(gV_Invert, float4(posInView, 1.0)), the position is not transformed back to world space. Could this be the reason SSAO fails?
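
For comparison, everywhere else I keep the vector on the left-hand side of mul (e.g. mul(vIn.posL, gWVP)). A reconstruction consistent with that convention would be the sketch below (continuing the sketch above), but I am not sure my second-stage constant buffer uses the same matrix layout:

// Continuing the sketch above: view-space position back to world space,
// keeping the same row-vector convention as the G-buffer pass.
float3 posView  = gG2.Load(int3(pos.xy, 0)).xyz;
float4 posWorld = mul(float4(posView, 1.0f), gV_Invert);

// If the second-stage constant buffer is filled directly (without the
// effects framework), the matrix may also need to be transposed on the
// CPU side, or declared row_major, for this to match the first stage.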

