
qventura

Member Since 15 Jun 2011
Offline Last Active Jun 07 2012 09:22 AM

Topics I've Started

D3DImage D3D10 && Multisampling

04 April 2012 - 01:39 PM

Hi everyone,

I'm trying to implement a render engine using Direct3D 10 (SlimDX) and WPF.

I create my device and render target view with the right multisample parameters (1,0 / 2,0 and 4,0 are working):


			this.multiSamplingDescription = new SampleDescription(sampleCount, qualityLevel - 1); // 4,1

			Texture2DDescription colordesc = new Texture2DDescription();
			colordesc.BindFlags = BindFlags.RenderTarget | BindFlags.ShaderResource;
			colordesc.Format = this.renderTargetViewFormat; // Format.B8G8R8A8_UNorm
			colordesc.Width = width;
			colordesc.Height = height;
			colordesc.MipLevels = 1;
			colordesc.SampleDescription = multiSamplingDescription;
			colordesc.Usage = ResourceUsage.Default;
			colordesc.OptionFlags = ResourceOptionFlags.Shared;
			colordesc.CpuAccessFlags = CpuAccessFlags.None;
			colordesc.ArraySize = 4;
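
For context, the valid Quality value is bounded by what the device reports for the format and sample count, which is where the "qualityLevel - 1" above comes from. A minimal sketch of that query (assuming SlimDX exposes ID3D10Device::CheckMultisampleQualityLevels as Device.CheckMultisampleQualityLevels; "device" is a placeholder for my D3D10 device):

			// Sketch (untested): ask the device how many quality levels exist for 4x MSAA on this format.
			// Valid SampleDescription.Quality values are 0 .. (levels - 1).
			int sampleCount = 4;
			int qualityLevel = device.CheckMultisampleQualityLevels(Format.B8G8R8A8_UNorm, sampleCount);
			if (qualityLevel > 0)
			{
				this.multiSamplingDescription = new SampleDescription(sampleCount, qualityLevel - 1);
			}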


Then I've got a problem while trying to create the shared texture for D3DImage ...

this.Direct3D9Context = new Direct3DEx();
			this.presentParams = new PresentParameters();
			this.presentParams.Windowed = true;
			this.presentParams.SwapEffect = SwapEffect.Discard;
			this.presentParams.DeviceWindowHandle = GetDesktopWindow();
			this.presentParams.PresentationInterval = PresentInterval.Immediate;
			this.presentParams.Multisample = MultisampleType.None;
			this.presentParams.MultisampleQuality = 0;

			this.Device = new DeviceEx(this.Direct3D9Context, 0, DeviceType.Hardware, IntPtr.Zero, CreateFlags.HardwareVertexProcessing | CreateFlags.Multithreaded | CreateFlags.FpuPreserve, this.presentParams);

...



				this.presentParams.Multisample = sampleCount; // = 4
				this.presentParams.MultisampleQuality = 0;
				this.Device.Reset(this.presentParams);

...

				this.SharedTexture = new Texture(this.Device, texture.Description.Width, texture.Description.Height, 1, Usage.RenderTarget, format, Pool.Default, ref Handle);

// format = Format.A8R8G8B8
// Width = 1244 , same as colordesc
// height = 699, same as colordesc


Am I missing something?
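
For context, the usual pattern for this seems to be to keep the multisampled texture purely as a D3D10 render target and resolve it into a second, non-multisampled shared texture every frame, since (as far as I can tell) shared surfaces can't be multisampled. A rough sketch of what I mean (untested; it assumes SlimDX exposes ID3D10Device::ResolveSubresource on the D3D10 Device, and "device" / "msaaRenderTarget" are placeholder names):

			// Sketch: a single-sample copy of the MSAA render target that can actually be shared with D3D9.
			Texture2DDescription shareddesc = colordesc;                 // same width/height/format as above
			shareddesc.SampleDescription = new SampleDescription(1, 0); // shared surfaces must be single-sample
			shareddesc.ArraySize = 1;
			shareddesc.OptionFlags = ResourceOptionFlags.Shared;
			Texture2D sharedCopy = new Texture2D(device, shareddesc);

			// After rendering each frame, collapse the MSAA samples into the shareable copy.
			// Assumption: the SlimDX call is ResolveSubresource(source, srcSubresource, dest, dstSubresource, format);
			// the native API is ID3D10Device::ResolveSubresource, so the argument order should be double-checked.
			device.ResolveSubresource(msaaRenderTarget, 0, sharedCopy, 0, Format.B8G8R8A8_UNorm);

The D3D9 texture handed to D3DImage would then stay at MultisampleType.None and open the shared handle of sharedCopy instead.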

AlphaBlending and MultiPass Shader

15 March 2012 - 10:54 AM

Hi everyone,

I've got a little problem with alpha blending. My simplified scene is composed of a textured quad and an outlined polygon (that's the problem :) ).

First, I call Draw to display a textured quad.

Then I make a second call to Draw using another effect with two passes.
The first pass draws the expanded polygon with (1, 0, 0, 1) as its color.
The second pass draws the original polygon with (1, 0, 0, 0.6).

So, basically, I would like to draw a semi-transparent polygon with an opaque outline, and I want to see my texture behind it.

Is that possible?
In my second pass, can I access the pixel color from before the first pass?

Here are my shaders ...

technique PositionNormal
{
	pass Outline
	{
		VertexShader = compile vs_3_0 OutlineVS();
		PixelShader = compile ps_3_0 OutlinePS();
		AlphaBlendEnable = TRUE;
		SrcBlend = SrcAlpha;
		DestBlend = InvSrcAlpha;
		BlendOp = Add;
	}
	pass Inner
	{
		VertexShader = compile vs_3_0 VS();
		PixelShader = compile ps_3_0 PS();
		AlphaBlendEnable = TRUE;
		SrcBlend = SrcAlpha;
		DestBlend = InvDestAlpha;
		BlendOp = 4;
	}
}
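
For reference, this is the plain "over" blend I'm comparing against, where both passes use SrcAlpha / InvSrcAlpha and the opacity comes only from the alpha written by the pixel shaders (outline at 1.0, inner fill at 0.6). Just a sketch of the render states, not a claim that it solves the problem:

technique PositionNormalOver
{
	pass Outline
	{
		VertexShader = compile vs_3_0 OutlineVS();
		PixelShader = compile ps_3_0 OutlinePS();  // writes (1, 0, 0, 1)
		AlphaBlendEnable = TRUE;
		SrcBlend = SrcAlpha;
		DestBlend = InvSrcAlpha;
		BlendOp = Add;
	}
	pass Inner
	{
		VertexShader = compile vs_3_0 VS();
		PixelShader = compile ps_3_0 PS();         // writes (1, 0, 0, 0.6)
		AlphaBlendEnable = TRUE;
		SrcBlend = SrcAlpha;
		DestBlend = InvSrcAlpha;                   // instead of InvDestAlpha
		BlendOp = Add;                             // instead of 4 (D3DBLENDOP_MIN)
	}
}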

Thanks in advance

Flickering problem with SlimDX / D3D10 / WPF

10 February 2012 - 07:57 AM

Hi,
I've got a strange flickering problem in my scene.
When I move the camera (pan or zoom), the NeedToRender boolean is set to true, which calls the render function:

public bool Render()
		{
			if (isReadyToRender() && NeedToRender)
			{
				worldEffectVariable.SetMatrix(worldMatrix);
				viewEffectVariable.SetMatrix(camera.ViewMatrix);
				projectionEffectVariable.SetMatrix(camera.ProjectionMatrix);
				D3DDevice.ClearDepthStencilView(depthView, DepthStencilClearFlags.Depth | DepthStencilClearFlags.Stencil, 1.0f, 0);
				D3DDevice.ClearRenderTargetView(renderView, new Color4(new Vector4(1.0f, 0.0f, 0.0f, 1.0f)));
							foreach (IModel model in ModelsToRender)
				{
					if (model.GetType() == typeof(TexturedModel))
					{
						Matrix objectTranformMatrix = Matrix.Scaling((float)((TexturedModel)model).Width / 2.0f, (float)((TexturedModel)model).Height / 2.0f, 1.0f);
						objectTranformMatrix = Matrix.Multiply(objectTranformMatrix, Matrix.Translation((float)((TexturedModel)model).Width / 2.0f, (float)((TexturedModel)model).Height / 2.0f, 0.0f));
						objectTransformEffectVariable.SetMatrix(objectTranformMatrix);
						currentRenderer = texturedRenderer;
						texturedRenderer.GetTexture(model.Id, out currentTexture);
						textureEffectVariable.SetResource(currentTexture);
					}
					else if (model.GetType() == typeof(SphereTargetModel))
					{
						Matrix objectTranformMatrix = Matrix.Translation(((SphereTargetModel)model).CurrentPosition.X, ((SphereTargetModel)model).CurrentPosition.Y, -20.0f);
						objectTransformEffectVariable.SetMatrix(objectTranformMatrix);
						colorEffectVariable.Set(Convertor.ToVector4(((SphereTargetModel)model).Color));
						currentRenderer = sphereRenderer;
					}
					else
					{
						currentRenderer = diskRenderer;
					}
					technique = effect.GetTechniqueByName(currentRenderer.TypeName);
					pass = technique.GetPassByIndex(0);
					layout = new InputLayout(D3DDevice, pass.Description.Signature, currentRenderer.InputElements);
					D3DDevice.InputAssembler.SetInputLayout(layout);
					D3DDevice.InputAssembler.SetPrimitiveTopology(PrimitiveTopology.TriangleList);
					D3DDevice.InputAssembler.SetVertexBuffers(0, new VertexBufferBinding(currentRenderer.VerticesBuffer, currentRenderer.VertexSize, 0));
					D3DDevice.InputAssembler.SetIndexBuffer(currentRenderer.IndicesBuffer, Format.R16_UInt, 0);
				  
					for (int i = 0; i < technique.Description.PassCount; ++i)
					{
						pass.Apply();
						D3DDevice.DrawIndexed(currentRenderer.IndicesNumber, 0, 0);
					}
					layout.Dispose();
				}
				D3DDevice.Flush();
				camera.HasChanged = false;
				sceneHasChanged = false;

				renderingTime = DateTime.Now.Subtract(beginTime);
				if (renderingTime.Milliseconds != 0)
				{
					Console.WriteLine("DX10 : RenderingTime = " + renderingTime.Milliseconds + " FPS = " + (1000 / renderingTime.Milliseconds));
				}
				return true;
			}
			return false;
		}

Here is my OnRendering function, called by my WPF Control


public void OnRendering(object sender, EventArgs e)
		{
			if (RenderEngine.Render())
			{
				if (RenderEngine.BackBufferSurface != null)
				{
					D3DTarget.Invalidate();
				}
			}
		}



Invalidate is called only when something happens in my rendering engine.

D3DTarget is a "D3DImageSlimDX", and here is the Invalidate function:

public void Invalidate()
		{
			if (SharedTexture != null)
			{
				Lock();
				AddDirtyRect(new Int32Rect(0, 0, PixelWidth, PixelHeight));
				Unlock();
			}
		}
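
For comparison, the update pattern I've seen documented for D3DImage keeps SetBackBuffer and AddDirtyRect inside the same Lock/Unlock scope (a sketch only, assuming D3DImageSlimDX derives from WPF's D3DImage; "backBufferPointer" is a placeholder for the D3D9 surface pointer):

public void Invalidate(IntPtr backBufferPointer)
		{
			if (backBufferPointer == IntPtr.Zero)
				return;

			Lock();
			// SetBackBuffer only needs to be called again when the underlying D3D9 surface changes,
			// but it has to happen while the image is locked.
			SetBackBuffer(D3DResourceType.IDirect3DSurface9, backBufferPointer);
			AddDirtyRect(new Int32Rect(0, 0, PixelWidth, PixelHeight));
			Unlock();
		}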

The strange thing is that the flickering appears for every object in my scene (ModelsToRender) EXCEPT the first one.

By the way, I've got the same problem with my D3D9 engine... I think it's a DirtyRect problem, but I'm not sure!

[Edit]
Just adding my device initialization:

public void InitDevice()
	    {
		    ReleaseDevice();
		    Texture2DDescription colordesc = new Texture2DDescription();
			    colordesc.BindFlags = BindFlags.RenderTarget | BindFlags.ShaderResource;
			    colordesc.Format = Format.B8G8R8A8_UNorm;
			    colordesc.Width = ControlWidth;
			    colordesc.Height = ControlHeight;
			    colordesc.MipLevels = 1;
			    colordesc.SampleDescription = new SampleDescription(1, 0);
			    colordesc.Usage = ResourceUsage.Default;
			    colordesc.OptionFlags = ResourceOptionFlags.Shared;
			    colordesc.CpuAccessFlags = CpuAccessFlags.None;
			    colordesc.ArraySize = 1;
		    Texture2DDescription depthdesc = new Texture2DDescription();
			    depthdesc.BindFlags = BindFlags.DepthStencil;
			    depthdesc.Format = Format.D32_Float_S8X24_UInt;
			    depthdesc.Width = ControlWidth;
			    depthdesc.Height = ControlHeight;
			    depthdesc.MipLevels = 1;
			    depthdesc.SampleDescription = new SampleDescription(1, 0);
			    depthdesc.Usage = ResourceUsage.Default;
			    depthdesc.OptionFlags = ResourceOptionFlags.None;
			    depthdesc.CpuAccessFlags = CpuAccessFlags.None;
			    depthdesc.ArraySize = 1;
		    BackBufferSurface = new Texture2D(D3DDevice, colordesc);
		    DepthTexture = new Texture2D(D3DDevice, depthdesc);
		    renderView = new RenderTargetView(D3DDevice, BackBufferSurface);
		    depthView = new DepthStencilView(D3DDevice, DepthTexture);
		    D3DDevice.OutputMerger.SetTargets(depthView, renderView);
		    D3DDevice.Rasterizer.SetViewports(new Viewport(0, 0, ControlWidth, ControlHeight, 0.0f, 1.0f));
		    D3DDevice.Flush();
		    deviceReadyToRender = true;
	    }

Thanks in advance,

Q

Problem with SlimDX / DX9 / Camera

08 February 2012 - 07:57 AM

Hi,
I don't really understand how "cameras" work with D3D9

First, here is how I set my camera up:

public Camera()
		{
			this.eye = new Vector3(0.0f, 0.0f, 0.0f);
			this.lookAt = new Vector3(0.0f, 0.0f, 1.0f);
			this.up = new Vector3(0.0f, 1.0f, 0.0f);
			viewMatrix = Matrix.LookAtLH(eye, lookAt, up);
			projectionMatrix = Matrix.OrthoLH(1 * zoomLevel, 1 * zoomLevel, 0.0f, 1.0f);
		}

And my vertices :

			vertices = new VertexTexture[]
			{
				new VertexTexture()
				{
					Position =  new Vector4(0.0f, 0.0f, 0.0f, 1.0f),
					TextureCoord = new Vector2(0.0f, 1.0f)
				},
				new VertexTexture()
				{
					Position =  new Vector4(0.0f, model.Height, 0.0f, 1.0f),
					TextureCoord = new Vector2(0.0f, 0.0f)
				},
				new VertexTexture()
				{
					Position =  new Vector4(model.Width, model.Height, 0.0f, 1.0f),
					TextureCoord = new Vector2(1.0f, 0.0f)
				},
				new VertexTexture()
				{
					Position =  new Vector4(model.Width, 0.0f, 0.0f, 1.0f),
					TextureCoord = new Vector2(1.0f, 1.0f)
				}
			};

It works. I can move the camera, zoom, etc.

But the camera's properties seem weird to me! I thought it would be something like this:

public Camera()
		{
			this.eye = new Vector3(0.0f, 0.0f, 1.0f);
			this.lookAt = new Vector3(0.0f, 0.0f, 0.0f);
			this.up = new Vector3(0.0f, 1.0f, 0.0f);
			viewMatrix = Matrix.LookAtLH(eye, lookAt, up);
			projectionMatrix = Matrix.OrthoLH(1 * zoomLevel, 1 * zoomLevel, 0.1f, 100.0f);
		}

but with those parameters it doesn't work. The same happens if I change the Z coordinate of my plane (which needs to be set to 0 for it to work).

Now I'm trying to render other objects. I generate the vertices for a sphere (it works fine in D3D10: a radius-1 sphere generated around (0;0;0)), but nothing appears on the screen.

I've played with the eye and lookAt parameters but I can't figure out how to make it work, so what am I doing wrong?
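
For context, since my vertices are in pixel coordinates (0..Width, 0..Height), an off-center orthographic projection maps those coordinates onto the view volume directly. A sketch of what I mean (assuming SlimDX's Matrix.OrthoOffCenterLH; viewportWidth / viewportHeight are placeholder names):

			// Sketch (untested): camera at z = -1 looking towards +Z, ortho volume spanning the pixel-space quad.
			this.eye = new Vector3(0.0f, 0.0f, -1.0f);
			this.lookAt = new Vector3(0.0f, 0.0f, 0.0f);
			this.up = new Vector3(0.0f, 1.0f, 0.0f);
			viewMatrix = Matrix.LookAtLH(eye, lookAt, up);
			// left, right, bottom, top, znear, zfar; geometry at world z = 0 sits at view-space z = 1,
			// which is inside [0.1, 100].
			projectionMatrix = Matrix.OrthoOffCenterLH(0.0f, viewportWidth * zoomLevel,
			                                           0.0f, viewportHeight * zoomLevel,
			                                           0.1f, 100.0f);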

SlimDX / D3D9 / VertexDeclaration problem with DeclarationUsage.Position

03 February 2012 - 09:32 AM

Hi everyone,
My code below works, with DeclarationUsage.PositionTransformed.

Now I want to add a camera, so I need to use DeclarationUsage.Position. But even though I'm not doing anything (I'm not modifying the position, and I'm applying a fixed color to the pixel), nothing appears...

Does anyone see anything wrong?

My Vertex struct
	public struct VertexTexture
	{
		public Vector4 Position;
		public Vector2 TextureCoord;
	}

My rendering function
viewMatrix = camera.ViewMatrix;
				projectionMatrix = camera.ProjectionMatrix;
				D3Device.Clear(ClearFlags.Target | ClearFlags.ZBuffer, new Color4(System.Drawing.Color.Red), 1, 0);
				D3Device.BeginScene();
				D3Device.SetStreamSource(0, vertices, 0, 24);
				D3Device.VertexDeclaration = vertexDecl;
				D3Device.Indices = indices;

				int numPasses = effect.Begin();
				{
					for (int i = 0; i < numPasses; i++)
					{
						effect.BeginPass(i);
						D3Device.DrawIndexedPrimitives(PrimitiveType.TriangleList, 0, 0, /* Dynamic */6, 0, 2);
						effect.EndPass();
					}
				}
				effect.End();
				D3Device.EndScene();
				D3Device.Present();

My VertexDeclaration
vertexDecl = new VertexDeclaration(D3Device, new[]
					{
				  new VertexElement(0, 0, DeclarationType.Float4, DeclarationMethod.Default, DeclarationUsage.Position, 0),
				  new VertexElement(0, 16, DeclarationType.Float2, DeclarationMethod.Default, DeclarationUsage.TextureCoordinate, 0),
			VertexElement.VertexDeclarationEnd
				 });

Here are my shaders. I've removed everything; I just want to display my quad with a fixed color.

float4x4 WorldProjection;
texture textureToApply;
struct VS_OUT
{
	float4 Position : POSITION;
	float2 TextureCoord : TEXCOORD0;
};
struct PS_OUT
{
	float4 Color : COLOR0;	
};
VS_OUT VS( float4 Position : POSITION, float2 TextureCoord : TEXCOORD0 )
{
	VS_OUT output = (VS_OUT)0;
	output.Position = Position;
	output.TextureCoord = TextureCoord;
	return output;
}
PS_OUT PS( VS_OUT input )
{
	PS_OUT output;
	output.Color = float4(0.0f, 1.0f, 0.0f, 1.0f);
	return output;
}
technique Render
{
	pass P0
	{
  VertexShader = compile vs_3_0 VS();
  PixelShader = compile ps_3_0 PS();
	}
}
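
For reference, my understanding is that with DeclarationUsage.Position (as opposed to PositionTransformed) the vertex shader has to bring the position into clip space itself, so the minimal change would be to multiply by the WorldProjection matrix that is already declared at the top of the effect (sketch only; the matrix still has to be set from the C# side before drawing):

VS_OUT VS( float4 Position : POSITION, float2 TextureCoord : TEXCOORD0 )
{
	VS_OUT output = (VS_OUT)0;
	// Transform the untransformed POSITION input into clip space (world * view * projection combined).
	output.Position = mul(Position, WorldProjection);
	output.TextureCoord = TextureCoord;
	return output;
}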
