Yeah, seems like DirectX10 just isn't as strict.
Do you happen to know how DirectX wants its vertex and UV coordinates?
My UV coordinates are plain 0/0, 0/1, 1/0, 1/1.
I just try to render a quad...I'm sure it's another tiny error again. With RHW coordinates, the same shader works just fine. I have to supply coordinates in the range of -1/1(topleft) to 1/-1(bottomright) with 0/0 in the middle of the screen right?
Right now this produces a quad with a hugely magnified texture on it.
struct Vertex{ public Vector4 Position; public Vector2 Texture;}var vertices = new VertexBuffer(device, 4 * 24, Usage.WriteOnly,VertexFormat.Position | VertexFormat.Texture1, Pool.Managed);Effect effect = Effect.FromString(device, shaderCode, ShaderFlags.None);var effectHandle =effect.GetParameter(null, "DiffuseMap");effect.SetTexture((SlimDX.Direct3D9.EffectHandle)effectHandle,texture);device.SetStreamSource(0, vertices, 0, 24);device.VertexFormat = VertexFormat.Position | VertexFormat.Texture1;// with RHW and pixel coordinates it worked!device.BeginScene();var technique = effect.GetTechnique(0);effect.Technique = technique;effect.Begin();effect.BeginPass(0); device.DrawPrimitives(PrimitiveType.TriangleStrip, 0, 2);effect.EndPass();effect.End();
// vertices with the uv coordinates for untransformed coordinates
new Vertex() { Texture = new Vector2(0,0), Position = new Vector4(-1.0f, -1.0f, 1.0f, 0.0f) }, new Vertex() { Texture = new Vector2(0,1), Position = new Vector4(-1.0f, 1.0f, 1.0f, 0.0f) }, new Vertex() { Texture = new Vector2(1,0), Position = new Vector4(1.0f, -1.0f, 1.0f, 0.0f) }, new Vertex() { Texture = new Vector2(1,1), Position = new Vector4(1.0f, 1.0f, 1.0f, 0.0f) },