I'm delving into directx via the SharpDX wrapper for .NET, but I'm getting some unexpected results.
This is my expected result:
and here is the result I'm getting:
Here is my shader:
// Vertex shader output: clip-space position plus a per-vertex color that is
// interpolated across the triangle before reaching the pixel shader.
struct VOut
{
    float4 position : SV_POSITION;
    float4 color : COLOR;
};

// Pass-through vertex shader: positions are already in clip space, so both
// inputs are forwarded unchanged.
VOut vsMain(float4 position : POSITION, float4 color : COLOR)
{
    VOut o;
    o.position = position;
    o.color = color;
    return o;
}

// Pixel shader: writes the interpolated vertex color to the render target.
float4 psMain(VOut pInput) : SV_TARGET
{
    return pInput.color;
}
along with the Input Layout:
// Input layout matching the vsMain signature:
//   POSITION - three 32-bit floats at byte offset 0 (12 bytes); D3D expands
//              them to the shader's float4 with w = 1.
//   COLOR    - four 32-bit floats at byte offset 12 (16 bytes).
// NOTE(review): this COLOR element expects float RGBA in 0.0-1.0; the
// CPU-side vertex must therefore store a Color4 (floats), not the byte-based
// Color type (0-255 per channel) — confirm the Vertex struct matches.
private D3D11.InputElement[] inputElements = new D3D11.InputElement[]
{
new D3D11.InputElement("POSITION", 0, Format.R32G32B32_Float, 0, 0, D3D11.InputClassification.PerVertexData, 0),
new D3D11.InputElement("COLOR", 0, Format.R32G32B32A32_Float, 12, 0)
};
I'm passing the following set of vertices through the vertexBuffer
// A single triangle in normalized device coordinates (the shader applies no
// transform); all three vertices share the same color.
mesh = new NonIndexMesh() {
vertices = new List<Vertex>() {
new Vertex(new Vector3(-0.5f, 0.5f, 0.0f), Color.Red),
new Vertex(new Vector3(0.5f, 0.5f, 0.0f), Color.Red),
new Vertex(new Vector3(0.0f, -0.5f, 0.0f), Color.Red)
}
}
my Vertex type looks like this:
// One vertex as laid out in the vertex buffer: 12 bytes of position followed
// by 16 bytes of color, matching the input layout (POSITION at offset 0,
// COLOR declared as R32G32B32A32_Float at offset 12).
public struct Vertex {
    public Vector3 position;
    // Color4 stores RGBA as four floats in 0.0-1.0, which is what the
    // R32G32B32A32_Float input element and the shader's float4 expect.
    // The byte-based Color type (0-255 per channel, 4 bytes total) was being
    // reinterpreted as float data, producing the garbage vertex colors.
    public Color4 color;

    // SharpDX.Color converts implicitly to Color4, so existing call sites
    // passing e.g. Color.Red keep compiling unchanged.
    public Vertex(Vector3 pos, Color4 col) {
        position = pos;
        color = col;
    }
}
The data being passed through is correct even at runtime according to what's printed out to the Console, and the positions of each vertex being rendered seem to be correct.
What am I missing here that's causing these weird colors in my rendered triangle?
The color class you are using represents the colors as bytes. So values for RGBA range from 0 - 255. Try using the class Color4 which represents the colors as floats in the range 0.0 - 1.0. This is indeed what the shader expects.
Related
Hello I'm using c# with monogame and rendering models with hardware instancing I extract a models vertices, normal, textureCoordinate and color. When I render the model using its Texture and textureCoordinate with HLSL the model looks fine. But when I render the model only by its Color it comes up all broken and wrong.
The left model is correct this is the same 3d model but is loaded as a single 3d model without hardware instancing. This is how the colors should look I used blender to manually set these colors.
The right model is using hardware instancing, the shape looks correct but the colors are wrong.
This is how I initialize the custom vertex declaration:
// Per-vertex data for instanced rendering: 48 bytes total
// (Position 12 + Normal 12 + TextureCoordinate 8 + Color 16).
// Field declaration order defines the sequential memory layout and must
// match the byte offsets in the vertex declaration below.
public struct VertexPositionNormalTextureColor
{
    public Vector3 Position;
    public Vector3 Normal;
    public Vector2 TextureCoordinate;
    public Vector4 Color;

    // Byte offsets: Position 0, Normal 12, TextureCoordinate 24, Color 32.
    static readonly VertexDeclaration MyVertexDeclaration = new VertexDeclaration(new VertexElement[]
    {
        new VertexElement(0, VertexElementFormat.Vector3, VertexElementUsage.Position, 0),
        new VertexElement(12, VertexElementFormat.Vector3, VertexElementUsage.Normal, 0),
        new VertexElement(24, VertexElementFormat.Vector2, VertexElementUsage.TextureCoordinate, 0),
        new VertexElement(32, VertexElementFormat.Vector4, VertexElementUsage.Color, 0)
    });

    public VertexPositionNormalTextureColor(Vector3 _Position, Vector3 _Normal, Vector2 _TextureCoordinate, Vector4 _Color)
    {
        Position = _Position;
        Normal = _Normal;
        TextureCoordinate = _TextureCoordinate;
        Color = _Color;
    }

    // Exposed for VertexBuffer construction.
    public static VertexDeclaration VertexDeclaration { get { return MyVertexDeclaration; } }
}
This is how I gather the models vertices, normal, textureCoordinate and color:
// Scratch lists accumulating the per-vertex attributes for every mesh part
// of the model; recombined later into the instancing vertex buffer.
List<Vector3> vertices = new List<Vector3>();
List<Vector3> normal = new List<Vector3>();
List<Vector2> texCoord = new List<Vector2>();
List<Vector4> color = new List<Vector4>();
// Absolute bone transforms (captured here, not applied in this loop).
bones = new Matrix[myModel.Bones.Count];
myModel.CopyAbsoluteBoneTransformsTo(bones);
foreach (ModelMesh mm in myModel.Meshes)
{
foreach (ModelMeshPart mmp in mm.MeshParts)
{
// Read this part's vertices back from the GPU buffer, starting at the
// part's offset and stepping by the buffer's own stride.
// NOTE(review): GetData reinterprets the raw buffer as
// VertexPositionNormalTextureColor; if the model's real declaration has
// different elements (e.g. tangent/binormal instead of color, as found in
// the edit further down), the Color field reads garbage — confirm the
// model's vertex declaration before trusting these values.
VertexPositionNormalTextureColor[] vertexData = new
VertexPositionNormalTextureColor[mmp.NumVertices];
mmp.VertexBuffer.GetData(mmp.VertexOffset * mmp.VertexBuffer.VertexDeclaration.VertexStride,
vertexData, 0, mmp.NumVertices, mmp.VertexBuffer.VertexDeclaration.VertexStride);
for (int i = 0; i != vertexData.Length; i++)
{
vertices.Add(vertexData[i].Position);
normal.Add(vertexData[i].Normal);
texCoord.Add(vertexData[i].TextureCoordinate);
color.Add(vertexData[i].Color);
}
}
}
This is how I set the models vertices, normal, textureCoordinate, color to the vertex buffer:
// Recombine the gathered attribute lists into one interleaved vertex array
// and upload it to a static (WriteOnly) vertex buffer.
// Uses List<T>.Count (a property) rather than the LINQ Count() extension:
// same value, but no enumerator allocation per call (CA1829).
jvertices = new List<VertexPositionNormalTextureColor>(vertices.Count);
for (int i = 0; i < vertices.Count; i++)
{
jvertices.Add(new VertexPositionNormalTextureColor(vertices[i], normal[i], texCoord[i], color[i]));
}
geometryBuffer = new VertexBuffer(device, VertexPositionNormalTextureColor.VertexDeclaration, vertices.Count, BufferUsage.WriteOnly);
geometryBuffer.SetData(jvertices.ToArray());
The code for the HLSL is as below:
// Per-vertex input from the geometry stream (slot 0).
struct VertexShaderInput
{
float3 inPositionOS : SV_Position;
float3 NormalOS : NORMAL0;
float2 inTexCoords : TEXCOORD0;
float4 inColor : COLOR0;
};
// Interpolants handed to the pixel shader.
struct VertexShaderOutput
{
float4 PositionCS : SV_Position; //clip space
float4 PositionWS : POSITIONWS; //world space
float3 NormalWS : NORMAL0;
float2 inTexCoords : TEXCOORD0;
float4 inColor : COLOR0;
};
// Instancing vertex shader. The per-instance world matrix arrives as four
// float4 registers starting at TEXCOORD2 and is transposed before use
// (presumably because of how the C# side packs the matrix rows — confirm
// against the instance vertex declaration).
VertexShaderOutput InstancingVS(VertexShaderInput input, float4x4 instanceTransform : TEXCOORD2)
{
VertexShaderOutput output;
float4x4 instance = transpose(instanceTransform);
output.PositionWS = mul(float4(input.inPositionOS.xyz, 1.0f), instance);
output.PositionCS = mul(output.PositionWS, ViewProjection);
// NOTE(review): transforming the normal by the instance matrix's upper 3x3
// is only correct for uniform scale — confirm instances are not scaled
// non-uniformly.
output.NormalWS = normalize(mul(input.NormalOS, (float3x3)instance));
output.inTexCoords = input.inTexCoords;
output.inColor = input.inColor;
return output;
}
// Pixel shader: alpha-tests the vertex color (discards below 0.75 alpha via
// clip), otherwise forces the pixel fully opaque.
float4 InstancingPS(VertexShaderOutput input) : COLOR0
{
float4 color = input.inColor;
if (color.a < 0.75f) { clip(-1); return color; }
else color.a = 1;
return color;
}
If anyone notices any issue that might be the reason why Color wont work on HLSL but a texture works perfectly fine.
Edit:
I changed the order of the variables in the struct that sets the vertex declaration, and it changed the result. I then looked into what mmp.VertexBuffer.GetData() actually had stored from the model, and for some reason it doesn't contain Color.
// The model's actual vertex layout, discovered by inspecting the buffer:
// Position(0), Normal(12), TextureCoordinate(24), Tangent(32), Binormal(44).
// Notably there is no Color element, which is why reading the buffer as a
// color-carrying struct produced garbage colors.
static readonly VertexDeclaration MyVertexDeclaration = new VertexDeclaration(new VertexElement[]
{
new VertexElement(0, VertexElementFormat.Vector3, VertexElementUsage.Position, 0 ),
new VertexElement(12, VertexElementFormat.Vector3, VertexElementUsage.Normal, 0 ),
new VertexElement(24, VertexElementFormat.Vector2, VertexElementUsage.TextureCoordinate, 0 ),
new VertexElement(32, VertexElementFormat.Vector3, VertexElementUsage.Tangent, 0 ),
new VertexElement(44, VertexElementFormat.Vector3, VertexElementUsage.Binormal, 0 )
});
I followed the offsets and order of these variables in the vertex declaration struct, and it seems to be working. How do I make mmp.VertexBuffer.GetData() also gather the Color from the model? Thank you.
I'm using DirectX11 with SharpDx & WPF D3DImage in C# to render image as texture.
Previously I was updating my render target directly (which worked perfectly) rather than using a pixel shader to display the updated texture within my quad.
Now having realized my mistake i decided to use the pixel shaders so that i can implement letterboxing technique with the help of view port.
In doing so, I'm not able to figure out why my rendered texture is only a sub-region of the image, which in turn is displayed stretched.
Code Used:
a) Initialisation code
var device = this.Device;
var context = device.ImmediateContext;
// Compile both entry points from the same effects file.
byte[] fileBytes = GetShaderEffectsFileBytes();
var vertexShaderByteCode = ShaderBytecode.Compile(fileBytes, "VSMain", "vs_5_0", ShaderFlags.None, EffectFlags.None);
var vertexShader = new VertexShader(device, vertexShaderByteCode);
var pixelShaderByteCode = ShaderBytecode.Compile(fileBytes, "PSMain", "ps_5_0", ShaderFlags.None, EffectFlags.None);
var pixelShader = new PixelShader(device, pixelShaderByteCode);
// 16-byte float4 position at offset 0, float2 UV at offset 16 (24-byte stride).
layout = new InputLayout(device, vertexShaderByteCode, new[] {
new InputElement("SV_Position", 0, Format.R32G32B32A32_Float, 0, 0),
new InputElement("TEXCOORD", 0, Format.R32G32_Float, 16, 0),
});
// Write vertex data to a datastream.
// NOTE(review): the stream and buffer are sized for 6 vertices but only 4
// are written and drawn — confirm whether the extra space is intentional.
var stream = new DataStream(Utilities.SizeOf<VertexPositionTexture>() * 6, true, true);
stream.WriteRange(new[]
{
new VertexPositionTexture(
new Vector4(1, 1, 0.5f, 1.0f), // top-right in NDC (x right, y up)
new Vector2(1.0f, 0.0f)
),
new VertexPositionTexture(
new Vector4(1, -1, 0.5f, 1.0f), // bottom-right
new Vector2(1.0f, 1.0f)
),
new VertexPositionTexture(
new Vector4(-1, 1, 0.5f, 1.0f), // top-left
new Vector2(0.0f, 0.0f)
),
new VertexPositionTexture(
new Vector4(-1, -1, 0.5f, 1.0f), // bottom-left
new Vector2(0.0f, 1.0f)
),
});
stream.Position = 0;
vertices = new SharpDX.Direct3D11.Buffer(device, stream, new BufferDescription()
{
BindFlags = BindFlags.VertexBuffer,
CpuAccessFlags = CpuAccessFlags.None,
OptionFlags = ResourceOptionFlags.None,
SizeInBytes = Utilities.SizeOf<VertexPositionTexture>() * 6,
Usage = ResourceUsage.Default,
StructureByteStride = 0
});
stream.Dispose();
// Fixed pipeline state: the 4-vertex strip forms a full-screen quad.
context.InputAssembler.InputLayout = (layout);
context.InputAssembler.PrimitiveTopology = (PrimitiveTopology.TriangleStrip);
context.InputAssembler.SetVertexBuffers(0, new VertexBufferBinding(vertices, Utilities.SizeOf<VertexPositionTexture>(), 0));
context.VertexShader.Set(vertexShader);
context.GeometryShader.Set(null);
context.PixelShader.Set(pixelShader);
Device.ImmediateContext.OutputMerger.SetTargets(m_depthStencilView, m_RenderTargetView);
this.ImgSource.SetRenderTargetDX11(this.RenderTarget);
// Load the test image and bind it to the pixel shader's t0 register.
Texture2D flower = Texture2D.FromFile<Texture2D>(this.Device, "3.jpg");
var srv = new ShaderResourceView(this.Device, flower);
Device.ImmediateContext.PixelShader.SetShaderResource(0, srv);
// NOTE(review): no viewport is set anywhere in this init code — a stale or
// default viewport would explain a stretched sub-region; confirm
// context.Rasterizer.SetViewport(...) is called for the target size.
srv.Dispose();
b) VertexPositionTexture
/// <summary>
/// Vertex for the full-screen quad: a pre-transformed clip-space position
/// (float4, bytes 0-15) followed by a texture coordinate (float2, bytes
/// 16-23); 24 bytes per vertex.
/// </summary>
public struct VertexPositionTexture
{
    // Field order defines the sequential memory layout and must match the
    // InputLayout (SV_Position at offset 0, TEXCOORD at offset 16).
    public Vector4 Position;
    public Vector2 TextureUV;

    public VertexPositionTexture(Vector4 position, Vector2 textureUV)
    {
        Position = position;
        TextureUV = textureUV;
    }
}
c) Rendering Code
Device.ImmediateContext.ClearRenderTargetView(this.m_RenderTargetView, new Color4(Color.Blue.R, Color.Blue.G, Color.Blue.B, Color.Blue.A));
Device.ImmediateContext.ClearDepthStencilView(m_depthStencilView, DepthStencilClearFlags.Depth, 1.0f, 0);
Device.ImmediateContext.Draw(4, 0);
Device.ImmediateContext.Flush();
this.ImgSource.InvalidateD3DImage();
d) Shader Effects File:
// Texture and sampler bound by the application (registers t0 / s0).
Texture2D ShaderTexture : register(t0);
SamplerState Sampler : register(s0);

struct VertexShaderInput
{
    float4 Position : SV_Position;
    float2 TextureUV : TEXCOORD0;
};

struct VertexShaderOutput
{
    float4 Position : SV_Position;
    float2 TextureUV : TEXCOORD0;
};

// Pass-through vertex shader: positions are already in clip space.
VertexShaderOutput VSMain(VertexShaderInput input)
{
    VertexShaderOutput result;
    result.Position = input.Position;
    result.TextureUV = input.TextureUV;
    return result;
}

// Samples the bound texture at the interpolated UV.
float4 PSMain(VertexShaderOutput input) : SV_Target
{
    return ShaderTexture.Sample(Sampler, input.TextureUV);
}
Also below i have added a screenshot of the issue i have been having and actual image that should be rendered.
Actual image
Failed texture rendered
Any suggestions or help would be really helpful as I have researched the web and various forums but had no luck .
Thanks.
Link to Sample Test Application
Abstract and Goal:
I am trying to make a shader to perform a simple window effect for a game editor. The effect will draw a frame with a low value border color and a high value highlight. I've tried many methods, but overall I have only come up with one possible solution to achieve this using the GPU.
First I create a custom vertex type for storing the XY coordinates of a vector in screen space.
using Microsoft.Xna.Framework;
using Microsoft.Xna.Framework.Graphics;
namespace WindowsGame1 {
// Minimal vertex type carrying only a 2-D screen-space position
// (8 bytes: one Vector2 at offset 0).
public struct Vertex2D : IVertexType {
public Vector2 Position;
// Single-element declaration: Position0 as a Vector2 at byte offset 0.
public static readonly VertexDeclaration VertexDeclaration = new VertexDeclaration(new VertexElement(0, VertexElementFormat.Vector2, VertexElementUsage.Position, 0));
public Vertex2D(float x, float y) {
Position = new Vector2(x, y);
}
// Explicit interface implementation forwards to the static declaration.
VertexDeclaration IVertexType.VertexDeclaration {
get {
return VertexDeclaration;
}
}
}
}
Next I create an instance of a custom Window class. The constructor sets up the vertex buffer and sets the view, projection, and color parameters in the effect.
Here is the effect file.
// Effect parameters set from the application.
float4x4 view;
float4x4 projection;
float4 color;
// Value multipliers used to derive the border (darker) and highlight
// (lighter) colors from the fill color (the literals look like 38/103 and
// 130/103 — confirm their origin).
float shadowPercent = 0.36893203883495145631067961165049;
float highlightPercent = 1.262135922330097087378640776699;
// Render target produced by the Fill pass, sampled by the Border pass.
Texture2D targetTexture;
struct FillVertexShaderInput {
float4 position : POSITION0;
};
struct FillPixelShaderInput {
float4 position : POSITION0;
};
struct BorderPixelShaderInput {
float4 position : SV_Position;
};
// Rescales a color whose components are in 0-255 into the 0-1 range
// (component-wise division, identical for all four channels).
float4 ClampColor(float4 color) {
    return color / 255.0f;
}
// Scales every component of a fill color by `percent` and clamps the result
// to [0, 1]; used to derive the border (percent < 1) and highlight
// (percent > 1) colors. saturate(x) is exactly clamp(x, 0, 1) per component.
float4 ShiftValue(float4 color, float percent) {
    return saturate(color * percent);
}
// Transforms a window vertex through the view and projection matrices.
FillPixelShaderInput FillVertexShader(FillVertexShaderInput input) {
    FillPixelShaderInput output;
    float4 viewPosition = mul(input.position, view);
    output.position = mul(viewPosition, projection);
    return output;
}
// Fill pass: every pixel gets the flat fill color.
float4 FillPixelShader(FillPixelShaderInput input) : COLOR0 {
return color;
}
// Border pass: intended to derive shadow/highlight colors by inspecting
// neighbouring pixels; currently just returns the fill color.
float4 BorderPixelShader(BorderPixelShaderInput input) : COLOR0 {
// Get color of pixel above?
// float4 tempColor = texture.Sample(sampler, (input.position[0], input.position[1] - width));
return color;
}
technique Frame {
// Store Texture2D, sampler2D, and others to be stored between passes?
// NOTE(review): statements cannot live directly inside a technique block;
// per-pass state must come from global parameters set by the application.
/*Texture2D texture;
sampler2D sampler;
float width;
float height;
texture.GetDimensions(width, height);
color = ClampColor(color);
float4 shadowColor = ShiftValue(color, shadowPercent);
float4 highlightColor = ShiftValue(color, highlightPercent);*/
pass Fill {
VertexShader = compile vs_2_0 FillVertexShader();
PixelShader = compile ps_2_0 FillPixelShader();
}
// NOTE(review): this pass sets no VertexShader and mixes ps_4_0 with the
// vs_2_0/ps_2_0 Fill pass — confirm the targeted framework actually
// supports this combination (XNA 4.0 effects normally compile ps_2_0/3_0).
pass Border {
PixelShader = compile ps_4_0 BorderPixelShader();
}
}
I would like to be able to store the data between passes, but I don't know if that is possible so I tried storing a render target in XNA and using it as a parameter for the next pass.
Here is the Draw code of the Window.
// Renders the window fill into an off-screen target, then applies the
// effect's Border pass with that target bound as targetTexture.
// NOTE(review): a new RenderTarget2D is allocated on every call and never
// disposed — this leaks GPU memory; cache the target in a field (recreating
// only when rectangle size changes) and Dispose it when finished.
public void Draw(Game1 game) {
// rectangle is a simple window for this test.
RenderTarget2D target = new RenderTarget2D(game.GraphicsDevice, rectangle.Width, rectangle.Height);
game.GraphicsDevice.SetRenderTarget(target);
game.GraphicsDevice.BlendState = BlendState.AlphaBlend;
game.GraphicsDevice.Clear(Color.Transparent);
game.GraphicsDevice.SetVertexBuffer(vertexbuffer);
// Fill pass: a two-triangle strip covering the window rectangle.
effect.Techniques["Frame"].Passes["Fill"].Apply();
game.GraphicsDevice.DrawPrimitives(PrimitiveType.TriangleStrip, 0, 2);
game.GraphicsDevice.SetRenderTarget(null);
// Apply only sets pipeline state — the border geometry must still be drawn
// after this call for the Border pass to produce anything.
effect.Parameters["targetTexture"].SetValue(target);
effect.Techniques["Frame"].Passes["Border"].Apply();
}
If I can get the position and color of surrounding pixels of the current pixel in a pixel shader, I can determine what color to draw the pixel at the current position. The Fill works fine. I just don't know the best way to go about drawing the border shadow and highlight. Also I am having problems with the alpha blending. Everything except the window is the default dark purple color, even though I set alpha blending and clear the render target buffer to transparent.
Thanks in advance if you decide to help.
I believe it works like this:
// VPOS supplies the current pixel's screen coordinates to the pixel shader
// (a ps_3_0 feature in DX9/XNA).
PixelShaderOutput PixelShaderFunction(VertexShaderOutput input, float2 vPos : VPOS)
{
// shader code here
}
Then you can just use vPos.x and vPos.y to access the coordinates of current pixel being processed.
I'm working with model instancing in XNA 4.0, and I'm sending my model instance transformation in a parallel stream. I'm following this tutorial. However, when I take a matrix as input to my shader I get what looks like a damaged matrix, because I get strange projection results.
Does anyone know the source to the problem and why I can't pass the matrix when others suggest so?
Problem:
// Instanced vertex input. NOTE(review): a float4x4 input occupies four
// consecutive input registers (here TEXCOORD3..TEXCOORD6); this is the form
// that arrives corrupted in this setup.
struct VertexShaderInput
{
float4 Position : POSITION0;
float3 Normal : NORMAL0;
float3 UV : TEXCOORD0;
float3 Color : COLOR0;
float3 Tangent : TANGENT0;
float3 Binormal : BINORMAL0;
float4x4 World : TEXCOORD3; //Problem
};
Changing the vertex shader function to the following does not help either:
// Receiving the matrix as a second shader parameter (instead of a struct
// field) showed the same corruption.
VertexShaderOutput VertexShaderFunction(VertexShaderInput input, float4x4 World : TEXCOORD3)
{
}
This works if I build the matrix from the individual vectors, but I don't know why. Am I losing data?
// Working variant: receive the four matrix rows as separate float4 inputs
// (TEXCOORD3..TEXCOORD6) and rebuild the float4x4 inside the shader.
struct VertexShaderInput
{
float4 Position : POSITION0;
float3 Normal : NORMAL0;
float3 UV : TEXCOORD0;
float3 Color : COLOR0;
float3 Tangent : TANGENT0;
float3 Binormal : BINORMAL0;
float4 World1 : TEXCOORD3;
float4 World2 : TEXCOORD4;
float4 World3 : TEXCOORD5;
float4 World4 : TEXCOORD6;
};
Vertex format:
// Per-instance vertex data: one world matrix streamed as four float4 rows
// bound to TEXCOORD3..TEXCOORD6 (16 bytes per row, 64 bytes per instance).
internal struct InstanceDataVertex
{
    public Matrix World;

    public InstanceDataVertex(Matrix World)
    {
        this.World = World;
    }

    // Row i of the matrix lives at byte offset 16 * i.
    public static readonly VertexDeclaration VertexDeclaration = new VertexDeclaration
    (
        new VertexElement(0, VertexElementFormat.Vector4, VertexElementUsage.TextureCoordinate, 3),
        new VertexElement(16, VertexElementFormat.Vector4, VertexElementUsage.TextureCoordinate, 4),
        new VertexElement(32, VertexElementFormat.Vector4, VertexElementUsage.TextureCoordinate, 5),
        new VertexElement(48, VertexElementFormat.Vector4, VertexElementUsage.TextureCoordinate, 6)
    );
}
The input registers on the GPU are limited in size. The size of TEXCOORDn is float4 (as listed here). There are no float4x4 inputs.
Splitting your matrix across several input registers and then reconstructing it should work fine. It's just a matter of making sure the right values in your C# Matrix end up in the right places in your HLSL float4x4. I suspect the mapping is trivial, but I'm not sure.
here is code I use in my project
// Builds one renderable instance and registers its transform with the
// instancer.
public Instance(Instancer instancer, float scale, Vector3 translate, Vector3 information)
{
// Monotonic id from a static counter.
// NOTE(review): not thread-safe — fine only while instances are created on
// a single thread.
ID++;
id = ID;
this.Scale(scale);
this.Translate(translate);
this.Update(); //update the model matrix modelMatrix=scale*rotate*translate ma!
Instancer = instancer;
// Smuggles three per-instance floats inside matrix slots.
// NOTE(review): M12/M23/M34 are rotation/shear terms of a general matrix —
// this only works while instances use scale + translate only; confirm.
modelMatrix.M12 = information.X; //additional info unique for each instance
modelMatrix.M23 = information.Y;
modelMatrix.M34 = information.Z;
Instancer.instanceTransformMatrices.Add(this, ModelMatrix);
}
Model Matrix for each instance
// Per-instance stream layout: the four rows of the world matrix bound to
// BLENDWEIGHT0..BLENDWEIGHT3, 16 bytes per row (64 bytes per instance).
protected static VertexDeclaration instanceVertexDeclaration = new VertexDeclaration
(
new VertexElement(0, VertexElementFormat.Vector4, VertexElementUsage.BlendWeight, 0),
new VertexElement(16, VertexElementFormat.Vector4, VertexElementUsage.BlendWeight, 1),
new VertexElement(32, VertexElementFormat.Vector4, VertexElementUsage.BlendWeight, 2),
new VertexElement(48, VertexElementFormat.Vector4, VertexElementUsage.BlendWeight, 3)
);
List of Matrices to vbo where modelVertexBuffer is a quad or any other geometry
// Upload every instance matrix into a dynamic per-instance vertex buffer;
// SetDataOptions.Discard avoids stalling on data still in use by the GPU.
instanceVertexBuffer = new DynamicVertexBuffer(BaseClass.Device, instanceVertexDeclaration, instanceTransformMatrices.Count, BufferUsage.WriteOnly);
instanceVertexBuffer.SetData(instanceTransformMatrices.Values.ToArray(), 0, instanceTransformMatrices.Count, SetDataOptions.Discard);
Draw Function
// Bind geometry in slot 0 and per-instance matrices in slot 1 (instance
// frequency 1), then draw the indexed geometry once per instance.
BaseClass.Device.SetVertexBuffers(
new VertexBufferBinding(modelVertexBuffer, 0, 0),
new VertexBufferBinding(instanceVertexBuffer, 0, 1)
);
BaseClass.Device.Indices = indexBuffer;
// 2 primitives per instance, instanceTransformMatrices.Count instances.
BaseClass.Device.DrawInstancedPrimitives(PrimitiveType.TriangleList, 0, 0,modelVertexBuffer.VertexCount, 0,2,instanceTransformMatrices.Count);
Sample Vertex Shader
VertexOut VS(VertexIn input, float4x4 instanceTransform : BLENDWEIGHT)
{
VertexOut Out = (VertexOut)0;
float4x4 world = transpose(instanceTransform);
input.Position.xyz = float3(world._41,world._42,world._43);
I have this basic 3D application and am trying to write my own toon shader, but even if I remove the tooning part it still just stays plain dark blue when I give it a red color.
shader code :
// Vertex input: position, normal and a per-vertex color from the custom
// vertex format below.
struct VertexShaderInput
{
float4 Position : POSITION0;
float3 Normal : NORMAL0;
float4 Color : COLOR0;
};
// Interpolants: clip-space position, a scalar diffuse term used for toon
// banding, and the vertex color.
struct VertexShaderOutput
{
float4 Position : POSITION0;
float LightAmount : TEXCOORD1;
float4 Color : COLOR0;
};
// Transforms the vertex to clip space, forwards the vertex color, and
// computes a per-vertex diffuse term for the toon pixel shader.
VertexShaderOutput VertexShaderFunction(VertexShaderInput input)
{
    VertexShaderOutput output;
    float4 worldPosition = mul(input.Position, World);
    float4 viewPosition = mul(worldPosition, View);
    output.Position = mul(viewPosition, Projection);
    output.Color = input.Color;
    // Cast World explicitly: mul(float3, float4x4) only compiles through
    // implicit truncation (warning X3206) and obscures the intent.
    // NOTE(review): under non-uniform scale the normal should be transformed
    // by the inverse-transpose of World and normalized before the dot.
    float3 worldNormal = mul(input.Normal, (float3x3)World);
    output.LightAmount = dot(worldNormal, LightDirection);
    return output;
}
// Quantises the per-vertex diffuse term into discrete toon bands and
// modulates the vertex color's RGB with the selected brightness level.
float4 PixelShaderFunction(VertexShaderOutput input) : COLOR0
{
    float4 result = input.Color;
    float band;
    if (input.LightAmount > ToonThresholds[0])
    {
        band = ToonBrightnessLevels[0];
    }
    else if (input.LightAmount > ToonThresholds[1])
    {
        band = ToonBrightnessLevels[1];
    }
    else
    {
        band = ToonBrightnessLevels[2];
    }
    result.rgb *= band;
    return result;
}
Custom vertex format :
// Vertex format: position, normal, then a packed 4-byte color — 28 bytes.
// C# struct fields are laid out sequentially, so the field order MUST match
// the byte offsets declared in VertexElements below. The original order
// (Position, Color, Normal) put the color bytes where the declaration said
// Normal lives, corrupting both attributes in the shader.
public struct VertexPositionNormalColored
{
    public Vector3 Position;   // offset 0, 12 bytes
    public Vector3 Normal;     // offset 12, 12 bytes
    public Color Color;        // offset 24, 4 bytes (packed RGBA)

    // 6 floats + one 4-byte packed color = 28 bytes.
    public static int SizeInBytes = 7 * 4;

    public static VertexElement[] VertexElements = new VertexElement[]
    {
        new VertexElement(0,VertexElementFormat.Vector3,VertexElementUsage.Position,0),
        new VertexElement(12,VertexElementFormat.Vector3,VertexElementUsage.Normal,0),
        new VertexElement(24,VertexElementFormat.Color,VertexElementUsage.Color,0)
    };
}
Init of the triangle I am trying to draw :
// A red triangle whose normals all face +Z (toward the default camera);
// used to sanity-check the toon shader.
testVetices = new VertexPositionNormalColored[3];
testVetices[0].Position = new Vector3(-0.5f, -0.5f, 0f);
testVetices[0].Color = Color.Red;
testVetices[0].Normal = new Vector3(0, 0, 1);
testVetices[1].Position = new Vector3(0, 0.5f, 0f);
testVetices[1].Color = Color.Red;
testVetices[1].Normal = new Vector3(0, 0, 1);
testVetices[2].Position = new Vector3(0.5f, -0.5f, 0f);
testVetices[2].Color = Color.Red;
testVetices[2].Normal = new Vector3(0, 0, 1);
In C#, struct fields are ordered sequentially in memory. But the order of fields in your structure does not match what you have set in VertexElements.
It should be:
// Corrected layout: sequential field order now matches the declared offsets
// (Position 0, Normal 12, Color 24) — 28 bytes per vertex.
public struct VertexPositionNormalColored
{
public Vector3 Position;
public Vector3 Normal;
public Color Color; // moved after Normal so field order matches the offsets below
public static int SizeInBytes = 7 * 4; // 12 + 12 + 4 = 28 bytes
public static VertexElement[] VertexElements = new VertexElement[]
{
new VertexElement(0,VertexElementFormat.Vector3,VertexElementUsage.Position,0),
new VertexElement(12,VertexElementFormat.Vector3,VertexElementUsage.Normal,0),
new VertexElement(24,VertexElementFormat.Color,VertexElementUsage.Color,0)
};
}
(Not sure if that's the only problem in your code, but that's what stuck out.)
If you are using XNA 4.0, you might also want to have a read of this blog post.