I want to check in Unity if the device has been rotated on all of its axes.
So, I am reading the rotation of all the axes.
What should I do in order to validate, for example, that the user has "flipped" his device over the X-axis? I need to check the values and see that they pass through 0, 90, 180 and 270 degrees in a loop.
Here is part of my code:
// Runs every frame: reads the accelerometer and shows the device's tilt
// around each axis (remapped to 0..360 degrees) in the three UI labels.
void Update () {
    float ax = Input.acceleration.x;
    float ay = Input.acceleration.y;
    float az = Input.acceleration.z;
    xText.text = TiltDegrees(ay, az).ToString();
    yText.text = TiltDegrees(ax, az).ToString();
    zText.text = TiltDegrees(ax, ay).ToString();
}

// Angle of the (a, b) vector: Atan2 yields radians in (-PI, PI], which is
// converted to degrees and shifted by +180 into the 0..360 range.
float TiltDegrees(float a, float b) {
    return (Mathf.Atan2(a, b) * 180 / Mathf.PI) + 180;
}
The accelerometer only tells you if the acceleration of the device changes. So you will have values if the device started moving, or stopped moving. You can't retrieve its orientation from that.
Instead you need to use the gyroscope of the device. Most devices have one nowadays.
Fortunately, Unity supports the gyroscope through the Gyroscope class
Simply using
Input.gyro.attitude
Will give you the orientation of the device in space, in the form of a quaternion.
To check the angles, use the eulerAngles property — for instance, to check whether the device is flipped on the x-axis:
// Current device orientation converted to Euler angles (each 0..360 degrees).
Vector3 angles = Input.gyro.attitude.eulerAngles;
// "Flipped" here = the x rotation is in the back half of its range.
bool xFlipped = angles.x > 180;
Be careful, you might have to invert some values if you want to apply the rotation in Unity (because it depends on which handedness the device uses for positive rotation values, left or right).
// The gyroscope reports a right-handed rotation while Unity's coordinate
// system is left-handed; mirror the rotation by negating z and w so it can
// be applied to a Unity transform (e.g. the camera).
private static Quaternion GyroToUnity(Quaternion q)
{
    float mirroredZ = -q.z;
    float mirroredW = -q.w;
    return new Quaternion(q.x, q.y, mirroredZ, mirroredW);
}
Here is the full example from the doc (Unity version 2017.3), in case the link above is broken. It shows how to read value from the gyroscope, and apply them to an object in Unity.
// Create a cube with camera vector names on the faces.
// Allow the device to show named faces as it is oriented.
using UnityEngine;
// Unity 2017.3 documentation sample: builds a cube out of six labelled quads
// around the origin and rotates the camera from the device gyroscope, so the
// face you see matches the direction the device points.
public class ExampleScript : MonoBehaviour
{
// Faces for 6 sides of the cube
private GameObject[] quads = new GameObject[6];
// Textures for each quad, should be +X, +Y etc
// with appropriate colors, red, green, blue, etc
public Texture[] labels;
void Start()
{
// make camera solid colour and based at the origin
GetComponent<Camera>().backgroundColor = new Color(49.0f / 255.0f, 77.0f / 255.0f, 121.0f / 255.0f);
GetComponent<Camera>().transform.position = new Vector3(0, 0, 0);
GetComponent<Camera>().clearFlags = CameraClearFlags.SolidColor;
// create the six quads forming the sides of a cube
// (one template primitive is cloned per face, then destroyed below)
GameObject quad = GameObject.CreatePrimitive(PrimitiveType.Quad);
quads[0] = createQuad(quad, new Vector3(1, 0, 0), new Vector3(0, 90, 0), "plus x",
new Color(0.90f, 0.10f, 0.10f, 1), labels[0]);
quads[1] = createQuad(quad, new Vector3(0, 1, 0), new Vector3(-90, 0, 0), "plus y",
new Color(0.10f, 0.90f, 0.10f, 1), labels[1]);
quads[2] = createQuad(quad, new Vector3(0, 0, 1), new Vector3(0, 0, 0), "plus z",
new Color(0.10f, 0.10f, 0.90f, 1), labels[2]);
quads[3] = createQuad(quad, new Vector3(-1, 0, 0), new Vector3(0, -90, 0), "neg x",
new Color(0.90f, 0.50f, 0.50f, 1), labels[3]);
quads[4] = createQuad(quad, new Vector3(0, -1, 0), new Vector3(90, 0, 0), "neg y",
new Color(0.50f, 0.90f, 0.50f, 1), labels[4]);
quads[5] = createQuad(quad, new Vector3(0, 0, -1), new Vector3(0, 180, 0), "neg z",
new Color(0.50f, 0.50f, 0.90f, 1), labels[5]);
// the template quad itself is no longer needed
GameObject.Destroy(quad);
}
// make a quad for one side of the cube
// quad: template primitive to clone; pos/rot: face placement; name: object
// name in the hierarchy; col/t: tint colour and label texture for the face.
GameObject createQuad(GameObject quad, Vector3 pos, Vector3 rot, string name, Color col, Texture t)
{
Quaternion quat = Quaternion.Euler(rot);
GameObject GO = Instantiate(quad, pos, quat);
GO.name = name;
GO.GetComponent<Renderer>().material.color = col;
GO.GetComponent<Renderer>().material.mainTexture = t;
// enlarge slightly so adjacent faces meet at the cube edges
GO.transform.localScale += new Vector3(0.25f, 0.25f, 0.25f);
return GO;
}
protected void Update()
{
// re-orient the camera from the gyro every frame
GyroModifyCamera();
}
protected void OnGUI()
{
// on-screen debug readout of the raw gyro attitude
GUI.skin.label.fontSize = Screen.width / 40;
GUILayout.Label("Orientation: " + Screen.orientation);
GUILayout.Label("input.gyro.attitude: " + Input.gyro.attitude);
GUILayout.Label("iphone width/font: " + Screen.width + " : " + GUI.skin.label.fontSize);
}
/********************************************/
// The Gyroscope is right-handed. Unity is left handed.
// Make the necessary change to the camera.
void GyroModifyCamera()
{
transform.rotation = GyroToUnity(Input.gyro.attitude);
}
// Mirror the right-handed gyro rotation into Unity's left-handed space.
private static Quaternion GyroToUnity(Quaternion q)
{
return new Quaternion(q.x, q.y, -q.z, -q.w);
}
}
I am trying to render a texture on a cube. However, I am doing something wrong: I have the texture, but the coordinates look wrong and I do not know how to set them correctly. What am I missing? I think I must do something about the index buffer and UVs, but I am quite confused.
The result I get :https://www.youtube.com/watch?v=_fdJAaU81sQ
Mesh.cs
/// <summary>
/// A renderable mesh: a hard-coded unit cube stored as interleaved
/// position/colour Vector4 pairs, plus the Direct3D 11 vertex buffer used
/// to draw it. Instances register themselves in the global Meshes list.
/// </summary>
public class Mesh : IDisposable
{
public string File;
public string Name;
// Interleaved vertex data: [position, colour, position, colour, ...].
// NOTE(review): there are no texture coordinates here, which is why
// texture mapping cannot work yet — see the answer below.
public Vector4[] Vertices { get; set; }
public int VerticesCount=0;
public Vector3 Position; //BASED ON PIVOT
public Vector3 PivotPosition; //MOVE MESH BASED ON THIS POSITION
public Vector3 Rotation;
public double Weight;
public SharpDX.Direct3D11.Device d3dDevice;
public SharpDX.Direct3D11.Buffer VerticesBuffer;
public bool IsDisposed=false;
public bool IsSelected = false;
public int Triangles;
public string Texture_DiffuseMap;
/// <summary>
/// Creates the mesh and registers it with the global mesh list.
/// NOTE(review): the _vertices argument is effectively ignored — the
/// constructor always overwrites the data with the hard-coded cube below.
/// </summary>
public Mesh(string _name, Vector4[] _vertices, string _file, SharpDX.Direct3D11.Device _device, string _Texture_DiffuseMap = "")
{
// 36 vertices (6 faces x 2 triangles x 3 vertices), each position
// followed by a per-face colour.
Vertices = new[]
{
new Vector4(-1.0f, -1.0f, -1.0f, 1.0f), new Vector4(1.0f, 0.0f, 0.0f, 1.0f), // Front
new Vector4(-1.0f, 1.0f, -1.0f, 1.0f), new Vector4(1.0f, 0.0f, 0.0f, 1.0f),
new Vector4( 1.0f, 1.0f, -1.0f, 1.0f), new Vector4(1.0f, 0.0f, 0.0f, 1.0f),
new Vector4(-1.0f, -1.0f, -1.0f, 1.0f), new Vector4(1.0f, 0.0f, 0.0f, 1.0f),
new Vector4( 1.0f, 1.0f, -1.0f, 1.0f), new Vector4(1.0f, 0.0f, 0.0f, 1.0f),
new Vector4( 1.0f, -1.0f, -1.0f, 1.0f), new Vector4(1.0f, 0.0f, 0.0f, 1.0f),
new Vector4(-1.0f, -1.0f, 1.0f, 1.0f), new Vector4(0.0f, 1.0f, 0.0f, 1.0f), // BACK
new Vector4( 1.0f, 1.0f, 1.0f, 1.0f), new Vector4(0.0f, 1.0f, 0.0f, 1.0f),
new Vector4(-1.0f, 1.0f, 1.0f, 1.0f), new Vector4(0.0f, 1.0f, 0.0f, 1.0f),
new Vector4(-1.0f, -1.0f, 1.0f, 1.0f), new Vector4(0.0f, 1.0f, 0.0f, 1.0f),
new Vector4( 1.0f, -1.0f, 1.0f, 1.0f), new Vector4(0.0f, 1.0f, 0.0f, 1.0f),
new Vector4( 1.0f, 1.0f, 1.0f, 1.0f), new Vector4(0.0f, 1.0f, 0.0f, 1.0f),
new Vector4(-1.0f, 1.0f, -1.0f, 1.0f), new Vector4(0.0f, 0.0f, 1.0f, 1.0f), // Top
new Vector4(-1.0f, 1.0f, 1.0f, 1.0f), new Vector4(0.0f, 0.0f, 1.0f, 1.0f),
new Vector4( 1.0f, 1.0f, 1.0f, 1.0f), new Vector4(0.0f, 0.0f, 1.0f, 1.0f),
new Vector4(-1.0f, 1.0f, -1.0f, 1.0f), new Vector4(0.0f, 0.0f, 1.0f, 1.0f),
new Vector4( 1.0f, 1.0f, 1.0f, 1.0f), new Vector4(0.0f, 0.0f, 1.0f, 1.0f),
new Vector4( 1.0f, 1.0f, -1.0f, 1.0f), new Vector4(0.0f, 0.0f, 1.0f, 1.0f),
new Vector4(-1.0f,-1.0f, -1.0f, 1.0f), new Vector4(1.0f, 1.0f, 0.0f, 1.0f), // Bottom
new Vector4( 1.0f,-1.0f, 1.0f, 1.0f), new Vector4(1.0f, 1.0f, 0.0f, 1.0f),
new Vector4(-1.0f,-1.0f, 1.0f, 1.0f), new Vector4(1.0f, 1.0f, 0.0f, 1.0f),
new Vector4(-1.0f,-1.0f, -1.0f, 1.0f), new Vector4(1.0f, 1.0f, 0.0f, 1.0f),
new Vector4( 1.0f,-1.0f, -1.0f, 1.0f), new Vector4(1.0f, 1.0f, 0.0f, 1.0f),
new Vector4( 1.0f,-1.0f, 1.0f, 1.0f), new Vector4(1.0f, 1.0f, 0.0f, 1.0f),
new Vector4(-1.0f, -1.0f, -1.0f, 1.0f), new Vector4(1.0f, 0.0f, 1.0f, 1.0f), // Left
new Vector4(-1.0f, -1.0f, 1.0f, 1.0f), new Vector4(1.0f, 0.0f, 1.0f, 1.0f),
new Vector4(-1.0f, 1.0f, 1.0f, 1.0f), new Vector4(1.0f, 0.0f, 1.0f, 1.0f),
new Vector4(-1.0f, -1.0f, -1.0f, 1.0f), new Vector4(1.0f, 0.0f, 1.0f, 1.0f),
new Vector4(-1.0f, 1.0f, 1.0f, 1.0f), new Vector4(1.0f, 0.0f, 1.0f, 1.0f),
new Vector4(-1.0f, 1.0f, -1.0f, 1.0f), new Vector4(1.0f, 0.0f, 1.0f, 1.0f),
new Vector4( 1.0f, -1.0f, -1.0f, 1.0f), new Vector4(0.0f, 1.0f, 1.0f, 1.0f), // Right
new Vector4( 1.0f, 1.0f, 1.0f, 1.0f), new Vector4(0.0f, 1.0f, 1.0f, 1.0f),
new Vector4( 1.0f, -1.0f, 1.0f, 1.0f), new Vector4(0.0f, 1.0f, 1.0f, 1.0f),
new Vector4( 1.0f, -1.0f, -1.0f, 1.0f), new Vector4(0.0f, 1.0f, 1.0f, 1.0f),
new Vector4( 1.0f, 1.0f, -1.0f, 1.0f), new Vector4(0.0f, 1.0f, 1.0f, 1.0f),
new Vector4( 1.0f, 1.0f, 1.0f, 1.0f), new Vector4(0.0f, 1.0f, 1.0f, 1.0f),
};
Texture_DiffuseMap = _Texture_DiffuseMap;
d3dDevice = _device;
// Length instead of LINQ Count(): same value, no enumerator allocation.
VerticesCount = Vertices.Length;
Name = _name;
File = _file;
Meshes.Add(this);
}
// Other functions go here...
public void Dispose()
{
Dispose(true);
GC.SuppressFinalize(this);
}
/// <summary>
/// Standard dispose pattern. FIX: the original called the public
/// Dispose() from inside the disposing branch, which called back into
/// Dispose(true) and recursed until a StackOverflowException. It also
/// disposed VerticesBuffer unconditionally (NullReferenceException when
/// the buffer was never created) and had no double-dispose guard.
/// </summary>
protected virtual void Dispose(bool disposing)
{
if (IsDisposed)
return;
if (disposing)
{
// Free managed resources.
VerticesBuffer?.Dispose();
VerticesBuffer = null;
}
// free native resources if there are any.
IsDisposed = true;
}
/// <summary>
/// Issues the draw call for this mesh. The vertex buffer is disposed at
/// the end because the render loop recreates it every frame (see Main).
/// </summary>
public void Render()
{
d3dDevice.ImmediateContext.InputAssembler.PrimitiveTopology = PrimitiveTopology.TriangleList;
// Stride is two Vector4s per vertex: position + colour.
d3dDevice.ImmediateContext.InputAssembler.SetVertexBuffers(0, new VertexBufferBinding(VerticesBuffer, Utilities.SizeOf<Vector4>() * 2, 0));
d3dDevice.ImmediateContext.Draw(VerticesCount,0);
VerticesBuffer.Dispose();
}
}
Program.cs
[STAThread]
private static void Main()
{
// Everything runs on a dedicated thread: create the window, build the
// D3D11 pipeline, run the render loop until the form closes, then tear
// the resources down in reverse order.
new Thread(new ThreadStart(() =>
{
DisposeCollector DC=new DisposeCollector();
var form = new RenderForm(Globals.Window_Title) { Width = Globals.Window_Size.Width, Height = Globals.Window_Size.Height, AllowUserResizing = false, MinimizeBox = false };
InputHandler IHandler = new InputHandler(form);
// 8x MSAA; reused by both the swap chain and the depth buffer so the
// sample counts match.
SampleDescription SamplerDesc = new SampleDescription(8, 0);
// SwapChain description
var desc = new SwapChainDescription()
{
BufferCount = 2,
ModeDescription = new ModeDescription(form.ClientSize.Width, form.ClientSize.Height, new Rational(60, 1), Format.R8G8B8A8_UNorm),
IsWindowed = true,
OutputHandle = form.Handle,
SampleDescription = SamplerDesc,
SwapEffect = SwapEffect.Discard,
Usage = Usage.RenderTargetOutput
};
// Wrap addressing in all directions with trilinear filtering.
var samplerStateDescription = new SamplerStateDescription
{
AddressU = TextureAddressMode.Wrap,
AddressV = TextureAddressMode.Wrap,
AddressW = TextureAddressMode.Wrap,
Filter = Filter.MinMagMipLinear
};
var rasterizerStateDescription = RasterizerStateDescription.Default();
rasterizerStateDescription.IsFrontCounterClockwise = true;
// Used for debugging dispose object references
Configuration.EnableObjectTracking = true;
// Disable throws on shader compilation errors
Configuration.ThrowOnShaderCompileError = false;
SharpDX.DXGI.Factory factory = new SharpDX.DXGI.Factory1();
// NOTE(review): adapter index 1 is hard-coded; on single-GPU machines
// this throws — consider GetAdapter(0) or selecting from the list below.
SharpDX.DXGI.Adapter adapter = factory.GetAdapter(1);
Adapter[] availableAdapters = factory.Adapters;
foreach(Adapter _adapter in availableAdapters)
{
Console.WriteLine(_adapter.Description.Description);
}
// Create Device and SwapChain
Device device;
SwapChain swapChain;
Device.CreateWithSwapChain(adapter, DeviceCreationFlags.SingleThreaded, desc, out device, out swapChain);
var context = device.ImmediateContext;
//factory.MakeWindowAssociation(form.Handle, WindowAssociationFlags.IgnoreAll);
// Compile Vertex and Pixel shaders
var vertexShaderByteCode = ShaderBytecode.CompileFromFile("MiniCube.hlsl", "VS", "vs_5_0", ShaderFlags.Debug);
var vertexShader = new VertexShader(device, vertexShaderByteCode);
var pixelShaderByteCode = ShaderBytecode.CompileFromFile("MiniCube.hlsl", "PS", "ps_5_0", ShaderFlags.Debug);
var pixelShader = new PixelShader(device, pixelShaderByteCode);
var signature = ShaderSignature.GetInputSignature(vertexShaderByteCode);
// Layout from VertexShader input signature
// NOTE(review): the vertex data is only [position, colour] pairs
// (stride 32 bytes, set in Mesh.Render), yet this layout declares
// NORMAL at the same offset 0 as POSITION and appends TEXCOORD after
// COLOR (offset 32) — past the stride. There is no UV data in the
// buffer, which is the likely root cause of the broken texturing.
var layout = new InputLayout(device, signature, new[]
{
new InputElement("POSITION", 0, Format.R32G32B32A32_Float, 0, 0),
new InputElement("NORMAL", 0, Format.R32G32B32A32_Float, 0, 0),
new InputElement("COLOR", 0, Format.R32G32B32A32_Float, 16, 0),
new InputElement("TEXCOORD", 0, Format.R32G32_Float, InputElement.AppendAligned, 0)
});
var samplerState = new SamplerState(device, samplerStateDescription);
// The Vector4 argument is ignored: the Mesh constructor replaces it
// with its hard-coded cube data.
Mesh mesh1 = new Mesh("mesh1", new[] { new Vector4(0, 0, 0, 1) }, "", device, "1_Purple.jpg") { IsSelected=true };
Mesh mesh2 = new Mesh("mesh2", new[] { new Vector4(0, 0, 0, 1) }, "", device, "1_GREEN.jpg");
//MenuCreator menu1 = new MenuCreator(device,new[] {new Vector4(0, 0, 0, 0) });
// Create Constant Buffer (holds the single worldViewProj matrix)
var contantBuffer = new Buffer(device, Utilities.SizeOf<Matrix>(), ResourceUsage.Default, BindFlags.ConstantBuffer, CpuAccessFlags.None, ResourceOptionFlags.None, 0);
ShaderResourceView textureView;
SharpDX.WIC.ImagingFactory2 ImagingFactory2 = new SharpDX.WIC.ImagingFactory2();
// Prepare All the stages
context.InputAssembler.InputLayout = layout;
context.VertexShader.SetConstantBuffer(0, contantBuffer);
context.VertexShader.Set(vertexShader);
context.PixelShader.Set(pixelShader);
context.PixelShader.SetSampler(0, samplerState);
//context.PixelShader.SetShaderResource(0, textureView);
Matrix proj = Matrix.Identity;
// Use clock
var clock = new Stopwatch();
FPS fps = new FPS();
clock.Start();
// Declare texture for rendering
// userResized starts true so the first loop iteration creates the
// back/depth buffers.
bool userResized = true;
Texture2D backBuffer = null;
RenderTargetView renderView = null;
Texture2D depthBuffer = null;
DepthStencilView depthView = null;
// Setup handler on resize form
form.UserResized += (sender, args) => userResized = true;
// Setup full screen mode change F5 (Full) F4 (Window)
form.KeyUp += (sender, args) =>
{
if (args.KeyCode == Keys.F5)
swapChain.SetFullscreenState(true, null);
else if (args.KeyCode == Keys.F4)
swapChain.SetFullscreenState(false, null);
else if (args.KeyCode == Keys.Escape)
form.Close();
};
//CREATE DEPTH STENCIL DESCRIPTION
// NOTE(review): depth testing is disabled here, and the state is never
// bound anyway (SetDepthStencilState below is commented out).
DepthStencilStateDescription depthSSD = new DepthStencilStateDescription();
depthSSD.IsDepthEnabled = false;
depthSSD.DepthComparison = Comparison.LessEqual;
depthSSD.DepthWriteMask = DepthWriteMask.Zero;
DepthStencilState DSState = new DepthStencilState(device, depthSSD);
Camera camera = new Camera();
camera.eye = new Vector3(0, 0, -5);
camera.target = new Vector3(0, 0, 0);
Globals.Render = true;
/*void DrawEmptyCircle(Vector3 startPoint, Vector2 radius, Color color)
{
List<VertexPositionColor> circle = new List<VertexPositionColor>();
float X, Y;
var stepDegree = 0.3f;
for (float angle = 0; angle <= 360; angle += stepDegree)
{
X = startPoint.X + radius.X * (float)Math.Cos((angle));
Y = startPoint.Y + radius.Y * (float)Math.Sin((angle));
Vector3 point = new Vector3(X, Y, 0);
circle.Add(new VertexPositionColor(point, color));
}
}*/
CubeApp.Windows.SystemInformation.Print(CubeApp.Windows.SystemInformation.GetSystemInformation());
HardwareInformation.GetHardwareInformation("Win32_DisplayConfiguration", "Description");
// Main loop
RenderLoop.Run(form, () =>
{
fps.Count();fps.setFormHeader(form);
// Prepare matrices
if (Globals.Render)
{
var view = camera.getView();
// If Form resized
if (userResized)
{
// Dispose all previous allocated resources
Utilities.Dispose(ref backBuffer);
Utilities.Dispose(ref renderView);
Utilities.Dispose(ref depthBuffer);
Utilities.Dispose(ref depthView);
foreach (Mesh _mesh in Meshes.MeshCollection)
{
if (_mesh.IsDisposed == false)
{
Utilities.Dispose(ref _mesh.VerticesBuffer);
}
}
// Resize the backbuffer
swapChain.ResizeBuffers(desc.BufferCount, form.ClientSize.Width, form.ClientSize.Height, Format.Unknown, SwapChainFlags.None);
// Get the backbuffer from the swapchain
backBuffer = Texture2D.FromSwapChain<Texture2D>(swapChain, 0);
// Renderview on the backbuffer
renderView = new RenderTargetView(device, backBuffer);
// Create the depth buffer (same MSAA settings as the swap chain)
depthBuffer = new Texture2D(device, new Texture2DDescription()
{
Format = Format.D32_Float_S8X24_UInt,
ArraySize = 1,
MipLevels = 1,
Width = form.ClientSize.Width,
Height = form.ClientSize.Height,
SampleDescription = SamplerDesc,
Usage = ResourceUsage.Default,
BindFlags = BindFlags.DepthStencil,
CpuAccessFlags = CpuAccessFlags.None,
OptionFlags = ResourceOptionFlags.None
});
// Create the depth buffer view
depthView = new DepthStencilView(device, depthBuffer);
// Setup targets and viewport for rendering
context.Rasterizer.SetViewport(new Viewport(0, 0, form.ClientSize.Width, form.ClientSize.Height, 0.0f, 1.0f));
//context.OutputMerger.SetDepthStencilState(DSState);
context.OutputMerger.SetTargets(depthView, renderView);
// Setup new projection matrix with correct aspect ratio
proj = Matrix.PerspectiveFovLH((float)Math.PI / 4.0f, form.ClientSize.Width / (float)form.ClientSize.Height, 0.1f, 100.0f);
// We are done resizing
userResized = false;
}
var time = clock.ElapsedMilliseconds / 1000.0f;
var viewProj = Matrix.Multiply(view, proj);
// Clear views
context.ClearDepthStencilView(depthView, DepthStencilClearFlags.Depth, 1.0f, 0);
context.ClearRenderTargetView(renderView, Color.WhiteSmoke);
// Update WorldViewProj Matrix
// NOTE(review): Matrix.RotationX takes radians, so this rotates by
// 45 radians, not 45 degrees — probably not what was intended.
var worldViewProj = Matrix.RotationX(45) * Matrix.RotationY(0 * 2) * Matrix.RotationZ(0 * .7f) * viewProj;
// HLSL expects column-major by default, hence the transpose.
worldViewProj.Transpose();
context.UpdateSubresource(ref worldViewProj, contantBuffer);
//Update Camera Position
Vector3 _camEye = camera.eye;
Vector3 _camTarget = camera.target;
if (IHandler.KeyW)
{
_camEye.Z+= 0.050f; _camTarget.Z += 0.050f;
}
if (IHandler.KeyS)
{
_camEye.Z -= 0.050f; _camTarget.Z -= 0.050f;
}
if (IHandler.KeyA)
{
_camEye.X -= 0.050f; _camTarget.X -= 0.050f;
}
if (IHandler.KeyD)
{
_camTarget.X += 0.050f;
_camEye.X += 0.050f;
}
if (IHandler.KeyQ)
{
}
camera.eye = _camEye;
camera.target = _camTarget;
camera.updateView();
// Draw the cube
foreach (Mesh __mesh in Meshes.MeshCollection)
{
// Arrow keys drag the vertices of the selected mesh around.
if ( __mesh.IsSelected )
{
for (int i = 0; i <= __mesh.VerticesCount - 1; i++)
{
if (IHandler.KeyRight) __mesh.Vertices[i].X += 0.050f;
if (IHandler.KeyLeft) __mesh.Vertices[i].X -= 0.050f;
if (IHandler.KeyUp) __mesh.Vertices[i].Y += 0.050f;
if (IHandler.KeyDown) __mesh.Vertices[i].Y -= 0.050f;
}
}
// NOTE(review): the diffuse texture is decoded from disk and
// re-uploaded here for every mesh on every frame — load it once
// at startup and cache the ShaderResourceView instead.
var texture = TextureLoader.CreateTexture2DFromBitmap(device, TextureLoader.LoadBitmap(ImagingFactory2, __mesh.Texture_DiffuseMap));
textureView = new ShaderResourceView(device, texture);
context.PixelShader.SetShaderResource(0, textureView);
texture.Dispose();
textureView.Dispose();
// Vertex buffer is also rebuilt per frame (Mesh.Render disposes it).
__mesh.VerticesBuffer = SharpDX.Direct3D11.Buffer.Create(device, BindFlags.VertexBuffer, __mesh.Vertices);
//EnvironmentDisplayModes.SetDisplayMode(device, __mesh, EnvironmentDisplayModes.DisplayMode.Standart);
__mesh.Render();
}
// Present!
swapChain.Present(0, PresentFlags.None);
}
});
// Release all resources
// NOTE(review): d3dDevice is the one shared device, so this loop
// disposes it once per mesh; dispose it a single time instead.
foreach (Mesh msh in Meshes.MeshCollection)
{
msh.d3dDevice.Dispose();
msh.VerticesBuffer.Dispose();
}
DC.DisposeAndClear();
signature.Dispose();
vertexShaderByteCode.Dispose();
vertexShader.Dispose();
pixelShaderByteCode.Dispose();
pixelShader.Dispose();
layout.Dispose();
contantBuffer.Dispose();
depthBuffer.Dispose();
depthView.Dispose();
renderView.Dispose();
backBuffer.Dispose();
ImagingFactory2.Dispose();
device.Dispose();
context.Dispose();
swapChain.Dispose();
factory.Dispose();
adapter.Dispose();
DSState.Dispose();
samplerState.Dispose();
DC.Dispose();
form.Dispose();
})).Start();
}
}
MiniCube.hlsl
// Cube shader: transforms vertices by worldViewProj and samples the bound
// diffuse texture modulated by a depth-derived colour.
Texture2D ShaderTexture : register(t0);
SamplerState Sampler : register(s0);
struct VS_IN
{
float4 pos : POSITION;
float3 Normal : NORMAL;
float4 col : COLOR;
float2 TextureUV: TEXCOORD; // Texture UV coordinate
// NOTE(review): the C# vertex buffer only supplies position + colour,
// so Normal and TextureUV receive no real data — TextureUV is
// effectively undefined, which is why the texture looks wrong.
};
// NOTE(review): this struct is never used; VS returns PS_IN directly.
struct VS_OUTPUT
{
float4 pos : POSITION0;
float depth : TEXCOORD0;
float2 TextureUV: TEXCOORD;
};
struct PS_IN
{
float4 pos : SV_POSITION;
float4 col : COLOR;
float2 TextureUV: TEXCOORD;
float3 WorldNormal : NORMAL;
float3 WorldPosition : WORLDPOS;
};
// Combined world-view-projection matrix, uploaded transposed from C#.
float4x4 worldViewProj;
PS_IN VS( VS_IN input )
{
PS_IN output = (PS_IN)0;
output.pos = mul(input.pos, worldViewProj);
// Ad-hoc depth-based shading: rescale the (untransformed) z and derive
// a brightness factor from w/z. Applied after output.pos is computed,
// so it only affects the colour, not the position.
input.pos.z= input.pos.z - 0.9f;
input.pos.z *= 10.0f;
output.col = 1.0f-((input.pos.w /* * input.col*/) / (input.pos.z /* *input.col*/));
output.TextureUV = input.TextureUV;
return output;
}
float4 PS( PS_IN input ) : SV_Target
{
// Sample the diffuse map and tint by the depth-derived colour.
return ShaderTexture.Sample(Sampler, input.TextureUV)*input.col;
}
First of all it is common to store the texture coordinates in the vertices.
So the first thing to do for you is to change your structure of your vertices to:
/// <summary>
/// Per-vertex layout carrying everything the textured shader needs:
/// position, normal, and texture coordinate.
/// FIX: the original snippet was missing the 'struct' keyword and did not
/// compile as C#.
/// </summary>
public struct MyVertex
{
public Vector3 Position;
public Vector3 Normal;
public Vector2 TextureCoord;
}
I do not see the necessity to make the position and normal a vector4, so this should do it. As you use textures there is also no need to use color in the vertex structure.
Next you change the input structure of the shader to the above structure — in the shader itself, but also in your initialization of it.
It is recommended that you set the texture coordinates in the initialization of the mesh. An example would be for a plane:
// Example: a unit plane in the XZ plane. All four normals point up (+Y),
// and the UVs map the full texture across the quad: (0,0) at one corner,
// (1,1) at the opposite corner.
var vertices = new MyVertex[]
{
new MyVertex(){Position = new Vector3(0.0f, 0.0f, 0.0f),Normal = new Vector3(0.0f, 1.0f, 0.0f), TextureCoord = new Vector2(0.0f, 0.0f)},
new MyVertex(){Position = new Vector3(1.0f, 0.0f, 0.0f),Normal = new Vector3(0.0f, 1.0f, 0.0f), TextureCoord = new Vector2(1.0f, 0.0f)},
new MyVertex(){Position = new Vector3(1.0f, 0.0f, 1.0f),Normal = new Vector3(0.0f, 1.0f, 0.0f), TextureCoord = new Vector2(1.0f, 1.0f)},
new MyVertex(){Position = new Vector3(0.0f, 0.0f, 1.0f),Normal = new Vector3(0.0f, 1.0f, 0.0f), TextureCoord = new Vector2(0.0f, 1.0f)}
};
If you store your vertices like that, it should work like a charm.
Only thing left to do is, to use the input texturecoordinates for your sampling of the passed texture in the shader.
Also there is no need for manipulation of the texturecoordinates in the shader if you want to use simple texture mapping. Otherwise you can look up different types of texture mapping on wikipedia, like spherical-, box- or plane-texturemapping.
I'm using DirectX11 with SharpDx & WPF D3DImage in C# to render image as texture.
Previously I was updating my render target (which worked perfectly) and not using the pixel shader to display the updated texture within my quad.
Having now realized my mistake, I decided to use the pixel shader so that I can implement a letterboxing technique with the help of the viewport.
In doing so, I am not able to figure out why my rendered texture is a sub-region of the image, which in turn is displayed stretched.
Code Used:
a) Initialisation code
// One-time pipeline setup: compile the shaders, define the vertex layout,
// fill a vertex buffer with a full-screen quad, bind everything, and load
// the test texture.
var device = this.Device;
var context = device.ImmediateContext;
byte[] fileBytes = GetShaderEffectsFileBytes();
var vertexShaderByteCode = ShaderBytecode.Compile(fileBytes, "VSMain", "vs_5_0", ShaderFlags.None, EffectFlags.None);
var vertexShader = new VertexShader(device, vertexShaderByteCode);
var pixelShaderByteCode = ShaderBytecode.Compile(fileBytes, "PSMain", "ps_5_0", ShaderFlags.None, EffectFlags.None);
var pixelShader = new PixelShader(device, pixelShaderByteCode);
// Matches VertexPositionTexture: 16-byte position then 8-byte UV.
layout = new InputLayout(device, vertexShaderByteCode, new[] {
new InputElement("SV_Position", 0, Format.R32G32B32A32_Float, 0, 0),
new InputElement("TEXCOORD", 0, Format.R32G32_Float, 16, 0),
});
// Write vertex data to a datastream
// NOTE(review): the stream is sized for 6 vertices but only 4 are
// written below (drawn as a 4-vertex triangle strip) — the size should
// be * 4, or the extra space stays uninitialized.
var stream = new DataStream(Utilities.SizeOf<VertexPositionTexture>() * 6, true, true);
// NOTE(review): the corner comments look swapped — in clip space
// x=+1 is the RIGHT edge, so (1, 1) is top-right, not top-left. The
// UV assignments themselves are consistent with that.
stream.WriteRange(new[]
{
new VertexPositionTexture(
new Vector4(1, 1, 0.5f, 1.0f), // position top-left
new Vector2(1.0f, 0.0f)
),
new VertexPositionTexture(
new Vector4(1, -1, 0.5f, 1.0f), // position top-right
new Vector2(1.0f, 1.0f)
),
new VertexPositionTexture(
new Vector4(-1, 1, 0.5f, 1.0f), // position bottom-left
new Vector2(0.0f, 0.0f)
),
new VertexPositionTexture(
new Vector4(-1, -1, 0.5f, 1.0f), // position bottom-right
new Vector2(0.0f, 1.0f)
),
});
stream.Position = 0;
vertices = new SharpDX.Direct3D11.Buffer(device, stream, new BufferDescription()
{
BindFlags = BindFlags.VertexBuffer,
CpuAccessFlags = CpuAccessFlags.None,
OptionFlags = ResourceOptionFlags.None,
SizeInBytes = Utilities.SizeOf<VertexPositionTexture>() * 6,
Usage = ResourceUsage.Default,
StructureByteStride = 0
})
;
stream.Dispose();
// Bind the fixed state once; the render loop only clears and draws.
context.InputAssembler.InputLayout = (layout);
context.InputAssembler.PrimitiveTopology = (PrimitiveTopology.TriangleStrip);
context.InputAssembler.SetVertexBuffers(0, new VertexBufferBinding(vertices, Utilities.SizeOf<VertexPositionTexture>(), 0));
context.VertexShader.Set(vertexShader);
context.GeometryShader.Set(null);
context.PixelShader.Set(pixelShader);
Device.ImmediateContext.OutputMerger.SetTargets(m_depthStencilView, m_RenderTargetView);
this.ImgSource.SetRenderTargetDX11(this.RenderTarget);
// NOTE(review): 'flower' is never disposed; the SRV is safe to dispose
// here because the context holds its own reference once bound.
Texture2D flower = Texture2D.FromFile<Texture2D>(this.Device, "3.jpg");
var srv = new ShaderResourceView(this.Device, flower);
Device.ImmediateContext.PixelShader.SetShaderResource(0, srv);
srv.Dispose();
b) VertexPositionTexture
/// <summary>
/// Interleaved vertex layout for the full-screen quad: a clip-space
/// position followed by a texture coordinate, matching the
/// SV_Position/TEXCOORD input layout declared for the vertex shader.
/// Field order must stay Position-then-TextureUV to match the layout's
/// byte offsets (0 and 16).
/// </summary>
public struct VertexPositionTexture
{
    // Clip-space position (x, y, z, w).
    public Vector4 Position;
    // Texture coordinate (u, v).
    public Vector2 TextureUV;

    public VertexPositionTexture(Vector4 position, Vector2 textureUV)
    {
        this.Position = position;
        this.TextureUV = textureUV;
    }
}
c) Rendering Code
// Per-frame render: clear both targets, draw the 4-vertex triangle strip
// (two triangles = one quad), then flush so the shared surface is complete
// before WPF's D3DImage picks it up.
Device.ImmediateContext.ClearRenderTargetView(this.m_RenderTargetView, new Color4(Color.Blue.R, Color.Blue.G, Color.Blue.B, Color.Blue.A));
Device.ImmediateContext.ClearDepthStencilView(m_depthStencilView, DepthStencilClearFlags.Depth, 1.0f, 0);
Device.ImmediateContext.Draw(4, 0);
Device.ImmediateContext.Flush();
this.ImgSource.InvalidateD3DImage();
d) Shader Effects File:
// Minimal textured-quad shader: the quad's positions are already in clip
// space, so the vertex stage is a pure pass-through and the pixel stage
// just samples the bound texture.
Texture2D ShaderTexture : register(t0);
SamplerState Sampler : register (s0);
struct VertexShaderInput
{
float4 Position : SV_Position;
float2 TextureUV : TEXCOORD0;
};
struct VertexShaderOutput
{
float4 Position : SV_Position;
float2 TextureUV : TEXCOORD0;
};
VertexShaderOutput VSMain(VertexShaderInput input)
{
// No transform: positions come in as clip-space coordinates.
VertexShaderOutput output = (VertexShaderOutput)0;
output.Position = input.Position;
output.TextureUV = input.TextureUV;
return output;
}
float4 PSMain(VertexShaderOutput input) : SV_Target
{
// Sample with the interpolated UV from the vertex stage.
return ShaderTexture.Sample(Sampler, input.TextureUV);
}
Also below i have added a screenshot of the issue i have been having and actual image that should be rendered.
Actual image
Failed texture rendered
Any suggestions or help would be really appreciated, as I have researched the web and various forums but had no luck.
Thanks.
Link to Sample Test Application