I just tried to use VBOs. So I render a cube, and here's what's happening.
If I don't rotate it, everything is OK:
But when I rotate it, this happens:
It looks like the cube is translucent and... I don't really know, it's messing with my mind.
Here's my code:
internal class CubeRenderer
{
private VertexBuffer vertexBuffer;
private IndexBuffer indexBuffer;
public CubeRenderer()
{
vertexBuffer = new VertexBuffer(new[]
{
// front
new Vertex(-1.0f, -1.0f, 1.0f, Color.Red),
new Vertex(1.0f, -1.0f, 1.0f, Color.Beige),
new Vertex(1.0f, 1.0f, 1.0f, Color.SaddleBrown),
new Vertex(-1.0f, 1.0f, 1.0f, Color.AliceBlue),
//back
new Vertex(-1.0f, -1.0f, -1.0f, Color.DarkBlue),
new Vertex(1.0f, -1.0f, -1.0f, Color.Firebrick),
new Vertex(1.0f, 1.0f, -1.0f, Color.IndianRed),
new Vertex(-1.0f, 1.0f, -1.0f, Color.Yellow)
});
indexBuffer = new IndexBuffer(new uint[]
{
// front
0, 1, 2,
2, 3, 0,
// top
3, 2, 6,
6, 7, 3,
// back
7, 6, 5,
5, 4, 7,
// bottom
4, 5, 1,
1, 0, 4,
// left
4, 0, 3,
3, 7, 4,
// right
1, 5, 6,
6, 2, 1
});
}
public void Draw()
{
// 1) Ensure that the VertexArray client state is enabled.
GL.EnableClientState(ArrayCap.VertexArray);
GL.EnableClientState(ArrayCap.NormalArray);
GL.EnableClientState(ArrayCap.TextureCoordArray);
GL.EnableClientState(ArrayCap.ColorArray);
// 2) Bind the vertex and element (=indices) buffer handles.
GL.BindBuffer(BufferTarget.ArrayBuffer, vertexBuffer.Id);
GL.BindBuffer(BufferTarget.ElementArrayBuffer, indexBuffer.Id);
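// 3) Point the fixed-function arrays at the interleaved vertex data.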
GL.VertexPointer(3, VertexPointerType.Float, vertexBuffer.Stride, IntPtr.Zero);
GL.NormalPointer(NormalPointerType.Float, vertexBuffer.Stride, new IntPtr(Vector3.SizeInBytes));
GL.TexCoordPointer(2, TexCoordPointerType.Float, vertexBuffer.Stride, new IntPtr(Vector3.SizeInBytes*2));
GL.ColorPointer(4, ColorPointerType.UnsignedByte, vertexBuffer.Stride, new IntPtr(Vector3.SizeInBytes*2 + Vector2.SizeInBytes));
// 4) Call DrawElements. (Note: the last parameter is an offset into the element buffer and will usually be IntPtr.Zero).
GL.DrawElements(PrimitiveType.Triangles, indexBuffer.Count, DrawElementsType.UnsignedInt, IntPtr.Zero);
//Disable client state
GL.DisableClientState(ArrayCap.VertexArray);
GL.DisableClientState(ArrayCap.NormalArray);
GL.DisableClientState(ArrayCap.TextureCoordArray);
GL.DisableClientState(ArrayCap.ColorArray);
}
}
Edit 1:
Looks like it is a problem with the depth buffer. I tried enabling DepthTest, but it still does the same thing.
Edit 2:
It might be coming from the way I rotate the matrix...?
GL.Ortho(-Zoom * ratio, Zoom * ratio, -Zoom, Zoom, 0, 100);
Alright, I found the answer by myself. The problem came from the fact that I was using glOrtho to zoom and was somehow passing it wrong values. I switched to glScale and everything is good now!
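For reference, this is roughly what I ended up with (a minimal sketch; the exact bounds are illustrative, and Zoom and ratio are my own variables):
GL.MatrixMode(MatrixMode.Projection);
GL.LoadIdentity();
GL.Ortho(-ratio, ratio, -1, 1, -100, 100); // fixed volume, no Zoom baked in
GL.MatrixMode(MatrixMode.Modelview);
GL.LoadIdentity();
GL.Scale(Zoom, Zoom, Zoom); // zoom by scaling the model-view instead
// ...then rotate and draw the cube as before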
Related
I am trying to draw a wire cube (edges only) using an element buffer object (EBO). I have set the coordinates of the cube's eight vertices and an array of indices. Next, I initialize the VAO, VBO and EBO for the future cube. Then I try to draw it, but I don't see any result.
Cube vertices and indices:
private readonly float[] _cubeVertices =
{
// Bottom side
-0.5f, -0.5f, 0.0f, // [0] Front-left
0.5f, -0.5f, 0.0f, // [1] Front-right
-0.5f, -0.5f, -1.0f, // [2] Rear-left
0.5f, -0.5f, -1.0f, // [3] Rear-right
// Upper side
-0.5f, 0.5f, 0.0f, // [4] Front-left
0.5f, 0.5f, 0.0f, // [5] Front-right
-0.5f, 0.5f, -1.0f, // [6] Rear-left
0.5f, 0.5f, -1.0f // [7] Rear-right
};
//indices
private readonly uint[] _cubeEdges =
{
0, 1,
0, 2,
0, 4,
3, 2,
3, 1,
3, 7,
5, 6,
5, 4,
5, 1,
6, 7,
6, 4,
6, 2
};
onLoad():
_cubeShader = new Shader("../../../Shaders/cubeShader.vert", "../../../Shaders/shader.frag");
_cubeShader.Use();
_vboCube = GL.GenBuffer();
GL.BindBuffer(BufferTarget.ArrayBuffer, _vboCube);
GL.BufferData(BufferTarget.ArrayBuffer, _cubeVertices.Length * sizeof(float), _cubeVertices, BufferUsageHint.StaticDraw);
_vaoCube = GL.GenVertexArray();
GL.BindVertexArray(_vaoCube);
var vertexLocation = _cubeShader.GetAttribLocation("aPosition");
GL.VertexAttribPointer(vertexLocation, 3, VertexAttribPointerType.Float, false, 3 * sizeof(float), 0);
_eboCube = GL.GenBuffer();
GL.BindBuffer(BufferTarget.ElementArrayBuffer, _eboCube);
GL.BufferData(BufferTarget.ElementArrayBuffer, _cubeEdges.Length * sizeof(uint), _cubeEdges, BufferUsageHint.StaticDraw);
Actual drawing:
_cubeShader.Use();
GL.BindVertexArray(_vaoCube);
//GL.LineWidth(2.0f);
GL.DrawElements(PrimitiveType.Lines, _cubeEdges.Length, DrawElementsType.UnsignedInt, 0);
cubeShader.vert:
#version 330 core
in vec3 aPosition;
void main(void)
{
gl_Position = vec4(aPosition, 1.0);
}
shader.frag:
#version 330 core
out vec4 FragColor;
void main()
{
FragColor = vec4(1.0);
}
You can find the Shader.cs code here: GitHub.
Actually, all my code is based on the LearnOpenTK repo.
This is my first introduction to OpenGL, so most likely I'm missing something important in the process. Is it even possible to draw lines using an EBO? I have already tried the same thing with triangles and everything worked out.
I figured it out. I forgot the line
GL.EnableVertexAttribArray(vertexLocation);
after calling
GL.VertexAttribPointer()
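For anyone else hitting this, the working attribute setup looks like this (a sketch of the relevant part of my onLoad, using the same names as above):
GL.BindVertexArray(_vaoCube);
var vertexLocation = _cubeShader.GetAttribLocation("aPosition");
GL.VertexAttribPointer(vertexLocation, 3, VertexAttribPointerType.Float, false, 3 * sizeof(float), 0);
GL.EnableVertexAttribArray(vertexLocation); // the line I had forgotten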
I'm trying to render a hexagon in Unity using these coordinates:
https://qph.fs.quoracdn.net/main-qimg-9ad01ef3babb64b57d378a1558f468a7
What I'm getting is this error:
Failed setting triangles. Some indices are referencing out of bounds vertices. IndexCount: 18, VertexCount: 7
Any ideas what is wrong with this code?
void Start()
{
MeshFilter meshFilter = gameObject.AddComponent<MeshFilter>();
Mesh mesh = new Mesh();
Vector3[] vertices = new Vector3[7]
{
new Vector3(0, 0),
new Vector3(-1.0f, 0),
new Vector3(-0.5f, Mathf.Sqrt(3/2)),
new Vector3(0.5f, Mathf.Sqrt(3/2)),
new Vector3(1, 0),
new Vector3(0.5f, - Mathf.Sqrt(3/2)),
new Vector3(-0.5f, - Mathf.Sqrt(3/2))
};
mesh.vertices = vertices;
int[] tris = new int[18]
{
0, 2, 1,
0, 3, 2,
0, 4, 3,
0, 5, 4,
0, 6, 5,
0, 7, 6
};
mesh.triangles = tris;
meshFilter.mesh = mesh;
}
The last triangle uses the vertex at index 7, which does not exist, since you specified the hexagon to have only 7 points (indices 0 through 6). In case you don't know, indices start at 0, which is why this doesn't work (although by the looks of it you already know this; you could be blindly following a tutorial, which is why I mention it).
It should be 1 instead of 7, since you have to loop back around and connect a triangle between index 6 (the last index) and the first perimeter index, which is index 1.
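With that one index changed, the array becomes:
int[] tris = new int[18]
{
    0, 2, 1,
    0, 3, 2,
    0, 4, 3,
    0, 5, 4,
    0, 6, 5,
    0, 1, 6 // was 0, 7, 6; wraps back to the first perimeter vertex
};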
I cannot render triangles for the life of me with a VBO in OpenTK. I am loading my data into the VBO in the glControl_Load() event, and I get a background screen with no triangles when running. The data comes from a mesh: m.OpenGLArrays(out data, out indices) outputs a list of floats and a list of ints. The floats are the vertices T1v1, T1v2, T1v3, T2v1, T2v2, T2v3, ..., all three vertices of each triangle back to back.
However, this code gives me a blank screen, while if I switch to the commented-out immediate-mode rendering code below, everything renders fine... What am I doing wrong?
private void glControl_Load(object sender, EventArgs e)
{
loaded = true;
glControl.MouseMove += new MouseEventHandler(glControl_MouseMove);
glControl.MouseWheel += new MouseEventHandler(glControl_MouseWheel);
GL.ClearColor(Color.DarkSlateGray);
GL.Color3(1f, 1f, 1f);
m.OpenGLArrays(out data, out indices);
this.indicesSize = (uint)indices.Length;
GL.GenBuffers(1, out VBOid[0]);
GL.GenBuffers(1, out VBOid[1]);
SetupViewport();
}
private void SetupViewport()
{
if (this.WindowState == FormWindowState.Minimized) return;
glControl.Width = this.Width - 32;
glControl.Height = this.Height - 80;
Frame_label.Location = new System.Drawing.Point(glControl.Width / 2, glControl.Height + 25);
GL.MatrixMode(MatrixMode.Projection);
//GL.LoadIdentity();
GL.Ortho(0, glControl.Width, 0, glControl.Height, -1, 1); // Bottom-left corner pixel has coordinate (0, 0)
GL.Viewport(0, 0, glControl.Width, glControl.Height); // Use all of the glControl painting area
GL.Enable(EnableCap.DepthTest);
GL.BindBuffer(BufferTarget.ArrayBuffer, VBOid[0]);
GL.BufferData(BufferTarget.ArrayBuffer, (IntPtr)(data.Length * sizeof(float)), data, BufferUsageHint.StaticDraw);
GL.BindBuffer(BufferTarget.ArrayBuffer, 0);
float aspect_ratio = this.Width / (float)this.Height;
projection = Matrix4.CreatePerspectiveFieldOfView(MathHelper.PiOver4, aspect_ratio, 1, 1024);
GL.MatrixMode(MatrixMode.Projection);
GL.LoadMatrix(ref projection);
}
private void glControl_Paint(object sender, PaintEventArgs e)
{
if (loaded)
{
GL.Clear(ClearBufferMask.ColorBufferBit |
ClearBufferMask.DepthBufferBit |
ClearBufferMask.StencilBufferBit);
modelview = Matrix4.LookAt(0f, 0f, -200f + zoomFactor, 0, 0, 0, 0.0f, 1.0f, 0.0f);
var aspect_ratio = Width / (float)Height;
projection = Matrix4.CreatePerspectiveFieldOfView(MathHelper.PiOver4, aspect_ratio, 1, 512);
GL.MatrixMode(MatrixMode.Projection);
GL.LoadMatrix(ref projection);
GL.MatrixMode(MatrixMode.Modelview);
GL.LoadMatrix(ref modelview);
GL.Rotate(angleY, 1.0f, 0, 0);
GL.Rotate(angleX, 0, 1.0f, 0);
GL.EnableClientState(ArrayCap.VertexArray);
GL.BindBuffer(BufferTarget.ArrayBuffer, VBOid[0]);
GL.Color3(Color.Yellow);
GL.VertexPointer(3, VertexPointerType.Float, Vector3.SizeInBytes, new IntPtr(0));
GL.DrawArrays(PrimitiveType.Triangles, 0, data.Length);
GL.BindBuffer(BufferTarget.ArrayBuffer, 0);
GL.DisableClientState(ArrayCap.VertexArray);
//GL.Color3(Color.Yellow);
//GL.PolygonMode(MaterialFace.Front, PolygonMode.Fill);
//GL.Begin(PrimitiveType.Triangles);
//for (int i = 0; i < this.md.mesh.Count; i++)
//{
// GL.Normal3(this.md.mesh[i].normal);
// GL.Vertex3(this.md.mesh[i].vertices[0]);
// GL.Vertex3(this.md.mesh[i].vertices[1]);
// GL.Vertex3(this.md.mesh[i].vertices[2]);
//}
//GL.End();
//GL.EndList();
glControl.SwapBuffers();
Frame_label.Text = "Frame: " + frameNum++;
}
}
If something doesn't seem right, then it probably isn't. I seriously questioned my understanding of OpenGL and spent hours looking at this. However, it was just a simple error: I forgot to increment a count variable in the for loop that transfers the mesh from one object to another, so each triangle had identical vertices! Always expect the unexpected when it comes to debugging!
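To illustrate the kind of bug it was (a hypothetical reconstruction, not my actual code; the names are made up):
int src = 0;
for (int i = 0; i < triangleCount; i++)
{
    dest.Add(sourceMesh[src]); // always copies sourceMesh[0]
    // missing: src++; so every copied triangle had identical vertices
}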
I am trying to create a very basic mesh renderer using D3D11 to use in my final project for school. Although I followed the basic online tutorials, like the Rastertek site and Frank D. Luna's book, to the letter and used the simplest passthrough shader imaginable, I couldn't get my triangles to show up on the screen. Finally I found out about VS 2013's graphics debugging ability, and I was able to see that my vertex and index buffers were filled with garbage data. I've hosted the solution here if you want to run the code, but can someone familiar with D3D and/or its SharpDX C# wrapper tell me what I'm doing wrong in the following code?
This is my geometry data. The Vertex struct has Vector4 position and color fields, and Index is an alias for ushort.
var vertices = new[]
{
new Vertex(new Vector4(-1, 1, 0, 1), Color.Red),
new Vertex(new Vector4(1, 1, 0, 1), Color.Green),
new Vertex(new Vector4(1, -1, 0, 1), Color.Blue),
new Vertex(new Vector4(-1, -1, 0, 1), Color.White)
};
var indices = new Index[]
{
0, 2, 1,
0, 3, 2
};
And here is the code that fails to initialize my vertex and index buffers with the above data.
var vStream = new DataStream(sizeInBytes: vertices.Length * sizeof(Vertex), canRead: false, canWrite: true);
var iStream = new DataStream(sizeInBytes: indices.Length * sizeof(Index), canRead: false, canWrite: true);
{
vStream.WriteRange(vertices);
iStream.WriteRange(indices);
vBuffer = new Buffer(
device, vStream, new BufferDescription(
vertices.Length * sizeof(Vertex),
ResourceUsage.Immutable,
BindFlags.VertexBuffer,
CpuAccessFlags.None,
ResourceOptionFlags.None,
0)) { DebugName = "Vertex Buffer" };
iBuffer = new Buffer(
device, iStream, new BufferDescription(
indices.Length * sizeof(Index),
ResourceUsage.Immutable,
BindFlags.IndexBuffer,
CpuAccessFlags.None,
ResourceOptionFlags.None,
0)) { DebugName = "Index Buffer" };
}
If I replace the above code with the following, however, it works. I have no idea what I'm doing wrong.
vBuffer = Buffer.Create(
device, vertices, new BufferDescription(
vertices.Length * sizeof(Vertex),
ResourceUsage.Immutable,
BindFlags.VertexBuffer,
CpuAccessFlags.None,
ResourceOptionFlags.None,
0));
vBuffer.DebugName = "Vertex Buffer";
iBuffer = Buffer.Create(
device, indices, new BufferDescription(
indices.Length * sizeof(Index),
ResourceUsage.Immutable,
BindFlags.IndexBuffer,
CpuAccessFlags.None,
ResourceOptionFlags.None,
0));
iBuffer.DebugName = "Index Buffer";
You need to reset the stream position to zero (e.g. iStream.Position = 0) before passing it to new Buffer(...). After WriteRange, the position sits at the end of the stream, so the Buffer constructor reads from there and picks up garbage instead of your data.
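Applied to the code in the question, that's a two-line fix right after the writes:
vStream.WriteRange(vertices);
iStream.WriteRange(indices);
vStream.Position = 0; // rewind so the Buffer constructor reads from the start
iStream.Position = 0;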
I'm trying to draw an indexed square using SlimDX and Direct3D11. I've managed to draw a square without indices, but when I swap to my indexed version I just get a blank screen.
My input layout is set to only take position data (I'm essentially extending from the third tutorial on the SlimDX website) and to draw Triangle Lists.
My render loop code is as follows. (I am using the triangle.fx pixel and vertex shader files from the tutorial; they take vertex positions in screen coordinates and paint them yellow. D3D is shorthand for SlimDX.Direct3D11.)
//clear the render target
context.ClearRenderTargetView(renderTarget, new Color4(0.5f, 0.5f, 1.0f));
context.InputAssembler.SetVertexBuffers(0, new VertexBufferBinding(mesh.VertexBuffer,12, 0));
context.InputAssembler.SetIndexBuffer(mesh.IndexBuffer, Format.R16_UNorm, 0);
context.DrawIndexed(mesh.indices, 0, 0);
swapChain.Present(0, PresentFlags.None);
"mesh" is a struct that holds a Vertex buffer, Index buffer and vertex count. The data is filled here:
Vertex[] vertexes = new Vertex[4];
vertexes[0].Position = new Vector3(0, 0, 0.5f);
vertexes[1].Position = new Vector3(0, 0.5f, 0.5f);
vertexes[2].Position = new Vector3(0.5f, 0, 0.5f);
vertexes[3].Position = new Vector3(0.5f, 0.5f, 0.5f);
UInt16[] indexes = { 0, 1, 2, 1, 3, 2 };
DataStream vertices = new DataStream(12 * 4, true, true);
foreach (Vertex vertex in vertexes)
{
vertices.Write(vertex.Position);
}
vertices.Position = 0;
DataStream indices = new DataStream(sizeof(int) * 6, true, true);
foreach (UInt16 index in indexes)
{
indices.Write(index);
}
indices.Position = 0;
mesh = new Mesh();
D3D.Buffer vertexBuffer = new D3D.Buffer(device, vertices, 12 * 4, ResourceUsage.Default, BindFlags.VertexBuffer, CpuAccessFlags.None, ResourceOptionFlags.None, 0);
mesh.VertexBuffer = vertexBuffer;
mesh.IndexBuffer = new D3D.Buffer(device, indices, 2 * 6, ResourceUsage.Default, BindFlags.IndexBuffer, CpuAccessFlags.None, ResourceOptionFlags.None, 0);
mesh.vertices = vertexes.GetLength(0);
mesh.indices = indexes.Length;
All of this is nearly identical to my unindexed square method (with the addition of index buffers and indices, and the removal of two duplicate vertices that aren't needed with indexing), but while the unindexed method draws a square, the indexed method doesn't.
My current theory is that there is either something wrong with this line:
mesh.IndexBuffer = new D3D.Buffer(device, indices, 2 * 6, ResourceUsage.Default, BindFlags.IndexBuffer, CpuAccessFlags.None, ResourceOptionFlags.None, 0);
Or these lines:
context.InputAssembler.SetIndexBuffer(mesh.IndexBuffer, Format.R16_UNorm, 0);
context.DrawIndexed(mesh.indices, 0, 0);
Why don't you just use a vertex and index buffer for this simple example?
Like this (DirectX 9):
VertexBuffer vb;
IndexBuffer ib;
vertices = new PositionColored[WIDTH * HEIGHT];
//vertex creation
vb = new VertexBuffer(device, HEIGHT * WIDTH * PositionColored.SizeInBytes, Usage.WriteOnly, PositionColored.Format, Pool.Default);
DataStream stream = vb.Lock(0, 0, LockFlags.None);
stream.WriteRange(vertices);
vb.Unlock();
indices = new short[(WIDTH - 1) * (HEIGHT - 1) * 6];
// indices creation (16-bit indices, so sizeof(short) per index)
ib = new IndexBuffer(device, sizeof(short) * (WIDTH - 1) * (HEIGHT - 1) * 6, Usage.WriteOnly, Pool.Default, false);
stream = ib.Lock(0, 0, LockFlags.None);
stream.WriteRange(indices);
ib.Unlock();
//Drawing
device.Clear(ClearFlags.Target | ClearFlags.ZBuffer, Color.DarkSlateBlue, 1.0f, 0);
device.BeginScene();
device.VertexFormat = PositionColored.Format;
device.SetStreamSource(0, vb, 0, PositionColored.SizeInBytes);
device.Indices = ib;
device.SetTransform(TransformState.World, Matrix.Translation(-HEIGHT / 2, -WIDTH / 2, 0) * Matrix.RotationZ(angle));
device.DrawIndexedPrimitives(PrimitiveType.TriangleList, 0, 0, WIDTH * HEIGHT, 0, indices.Length / 3);
device.EndScene();
device.Present();
I use the mesh in another way (DirectX 9 code again):
private void CreateMesh()
{
meshTerrain = new Mesh(device, (WIDTH - 1) * (HEIGHT - 1) * 2, WIDTH * HEIGHT, MeshFlags.Managed, PositionColored.Format);
DataStream stream = meshTerrain.VertexBuffer.Lock(0, 0, LockFlags.None);
stream.WriteRange(vertices);
meshTerrain.VertexBuffer.Unlock();
stream.Close();
stream = meshTerrain.IndexBuffer.Lock(0, 0, LockFlags.None);
stream.WriteRange(indices);
meshTerrain.IndexBuffer.Unlock();
stream.Close();
meshTerrain.GenerateAdjacency(0.5f);
meshTerrain.OptimizeInPlace(MeshOptimizeFlags.VertexCache);
meshTerrain = meshTerrain.Clone(device, MeshFlags.Dynamic, PositionNormalColored.Format);
meshTerrain.ComputeNormals();
}
//Drawing
device.Clear(ClearFlags.Target | ClearFlags.ZBuffer, Color.DarkSlateBlue, 1.0f, 0);
device.BeginScene();
device.VertexFormat = PositionColored.Format;
device.SetTransform(TransformState.World, Matrix.Translation(-HEIGHT / 2, -WIDTH / 2, 0) * Matrix.RotationZ(angle));
int numSubSets = meshTerrain.GetAttributeTable().Length;
for (int i = 0; i < numSubSets; i++)
{
meshTerrain.DrawSubset(i);
}
device.EndScene();
device.Present();