Plotting 10 data sets with OpenTK - C#

I have 10 sets of data saved in arrays, for example A1 = [1, 2, 3], A2 = [5, 6, 7], etc. I am using an OpenTK GameWindow to plot them because the data sets are large (10 million+ points per set). So far I know how to plot one data set, as follows:
public GraphWindow(List<float>[] Dataset) : base(800, 600, default, "Data Analyzer", GameWindowFlags.Default, default, 4, 0, default)
{
test = EditVertices(Dataset[0]);
}
private float[] EditVertices(List<float> list)
{
float[] list2d = new float[2*list.Count()];
for(int i =0; i<list.Count(); i++)
{
list2d[2*i]=-1+ i*(((float)2/(float)list.Count()));
list2d[2*i +1] =((float)(list[i]-list.Min())/(float)(list.Max()-list.Min()));
}
return list2d;
}
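A side note for data sets this large: list.Min() and list.Max() are re-evaluated on every loop iteration above, which makes EditVertices quadratic in the number of points. A small variant of the same loop that computes them once (same normalization otherwise):
float min = list.Min();
float max = list.Max();
for (int i = 0; i < list.Count(); i++)
{
    list2d[2 * i] = -1f + i * (2f / list.Count());
    list2d[2 * i + 1] = (list[i] - min) / (max - min);
}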
protected override void OnLoad(EventArgs e)
{
CursorVisible = true;
shader = new Shader("shader.vert", "shader.frag");
shader.Use();
VertexArrayObject = GL.GenVertexArray();
VertexBufferObject = GL.GenBuffer();
GL.BindVertexArray(VertexArrayObject);
GL.BindBuffer(BufferTarget.ArrayBuffer, VertexBufferObject);
GL.BufferData(BufferTarget.ArrayBuffer, test.Count()*sizeof(float), test, BufferUsageHint.StaticDraw);
var vertexLocation = shader.GetAttribLocation("aPosition");
GL.EnableVertexAttribArray(vertexLocation);
GL.VertexAttribPointer( vertexLocation, 2, VertexAttribPointerType.Float, false, 2*sizeof(float), 0);
GL.EnableVertexAttribArray(0);
GL.BindBuffer(BufferTarget.ArrayBuffer, 0);
GL.BindVertexArray(0);
base.OnLoad(e);
}
protected override void OnRenderFrame(FrameEventArgs e)
{
GL.ClearColor(Color4.DarkBlue);
GL.Clear(ClearBufferMask.ColorBufferBit | ClearBufferMask.DepthBufferBit);
GL.BindVertexArray(VertexArrayObject);
GL.DrawArrays(PrimitiveType.LineStrip, 0, test.Count());
GL.PointSize(10);
SwapBuffers();
base.OnRenderFrame(e);
}
shader.vert
#version 400 core
in vec2 aPosition;
void main()
{
gl_Position = vec4(aPosition, 0.0f, 1.0f);
}
I roughly know how I will draw the 10 sets: I will add an offset to every data set, combine them into one array (the test array), and pass that to shader.vert. The thing is, I don't know what to do with the x coordinates. I don't want to keep adding them to the test array, as shown in the EditVertices function, because that significantly increases its size, especially given how much data I am dealing with. Is there a way to pass the x coordinates separately but still relate them to all the data sets, so that they all share the same x coordinates but have different y values? I hope my question is clear.

Is there a way where I can pass x coordinates separately somehow but at the same time relate it to all data sets?
Use 2 attributes of type float, one for the x coordinate and another one for the y coordinate:
#version 400 core
in float aPositionX;
in float aPositionY;
void main()
{
gl_Position = vec4(aPositionX, aPositionY, 0.0, 1.0);
}
var vertexLocationX = shader.GetAttribLocation("aPositionX");
var vertexLocationY = shader.GetAttribLocation("aPositionY");
Now you can use the same x coordinates for all data sets. Create one array of vertex attribute data for the x coordinates:
float[] vertexX = new float[list.Count()];
for(int i = 0; i < list.Count(); i++)
    vertexX[i] = -1f + i * (2f / list.Count());
The array for the Y coordinates has to be created separately for each data set:
float[] vertexY = new float[list.Count()];
for(int i = 0; i < list.Count(); i++)
    vertexY[i] = (list[i] - list.Min()) / (list.Max() - list.Min());
You have to create 2 separate Vertex Buffer Objects:
VertexBufferObjectX = GL.GenBuffer();
GL.BindBuffer(BufferTarget.ArrayBuffer, VertexBufferObjectX);
GL.BufferData(BufferTarget.ArrayBuffer, vertexX.Count()*sizeof(float), vertexX, BufferUsageHint.StaticDraw);
VertexBufferObjectY = GL.GenBuffer();
GL.BindBuffer(BufferTarget.ArrayBuffer, VertexBufferObjectY);
GL.BufferData(BufferTarget.ArrayBuffer, vertexY.Count()*sizeof(float), vertexY, BufferUsageHint.StaticDraw);
And you have to adapt the specification of the Vertex Array Object:
VertexArrayObject = GL.GenVertexArray();
GL.BindVertexArray(VertexArrayObject);
GL.BindBuffer(BufferTarget.ArrayBuffer, VertexBufferObjectX);
GL.VertexAttribPointer(vertexLocationX, 1, VertexAttribPointerType.Float, false, 0, 0);
GL.BindBuffer(BufferTarget.ArrayBuffer, VertexBufferObjectY);
GL.VertexAttribPointer(vertexLocationY, 1, VertexAttribPointerType.Float, false, 0, 0);
GL.EnableVertexAttribArray(vertexLocationX);
GL.EnableVertexAttribArray(vertexLocationY);
GL.BindVertexArray(0);
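With that in place, drawing all 10 data sets can reuse the single x buffer and just swap the y buffer between draw calls. A minimal sketch: the names dataSetCount, vertexBufferObjectsY, pointCount and the yOffset uniform are assumptions, not part of the code above, and the vertex shader would need something like gl_Position = vec4(aPositionX, aPositionY + yOffset, 0.0, 1.0); to apply the per-set offset:
// Sketch: one shared X VBO, one Y VBO per data set, vertical offset via a hypothetical "yOffset" uniform.
GL.BindVertexArray(VertexArrayObject);
int yOffsetLocation = GL.GetUniformLocation(shader.Handle, "yOffset"); // assumes the Shader wrapper exposes its program handle

for (int set = 0; set < dataSetCount; set++)
{
    // Re-point the Y attribute at this data set's buffer; the X attribute keeps using VertexBufferObjectX.
    GL.BindBuffer(BufferTarget.ArrayBuffer, vertexBufferObjectsY[set]);
    GL.VertexAttribPointer(vertexLocationY, 1, VertexAttribPointerType.Float, false, 0, 0);

    // Shift each curve vertically so the plots don't overlap.
    GL.Uniform1(yOffsetLocation, set * 0.15f);

    GL.DrawArrays(PrimitiveType.LineStrip, 0, pointCount);
}
GL.BindVertexArray(0);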

Related

OpenGL does not find texture coordinates with VertexAttribPointer

I probably have a very simple question about OpenGL. I want to draw different shapes with textures on them.
So I defined a vertex struct, which contains the vertex positions and additional information (normal vector, texture coordinates, reference to a texture):
public struct Vertex
{
public Vector3 position;
public Vector3 normal;
public Vector2 texCoord;
public int texid;
public static int SizeInBytes
{
get { return Vector3.SizeInBytes * 2 + Vector2.SizeInBytes + sizeof(int); }
}
public Vertex(Vector3 position, Vector3 normal, Vector2 texCoord, int texid)
{
this.position = position;
this.normal = normal;
this.texCoord = texCoord;
this.texid = texid;
}
}
Then I create 2 vertex arrays (VA_TopBottom_WP and VA_Side_WP) with their vertex buffers (VB_TopBottom_WP and VB_Side_WP). Moreover, I create all the textures.
internal void init()
{
// Create Buffer for top and bottom part
GL.GenVertexArrays(1, out VA_TopBottom_WP);
GL.BindVertexArray(VA_TopBottom_WP);
VB_TopBottom_WP = GL.GenBuffer();
// Create Buffer for side part
GL.GenVertexArrays(1, out VA_Side_WP);
GL.BindVertexArray(VA_Side_WP);
VB_Side_WP = GL.GenBuffer();
// Create textures
texes = new List<STL_Tools.Texture2D>();
texes.Add(new STL_Tools.Texture2D());//Default texture
for (int n = 0; n < session.pgm.Count(); n++)
{
texes.Add(new STL_Tools.Texture2D());
}
updateFrame();
}
Then I update the buffer content:
public void updateFrame()
{
// Top and bottom buffer
GL.BindVertexArray(VA_TopBottom_WP);
GL.BindBuffer(BufferTarget.ArrayBuffer, VB_TopBottom_WP);
//Fill vertex structure
vertBuffer_TopBottom = poly.GetTopBottomMeshesVertex();
GL.BufferData<Vertex>(BufferTarget.ArrayBuffer, (IntPtr)(Vertex.SizeInBytes * vertBuffer_TopBottom.Length), vertBuffer_TopBottom, BufferUsageHint.StaticDraw);
GL.EnableVertexAttribArray(0);
GL.VertexAttribPointer(0,3, VertexAttribPointerType.Float,false, Vertex.SizeInBytes, (IntPtr)0);//Position
GL.VertexAttribPointer(1, 2, VertexAttribPointerType.Float, false, Vertex.SizeInBytes, (IntPtr)(Vector3.SizeInBytes*2));//Texture
GL.VertexAttribPointer(2, 3, VertexAttribPointerType.Float, false, Vertex.SizeInBytes, (IntPtr)(Vector3.SizeInBytes));//Normal
// Side buffer
GL.BindVertexArray(VA_Side_WP);
GL.BindBuffer(BufferTarget.ArrayBuffer, VB_Side_WP);
vertBuffer_Side = poly.GetSideVertex();//Fill vertices
GL.BufferData<Vertex>(BufferTarget.ArrayBuffer, (IntPtr)(Vertex.SizeInBytes * vertBuffer_Side.Length), vertBuffer_Side, BufferUsageHint.StaticDraw);
GL.EnableVertexAttribArray(0);
GL.VertexAttribPointer(0, 3, VertexAttribPointerType.Float, false, Vertex.SizeInBytes, (IntPtr)0);//Position
GL.VertexAttribPointer(1, 2, VertexAttribPointerType.Float, false, Vertex.SizeInBytes, (IntPtr)(Vector3.SizeInBytes * 2));//Texture
GL.VertexAttribPointer(2, 3, VertexAttribPointerType.Float, false, Vertex.SizeInBytes, (IntPtr)(Vector3.SizeInBytes));//Normal
GL.BindBuffer(BufferTarget.ArrayBuffer, 0);
}
Afterwards, the Vertex variables contain reasonable texture coordinates (the Vector2 objects below). E.g.:
vertBuffer_Side = new Vertex[4]
{new Vertex(new Vector3(0,0,0),new Vector3(1,0,0),new Vector2(0,0),0),
new Vertex(new Vector3(0,-80,0),new Vector3(1,0,0),new Vector2(1,0),0),
new Vertex(new Vector3(0,-80,-10),new Vector3(1,0,0),new Vector2(1,1),0),
new Vertex(new Vector3(0,0,-10),new Vector3(1,0,0),new Vector2(0,0),0)};
Then I draw everything with the following function:
public void renderFrame()
{
GL.PushMatrix();
GL.EnableClientState(ArrayCap.VertexArray);
GL.EnableClientState(ArrayCap.NormalArray);
GL.EnableClientState(ArrayCap.TextureCoordArray);
// Draw Side
GL.BindVertexArray(VA_Side_WP);
GL.Color3(Color.White);
GL.Enable(EnableCap.Texture2D);
for (int n = 0; n < (vertBuffer_Side.Length / 4); n++)
{
//GL.BindTexture(TextureTarget.Texture2D, 0);
GL.BindTexture(TextureTarget.Texture2D, texes[vertBuffer_Side[n*4].texid].ID);
GL.DrawArrays(PrimitiveType.Quads, n*4,4);
}
GL.BindBuffer(BufferTarget.ArrayBuffer, 0);
GL.Disable(EnableCap.Texture2D);
//Draw top and bottom
GL.BindVertexArray(VA_TopBottom_WP);
...
GL.PopMatrix();
}
The geometry is drawn correctly, but the texture is not: everything is drawn with only one color, the color of the far-right pixel of the texture.
Probably, the following function does not work correctly:
GL.VertexAttribPointer(1, 2, VertexAttribPointerType.Float, false, Vertex.SizeInBytes, (IntPtr)(Vector3.SizeInBytes * 2));//Texture
Where is my mistake?
The client-side capabilities are states of the Vertex Array Object. Therefore, you need to bind the VAO before setting the client-side capabilities. Also, you are not using a shader program; you are using the fixed-function pipeline. Therefore, you must define the VertexPointer, NormalPointer and TexCoordPointer instead of specifying generic arrays of vertex attributes. Note that it is possible to specify generic vertex attribute 0 instead of the VertexPointer, but this is not possible for the normals and texture coordinates (also see What are the Attribute locations for fixed function pipeline in OpenGL 4.0++ core profile?):
GL.VertexPointer(3, VertexPointerType.Float, Vertex.SizeInBytes, (IntPtr)0);//Position
GL.TexCoordPointer(2, TexCoordPointerType.Float, Vertex.SizeInBytes, (IntPtr)(Vector3.SizeInBytes*2));//Texture
GL.NormalPointer(NormalPointerType.Float, Vertex.SizeInBytes, (IntPtr)(Vector3.SizeInBytes));//Normal
GL.BindVertexArray(VA_Side_WP);
GL.EnableClientState(ArrayCap.VertexArray);
GL.EnableClientState(ArrayCap.NormalArray);
GL.EnableClientState(ArrayCap.TextureCoordArray);
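Putting the pieces together for the side mesh, updateFrame would then look roughly like this (a sketch assembled from the snippets above, not a complete listing):
// Sketch: specify fixed-function pointers inside the VAO for the side mesh.
GL.BindVertexArray(VA_Side_WP);
GL.BindBuffer(BufferTarget.ArrayBuffer, VB_Side_WP);
GL.BufferData<Vertex>(BufferTarget.ArrayBuffer, (IntPtr)(Vertex.SizeInBytes * vertBuffer_Side.Length), vertBuffer_Side, BufferUsageHint.StaticDraw);

// Fixed-function pointers instead of generic vertex attributes.
GL.VertexPointer(3, VertexPointerType.Float, Vertex.SizeInBytes, (IntPtr)0);                              // position
GL.TexCoordPointer(2, TexCoordPointerType.Float, Vertex.SizeInBytes, (IntPtr)(Vector3.SizeInBytes * 2));  // texture
GL.NormalPointer(NormalPointerType.Float, Vertex.SizeInBytes, (IntPtr)(Vector3.SizeInBytes));             // normal

// Client-side capabilities are per-VAO state, so enable them while this VAO is bound.
GL.EnableClientState(ArrayCap.VertexArray);
GL.EnableClientState(ArrayCap.NormalArray);
GL.EnableClientState(ArrayCap.TextureCoordArray);

GL.BindBuffer(BufferTarget.ArrayBuffer, 0);
GL.BindVertexArray(0);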

How to render instances using the new OpenGL / OpenTK APIs

I'm trying to put together information from this tutorial (Advanced-OpenGL/Instancing) and these answers (How to render using 2 VBO) (New API Clarification) in order to render instances of a square, giving the model matrix for each instance to the shader through an ArrayBuffer. The code I ended up with is below. I sliced up and tested every part, and the problem seems to be that the model matrix itself is not passed correctly to the shader. I'm using OpenTK in Visual Studio.
For simplicity and debugging, the pool contains just a single square, so for now I shouldn't be running into divisor problems or other subtleties I haven't dealt with yet.
My vertex data arrays contain 3 floats for position and 4 floats for color (stride = 7 times the float size).
My results with the attached code are:
If I remove the imodel multiplication in the vertex shader, I get exactly what I expect: a red square (rendered as 2 triangles) with a green border (rendered as a line loop).
If I change the shader and multiply by the model matrix, I get a red line above the center of the screen whose length changes over time. The animation makes sense, because the simulation is rotating the square, so the angle updates regularly and thus the calculated model matrix changes. That is another good sign, because I'm actually sending dynamic data to the shader. However, I can't get my original square rotated and translated.
Any clue?
Thanks a lot.
Vertex Shader:
#version 430 core
layout (location = 0) in vec3 aPos;
layout (location = 1) in vec4 aCol;
layout (location = 2) in mat4 imodel;
out vec4 fColor;
uniform mat4 view;
uniform mat4 projection;
void main() {
fColor = aCol;
gl_Position = vec4(aPos, 1.0) * imodel * view * projection;
}
Fragment Shader:
#version 430 core
in vec4 fColor;
out vec4 FragColor;
void main() {
FragColor = fColor;
}
OnLoad snippet (initialization):
InstanceVBO = GL.GenBuffer();
GL.GenBuffers(2, VBO);
GL.BindBuffer(BufferTarget.ArrayBuffer, VBO[0]);
GL.BufferData(BufferTarget.ArrayBuffer,
7 * LineLoopVertCount * sizeof(float),
LineLoopVertData, BufferUsageHint.StaticDraw);
GL.BindBuffer(BufferTarget.ArrayBuffer, VBO[1]);
GL.BufferData(BufferTarget.ArrayBuffer,
7 * TrianglesVertCount * sizeof(float),
TrianglesVertData, BufferUsageHint.StaticDraw);
GL.BindBuffer(BufferTarget.ArrayBuffer, 0);
// VAO SETUP
VAO = GL.GenVertexArray();
GL.BindVertexArray(VAO);
// Position
GL.EnableVertexAttribArray(0);
GL.VertexAttribFormat(0, 3, VertexAttribType.Float, false, 0);
GL.VertexArrayAttribBinding(VAO, 0, 0);
// Color
GL.EnableVertexAttribArray(1);
GL.VertexAttribFormat(1, 4, VertexAttribType.Float, false, 3 * sizeof(float));
GL.VertexArrayAttribBinding(VAO, 1, 0);
int vec4Size = 4;
GL.EnableVertexAttribArray(2);
GL.VertexAttribFormat(2, 4, VertexAttribType.Float, false, 0 * vec4Size * sizeof(float));
GL.VertexAttribFormat(3, 4, VertexAttribType.Float, false, 1 * vec4Size * sizeof(float));
GL.VertexAttribFormat(4, 4, VertexAttribType.Float, false, 2 * vec4Size * sizeof(float));
GL.VertexAttribFormat(5, 4, VertexAttribType.Float, false, 3 * vec4Size * sizeof(float));
GL.VertexAttribDivisor(2, 1);
GL.VertexAttribDivisor(3, 1);
GL.VertexAttribDivisor(4, 1);
GL.VertexAttribDivisor(5, 1);
GL.VertexArrayAttribBinding(VAO, 2, 1);
GL.BindVertexArray(0);
OnFrameRender snippet:
shader.Use();
shader.SetMatrix4("view", cameraViewMatrix);
shader.SetMatrix4("projection", cameraProjectionMatrix);
int mat4Size = 16;
for (int i = 0; i < simulation.poolCount; i++)
{
modelMatrix[i] = Matrix4.CreateFromAxisAngle(
this.RotationAxis, simulation.pool[i].Angle);
modelMatrix[i] = modelMatrix[i] * Matrix4.CreateTranslation(new Vector3(
simulation.pool[i].Position.X,
simulation.pool[i].Position.Y,
0f));
//modelMatrix[i] = Matrix4.Identity;
}
// Copy model matrices into the VBO
// ----------------------------------------
GL.BindBuffer(BufferTarget.ArrayBuffer, InstanceVBO);
GL.BufferData(BufferTarget.ArrayBuffer,
simulation.poolCount * mat4Size * sizeof(float),
modelMatrix, BufferUsageHint.DynamicDraw);
GL.BindBuffer(BufferTarget.ArrayBuffer, 0);
// ----------------------------------------
GL.BindVertexArray(VAO);
GL.BindVertexBuffer(1, InstanceVBO, IntPtr.Zero, mat4Size * sizeof(float));
GL.BindVertexBuffer(0, VBO[0], IntPtr.Zero, 7 * sizeof(float));
GL.DrawArraysInstanced(PrimitiveType.LineLoop, 0, LineLoopVertCount, simulation.poolCount);
GL.BindVertexBuffer(0, VBO[1], IntPtr.Zero, 7 * sizeof(float));
GL.DrawArraysInstanced(PrimitiveType.Triangles, 0, TrianglesVertCount, simulation.poolCount);
GL.BindVertexArray(0);
There is a lot wrong here.
First, you don't enable any of the attribute arrays after 2, even though your shader says that you're reading 3-5 too. Similarly, you don't set the attribute binding for any of the arrays after 2.
But your bigger problem is that you use glVertexAttribDivisor. That's the wrong function for what you're trying to do. That's the old API for setting the divisor.
In separate attribute format, the divisor is part of the buffer binding, not the vertex attribute. So the divisor needs to be set with glVertexBindingDivisor, and the index it is given is the index you intend to bind the buffer to. Which should be 1.
So presumably, your code should look like:
int vec4Size = 4;
for(int ix = 0; ix < 4; ++ix)
{
int attribIx = 2 + ix;
GL.EnableVertexAttribArray(attribIx);
GL.VertexAttribFormat(attribIx, 4, VertexAttribType.Float, false, ix * vec4Size * sizeof(float));
GL.VertexArrayAttribBinding(VAO, attribIx, 1); //All use the same buffer binding
}
GL.VertexBindingDivisor(1, 1);
In OpenTK the matrices are laid out by rows, not by columns as GLSL expects, so it is necessary to transpose them (swap rows and columns) before uploading. You can do it like this:
private Matrix4[] TransposeMatrix(Matrix4[] inputModel)
{
var outputModel = new Matrix4[inputModel.Length];
for(int i = 0; i < inputModel.Length; i++)
{
outputModel[i].Row0 = inputModel[i].Column0;
outputModel[i].Row1 = inputModel[i].Column1;
outputModel[i].Row2 = inputModel[i].Column2;
outputModel[i].Row3 = inputModel[i].Column3;
}
return outputModel;
}
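A minimal usage sketch, assuming the buffer upload from the OnFrameRender snippet above: transpose the matrices just before copying them into InstanceVBO:
// Transpose the row-major OpenTK matrices before uploading them as per-instance attributes.
Matrix4[] uploadMatrices = TransposeMatrix(modelMatrix);

GL.BindBuffer(BufferTarget.ArrayBuffer, InstanceVBO);
GL.BufferData(BufferTarget.ArrayBuffer,
    simulation.poolCount * mat4Size * sizeof(float),
    uploadMatrices, BufferUsageHint.DynamicDraw);
GL.BindBuffer(BufferTarget.ArrayBuffer, 0);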

Sprite Sheet Animation in OpenTK

I have been trying to make my first ever game engine (in OpenTK) and I'm stuck. I am trying to incorporate animations, and I'm trying to use sprite sheets because they would greatly decrease file size.
I know that I need to use a for loop to draw all the sprites in sequence, but I don't have a clue what I should do to make it work with sprite sheets.
Here is my current drawing code:
//The base function for all drawing done in the application
public static void Draw (Texture2D texture, Vector2 position, Vector2 scale, Color color, Vector2 origin, RectangleF? SourceRec = null)
{
Vector2[] vertices = new Vector2[4]
{
new Vector2(0, 0),
new Vector2(1, 0),
new Vector2(1, 1),
new Vector2(0, 1),
};
GL.BindTexture(TextureTarget.Texture2D, texture.ID);
//Beginning to draw on the screen
GL.Begin(PrimitiveType.Quads);
GL.Color3(color);
for (int i = 0; i < 4; i++)
{
GL.TexCoord2((SourceRec.Value.Left + vertices[i].X * SourceRec.Value.Width) / texture.Width, (SourceRec.Value.Left + vertices[i].Y * SourceRec.Value.Height) / texture.Height);
vertices[i].X *= texture.Width;
vertices[i].Y *= texture.Height;
vertices[i] -= origin;
vertices[i] *= scale;
vertices[i] += position;
GL.Vertex2(vertices[i]);
}
GL.End();
}
public static void Begin(int screenWidth, int screenHeight)
{
GL.MatrixMode(MatrixMode.Projection);
GL.LoadIdentity();
GL.Ortho(-screenWidth / 2, screenWidth / 2, screenHeight / 2, -screenHeight / 2, 0f, 1f);
}
Thanks in advance!!! :)
Firstly, I beg you, abandon the fixed function pipeline. It blinds you and restricts your understanding and creativity. Move to core 3.1+; its structural perfection is matched only by its potentiality, and the collaborative forethought and innovation will truly move you.
Now, you'll want to store your sprite sheet images as a Texture2DArray. It took me some time to figure out how to load these correctly:
public SpriteSheet(string filename, int spriteWidth, int spriteHeight)
{
// Assign ID and get name
this.textureID = GL.GenTexture();
this.spriteSheetName = Path.GetFileNameWithoutExtension(filename);
// Bind the Texture Array and set appropriate parameters
GL.BindTexture(TextureTarget.Texture2DArray, textureID);
GL.TexParameter(TextureTarget.Texture2DArray, TextureParameterName.TextureMinFilter, (int)TextureMinFilter.Nearest);
GL.TexParameter(TextureTarget.Texture2DArray, TextureParameterName.TextureMagFilter, (int)TextureMagFilter.Nearest);
GL.TexParameter(TextureTarget.Texture2DArray, TextureParameterName.TextureWrapS, (int)TextureWrapMode.ClampToEdge);
GL.TexParameter(TextureTarget.Texture2DArray, TextureParameterName.TextureWrapT, (int)TextureWrapMode.ClampToEdge);
// Load the image file
Bitmap image = new Bitmap(@"Tiles/" + filename);
BitmapData data = image.LockBits(new System.Drawing.Rectangle(0, 0, image.Width, image.Height), ImageLockMode.ReadOnly, System.Drawing.Imaging.PixelFormat.Format32bppArgb);
// Determine columns and rows
int spriteSheetwidth = image.Width;
int spriteSheetheight = image.Height;
int columns = spriteSheetwidth / spriteWidth;
int rows = spriteSheetheight / spriteHeight;
// Allocate storage
GL.TexStorage3D(TextureTarget3d.Texture2DArray, 1, SizedInternalFormat.Rgba8, spriteWidth, spriteHeight, rows * columns);
// Split the loaded image into individual Texture2D slices
GL.PixelStore(PixelStoreParameter.UnpackRowLength, spriteSheetwidth);
for (int i = 0; i < columns * rows; i++)
{
GL.TexSubImage3D(TextureTarget.Texture2DArray,
0, 0, 0, i, spriteWidth, spriteHeight, 1,
OpenTK.Graphics.OpenGL.PixelFormat.Bgra, PixelType.UnsignedByte,
data.Scan0 + (spriteWidth * 4 * (i % columns)) + (spriteSheetwidth * 4 * spriteHeight * (i / columns))); // 4 bytes in an Bgra value.
}
image.UnlockBits(data);
}
Then you can simply draw a quad, and the Z texture co-ordinate is the Texture2D index in the Texture2DArray. Note frag_texcoord is of type vec3.
#version 330 core
in vec3 frag_texcoord;
out vec4 outColor;
uniform sampler2DArray tex;
void main()
{
outColor = texture(tex, vec3(frag_texcoord));
}
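To animate, you then just change which layer of the array you sample each frame. A minimal sketch (elapsedSeconds, frameTime, frameCount, layerUniformLocation and the textureId accessor are placeholders, and it assumes the layer index is forwarded by the vertex shader into the third component of frag_texcoord):
// Pick the current animation frame from elapsed time and hand it to the shader.
int currentFrame = (int)(elapsedSeconds / frameTime) % frameCount;

GL.BindTexture(TextureTarget.Texture2DArray, spriteSheet.TextureId);       // hypothetical accessor for the texture id created above
GL.Uniform1(layerUniformLocation, (float)currentFrame);                     // ends up as the Z of frag_texcoord

// ...then draw the quad as usual.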

OpenGL 4 multisampling with FBO - FBO incomplete

I'm making 2D games in OpenTK (a C# wrapper for OpenGL 4), and all was well except for jagged edges of polygons and things jumping and stuttering instead of moving smoothly - so I'm trying to add in multisampling to antialias my textures.
My setup has several Cameras which render all their scene objects onto a FrameBufferObject texture (I would like this to be MSAA), which are then all drawn to the screen (no multisampling needed), one on top of the other.
Without multisampling it worked fine, but when I change all my Texture2D calls to Texture2DMultisample etc., I get FBO Not Complete errors and it draws wrong. I believe I need to change my shaders too, but I want to solve this first.
The code below references a few classes like Texture that I've made, but I don't think that should impact this, and I don't want to clutter the post - I will give more details if needed.
I set up the FBO for each camera with:
private void SetUpFBOTex()
{
_frameBufferTexture = new Texture(GL.GenTexture(), Window.W, Window.H);
GL.BindTexture(TextureTarget.Texture2DMultisample, _frameBufferTexture.ID);
GL.TexImage2DMultisample(TextureTargetMultisample.Texture2DMultisample, 0, PixelInternalFormat.Rgba8, Window.W, Window.H, true);
_frameBufferID = GL.GenFramebuffer();
}
and draw with:
public void Render(Matrix4 matrix)
{
GL.Enable(EnableCap.Multisample);
//Bind FBO to be the draw destination and clear it
GL.BindFramebuffer(FramebufferTarget.Framebuffer, _frameBufferID);
GL.FramebufferTexture2D(FramebufferTarget.Framebuffer, FramebufferAttachment.ColorAttachment0, TextureTarget.Texture2DMultisample, _frameBufferTexture.ID, 0);
GL.ClearColor(new Color4(0,0,0,0));
GL.Clear(ClearBufferMask.ColorBufferBit | ClearBufferMask.DepthBufferBit);
//draw stuff here
foreach (Layer l in Layers)
l.Render(Matrix);
GL.BindFramebuffer(FramebufferTarget.Framebuffer, 0);
//Bind the FBO to be drawn
_frameBufferTexture.Bind();
//Translate to camera window position
Matrix4 fbomatrix = matrix * Matrix4.CreateTranslation(_window.x, _window.y, 0) * FBOMatrix;
//Bind shader
shader.Bind(ref fbomatrix, DrawType);
//Some OpenGL setup nonsense, binding vertices and index buffer and telling OpenGL where in the vertex struct things are, pointers &c
GL.BindBuffer(BufferTarget.ArrayBuffer, vertexBuffer);
GL.EnableVertexAttribArray(shader.LocationPosition);
GL.VertexAttribPointer(shader.LocationPosition, 2, VertexAttribPointerType.Float, false, Stride, 0);
if (shader.LocationTexture != -1)
{
GL.EnableVertexAttribArray(shader.LocationTexture);
GL.VertexAttribPointer(shader.LocationTexture, 2, VertexAttribPointerType.Float, false, Stride, 8);
}
GL.EnableVertexAttribArray(shader.LocationColour);
GL.VertexAttribPointer(shader.LocationColour, 4, VertexAttribPointerType.UnsignedByte, true, Stride, 16);
GL.BindBuffer(BufferTarget.ElementArrayBuffer, indexBuffer);
//Draw the damn quad
GL.DrawArrays(DrawType, 0, Vertices.Length);
//Cleanup
GL.DisableVertexAttribArray(shader.LocationPosition);
if (shader.LocationTexture != -1)
GL.DisableVertexAttribArray(shader.LocationTexture);
GL.DisableVertexAttribArray(shader.LocationColour);
}
OK, @Andon gets credit for this - if you write it as an answer I'll mark that as the solution. I was indeed doing antialiasing with 0 samples!
I'm posting the working antialiased drawing-to-multiple-FBOs code for future OpenTK googlers.
private void SetUpFBOTex()
{
_frameBufferTexture = new Texture(GL.GenTexture(), Window.W, Window.H);
GL.BindTexture(TextureTarget.Texture2DMultisample, _frameBufferTexture.ID);
GL.TexImage2DMultisample(TextureTargetMultisample.Texture2DMultisample, 8, PixelInternalFormat.Rgba8, Window.W, Window.H, false);
_frameBufferID = GL.GenFramebuffer();
}
public void Render(Matrix4 matrix)
{
//Bind FBO to be the draw destination and clear it
GL.BindFramebuffer(FramebufferTarget.Framebuffer, _frameBufferID);
GL.FramebufferTexture2D(FramebufferTarget.Framebuffer, FramebufferAttachment.ColorAttachment0, TextureTarget.Texture2DMultisample, _frameBufferTexture.ID, 0);
GL.ClearColor(new Color4(0,0,0,0));
GL.Clear(ClearBufferMask.ColorBufferBit);
//draw stuff here
foreach (Layer l in Layers)
l.Render(Matrix);
//unbind FBO to allow drawing to screen again
GL.BindFramebuffer(FramebufferTarget.Framebuffer, 0);
//Bind the FBO to be drawn
GL.BindTexture(TextureTarget.Texture2DMultisample, _frameBufferTexture.ID);
//Translate to camera window position
Matrix4 fbomatrix = matrix * Matrix4.CreateTranslation(_window.x, _window.y, 0) * FBOMatrix;
//Rotate camera FBO texture
if (_rotationAngle != 0f)
{
fbomatrix = Matrix4.CreateTranslation(RotationCentre.x, RotationCentre.y, 0) * fbomatrix;
fbomatrix = Matrix4.CreateRotationZ(_rotationAngle) * fbomatrix;
fbomatrix = Matrix4.CreateTranslation(-RotationCentre.x, -RotationCentre.y, 0) * fbomatrix;
}
shader.Bind(ref fbomatrix, DrawType);
//Some OpenGL setup nonsense, binding vertices and index buffer and telling OpenGL where in the vertex struct things are, pointers &c
GL.BindBuffer(BufferTarget.ArrayBuffer, vertexBuffer);
GL.EnableVertexAttribArray(shader.LocationPosition);
GL.VertexAttribPointer(shader.LocationPosition, 2, VertexAttribPointerType.Float, false, Stride, 0);
if (shader.LocationTexture != -1)
{
GL.EnableVertexAttribArray(shader.LocationTexture);
GL.VertexAttribPointer(shader.LocationTexture, 2, VertexAttribPointerType.Float, false, Stride, 8);
}
GL.EnableVertexAttribArray(shader.LocationColour);
GL.VertexAttribPointer(shader.LocationColour, 4, VertexAttribPointerType.UnsignedByte, true, Stride, 16);
GL.BindBuffer(BufferTarget.ElementArrayBuffer, indexBuffer);
//Draw the damn quad
GL.DrawArrays(DrawType, 0, Vertices.Length);
//Cleanup
GL.DisableVertexAttribArray(shader.LocationPosition);
if (shader.LocationTexture != -1)
GL.DisableVertexAttribArray(shader.LocationTexture);
GL.DisableVertexAttribArray(shader.LocationColour);
}
I have a wrapper class to control Shader code, here's the bind call:
internal void Bind(ref Matrix4 matrixMVP)
{
//Set this shader as active shader
GL.UseProgram(programID);
//Load position matrix into vertex shaders
GL.UniformMatrix4(LocationMVPMatrix, false, ref matrixMVP);
//Load active texture into fragment shaders
GL.Uniform1(LocationSampler, 0);
}
Fragment shader:
/// <summary>
/// Test for a Multisampled fragment shader - http://www.opentk.com/node/2251
/// </summary>
public const string fragmentShaderTestSrc =
#"
#version 330
uniform sampler2DMS Sampler;
in vec2 InTexture;
in vec4 OutColour;
out vec4 OutFragColor;
int samples = 16;
float div= 1.0/samples;
void main()
{
OutFragColor = vec4(0.0);
ivec2 texcoord = ivec2(textureSize(Sampler) * InTexture); // used to fetch msaa texel location
for (int i=0;i<samples;i++)
{
OutFragColor += texelFetch(Sampler, texcoord, i) * OutColour; // add color samples together
}
OutFragColor *= div; //divide by num of samples to get color avg.
}
";
Vertex shader:
/// <summary>
/// Default vertex shader that only applies specified matrix transformation
/// </summary>
public const string vertexShaderDefaultSrc =
#"
#version 330
uniform mat4 MVPMatrix;
layout (location = 0) in vec2 Position;
layout (location = 1) in vec2 Texture;
layout (location = 2) in vec4 Colour;
out vec2 InVTexture;
out vec4 vFragColorVs;
void main()
{
gl_Position = MVPMatrix * vec4(Position, 0, 1);
InVTexture = Texture;
vFragColorVs = Colour;
}";

Rendering triangles with OpenTK from VBO

I cannot render triangles for the life of me with a VBO in OpenTK. I am loading my data into the VBO in the glControl_Load() event. When running I get a background screen with no triangles. The data comes from a mesh; m.OpenGLArrays(out data, out indices) outputs a list of floats and a list of ints. The list of floats holds the vertices T1v1, T1v2, T1v3, T2v1, T2v2, T2v3, ..., i.e. all three vertices of each triangle back to back.
However, this code gives a blank screen, while if I switch to the commented-out "immediate" rendering code instead, everything renders fine... What am I doing wrong?
private void glControl_Load(object sender, EventArgs e)
{
loaded = true;
glControl.MouseMove += new MouseEventHandler(glControl_MouseMove);
glControl.MouseWheel += new MouseEventHandler(glControl_MouseWheel);
GL.ClearColor(Color.DarkSlateGray);
GL.Color3(1f, 1f, 1f);
m.OpenGLArrays(out data, out indices);
this.indicesSize = (uint)indices.Length;
GL.GenBuffers(1, out VBOid[0]);
GL.GenBuffers(1, out VBOid[1]);
SetupViewport();
}
private void SetupViewport()
{
if (this.WindowState == FormWindowState.Minimized) return;
glControl.Width = this.Width - 32;
glControl.Height = this.Height - 80;
Frame_label.Location = new System.Drawing.Point(glControl.Width / 2, glControl.Height + 25);
GL.MatrixMode(MatrixMode.Projection);
//GL.LoadIdentity();
GL.Ortho(0, glControl.Width, 0, glControl.Height, -1, 1); // Bottom-left corner pixel has coordinate (0, 0)
GL.Viewport(0, 0, glControl.Width, glControl.Height); // Use all of the glControl painting area
GL.Enable(EnableCap.DepthTest);
GL.BindBuffer(BufferTarget.ArrayBuffer, VBOid[0]);
GL.BufferData(BufferTarget.ArrayBuffer, (IntPtr)(data.Length * sizeof(float)), data, BufferUsageHint.StaticDraw);
GL.BindBuffer(BufferTarget.ArrayBuffer, 0);
float aspect_ratio = this.Width / (float)this.Height;
projection = Matrix4.CreatePerspectiveFieldOfView(MathHelper.PiOver4, aspect_ratio, 1, 1024);
GL.MatrixMode(MatrixMode.Projection);
GL.LoadMatrix(ref projection);
}
private void glControl_Paint(object sender, PaintEventArgs e)
{
if (loaded)
{
GL.Clear(ClearBufferMask.ColorBufferBit |
ClearBufferMask.DepthBufferBit |
ClearBufferMask.StencilBufferBit);
modelview = Matrix4.LookAt(0f, 0f, -200f + zoomFactor, 0, 0, 0, 0.0f, 1.0f, 0.0f);
var aspect_ratio = Width / (float)Height;
projection = Matrix4.CreatePerspectiveFieldOfView(MathHelper.PiOver4, aspect_ratio, 1, 512);
GL.MatrixMode(MatrixMode.Projection);
GL.LoadMatrix(ref projection);
GL.MatrixMode(MatrixMode.Modelview);
GL.LoadMatrix(ref modelview);
GL.Rotate(angleY, 1.0f, 0, 0);
GL.Rotate(angleX, 0, 1.0f, 0);
GL.EnableClientState(ArrayCap.VertexArray);
GL.BindBuffer(BufferTarget.ArrayBuffer, VBOid[0]);
GL.Color3(Color.Yellow);
GL.VertexPointer(3, VertexPointerType.Float, Vector3.SizeInBytes, new IntPtr(0));
GL.DrawArrays(PrimitiveType.Triangles, 0, data.Length);
GL.BindBuffer(BufferTarget.ArrayBuffer, 0);
GL.DisableClientState(ArrayCap.VertexArray);
//GL.Color3(Color.Yellow);
//GL.PolygonMode(MaterialFace.Front, PolygonMode.Fill);
//GL.Begin(PrimitiveType.Triangles);
//for (int i = 0; i < this.md.mesh.Count; i++)
//{
// GL.Normal3(this.md.mesh[i].normal);
// GL.Vertex3(this.md.mesh[i].vertices[0]);
// GL.Vertex3(this.md.mesh[i].vertices[1]);
// GL.Vertex3(this.md.mesh[i].vertices[2]);
//}
//GL.End();
//GL.EndList();
glControl.SwapBuffers();
Frame_label.Text = "Frame: " + frameNum++;
}
}
If something doesn't seem right, then it probably isn't. I seriously questioned my understanding of OpenGL and spent hours looking at this. However, it was just a simple error: I forgot to increment a count variable in the for loop that transfers the mesh from one object to another, so every triangle had identical vertices! Always expect the unexpected when it comes to debugging!
