OpenTK basic triangle not drawing as it should - C#

I am trying to draw a triangle with the colours and vertices specified, but currently it seems like it's picking some of the colour values for the positions and is not doing what it's supposed to do.
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using OpenTK;
using OpenTK.Graphics.OpenGL;
using OpenTK.Graphics;
namespace newTriangle
{
class Program
{
static void Main(string[] args)
{
MyWindow myWindow = new MyWindow();
myWindow.Run();
}
}
class MyWindow : GameWindow
{
private uint[] vertexBufferObjectIDs = new uint[2];
private int vertexArrayID, vertexShaderID, fragmentShaderID, shaderProgramID;
public MyWindow()
: base(800, // Width
600, // Height
GraphicsMode.Default,
"My OpenTK Window",
GameWindowFlags.Default,
DisplayDevice.Default,
3, // major
0, // minor
GraphicsContextFlags.ForwardCompatible) { }
protected override void OnLoad(EventArgs e)
{
base.OnLoad(e);
GL.ClearColor(Color4.CornflowerBlue);
GL.GenVertexArrays(1, out vertexArrayID);
GL.BindVertexArray(vertexArrayID);
ushort[] indices = new ushort[] { 0, 1, 2 };
float[] vertices = new float[] {-1.0f, 1.0f, 0.0f, 1.0f, 0.0f,
0.0f, -1.0f, 1.0f, 0.0f, 0.0f,
1.0f, 1.0f, 0.0f, 0.0f, 1.0f };
GL.GenBuffers(vertexBufferObjectIDs.Length, vertexBufferObjectIDs);
GL.BindBuffer(BufferTarget.ArrayBuffer, vertexBufferObjectIDs[0]);
GL.BufferData(BufferTarget.ArrayBuffer, (IntPtr)(vertices.Length * sizeof(float)), vertices, BufferUsageHint.StaticDraw);
GL.BindBuffer(BufferTarget.ElementArrayBuffer, vertexBufferObjectIDs[1]);
GL.BufferData(BufferTarget.ElementArrayBuffer, (IntPtr)(indices.Length * sizeof(ushort)), indices, BufferUsageHint.StaticDraw);
GL.VertexAttribPointer(0, 2, VertexAttribPointerType.Float, true, 5 * sizeof(float), 0);
GL.EnableVertexAttribArray(0);
GL.VertexAttribPointer(1, 3, VertexAttribPointerType.Float, true, 5 * sizeof(float), 2 * sizeof(float));
GL.EnableVertexAttribArray(1);
vertexShaderID = GL.CreateShader(ShaderType.VertexShader);
string vertShaderText =
#"
#version 150
in vec3 position;
in vec3 colour;
out vec3 Colour;
void main()
{
Colour = colour;
gl_Position = vec4(position, 1) ;
}";
GL.ShaderSource(vertexShaderID, vertShaderText);
GL.CompileShader(vertexShaderID);
fragmentShaderID = GL.CreateShader(ShaderType.FragmentShader);
string fragShaderText =
#"
#version 150
in vec3 Colour;
out vec4 outputF;
void main()
{
outputF = vec4(Colour, 1.0);
}";
GL.ShaderSource(fragmentShaderID, fragShaderText);
GL.CompileShader(fragmentShaderID);
shaderProgramID = GL.CreateProgram();
GL.AttachShader(shaderProgramID, fragmentShaderID);
GL.AttachShader(shaderProgramID, vertexShaderID);
GL.LinkProgram(shaderProgramID);
GL.UseProgram(shaderProgramID);
}
protected override void OnUnload(EventArgs e)
{
base.OnUnload(e);
GL.DeleteBuffers(vertexBufferObjectIDs.Length, vertexBufferObjectIDs);
GL.DeleteVertexArrays(1, ref vertexArrayID);
GL.UseProgram(0); GL.DetachShader(shaderProgramID, vertexShaderID);
GL.DetachShader(shaderProgramID, fragmentShaderID);
GL.DeleteShader(fragmentShaderID);
GL.DeleteShader(vertexShaderID);
GL.DeleteProgram(shaderProgramID);
}
protected override void OnRenderFrame(FrameEventArgs e)
{
base.OnRenderFrame(e);
GL.Clear(ClearBufferMask.ColorBufferBit);
GL.DrawElements(BeginMode.Triangles, 3, DrawElementsType.UnsignedShort, IntPtr.Zero);
this.SwapBuffers();
}
}
}
Can anyone see my mistake?

You didn't link up the locations of the attributes. You can fix it with the appropriate calls to BindAttribLocation, or by using layout qualifiers with locations. Also, position is a vec3 but you only give it two floats.
Using layout qualifiers is an easy fix:
layout(location = 0) in vec2 position;
layout(location = 1) in vec3 colour;
That gives me this picture: http://i.imgur.com/H9FEXZ0.png, which looks like it's probably what you had in mind.
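If you would rather leave the shader source alone, the BindAttribLocation route mentioned above works too. A minimal sketch, assuming the attribute names from your vertex shader and the indices you already pass to VertexAttribPointer; the calls must come before GL.LinkProgram:
shaderProgramID = GL.CreateProgram();
GL.AttachShader(shaderProgramID, vertexShaderID);
GL.AttachShader(shaderProgramID, fragmentShaderID);
// Bind the attribute names to the indices used with VertexAttribPointer/EnableVertexAttribArray.
GL.BindAttribLocation(shaderProgramID, 0, "position"); // index 0: the two position floats per vertex
GL.BindAttribLocation(shaderProgramID, 1, "colour");   // index 1: the three colour floats per vertex
GL.LinkProgram(shaderProgramID);
GL.UseProgram(shaderProgramID);
Either way, declare position as vec2 (or upload three floats per position) so the shader matches the two-component attribute you set up.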

vec4(position, 1)
In GLSL integers are not automatically type-promoted.
Kinda weird, because you got it right in your fragment shader.
Try this:
vec4(position, 1.0)

Related

Using a frame buffer yields strange results?

I am very new to OpenGL (OpenTK), and I tried to get my first frame buffer working so I could apply post-processing effects. However, when I try to draw the frame buffer, it draws in the wrong place.
My rendering code is here:
protected override void OnRenderFrame(FrameEventArgs args)
{
GL.BindFramebuffer(FramebufferTarget.Framebuffer, this._frameBufferObject);
//GL.BindTexture(TextureTarget.Texture2D, 0);
GL.Clear(ClearBufferMask.ColorBufferBit | ClearBufferMask.DepthBufferBit);
GL.Enable(EnableCap.DepthTest);
GL.UseProgram(this._shaderProgramHandle);
GL.BindVertexArray(this._vertexArrayHandle);
GL.BindBuffer(BufferTarget.ElementArrayBuffer, this._elementBufferObject);
GL.DrawElements(PrimitiveType.Triangles, 6, DrawElementsType.UnsignedInt, 0);
// Draw frame buffer
GL.BindFramebuffer(FramebufferTarget.Framebuffer, 0);
GL.ActiveTexture(TextureUnit.Texture0);
GL.BindTexture(TextureTarget.Texture2D, this._frameBufferTextureColorHandle);
GL.ActiveTexture(TextureUnit.Texture0);
GL.Clear(ClearBufferMask.ColorBufferBit);
GL.UseProgram(this._frameBufferProgramHandle);
int screenTextureLocation = GL.GetUniformLocation(this._frameBufferProgramHandle, "screenTexture");
GL.Uniform1(screenTextureLocation, 0);
GL.BindVertexArray(this._rectangleVertexArrayObject);
GL.Disable(EnableCap.DepthTest);
GL.DrawArrays(PrimitiveType.Triangles, 0, 6);
this.Context.SwapBuffers();
base.OnRenderFrame(args);
}
And the code that creates the array object is here:
this._rectangleVertexArrayObject = GL.GenVertexArray();
this._rectangleVertexBufferObject = GL.GenBuffer();
GL.BindVertexArray(this._rectangleVertexArrayObject);
GL.BindBuffer(BufferTarget.ArrayBuffer, this._rectangleVertexBufferObject);
GL.BufferData(BufferTarget.ArrayBuffer, rectangleVertices.Length * sizeof(float), rectangleVertices, BufferUsageHint.StaticDraw);
GL.VertexAttribPointer(0, 2, VertexAttribPointerType.Float, false, 4 * sizeof(float), 0);
GL.VertexAttribPointer(1, 2, VertexAttribPointerType.Float, false, 4 * sizeof(float), 2 * sizeof(float));
GL.EnableVertexAttribArray(0);
GL.EnableVertexAttribArray(1);
GL.BindVertexArray(0);
My shader code if it makes a difference:
string vertexShaderCode =
#"
#version 330 core
layout (location = 0) in vec3 aPosition;
layout (location = 1) in vec4 aColor;
layout (location = 2) in vec2 aTexCoord;
layout (location = 3) in float aTexId;
out vec4 vColor;
out vec2 texCoord;
out float texId;
void main(void)
{
vColor = aColor;
texCoord = aTexCoord;
texId = aTexId;
gl_Position = vec4(aPosition, 1.0);
}
";
string fragmentShaderCode =
#"
#version 330 core
in vec4 vColor;
in vec2 texCoord;
in float texId;
uniform sampler2D[2] textures;
out vec4 pixelColor;
void main()
{
if (texId == 0) {
pixelColor = texture(textures[int(0)], texCoord) * vColor;
}
else if (texId == 1) {
pixelColor = texture(textures[int(1)], texCoord) * vColor;
}
}
";
string frameBufferVertexShadeCode =
#"
#version 330 core
layout (location = 0) in vec2 aPosition;
layout (location = 1) in vec2 aTexCoord;
out vec2 texCoord;
void main()
{
gl_Position = vec4(aPosition.x, aPosition.y, 0.0, 1.0);
texCoord = aTexCoord;
}
";
string frameBufferFragmentShaderCode =
#"
#version 330 core
in vec2 texCoord;
out vec4 pixelColor;
uniform sampler2D screenTexture;
void main()
{
pixelColor = texture(screenTexture, texCoord);
}
";
And here is the list of the rectangle vertices:
float[] rectangleVertices =
{
// Coords // texCoords
+1.0f, -1.0f, +1.0f, +0.0f,
-1.0f, -1.0f, +0.0f, +0.0f,
-1.0f, +1.0f, +0.0f, +1.0f,
+1.0f, +1.0f, +1.0f, +1.0f,
+1.0f, -1.0f, +1.0f, +0.0f,
-1.0f, +1.0f, +0.0f, +1.0f,
};
The result created is also here

OpenTK triangle not drawing

I have been following a tutorial, but the triangle doesn't show up for me and I have no idea what is wrong with my code that would cause it not to appear.
I am using OpenTK version 4.7.1
Here is my code:
This is Window.cs
Here is where I write OpenGl code
using OpenTK.Windowing.Common;
using OpenTK.Windowing.Desktop;
using OpenTK.Graphics.OpenGL4;
using OpenTK.Mathematics;
using System;
using System.Collections.Generic;
using System.Text;
namespace Models
{
public class Window : GameWindow
{
private int vertexBufferHandle;
private int shaderProgramHandle;
private int vertexArrayHandle;
public Window() : base(GameWindowSettings.Default, NativeWindowSettings.Default)
{
this.CenterWindow(new Vector2i(1280, 760));
}
protected override void OnResize(ResizeEventArgs e)
{
GL.Viewport(0, 0, e.Width, e.Height);
base.OnResize(e);
}
protected override void OnLoad()
{
GL.ClearColor(new Color4(0.3f, 0.4f, 0.5f, 1f));
float[] vertices =
{
0f, 0.5f,
0.5f, -0.5f,
-0.5f, -0.5f
};
vertexBufferHandle = GL.GenBuffer();
GL.BindBuffer(BufferTarget.ArrayBuffer, vertexBufferHandle);
GL.BufferData(BufferTarget.ArrayBuffer, vertices.Length * sizeof(float), vertices, BufferUsageHint.StaticDraw);
GL.BindBuffer(BufferTarget.ArrayBuffer, 0);
vertexArrayHandle = GL.GenVertexArray();
GL.BindVertexArray(vertexArrayHandle);
GL.BindBuffer(BufferTarget.ArrayBuffer, vertexBufferHandle);
GL.VertexAttribPointer(0, 2, VertexAttribPointerType.Float, false, 2 * sizeof(float), 0);
GL.EnableVertexAttribArray(0);
GL.BindVertexArray(0);
string vertexShader =
#"#version 330 core
layout (location = 0) in vec2 aPosition;
void main()
{
gl_Position = vec4(aPosition, 0, 1.0);
}";
string pixelShader =
#"#version 330 core
out vec4 FragColor;
void main()
{
FragColor = vec4(0.8, 0.2, 0.5, 1);
}";
int vertexShaderHandle = GL.CreateShader(ShaderType.VertexShader);
GL.ShaderSource(vertexShaderHandle, vertexShader);
GL.CompileShader(vertexShaderHandle);
int pixelShaderHandle = GL.CreateShader(ShaderType.FragmentShader);
GL.ShaderSource(pixelShaderHandle, pixelShader);
GL.CompileShader(pixelShaderHandle);
shaderProgramHandle = GL.CreateProgram();
GL.AttachShader(shaderProgramHandle, vertexShaderHandle);
GL.AttachShader(shaderProgramHandle, pixelShaderHandle);
GL.LinkProgram(shaderProgramHandle);
GL.DetachShader(shaderProgramHandle, vertexShaderHandle);
GL.DetachShader(shaderProgramHandle, pixelShaderHandle);
GL.DeleteShader(vertexShaderHandle);
GL.DeleteShader(pixelShaderHandle);
base.OnLoad();
}
protected override void OnUnload()
{
GL.BindBuffer(BufferTarget.ArrayBuffer, 0);
GL.DeleteBuffer(vertexBufferHandle);
GL.UseProgram(0);
GL.DeleteProgram(shaderProgramHandle);
base.OnUnload();
}
protected override void OnUpdateFrame(FrameEventArgs args)
{
base.OnUpdateFrame(args);
}
protected override void OnRenderFrame(FrameEventArgs args)
{
GL.Clear(ClearBufferMask.ColorBufferBit);
GL.UseProgram(shaderProgramHandle);
GL.BindVertexArray(vertexArrayHandle);
GL.DrawArrays(PrimitiveType.Triangles, 0, 1);
Context.SwapBuffers();
base.OnRenderFrame(args);
}
}
}
and this is the Program.cs
Here is where I run all the code
using OpenTK;
using OpenTK.Windowing.Common;
using OpenTK.Windowing.Desktop;
using OpenTK.Mathematics;
using System;
namespace Models
{
class Program
{
static void Main(string[] args)
{
using (var window = new Window())
{
window.Run();
}
}
}
}
The last argument of DrawArrays is not the number of primitives but the number of vertices, so change
GL.DrawArrays(PrimitiveType.Triangles, 0, 1);
to
GL.DrawArrays(PrimitiveType.Triangles, 0, 3);

Unable to find entry point 'glCreateShader' in 'OpenGL32.dll'

I am getting the error "Unable to find an entry point named 'glCreateShader' in DLL 'opengl32.dll'." Does anyone know what could be causing this error? Here is the class that is causing it:
class PixelBlocks
{
private static ShaderProgram program;
public static void generateBlock(ref objStructs.Block Block)
{
Texture blockTex = Block.Texture;
VBO<Vector3> square;
VBO<int> elements;
float Scale = clientInfo.curScale;
Matrix4 trans;
Matrix4 SclFct;
program = new ShaderProgram(VertexShader, FragmentShader);
program.Use();
program["projection_matrix"].SetValue(Matrix4.CreatePerspectiveFieldOfView(0.45f, (float)Program.width / Program.height, 0.1f, 1000f));
program["view_matrix"].SetValue(Matrix4.LookAt(new Vector3(0, 0, 10), Vector3.Zero, Vector3.Up));
program["light_direction"].SetValue(new Vector3(0, 0, 1));
program["enable_lighting"].SetValue(Program.lighting);
square = new VBO<Vector3>(new Vector3[] {
new Vector3(-1, 1, 0),
new Vector3(1, 1, 0),
new Vector3(1, -1, 0),
new Vector3(-1, -1, 0) });
elements = new VBO<int>(new int[] { 0, 1, 2, 3 }, BufferTarget.ElementArrayBuffer);
trans = Matrix4.CreateTranslation(new Vector3(Block.Blk.x, Block.Blk.y, 0));
SclFct = Matrix4.CreateScaling(new Vector3(Scale, Scale, 0f));
Block.corners = square;
Block.elements = elements;
Block.trans = trans;
Block.Scale = SclFct;
}
public static bool drawBlocks(objStructs.Block[] Blocks)
{
for(int i = 0; i < Blocks.Length; i++)
{
try
{
Gl.UseProgram(program);
// set up the model matrix and draw the cube
program["model_matrix"].SetValue(Blocks[i].trans * Blocks[i].Scale);
Gl.BindBufferToShaderAttribute(Blocks[i].corners, program, "vertexPosition");
Gl.BindBuffer(Blocks[i].elements);
#pragma warning disable CS0618 // Type or member is obsolete
Gl.DrawElements(BeginMode.Quads, Blocks[i].elements.Count, DrawElementsType.UnsignedInt, IntPtr.Zero);
#pragma warning restore CS0618 // Type or member is obsolete
}
catch(Exception e)
{
Console.WriteLine(e);
return false;
}
}
return true;
}
public static string VertexShader = @"
#version 130
in vec3 vertexPosition;
in vec3 vertexNormal;
in vec2 vertexUV;
out vec3 normal;
out vec2 uv;
uniform mat4 projection_matrix;
uniform mat4 view_matrix;
uniform mat4 model_matrix;
void main(void)
{
normal = normalize((model_matrix * vec4(floor(vertexNormal), 0)).xyz);
uv = vertexUV;
gl_Position = projection_matrix * view_matrix * model_matrix * vec4(vertexPosition, 1);
}
";
public static string FragmentShader = @"
#version 130
uniform sampler2D texture;
uniform vec3 light_direction;
uniform bool enable_lighting;
in vec3 normal;
in vec2 uv;
out vec4 fragment;
void main(void)
{
float diffuse = max(dot(normal, light_direction), 0);
float ambient = 0.3;
float lighting = (enable_lighting ? max(diffuse, ambient) : 1);
fragment = lighting * texture2D(texture, uv);
}
";
}
I know that's a lot of code, but I don't know what is causing this error.
I am using this library: https://github.com/giawa/opengl4csharp, which is why this isn't like other questions about the same error.
Graphics: Intel Iris Pro Graphics Experimental Version and Recommended Version both Tried
Processor: Intel i5
IDE: Visual Studio 2015 Community

OpenTK - VertexBufferObject doesn't draw anything

I'm trying to learn how to draw with VBOs in C# OpenTK - following examples like http://www.opentk.com/node/2292 and VBOs Using Interleaved Vertices in C#.
I'm pretty sure I want the interleaved single-array method like this, with a neat struct for each vertex. I got the code to compile with no errors, but it simply draws a blank brown screen with no white triangle. I'm sure I've made a stupid error; please help me learn from it!
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using OpenTK;
using OpenTK.Graphics;
using OpenTK.Graphics.OpenGL;
using OpenTK.Input;
using System.Drawing;
using System.Runtime.InteropServices;
namespace VBOTest2
{
[StructLayout(LayoutKind.Sequential)]
public struct Vertex
{
public Vector3 Position;
public byte[] Colour;
public Vertex(byte[] colour, Vector3 position)
{
Colour = colour;
Position = position;
}
public static readonly int Stride = Marshal.SizeOf(default(Vertex));
}
public class VBOTest2 : GameWindow
{
uint vbo;
public VBOTest2() :
base(1, 1, new GraphicsMode(32, 24, 8, 0), "Test")
{
Width = 1500;
Height = 800;
VSync = VSyncMode.On;
ClientSize = new Size(1500, 800);
this.Location = new System.Drawing.Point(100, 300);
GL.Viewport(0, 0, Width, Height);
}
void CreateVertexBuffer()
{
Vertex[] vertices = new Vertex[3];
vertices[0] = new Vertex(new byte[]{255,255,255,255}, new Vector3(-1f, -1f, 0f));
vertices[1] = new Vertex(new byte[] { 255, 255, 255, 255 }, new Vector3(1f, -1f, 0f));
vertices[2] = new Vertex(new byte[] { 255, 255, 255, 255 }, new Vector3(0f, 1f, 0f));
GL.GenBuffers(1, out vbo);
GL.BindBuffer(BufferTarget.ArrayBuffer, vbo);
GL.BufferData<Vertex>(BufferTarget.ArrayBuffer, (IntPtr)Vertex.Stride, vertices, BufferUsageHint.StaticDraw);
}
protected override void OnLoad(EventArgs e)
{
GL.ClearColor(Color.Brown);
CreateVertexBuffer();
}
protected override void OnRenderFrame(FrameEventArgs e)
{
base.OnRenderFrame(e);
GL.Clear(ClearBufferMask.ColorBufferBit);
GL.EnableClientState(ArrayCap.VertexArray);
GL.EnableClientState(ArrayCap.ColorArray);
GL.BindBuffer(BufferTarget.ArrayBuffer, vbo);
GL.VertexPointer(3, VertexPointerType.Float, Vertex.Stride, (IntPtr)(0));
GL.ColorPointer(4, ColorPointerType.UnsignedByte, Vertex.Stride, (IntPtr)(3 * sizeof(float)));
GL.DrawArrays(PrimitiveType.Triangles, 0, 3);
//release buffer
GL.BindBuffer(BufferTarget.ArrayBuffer, 0);
GL.DisableClientState(ArrayCap.VertexArray);
GL.DisableClientState(ArrayCap.ColorArray);
SwapBuffers();
}
}
}

error c0000: syntax error, unexpected '?' at token '?'

Alright, I searched other people's questions and could not find a solution to my problem. I am using OpenTK in C# and GLSL 330. It is producing the error message
error c0000: syntax error, unexpected '?' at token '?'
For some reason it doesn't like something I'm doing. So here is my code; I hope someone can tell me what I'm doing wrong.
public static string vertexShaderSource = @"
#version 330
uniform mat4 pvm;
in vec4 Position;
in vec2 texCoord;
out vec2 texCoordV;
void main()
{
texCoordV = texCoord;
gl_Position = Position * pvm;
}";
public static string fragmentShaderSource = @"
#version 330
in vec2 texCoordV;
out vec4 colorOut;
void main()
{
colorOut = vec4(texCoord, 0.0, 0.0);
}";
public void Initalize()
{
style = GUI_Skin.styles[0];
vertices = new Vector3[6];
vertices[0] = new Vector3(0, 0, 0f);
vertices[1] = new Vector3(100, 0, 0f);
vertices[2] = new Vector3(0, 100, 0f);
vertices[3] = new Vector3(100, 0, 0f);
vertices[4] = new Vector3(0, 100, 0f);
vertices[5] = new Vector3(100, 100, 0f);
GL.GenBuffers(1, out vertHandle);
GL.BindBuffer(BufferTarget.ArrayBuffer, vertHandle);
GL.BufferData<Vector3>(BufferTarget.ArrayBuffer,
new IntPtr(vertices.Length * Vector3.SizeInBytes),
vertices, BufferUsageHint.StaticDraw);
texCoords = new Vector2[6];
texCoords[0] = new Vector2(0,0);
texCoords[1] = new Vector2(1, 0);
texCoords[2] = new Vector2(0, 1);
texCoords[3] = new Vector2(1, 0);
texCoords[4] = new Vector2(0, 1);
texCoords[5] = new Vector2(1, 1);
GL.GenBuffers(1, out texHandle);
GL.BindBuffer(BufferTarget.ArrayBuffer, texHandle);
GL.BufferData<Vector2>(BufferTarget.ArrayBuffer,
new IntPtr(texCoords.Length * Vector2.SizeInBytes),
texCoords, BufferUsageHint.StaticDraw);
}
public void Draw()
{
GL.EnableVertexAttribArray(vertHandle);
GL.BindBuffer(BufferTarget.ArrayBuffer, vertHandle);
GL.VertexAttribPointer(0, 3, VertexAttribPointerType.Float, false, Vector3.SizeInBytes, 0);
GL.EnableVertexAttribArray(texHandle);
GL.BindBuffer(BufferTarget.ArrayBuffer, texHandle);
GL.VertexAttribPointer(0, 2, VertexAttribPointerType.Float, false, Vector2.SizeInBytes, 0);
GL.DrawArrays(PrimitiveType.Triangles, 0, 6);
GL.DisableVertexAttribArray(vertHandle);
GL.DisableVertexAttribArray(texHandle);
}
Alright, so the issues have been fixed, thanks to the helpful comments above.
Let's start with the shader. The @ symbol before the string declaration had to be removed, and \n had to be inserted after every line. Also, I was calling transpose when drawing with the shader, which could be fixed by changing the order of the matrices.
public static void Run()
{
int uniformLocation = GL.GetUniformLocation(shaderProgramHandle, "pvm");
Matrix4 mat;
GL.GetFloat(GetPName.ProjectionMatrix, out mat);
GL.UniformMatrix4(uniformLocation, false, ref mat);
GL.UseProgram(shaderProgramHandle);
}
I changed GL.UniformMatrix4(uniformLocation, true, ref mat) to GL.UniformMatrix4(uniformLocation, false, ref mat), and in the shader itself the gl_Position expression was changed from Position * pvm to pvm * Position.
public static string vertexShaderSource = "#version 330\n" +
"uniform mat4 pvm;\n" +
"in vec4 Position;\n" +
"in vec2 texCoord;\n" +
"out vec2 texCoordV;\n" +
"void main()\n" +
"{\n" +
"texCoordV = texCoord;\n" +
"gl_Position = pvm * Position;\n" +
"}\n";
public static string fragmentShaderSource = "#version 330\n" +
"in vec2 texCoordV;\n" +
"out vec4 colorOut;" +
"void main()\n" +
"{\n" +
"colorOut = vec4(texCoordV, 0.0, 0.0);\n" +
"}\n" ;
After this was fixed, I was getting an error where the rendering surface went white. The error was located within the Draw() function: basically, I wasn't assigning the attribute array locations properly.
public void Draw()
{
GL.EnableVertexAttribArray(0);
GL.BindBuffer(BufferTarget.ArrayBuffer, vertHandle);
GL.VertexAttribPointer(0, 3, VertexAttribPointerType.Float, false, Vector3.SizeInBytes, 0);
GL.EnableVertexAttribArray(1);
GL.BindBuffer(BufferTarget.ArrayBuffer, texHandle);
GL.VertexAttribPointer(1, 2, VertexAttribPointerType.Float, false, Vector2.SizeInBytes, 0);
GL.DrawArrays(PrimitiveType.Triangles, 0, 6);
GL.DisableVertexAttribArray(0);
GL.DisableVertexAttribArray(1);
}
