Using a frame buffer yields strange results? - c#

I am very new to OpenGL (OpenTK), and I tried to get my first frame buffer working so I could apply post processing effects. However, when I try to draw the frame buffer it draws in the wrong place.
My rendering code is here:
protected override void OnRenderFrame(FrameEventArgs args)
{
GL.BindFramebuffer(FramebufferTarget.Framebuffer, this._frameBufferObject);
//GL.BindTexture(TextureTarget.Texture2D, 0);
GL.Clear(ClearBufferMask.ColorBufferBit | ClearBufferMask.DepthBufferBit);
GL.Enable(EnableCap.DepthTest);
GL.UseProgram(this._shaderProgramHandle);
GL.BindVertexArray(this._vertexArrayHandle);
GL.BindBuffer(BufferTarget.ElementArrayBuffer, this._elementBufferObject);
GL.DrawElements(PrimitiveType.Triangles, 6, DrawElementsType.UnsignedInt, 0);
// Draw frame buffer
GL.BindFramebuffer(FramebufferTarget.Framebuffer, 0);
GL.ActiveTexture(TextureUnit.Texture0);
GL.BindTexture(TextureTarget.Texture2D, this._frameBufferTextureColorHandle);
GL.ActiveTexture(TextureUnit.Texture0);
GL.Clear(ClearBufferMask.ColorBufferBit);
GL.UseProgram(this._frameBufferProgramHandle);
int screenTextureLocation = GL.GetUniformLocation(this._frameBufferProgramHandle, "screenTexture");
GL.Uniform1(screenTextureLocation, 0);
GL.BindVertexArray(this._rectangleVertexArrayObject);
GL.Disable(EnableCap.DepthTest);
GL.DrawArrays(PrimitiveType.Triangles, 0, 6);
this.Context.SwapBuffers();
base.OnRenderFrame(args);
}
And the code that creates the array object is here:
this._rectangleVertexArrayObject = GL.GenVertexArray();
this._rectangleVertexBufferObject = GL.GenBuffer();
GL.BindVertexArray(this._rectangleVertexArrayObject);
GL.BindBuffer(BufferTarget.ArrayBuffer, this._rectangleVertexBufferObject);
GL.BufferData(BufferTarget.ArrayBuffer, rectangleVertices.Length * sizeof(float), rectangleVertices, BufferUsageHint.StaticDraw);
GL.VertexAttribPointer(0, 2, VertexAttribPointerType.Float, false, 4 * sizeof(float), 0);
GL.VertexAttribPointer(1, 2, VertexAttribPointerType.Float, false, 4 * sizeof(float), 2 * sizeof(float));
GL.EnableVertexAttribArray(0);
GL.EnableVertexAttribArray(1);
GL.BindVertexArray(0);
My shader code if it makes a difference:
string vertexShaderCode =
#"
#version 330 core
layout (location = 0) in vec3 aPosition;
layout (location = 1) in vec4 aColor;
layout (location = 2) in vec2 aTexCoord;
layout (location = 3) in float aTexId;
out vec4 vColor;
out vec2 texCoord;
out float texId;
void main(void)
{
vColor = aColor;
texCoord = aTexCoord;
texId = aTexId;
gl_Position = vec4(aPosition, 1.0);
}
";
string fragmentShaderCode =
#"
#version 330 core
in vec4 vColor;
in vec2 texCoord;
in float texId;
uniform sampler2D[2] textures;
out vec4 pixelColor;
void main()
{
if (texId == 0) {
pixelColor = texture(textures[int(0)], texCoord) * vColor;
}
else if (texId == 1) {
pixelColor = texture(textures[int(1)], texCoord) * vColor;
}
}
";
string frameBufferVertexShadeCode =
#"
#version 330 core
layout (location = 0) in vec2 aPosition;
layout (location = 1) in vec2 aTexCoord;
out vec2 texCoord;
void main()
{
gl_Position = vec4(aPosition.x, aPosition.y, 0.0, 1.0);
texCoord = aTexCoord;
}
";
string frameBufferFragmentShaderCode =
#"
#version 330 core
in vec2 texCoord;
out vec4 pixelColor;
uniform sampler2D screenTexture;
void main()
{
pixelColor = texture(screenTexture, texCoord);
}
";
And here is the list of the rectangle vertices:
float[] rectangleVertices =
{
// Coords // texCoords
+1.0f, -1.0f, +1.0f, +0.0f,
-1.0f, -1.0f, +0.0f, +0.0f,
-1.0f, +1.0f, +0.0f, +1.0f,
+1.0f, +1.0f, +1.0f, +1.0f,
+1.0f, -1.0f, +1.0f, +0.0f,
-1.0f, +1.0f, +0.0f, +1.0f,
};
The result created is also here

Related

Aspect ratio of images is missing when displaying multiple textures on OpenGL control - OpenTK

I can display three textures equally on an OpenGL control using the code below. The OpenGL control is placed at the bottom of the screen (with height equal to half of the screen height and width equal to the screen width). But it seems the aspect ratio of the images is missing in the OpenGL control.
Major part of my code are pasted here. Can you please suggest what is causing the issue ?
this.glControl1.BackColor = System.Drawing.Color.Blue;
this.glControl1.Location = new System.Drawing.Point(13, 412);
this.glControl1.Margin = new System.Windows.Forms.Padding(0);
this.glControl1.Name = "glControl1";
this.glControl1.Size = new System.Drawing.Size(469, 285);
this.glControl1.TabIndex = 8;
this.glControl1.VSync = false;
float[] vertices = {
// Left bottom triangle
-1f, -1f, 0f,
1f, -1f, 0f,
1f, 1f, 0f,
// Right top triangle
1f, 1f, 0f,
-1f, 1f, 0f,
-1f, -1f, 0f
};
int texSizeLoc;
int texSize1Loc;
int texSize2Loc;
public PlaywithTripleCam()
{
InitializeComponent();
this.SizeChanged += new EventHandler(PlaywithTripleCam_SizeChanged);
//ScreenWidth = Screen.PrimaryScreen.Bounds.Width;
//ScreenHeight = Screen.PrimaryScreen.Bounds.Height;
ScreenWidth = this.Width;
ScreenHeight = this.Height;
screenaspectratio =(float) ScreenWidth /(float) ScreenHeight;
//code for showing camera device list in three combo box
}
private void TripleCam_SizeChanged(object sender, EventArgs e)
{
glControl1.Width = this.Width;
glControl1.Height = this.Height / 2;
}
private void PlayButton_Click(object sender, EventArgs e)
{
StartPlaying();
GL.ClearColor(Color.MidnightBlue);
GL.Enable(EnableCap.DepthTest);
TexUtil.InitTexturing();
GL.Hint(HintTarget.PerspectiveCorrectionHint, HintMode.Nicest);
GL.DepthFunc(DepthFunction.Lequal);
GL.ColorMaterial(MaterialFace.FrontAndBack, ColorMaterialParameter.AmbientAndDiffuse);
GL.Enable(EnableCap.ColorMaterial);
GL.Enable(EnableCap.Blend);
GL.BlendFunc(BlendingFactor.SrcAlpha, BlendingFactor.OneMinusSrcAlpha);
GL.Ext.BindFramebuffer(FramebufferTarget.FramebufferExt, 0); // render per default onto screen, not some FBO
glControl1.Resize += new EventHandler(glControl1_Resize);
glControl1.Paint += new PaintEventHandler(glControl1_Paint);
Application.Idle += Application_Idle;
// Ensure that the viewport and projection matrix are set correctly.
glControl1_Resize(glControl1, EventArgs.Empty);
}
private void Application_Idle(object sender, EventArgs e)
{
while (glControl1.IsIdle)
{
Render();
}
}
public void Render()
{
GL.BindFramebuffer(FramebufferTarget.Framebuffer, 0); // use the visible framebuffer
if (videoFrame != null)
lock (videoFrame)
{
if (videoTexture != -1)
GL.DeleteTextures(1, ref videoTexture);
videoTexture = LoadTexture(videoFrame);
videoFrame.Dispose();
videoFrame = null;
}
GC.Collect();
if (videoFrame2 != null)
lock (videoFrame2)
{
if (videoTexture2 != -1)
GL.DeleteTextures(1, ref videoTexture2);
videoTexture2 = LoadTexture(videoFrame2);
videoFrame2.Dispose();
videoFrame2 = null;
}
GC.Collect();
if (videoFrame3!= null)
lock (videoFrame3)
{
if (videoTexture3 != -1)
GL.DeleteTextures(1, ref videoTexture3);
videoTexture3 = LoadTexture(videoFrame3);
videoFrame3.Dispose();
videoFrame3 = null;
}
GC.Collect();
GL.Clear(ClearBufferMask.ColorBufferBit | ClearBufferMask.DepthBufferBit);
DrawImage(videoTexture, videoTexture2, videoTexture3);
}
private void CreateShaders()
{
/***********Vert Shader********************/
vertShader = GL.CreateShader(ShaderType.VertexShader);
GL.ShaderSource(vertShader, #"attribute vec3 a_position;
varying vec2 vTexCoordIn;
void main() {
vTexCoordIn=( a_position.xy+1)/2;
gl_Position = vec4(a_position,1);
}");
GL.CompileShader(vertShader);
/***********Frag Shader ****************/
fragShader = GL.CreateShader(ShaderType.FragmentShader);
GL.ShaderSource(fragShader, #"
uniform sampler2D sTexture;
uniform sampler2D sTexture1;
uniform sampler2D sTexture2;
uniform vec2 sTexSize;
uniform vec2 sTexSize1;
uniform vec2 sTexSize2;
varying vec2 vTexCoordIn;
void main ()
{
vec2 vTexCoord=vec2(vTexCoordIn.x,vTexCoordIn.y);
if ( vTexCoord.x < 1.0/3.0 )
{
vec2 uv = vec2(vTexCoord.x * 3.0, vTexCoord.y);
uv.y *= sTexSize.x / sTexSize.y;
if (uv.y > 1.0)
discard;
gl_FragColor = texture2D(sTexture, uv);
}
else if ( vTexCoord.x >= 1.0/3.0 && vTexCoord.x < 2.0/3.0 )
{
vec2 uv = vec2(1.0-(vTexCoord.x * 3.0 - 1.0), vTexCoord.y);
uv.y *= sTexSize1.x / sTexSize1.y;
if (uv.y > 1.0)
discard;
gl_FragColor = texture2D(sTexture1, uv);
}
else if ( vTexCoord.x >= 2.0/3.0 )
{
vec2 uv = vec2(vTexCoord.x * 3.0 - 2.0, vTexCoord.y);
uv.y *= sTexSize2.x / sTexSize2.y;
if (uv.y > 1.0)
discard;
gl_FragColor = texture2D(sTexture2, uv);
}}");
GL.CompileShader(fragShader);
}
public void DrawImage(int image, int image1,int image2)
{
GL.Viewport(new Rectangle(0, 0, this.Width, this.Height / 2));
GL.MatrixMode(MatrixMode.Projection);
GL.PushMatrix();
GL.LoadIdentity();
GL.MatrixMode(MatrixMode.Modelview);
GL.PushMatrix();
GL.LoadIdentity();
GL.Disable(EnableCap.Lighting);
GL.Enable(EnableCap.Texture2D);
GL.ActiveTexture(TextureUnit.Texture0);
GL.BindTexture(TextureTarget.Texture2D, image);
GL.Uniform1(positionLocation1, 0);
GL.ActiveTexture(TextureUnit.Texture1);
GL.BindTexture(TextureTarget.Texture2D, image1);
GL.Uniform1(positionLocation2, 1);
GL.ActiveTexture(TextureUnit.Texture2);
GL.BindTexture(TextureTarget.Texture2D, image2);
GL.Uniform1(positionLocation3, 2);
if(videoFrame !=null )
{
float texW = videoFrame.Width;
float texH = videoFrame.Height;
GL.Uniform2(texSizeLoc, texW, texH);
}
if (videoFrame2 != null)
{
float tex1W = videoFrame2.Width;
float tex1H = videoFrame2.Height;
GL.Uniform2(texSize1Loc, tex1W, tex1H);
}
if (videoFrame3 != null)
{
float tex2W = videoFrame3.Width;
float tex2H = videoFrame3.Height;
GL.Uniform2(texSize2Loc, tex2W, tex2H);
}
GL.Begin(PrimitiveType.Quads);
GL.TexCoord2(0, 1);
GL.Vertex3(0, 0, 0);
GL.TexCoord2(0, 0);
GL.Vertex3(1920, 0, 0);
GL.TexCoord2(1, 1);
GL.Vertex3(1920, 1080, 0);
GL.TexCoord2(1, 0);
GL.Vertex3(0, 1080, 0);
GL.End();
RunShaders();
GL.Disable(EnableCap.Texture2D);
GL.PopMatrix();
GL.MatrixMode(MatrixMode.Projection);
GL.PopMatrix();
GL.MatrixMode(MatrixMode.Modelview);
ErrorCode ec = GL.GetError();
if (ec != 0)
System.Console.WriteLine(ec.ToString());
Console.Read();
glControl1.SwapBuffers();
}
private void RunShaders()
{
GL.UseProgram(program);
GL.DrawArrays(PrimitiveType.Triangles, 0, vertices.Length / 3);
ErrorCode ec = GL.GetError();
if (ec != 0)
System.Console.WriteLine(ec.ToString());
Console.Read();
}
private void glControl1_Paint(object sender, PaintEventArgs e)
{
Render();
}
private void glControl1_Resize(object sender, EventArgs e)
{
Init();
}
private void Init()
{
CreateShaders();
CreateProgram();
InitBuffers();
}
private void CreateProgram()
{
program = GL.CreateProgram();
GL.AttachShader(program, vertShader);
GL.AttachShader(program, fragShader);
GL.LinkProgram(program);
}
private void InitBuffers()
{
buffer = GL.GenBuffer();
positionLocation = GL.GetAttribLocation(program, "a_position");
positionLocation1 = GL.GetUniformLocation(program, "sTexture");
positionLocation2 = GL.GetUniformLocation(program, "sTexture1");
positionLocation3 = GL.GetUniformLocation(program, "sTexture2");
texSizeLoc = GL.GetUniformLocation(program, "sTexSize");
texSize1Loc = GL.GetUniformLocation(program, "sTexSize1");
texSize2Loc = GL.GetUniformLocation(program, "sTexSize2");
GL.EnableVertexAttribArray(positionLocation);
GL.BindBuffer(BufferTarget.ArrayBuffer, buffer);
GL.BufferData(BufferTarget.ArrayBuffer, (IntPtr)(vertices.Length * sizeof(float)), vertices, BufferUsageHint.StaticDraw);
GL.VertexAttribPointer(positionLocation, 3, VertexAttribPointerType.Float, false, 0, 0);
}
public int LoadTexture(Bitmap bitmap)
{
int tex = -1;
if (bitmap != null)
{
GL.Hint(HintTarget.PerspectiveCorrectionHint, HintMode.Nicest);
GL.GenTextures(1, out tex);
GL.BindTexture(TextureTarget.Texture2D, tex);
bitmap.RotateFlip(RotateFlipType.RotateNoneFlipY);
BitmapData data = bitmap.LockBits(new System.Drawing.Rectangle(0, 0, bitmap.Width, bitmap.Height),
ImageLockMode.ReadOnly, System.Drawing.Imaging.PixelFormat.Format32bppArgb);
GL.TexImage2D(TextureTarget.Texture2D, 0, PixelInternalFormat.Rgba, data.Width, data.Height, 0,
OpenTK.Graphics.OpenGL.PixelFormat.Bgra, PixelType.UnsignedByte, data.Scan0);
bitmap.UnlockBits(data);
GL.TexParameter(TextureTarget.Texture2D, TextureParameterName.TextureMinFilter, (int)TextureMinFilter.Linear);
GL.TexParameter(TextureTarget.Texture2D, TextureParameterName.TextureMagFilter, (int)TextureMagFilter.Linear);
GL.TexParameter(TextureTarget.Texture2D, TextureParameterName.TextureWrapS, (int)TextureWrapMode.ClampToEdge);
GL.TexParameter(TextureTarget.Texture2D, TextureParameterName.TextureWrapT, (int)TextureWrapMode.ClampToEdge);
}
return tex;
}
You have to consider the aspect ratio for each texture separately. Add 3 uniform variables with the texture sizes and multiply the aspect ratio by the u respectively v component of the texture coordinate.
Further you should discard fragments which are out of the bounds of the texture, this can be done by the discard keyword:
uniform sampler2D sTexture;
uniform sampler2D sTexture1;
uniform sampler2D sTexture2;
uniform vec2 sTexSize;
uniform vec2 sTexSize1;
uniform vec2 sTexSize2;
varying vec2 vTexCoordIn;
void main ()
{
vec2 vTexCoord=vec2(vTexCoordIn.x,vTexCoordIn.y);
if ( vTexCoord.x < 1.0/3.0 )
{
vec2 uv = vec2(vTexCoord.x * 3.0, vTexCoord.y);
uv.y *= sTexSize.x / sTexSize.y;
if (uv.y > 1.0)
discard;
gl_FragColor = texture2D(sTexture, uv);
}
else if ( vTexCoord.x >= 1.0/3.0 && vTexCoord.x < 2.0/3.0 )
{
vec2 uv = vec2(1.0-(vTexCoord.x * 3.0 - 1.0), vTexCoord.y);
uv.y *= sTexSize1.x / sTexSize1.y;
if (uv.y > 1.0)
discard;
gl_FragColor = texture2D(sTexture1, uv);
}
else if ( vTexCoord.x >= 2.0/3.0 )
{
vec2 uv = vec2(vTexCoord.x * 3.0 - 2.0, vTexCoord.y);
uv.y *= sTexSize2.x / sTexSize2.y;
if (uv.y > 1.0)
discard;
gl_FragColor = texture2D(sTexture2, uv);
}
}
Set the values of the uniforms by GL.Uniform2:
texSizeLoc = GL.GetUniformLocation(program, "sTexSize");
texSize1Loc = GL.GetUniformLocation(program, "sTexSize1");
texSize2Loc = GL.GetUniformLocation(program, "sTexSize2");
float texW = ...;
float texH = ...;
GL.GetUniform2(texSizeLoc, texW, texH);
float tex1W = ...;
float tex1H = ...;
GL.GetUniform2(texSize1Loc, tex1W, tex1H);
float tex2W = ...;
float tex2H = ...;
GL.GetUniform2(texSize2Loc, tex2W, tex2H);

Unable to find entry point 'glCreateShader' in 'OpenGL32.dll'

I am getting the error "Unable to find an entry point named 'glCreateShader' in DLL 'opengl32.dll'." Does anyone know what could be causing these errors? Here is the class that is causing the error
class PixelBlocks
{
private static ShaderProgram program;
public static void generateBlock(ref objStructs.Block Block)
{
Texture blockTex = Block.Texture;
VBO<Vector3> square;
VBO<int> elements;
float Scale = clientInfo.curScale;
Matrix4 trans;
Matrix4 SclFct;
program = new ShaderProgram(VertexShader, FragmentShader);
program.Use();
program["projection_matrix"].SetValue(Matrix4.CreatePerspectiveFieldOfView(0.45f, (float)Program.width / Program.height, 0.1f, 1000f));
program["view_matrix"].SetValue(Matrix4.LookAt(new Vector3(0, 0, 10), Vector3.Zero, Vector3.Up));
program["light_direction"].SetValue(new Vector3(0, 0, 1));
program["enable_lighting"].SetValue(Program.lighting);
square = new VBO<Vector3>(new Vector3[] {
new Vector3(-1, 1, 0),
new Vector3(1, 1, 0),
new Vector3(1, -1, 0),
new Vector3(-1, -1, 0) });
elements = new VBO<int>(new int[] { 0, 1, 2, 3 }, BufferTarget.ElementArrayBuffer);
trans = Matrix4.CreateTranslation(new Vector3(Block.Blk.x, Block.Blk.y, 0));
SclFct = Matrix4.CreateScaling(new Vector3(Scale, Scale, 0f));
Block.corners = square;
Block.elements = elements;
Block.trans = trans;
Block.Scale = SclFct;
}
public static bool drawBlocks(objStructs.Block[] Blocks)
{
for(int i = 0; i < Blocks.Length; i++)
{
try
{
Gl.UseProgram(program);
// set up the model matrix and draw the cube
program["model_matrix"].SetValue(Blocks[i].trans * Blocks[i].Scale);
Gl.BindBufferToShaderAttribute(Blocks[i].corners, program, "vertexPosition");
Gl.BindBuffer(Blocks[i].elements);
#pragma warning disable CS0618 // Type or member is obsolete
Gl.DrawElements(BeginMode.Quads, Blocks[i].elements.Count, DrawElementsType.UnsignedInt, IntPtr.Zero);
#pragma warning restore CS0618 // Type or member is obsolete
}
catch(Exception e)
{
Console.WriteLine(e);
return false;
}
}
return true;
}
public static string VertexShader = #"
#version 130
in vec3 vertexPosition;
in vec3 vertexNormal;
in vec2 vertexUV;
out vec3 normal;
out vec2 uv;
uniform mat4 projection_matrix;
uniform mat4 view_matrix;
uniform mat4 model_matrix;
void main(void)
{
normal = normalize((model_matrix * vec4(floor(vertexNormal), 0)).xyz);
uv = vertexUV;
gl_Position = projection_matrix * view_matrix * model_matrix * vec4(vertexPosition, 1);
}
";
public static string FragmentShader = #"
#version 130
uniform sampler2D texture;
uniform vec3 light_direction;
uniform bool enable_lighting;
in vec3 normal;
in vec2 uv;
out vec4 fragment;
void main(void)
{
float diffuse = max(dot(normal, light_direction), 0);
float ambient = 0.3;
float lighting = (enable_lighting ? max(diffuse, ambient) : 1);
fragment = lighting * texture2D(texture, uv);
}
";
}
I know that's a lot of code but I don't know what is causing this error.
I am using this library: https://github.com/giawa/opengl4csharp which is why it's not like any other questions
Graphics: Intel Iris Pro Graphics Experimental Version and Recommended Version both Tried
Processor: Intel i5
IDE: Visual Studio 2015 Community

error c0000: syntax error, unexpected '?' at token '?'

Alright I searched other peoples questions and could not find a solution to my problem. I am using OpenTK in C# and GLSL 330. It is producing the error message
error c0000: syntax error, unexpected '?' at token '?'
For some reason it doesn't like something I'm doing. So, here is my code I hope someone can tell
me what I'm doing wrong.
public static string vertexShaderSource = #"
#version 330
uniform mat4 pvm;
in vec4 Position;
in vec2 texCoord;
out vec2 texCoordV;
void main()
{
texCoordV = texCoord;
gl_Position = Position * pvm;
}";
public static string fragmentShaderSource = #"
#version 330
in vec2 texCoordV;
out vec4 colorOut;
void main()
{
colorOut = vec4(texCoord, 0.0, 0.0);
}";
public void Initalize()
{
style = GUI_Skin.styles[0];
vertices = new Vector3[6];
vertices[0] = new Vector3(0, 0, 0f);
vertices[1] = new Vector3(100, 0, 0f);
vertices[2] = new Vector3(0, 100, 0f);
vertices[3] = new Vector3(100, 0, 0f);
vertices[4] = new Vector3(0, 100, 0f);
vertices[5] = new Vector3(100, 100, 0f);
GL.GenBuffers(1, out vertHandle);
GL.BindBuffer(BufferTarget.ArrayBuffer, vertHandle);
GL.BufferData<Vector3>(BufferTarget.ArrayBuffer,
new IntPtr(vertices.Length * Vector3.SizeInBytes),
vertices, BufferUsageHint.StaticDraw);
texCoords = new Vector2[6];
texCoords[0] = new Vector2(0,0);
texCoords[1] = new Vector2(1, 0);
texCoords[2] = new Vector2(0, 1);
texCoords[3] = new Vector2(1, 0);
texCoords[4] = new Vector2(0, 1);
texCoords[5] = new Vector2(1, 1);
GL.GenBuffers(1, out texHandle);
GL.BindBuffer(BufferTarget.ArrayBuffer, texHandle);
GL.BufferData<Vector2>(BufferTarget.ArrayBuffer,
new IntPtr(texCoords.Length * Vector2.SizeInBytes),
texCoords, BufferUsageHint.StaticDraw);
}
public void Draw()
{
GL.EnableVertexAttribArray(vertHandle);
GL.BindBuffer(BufferTarget.ArrayBuffer, vertHandle);
GL.VertexAttribPointer(0, 3, VertexAttribPointerType.Float, false, Vector3.SizeInBytes, 0);
GL.EnableVertexAttribArray(texHandle);
GL.BindBuffer(BufferTarget.ArrayBuffer, texHandle);
GL.VertexAttribPointer(0, 2, VertexAttribPointerType.Float, false, Vector2.SizeInBytes, 0);
GL.DrawArrays(PrimitiveType.Triangles, 0, 6);
GL.DisableVertexAttribArray(vertHandle);
GL.DisableVertexAttribArray(texHandle);
}
Alright so the issues have been fixed. Thanks to the helpful comments above.
Let's start with the shader. The # symbol before the string declaration had to be removed and after every line \n had to be inserted. Also, I was calling transpose when I draw with the shader, which could be fixed by changing the order of the matrices.
public static void Run()
{
int uniformLocation = GL.GetUniformLocation(shaderProgramHandle, "pvm");
Matrix4 mat;
GL.GetFloat(GetPName.ProjectionMatrix, out mat);
GL.UniformMatrix4(uniformLocation, false, ref mat);
GL.UseProgram(shaderProgramHandle);
}
I changed from GL.UniformMatrix4(uniformLocation, true, ref mat); to GL.UniformMatrix4(uniformLocation, false, ref mat); and in the shader itself the order of gl_Position was changed from Position * pvm; to pvm * Position;
public static string vertexShaderSource = "#version 330\n" +
"uniform mat4 pvm;\n" +
"in vec4 Position;\n" +
"in vec2 texCoord;\n" +
"out vec2 texCoordV;\n" +
"void main()\n" +
"{\n" +
"texCoordV = texCoord;\n" +
"gl_Position = pvm * Position;\n" +
"}\n";
public static string fragmentShaderSource = "#version 330\n" +
"in vec2 texCoordV;\n" +
"out vec4 colorOut;" +
"void main()\n" +
"{\n" +
"colorOut = vec4(texCoordV, 0.0, 0.0);\n" +
"}\n" ;
After this was fixed I was getting an error where the rendering surface went white. The error was located within the Draw() function. Basically I wasn't assigning the array locations properly.
public void Draw()
{
GL.EnableVertexAttribArray(0);
GL.BindBuffer(BufferTarget.ArrayBuffer, vertHandle);
GL.VertexAttribPointer(0, 3, VertexAttribPointerType.Float, false, Vector3.SizeInBytes, 0);
GL.EnableVertexAttribArray(1);
GL.BindBuffer(BufferTarget.ArrayBuffer, texHandle);
GL.VertexAttribPointer(1, 2, VertexAttribPointerType.Float, false, Vector2.SizeInBytes, 0);
GL.DrawArrays(PrimitiveType.Triangles, 0, 6);
GL.DisableVertexAttribArray(0);
GL.DisableVertexAttribArray(1);
}

Draw texture with shader

I want to draw a texture in my shader but get an exception (see below).
I have following code:
int vertexArray;
//Pointer to Buffers
int vertexBuffer;
int colorBuffer;
int coordBuffer;
int texUniform; //Pointer to Uniform
int texture; //Pointer to Texture
Init
GL.Enable(EnableCap.Texture2D);
texture = LoadPNG("Resources\\Test.png");
//...
GL.TexParameter(TextureTarget.Texture2D, TextureParameterName.TextureMagFilter, (float)All.Nearest);
GL.TexParameter(TextureTarget.Texture2D, TextureParameterName.TextureMinFilter, (float)All.Nearest);
GL.Hint(HintTarget.PerspectiveCorrectionHint, HintMode.Nicest);
vertexArray = GL.GenVertexArray();
GL.BindVertexArray(vertexArray);
float[] TexCoords = new float[] {
0.0f, 0.0f,
1.0f, 0.0f,
0.0f, 1.0f,
}; //(Array.Length = 2*3)
//Arrays for Vertex (3*3) and Color (4*3)
//GenBuffer, BindBuffer and BufferData for Color and Vertex
coordBuffer = GL.GenBuffer();
GL.BindBuffer(BufferTarget.TextureBuffer, coordBuffer);
GL.BufferData(BufferTarget.TextureBuffer, (IntPtr)(sizeof(float) * TexCoords.Length), TexCoords, BufferUsageHint.StaticDraw);
//Load shader
texUniform = GL.GetUniformLocation(shaderProgram, "tex");
GL.Uniform1(texUniform, 0);
GL.ActiveTexture(TextureUnit.Texture0);
Draw
GL.UseProgram(shaderProgram);
GL.BindTexture(TextureTarget.Texture2D, texture);
GL.EnableVertexAttribArray(0);
GL.BindBuffer(BufferTarget.ArrayBuffer, vertexBuffer);
GL.VertexAttribPointer(0, 3, VertexAttribPointerType.Float, false, 0, 0);
GL.EnableClientState(ArrayCap.VertexArray);
GL.EnableVertexAttribArray(1);
GL.BindBuffer(BufferTarget.ArrayBuffer, colorBuffer);
GL.VertexAttribPointer(1, 4, VertexAttribPointerType.Float, false, 0, 0);
GL.EnableClientState(ArrayCap.ColorArray);
GL.BindBuffer(BufferTarget.TextureBuffer, coordBuffer);
GL.TexCoordPointer(2, TexCoordPointerType.Float, Vector2.SizeInBytes, 0);
GL.EnableClientState(ArrayCap.TextureCoordArray);
GL.DrawArrays(PrimitiveType.Triangles, 0, 6); //<------ Exception
GL.DisableVertexAttribArray(0);
GL.DisableVertexAttribArray(1);
I get a System.AccessViolationException at GL.DrawArrays(...);. I suspect that i haven't loaded a buffer correctly or used a pointer in an incorrect way. The exception is caused by changes i did to get a texture with texture coordinates into the shader, that means vertex and color buffer are working.
I'm not sure what i am doing wrong. I tried different things with the shader but it seems it doesn't matter what i am doing with the shader...
At my last try:
Vertex Shader
#version 330 core
layout(location = 0) in vec3 position;
layout(location = 1) in vec4 color;
layout(location = 2) in vec2 texCoord;
out vec4 vColor;
out vec2 texCoords[];
void main(){
gl_Position = vec4(position, 1.0);
texCoords[0] = texCoord;
vColor = color;
}
Fragment Shader
#version 330 core
in vec4 vColor;
in vec2 texCoords[];
uniform sampler2D tex;
out vec4 fColor;
void main(void)
{
//fColor = vColor;
fColor = texture2D(Texture0, texCoords[0].st);
}
GetShaderInfoLog and GetProgramInfoLog do not return any errors when i comment GL.DrawArrays(...) and run the application.
What is wrong with my code?
Do not enable client state vertex arrays.
Replace the following:
GL.EnableClientState(ArrayCap.VertexArray);
...
GL.EnableClientState(ArrayCap.ColorArray);
...
GL.EnableClientState(ArrayCap.TextureCoordArray);
With:
GL.EnableVertexAttribArray(0);
...
GL.EnableVertexAttribArray(1);
...
GL.EnableVertexAttribArray(2);
At present, you are telling GL to source vertex attributes from glVertexPointer (...), glColorPointer (...) and glTexCoordPointer (...), none of which you actually have setup.
You might be able to get away with enabling the client state: ArrayCap.VertexArray because many drivers alias that to attribute 0, but the others are a recipe for disaster. Nevertheless, until you remove the EnableClientState (...) calls you are going to continue crashing.
Update:
I missed something in your texture coordinate setup...
You also need to replace this line:
GL.TexCoordPointer(2, TexCoordPointerType.Float, Vector2.SizeInBytes, 0);
With this:
GL.VertexAttribPointer(2, 2, VertexAttribPointerType.Float, false, 0, 0);

OpenTK basic triangle not drawing as it should

I am trying to draw the triangle with the colours and vertices specified, but currently it seems like it's picking some colour numbers for the positions and is not doing what it's supposed to do.
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using OpenTK;
using OpenTK.Graphics.OpenGL;
using OpenTK.Graphics;
namespace newTriangle
{
class Program
{
static void Main(string[] args)
{
MyWindow myWindow = new MyWindow();
myWindow.Run();
}
}
class MyWindow : GameWindow
{
private uint[] vertexBufferObjectIDs = new uint[2];
private int vertexArrayID, vertexShaderID, fragmentShaderID, shaderProgramID;
public MyWindow()
: base(800, // Width
600, // Height
GraphicsMode.Default,
"My OpenTK Window",
GameWindowFlags.Default,
DisplayDevice.Default,
3, // major
0, // minor
GraphicsContextFlags.ForwardCompatible) { }
protected override void OnLoad(EventArgs e)
{
base.OnLoad(e);
GL.ClearColor(Color4.CornflowerBlue);
GL.GenVertexArrays(1, out vertexArrayID);
GL.BindVertexArray(vertexArrayID);
ushort[] indices = new ushort[] { 0, 1, 2 };
float[] vertices = new float[] {-1.0f, 1.0f, 0.0f, 1.0f, 0.0f,
0.0f, -1.0f, 1.0f, 0.0f, 0.0f,
1.0f, 1.0f, 0.0f, 0.0f, 1.0f };
GL.GenBuffers(vertexBufferObjectIDs.Length, vertexBufferObjectIDs);
GL.BindBuffer(BufferTarget.ArrayBuffer, vertexBufferObjectIDs[0]);
GL.BufferData(BufferTarget.ArrayBuffer, (IntPtr)(vertices.Length * sizeof(float)), vertices, BufferUsageHint.StaticDraw);
GL.BindBuffer(BufferTarget.ElementArrayBuffer, vertexBufferObjectIDs[1]);
GL.BufferData(BufferTarget.ElementArrayBuffer, (IntPtr)(indices.Length * sizeof(ushort)), indices, BufferUsageHint.StaticDraw);
GL.VertexAttribPointer(0, 2, VertexAttribPointerType.Float, true, 5 * sizeof(float), 0);
GL.EnableVertexAttribArray(0);
GL.VertexAttribPointer(1, 3, VertexAttribPointerType.Float, true, 5 * sizeof(float), 2 * sizeof(float));
GL.EnableVertexAttribArray(1);
vertexShaderID = GL.CreateShader(ShaderType.VertexShader);
string vertShaderText =
#"
#version 150
in vec3 position;
in vec3 colour;
out vec3 Colour;
void main()
{
Colour = colour;
gl_Position = vec4(position, 1) ;
}";
GL.ShaderSource(vertexShaderID, vertShaderText);
GL.CompileShader(vertexShaderID);
fragmentShaderID = GL.CreateShader(ShaderType.FragmentShader);
string fragShaderText =
#"
#version 150
in vec3 Colour;
out vec4 outputF;
void main()
{
outputF = vec4(Colour, 1.0);
}";
GL.ShaderSource(fragmentShaderID, fragShaderText);
GL.CompileShader(fragmentShaderID);
shaderProgramID = GL.CreateProgram();
GL.AttachShader(shaderProgramID, fragmentShaderID);
GL.AttachShader(shaderProgramID, vertexShaderID);
GL.LinkProgram(shaderProgramID);
GL.UseProgram(shaderProgramID);
}
protected override void OnUnload(EventArgs e)
{
base.OnUnload(e);
GL.DeleteBuffers(vertexBufferObjectIDs.Length, vertexBufferObjectIDs);
GL.DeleteVertexArrays(1, ref vertexArrayID);
GL.UseProgram(0); GL.DetachShader(shaderProgramID, vertexShaderID);
GL.DetachShader(shaderProgramID, fragmentShaderID);
GL.DeleteShader(fragmentShaderID);
GL.DeleteShader(vertexShaderID);
GL.DeleteProgram(shaderProgramID);
}
protected override void OnRenderFrame(FrameEventArgs e)
{
base.OnRenderFrame(e);
GL.Clear(ClearBufferMask.ColorBufferBit);
GL.DrawElements(BeginMode.Triangles, 3, DrawElementsType.UnsignedShort, IntPtr.Zero);
this.SwapBuffers();
}
}
}
can anyone see my mistake?
You didn't link up the locations of the attributes. You can fix it by using the appropriate calls to BindAttribLocation​, or use layout qualifiers with locations. Also, position is a vec3 but you give it only 2 floats.
Using layout qualifiers is an easy fix:
layout(location = 0) in vec2 position;
layout(location = 1) in vec3 colour;
That gives me this picture: http://i.imgur.com/H9FEXZ0.png which looks like it's probably what you had in mind.
vec4(position, 1)
In GLSL integers are not automatically type-promoted.
Kinda weird, because you got it right in your fragment shader.
Try this:
vec4(position, 1.0)

Categories

Resources