OpenCV webcam frames to OpenGL texture - C#

I am working in C#, using OpenTK (an OpenGL wrapper) and EmguCV (an OpenCV wrapper).
What I want to do is easy to understand: grab the webcam video stream and put it on a GLControl.
I have a static class called Capturer with a method that captures a frame and returns it as a wrapped cv::Mat object:
internal static void Initialize()
{
cap = new VideoCapture(1);
cap.SetCaptureProperty(Emgu.CV.CvEnum.CapProp.Fps, 25);
cap.SetCaptureProperty(Emgu.CV.CvEnum.CapProp.FrameWidth, 1920);
cap.SetCaptureProperty(Emgu.CV.CvEnum.CapProp.FrameHeight, 1080);
}
internal static Mat GetCurrentFrame()
{
mat = cap.QueryFrame();
if (!mat.IsEmpty)
{
return mat;
}
return null;
}
Now in my GLControl Load event I initialize the capturer and OpenGL:
Capturer.Initialize();
GL.ClearColor(Color.Blue);
GL.Enable(EnableCap.Texture2D);
GL.Viewport(-glControl1.Width, -glControl1.Height, glControl1.Width * 2, glControl1.Height * 2);
And finally, in the GLControl Paint event:
GL.Clear(ClearBufferMask.ColorBufferBit);
GL.MatrixMode(MatrixMode.Projection);
GL.LoadIdentity();
Mat m = Capturer.GetCurrentFrame();
if (m != null)
{
GL.GenTextures(1, out textureId);
GL.BindTexture(TextureTarget.Texture2D, this.textureId);
GL.TexParameter(TextureTarget.Texture2D, TextureParameterName.TextureMinFilter, (float)TextureMinFilter.Nearest);
GL.TexParameter(TextureTarget.Texture2D, TextureParameterName.TextureMagFilter, (float)TextureMagFilter.Linear);
GL.TexParameter(TextureTarget.Texture2D, TextureParameterName.TextureWrapS, (float)TextureWrapMode.Clamp);
GL.TexParameter(TextureTarget.Texture2D, TextureParameterName.TextureWrapT, (float)TextureWrapMode.Clamp);
GL.TexImage2D(TextureTarget.Texture2D, 0, PixelInternalFormat.Rgb, 1920, 1080, 0, OpenTK.Graphics.OpenGL.PixelFormat.Bgr, PixelType.UnsignedByte, m.DataPointer);
}
m.Dispose();
glControl1.SwapBuffers();
glControl1.Invalidate();
This shows nothing but a solid blue screen. I think the error is in m.DataPointer.
(I have tried rendering the frames as a Bitmap via the m.Bitmap property and it works, but the performance is very poor.)

Drawing a rectangle covering the GLControl solved it:
GL.Begin(PrimitiveType.Quads);
GL.TexCoord2(0, 0); GL.Vertex2(0, 0);
GL.TexCoord2(0, 1); GL.Vertex2(0, 1);
GL.TexCoord2(1, 1); GL.Vertex2(1, 1);
GL.TexCoord2(1, 0); GL.Vertex2(1, 0);
GL.End();
m.Dispose();
Be sure to dispose of the Mat after drawing the frame so you do not run out of memory.
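Putting the pieces together, here is a rough sketch (untested) of the whole approach: the texture is created once in the Load handler and then updated each frame with TexSubImage2D, instead of calling GenTextures/TexImage2D in every Paint, which would leak a texture per frame. The 1920x1080 size and BGR layout are assumed to match what the camera actually delivers.
// Load event: create the texture once (sketch, not tested).
textureId = GL.GenTexture();
GL.BindTexture(TextureTarget.Texture2D, textureId);
GL.TexParameter(TextureTarget.Texture2D, TextureParameterName.TextureMinFilter, (int)TextureMinFilter.Linear);
GL.TexParameter(TextureTarget.Texture2D, TextureParameterName.TextureMagFilter, (int)TextureMagFilter.Linear);
GL.TexImage2D(TextureTarget.Texture2D, 0, PixelInternalFormat.Rgb, 1920, 1080, 0, OpenTK.Graphics.OpenGL.PixelFormat.Bgr, PixelType.UnsignedByte, IntPtr.Zero);

// Paint event: upload the current frame into the existing texture and draw the quad.
GL.Clear(ClearBufferMask.ColorBufferBit);
Mat m = Capturer.GetCurrentFrame();
if (m != null)
{
    GL.BindTexture(TextureTarget.Texture2D, textureId);
    // Replace the texel data in place instead of reallocating the texture every frame.
    GL.TexSubImage2D(TextureTarget.Texture2D, 0, 0, 0, 1920, 1080, OpenTK.Graphics.OpenGL.PixelFormat.Bgr, PixelType.UnsignedByte, m.DataPointer);
    GL.Begin(PrimitiveType.Quads);
    GL.TexCoord2(0, 0); GL.Vertex2(0, 0);
    GL.TexCoord2(0, 1); GL.Vertex2(0, 1);
    GL.TexCoord2(1, 1); GL.Vertex2(1, 1);
    GL.TexCoord2(1, 0); GL.Vertex2(1, 0);
    GL.End();
    m.Dispose(); // dispose only when a frame was actually returned
}
glControl1.SwapBuffers();
glControl1.Invalidate();
If the image comes out upside down, swap the 0/1 values in the TexCoord2 calls: OpenCV stores rows top-down while OpenGL texture coordinates start at the bottom.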

Related

OpenTk OpenGl texture coordinate pointer not working

I pass the vertices to one buffer and the texture coordinates to another buffer. The vertices are drawn perfectly, but the texture coordinate pointer does not seem to do anything. In another project the same approach works without problems; I tried copying the code over from that project, but the problem persists and no errors are thrown. Immediate mode, however, works perfectly.
void renderFrame(object o, FrameEventArgs e)
{
// Clear screen
GL.Translate(Input.GetMoveDir(.1f));
GL.Clear(ClearBufferMask.ColorBufferBit | ClearBufferMask.DepthBufferBit);
GL.TexParameter(TextureTarget.Texture2D, TextureParameterName.TextureWrapT, (int)TextureWrapMode.Repeat);
//Render image
//Enable thing before use
GL.BindTexture(TextureTarget.Texture2D, texture.ID);
GL.Enable(EnableCap.Texture2D);
//Setup blending
GL.Enable(EnableCap.Blend);
GL.BlendFunc(BlendingFactor.SrcAlpha, BlendingFactor.OneMinusSrcAlpha);
// Immediate-mode test
/*GL.Begin(BeginMode.Triangles);
for (int i = 0; i < vertexBuffer.Length; i++)
{
GL.Normal3(normalBuffer[i]);
GL.TexCoord2(uvBuffer[i]);
GL.Vertex3(vertexBuffer[i]);
}
GL.End();*/
//Enable arrays
GL.EnableClientState(ArrayCap.VertexArray);
GL.EnableClientState(ArrayCap.TextureCoordArray);
//GL.EnableClientState(ArrayCap.NormalArray);
//GL.Color3(Color.Red);
//Set vertex pointer
GL.BindBuffer(BufferTarget.ArrayBuffer, VEB);
GL.VertexPointer(3, VertexPointerType.Float, Vector3.SizeInBytes, 0);
//Set uv pointer
GL.BindBuffer(BufferTarget.ArrayBuffer, UVB);
GL.TexCoordPointer(2, TexCoordPointerType.Float, Vector2.SizeInBytes, 0);
//Set normal pointer
/*GL.BindBuffer(BufferTarget.ArrayBuffer, NOB);
GL.NormalPointer(NormalPointerType.Float, Vector3.SizeInBytes, 0);*/
//Draw
GL.DrawArrays(PrimitiveType.Quads, 0, vertexBuffer.Length);
// Swap buffers
GL.Flush();
window.SwapBuffers();
}
public void UpdateBuffers(Vector3[] vertices, Vector2[] uvs, Vector3[] normals)
{
//Vertex buffer
vertexBuffer = vertices;
GL.BindBuffer(BufferTarget.ArrayBuffer, VEB);
GL.BufferSubData<Vector3>(BufferTarget.ArrayBuffer, (IntPtr)0, (int)(Vector3.SizeInBytes * vertexBuffer.Length), vertexBuffer);
GL.BindBuffer(BufferTarget.ArrayBuffer, 0);
//Uv buffer
uvBuffer = uvs;
GL.BindBuffer(BufferTarget.ArrayBuffer, UVB);
GL.BufferSubData<Vector2>(BufferTarget.ArrayBuffer, (IntPtr)0, (int)(Vector2.SizeInBytes * uvBuffer.Length), uvBuffer);
GL.BindBuffer(BufferTarget.ArrayBuffer, 0);
//Normal buffer
normalBuffer = normals;
GL.BindBuffer(BufferTarget.ArrayBuffer, NOB);
GL.BufferSubData<Vector3>(BufferTarget.ArrayBuffer, (IntPtr)0, (int)(Vector3.SizeInBytes * normalBuffer.Length), normalBuffer);
GL.BindBuffer(BufferTarget.ArrayBuffer, 0);
Console.WriteLine("Buffers updated");
}
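One thing not shown above is where the buffer storage is allocated. GL.BufferSubData only updates storage that was previously created with GL.BufferData, so for reference a one-time allocation (assuming VEB, UVB and NOB were generated with GL.GenBuffer, and maxVertices is a hypothetical capacity) might look like this:
// One-time allocation sketch; BufferSubData alone does not create storage.
GL.BindBuffer(BufferTarget.ArrayBuffer, VEB);
GL.BufferData(BufferTarget.ArrayBuffer, (IntPtr)(Vector3.SizeInBytes * maxVertices), IntPtr.Zero, BufferUsageHint.DynamicDraw);
GL.BindBuffer(BufferTarget.ArrayBuffer, UVB);
GL.BufferData(BufferTarget.ArrayBuffer, (IntPtr)(Vector2.SizeInBytes * maxVertices), IntPtr.Zero, BufferUsageHint.DynamicDraw);
GL.BindBuffer(BufferTarget.ArrayBuffer, NOB);
GL.BufferData(BufferTarget.ArrayBuffer, (IntPtr)(Vector3.SizeInBytes * maxVertices), IntPtr.Zero, BufferUsageHint.DynamicDraw);
GL.BindBuffer(BufferTarget.ArrayBuffer, 0);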

OpenGL - Clear a single cubemap in a cubemap Array

I need to clear a specific cubemap layer within a cubemap array. I think this may be achievable with glClearTexSubImage but I'm not having any luck with it.
For context, I'm shadow mapping: a cubemap array holds 8 cubemap layers, each containing scene/depth information for one of 8 light sources.
First, the entire cubemap array is cleared with
GL.Clear(ClearBufferMask.ColorBufferBit | ClearBufferMask.DepthBufferBit);
Next, the scene is rendered into the 8 cubemaps from the viewpoint of the 8 light sources.
for (int j = 0; j < lights.Count; j++)
{
// Create the light's view matrices
List<Matrix4> shadowTransforms = new List<Matrix4>();
shadowTransforms.Add(Matrix4.LookAt(lights[j].position, lights[j].position + new Vector3(1, 0, 0), new Vector3(0, -1, 0)));
shadowTransforms.Add(Matrix4.LookAt(lights[j].position, lights[j].position + new Vector3(-1, 0, 0), new Vector3(0, -1, 0)));
shadowTransforms.Add(Matrix4.LookAt(lights[j].position, lights[j].position + new Vector3(0, 1, 0), new Vector3(0, 0, 1)));
shadowTransforms.Add(Matrix4.LookAt(lights[j].position, lights[j].position + new Vector3(0, -1, 0), new Vector3(0, 0, -1)));
shadowTransforms.Add(Matrix4.LookAt(lights[j].position, lights[j].position + new Vector3(0, 0, 1), new Vector3(0, -1, 0)));
shadowTransforms.Add(Matrix4.LookAt(lights[j].position, lights[j].position + new Vector3(0, 0, -1), new Vector3(0, -1, 0)));
// Send uniforms to the shader
for (int i = 0; i < 6; i++)
{
Matrix4 shadowTransform = shadowTransforms[i];
GL.UniformMatrix4(shader.getUniformID("shadowTransforms[" + i + "]"), false, ref shadowTransform);
}
GL.Uniform1(shader.getUniformID("lightID"), j);
// Draw Scene
DrawSceneInstanced(shader);
}
This all works fine, updating every shadow map each frame. However, as an optimisation I want to update only a single shadow map per frame, which means I need to clear individual cubemap layers separately.
How is this done?
FBO/Cubemap Array creation and attachment:
public CubeMapArray()
{
// Create the FBO
GL.GenFramebuffers(1, out FBO_handle);
// Create and bind the CubeMap array
GL.GenTextures(1, out cubeMapTextureHandle);
GL.BindTexture(TextureTarget.TextureCubeMapArray, cubeMapTextureHandle);
// Allocate storage space
GL.TexImage3D(TextureTarget.TextureCubeMapArray, 0, PixelInternalFormat.Rg16, size, size, layers * 6, 0, PixelFormat.Red, PixelType.Float, IntPtr.Zero);
// Set the suitable texture parameters
GL.TexParameter(TextureTarget.TextureCubeMapArray, TextureParameterName.TextureMagFilter, (int)TextureMagFilter.Nearest);
GL.TexParameter(TextureTarget.TextureCubeMapArray, TextureParameterName.TextureMinFilter, (int)TextureMinFilter.Nearest);
GL.TexParameter(TextureTarget.TextureCubeMapArray, TextureParameterName.TextureWrapS, (int)TextureWrapMode.ClampToEdge);
GL.TexParameter(TextureTarget.TextureCubeMapArray, TextureParameterName.TextureWrapT, (int)TextureWrapMode.ClampToEdge);
GL.TexParameter(TextureTarget.TextureCubeMapArray, TextureParameterName.TextureWrapR, (int)TextureWrapMode.ClampToEdge);
GL.TexParameter(TextureTarget.TextureCubeMapArray, TextureParameterName.TextureBaseLevel, 0);
GL.TexParameter(TextureTarget.TextureCubeMapArray, TextureParameterName.TextureMaxLevel, 0);
// Create and bind the CubeMap depth array
GL.GenTextures(1, out cubeMapDepthHandle);
GL.BindTexture(TextureTarget.TextureCubeMapArray, cubeMapDepthHandle);
// Allocate storage space
GL.TexImage3D(TextureTarget.TextureCubeMapArray, 0, PixelInternalFormat.DepthComponent, size, size, layers * 6, 0, PixelFormat.DepthComponent, PixelType.UnsignedByte, IntPtr.Zero);
// Set the suitable texture parameters
GL.TexParameter(TextureTarget.TextureCubeMapArray, TextureParameterName.TextureMagFilter, (int)TextureMagFilter.Linear);
GL.TexParameter(TextureTarget.TextureCubeMapArray, TextureParameterName.TextureMinFilter, (int)TextureMinFilter.Linear);
GL.TexParameter(TextureTarget.TextureCubeMapArray, TextureParameterName.TextureWrapS, (int)TextureWrapMode.ClampToEdge);
GL.TexParameter(TextureTarget.TextureCubeMapArray, TextureParameterName.TextureWrapT, (int)TextureWrapMode.ClampToEdge);
GL.TexParameter(TextureTarget.TextureCubeMapArray, TextureParameterName.TextureWrapR, (int)TextureWrapMode.ClampToEdge);
GL.TexParameter(TextureTarget.TextureCubeMapArray, TextureParameterName.TextureBaseLevel, 0);
GL.TexParameter(TextureTarget.TextureCubeMapArray, TextureParameterName.TextureMaxLevel, 0);
// Attach cubemap texture as the FBO's color buffer
GL.BindFramebuffer(FramebufferTarget.Framebuffer, FBO_handle);
GL.FramebufferTexture(FramebufferTarget.Framebuffer, FramebufferAttachment.ColorAttachment0, cubeMapTextureHandle, 0);
GL.FramebufferTexture(FramebufferTarget.Framebuffer, FramebufferAttachment.DepthAttachment, cubeMapDepthHandle, 0);
// Error check
var errorcheck = GL.CheckFramebufferStatus(FramebufferTarget.Framebuffer);
Console.WriteLine("CUBEMAP ARRAY: " + errorcheck);
// Bind default framebuffer
GL.BindFramebuffer(FramebufferTarget.Framebuffer, 0);
}
Attach a specific layer of the texture array to the FBO using glFramebufferTextureLayer, then use glClear() to clear that attachment.
Solution Attempt:
GL.BindFramebuffer(FramebufferTarget.Framebuffer, shadowMapArray.FBO_handle);
FramebufferTarget target = FramebufferTarget.Framebuffer;
FramebufferAttachment attachment = FramebufferAttachment.ColorAttachment0;
int level = 0;
int layer = currentShadowMap;
int texture = shadowMapArray.FBO_handle;
GL.FramebufferTextureLayer(target, attachment, texture, level, layer);
GL.Clear(ClearBufferMask.ColorBufferBit | ClearBufferMask.DepthBufferBit);
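One thing that stands out in the attempt above is that shadowMapArray.FBO_handle is passed where glFramebufferTextureLayer expects a texture name. Below is a rough, untested sketch of what the answer describes, assuming the cubemap array's texture handles are accessible; note that for cubemap array textures the layer argument is a layer-face index, so cubemap currentShadowMap occupies layer-faces currentShadowMap * 6 to currentShadowMap * 6 + 5.
GL.BindFramebuffer(FramebufferTarget.Framebuffer, shadowMapArray.FBO_handle);
for (int face = 0; face < 6; face++)
{
    int layerFace = currentShadowMap * 6 + face;
    // Pass the texture handles, not the FBO handle.
    GL.FramebufferTextureLayer(FramebufferTarget.Framebuffer, FramebufferAttachment.ColorAttachment0, shadowMapArray.cubeMapTextureHandle, 0, layerFace);
    GL.FramebufferTextureLayer(FramebufferTarget.Framebuffer, FramebufferAttachment.DepthAttachment, shadowMapArray.cubeMapDepthHandle, 0, layerFace);
    GL.Clear(ClearBufferMask.ColorBufferBit | ClearBufferMask.DepthBufferBit);
}
// Re-attach the full (layered) textures before the next layered shadow pass.
GL.FramebufferTexture(FramebufferTarget.Framebuffer, FramebufferAttachment.ColorAttachment0, shadowMapArray.cubeMapTextureHandle, 0);
GL.FramebufferTexture(FramebufferTarget.Framebuffer, FramebufferAttachment.DepthAttachment, shadowMapArray.cubeMapDepthHandle, 0);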

Draw text using OpenTK

I'm going crazy trying to draw some text over an OpenGL window using OpenTK!
I followed some of the tutorials around, but I can't make it work: when I enable the texture the text is drawn into, I just get a white window and the quad I'm drawing as a test disappears.
If someone has the time to check the code, it is below. I can also send my test program to check it out faster. Thanks in advance for any help on this.
using System;
using System.Drawing;
using System.Drawing.Imaging;
using System.Windows.Forms;
using OpenTK.Graphics.OpenGL;
using System.Diagnostics;
namespace WindowsFormsApplication1
{
public partial class Form1 : Form
{
Bitmap textBmp;
int textTexture = -1;
public Form1()
{
InitializeComponent();
}
private void Form1_Load(object sender, EventArgs e)
{
if (!glControl1.Context.IsCurrent)
{
glControl1.MakeCurrent();
}
GL.MatrixMode(MatrixMode.Projection);
GL.LoadIdentity();
GL.Ortho(0, glControl1.Width, 0, glControl1.Height, -1000, 1000);
GL.Scale(1, 1, 1);
GL.Viewport(0, 0, glControl1.Width, glControl1.Height);
GL.ClearColor(Color.White);
// Better point and line drawing
GL.Hint(HintTarget.PointSmoothHint, HintMode.Nicest);
GL.Hint(HintTarget.LineSmoothHint, HintMode.Nicest);
GL.BlendFunc(BlendingFactorSrc.One, BlendingFactorDest.OneMinusSrcAlpha);
GL.Enable(EnableCap.PointSmooth);
GL.Enable(EnableCap.LineSmooth);
GL.Enable(EnableCap.Blend);
// Hide stuff behind in 3D
GL.Enable(EnableCap.DepthTest);
// Enable the texture
GL.Enable(EnableCap.Texture2D);
// Create Bitmap and OpenGL texture
textBmp = new Bitmap((int)glControl1.Width, (int)glControl1.Height);
textTexture = GL.GenTexture();
GL.BindTexture(TextureTarget.Texture2D, textTexture);
GL.TexParameter(TextureTarget.Texture2D, TextureParameterName.TextureMinFilter, (int)TextureMinFilter.Linear);
GL.TexParameter(TextureTarget.Texture2D, TextureParameterName.TextureMagFilter, (int)TextureMagFilter.Linear);
GL.TexImage2D(TextureTarget.Texture2D, 0, PixelInternalFormat.Rgba, textBmp.Width, textBmp.Height, 0,
OpenTK.Graphics.OpenGL.PixelFormat.Rgba, PixelType.UnsignedByte, IntPtr.Zero);
ErrorCode errorCode = GL.GetError();
Debug.Assert(errorCode == ErrorCode.NoError, "OpenTK error!");
}
private void glControl1_Paint(object sender, PaintEventArgs e)
{
ErrorCode errorCode;
GL.Clear(ClearBufferMask.ColorBufferBit | ClearBufferMask.DepthBufferBit | ClearBufferMask.StencilBufferBit);
GL.PushMatrix();
GL.Color3(Color.Black);
GL.Begin(PrimitiveType.Quads);
GL.Vertex3(10, 10, 10);
GL.Vertex3(40, 10, 10);
GL.Vertex3(40, 50, 10);
GL.Vertex3(10, 50, 10);
GL.End();
if (textBmp != null)
{
using (Graphics gfx = Graphics.FromImage(textBmp))
{
gfx.Clear(Color.Transparent);
gfx.DrawString("text", new Font("Arial", 10), Brushes.Black, new PointF(textBmp.Width / 2, textBmp.Height));
}
BitmapData data = textBmp.LockBits(new Rectangle(0, 0, textBmp.Width, textBmp.Height), ImageLockMode.ReadOnly, System.Drawing.Imaging.PixelFormat.Format32bppArgb);
GL.TexImage2D(TextureTarget.Texture2D, 0, PixelInternalFormat.Rgba, (int)glControl1.Width, (int)glControl1.Height, 0,
OpenTK.Graphics.OpenGL.PixelFormat.Bgra, PixelType.UnsignedByte, data.Scan0);
textBmp.UnlockBits(data);
errorCode = GL.GetError();
Debug.Assert(errorCode == ErrorCode.NoError, "OpenTK error!");
GL.Begin(PrimitiveType.Quads);
GL.TexCoord2(0f, 1f); GL.Vertex2(0f, 0f);
GL.TexCoord2(1f, 1f); GL.Vertex2(1f, 0f);
GL.TexCoord2(1f, 0f); GL.Vertex2(1f, 1f);
GL.TexCoord2(0f, 0f); GL.Vertex2(0f, 1f);
GL.End();
}
errorCode = GL.GetError();
Debug.Assert(errorCode == ErrorCode.NoError, "OpenTK error!");
glControl1.SwapBuffers();
}
}
}
Ok, I finally managed to make it work.
On initialization I just did:
if (!control.Context.IsCurrent)
{
control.MakeCurrent();
}
GL.Ortho(0, controlWidth, 0, controlHeight, -1000, 1000);
GL.Scale(1, -1, 1); // I work with a top-left origin image and OpenGL is bottom-left
GL.Viewport(0, 0, controlWidth, controlHeight);
GL.ClearColor(Color.White);
GL.Hint(HintTarget.PointSmoothHint, HintMode.Nicest);
GL.Hint(HintTarget.LineSmoothHint, HintMode.Nicest);
GL.BlendFunc(BlendingFactorSrc.One, BlendingFactorDest.OneMinusSrcAlpha);
GL.BlendFunc(BlendingFactorSrc.SrcAlpha, BlendingFactorDest.OneMinusSrcAlpha);
GL.PolygonMode(MaterialFace.Front, PolygonMode.Line);
GL.Enable(EnableCap.PointSmooth);
GL.Enable(EnableCap.LineSmooth);
GL.Enable(EnableCap.Blend);
GL.Enable(EnableCap.DepthTest);
GL.ShadeModel(ShadingModel.Smooth);
GL.Enable(EnableCap.AutoNormal);
bmp = new Bitmap(width, height, System.Drawing.Imaging.PixelFormat.Format32bppArgb);
gfx = Graphics.FromImage(bmp);
gfx.TextRenderingHint = System.Drawing.Text.TextRenderingHint.AntiAlias;
texture = GL.GenTexture();
GL.BindTexture(TextureTarget.Texture2D, texture);
GL.TexParameter(TextureTarget.Texture2D, TextureParameterName.TextureMinFilter, (int)TextureMinFilter.Linear);
GL.TexParameter(TextureTarget.Texture2D, TextureParameterName.TextureMagFilter, (int)TextureMagFilter.Linear);
GL.TexImage2D(TextureTarget.Texture2D, 0, PixelInternalFormat.Rgba, bmp.Width, bmp.Height, 0,
OpenTK.Graphics.OpenGL.PixelFormat.Rgba, PixelType.UnsignedByte, IntPtr.Zero);
Then to write text in the bitmap:
gfx.DrawString(text, font, brush, new PointF(x, y));
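One step that appears to be implied but not shown: after drawing into the bitmap, its pixels still have to be copied into the texture, as in the original Paint handler. Something along these lines (a sketch, assuming the same bmp and texture fields):
GL.BindTexture(TextureTarget.Texture2D, texture);
BitmapData data = bmp.LockBits(new Rectangle(0, 0, bmp.Width, bmp.Height), ImageLockMode.ReadOnly, System.Drawing.Imaging.PixelFormat.Format32bppArgb);
// Re-use the storage allocated at initialization rather than re-allocating it each time.
GL.TexSubImage2D(TextureTarget.Texture2D, 0, 0, 0, bmp.Width, bmp.Height, OpenTK.Graphics.OpenGL.PixelFormat.Bgra, PixelType.UnsignedByte, data.Scan0);
bmp.UnlockBits(data);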
And to render:
if (!control.Context.IsCurrent)
{
control.MakeCurrent();
}
GL.Clear(ClearBufferMask.ColorBufferBit | ClearBufferMask.DepthBufferBit);
GL.MatrixMode(MatrixMode.Modelview);
GL.Enable(EnableCap.Texture2D);
GL.BindTexture(TextureTarget.Texture2D, Texture);
GL.Begin(PrimitiveType.Quads);
GL.TexCoord3(0.0f, 0.0f, 0f); GL.Vertex3(0f, 0f, 0f);
GL.TexCoord3(1.0f, 0.0f, 0f); GL.Vertex3(realWidth, 0f, 0f);
GL.TexCoord3(1.0f, 1.0f, 0f); GL.Vertex3(realWidth, realHeight, 0f);
GL.TexCoord3(0.0f, 1.0f, 0f); GL.Vertex3(0f, realHeight, 0f);
GL.End();
GL.Disable(EnableCap.Texture2D);
control.SwapBuffers();
That did the trick.
Very important (at least I think it is):
- call GL.Enable(EnableCap.Texture2D) just before rendering the quad with the texture, and GL.Disable(EnableCap.Texture2D) afterwards;
- call GL.BindTexture(TextureTarget.Texture2D, Texture) after GL.Enable(EnableCap.Texture2D).
Hope this helps someone. If I manage to have some time I'll make a C# class with it and post it here.

Why is triangle rendering background color over image?

I am displaying a bitmap loaded from a PNG at 800x600, yet when rendered, a triangle in the background color is drawn over part of the image (screenshot omitted).
I have a Window class that inherits GameWindow and here are the overridden methods that (I believe) are relevant:
protected override void OnLoad(EventArgs e) {
base.OnLoad(e);
GrafxUtils.InitTexturing();
textureId = GrafxUtils.CreateTextureFromBitmap((Bitmap)currentImage);
OnResize(null);
GL.ClearColor(Color.Gray);
}
protected override void OnRenderFrame(FrameEventArgs e) {
base.OnRenderFrame(e);
GL.Clear(ClearBufferMask.ColorBufferBit);
GL.MatrixMode(MatrixMode.Texture);
GL.LoadIdentity();
GL.BindTexture(TextureTarget.Texture2D, textureId);
GL.Begin(PrimitiveType.Quads);
// top-left
GL.TexCoord2(0, 0);
GL.Vertex2(0, 0);
// top-right
GL.TexCoord2(1, 0);
GL.Vertex2(currentImage.Width, 0);
// bottom-left
GL.TexCoord2(0, 1);
GL.Vertex2(0, currentImage.Height);
// bottom-right
GL.TexCoord2(1, 1);
GL.Vertex2(currentImage.Width, currentImage.Height);
GL.End();
SwapBuffers();
}
... and the CreateTextureFromBitmap method:
// utility method from GrafxUtils
public static int CreateTextureFromBitmap(Bitmap bitmap) {
BitmapData data = bitmap.LockBits(
new Rectangle(0, 0, bitmap.Width, bitmap.Height),
ImageLockMode.ReadOnly,
System.Drawing.Imaging.PixelFormat.Format32bppArgb);
var tex = GetBoundTexture();
GL.BindTexture(TextureTarget.Texture2D, tex);
GL.TexImage2D(
TextureTarget.Texture2D,
0,
PixelInternalFormat.Rgba,
data.Width, data.Height,
0,
OpenTK.Graphics.OpenGL.PixelFormat.Bgra,
PixelType.UnsignedByte,
data.Scan0);
bitmap.UnlockBits(data);
SetParameters();
return tex;
}
What would be causing the triangle to appear?
Using currentImage.Width / currentImage.Height for the vertex coordinates is not correct. With no projection matrix set, vertex coordinates must be in the range -1 to 1. In your case, since you seem to be drawing from 0 to 1 (i.e. a quarter of the screen only), the top left should be (0,0), the top right (1,0), the bottom left (0,-1) and the bottom right (1,-1). If you instead wanted a full-screen quad, it should range from (-1,-1) to (1,1).
As for the strange shape you are observing: the quad is drawn as two triangles, but the vertex order is not taken care of, so one triangle's hypotenuse runs from top left to bottom right and the other's from bottom left to top right. Hence the shape. You can see, for example,
http://msdn.microsoft.com/en-us/library/bb464051.aspx
And also,
Index Buffer Object and UV Coordinates don't play nice
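For illustration, a full-screen textured quad in the default clip space (no projection matrix set) could be drawn like this; alternatively, setting up an orthographic projection with GL.Ortho would make pixel coordinates such as currentImage.Width/Height valid.
// Full-screen quad in default clip space, counter-clockwise order.
// The V texture coordinates are flipped because the bitmap rows are stored top-down.
GL.Begin(PrimitiveType.Quads);
GL.TexCoord2(0, 1); GL.Vertex2(-1f, -1f); // bottom-left
GL.TexCoord2(1, 1); GL.Vertex2( 1f, -1f); // bottom-right
GL.TexCoord2(1, 0); GL.Vertex2( 1f,  1f); // top-right
GL.TexCoord2(0, 0); GL.Vertex2(-1f,  1f); // top-left
GL.End();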

OpenTK OpenGL texture not drawing

I am having trouble drawing a texture onto my quad; it just stays white. I have looked through a number of guides and I don't seem to be doing anything differently from them.
To load the texture:
Bitmap bitmap = new Bitmap("Textures/Sprite_Can.png");
GL.GenTextures(1, out textureID);
GL.BindTexture(TextureTarget.Texture2D, textureID);
BitmapData data = bitmap.LockBits(new System.Drawing.Rectangle(0, 0, bitmap.Width, bitmap.Height),
ImageLockMode.ReadOnly, System.Drawing.Imaging.PixelFormat.Format32bppArgb);
GL.TexImage2D(TextureTarget.Texture2D, 0, PixelInternalFormat.Rgba, data.Width, data.Height, 0,
OpenTK.Graphics.OpenGL.PixelFormat.Bgra, PixelType.UnsignedByte, data.Scan0);
bitmap.UnlockBits(data);
GL.TexParameter(TextureTarget.Texture2D, TextureParameterName.TextureMinFilter, (int)TextureMinFilter.Linear);
GL.TexParameter(TextureTarget.Texture2D, TextureParameterName.TextureMagFilter, (int)TextureMagFilter.Linear);
GL.TexParameter(TextureTarget.Texture2D, TextureParameterName.TextureWrapS, (int)TextureWrapMode.Repeat);
GL.TexParameter(TextureTarget.Texture2D, TextureParameterName.TextureWrapT, (int)TextureWrapMode.Repeat);
Setup and apply an orthographic projection:
GL.MatrixMode(MatrixMode.Projection);
GL.LoadIdentity();
GL.Ortho(0, control.Width, 0, control.Height, -1, 1);
GL.Viewport(0, 0, control.Width, control.Height);
GL.MatrixMode(MatrixMode.Modelview);
GL.LoadIdentity();
GL.ClearColor(Color4.CornflowerBlue);
And finally the draw:
GL.Clear(ClearBufferMask.ColorBufferBit | ClearBufferMask.DepthBufferBit);
GL.LoadIdentity();
GL.Translate(30, 30, 0);
GL.BindTexture(TextureTarget.Texture2D, textureID);
GL.Begin(BeginMode.Quads);
GL.TexCoord2(0, 0);
GL.Vertex2(-1 * width / 2, 1 * height / 2);
GL.TexCoord2(1, 0);
GL.Vertex2(1 * width / 2, 1 * height / 2);
GL.TexCoord2(1, 1);
GL.Vertex2(1 * width / 2, -1 * height / 2);
GL.TexCoord2(0, 1);
GL.Vertex2(-1 * width / 2, -1 * height / 2);
GL.End();
GL.Flush();
control.SwapBuffers();
So basically, the quad draws just fine. However, the texture is not rendered. As a result, all I have is just a white square.
In the fixed-function OpenGL pipeline, you must also enable texture units before a texture bound to one is applied to anything you draw.
The OpenGL call for this is glEnable(GL_TEXTURE_2D); the OpenTK equivalent is GL.Enable(EnableCap.Texture2D).
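In the draw code above, that would mean enabling texturing before the quad is drawn, for example:
GL.Enable(EnableCap.Texture2D); // fixed-function texturing on
GL.BindTexture(TextureTarget.Texture2D, textureID);
GL.Begin(BeginMode.Quads);
// ... TexCoord2/Vertex2 calls as above ...
GL.End();
GL.Disable(EnableCap.Texture2D); // optional: disable again afterwards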
