Skybox is covering models - c#

I made a solar system with the sun in the centre and planets rotating around it. I wanted to add a skybox; it is added properly, but I can't see my planets.
Before adding skybox:
After adding skybox:
main skybox code:
// skybox VAO
// Upload the 36 cube vertices (position-only, tightly packed vec3s) into a
// dedicated VAO/VBO pair; attribute index 0 matches `aPos` in the skybox shader.
unsigned int skyboxVAO, skyboxVBO;
glGenVertexArrays(1, &skyboxVAO);
glGenBuffers(1, &skyboxVBO);
glBindVertexArray(skyboxVAO);
glBindBuffer(GL_ARRAY_BUFFER, skyboxVBO);
glBufferData(GL_ARRAY_BUFFER, sizeof(skyboxVertices), &skyboxVertices, GL_STATIC_DRAW);
glEnableVertexAttribArray(0);
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 3 * sizeof(float), (void*)0);
// Cubemap face images, in the order loadCubemap uploads them:
// +X, -X, +Y, -Y, +Z, -Z (right, left, top, bottom, front, back).
vector<std::string> faces
{
"res/textures/skybox/right.png",
"res/textures/skybox/left.png",
"res/textures/skybox/top.png",
"res/textures/skybox/bot.png",
"res/textures/skybox/front.png",
"res/textures/skybox/back.png"
};
unsigned int cubemapTexture = loadCubemap(faces);
window skybox code:
// draw skybox as last
glDepthFunc(GL_LEQUAL); // change depth function so depth test passes when values are equal to depth buffer's content
// NOTE(review): GL_LEQUAL only keeps the skybox behind the scene if the skybox
// is rendered at the maximum depth (1.0). The vertex shader must force that
// (gl_Position = pos.xyww); otherwise the cube's own near depth wins and it
// covers the models — which is the symptom described above.
skyboxShader.use();
view = glm::mat4(glm::mat3(camera.GetViewMatrix())); // remove translation from the view matrix
skyboxShader.setMat4("view", view);
skyboxShader.setMat4("projection", projection);
// skybox cube
glBindVertexArray(skyboxVAO);
glActiveTexture(GL_TEXTURE0);
glBindTexture(GL_TEXTURE_CUBE_MAP, cubemapTexture);
glDrawArrays(GL_TRIANGLES, 0, 36);
glBindVertexArray(0);
glDepthFunc(GL_LESS); // set depth function back to default
vertex shader and fragment shader:
#version 330 core
layout (location = 0) in vec3 aPos;

out vec3 TexCoords;

uniform mat4 projection;
uniform mat4 view;

void main()
{
    // The cube's local position doubles as the cubemap lookup direction.
    TexCoords = aPos;
    vec4 pos = projection * view * vec4(aPos, 1.0);
    // Bug fix: `pos` was computed but unused. Writing z = w forces the depth
    // to 1.0 after perspective division, so with glDepthFunc(GL_LEQUAL) the
    // skybox only fills pixels where no scene geometry was drawn, instead of
    // covering the models.
    gl_Position = pos.xyww;
}
#version 330 core
out vec4 FragColor;

in vec3 TexCoords;

uniform samplerCube skybox;

// Sample the cubemap along the interpolated cube-vertex direction.
void main()
{
    vec4 skyColor = texture(skybox, TexCoords);
    FragColor = skyColor;
}

Related

OpenGL3 Rotate object around itself center

I'm trying to rotate an object in OpenGL (using the OpenTK framework), but it rotates around the zero point. It makes sense that the object rotates around the origin, but I don't know how to make it rotate around its own centre (or another point).
// Convenience overload: builds translation and rotation matrices from scalar
// components and forwards them together with the active camera's matrices.
public static void Texture(Region region, float x, float y, float z, float rotateX, float rotateY, float rotateZ)
=> Texture(
region,
Matrix4.CreateTranslation(x, y, z),
Matrix4.CreateRotationX(rotateX) * Matrix4.CreateRotationY(rotateY) * Matrix4.CreateRotationZ(rotateZ),
TestGame.Camera.GetViewMatrix(),
TestGame.Camera.GetProjectionMatrix()
);
// Binds the region's texture to unit 0, uploads the four matrices as shader
// uniforms, and issues the draw call.
public static void Texture(Region region, Matrix4 translate, Matrix4 model, Matrix4 view, Matrix4 projection)
{
Shaders.TextureShader.Texture(TextureUnit.Texture0); //Bind texture as 0 in shader
region.Reference.Use(); //Bind texture as 0
Shaders.TextureShader.Matrix4("translate", translate);
Shaders.TextureShader.Matrix4("model", model);
Shaders.TextureShader.Matrix4("view", view);
Shaders.TextureShader.Matrix4("projection", projection);
Shaders.TextureShader.Draw();
}
#version 330 core
layout(location = 0) in vec3 aPosition;
layout(location = 1) in vec2 aTexCoord;
out vec2 texCoord;
uniform mat4 translate;
uniform mat4 model;
uniform mat4 view;
uniform mat4 projection;
void main(void)
{
texCoord = aTexCoord;
// NOTE(review): with the row-vector convention (v * M) used here, `translate`
// is applied BEFORE `model` (the rotation), so the object rotates about the
// world origin instead of its own centre — this is the reported symptom.
gl_Position = vec4(aPosition, 1) * translate * model * view * projection;
}
Matrix multiplications are not commutative; the order matters. Rotate the object before translating it:
gl_Position = vec4(aPosition, 1) * translate * model * view * projection;
gl_Position = vec4(aPosition, 1) * model * translate * view * projection;

encoding vertex positions into textures, positions dont match and index is always zero

I am trying to make a tool to encode vertex positions into a texture. The tool takes a sequence of Wavefront obj files and exports 2 textures. I am, for the most part, following this guide. I am using C# and Veldrid for my program. My program also shows a preview to see what the result looks like. I am having trouble getting my preview to use the textures correctly. The textures have the below mapping.
Texture 1:
RG - X Position
BA - Y Position
Texture 2:
RG - Z Position
BA - Normals, eventually; haven't gotten there yet.
I have two issues. My first issue is the decoded position is not being decoded correctly. The second issue is that gl_VertexIndex seems to always be zero.
For my first issue, in order to see what was going on, I set the texture coords for the texture to 0, 0 to sample the first vertex of the first frame. I also removed any view transformation so that I could see the actual values in renderdoc.
In Renderdoc, the VS_Input is 11.67803, 1.00, -11.06608 and the VS_Out is 5.75159, 1.99283, -5.03286. When using gl_VertexIndex, all the vertices for VS_Out read the same thing.
#version 450
// Vertex shader that reconstructs the vertex position from a "vertex animation
// texture": X and Y are packed into posTex (RG / BA), Z into normalTex.RG.
layout(location = 0) in vec3 Position;
layout(location = 1) in vec3 Normal;
layout(location = 2) in vec2 TexCoords;
layout(location = 3) in uint Id;
layout(location = 0) out vec3 outNormal;
layout(location = 1) out vec4 outDebug;
layout(set = 0, binding = 0) uniform MVP {
mat4 Model;
mat4 View;
mat4 Projection;
};
layout(set=0, binding=1) uniform sampler textureSampler;
layout(set=0, binding=2) uniform texture2D posTex;
layout(set=0, binding=3) uniform texture2D normalTex;
// Linearly remaps `value` from [from1, to1] into [from2, to2].
float RemapRange(float value, float from1, float to1, float from2, float to2){
return (value - from1) / (to1 - from1) * (to2 - from2) + from2;
}
// Reconstructs a float from two 8-bit channels: high byte + low byte / 255.
float DecodeFloatRG (vec2 enc){
vec2 kDecodeDot = vec2 (1.0, 1 / 255.0);
return dot(enc, kDecodeDot);
}
void main(){
// Debug output: where the vertex would land using the raw attribute position.
outDebug = Projection * View * Model * vec4(Position, 1.0f);
vec2 coords = vec2(0, 0); // hardcoded: sample the first vertex of frame 0
vec4 pos = textureLod(sampler2D(posTex, textureSampler), coords, 0);
vec4 normal = textureLod(sampler2D(normalTex, textureSampler), coords, 0);
vec3 decodedPos;
decodedPos.x = DecodeFloatRG(pos.xy);
decodedPos.y = DecodeFloatRG(pos.zw);
decodedPos.z = DecodeFloatRG(normal.xy);
float x = RemapRange(decodedPos.x, 0.0f, 1.0f, -13.0f, 13.0f); //right now this is hardcoded
float y = RemapRange(decodedPos.y, 0.0f, 1.0f, -13.0f, 13.0f);
float z = RemapRange(decodedPos.z, 0.0f, 1.0f, -13.0f, 13.0f);
//gl_Position = Projection * View * Model * vec4(x, y, z, 1.0f);
// Deliberately left untransformed so the decoded value is visible in RenderDoc.
gl_Position = vec4(x, y, z, 1.0f);
//gl_Position = vec4(Position, 1.0f);
outNormal = Normal;
}
For the second issue, the shader is the same, but instead I'm using:
coords = vec2(gl_VertexIndex, 0)
I'm also not sure that using vertex index is the best way to go about this, as it seems like most game engines don't have this exposed.
On the CPU side, I encode the textures using the below:
//https://forum.unity.com/threads/re-map-a-number-from-one-range-to-another.119437/
// Linearly remaps `value` from [from1, to1] into [from2, to2].
protected float RemapRange(float value, float from1, float to1, float from2, float to2){
    float t = (value - from1) / (to1 - from1); // normalised position in the source range
    return t * (to2 - from2) + from2;
}
//https://medium.com/tech-at-wildlife-studios/texture-animation-techniques-1daecb316657
// Packs a [0, 1) float into two 8-bit channels: high part in X, low part in Y.
protected Vector2 EncodeFloatRG (float v){
    var scaled = new Vector2(1.0f, 255.0f) * v;
    var packed = new Vector2(fract(scaled.X), fract(scaled.Y));
    // Remove from the high channel the part already represented by the low
    // channel, so decoding with dot(enc, (1, 1/255)) reconstructs v.
    packed.X -= packed.Y * (1.0f / 255.0f);
    return packed;
}
// Fractional part of x (x - floor(x)), matching GLSL's fract().
float fract(float x){
    float whole = MathF.Floor(x);
    return x - whole;
}
This is what the loop writing the pixels looks like. There is another one for the second texture, but it's pretty much the same.
// Encode one mesh (animation frame) per pixel row: each pixel packs one
// vertex's X position into RG and Y into BA, both remapped into [0, 1] first.
// `y` is the frame/row counter captured from the enclosing scope.
// NOTE(review): assumes row.Length <= obj.Vertices count — confirm for meshes
// narrower than the texture.
posImg.Mutate(c => c.ProcessPixelRowsAsVector4(row =>
{
for (int x = 0; x < row.Length; x++)
{
var obj = meshes[y];
var vertex = obj.Vertices[x];
var pixel = new Vector4();
float X = RemapRange(vertex.Position.X, bounds.Min, bounds.Max, 0.0f, 1.0f);
float Y = RemapRange(vertex.Position.Y, bounds.Min, bounds.Max, 0.0f, 1.0f);
var encodedX = EncodeFloatRG(X);
var encodedY = EncodeFloatRG(Y);
pixel.X = encodedX.X;
pixel.Y = encodedX.Y;
pixel.Z = encodedY.X;
pixel.W = encodedY.Y;
row[x] = pixel;
}
y += 1;
}));
How I am creating and loading the textures in veldrid. As far as the sampler goes, it is a gd.PointSampler. I have tried turning SRGB on and off on the ImageSharpTexture() and using R8_G8_B8_A8_UNorm_SRgb and R8_G8_B8_A8_UNorm and pretty much any combo of those.
// NOTE(review): creating the views with an *_UNorm_SRgb format makes the GPU
// apply sRGB-to-linear conversion when sampling, which scrambles bit-packed
// position data — this is the value mismatch described in the EDIT; the
// accepted fix below switches to the plain UNorm format.
var posTex = new Veldrid.ImageSharp.ImageSharpTexture(posPath, false, true);
var normalTex = new Veldrid.ImageSharp.ImageSharpTexture(normalPath, false, true);
var posDeviceTex = posTex.CreateDeviceTexture(gd, gd.ResourceFactory);
var normalDeviceTex = normalTex.CreateDeviceTexture(gd, gd.ResourceFactory);
var posViewDesc = new TextureViewDescription(posDeviceTex, PixelFormat.R8_G8_B8_A8_UNorm_SRgb);
var normalViewDesc = new TextureViewDescription(normalDeviceTex, PixelFormat.R8_G8_B8_A8_UNorm_SRgb);
positionTexture = gd.ResourceFactory.CreateTextureView(posViewDesc);
normalTexture = gd.ResourceFactory.CreateTextureView(normalViewDesc);
EDIT:
I tried hard-coding the value of pixel (0, 0) of the texture in the shader like below. When I do this, the result is correct and matches the original vertex position. When reading the pixel values of the texture in the shader and exporting them directly, the values are wrong, so I am thinking there is some compression or color-space weirdness going on when reading the texture in. For example, in the shader the correct value for the pixel at (0, 0) should be (0.9490196, 0.03529412, 0.5372549, 0.30588236), but in RenderDoc it shows as (0.55492, 0.28516, 0.29102, 0.54314).
outDebug = Projection * View * Model * vec4(Position, 1.0f);
vec2 coords = vec2(0.0, 0.0);
vec4 pos = textureLod(sampler2D(posTex, textureSampler), coords, 0);
vec4 normal = textureLod(sampler2D(normalTex, textureSampler), coords, 0);
pos = vec4(0.9490196, 0.03529412, 0.5372549, 0.30588236);
normal = vec4(0.07058824, 0.96862745, 1, 1);
vec3 decodedPos;
decodedPos.x = DecodeFloatRG(pos.xy);
decodedPos.y = DecodeFloatRG(pos.zw);
decodedPos.z = DecodeFloatRG(normal.xy);
float x = RemapRange(decodedPos.x, 0.0f, 1.0f, -13.0f, 13.0f);
float y = RemapRange(decodedPos.y, 0.0f, 1.0f, -13.0f, 13.0f);
float z = RemapRange(decodedPos.z, 0.0f, 1.0f, -13.0f, 13.0f);
gl_Position = vec4(x, y, z, 1.0f);
Texture 1:
Texture 2:
Google Drive With Textures, Obj, and Metadata
So I figured this out, This code block need to be
// Working version: plain UNorm view formats return the packed channel values
// exactly as stored, with no sRGB decode on sampling.
var posTex = new Veldrid.ImageSharp.ImageSharpTexture(posPath, false, true);
var normalTex = new Veldrid.ImageSharp.ImageSharpTexture(normalPath, false, true);
var posDeviceTex = posTex.CreateDeviceTexture(gd, gd.ResourceFactory);
var normalDeviceTex = normalTex.CreateDeviceTexture(gd, gd.ResourceFactory);
var posViewDesc = new TextureViewDescription(posDeviceTex, PixelFormat.R8_G8_B8_A8_UNorm);
var normalViewDesc = new TextureViewDescription(normalDeviceTex, PixelFormat.R8_G8_B8_A8_UNorm);
positionTexture = gd.ResourceFactory.CreateTextureView(posViewDesc);
normalTexture = gd.ResourceFactory.CreateTextureView(normalViewDesc);
Instead. While fixing this someone also mentioned I needed to declare my TextureViews before my sampler if I wanted to use the same sampler for both textureViews.
as far as gl_VertexIndex goes I'm looking into how to map the data into a spare uv channel instead as this should always be available in any game engine.

Show half portion of image side by side - OpenGL

I have created two textures for two images. Now I want to show these textures in OpenGL in an order such that the left part of image2, the full image1, and the right part of image2 appear side by side. I have done it as shown below. Image1 shows at the center of the OpenGL screen, but the left and right parts of the screen are not correct (they should show the left part of image2 and the right part of image2 respectively).
Vertex shader:
attribute vec3 a_position;

varying vec2 vTexCoord;

// Pass-through for a full-screen quad: clip-space XY in [-1, 1] is remapped
// to texture space [0, 1] for the fragment shader.
void main() {
    vec2 uv = (a_position.xy + 1) / 2;
    vTexCoord = uv;
    gl_Position = vec4(a_position, 1);
}
Fragment shader:
precision highp float;
uniform sampler2D sTexture;//texture for image 1
uniform sampler2D sTexture1;//texture for image 2
varying vec2 vTexCoord;
void main () {
// NOTE(review): left strip maps x in [0, 0.25] to only [0, 0.125] of image2,
// and the centre maps (0.25, 0.75) to (0.5, 1.5) of image1 (out of range) —
// these two mappings are what produce the incorrect left/centre display.
if ( vTexCoord.x<=0.25 )
gl_FragColor = texture2D (sTexture1, vec2(vTexCoord.x/2.0, vTexCoord.y));
else if ( vTexCoord.x>0.25 && vTexCoord.x<0.75 )
gl_FragColor = texture2D (sTexture, vec2(vTexCoord.x*2.0, vTexCoord.y));
// right strip: x in [0.75, 1] -> [0.5, 1] of image2 (correct)
else if(vTexCoord.x>=0.75 )
gl_FragColor = texture2D (sTexture1, vec2(vTexCoord.x*2.0-1.0, vTexCoord.y));
}
Compiling shaders:
// Compiles the vertex and fragment shaders from their source strings.
// NOTE(review): no CompileStatus / info-log check is done, so compile errors
// fail silently; consider GL.GetShader(..., ShaderParameter.CompileStatus, ...).
void CreateShaders() {
/***********Vert Shader********************/
vertShader = GL.CreateShader(ShaderType.VertexShader);
GL.ShaderSource(vertShader, vertexShaderSource);
GL.CompileShader(vertShader);
/***********Frag Shader ****************/
fragShader = GL.CreateShader(ShaderType.FragmentShader);
GL.ShaderSource(fragShader, fragmentShaderSource);
GL.CompileShader(fragShader);
}
I want to show this textures in opengl in an order that left part of image2, full image1, right part of image2
If the texture coordinate is less than 0.25 then you want to show the range [0, 0.5] of image2. You have to map the x component of the texture coordinate from [0.0, 0.25] to [0, 0.5]:
vec2( vTexCoord.x*2.0, vTexCoord.y );
If the texture coordinate is greater than 0.75 then you want to show the range [0.5, 1] of image2. You have to map the x component of the texture coordinate from [0.75, 1.0] to [0.5, 1.0]:
vec2( (vTexCoord.x-0.75)*2.0 + 0.5, vTexCoord.y );
or
vec2( vTexCoord.x*2.0 - 1.0, vTexCoord.y );
If the texture coordinate is greater than 0.25 and less than 0.75 then you want to show the full range of image1. You have to map the x component of the texture coordinate from [0.25, 0.75] to [0.0, 1.0]:
vec2( vTexCoord.x*2.0 - 0.5, vTexCoord.y );
The shader code may look like this:
precision highp float;

uniform sampler2D sTexture;  // texture for image 1
uniform sampler2D sTexture1; // texture for image 2

varying vec2 vTexCoord;

void main ()
{
    // Map screen-space x to the per-image texture range.
    float u = vTexCoord.x*2.0;
    float a = 0.0; // 0 -> show image2, 1 -> show image1
    if ( vTexCoord.x > 0.75 )
    {
        u -= 1.0; // right strip: [0.75, 1.0] -> [0.5, 1.0] of image2
    }
    else if ( vTexCoord.x >= 0.25 )
    {
        u -= 0.5; // centre: [0.25, 0.75] -> [0.0, 1.0] of image1
        a = 1.0;
    }
    // Bug fix: the varying is named vTexCoord — `texCoord` is undeclared and
    // the shader would fail to compile.
    vec4 color1 = texture2D(sTexture1, vec2(u, vTexCoord.y));
    vec4 color2 = texture2D(sTexture, vec2(u, vTexCoord.y));
    gl_FragColor = mix(color2, color1, a);
}
Extension to the answer:
if I want to show only a portion of right image,for example (0.6,0.8).
You want to map [0.75, 1.0] to [0.6, 0.8]:
[0.75, 1.0] to [0, 1]: u' = (u-0.75)/0.25
[0, 1] to [0.6, 0.8]: u'' = u'*0.2+0.6
This leads to:
u = (vTexCoord.x-0.75)*0.2/0.25 + 0.6;
When mapping [0.75, 1.0] to [0.6, 0.8], I wish to display the remaining portion of [0.75, 1.0] in black/white. When using (vTexCoord.x-0.75)*0.2/0.25 + 0.6, the image portion [0.6, 0.8] is filled in [0.75, 1.0].
You have to convert the RGB color to grayscale. I recommend using the formula for relative luminance:
// Relative luminance (Rec. 709 primaries). Bug fix: dot() requires operands of
// the same size — color1 is a vec4, so its .rgb part must be used explicitly.
float grayscale = dot(color1.rgb, vec3(0.2126, 0.7152, 0.0722));
color1.rgb = vec3(grayscale);

C# OpenGL - Trying to get the light to move with the camera using shaders

I am currently building a test scene with shaders.
Here is my Vertext Shader code:
// Vertex shader source (GLSL 130): transforms the vertex by MVP, converts the
// normal to world space, and outputs the light vector — rotated into tangent
// space via the TBN matrix when normal mapping is enabled.
public const string VertexShaderText = #"
#version 130
in vec3 vertexPosition;
in vec3 vertexNormal;
in vec3 vertexTangent;
in vec2 vertexUV;
uniform vec3 light_direction;
out vec3 normal;
out vec2 uv;
out vec3 light;
uniform mat4 projection_matrix;
uniform mat4 view_matrix;
uniform mat4 model_matrix;
uniform bool enable_mapping;
void main(void)
{
normal = normalize((model_matrix * vec4(floor(vertexNormal), 0)).xyz);
uv = vertexUV;
mat3 tbnMatrix = mat3(vertexTangent, cross(vertexTangent, normal), normal);
light = (enable_mapping ? light_direction * tbnMatrix : light_direction);
gl_Position = projection_matrix * view_matrix * model_matrix * vec4(vertexPosition, 1);
}
";
Here is my Fragment Shader:
// Fragment shader source (GLSL 130): chooses between the interpolated normal
// and the normal-map sample, computes diffuse lighting clamped below by the
// ambient term, and modulates the color texture by it.
public const string FragmentShaderText = #"
#version 130
uniform sampler2D colorTexture;
uniform sampler2D normalTexture;
uniform bool enableToggleLighting;
uniform mat4 model_matrix;
uniform bool enable_mapping;
uniform float alpha;
uniform float ambi;
in vec3 normal;
in vec2 uv;
in vec3 light;
out vec4 fragment;
void main(void)
{
vec3 fragmentNormal = texture2D(normalTexture, uv).xyz * 2 - 1;
vec3 selectedNormal = (enable_mapping ? fragmentNormal : normal);
float diffuse = max(dot(selectedNormal, light), 0);
float ambient = ambi;
float lighting = (enableToggleLighting ? max(diffuse, ambient) : 1);
fragment = vec4(lighting * texture2D(colorTexture, uv).xyz, alpha);
}
";
My Project is initialized like this:
// One-time shader setup: activate the program, then upload the projection
// matrix, a fixed +Z light direction, the lighting toggle, the normal-map
// texture unit (1), and the normal-mapping toggle.
CanvasControlSettings.ShaderProgram.Use();
CanvasControlSettings.ShaderProgram["projection_matrix"].SetValue(mainSceneProj);
CanvasControlSettings.ShaderProgram["light_direction"].SetValue(new Vector3(0f, 0f, 1f));
CanvasControlSettings.ShaderProgram["enableToggleLighting"].SetValue(CanvasControlSettings.ToggleLighting);
CanvasControlSettings.ShaderProgram["normalTexture"].SetValue(1);
CanvasControlSettings.ShaderProgram["enable_mapping"].SetValue(CanvasControlSettings.ToggleNormalMapping);
My rotation moves the camera around the object. I want to move the light position along with the camera so that the shading is always visible.
How can I send the Camera Pos to the shader and implement this ?
From the front:
Rotated:
EDIT:
I Updated the Camera Position after rotate like this:
// Update fixed-function light 0 to the camera position each frame
// (w = 0.0 makes OpenGL treat it as a directional light).
GL.Light(LightName.Light0, LightParameter.Position, new float[] { CanvasControlSettings.Camera.Position.X, CanvasControlSettings.Camera.Position.Y, CanvasControlSettings.Camera.Position.Z, 0.0f } );
And changed the light line to this
light = gl_LightSource[0].position.xyz;
This almost works perfectly, except that the material light color is now way too bright! I would show pictures, but it seems I need more rep.
EDIT:
Ok, after following provided links, I found my way. I changed the Vertex Shader light code to:
// Per-vertex direction from the world-space vertex position toward light 0,
// normalized for use as the `light` vector in the fragment shader.
vec4 lightPos4 = vec4(gl_LightSource[0].position.xyz, 1.0);
vec4 pos4 = model_matrix * vec4(vertexPosition, 1.0);
light = normalize((lightPos4 - pos4).xyz);
Works perfectly now. Thanks
You don't need to send the camera position to your shader. Just change your light's position to the camera position and send your light to your shader.

OpenGL 4 multisampling with FBO - FBO incomplete

I'm making 2D games in OpenTK (a C# wrapper for OpenGL 4), and all was well except for jagged edges of polygons and things jumping and stuttering instead of moving smoothly - so I'm trying to add in multisampling to antialias my textures.
My setup has several Cameras which render all their scene objects onto a FrameBufferObject texture (I would like this to be MSAA), which are then all drawn to the screen (no multisampling needed), one on top of the other.
Without multisampling, it worked fine, but now I tried to change all my Texture2D calls to Texture2DMultisample etc but now I get FBO Not Complete errors and it draws wrong. I believe I need to change my shaders too, but I want to solve this first.
The code below references a few classes like Texture that I've made, but I don't think that should impact this, and I don't want to clutter the post - will give more details if needed.
I set up the FBO for each camera with:
// Allocates the camera's FBO color texture as a multisample texture.
private void SetUpFBOTex()
{
_frameBufferTexture = new Texture(GL.GenTexture(), Window.W, Window.H);
GL.BindTexture(TextureTarget.Texture2DMultisample, _frameBufferTexture.ID);
// NOTE(review): the second argument is the sample count — passing 0 creates a
// multisample texture with zero samples, which is why the FBO reports
// incomplete (the working version further down passes 8).
GL.TexImage2DMultisample(TextureTargetMultisample.Texture2DMultisample, 0, PixelInternalFormat.Rgba8, Window.W, Window.H, true);
_frameBufferID = GL.GenFramebuffer();
}
and draw with:
// Renders this camera's layers into its multisampled FBO texture, then draws
// that texture to the screen as a quad at the camera's window position.
public void Render(Matrix4 matrix)
{
GL.Enable(EnableCap.Multisample);
//Bind FBO to be the draw destination and clear it
GL.BindFramebuffer(FramebufferTarget.Framebuffer, _frameBufferID);
GL.FramebufferTexture2D(FramebufferTarget.Framebuffer, FramebufferAttachment.ColorAttachment0, TextureTarget.Texture2DMultisample, _frameBufferTexture.ID, 0);
GL.ClearColor(new Color4(0,0,0,0));
GL.Clear(ClearBufferMask.ColorBufferBit | ClearBufferMask.DepthBufferBit);
//draw stuff here
foreach (Layer l in Layers)
l.Render(Matrix);
GL.BindFramebuffer(FramebufferTarget.Framebuffer, 0);
//Bind the FBO to be drawn
// NOTE(review): the wrapper's Bind() presumably binds Texture2D — the working
// version below binds TextureTarget.Texture2DMultisample explicitly; confirm.
_frameBufferTexture.Bind();
//Translate to camera window position
Matrix4 fbomatrix = matrix * Matrix4.CreateTranslation(_window.x, _window.y, 0) * FBOMatrix;
//Bind shader
shader.Bind(ref fbomatrix, DrawType);
//Some OpenGL setup nonsense, binding vertices and index buffer and telling OpenGL where in the vertex struct things are, pointers &c
GL.BindBuffer(BufferTarget.ArrayBuffer, vertexBuffer);
GL.EnableVertexAttribArray(shader.LocationPosition);
GL.VertexAttribPointer(shader.LocationPosition, 2, VertexAttribPointerType.Float, false, Stride, 0);
if (shader.LocationTexture != -1)
{
GL.EnableVertexAttribArray(shader.LocationTexture);
GL.VertexAttribPointer(shader.LocationTexture, 2, VertexAttribPointerType.Float, false, Stride, 8);
}
GL.EnableVertexAttribArray(shader.LocationColour);
GL.VertexAttribPointer(shader.LocationColour, 4, VertexAttribPointerType.UnsignedByte, true, Stride, 16);
GL.BindBuffer(BufferTarget.ElementArrayBuffer, indexBuffer);
//Draw the damn quad
GL.DrawArrays(DrawType, 0, Vertices.Length);
//Cleanup
GL.DisableVertexAttribArray(shader.LocationPosition);
if (shader.LocationTexture != -1)
GL.DisableVertexAttribArray(shader.LocationTexture);
GL.DisableVertexAttribArray(shader.LocationColour);
}
Ok #Andon gets credit for this - if you write it as an answer I'll mark that as the solution. I was indeed doing antialiasing with 0 samples!
I'm posting the working antialiased drawing to multiple FBOS code for future OpenTK googlers.
// Working version: allocates the FBO color texture with 8 samples
// (and fixedsamplelocations = false).
private void SetUpFBOTex()
{
_frameBufferTexture = new Texture(GL.GenTexture(), Window.W, Window.H);
GL.BindTexture(TextureTarget.Texture2DMultisample, _frameBufferTexture.ID);
GL.TexImage2DMultisample(TextureTargetMultisample.Texture2DMultisample, 8, PixelInternalFormat.Rgba8, Window.W, Window.H, false);
_frameBufferID = GL.GenFramebuffer();
}
// Working version: renders the layers into the 8-sample FBO, then draws the
// multisample texture to screen (optionally rotated about RotationCentre).
public void Render(Matrix4 matrix)
{
//Bind FBO to be the draw destination and clear it
GL.BindFramebuffer(FramebufferTarget.Framebuffer, _frameBufferID);
GL.FramebufferTexture2D(FramebufferTarget.Framebuffer, FramebufferAttachment.ColorAttachment0, TextureTarget.Texture2DMultisample, _frameBufferTexture.ID, 0);
GL.ClearColor(new Color4(0,0,0,0));
GL.Clear(ClearBufferMask.ColorBufferBit);
//draw stuff here
foreach (Layer l in Layers)
l.Render(Matrix);
//unbind FBO to allow drawing to screen again
GL.BindFramebuffer(FramebufferTarget.Framebuffer, 0);
//Bind the FBO to be drawn
GL.BindTexture(TextureTarget.Texture2DMultisample, _frameBufferTexture.ID);
//Translate to camera window position
Matrix4 fbomatrix = matrix * Matrix4.CreateTranslation(_window.x, _window.y, 0) * FBOMatrix;
//Rotate camera FBO texture
if (_rotationAngle != 0f)
{
fbomatrix = Matrix4.CreateTranslation(RotationCentre.x, RotationCentre.y, 0) * fbomatrix;
fbomatrix = Matrix4.CreateRotationZ(_rotationAngle) * fbomatrix;
fbomatrix = Matrix4.CreateTranslation(-RotationCentre.x, -RotationCentre.y, 0) * fbomatrix;
}
shader.Bind(ref fbomatrix, DrawType);
//Some OpenGL setup nonsense, binding vertices and index buffer and telling OpenGL where in the vertex struct things are, pointers &c
GL.BindBuffer(BufferTarget.ArrayBuffer, vertexBuffer);
GL.EnableVertexAttribArray(shader.LocationPosition);
GL.VertexAttribPointer(shader.LocationPosition, 2, VertexAttribPointerType.Float, false, Stride, 0);
if (shader.LocationTexture != -1)
{
GL.EnableVertexAttribArray(shader.LocationTexture);
GL.VertexAttribPointer(shader.LocationTexture, 2, VertexAttribPointerType.Float, false, Stride, 8);
}
GL.EnableVertexAttribArray(shader.LocationColour);
GL.VertexAttribPointer(shader.LocationColour, 4, VertexAttribPointerType.UnsignedByte, true, Stride, 16);
GL.BindBuffer(BufferTarget.ElementArrayBuffer, indexBuffer);
//Draw the damn quad
GL.DrawArrays(DrawType, 0, Vertices.Length);
//Cleanup
GL.DisableVertexAttribArray(shader.LocationPosition);
if (shader.LocationTexture != -1)
GL.DisableVertexAttribArray(shader.LocationTexture);
GL.DisableVertexAttribArray(shader.LocationColour);
}
I have a wrapper class to control Shader code, here's the bind call:
// Activates the shader program and uploads the MVP matrix plus the sampler
// uniform (always texture unit 0).
// NOTE(review): this overload takes only the matrix, while the Render code
// calls shader.Bind(ref fbomatrix, DrawType) — presumably another overload.
internal void Bind(ref Matrix4 matrixMVP)
{
//Set this shader as active shader
GL.UseProgram(programID);
//Load position matrix into vertex shaders
GL.UniformMatrix4(LocationMVPMatrix, false, ref matrixMVP);
//Load active texture into fragment shaders
GL.Uniform1(LocationSampler, 0);
}
Fragment shader:
/// <summary>
/// Test for a Multisampled fragment shader - http://www.opentk.com/node/2251
/// </summary>
// NOTE(review): the shader averages a hardcoded 16 samples, but the FBO
// texture above is created with 8 — these should match to avoid reading
// undefined sample indices.
public const string fragmentShaderTestSrc =
#"
#version 330
uniform sampler2DMS Sampler;
in vec2 InTexture;
in vec4 OutColour;
out vec4 OutFragColor;
int samples = 16;
float div= 1.0/samples;
void main()
{
OutFragColor = vec4(0.0);
ivec2 texcoord = ivec2(textureSize(Sampler) * InTexture); // used to fetch msaa texel location
for (int i=0;i<samples;i++)
{
OutFragColor += texelFetch(Sampler, texcoord, i) * OutColour; // add color samples together
}
OutFragColor*= div; //devide by num of samples to get color avg.
}
";
Vertex shader:
/// <summary>
/// Default vertex shader that only applies specified matrix transformation
/// </summary>
// Attribute locations 0/1/2 correspond to the Position/Texture/Colour
// VertexAttribPointer calls in Render (stride offsets 0, 8, 16).
public const string vertexShaderDefaultSrc =
#"
#version 330
uniform mat4 MVPMatrix;
layout (location = 0) in vec2 Position;
layout (location = 1) in vec2 Texture;
layout (location = 2) in vec4 Colour;
out vec2 InVTexture;
out vec4 vFragColorVs;
void main()
{
gl_Position = MVPMatrix * vec4(Position, 0, 1);
InVTexture = Texture;
vFragColorVs = Colour;
}";

Categories

Resources