• Advertisement
Sign in to follow this  

OpenGL Drawing code not working

This topic is 2630 days old which is more than the 365 day threshold we allow for new replies. Please post a new topic.

If you intended to correct an error in the post then please contact us.

Recommended Posts

Hello.

I'm developing an Android application with OpenGL ES 2.0 native code.

The native code compiles ok, and runs without any problem. But it doesn't draw anything.

Here is my code:

##################################################################################

#include <jni.h>

#include <stdio.h>
#include <stdlib.h>
#include <string.h>

#include <android/log.h>

#include <GLES2/gl2.h>
#include <GLES2/gl2ext.h>

#ifdef __cplusplus
extern "C"
{
#endif

// Handle of the GLSL program linked in initRendering() and used by renderFrame().
unsigned int shaderProgramID;
// Attribute locations queried from the linked program in initRendering().
// glGetAttribLocation returns -1 when the active shader does not declare
// the attribute.
GLint vertexHandle;
GLint normalHandle;
GLint textureCoordHandle;

// Constants:
// NOTE(review): kObjectScale is never referenced in this file — confirm
// whether it is still needed before removing.
static const float kObjectScale = 3.f;

// GLSL ES shader sources.
//
// Bug fix: a C/C++ string literal cannot span physical source lines, so the
// original multi-line strings did not compile. Each shader is now built from
// adjacent string literals that the compiler concatenates; the trailing "\n"
// on each fragment preserves the GLSL line structure (useful for readable
// compiler info logs).

// Vertex shader for models that supply normals and texture coordinates.
// NOTE(review): it requires the modelViewProjectionMatrix uniform to be
// uploaded (glUniformMatrix4fv) before drawing — no code in this file does
// that; confirm the caller sets it.
static const char* cubeMeshVertexShader =
    "attribute vec4 vertexPosition;\n"
    "attribute vec4 vertexNormal;\n"
    "attribute vec2 vertexTexCoord;\n"
    "varying vec2 texCoord;\n"
    "varying vec4 normal;\n"
    "uniform mat4 modelViewProjectionMatrix;\n"
    "\n"
    "void main() {\n"
    "    gl_Position = modelViewProjectionMatrix * vertexPosition;\n"
    "    normal = vertexNormal;\n"
    "    texCoord = vertexTexCoord;\n"
    "}\n";

// Fragment shader paired with cubeMeshVertexShader. texSampler2D is declared
// but unused: the shader currently outputs solid white.
static const char* cubeMeshFragmentShader =
    "precision mediump float;\n"
    "varying vec2 texCoord;\n"
    "varying vec4 normal;\n"
    "uniform sampler2D texSampler2D;\n"
    "void main() {\n"
    "    gl_FragColor = vec4(1.0, 1.0, 1.0, 1.0);\n"
    "}\n";

// Minimal vertex shader for models without normals/texture coordinates.
// NOTE(review): it applies no transformation, so positions are interpreted
// as clip-space coordinates — geometry is only visible if it already lies
// inside [-1, 1] on all axes.
static const char* meshVertexShaderNoNormalTexCoor =
    "attribute vec4 vertexPosition;\n"
    "void main() {\n"
    "    gl_Position = vertexPosition;\n"
    "}\n";

// Fragment shader paired with meshVertexShaderNoNormalTexCoor: solid white.
static const char* fragmentShaderNoNormalTexCoor =
    "precision mediump float;\n"
    "void main() {\n"
    "    gl_FragColor = vec4(1.0, 1.0, 1.0, 1.0);\n"
    "}\n";

unsigned int
initShader(unsigned int shaderType, const char* source)
{
GLuint shader = glCreateShader((GLenum)shaderType);
if (shader)
{
glShaderSource(shader, 1, &source, NULL);
glCompileShader(shader);
GLint compiled = 0;
glGetShaderiv(shader, GL_COMPILE_STATUS, &compiled);

if (!compiled)
{
GLint infoLen = 0;
glGetShaderiv(shader, GL_INFO_LOG_LENGTH, &infoLen);
if (infoLen)
{
char* buf = (char*) malloc(infoLen);
if (buf)
{
glGetShaderInfoLog(shader, infoLen, NULL, buf);
free(buf);
}
glDeleteShader(shader);
shader = 0;
}
}
}
return shader;
}


unsigned int
createProgramFromBuffer(const char* vertexShaderBuffer,
const char* fragmentShaderBuffer)
{

GLuint vertexShader = initShader(GL_VERTEX_SHADER, vertexShaderBuffer);
if (!vertexShader)
{
__android_log_write(ANDROID_LOG_ERROR, "createProgramFromBuffer", "init-vertexShader");
return 0;
}

GLuint fragmentShader = initShader(GL_FRAGMENT_SHADER,
fragmentShaderBuffer);
if (!fragmentShader)
{
__android_log_write(ANDROID_LOG_ERROR, "createProgramFromBuffer", "init-fragmentShader");
return 0;
}

GLuint program = glCreateProgram();
if (program)
{
glAttachShader(program, vertexShader);

glAttachShader(program, fragmentShader);

glLinkProgram(program);
GLint linkStatus = GL_FALSE;
glGetProgramiv(program, GL_LINK_STATUS, &linkStatus);

if (linkStatus != GL_TRUE)
{
GLint bufLength = 0;
glGetProgramiv(program, GL_INFO_LOG_LENGTH, &bufLength);
if (bufLength)
{
char* buf = (char*) malloc(bufLength);
if (buf)
{
glGetProgramInfoLog(program, bufLength, NULL, buf);
free(buf);
}
}
glDeleteProgram(program);
program = 0;
__android_log_write(ANDROID_LOG_ERROR, "glGetProgramiv", "linkStatus");
}
}
__android_log_write(ANDROID_LOG_VERBOSE, "LoaderRenderer", "createProgramFromBuffer-OK");
return program;
}


/*
 * One-time render setup, called from Java with the model's attribute counts
 * and the surface size. Selects a shader pair based on whether the model
 * supplies normals and/or texture coordinates, links the program, and caches
 * the attribute locations in the file-scope handles.
 *
 * NOTE(review): the cubeMeshVertexShader path needs its
 * modelViewProjectionMatrix uniform uploaded before drawing; neither this
 * function nor renderFrame() calls glGetUniformLocation/glUniformMatrix4fv,
 * so that path cannot produce visible geometry until the upload is added.
 */
JNIEXPORT void JNICALL Java_com_company_tests_LoaderRenderer_initRendering
(JNIEnv *env, jobject obj, jint numVNormal, jint numVTexCoord, jint width, jint heigth)
{
    glViewport(0, 0, width, heigth);
    // Define clear color
    glClearColor(0.0f, 0.0f, 0.0f, 0.0f);

    if ((numVNormal > 0) && (numVTexCoord > 0))
    {
        __android_log_write(ANDROID_LOG_VERBOSE, "initRendering", "(numVNormal > 0) && (numVTexCoord > 0)");
        shaderProgramID = createProgramFromBuffer(cubeMeshVertexShader,
                                                  cubeMeshFragmentShader);
        normalHandle = glGetAttribLocation(shaderProgramID, "vertexNormal");
        textureCoordHandle = glGetAttribLocation(shaderProgramID, "vertexTexCoord");
    }
    else if (numVNormal == 0)
    {
        if (numVTexCoord == 0)
        {
            __android_log_write(ANDROID_LOG_VERBOSE, "initRendering", "(numVNormal == 0) && (numVTexCoord == 0)");
            shaderProgramID = createProgramFromBuffer(meshVertexShaderNoNormalTexCoor,
                                                      fragmentShaderNoNormalTexCoor);
        }
        else
        {
            __android_log_write(ANDROID_LOG_VERBOSE, "initRendering", "(numVNormal == 0) && (numVTexCoord > 0)");
            shaderProgramID = createProgramFromBuffer(meshVertexShaderNoNormalTexCoor,
                                                      fragmentShaderNoNormalTexCoor);
            // NOTE(review): this shader pair declares no vertexTexCoord
            // attribute, so this lookup yields -1 and the model's texture
            // coordinates are silently dropped — confirm this is intended.
            textureCoordHandle = glGetAttribLocation(shaderProgramID, "vertexTexCoord");
        }
    }
    else if ((numVNormal > 0) && (numVTexCoord == 0))
    {
        __android_log_write(ANDROID_LOG_VERBOSE, "initRendering", "(numVNormal > 0) && (numVTexCoord == 0)");
        shaderProgramID = createProgramFromBuffer(meshVertexShaderNoNormalTexCoor,
                                                  fragmentShaderNoNormalTexCoor);
        // NOTE(review): same issue as above — this shader declares no
        // vertexNormal attribute, so normalHandle becomes -1.
        normalHandle = glGetAttribLocation(shaderProgramID, "vertexNormal");
    }

    // Bug fix: execution used to continue with a 0 program and bogus
    // attribute handles when shader creation failed.
    if (!shaderProgramID)
    {
        __android_log_write(ANDROID_LOG_ERROR, "initRendering", "shader program creation failed");
        return;
    }

    vertexHandle = glGetAttribLocation(shaderProgramID, "vertexPosition");
    __android_log_write(ANDROID_LOG_VERBOSE, "LoaderRenderer", "initRendering-OK");
}

/*
 * Called from Java when the surface dimensions change: resize the GL
 * viewport to match and clear the colour buffer.
 */
JNIEXPORT void JNICALL Java_com_company_tests_LoaderRenderer_updateRendering(
        JNIEnv* env, jobject obj, jint viewWidth, jint viewHeight)
{
    glViewport(0, 0, viewWidth, viewHeight);
    glClear(GL_COLOR_BUFFER_BIT);
}

/*
 * Draw one frame of the model whose attribute arrays are passed in from
 * Java. normals/texCoord may be empty (length 0); vertices and indices are
 * required.
 *
 * NOTE(review): cubeMeshVertexShader's modelViewProjectionMatrix uniform is
 * never uploaded here (no glUniformMatrix4fv call) — confirm and add the
 * upload, otherwise that shader path draws nothing.
 */
JNIEXPORT void JNICALL Java_com_company_tests_LoaderRenderer_renderFrame
(JNIEnv *env, jobject obj,
 jfloatArray vertices, jfloatArray normals,
 jfloatArray texCoord, jintArray indices)
{
    // Bug fix: initialize the optional pointers; vertNorm/vertTexC were read
    // uninitialized only by luck of the matching guards below.
    jfloat *vertPos = NULL, *vertNorm = NULL, *vertTexC = NULL;
    jint *indicesArray = NULL;

    jsize numNormals = env->GetArrayLength(normals);
    jsize numTexCoords = env->GetArrayLength(texCoord);
    GLsizei numIndices = (GLsizei) env->GetArrayLength(indices);

    vertPos = env->GetFloatArrayElements(vertices, 0);
    indicesArray = env->GetIntArrayElements(indices, 0);

    if (numNormals > 0)
        vertNorm = env->GetFloatArrayElements(normals, 0);
    if (numTexCoords > 0)
        vertTexC = env->GetFloatArrayElements(texCoord, 0);

    // Bug fix: the indices arrive as 32-bit jints but were passed to
    // glDrawElements as GL_UNSIGNED_SHORT, so GL interpreted each jint as two
    // bogus 16-bit indices. OpenGL ES 2.0 core only supports 8/16-bit index
    // types, so convert to GLushort (assumes index values fit in 16 bits —
    // TODO confirm model size).
    GLushort* shortIndices = (GLushort*) malloc((size_t)numIndices * sizeof(GLushort));
    if (shortIndices)
    {
        for (GLsizei i = 0; i < numIndices; ++i)
            shortIndices[i] = (GLushort) indicesArray[i];
    }
    else
    {
        __android_log_write(ANDROID_LOG_ERROR, "renderFrame", "index buffer allocation failed");
    }

    // Clear color and depth buffer
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

    glEnable(GL_DEPTH_TEST);
    glEnable(GL_CULL_FACE);

    glUseProgram(shaderProgramID);

    glVertexAttribPointer(vertexHandle, 3, GL_FLOAT, GL_FALSE, 0,
                          (const GLvoid*) vertPos);
    if (numNormals > 0)
        glVertexAttribPointer(normalHandle, 3, GL_FLOAT, GL_FALSE, 0,
                              (const GLvoid*) vertNorm);
    if (numTexCoords > 0)
        glVertexAttribPointer(textureCoordHandle, 2, GL_FLOAT, GL_FALSE, 0,
                              (const GLvoid*) vertTexC);

    glEnableVertexAttribArray(vertexHandle);
    if (numNormals > 0)
        glEnableVertexAttribArray(normalHandle);
    if (numTexCoords > 0)
        glEnableVertexAttribArray(textureCoordHandle);

    if (shortIndices)
        glDrawElements(GL_TRIANGLES, numIndices, GL_UNSIGNED_SHORT,
                       (const GLvoid*) shortIndices);

    glDisable(GL_DEPTH_TEST);
    // Bug fix: GL_CULL_FACE was enabled each frame but never disabled,
    // leaking state to any other rendering done outside this function.
    glDisable(GL_CULL_FACE);

    glDisableVertexAttribArray(vertexHandle);
    if (numNormals > 0)
        glDisableVertexAttribArray(normalHandle);
    if (numTexCoords > 0)
        glDisableVertexAttribArray(textureCoordHandle);

    free(shortIndices);

    // JNI_ABORT: nothing here writes through these pointers, so skip the
    // needless copy-back that mode 0 performs.
    if (numNormals > 0)
        env->ReleaseFloatArrayElements(normals, vertNorm, JNI_ABORT);
    if (numTexCoords > 0)
        env->ReleaseFloatArrayElements(texCoord, vertTexC, JNI_ABORT);

    env->ReleaseFloatArrayElements(vertices, vertPos, JNI_ABORT);
    env->ReleaseIntArrayElements(indices, indicesArray, JNI_ABORT);
}
#ifdef __cplusplus
}
#endif




Please, do you see where is my error?

Thanks.

[Edited by - VansFannel on December 8, 2010 7:52:35 AM]

Share this post


Link to post
Share on other sites
Advertisement
Quote:
Original post by VansFannel
Please, do you see where is my error?


No. But have you seen the FAQ and grasped what it says in "How to ask questions the smart way"? And further, did you read the famous "What have you tried?" website?

Share this post


Link to post
Share on other sites
Note there is also an article on debugging on gamedev.net and you still haven't read the other two thingies I've written about.

Share this post


Link to post
Share on other sites
Is there an article about debugging Android native code on gamedev.net?

I think it would be more useful if you recommend me articles about OpenGL ES 2.0 programming, or something about my question instead of http://www.catb.org/~esr/faqs/smart-questions.html.

I have put my code here because I thought someone with experience in OpenGL programming could spot my error easily.

I'm so sorry if you don't like the way I have done my question but I prefer to waste my time learning OpenGL and not talking with you about the way I ask.

Share this post


Link to post
Share on other sites
Quote:
Original post by VansFannel
Is there an article about debugging Android native code on gamedev.net?

I think it would be more useful if you recommend me articles about OpenGL ES 2.0 programming, or something about my question instead of http://www.catb.org/~esr/faqs/smart-questions.html.

I have put my code here because I though someone with experience on OpenGL programming could see my error easily.

I'm so sorry if you don't like the way I have done my question but I prefer to waste my time learning OpenGL and not talking with you about the way I ask.


You *should* spend some time on how to find errors in your code.

A question like
Quote:
Please, do you see where is my error?

upon a big fat chunk of badly formatted code only indicates that your real problem is not an OpenGL-problem, but a "can't help myself or have not tried hard enough"-problem.


A question like
Quote:
Please, do you see where is my error?

upon a big fat chunk of badly formatted code clearly shows that you don't even know where the error is at least roughly at. If that's not true, than it shows that you are lazy.


A statement like
Quote:
but I prefer to waste my time learning OpenGL and not talking with you about the way I ask.

shows that you might only be interested in ready solutions and not in learning how to help yourself. You don't want to waste time learning how to fish, you just want the fish. But you expect us to waste our unpaid and voluntary time with very, very non-specific questions and big code-dumps, not even remotely going in detail about your exact problem.


Before you state something like "phresnel, the funny thing is, you are wasting time with this post, why not with my code", then my answer is, no. I spent my time not only in assembling ready solutions, but also in other things, like touch-typing, so it took me <5 minutes, an average cigarette pause*, to write this shit.


* smoking hurts your body and your environment

Share this post


Link to post
Share on other sites
While you are answering my question, I'm checking my code to find my error.

If you believe that I haven't tested my code enough before posting it here, you are wrong. I have posted my code here because I can't find the error by myself.

And I see you have time to tell me how must I make my question, but you don't have time to check my code.

Sorry, but I don't have time to continue answering you.

Share this post


Link to post
Share on other sites
Quote:
Original post by VansFannel
While you are answering my question, I'm checking my code to find my error.

If you believe that I haven't test my code enough before post it here, you are wrong. If I have posted my code here is because I can't find the error by myself.

And I see you have time to tell me how must I make my question, but you don't have time to check my code.


Then, why not state what you have tried and what you've found out, so we don't waste time hunting for solutions that you were already unlucky at?

Quote:
Sorry, but I don't have time to continue answering you.

You know. We all have stuff to do, not only you. That's the reason why we often post seemingly harsh comments like this. It even happens that the most experienced here use to be older than many newbies, and therefore have even less time to waste.


Simply put: As long as you are on unpaid forums where ppl do voluntary work, if you want ppl to help you, try to minimize the time they must spend at your problem.



Quote:
And I see you have time to tell me how must I make my question, but you don't have time to check my code.

If you look back, you'll see I knew this cockiness comes.

Share this post


Link to post
Share on other sites
But if you would tell us what you have tried already, that would be a great help to us, that means we would have a better chance to help you.

So tell us what you have tried.

And yes, Phresnel took the time and effort to lecture you, but it is for the others: you seem to be a bit lazy based on your posting history, so we would benefit from your "improvement". And he's right: why do WE have to to ALL the debugging for you?

Again: even if you had done your work with it, we don't have a clue because you don't show us what you did.

Share this post


Link to post
Share on other sites
I think the question is simple: it doesn't draw anything on screen. I see a completely black screen. And glGetError doesn't return anything.

The vertices, normals, and texture coordinates have data, and the attribute bindings don't raise any error.

There aren't errors but it doesn't draw anything.

May I have to do some initialization?

Share this post


Link to post
Share on other sites
What are you actually trying to do?
What do you expect from the code?
What do you want to be drawn?
Can you draw stuff at all? (Don't expect us to be "up to date" with your knowledge and project, and don't expect us to dig through your posting history just to understand your project).

Share this post


Link to post
Share on other sites
I'm trying to draw a CUBE.

I have exported a model from Blender to Wavefront .OBJ, loaded it in Java into arrays, and passed it to native code. The problem isn't what I'm trying to draw, because I use a model parsed correctly into vertices, normals, and texture coordinates and I see the same thing: a black screen.

Methods Order:

1. Java_com_company_tests_LoaderRenderer_initRendering

Uses to:
a ) Create program.
b ) Select correct vertex and fragment shader.
c ) Get attribute locations for attributes needed (I always need vertices coordinates and indices). Here I see if I will need normal and texture coordinate attributes.

2. Java_com_company_tests_LoaderRenderer_renderFrame

Here I draw the cube.
a ) I retrieve vertices, normals, texture coordinates and indices parameters.
b ) Enable vertices, normals and texture coordinate attributes.
c ) Draw model with glDrawElements.
d ) Disable attributes.
e ) JNI cleaning code.

Other functions are called from this functions.
Java_com_company_tests_LoaderRenderer_updateRendering is called when screen size change.

I use jfloatArray vertices, jfloatArray normals, jfloatArray texCoord, jintArray indices are arrays to draw the model. Cube's vertices are in these arrays. I have these arrays to draw many models.

I take a working example that read vertices, normals, texture coordinates and indices from a header file. I've modified the example to work with many models, not with only a model defined inside a header file.

If I use another model, it continues drawing nothing.

I haven't drawn anything yet.

Do you need more details?

My project is an Android application with native code. All OpenGL ES 2.0 stuff is here.

Share this post


Link to post
Share on other sites
what's this?

env->ReleaseFloatArrayElements(vertices, vertPos, 0);

Are you deleting the vertices? If you are, then nothing will draw according to your code. Try commenting this line and the one below it out.

Share this post


Link to post
Share on other sites
I'm releasing pointer to array elements. This is necessary.

I've tried to remove those lines and application crash.

Thank you for your help.

Share this post


Link to post
Share on other sites
oops I don't know anything about the coding for the android yet so I'll stick to opengl since that's most likely the problem.

try a simple triangle with an orthographic projection. I would think if that works then it may be a problem with the vertices. Or even more so draw with points instead.

another thing about your vertex shader:


static const char* meshVertexShaderNoNormalTexCoor = "
attribute vec4 vertexPosition;
void main() {
gl_Position = vertexPosition;
} ";








This is fine but would really only show up for 2d stuff. you may need to create a modelview and projection matrix.

gl_Position = ProjectionMatrix * ModelViewMatrix * vertexPosition;

which ProjectionMatrix and ModelViewMatrix have to be shader uniforms. So it's not that it's drawing the cube but that the transformation are not right. at least for a 3d object.

use GLM math library: http://glm.g-truc.net/
This a good math library.

This is a shader from the opengl es 2.0 quick reference card
here: http://www.khronos.org/opengles/2_X/


VERTEX SHADER

uniform mat4 mvp_matrix; // model-view-projection matrix
uniform mat3 normal_matrix; // normal matrix
uniform vec3 ec_light_dir; // light direction in eye coords
attribute vec4 a_vertex; // vertex position
attribute vec3 a_normal; // vertex normal
attribute vec2 a_texcoord; // texture coordinates
varying float v_diffuse;
varying vec2 v_texcoord;

void main(void)
{
// put vertex normal into eye coords
vec3 ec_normal = normalize(normal_matrix * a_normal);
// emit diffuse scale factor, texcoord, and position
v_diffuse = max(dot(ec_light_dir, ec_normal), 0.0);
v_texcoord = a_texcoord;
gl_Position = mvp_matrix * a_vertex;
}

FRAGMENT SHADER

precision mediump float;
uniform sampler2D t_reflectance;
uniform vec4 i_ambient;
varying float v_diffuse;
varying vec2 v_texcoord;
void main (void)
{
vec4 color = texture2D(t_reflectance, v_texcoord);
gl_FragColor = color * (vec4(v_diffuse) + i_ambient);
}





Share this post


Link to post
Share on other sites
I've changed vertexShader and fragmentShader with these:


static const char* cubeMeshVertexShader = "
attribute vec4 vertexPosition;
attribute vec4 vertexNormal;
attribute vec2 vertexTexCoord;
varying vec2 texCoord;
varying vec4 normal;
void main() {
gl_Position = gl_ProjectionMatrix * gl_ModelViewMatrix * vertexPosition;
normal = vertexNormal;
texCoord = vertexTexCoord;
} ";


static const char* cubeMeshFragmentShader = "
precision mediump float;
varying vec2 texCoord;
varying vec4 normal;
uniform sampler2D texSampler2D;
void main() {
gl_FragColor = vec4(1.0, 1.0, 1.0, 1.0);
} ";




And I see nothing.

[Edited by - VansFannel on December 8, 2010 12:52:11 PM]

Share this post


Link to post
Share on other sites
did you set the matrices themselves.
You define a matrix in c++ then pass that matrix to the shader so...

You need to make the matrices uniform and feed in the data to them:
gl_ProjectionMatrix
gl_ModelViewMatrix


uniform mat4 gl_ProjectionMatrix;
uniform mat4 gl_ModelViewMatrix;

The usual gl_ProjectionMatrix or gl_ModelViewMatrix doesn't work in opengl es 2.0 only 1.0 which mean those used to be built in functions are deprecated and won't work.

Use GLM math library to construct your matrices or some other math library. Android may already have a math lib for this reason.

This is an example of the code from glm. It's the same thing just the only 1 matrix is passed to the shader as a

uniform mat4 MVP

matrix, so you could do the same. It's no different then splitting up the matrix work.

You should really check out glm.
This is the link to example code showing how the math lib works:
http://glm.g-truc.net/code.html


// glm::vec3, glm::vec4, glm::ivec4, glm::mat4
#include <glm/glm.hpp>
// glm::perspective
#include <glm/gtc/matrix_projection.hpp>
// glm::translate, glm::rotate, glm::scale
#include <glm/gtc/matrix_transform.hpp>
// glm::value_ptr
#include <glm/gtc/type_ptr.hpp>

{
glm::mat4 Projection =
glm::perspective(45.0f, 4.0f / 3.0f, 0.1f, 100.f);
glm::mat4 ViewTranslate = glm::translate(
glm::mat4(1.0f),
glm::vec3(0.0f, 0.0f, -Translate));
glm::mat4 ViewRotateX = glm::rotate(
ViewTranslate,
Rotate.y, glm::vec3(-1.0f, 0.0f, 0.0f));
glm::mat4 View = glm::rotate(
ViewRotateX,
Rotate.x, glm::vec3(0.0f, 1.0f, 0.0f));
glm::mat4 Model = glm::scale(
glm::mat4(1.0f),
glm::vec3(0.5f));
glm::mat4 ModelView = View * Model;

glUniformMatrix4fv(
LocationModelView, 1, GL_FALSE, glm::value_ptr(ModelView));

glUniformMatrix4fv(
LocationProjection, 1, GL_FALSE, glm::value_ptr(Projection));
}


Share this post


Link to post
Share on other sites
You are right. The problem is with my ProjectionMatrix and ModelViewMatrix.

I don't know how to set up them, but I'm sure the problem is with them.

Thank you very much for your time and help.

If you need more information, tell me.

Share this post


Link to post
Share on other sites
if you look at my prev post that code at the bottom shows you how to make setup the projection and modelview matrices using glm math lib and there's the website to download the lib. Once you have your matrices setup pass them to shader as uniforms:

give it a try and if nothing shows up post again.

Share this post


Link to post
Share on other sites
Sign in to follow this  

  • Advertisement
  • Advertisement
  • Popular Tags

  • Advertisement
  • Popular Now

  • Similar Content

    • By LifeArtist
      Good Evening,
      I want to make a 2D game which involves displaying some debug information. Especially for collision, enemy sights and so on ...
      First of I was thinking about all those shapes which I need will need for debugging purposes: circles, rectangles, lines, polygons.
      I am really stucked right now because of the fundamental question:
      Where do I store my vertices positions for each line (object)? Currently I am not using a model matrix because I am using orthographic projection and set the final position within the VBO. That means that if I add a new line I would have to expand the "points" array and re-upload (recall glBufferData) it every time. The other method would be to use a model matrix and a fixed vbo for a line but it would be also messy to exactly create a line from (0,0) to (100,20) calculating the rotation and scale to make it fit.
      If I proceed with option 1 "updating the array each frame" I was thinking of having 4 draw calls every frame for the lines vao, polygons vao and so on. 
      In addition to that I am planning to use some sort of ECS based architecture. So the other question would be:
      Should I treat those debug objects as entities/components?
      For me it would make sense to treat them as entities but that's creates a new issue with the previous array approach because it would have for example a transform and render component. A special render component for debug objects (no texture etc) ... For me the transform component is also just a matrix but how would I then define a line?
      Treating them as components would'nt be a good idea in my eyes because then I would always need an entity. Well entity is just an id !? So maybe its a component?
      Regards,
      LifeArtist
    • By QQemka
      Hello. I am coding a small thingy in my spare time. All i want to achieve is to load a heightmap (as the lowest possible walking terrain), some static meshes (elements of the environment) and a dynamic character (meaning i can move, collide with heightmap/static meshes and hold a varying item in a hand ). Got a bunch of questions, or rather problems i can't find solution to myself. Nearly all are deal with graphics/gpu, not the coding part. My c++ is on high enough level.
      Let's go:
      Heightmap - i obviously want it to be textured, size is hardcoded to 256x256 squares. I can't have one huge texture stretched over entire terrain cause every pixel would be enormous. Thats why i decided to use 2 specified textures. First will be a tileset consisting of 16 square tiles (u v range from 0 to 0.25 for first tile and so on) and second a 256x256 buffer with 0-15 value representing index of the tile from tileset for every heigtmap square. Problem is, how do i blend the edges nicely and make some computationally cheap changes so its not obvious there are only 16 tiles? Is it possible to generate such terrain with some existing program?
      Collisions - i want to use bounding sphere and aabb. But should i store them for a model or entity instance? Meaning i have 20 same trees spawned using the same tree model, but every entity got its own transformation (position, scale etc). Storing collision component per instance grats faster access + is precalculated and transformed (takes additional memory, but who cares?), so i stick with this, right? What should i do if object is dynamically rotated? The aabb is no longer aligned and calculating per vertex min/max everytime object rotates/scales is pretty expensive, right?
      Drawing aabb - problem similar to above (storing aabb data per instance or model). This time in my opinion per model is enough since every instance also does not have own vertex buffer but uses the shared one (so 20 trees share reference to one tree model). So rendering aabb is about taking the model's aabb, transforming with instance matrix and voila. What about aabb vertex buffer (this is more of a cosmetic question, just curious, bumped onto it in time of writing this). Is it better to make it as 8 points and index buffer (12 lines), or only 2 vertices with min/max x/y/z and having the shaders dynamically generate 6 other vertices and draw the box? Or maybe there should be just ONE 1x1x1 cube box template moved/scaled per entity?
      What if one model got a diffuse texture and a normal map, and other has only diffuse? Should i pass some bool flag to shader with that info, or just assume that my game supports only diffuse maps without fancy stuff?
      There were several more but i forgot/solved them at time of writing
      Thanks in advance
    • By RenanRR
      Hi All,
      I'm reading the tutorials from learnOpengl site (nice site) and I'm having a question on the camera (https://learnopengl.com/Getting-started/Camera).
      I always saw the camera being manipulated with the lookat, but in tutorial I saw the camera being changed through the MVP arrays, which do not seem to be camera, but rather the scene that changes:
      Vertex Shader:
      #version 330 core layout (location = 0) in vec3 aPos; layout (location = 1) in vec2 aTexCoord; out vec2 TexCoord; uniform mat4 model; uniform mat4 view; uniform mat4 projection; void main() { gl_Position = projection * view * model * vec4(aPos, 1.0f); TexCoord = vec2(aTexCoord.x, aTexCoord.y); } then, the matrix manipulated:
      ..... glm::mat4 projection = glm::perspective(glm::radians(fov), (float)SCR_WIDTH / (float)SCR_HEIGHT, 0.1f, 100.0f); ourShader.setMat4("projection", projection); .... glm::mat4 view = glm::lookAt(cameraPos, cameraPos + cameraFront, cameraUp); ourShader.setMat4("view", view); .... model = glm::rotate(model, glm::radians(angle), glm::vec3(1.0f, 0.3f, 0.5f)); ourShader.setMat4("model", model);  
      So, some doubts:
      - Why use it like that?
      - Is it okay to manipulate the camera that way?
      -in this way, are not the vertex's positions that changes instead of the camera?
      - I need to pass MVP to all shaders of object in my scenes ?
       
      What it seems, is that the camera stands still and the scenery that changes...
      it's right?
       
       
      Thank you
       
    • By dpadam450
      Sampling a floating point texture where the alpha channel holds 4-bytes of packed data into the float. I don't know how to cast the raw memory to treat it as an integer so I can perform bit-shifting operations.

      int rgbValue = int(textureSample.w);//4 bytes of data packed as color
      // algorithm might not be correct and endianness might need switching.
      vec3 extractedData = vec3(  rgbValue & 0xFF000000,  (rgbValue << 8) & 0xFF000000, (rgbValue << 16) & 0xFF000000);
      extractedData /= 255.0f;
    • By Devashish Khandelwal
      While writing a simple renderer using OpenGL, I faced an issue with the glGetUniformLocation function. For some reason, the location is coming to be -1.
      Anyone has any idea .. what should I do?
  • Advertisement