Why is this texture displaying all black?
Here is the function where I call all of my draw operations; I also compile the shaders inline in this function. The quad displays correctly when I just use colors for each vertex, but textures don't want to work at all.
Here is the code:
void doGL2() {
    glTranslatef(0.f, 0.f, 0.f);
    const GLchar* vertShader[] = { R"glsl(
        #version 330 core
        layout (location = 0) in vec3 aPos;
        layout (location = 1) in vec3 aColor;
        layout (location = 2) in vec2 aTexPos;
        out vec3 col;
        out vec2 texturePos;
        void main()
        {
            gl_Position = vec4(aPos, 1.0);
            col = aColor;
            texturePos = aTexPos;
        }
    )glsl" };
    const GLchar* fragShader[] = { R"glsl(
        #version 330 core
        out vec4 FragColor;
        in vec3 col;
        in vec2 texturePos;
        uniform sampler2D t;
        uniform vec2 iResolution;
        void main()
        {
            vec2 uv = gl_FragCoord.xy / iResolution;
            //FragColor = vec4(iResolution, 1., 1.);
            FragColor = mix(vec4(col, 1.0f), texture(t, gl_FragCoord.xy / vec2(1200., 675.)), 1.);
        }
    )glsl" };
    vShader2 = glCreateShader(GL_VERTEX_SHADER);
    fShader2 = glCreateShader(GL_FRAGMENT_SHADER);
    glShaderSource(vShader2, 1, vertShader, NULL);
    glCompileShader(vShader2);
    GLint success;
    GLchar infoLog[1024];
    glGetShaderiv(vShader2, GL_COMPILE_STATUS, &success);
    if (!success)
    {
        glGetShaderInfoLog(vShader2, 1024, NULL, infoLog);
        throw std::runtime_error("\noh no vert\n");
    }
    glShaderSource(fShader2, 1, fragShader, NULL);
    glCompileShader(fShader2);
    glGetShaderiv(fShader2, GL_COMPILE_STATUS, &success);
    if (!success)
    {
        glGetShaderInfoLog(fShader2, 1024, NULL, infoLog);
        throw std::runtime_error("\noh no frag\n");
    }
    shaderProgram2 = glCreateProgram();
    glAttachShader(shaderProgram2, vShader2);
    glAttachShader(shaderProgram2, fShader2);
    glLinkProgram(shaderProgram2);
    GLfloat screen[2] = { 1200.0f, 675.0f };
    glUseProgram(shaderProgram2);
    glUniform2fv(glGetUniformLocation(shaderProgram2, "iResolution"), 2, screen);
    glUniform1i(glGetUniformLocation(shaderProgram2, "t"), 10);
    //not sure if I should do this on cleanup or now
    glDeleteShader(vShader2);
    glDeleteShader(fShader2);
    //lets make a temporary test texture
    int width, height, channels;
    unsigned char* data = stbi_load("tex2.jpg", &width, &height, &channels, 0);
    GLuint tempT = 1;
    glActiveTexture(GL_TEXTURE10);
    glEnable(GL_TEXTURE_2D);
    glGenTextures(1, &tempT);
    glBindTexture(GL_TEXTURE_2D, tempT);
    // set texture options
    //glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
    //glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
    //glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    //glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    //glPixelStorei(GL_UNPACK_ALIGNMENT, 2);
    glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, width, height, 0, GL_RGBA, GL_UNSIGNED_BYTE, data);
    stbi_image_free(data);
    GLfloat vertices[] = {
        // positions        // colors          // texture positions
        0.0f, 0.0f, 1.0f,   1.0f, 0.0f, 0.0f,  0.0f, 0.0f, // bottom left
        1.0f, 0.0f, 1.0f,   0.0f, 1.0f, 0.0f,  1.0f, 0.0f, // bottom right
        1.0f, 1.0f, 1.0f,   0.0f, 0.0f, 1.0f,  1.0f, 1.0f, // top right
        0.0f, 0.0f, 1.0f,   1.0f, 0.0f, 0.0f,  0.0f, 0.0f, // bottom left
        0.0f, 1.0f, 1.0f,   0.0f, 1.0f, 0.0f,  0.0f, 1.0f, // top left
        1.0f, 1.0f, 1.0f,   0.0f, 0.0f, 1.0f,  1.0f, 1.0f  // top right
    };
    glGenVertexArrays(1, &VAO2);
    glGenBuffers(1, &VBO2);
    glBindVertexArray(VAO2);
    glBindBuffer(GL_ARRAY_BUFFER, VBO2);
    glBufferData(GL_ARRAY_BUFFER, sizeof(vertices), vertices, GL_STATIC_DRAW);
    //positions
    glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 8 * sizeof(float), (void*)0);
    glEnableVertexAttribArray(0);
    //colors
    glVertexAttribPointer(1, 3, GL_FLOAT, GL_FALSE, 8 * sizeof(float), (void*)(3 * sizeof(float)));
    glEnableVertexAttribArray(1);
    //texture positions
    glVertexAttribPointer(2, 2, GL_FLOAT, GL_FALSE, 8 * sizeof(float), (void*)(6 * sizeof(float)));
    glEnableVertexAttribArray(2);
    float initTime = glutGet(GLUT_ELAPSED_TIME);
    float prevTime = initTime;
    float dt = 0.0f;
    while (prevTime - initTime < 1000.0f) {
        dt = (glutGet(GLUT_ELAPSED_TIME) - prevTime) / 1000.0f;
        prevTime = glutGet(GLUT_ELAPSED_TIME);
        glClearColor(1.f, 1.f, 1.f, 1.f);
        glClear(GL_COLOR_BUFFER_BIT);
        glBindVertexArray(VAO2);
        glBindTexture(GL_TEXTURE_2D, tempT);
        glDrawArrays(GL_TRIANGLES, 0, 6);
        SwapBuffers(GetDC(w));
        glFlush();
    }
}
For some reason, when I instead bind the texture to GL_TEXTURE0, it displays correctly.
Edit:
I am making a further edit, as my original question did not properly describe the problem. It seems that I am unable to bind any texture to any texture unit other than GL_TEXTURE0. I have seen other forums with this problem, but the answers there all address issues that are not present here. As you can see, I am trying to bind this texture to GL_TEXTURE10, yet the output is all black. When I bind to GL_TEXTURE0, however, the texture shows up fine. I have changed the code to reflect Rabbid's answer; those were indeed errors, but not errors that affected this problem, so the code above reflects the current state.
Also, I printed tempT and it is always 1, meaning it is binding to GL_TEXTURE0 every time despite what I pass to glActiveTexture().
Solution 1:
If you are not generating mipmaps (with glGenerateMipmap), it is important to set GL_TEXTURE_MIN_FILTER. Since the default filter is GL_NEAREST_MIPMAP_LINEAR, the texture would be "mipmap incomplete" if you do not change the minification filter to GL_NEAREST or GL_LINEAR:
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
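Alternatively, if you prefer to keep a mipmap filter, the texture can be made mipmap complete by generating the mipmap chain right after uploading the image (this requires a GL 3.0+ context, which the #version 330 shaders already imply); a minimal sketch using the question's width, height and data:
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, width, height, 0, GL_RGBA, GL_UNSIGNED_BYTE, data);
glGenerateMipmap(GL_TEXTURE_2D); // builds the full mipmap chain, so the default GL_NEAREST_MIPMAP_LINEAR filter works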
In addition, the size of the image (width, height) must be specified instead of (1, 1):
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, 1, 1, 0, GL_RGBA, GL_UNSIGNED_BYTE, data);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, width, height, 0, GL_RGBA, GL_UNSIGNED_BYTE, data);
It is not guaranteed that stbi_load returns an image with 4 color channels. Force stbi_load to generate an image with 4 color channels by explicitly passing 4 as the last parameter:
unsigned char* data = stbi_load("tex2.jpg", &width, &height, &channels, 0);
unsigned char* data = stbi_load("tex2.jpg", &width, &height, &channels, 4);
If you use a shader program, you don't need to enable texturing; whether a texture is used or not is decided by the shader program. glEnable(GL_TEXTURE_2D) is legacy OpenGL, so remove it.
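Putting those fixes together, the texture creation part of the question's function could look like the following sketch (tempT, width, height, channels and "tex2.jpg" are the names from the question; the clamp wrap modes are just a reasonable default, not something the question requires):
int width, height, channels;
unsigned char* data = stbi_load("tex2.jpg", &width, &height, &channels, 4); // force 4 channels (RGBA)
GLuint tempT = 0;
glGenTextures(1, &tempT);
glActiveTexture(GL_TEXTURE10);
glBindTexture(GL_TEXTURE_2D, tempT);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR); // texture is complete without mipmaps
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, width, height, 0, GL_RGBA, GL_UNSIGNED_BYTE, data);
stbi_image_free(data);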
Apart from that, your code works fine; I tested it. However, the number of texture units is limited. Make sure you use a texture unit within the range supported by your system. You can get the number of available texture units with GL_MAX_COMBINED_TEXTURE_IMAGE_UNITS, e.g.:
int max_units;
glGetIntegerv(GL_MAX_COMBINED_TEXTURE_IMAGE_UNITS, &max_units);
std::cout << "GL_MAX_COMBINED_TEXTURE_IMAGE_UNITS " << max_units << std::endl;
"Also, I printed tempT and it always returns as 1, meaning it is binding to GL_TEXTURE0 every time despite what I call in glActiveTexture()."
The texture unit has nothing to do with the texture object ID. The value of tempT does not indicate the texture unit to which the texture will be bound. You must set the texture unit before binding the texture with glBindTexture, but the active unit has no effect on glGenTextures. You can bind the texture to multiple texture units, or to different texture units at different times. The texture unit is the binding point between the texture object and the texture sampler uniform.
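For instance, a sketch using the question's tempT: the same texture object can be bound to two units at once, and its ID stays 1 the whole time.
glActiveTexture(GL_TEXTURE0);
glBindTexture(GL_TEXTURE_2D, tempT);  // tempT (ID 1) bound to unit 0
glActiveTexture(GL_TEXTURE10);
glBindTexture(GL_TEXTURE_2D, tempT);  // the same object bound to unit 10; tempT is still 1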
If you want to use a texture in a shader program, you need to make sure that the texture is bound to a texture unit and that this unit is set on the texture sampler uniform.
I suggest setting the texture unit before binding the texture and using it to draw the object:
GLint t_location = glGetUniformLocation(shaderProgram2, "t");
glUseProgram(shaderProgram2);
glUniform1i(t_location, 10);
glActiveTexture(GL_TEXTURE10);
glBindTexture(GL_TEXTURE_2D, tempT);
glDrawArrays(GL_TRIANGLES, 0, 6);
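If you are unsure which unit is actually active and what is bound to it when you draw, you can query the current state; a small diagnostic sketch (the active unit is reported as the raw enum value, e.g. 0x84CA for GL_TEXTURE10):
GLint active_unit = 0, bound_tex = 0;
glGetIntegerv(GL_ACTIVE_TEXTURE, &active_unit);    // currently active texture unit (enum value)
glGetIntegerv(GL_TEXTURE_BINDING_2D, &bound_tex);  // texture object bound to GL_TEXTURE_2D on that unit
std::cout << "active unit: " << std::hex << active_unit
          << " bound texture: " << std::dec << bound_tex << std::endl;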
Sources
This article follows the attribution requirements of Stack Overflow and is licensed under CC BY-SA 3.0.
Source: Stack Overflow