I'm trying to render an EGLImageKHR that I get from NVIDIA's decoder into a framebuffer, so that I can then render that framebuffer to the screen. My code does the following: it creates two textures, frameBufferTexture and externalTexture, writes the EGLImage into externalTexture, draws from externalTexture into frameBufferTexture, and finally reads frameBufferTexture back with glReadPixels:
glGenFramebuffers(1, &frameBuffer);
glBindFramebuffer(GL_FRAMEBUFFER, frameBuffer);

glGenTextures(1, &externalTexture);
glGenTextures(1, &frameBufferTexture);

// Allocate storage for the render-target texture
glBindTexture(GL_TEXTURE_2D, frameBufferTexture);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA8, decodedNvFrame->width, decodedNvFrame->height, 0, GL_RGBA, GL_UNSIGNED_BYTE, 0);
glUniform1i(texLocation, 0);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);

glBindFramebuffer(GL_FRAMEBUFFER, frameBuffer);
glBindTexture(GL_TEXTURE_2D, frameBufferTexture);

// Wrap the decoder's output buffer in an EGLImage
EGLImageKHR hEglImage = NvEGLImageFromFd(eglDisplay, decodedNvFrame->nvBuffer->planes[0].fd);
if (!hEglImage)
    printf("Could not get EglImage from fd. Not rendering\n");

// Attach the render-target texture to the framebuffer
glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, frameBufferTexture, 0);

// Bind the EGLImage to the source texture
glBindTexture(GL_TEXTURE_2D, externalTexture);
glEGLImageTargetTexture2DOES(GL_TEXTURE_2D, hEglImage);
glUniform1i(texLocation, 0);

// Draw a quad that samples externalTexture into the framebuffer
glBindVertexArray(vertexArrayObject);
glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);

// Read part of the result back
glReadBuffer(GL_COLOR_ATTACHMENT0);
GLenum frameBufferStatus = glCheckFramebufferStatus(GL_FRAMEBUFFER);
if (frameBufferStatus != GL_FRAMEBUFFER_COMPLETE) {
    printf("frameBufferStatus problem!\n");
    abort();
}
glReadPixels(0, 0, 512, 512, GL_RGBA, GL_UNSIGNED_BYTE, r);
for (int i = 0; i < 100; ++i)
{
    printf("%i ", r[i]);
}
printf("\n");
NvDestroyEGLImage(eglDisplay, hEglImage);
Note that I'm using glReadPixels to read back just a piece of the framebuffer so I can see what's happening. This is the output I get:
0 0 0 255 0 0 0 255 0 0 0 255 ...
which I guess comes from the glTexImage2D call where I pass 0 as the data pointer. That suggests glEGLImageTargetTexture2DOES isn't pushing my image into externalTexture.
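To rule out a silent failure, one thing I could add (a minimal sketch, not part of the code above) is a glGetError check right after the glEGLImageTargetTexture2DOES call:

glBindTexture(GL_TEXTURE_2D, externalTexture);
glEGLImageTargetTexture2DOES(GL_TEXTURE_2D, hEglImage);

// If the target or the EGLImage is rejected, the error would show up here
GLenum err = glGetError();
if (err != GL_NO_ERROR)
    printf("glEGLImageTargetTexture2DOES error: 0x%x\n", err);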
Here's the fragment shader:
#version 330 core
out vec4 FragColor;
in vec2 TexCoord;
uniform sampler2D tex;
void main()
{
    FragColor = texture(tex, TexCoord);
}
If I change the fragment shader's output to FragColor = vec4(1.0, texture(tex, TexCoord).r, 0.0, 1.0), I get this output:
255 0 0 255 255 0 0 255 255 0 0 255 ...
which means the shader is running and writing to the framebuffer. If I put
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA8, 512, 512, 0, GL_RGBA, GL_UNSIGNED_BYTE, d);
right after glBindTexture(GL_TEXTURE_2D, externalTexture);, I can see the contents of d, which means the fragment shader is correctly sampling externalTexture and writing its data to the framebuffer.
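(Here d is just a CPU-side test buffer; anything with a recognizable pattern works. A hypothetical fill, for illustration only:)

// Hypothetical 512x512 RGBA test pattern: solid green
static unsigned char d[512 * 512 * 4];
for (int i = 0; i < 512 * 512; ++i) {
    d[i * 4 + 0] = 0;    // R
    d[i * 4 + 1] = 255;  // G
    d[i * 4 + 2] = 0;    // B
    d[i * 4 + 3] = 255;  // A
}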
That points the problem at glEGLImageTargetTexture2DOES: it isn't filling externalTexture with my image. Why?
PS: I suspect eglDisplay. Why do I need a display to create an EGLImageKHR? I've seen NVIDIA code that uses NvEGLImageFromFd and passes an eglDisplay obtained from an X11 window, but I'm using GTK, and I don't understand why an EGL display matters here: first because I'm rendering into a framebuffer object, and second because I render to GTK's framebuffer when I want to put images on screen.
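For reference, this is roughly how I would obtain a display without tying it to an X11 window (a minimal sketch; whether a display obtained this way works with NvEGLImageFromFd is part of what I'm unsure about):

// Ask EGL for the default display instead of one derived from an X11 window
EGLDisplay eglDisplay = eglGetDisplay(EGL_DEFAULT_DISPLAY);
if (eglDisplay == EGL_NO_DISPLAY)
    printf("eglGetDisplay failed\n");

// The display must be initialized before it can back any EGLImage
if (!eglInitialize(eglDisplay, NULL, NULL))
    printf("eglInitialize failed\n");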
Check the example implementations in NVIDIA's Tegra Multimedia API:
tegra_multimedia_api\argus\samples\utils\PreviewConsumer.cpp
tegra_multimedia_api\samples\common\classes\NvEglRenderer.cpp
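If I recall those samples correctly, they bind the EGLImage to the GL_TEXTURE_EXTERNAL_OES target rather than GL_TEXTURE_2D, and sample it in an OpenGL ES fragment shader through samplerExternalOES. A rough sketch of that pattern (not a verbatim copy of the sample code):

// Bind the EGLImage to an external texture target instead of GL_TEXTURE_2D
glBindTexture(GL_TEXTURE_EXTERNAL_OES, externalTexture);
glEGLImageTargetTexture2DOES(GL_TEXTURE_EXTERNAL_OES, hEglImage);
glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_MAG_FILTER, GL_LINEAR);

and the matching fragment shader:

#extension GL_OES_EGL_image_external : require
precision mediump float;
uniform samplerExternalOES tex;
varying vec2 TexCoord;
void main()
{
    gl_FragColor = texture2D(tex, TexCoord);
}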