Black square renders instead of an OpenCV image

I'm trying to render two 256x256 images with ushort data, one in greyscale and one in RGB. However, both render as black squares. I believe the fault lies somewhere in my OpenGL texture definitions, but I'm not sure.

Here's my minimal version of the code.

#include "imgui.h"
#include "imgui_impl_glfw.h"
#include "imgui_impl_opengl3.h"
#include <glad/glad.h>    
#include <GLFW/glfw3.h>
#include <opencv2/opencv.hpp>

using namespace cv;


int main()
{
    //init glfw, window, glad, imgui
    glfwInit();
    const char* glsl_version = "#version 330 core";
    glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 3);
    glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 3);
    glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE);
    GLFWwindow* window = glfwCreateWindow(600, 400, "test", NULL, NULL);
    glfwMakeContextCurrent(window);
    gladLoadGL();
    glEnable(GL_BLEND);
    glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
    ImGui::CreateContext();
    ImGui::StyleColorsDark();
    ImGui_ImplGlfw_InitForOpenGL(window, true);
    ImGui_ImplOpenGL3_Init(glsl_version);


    //define image data
    ushort value;
    Mat_<ushort> grey = Mat_<ushort>(256, 256);
    Mat_<Vec3w> rgb = Mat_<Vec3w>(256, 256);


    for (int i = 0; i < grey.rows; i++)
        for (int j = 0; j < grey.cols; j++)
        {
            value = (i + j) / 256.0 * USHRT_MAX;
            grey.at<ushort>(i, j) = value;
            rgb.at<Vec3w>(i, j) = Vec3w(value, value, value);
        }

    
    //create textures
    GLuint greyID;
    GLuint rgbID;

    glGenTextures(1, &greyID);
    glBindTexture(GL_TEXTURE_2D, greyID);
    glTexImage2D(GL_TEXTURE_2D, 0, GL_R16, 256, 256, 0, GL_RED, GL_UNSIGNED_SHORT, grey.data);

    glGenTextures(1, &rgbID);
    glBindTexture(GL_TEXTURE_2D, rgbID);
    glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB16UI, 256, 256, 0, GL_RGB, GL_UNSIGNED_SHORT, rgb.data);


    while (!(glfwGetKey(window, GLFW_KEY_ESCAPE) == GLFW_PRESS))
    {
        glfwPollEvents();

        ImGui_ImplOpenGL3_NewFrame();
        ImGui_ImplGlfw_NewFrame();
        ImGui::NewFrame();

        ImGui::Begin("Images");
        ImGui::Image((void*)(intptr_t)greyID, ImVec2(256, 256));
        ImGui::SameLine();
        ImGui::Image((void*)(intptr_t)rgbID, ImVec2(256, 256));
        ImGui::End();

        ImGui::Render();

        glClearColor(0.2f, 0.2f, 0.2f, 1.0f);
        glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT | GL_STENCIL_BUFFER_BIT);

        ImGui_ImplOpenGL3_RenderDrawData(ImGui::GetDrawData());
        glfwSwapBuffers(window);
    }

    ImGui_ImplOpenGL3_Shutdown();
    ImGui_ImplGlfw_Shutdown();
    ImGui::DestroyContext();
    glfwDestroyWindow(window);
    glfwTerminate();
    return 0;
}

Here's the result:

[screenshot: two solid black 256x256 squares in the "Images" window]

CodePudding user response:

Your code has two problems.

First, as was discussed in the comments, in your case you probably want GL_RGB16 instead of GL_RGB16UI. GL_RGB16UI is an unsigned integer format, and uploading to it with the pixel format GL_RGB (rather than GL_RGB_INTEGER) raises GL_INVALID_OPERATION, so the upload never happens. Integer textures also cannot be sampled by ImGui's normalized-float shader. GL_RGB16 is the normalized format that matches your GL_RGB / GL_UNSIGNED_SHORT data, and using it takes care of the texture error.
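For example, only the internal format changes in the call from the question (a sketch of the one-line fix):

glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB16, 256, 256, 0, GL_RGB, GL_UNSIGNED_SHORT, rgb.data);

If you really did want GL_RGB16UI, you would have to upload with GL_RGB_INTEGER and sample through a usampler2D in a custom shader, which the stock ImGui backend does not do.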

The second problem is that you need to add

glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);

after glBindTexture.
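Putting both fixes together, the texture-creation section would look like this (a sketch; everything other than the two fixes is unchanged from the question):

glGenTextures(1, &greyID);
glBindTexture(GL_TEXTURE_2D, greyID);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexImage2D(GL_TEXTURE_2D, 0, GL_R16, 256, 256, 0, GL_RED, GL_UNSIGNED_SHORT, grey.data);

glGenTextures(1, &rgbID);
glBindTexture(GL_TEXTURE_2D, rgbID);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB16, 256, 256, 0, GL_RGB, GL_UNSIGNED_SHORT, rgb.data);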

The reason is that the default minifying filter is GL_NEAREST_MIPMAP_LINEAR, but you have only provided the first mipmap level, so the texture is incomplete and samples as opaque black. Alternatively, you could reduce the texture's maximum mipmap level (GL_TEXTURE_MAX_LEVEL) so that level 0 on its own makes the texture complete.
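A minimal sketch of that alternative, if you would rather keep the default filter:

glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAX_LEVEL, 0);

With the maximum level clamped to 0, mipmap completeness no longer requires the missing levels, so the texture samples normally even under GL_NEAREST_MIPMAP_LINEAR.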

You may also want to clamp your colour so the gradient does not wrap around: without a clamp, (i + j) / 256.0 approaches 2.0 in the bottom-right corner, and converting roughly twice USHRT_MAX back to ushort overflows, producing a sawtooth instead of a smooth ramp:

value = min((i + j) / 256.0, 1.0) * USHRT_MAX;
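In context, the fill loop would become something like this (a sketch; std::min needs <algorithm>, and both of its arguments must be double):

#include <algorithm>

for (int i = 0; i < grey.rows; i++)
    for (int j = 0; j < grey.cols; j++)
    {
        ushort value = (ushort)(std::min((i + j) / 256.0, 1.0) * USHRT_MAX);
        grey.at<ushort>(i, j) = value;
        rgb.at<Vec3w>(i, j) = Vec3w(value, value, value);
    }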