
I'm using OpenGL 3.2 with GLFW and GLEW. I'm trying to render a simple triangle using a VAO and a simple shader on OS X (10.8.2), but nothing shows, only a white screen. The shaders compile fine, GLEW initializes fine, and glGetString(GL_VERSION) reports 3.2. I tried putting glGetError after every call and it didn't report any errors. I don't know what I'm doing wrong. Here's the code:

#include "include/GL/glew.h"
#include "include/GL/glfw.h"
#include <cstdlib>
#include <iostream>

GLuint program;

// Read an entire text file into a malloc'd, NUL-terminated buffer.
char *textFileRead(const char *fn) {

    FILE *fp;
    char *content = NULL;
    int count = 0;

    if (fn != NULL) {
        fp = fopen(fn, "rt");

        if (fp != NULL) {
            fseek(fp, 0, SEEK_END);
            count = ftell(fp);
            rewind(fp);

            if (count > 0) {
                content = (char *)malloc(sizeof(char) * (count + 1));
                count = fread(content, sizeof(char), count, fp);
                content[count] = '\0';
            }
            fclose(fp);
        }
    }
    return content;
}


// Print the shader info log if compilation failed.
void checkCompilationStatus(GLuint s) {
    GLint status = 0;

    glGetShaderiv(s, GL_COMPILE_STATUS, &status);
    if (status == 0) {
        int infologLength = 0;
        int charsWritten  = 0;

        glGetShaderiv(s, GL_INFO_LOG_LENGTH, &infologLength);

        if (infologLength > 0)
        {
            GLchar* infoLog = (GLchar *)malloc(infologLength);
            if (infoLog == NULL)
            {
                printf( "ERROR: Could not allocate InfoLog buffer");
                exit(1);
            }
            glGetShaderInfoLog(s, infologLength, &charsWritten, infoLog);
            printf( "Shader InfoLog:\n%s", infoLog );
            free(infoLog);
        }
    }

}


// Load, compile, and link the vertex and fragment shaders.
void setShaders() {
    GLuint v, f;
    char *vs = NULL,*fs = NULL;

    v = glCreateShader(GL_VERTEX_SHADER);
    f = glCreateShader(GL_FRAGMENT_SHADER);

    vs = textFileRead("minimal.vert");
    fs = textFileRead("minimal.frag");

    const char * vv = vs;
    const char * ff = fs;

    glShaderSource(v, 1, &vv,NULL);
    glShaderSource(f, 1, &ff,NULL);

    free(vs);free(fs);

    glCompileShader(v);
    checkCompilationStatus(v);
    glCompileShader(f);
    checkCompilationStatus(f);

    program = glCreateProgram();
    glAttachShader(program,v);
    glAttachShader(program,f);

    glLinkProgram(program);
    glUseProgram(program);
}


int main(int argc, char* argv[]) {

    glfwInit();

    glfwOpenWindowHint(GLFW_OPENGL_VERSION_MAJOR, 3);
    glfwOpenWindowHint(GLFW_OPENGL_VERSION_MINOR, 2);
    glfwOpenWindowHint(GLFW_OPENGL_FORWARD_COMPAT, GL_TRUE);
    glfwOpenWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE);

    glfwOpenWindow(800, 600, 8, 8, 8, 8, 24, 8, GLFW_WINDOW);
    glViewport(0, 0, 800, 600);
    glfwSetWindowTitle("Triangle");

    glewExperimental = GL_TRUE;
    GLenum result = glewInit();
    if (result != GLEW_OK) {
        std::cout << "Error: " << glewGetErrorString(result) << std::endl;
    }

    std::cout << "VENDOR: " << glGetString(GL_VENDOR) << std::endl;
    std::cout << "RENDERER: " << glGetString(GL_RENDERER) << std::endl;
    std::cout << "VERSION: " << glGetString(GL_VERSION) << std::endl;
    std::cout << "GLSL: " << glGetString(GL_SHADING_LANGUAGE_VERSION) << std::endl;

    setShaders();

    GLfloat vertices[] = {
        1.0f, 1.0f, 0.f,
        -1.f, -1.f, 0.f,
        1.f, -1.f, 0.f
    };


    // Create and bind a VAO to hold the vertex attribute state.
    GLuint VertexArrayID;
    glGenVertexArrays(1, &VertexArrayID);
    glBindVertexArray(VertexArrayID);

    // Upload the triangle vertices into a VBO.
    GLuint vertexbuffer;
    glGenBuffers(1, &vertexbuffer);
    glBindBuffer(GL_ARRAY_BUFFER, vertexbuffer);
    glBufferData(GL_ARRAY_BUFFER, sizeof(vertices), vertices, GL_STATIC_DRAW);

    // Point the shader's "position" attribute at the buffer.
    GLuint pos = glGetAttribLocation(program, "position");
    glEnableVertexAttribArray(pos);
    glVertexAttribPointer(pos, 3, GL_FLOAT, GL_FALSE, 0, 0);

    glClearColor(1.0, 1.0, 1.0, 1.0);

    while (glfwGetWindowParam(GLFW_OPENED)) {
        glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

        glDrawArrays(GL_TRIANGLES, 0, 3);

        glfwSwapBuffers();        
        glfwSleep(0.001);

    }

}
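(Note that setShaders above checks compile status but never the program's link status; a minimal sketch of such a check, modeled on checkCompilationStatus, would look roughly like this:)

// Sketch: query the program's link status the same way the compile status is checked.
void checkLinkStatus(GLuint p) {
    GLint status = 0;
    glGetProgramiv(p, GL_LINK_STATUS, &status);
    if (status == 0) {
        GLint infologLength = 0;
        glGetProgramiv(p, GL_INFO_LOG_LENGTH, &infologLength);
        if (infologLength > 0) {
            GLchar *infoLog = (GLchar *)malloc(infologLength);
            glGetProgramInfoLog(p, infologLength, NULL, infoLog);
            printf("Program InfoLog:\n%s", infoLog);
            free(infoLog);
        }
    }
}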

And here are the shaders. The vertex shader:

#version 150

in vec3 position;

void main()
{   
      gl_Position = vec4(position, 0);
}

The fragment shader:

#version 150

out vec4 out_color;

void main()
{
        out_color = vec4(1.0f, 0.0f, 0.0f, 1.0f);
}
  • Could you try disabling the depth test, and if that doesn't work, try disabling face culling? Commented Jan 12, 2013 at 16:18
  • I added glDisable(GL_DEPTH_TEST); glDisable(GL_CULL_FACE); after GLEW initialization, but still nothing. Commented Jan 12, 2013 at 17:24
  • Just a thought: what happens if you say gl_Position = vec4(position, 1)? (Use 1 as the w component, not 0.) Commented Jan 12, 2013 at 18:06
  • Whoa, this works, thanks so much! Can you explain why the w parameter matters? Commented Jan 12, 2013 at 18:18

1 Answer


The w parameter in your vertex shader should be set to 1, not 0.

gl_Position = vec4(position, 1);

For more information, see the section titled "Normalized Coordinates" under "Rasterization Overview" on this page:

... The X, Y, and Z of each vertex's position is divided by W to get normalized device coordinates...

So your coordinates were being divided by 0. A number divided by 0 is undefined.
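For illustration, the corrected vertex shader would look like this (a minimal sketch of the shader from the question, with only the w component changed):

#version 150

in vec3 position;

void main()
{
    // w = 1.0 so the perspective divide leaves x, y, z unchanged.
    gl_Position = vec4(position, 1.0);
}

With w = 1 the divide is a no-op, so (x, y, z, 1) maps to the normalized device coordinates (x, y, z); with w = 0 the divide has no defined result, which is why nothing was drawn.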
