c++ - OpenGL 3.3 core profile render fails -

I'm trying to render a simple test shader with an OpenGL 3.3 core profile context, but all I get is a black window.

GLFWwindow* window;
GLuint vao;
GLuint vbo[2];
GLuint program;
const GLfloat square[8] = { -1.0, -1.0, -1.0, 1.0, 1.0, 1.0, 1.0, -1.0 };
const GLfloat indices[4] = { 0, 1, 2, 3 };

Init OpenGL core context and window:

if (!glfwInit()) {
    std::cerr << "Failed to initialize GLFW\n";
    return -1;
}
glfwWindowHint(GLFW_SAMPLES, 4);
glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 3);
glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 3);
glfwWindowHint(GLFW_OPENGL_FORWARD_COMPAT, GL_TRUE);

// open window and create OpenGL context
window = glfwCreateWindow(1024, 768, "", 0, 0);
if (window == NULL) {
    std::cerr << "Failed to open GLFW window.\n";
    glfwTerminate();
    return -1;
}
glfwMakeContextCurrent(window);

// initialize gl3w
if (gl3wInit()) {
    std::cerr << "Failed to initialize gl3w" << std::endl;
    return -1;
}
if (!gl3wIsSupported(3, 3)) {
    std::cerr << "OpenGL version 3.3 not supported" << std::endl;
    return -1;
}
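As an aside (not part of the question's code): GLFW_OPENGL_FORWARD_COMPAT on its own does not request a core profile; the profile is chosen by GLFW_OPENGL_PROFILE, which defaults to "any". A minimal sketch of hints that explicitly ask for a 3.3 core context with GLFW 3 would be:

// explicitly request a core profile; without this hint some drivers
// hand back a compatibility context even with forward-compat set
glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 3);
glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 3);
glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE);
glfwWindowHint(GLFW_OPENGL_FORWARD_COMPAT, GL_TRUE);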

Init VBO and index buffer, VAO, and shader program; bind vertex shader input 0:

glGenBuffers(2, vbo);
glBindBuffer(GL_ARRAY_BUFFER, vbo[0]);
glBufferData(GL_ARRAY_BUFFER, 8 * sizeof(GLfloat), square, GL_STATIC_DRAW);
glBindBuffer(GL_ARRAY_BUFFER, vbo[1]);
glBufferData(GL_ARRAY_BUFFER, 4 * sizeof(GLushort), indices, GL_STATIC_DRAW);

glGenVertexArrays(1, &vao);
glBindVertexArray(vao);

program = glCreateProgram();
GLuint vertex_shader, fragment_shader;
loadShader_file(vertex_shader, "shader/default.vsh", GL_VERTEX_SHADER);
glAttachShader(program, vertex_shader);
loadShader_file(fragment_shader, "shader/default.fsh", GL_FRAGMENT_SHADER);
glAttachShader(program, fragment_shader);
glBindAttribLocation(program, 0, "pos");
glLinkProgram(program);
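The question reports that both shaders compile cleanly, but a program can still fail to link. A short link-status check (not in the original code, sketched with the standard glGetProgramiv/glGetProgramInfoLog calls) would rule that out:

GLint linked = GL_FALSE;
glGetProgramiv(program, GL_LINK_STATUS, &linked);
if (linked != GL_TRUE) {
    GLchar log[1024];
    glGetProgramInfoLog(program, sizeof(log), NULL, log);
    std::cerr << "program link failed: " << log << std::endl;
}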

Start rendering:

glUseProgram(program);
glBindBuffer(GL_ARRAY_BUFFER, vbo[0]);
glVertexAttribPointer(0, 2, GL_FLOAT, GL_FALSE, sizeof(GLfloat) * 2, (void*)0);
glEnableVertexAttribArray(0);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, vbo[1]);
glDrawElements(GL_TRIANGLE_STRIP, 4, GL_UNSIGNED_SHORT, (void*)0);
glDisableVertexAttribArray(0);
glfwSwapBuffers(window);
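When a tracer like apitrace isn't attached, a hand-rolled error poll can be dropped in after each block. A minimal sketch (the helper name is hypothetical, the calls are standard):

static void checkGLErrors(const char* where) {
    // glGetError pops one queued error per call, so drain the whole queue
    for (GLenum err = glGetError(); err != GL_NO_ERROR; err = glGetError())
        std::cerr << where << ": GL error 0x" << std::hex << err << std::dec << "\n";
}

Usage would simply be checkGLErrors("after draw"); following the glDrawElements call.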

Vertex shader:

#version 330

in vec2 pos;
out vec2 c;

void main(void)
{
    gl_Position = vec4(pos, 0.0, 1.0);
    c = (pos + 1.0) * 0.5;
}

Fragment shader:

#version 330

in vec2 c;
out vec4 color;

void main(void)
{
    color = vec4(c, 1.0, 1.0);
}

The shaders compile without errors, and apitrace can't find any OpenGL errors.

Well, this cannot be good:

const GLfloat indices[4] = { 0, 1, 2, 3 };

You told OpenGL those were unsigned shorts, but they are floating-point. Never mind the fact that a GLfloat is twice the size of a GLushort; the way the numbers are represented is completely different. Floating-point vertex indices do not make a whole lot of sense.
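To make the mismatch concrete, here is a small stand-alone sketch (illustrative only) that reinterprets the bytes of that float array the way glDrawElements(..., GL_UNSIGNED_SHORT, ...) would:

#include <cstdint>
#include <cstring>
#include <iostream>

int main() {
    const float indices[4] = { 0.0f, 1.0f, 2.0f, 3.0f };
    std::uint16_t as_ushort[8];
    // copy the raw bytes: exactly what the GPU sees when told GL_UNSIGNED_SHORT
    std::memcpy(as_ushort, indices, sizeof(indices));
    for (std::uint16_t v : as_ushort)
        std::cout << v << ' ';  // prints 0 0 0 16256 0 16384 0 16448 on little-endian
    std::cout << '\n';
}

On top of that, the question's glBufferData call sizes the upload as 4*sizeof(GLushort), which is only the first 8 bytes of the 16-byte float array, so the draw call ends up reading the indices 0, 0, 0, 16256.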

Instead, you should use:

const GLushort indices[4] = { 0, 1, 2, 3 };
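With the type fixed, the matching upload would look like this (a sketch reusing the question's buffer names; binding to GL_ELEMENT_ARRAY_BUFFER instead of GL_ARRAY_BUFFER is equally valid for the data upload):

const GLushort indices[4] = { 0, 1, 2, 3 };
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, vbo[1]);
glBufferData(GL_ELEMENT_ARRAY_BUFFER, 4 * sizeof(GLushort), indices, GL_STATIC_DRAW);

Now 4*sizeof(GLushort) actually matches the size of the array it uploads.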

c++ opengl glsl
