shader.vert:

#version 120
// Eye-space position and normal, interpolated across the triangle and
// consumed by the fragment shader for per-fragment lighting.
varying vec3 position;
varying vec3 normal;

void main()
{
    position = (vec3(gl_ModelViewMatrix*gl_Vertex));    //get the position of the vertex after translation, rotation, scaling
    normal = gl_NormalMatrix*gl_Normal;    //get the normal direction, after translation, rotation, scaling
    gl_Position = gl_ModelViewProjectionMatrix * gl_Vertex;
}

shader.frag:

#version 120
// Inputs interpolated from the vertex shader (both in eye space).
varying vec3 position;
varying vec3 normal;
// Light and material parameters supplied by the application.
uniform vec3 lightColor;
uniform vec3 surfaceColor;
uniform float ambientScale;
void main()
{
    // Only the ambient term is active; the diffuse term is commented out
    // below, so the output does not depend on 'normal' at all.
    vec3 lightAmbient = lightColor * ambientScale;
    vec3 surfaceAmbient = surfaceColor;
    vec3 ambient = lightAmbient * surfaceAmbient;
    //vec3 lightDirection = normalize(position); // ***********
    //float n_dot_1 = max(0.0, dot(normalize(normal), position));
    vec3 lightDiffuse = lightColor;
    vec3 surfaceDiffuse = surfaceColor;
    //vec3 diffuse = (surfaceDiffuse * lightDiffuse) * n_dot_1;

    //gl_FragColor=vec4(ambient + diffuse, 1.0);
    gl_FragColor=vec4(ambient, 1.0);
}

*********** When this line is commented out everything runs fine and the fragment colour is correct:
monk1

However, if that line is uncommented then this is what happens;
monk2

Why is this? Is it because my normals VBO passing is wrong? Is it because of the way I've written the shader itself? It's almost as if the varying variable is causing the fragment shader to fail and each fragment is being defaulted to (1.0, 1.0, 1.0, 1.0). The following code is my implementation of the VBOs.

Mesh.h:

#ifndef MESH_H
#define MESH_H

#include <string>
#include <iostream>
#include <fstream>
#include <sstream>
#include <vector>
#include <cml/cml.h>
#include <GL/glfw.h>

typedef cml::vector3f vec3f;

// Loads a Wavefront .obj mesh (positions + normals) and renders it
// through vertex-buffer objects.
class Mesh
{
    std::vector<vec3f> vertexList;        // per-vertex positions ("v" lines)
    std::vector<vec3f> normalList;        // normals as listed in the file ("vn" lines)
    std::vector<GLuint> indexList;        // triangle vertex indices (0-based)
    std::vector<GLuint> normalIndexList;  // normal index per face corner (0-based)
    GLuint vbo[3];                        // [0] vertices, [1] normals, [2] element indices

    public:
        // BUG FIX: "const std::string&amp;" was an HTML-escaping artifact
        // and does not compile; the parameter is a const reference.
        Mesh(const std::string& fileName);
        void init();     // uploads the parsed data into the VBOs
        void display();  // draws the mesh (init() must have been called)
};

#endif

Mesh.cpp:

#include "Mesh.h"

// Parses a Wavefront .obj file: "v" lines become positions, "vn" lines
// normals, and "f" lines triangle faces whose corners are "v/vt/vn"
// triples of 1-based indices.
// BUG FIX: the parameter type was the HTML artifact "const std::string&amp;"
// (does not compile), and the constructor ignored fileName entirely,
// always opening the hard-coded "untitled.obj".
Mesh::Mesh(const std::string& fileName)
{
    std::string s;
    std::ifstream file(fileName.c_str()); // .c_str() keeps this valid pre-C++11

    std::string line;
    while (std::getline(file, line)) {
        std::istringstream iss(line);
        std::string result;
        if (std::getline(iss, result, ' ')) {
            if (result == "v") {
                // vertex position: "v x y z"
                float f;
                vertexList.push_back(vec3f(0, 0, 0));
                for (int i = 0; i < 3; i++) {
                    iss >> f;
                    vertexList.back()[i] = f;
                }
            } else if (result == "vn") {
                // vertex normal: "vn x y z"
                float f;
                normalList.push_back(vec3f(0, 0, 0));
                for (int i = 0; i < 3; i++) {
                    iss >> f;
                    normalList.back()[i] = f;
                }
            } else if (result == "f") {
                // face: each whitespace-separated corner is "v/vt/vn"
                while (std::getline(iss, s, ' ')) {
                    std::istringstream indexBlock(s);
                    for (int i = 0; i < 3; i++) {
                        std::string intString;
                        if (std::getline(indexBlock, intString, '/')) {
                            std::istringstream sstream(intString);
                            int index = -1;
                            sstream >> index;
                            if (index != -1) {             // -1 means the field was empty
                                if (i == 0) {
                                    indexList.push_back(index - 1);       // position index
                                } else if (i == 2) {
                                    normalIndexList.push_back(index - 1); // normal index
                                }
                                // i == 1 (texture coordinate) is deliberately ignored
                            }
                        }
                    }
                }
            }
        }
    }
    std::cout << "Loaded " << fileName << std::endl;
}

void Mesh::init()
{
    GLfloat tmp_normals[normalList.size()][3];

    unsigned int index = 0;

    for (int c = 0; c < indexList.size(); c++) {
        tmp_normals[indexList.at©][0] = normalList.at(normalIndexList.at©)[0];
        tmp_normals[indexList.at©][1] = normalList.at(normalIndexList.at©)[1];
        tmp_normals[indexList.at©][2] = normalList.at(normalIndexList.at©)[2];
        std::cout << normalList.at(normalIndexList.at©)[0] << " ";
    }

    glGenBuffers(3, vbo);

    glBindBuffer(GL_ARRAY_BUFFER, vbo[0]); // vertices
    glBufferData(GL_ARRAY_BUFFER, sizeof(float) * 3 * vertexList.size(), (const GLvoid*)&amp; vertexList.front(), GL_STATIC_DRAW);

    glBindBuffer(GL_ARRAY_BUFFER, vbo[1]); // normals
    glBufferData(GL_ARRAY_BUFFER, sizeof(float) * 3 * normalList.size(), (const GLvoid*)&amp; tmp_normals[0], GL_STATIC_DRAW);

    glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, vbo[2]); // indices
    glBufferData(GL_ELEMENT_ARRAY_BUFFER, indexList.size() * sizeof(GLuint), (const GLvoid*)&amp; indexList.front(), GL_STATIC_DRAW);
}

// Draws the mesh from the VBOs created in init().
// NOTE(review): the element-array draw uses per-vertex normals, so this
// only looks right after init() has re-ordered the normals.
void Mesh::display()
{
    glEnableClientState(GL_VERTEX_ARRAY);
    glEnableClientState(GL_NORMAL_ARRAY);


    glBindBuffer(GL_ARRAY_BUFFER, vbo[0]); // vertices
    glVertexPointer(3, GL_FLOAT, 0, 0);

    glBindBuffer(GL_ARRAY_BUFFER, vbo[1]); // normals
    glNormalPointer(GL_FLOAT, 0, 0);

    glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, vbo[2]); // indices
    glDrawElements(GL_TRIANGLES, indexList.size(), GL_UNSIGNED_INT, 0);

    glDisableClientState(GL_VERTEX_ARRAY);
    glDisableClientState(GL_NORMAL_ARRAY);

}

Thank you.

In your fragment shader (the uncommented lines) you're only calculating ambient color, which could give you the first and the second result (depending on input). This means you're not using normals at all. Also, have you checked the compilation log?

Also take a look at how VBO drawing is done in my engine, maybe it'll help you find the error:

// GL_ARRAY_BUFFER_ARB is an array of these structures (I'm also packing them):
#pragma pack(push)
#pragma pack(1)
// Interleaved vertex layout: position followed immediately by normal,
// packed to 1-byte alignment so the GPU sees exactly 6 floats per vertex.
struct vbo_vertex
{
    float x, y, z; // vertex coordinates
    float nx, ny, nz; // normal
};
#pragma pack(pop)

// ... you need <cstddef> for offsetof()

// Stride and attribute offsets derived from the struct so they can never
// drift out of sync with the layout above.
#define VBO_STRIDE (sizeof(vbo_vertex))
#define VBO_VERTEX_COORDS 3
#define VBO_VERTEX_OFFSET ((void *)offsetof(vbo_vertex, x))
#define VBO_NORMAL_OFFSET ((void *)offsetof(vbo_vertex, nx))

// ... in the draw function (n triangles)

// Bind both buffers first; the gl*Pointer calls below capture the
// currently bound GL_ARRAY_BUFFER.
glBindBuffer(GL_ARRAY_BUFFER, my_vbo);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, my_indices_vbo);

glEnableClientState(GL_VERTEX_ARRAY);
glVertexPointer(VBO_VERTEX_COORDS, GL_FLOAT, VBO_STRIDE, VBO_VERTEX_OFFSET);
glEnableClientState(GL_NORMAL_ARRAY);
glNormalPointer(GL_FLOAT, VBO_STRIDE, VBO_NORMAL_OFFSET);

// n triangles => n * 3 indices drawn from the element buffer.
glDrawElements(GL_TRIANGLES, n * VBO_VERTEX_COORDS, GL_UNSIGNED_INT, NULL);

glDisableClientState(GL_NORMAL_ARRAY);
glDisableClientState(GL_VERTEX_ARRAY);

// Unbind so later immediate-mode / other draws are unaffected.
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, 0);
glBindBuffer(GL_ARRAY_BUFFER, 0);

Notice I'm only using one GL_ARRAY_BUFFER for both vertices coordinates and normals. Same approach is used here: http://sdickinson.com/wordpress/?p=122
Similar approach to yours: http://nehe.gamedev.net/tutorial/vertex_buffer_objects/22002/

OK, I have worked out that it's not the normals that are the problem by simplifying as much as possible.

main.h:

#ifndef MAIN_H
#define MAIN_H

// Common includes for the whole demo.  GLEW must come before GLFW so the
// extension loader sees the GL headers first.
#include <iostream>
#include <GL/glew.h>
#include <GL/glfw.h>
#include <stdlib.h>
#include <cml/cml.h>
#include "Shader.h"

typedef cml::vector3f vec3f;

#endif

main.cpp:

#include "main.h"

Shader* myShader;   // created in initGL(); never freed in this demo

// One-time GL setup: clear colour, perspective projection, depth test,
// and compilation of the shader program.
void initGL()
{
    glClearColor(0, 0, 0, 1);
    glMatrixMode(GL_PROJECTION);
        glLoadIdentity();
        gluPerspective(50, 640.0 / 480.0, 1, 1000);
    glMatrixMode(GL_MODELVIEW);
    glEnable(GL_DEPTH_TEST);
    myShader = new Shader("shader.vert", "shader.frag");
}

// Renders a single immediate-mode triangle with the test shader bound.
// The triangle sits at z = -4, inside the 1..1000 frustum from initGL().
void display()
{
    glLoadIdentity();
    glClear(GL_COLOR_BUFFER_BIT|GL_DEPTH_BUFFER_BIT);
    glUseProgram(myShader->program);
    glBegin(GL_TRIANGLES);
        glVertex3f(0, 1, -4);
        glVertex3f(-1, -1, -4);
        glVertex3f(1, -1, -4);
    glEnd();
}

int main()
{
    int running = GL_TRUE;

    if (!glfwInit()) {
        exit(EXIT_FAILURE);
    }

    if (!glfwOpenWindow(640, 480, 0, 0, 0, 0, 0, 0, GLFW_WINDOW)) {
        glfwTerminate();
        exit( EXIT_FAILURE );
    }

    GLenum err = glewInit();
    if (GLEW_OK != err) {
      std::cout << "Error: " << glewGetErrorString(err) << std::cout;
    }

    initGL();

    while (running) {
        display();
        glfwSwapBuffers();

        running = !glfwGetKey(GLFW_KEY_ESC) &&
            glfwGetWindowParam(GLFW_OPENED);
    }
    glfwTerminate();
    exit(EXIT_SUCCESS);
}

Shader.h:

#ifndef SHADER_H
#define SHADER_H

#include "main.h"
#include <vector>
#include <string>
#include <fstream>

// Compiles a vertex + fragment shader pair and links them into a program.
class Shader
{
    // Appends the entire contents of fileName to str, one line at a time.
    void loadFile(const char* fileName, std::string& str);
    // Compiles shaderSource as shaderType; returns the GL shader object id.
    GLuint load(std::string& shaderSource, GLuint shaderType);

    public:
        GLuint vs;       // vertex shader object
        GLuint fs;       // fragment shader object
        GLuint program;  // linked program object

        Shader(const char* vs, const char* fs);
        ~Shader();
};

#endif

Shader.cpp:

#include "Shader.h"

// Appends the entire contents of fileName to str, newline-terminated.
// Prints a diagnostic and leaves str untouched if the file cannot be
// opened.
void Shader::loadFile(const char* fileName, std::string& str)
{
    std::ifstream in(fileName);
    if (!in.is_open()) {
        std::cout << "File " << fileName << " cannot be opened\n";
        return;
    }
    // BUG FIX: looping on !in.eof() processes the buffer one extra time
    // after the final read fails, duplicating the last line.  Testing the
    // stream returned by getline is the correct idiom, and std::string
    // removes the silent 300-character line limit.
    std::string line;
    while (std::getline(in, line)) {
        str += line;
        str += '\n';
    }
}

// Compiles shaderSource as shaderType and returns the shader object id.
// The id is returned even on failure so the caller can still attach and
// inspect it; the real status is reported on stdout.
GLuint Shader::load(std::string& shaderSource, GLuint shaderType)
{
    GLuint id = glCreateShader(shaderType);
    const char* csource = shaderSource.c_str();
    glShaderSource(id, 1, &csource, NULL);
    glCompileShader(id);

    // BUG FIX: only printing the info log hides failures — many drivers
    // leave the log empty on success AND on some errors.  Query the
    // actual compile status as well.
    GLint status = GL_FALSE;
    glGetShaderiv(id, GL_COMPILE_STATUS, &status);
    char error[1000];
    glGetShaderInfoLog(id, 1000, NULL, error);
    std::cout << "Compile status: " << (status == GL_TRUE ? "OK" : "FAILED")
              << "\n" << error << std::endl;
    return id;
}

// Builds a GLSL program from a vertex shader file (vn) and a fragment
// shader file (fn), links it, and makes it current.
Shader::Shader(const char* vn, const char* fn)
{
    std::string source;

    loadFile(vn, source);
    vs = load(source, GL_VERTEX_SHADER);

    source.clear();

    loadFile(fn, source);
    // BUG FIX (root cause of the white triangle): the fragment shader id
    // was assigned to vs, overwriting the vertex shader and leaving fs
    // uninitialized.  glAttachShader(program, fs) then attached a garbage
    // id, so the program linked without a real fragment stage and the
    // varying never reached it.
    fs = load(source, GL_FRAGMENT_SHADER);

    program = glCreateProgram();
    glAttachShader(program, vs);
    glAttachShader(program, fs);
    glLinkProgram(program);
    glUseProgram(program);
}

// Detaches and deletes both shader objects, then deletes the program.
Shader::~Shader()
{
    glDetachShader(program, vs);
    glDetachShader(program, fs);
    glDeleteShader(vs);
    glDeleteShader(fs);
    glDeleteProgram(program);
}

shader.vert:

#version 120
// Constant colour handed to the fragment shader; with identical values at
// every vertex, interpolation yields the same colour for every fragment.
varying vec4 color;

void main()
{
    color = vec4(0.0, 1.0, 0.0, 1.0);
    gl_Position = gl_ModelViewProjectionMatrix * gl_Vertex;
}

shader.frag:

#version 120
// Receives the interpolated per-vertex colour.
varying vec4 color;

void main()
{
    //gl_FragColor=vec4(1.0, 0.0, 0.0, 1.0); // Triangle is red, works fine...

    gl_FragColor=vec4(color); // Triangle is white. ('color' not being passed from vertex shader???)
}

The commented code in shader.frag explains what is happening. Why isn't the varying vector being passed???

Thank you and sorry for asking the wrong question before.

The shader is fine. A varying variable is interpolated between vertices and passed to fragment shader. Since 'color' is (0.0, 1.0, 0.0, 1.0) for each vertex, it will be the same for each fragment.

The problem must be in your application, but I can't see any problems in the code you posted. I'll take an extra look at it later. You could wait for input from other users but I doubt you'll get any since this is a general programming forum. Maybe you should post on a more specialized forum?

venomxxl, thank you for looking at it.

I've now taken the time to completely rewrite and relearn how to implement everything and am using attributes instead. I'm up and running, apologies for wasting time!!

Be a part of the DaniWeb community

We're a friendly, industry-focused community of developers, IT pros, digital marketers, and technology enthusiasts meeting, networking, learning, and sharing knowledge.