thequarrymen33

08-17-2012, 12:37 AM

Hi all,

I have a very basic question. I am new to WebGL and am trying to draw a simple square. I am using the glMatrix library for matrix manipulation.

Javascript Code:

// Create and bind a buffer for the square's vertex positions.
squareVertexPositionBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, squareVertexPositionBuffer);

// Four corners of the square as (x, y, z, w), ordered for TRIANGLE_STRIP.
vertices = [
     0.9,  0.9, 0.0, 1.0,
    -0.9,  0.9, 0.0, 1.0,
     0.9, -0.9, 0.0, 1.0,
    -0.9, -0.9, 0.0, 1.0
];
squareVertexPositionBuffer.itemSize = 4; // components per vertex
squareVertexPositionBuffer.numItems = 4; // number of vertices

// Projection matrix (glMatrix 1.x: output matrix is the LAST argument).
mat4.identity(pMatrix);
mat4.perspective(45, gl.viewportWidth / gl.viewportHeight, 0.1, 100.0, pMatrix);

// Model-view matrix: move the square left and away from the camera.
mat4.identity(mvMatrix);
mat4.translate(mvMatrix, [-1.5, 0.0, -7.0]);

// Upload the vertex data and point the shader's position attribute at it.
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(vertices), gl.STATIC_DRAW);
// FIX: the attribute property name contained a stray space
// ("vertexPositio nAttribute"), which is a syntax error as written.
gl.vertexAttribPointer(shaderProgram.vertexPositionAttribute, squareVertexPositionBuffer.itemSize, gl.FLOAT, false, 0, 0);

setMatrixUniforms();
gl.drawArrays(gl.TRIANGLE_STRIP, 0, squareVertexPositionBuffer.numItems);

Shader code:

// Per-vertex position in model space.
attribute vec3 aVertexPosition;

// Model-view and projection matrices supplied by the application.
uniform mat4 uMVMatrix;
uniform mat4 uPMatrix;

// Raw position passed through so the fragment shader can inspect it.
varying vec3 debug;

void main(void) {
    debug = aVertexPosition;
    // Transform into clip space: projection applied after model-view.
    gl_Position = uPMatrix * uMVMatrix * vec4(aVertexPosition, 1.0);
}

This seems to work out fine. Here I am passing the model-view and perspective matrices as uniforms to the shader program and multiplying them with the vertex coordinates there. But if I multiply the model-view and perspective matrices in the JavaScript and then pass the modified vertices to the shader, it doesn't seem to work.

// Combine projection and model-view on the CPU.
// FIX: glMatrix 1.x mat4.multiply(a, b, dest) computes dest = a * b, so the
// projection matrix must be the FIRST operand. The original call
// multiply(mvMatrix, pMatrix, ...) built mv * p, which is the wrong order.
mat4.multiply(pMatrix, mvMatrix, mvMatrix);
// The four vec4 vertices, laid out column-major, form a 4x4 matrix, so a
// single mat4.multiply transforms all four at once. NOTE: the results are
// CLIP-space coordinates whose w is generally not 1.0 — the shader must keep
// that w (vec4 attribute) instead of reattaching w = 1.0.
mat4.multiply(mvMatrix, vertices, vertices);

Shader:

void main(void) {
// NOTE(review): the vertices were already multiplied by P * MV on the CPU,
// so they arrive as CLIP-space coordinates whose w component is generally
// not 1.0. Taking only .xyz and forcing w = 1.0 discards the real w and
// breaks the perspective divide — presumably why this variant "doesn't seem
// to work". Declaring the attribute as vec4 and assigning it directly to
// gl_Position would preserve w; confirm against the attribute declaration.
gl_Position = vec4(aVertexPosition.xyz, 1.0);
debug = aVertexPosition;
}

I'm not able to spot the mistake. Help highly appreciated!

I have a very basic question. I am new to WebGL and am trying to draw a simple square. I am using the glMatrix library for matrix manipulation.

Javascript Code:

// Create and bind a buffer for the square's vertex positions.
squareVertexPositionBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, squareVertexPositionBuffer);

// Four corners of the square as (x, y, z, w), ordered for TRIANGLE_STRIP.
vertices = [
     0.9,  0.9, 0.0, 1.0,
    -0.9,  0.9, 0.0, 1.0,
     0.9, -0.9, 0.0, 1.0,
    -0.9, -0.9, 0.0, 1.0
];
squareVertexPositionBuffer.itemSize = 4; // components per vertex
squareVertexPositionBuffer.numItems = 4; // number of vertices

// Projection matrix (glMatrix 1.x: output matrix is the LAST argument).
mat4.identity(pMatrix);
mat4.perspective(45, gl.viewportWidth / gl.viewportHeight, 0.1, 100.0, pMatrix);

// Model-view matrix: move the square left and away from the camera.
mat4.identity(mvMatrix);
mat4.translate(mvMatrix, [-1.5, 0.0, -7.0]);

// Upload the vertex data and point the shader's position attribute at it.
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(vertices), gl.STATIC_DRAW);
// FIX: the attribute property name contained a stray space
// ("vertexPositio nAttribute"), which is a syntax error as written.
gl.vertexAttribPointer(shaderProgram.vertexPositionAttribute, squareVertexPositionBuffer.itemSize, gl.FLOAT, false, 0, 0);

setMatrixUniforms();
gl.drawArrays(gl.TRIANGLE_STRIP, 0, squareVertexPositionBuffer.numItems);

Shader code:

// Per-vertex position in model space.
attribute vec3 aVertexPosition;

// Model-view and projection matrices supplied by the application.
uniform mat4 uMVMatrix;
uniform mat4 uPMatrix;

// Raw position passed through so the fragment shader can inspect it.
varying vec3 debug;

void main(void) {
    debug = aVertexPosition;
    // Transform into clip space: projection applied after model-view.
    gl_Position = uPMatrix * uMVMatrix * vec4(aVertexPosition, 1.0);
}

This seems to work out fine. Here I am passing the model-view and perspective matrices as uniforms to the shader program and multiplying them with the vertex coordinates there. But if I multiply the model-view and perspective matrices in the JavaScript and then pass the modified vertices to the shader, it doesn't seem to work.

// Combine projection and model-view on the CPU.
// FIX: glMatrix 1.x mat4.multiply(a, b, dest) computes dest = a * b, so the
// projection matrix must be the FIRST operand. The original call
// multiply(mvMatrix, pMatrix, ...) built mv * p, which is the wrong order.
mat4.multiply(pMatrix, mvMatrix, mvMatrix);
// The four vec4 vertices, laid out column-major, form a 4x4 matrix, so a
// single mat4.multiply transforms all four at once. NOTE: the results are
// CLIP-space coordinates whose w is generally not 1.0 — the shader must keep
// that w (vec4 attribute) instead of reattaching w = 1.0.
mat4.multiply(mvMatrix, vertices, vertices);

Shader:

void main(void) {
// NOTE(review): the vertices were already multiplied by P * MV on the CPU,
// so they arrive as CLIP-space coordinates whose w component is generally
// not 1.0. Taking only .xyz and forcing w = 1.0 discards the real w and
// breaks the perspective divide — presumably why this variant "doesn't seem
// to work". Declaring the attribute as vec4 and assigning it directly to
// gl_Position would preserve w; confirm against the attribute declaration.
gl_Position = vec4(aVertexPosition.xyz, 1.0);
debug = aVertexPosition;
}

I'm not able to spot the mistake. Help highly appreciated!