08 Combined
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// VERTEX SHADER
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////
attribute vec4 aVertexPosition;
attribute vec4 aVertexColor;
attribute vec3 aVertexNormal;
attribute vec2 aTextureCoord;
uniform mat4 uNormalMatrix;
uniform mat4 uModelViewMatrix;
uniform mat4 uProjectionMatrix;
varying lowp vec4 vColor;
varying highp vec2 vTextureCoord;
varying highp vec3 vLighting;
void main () {
gl_Position = uProjectionMatrix * uModelViewMatrix * aVertexPosition;
vColor = aVertexColor;
vTextureCoord = aTextureCoord;
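// Per-vertex lighting: a constant ambient term plus a single white directional light.
// The vertex normal is transformed by the normal matrix before computing the diffuse factor.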
highp vec3 ambient_light = vec3( 0.3, 0.3, 0.3 );
highp vec3 directional_light_color = vec3( 1, 1, 1 );
highp vec3 directional_vector = normalize( vec3( 0.85, 0.8, 0.75 ) );
highp vec4 transformed_normal = uNormalMatrix * vec4( aVertexNormal, 1.0 );
highp float directional = max( dot( transformed_normal.xyz, directional_vector ), 0.0 );
vLighting = ambient_light + (directional_light_color * directional);
}
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// FRAGMENT SHADER
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////
varying lowp vec4 vColor;
varying highp vec2 vTextureCoord;
varying highp vec3 vLighting;
uniform sampler2D uSampler;
void main () {
highp vec4 texel_color = texture2D( uSampler, vTextureCoord );
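// Modulate the interpolated vertex color by the texel color and the lighting; keep the texel's alpha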
gl_FragColor = vec4( vColor.rgb * texel_color.rgb * vLighting, texel_color.a );
}
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// WebGL TUTORIAL CODE
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////
"use strict";
const DtoR = Math.PI / 180; // Convert degrees to radians
var frame_count = 0; // Count each drawn frame for calculating frames per second
/**
* init_shader_program()
* Compile and link the shader program
*/
function init_shader_program (gl_context, vertex_shader_source, fragment_shader_source) {
const gl = gl_context;
function load_shader (gl, type, source) {
const shader = gl.createShader( type );
gl.shaderSource( shader, source );
gl.compileShader( shader );
if (! gl.getShaderParameter( shader, gl.COMPILE_STATUS )) {
const compile_result = gl.getShaderInfoLog( shader );
gl.deleteShader( shader );
throw new Error( "Error while compiling shader program: " + compile_result );
}
return shader;
} // load_shader
const vertex_shader = load_shader( gl, gl.VERTEX_SHADER, vertex_shader_source );
const fragment_shader = load_shader( gl, gl.FRAGMENT_SHADER, fragment_shader_source );
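// Link the compiled vertex and fragment shaders into a single program object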
const shader_program = gl.createProgram();
gl.attachShader( shader_program, vertex_shader );
gl.attachShader( shader_program, fragment_shader );
gl.linkProgram( shader_program );
if (! gl.getProgramParameter( shader_program, gl.LINK_STATUS )) {
const link_result = gl.getProgramInfoLog( shader_program );
throw new Error( "Error while linking shader program: " + link_result );
}
return shader_program;
} // init_shader_program
/**
* load_image_texture()
* Initialize a texture and load an image. When the image has finished loading, copy it into the texture.
*/
function load_image_texture (gl_context, url) {
const gl = gl_context;
// Create and select texture buffer
const texture = gl.createTexture();
gl.bindTexture( gl.TEXTURE_2D, texture );
// Because the image has to be downloaded over the internet, it may take a moment until it is ready. Until
// then, put a single pixel in the texture so we can use it immediately. When the image has finished
// downloading we'll update the texture with its contents.
const level = 0;
const internalFormat = gl.RGBA;
const width = 1;
const height = 1;
const border = 0;
const srcFormat = gl.RGBA;
const srcType = gl.UNSIGNED_BYTE;
const pixel = new Uint8Array( [0, 0, 255, 255] ); // Opaque blue
gl.texImage2D(
gl.TEXTURE_2D,
level,
internalFormat,
width,
height,
border,
srcFormat,
srcType,
pixel
);
// Create image object for downloading the texture
const image = new Image();
image.onload = function() {
// Select buffer and upload image data
gl.bindTexture( gl.TEXTURE_2D, texture );
gl.texImage2D(
gl.TEXTURE_2D,
level,
internalFormat,
srcFormat,
srcType,
image
);
// WebGL1 has different requirements for power of 2 images vs. non power of 2 images so check if the
// image is a power of 2 in both dimensions.
function is_power_of_2 (value) {
return (value & (value - 1)) == 0;
} // is_power_of_2
if (is_power_of_2( image.width ) && is_power_of_2( image.height )) {
// Yes, it's a power of 2. Generate mips.
gl.generateMipmap( gl.TEXTURE_2D );
} else {
// No, it's not a power of 2. Turn off mips and set wrapping to clamp to edge
gl.texParameteri( gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE );
gl.texParameteri( gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE );
gl.texParameteri( gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR );
}
};
// Trigger download
image.src = url;
return texture;
} // load_image_texture
/**
* setup_video()
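* Create a <video> element, start muted, looping playback of the given URL, and call callback_update
* once the video is playing and delivering frames.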
*/
function setup_video (url, callback_update) {
const video = document.createElement( 'video' );
var playing = false;
var time_update = false;
video.autoplay = true;
video.muted = true;
video.loop = true;
// When the video has arrived, call the update function
function check_ready () {
if (playing && time_update) {
callback_update();
}
} // check_ready
// Waiting for these 2 events ensures there is data in the video
video.addEventListener( 'playing', ()=>{
playing = true;
check_ready();
});
video.addEventListener( 'timeupdate', ()=>{
time_update = true;
check_ready();
});
// Trigger download
video.src = url;
video.play();
return video;
} // setup_video
/**
* init_video_texture()
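* Create a texture that will receive video frames. It starts out as a single blue pixel until the first frame arrives.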
*/
function init_video_texture (gl_context) {
const gl = gl_context;
const texture = gl.createTexture();
gl.bindTexture( gl.TEXTURE_2D, texture );
// Because the video has to be downloaded over the internet, it may take a moment until it's ready, so put a
// single pixel in the texture so we can use it immediately
const level = 0;
const internal_format = gl.RGBA;
const width = 1;
const height = 1;
const border = 0;
const src_format = gl.RGBA;
const src_type = gl.UNSIGNED_BYTE;
const pixel = new Uint8Array( [0, 0, 255, 255] ); // Opaque blue
gl.texImage2D(
gl.TEXTURE_2D,
level,
internal_format,
width,
height,
border,
src_format,
src_type,
pixel,
);
// Turn off mips and set wrapping to clamp to edge so it will work regardless of the dimensions of the video
gl.texParameteri( gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE );
gl.texParameteri( gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE );
gl.texParameteri( gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR );
return texture;
} // init_video_texture
/**
* update_video_texture()
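* Copy the current frame of the video element into the texture.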
*/
function update_video_texture (gl_context, texture, video) {
const gl = gl_context;
const level = 0;
const internal_format = gl.RGBA;
const src_format = gl.RGBA;
const src_type = gl.UNSIGNED_BYTE;
gl.bindTexture(
gl.TEXTURE_2D,
texture,
);
gl.texImage2D(
gl.TEXTURE_2D,
level,
internal_format,
src_format,
src_type,
video, // Instead of an image we pass the video element;
// WebGL knows how to pull the current frame out of it
);
} // update_video_texture
/**
* init_buffers()
* Make our data accessible to the GPU
*/
function init_buffers (gl) {
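// Cube geometry: 24 vertices (4 per face), so each face can have its own color, normal and texture coordinates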
const positions = [
// Front face
-1.0, -1.0, 1.0,
1.0, -1.0, 1.0,
1.0, 1.0, 1.0,
-1.0, 1.0, 1.0,
// Back face
-1.0, -1.0, -1.0,
-1.0, 1.0, -1.0,
1.0, 1.0, -1.0,
1.0, -1.0, -1.0,
// Top face
-1.0, 1.0, -1.0,
-1.0, 1.0, 1.0,
1.0, 1.0, 1.0,
1.0, 1.0, -1.0,
// Bottom face
-1.0, -1.0, -1.0,
1.0, -1.0, -1.0,
1.0, -1.0, 1.0,
-1.0, -1.0, 1.0,
// Right face
1.0, -1.0, -1.0,
1.0, 1.0, -1.0,
1.0, 1.0, 1.0,
1.0, -1.0, 1.0,
// Left face
-1.0, -1.0, -1.0,
-1.0, -1.0, 1.0,
-1.0, 1.0, 1.0,
-1.0, 1.0, -1.0,
];
const face_colors = [
[1.0, 1.0, 1.0, 1.0], // Front face: white
[1.0, 0.0, 0.0, 1.0], // Back face: red
[0.0, 1.0, 0.0, 1.0], // Top face: green
[0.0, 0.0, 1.0, 1.0], // Bottom face: blue
[1.0, 1.0, 0.0, 1.0], // Right face: yellow
[1.0, 0.0, 1.0, 1.0], // Left face: purple
];
const vertex_normals = [
// Front
0.0, 0.0, 1.0,
0.0, 0.0, 1.0,
0.0, 0.0, 1.0,
0.0, 0.0, 1.0,
// Back
0.0, 0.0, -1.0,
0.0, 0.0, -1.0,
0.0, 0.0, -1.0,
0.0, 0.0, -1.0,
// Top
0.0, 1.0, 0.0,
0.0, 1.0, 0.0,
0.0, 1.0, 0.0,
0.0, 1.0, 0.0,
// Bottom
0.0, -1.0, 0.0,
0.0, -1.0, 0.0,
0.0, -1.0, 0.0,
0.0, -1.0, 0.0,
// Right
1.0, 0.0, 0.0,
1.0, 0.0, 0.0,
1.0, 0.0, 0.0,
1.0, 0.0, 0.0,
// Left
-1.0, 0.0, 0.0,
-1.0, 0.0, 0.0,
-1.0, 0.0, 0.0,
-1.0, 0.0, 0.0
];
// This array defines each face as two triangles, using the indices into the vertex array to specify each
// triangle's position
const indices = [
0, 1, 2, 0, 2, 3, // front
4, 5, 6, 4, 6, 7, // back
8, 9, 10, 8, 10, 11, // top
12, 13, 14, 12, 14, 15, // bottom
16, 17, 18, 16, 18, 19, // right
20, 21, 22, 20, 22, 23, // left
];
const texture_coordinates = [
// Front
0.0, 0.0,
1.0, 0.0,
1.0, 1.0,
0.0, 1.0,
// Back
0.0, 0.0,
1.0, 0.0,
1.0, 1.0,
0.0, 1.0,
// Top
0.0, 0.0,
1.0, 0.0,
1.0, 1.0,
0.0, 1.0,
// Bottom
0.0, 0.0,
1.0, 0.0,
1.0, 1.0,
0.0, 1.0,
// Right
0.0, 0.0,
1.0, 0.0,
1.0, 1.0,
0.0, 1.0,
// Left
0.0, 0.0,
1.0, 0.0,
1.0, 1.0,
0.0, 1.0,
];
// Convert the array of colors into a table for all the vertices
let colors = [];
for (let i = 0; i < face_colors.length; ++i) {
const c = face_colors[i];
colors = colors.concat( c, c, c, c ); // Repeat each color 4 times for the 4 vertices of the face
/*
// Alternatively, we can assign different colors for each of the vertices, creating a color gradient:
for (let j = 0; j < 4; ++j) {
const color = face_colors[(i + j) % face_colors.length]
color.forEach( (channel)=>{
colors.push( channel );
});
}
*/
}
// Create and select TEXTURE coordinates buffer and send data to WebGL
const texture_coord_buffer = gl.createBuffer();
gl.bindBuffer( gl.ARRAY_BUFFER, texture_coord_buffer );
gl.bufferData( gl.ARRAY_BUFFER, new Float32Array( texture_coordinates ), gl.STATIC_DRAW );
// Create and select NORMALS buffer and send data to WebGL
const normal_buffer = gl.createBuffer();
gl.bindBuffer( gl.ARRAY_BUFFER, normal_buffer );
gl.bufferData( gl.ARRAY_BUFFER, new Float32Array( vertex_normals ), gl.STATIC_DRAW );
// Create and select COLOR buffer and send data to WebGL
const color_buffer = gl.createBuffer();
gl.bindBuffer( gl.ARRAY_BUFFER, color_buffer );
gl.bufferData( gl.ARRAY_BUFFER, new Float32Array( colors ), gl.STATIC_DRAW );
// Create and select POSITION buffer and send data to WebGL
const position_buffer = gl.createBuffer();
gl.bindBuffer( gl.ARRAY_BUFFER, position_buffer );
gl.bufferData( gl.ARRAY_BUFFER, new Float32Array( positions ), gl.STATIC_DRAW );
// Create and select INDEX buffer and send data to WebGL
const index_buffer = gl.createBuffer();
gl.bindBuffer( gl.ELEMENT_ARRAY_BUFFER, index_buffer );
gl.bufferData( gl.ELEMENT_ARRAY_BUFFER, new Uint16Array( indices ), gl.STATIC_DRAW );
return {
position : position_buffer,
color : color_buffer,
normal : normal_buffer,
textureCoord : texture_coord_buffer,
indices : index_buffer,
}
} // init_buffers
/**
* draw_scene()
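* Render one frame: set up the matrices, wire up the vertex attributes, bind the texture (if any)
* and issue the draw call.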
*/
function draw_scene (gl_context, program_info, buffers, texture, rotation) {
const gl = gl_context;
// Fill canvas with grayish blue
gl.clearColor( 0.1, 0.3, 0.5, 1.0 );
gl.clearDepth( 1.0 );
gl.enable ( gl.DEPTH_TEST );
gl.depthFunc ( gl.LEQUAL );
gl.clear( gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT );
const field_of_view = 45 * DtoR;
const aspect = gl.canvas.clientWidth / gl.canvas.clientHeight;
const z_near = 0.1;
const z_far = 100.0;
// Set up camera
const projection_matrix = mat4.create();
mat4.perspective(
projection_matrix,
field_of_view,
aspect,
z_near,
z_far,
);
// Set up world coordinate transformation ("move the camera")
const model_view_matrix = mat4.create();
mat4.translate(
model_view_matrix,
model_view_matrix,
[-0.0, 0.0, -6.0],
);
mat4.rotate(
model_view_matrix,
model_view_matrix,
rotation,
[0, 0, 1],
);
mat4.rotate(
model_view_matrix,
model_view_matrix,
rotation * 0.7,
[0, 1, 0],
);
// Set up normal matrix
const normal_matrix = mat4.create();
mat4.invert( normal_matrix, model_view_matrix );
mat4.transpose( normal_matrix, normal_matrix );
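// The inverse transpose of the model-view matrix keeps normals perpendicular to their surfaces
// even under non-uniform scaling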
// Tell WebGL how to pull out the POSITIONS from the position buffer into the vertexPosition attribute
{
const num_components = 3;
const type = gl.FLOAT;
const normalize = false;
const stride = 0;
const offset = 0;
gl.bindBuffer( gl.ARRAY_BUFFER, buffers.position );
gl.vertexAttribPointer(
program_info.attribLocations.vertexPosition,
num_components,
type,
normalize,
stride,
offset,
);
gl.enableVertexAttribArray(
program_info.attribLocations.vertexPosition
);
}
// Tell WebGL how to pull out the COLORS from the color buffer into the vertexColor attribute
{
const num_components = 4;
const type = gl.FLOAT;
const normalize = false;
const stride = 0;
const offset = 0;
gl.bindBuffer( gl.ARRAY_BUFFER, buffers.color );
gl.vertexAttribPointer(
program_info.attribLocations.vertexColor,
num_components,
type,
normalize,
stride,
offset,
);
gl.enableVertexAttribArray(
program_info.attribLocations.vertexColor,
);
}
// Tell WebGL how to pull out the NORMALS from the normal buffer into the vertexNormal attribute
{
const num_components = 3;
const type = gl.FLOAT;
const normalize = false;
const stride = 0;
const offset = 0;
gl.bindBuffer( gl.ARRAY_BUFFER, buffers.normal );
gl.vertexAttribPointer(
program_info.attribLocations.vertexNormal,
num_components,
type,
normalize,
stride,
offset,
);
gl.enableVertexAttribArray( program_info.attribLocations.vertexNormal );
}
// Tell WebGL how to pull out the TEXTURE COORDINATES from the buffer into the textureCoord attribute
{
const num_components = 2;
const type = gl.FLOAT;
const normalize = false;
const stride = 0;
const offset = 0;
gl.bindBuffer( gl.ARRAY_BUFFER, buffers.textureCoord );
gl.vertexAttribPointer(
program_info.attribLocations.textureCoord,
num_components,
type,
normalize,
stride,
offset
);
gl.enableVertexAttribArray( program_info.attribLocations.textureCoord );
}
// Tell WebGL which INDICES to use for indexing the vertices
{
gl.bindBuffer( gl.ELEMENT_ARRAY_BUFFER, buffers.indices );
}
// Select the shader program to use for drawing
gl.useProgram( program_info.program );
// Upload matrices
gl.uniformMatrix4fv(
program_info.uniformLocations.projectionMatrix,
false,
projection_matrix,
);
gl.uniformMatrix4fv(
program_info.uniformLocations.modelViewMatrix,
false,
model_view_matrix,
);
gl.uniformMatrix4fv(
program_info.uniformLocations.normalMatrix,
false,
normal_matrix,
);
if (texture !== null) {
// Specify the texture to map onto the faces
// Tell WebGL we want to affect texture unit 0
gl.activeTexture( gl.TEXTURE0 );
// Bind the texture to texture unit 0
gl.bindTexture( gl.TEXTURE_2D, texture );
// Tell the shader we bound the texture to texture unit 0
gl.uniform1i( program_info.uniformLocations.uSampler, 0 );
}
// Trigger rendering
{
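// 36 indices: 6 faces x 2 triangles x 3 indices each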
const vertex_count = 36;
const type = gl.UNSIGNED_SHORT;
const offset = 0;
gl.drawElements( gl.TRIANGLES, vertex_count, type, offset );
}
++frame_count;
} // draw_scene
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// PROGRAM ENTRY POINT
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////
/**
* body.onload()
*/
window.addEventListener( "load", ()=>{
function computed_style( element ) {
return element.currentStyle || window.getComputedStyle( element );
} // computed_style
// Get size of the <canvas> element
const canvas = document.querySelector( ".viewport" );
const style = computed_style( canvas );
const new_width = parseInt( style.width ); // We could use different dimensions, if so desired
const new_height = parseInt( style.height );
// Initialize the <canvas> element
// Set size of the DOM element (how large the canvas appears on the page) and its drawing surface
canvas.setAttribute( "width", canvas.width = new_width ); // Setting both sizes in one go
canvas.setAttribute( "height", canvas.height = new_height );
// Attempt to retrieve the WebGL context
const gl = canvas.getContext( "webgl" );
if (! gl) throw new Error( "Unable to initialize WebGL." );
// Fill the canvas with a dark red. If this color appears on the page, something went wrong
gl.clearColor( 0.5, 0.0, 0.0, 1.0 );
gl.clear( gl.COLOR_BUFFER_BIT );
// Load shader sources
const vertex_shader_source = document.querySelector( "#vertex_shader" ).innerText;
const fragment_shader_source = document.querySelector( "#fragment_shader" ).innerText;
// Compile shaders
const shader_program = init_shader_program( gl, vertex_shader_source, fragment_shader_source );
// Store all info
const program_info = {
program: shader_program,
attribLocations: {
vertexPosition : gl.getAttribLocation( shader_program, "aVertexPosition" ),
vertexColor : gl.getAttribLocation( shader_program, "aVertexColor" ),
vertexNormal : gl.getAttribLocation( shader_program, "aVertexNormal" ),
textureCoord : gl.getAttribLocation( shader_program, "aTextureCoord" ),
},
uniformLocations: {
projectionMatrix : gl.getUniformLocation( shader_program, "uProjectionMatrix" ),
modelViewMatrix : gl.getUniformLocation( shader_program, "uModelViewMatrix" ),
normalMatrix : gl.getUniformLocation( shader_program, "uNormalMatrix" ),
uSampler : gl.getUniformLocation( shader_program, 'uSampler' ),
},
};
// Set up memory
const buffers = init_buffers( gl );
// Prepare texture
const video_texture = init_video_texture( gl );
const image_texture = load_image_texture( gl, 'cubetexture.png' );
// Load video
const video = setup_video( "babelfisch.webm", ()=>{
update_video_texture( gl, video_texture, video );
} );
// Animate
let then = 0;
let rotation = 0.0;
function render (now) {
now *= 0.001; // Convert to seconds
const delta_t = now - then;
then = now;
rotation += delta_t;
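// Alternate between the video texture and the image texture once per second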
var texture;
switch (Math.floor( now ) % 2) {
case 0 : texture = video_texture; break;
case 1 : texture = image_texture; break;
}
draw_scene( gl, program_info, buffers, texture, rotation );
requestAnimationFrame( render );
} // render
requestAnimationFrame( render ); // Start render loop
// Display frames per second
setInterval( ()=>{
document.title = "F/s: " + frame_count;
frame_count = 0;
}, 1000 );
});