webgl
<!DOCTYPE html>
<html>
<head>
<title><%= title %></title>
<link rel='stylesheet' href='/stylesheets/style.css' />
<style>
canvas{
border: 2px solid black;
background-color: black;
}
video{
display: none;
}
</style>
</head>
<body>
<!-- BUG FIX: the canvas width/height content attributes take plain
     non-negative integers, not CSS lengths like "800px". -->
<canvas id="glcanvas" width="800" height="600"></canvas>
<!-- BUG FIX: use root-relative paths like the stylesheet above, so the
     scripts still resolve when the page is served from a nested route. -->
<script src="/javascripts/glmatrix.js"
crossorigin="anonymous" defer></script>
<script defer src="/javascripts/webgl-demo.js">
</script>
</body>
</html>
webgl-demo.js
// Current cube rotation angle in radians; advanced each frame by drawScene().
var squareRotation = 0.0;
// True only once the video is both playing AND has fired `timeupdate`
// (i.e. at least one frame exists), so it is safe to copy frames into
// the texture each render pass.
var copyVideo = false;
// Start the demo as soon as the script runs (the <script> tag is deferred,
// so the DOM — including #glcanvas — is already parsed).
main();
// Entry point: create the WebGL context, compile/link the shaders, build
// the cube geometry and the (initially placeholder) texture, start the
// hidden <video>, then run the per-frame render loop.
function main() {
  const canvas = document.getElementById('glcanvas');
  const gl = canvas.getContext('webgl');
  if (!gl) {
    alert('Unable to initialize WebGL. Your browser or machine may not support it.');
    return;
  }

  // Vertex shader: transforms positions, passes texture coordinates
  // through, and computes ambient + single-directional lighting from
  // the per-vertex normal.
  const vsSource = `
attribute vec4 aVertexPosition;
attribute vec3 aVertexNormal;
attribute vec2 aTextureCoord;
uniform mat4 uNormalMatrix;
uniform mat4 uModelViewMatrix;
uniform mat4 uProjectionMatrix;
varying highp vec2 vTextureCoord;
varying highp vec3 vLighting;
void main(void) {
gl_Position = uProjectionMatrix * uModelViewMatrix * aVertexPosition;
vTextureCoord = aTextureCoord;
// Apply lighting effect
highp vec3 ambientLight = vec3(0.3, 0.3, 0.3);
highp vec3 directionalLightColor = vec3(1, 1, 1);
highp vec3 directionalVector = normalize(vec3(0.85, 0.8, 0.75));
highp vec4 transformedNormal = uNormalMatrix * vec4(aVertexNormal, 1.0);
highp float directional = max(dot(transformedNormal.xyz, directionalVector), 0.0);
vLighting = ambientLight + (directionalLightColor * directional);
}
`;

  // Fragment shader: samples the texture and modulates it by the
  // interpolated lighting value.
  const fsSource = `
varying highp vec2 vTextureCoord;
varying highp vec3 vLighting;
uniform sampler2D uSampler;
void main(void) {
highp vec4 texelColor = texture2D(uSampler, vTextureCoord);
gl_FragColor = vec4(texelColor.rgb * vLighting, texelColor.a);
}
`;

  const shaderProgram = initShaderProgram(gl, vsSource, fsSource);

  // Collect the attribute/uniform locations WebGL assigned, so drawScene()
  // does not have to look them up every frame.
  const programInfo = {
    program: shaderProgram,
    attribLocations: {
      vertexPosition: gl.getAttribLocation(shaderProgram, 'aVertexPosition'),
      textureCoord: gl.getAttribLocation(shaderProgram, 'aTextureCoord'),
      vertexNormal: gl.getAttribLocation(shaderProgram, 'aVertexNormal'),
    },
    uniformLocations: {
      projectionMatrix: gl.getUniformLocation(shaderProgram, 'uProjectionMatrix'),
      modelViewMatrix: gl.getUniformLocation(shaderProgram, 'uModelViewMatrix'),
      uSampler: gl.getUniformLocation(shaderProgram, 'uSampler'),
      normalMatrix: gl.getUniformLocation(shaderProgram, 'uNormalMatrix'),
    },
  };

  const buffers = initBuffers(gl);
  // 1x1 blue placeholder; video frames replace it once copyVideo flips.
  const texture = initTextures(gl);
  const video = setupVideo('images/video.mp4'); // BUG FIX: missing semicolon

  let then = 0;
  // Per-frame callback: compute elapsed time, refresh the video texture
  // when a frame is available, and redraw.
  function render(now) {
    now *= 0.001; // requestAnimationFrame passes milliseconds; use seconds
    const deltaTime = now - then;
    then = now;
    if (copyVideo) {
      updateTexture(gl, texture, video);
    }
    drawScene(gl, programInfo, buffers, texture, deltaTime);
    requestAnimationFrame(render);
  }
  requestAnimationFrame(render);
}
// Create a hidden, muted, looping <video> element, start playback, and
// return it. Sets the module-level `copyVideo` flag once the video is
// both playing and has produced at least one frame (`timeupdate`), i.e.
// once it is safe to upload frames as a texture.
function setupVideo(url) {
  const video = document.createElement('video');

  let playing = false;
  let timeupdate = false;

  // Muted autoplay is required by most browsers' autoplay policies.
  video.autoplay = true;
  video.muted = true;
  video.loop = true;

  // Both conditions must hold before frames are copied to the texture.
  function checkReady() {
    if (playing && timeupdate) {
      copyVideo = true;
    }
  }

  video.addEventListener('playing', function () {
    playing = true;
    checkReady();
  }, true);
  video.addEventListener('timeupdate', function () {
    timeupdate = true;
    checkReady();
  }, true);

  video.src = url;

  // BUG FIX: play() returns a promise that rejects when autoplay is
  // blocked; the original left it floating (unhandled rejection).
  const playPromise = video.play();
  if (playPromise !== undefined) {
    playPromise.catch(function (err) {
      console.error('Video playback could not start:', err);
    });
  }

  return video;
}
// Copy the current video frame into the texture.
// BUG FIX: in the original, the `function updateTexture(...)` declaration
// was fused onto the end of the comment line, so the function was never
// defined and the render loop threw a ReferenceError once `copyVideo`
// became true. The declaration is restored on its own line here.
function updateTexture(gl, texture, video) {
  const level = 0;
  const internalFormat = gl.RGBA;
  const srcFormat = gl.RGBA;
  const srcType = gl.UNSIGNED_BYTE;
  gl.bindTexture(gl.TEXTURE_2D, texture);
  // The HTMLVideoElement overload of texImage2D infers width/height from
  // the current frame.
  gl.texImage2D(gl.TEXTURE_2D, level, internalFormat,
                srcFormat, srcType, video);
}
// True iff `value` is a positive power of two (1, 2, 4, 8, ...).
// A power of two has exactly one bit set, so `value & (value - 1)`
// clears the lowest set bit and yields 0 only for powers of two.
// BUG FIX: guard `value > 0` — the original returned true for 0,
// which would wrongly allow mipmap generation for a zero-sized image.
function isPowerOf2(value) {
  return value > 0 && (value & (value - 1)) === 0;
}
// Create and fill the vertex buffers describing a unit cube centered at
// the origin: positions, per-vertex normals, texture coordinates, and
// the element (index) buffer assembling 12 triangles from 24 vertices
// (4 per face, so each face can have its own normals/UVs).
// Returns { position, textureCoord, indices, normal } WebGLBuffer handles.
// (Cleanup: removed ~30 lines of commented-out dead code from the earlier
// colored-square version of this demo; behavior is unchanged.)
function initBuffers(gl) {
  // --- Vertex positions: 6 faces x 4 vertices x (x, y, z) ---
  const positionBuffer = gl.createBuffer();
  gl.bindBuffer(gl.ARRAY_BUFFER, positionBuffer);
  const positions = [
    // Front face
    -1.0, -1.0,  1.0,
     1.0, -1.0,  1.0,
     1.0,  1.0,  1.0,
    -1.0,  1.0,  1.0,
    // Back face
    -1.0, -1.0, -1.0,
    -1.0,  1.0, -1.0,
     1.0,  1.0, -1.0,
     1.0, -1.0, -1.0,
    // Top face
    -1.0,  1.0, -1.0,
    -1.0,  1.0,  1.0,
     1.0,  1.0,  1.0,
     1.0,  1.0, -1.0,
    // Bottom face
    -1.0, -1.0, -1.0,
     1.0, -1.0, -1.0,
     1.0, -1.0,  1.0,
    -1.0, -1.0,  1.0,
    // Right face
     1.0, -1.0, -1.0,
     1.0,  1.0, -1.0,
     1.0,  1.0,  1.0,
     1.0, -1.0,  1.0,
    // Left face
    -1.0, -1.0, -1.0,
    -1.0, -1.0,  1.0,
    -1.0,  1.0,  1.0,
    -1.0,  1.0, -1.0,
  ];
  gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(positions), gl.STATIC_DRAW);

  // --- Per-vertex normals: each face's 4 vertices share the face normal ---
  const normalBuffer = gl.createBuffer();
  gl.bindBuffer(gl.ARRAY_BUFFER, normalBuffer);
  const vertexNormals = [
    // Front
    0.0, 0.0, 1.0,
    0.0, 0.0, 1.0,
    0.0, 0.0, 1.0,
    0.0, 0.0, 1.0,
    // Back
    0.0, 0.0, -1.0,
    0.0, 0.0, -1.0,
    0.0, 0.0, -1.0,
    0.0, 0.0, -1.0,
    // Top
    0.0, 1.0, 0.0,
    0.0, 1.0, 0.0,
    0.0, 1.0, 0.0,
    0.0, 1.0, 0.0,
    // Bottom
    0.0, -1.0, 0.0,
    0.0, -1.0, 0.0,
    0.0, -1.0, 0.0,
    0.0, -1.0, 0.0,
    // Right
    1.0, 0.0, 0.0,
    1.0, 0.0, 0.0,
    1.0, 0.0, 0.0,
    1.0, 0.0, 0.0,
    // Left
    -1.0, 0.0, 0.0,
    -1.0, 0.0, 0.0,
    -1.0, 0.0, 0.0,
    -1.0, 0.0, 0.0,
  ];
  gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(vertexNormals), gl.STATIC_DRAW);

  // --- Texture coordinates: the full [0,1]x[0,1] quad on every face ---
  const textureCoordBuffer = gl.createBuffer();
  gl.bindBuffer(gl.ARRAY_BUFFER, textureCoordBuffer);
  const textureCoordinates = [
    // Front
    0.0, 0.0,
    1.0, 0.0,
    1.0, 1.0,
    0.0, 1.0,
    // Back
    0.0, 0.0,
    1.0, 0.0,
    1.0, 1.0,
    0.0, 1.0,
    // Top
    0.0, 0.0,
    1.0, 0.0,
    1.0, 1.0,
    0.0, 1.0,
    // Bottom
    0.0, 0.0,
    1.0, 0.0,
    1.0, 1.0,
    0.0, 1.0,
    // Right
    0.0, 0.0,
    1.0, 0.0,
    1.0, 1.0,
    0.0, 1.0,
    // Left
    0.0, 0.0,
    1.0, 0.0,
    1.0, 1.0,
    0.0, 1.0,
  ];
  gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(textureCoordinates),
                gl.STATIC_DRAW);

  // --- Element indices: two triangles per face, CCW winding ---
  const indexBuffer = gl.createBuffer();
  gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, indexBuffer);
  const cubeVertexIndices = [
    0,  1,  2,   0,  2,  3,  // front
    4,  5,  6,   4,  6,  7,  // back
    8,  9,  10,  8,  10, 11, // top
    12, 13, 14,  12, 14, 15, // bottom
    16, 17, 18,  16, 18, 19, // right
    20, 21, 22,  20, 22, 23, // left
  ];
  gl.bufferData(gl.ELEMENT_ARRAY_BUFFER, new Uint16Array(cubeVertexIndices), gl.STATIC_DRAW);

  return {
    position: positionBuffer,
    textureCoord: textureCoordBuffer,
    indices: indexBuffer,
    normal: normalBuffer,
  };
}
// Render one frame: clear the canvas, build the projection / model-view /
// normal matrices, bind the cube's vertex attributes and texture, and
// draw 36 indexed vertices. `deltaTime` (seconds since the previous
// frame) advances the module-level `squareRotation` animation state.
// NOTE(review): relies on the global `mat4` provided by glmatrix.js.
function drawScene(gl,programInfo,buffers,texture,deltaTime){
// Clear to opaque black and reset the depth buffer; use LEQUAL so
// nearer fragments win.
gl.clearColor(0.0,0.0,0.0,1.0);
gl.clearDepth(1.0);
gl.enable(gl.DEPTH_TEST);
gl.depthFunc(gl.LEQUAL);
gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT);
// Perspective camera: 45-degree field of view, canvas aspect ratio,
// objects visible between 0.1 and 100 units from the camera.
const fieldOfView = 45*Math.PI/180;
const aspect = gl.canvas.clientWidth/gl.canvas.clientHeight;
const zNear = 0.1;
const zFar = 100.0;
const projectionMatrix = mat4.create();
mat4.perspective(projectionMatrix,fieldOfView,aspect,zNear,zFar);
// Place the cube 6 units in front of the camera, then spin it around
// the Z axis and (at 0.7x the rate) around the Y axis.
const modelViewMatrix = mat4.create();
mat4.translate(modelViewMatrix,modelViewMatrix,[-0.0,0.0,-6.0]);
mat4.rotate(modelViewMatrix,modelViewMatrix,squareRotation,[0,0,1]);
mat4.rotate(modelViewMatrix,modelViewMatrix,squareRotation*0.7,[0,1,0])
// Wire up the position attribute: 3 floats per vertex, tightly packed.
{
const numComponents = 3;
const type = gl.FLOAT;
const normalize = false;
const stride = 0;
const offset = 0;
gl.bindBuffer(gl.ARRAY_BUFFER,buffers.position);
gl.vertexAttribPointer(programInfo.attribLocations.vertexPosition,numComponents,type,normalize,stride,offset);
gl.enableVertexAttribArray(programInfo.attribLocations.vertexPosition);
}
// Texture-coordinate attribute: 2 floats per vertex.
{
const numComponents = 2;
const type = gl.FLOAT;
const normalize = false;
const stride = 0;
const offset = 0;
gl.bindBuffer(gl.ARRAY_BUFFER, buffers.textureCoord);
gl.vertexAttribPointer(programInfo.attribLocations.textureCoord,numComponents,type,normalize,stride,offset);
gl.enableVertexAttribArray(programInfo.attribLocations.textureCoord);
}
// Normal matrix = transpose(inverse(modelView)), so normals stay
// correct under the model-view transform.
const normalMatrix = mat4.create();
mat4.invert(normalMatrix, modelViewMatrix);
mat4.transpose(normalMatrix, normalMatrix);
// Normal attribute: 3 floats per vertex.
{
const numComponents = 3;
const type = gl.FLOAT;
const normalize = false;
const stride = 0;
const offset = 0;
gl.bindBuffer(gl.ARRAY_BUFFER, buffers.normal);
gl.vertexAttribPointer(
programInfo.attribLocations.vertexNormal,
numComponents,
type,
normalize,
stride,
offset);
gl.enableVertexAttribArray(
programInfo.attribLocations.vertexNormal);
}
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER,buffers.indices);
gl.useProgram(programInfo.program);
gl.uniformMatrix4fv(programInfo.uniformLocations.projectionMatrix,false,projectionMatrix);
gl.uniformMatrix4fv(programInfo.uniformLocations.modelViewMatrix,false,modelViewMatrix);
gl.uniformMatrix4fv(
programInfo.uniformLocations.normalMatrix,
false,
normalMatrix);
// Bind the texture to unit 0 and point the sampler uniform at it.
gl.activeTexture(gl.TEXTURE0);
gl.bindTexture(gl.TEXTURE_2D, texture);
gl.uniform1i(programInfo.uniformLocations.uSampler, 0);
// 36 indices = 12 triangles = 6 faces.
{
const offset = 0;
const vertexCount = 36;
const type = gl.UNSIGNED_SHORT;
gl.drawElements(gl.TRIANGLES, vertexCount, type,offset);
}
// Advance the animation for the next frame.
squareRotation += deltaTime;
}
// Compile the two shader stages, link them into a WebGLProgram, and
// return it. On link failure, alerts with the info log and returns null.
// (Cleanup: removed commented-out dead code that referenced an undefined
// `texCoordAttribute` variable; attribute setup lives in drawScene().)
function initShaderProgram(gl, vsSource, fsSource) {
  const vertexShader = loadShader(gl, gl.VERTEX_SHADER, vsSource);
  const fragmentShader = loadShader(gl, gl.FRAGMENT_SHADER, fsSource);

  const shaderProgram = gl.createProgram();
  gl.attachShader(shaderProgram, vertexShader);
  gl.attachShader(shaderProgram, fragmentShader);
  gl.linkProgram(shaderProgram);

  if (!gl.getProgramParameter(shaderProgram, gl.LINK_STATUS)) {
    alert('Unable to initialize the shader program:' + gl.getProgramInfoLog(shaderProgram));
    return null;
  }
  return shaderProgram;
}
// Compile a single shader stage of the given `type` (VERTEX_SHADER or
// FRAGMENT_SHADER) from GLSL `source`. Returns the compiled WebGLShader,
// or null (after alerting with the compile log and deleting the shader
// object) when compilation fails.
function loadShader(gl, type, source) {
  const shader = gl.createShader(type);
  gl.shaderSource(shader, source);
  gl.compileShader(shader);

  const compiledOk = gl.getShaderParameter(shader, gl.COMPILE_STATUS);
  if (compiledOk) {
    return shader;
  }

  alert('An error occurred compiling the shaders:' + gl.getShaderInfoLog(shader));
  gl.deleteShader(shader);
  return null;
}
// Create the texture used for the cube and fill it with a 1x1 opaque
// blue pixel as a placeholder until the first video frame is uploaded
// by updateTexture(). `url` is unused in the video path and kept only
// for backward compatibility with the old image-loading call sites.
// (Cleanup: removed the commented-out image-loading dead code.)
function initTextures(gl, url) {
  // BUG FIX: `texture` was assigned without a declaration, creating an
  // implicit global (and a ReferenceError under strict mode).
  const texture = gl.createTexture();
  gl.bindTexture(gl.TEXTURE_2D, texture);

  // 1x1 RGBA placeholder pixel (opaque blue) so the cube renders
  // immediately, before the video has produced any frames.
  const level = 0;
  const internalFormat = gl.RGBA;
  const width = 1;
  const height = 1;
  const border = 0;
  const srcFormat = gl.RGBA;
  const srcType = gl.UNSIGNED_BYTE;
  const pixel = new Uint8Array([0, 0, 255, 255]);
  gl.texImage2D(gl.TEXTURE_2D, level, internalFormat, width, height, border, srcFormat, srcType, pixel);

  // Video frames are generally not power-of-two sized, so mipmapping is
  // unavailable in WebGL 1: clamp wrapping and use linear filtering.
  gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
  gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
  gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);

  return texture;
}
【推荐】国内首个AI IDE,深度理解中文开发场景,立即下载体验Trae
【推荐】编程新体验,更懂你的AI,立即体验豆包MarsCode编程助手
【推荐】抖音旗下AI助手豆包,你的智能百科全书,全免费不限次数
【推荐】轻量又高性能的 SSH 工具 IShell:AI 加持,快人一步
· 25岁的心里话
· 闲置电脑爆改个人服务器(超详细) #公网映射 #Vmware虚拟网络编辑器
· 零经验选手,Compose 一天开发一款小游戏!
· 通过 API 将Deepseek响应流式内容输出到前端
· AI Agent开发,如何调用三方的API Function,是通过提示词来发起调用的吗