
Depth prepass with simplified opaque shapes

ssao
A.Olokhtonov, 1 year ago
parent commit e86ffd6508
  1. client/client_recv.js (2 lines changed)
  2. client/index.js (8 lines changed)
  3. client/math.js (6 lines changed)
  4. client/webgl_draw.js (174 lines changed)
  5. client/webgl_listeners.js (3 lines changed)
  6. client/webgl_shaders.js (168 lines changed)

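In short, draw() now renders each frame in two passes: a cheap depth-only prepass that rasterizes a shrunken, fully opaque quad per stroke segment, followed by the full SDF pass, which can then reject hidden fragments via the depth test. The snippet below is a minimal sketch of that pattern, not the code from webgl_draw.js; prepass_program and main_program are placeholder names, and the reversed depth convention (clearDepth(0.0) plus gl.GEQUAL) is assumed from the diff.

// Sketch: depth prepass followed by the shaded pass, reversed-depth convention.
function draw_two_pass(gl, prepass_program, main_program, index_count) {
    gl.clearDepth(0.0);                                   // "empty" depth is 0; GEQUAL overwrites it
    gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT);

    // Pass 1: depth only. Disabling the draw buffer skips all color work,
    // so only the simplified opaque geometry fills the depth buffer.
    gl.drawBuffers([gl.NONE]);
    gl.useProgram(prepass_program);
    gl.drawElements(gl.TRIANGLES, index_count, gl.UNSIGNED_INT, 0);

    // Pass 2: full SDF shading. Fragments whose depth is below the value
    // written by a later (higher-id) stroke fail the GEQUAL test and can
    // typically be rejected before the expensive fragment shader runs.
    gl.drawBuffers([gl.BACK]);
    gl.useProgram(main_program);
    gl.drawElements(gl.TRIANGLES, index_count, gl.UNSIGNED_INT, 0);
}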
client/client_recv.js (2 lines changed)

@@ -177,6 +177,8 @@ function handle_event(state, context, event) {
geometry_add_stroke(state, context, event, state.events.length);
state.stroke_count++;
break;
}

client/index.js (8 lines changed)

@@ -3,9 +3,9 @@
document.addEventListener('DOMContentLoaded', main);
const config = {
ws_url: 'wss://192.168.100.2/ws/',
ping_url: 'https://192.168.100.2/api/ping',
image_url: 'https://192.168.100.2/images/',
ws_url: 'wss://desk.some.website/ws/',
ping_url: 'https://desk.some.website/api/ping',
image_url: 'https://desk.some.website/images/',
sync_timeout: 1000,
ws_reconnect_timeout: 2000,
brush_preview_timeout: 1000,
@@ -157,6 +157,7 @@ function main() {
'queue': [],
'events': [],
'stroke_count': 0,
'tools': {
'active': null,
@@ -181,6 +182,7 @@ function main() {
'canvas': null,
'gl': null,
'debug_mode': false,
'do_prepass': true,
'frametime_window': [],
'frametime_window_head': 0,

client/math.js (6 lines changed)

@@ -226,8 +226,10 @@ function segments_onscreen(state, context) {
total_points += event.points.length - 1;
}
}
state.onscreen_segments = new Uint32Array(total_points * 6);
if (total_points > 0) {
state.onscreen_segments = new Uint32Array(total_points * 6);
}
}
let at = 0;

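segments_onscreen() now only reallocates the Uint32Array when there is something to clip; in either case, draw() later takes a zero-copy view over a prefix of that array and uploads only the indices that survived clipping. A small illustration of that view-over-prefix pattern (generic names, not the ones in math.js):

// Preallocate once for the worst case, then view only the used prefix each frame.
const scratch = new Uint32Array(total_points * 6);            // worst-case index count
// ... clipping fills scratch[0 .. index_count-1] ...
const used = new Uint32Array(scratch.buffer, 0, index_count); // a view, no copy
gl.bufferData(gl.ELEMENT_ARRAY_BUFFER, used, gl.DYNAMIC_DRAW);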
client/webgl_draw.js (174 lines changed)

@@ -11,6 +11,27 @@ function schedule_draw(state, context) {
}
}
function upload_if_needed(context) {
const gl = context.gl;
if (context.need_static_allocate) {
if (config.debug_print) console.debug('static allocate');
gl.bufferData(gl.ARRAY_BUFFER, context.static_serializer.size, gl.DYNAMIC_DRAW);
context.need_static_allocate = false;
context.static_upload_from = 0;
context.need_static_upload = true;
}
if (context.need_static_upload) {
if (config.debug_print) console.debug('static upload');
const upload_offset = context.static_upload_from;
const upload_size = context.static_serializer.offset - upload_offset;
gl.bufferSubData(gl.ARRAY_BUFFER, upload_offset, new Uint8Array(context.static_serializer.buffer, upload_offset, upload_size));
context.need_static_upload = false;
context.static_upload_from = context.static_serializer.offset;
}
}
function draw(state, context) {
state.timers.raf = false;
@@ -20,12 +41,6 @@ function draw(state, context) {
let query = null;
if (context._DRAW_TO_TEXTURE) {
gl.bindFramebuffer(gl.FRAMEBUFFER, context.framebuffers['sdf'].tiles);
} else {
gl.bindFramebuffer(gl.FRAMEBUFFER, null);
}
if (context.gpu_timer_ext !== null) {
query = gl.createQuery();
gl.beginQuery(context.gpu_timer_ext.TIME_ELAPSED_EXT, query);
@@ -34,93 +49,84 @@ function draw(state, context) {
let locations;
let buffers;
buffers = context.buffers['sdf'];
gl.bindBuffer(gl.ARRAY_BUFFER, buffers['b_packed_static']);
upload_if_needed(context);
gl.viewport(0, 0, context.canvas.width, context.canvas.height);
gl.clearColor(context.bgcolor.r, context.bgcolor.g, context.bgcolor.b, 1);
gl.clearDepth(0.0);
gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT);
const before_clip = performance.now();
const index_count = segments_onscreen(state, context);
const after_clip = performance.now();
//console.debug('clip', after_clip - before_clip);
if (index_count > 0) {
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, buffers['b_packed_static_index']);
if (!context._DRAW_TO_TEXTURE) {
gl.viewport(0, 0, context.canvas.width, context.canvas.height);
gl.clearColor(context.bgcolor.r, context.bgcolor.g, context.bgcolor.b, 1);
gl.clear(gl.COLOR_BUFFER_BIT);
} else {
//gl.clearBufferuiv(gl.COLOR, 0, new Uint8Array([0, 0, 0, 1]));
gl.viewport(0, 0, Math.ceil(context.canvas.width / config.tile_size), Math.ceil(context.canvas.height / config.tile_size));
gl.clearColor(context.bgcolor.r, context.bgcolor.g, context.bgcolor.b, 1);
gl.clear(gl.COLOR_BUFFER_BIT);
}
const index_buffer = new Uint32Array(state.onscreen_segments.buffer, 0, index_count);
const static_points = context.static_serializer.offset / config.bytes_per_point;
//const dynamic_points = context.dynamic_serializer.offset / config.bytes_per_point;
// SDF
buffers = context.buffers['sdf'];
if (static_points > 0) {
// DEPTH PREPASS
if (!context._DRAW_TO_TEXTURE) {
locations = context.locations['sdf'].main;
gl.useProgram(context.programs['sdf'].main);
} else {
locations = context.locations['sdf'].tiles;
gl.useProgram(context.programs['sdf'].tiles);
}
if (context.do_prepass) {
gl.drawBuffers([gl.NONE]);
gl.uniform2f(locations['u_res'], context.canvas.width, context.canvas.height);
gl.uniform2f(locations['u_scale'], state.canvas.zoom, state.canvas.zoom);
gl.uniform2f(locations['u_translation'], state.canvas.offset.x, state.canvas.offset.y);
locations = context.locations['sdf'].opaque;
if (!context._DRAW_TO_TEXTURE) {
gl.uniform1i(locations['u_debugmode'], context.debug_mode ? 1 : 0);
}
gl.useProgram(context.programs['sdf'].opaque);
const static_points = context.static_serializer.offset / config.bytes_per_point;
const dynamic_points = context.dynamic_serializer.offset / config.bytes_per_point;
gl.uniform2f(locations['u_res'], context.canvas.width, context.canvas.height);
gl.uniform2f(locations['u_scale'], state.canvas.zoom, state.canvas.zoom);
gl.uniform2f(locations['u_translation'], state.canvas.offset.x, state.canvas.offset.y);
gl.uniform1i(locations['u_stroke_count'], state.stroke_count);
if (static_points > 0) {
gl.bindBuffer(gl.ARRAY_BUFFER, buffers['b_packed_static']);
gl.enableVertexAttribArray(locations['a_pos']);
gl.enableVertexAttribArray(locations['a_line']);
gl.enableVertexAttribArray(locations['a_stroke_id']);
gl.enableVertexAttribArray(locations['a_pos']);
gl.enableVertexAttribArray(locations['a_line']);
gl.vertexAttribPointer(locations['a_pos'], 3, gl.FLOAT, false, config.bytes_per_point, 0);
gl.vertexAttribPointer(locations['a_line'], 4, gl.FLOAT, false, config.bytes_per_point, 4 * 3);
gl.vertexAttribIPointer(locations['a_stroke_id'], 1, gl.INT, config.bytes_per_point, 4 * 3 + 4 * 4 + 4);
if (!context._DRAW_TO_TEXTURE) {
gl.enableVertexAttribArray(locations['a_color']);
}
gl.bufferData(gl.ELEMENT_ARRAY_BUFFER, index_buffer, gl.DYNAMIC_DRAW);
if (context._DRAW_TO_TEXTURE) {
gl.enableVertexAttribArray(locations['a_stroke_id']);
}
gl.drawElements(gl.TRIANGLES, index_count, gl.UNSIGNED_INT, 0);
}
gl.vertexAttribPointer(locations['a_pos'], 3, gl.FLOAT, false, config.bytes_per_point, 0);
gl.vertexAttribPointer(locations['a_line'], 4, gl.FLOAT, false, config.bytes_per_point, 4 * 3);
if (!context._DRAW_TO_TEXTURE) {
gl.vertexAttribPointer(locations['a_color'], 3, gl.UNSIGNED_BYTE, true, config.bytes_per_point, 4 * 3 + 4 * 4);
}
index_buffer.reverse();
if (context._DRAW_TO_TEXTURE) {
gl.vertexAttribIPointer(locations['a_stroke_id'], 1, gl.UNSIGNED_INT, config.bytes_per_point, 4 * 3 + 4 * 4 + 4);
}
// MAIN PASS
gl.drawBuffers([gl.BACK]);
if (context.need_static_allocate) {
if (config.debug_print) console.debug('static allocate');
gl.bufferData(gl.ARRAY_BUFFER, context.static_serializer.size, gl.DYNAMIC_DRAW);
context.need_static_allocate = false;
context.static_upload_from = 0;
context.need_static_upload = true;
}
locations = context.locations['sdf'].main;
if (context.need_static_upload) {
if (config.debug_print) console.debug('static upload');
const upload_offset = context.static_upload_from;
const upload_size = context.static_serializer.offset - upload_offset;
gl.bufferSubData(gl.ARRAY_BUFFER, upload_offset, new Uint8Array(context.static_serializer.buffer, upload_offset, upload_size));
context.need_static_upload = false;
context.static_upload_from = context.static_serializer.offset;
}
gl.useProgram(context.programs['sdf'].main);
const before_clip = performance.now();
const index_count = segments_onscreen(state, context);
const after_clip = performance.now();
//console.debug('clip', after_clip - before_clip);
gl.uniform2f(locations['u_res'], context.canvas.width, context.canvas.height);
gl.uniform2f(locations['u_scale'], state.canvas.zoom, state.canvas.zoom);
gl.uniform2f(locations['u_translation'], state.canvas.offset.x, state.canvas.offset.y);
gl.uniform1i(locations['u_stroke_count'], state.stroke_count);
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, buffers['b_packed_static_index']);
gl.bufferData(gl.ELEMENT_ARRAY_BUFFER, new Uint32Array(state.onscreen_segments.buffer, 0, index_count), gl.DYNAMIC_DRAW);
const after_index_uploads = performance.now();
// console.debug('index upload', after_index_uploads - after_clip);
gl.enableVertexAttribArray(locations['a_pos']);
gl.enableVertexAttribArray(locations['a_line']);
gl.enableVertexAttribArray(locations['a_color']);
gl.enableVertexAttribArray(locations['a_stroke_id']);
gl.vertexAttribPointer(locations['a_pos'], 3, gl.FLOAT, false, config.bytes_per_point, 0);
gl.vertexAttribPointer(locations['a_line'], 4, gl.FLOAT, false, config.bytes_per_point, 4 * 3);
gl.vertexAttribPointer(locations['a_color'], 3, gl.UNSIGNED_BYTE, true, config.bytes_per_point, 4 * 3 + 4 * 4);
gl.vertexAttribIPointer(locations['a_stroke_id'], 1, gl.INT, config.bytes_per_point, 4 * 3 + 4 * 4 + 4);
gl.drawElements(gl.TRIANGLES, index_count, gl.UNSIGNED_INT, 0);
index_buffer.reverse();
gl.bufferData(gl.ELEMENT_ARRAY_BUFFER, index_buffer, gl.DYNAMIC_DRAW);
gl.drawElements(gl.TRIANGLES, index_count, gl.UNSIGNED_INT, 0);
}
}
/*
@@ -143,22 +149,6 @@ function draw(state, context) {
gl.drawArrays(gl.TRIANGLES, 0, dynamic_points);
}
*/
/*
const next_tick = () => {
const wait_status = gl.clientWaitSync(sync, 0, 0);
const frame_end = performance.now();
if (wait_status === gl.ALREADY_SIGNALED || wait_status === gl.CONDITION_SATISFIED) {
const frametime_ms = frame_end - frame_start;
gl.deleteSync(sync);
if (config.debug_print) console.debug(frametime_ms);
} else {
setTimeout(next_tick, 0);
}
}
setTimeout(next_tick, 0);
*/
if (context.gpu_timer_ext) {
gl.endQuery(context.gpu_timer_ext.TIME_ELAPSED_EXT);

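draw() wraps the frame in a TIME_ELAPSED_EXT query when the EXT_disjoint_timer_query_webgl2 extension is available, but the result can only be read back once the GPU has finished. A minimal polling sketch, assuming ext is the object returned by getExtension and query is the object passed to beginQuery/endQuery:

// Poll an elapsed-time query; the raw result is in nanoseconds.
function poll_gpu_time(gl, ext, query, on_result_ms) {
    const available = gl.getQueryParameter(query, gl.QUERY_RESULT_AVAILABLE);
    const disjoint = gl.getParameter(ext.GPU_DISJOINT_EXT);
    if (available && !disjoint) {
        const ns = gl.getQueryParameter(query, gl.QUERY_RESULT);
        gl.deleteQuery(query);
        on_result_ms(ns / 1e6);                 // convert to milliseconds
    } else if (!disjoint) {
        setTimeout(() => poll_gpu_time(gl, ext, query, on_result_ms), 0);
    } else {
        gl.deleteQuery(query);                  // disjoint event: measurement is unreliable
    }
}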
client/webgl_listeners.js (3 lines changed)

@@ -71,6 +71,9 @@ function keydown(e, state, context) {
} else if (e.code === 'KeyD') {
context.debug_mode = !context.debug_mode;
schedule_draw(state, context);
} else if (e.code === 'KeyP') {
context.do_prepass = !context.do_prepass;
schedule_draw(state, context);
}
}

client/webgl_shaders.js (168 lines changed)

@@ -1,19 +1,71 @@
const opaque_vs_src = `#version 300 es
in vec3 a_pos; // .z is radius
in vec4 a_line;
in int a_stroke_id;
uniform vec2 u_scale;
uniform vec2 u_res;
uniform vec2 u_translation;
uniform int u_stroke_count;
flat out int v_stroke_id;
void main() {
// Unlike the full SDF shader, do not inflate the quad, so it covers only the fully opaque core of the stroke.
// Shrink along the line direction so the rounded caps are not included.
vec2 line_dir = normalize(a_line.zw - a_line.xy);
int vertex_index = gl_VertexID % 4;
vec2 pos = a_pos.xy;
if (vertex_index == 0 || vertex_index == 2) {
// vertices on the "beginning" side of the line
pos.xy += line_dir * a_pos.z / 2.0;
} else {
// on the "ending" side of the line
pos.xy -= line_dir * a_pos.z / 2.0;
}
vec2 screen01 = (pos * u_scale + u_translation) / u_res;
vec2 screen02 = screen01 * 2.0;
screen02.y = 2.0 - screen02.y;
v_stroke_id = a_stroke_id;
gl_Position = vec4(screen02 - 1.0, (float(a_stroke_id) / float(u_stroke_count)) * 2.0 - 1.0, 1.0);
}
`;
const nop_fs_src = `#version 300 es
precision highp float;
flat in int v_stroke_id;
out vec4 FragColor;
void main() {
vec3 color = vec3(float(v_stroke_id * 3245 % 255) / 255.0, float(v_stroke_id * 7343 % 255) / 255.0, float(v_stroke_id * 5528 % 255) / 255.0);
FragColor = vec4(color, 1.0);
}
`;
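Both the new opaque vertex shader and the updated SDF vertex shader derive depth from the stroke index, so the depth buffer effectively records "which stroke is on top" rather than any geometric distance. A small helper mirroring that mapping (purely illustrative, not part of the client):

// NDC z written by the shaders, and the window-space depth it becomes
// with the default depth range [0, 1].
function stroke_depth(stroke_id, stroke_count) {
    const z_ndc = (stroke_id / stroke_count) * 2.0 - 1.0;
    return (z_ndc + 1.0) / 2.0;   // equals stroke_id / stroke_count
}
// With clearDepth(0.0) and depthFunc(gl.GEQUAL), a later stroke (larger id,
// larger depth) passes the test over anything it overlaps that was drawn earlier.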
const sdf_vs_src = `#version 300 es
in vec3 a_pos; // .z is radius
in vec4 a_line;
in vec3 a_color;
in uint a_stroke_id;
in int a_stroke_id;
uniform vec2 u_scale;
uniform vec2 u_res;
uniform vec2 u_translation;
uniform int u_stroke_count;
out vec4 v_line;
out vec2 v_texcoord;
out vec3 v_color;
flat out uint v_stroke_id;
flat out float v_thickness;
void main() {
@@ -51,9 +103,8 @@ const sdf_vs_src = `#version 300 es
v_line = a_line;
v_color = a_color;
v_thickness = a_pos.z;
v_stroke_id = a_stroke_id;
gl_Position = vec4(screen02 - 1.0, 0.0, 1);
gl_Position = vec4(screen02 - 1.0, (float(a_stroke_id) / float(u_stroke_count)) * 2.0 - 1.0, 1);
}
`;
@@ -79,41 +130,16 @@ const sdf_fs_src = `#version 300 es
float dist = length(pa - ba * h) - v_thickness / 2.0;
float fade = 0.5 * length(fwidth(v_texcoord));
float alpha = 1.0 - smoothstep(-fade, fade, dist);
// float alpha = 1.0 - step(0.0, dist);
float alpha = 1.0 - smoothstep(0.0, fade, dist);
if (u_debug_mode == 1) {
FragColor = vec4(1.0, 0.0, 0.0, 0.1);
} else {
FragColor = vec4(v_color * alpha, alpha);
// FragColor = vec4(v_color * alpha, 0.1 + alpha);
}
}
`;
const tiles_fs_src = `#version 300 es
precision highp float;
uniform int u_debug_mode;
in vec4 v_line;
in vec2 v_texcoord;
in vec3 v_color;
flat in uint v_stroke_id;
flat in float v_thickness;
//out uint TileId;
out vec4 FragColor;
void main() {
//TileId = uint(1);
vec3 color = vec3(float(v_stroke_id * 3245u % 255u) / 255.0, float(v_stroke_id * 7343u % 255u) / 255.0, float(v_stroke_id * 5528u % 255u) / 255.0);
FragColor = vec4(color, 1);
}
`;
const tquad_vs_src = `#version 300 es
in vec2 a_pos;
in vec2 a_texcoord;
@@ -166,7 +192,13 @@ function init_webgl(state, context) {
gl.enable(gl.BLEND);
gl.blendFunc(gl.ONE, gl.ONE_MINUS_SRC_ALPHA);
gl.enable(gl.DEPTH_TEST);
gl.depthFunc(gl.GEQUAL);
context.gpu_timer_ext = gl.getExtension('EXT_disjoint_timer_query_webgl2');
if (context.gpu_timer_ext === null) {
context.gpu_timer_ext = gl.getExtension('EXT_disjoint_timer_query');
}
const quad_vs = create_shader(gl, gl.VERTEX_SHADER, tquad_vs_src);
const quad_fs = create_shader(gl, gl.FRAGMENT_SHADER, tquad_fs_src);
@@ -174,26 +206,27 @@ function init_webgl(state, context) {
const sdf_vs = create_shader(gl, gl.VERTEX_SHADER, sdf_vs_src);
const sdf_fs = create_shader(gl, gl.FRAGMENT_SHADER, sdf_fs_src);
const tiles_fs = create_shader(gl, gl.FRAGMENT_SHADER, tiles_fs_src);
const opaque_vs = create_shader(gl, gl.VERTEX_SHADER, opaque_vs_src);
const nop_fs = create_shader(gl, gl.FRAGMENT_SHADER, nop_fs_src);
context.programs['image'] = create_program(gl, quad_vs, quad_fs);
context.programs['sdf'] = {
'opaque': create_program(gl, opaque_vs, nop_fs),
'main': create_program(gl, sdf_vs, sdf_fs),
'tiles': create_program(gl, sdf_vs, tiles_fs), // same vertex shader
};
context.locations['image'] = {
'a_pos': gl.getAttribLocation(context.programs['image'], 'a_pos'),
'a_texcoord': gl.getAttribLocation(context.programs['image'], 'a_texcoord'),
'u_res': gl.getUniformLocation(context.programs['image'], 'u_res'),
'u_scale': gl.getUniformLocation(context.programs['image'], 'u_scale'),
'u_translation': gl.getUniformLocation(context.programs['image'], 'u_translation'),
'u_outline': gl.getUniformLocation(context.programs['image'], 'u_outline'),
'u_texture': gl.getUniformLocation(context.programs['image'], 'u_texture'),
};
context.locations['sdf'] = {
'opaque': {
'a_pos': gl.getAttribLocation(context.programs['sdf'].opaque, 'a_pos'),
'a_line': gl.getAttribLocation(context.programs['sdf'].opaque, 'a_line'),
'a_stroke_id': gl.getAttribLocation(context.programs['sdf'].opaque, 'a_stroke_id'),
'u_res': gl.getUniformLocation(context.programs['sdf'].opaque, 'u_res'),
'u_scale': gl.getUniformLocation(context.programs['sdf'].opaque, 'u_scale'),
'u_translation': gl.getUniformLocation(context.programs['sdf'].opaque, 'u_translation'),
'u_stroke_count': gl.getUniformLocation(context.programs['sdf'].opaque, 'u_stroke_count'),
},
'main': {
'a_pos': gl.getAttribLocation(context.programs['sdf'].main, 'a_pos'),
'a_line': gl.getAttribLocation(context.programs['sdf'].main, 'a_line'),
@@ -203,31 +236,12 @@ function init_webgl(state, context) {
'u_res': gl.getUniformLocation(context.programs['sdf'].main, 'u_res'),
'u_scale': gl.getUniformLocation(context.programs['sdf'].main, 'u_scale'),
'u_translation': gl.getUniformLocation(context.programs['sdf'].main, 'u_translation'),
'u_texture_points': gl.getUniformLocation(context.programs['sdf'].main, 'u_texture_points'),
'u_texture_indices': gl.getUniformLocation(context.programs['sdf'].main, 'u_texture_indices'),
'u_debug_mode': gl.getUniformLocation(context.programs['sdf'].main, 'u_debug_mode'),
},
'tiles': {
'a_pos': gl.getAttribLocation(context.programs['sdf'].tiles, 'a_pos'),
'a_line': gl.getAttribLocation(context.programs['sdf'].tiles, 'a_line'),
'a_color': gl.getAttribLocation(context.programs['sdf'].tiles, 'a_color'),
'a_stroke_id': gl.getAttribLocation(context.programs['sdf'].tiles, 'a_stroke_id'),
'u_res': gl.getUniformLocation(context.programs['sdf'].tiles, 'u_res'),
'u_scale': gl.getUniformLocation(context.programs['sdf'].tiles, 'u_scale'),
'u_translation': gl.getUniformLocation(context.programs['sdf'].tiles, 'u_translation'),
'u_texture_points': gl.getUniformLocation(context.programs['sdf'].tiles, 'u_texture_points'),
'u_texture_indices': gl.getUniformLocation(context.programs['sdf'].tiles, 'u_texture_indices'),
'u_debug_mode': gl.getUniformLocation(context.programs['sdf'].tiles, 'u_debug_mode'),
'u_tile_size': gl.getUniformLocation(context.programs['sdf'].main, 'u_tile_size'),
'u_stroke_count': gl.getUniformLocation(context.programs['sdf'].main, 'u_stroke_count'),
}
};
context.buffers['image'] = {
'b_pos': context.gl.createBuffer(),
'b_texcoord': context.gl.createBuffer(),
};
context.buffers['sdf'] = {
'b_packed_static': gl.createBuffer(),
'b_packed_dynamic': gl.createBuffer(),
@@ -235,27 +249,6 @@ function init_webgl(state, context) {
'b_packed_dynamic_index': gl.createBuffer(),
};
context.textures['sdf'] = {
'tiles': gl.createTexture(),
};
context.framebuffers['sdf'] = {
'tiles': gl.createFramebuffer(),
};
gl.bindTexture(gl.TEXTURE_2D, context.textures['sdf'].tiles);
//gl.texImage2D(gl.TEXTURE_2D, 0, gl.R32UI, context.canvas.width, context.canvas.height, 0, gl.RED_INTEGER, gl.UNSIGNED_INT, null);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, Math.ceil(context.canvas.width / config.tile_size), Math.ceil(context.canvas.height / config.tile_size), 0, gl.RGBA, gl.UNSIGNED_BYTE, null);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
gl.bindFramebuffer(gl.FRAMEBUFFER, context.framebuffers['sdf'].tiles);
gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, context.textures['sdf'].tiles, 0);
// gl.bindFramebuffer(gl.FRAMEBUFFER, null);
context.textures['image'] = {};
const resize_canvas = (entries) => {
// https://www.khronos.org/webgl/wiki/HandlingHighDPI
const entry = entries[0];
@@ -275,9 +268,6 @@ function init_webgl(state, context) {
context.canvas.width = width;
context.canvas.height = height;
gl.bindTexture(gl.TEXTURE_2D, context.textures['sdf'].tiles);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, Math.ceil(context.canvas.width / config.tile_size), Math.ceil(context.canvas.height / config.tile_size), 0, gl.RGBA, gl.UNSIGNED_BYTE, null);
schedule_draw(state, context);
}
@@ -287,7 +277,7 @@ function create_shader(gl, type, source) {
function create_shader(gl, type, source) {
const shader = gl.createShader(type);
gl.shaderSource(shader, source);
gl.compileShader(shader);

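A closing note on how the passes compose: init_webgl keeps gl.blendFunc(gl.ONE, gl.ONE_MINUS_SRC_ALPHA), i.e. premultiplied alpha, and the SDF fragment shader outputs vec4(v_color * alpha, alpha). Because the prepass only reserves depth for the shrunken opaque core of each stroke, the anti-aliased fringe can still blend over whatever lies behind it. A purely illustrative CPU-side model of one blend step under that setup:

// result = src.rgb * 1 + dst.rgb * (1 - src.a), matching ONE / ONE_MINUS_SRC_ALPHA.
function blend_premultiplied(src_rgb, src_a, dst_rgb) {
    return dst_rgb.map((d, i) => src_rgb[i] + (1 - src_a) * d);
}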