This library is fantastic — being able to use ISF shaders in JavaScript is great, and I'm really happy to have found it. I'm trying to figure out a way to combine shaders on top of a single webcam / video texture. I tried something like the code below, but it doesn't seem to want to render.
Any ideas? I would love to fork and contribute if I could get this working.
const ISFRenderer = require('../dist/build.js').Renderer;
// Webcam <video> element; created and wired to a MediaStream inside init().
let video = null;
init();
/**
 * Fetch a text file from the examples/ directory and pass its contents
 * to `callback`.
 *
 * @param {string} src - Path relative to the examples/ directory.
 * @param {(body: string) => void} callback - Receives the file text.
 * @throws {Error} If the HTTP response is not OK (e.g. 404), instead of
 *   silently handing an error page to the callback as shader source.
 */
async function loadFile(src, callback) {
  const response = await fetch('examples/' + src);
  if (!response.ok) {
    throw new Error(`Failed to load examples/${src}: ${response.status}`);
  }
  const body = await response.text();
  callback(body);
}
/**
 * Boot the demo: create the webcam <video>, build a half-window canvas with
 * a WebGL2 context, and start two chained ISF render passes.
 */
function init() {
  console.log('successful load!');

  // --- webcam video ---
  video = document.createElement('video');
  video.autoplay = true;
  // Muted + playsInline keeps mobile/autoplay policies from blocking playback.
  video.muted = true;
  video.playsInline = true;
  navigator.mediaDevices.getUserMedia({ video: true })
    .then((stream) => {
      video.srcObject = stream;
      // Some browsers require an explicit play() even with autoplay set.
      return video.play();
    })
    .catch((err) => {
      // Without this, a denied camera permission is an unhandled rejection.
      console.error('getUserMedia failed:', err);
    });

  // --- canvas ---
  const container = document.createElement('div');
  const canvas = document.createElement('canvas');
  container.style.position = 'relative';
  container.appendChild(canvas);
  canvas.width = window.innerWidth / 2;
  canvas.height = window.innerHeight / 2;
  document.body.appendChild(container);
  const gl = canvas.getContext('webgl2');

  // --- ISF passes ---
  // Pass 1: webcam -> dot shader. NOTE(review): toggleDraw is false, so this
  // pass never calls renderer.draw() — nothing is ever rendered for pass 2 to
  // sample. It likely needs its own draw target (or `true` here).
  createRendering('dot.fs', 'dot.vs', gl, canvas, video, false); // fsFile, vsFile, ctx, texture, draw boolean
  // NOTE(review): THREE is not imported in this file; this assumes a global
  // three.js build. An ISF renderer may not accept a THREE.Texture as an
  // input image — verify against the renderer's setValue documentation.
  const tex = new THREE.Texture(canvas);
  tex.needsUpdate = true;
  tex.flipX = false;
  tex.flipY = false;
  // Pass 2: canvas texture -> feedback shader, drawn to the same canvas.
  createRendering('feedback.fs', undefined, gl, canvas, tex, true);
}
/**
 * Load ISF fragment (and optional vertex) shader sources, build a renderer,
 * and start a requestAnimationFrame loop that feeds `_input` into the
 * shader's `inputImage` uniform.
 *
 * @param {string} fsFilename - Fragment shader path under examples/.
 * @param {string|undefined} vsFilename - Optional vertex shader path.
 * @param {WebGL2RenderingContext} ctx - GL context the renderer uses.
 * @param {HTMLCanvasElement} _canvas - Canvas passed to renderer.draw().
 * @param {*} _input - Value bound to the `inputImage` uniform each frame.
 * @param {boolean} toggleDraw - When true, renderer.draw() runs each frame.
 */
function createRendering(fsFilename, vsFilename, ctx, _canvas, _input, toggleDraw) {
  let fsSrc;
  const fsLoaded = (response) => {
    fsSrc = response;
    if (vsFilename) {
      loadFile(vsFilename, vsLoaded);
    } else {
      vsLoaded();
    }
  };
  const vsLoaded = (vsSrc) => {
    const renderer = new ISFRenderer(ctx);
    renderer.loadSource(fsSrc, vsSrc);
    const animate = () => {
      requestAnimationFrame(animate);
      // Some shaders (e.g. tapestryfract) have no inputImage uniform, so
      // guard before setValue — the original commented-out check suggests
      // exactly this failure mode.
      if (renderer.uniforms && 'inputImage' in renderer.uniforms) {
        renderer.setValue('inputImage', _input);
      }
      if (toggleDraw) {
        renderer.draw(_canvas);
      }
    };
    requestAnimationFrame(animate);
  };
  loadFile(fsFilename, fsLoaded);
}