diff --git a/packages/client/public/loader_decoders/corto.js b/packages/client/public/loader_decoders/corto.js new file mode 100644 index 0000000000..5db1917116 --- /dev/null +++ b/packages/client/public/loader_decoders/corto.js @@ -0,0 +1,1089 @@ +/* +Corto +Copyright (c) 2017-2020, Visual Computing Lab, ISTI - CNR +All rights reserved. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. +*/ + +BitStream = function(array) { + var t = this; + t.a = array; + t.current = array[0]; + t.position = 0; //position in the buffer + t.pending = 32; //bits still to read +}; + +BitStream.prototype = { + read: function(bits) { + var t = this; + if(bits > t.pending) { + t.pending = bits - t.pending; + var result = (t.current << t.pending)>>>0; //looks the same. + t.pending = 32 - t.pending; + + t.current = t.a[++t.position]; + result |= (t.current >>> t.pending); + t.current = (t.current & ((1<>>0; //slighting faster than mask. + return result; + } else { //splitting result in branch seems faster. 
+ t.pending -= bits; + var result = (t.current >>> t.pending); + t.current = (t.current & ((1<>>0; //slighting faster than mask, + return result; + } + } +}; + +Stream = function(buffer, byteOffset, byteLength) { + var t = this; + t.data = buffer; + t.buffer = new Uint8Array(buffer); + t.pos = byteOffset?byteOffset:0; + t.view = new DataView(buffer); +}; + +Stream.prototype = { + logs: new Uint8Array(16768), + + readChar: function() { + var c = this.buffer[this.pos++]; + if(c > 127) c -= 256; + return c; + }, + + readUChar: function() { + return this.buffer[this.pos++]; + }, + + readShort: function() { + this.pos += 2; + return this.view.getInt16(this.pos-2, true); + }, + + readFloat: function() { + this.pos += 4; + return this.view.getFloat32(this.pos-4, true); + }, + + readInt: function() { + this.pos += 4; + return this.view.getInt32(this.pos-4, true); + }, + + readArray: function(n) { + var a = this.buffer.subarray(this.pos, this.pos+n); + this.pos += n; + return a; + }, + + readString: function() { + var n = this.readShort(); + var s = String.fromCharCode.apply(null, this.readArray(n-1)); + this.pos++; //null terminator of string. + return s; + }, + + readBitStream:function() { + var n = this.readInt(); + var pad = this.pos & 0x3; + if(pad != 0) + this.pos += 4 - pad; + var b = new BitStream(new Uint32Array(this.data, this.pos, n)); + this.pos += n*4; + return b; + }, + + //make decodearray2,3 later //TODO faster to create values here or passing them? 
+ decodeArray: function(N, values) { + var t = this; + var bitstream = t.readBitStream(); + + var tunstall = new Tunstall; + while(t.logs.length < values.length) + t.logs = new Uint8Array(values.length); + + tunstall.decompress(this, t.logs); + + for(var i = 0; i < t.logs.readed; i++) { + var diff = t.logs[i]; + if(diff == 0) { + for(var c = 0; c < N; c++) + values[i*N + c] = 0; + continue; + } + var max = (1<>>1; + for(var c = 0; c < N; c++) + values[i*N + c] = bitstream.read(diff) - max; + } + return t.logs.readed; + }, + + //assumes values alread allocated + decodeValues: function(N, values) { + var t = this; + var bitstream = t.readBitStream(); + var tunstall = new Tunstall; + var size = values.length/N; + while(t.logs.length < size) + t.logs = new Uint8Array(size); + + for(var c = 0; c < N; c++) { + tunstall.decompress(this, t.logs); + + for(var i = 0; i < t.logs.readed; i++) { + var diff = t.logs[i]; + if(diff == 0) { + values[i*N + c] = 0; + continue; + } + + var val = bitstream.read(diff); + var middle = (1<<(diff-1))>>>0; + if(val < middle) + val = -val -middle; + values[i*N + c] = val; + } + } + return t.logs.readed; + }, + + //assumes values alread allocated + decodeDiffs: function(values) { + var t = this; + var bitstream = t.readBitStream(); + var tunstall = new Tunstall; + var size = values.length; + while(t.logs.length < size) + t.logs = new Uint8Array(size); + + tunstall.decompress(this, t.logs); + + for(var i = 0; i < t.logs.readed; i++) { + var diff = t.logs[i]; + if(diff == 0) { + values[i] = 0; + continue; + } + var max = (1<>>1; + values[i] = bitstream.read(diff) - max; + } + return t.logs.readed; + }, + + //assumes values alread allocated + decodeIndices: function(values) { + var t = this; + var bitstream = t.readBitStream(); + + var tunstall = new Tunstall; + var size = values.length; + while(t.logs.length < size) + t.logs = new Uint8Array(size); + + tunstall.decompress(this, t.logs); + + for(var i = 0; i < t.logs.readed; i++) { + var ret = 
t.logs[i]; + if(ret == 0) { + values[i] = 0; + continue; + } + values[i] = (1< 100000000) throw("TOO LARGE!"); + if(!data) + data = new Uint8Array(size); + if(data.length < size) + throw "Array for results too small"; + data.readed = size; + + var compressed_size = stream.readInt(); + if(size > 100000000) throw("TOO LARGE!"); + var compressed_data = stream.readArray(compressed_size); + if(size) + this._decompress(compressed_data, compressed_size, data, size); + return data; + }, + + createDecodingTables: function() { + var t = this; + var n_symbols = t.probs.length/2; + if(n_symbols <= 1) return; + + var queue = Tunstall.prototype.queue; + + var end = 0; //keep track of queue end + var pos = 0; //keep track of buffer first free space + var n_words = 0; + + //Here probs will range from 0 to 0xffff for better precision + for(var i = 0; i < n_symbols; i++) + queue[i] = t.probs[2*i+1] << 8; + + var max_repeat = Math.floor((t.dictionary_size - 1)/(n_symbols - 1)); + var repeat = 2; + var p0 = queue[0]; + var p1 = queue[1]; + var prob = (p0*p0)>>>16; + while(prob > p1 && repeat < max_repeat) { + prob = (prob*p0)>>> 16; + repeat++; + } + + if(repeat >= 16) { //Very low entropy results in large tables > 8K. 
+ t.table[pos++] = t.probs[0]; + for(var k = 1; k < n_symbols; k++) { + for(var i = 0; i < repeat-1; i++) + t.table[pos++] = t.probs[0]; + t.table[pos++] = t.probs[2*k]; + } + t.starts[0] = (repeat-1)*n_symbols; + for(var k = 1; k < n_symbols; k++) + t.starts[k] = k; + + for(var col = 0; col < repeat; col++) { + for(var row = 1; row < n_symbols; row++) { + var off = (row + col*n_symbols); + if(col > 0) + queue[off] = (prob * queue[row]) >> 16; + t.index[off] = row*repeat - col; + t.lengths[off] = col+1; + } + if(col == 0) + prob = p0; + else + prob = (prob*p0) >>> 16; + } + var first = ((repeat-1)*n_symbols); + queue[first] = prob; + t.index[first] = 0; + t.lengths[first] = repeat; + + n_words = 1 + repeat*(n_symbols - 1); + end = repeat*n_symbols; + } else { + //initialize adding all symbols to queues + for(var i = 0; i < n_symbols; i++) { + queue[i] = t.probs[i*2+1]<<8; + t.index[i] = i; + t.lengths[i] = 1; + + t.starts[i] = i; + t.table[i] = t.probs[i*2]; + } + pos = n_symbols; + end = n_symbols; + n_words = n_symbols; + } + + //at each step we grow all queues using the most probable sequence + while(n_words < t.dictionary_size) { + //find highest probability word + var best = 0; + var max_prob = 0; + for(var i = 0; i < n_symbols; i++) { + var p = queue[t.starts[i]]; //front of queue probability. 
+ if(p > max_prob) { + best = i; + max_prob = p; + } + } + var start = t.starts[best]; + var offset = t.index[start]; + var len = t.lengths[start]; + + for(var i = 0; i < n_symbols; i++) { + queue[end] = (queue[i] * queue[start])>>>16; + t.index[end] = pos; + t.lengths[end] = len + 1; + end++; + + for(var k = 0; k < len; k++) + t.table[pos + k] = t.table[offset + k]; //copy sequence of symbols + pos += len; + t.table[pos++] = t.probs[i*2]; //append symbol + if(i + n_words == t.dictionary_size - 1) + break; + } + if(i == n_symbols) + t.starts[best] += n_symbols; //move one column + n_words += n_symbols -1; + } + + var word = 0; + for(i = 0, row = 0; i < end; i ++, row++) { + if(row >= n_symbols) + row = 0; + if(t.starts[row] > i) continue; //skip deleted words + + t.index[word] = t.index[i]; + t.lengths[word] = t.lengths[i]; + word++; + } + }, + + _decompress: function(input, input_size, output, output_size) { + //TODO optimize using buffer arrays + var input_pos = 0; + var output_pos = 0; + if(this.probs.length == 2) { + var symbol = this.probs[0]; + for(var i = 0; i < output_size; i++) + output[i] = symbol; + return; + } + + while(input_pos < input_size-1) { + var symbol = input[input_pos++]; + var start = this.index[symbol]; + var end = start + this.lengths[symbol]; + for(var i = start; i < end; i++) + output[output_pos++] = this.table[i]; + } + + //last symbol might override so we check. 
+ var symbol = input[input_pos]; + var start = this.index[symbol]; + var end = start + output_size - output_pos; + var length = output_size - output_pos; + for(var i = start; i < end; i++) + output[output_pos++] = this.table[i]; + + return output; + } +}; + +function Attribute(name, q, components, type, strategy) { + var t = this; + t.name = name; + t.q = q; //float + t.components = components; //integer + t.type = type; + t.strategy = strategy; +}; + +Attribute.prototype = { + Type: { UINT32:0, INT32:1, UINT16:2, INT16:3, UINT8:4, INT8:5, FLOAT:6, DOUBLE:7 }, + + Strategy: { PARALLEL:1, CORRELATED:2 }, + + init: function(nvert, nface) { + var t = this; + var n = nvert*t.components; + t.values = new Int32Array(n); //local workspace + + //init output buffers + switch(t.type) { + case t.Type.UINT32: + case t.Type.INT32: t.values = t.buffer = new Int32Array(n); break; //no point replicating. + case t.Type.UINT16: + case t.Type.INT16: t.buffer = new Int16Array(n); break; + case t.Type.UINT8: t.buffer = new Uint8Array(n); break; + case t.Type.INT8: t.buffer = new Int8Array(n); break; + case t.Type.FLOAT: + case t.Type.DOUBLE: t.buffer = new Float32Array(n); break; + default: throw "Error if reading"; + } + }, + + decode: function(nvert, stream) { + var t = this; + if(t.strategy & t.Strategy.CORRELATED) //correlated + stream.decodeArray(t.components, t.values); + else + stream.decodeValues(t.components, t.values); + }, + + deltaDecode: function(nvert, context) { + var t = this; + var values = t.values; + var N = t.components; + + if(t.strategy & t.Strategy.PARALLEL) { //parallel + var n = context.length/3; + for(var i = 1; i < n; i++) { + for(var c = 0; c < N; c++) { + values[i*N + c] += values[context[i*3]*N + c] + values[context[i*3+1]*N + c] - values[context[i*3+2]*N + c]; + } + } + } else if(context) { + var n = context.length/3; + for(var i = 1; i < n; i++) + for(var c = 0; c < N; c++) + values[i*N + c] += values[context[i*3]*N + c]; + } else { + for(var i = N; i < 
nvert*N; i++) + values[i] += values[i - N]; + } + }, + + postDelta: function() {}, + + dequantize: function(nvert) { + var t= this; + var n = t.components*nvert; + switch(t.type) { + case t.Type.UINT32: + case t.Type.INT32: break; + case t.Type.UINT16: + case t.Type.INT16: + case t.Type.UINT8: + case t.Type.INT8: + for(var i = 0; i < n; i++) + t.buffer[i] = t.values[i]*t.q; + break; + case t.Type.FLOAT: + case t.Type.DOUBLE: + for(var i = 0; i < n; i++) + t.buffer[i] = t.values[i]*t.q; + break; + } + } +}; + +/* COLOR ATTRIBUTE */ + +function ColorAttr(name, q, components, type, strategy) { + Attribute.call(this, name, q, components, type, strategy); + this.qc = []; + this.outcomponents = 3; +}; + +ColorAttr.prototype = Object.create(Attribute.prototype); + +ColorAttr.prototype.decode = function(nvert, stream) { + for(var c = 0; c < 4; c++) + this.qc[c] = stream.readUChar(); + Attribute.prototype.decode.call(this, nvert, stream); +}; + +ColorAttr.prototype.dequantize = function(nvert) { + var t = this; + + for(var i = 0; i < nvert; i++) { + var offset = i*4; + var rgboff = i*t.outcomponents; + + var e0 = t.values[offset + 0]; + var e1 = t.values[offset + 1]; + var e2 = t.values[offset + 2]; + + t.buffer[rgboff + 0] = ((e2 + e0)* t.qc[0])&0xff; + t.buffer[rgboff + 1] = e0* t.qc[1]; + t.buffer[rgboff + 2] = ((e1 + e0)* t.qc[2])&0xff; + t.buffer[offset + 3] = t.values[offset + 3] * t.qc[3]; + } +}; + +/* NORMAL ATTRIBUTE */ + +function NormalAttr(name, q, components, type, strategy) { + Attribute.call(this, name, q, components, type, strategy); +}; + +NormalAttr.prototype = Object.create(Attribute.prototype); + +NormalAttr.prototype.Prediction = { DIFF: 0, ESTIMATED: 1, BORDER: 2 }; + +NormalAttr.prototype.init = function(nvert, nface) { + var t = this; + var n = nvert*t.components; + t.values = new Int32Array(2*nvert); //local workspace + + //init output buffers + switch(t.type) { + case t.Type.INT16: t.buffer = new Int16Array(n); break; + case t.Type.FLOAT: + case 
t.Type.DOUBLE: t.buffer = new Float32Array(n); break; + default: throw "Error if reading"; + } +}; + +NormalAttr.prototype.decode = function(nvert, stream) { + var t = this; + t.prediction = stream.readUChar(); + + stream.decodeArray(2, t.values); +}; + +NormalAttr.prototype.deltaDecode = function(nvert, context) { + var t = this; + if(t.prediction != t.Prediction.DIFF) + return; + + if(context) { + for(var i = 1; i < nvert; i++) { + for(var c = 0; c < 2; c++) { + var d = t.values[i*2 + c]; + t.values[i*2 + c] += t.values[context[i*3]*2 + c]; + } + } + } else { //point clouds assuming values are already sorted by proximity. + for(var i = 2; i < nvert*2; i++) { + var d = t.values[i]; + t.values[i] += t.values[i-2]; + } + } +}; + +NormalAttr.prototype.postDelta = function(nvert, nface, attrs, index) { + var t = this; + //for border and estimate we need the position already deltadecoded but before dequantized + if(t.prediction == t.Prediction.DIFF) + return; + + if(!attrs.position) + throw "No position attribute found. Use DIFF normal strategy instead."; + + var coord = attrs.position; + + t.estimated = new Float32Array(nvert*3); + t.estimateNormals(nvert, coord.values, nface, index.faces); + + if(t.prediction == t.Prediction.BORDER) { + t.boundary = new Uint32Array(nvert); + t.markBoundary(nvert, nface, index.faces, t.boundary); + } + + t.computeNormals(nvert); +}; + +NormalAttr.prototype.dequantize = function(nvert) { + var t = this; + if(t.prediction != t.Prediction.DIFF) + return; + + for(var i = 0; i < nvert; i++) + t.toSphere(i, t.values, i, t.buffer, t.q) +}; + +NormalAttr.prototype.computeNormals = function(nvert) { + var t = this; + var norm = t.estimated; + + if(t.prediction == t.Prediction.ESTIMATED) { + for(var i = 0; i < nvert; i++) { + t.toOcta(i, norm, i, t.values, t.q); + t.toSphere(i, t.values, i, t.buffer, t.q); + } + + } else { //BORDER + var count = 0; //here for the border. 
+ for(var i = 0, k = 0; i < nvert; i++, k+=3) { + if(t.boundary[i] != 0) { + t.toOcta(i, norm, count, t.values, t.q); + t.toSphere(count, t.values, i, t.buffer, t.q); + count++; + + } else { //no correction + var len = 1/Math.sqrt(norm[k]*norm[k] + norm[k+1]*norm[k+1] + norm[k+2]*norm[k+2]); + if(t.type == t.Type.INT16) + len *= 32767; + + t.buffer[k] = norm[k]*len; + t.buffer[k+1] = norm[k+1]*len; + t.buffer[k+2] = norm[k+2]*len; + } + } + } +}; + +NormalAttr.prototype.markBoundary = function( nvert, nface, index, boundary) { + for(var f = 0; f < nface*3; f += 3) { + boundary[index[f+0]] ^= index[f+1] ^ index[f+2]; + boundary[index[f+1]] ^= index[f+2] ^ index[f+0]; + boundary[index[f+2]] ^= index[f+0] ^ index[f+1]; + } +}; + + +NormalAttr.prototype.estimateNormals = function(nvert, coords, nface, index) { + var t = this; + for(var f = 0; f < nface*3; f += 3) { + var a = 3*index[f + 0]; + var b = 3*index[f + 1]; + var c = 3*index[f + 2]; + + var ba0 = coords[b+0] - coords[a+0]; + var ba1 = coords[b+1] - coords[a+1]; + var ba2 = coords[b+2] - coords[a+2]; + + var ca0 = coords[c+0] - coords[a+0]; + var ca1 = coords[c+1] - coords[a+1]; + var ca2 = coords[c+2] - coords[a+2]; + + var n0 = ba1*ca2 - ba2*ca1; + var n1 = ba2*ca0 - ba0*ca2; + var n2 = ba0*ca1 - ba1*ca0; + + t.estimated[a + 0] += n0; + t.estimated[a + 1] += n1; + t.estimated[a + 2] += n2; + t.estimated[b + 0] += n0; + t.estimated[b + 1] += n1; + t.estimated[b + 2] += n2; + t.estimated[c + 0] += n0; + t.estimated[c + 1] += n1; + t.estimated[c + 2] += n2; + } +}; + +//taks input in ingress at i offset, adds out at c offset +NormalAttr.prototype.toSphere = function(i, input, o, out, unit) { + + var t = this; + var j = i*2; + var k = o*3; + var av0 = input[j] > 0? input[j]:-input[j]; + var av1 = input[j+1] > 0? input[j+1]:-input[j+1]; + out[k] = input[j]; + out[k+1] = input[j+1]; + out[k+2] = unit - av0 - av1; + if (out[k+2] < 0) { + out[k] = (input[j] > 0)? 
unit - av1 : av1 - unit; + out[k+1] = (input[j+1] > 0)? unit - av0: av0 - unit; + } + var len = 1/Math.sqrt(out[k]*out[k] + out[k+1]*out[k+1] + out[k+2]*out[k+2]); + if(t.type == t.Type.INT16) + len *= 32767; + + out[k] *= len; + out[k+1] *= len; + out[k+2] *= len; +}; + +NormalAttr.prototype.toOcta = function(i, input, o, output, unit) { + var k = o*2; + var j = i*3; //input + + var av0 = input[j] > 0? input[j]:-input[j]; + var av1 = input[j+1] > 0? input[j+1]:-input[j+1]; + var av2 = input[j+2] > 0? input[j+2]:-input[j+2]; + var len = av0 + av1 + av2; + var p0 = input[j]/len; + var p1 = input[j+1]/len; + + var ap0 = p0 > 0? p0: -p0; + var ap1 = p1 > 0? p1: -p1; + + if(input[j+2] < 0) { + p0 = (input[j] >= 0)? 1.0 - ap1 : ap1 - 1; + p1 = (input[j+1] >= 0)? 1.0 - ap0 : ap0 - 1; + } + output[k] += p0*unit; + output[k+1] += p1*unit; +/* + Point2f p(v[0], v[1]); + p /= (fabs(v[0]) + fabs(v[1]) + fabs(v[2])); + + if(v[2] < 0) { + p = Point2f(1.0f - fabs(p[1]), 1.0f - fabs(p[0])); + if(v[0] < 0) p[0] = -p[0]; + if(v[1] < 0) p[1] = -p[1]; + } + return Point2i(p[0]*unit, p[1]*unit); +*/ +}; + +/* INDEX ATTRIBUTE */ + +function IndexAttr(nvert, nface, type) { + var t = this; + if((!type && nface < (1<<16)) || type == 0) //uint16 + t.faces = new Uint16Array(nface*3); + else if(!type || type == 2) //uint32 + t.faces = new Uint32Array(nface*3); + else + throw "Unsupported type"; + + t.prediction = new Uint32Array(nvert*3); +}; + +IndexAttr.prototype = { + decode: function(stream) { + var t = this; + + var max_front = stream.readInt(); + t.front = new Int32Array(max_front*5); + + var tunstall = new Tunstall; + t.clers = tunstall.decompress(stream); + t.bitstream = stream.readBitStream(); + }, + + decodeGroups: function(stream) { + var t = this; + var n = stream.readInt(); + t.groups = new Array(n); + for(var i = 0; i < n; i++) { + var end = stream.readInt(); + var np = stream.readUChar(); + var g = { end: end, properties: {} }; + for(var k = 0; k < np; k++) { + var key = 
stream.readString(); + g.properties[key] = stream.readString(); + } + t.groups[i] = g; + } + } +}; + +onmessage = function(job) { + if(typeof(job.data) == "string") return; + + this.fetch(job.data.url, { + headers: { + range: `bytes=${job.data.byteStart}-${job.data.byteEnd}` + } + }).then(response => response.arrayBuffer()) + .then(buffer => { + var decoder = new CortoDecoder(buffer); + var model = decoder.decode(); + var transferables = []; + for(var i in model) { + if(model[i].buffer instanceof ArrayBuffer) + transferables.push(model[i].buffer); + } + + //pass back job + postMessage({ geometry: model, request: job.data.request}, undefined, transferables); + }).catch(e => { + console.error('CortoWorker Error: ', e, job.data) + postMessage({geometry: null, request: job.data.request}) + }) +}; + + +function CortoDecoder(data, byteOffset, byteLength) { + if(byteOffset & 0x3) + throw "Memory aligned on 4 bytes is mandatory"; + + var t = this; + var stream = t.stream = new Stream(data, byteOffset, byteLength); + + var magic = stream.readInt(); + if(magic != 2021286656) return; + + var version = stream.readInt(); + t.entropy = stream.readUChar(); + //exif + t.geometry = {}; + var n = stream.readInt(); + for(var i = 0; i < n; i++) { + var key = stream.readString(); + t.geometry[key] = stream.readString(); + } + + //attributes + var n = stream.readInt(); + + t.attributes = {}; + for(var i = 0; i < n; i++) { + var a = {}; + var name = stream.readString(); + var codec = stream.readInt(); + var q = stream.readFloat(); + var components = stream.readUChar(); //internal number of components + var type = stream.readUChar(); //default type (same as it was in input), can be overridden + var strategy = stream.readUChar(); + var attr; + switch(codec) { + case 2: attr = NormalAttr; break; + case 3: attr = ColorAttr; break; + case 1: //generic codec + default: attr = Attribute; break; + } + t.attributes[name] = new attr(name, q, components, type, strategy); + } + +//TODO move this 
vars into an array. + t.geometry.nvert = t.nvert = t.stream.readInt(); + t.geometry.nface = t.nface = t.stream.readInt(); +}; + +CortoDecoder.prototype = { + decode: function() { + var t = this; + + t.last = new Uint32Array(t.nvert*3); //for parallelogram prediction + t.last_count = 0; + + for(var i in t.attributes) + t.attributes[i].init(t.nvert, t.nface); + + if(t.nface == 0) + t.decodePointCloud(); + else + t.decodeMesh(); + + return t.geometry; + }, + + decodePointCloud: function() { + var t = this; + t.index = new IndexAttr(t.nvert, t.nface, 0); + t.index.decodeGroups(t.stream); + t.geometry.groups = t.index.groups; + for(var i in t.attributes) { + var a = t.attributes[i]; + a.decode(t.nvert, t.stream); + a.deltaDecode(t.nvert); + a.dequantize(t.nvert); + t.geometry[a.name] = a.buffer; + } + }, + + decodeMesh: function() { + var t = this; + t.index = new IndexAttr(t.nvert, t.nface); + t.index.decodeGroups(t.stream); + t.index.decode(t.stream); + + t.vertex_count = 0; + var start = 0; + t.cler = 0; + for(var p = 0; p < t.index.groups.length; p++) { + var end = t.index.groups[p].end; + this.decodeFaces(start *3, end *3); + start = end; + } + t.geometry['index'] = t.index.faces; + t.geometry.groups = t.index.groups; + for(var i in t.attributes) + t.attributes[i].decode(t.nvert, t.stream); + for(var i in t.attributes) + t.attributes[i].deltaDecode(t.nvert, t.index.prediction); + for(var i in t.attributes) + t.attributes[i].postDelta(t.nvert, t.nface, t.attributes, t.index); + for(var i in t.attributes) { + var a = t.attributes[i]; + a.dequantize(t.nvert); + t.geometry[a.name] = a.buffer; + } + }, + + /* + An edge is: uint16_t face, uint16_t side, uint32_t prev, next, bool deleted + I do not want to create millions of small objects, I will use aUint32Array. + Problem is how long, sqrt(nface) we will over blow using nface. 
+ */ + + ilog2: function(p) { + var k = 0; + while ( p>>=1 ) { ++k; } + return k; + }, + + decodeFaces: function(start, end) { + + var t = this; + var clers = t.index.clers; + var bitstream = t.index.bitstream; + + var front = t.index.front; + var front_count = 0; //count each integer so it's front_back*5 + + function addFront(_v0, _v1, _v2, _prev, _next) { + front[front_count] = _v0; + front[front_count+1] = _v1; + front[front_count+2] = _v2; + front[front_count+3] = _prev; + front[front_count+4] = _next; + front_count += 5; + } + + var faceorder = new Uint32Array((end - start)); + var order_front = 0; + var order_back = 0; + + var delayed = []; + + var splitbits = t.ilog2(t.nvert) + 1; + + var new_edge = -1; + + var prediction = t.index.prediction; + + while(start < end) { + + if(new_edge == -1 && order_front >= order_back && !delayed.length) { + + var last_index = t.vertex_count-1; + var vindex = []; + + var split = 0; + if(clers[t.cler++] == 6) { //split look ahead + split = bitstream.read(3); + } + + for(var k = 0; k < 3; k++) { + var v; + if(split & (1<= t.nvert || v0 >= t.nvert || opposite >= t.nvert) + throw "Topological error"; + t.index.faces[start] = v1; + t.index.faces[start+1] = v0; + t.index.faces[start+2] = opposite; + start += 3; + } + } +}; + diff --git a/packages/editor/src/components/properties/VolumetricNodeEditor.tsx b/packages/editor/src/components/properties/VolumetricNodeEditor.tsx index 80799c1200..fc71ede2a3 100755 --- a/packages/editor/src/components/properties/VolumetricNodeEditor.tsx +++ b/packages/editor/src/components/properties/VolumetricNodeEditor.tsx @@ -32,7 +32,6 @@ import { VolumetricComponent } from '@etherealengine/engine/src/scene/components import { PlayMode } from '@etherealengine/engine/src/scene/constants/PlayMode' import VideocamIcon from '@mui/icons-material/Videocam' - import { ItemTypes } from '../../constants/AssetTypes' import ArrayInputGroup from '../inputs/ArrayInputGroup' import BooleanInput from 
'../inputs/BooleanInput' @@ -41,7 +40,7 @@ import CompoundNumericInput from '../inputs/CompoundNumericInput' import InputGroup from '../inputs/InputGroup' import SelectInput from '../inputs/SelectInput' import NodeEditor from './NodeEditor' -import { EditorComponentType, commitProperties, commitProperty, updateProperty } from './Util' +import { EditorComponentType, commitProperty, updateProperty } from './Util' const PlayModeOptions = [ { @@ -74,9 +73,7 @@ export const VolumetricNodeEditor: EditorComponentType = (props) => { const volumetricComponent = useComponent(props.entity, VolumetricComponent) const toggle = () => { - commitProperties(VolumetricComponent, { - paused: !volumetricComponent.paused.value - }) + volumetricComponent.paused.set(!volumetricComponent.paused.value) } return ( @@ -93,13 +90,13 @@ export const VolumetricNodeEditor: EditorComponentType = (props) => { @@ -133,7 +130,7 @@ export const VolumetricNodeEditor: EditorComponentType = (props) => { /> {volumetricComponent.paths && volumetricComponent.paths.length > 0 && volumetricComponent.paths[0] && ( diff --git a/packages/engine/src/assets/functions/createGLTFLoader.ts b/packages/engine/src/assets/functions/createGLTFLoader.ts index 4cdbd6a682..a1602a57eb 100644 --- a/packages/engine/src/assets/functions/createGLTFLoader.ts +++ b/packages/engine/src/assets/functions/createGLTFLoader.ts @@ -69,6 +69,9 @@ export const createGLTFLoader = (keepMaterials = false) => { loader.register((parser) => new HubsComponentsExtension(parser)) loader.register((parser) => new VRMLoaderPlugin(parser, { helperRoot: new Group(), autoUpdateHumanBones: true })) loader.register((parser) => new CachedImageLoadExtension(parser)) + if (MeshoptDecoder.useWorkers) { + MeshoptDecoder.useWorkers(2) + } loader.setMeshoptDecoder(MeshoptDecoder) if (isClient) { diff --git a/packages/engine/src/assets/loaders/corto/CORTOLoader.d.ts b/packages/engine/src/assets/loaders/corto/CORTOLoader.d.ts new file mode 100644 index 
0000000000..3adf6f3b43 --- /dev/null +++ b/packages/engine/src/assets/loaders/corto/CORTOLoader.d.ts @@ -0,0 +1,41 @@ + +/* +CPAL-1.0 License + +The contents of this file are subject to the Common Public Attribution License +Version 1.0. (the "License"); you may not use this file except in compliance +with the License. You may obtain a copy of the License at +https://github.com/EtherealEngine/etherealengine/blob/dev/LICENSE. +The License is based on the Mozilla Public License Version 1.1, but Sections 14 +and 15 have been added to cover use of software over a computer network and +provide for limited attribution for the Original Developer. In addition, +Exhibit A has been modified to be consistent with Exhibit B. + +Software distributed under the License is distributed on an "AS IS" basis, +WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License for the +specific language governing rights and limitations under the License. + +The Original Code is Ethereal Engine. + +The Original Developer is the Initial Developer. The Initial Developer of the +Original Code is the Ethereal Engine team. + +All portions of the code written by the Ethereal Engine team are Copyright © 2021-2023 +Ethereal Engine. All Rights Reserved. +*/ + + +import { BufferGeometry, Loader, LoadingManager } from 'three' + +export class CORTOLoader { + constructor() + setDecoderPath(path: string): CORTOLoader + load( + url: string, + byteStart: number, + byteEnd: number, + onLoad: (geometry: BufferGeometry | null) => void, + ): void + preload(): Promise + dispose(): CORTOLoader +} diff --git a/packages/engine/src/assets/loaders/corto/CORTOLoader.js b/packages/engine/src/assets/loaders/corto/CORTOLoader.js new file mode 100644 index 0000000000..7bcfdb5236 --- /dev/null +++ b/packages/engine/src/assets/loaders/corto/CORTOLoader.js @@ -0,0 +1,130 @@ + +/* +CPAL-1.0 License + +The contents of this file are subject to the Common Public Attribution License +Version 1.0. 
(the "License"); you may not use this file except in compliance +with the License. You may obtain a copy of the License at +https://github.com/EtherealEngine/etherealengine/blob/dev/LICENSE. +The License is based on the Mozilla Public License Version 1.1, but Sections 14 +and 15 have been added to cover use of software over a computer network and +provide for limited attribution for the Original Developer. In addition, +Exhibit A has been modified to be consistent with Exhibit B. + +Software distributed under the License is distributed on an "AS IS" basis, +WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License for the +specific language governing rights and limitations under the License. + +The Original Code is Ethereal Engine. + +The Original Developer is the Initial Developer. The Initial Developer of the +Original Code is the Ethereal Engine team. + +All portions of the code written by the Ethereal Engine team are Copyright © 2021-2023 +Ethereal Engine. All Rights Reserved. 
+*/ + + +import { BufferAttribute, BufferGeometry, FileLoader } from 'three' + +class CORTOLoader { + constructor() { + this.decoderPath = '' + this.decoderPending = null + + this.worker = null + this.lastRequest = 0 + this.callbacks = {} + + this.defaultAttributes = [ + { name: 'position', numComponents: '3' }, + { name: 'normal', numComponents: '3' }, + { name: 'color', numComponents: '4' }, + { name: 'uv', numComponents: '2' } + ] + } + + setDecoderPath(path) { + this.decoderPath = path + return this + } + + load(url, byteStart, byteEnd, onLoad) { + if (!this.decoderPending) { + this.preload() + } + + this.decoderPending.then(() => { + const request = this.lastRequest++ + this.worker.postMessage({ + request: request, + url: url, + byteStart: byteStart, + byteEnd: byteEnd + }) + this.callbacks[request] = { onLoad: onLoad } + }) + } + + preload() { + if (this.decoderPending) return this.decoderPending + + let that = this + let callbacks = this.callbacks + let lib = 'corto.js' + + this.decoderPending = this._loadLibrary(lib, 'text').then((text) => { + text = URL.createObjectURL(new Blob([text])) + this.worker = new Worker(text) + + this.worker.onmessage = function (e) { + var message = e.data + if (!callbacks[message.request]) return + + const callback = callbacks[message.request] + const geometry = that._createGeometry(message.geometry) + callback.onLoad(geometry) + delete callbacks[message.request] + } + }) + + return this.decoderPending + } + + dispose() { + if (this.worker) { + this.worker.terminate() + this.worker = null + } + return this + } + + _createGeometry(geometry) { + if (!geometry) { + return null + } + var bufferGeometry = new BufferGeometry() + + if (geometry.index) bufferGeometry.setIndex(new BufferAttribute(geometry.index, 1)) + + for (let i = 0; i < this.defaultAttributes.length; i++) { + let attr = this.defaultAttributes[i] + if (!geometry[attr.name]) continue + let buffer = geometry[attr.name] + bufferGeometry.setAttribute(attr.name, new 
BufferAttribute(buffer, attr.numComponents)) + } + return bufferGeometry + } + + _loadLibrary(url, responseType) { + var loader = new FileLoader(this.manager) + loader.setPath(this.decoderPath) + loader.setResponseType(responseType) + + return new Promise((resolve, reject) => { + loader.load(url, resolve, undefined, reject) + }) + } +} + +export { CORTOLoader } diff --git a/packages/engine/src/assets/loaders/gltf/KTX2Loader.d.ts b/packages/engine/src/assets/loaders/gltf/KTX2Loader.d.ts index b4ceb31f1b..b2cf7d1c89 100644 --- a/packages/engine/src/assets/loaders/gltf/KTX2Loader.d.ts +++ b/packages/engine/src/assets/loaders/gltf/KTX2Loader.d.ts @@ -44,7 +44,7 @@ export class KTX2Loader extends CompressedTextureLoader { load( url: string, onLoad: (texture: CompressedTexture) => void, - onProgress: (requrest: ProgressEvent) => void | undefined, - onError: ((event: ErrorEvent) => void) | undefined + onProgress?: (requrest: ProgressEvent) => void | undefined, + onError?: ((event: ErrorEvent) => void) | undefined ): CompressedTexture } \ No newline at end of file diff --git a/packages/engine/src/assets/loaders/gltf/meshopt_decoder.module.js b/packages/engine/src/assets/loaders/gltf/meshopt_decoder.module.js index 27f6eab7cc..be6375455c 100644 --- a/packages/engine/src/assets/loaders/gltf/meshopt_decoder.module.js +++ b/packages/engine/src/assets/loaders/gltf/meshopt_decoder.module.js @@ -1,126 +1,206 @@ // This file is part of meshoptimizer library and is distributed under the terms of MIT License. 
-// Copyright (C) 2016-2021, by Arseny Kapoulkine (arseny.kapoulkine@gmail.com) -var MeshoptDecoder = (function () { - 'use strict' - - // Built with clang version 13.0.0 (https://github.com/llvm/llvm-project fd1d8c2f04dde23bee0fb3a7d069a9b1046da979) - // Built from meshoptimizer 0.17 - var wasm_base = - 'B9h79tEBBBE8fV9gBB9gVUUUUUEU9gIUUUB9gEUEU9gIUUUEUIKQBEEEDDDILLVIEBEOWEUEC+Q/IEKR/LEdO9tw9t9vv95DBh9f9f939h79t9f9j9h229f9jT9vv7BB8a9tw79o9v9wT9f9kw9j9v9kw9WwvTw949C919m9mwvBEy9tw79o9v9wT9f9kw9j9v9kw69u9kw949C919m9mwvBDe9tw79o9v9wT9f9kw9j9v9kw69u9kw949Twg91w9u9jwBIl9tw79o9v9wT9f9kw9j9v9kws9p2Twv9P9jTBLk9tw79o9v9wT9f9kw9j9v9kws9p2Twv9R919hTBVl9tw79o9v9wT9f9kw9j9v9kws9p2Twvt949wBOL79iv9rBRQ+f8yQDBK/tSEhU8jJJJJBCJ/EB9rGV8kJJJJBC9+HODNADCEFAL0MBCUHOAIrBBC+gE9HMBAVAIALFGRAD9rADZ1JJJBHWCJ/ABAD9uC/wfBgGOCJDAOCJD6eHdAICEFHQDNDNADtMBCBHKINAKAE9PMDAdAEAK9rAKAdFAE6eGXCSFGOCL4CIFCD4HMDNDNDNDNAOC9wgGptMBCBHSCEHZAWCJDFHhAQHoINARAo9rAM6MLAoAMFHQCBHICBHLINARAQ9rCk6MIAWCJ/CBFALFHODNDNDNDNDNAoALCO4FrBBAICOg4CIgpLBEDIBKAO9CB83IBAOCWF9CB83IBXIKAOAQrBLAQrBBGaCO4GcAcCIsGce86BBAOCEFAQCLFAcFGcrBBAaCL4CIgGxAxCIsGxe86BBAOCDFAcAxFGcrBBAaCD4CIgGxAxCIsGxe86BBAOCIFAcAxFGcrBBAaCIgGaAaCIsGae86BBAOCLFAcAaFGcrBBAQrBEGaCO4GxAxCIsGxe86BBAOCVFAcAxFGcrBBAaCL4CIgGxAxCIsGxe86BBAOCOFAcAxFGcrBBAaCD4CIgGxAxCIsGxe86BBAOCRFAcAxFGcrBBAaCIgGaAaCIsGae86BBAOCWFAcAaFGcrBBAQrBDGaCO4GxAxCIsGxe86BBAOCdFAcAxFGcrBBAaCL4CIgGxAxCIsGxe86BBAOCQFAcAxFGcrBBAaCD4CIgGxAxCIsGxe86BBAOCKFAcAxFGcrBBAaCIgGaAaCIsGae86BBAOCXFAcAaFGarBBAQrBIGQCO4GcAcCIsGce86BBAOCMFAaAcFGarBBAQCL4CIgGcAcCIsGce86BBAOCpFAaAcFGarBBAQCD4CIgGcAcCIsGce86BBAOCSFAaAcFGOrBBAQCIgGQAQCIsGQe86BBAOAQFHQXDKAOAQrBWAQrBBGaCL4GcAcCSsGce86BBAOCEFAQCWFAcFGcrBBAaCSgGaAaCSsGae86BBAOCDFAcAaFGarBBAQrBEGcCL4GxAxCSsGxe86BBAOCIFAaAxFGarBBAcCSgGcAcCSsGce86BBAOCLFAaAcFGarBBAQrBDGcCL4GxAxCSsGxe86BBAOCVFAaAxFGarBBAcCSgGcAcCSsGce86BBAOCOFAaAcFGarBBAQrBIGcCL4GxAxCSsGxe86BBAOCRFAaAxFGarBBAcCSgGcAcCSsGce86BBAOCWFAaAcFGarBBAQrBLGcCL4GxAxCSsGxe86BBAOCdFAaAxFGarBBAcCSgGcAcCSsGce86BBAOC
QFAaAcFGarBBAQrBVGcCL4GxAxCSsGxe86BBAOCKFAaAxFGarBBAcCSgGcAcCSsGce86BBAOCXFAaAcFGarBBAQrBOGcCL4GxAxCSsGxe86BBAOCMFAaAxFGarBBAcCSgGcAcCSsGce86BBAOCpFAaAcFGarBBAQrBRGQCL4GcAcCSsGce86BBAOCSFAaAcFGOrBBAQCSgGQAQCSsGQe86BBAOAQFHQXEKAOAQ8pBB83BBAOCWFAQCWF8pBB83BBAQCZFHQKAICDFHIALCZFGLAp6MBKAQtMDDNAXtMBAWASFrBBHICBHOAhHLINALAWCJ/CBFAOFrBBGoCE4CBAoCEg9r7AIFGI86BBALADFHLAOCEFGOAX9HMBKKAhCEFHhASCEFGSAD6HZAQHoASAD9HMBXIKKAQAMAD2FHcDNAXtMBCBHpCEHZAWCJDFHaINARAQ9rAM6MLAQtMDAQAMFHQAWApFrBBHICBHOAaHLINALAWCJ/CBFAOFrBBGoCE4CBAoCEg9r7AIFGI86BBALADFHLAOCEFGOAX9HMBKAaCEFHaApCEFGpAD6HZApAD9HMBKAcHQXDKCBHOCEHZINARAQ9rAM6MIAQtMEAOCEFGOAD6HZAQAMFHQADAO9HMBKAcHQXEKCBHQAZCEgMEKABAKAD2FAWCJDFAXAD2Z1JJJB8aAWAWCJDFAXCUFAD2FADZ1JJJB8aAXAKFHKAQMEKKC9+HOXDKAEtMBCBHOINAdAEAO9rAOAdFAE6eAOFGOAE6MBKKCBC99ARAQ9rADCAADCA0eseHOKAVCJ/EBF8kJJJJBAOK/YZEhU8jJJJJBC/AE9rGV8kJJJJBC9+HODNAECI9uGRChFAL0MBCUHOAIrBBGWC/wEgC/gE9HMBAWCSgGdCE0MBAVC/ABFCfECJEZ+JJJJB8aAVCuF9CU83IBAVC8wF9CU83IBAVCYF9CU83IBAVCAF9CU83IBAVCkF9CU83IBAVCZF9CU83IBAV9CU83IWAV9CU83IBALAIFC9wFHQAICEFGWARFHODNAEtMBCMCSAdCEseHKCBHXCBHMCBHdCBHICBHLINDNAOAQ9NMBC9+HOXIKDNDNAWrBBGRC/vE0MBAVC/ABFALARCL4CU7FCSgCITFGpYDLHSApYDBHZDNARCSgGpAK9PMBAVAIARCU7FCSgCDTFYDBAXApeHRAptHpDNDNADCD9HMBABAdCETFGhAZ87EBAhCDFAS87EBAhCLFAR87EBXEKABAdCDTFGhAZbDBAhCLFASbDBAhCWFARbDBKAXApFHXAVC/ABFALCITFGhARbDBAhASbDLAVAICDTFARbDBAVC/ABFALCEFCSgGLCITFGhAZbDBAhARbDLAIApFHIALCEFHLXDKDNDNApCSsMBAMApFApC987FCEFHMXEKAOCEFHRAO8sBBGpCfEgHhDNDNApCU9MMBARHOXEKAOCVFHOAhCfBgHhCRHpDNINAR8sBBGoCfBgApTAhvHhAoCU9KMEARCEFHRApCRFGpC8j9HMBXDKKARCEFHOKAhCE4CBAhCEg9r7AMFHMKDNDNADCD9HMBABAdCETFGRAZ87EBARCDFAS87EBARCLFAM87EBXEKABAdCDTFGRAZbDBARCLFASbDBARCWFAMbDBKAVC/ABFALCITFGRAMbDBARASbDLAVAICDTFAMbDBAVC/ABFALCEFCSgGLCITFGRAZbDBARAMbDLAICEFHIALCEFHLXEKDNARCPE0MBAXCEFGoAVAIAQARCSgFrBBGpCL49rCSgCDTFYDBApCZ6GheHRAVAIAp9rCSgCDTFYDBAoAhFGSApCSgGoeHpAotHoDNDNADCD9HMBABAdCETFGZAX87EBAZCDFAR87EBAZCLFAp87EBXEKABAdCDTFGZAXbDBAZCLFARbDBAZCWFApbDBKAVAICDTFAXbDBAVC/ABFALCITFGZARbDBAZAXbDLAVAICEFGICSgCDTFARbDBA
VC/ABFALCEFCSgCITFGZApbDBAZARbDLAVAIAhFCSgGICDTFApbDBAVC/ABFALCDFCSgGLCITFGRAXbDBARApbDLALCEFHLAIAoFHIASAoFHXXEKAXCBAOrBBGZeGaARC/+EsGRFHSAZCSgHcAZCL4HxDNDNAZCS0MBASCEFHoXEKASHoAVAIAx9rCSgCDTFYDBHSKDNDNAcMBAoCEFHXXEKAoHXAVAIAZ9rCSgCDTFYDBHoKDNDNARtMBAOCEFHRXEKAOCDFHRAO8sBEGhCfEgHpDNAhCU9KMBAOCOFHaApCfBgHpCRHODNINAR8sBBGhCfBgAOTApvHpAhCU9KMEARCEFHRAOCRFGOC8j9HMBKAaHRXEKARCEFHRKApCE4CBApCEg9r7AMFGMHaKDNDNAxCSsMBARHpXEKARCEFHpAR8sBBGOCfEgHhDNAOCU9KMBARCVFHSAhCfBgHhCRHODNINAp8sBBGRCfBgAOTAhvHhARCU9KMEApCEFHpAOCRFGOC8j9HMBKASHpXEKApCEFHpKAhCE4CBAhCEg9r7AMFGMHSKDNDNAcCSsMBApHOXEKApCEFHOAp8sBBGRCfEgHhDNARCU9KMBApCVFHoAhCfBgHhCRHRDNINAO8sBBGpCfBgARTAhvHhApCU9KMEAOCEFHOARCRFGRC8j9HMBKAoHOXEKAOCEFHOKAhCE4CBAhCEg9r7AMFGMHoKDNDNADCD9HMBABAdCETFGRAa87EBARCDFAS87EBARCLFAo87EBXEKABAdCDTFGRAabDBARCLFASbDBARCWFAobDBKAVC/ABFALCITFGRASbDBARAabDLAVAICDTFAabDBAVC/ABFALCEFCSgCITFGRAobDBARASbDLAVAICEFGICSgCDTFASbDBAVC/ABFALCDFCSgCITFGRAabDBARAobDLAVAIAZCZ6AxCSsvFGICSgCDTFAobDBAIActAcCSsvFHIALCIFHLKAWCEFHWALCSgHLAICSgHIAdCIFGdAE6MBKKCBC99AOAQseHOKAVC/AEF8kJJJJBAOK+LLEVU8jJJJJBCZ9rHVC9+HODNAECVFAL0MBCUHOAIrBBC/+EgC/QE9HMBAV9CB83IWAICEFHRALAIFC98FHWDNAEtMBDNADCDsMBCBHdINDNARAW6MBC9+SKARCEFHOAR8sBBGLCfEgHIDNDNALCU9MMBAOHRXEKARCVFHRAICfBgHICRHLDNINAO8sBBGDCfBgALTAIvHIADCU9KMEAOCEFHOALCRFGLC8j9HMBXDKKAOCEFHRKABAdCDTFAICD4CBAICE4CEg9r7AVCWFAICEgCDTvGOYDBFGLbDBAOALbDBAdCEFGdAE9HMBXDKKCBHdINDNARAW6MBC9+SKARCEFHOAR8sBBGLCfEgHIDNDNALCU9MMBAOHRXEKARCVFHRAICfBgHICRHLDNINAO8sBBGDCfBgALTAIvHIADCU9KMEAOCEFHOALCRFGLC8j9HMBXDKKAOCEFHRKABAdCETFAICD4CBAICE4CEg9r7AVCWFAICEgCDTvGOYDBFGL87EBAOALbDBAdCEFGdAE9HMBKKCBC99ARAWseHOKAOK+lVOEUE99DUD99EUD99DNDNADCL9HMBAEtMEINDNDNjBBBzjBBB+/ABCDFGD8sBB+yAB8sBBGI+yGL+L+TABCEFGV8sBBGO+yGR+L+TGWjBBBB9gGdeAWjBB/+9CAWAWnjBBBBAWAdeGQAQ+MGKAICU9KeALmGLALnAQAKAOCU9KeARmGQAQnmm+R+VGRnmGW+LjBBB9P9dtMBAW+oHIXEKCJJJJ94HIKADAI86BBDNDNjBBBzjBBB+/AQjBBBB9geAQARnmGW+LjBBB9P9dtMBAW+oHDXEKCJJJJ94HDKAVAD86BBDNDNjBBBzjBBB+/ALjBBBB9geALARnmGW+LjBBB9P9dtMBAW+oHDXEKCJJJJ94HDKABAD86BBABCLFHBAEC
UFGEMBXDKKAEtMBINDNDNjBBBzjBBB+/ABCLFGD8uEB+yAB8uEBGI+yGL+L+TABCDFGV8uEBGO+yGR+L+TGWjBBBB9gGdeAWjB/+fsAWAWnjBBBBAWAdeGQAQ+MGKAICU9KeALmGLALnAQAKAOCU9KeARmGQAQnmm+R+VGRnmGW+LjBBB9P9dtMBAW+oHIXEKCJJJJ94HIKADAI87EBDNDNjBBBzjBBB+/AQjBBBB9geAQARnmGW+LjBBB9P9dtMBAW+oHDXEKCJJJJ94HDKAVAD87EBDNDNjBBBzjBBB+/ALjBBBB9geALARnmGW+LjBBB9P9dtMBAW+oHDXEKCJJJJ94HDKABAD87EBABCWFHBAECUFGEMBKKK/SILIUI99IUE99DNAEtMBCBHIABHLINDNDNj/zL81zALCOF8uEBGVCIv+y+VGOAL8uEB+ynGRjB/+fsnjBBBzjBBB+/ARjBBBB9gemGW+LjBBB9P9dtMBAW+oHdXEKCJJJJ94HdKALCLF8uEBHQALCDF8uEBHKABAVCEFCIgAIvCETFAd87EBDNDNAOAK+ynGWjB/+fsnjBBBzjBBB+/AWjBBBB9gemGX+LjBBB9P9dtMBAX+oHKXEKCJJJJ94HKKABAVCDFCIgAIvCETFAK87EBDNDNAOAQ+ynGOjB/+fsnjBBBzjBBB+/AOjBBBB9gemGX+LjBBB9P9dtMBAX+oHQXEKCJJJJ94HQKABAVCUFCIgAIvCETFAQ87EBDNDNjBBJzARARn+TAWAWn+TAOAOn+TGRjBBBBARjBBBB9ge+RjB/+fsnjBBBzmGR+LjBBB9P9dtMBAR+oHQXEKCJJJJ94HQKABAVCIgAIvCETFAQ87EBALCWFHLAICLFHIAECUFGEMBKKK9MBDNADCD4AE2GEtMBINABABYDBGDCWTCW91+yADCE91CJJJ/8IFCJJJ98g++nuDBABCLFHBAECUFGEMBKKK9TEIUCBCBYDJ1JJBGEABCIFC98gFGBbDJ1JJBDNDNABzBCZTGD9NMBCUHIABAD9rCffIFCZ4NBCUsMEKAEHIKAIK/lEEEUDNDNAEABvCIgtMBABHIXEKDNDNADCZ9PMBABHIXEKABHIINAIAEYDBbDBAICLFAECLFYDBbDBAICWFAECWFYDBbDBAICXFAECXFYDBbDBAICZFHIAECZFHEADC9wFGDCS0MBKKADCL6MBINAIAEYDBbDBAECLFHEAICLFHIADC98FGDCI0MBKKDNADtMBINAIAErBB86BBAICEFHIAECEFHEADCUFGDMBKKABK/AEEDUDNDNABCIgtMBABHIXEKAECfEgC+B+C+EW2HLDNDNADCZ9PMBABHIXEKABHIINAIALbDBAICXFALbDBAICWFALbDBAICLFALbDBAICZFHIADC9wFGDCS0MBKKADCL6MBINAIALbDBAICLFHIADC98FGDCI0MBKKDNADtMBINAIAE86BBAICEFHIADCUFGDMBKKABKKKEBCJWKLZ9kBB' - var wasm_simd = - 
'B9h79tEBBBEkL9gBB9gVUUUUUEU9gIUUUB9gEUEUIKQBBEBEEDDDILVE9wEEEVIEBEOWEUEC+Q/aEKR/LEdO9tw9t9vv95DBh9f9f939h79t9f9j9h229f9jT9vv7BB8a9tw79o9v9wT9f9kw9j9v9kw9WwvTw949C919m9mwvBDy9tw79o9v9wT9f9kw9j9v9kw69u9kw949C919m9mwvBLe9tw79o9v9wT9f9kw9j9v9kw69u9kw949Twg91w9u9jwBVl9tw79o9v9wT9f9kw9j9v9kws9p2Twv9P9jTBOk9tw79o9v9wT9f9kw9j9v9kws9p2Twv9R919hTBRl9tw79o9v9wT9f9kw9j9v9kws9p2Twvt949wBWL79iv9rBdQ/49TQLBZIK9+EVU8jJJJJBCZ9rHBCBHEINCBHDCBHIINABCWFADFAICJUAEAD4CEgGLe86BBAIALFHIADCEFGDCW9HMBKAEC+Q+YJJBFAI86BBAECITC+Q1JJBFAB8pIW83IBAECEFGECJD9HMBKK/s8jLhUD97EUO978jJJJJBCJ/KB9rGV8kJJJJBC9+HODNADCEFAL0MBCUHOAIrBBC+gE9HMBAVAIALFGRAD9rAD/8QBBCJ/ABAD9uC/wfBgGLCJDALCJD6eHWAICEFHLDNDNADtMBCBHdINAdAE9PMDAWAEAd9rAdAWFAE6eGQCSFGOC9wgGKCI2HXAKCETHMAOCL4CIFCD4HpABAdAD2FHSCBHZDNINCEHhALHoCBHaDNINARAo9rAp6MIAVCJ/CBFAaAK2FHcAoApFHLCBHIDNAKC/AB6MBARAL9rC/gB6MBCBHOINAcAOFHIDNDNDNDNDNAoAOCO4FrBBGxCIgpLBEDIBKAIPXBBBBBBBBBBBBBBBBPKLBXIKAIALPBBLALPBBBGqCLP+MEAqPMBZEhDoIaLcVxOqRlGqCDP+MEAqPMBZEhDoIaLcVxOqRlPXIIIIIIIIIIIIIIIIP9OGlPXIIIIIIIIIIIIIIIIP8jGqP5B9CJf/8/4/w/g/AB9+9Cu1+nGkCITC+Q1JJBFPBIBAkC+Q+YJJBFPBBBGyAyPMBBBBBBBBBBBBBBBBAqP5E9CJf/8/4/w/g/AB9+9Cu1+nGkCITC+Q1JJBFPBIBP9uPMBEDILVORZhoacxqlPpAlAqP9SPKLBALCLFAkC+Q+YJJBFrBBAyPqBFFHLXDKAIALPBBWALPBBBGqCLP+MEAqPMBZEhDoIaLcVxOqRlPXSSSSSSSSSSSSSSSSP9OGlPXSSSSSSSSSSSSSSSSP8jGqP5B9CJf/8/4/w/g/AB9+9Cu1+nGkCITC+Q1JJBFPBIBAkC+Q+YJJBFPBBBGyAyPMBBBBBBBBBBBBBBBBAqP5E9CJf/8/4/w/g/AB9+9Cu1+nGkCITC+Q1JJBFPBIBP9uPMBEDILVORZhoacxqlPpAlAqP9SPKLBALCWFAkC+Q+YJJBFrBBAyPqBFFHLXEKAIALPBBBPKLBALCZFHLKDNDNDNDNDNAxCD4CIgpLBEDIBKAIPXBBBBBBBBBBBBBBBBPKLZXIKAIALPBBLALPBBBGqCLP+MEAqPMBZEhDoIaLcVxOqRlGqCDP+MEAqPMBZEhDoIaLcVxOqRlPXIIIIIIIIIIIIIIIIP9OGlPXIIIIIIIIIIIIIIIIP8jGqP5B9CJf/8/4/w/g/AB9+9Cu1+nGkCITC+Q1JJBFPBIBAkC+Q+YJJBFPBBBGyAyPMBBBBBBBBBBBBBBBBAqP5E9CJf/8/4/w/g/AB9+9Cu1+nGkCITC+Q1JJBFPBIBP9uPMBEDILVORZhoacxqlPpAlAqP9SPKLZALCLFAkC+Q+YJJBFrBBAyPqBFFHLXDKAIALPBBWALPBBBGqCLP+MEAqPMBZEhDoIaLcVxOqRlPXSSSSSSSSSSSSSSSSP9OGlPXSSSSSSSSSSSSSSSSP8jGqP5B9CJf/8/4/w/g/AB9+9Cu1+nGkCITC+Q1
JJBFPBIBAkC+Q+YJJBFPBBBGyAyPMBBBBBBBBBBBBBBBBAqP5E9CJf/8/4/w/g/AB9+9Cu1+nGkCITC+Q1JJBFPBIBP9uPMBEDILVORZhoacxqlPpAlAqP9SPKLZALCWFAkC+Q+YJJBFrBBAyPqBFFHLXEKAIALPBBBPKLZALCZFHLKDNDNDNDNDNAxCL4CIgpLBEDIBKAIPXBBBBBBBBBBBBBBBBPKLAXIKAIALPBBLALPBBBGqCLP+MEAqPMBZEhDoIaLcVxOqRlGqCDP+MEAqPMBZEhDoIaLcVxOqRlPXIIIIIIIIIIIIIIIIP9OGlPXIIIIIIIIIIIIIIIIP8jGqP5B9CJf/8/4/w/g/AB9+9Cu1+nGkCITC+Q1JJBFPBIBAkC+Q+YJJBFPBBBGyAyPMBBBBBBBBBBBBBBBBAqP5E9CJf/8/4/w/g/AB9+9Cu1+nGkCITC+Q1JJBFPBIBP9uPMBEDILVORZhoacxqlPpAlAqP9SPKLAALCLFAkC+Q+YJJBFrBBAyPqBFFHLXDKAIALPBBWALPBBBGqCLP+MEAqPMBZEhDoIaLcVxOqRlPXSSSSSSSSSSSSSSSSP9OGlPXSSSSSSSSSSSSSSSSP8jGqP5B9CJf/8/4/w/g/AB9+9Cu1+nGkCITC+Q1JJBFPBIBAkC+Q+YJJBFPBBBGyAyPMBBBBBBBBBBBBBBBBAqP5E9CJf/8/4/w/g/AB9+9Cu1+nGkCITC+Q1JJBFPBIBP9uPMBEDILVORZhoacxqlPpAlAqP9SPKLAALCWFAkC+Q+YJJBFrBBAyPqBFFHLXEKAIALPBBBPKLAALCZFHLKDNDNDNDNDNAxCO4pLBEDIBKAIPXBBBBBBBBBBBBBBBBPKL8wXIKAIALPBBLALPBBBGqCLP+MEAqPMBZEhDoIaLcVxOqRlGqCDP+MEAqPMBZEhDoIaLcVxOqRlPXIIIIIIIIIIIIIIIIP9OGlPXIIIIIIIIIIIIIIIIP8jGqP5B9CJf/8/4/w/g/AB9+9Cu1+nGxCITC+Q1JJBFPBIBAxC+Q+YJJBFPBBBGyAyPMBBBBBBBBBBBBBBBBAqP5E9CJf/8/4/w/g/AB9+9Cu1+nGxCITC+Q1JJBFPBIBP9uPMBEDILVORZhoacxqlPpAlAqP9SPKL8wALCLFAxC+Q+YJJBFrBBAyPqBFFHLXDKAIALPBBWALPBBBGqCLP+MEAqPMBZEhDoIaLcVxOqRlPXSSSSSSSSSSSSSSSSP9OGlPXSSSSSSSSSSSSSSSSP8jGqP5B9CJf/8/4/w/g/AB9+9Cu1+nGxCITC+Q1JJBFPBIBAxC+Q+YJJBFPBBBGyAyPMBBBBBBBBBBBBBBBBAqP5E9CJf/8/4/w/g/AB9+9Cu1+nGxCITC+Q1JJBFPBIBP9uPMBEDILVORZhoacxqlPpAlAqP9SPKL8wALCWFAxC+Q+YJJBFrBBAyPqBFFHLXEKAIALPBBBPKL8wALCZFHLKAOC/ABFHIAOCJEFAK0MEAIHOARAL9rC/fB0MBKKDNDNAIAK9PMBAICI4HOINARAL9rCk6MDAcAIFHxDNDNDNDNDNAoAICO4FrBBAOCOg4CIgpLBEDIBKAxPXBBBBBBBBBBBBBBBBPKLBXIKAxALPBBLALPBBBGqCLP+MEAqPMBZEhDoIaLcVxOqRlGqCDP+MEAqPMBZEhDoIaLcVxOqRlPXIIIIIIIIIIIIIIIIP9OGlPXIIIIIIIIIIIIIIIIP8jGqP5B9CJf/8/4/w/g/AB9+9Cu1+nGkCITC+Q1JJBFPBIBAkC+Q+YJJBFPBBBGyAyPMBBBBBBBBBBBBBBBBAqP5E9CJf/8/4/w/g/AB9+9Cu1+nGkCITC+Q1JJBFPBIBP9uPMBEDILVORZhoacxqlPpAlAqP9SPKLBALCLFAkC+Q+YJJBFrBBAyPqBFFHLXDKAxALPBBWALPBBBGqCLP+MEAqPMBZEhDoIaLcVxOqRlPXSSSSSSSSSSSSSSSSP
9OGlPXSSSSSSSSSSSSSSSSP8jGqP5B9CJf/8/4/w/g/AB9+9Cu1+nGkCITC+Q1JJBFPBIBAkC+Q+YJJBFPBBBGyAyPMBBBBBBBBBBBBBBBBAqP5E9CJf/8/4/w/g/AB9+9Cu1+nGkCITC+Q1JJBFPBIBP9uPMBEDILVORZhoacxqlPpAlAqP9SPKLBALCWFAkC+Q+YJJBFrBBAyPqBFFHLXEKAxALPBBBPKLBALCZFHLKAOCDFHOAICZFGIAK6MBKKALtMBAaCI6HhALHoAaCEFGOHaAOCLsMDXEKKCBHLAhCEgMDKDNAKtMBAVCJDFAZFHIAVAZFPBDBHyCBHxINAIAVCJ/CBFAxFGOPBLBGlCEP9tAlPXEEEEEEEEEEEEEEEEGqP9OP9hP9RGlAOAKFPBLBG8aCEP9tA8aAqP9OP9hP9RG8aPMBZEhDoIaLcVxOqRlGeAOAMFPBLBG3CEP9tA3AqP9OP9hP9RG3AOAXFPBLBG5CEP9tA5AqP9OP9hP9RG5PMBZEhDoIaLcVxOqRlG8ePMBEZhDIoaLVcxORqlGqAqPMBEDIBEDIBEDIBEDIAyP9uGyP9aDBBAIADFGOAyAqAqPMLVORLVORLVORLVORP9uGyP9aDBBAOADFGOAyAqAqPMWdQKWdQKWdQKWdQKP9uGyP9aDBBAOADFGOAyAqAqPMXMpSXMpSXMpSXMpSP9uGyP9aDBBAOADFGOAyAeA8ePMWdkyQK8aeXM35pS8e8fGqAqPMBEDIBEDIBEDIBEDIP9uGyP9aDBBAOADFGOAyAqAqPMLVORLVORLVORLVORP9uGyP9aDBBAOADFGOAyAqAqPMWdQKWdQKWdQKWdQKP9uGyP9aDBBAOADFGOAyAqAqPMXMpSXMpSXMpSXMpSP9uGyP9aDBBAOADFGOAyAlA8aPMWkdyQ8aKeX3M5p8eS8fGlA3A5PMWkdyQ8aKeX3M5p8eS8fG8aPMBEZhDIoaLVcxORqlGqAqPMBEDIBEDIBEDIBEDIP9uGyP9aDBBAOADFGOAyAqAqPMLVORLVORLVORLVORP9uGyP9aDBBAOADFGOAyAqAqPMWdQKWdQKWdQKWdQKP9uGyP9aDBBAOADFGOAyAqAqPMXMpSXMpSXMpSXMpSP9uGyP9aDBBAOADFGOAyAlA8aPMWdkyQK8aeXM35pS8e8fGqAqPMBEDIBEDIBEDIBEDIP9uGyP9aDBBAOADFGOAyAqAqPMLVORLVORLVORLVORP9uGyP9aDBBAOADFGOAyAqAqPMWdQKWdQKWdQKWdQKP9uGyP9aDBBAOADFGOAyAqAqPMXMpSXMpSXMpSXMpSP9uGyP9aDBBAOADFHIAxCZFGxAK6MBKKAZCLFGZAD6MBKASAVCJDFAQAD2/8QBBAVAVCJDFAQCUFAD2FAD/8QBBAQAdFHdC9+HOALMEXLKKC9+HOXDKAEtMBCBHOINAWAEAO9rAOAWFAE6eAOFGOAE6MBKKCBC99ARAL9rADCAADCA0eseHOKAVCJ/KBF8kJJJJBAOKWBZ+BJJJBK/UZEhU8jJJJJBC/AE9rGV8kJJJJBC9+HODNAECI9uGRChFAL0MBCUHOAIrBBGWC/wEgC/gE9HMBAWCSgGdCE0MBAVC/ABFCfECJE/8KBAVCuF9CU83IBAVC8wF9CU83IBAVCYF9CU83IBAVCAF9CU83IBAVCkF9CU83IBAVCZF9CU83IBAV9CU83IWAV9CU83IBALAIFC9wFHQAICEFGWARFHODNAEtMBCMCSAdCEseHKCBHXCBHMCBHdCBHICBHLINDNAOAQ9NMBC9+HOXIKDNDNAWrBBGRC/vE0MBAVC/ABFALARCL4CU7FCSgCITFGpYDLHSApYDBHZDNARCSgGpAK9PMBAVAIARCU7FCSgCDTFYDBAXApeHRAptHpDNDNADCD9HMBABAdCETFGhAZ87EBAhCDFAS87EBAhCLFAR87EBXEKABAdCDTFGhAZbDBAhCLFAS
bDBAhCWFARbDBKAXApFHXAVC/ABFALCITFGhARbDBAhASbDLAVAICDTFARbDBAVC/ABFALCEFCSgGLCITFGhAZbDBAhARbDLAIApFHIALCEFHLXDKDNDNApCSsMBAMApFApC987FCEFHMXEKAOCEFHRAO8sBBGpCfEgHhDNDNApCU9MMBARHOXEKAOCVFHOAhCfBgHhCRHpDNINAR8sBBGoCfBgApTAhvHhAoCU9KMEARCEFHRApCRFGpC8j9HMBXDKKARCEFHOKAhCE4CBAhCEg9r7AMFHMKDNDNADCD9HMBABAdCETFGRAZ87EBARCDFAS87EBARCLFAM87EBXEKABAdCDTFGRAZbDBARCLFASbDBARCWFAMbDBKAVC/ABFALCITFGRAMbDBARASbDLAVAICDTFAMbDBAVC/ABFALCEFCSgGLCITFGRAZbDBARAMbDLAICEFHIALCEFHLXEKDNARCPE0MBAXCEFGoAVAIAQARCSgFrBBGpCL49rCSgCDTFYDBApCZ6GheHRAVAIAp9rCSgCDTFYDBAoAhFGSApCSgGoeHpAotHoDNDNADCD9HMBABAdCETFGZAX87EBAZCDFAR87EBAZCLFAp87EBXEKABAdCDTFGZAXbDBAZCLFARbDBAZCWFApbDBKAVAICDTFAXbDBAVC/ABFALCITFGZARbDBAZAXbDLAVAICEFGICSgCDTFARbDBAVC/ABFALCEFCSgCITFGZApbDBAZARbDLAVAIAhFCSgGICDTFApbDBAVC/ABFALCDFCSgGLCITFGRAXbDBARApbDLALCEFHLAIAoFHIASAoFHXXEKAXCBAOrBBGZeGaARC/+EsGRFHSAZCSgHcAZCL4HxDNDNAZCS0MBASCEFHoXEKASHoAVAIAx9rCSgCDTFYDBHSKDNDNAcMBAoCEFHXXEKAoHXAVAIAZ9rCSgCDTFYDBHoKDNDNARtMBAOCEFHRXEKAOCDFHRAO8sBEGhCfEgHpDNAhCU9KMBAOCOFHaApCfBgHpCRHODNINAR8sBBGhCfBgAOTApvHpAhCU9KMEARCEFHRAOCRFGOC8j9HMBKAaHRXEKARCEFHRKApCE4CBApCEg9r7AMFGMHaKDNDNAxCSsMBARHpXEKARCEFHpAR8sBBGOCfEgHhDNAOCU9KMBARCVFHSAhCfBgHhCRHODNINAp8sBBGRCfBgAOTAhvHhARCU9KMEApCEFHpAOCRFGOC8j9HMBKASHpXEKApCEFHpKAhCE4CBAhCEg9r7AMFGMHSKDNDNAcCSsMBApHOXEKApCEFHOAp8sBBGRCfEgHhDNARCU9KMBApCVFHoAhCfBgHhCRHRDNINAO8sBBGpCfBgARTAhvHhApCU9KMEAOCEFHOARCRFGRC8j9HMBKAoHOXEKAOCEFHOKAhCE4CBAhCEg9r7AMFGMHoKDNDNADCD9HMBABAdCETFGRAa87EBARCDFAS87EBARCLFAo87EBXEKABAdCDTFGRAabDBARCLFASbDBARCWFAobDBKAVC/ABFALCITFGRASbDBARAabDLAVAICDTFAabDBAVC/ABFALCEFCSgCITFGRAobDBARASbDLAVAICEFGICSgCDTFASbDBAVC/ABFALCDFCSgCITFGRAabDBARAobDLAVAIAZCZ6AxCSsvFGICSgCDTFAobDBAIActAcCSsvFHIALCIFHLKAWCEFHWALCSgHLAICSgHIAdCIFGdAE6MBKKCBC99AOAQseHOKAVC/AEF8kJJJJBAOK+LLEVU8jJJJJBCZ9rHVC9+HODNAECVFAL0MBCUHOAIrBBC/+EgC/QE9HMBAV9CB83IWAICEFHRALAIFC98FHWDNAEtMBDNADCDsMBCBHdINDNARAW6MBC9+SKARCEFHOAR8sBBGLCfEgHIDNDNALCU9MMBAOHRXEKARCVFHRAICfBgHICRHLDNINAO8sBBGDCfBgALTAIvHIADCU9KMEAOCEFHOALCRF
GLC8j9HMBXDKKAOCEFHRKABAdCDTFAICD4CBAICE4CEg9r7AVCWFAICEgCDTvGOYDBFGLbDBAOALbDBAdCEFGdAE9HMBXDKKCBHdINDNARAW6MBC9+SKARCEFHOAR8sBBGLCfEgHIDNDNALCU9MMBAOHRXEKARCVFHRAICfBgHICRHLDNINAO8sBBGDCfBgALTAIvHIADCU9KMEAOCEFHOALCRFGLC8j9HMBXDKKAOCEFHRKABAdCETFAICD4CBAICE4CEg9r7AVCWFAICEgCDTvGOYDBFGL87EBAOALbDBAdCEFGdAE9HMBKKCBC99ARAWseHOKAOK+epLIUO97EUE978jJJJJBCA9rHIDNDNADCL9HMBDNAEC98gGLtMBCBHVABHDINADADPBBBGOCkP+rECkP+sEP/6EGRAOCWP+rECkP+sEP/6EARP/gEAOCZP+rECkP+sEP/6EGWP/gEP/kEP/lEGdPXBBBBBBBBBBBBBBBBP+2EGQARPXBBBJBBBJBBBJBBBJGKP9OP9RP/kEGRPXBB/+9CBB/+9CBB/+9CBB/+9CARARP/mEAdAdP/mEAWAQAWAKP9OP9RP/kEGRARP/mEP/kEP/kEP/jEP/nEGWP/mEPXBBN0BBN0BBN0BBN0GQP/kEPXfBBBfBBBfBBBfBBBP9OAOPXBBBfBBBfBBBfBBBfP9OP9QARAWP/mEAQP/kECWP+rEPXBfBBBfBBBfBBBfBBP9OP9QAdAWP/mEAQP/kECZP+rEPXBBfBBBfBBBfBBBfBP9OP9QPKBBADCZFHDAVCLFGVAL6MBKKALAE9PMEAIAECIgGVCDTGDvCBCZAD9r/8KBAIABALCDTFGLAD/8QBBDNAVtMBAIAIPBLBGOCkP+rECkP+sEP/6EGRAOCWP+rECkP+sEP/6EARP/gEAOCZP+rECkP+sEP/6EGWP/gEP/kEP/lEGdPXBBBBBBBBBBBBBBBBP+2EGQARPXBBBJBBBJBBBJBBBJGKP9OP9RP/kEGRPXBB/+9CBB/+9CBB/+9CBB/+9CARARP/mEAdAdP/mEAWAQAWAKP9OP9RP/kEGRARP/mEP/kEP/kEP/jEP/nEGWP/mEPXBBN0BBN0BBN0BBN0GQP/kEPXfBBBfBBBfBBBfBBBP9OAOPXBBBfBBBfBBBfBBBfP9OP9QARAWP/mEAQP/kECWP+rEPXBfBBBfBBBfBBBfBBP9OP9QAdAWP/mEAQP/kECZP+rEPXBBfBBBfBBBfBBBfBP9OP9QPKLBKALAIAD/8QBBSKDNAEC98gGXtMBCBHVABHDINADCZFGLALPBBBGOPXBBBBBBffBBBBBBffGKP9OADPBBBGdAOPMLVORXMpScxql358e8fPXfUBBfUBBfUBBfUBBP9OP/6EAdAOPMBEDIWdQKZhoaky8aeGOCZP+sEP/6EGRP/gEAOCZP+rECZP+sEP/6EGWP/gEP/kEP/lEGOPXB/+fsB/+fsB/+fsB/+fsAWAOPXBBBBBBBBBBBBBBBBP+2EGQAWPXBBBJBBBJBBBJBBBJGMP9OP9RP/kEGWAWP/mEAOAOP/mEARAQARAMP9OP9RP/kEGOAOP/mEP/kEP/kEP/jEP/nEGRP/mEPXBBN0BBN0BBN0BBN0GQP/kECZP+rEAWARP/mEAQP/kEPXffBBffBBffBBffBBP9OP9QGWAOARP/mEAQP/kEPXffBBffBBffBBffBBP9OGOPMWdkyQK8aeXM35pS8e8fP9QPKBBADAdAKP9OAWAOPMBEZhDIoaLVcxORqlP9QPKBBADCAFHDAVCLFGVAX6MBKKAXAE9PMBAIAECIgGVCITGDFCBCAAD9r/8KBAIABAXCITFGLAD/8QBBDNAVtMBAIAIPBLZGOPXBBBBBBffBBBBBBffGKP9OAIPBLBGdAOPMLVORXMpScxql358e8fPXfUBBfUBBfUBBfUBBP9OP/6EAdAOPMBEDIWdQKZhoaky8aeGOCZP+sEP/6EGR
P/gEAOCZP+rECZP+sEP/6EGWP/gEP/kEP/lEGOPXB/+fsB/+fsB/+fsB/+fsAWAOPXBBBBBBBBBBBBBBBBP+2EGQAWPXBBBJBBBJBBBJBBBJGMP9OP9RP/kEGWAWP/mEAOAOP/mEARAQARAMP9OP9RP/kEGOAOP/mEP/kEP/kEP/jEP/nEGRP/mEPXBBN0BBN0BBN0BBN0GQP/kECZP+rEAWARP/mEAQP/kEPXffBBffBBffBBffBBP9OP9QGWAOARP/mEAQP/kEPXffBBffBBffBBffBBP9OGOPMWdkyQK8aeXM35pS8e8fP9QPKLZAIAdAKP9OAWAOPMBEZhDIoaLVcxORqlP9QPKLBKALAIAD/8QBBKK/4WLLUE97EUV978jJJJJBC8w9rHIDNAEC98gGLtMBCBHVABHOINAIAOPBBBGRAOCZFGWPBBBGdPMLVORXMpScxql358e8fGQCZP+sEGKCLP+rEPKLBAOPXBBJzBBJzBBJzBBJzPX/zL81z/zL81z/zL81z/zL81zAKPXIBBBIBBBIBBBIBBBP9QP/6EP/nEGKARAdPMBEDIWdQKZhoaky8aeGRCZP+rECZP+sEP/6EP/mEGdAdP/mEAKARCZP+sEP/6EP/mEGXAXP/mEAKAQCZP+rECZP+sEP/6EP/mEGQAQP/mEP/kEP/kEP/lEPXBBBBBBBBBBBBBBBBP+4EP/jEPXB/+fsB/+fsB/+fsB/+fsGKP/mEPXBBN0BBN0BBN0BBN0GRP/kEPXffBBffBBffBBffBBGMP9OAXAKP/mEARP/kECZP+rEP9QGXAQAKP/mEARP/kECZP+rEAdAKP/mEARP/kEAMP9OP9QGKPMBEZhDIoaLVcxORqlGRP5BAIPBLBPeB+t+J83IBAOCWFARP5EAIPBLBPeE+t+J83IBAWAXAKPMWdkyQK8aeXM35pS8e8fGKP5BAIPBLBPeD+t+J83IBAOCkFAKP5EAIPBLBPeI+t+J83IBAOCAFHOAVCLFGVAL6MBKKDNALAE9PMBAIAECIgGVCITGOFCBCAAO9r/8KBAIABALCITFGWAO/8QBBDNAVtMBAIAIPBLBGRAIPBLZGdPMLVORXMpScxql358e8fGQCZP+sEGKCLP+rEPKLAAIPXBBJzBBJzBBJzBBJzPX/zL81z/zL81z/zL81z/zL81zAKPXIBBBIBBBIBBBIBBBP9QP/6EP/nEGKARAdPMBEDIWdQKZhoaky8aeGRCZP+rECZP+sEP/6EP/mEGdAdP/mEAKARCZP+sEP/6EP/mEGXAXP/mEAKAQCZP+rECZP+sEP/6EP/mEGQAQP/mEP/kEP/kEP/lEPXBBBBBBBBBBBBBBBBP+4EP/jEPXB/+fsB/+fsB/+fsB/+fsGKP/mEPXBBN0BBN0BBN0BBN0GRP/kEPXffBBffBBffBBffBBGMP9OAXAKP/mEARP/kECZP+rEP9QGXAQAKP/mEARP/kECZP+rEAdAKP/mEARP/kEAMP9OP9QGKPMBEZhDIoaLVcxORqlGRP5BAIPBLAPeB+t+J83IBAIARP5EAIPBLAPeE+t+J83IWAIAXAKPMWdkyQK8aeXM35pS8e8fGKP5BAIPBLAPeD+t+J83IZAIAKP5EAIPBLAPeI+t+J83IkKAWAIAO/8QBBKK+pDDIUE978jJJJJBC/AB9rHIDNADCD4AE2GLC98gGVtMBCBHDABHEINAEAEPBBBGOCWP+rECWP+sEP/6EAOCEP+sEPXBBJzBBJzBBJzBBJzP+uEPXBBJfBBJfBBJfBBJfP9OP/mEPKBBAECZFHEADCLFGDAV6MBKKDNAVAL9PMBAIALCIgGDCDTGEvCBC/ABAE9r/8KBAIABAVCDTFGVAE/8QBBDNADtMBAIAIPBLBGOCWP+rECWP+sEP/6EAOCEP+sEPXBBJzBBJzBBJzBBJzP+uEPXBBJfBBJfBBJfBBJfP9OP/mEPKLBKAVAIAE/8QBBKK9TEIUCBCBY
DJ1JJBGEABCIFC98gFGBbDJ1JJBDNDNABzBCZTGD9NMBCUHIABAD9rCffIFCZ4NBCUsMEKAEHIKAIKKKEBCJWKLZ9tBB' - - // Uses bulk-memory and simd extensions - var detector = new Uint8Array([ - 0, 97, 115, 109, 1, 0, 0, 0, 1, 4, 1, 96, 0, 0, 3, 3, 2, 0, 0, 5, 3, 1, 0, 1, 12, 1, 0, 10, 22, 2, 12, 0, 65, 0, 65, - 0, 65, 0, 252, 10, 0, 0, 11, 7, 0, 65, 0, 253, 15, 26, 11 - ]) - - // Used to unpack wasm - var wasmpack = new Uint8Array([ - 32, 0, 65, 2, 1, 106, 34, 33, 3, 128, 11, 4, 13, 64, 6, 253, 10, 7, 15, 116, 127, 5, 8, 12, 40, 16, 19, 54, 20, 9, - 27, 255, 113, 17, 42, 67, 24, 23, 146, 148, 18, 14, 22, 45, 70, 69, 56, 114, 101, 21, 25, 63, 75, 136, 108, 28, 118, - 29, 73, 115 - ]) - - if (typeof WebAssembly !== 'object') { - // This module requires WebAssembly to function - return { - supported: false - } - } - - var wasm = wasm_base - - if (WebAssembly.validate(detector)) { - wasm = wasm_simd - } - - var instance - - var promise = WebAssembly.instantiate(unpack(wasm), {}).then(function (result) { - instance = result.instance - instance.exports.__wasm_call_ctors() - }) - - function unpack(data) { - var result = new Uint8Array(data.length) - for (var i = 0; i < data.length; ++i) { - var ch = data.charCodeAt(i) - result[i] = ch > 96 ? ch - 71 : ch > 64 ? ch - 65 : ch > 47 ? ch + 4 : ch > 46 ? 63 : 62 - } - var write = 0 - for (var i = 0; i < data.length; ++i) { - result[write++] = result[i] < 60 ? 
wasmpack[result[i]] : (result[i] - 60) * 64 + result[++i] - } - return result.buffer.slice(0, write) - } - - function decode(fun, target, count, size, source, filter) { - var sbrk = instance.exports.sbrk - var count4 = (count + 3) & ~3 // pad for SIMD filter - var tp = sbrk(count4 * size) - var sp = sbrk(source.length) - var heap = new Uint8Array(instance.exports.memory.buffer) - heap.set(source, sp) - var res = fun(tp, count, size, sp, source.length) - if (res == 0 && filter) { - filter(tp, count4, size) - } - target.set(heap.subarray(tp, tp + count * size)) - sbrk(tp - sbrk(0)) - if (res != 0) { - throw new Error('Malformed buffer data: ' + res) - } - } - - var filters = { - // legacy index-based enums for glTF - 0: '', - 1: 'meshopt_decodeFilterOct', - 2: 'meshopt_decodeFilterQuat', - 3: 'meshopt_decodeFilterExp', - // string-based enums for glTF - NONE: '', - OCTAHEDRAL: 'meshopt_decodeFilterOct', - QUATERNION: 'meshopt_decodeFilterQuat', - EXPONENTIAL: 'meshopt_decodeFilterExp' - } - - var decoders = { - // legacy index-based enums for glTF - 0: 'meshopt_decodeVertexBuffer', - 1: 'meshopt_decodeIndexBuffer', - 2: 'meshopt_decodeIndexSequence', - // string-based enums for glTF - ATTRIBUTES: 'meshopt_decodeVertexBuffer', - TRIANGLES: 'meshopt_decodeIndexBuffer', - INDICES: 'meshopt_decodeIndexSequence' - } - - return { - ready: promise, - supported: true, - decodeVertexBuffer: function (target, count, size, source, filter) { - decode( - instance.exports.meshopt_decodeVertexBuffer, - target, - count, - size, - source, - instance.exports[filters[filter]] - ) - }, - decodeIndexBuffer: function (target, count, size, source) { - decode(instance.exports.meshopt_decodeIndexBuffer, target, count, size, source) - }, - decodeIndexSequence: function (target, count, size, source) { - decode(instance.exports.meshopt_decodeIndexSequence, target, count, size, source) - }, - decodeGltfBuffer: function (target, count, size, source, mode, filter) { - 
decode(instance.exports[decoders[mode]], target, count, size, source, instance.exports[filters[filter]]) - } - } -})() - -export { MeshoptDecoder } +// Copyright (C) 2016-2022, by Arseny Kapoulkine (arseny.kapoulkine@gmail.com) +var MeshoptDecoder = (function() { + "use strict"; + + // Built with clang version 14.0.4 + // Built from meshoptimizer 0.18 + var wasm_base = "b9H79Tebbbe8Fv9Gbb9Gvuuuuueu9Giuuub9Geueu9Giuuueuikqbeeedddillviebeoweuec:q;iekr;leDo9TW9T9VV95dbH9F9F939H79T9F9J9H229F9Jt9VV7bb8A9TW79O9V9Wt9F9KW9J9V9KW9wWVtW949c919M9MWVbeY9TW79O9V9Wt9F9KW9J9V9KW69U9KW949c919M9MWVbdE9TW79O9V9Wt9F9KW9J9V9KW69U9KW949tWG91W9U9JWbiL9TW79O9V9Wt9F9KW9J9V9KWS9P2tWV9p9JtblK9TW79O9V9Wt9F9KW9J9V9KWS9P2tWV9r919HtbvL9TW79O9V9Wt9F9KW9J9V9KWS9P2tWVT949Wbol79IV9Rbrq:P8Yqdbk;3sezu8Jjjjjbcj;eb9Rgv8Kjjjjbc9:hodnadcefal0mbcuhoaiRbbc:Ge9hmbavaialfgrad9Radz1jjjbhwcj;abad9UhoaicefhldnadTmbaoc;WFbGgocjdaocjd6EhDcbhqinaqae9pmeaDaeaq9RaqaDfae6Egkcsfgocl4cifcd4hxdndndndnaoc9WGgmTmbcbhPcehsawcjdfhzalhHinaraH9Rax6midnaraHaxfgl9RcK6mbczhoinawcj;cbfaogifgoc9WfhOdndndndndnaHaic9WfgAco4fRbbaAci4coG4ciGPlbedibkaO9cb83ibaOcwf9cb83ibxikaOalRblalRbbgAco4gCaCciSgCE86bbaocGfalclfaCfgORbbaAcl4ciGgCaCciSgCE86bbaocVfaOaCfgORbbaAcd4ciGgCaCciSgCE86bbaoc7faOaCfgORbbaAciGgAaAciSgAE86bbaoctfaOaAfgARbbalRbegOco4gCaCciSgCE86bbaoc91faAaCfgARbbaOcl4ciGgCaCciSgCE86bbaoc4faAaCfgARbbaOcd4ciGgCaCciSgCE86bbaoc93faAaCfgARbbaOciGgOaOciSgOE86bbaoc94faAaOfgARbbalRbdgOco4gCaCciSgCE86bbaoc95faAaCfgARbbaOcl4ciGgCaCciSgCE86bbaoc96faAaCfgARbbaOcd4ciGgCaCciSgCE86bbaoc97faAaCfgARbbaOciGgOaOciSgOE86bbaoc98faAaOfgORbbalRbiglco4gAaAciSgAE86bbaoc99faOaAfgORbbalcl4ciGgAaAciSgAE86bbaoc9:faOaAfgORbbalcd4ciGgAaAciSgAE86bbaocufaOaAfgoRbbalciGglalciSglE86bbaoalfhlxdkaOalRbwalRbbgAcl4gCaCcsSgCE86bbaocGfalcwfaCfgORbbaAcsGgAaAcsSgAE86bbaocVfaOaAfgORbbalRbegAcl4gCaCcsSgCE86bbaoc7faOaCfgORbbaAcsGgAaAcsSgAE86bbaoctfaOaAfgORbbalRbdgAcl4gCaCcsSgCE86bbaoc91faOaCfgORbbaAcsGgAaAcsSgAE86bbaoc4faOaAfgORbbalRbigAcl4gCaCcsSgCE86bbaoc93faOaCfgORbbaAcsGgAa
AcsSgAE86bbaoc94faOaAfgORbbalRblgAcl4gCaCcsSgCE86bbaoc95faOaCfgORbbaAcsGgAaAcsSgAE86bbaoc96faOaAfgORbbalRbvgAcl4gCaCcsSgCE86bbaoc97faOaCfgORbbaAcsGgAaAcsSgAE86bbaoc98faOaAfgORbbalRbogAcl4gCaCcsSgCE86bbaoc99faOaCfgORbbaAcsGgAaAcsSgAE86bbaoc9:faOaAfgORbbalRbrglcl4gAaAcsSgAE86bbaocufaOaAfgoRbbalcsGglalcsSglE86bbaoalfhlxekaOal8Pbb83bbaOcwfalcwf8Pbb83bbalczfhlkdnaiam9pmbaiczfhoaral9RcL0mekkaiam6mialTmidnakTmbawaPfRbbhOcbhoazhiinaiawcj;cbfaofRbbgAce4cbaAceG9R7aOfgO86bbaiadfhiaocefgoak9hmbkkazcefhzaPcefgPad6hsalhHaPad9hmexvkkcbhlasceGmdxikalaxad2fhCdnakTmbcbhHcehsawcjdfhminaral9Rax6mialTmdalaxfhlawaHfRbbhOcbhoamhiinaiawcj;cbfaofRbbgAce4cbaAceG9R7aOfgO86bbaiadfhiaocefgoak9hmbkamcefhmaHcefgHad6hsaHad9hmbkaChlxikcbhocehsinaral9Rax6mdalTmealaxfhlaocefgoad6hsadao9hmbkaChlxdkcbhlasceGTmekc9:hoxikabaqad2fawcjdfakad2z1jjjb8Aawawcjdfakcufad2fadz1jjjb8Aakaqfhqalmbkc9:hoxekcbc99aral9Radcaadca0ESEhokavcj;ebf8Kjjjjbaok;yzeHu8Jjjjjbc;ae9Rgv8Kjjjjbc9:hodnaeci9UgrcHfal0mbcuhoaiRbbgwc;WeGc;Ge9hmbawcsGgDce0mbavc;abfcFecjez:jjjjb8AavcUf9cu83ibavc8Wf9cu83ibavcyf9cu83ibavcaf9cu83ibavcKf9cu83ibavczf9cu83ibav9cu83iwav9cu83ibaialfc9WfhqaicefgwarfhodnaeTmbcmcsaDceSEhkcbhxcbhmcbhDcbhicbhlindnaoaq9nmbc9:hoxikdndnawRbbgrc;Ve0mbavc;abfalarcl4cu7fcsGcitfgPydlhsaPydbhzdnarcsGgPak9pmbavaiarcu7fcsGcdtfydbaxaPEhraPThPdndnadcd9hmbabaDcetfgHaz87ebaHcdfas87ebaHclfar87ebxekabaDcdtfgHazBdbaHclfasBdbaHcwfarBdbkaxaPfhxavc;abfalcitfgHarBdbaHasBdlavaicdtfarBdbavc;abfalcefcsGglcitfgHazBdbaHarBdlaiaPfhialcefhlxdkdndnaPcsSmbamaPfaPc987fcefhmxekaocefhrao8SbbgPcFeGhHdndnaPcu9mmbarhoxekaocvfhoaHcFbGhHcrhPdninar8SbbgOcFbGaPtaHVhHaOcu9kmearcefhraPcrfgPc8J9hmbxdkkarcefhokaHce4cbaHceG9R7amfhmkdndnadcd9hmbabaDcetfgraz87ebarcdfas87ebarclfam87ebxekabaDcdtfgrazBdbarclfasBdbarcwfamBdbkavc;abfalcitfgramBdbarasBdlavaicdtfamBdbavc;abfalcefcsGglcitfgrazBdbaramBdlaicefhialcefhlxekdnarcpe0mbaxcefgOavaiaqarcsGfRbbgPcl49RcsGcdtfydbaPcz6gHEhravaiaP9RcsGcdtfydbaOaHfgsaPcsGgOEhPaOThOdndnadcd9hmbabaDcetfgzax87ebazcdfar87ebazclfaP87ebxekabaDc
dtfgzaxBdbazclfarBdbazcwfaPBdbkavaicdtfaxBdbavc;abfalcitfgzarBdbazaxBdlavaicefgicsGcdtfarBdbavc;abfalcefcsGcitfgzaPBdbazarBdlavaiaHfcsGgicdtfaPBdbavc;abfalcdfcsGglcitfgraxBdbaraPBdlalcefhlaiaOfhiasaOfhxxekaxcbaoRbbgzEgAarc;:eSgrfhsazcsGhCazcl4hXdndnazcs0mbascefhOxekashOavaiaX9RcsGcdtfydbhskdndnaCmbaOcefhxxekaOhxavaiaz9RcsGcdtfydbhOkdndnarTmbaocefhrxekaocdfhrao8SbegHcFeGhPdnaHcu9kmbaocofhAaPcFbGhPcrhodninar8SbbgHcFbGaotaPVhPaHcu9kmearcefhraocrfgoc8J9hmbkaAhrxekarcefhrkaPce4cbaPceG9R7amfgmhAkdndnaXcsSmbarhPxekarcefhPar8SbbgocFeGhHdnaocu9kmbarcvfhsaHcFbGhHcrhodninaP8SbbgrcFbGaotaHVhHarcu9kmeaPcefhPaocrfgoc8J9hmbkashPxekaPcefhPkaHce4cbaHceG9R7amfgmhskdndnaCcsSmbaPhoxekaPcefhoaP8SbbgrcFeGhHdnarcu9kmbaPcvfhOaHcFbGhHcrhrdninao8SbbgPcFbGartaHVhHaPcu9kmeaocefhoarcrfgrc8J9hmbkaOhoxekaocefhokaHce4cbaHceG9R7amfgmhOkdndnadcd9hmbabaDcetfgraA87ebarcdfas87ebarclfaO87ebxekabaDcdtfgraABdbarclfasBdbarcwfaOBdbkavc;abfalcitfgrasBdbaraABdlavaicdtfaABdbavc;abfalcefcsGcitfgraOBdbarasBdlavaicefgicsGcdtfasBdbavc;abfalcdfcsGcitfgraABdbaraOBdlavaiazcz6aXcsSVfgicsGcdtfaOBdbaiaCTaCcsSVfhialcifhlkawcefhwalcsGhlaicsGhiaDcifgDae6mbkkcbc99aoaqSEhokavc;aef8Kjjjjbaok:llevu8Jjjjjbcz9Rhvc9:hodnaecvfal0mbcuhoaiRbbc;:eGc;qe9hmbav9cb83iwaicefhraialfc98fhwdnaeTmbdnadcdSmbcbhDindnaraw6mbc9:skarcefhoar8SbbglcFeGhidndnalcu9mmbaohrxekarcvfhraicFbGhicrhldninao8SbbgdcFbGaltaiVhiadcu9kmeaocefhoalcrfglc8J9hmbxdkkaocefhrkabaDcdtfaicd4cbaice4ceG9R7avcwfaiceGcdtVgoydbfglBdbaoalBdbaDcefgDae9hmbxdkkcbhDindnaraw6mbc9:skarcefhoar8SbbglcFeGhidndnalcu9mmbaohrxekarcvfhraicFbGhicrhldninao8SbbgdcFbGaltaiVhiadcu9kmeaocefhoalcrfglc8J9hmbxdkkaocefhrkabaDcetfaicd4cbaice4ceG9R7avcwfaiceGcdtVgoydbfgl87ebaoalBdbaDcefgDae9hmbkkcbc99arawSEhokaok:Lvoeue99dud99eud99dndnadcl9hmbaeTmeindndnabcdfgd8Sbb:Yab8Sbbgi:Ygl:l:tabcefgv8Sbbgo:Ygr:l:tgwJbb;:9cawawNJbbbbawawJbbbb9GgDEgq:mgkaqaicb9iEalMgwawNakaqaocb9iEarMgqaqNMM:r:vglNJbbbZJbbb:;aDEMgr:lJbbb9p9DTmbar:Ohixekcjjjj94hikadai86bbdndnaqalNJbbbZJbbb:;aqJbbbb9GEMgq:lJbbb9p9DTmbaq:Ohdxekcjjjj94h
dkavad86bbdndnawalNJbbbZJbbb:;awJbbbb9GEMgw:lJbbb9p9DTmbaw:Ohdxekcjjjj94hdkabad86bbabclfhbaecufgembxdkkaeTmbindndnabclfgd8Ueb:Yab8Uebgi:Ygl:l:tabcdfgv8Uebgo:Ygr:l:tgwJb;:FSawawNJbbbbawawJbbbb9GgDEgq:mgkaqaicb9iEalMgwawNakaqaocb9iEarMgqaqNMM:r:vglNJbbbZJbbb:;aDEMgr:lJbbb9p9DTmbar:Ohixekcjjjj94hikadai87ebdndnaqalNJbbbZJbbb:;aqJbbbb9GEMgq:lJbbb9p9DTmbaq:Ohdxekcjjjj94hdkavad87ebdndnawalNJbbbZJbbb:;awJbbbb9GEMgw:lJbbb9p9DTmbaw:Ohdxekcjjjj94hdkabad87ebabcwfhbaecufgembkkk;siliui99iue99dnaeTmbcbhiabhlindndnJ;Zl81Zalcof8UebgvciV:Y:vgoal8Ueb:YNgrJb;:FSNJbbbZJbbb:;arJbbbb9GEMgw:lJbbb9p9DTmbaw:OhDxekcjjjj94hDkalclf8Uebhqalcdf8UebhkabavcefciGaiVcetfaD87ebdndnaoak:YNgwJb;:FSNJbbbZJbbb:;awJbbbb9GEMgx:lJbbb9p9DTmbax:Ohkxekcjjjj94hkkabavcdfciGaiVcetfak87ebdndnaoaq:YNgoJb;:FSNJbbbZJbbb:;aoJbbbb9GEMgx:lJbbb9p9DTmbax:Ohqxekcjjjj94hqkabavcufciGaiVcetfaq87ebdndnJbbjZararN:tawawN:taoaoN:tgrJbbbbarJbbbb9GE:rJb;:FSNJbbbZMgr:lJbbb9p9DTmbar:Ohqxekcjjjj94hqkabavciGaiVcetfaq87ebalcwfhlaiclfhiaecufgembkkk9mbdnadcd4ae2geTmbinababydbgdcwtcw91:Yadce91cjjj;8ifcjjj98G::NUdbabclfhbaecufgembkkk9teiucbcbydj1jjbgeabcifc98GfgbBdj1jjbdndnabZbcztgd9nmbcuhiabad9RcFFifcz4nbcuSmekaehikaik;LeeeudndnaeabVciGTmbabhixekdndnadcz9pmbabhixekabhiinaiaeydbBdbaiclfaeclfydbBdbaicwfaecwfydbBdbaicxfaecxfydbBdbaiczfhiaeczfheadc9Wfgdcs0mbkkadcl6mbinaiaeydbBdbaeclfheaiclfhiadc98fgdci0mbkkdnadTmbinaiaeRbb86bbaicefhiaecefheadcufgdmbkkabk;aeedudndnabciGTmbabhixekaecFeGc:b:c:ew2hldndnadcz9pmbabhixekabhiinaialBdbaicxfalBdbaicwfalBdbaiclfalBdbaiczfhiadc9Wfgdcs0mbkkadcl6mbinaialBdbaiclfhiadc98fgdci0mbkkdnadTmbinaiae86bbaicefhiadcufgdmbkkabkkkebcjwklz9Kbb"; + var wasm_simd = 
"b9H79TebbbeKl9Gbb9Gvuuuuueu9Giuuub9Geueuikqbbebeedddilve9Weeeviebeoweuec:q;Aekr;leDo9TW9T9VV95dbH9F9F939H79T9F9J9H229F9Jt9VV7bb8A9TW79O9V9Wt9F9KW9J9V9KW9wWVtW949c919M9MWVbdY9TW79O9V9Wt9F9KW9J9V9KW69U9KW949c919M9MWVblE9TW79O9V9Wt9F9KW9J9V9KW69U9KW949tWG91W9U9JWbvL9TW79O9V9Wt9F9KW9J9V9KWS9P2tWV9p9JtboK9TW79O9V9Wt9F9KW9J9V9KWS9P2tWV9r919HtbrL9TW79O9V9Wt9F9KW9J9V9KWS9P2tWVT949Wbwl79IV9RbDq;t9tqlbzik9:evu8Jjjjjbcz9Rhbcbheincbhdcbhiinabcwfadfaicjuaead4ceGglE86bbaialfhiadcefgdcw9hmbkaec:q:yjjbfai86bbaecitc:q1jjbfab8Piw83ibaecefgecjd9hmbkk;h8JlHud97euo978Jjjjjbcj;kb9Rgv8Kjjjjbc9:hodnadcefal0mbcuhoaiRbbc:Ge9hmbavaialfgrad9Rad;8qbbcj;abad9UhoaicefhldnadTmbaoc;WFbGgocjdaocjd6EhwcbhDinaDae9pmeawaeaD9RaDawfae6Egqcsfgoc9WGgkci2hxakcethmaocl4cifcd4hPabaDad2fhscbhzdnincehHalhOcbhAdninaraO9RaP6miavcj;cbfaAak2fhCaOaPfhlcbhidnakc;ab6mbaral9Rc;Gb6mbcbhoinaCaofhidndndndndnaOaoco4fRbbgXciGPlbedibkaipxbbbbbbbbbbbbbbbbpklbxikaialpbblalpbbbgQclp:meaQpmbzeHdOiAlCvXoQrLgQcdp:meaQpmbzeHdOiAlCvXoQrLpxiiiiiiiiiiiiiiiip9ogLpxiiiiiiiiiiiiiiiip8JgQp5b9cjF;8;4;W;G;ab9:9cU1:NgKcitc:q1jjbfpbibaKc:q:yjjbfpbbbgYaYpmbbbbbbbbbbbbbbbbaQp5e9cjF;8;4;W;G;ab9:9cU1:NgKcitc:q1jjbfpbibp9UpmbedilvorzHOACXQLpPaLaQp9spklbalclfaYpQbfaKc:q:yjjbfRbbfhlxdkaialpbbwalpbbbgQclp:meaQpmbzeHdOiAlCvXoQrLpxssssssssssssssssp9ogLpxssssssssssssssssp8JgQp5b9cjF;8;4;W;G;ab9:9cU1:NgKcitc:q1jjbfpbibaKc:q:yjjbfpbbbgYaYpmbbbbbbbbbbbbbbbbaQp5e9cjF;8;4;W;G;ab9:9cU1:NgKcitc:q1jjbfpbibp9UpmbedilvorzHOACXQLpPaLaQp9spklbalcwfaYpQbfaKc:q:yjjbfRbbfhlxekaialpbbbpklbalczfhlkdndndndndnaXcd4ciGPlbedibkaipxbbbbbbbbbbbbbbbbpklzxikaialpbblalpbbbgQclp:meaQpmbzeHdOiAlCvXoQrLgQcdp:meaQpmbzeHdOiAlCvXoQrLpxiiiiiiiiiiiiiiiip9ogLpxiiiiiiiiiiiiiiiip8JgQp5b9cjF;8;4;W;G;ab9:9cU1:NgKcitc:q1jjbfpbibaKc:q:yjjbfpbbbgYaYpmbbbbbbbbbbbbbbbbaQp5e9cjF;8;4;W;G;ab9:9cU1:NgKcitc:q1jjbfpbibp9UpmbedilvorzHOACXQLpPaLaQp9spklzalclfaYpQbfaKc:q:yjjbfRbbfhlxdkaialpbbwalpbbbgQclp:meaQpmbzeHdOiAlCvXoQrLpxssssssssssssssssp9ogLpxssssssssssssssssp8JgQp5b9cjF;8;4;W;G;ab9:9cU1:NgKcitc:
q1jjbfpbibaKc:q:yjjbfpbbbgYaYpmbbbbbbbbbbbbbbbbaQp5e9cjF;8;4;W;G;ab9:9cU1:NgKcitc:q1jjbfpbibp9UpmbedilvorzHOACXQLpPaLaQp9spklzalcwfaYpQbfaKc:q:yjjbfRbbfhlxekaialpbbbpklzalczfhlkdndndndndnaXcl4ciGPlbedibkaipxbbbbbbbbbbbbbbbbpklaxikaialpbblalpbbbgQclp:meaQpmbzeHdOiAlCvXoQrLgQcdp:meaQpmbzeHdOiAlCvXoQrLpxiiiiiiiiiiiiiiiip9ogLpxiiiiiiiiiiiiiiiip8JgQp5b9cjF;8;4;W;G;ab9:9cU1:NgKcitc:q1jjbfpbibaKc:q:yjjbfpbbbgYaYpmbbbbbbbbbbbbbbbbaQp5e9cjF;8;4;W;G;ab9:9cU1:NgKcitc:q1jjbfpbibp9UpmbedilvorzHOACXQLpPaLaQp9spklaalclfaYpQbfaKc:q:yjjbfRbbfhlxdkaialpbbwalpbbbgQclp:meaQpmbzeHdOiAlCvXoQrLpxssssssssssssssssp9ogLpxssssssssssssssssp8JgQp5b9cjF;8;4;W;G;ab9:9cU1:NgKcitc:q1jjbfpbibaKc:q:yjjbfpbbbgYaYpmbbbbbbbbbbbbbbbbaQp5e9cjF;8;4;W;G;ab9:9cU1:NgKcitc:q1jjbfpbibp9UpmbedilvorzHOACXQLpPaLaQp9spklaalcwfaYpQbfaKc:q:yjjbfRbbfhlxekaialpbbbpklaalczfhlkdndndndndnaXco4Plbedibkaipxbbbbbbbbbbbbbbbbpkl8WxikaialpbblalpbbbgQclp:meaQpmbzeHdOiAlCvXoQrLgQcdp:meaQpmbzeHdOiAlCvXoQrLpxiiiiiiiiiiiiiiiip9ogLpxiiiiiiiiiiiiiiiip8JgQp5b9cjF;8;4;W;G;ab9:9cU1:NgXcitc:q1jjbfpbibaXc:q:yjjbfpbbbgYaYpmbbbbbbbbbbbbbbbbaQp5e9cjF;8;4;W;G;ab9:9cU1:NgXcitc:q1jjbfpbibp9UpmbedilvorzHOACXQLpPaLaQp9spkl8WalclfaYpQbfaXc:q:yjjbfRbbfhlxdkaialpbbwalpbbbgQclp:meaQpmbzeHdOiAlCvXoQrLpxssssssssssssssssp9ogLpxssssssssssssssssp8JgQp5b9cjF;8;4;W;G;ab9:9cU1:NgXcitc:q1jjbfpbibaXc:q:yjjbfpbbbgYaYpmbbbbbbbbbbbbbbbbaQp5e9cjF;8;4;W;G;ab9:9cU1:NgXcitc:q1jjbfpbibp9UpmbedilvorzHOACXQLpPaLaQp9spkl8WalcwfaYpQbfaXc:q:yjjbfRbbfhlxekaialpbbbpkl8Walczfhlkaoc;abfhiaocjefak0meaihoaral9Rc;Fb0mbkkdndnaiak9pmbaici4hoinaral9RcK6mdaCaifhXdndndndndnaOaico4fRbbaocoG4ciGPlbedibkaXpxbbbbbbbbbbbbbbbbpklbxikaXalpbblalpbbbgQclp:meaQpmbzeHdOiAlCvXoQrLgQcdp:meaQpmbzeHdOiAlCvXoQrLpxiiiiiiiiiiiiiiiip9ogLpxiiiiiiiiiiiiiiiip8JgQp5b9cjF;8;4;W;G;ab9:9cU1:NgKcitc:q1jjbfpbibaKc:q:yjjbfpbbbgYaYpmbbbbbbbbbbbbbbbbaQp5e9cjF;8;4;W;G;ab9:9cU1:NgKcitc:q1jjbfpbibp9UpmbedilvorzHOACXQLpPaLaQp9spklbalclfaYpQbfaKc:q:yjjbfRbbfhlxdkaXalpbbwalpbbbgQclp:meaQpmbzeHdOiAlCvXoQrLpxsssssssssssssss
sp9ogLpxssssssssssssssssp8JgQp5b9cjF;8;4;W;G;ab9:9cU1:NgKcitc:q1jjbfpbibaKc:q:yjjbfpbbbgYaYpmbbbbbbbbbbbbbbbbaQp5e9cjF;8;4;W;G;ab9:9cU1:NgKcitc:q1jjbfpbibp9UpmbedilvorzHOACXQLpPaLaQp9spklbalcwfaYpQbfaKc:q:yjjbfRbbfhlxekaXalpbbbpklbalczfhlkaocdfhoaiczfgiak6mbkkalTmbaAci6hHalhOaAcefgohAaoclSmdxekkcbhlaHceGmdkdnakTmbavcjdfazfhiavazfpbdbhYcbhXinaiavcj;cbfaXfgopblbgLcep9TaLpxeeeeeeeeeeeeeeeegQp9op9Hp9rgLaoakfpblbg8Acep9Ta8AaQp9op9Hp9rg8ApmbzeHdOiAlCvXoQrLgEaoamfpblbg3cep9Ta3aQp9op9Hp9rg3aoaxfpblbg5cep9Ta5aQp9op9Hp9rg5pmbzeHdOiAlCvXoQrLg8EpmbezHdiOAlvCXorQLgQaQpmbedibedibedibediaYp9UgYp9AdbbaiadfgoaYaQaQpmlvorlvorlvorlvorp9UgYp9AdbbaoadfgoaYaQaQpmwDqkwDqkwDqkwDqkp9UgYp9AdbbaoadfgoaYaQaQpmxmPsxmPsxmPsxmPsp9UgYp9AdbbaoadfgoaYaEa8EpmwDKYqk8AExm35Ps8E8FgQaQpmbedibedibedibedip9UgYp9AdbbaoadfgoaYaQaQpmlvorlvorlvorlvorp9UgYp9AdbbaoadfgoaYaQaQpmwDqkwDqkwDqkwDqkp9UgYp9AdbbaoadfgoaYaQaQpmxmPsxmPsxmPsxmPsp9UgYp9AdbbaoadfgoaYaLa8ApmwKDYq8AkEx3m5P8Es8FgLa3a5pmwKDYq8AkEx3m5P8Es8Fg8ApmbezHdiOAlvCXorQLgQaQpmbedibedibedibedip9UgYp9AdbbaoadfgoaYaQaQpmlvorlvorlvorlvorp9UgYp9AdbbaoadfgoaYaQaQpmwDqkwDqkwDqkwDqkp9UgYp9AdbbaoadfgoaYaQaQpmxmPsxmPsxmPsxmPsp9UgYp9AdbbaoadfgoaYaLa8ApmwDKYqk8AExm35Ps8E8FgQaQpmbedibedibedibedip9UgYp9AdbbaoadfgoaYaQaQpmlvorlvorlvorlvorp9UgYp9AdbbaoadfgoaYaQaQpmwDqkwDqkwDqkwDqkp9UgYp9AdbbaoadfgoaYaQaQpmxmPsxmPsxmPsxmPsp9UgYp9AdbbaoadfhiaXczfgXak6mbkkazclfgzad6mbkasavcjdfaqad2;8qbbavavcjdfaqcufad2fad;8qbbaqaDfhDc9:hoalmexikkc9:hoxekcbc99aral9Radcaadca0ESEhokavcj;kbf8Kjjjjbaokwbz:bjjjbk;uzeHu8Jjjjjbc;ae9Rgv8Kjjjjbc9:hodnaeci9UgrcHfal0mbcuhoaiRbbgwc;WeGc;Ge9hmbawcsGgDce0mbavc;abfcFecje;8kbavcUf9cu83ibavc8Wf9cu83ibavcyf9cu83ibavcaf9cu83ibavcKf9cu83ibavczf9cu83ibav9cu83iwav9cu83ibaialfc9WfhqaicefgwarfhodnaeTmbcmcsaDceSEhkcbhxcbhmcbhDcbhicbhlindnaoaq9nmbc9:hoxikdndnawRbbgrc;Ve0mbavc;abfalarcl4cu7fcsGcitfgPydlhsaPydbhzdnarcsGgPak9pmbavaiarcu7fcsGcdtfydbaxaPEhraPThPdndnadcd9hmbabaDcetfgHaz87ebaHcdfas87ebaHclfar87ebxekabaDcdtfgHazBdbaHclfasBdbaHcwfarBdbkaxaPfhxavc;abfalcitfgHar
BdbaHasBdlavaicdtfarBdbavc;abfalcefcsGglcitfgHazBdbaHarBdlaiaPfhialcefhlxdkdndnaPcsSmbamaPfaPc987fcefhmxekaocefhrao8SbbgPcFeGhHdndnaPcu9mmbarhoxekaocvfhoaHcFbGhHcrhPdninar8SbbgOcFbGaPtaHVhHaOcu9kmearcefhraPcrfgPc8J9hmbxdkkarcefhokaHce4cbaHceG9R7amfhmkdndnadcd9hmbabaDcetfgraz87ebarcdfas87ebarclfam87ebxekabaDcdtfgrazBdbarclfasBdbarcwfamBdbkavc;abfalcitfgramBdbarasBdlavaicdtfamBdbavc;abfalcefcsGglcitfgrazBdbaramBdlaicefhialcefhlxekdnarcpe0mbaxcefgOavaiaqarcsGfRbbgPcl49RcsGcdtfydbaPcz6gHEhravaiaP9RcsGcdtfydbaOaHfgsaPcsGgOEhPaOThOdndnadcd9hmbabaDcetfgzax87ebazcdfar87ebazclfaP87ebxekabaDcdtfgzaxBdbazclfarBdbazcwfaPBdbkavaicdtfaxBdbavc;abfalcitfgzarBdbazaxBdlavaicefgicsGcdtfarBdbavc;abfalcefcsGcitfgzaPBdbazarBdlavaiaHfcsGgicdtfaPBdbavc;abfalcdfcsGglcitfgraxBdbaraPBdlalcefhlaiaOfhiasaOfhxxekaxcbaoRbbgzEgAarc;:eSgrfhsazcsGhCazcl4hXdndnazcs0mbascefhOxekashOavaiaX9RcsGcdtfydbhskdndnaCmbaOcefhxxekaOhxavaiaz9RcsGcdtfydbhOkdndnarTmbaocefhrxekaocdfhrao8SbegHcFeGhPdnaHcu9kmbaocofhAaPcFbGhPcrhodninar8SbbgHcFbGaotaPVhPaHcu9kmearcefhraocrfgoc8J9hmbkaAhrxekarcefhrkaPce4cbaPceG9R7amfgmhAkdndnaXcsSmbarhPxekarcefhPar8SbbgocFeGhHdnaocu9kmbarcvfhsaHcFbGhHcrhodninaP8SbbgrcFbGaotaHVhHarcu9kmeaPcefhPaocrfgoc8J9hmbkashPxekaPcefhPkaHce4cbaHceG9R7amfgmhskdndnaCcsSmbaPhoxekaPcefhoaP8SbbgrcFeGhHdnarcu9kmbaPcvfhOaHcFbGhHcrhrdninao8SbbgPcFbGartaHVhHaPcu9kmeaocefhoarcrfgrc8J9hmbkaOhoxekaocefhokaHce4cbaHceG9R7amfgmhOkdndnadcd9hmbabaDcetfgraA87ebarcdfas87ebarclfaO87ebxekabaDcdtfgraABdbarclfasBdbarcwfaOBdbkavc;abfalcitfgrasBdbaraABdlavaicdtfaABdbavc;abfalcefcsGcitfgraOBdbarasBdlavaicefgicsGcdtfasBdbavc;abfalcdfcsGcitfgraABdbaraOBdlavaiazcz6aXcsSVfgicsGcdtfaOBdbaiaCTaCcsSVfhialcifhlkawcefhwalcsGhlaicsGhiaDcifgDae6mbkkcbc99aoaqSEhokavc;aef8Kjjjjbaok:llevu8Jjjjjbcz9Rhvc9:hodnaecvfal0mbcuhoaiRbbc;:eGc;qe9hmbav9cb83iwaicefhraialfc98fhwdnaeTmbdnadcdSmbcbhDindnaraw6mbc9:skarcefhoar8SbbglcFeGhidndnalcu9mmbaohrxekarcvfhraicFbGhicrhldninao8SbbgdcFbGaltaiVhiadcu9kmeaocefhoalcrfglc8J9hmbxdkkaocefhrkabaDcdtfaicd4cbai
ce4ceG9R7avcwfaiceGcdtVgoydbfglBdbaoalBdbaDcefgDae9hmbxdkkcbhDindnaraw6mbc9:skarcefhoar8SbbglcFeGhidndnalcu9mmbaohrxekarcvfhraicFbGhicrhldninao8SbbgdcFbGaltaiVhiadcu9kmeaocefhoalcrfglc8J9hmbxdkkaocefhrkabaDcetfaicd4cbaice4ceG9R7avcwfaiceGcdtVgoydbfgl87ebaoalBdbaDcefgDae9hmbkkcbc99arawSEhokaok:EPliuo97eue978Jjjjjbca9Rhidndnadcl9hmbdnaec98GglTmbcbhvabhdinadadpbbbgocKp:RecKp:Sep;6egraocwp:RecKp:Sep;6earp;Geaoczp:RecKp:Sep;6egwp;Gep;Kep;LegDpxbbbbbbbbbbbbbbbbp:2egqarpxbbbjbbbjbbbjbbbjgkp9op9rp;Kegrpxbb;:9cbb;:9cbb;:9cbb;:9cararp;MeaDaDp;Meawaqawakp9op9rp;Kegrarp;Mep;Kep;Kep;Jep;Negwp;Mepxbbn0bbn0bbn0bbn0gqp;KepxFbbbFbbbFbbbFbbbp9oaopxbbbFbbbFbbbFbbbFp9op9qarawp;Meaqp;Kecwp:RepxbFbbbFbbbFbbbFbbp9op9qaDawp;Meaqp;Keczp:RepxbbFbbbFbbbFbbbFbp9op9qpkbbadczfhdavclfgval6mbkkalae9pmeaiaeciGgvcdtgdVcbczad9R;8kbaiabalcdtfglad;8qbbdnavTmbaiaipblbgocKp:RecKp:Sep;6egraocwp:RecKp:Sep;6earp;Geaoczp:RecKp:Sep;6egwp;Gep;Kep;LegDpxbbbbbbbbbbbbbbbbp:2egqarpxbbbjbbbjbbbjbbbjgkp9op9rp;Kegrpxbb;:9cbb;:9cbb;:9cbb;:9cararp;MeaDaDp;Meawaqawakp9op9rp;Kegrarp;Mep;Kep;Kep;Jep;Negwp;Mepxbbn0bbn0bbn0bbn0gqp;KepxFbbbFbbbFbbbFbbbp9oaopxbbbFbbbFbbbFbbbFp9op9qarawp;Meaqp;Kecwp:RepxbFbbbFbbbFbbbFbbp9op9qaDawp;Meaqp;Keczp:RepxbbFbbbFbbbFbbbFbp9op9qpklbkalaiad;8qbbskdnaec98GgxTmbcbhvabhdinadczfglalpbbbgopxbbbbbbFFbbbbbbFFgkp9oadpbbbgDaopmlvorxmPsCXQL358E8FpxFubbFubbFubbFubbp9op;6eaDaopmbediwDqkzHOAKY8AEgoczp:Sep;6egrp;Geaoczp:Reczp:Sep;6egwp;Gep;Kep;Legopxb;:FSb;:FSb;:FSb;:FSawaopxbbbbbbbbbbbbbbbbp:2egqawpxbbbjbbbjbbbjbbbjgmp9op9rp;Kegwawp;Meaoaop;Mearaqaramp9op9rp;Kegoaop;Mep;Kep;Kep;Jep;Negrp;Mepxbbn0bbn0bbn0bbn0gqp;Keczp:Reawarp;Meaqp;KepxFFbbFFbbFFbbFFbbp9op9qgwaoarp;Meaqp;KepxFFbbFFbbFFbbFFbbp9ogopmwDKYqk8AExm35Ps8E8Fp9qpkbbadaDakp9oawaopmbezHdiOAlvCXorQLp9qpkbbadcafhdavclfgvax6mbkkaxae9pmbaiaeciGgvcitgdfcbcaad9R;8kbaiabaxcitfglad;8qbbdnavTmbaiaipblzgopxbbbbbbFFbbbbbbFFgkp9oaipblbgDaopmlvorxmPsCXQL358E8FpxFubbFubbFubbFubbp9op;6eaDaopmbediwDqkzHOAKY8AEgoczp:Sep;6egrp;Geaoczp:Reczp:Sep;6egwp;Gep;Kep;Lego
pxb;:FSb;:FSb;:FSb;:FSawaopxbbbbbbbbbbbbbbbbp:2egqawpxbbbjbbbjbbbjbbbjgmp9op9rp;Kegwawp;Meaoaop;Mearaqaramp9op9rp;Kegoaop;Mep;Kep;Kep;Jep;Negrp;Mepxbbn0bbn0bbn0bbn0gqp;Keczp:Reawarp;Meaqp;KepxFFbbFFbbFFbbFFbbp9op9qgwaoarp;Meaqp;KepxFFbbFFbbFFbbFFbbp9ogopmwDKYqk8AExm35Ps8E8Fp9qpklzaiaDakp9oawaopmbezHdiOAlvCXorQLp9qpklbkalaiad;8qbbkk;4wllue97euv978Jjjjjbc8W9Rhidnaec98GglTmbcbhvabhoinaiaopbbbgraoczfgwpbbbgDpmlvorxmPsCXQL358E8Fgqczp:Segkclp:RepklbaopxbbjZbbjZbbjZbbjZpx;Zl81Z;Zl81Z;Zl81Z;Zl81Zakpxibbbibbbibbbibbbp9qp;6ep;NegkaraDpmbediwDqkzHOAKY8AEgrczp:Reczp:Sep;6ep;MegDaDp;Meakarczp:Sep;6ep;Megxaxp;Meakaqczp:Reczp:Sep;6ep;Megqaqp;Mep;Kep;Kep;Lepxbbbbbbbbbbbbbbbbp:4ep;Jepxb;:FSb;:FSb;:FSb;:FSgkp;Mepxbbn0bbn0bbn0bbn0grp;KepxFFbbFFbbFFbbFFbbgmp9oaxakp;Mearp;Keczp:Rep9qgxaqakp;Mearp;Keczp:ReaDakp;Mearp;Keamp9op9qgkpmbezHdiOAlvCXorQLgrp5baipblbpEb:T:j83ibaocwfarp5eaipblbpEe:T:j83ibawaxakpmwDKYqk8AExm35Ps8E8Fgkp5baipblbpEd:T:j83ibaocKfakp5eaipblbpEi:T:j83ibaocafhoavclfgval6mbkkdnalae9pmbaiaeciGgvcitgofcbcaao9R;8kbaiabalcitfgwao;8qbbdnavTmbaiaipblbgraipblzgDpmlvorxmPsCXQL358E8Fgqczp:Segkclp:RepklaaipxbbjZbbjZbbjZbbjZpx;Zl81Z;Zl81Z;Zl81Z;Zl81Zakpxibbbibbbibbbibbbp9qp;6ep;NegkaraDpmbediwDqkzHOAKY8AEgrczp:Reczp:Sep;6ep;MegDaDp;Meakarczp:Sep;6ep;Megxaxp;Meakaqczp:Reczp:Sep;6ep;Megqaqp;Mep;Kep;Kep;Lepxbbbbbbbbbbbbbbbbp:4ep;Jepxb;:FSb;:FSb;:FSb;:FSgkp;Mepxbbn0bbn0bbn0bbn0grp;KepxFFbbFFbbFFbbFFbbgmp9oaxakp;Mearp;Keczp:Rep9qgxaqakp;Mearp;Keczp:ReaDakp;Mearp;Keamp9op9qgkpmbezHdiOAlvCXorQLgrp5baipblapEb:T:j83ibaiarp5eaipblapEe:T:j83iwaiaxakpmwDKYqk8AExm35Ps8E8Fgkp5baipblapEd:T:j83izaiakp5eaipblapEi:T:j83iKkawaiao;8qbbkk:Pddiue978Jjjjjbc;ab9Rhidnadcd4ae2glc98GgvTmbcbhdabheinaeaepbbbgocwp:Recwp:Sep;6eaocep:SepxbbjZbbjZbbjZbbjZp:UepxbbjFbbjFbbjFbbjFp9op;Mepkbbaeczfheadclfgdav6mbkkdnaval9pmbaialciGgdcdtgeVcbc;abae9R;8kbaiabavcdtfgvae;8qbbdnadTmbaiaipblbgocwp:Recwp:Sep;6eaocep:SepxbbjZbbjZbbjZbbjZp:UepxbbjFbbjFbbjFbbjFp9op;Mepklbkavaiae;8qbbkk9teiucbcbydj1jjbgeabcifc98GfgbBdj1jjbdndnabZbczt
gd9nmbcuhiabad9RcFFifcz4nbcuSmekaehikaikkkebcjwklz9Tbb"; + + var detector = new Uint8Array([0,97,115,109,1,0,0,0,1,4,1,96,0,0,3,3,2,0,0,5,3,1,0,1,12,1,0,10,22,2,12,0,65,0,65,0,65,0,252,10,0,0,11,7,0,65,0,253,15,26,11]); + var wasmpack = new Uint8Array([32,0,65,2,1,106,34,33,3,128,11,4,13,64,6,253,10,7,15,116,127,5,8,12,40,16,19,54,20,9,27,255,113,17,42,67,24,23,146,148,18,14,22,45,70,69,56,114,101,21,25,63,75,136,108,28,118,29,73,115]); + + if (typeof WebAssembly !== 'object') { + return { + supported: false, + }; + } + + var wasm = WebAssembly.validate(detector) ? wasm_simd : wasm_base; + + var instance; + + var ready = + WebAssembly.instantiate(unpack(wasm), {}) + .then(function(result) { + instance = result.instance; + instance.exports.__wasm_call_ctors(); + }); + + function unpack(data) { + var result = new Uint8Array(data.length); + for (var i = 0; i < data.length; ++i) { + var ch = data.charCodeAt(i); + result[i] = ch > 96 ? ch - 97 : ch > 64 ? ch - 39 : ch + 4; + } + var write = 0; + for (var i = 0; i < data.length; ++i) { + result[write++] = (result[i] < 60) ? 
wasmpack[result[i]] : (result[i] - 60) * 64 + result[++i]; + } + return result.buffer.slice(0, write); + } + + function decode(fun, target, count, size, source, filter) { + var sbrk = instance.exports.sbrk; + var count4 = (count + 3) & ~3; + var tp = sbrk(count4 * size); + var sp = sbrk(source.length); + var heap = new Uint8Array(instance.exports.memory.buffer); + heap.set(source, sp); + var res = fun(tp, count, size, sp, source.length); + if (res == 0 && filter) { + filter(tp, count4, size); + } + target.set(heap.subarray(tp, tp + count * size)); + sbrk(tp - sbrk(0)); + if (res != 0) { + throw new Error("Malformed buffer data: " + res); + } + } + + var filters = { + NONE: "", + OCTAHEDRAL: "meshopt_decodeFilterOct", + QUATERNION: "meshopt_decodeFilterQuat", + EXPONENTIAL: "meshopt_decodeFilterExp", + }; + + var decoders = { + ATTRIBUTES: "meshopt_decodeVertexBuffer", + TRIANGLES: "meshopt_decodeIndexBuffer", + INDICES: "meshopt_decodeIndexSequence", + }; + + var workers = []; + var requestId = 0; + + function createWorker(url) { + var worker = { + object: new Worker(url), + pending: 0, + requests: {} + }; + + worker.object.onmessage = function(event) { + var data = event.data; + + worker.pending -= data.count; + worker.requests[data.id][data.action](data.value); + + delete worker.requests[data.id]; + }; + + return worker; + } + + function initWorkers(count) { + var source = + "var instance; var ready = WebAssembly.instantiate(new Uint8Array([" + new Uint8Array(unpack(wasm)) + "]), {})" + + ".then(function(result) { instance = result.instance; instance.exports.__wasm_call_ctors(); });" + + "self.onmessage = workerProcess;" + + `function decode(fun, target, count, size, source, filter) { + var sbrk = instance.exports.sbrk; + var count4 = (count + 3) & ~3; + var tp = sbrk(count4 * size); + var sp = sbrk(source.length); + var heap = new Uint8Array(instance.exports.memory.buffer); + heap.set(source, sp); + var res = fun(tp, count, size, sp, source.length); + if (res == 
0 && filter) { + filter(tp, count4, size); + } + target.set(heap.subarray(tp, tp + count * size)); + sbrk(tp - sbrk(0)); + if (res != 0) { + throw new Error("Malformed buffer data: " + res); + } + }` + + `function workerProcess(event) { + ready.then(function() { + var data = event.data; + try { + var target = new Uint8Array(data.count * data.size); + decode(instance.exports[data.mode], target, data.count, data.size, data.source, instance.exports[data.filter]); + self.postMessage({ id: data.id, count: data.count, action: "resolve", value: target }, [ target.buffer ]); + } catch (error) { + self.postMessage({ id: data.id, count: data.count, action: "reject", value: error }); + } + }); + }`; + + var blob = new Blob([source], {type: 'text/javascript'}); + var url = URL.createObjectURL(blob); + + for (var i = 0; i < count; ++i) { + workers[i] = createWorker(url); + } + + URL.revokeObjectURL(url); + } + + function decodeWorker(count, size, source, mode, filter) { + var worker = workers[0]; + + for (var i = 1; i < workers.length; ++i) { + if (workers[i].pending < worker.pending) { + worker = workers[i]; + } + } + + return new Promise(function (resolve, reject) { + var data = new Uint8Array(source); + var id = requestId++; + + worker.pending += count; + worker.requests[id] = { resolve: resolve, reject: reject }; + worker.object.postMessage({ id: id, count: count, size: size, source: data, mode: mode, filter: filter }, [ data.buffer ]); + }); + } + + function workerProcess(event) { + ready.then(function() { + var data = event.data; + try { + var target = new Uint8Array(data.count * data.size); + decode(instance.exports[data.mode], target, data.count, data.size, data.source, instance.exports[data.filter]); + self.postMessage({ id: data.id, count: data.count, action: "resolve", value: target }, [ target.buffer ]); + } catch (error) { + self.postMessage({ id: data.id, count: data.count, action: "reject", value: error }); + } + }); + } + + return { + ready: ready, + supported: 
true, + useWorkers: function(count) { + initWorkers(count); + }, + decodeVertexBuffer: function(target, count, size, source, filter) { + decode(instance.exports.meshopt_decodeVertexBuffer, target, count, size, source, instance.exports[filters[filter]]); + }, + decodeIndexBuffer: function(target, count, size, source) { + decode(instance.exports.meshopt_decodeIndexBuffer, target, count, size, source); + }, + decodeIndexSequence: function(target, count, size, source) { + decode(instance.exports.meshopt_decodeIndexSequence, target, count, size, source); + }, + decodeGltfBuffer: function(target, count, size, source, mode, filter) { + decode(instance.exports[decoders[mode]], target, count, size, source, instance.exports[filters[filter]]); + }, + decodeGltfBufferAsync: function(count, size, source, mode, filter) { + if (workers.length > 0) { + return decodeWorker(count, size, source, decoders[mode], filters[filter]); + } + + return ready.then(function() { + var target = new Uint8Array(count * size); + decode(instance.exports[decoders[mode]], target, count, size, source, instance.exports[filters[filter]]); + return target; + }); + } + }; +})(); + +export { MeshoptDecoder }; \ No newline at end of file diff --git a/packages/engine/src/assets/state/AssetLoaderState.ts b/packages/engine/src/assets/state/AssetLoaderState.ts index 36e1459b2b..936104a221 100644 --- a/packages/engine/src/assets/state/AssetLoaderState.ts +++ b/packages/engine/src/assets/state/AssetLoaderState.ts @@ -25,10 +25,12 @@ Ethereal Engine. All Rights Reserved. import { defineState } from '@etherealengine/hyperflux' import { createGLTFLoader } from '../../assets/functions/createGLTFLoader' +import { CORTOLoader } from '../loaders/corto/CORTOLoader' export const AssetLoaderState = defineState({ name: 'AssetLoaderState', initial: () => ({ - gltfLoader: createGLTFLoader() + gltfLoader: createGLTFLoader(), + cortoLoader: null! 
as CORTOLoader }) }) diff --git a/packages/engine/src/audio/components/PositionalAudioComponent.ts b/packages/engine/src/audio/components/PositionalAudioComponent.ts index d10ff41777..5b72aaebb1 100755 --- a/packages/engine/src/audio/components/PositionalAudioComponent.ts +++ b/packages/engine/src/audio/components/PositionalAudioComponent.ts @@ -27,6 +27,7 @@ import { useEffect } from 'react' import { defineComponent, + hasComponent, setComponent, useComponent, useOptionalComponent @@ -38,6 +39,7 @@ import { useEntityContext } from '../../ecs/functions/EntityFunctions' import { RendererState } from '../../renderer/RendererState' import { addObjectToGroup, removeObjectFromGroup } from '../../scene/components/GroupComponent' import { AudioNodeGroups, MediaComponent, MediaElementComponent } from '../../scene/components/MediaComponent' +import { VolumetricComponent } from '../../scene/components/VolumetricComponent' import { ObjectLayers } from '../../scene/constants/ObjectLayers' import { setObjectLayers } from '../../scene/functions/setObjectLayers' @@ -71,6 +73,7 @@ export const PositionalAudioComponent = defineComponent({ }, onSet: (entity, component, json) => { + if (hasComponent(entity, VolumetricComponent) || hasComponent(entity, MediaComponent)) return setComponent(entity, MediaComponent, {}) if (!json) return diff --git a/packages/engine/src/audio/systems/MediaSystem.ts b/packages/engine/src/audio/systems/MediaSystem.ts index 95a6ae8860..449cd63a37 100755 --- a/packages/engine/src/audio/systems/MediaSystem.ts +++ b/packages/engine/src/audio/systems/MediaSystem.ts @@ -36,7 +36,6 @@ import { EngineRenderer } from '../../renderer/WebGLRendererSystem' import { StandardCallbacks, setCallback } from '../../scene/components/CallbackComponent' import { MediaComponent } from '../../scene/components/MediaComponent' import { VideoComponent, VideoTexturePriorityQueueState } from '../../scene/components/VideoComponent' -import { VolumetricComponent, endLoadingEffect } from 
'../../scene/components/VolumetricComponent' import { AudioState, useAudioState } from '../AudioState' import { PositionalAudioComponent } from '../components/PositionalAudioComponent' @@ -102,7 +101,6 @@ globalThis.AudioEffectPlayer = AudioEffectPlayer const mediaQuery = defineQuery([MediaComponent]) const videoQuery = defineQuery([VideoComponent]) -const volumetricQuery = defineQuery([VolumetricComponent]) const audioQuery = defineQuery([PositionalAudioComponent]) const execute = () => { @@ -112,30 +110,6 @@ const execute = () => { setCallback(entity, StandardCallbacks.PAUSE, () => media.paused.set(true)) } - for (const entity of volumetricQuery()) { - const volumetric = getComponent(entity, VolumetricComponent) - const player = volumetric.player - if (player) { - player.update() - const height = volumetric.height - const step = volumetric.height / 150 - if (volumetric.loadingEffectActive && player.mesh) { - if (volumetric.loadingEffectTime <= height) { - player.mesh.traverse((child: any) => { - if (child['material']) { - if (child.material.uniforms) { - child.material.uniforms.time = volumetric.loadingEffectTime - } - } - }) - volumetric.loadingEffectTime += step - } else { - volumetric.loadingEffectActive = false - endLoadingEffect(entity, player.mesh) - } - } - } - } for (const entity of audioQuery()) getComponent(entity, PositionalAudioComponent).helper?.update() const videoPriorityQueue = getState(VideoTexturePriorityQueueState).queue diff --git a/packages/engine/src/scene/components/UVOL1Component.ts b/packages/engine/src/scene/components/UVOL1Component.ts new file mode 100644 index 0000000000..f257e6472c --- /dev/null +++ b/packages/engine/src/scene/components/UVOL1Component.ts @@ -0,0 +1,341 @@ +/* +CPAL-1.0 License + +The contents of this file are subject to the Common Public Attribution License +Version 1.0. (the "License"); you may not use this file except in compliance +with the License. 
You may obtain a copy of the License at +https://github.com/EtherealEngine/etherealengine/blob/dev/LICENSE. +The License is based on the Mozilla Public License Version 1.1, but Sections 14 +and 15 have been added to cover use of software over a computer network and +provide for limited attribution for the Original Developer. In addition, +Exhibit A has been modified to be consistent with Exhibit B. + +Software distributed under the License is distributed on an "AS IS" basis, +WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License for the +specific language governing rights and limitations under the License. + +The Original Code is Ethereal Engine. + +The Original Developer is the Initial Developer. The Initial Developer of the +Original Code is the Ethereal Engine team. + +All portions of the code written by the Ethereal Engine team are Copyright © 2021-2023 +Ethereal Engine. All Rights Reserved. +*/ + +import { useVideoFrameCallback } from '@etherealengine/common/src/utils/useVideoFrameCallback' +import { getMutableState, getState } from '@etherealengine/hyperflux' +import { useEffect, useMemo, useRef } from 'react' +import { + BufferGeometry, + LinearFilter, + Mesh, + MeshBasicMaterial, + PlaneGeometry, + SRGBColorSpace, + ShaderMaterial, + Texture +} from 'three' +import { CORTOLoader } from '../../assets/loaders/corto/CORTOLoader' +import { AssetLoaderState } from '../../assets/state/AssetLoaderState' +import { AudioState } from '../../audio/AudioState' +import { iOS } from '../../common/functions/isMobile' +import { EngineState } from '../../ecs/classes/EngineState' +import { + defineComponent, + getMutableComponent, + hasComponent, + removeComponent, + setComponent, + useComponent +} from '../../ecs/functions/ComponentFunctions' +import { AnimationSystemGroup } from '../../ecs/functions/EngineFunctions' +import { useEntityContext } from '../../ecs/functions/EntityFunctions' +import { useExecute } from 
'../../ecs/functions/SystemFunctions' +import { EngineRenderer } from '../../renderer/WebGLRendererSystem' +import { addObjectToGroup, removeObjectFromGroup } from './GroupComponent' +import { MediaElementComponent } from './MediaComponent' +import { ShadowComponent } from './ShadowComponent' +import { UVOLDissolveComponent } from './UVOLDissolveComponent' +import { VolumetricComponent, handleAutoplay } from './VolumetricComponent' + +const decodeCorto = (url: string, start: number, end: number) => { + return new Promise((res, rej) => { + getState(AssetLoaderState).cortoLoader.load(url, start, end, (geometry) => { + res(geometry) + }) + }) +} + +interface FrameData { + frameNumber: number + keyframeNumber: number + startBytePosition: number + vertices: number + faces: number + meshLength: number +} + +interface ManifestSchema { + maxVertices: number + maxTriangles: number + frameData: FrameData[] + frameRate: number +} + +export const UVOL1Component = defineComponent({ + name: 'UVOL1Component', + + onInit: (entity) => { + return { + manifestPath: '', + data: {} as ManifestSchema, + firstGeometryFrameLoaded: false, + loadingEffectStarted: false, + loadingEffectEnded: false + } + }, + + onSet: (entity, component, json) => { + if (!json) return + if (json.manifestPath) { + component.manifestPath.set(json.manifestPath) + } + if (json.data) { + component.data.set(json.data) + } + }, + + reactor: UVOL1Reactor +}) + +function UVOL1Reactor() { + const entity = useEntityContext() + const volumetric = useComponent(entity, VolumetricComponent) + const component = useComponent(entity, UVOL1Component) + const videoElement = getMutableComponent(entity, MediaElementComponent).value + const audioContext = getState(AudioState).audioContext + const video = videoElement.element as HTMLVideoElement + + const meshBuffer = useMemo(() => new Map(), []) + const targetFramesToRequest = iOS ? 
10 : 90 + + const videoTexture = useMemo(() => { + const element = videoElement.element as HTMLVideoElement + const texture = new Texture(element) + texture.generateMipmaps = false + texture.minFilter = LinearFilter + texture.magFilter = LinearFilter + ;(texture as any).isVideoTexture = true + ;(texture as any).update = () => {} + texture.colorSpace = SRGBColorSpace + return texture + }, []) + + const material = useMemo(() => { + const _material = new MeshBasicMaterial({ color: 0xffffff }) + _material.map = videoTexture + return _material + }, []) + + const defaultGeometry = useMemo(() => new PlaneGeometry(0.001, 0.001) as BufferGeometry, []) + + // @ts-ignore + const mesh: Mesh = useMemo( + () => new Mesh(defaultGeometry, material), + [] + ) + + const pendingRequests = useRef(0) + const nextFrameToRequest = useRef(0) + + useEffect(() => { + if (!getState(AssetLoaderState).cortoLoader) { + const loader = new CORTOLoader() + loader.setDecoderPath(getState(EngineState).publicPath + '/loader_decoders/') + loader.preload() + const assetLoaderState = getMutableState(AssetLoaderState) + assetLoaderState.cortoLoader.set(loader) + } + if (volumetric.useLoadingEffect.value) { + setComponent(entity, UVOLDissolveComponent) + } + const shadow = getMutableComponent(entity, ShadowComponent) + shadow.cast.set(true) + shadow.receive.set(true) + + video.src = component.manifestPath.value.replace('.manifest', '.mp4') + video.load() + video.addEventListener('ended', function setEnded() { + volumetric.ended.set(true) + video.removeEventListener('ended', setEnded) + }) + + return () => { + removeObjectFromGroup(entity, mesh) + videoTexture.dispose() + const numberOfFrames = component.data.value.frameData.length + removePlayedBuffer(numberOfFrames) + meshBuffer.clear() + video.src = '' + } + }, []) + + useEffect(() => { + if (component.loadingEffectStarted.value && !component.loadingEffectEnded.value) { + // Loading effect in progress. 
Let it finish + return + } + // If autoplay is enabled, play the video irrespective of paused state + if (volumetric.autoplay.value && volumetric.initialBuffersLoaded.value) { + handleAutoplay(audioContext, video, volumetric) + } + }, [volumetric.autoplay, volumetric.initialBuffersLoaded, component.loadingEffectEnded]) + + useEffect(() => { + if (volumetric.paused.value || !volumetric.initialBuffersLoaded.value) { + video.pause() + return + } + if (mesh.material !== material) { + mesh.material = material + mesh.material.needsUpdate = true + } + handleAutoplay(audioContext, video, volumetric) + }, [volumetric.paused]) + + useEffect(() => { + if (!component.firstGeometryFrameLoaded.value) return + let timer = -1 + + const prepareMesh = () => { + if (video.buffered.length === 0) { + // Video is not loaded yet, + // wait for a bit and try again + clearTimeout(timer) + timer = window.setTimeout(prepareMesh, 200) + return + } + + mesh.geometry = meshBuffer.get(0)! + mesh.geometry.attributes.position.needsUpdate = true + + videoTexture.needsUpdate = true + EngineRenderer.instance.renderer.initTexture(videoTexture) + + if (volumetric.useLoadingEffect.value) { + mesh.material = UVOLDissolveComponent.createDissolveMaterial(mesh) + mesh.material.needsUpdate = true + component.loadingEffectStarted.set(true) + } + + addObjectToGroup(entity, mesh) + } + + prepareMesh() + }, [component.firstGeometryFrameLoaded]) + + useVideoFrameCallback(video, (now, metadata) => { + if (!metadata) return + /** + * sync mesh frame to video texture frame + */ + const processFrame = (frameToPlay: number) => { + if (mesh.material instanceof ShaderMaterial && !hasComponent(entity, UVOLDissolveComponent)) { + const oldMaterial = mesh.material + mesh.material = material + mesh.material.needsUpdate = true + oldMaterial.dispose() + } + + if (meshBuffer.has(frameToPlay)) { + // @ts-ignore: value cannot be anything else other than BufferGeometry + mesh.geometry = meshBuffer.get(frameToPlay) + 
mesh.geometry.attributes.position.needsUpdate = true + + videoTexture.needsUpdate = true + EngineRenderer.instance.renderer.initTexture(videoTexture) + } + removePlayedBuffer(frameToPlay) + } + + const frameToPlay = Math.round(metadata.mediaTime * component.data.value.frameRate) + processFrame(frameToPlay) + }) + + useExecute( + () => { + const delta = getState(EngineState).deltaSeconds + + if ( + component.loadingEffectStarted.value && + !component.loadingEffectEnded.value && + // @ts-ignore + UVOLDissolveComponent.updateDissolveEffect(entity, mesh, delta) + ) { + removeComponent(entity, UVOLDissolveComponent) + mesh.material = material + mesh.material.needsUpdate = true + component.loadingEffectEnded.set(true) + return + } + + const numberOfFrames = component.data.value.frameData.length + if (nextFrameToRequest.current === numberOfFrames - 1) { + // Fetched all frames + return + } + + const minimumBufferLength = targetFramesToRequest * 2 + const meshBufferHasEnoughToPlay = meshBuffer.size >= Math.max(targetFramesToRequest * 2, 90) // 2 seconds + const meshBufferHasEnough = meshBuffer.size >= minimumBufferLength * 5 + + if (pendingRequests.current == 0 && !meshBufferHasEnough) { + const newLastFrame = Math.min(nextFrameToRequest.current + targetFramesToRequest - 1, numberOfFrames - 1) + for (let i = nextFrameToRequest.current; i <= newLastFrame; i++) { + const meshFilePath = component.manifestPath.value.replace('.manifest', '.drcs') + const byteStart = component.data.value.frameData[i].startBytePosition + const byteEnd = byteStart + component.data.value.frameData[i].meshLength + pendingRequests.current += 1 + decodeCorto(meshFilePath, byteStart, byteEnd) + .then((geometry) => { + if (!geometry) { + throw new Error('VDEBUG Entity ${entity} Invalid geometry frame: ' + i.toString()) + } + + meshBuffer.set(i, geometry) + pendingRequests.current -= 1 + + if (i === 0) { + component.firstGeometryFrameLoaded.set(true) + } + }) + .catch((e) => { + console.error('Error 
decoding corto frame: ', i, e) + pendingRequests.current -= 1 + }) + + nextFrameToRequest.current = newLastFrame + } + + if (meshBufferHasEnoughToPlay && !volumetric.initialBuffersLoaded.value) { + volumetric.initialBuffersLoaded.set(true) + } + } + }, + { + with: AnimationSystemGroup + } + ) + + const removePlayedBuffer = (currentFrame: number) => { + for (const [key, buffer] of meshBuffer.entries()) { + if (key < currentFrame) { + buffer.dispose() + meshBuffer.delete(key) + } + } + } + + return null +} diff --git a/packages/engine/src/scene/components/UVOL2Component.ts b/packages/engine/src/scene/components/UVOL2Component.ts new file mode 100644 index 0000000000..751a9b032d --- /dev/null +++ b/packages/engine/src/scene/components/UVOL2Component.ts @@ -0,0 +1,985 @@ +/* +CPAL-1.0 License + +The contents of this file are subject to the Common Public Attribution License +Version 1.0. (the "License"); you may not use this file except in compliance +with the License. You may obtain a copy of the License at +https://github.com/EtherealEngine/etherealengine/blob/dev/LICENSE. +The License is based on the Mozilla Public License Version 1.1, but Sections 14 +and 15 have been added to cover use of software over a computer network and +provide for limited attribution for the Original Developer. In addition, +Exhibit A has been modified to be consistent with Exhibit B. + +Software distributed under the License is distributed on an "AS IS" basis, +WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License for the +specific language governing rights and limitations under the License. + +The Original Code is Ethereal Engine. + +The Original Developer is the Initial Developer. The Initial Developer of the +Original Code is the Ethereal Engine team. + +All portions of the code written by the Ethereal Engine team are Copyright © 2021-2023 +Ethereal Engine. All Rights Reserved. 
+*/ + +import { getState } from '@etherealengine/hyperflux' +import { useEffect, useMemo, useRef } from 'react' +import { + BufferGeometry, + CompressedTexture, + Group, + InterleavedBufferAttribute, + Mesh, + MeshBasicMaterial, + MeshStandardMaterial, + ShaderMaterial, + SphereGeometry, + Vector2 +} from 'three' +import { GLTF } from '../../assets/loaders/gltf/GLTFLoader' +import { AssetLoaderState } from '../../assets/state/AssetLoaderState' +import { AudioState } from '../../audio/AudioState' +import { EngineState } from '../../ecs/classes/EngineState' +import { + defineComponent, + getMutableComponent, + removeComponent, + setComponent, + useComponent +} from '../../ecs/functions/ComponentFunctions' +import { AnimationSystemGroup } from '../../ecs/functions/EngineFunctions' +import { useEntityContext } from '../../ecs/functions/EntityFunctions' +import { useExecute } from '../../ecs/functions/SystemFunctions' +import { EngineRenderer } from '../../renderer/WebGLRendererSystem' +import { + AudioFileFormat, + DRACOTarget, + FORMAT_TO_EXTENSION, + GLBTarget, + GeometryFormat, + PlayerManifest, + TextureFormat, + UVOL_TYPE, + UniformSolveTarget +} from '../constants/UVOLTypes' +import getFirstMesh from '../util/getFirstMesh' +import { addObjectToGroup, removeObjectFromGroup } from './GroupComponent' +import { MediaElementComponent } from './MediaComponent' +import { ShadowComponent } from './ShadowComponent' +import { UVOLDissolveComponent } from './UVOLDissolveComponent' +import { VolumetricComponent, handleAutoplay } from './VolumetricComponent' + +export const calculatePriority = (manifest: PlayerManifest) => { + const geometryTargets = Object.keys(manifest.geometry.targets) + geometryTargets.sort((a, b) => { + const aData = manifest.geometry.targets[a] + const bData = manifest.geometry.targets[b] + + // @ts-ignore + const aSimplificationRatio = aData.settings.simplificationRatio ?? 
1 + + // @ts-ignore + const bSimplificationRatio = bData.settings.simplificationRatio ?? 1 + + const aMetric = aData.frameRate * aSimplificationRatio + const bMetric = bData.frameRate * bSimplificationRatio + return aMetric - bMetric + }) + geometryTargets.forEach((target, index) => { + manifest.geometry.targets[target].priority = index + }) + + const textureTargets = Object.keys(manifest.texture.baseColor.targets) + textureTargets.sort((a, b) => { + const aData = manifest.texture.baseColor.targets[a] + const bData = manifest.texture.baseColor.targets[b] + const aPixelPerSec = aData.frameRate * aData.settings.resolution.width * aData.settings.resolution.height + const bPixelPerSec = bData.frameRate * bData.settings.resolution.width * bData.settings.resolution.height + return aPixelPerSec - bPixelPerSec + }) + textureTargets.forEach((target, index) => { + manifest.texture.baseColor.targets[target].priority = index + }) + return manifest +} + +export const UVOL2Component = defineComponent({ + name: 'UVOL2Component', + + onInit: (entity) => { + return { + canPlay: false, + playbackStartTime: 0, + manifestPath: '', + isBuffering: false, + data: {} as PlayerManifest, + hasAudio: false, + geometryTarget: '', + textureTarget: '', + initialGeometryBuffersLoaded: false, + initialTextureBuffersLoaded: false, + firstGeometryFrameLoaded: false, + firstTextureFrameLoaded: false, + loadingEffectStarted: false, + loadingEffectEnded: false + } + }, + + onSet: (entity, component, json) => { + if (!json) return + if (json.manifestPath) { + component.manifestPath.set(json.manifestPath) + } + if (json.data) { + component.data.set(json.data) + } + }, + + reactor: UVOL2Reactor +}) + +const loadGeometryAsync = (url: string, targetData: DRACOTarget | GLBTarget | UniformSolveTarget) => { + return new Promise((resolve, reject) => { + const format = targetData.format + if (format === 'draco') { + getState(AssetLoaderState).gltfLoader.dracoLoader?.load(url, (geometry: BufferGeometry) => { + 
resolve(geometry) + }) + } else if (format === 'glb' || format === 'uniform-solve') { + getState(AssetLoaderState).gltfLoader.load(url, ({ scene }: GLTF) => { + const mesh = getFirstMesh(scene)! + resolve(mesh) + }) + } else { + reject('Invalid format') + } + }) +} + +const loadTextureAsync = (url: string) => { + return new Promise((resolve, reject) => { + getState(AssetLoaderState).gltfLoader.ktx2Loader!.load(url, (texture) => { + EngineRenderer.instance.renderer.initTexture(texture) + resolve(texture) + }) + }) +} + +const uniformSolveVertexShader = ` +#include +#include +out vec2 vMapUv; + +attribute vec4 keyframeA; +attribute vec4 keyframeB; +uniform float mixRatio; + +uniform vec2 repeat; +uniform vec2 offset; + +// HEADER_REPLACE_START +// HEADER_REPLACE_END + + +void main() { + // MAIN_REPLACE_START + // MAIN_REPLACE_END + + vMapUv = uv * repeat + offset; + + vec4 localPosition = vec4(position, 1.0); + + localPosition.x += mix(keyframeA.x, keyframeB.x, mixRatio); + localPosition.y += mix(keyframeA.y, keyframeB.y, mixRatio); + localPosition.z += mix(keyframeA.z, keyframeB.z, mixRatio); + + gl_Position = projectionMatrix * modelViewMatrix * localPosition; + #include +}` + +const uniformSolveFragmentShader = ` +#include +#include + +in vec2 vMapUv; +uniform sampler2D map; + +// HEADER_REPLACE_START +// HEADER_REPLACE_END + +void main() { + vec4 color = texture2D(map, vMapUv); + gl_FragColor = color; + + // MAIN_REPLACE_START + // MAIN_REPLACE_END + + #include +}` + +const countHashes = (str: string) => { + let result = 0 + for (let i = 0; i < str.length; i++) { + if (str[i] === '#') { + result++ + } + } + return result +} + +const resolvePath = ( + path: string, + manifestPath: string, + format: AudioFileFormat | GeometryFormat | TextureFormat, + target?: string, + index?: number +) => { + let resolvedPath = path + resolvedPath = path.replace('[ext]', FORMAT_TO_EXTENSION[format]) + resolvedPath = resolvedPath.replace('[type]', 'baseColor') + if (target !== 
undefined) { + resolvedPath = resolvedPath.replace('[target]', target) + } + if (index !== undefined) { + const padLength = countHashes(resolvedPath) + const paddedString = '[' + '#'.repeat(padLength) + ']' + const paddedIndex = index.toString().padStart(padLength, '0') + resolvedPath = resolvedPath.replace(paddedString, paddedIndex) + } + + if (!resolvedPath.startsWith('http')) { + // This is a relative path, resolve it w.r.t to manifestPath + const manifestPathSegments = manifestPath.split('/') + manifestPathSegments.pop() + manifestPathSegments.push(resolvedPath) + resolvedPath = manifestPathSegments.join('/') + } + + return resolvedPath +} + +const KEY_PADDING = 7 +const EPSILON = 0.00001 // For float comparison + +const createKey = (target: string, index: number) => { + return target + index.toString().padStart(KEY_PADDING, '0') +} + +function UVOL2Reactor() { + const entity = useEntityContext() + const volumetric = useComponent(entity, VolumetricComponent) + const component = useComponent(entity, UVOL2Component) + + // These are accessed very frequently, Better not to fetch from state everytime + const manifest = useRef(component.data.value) + const manifestPath = useMemo(() => component.manifestPath.value, []) + const geometryTargets = useRef(Object.keys(manifest.current.geometry.targets)) + const textureTargets = useRef(Object.keys(manifest.current.texture.baseColor.targets)) + + const mediaElement = getMutableComponent(entity, MediaElementComponent).value + const audioContext = getState(AudioState).audioContext + const audio = mediaElement.element + + const geometryBuffer = useMemo(() => new Map(), []) + const textureBuffer = useMemo(() => new Map(), []) + const maxBufferHealth = 10 // seconds + const minBufferToPlay = 2 // seconds + const bufferThreshold = 5 // seconds. 
If buffer health is less than this, fetch new data + const repeat = useMemo(() => new Vector2(1, 1), []) + const offset = useMemo(() => new Vector2(0, 0), []) + + const material = useMemo(() => { + if (manifest.current.type === UVOL_TYPE.UNIFORM_SOLVE_WITH_COMPRESSED_TEXTURE) { + return new ShaderMaterial({ + vertexShader: uniformSolveVertexShader, + fragmentShader: uniformSolveFragmentShader, + uniforms: { + repeat: { + value: new Vector2(1, 1) + }, + offset: { + value: new Vector2(0, 0) + }, + mixRatio: { + value: 0 + }, + map: { + value: null + } + } + }) + } + return new MeshBasicMaterial({ color: 0xffffff }) + }, []) + + const defaultGeometry = useMemo(() => new SphereGeometry(3, 32, 32) as BufferGeometry, []) + const mesh = useMemo(() => new Mesh(defaultGeometry, material), []) + const group = useMemo(() => { + const _group = new Group() + _group.add(mesh) + return _group + }, []) + + const pendingGeometryRequests = useRef(0) + const pendingTextureRequests = useRef(0) + + /** + * This says until how long can we play geometry buffers without fetching new data. 
+ * For eg: If it geometryBufferHealth = 25, it implies, we can play upto 00:25 seconds + */ + const geometryBufferHealth = useRef(0) // in seconds + const textureBufferHealth = useRef(0) // in seconds + const currentTime = useRef(0) // in seconds + + useEffect(() => { + if (volumetric.useLoadingEffect.value) { + setComponent(entity, UVOLDissolveComponent) + } + + manifest.current = calculatePriority(component.data.get({ noproxy: true })) + component.data.set(manifest.current) + const shadow = getMutableComponent(entity, ShadowComponent) + if (manifest.current.type === UVOL_TYPE.UNIFORM_SOLVE_WITH_COMPRESSED_TEXTURE) { + // TODO: Cast shadows properly with uniform solve + shadow.cast.set(false) + shadow.receive.set(false) + } else { + shadow.cast.set(true) + shadow.receive.set(true) + } + + geometryTargets.current = Object.keys(manifest.current.geometry.targets) + geometryTargets.current.sort((a, b) => { + return manifest.current.geometry.targets[a].priority - manifest.current.geometry.targets[b].priority + }) + + textureTargets.current = Object.keys(manifest.current.texture.baseColor.targets) + textureTargets.current.sort((a, b) => { + return ( + manifest.current.texture.baseColor.targets[a].priority - manifest.current.texture.baseColor.targets[b].priority + ) + }) + + if (manifest.current.audio) { + component.hasAudio.set(true) + audio.src = resolvePath(manifest.current.audio.path, manifestPath, manifest.current.audio.formats[0]) + audio.playbackRate = manifest.current.audio.playbackRate + } + component.geometryTarget.set(geometryTargets.current[0]) + component.textureTarget.set(textureTargets.current[0]) + currentTime.current = volumetric.startTime.value + const intervalId = setInterval(bufferLoop, 3000) + bufferLoop() // calling now because setInterval will call after 3 seconds + + return () => { + removeObjectFromGroup(entity, group) + clearInterval(intervalId) + for (const texture of textureBuffer.values()) { + texture.dispose() + } + textureBuffer.clear() + 
+ for (const value of geometryBuffer.values()) { + if (value instanceof Mesh) { + value.geometry.dispose() + } else if (value instanceof BufferGeometry) { + value.dispose() + } else if (value instanceof InterleavedBufferAttribute) { + mesh.geometry.setAttribute(value.name, value) + } + } + + mesh.geometry.dispose() + geometryBuffer.clear() + mesh.material.dispose() + audio.src = '' + } + }, []) + + const fetchNonUniformSolveGeometry = (startFrame: number, endFrame: number, target: string) => { + const targetData = manifest.current.geometry.targets[target] + const promises: Promise[] = [] + + const oldBufferHealth = geometryBufferHealth.current + const startTime = Date.now() + + for (let i = startFrame; i <= endFrame; i++) { + const frameURL = resolvePath(manifest.current.geometry.path, manifestPath, targetData.format, target, i) + pendingGeometryRequests.current++ + promises.push(loadGeometryAsync(frameURL, targetData)) + } + + Promise.allSettled(promises).then((values) => { + values.forEach((result, j) => { + const model = result.status === 'fulfilled' ? 
(result.value as Mesh) : null + if (!model) { + return + } + const i = j + startFrame + const key = createKey(target, i) + model.name = key + geometryBuffer.set(createKey(target, i), model) + geometryBufferHealth.current += 1 / targetData.frameRate + pendingGeometryRequests.current-- + if (!component.firstGeometryFrameLoaded.value) { + component.firstGeometryFrameLoaded.set(true) + } + if (geometryBufferHealth.current >= minBufferToPlay && !component.initialGeometryBuffersLoaded.value) { + component.initialGeometryBuffersLoaded.set(true) + } + }) + + const playTime = geometryBufferHealth.current - oldBufferHealth + const fetchTime = (Date.now() - startTime) / 1000 + const metric = fetchTime / playTime + adjustGeometryTarget(metric) + }) + } + + const fetchUniformSolveGeometry = (startSegment: number, endSegment: number, target: string, extraTime: number) => { + const targetData = manifest.current.geometry.targets[target] as UniformSolveTarget + const promises: Promise[] = [] + + const oldBufferHealth = geometryBufferHealth.current + const startTime = Date.now() + + for (let i = startSegment; i <= endSegment; i++) { + const segmentURL = resolvePath(manifest.current.geometry.path, manifestPath, targetData.format, target, i) + pendingGeometryRequests.current++ + promises.push(loadGeometryAsync(segmentURL, targetData)) + } + + Promise.allSettled(promises).then((values) => { + values.forEach((result, j) => { + const model = result.status === 'fulfilled' ? 
(result.value as Mesh) : null + if (!model) { + return + } + const i = j + startSegment + const positionMorphAttributes = model.geometry.morphAttributes.position as InterleavedBufferAttribute[] + const segmentDuration = positionMorphAttributes.length / targetData.frameRate + const segmentOffset = i * targetData.segmentFrameCount + + positionMorphAttributes.forEach((attr, index) => { + const key = createKey(target, segmentOffset + index) + attr.name = key + geometryBuffer.set(key, attr) + }) + + model.geometry.morphAttributes = {} + if (!component.firstGeometryFrameLoaded.value) { + // @ts-ignore + mesh.copy(model) + repeat.copy((model.material as MeshStandardMaterial).map?.repeat ?? repeat) + offset.copy((model.material as MeshStandardMaterial).map?.offset ?? offset) + mesh.material = material + component.firstGeometryFrameLoaded.set(true) + } + + geometryBufferHealth.current += segmentDuration + pendingGeometryRequests.current-- + + if (geometryBufferHealth.current >= minBufferToPlay && !component.initialGeometryBuffersLoaded.value) { + component.initialGeometryBuffersLoaded.set(true) + } + }) + + const playTime = geometryBufferHealth.current - oldBufferHealth + const fetchTime = (Date.now() - startTime) / 1000 + const metric = fetchTime / playTime + adjustGeometryTarget(metric) + if (extraTime >= 0) { + geometryBufferHealth.current -= extraTime + } + }) + } + + const adjustGeometryTarget = (metric: number) => { + if (metric >= 0.25) { + const currentTargetIndex = geometryTargets.current.indexOf(component.geometryTarget.value) + if (currentTargetIndex > 0) { + component.geometryTarget.set(geometryTargets.current[currentTargetIndex - 1]) + } + } else if (metric < 0.1) { + const currentTargetIndex = geometryTargets.current.indexOf(component.geometryTarget.value) + if (currentTargetIndex < geometryTargets.current.length - 1) { + component.geometryTarget.set(geometryTargets.current[currentTargetIndex + 1]) + } + } + } + + const adjustTextureTarget = (metric: number) 
=> { + if (metric >= 0.25) { + const currentTargetIndex = textureTargets.current.indexOf(component.textureTarget.value) + if (currentTargetIndex > 0) { + component.textureTarget.set(textureTargets.current[currentTargetIndex - 1]) + } + } else if (metric < 0.1) { + const currentTargetIndex = textureTargets.current.indexOf(component.textureTarget.value) + if (currentTargetIndex < textureTargets.current.length - 1) { + component.textureTarget.set(textureTargets.current[currentTargetIndex + 1]) + } + } + } + + const fetchGeometry = () => { + const currentBufferLength = geometryBufferHealth.current - (currentTime.current - volumetric.startTime.value) + if (currentBufferLength >= Math.min(bufferThreshold, maxBufferHealth) || pendingGeometryRequests.current > 0) { + return + } + const target = component.geometryTarget.value ? component.geometryTarget.value : geometryTargets.current[0] + + const targetData = manifest.current.geometry.targets[target] + const frameRate = targetData.frameRate + const frameCount = targetData.frameCount + + const startFrame = Math.round((geometryBufferHealth.current + volumetric.startTime.value) * frameRate) + if (startFrame >= frameCount) { + // fetched all frames + return + } + + const framesToFetch = Math.round((maxBufferHealth - currentBufferLength) * frameRate) + const endFrame = Math.min(startFrame + framesToFetch, frameCount - 1) + + if (targetData.format === 'uniform-solve') { + const segmentFrameCount = targetData.segmentFrameCount + const startSegment = Math.floor(startFrame / segmentFrameCount) + const endSegment = Math.floor(endFrame / segmentFrameCount) + const startFrameTime = startFrame / frameRate + const startSegmentTime = startSegment * targetData.settings.segmentSize + + /** + * 'extraTime' worth buffers are fetched again, possibly with different target + * this happens when there is a change in segment size + * to avoid adding this part to bufferHealth again, subtract it. 
+ */ + const extraTime = startFrameTime - startSegmentTime + fetchUniformSolveGeometry(startSegment, endSegment, target, extraTime) + } else { + fetchNonUniformSolveGeometry(startFrame, endFrame, target) + } + } + + const fetchTextures = () => { + const currentBufferLength = textureBufferHealth.current - (currentTime.current - volumetric.startTime.value) + if (currentBufferLength >= Math.min(bufferThreshold, maxBufferHealth) || pendingTextureRequests.current > 0) { + return + } + const target = component.textureTarget.value ? component.textureTarget.value : textureTargets.current[0] + const targetData = manifest.current.texture.baseColor.targets[target] + const frameRate = targetData.frameRate + const startFrame = Math.round((textureBufferHealth.current + volumetric.startTime.value) * frameRate) + if (startFrame >= targetData.frameCount) { + // fetched all frames + return + } + + const framesToFetch = Math.round((maxBufferHealth - currentBufferLength) * frameRate) + const endFrame = Math.min(startFrame + framesToFetch, targetData.frameCount - 1) + + if (!getState(AssetLoaderState).gltfLoader.ktx2Loader) { + throw new Error('KTX2Loader not initialized') + } + + const oldBufferHealth = geometryBufferHealth.current + const startTime = Date.now() + const promises: Promise[] = [] + + for (let i = startFrame; i <= endFrame; i++) { + const textureURL = resolvePath( + manifest.current.texture.baseColor.path, + manifestPath, + targetData.format, + target, + i + ) + pendingTextureRequests.current++ + promises.push(loadTextureAsync(textureURL)) + } + + Promise.allSettled(promises).then((values) => { + values.forEach((result, j) => { + const texture = result.status === 'fulfilled' ? 
(result.value as CompressedTexture) : null + if (!texture) { + return + } + const i = j + startFrame + const key = createKey(target, i) + texture.name = key + pendingTextureRequests.current-- + textureBuffer.set(key, texture) + textureBufferHealth.current += 1 / frameRate + if (textureBufferHealth.current >= minBufferToPlay && !component.initialTextureBuffersLoaded.value) { + component.initialTextureBuffersLoaded.set(true) + } + if (!component.firstTextureFrameLoaded.value) { + component.firstTextureFrameLoaded.set(true) + } + }) + + const playTime = textureBufferHealth.current - oldBufferHealth + const fetchTime = (Date.now() - startTime) / 1000 + const metric = fetchTime / playTime + adjustTextureTarget(metric) + }) + } + + const bufferLoop = () => { + fetchGeometry() + fetchTextures() + } + + useEffect(() => { + if (component.isBuffering.value) { + component.geometryTarget.set(geometryTargets.current[0]) + component.textureTarget.set(textureTargets.current[0]) + } + }, [component.isBuffering]) + + useEffect(() => { + if (!component.initialGeometryBuffersLoaded.value || !component.initialTextureBuffersLoaded.value) { + return + } + volumetric.initialBuffersLoaded.set(true) + }, [component.initialGeometryBuffersLoaded, component.initialTextureBuffersLoaded]) + + useEffect(() => { + if (!component.firstGeometryFrameLoaded.value || !component.firstTextureFrameLoaded.value) { + return + } + updateGeometry(currentTime.current) + updateTexture(currentTime.current) + + if (volumetric.useLoadingEffect.value) { + let headerTemplate: RegExp | undefined = /\/\/\sHEADER_REPLACE_START([\s\S]*?)\/\/\sHEADER_REPLACE_END/ + let mainTemplate: RegExp | undefined = /\/\/\sMAIN_REPLACE_START([\s\S]*?)\/\/\sMAIN_REPLACE_END/ + + if (manifest.current.type !== UVOL_TYPE.UNIFORM_SOLVE_WITH_COMPRESSED_TEXTURE) { + headerTemplate = undefined + mainTemplate = undefined + } + + mesh.material = UVOLDissolveComponent.createDissolveMaterial( + mesh, + headerTemplate, + mainTemplate, + 
headerTemplate, + mainTemplate + ) + mesh.material.needsUpdate = true + component.loadingEffectStarted.set(true) + } + + addObjectToGroup(entity, group) + }, [component.firstGeometryFrameLoaded, component.firstTextureFrameLoaded]) + + useEffect(() => { + if (component.loadingEffectStarted.value && !component.loadingEffectEnded.value) { + // Loading effect in progress. Let it finish + return + } + // If autoplay is enabled, play the audio irrespective of paused state + if (volumetric.autoplay.value && volumetric.initialBuffersLoaded.value) { + // Reset the loading effect's material + mesh.material = material + mesh.material.needsUpdate = true + + if (component.hasAudio.value) { + handleAutoplay(audioContext, audio, volumetric) + } else { + volumetric.paused.set(false) + } + } + }, [volumetric.autoplay, volumetric.initialBuffersLoaded, component.loadingEffectEnded]) + + useEffect(() => { + if (volumetric.paused.value) { + component.canPlay.set(false) + if (component.hasAudio.value) { + audio.pause() + } + return + } + component.playbackStartTime.set(Date.now()) + volumetric.startTime.set(currentTime.current) + geometryBufferHealth.current -= currentTime.current + textureBufferHealth.current -= currentTime.current + + if (mesh.material !== material) { + mesh.material = material + mesh.material.needsUpdate = true + } + if (component.hasAudio.value) { + handleAutoplay(audioContext, audio, volumetric) + } + component.canPlay.set(true) + }, [volumetric.paused]) + + const getAttribute = (name: string, target: string, index: number) => { + const key = createKey(target, index) + if (!geometryBuffer.has(key)) { + const frameRate = manifest.current.geometry.targets[target].frameRate + const targets = Object.keys(manifest.current.geometry.targets) + + for (let i = 0; i < targets.length; i++) { + const _target = targets[i] + const _targetData = manifest.current.geometry.targets[_target] + const _frameRate = _targetData.frameRate + const _index = Math.round((index * _frameRate) / 
frameRate) + if (geometryBuffer.has(createKey(_target, _index))) { + const attribute = geometryBuffer.get(createKey(_target, _index))! as InterleavedBufferAttribute + return attribute + } + } + } else { + const attribute = geometryBuffer.get(key)! as InterleavedBufferAttribute + return attribute + } + + return false + } + + /** + * Sets the attribute on the mesh's geometry + * And disposes the old attribute. Since that's not supported by three.js natively, + * we transfer the old attibute to a new geometry and dispose it. + */ + const setAttribute = (name: string, attribute: InterleavedBufferAttribute) => { + if (mesh.geometry.attributes[name] === attribute) { + return + } + + if (name === 'keyframeB') { + /** + * Disposing should be done only on keyframeA + * Because, keyframeA will use the previous buffer of keyframeB in the next frame. + */ + mesh.geometry.attributes[name] = attribute + mesh.geometry.attributes[name].needsUpdate = true + return + } + + const index = mesh.geometry.index + const geometry = new BufferGeometry() + geometry.setIndex(index) + + for (const key in mesh.geometry.attributes) { + if (key !== name) { + geometry.setAttribute(key, mesh.geometry.attributes[key]) + } + } + geometry.setAttribute(name, attribute) + geometry.boundingSphere = mesh.geometry.boundingSphere + geometry.boundingBox = mesh.geometry.boundingBox + const oldGeometry = mesh.geometry + mesh.geometry = geometry + + oldGeometry.index = null + for (const key in oldGeometry.attributes) { + if (key !== name) { + oldGeometry.deleteAttribute(key) + } + } + + // Dispose method exists only on rendered geometries + oldGeometry.dispose() + + const oldAttributeKey = oldGeometry.attributes[name]?.name + geometryBuffer.delete(oldAttributeKey) + } + + const setGeometry = (target: string, index: number) => { + const key = createKey(target, index) + const targetData = manifest.current.geometry.targets[target] + + if (!geometryBuffer.has(key)) { + const frameRate = targetData.frameRate + const 
targets = Object.keys(manifest.current.geometry.targets) + for (let i = 0; i < targets.length; i++) { + const _target = targets[i] + const _frameRate = manifest.current.geometry.targets[_target].frameRate + const _index = Math.round((index * _frameRate) / frameRate) + if (geometryBuffer.has(createKey(_target, _index))) { + setGeometry(_target, _index) + return + } + } + } else { + if (targetData.format === 'draco') { + const geometry = geometryBuffer.get(key)! as BufferGeometry + if (mesh.geometry !== geometry) { + const oldGeometry = mesh.geometry + mesh.geometry = geometry + mesh.geometry.attributes.position.needsUpdate = true + oldGeometry.dispose() + const oldGeometryKey = oldGeometry.name + geometryBuffer.delete(oldGeometryKey) + return + } + } else if (targetData.format === 'glb') { + const model = geometryBuffer.get(key)! as Mesh + const geometry = model.geometry + if (mesh.geometry !== geometry) { + const oldGeometry = mesh.geometry + mesh.geometry = geometry + mesh.geometry.attributes.position.needsUpdate = true + oldGeometry.dispose() + } + if (model.material instanceof MeshStandardMaterial && model.material.map) { + if (model.material.map.repeat) { + repeat.copy(model.material.map.repeat) + } + if (model.material.map.offset) { + offset.copy(model.material.map.offset) + } + } + const oldModelKey = model.name + geometryBuffer.delete(oldModelKey) + return + } + } + } + + const setTexture = (target: string, index: number) => { + const key = createKey(target, index) + if (!textureBuffer.has(key)) { + const targets = Object.keys(manifest.current.texture.baseColor.targets) + const frameRate = manifest.current.texture.baseColor.targets[target].frameRate + for (let i = 0; i < targets.length; i++) { + const _frameRate = manifest.current.texture.baseColor.targets[targets[i]].frameRate + const _index = Math.round((index * _frameRate) / frameRate) + if (textureBuffer.has(createKey(targets[i], _index))) { + setTexture(targets[i], _index) + return + } + } + } else { + 
const texture = textureBuffer.get(key)! + if (mesh.material instanceof ShaderMaterial) { + const oldTextureKey = mesh.material.uniforms.map.value?.name ?? '' + if (mesh.material.uniforms.map.value !== texture) { + mesh.material.uniforms.map.value = texture + mesh.material.uniforms.map.value.needsUpdate = true + texture.repeat.copy(repeat) + texture.offset.copy(offset) + mesh.material.uniforms.repeat.value = repeat + mesh.material.uniforms.offset.value = offset + const oldTexture = textureBuffer.get(oldTextureKey) + if (oldTexture) { + oldTexture.dispose() + } + textureBuffer.delete(oldTextureKey) + } + } else { + const oldTextureKey = mesh.material.map?.name ?? '' + if (mesh.material.map !== texture) { + texture.repeat.copy(repeat) + texture.offset.copy(offset) + mesh.material.map = texture + mesh.material.map.needsUpdate = true + const oldTexture = textureBuffer.get(oldTextureKey) + if (oldTexture) { + oldTexture.dispose() + } + textureBuffer.delete(oldTextureKey) + } + } + } + } + + const updateUniformSolve = (currentTime: number) => { + const geometryTarget = component.geometryTarget.value + const geometryFrame = currentTime * manifest.current.geometry.targets[geometryTarget].frameRate + const keyframeAIndex = Math.floor(geometryFrame) + const keyframeBIndex = Math.ceil(geometryFrame) + let mixRatio = geometryFrame - keyframeAIndex + + const keyframeA = getAttribute('position', geometryTarget, keyframeAIndex) + const keyframeB = getAttribute('position', geometryTarget, keyframeBIndex) + + if (!keyframeA && !keyframeB) { + return + } else if (!keyframeA && keyframeB) { + setAttribute('keyframeB', keyframeB) + ;(mesh.material as ShaderMaterial).uniforms.mixRatio.value = 1 + return + } else if (keyframeA && !keyframeB) { + setAttribute('keyframeA', keyframeA) + ;(mesh.material as ShaderMaterial).uniforms.mixRatio.value = 0 + return + } else if (keyframeA && keyframeB) { + const keyframeATarget = keyframeA.name.slice(0, -KEY_PADDING) + const keyframeBTarget = 
keyframeB.name.slice(0, -KEY_PADDING) + if (keyframeATarget === keyframeBTarget && keyframeATarget !== geometryTarget) { + // If both keyframes are of different target, update the mixRatio + const _geometryFrame = currentTime * manifest.current.geometry.targets[keyframeATarget].frameRate + const _keyframeAIndex = Math.floor(_geometryFrame) + mixRatio = _geometryFrame - _keyframeAIndex + } + setAttribute('keyframeA', keyframeA) + setAttribute('keyframeB', keyframeB) + ;(mesh.material as ShaderMaterial).uniforms.mixRatio.value = mixRatio + } + } + + const updateNonUniformSolve = (currentTime: number) => { + const geometryTarget = component.geometryTarget.value + const geometryFrame = Math.round(currentTime * manifest.current.geometry.targets[geometryTarget].frameRate) + setGeometry(geometryTarget, geometryFrame) + } + + const updateGeometry = (currentTime: number) => { + if (manifest.current.type === UVOL_TYPE.UNIFORM_SOLVE_WITH_COMPRESSED_TEXTURE) { + updateUniformSolve(currentTime) + } else { + updateNonUniformSolve(currentTime) + } + } + + const updateTexture = (currentTime: number) => { + const textureTarget = component.textureTarget.value + const textureFrame = Math.round(currentTime * manifest.current.texture.baseColor.targets[textureTarget].frameRate) + setTexture(textureTarget, textureFrame) + } + + const update = () => { + const delta = getState(EngineState).deltaSeconds + + if ( + component.loadingEffectStarted.value && + !component.loadingEffectEnded.value && + // @ts-ignore + UVOLDissolveComponent.updateDissolveEffect(entity, mesh, delta) + ) { + removeComponent(entity, UVOLDissolveComponent) + component.loadingEffectEnded.set(true) + mesh.material = material + mesh.material.needsUpdate = true + return + } + + if (!component.canPlay.value || !volumetric.initialBuffersLoaded.value) { + return + } + if (manifest.current.audio) { + currentTime.current = audio.currentTime + } else { + currentTime.current = volumetric.startTime.value + (Date.now() - 
component.playbackStartTime.value) / 1000 + } + if (currentTime.current > manifest.current.duration || audio.ended) { + volumetric.ended.set(true) + return + } + + updateGeometry(currentTime.current) + updateTexture(currentTime.current) + } + + useExecute(update, { + with: AnimationSystemGroup + }) + + return null +} diff --git a/packages/engine/src/scene/components/UVOLDissolveComponent.ts b/packages/engine/src/scene/components/UVOLDissolveComponent.ts new file mode 100644 index 0000000000..7677cfe9b5 --- /dev/null +++ b/packages/engine/src/scene/components/UVOLDissolveComponent.ts @@ -0,0 +1,221 @@ +/* +CPAL-1.0 License + +The contents of this file are subject to the Common Public Attribution License +Version 1.0. (the "License"); you may not use this file except in compliance +with the License. You may obtain a copy of the License at +https://github.com/EtherealEngine/etherealengine/blob/dev/LICENSE. +The License is based on the Mozilla Public License Version 1.1, but Sections 14 +and 15 have been added to cover use of software over a computer network and +provide for limited attribution for the Original Developer. In addition, +Exhibit A has been modified to be consistent with Exhibit B. + +Software distributed under the License is distributed on an "AS IS" basis, +WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License for the +specific language governing rights and limitations under the License. + +The Original Code is Ethereal Engine. + +The Original Developer is the Initial Developer. The Initial Developer of the +Original Code is the Ethereal Engine team. + +All portions of the code written by the Ethereal Engine team are Copyright © 2021-2023 +Ethereal Engine. All Rights Reserved. 
+*/ + +import { + BufferGeometry, + Mesh, + MeshBasicMaterial, + ShaderLib, + ShaderMaterial, + Texture, + UniformsLib, + UniformsUtils +} from 'three' +import { Entity } from '../../ecs/classes/Entity' +import { defineComponent, getComponent } from '../../ecs/functions/ComponentFunctions' + +export const UVOLDissolveComponent = defineComponent({ + name: 'UVOLDissolveComponent', + onInit: (entity) => ({ + currentTime: 0, + duration: 2 + }), + + onSet: (entity, component, json) => { + if (!json) return + if (typeof json.duration === 'number') component.duration.set(json.duration) + }, + + /** + * If material is a shader material, + * This expects the texture to be at material.uniforms.map.value. + * Otherwise, the texture is not used. + * + * If the templates are strings, they are appended to the default templates. + * otherwise, they are used as regular expressions to replace the default templates. + */ + createDissolveMaterial( + mesh: Mesh, + vertexHeaderTemplate: string | RegExp = '#include ', + vertexTemplate: string | RegExp = '#include ', + fragmentHeaderTemplate: string | RegExp = '#include ', + fragmentTemplate: string | RegExp = '#include ' + ) { + const material = mesh.material + const isShaderMaterial = 'isShaderMaterial' in material + const texture: Texture = isShaderMaterial ? 
material.uniforms.map.value : material.map + if (!mesh.geometry.boundingBox) { + mesh.geometry.computeBoundingBox() + } + const minY = mesh.geometry.boundingBox!.min.y + const maxY = mesh.geometry.boundingBox!.max.y + const height = maxY - minY + + let uniforms = { + progress: { + value: 0 + }, + loadedHeight: { + value: 0 + }, + jitterWidth: { + value: 0.1 * height + }, + repeat: { + value: texture.repeat + }, + offset: { + value: texture.offset + }, + origin_texture: { + value: texture + } + } + + let vertexShader = '', + fragmentShader = '' + if (isShaderMaterial) { + vertexShader = material.vertexShader + fragmentShader = material.fragmentShader + uniforms = UniformsUtils.merge([material.uniforms, uniforms]) + } else { + const shader = ShaderLib['basic'] + vertexShader = shader.vertexShader + fragmentShader = shader.fragmentShader + Object.keys(shader.uniforms).forEach((key) => { + if (material[key]) { + uniforms[key] = shader.uniforms[key] + } + }) + } + + uniforms = UniformsUtils.merge([UniformsLib['lights'], uniforms]) + + let vertexShaderHeader = ` +varying vec2 vUv; +varying float positionY;` + if (typeof vertexHeaderTemplate === 'string') { + vertexShaderHeader = vertexHeaderTemplate + vertexShaderHeader + } + + let vertexShaderMain = ` +vUv = uv; +positionY = position.y;` + if (typeof vertexTemplate === 'string') { + vertexShaderMain = vertexTemplate + vertexShaderMain + } + + let fragmentShaderHeader = ` +varying vec2 vUv; +varying float positionY; +uniform float loadedHeight; +uniform vec2 repeat; +uniform vec2 offset; +uniform sampler2D origin_texture; +uniform float jitterWidth; +uniform float progress; + +vec4 sRGBToLinear( in vec4 value ) { + return vec4( mix( pow( value.rgb * 0.9478672986 + vec3( 0.0521327014 ), vec3( 2.4 ) ), value.rgb * 0.0773993808, vec3( lessThanEqual( value.rgb, vec3( 0.04045 ) ) ) ), value.a ); +}` + if (typeof fragmentHeaderTemplate === 'string') { + fragmentShaderHeader = fragmentHeaderTemplate + fragmentShaderHeader + } + 
+ const textureShader = + 'isVideoTexture' in texture ? 'gl_FragColor = sRGBToLinear(textureColor);' : 'gl_FragColor = textureColor;' + + let fragmentShaderMain = ` +float offset2 = positionY - loadedHeight; + +vec2 transformedUV = vUv*repeat + offset; +vec4 textureColor = texture2D(origin_texture, transformedUV); + +float jitterDelta = (rand(gl_FragCoord.xy) - 0.5) * 200.0; // [-100, 100] +float localJitter = jitterWidth * (100.0 + jitterDelta) / 100.0; + +float lowerOffset = loadedHeight - localJitter; +float upperOffset = loadedHeight + localJitter; + +float randomR = (sin(progress) * 0.5) + 0.5; +float randomG = .5; +float randomB = (cos(progress) * 0.5) + 0.5; + +if (positionY < lowerOffset) { + ${textureShader} +} else if (positionY > upperOffset) { + discard; +} else { + gl_FragColor.r = randomR; + gl_FragColor.g = randomG; + gl_FragColor.b = randomB; +}` + if (typeof fragmentTemplate === 'string') { + fragmentShaderMain = fragmentTemplate + fragmentShaderMain + } + + vertexShader = vertexShader.replace(vertexHeaderTemplate, vertexShaderHeader) + vertexShader = vertexShader.replace(vertexTemplate, vertexShaderMain) + + fragmentShader = fragmentShader.replace(fragmentHeaderTemplate, fragmentShaderHeader) + fragmentShader = fragmentShader.replace(fragmentTemplate, fragmentShaderMain) + + const newMaterial = new ShaderMaterial({ + uniforms, + vertexShader, + fragmentShader, + lights: true, + fog: false, + transparent: material.transparent + }) + newMaterial.needsUpdate = true + newMaterial.visible = material.visible + + return newMaterial + }, + + /** + * Interpolates the duration to bounding box's minY and maxY. + * Returns true when the loading effect is finished. 
+ */ + updateDissolveEffect(entity: Entity, mesh: Mesh, deltaTime: number) { + const dissolveComponent = getComponent(entity, UVOLDissolveComponent) + if (!dissolveComponent) return true + dissolveComponent.currentTime += deltaTime + const minY = mesh.geometry.boundingBox!.min.y + const maxY = mesh.geometry.boundingBox!.max.y + const duration = dissolveComponent.duration + + const loadedHeight = Math.min( + maxY, + Math.max(minY, minY + (maxY - minY) * (dissolveComponent.currentTime / duration)) + ) + + mesh.material.uniforms.loadedHeight.value = loadedHeight + mesh.material.uniforms.progress.value = dissolveComponent.currentTime / duration + + return dissolveComponent.currentTime >= duration + } +}) diff --git a/packages/engine/src/scene/components/VolumetricComponent.ts b/packages/engine/src/scene/components/VolumetricComponent.ts index 88c7ade955..cc9e5076ef 100755 --- a/packages/engine/src/scene/components/VolumetricComponent.ts +++ b/packages/engine/src/scene/components/VolumetricComponent.ts @@ -23,37 +23,62 @@ All portions of the code written by the Ethereal Engine team are Copyright © 20 Ethereal Engine. All Rights Reserved. 
*/ -import type VolumetricPlayer from '@etherealengine/volumetric/dist/Player' import { useEffect } from 'react' -import { Box3, Material, Mesh, Object3D } from 'three' -import { createWorkerFromCrossOriginURL } from '@etherealengine/common/src/utils/createWorkerFromCrossOriginURL' -import { getState } from '@etherealengine/hyperflux' - -import { DissolveEffect } from '@etherealengine/engine/src/avatar/DissolveEffect' -import { AvatarDissolveComponent } from '@etherealengine/engine/src/avatar/components/AvatarDissolveComponent' -import { AvatarEffectComponent, MaterialMap } from '@etherealengine/engine/src/avatar/components/AvatarEffectComponent' +import { State, getState } from '@etherealengine/hyperflux' import { AudioState } from '../../audio/AudioState' -import { isClient } from '../../common/functions/getEnvironment' -import { iOS } from '../../common/functions/isMobile' -import { Entity } from '../../ecs/classes/Entity' import { + ComponentType, defineComponent, getComponent, getMutableComponent, - getOptionalComponent, - hasComponent, removeComponent, setComponent, useComponent } from '../../ecs/functions/ComponentFunctions' -import { createEntity, useEntityContext } from '../../ecs/functions/EntityFunctions' +import { useEntityContext } from '../../ecs/functions/EntityFunctions' import { EngineRenderer } from '../../renderer/WebGLRendererSystem' -import { TransformComponent } from '../../transform/components/TransformComponent' -import { addObjectToGroup } from '../components/GroupComponent' import { PlayMode } from '../constants/PlayMode' import { AudioNodeGroups, MediaElementComponent, createAudioNodeGroup, getNextTrack } from './MediaComponent' import { ShadowComponent } from './ShadowComponent' +import { UVOL1Component } from './UVOL1Component' +import { UVOL2Component } from './UVOL2Component' + +export function handleAutoplay( + audioContext: AudioContext, + media: HTMLMediaElement, + volumetric: State> +) { + const attachEventListeners = () => { + 
const playMedia = () => { + media.play() + audioContext.resume() + volumetric.paused.set(false) + window.removeEventListener('pointerdown', playMedia) + window.removeEventListener('keypress', playMedia) + window.removeEventListener('touchstart', playMedia) + EngineRenderer.instance.renderer.domElement.removeEventListener('pointerdown', playMedia) + EngineRenderer.instance.renderer.domElement.removeEventListener('touchstart', playMedia) + } + window.addEventListener('pointerdown', playMedia) + window.addEventListener('keypress', playMedia) + window.addEventListener('touchstart', playMedia) + EngineRenderer.instance.renderer.domElement.addEventListener('pointerdown', playMedia) + EngineRenderer.instance.renderer.domElement.addEventListener('touchstart', playMedia) + } + + // Try to play. If it fails, attach event listeners to play on user interaction + media + .play() + .catch((e) => { + if (e.name === 'NotAllowedError') { + attachEventListeners() + } + }) + .then(() => { + volumetric.paused.set(false) + }) +} export const VolumetricComponent = defineComponent({ name: 'EE_volumetric', @@ -61,24 +86,25 @@ export const VolumetricComponent = defineComponent({ onInit: (entity) => { return { - useLoadingEffect: true, - loadingEffectTime: 0, - loadingEffectActive: true, - player: undefined! 
as VolumetricPlayer, paths: [] as string[], - paused: false, - hasTrackStopped: true, + useLoadingEffect: true, + autoplay: true, + startTime: 0, + paused: true, + initialBuffersLoaded: false, + hasAudio: true, + ended: true, volume: 1, - height: 1.6, playMode: PlayMode.loop as PlayMode, - track: 0 + track: -1 } }, toJSON: (entity, component) => { return { - useLoadingEffect: component.useLoadingEffect.value, paths: component.paths.value, + useLoadingEffect: component.useLoadingEffect.value, + autoplay: component.autoplay.value, paused: component.paused.value, volume: component.volume.value, playMode: component.playMode.value @@ -87,17 +113,22 @@ export const VolumetricComponent = defineComponent({ onSet: (entity, component, json) => { setComponent(entity, ShadowComponent) - if (!json) return - if (typeof json?.useLoadingEffect === 'boolean' && json.useLoadingEffect !== component.useLoadingEffect.value) { + if (typeof json.paths === 'object') { + component.paths.set(json.paths) + } + + if (typeof json.useLoadingEffect === 'boolean') { component.useLoadingEffect.set(json.useLoadingEffect) } - if (typeof json.paths === 'object') { - component.paths.set(json.paths) + if (typeof json.autoplay === 'boolean') { + component.autoplay.set(json.autoplay) } - if (typeof json.paused === 'boolean') component.paused.set(json.paused) + if (typeof json.volume === 'number') { + component.volume.set(json.volume) + } // backwars-compat: convert from number enums to strings if ( @@ -135,200 +166,97 @@ export function VolumetricReactor() { const volumetric = useComponent(entity, VolumetricComponent) useEffect(() => { - if (isClient) { - import('@etherealengine/volumetric/dist/Player') - .then((module) => module.default) - .then((VolumetricPlayer) => { - const worker = createWorkerFromCrossOriginURL(VolumetricPlayer.defaultWorkerURL) - setComponent(entity, MediaElementComponent, { - element: document.createElement('video') as HTMLMediaElement - }) - const mediaElement = 
getMutableComponent(entity, MediaElementComponent) - const element = mediaElement.element.value - - element.autoplay = true - ;(element as HTMLVideoElement).playsInline = true - - element.preload = 'auto' - element.crossOrigin = 'anonymous' - - volumetric.player.set( - new VolumetricPlayer({ - renderer: EngineRenderer.instance.renderer as any, - onTrackEnd: () => { - volumetric.hasTrackStopped.set(true) - volumetric.track.set( - getNextTrack(volumetric.track.value, volumetric.paths.length, volumetric.playMode.value) - ) - }, - video: element as HTMLVideoElement, - V1Args: { - worker: worker, - targetFramesToRequest: iOS ? 10 : 90 - }, - paths: [], - playMode: PlayMode.loop - }) - ) - addObjectToGroup(entity, volumetric.player.value.mesh as any) - - const handleAutoplay = () => { - if (!volumetric.player.value.paused) volumetric.player.value.play() - } - - if (isClient) { - window.addEventListener('pointerup', handleAutoplay) - window.addEventListener('keypress', handleAutoplay) - window.addEventListener('touchend', handleAutoplay) - EngineRenderer.instance.renderer.domElement.addEventListener('pointerup', handleAutoplay) - EngineRenderer.instance.renderer.domElement.addEventListener('touchend', handleAutoplay) - } - - element.addEventListener('playing', () => { - if (audioContext.state == 'suspended') { - audioContext.resume() - } - - window.removeEventListener('pointerup', handleAutoplay) - window.removeEventListener('keypress', handleAutoplay) - window.removeEventListener('touchend', handleAutoplay) - EngineRenderer.instance.renderer.domElement.removeEventListener('pointerup', handleAutoplay) - EngineRenderer.instance.renderer.domElement.removeEventListener('touchend', handleAutoplay) - - const transform = getComponent(entity, TransformComponent) - if (!transform) return - if (volumetric.loadingEffectActive.value) { - volumetric.height.set(calculateHeight(volumetric.player.value.mesh as any) * transform.scale.y) - if (volumetric.loadingEffectTime.value === 0) - 
setupLoadingEffect(entity, volumetric.player.value!.mesh as any) - } - }) - - if (!AudioNodeGroups.get(element)) { - const source = audioContext.createMediaElementSource(element) - - if (audioContext.state == 'suspended') { - audioContext.resume() - } + setComponent(entity, MediaElementComponent, { + element: document.createElement('video') as HTMLMediaElement + }) + const videoElement = getMutableComponent(entity, MediaElementComponent) + const element = videoElement.element.value as HTMLVideoElement + element.playsInline = true + element.preload = 'auto' + element.crossOrigin = 'anonymous' - const audioNodes = createAudioNodeGroup(element, source, gainNodeMixBuses.soundEffects) + if (!AudioNodeGroups.get(element)) { + const source = audioContext.createMediaElementSource(element) + const audioNodes = createAudioNodeGroup(element, source, gainNodeMixBuses.soundEffects) - audioNodes.gain.gain.setTargetAtTime(volumetric.volume.value, audioContext.currentTime, 0.1) - } - }) + audioNodes.gain.gain.setTargetAtTime(volumetric.volume.value, audioContext.currentTime, 0.1) } }, []) - useEffect( - function updateVolume() { - const volume = volumetric.volume.value - const element = getOptionalComponent(entity, MediaElementComponent)?.element as HTMLMediaElement - if (!element) return - const audioNodes = AudioNodeGroups.get(element) - if (audioNodes) { - audioNodes.gain.gain.setTargetAtTime(volume, audioContext.currentTime, 0.1) - } - }, - [volumetric.volume, volumetric.player] - ) - useEffect(() => { - if (!volumetric.player.value) return - if (volumetric.hasTrackStopped.value) { - // Track is changed. Set the track path - if (volumetric.paths[volumetric.track.value].value) { - volumetric.loadingEffectActive.set(volumetric.useLoadingEffect.value) // set to user's value - volumetric.loadingEffectTime.set(0) - volumetric.player.value.setTrackPath(volumetric.paths[volumetric.track.value].value) - volumetric.hasTrackStopped.set(false) - } - } else { - /** Track isn't changed. 
Probably new path is added or edited. - * No need to set track path. - */ + if (!volumetric.ended.value) { + // If current track is not ended, don't change the track + return } - }, [volumetric.track, volumetric.paths, volumetric.player, volumetric.hasTrackStopped]) - useEffect(() => { - if (!volumetric.player.value) return - if (volumetric.paused.value) { - volumetric.player.value.pause() - } else { - volumetric.player.value.play() - } - }, [volumetric.paused, volumetric.player]) + const pathCount = volumetric.paths.value.length - return null -} + let nextTrack = getNextTrack(volumetric.track.value, pathCount, volumetric.playMode.value) + const ACCEPTED_TYPES = ['manifest', 'drcs', 'mp4', 'json'] -export const endLoadingEffect = (entity, object) => { - if (!hasComponent(entity, AvatarEffectComponent)) return - const plateComponent = getComponent(entity, AvatarEffectComponent) - plateComponent.originMaterials.forEach(({ id, material }) => { - object.traverse((obj) => { - if (obj.uuid === id) { - obj['material'] = material + // eslint-disable-next-line no-constant-condition + while (true) { + const path = volumetric.paths.value[nextTrack] + const extension = path ? path.split('.').pop() : '' + if (path && extension && ACCEPTED_TYPES.includes(extension)) { + break + } else { + if (nextTrack === volumetric.track.value) { + // If we've looped through all the tracks and none are valid, return + return + } + nextTrack = getNextTrack(nextTrack, pathCount, volumetric.playMode.value) + if (nextTrack === -1) return } - }) - }) - - let pillar: any = null! - let plate: any = null! 
- - const childrens = object.children - for (let i = 0; i < childrens.length; i++) { - if (childrens[i].name === 'pillar_obj') pillar = childrens[i] - if (childrens[i].name === 'plate_obj') plate = childrens[i] - } - - if (pillar !== null) { - pillar.traverse(function (child) { - if (child['material']) child['material'].dispose() - }) + } - pillar.parent.remove(pillar) - } + const resetTrack = () => { + // Overwriting with setComponent doesn't cleanup the component + removeComponent(entity, UVOL1Component) + removeComponent(entity, UVOL2Component) + volumetric.ended.set(false) + volumetric.initialBuffersLoaded.set(false) + volumetric.paused.set(true) + } - if (plate !== null) { - plate.traverse(function (child) { - if (child['material']) child['material'].dispose() - }) + resetTrack() - plate.parent.remove(plate) - } + volumetric.track.set(nextTrack) - removeComponent(entity, AvatarDissolveComponent) - removeComponent(entity, AvatarEffectComponent) -} + let manifestPath = volumetric.paths.value[nextTrack] + if (manifestPath.endsWith('.mp4')) { + // UVOL1 + manifestPath = manifestPath.replace('.mp4', '.manifest') + } else if (manifestPath.endsWith('.drcs')) { + // UVOL2 + manifestPath = manifestPath.replace('.drcs', '.manifest') + } -const setupLoadingEffect = (entity: Entity, obj: Object3D) => { - const materialList: Array = [] - obj.traverse((object: Mesh) => { - if (object.material && object.material.clone) { - // Transparency fix - const material = object.material.clone() - materialList.push({ - id: object.uuid, - material: material + fetch(manifestPath) + .then((response) => response.json()) + .then((json) => { + if ('type' in json) { + setComponent(entity, UVOL2Component, { + manifestPath: manifestPath, + data: json + }) + } else { + setComponent(entity, UVOL1Component, { + manifestPath: manifestPath, + data: json + }) + } }) - object.material = DissolveEffect.createDissolveMaterial(object as any) + }, [volumetric.paths, volumetric.playMode, volumetric.ended]) 
+ + useEffect(() => { + const volume = volumetric.volume.value + const element = getComponent(entity, MediaElementComponent).element as HTMLVideoElement + const audioNodes = AudioNodeGroups.get(element) + if (audioNodes) { + audioNodes.gain.gain.setTargetAtTime(volume, audioContext.currentTime, 0.1) } - }) - if (hasComponent(entity, AvatarEffectComponent)) removeComponent(entity, AvatarEffectComponent) - const effectEntity = createEntity() - setComponent(effectEntity, AvatarEffectComponent, { - sourceEntity: entity, - opacityMultiplier: 0, - originMaterials: materialList - }) -} + }, [volumetric.volume]) -const calculateHeight = (obj: Object3D) => { - //calculate the uvol model height - const bbox = new Box3().setFromObject(obj) - let height = 1.5 - if (bbox.max.y != undefined && bbox.min.y != undefined) { - height = bbox.max.y - bbox.min.y - } - return height + return null } diff --git a/packages/engine/src/scene/constants/UVOLTypes.ts b/packages/engine/src/scene/constants/UVOLTypes.ts new file mode 100644 index 0000000000..bbff437dc0 --- /dev/null +++ b/packages/engine/src/scene/constants/UVOLTypes.ts @@ -0,0 +1,418 @@ +/* +CPAL-1.0 License + +The contents of this file are subject to the Common Public Attribution License +Version 1.0. (the "License"); you may not use this file except in compliance +with the License. You may obtain a copy of the License at +https://github.com/EtherealEngine/etherealengine/blob/dev/LICENSE. +The License is based on the Mozilla Public License Version 1.1, but Sections 14 +and 15 have been added to cover use of software over a computer network and +provide for limited attribution for the Original Developer. In addition, +Exhibit A has been modified to be consistent with Exhibit B. + +Software distributed under the License is distributed on an "AS IS" basis, +WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License for the +specific language governing rights and limitations under the License. 
+ +The Original Code is Ethereal Engine. + +The Original Developer is the Initial Developer. The Initial Developer of the +Original Code is the Ethereal Engine team. + +All portions of the code written by the Ethereal Engine team are Copyright © 2021-2023 +Ethereal Engine. All Rights Reserved. +*/ + +export enum UVOL_TYPE { + DRACO_WITH_COMPRESSED_TEXTURE = 0, + GLB_WITH_COMPRESSED_TEXTURE = 1, + UNIFORM_SOLVE_WITH_COMPRESSED_TEXTURE = 2 +} + +export type AudioFileFormat = 'mp3' | 'wav' + +export interface AudioInput { + /** + * Path to audio an audio file. + */ + path: string + /** + * The audio encoding format. + * + * The following options are supported: + * "mp3", "wav" - MP3 audio + */ + encodeTo: AudioFileFormat[] + /** + * Path template to the output audio data. + * + * The following template substitutions are supported: + * + * [ext] - the file extension of the texture, (e.g., ".mp3", ".wav", etc.) + * + * E.g. "output/audio[ext]" + */ + outputPath: string +} + +export type GeometryFormat = 'draco' | 'glb' | 'uniform-solve' + +export interface GeometryTarget { + /** + * Geometry encoding format. + */ + format: GeometryFormat + /** + * The frame rate to encode the geometry data at. + */ + frameRate: number + /** + * Total frame count. This information is supplied by the encoder. + */ + frameCount: number + + /** + * Priority of the geometry target. + * Calculated by the the player. + * Smaller targets are given smaller priority i.e., Player assumes smaller priority can be played on low-end devices + * @default 0 + */ + priority: number +} + +export interface DracoEncodeOptions { + /** + * Draco compression level. [0-10], most=10, least=0, default=0. + */ + compressionLevel?: number + /** + * The number of bits to quantize the position attribute. Default=11. + */ + positionQuantizationBits?: number + /** + * The number of bits to quantize the texture coordinate attribute. Default=10. 
   */
  textureQuantizationBits?: number
  /**
   * The number of bits to quantize the normal vector attribute.
   * @default 8
   */
  normalQuantizationBits?: number
  /**
   * The number of bits to quantize any generic attribute.
   * @default 8
   */
  genericQuantizationBits?: number
}

export interface DRACOTarget extends GeometryTarget {
  format: 'draco'
  /**
   * Draco encoding options for the geometry data.
   */
  settings: DracoEncodeOptions
  /**
   * Scale of the model.
   * This is read by the player; the actual geometry data is not scaled.
   * @default {
   *   x: 1,
   *   y: 1,
   *   z: 1
   * }
   */
  scale: {
    x: number
    y: number
    z: number
  }
}

export interface GLBEncodeOptions {
  /**
   * Simplify meshes targeting triangle count ratio R (R should be between 0 and 1).
   * `simplifyAggressively` is not supported here, because it changes the topology:
   * @link https://meshoptimizer.org/#simplification
   * @default 1
   */
  simplificationRatio?: number
}

export interface GLBTarget extends GeometryTarget {
  format: 'glb'
  /**
   * GLB encoding options for the geometry data.
   */
  settings: GLBEncodeOptions
}

export interface UniformSolveEncodeOptions {
  /**
   * Simplify meshes targeting triangle count ratio R (R should be between 0 and 1).
   * `simplifyAggressively` is not supported here, because it changes the topology:
   * @link https://meshoptimizer.org/#simplification
   * @default 1
   */
  simplificationRatio: number
  /**
   * Segment size in seconds.
   * Hence each segment consists of `targetFrameRate` * `segmentSize` key frames
   * (except the last segment, which may have fewer key frames).
   */
  segmentSize: number

  /**
   * If set, the normal attribute is removed from the segments.
   * Recommended when rendering with MeshBasicMaterial.
   * @default false
   */
  excludeNormals: boolean
}

export interface UniformSolveTarget extends GeometryTarget {
  format: 'uniform-solve'
  /**
   * Encoding options for the uniform-solved GLBs.
   */
  settings: UniformSolveEncodeOptions
  /**
   * Number of segments.
   * This info is supplied by the encoder.
   */
  segmentCount: number
  /**
   * Number of frames in the segments.
   * This info is supplied by the encoder.
   */
  segmentFrameCount: number
}

export interface GeometryInput {
  /**
   * Path to geometry data. This can be a plain file path, or a file path with an index substitution pattern.
   *
   * Supported formats:
   * Alembic - should be specified as a plain file path, eg: input/geometry.abc
   * OBJ - should be specified with an index pattern, eg: input/frame_[0001-1000].obj
   * GLB - should be specified as a plain file path, eg: input/geometry.glb
   *
   * When referencing indexed files, the index should be specified as a range, eg: frame_[00001-10000].obj
   * If the first frame is 0, the index should be specified with all zeros, eg: frame_[00000-10000].obj
   * Indexed file names should be 0-padded to the same number of digits, eg: frame_00001.obj, frame_00002.obj, etc.
   */
  path: string
  /**
   * Frame rate of the geometry data. This is only required for OBJ files.
   */
  frameRate: number
  /**
   * Encoding targets, keyed by target name.
   * NOTE(review): the generic arguments of `Record` appear to have been stripped
   * during extraction (presumably `Record<string, GeometryTarget>`) — confirm
   * against the upstream source before relying on this type.
   */
  targets: Record
}

export type TextureFormat = 'ktx2' | 'astc/ktx'

export interface TextureTarget {
  /**
   * Texture encoding format.
   */
  format: TextureFormat
  /**
   * The frame rate to encode the geometry data at.
   */
  frameRate: number
  /**
   * Total frame count. This information is supplied by the encoder.
   */
  frameCount: number
  /**
   * Priority of the texture target.
   * Calculated by the encoder.
   * Smaller targets are given smaller priority, i.e., the player assumes
   * a smaller priority can be played on low-end devices.
   * @default 0
   */
  priority: number
}

export interface KTX2EncodeOptions {
  /**
   * The compressionLevel parameter controls the encoder performance vs. file size tradeoff for ETC1S files.
   * It does not directly control file size vs. quality - see qualityLevel.
   * Range is [0, 5].
   * @default 1
   */
  compressionLevel?: number
  /**
   * Sets the ETC1S encoder's quality level, which controls the file size vs. quality tradeoff.
   * Range is [1, 255].
   * @default 128
   */
  qualityLevel?: number
  /**
   * Resize images to `width` x `height`.
   * If not specified, uses the image as is.
   */
  resolution: {
    width: number
    height: number
  }

  /**
   * Vertically flip images.
   */
  lower_left_maps_to_s0t0?: boolean
}
export interface KTX2TextureTarget extends TextureTarget {
  format: 'ktx2'
  settings: KTX2EncodeOptions
}

export interface ASTCEncodeOptions {
  // ASTC block footprint; larger blocks give higher compression at lower quality.
  blocksize:
    | '4x4'
    | '5x4'
    | '5x5'
    | '6x5'
    | '6x6'
    | '8x5'
    | '8x6'
    | '10x5'
    | '10x6'
    | '8x8'
    | '10x8'
    | '10x10'
    | '12x10'
    | '12x12'
  // Named astcenc quality preset, or a numeric quality level.
  quality: '-fastest' | '-fast' | '-medium' | '-thorough' | '-verythorough' | '-exhaustive' | number
  // Vertically flip images.
  yflip?: boolean
  // Resize images to `width` x `height`.
  resolution: {
    width: number
    height: number
  }
}

export interface ASTCTextureTarget extends TextureTarget {
  format: 'astc/ktx'
  settings: ASTCEncodeOptions
}

export interface TextureInput {
  /**
   * Path to texture data. This can be a plain file path, or a file path with an index substitution pattern.
   *
   * Supported formats:
   * PNG - should be specified with an index pattern, eg: input/baseColor/frame_[00001-10000].png
   * JPEG - should be specified with an index pattern, eg: input/baseColor/frame_[00001-10000].jpg
   *
   * When referencing indexed files, the index should be specified as a range, eg: frame_[00001-10000].png
   * If the first frame is 0, the index should be specified with all zeros, eg: frame_[00000-10000].png
   * Indexed file names should be 0-padded to the same number of digits, eg: frame_00001.png, frame_00002.png, etc.
   *
   * If the path is a single file, the frame number should be omitted, eg: baseColor.mp4
   */
  path: string
  /**
   * Frame rate of the texture data. When using indexed files, each file is assumed to be a single frame.
   */
  frameRate: number
  /**
   * Encoding targets, keyed by target name.
   * NOTE(review): generic arguments stripped — presumably `Record<string, TextureTarget>`; confirm upstream.
   */
  targets: Record
}

export type OptionalTextureType = 'normal' | 'metallicRoughness' | 'emissive' | 'occlusion'
export type TextureType = 'baseColor' | OptionalTextureType

export interface EncoderManifest {
  audio?: AudioInput
  geometry: GeometryInput | GeometryInput[]
  /**
   * Path template to the output geometry data.
   *
   * The following template substitutions are supported:
   * [target] - one of the geometry targets, defined in the "targets" section
   * [index] - the index of the frame
   * [ext] - the file extension of the data
   *
   * E.g. "output/geometry_[target]/[index][ext]"
   */
  geometryOutputPath: string
  texture: {
    baseColor: TextureInput | TextureInput[]
    normal?: TextureInput | TextureInput[]
    metallicRoughness?: TextureInput | TextureInput[]
    emissive?: TextureInput | TextureInput[]
    occlusion?: TextureInput | TextureInput[]
  }
  /**
   * Path template to the output texture data.
   *
   * The following template substitutions are supported:
   * [target] - one of the texture targets, defined in the "targets" section
   * [index] - 0-padded index for each file with the same extension, e.g., ("000001", "000002", etc.)
   * [ext] - the file extension of the texture, (e.g., ".mp4", ".ktx2", ".astc.ktx", etc.)
   *
   * E.g. "output/texture_[target]_[type]/[index][ext]"
   */
  textureOutputPath: string
}

export interface BasePlayerManifest {
  // Total playback duration in seconds.
  duration: number
  audio?: {
    path: AudioInput['outputPath']
    formats: AudioFileFormat[]
    /**
     * Playback rate.
     * This is read by the player. The actual audio data is not changed.
     * @default 1
     */
    playbackRate: number
  }
  // NOTE(review): the `Record` fields below lost their generic arguments in
  // extraction (presumably `Record<string, TextureTarget>`); confirm upstream.
  texture: {
    baseColor: {
      targets: Record
      path: EncoderManifest['textureOutputPath']
    }
  } & Partial<{
    [key in OptionalTextureType]: {
      targets: Record
    }
  }>
}

export interface DRACO_Manifest extends BasePlayerManifest {
  type: UVOL_TYPE.DRACO_WITH_COMPRESSED_TEXTURE
  geometry: {
    // NOTE(review): presumably `Record<string, DRACOTarget>` — generics stripped; confirm upstream.
    targets: Record
    path: EncoderManifest['geometryOutputPath']
  }
}

export interface GLB_Manifest extends BasePlayerManifest {
  type: UVOL_TYPE.GLB_WITH_COMPRESSED_TEXTURE
  geometry: {
    // NOTE(review): presumably `Record<string, GLBTarget>` — generics stripped; confirm upstream.
    targets: Record
    path: EncoderManifest['geometryOutputPath']
  }
}

export interface UniformSolve_Manifest extends BasePlayerManifest {
  type: UVOL_TYPE.UNIFORM_SOLVE_WITH_COMPRESSED_TEXTURE
  geometry: {
    // NOTE(review): presumably `Record<string, UniformSolveTarget>` — generics stripped; confirm upstream.
    targets: Record
    path: EncoderManifest['geometryOutputPath']
  }
}

// Union of all manifest variants the player can consume.
export type PlayerManifest = DRACO_Manifest | GLB_Manifest | UniformSolve_Manifest

// Zero-padding width used for frame indices when converting Alembic to OBJ sequences.
export const ABC_TO_OBJ_PADDING = 7

// Maps each supported format identifier to its file extension.
// NOTE(review): the `Record` type arguments were stripped in extraction —
// presumably `Record<AudioFileFormat | GeometryFormat | TextureFormat, string>`;
// confirm against the upstream source.
export const FORMAT_TO_EXTENSION: Record = {
  mp3: '.mp3',
  wav: '.wav',
  draco: '.drc',
  glb: '.glb',
  'uniform-solve': '.glb',
  ktx2: '.ktx2',
  'astc/ktx': '.ktx'
}