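/*!
 * panorama — minified webpack UMD bundle (generated build output; ~208 KiB).
 * Exposes `panorama` on window / module.exports / AMD. Judging from the bundled
 * modules, it packs Babel runtime helpers, an HTTP/fetch utility layer, 2D-canvas
 * image preprocessing (fromPixels / reshape / normalize), a chunked binary model
 * loader, an op graph, a WebGL 1/2 context wrapper, and GLSL shader templates for
 * conv2d, conv2d_transpose, depthwise conv2d, elementwise_add fusions and pool2d.
 */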
!function(t,e){"object"==typeof exports&&"object"==typeof module?module.exports=e():"function"==typeof define&&define.amd?define([],e):"object"==typeof exports?exports.panorama=e():t.panorama=e()}(window,(function(){return function(t){var e={};function n(r){if(e[r])return e[r].exports;var o=e[r]={i:r,l:!1,exports:{}};return t[r].call(o.exports,o,o.exports,n),o.l=!0,o.exports}return n.m=t,n.c=e,n.d=function(t,e,r){n.o(t,e)||Object.defineProperty(t,e,{enumerable:!0,get:r})},n.r=function(t){"undefined"!=typeof Symbol&&Symbol.toStringTag&&Object.defineProperty(t,Symbol.toStringTag,{value:"Module"}),Object.defineProperty(t,"__esModule",{value:!0})},n.t=function(t,e){if(1&e&&(t=n(t)),8&e)return t;if(4&e&&"object"==typeof t&&t&&t.__esModule)return t;var r=Object.create(null);if(n.r(r),Object.defineProperty(r,"default",{enumerable:!0,value:t}),2&e&&"string"!=typeof t)for(var o in t)n.d(r,o,function(e){return t[e]}.bind(null,o));return r},n.n=function(t){var e=t&&t.__esModule?function(){return t.default}:function(){return t};return n.d(e,"a",e),e},n.o=function(t,e){return Object.prototype.hasOwnProperty.call(t,e)},n.p="",n(n.s=58)}([function(t,e,n){t.exports=n(30)},function(t,e){t.exports=function(t,e){if(!(t instanceof e))throw new TypeError("Cannot call a class as a function")}},function(t,e){function n(t,e){for(var n=0;n<e.length;n++){var r=e[n];r.enumerable=r.enumerable||!1,r.configurable=!0,"value"in r&&(r.writable=!0),Object.defineProperty(t,r.key,r)}}t.exports=function(t,e,r){return e&&n(t.prototype,e),r&&n(t,r),t}},function(t,e,n){var r=n(27),o=n(28),i=n(16),a=n(29);t.exports=function(t,e){return r(t)||o(t,e)||i(t,e)||a()}},function(t,e){function n(t,e,n,r,o,i,a){try{var s=t[i](a),u=s.value}catch(t){return void n(t)}s.done?e(u):Promise.resolve(u).then(r,o)}t.exports=function(t){return function(){var e=this,r=arguments;return new Promise((function(o,i){var a=t.apply(e,r);function s(t){n(a,o,i,s,u,"next",t)}function u(t){n(a,o,i,s,u,"throw",t)}s(void 0)}))}}},function(t,e,n){"use strict";n.d(e,"a",(function(){return s}));var r=n(1),o=n.n(r),i=n(2),a=n.n(i),s=function(){function t(){o()(this,t),this.ENV={}}return a()(t,null,[{key:"setEntry",value:function(t,e){this.ENV[t]=e}},{key:"env",value:function(){return this.ENV||(this.ENV=new t),this.ENV}}]),t}()},function(t,e,n){var r=n(31),o=n(32),i=n(16),a=n(33);t.exports=function(t){return r(t)||o(t)||i(t)||a()}},function(t,e,n){"use strict";var r=n(19),o=n(38),i=Object.prototype.toString;function a(t){return"[object Array]"===i.call(t)}function s(t){return null!==t&&"object"==typeof t}function u(t){return"[object Function]"===i.call(t)}function _(t,e){if(null!=t)if("object"!=typeof t&&(t=[t]),a(t))for(var n=0,r=t.length;n<r;n++)e.call(null,t[n],n,t);else for(var o in t)Object.prototype.hasOwnProperty.call(t,o)&&e.call(null,t[o],o,t)}t.exports={isArray:a,isArrayBuffer:function(t){return"[object ArrayBuffer]"===i.call(t)},isBuffer:o,isFormData:function(t){return"undefined"!=typeof FormData&&t instanceof FormData},isArrayBufferView:function(t){return"undefined"!=typeof ArrayBuffer&&ArrayBuffer.isView?ArrayBuffer.isView(t):t&&t.buffer&&t.buffer instanceof ArrayBuffer},isString:function(t){return"string"==typeof t},isNumber:function(t){return"number"==typeof t},isObject:s,isUndefined:function(t){return void 0===t},isDate:function(t){return"[object Date]"===i.call(t)},isFile:function(t){return"[object File]"===i.call(t)},isBlob:function(t){return"[object Blob]"===i.call(t)},isFunction:u,isStream:function(t){return 
s(t)&&u(t.pipe)},isURLSearchParams:function(t){return"undefined"!=typeof URLSearchParams&&t instanceof URLSearchParams},isStandardBrowserEnv:function(){return("undefined"==typeof navigator||"ReactNative"!==navigator.product)&&("undefined"!=typeof window&&"undefined"!=typeof document)},forEach:_,merge:function t(){var e={};function n(n,r){"object"==typeof e[r]&&"object"==typeof n?e[r]=t(e[r],n):e[r]=n}for(var r=0,o=arguments.length;r<o;r++)_(arguments[r],n);return e},extend:function(t,e,n){return _(e,(function(e,o){t[o]=n&&"function"==typeof e?r(e,n):e})),t},trim:function(t){return t.replace(/^\s*/,"").replace(/\s*$/,"")}}},function(t,e){t.exports=function(t,e,n){return e in t?Object.defineProperty(t,e,{value:n,enumerable:!0,configurable:!0,writable:!0}):t[e]=n,t}},function(t,e){function n(e){return t.exports=n=Object.setPrototypeOf?Object.getPrototypeOf:function(t){return t.__proto__||Object.getPrototypeOf(t)},n(e)}t.exports=n},function(t,e,n){var r=n(34);t.exports=function(t,e){if("function"!=typeof e&&null!==e)throw new TypeError("Super expression must either be null or a function");t.prototype=Object.create(e&&e.prototype,{constructor:{value:t,writable:!0,configurable:!0}}),e&&r(t,e)}},function(t,e,n){var r=n(35),o=n(36);t.exports=function(t,e){return!e||"object"!==r(e)&&"function"!=typeof e?o(t):e}},function(t,e,n){"use strict";(function(e){var r=n(7),o=n(41),i={"Content-Type":"application/x-www-form-urlencoded"};function a(t,e){!r.isUndefined(t)&&r.isUndefined(t["Content-Type"])&&(t["Content-Type"]=e)}var s,u={adapter:(("undefined"!=typeof XMLHttpRequest||void 0!==e)&&(s=n(20)),s),transformRequest:[function(t,e){return o(e,"Content-Type"),r.isFormData(t)||r.isArrayBuffer(t)||r.isBuffer(t)||r.isStream(t)||r.isFile(t)||r.isBlob(t)?t:r.isArrayBufferView(t)?t.buffer:r.isURLSearchParams(t)?(a(e,"application/x-www-form-urlencoded;charset=utf-8"),t.toString()):r.isObject(t)?(a(e,"application/json;charset=utf-8"),JSON.stringify(t)):t}],transformResponse:[function(t){if("string"==typeof t)try{t=JSON.parse(t)}catch(t){}return t}],timeout:0,xsrfCookieName:"XSRF-TOKEN",xsrfHeaderName:"X-XSRF-TOKEN",maxContentLength:-1,validateStatus:function(t){return t>=200&&t<300}};u.headers={common:{Accept:"application/json, text/plain, */*"}},r.forEach(["delete","get","head"],(function(t){u.headers[t]={}})),r.forEach(["post","put","patch"],(function(t){u.headers[t]=r.merge(i)})),t.exports=u}).call(this,n(40))},function(t,e,n){"use strict";var r=Object.prototype.hasOwnProperty,o=Array.isArray,i=function(){for(var t=[],e=0;e<256;++e)t.push("%"+((e<16?"0":"")+e.toString(16)).toUpperCase());return t}(),a=function(t,e){for(var n=e&&e.plainObjects?Object.create(null):{},r=0;r<t.length;++r)void 0!==t[r]&&(n[r]=t[r]);return n};t.exports={arrayToObject:a,assign:function(t,e){return Object.keys(e).reduce((function(t,n){return t[n]=e[n],t}),t)},combine:function(t,e){return[].concat(t,e)},compact:function(t){for(var e=[{obj:{o:t},prop:"o"}],n=[],r=0;r<e.length;++r)for(var i=e[r],a=i.obj[i.prop],s=Object.keys(a),u=0;u<s.length;++u){var _=s[u],l=a[_];"object"==typeof l&&null!==l&&-1===n.indexOf(l)&&(e.push({obj:a,prop:_}),n.push(l))}return function(t){for(;t.length>1;){var e=t.pop(),n=e.obj[e.prop];if(o(n)){for(var r=[],i=0;i<n.length;++i)void 0!==n[i]&&r.push(n[i]);e.obj[e.prop]=r}}}(e),t},decode:function(t,e,n){var r=t.replace(/\+/g," ");if("iso-8859-1"===n)return r.replace(/%[0-9a-f]{2}/gi,unescape);try{return decodeURIComponent(r)}catch(t){return r}},encode:function(t,e,n){if(0===t.length)return t;var 
r=t;if("symbol"==typeof t?r=Symbol.prototype.toString.call(t):"string"!=typeof t&&(r=String(t)),"iso-8859-1"===n)return escape(r).replace(/%u[0-9a-f]{4}/gi,(function(t){return"%26%23"+parseInt(t.slice(2),16)+"%3B"}));for(var o="",a=0;a<r.length;++a){var s=r.charCodeAt(a);45===s||46===s||95===s||126===s||s>=48&&s<=57||s>=65&&s<=90||s>=97&&s<=122?o+=r.charAt(a):s<128?o+=i[s]:s<2048?o+=i[192|s>>6]+i[128|63&s]:s<55296||s>=57344?o+=i[224|s>>12]+i[128|s>>6&63]+i[128|63&s]:(a+=1,s=65536+((1023&s)<<10|1023&r.charCodeAt(a)),o+=i[240|s>>18]+i[128|s>>12&63]+i[128|s>>6&63]+i[128|63&s])}return o},isBuffer:function(t){return!(!t||"object"!=typeof t)&&!!(t.constructor&&t.constructor.isBuffer&&t.constructor.isBuffer(t))},isRegExp:function(t){return"[object RegExp]"===Object.prototype.toString.call(t)},maybeMap:function(t,e){if(o(t)){for(var n=[],r=0;r<t.length;r+=1)n.push(e(t[r]));return n}return e(t)},merge:function t(e,n,i){if(!n)return e;if("object"!=typeof n){if(o(e))e.push(n);else{if(!e||"object"!=typeof e)return[e,n];(i&&(i.plainObjects||i.allowPrototypes)||!r.call(Object.prototype,n))&&(e[n]=!0)}return e}if(!e||"object"!=typeof e)return[e].concat(n);var s=e;return o(e)&&!o(n)&&(s=a(e,i)),o(e)&&o(n)?(n.forEach((function(n,o){if(r.call(e,o)){var a=e[o];a&&"object"==typeof a&&n&&"object"==typeof n?e[o]=t(a,n,i):e.push(n)}else e[o]=n})),e):Object.keys(n).reduce((function(e,o){var a=n[o];return r.call(e,o)?e[o]=t(e[o],a,i):e[o]=a,e}),s)}}},function(t,e){t.exports=function(t){throw new Error('"'+t+'" is read-only')}},function(t,e,n){"use strict";n.d(e,"a",(function(){return Nt}));var r=n(3),o=n.n(r),i=n(0),a=n.n(i),s=n(4),u=n.n(s),_=n(1),l=n.n(_),c=n(2),h=n.n(c),f=n(8),p=n.n(f),T=n(5);function E(t,e){var n=Object.keys(t);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(t);e&&(r=r.filter((function(e){return Object.getOwnPropertyDescriptor(t,e).enumerable}))),n.push.apply(n,r)}return n}function d(t){for(var e=1;e<arguments.length;e++){var n=null!=arguments[e]?arguments[e]:{};e%2?E(Object(n),!0).forEach((function(e){p()(t,e,n[e])})):Object.getOwnPropertyDescriptors?Object.defineProperties(t,Object.getOwnPropertyDescriptors(n)):E(Object(n)).forEach((function(e){Object.defineProperty(t,e,Object.getOwnPropertyDescriptor(n,e))}))}return t}var g=function(){function t(){l()(this,t);var e=!T.a.env().canvas;this.fromPixels2DContext=e&&document.createElement("canvas").getContext("2d"),this.fromPixels2DContext2=e&&document.createElement("canvas").getContext("2d"),this.defaultWidth=224,this.defaultHeight=224,this.minPixels=225,this.pixels="",this.defaultParams={gapFillWith:"#000",mean:[0,0,0],std:[1,1,1]},this.imageData=null}return h()(t,[{key:"process",value:function(t){var e=t.input,n=(t.mode,t.channel,t.rotate,d(d({},this.defaultParams),t.params));if(!this.result){var r=o()(n.targetShape,4),i=(r[0],r[1]),a=r[2],s=r[3];this.result=new Float32Array(a*s*i)}return this.fromPixels(e,n)}},{key:"reshape",value:function(t,e,n){for(var r=n.sw,o=n.sh,i=e.width,a=e.height,s=Math.ceil((r-i)/2),u=Math.ceil((o-a)/2),_=t.data,l=[],c=[],h=[],f=e.mean,p=e.std,T=0;T<_.length;T+=4){var E=T/4,d=Math.floor(E/r),g=E-d*r-1;g>=s&&g<s+i&&d>=u&&d<u+a&&(l.push((_[T]/255-f[0])/p[0]),c.push((_[T+1]/255-f[1])/p[1]),h.push((_[T+2]/255-f[2])/p[2]))}var I=c.concat(h);return l.concat(I)}},{key:"allReshapeToRGB",value:function(t,e){var n=e.mean,r=e.std,i=e.normalizeType,a=void 0===i?0:i,s=e.targetShape,u=o()(s,4),_=(u[0],u[1]),l=u[2],c=u[3],h=t.data||t,f=this.result,p=0;if(!f){var T=o()(s,4),E=(T[0],T[1]),d=T[2],g=T[3];f=new 
Float32Array(d*g*E)}for(var I=0;I<l;++I)for(var m=I*c,x=0;x<c;++x)for(var v=m+x,R=0;R<_;++R){var H=4*v+(e.bgr?2-R:R);switch(a){case 0:f[p]=h[H]/255;break;case 1:f[p]=(h[H]-128)/128;break;default:f[p]=h[H]}f[p]-=n[R],f[p]/=r[R],p++}return f}},{key:"allReshapeToBGR",value:function(t,e){for(var n=o()(e.targetShape,4),r=(n[0],n[1]),i=n[2],a=n[3],s=t.data||t,u=e.mean,_=e.std,l=this.result,c=0,h=0;h<i;++h)for(var f=h*a,p=0;p<a;++p)for(var T=f+p,E=0;E<r;++E){var d=4*T+(2-E);l[c]=s[d],l[c]-=u[2-E],l[c]/=_[2-E],c++}return l}},{key:"reSize",value:function(t,e){var n=this.pixelWidth,r=this.pixelHeight;if(e&&e.scale){var o=n,i=r;if("number"==typeof e.scale)o=i=e.scale;else{var a=e.scale;o=a.w,i=a.h}return this.fromPixels2DContext.canvas.width=o,this.fromPixels2DContext.canvas.height=i,this.fromPixels2DContext.drawImage(t,0,0,o,i),this.setInputCanvas(t),{sw:o,sh:i}}}},{key:"resizeAndFitTargetSize",value:function(t,e){var n=this.pixelWidth,r=this.pixelHeight,o=n,i=r;n<r?(o=e.scale,i=Math.round(o*r/n)):(i=e.scale,o=Math.round(i*n/r)),this.fromPixels2DContext.canvas.width=o,this.fromPixels2DContext.canvas.height=i;var a=e.targetSize.width,s=e.targetSize.height;this.fromPixels2DContext.drawImage(t,0,0,o,i);var u=(o-a)/2,_=(i-s)/2;o=a,i=s;var l=this.getImageData(e,u,_,{sw:o,sh:i});return this.setInputCanvas(t),l}},{key:"fitToTargetSize",value:function(t,e,n){var r=e.targetSize.width,o=e.targetSize.height;this.fromPixels2DContext.canvas.width=r,this.fromPixels2DContext.canvas.height=o,this.fromPixels2DContext.fillStyle=e.gapFillWith,this.fromPixels2DContext.fillRect(0,0,o,r);var i=r,a=o,s=0,u=0;return r/o*this.pixelHeight/this.pixelWidth>=1?(i=Math.round(a*this.pixelWidth/this.pixelHeight),s=Math.floor((r-i)/2)):(a=Math.round(i*this.pixelHeight/this.pixelWidth),u=Math.floor((o-a)/2)),n?this.fromPixels2DContext.drawImage(t,s,u,i,a):this.fromPixels2DContext.drawImage(t,0,0,i,a),this.setInputCanvas(t),{sw:r,sh:o}}},{key:"setInputCanvas",value:function(t){var e=this.pixelWidth,n=this.pixelHeight;this.fromPixels2DContext2.canvas.width=e,this.fromPixels2DContext2.canvas.height=n,this.fromPixels2DContext2.drawImage(t,0,0,e,n)}},{key:"getImageData",value:function(t,e,n,r){var o=r.sw,i=r.sh;return this.fromPixels2DContext.getImageData(e,n,o,i)}},{key:"grayscale",value:function(t){for(var e=t.data,n=0;n<e.length;n+=4){var r=(e[n]+e[n+1]+e[n+2])/3;e[n]=r,e[n+1]=r,e[n+2]=r}return e}},{key:"fromPixels",value:function(t,e){var n,r,o;return(t instanceof HTMLImageElement||t instanceof HTMLVideoElement)&&(this.pixelWidth=t.naturalWidth||t.videoWidth||t.width,this.pixelHeight=t.naturalHeight||t.videoWidth||t.height,e.scale&&e.targetSize?(n=this.resizeAndFitTargetSize(t,e),r=this.fromPixels2DContext2.getImageData(0,0,this.pixelWidth,this.pixelHeight)):e.targetSize?(o=this.fitToTargetSize(t,e,e.center),n=this.getImageData(e,0,0,o),r=this.fromPixels2DContext2.getImageData(0,0,this.pixelWidth,this.pixelHeight)):(console.log("humanseg!"),o=this.reSize(t,e),n=this.getImageData(e,0,0,o),r=this.fromPixels2DContext2.getImageData(0,0,this.pixelWidth,this.pixelHeight))),this.imageData=n,e.gray&&(n=grayscale(n)),e.reShape&&(n=this.reshape(n,e,o)),[{data:n=this.allReshapeToRGB(n,e),shape:e.shape||e.targetShape,name:"image",canvas:r}]}}]),t}(),I=function(){function 
t(e,n){l()(this,t),this.version="0.0.1",this.data={},this.modelGonfig=e,this.options=n,this.multipart=!1,this.test=!1,this.chunkNum=0,this.params={type:"fetch"},this.fetch=T.a.env().fetch||this.fetchFunc,this.options&&(this.multipart=this.options.multipart,"binary"===n.dataType&&(this.binaryOption=n.options,this.dataType=n.dataType),n.test&&(this.test=!0)),this.loadOptions||(this.loadOptions={})}var e,n;return h()(t,[{key:"fetchOneChunk",value:function(t){return this.fetch(t,{type:"arrayBuffer"})}},{key:"fetchJson",value:function(t){return this.fetch(t,{type:"json"})}},{key:"fetchChunks",value:function(){for(var t=this,e=this.chunkNum||this.binaryOption.fileCount,n=[],r=1;r<=e;r++)n.push(this.fetchOneChunk(this.modelGonfig.dir+this.binaryOption.getFileName(r)));return Promise.all(n).then((function(e){var n,r=0,o=[];e.forEach((function(t){n=new Float32Array(t),o.push(n),r+=n.length})),t.allData=new Float32Array(r);var i=0;o.forEach((function(e){e.forEach((function(e){t.allData[i]=e,i+=1}))}))}))}},{key:"fetchData",value:function(t){var e=this.modelGonfig.dir+t+".json";return new Promise((function(t,n){fetchJson(e,{method:"get",mode:"cors",credentials:"include",headers:{"Content-Type":"application/json;charset=utf-8"}}).then((function(e){return t(e)})).then((function(t){return n(t)}))}))}},{key:"fetchAllData",value:(n=u()(a.a.mark((function t(e){var n,r=this;return a.a.wrap((function(t){for(;;)switch(t.prev=t.next){case 0:return n=e.map((function(t){return t.name?r.fetchData(t.name).then((function(e){return t.data=e})):Promise.resolve()})),t.abrupt("return",Promise.all(n));case 4:case"end":return t.stop()}}),t)}))),function(t){return n.apply(this,arguments)})},{key:"traverse",value:function(t){var e,n=this,r=0;t.filter((function(t){return t.name})).forEach((function(t){e=t.shape.reduce((function(t,e){return t*e})),t.persistable&&(t.data=n.allData.slice(r,r+e),r+=e)}))}},{key:"fetchFunc",value:function(t,e){var n=e=e||this.params,r=n.method,o=void 0===r?"get":r,i=(n.mode,n.type),a=new Headers,s=fetch(t,{method:o,headers:a});switch(i){case"json":return s.then((function(t){return t.json()}));case"arrayBuffer":return s.then((function(t){return t.arrayBuffer()}));default:return s}}},{key:"fetchModel",value:function(t){var e=this;t=t||this.params;var n=this.modelGonfig.dir+this.modelGonfig.main,r=null;if(t&&"jsonp"===t.type){var o,i=document.createElement("script");i.src=n+"&jsonpCallback=fn",window.fn=function(t){o=t},document.body.appendChild(i),r=new Promise((function(t,e){i.onload=function(e){t(o)},i.onerror=function(){e(o)}})),this.data=r}else"fetch"===t.type?(r=new Promise((function(r,o){e.fetchJson(n,t).then((function(t){return r(t)})).then((function(t){return o(t)}))})),this.data=r):"xhr"===t.type&&(this.data=r);return r}},{key:"load",value:(e=u()(a.a.mark((function t(){var e,n,r=this;return a.a.wrap((function(t){for(;;)switch(t.prev=t.next){case 0:return e=this,t.next=3,this.fetchModel();case 3:if(n=this.data=t.sent,this.chunkNum=n.chunkNum,!0!==this.multipart){t.next=13;break}if("binary"!==this.dataType){t.next=11;break}return t.next=9,this.fetchChunks().then((function(){return r.traverse(n.vars)}));case 9:t.next=13;break;case 11:return t.next=13,e.fetchAllData(n.vars);case 13:return t.abrupt("return",n);case 14:case"end":return t.stop()}}),t,this)}))),function(){return e.apply(this,arguments)})}]),t}(),m=n(6),x=n.n(m),v=function(){function 
t(e){l()(this,t),this.inputs=e.inputs,this.outputs=e.outputs,this.attrs=e.attrs||e["sub-attrs"],this.type=e.type,this.finish=!1,this.next=null,this.opData=null,this.id=+new Date+e.type+Math.floor(10*Math.random()+1)+e.idx}return h()(t,[{key:"execute",value:function(t,e){"feed"!==this.type?t.run(this.type,this.opData,e):+Date.now()}},{key:"inputsName",get:function(){if("feed"===this.type)return this.inputs.X;if("batchnorm"===this.type||"batch_norm"===this.type)return this.inputs.X;if("conv2d"===this.type)return this.inputs.Input;if("depthwise_conv2d"===this.type)return this.inputs.Input;if("conv2d_transpose"===this.type)return this.inputs.Input;if("elementwise_add"===this.type)return this.inputs.X.concat(this.inputs.Y);if("concat"===this.type)return this.inputs.X.concat(this.inputs.Y||[]);if("relu"===this.type||"leaky_relu"===this.type)return this.inputs.X;if("pool2d"===this.type)return this.inputs.X;if("mul"===this.type)return this.inputs.X;if("softmax"===this.type)return this.inputs.X;if("scale"===this.type)return this.inputs.X;if("fetch"===this.type)return this.inputs.X;if("box_coder"===this.type){var t=this.inputs,e=t.PriorBox,n=t.PriorBoxVar,r=t.TargetBox;return[].concat(x()(e),x()(n),x()(r))}if("prior_box"===this.type)return this.inputs.Input.concat(this.inputs.Image);if("connect"===this.type){var o=this.inputs,i=o.X,a=void 0===i?[]:i,s=o.Y,u=void 0===s?[]:s,_=o.Z,l=void 0===_?[]:_;return[].concat(x()(a),x()(u),x()(l))}return this.inputs.Input||this.inputs.X}},{key:"outputsName",get:function(){if(this.outputs.Output)return this.outputs.Output;if(this.outputs.out)return this.outputs.out;if("conv2d"===this.type)return this.outputs.Output;if("depthwise_conv2d"===this.type)return this.outputs.Output;if("box_coder"===this.type)return this.outputs.out=this.outputs.OutputBox,delete this.outputs.OutputBox,this.outputs.out;if("prior_box"===this.type){var t=this.outputs,e=t.Boxes,n=t.Variances;return this.outputs.out=[e[0],n[0]],delete this.outputs.Boxes,delete this.outputs.Variances,this.outputs.out}return"batchnorm"===this.type||"batch_norm"===this.type?(this.outputs.out=this.outputs.Y,delete this.outputs.Y,this.outputs.out):this.outputs.Y?(this.outpus.out=this.outputs.Y,this.outputs.out):this.outputs.Out||this.outputs.Output}}]),t}(),R={alpha:!1,antialias:!1,premultipliedAlpha:!1,preserveDrawingBuffer:!1,depth:!1,stencil:!1,failIfMajorPerformanceCaveat:!0},H=function(){function t(){var e=arguments.length>0&&void 0!==arguments[0]?arguments[0]:{};l()(this,t),this.version=2,this.opts=e,this.frameBufferSupportFloat=!0,this.isFloatTextureReadPixelsEnabled=!0,e.width_raw_canvas=Number(e.width_raw_canvas)||512,e.height_raw_canvas=Number(e.height_raw_canvas)||512;var n=null;if(this.opts.gl)n=this.opts.gl,this.version=1,this.internalFormat=n.RGBA,this.textureFormat=n.RGBA,this.downloadInternalFormat=n.RGBA,this.textureFloat=n.getExtension("OES_texture_float"),this.textureHalfFloat=n.getExtension("OES_texture_half_float"),this.frameBufferSupportFloat=this.isDownloadFloatTextureEnabled(n),this.isFloatTextureReadPixelsEnabled=this.isFloatTextureReadPixelsEnabledMethod(n,1);else{var r=T.a.env().canvas||e.el||document.createElement("canvas");!T.a.env().canvas&&r.addEventListener("webglcontextlost",(function(t){t.preventDefault(),console.log("webgl context is 
lost~")}),!1),(n=r.getContext("webgl2",R))?(this.version=2,this.textureFloat=n.getExtension("EXT_color_buffer_float"),this.internalFormat=n.R16F,this.textureFormat=n.RED,this.downloadInternalFormat=n.RGBA16F):(n=r.getContext("webgl",R)||r.getContext("experimental-webgl",R),this.version=1,this.internalFormat=n.RGBA,this.textureFormat=n.RGBA,this.downloadInternalFormat=n.RGBA,n?(this.textureFloat=n.getExtension("OES_texture_float"),this.textureHalfFloat=n.getExtension("OES_texture_half_float"),this.frameBufferSupportFloat=this.isDownloadFloatTextureEnabled(n),this.isFloatTextureReadPixelsEnabled=this.isFloatTextureReadPixelsEnabledMethod(n,1)):(this.version=0,alert("当前环境创建webgl context失败")))}this.maxTextureSize=n.getParameter(n.MAX_TEXTURE_SIZE),this.maxTextureImageUnits=n.getParameter(n.MAX_TEXTURE_IMAGE_UNITS),n.disable(n.DEPTH_TEST),n.disable(n.STENCIL_TEST),n.disable(n.BLEND),n.disable(n.DITHER),n.disable(n.POLYGON_OFFSET_FILL),n.disable(n.SAMPLE_COVERAGE),n.enable(n.SCISSOR_TEST),n.enable(n.CULL_FACE),n.cullFace(n.BACK),this.gl=n,this.initCache(),this.waits=0}return h()(t,[{key:"getWebglVersion",value:function(){return this.version}},{key:"getWebglMaxTextureSize",value:function(){return this.maxTextureSize}},{key:"getWebglMaxTextureImageUnits",value:function(){return this.maxTextureImageUnits}},{key:"getIsFrameBufferSupportFloat",value:function(){return this.frameBufferSupportFloat}},{key:"getIsFloatTextureReadPixelsEnabled",value:function(){return this.isFloatTextureReadPixelsEnabled}},{key:"isFloatTextureReadPixelsEnabledMethod",value:function(t,e){if(0===e)return!1;if(1===e){if(null==t.getExtension("OES_texture_float"))return!1}else if(null==t.getExtension("EXT_color_buffer_float")||null==t.getExtension("EXT_color_buffer_half_float"))return!1;var n=t.createFramebuffer(),r=t.createTexture();t.bindTexture(t.TEXTURE_2D,r);var o=2===e?t.RGBA32F:t.RGBA;t.texImage2D(t.TEXTURE_2D,0,o,1,1,0,t.RGBA,this.textureHalfFloat.HALF_FLOAT_OES,null),t.bindFramebuffer(t.FRAMEBUFFER,n),t.framebufferTexture2D(t.FRAMEBUFFER,t.COLOR_ATTACHMENT0,t.TEXTURE_2D,r,0);var i=t.checkFramebufferStatus(t.FRAMEBUFFER)===t.FRAMEBUFFER_COMPLETE;t.readPixels(0,0,1,1,t.RGBA,t.FLOAT,new Float32Array(4));var a=t.getError()===t.NO_ERROR;return i&&a}},{key:"initCache",value:function(){this.times=0;var t=this.gl,e=new Float32Array([-1,1,0,1,-1,-1,0,0,1,1,1,1,1,-1,1,0]);this.vertexBuffer=t.createBuffer(),t.bindBuffer(t.ARRAY_BUFFER,this.vertexBuffer),t.bufferData(t.ARRAY_BUFFER,e,t.STATIC_DRAW),this.vertexShader=null,this.initShader(2===this.version?"#version 300 es\nin vec4 position;\nout vec2 vCoord;\n\nvoid main() {\n vCoord.x = (position.x + 1.0) / 2.0;\n vCoord.y = (position.y + 1.0) / 2.0;\n gl_Position = position;\n}\n":"\nprecision highp float;\nprecision highp int;\n\nattribute vec4 position;\nvarying vec2 vCoord;\n\nvoid main() {\n vCoord.x = (position.x + 1.0) / 2.0;\n vCoord.y = (position.y + 1.0) / 2.0;\n gl_Position = position;\n}\n"),this.fragmentShader=null,this.prevTexture=null,this.currentTexture=null,this.frameBuffer=t.createFramebuffer(),t.bindFramebuffer(t.FRAMEBUFFER,this.frameBuffer),this.cacheTextures={},this.uniformLocations={},this.texturesMap={},this.pbo=t.createBuffer()}},{key:"runVertexShader",value:function(t){var 
e=this.gl,n=e.getAttribLocation(t,"position");e.enableVertexAttribArray(n),e.bindBuffer(e.ARRAY_BUFFER,this.vertexBuffer),e.vertexAttribPointer(n,2,e.FLOAT,!1,16,0)}},{key:"setOutProps",value:function(t){this.width_shape_out=t.width_shape||1,this.height_shape_out=t.height_shape||1,this.width_texture_out=t.width_texture||1,this.height_texture_out=t.height_texture||1,this.channel=t.channel||0,this.total_shape=t.total_shape||0}},{key:"isFloatingTexture",value:function(){return null!==this.textureFloat}},{key:"isDownloadFloatTextureEnabled",value:function(t){var e=t.createTexture();t.bindTexture(t.TEXTURE_2D,e);t.texImage2D(t.TEXTURE_2D,0,this.downloadInternalFormat,1,1,0,t.RGBA,t.FLOAT,null);var n=t.createFramebuffer();t.bindFramebuffer(t.FRAMEBUFFER,n),t.framebufferTexture2D(t.FRAMEBUFFER,t.COLOR_ATTACHMENT0,t.TEXTURE_2D,e,0);var r=t.checkFramebufferStatus(t.FRAMEBUFFER)===t.FRAMEBUFFER_COMPLETE;return t.bindTexture(t.TEXTURE_2D,null),t.bindFramebuffer(t.FRAMEBUFFER,null),t.deleteTexture(e),t.deleteFramebuffer(n),r}},{key:"createProgram",value:function(t,e,n){var r=this.gl,o=r.createProgram();r.attachShader(o,this.vertexShader),r.attachShader(o,t),r.linkProgram(o);var i=r.createTexture();return r.bindTexture(r.TEXTURE_2D,i),r.texParameteri(r.TEXTURE_2D,r.TEXTURE_MAG_FILTER,r.NEAREST),r.texParameteri(r.TEXTURE_2D,r.TEXTURE_MIN_FILTER,r.NEAREST),r.texParameteri(r.TEXTURE_2D,r.TEXTURE_WRAP_S,r.CLAMP_TO_EDGE),r.texParameteri(r.TEXTURE_2D,r.TEXTURE_WRAP_T,r.CLAMP_TO_EDGE),r.texImage2D(r.TEXTURE_2D,0,this.downloadInternalFormat,e.width_texture,e.height_texture,0,r.RGBA,n?this.isFloatTextureReadPixelsEnabled?this.frameBufferSupportFloat?r.FLOAT:this.textureHalfFloat.HALF_FLOAT_OES:r.UNSIGNED_BYTE:this.frameBufferSupportFloat?r.FLOAT:this.textureHalfFloat.HALF_FLOAT_OES,null),r.bindTexture(r.TEXTURE_2D,null),this.texturesMap[e.tensorId]=i,o}},{key:"setProgram",value:function(t,e){this.gl.useProgram(t),this.program=t,e||this.runVertexShader(t)}},{key:"attachShader",value:function(t){var e=this.gl,n=this.program;this.textureBufferIndex=this.textureBufferIndex+1>=2?0:1,this.fragmentShader&&e.detachShader(n,this.fragmentShader),this.gl.attachShader(n,t),this.fragmentShader=t,e.linkProgram(n),0==this.times++&&(e.useProgram(n),this.runVertexShader())}},{key:"create",value:function(t,e){var n=this.gl;this.program&&this.dispose();var r=this.program=n.createProgram();this.initShader(t),this.fragmentShader=this.initShader(e,"fragment"),this.gl.attachShader(r,this.vertexShader),this.gl.attachShader(r,this.fragmentShader),n.linkProgram(r),n.useProgram(r);var o=n.getAttribLocation(r,"position");n.enableVertexAttribArray(o),n.bindBuffer(n.ARRAY_BUFFER,this.vertexBuffer),n.vertexAttribPointer(o,2,n.FLOAT,!1,16,0)}},{key:"initShader",value:function(t){var e,n=arguments.length>1&&void 0!==arguments[1]?arguments[1]:"vertex",r="vertex"===n?this.gl.VERTEX_SHADER:this.gl.FRAGMENT_SHADER;if("vertex"===n&&this.vertexShader)e=this.vertexShader;else if(e=this.gl.createShader(r),"vertex"===n&&(this.vertexShader=e),this.gl.shaderSource(e,t),this.gl.compileShader(e),!this.gl.getShaderParameter(e,this.gl.COMPILE_STATUS))throw new Error("compile: "+this.gl.getShaderInfoLog(e));return e}},{key:"updateShader",value:function(t){return 
this.gl.useProgram(this.program),this.fragmentShader&&(this.gl.detachShader(this.program,this.fragmentShader),this.gl.deleteShader(this.fragmentShader),this.gl.deleteTexture(this.texture)),this.fragmentShader=this.initShader(t,"fragment"),!0}},{key:"attachFrameBuffer",value:function(t,e){this.prevTexture=this.currentTexture,this.currentTexture=this.texturesMap[e];var n=this.gl;return n.framebufferTexture2D(n.FRAMEBUFFER,n.COLOR_ATTACHMENT0,n.TEXTURE_2D,this.currentTexture,0),n.viewport(0,0,this.width_texture_out,this.height_texture_out),n.scissor(0,0,this.width_texture_out,this.height_texture_out),this.frameBuffer}},{key:"frameBufferIsComplete",value:function(){var t,e,n,r=this.gl;switch(e=r.checkFramebufferStatus(r.FRAMEBUFFER)){case r.FRAMEBUFFER_COMPLETE:t="Framebuffer is complete.",n=!0;break;case r.FRAMEBUFFER_UNSUPPORTED:t="Framebuffer is unsupported",n=!1;break;case r.FRAMEBUFFER_INCOMPLETE_ATTACHMENT:t="Framebuffer incomplete attachment",n=!1;break;case r.FRAMEBUFFER_INCOMPLETE_DIMENSIONS:t="Framebuffer incomplete (missmatched) dimensions",n=!1;break;case r.FRAMEBUFFER_INCOMPLETE_MISSING_ATTACHMENT:t="Framebuffer incomplete missing attachment",n=!1;break;default:t="Unexpected framebuffer status: "+e,n=!1}return{isComplete:n,message:t}}},{key:"initTexture",value:function(t,e,n,r){var o,i=this.gl;if(0===n&&"feed"===e.tensorId&&this.opts&&this.opts.usePipeLine&&this.texturesMap.image)o=this.texturesMap.image,e.data=null;else if(e.data){if(r&&(n>0||0===n&&"origin"!==e.tensor))o=this.cacheTextures[""+n][e.variable+"_"+e.tensor];else o=i.createTexture(),this.cacheTextures[""+n]=this.cacheTextures[""+n]||{},this.cacheTextures[""+n][e.variable+"_"+e.tensor]=o}else o=this.texturesMap[e.tensorId];if(i.activeTexture(i["TEXTURE".concat(t)]),i.bindTexture(i.TEXTURE_2D,o),e.data&&(!r||r&&0===n&&"origin"===e.tensor))if(i.texParameteri(i.TEXTURE_2D,i.TEXTURE_MAG_FILTER,i.NEAREST),i.texParameteri(i.TEXTURE_2D,i.TEXTURE_MIN_FILTER,i.NEAREST),i.texParameteri(i.TEXTURE_2D,i.TEXTURE_WRAP_S,i.CLAMP_TO_EDGE),i.texParameteri(i.TEXTURE_2D,i.TEXTURE_WRAP_T,i.CLAMP_TO_EDGE),2==this.version)i.texImage2D(i.TEXTURE_2D,0,this.internalFormat,e.width_texture,e.height_texture,0,this.textureFormat,i.FLOAT,e.data);else{e.width_texture,e.height_texture;for(var a=new Float32Array(e.width_texture*e.height_texture*4),s=0;s<e.data.length;s++)a[4*s]=e.data[s],a[4*s+1]=0,a[4*s+2]=0,a[4*s+3]=0;i.texImage2D(i.TEXTURE_2D,0,i.RGBA,e.width_texture,e.height_texture,0,i.RGBA,i.FLOAT,a)}}},{key:"getUniformLoc",value:function(t,e,n,r){if(n)return this.uniformLocations[""+e][t+r];var o=this.gl.getUniformLocation(this.program,t);return this.uniformLocations[""+e]=this.uniformLocations[""+e]||{},this.uniformLocations[""+e][t+r]=o,o}},{key:"makeTexure",value:function(t,e){var n=arguments.length>2&&void 0!==arguments[2]?arguments[2]:{},r=this.gl,o=int(mod(float(this.textureBufferIndex),2)),i=this.textureBuffer[o];return r.bindTexture(r.TEXTURE_2D,i),r.texImage2D(r.TEXTURE_2D,0,r.RGBA,n.width_texture_out||this.width_texture_out,n.height_texture_out||this.height_texture_out,0,r.RGBA,t,e),this.attachFrameBuffer(),i}},{key:"render",value:function(){var t=arguments.length>0&&void 0!==arguments[0]?arguments[0]:[],e=arguments.length>1&&void 0!==arguments[1]?arguments[1]:0,n=arguments.length>2&&void 0!==arguments[2]&&arguments[2],r=arguments.length>3?arguments[3]:void 0,o=this.gl,i=this,a=0;t.forEach((function(t){if("texture"===t.type){var 
s=i.getUniformLoc(t.variable+"_"+t.tensor,e,n,r);if(!s)return;i.initTexture(a,t,e,n),o.uniform1i(s,a++)}else"uniform"===t.type&&o[t.setter](i.getUniformLoc(t.variable+"_"+t.tensor,e,n,r),t.data)})),o.drawArrays(o.TRIANGLE_STRIP,0,4)}},{key:"createPBO",value:function(){if(2==this.version){var t=this.gl,e=this.pbo;t.bindBuffer(t.PIXEL_PACK_BUFFER,e);var n=16*this.width_texture_out*this.height_texture_out;return t.bufferData(t.PIXEL_PACK_BUFFER,n,t.STREAM_READ),t.readPixels(0,0,this.width_texture_out,this.height_texture_out,t.RGBA,t.FLOAT,0),t.bindBuffer(t.PIXEL_PACK_BUFFER,null),e}var r=[],o=this.gl,i=o.FLOAT;return this.isFloatTextureReadPixelsEnabled?r=new Float32Array(this.width_texture_out*this.height_texture_out*4):(r=new Uint8Array(this.width_texture_out*this.height_texture_out*4),i=o.UNSIGNED_BYTE),o.readPixels(0,0,this.width_texture_out,this.height_texture_out,o.RGBA,i,r),this.isFloatTextureReadPixelsEnabled?r:new Float32Array(r.buffer)}},{key:"downloadFoat32TensorFromBuffer",value:function(t){var e=this.gl,n=4*this.width_texture_out*this.height_texture_out;if(2==this.version){var r=new Float32Array(n);e.bindBuffer(e.PIXEL_PACK_BUFFER,t),e.getBufferSubData(e.PIXEL_PACK_BUFFER,0,r),e.bindBuffer(e.PIXEL_PACK_BUFFER,null);for(var o=[],i=0;i<this.width_texture_out*this.height_texture_out;i++)o.push(r[4*i]);return o}for(var a=t,s=[],u=0;u<this.width_texture_out*this.height_texture_out;u++){var _=this.isFloatTextureReadPixelsEnabled?4*u:u;s.push(a[_])}return s}},{key:"getWebglError",value:function(t){var e=this.gl;switch(t){case e.NO_ERROR:return"NO_ERROR";case e.INVALID_ENUM:return"INVALID_ENUM";case e.INVALID_VALUE:return"INVALID_VALUE";case e.INVALID_OPERATION:return"INVALID_OPERATION";case e.INVALID_FRAMEBUFFER_OPERATION:return"INVALID_FRAMEBUFFER_OPERATION";case e.OUT_OF_MEMORY:return"OUT_OF_MEMORY";case e.CONTEXT_LOST_WEBGL:return"CONTEXT_LOST_WEBGL";default:return"Unknown error code ".concat(t)}}},{key:"createAndWaitForFence",value:function(){var t=this,e=this.gl,n=null!=e.fenceSync,r=function(){return!0};if(n){var o=e.fenceSync(e.SYNC_GPU_COMMANDS_COMPLETE,0);e.flush(),r=function(){var t=e.clientWaitSync(o,0,0);return t===e.ALREADY_SIGNALED||t===e.CONDITION_SATISFIED}}return new Promise((function(e){t.pollItem(r,e)}))}},{key:"pollItem",value:function(t,e){!function n(){t()?e():setTimeout(n,1)}()}},{key:"compute",value:function(){var t=this.gl,e=(Date.now(),new Float32Array(this.width_texture_out*this.height_texture_out*4));Date.now();t.readPixels(0,0,this.width_texture_out,this.height_texture_out,t.RGBA,t.FLOAT,e);for(var n=[],r=0;r<this.width_texture_out*this.height_texture_out;r++)n.push(e[4*r]);return n}},{key:"dispose",value:function(){var t=this,e=this.gl;this.cacheTextures={},this.programs.forEach((function(n){e.detachShader(n,t.vertexShader),e.deleteShader(t.vertexShader),e.deleteProgram(n)})),this.programs=[]}}]),t}(),O={params:"\n // 输入数据\n uniform sampler2D texture_origin;\n",func:"\n// start函数\nvoid main(void) {\n // 输出数据\n float o = getPixelsFromTexturePos_texture_origin(vCoord).r;\n float res = ACTIVE_FUNCTION(o, multi_value, bias_value);\n setOutput(res);\n}\n",confs:{dep:[{func:"getPixelsFromTexturePos",conf:{TEXTURE_NAME:"texture_origin"}}],conf:["WIDTH_SHAPE_OUT","HEIGHT_SHAPE_OUT","WIDTH_TEXTURE_OUT","HEIGHT_TEXTURE_OUT","CHANNEL_OUT","OFFSET_Y_OUT","FUSE_RELU","MULTI_VALUE","BIAS_VALUE","ACTIVE_FUNCTION"],input:[{tensor:"origin",variable:"texture",setter:"initTexture",type:"texture"}]}},A={common:{params:"\n // dynamic的input数据\n const float multi_value = 
float(MULTI_VALUE);\n const float bias_value = float(BIAS_VALUE);\n const bool fuse_relu = bool(FUSE_RELU);\n\n // 输出数据\n const int width_shape_out = WIDTH_SHAPE_OUT;\n const int height_shape_out = HEIGHT_SHAPE_OUT;\n const int width_texture_out = WIDTH_TEXTURE_OUT;\n const int height_texture_out = HEIGHT_TEXTURE_OUT;\n const int channel_out = CHANNEL_OUT;\n const int offset_y_out = OFFSET_Y_OUT;\n",func:"\n// 激活函数\nfloat prelu(float x, float p, float b) {\n float result = x;\n if (x < 0.0) {\n result = x * p;\n }\n \n return result;\n}\nfloat relu6(float x, float threshold, float b) {\n float result = max(0.0,x);\n result = min(result,threshold);\n return result;\n}\nfloat leakyRelu(float x, float p, float b) {\n float result = max(x, x * p);\n return result;\n}\n\nfloat scale(float x, float p, float b) {\n float result = p * x + b;\n return result;\n}\n\nfloat sigmoid(float x, float y, float z) {\n float result = 1.0 / (1.0 + exp(-x));\n return result;\n}\n\nfloat softmax(float x, float p, float b) {\n float result = exp(x) / (10.0 * exp(x));\n return result;\n}\n\n",prefix:"\n#ifdef GL_FRAGMENT_PRECISION_HIGH\n precision highp float;\n precision highp int;\n#else\n precision highp float;\n precision highp int;\n#endif\n varying vec2 vCoord;\n varying vec4 outColor;\n void setOutput(float result) {\n gl_FragColor.r = result;\n }\n",prefixHalf:"\n#ifdef GL_FRAGMENT_PRECISION_HIGH\n precision highp float;\n precision highp int;\n#else\n precision highp float;\n precision highp int;\n#endif\n\n #define isnan(value) isnan_custom(value)\n bool isnan_custom(float val) {\n return (val > 0. || val < 1. || val == 0.) ? false : true;\n }\n\n varying vec2 vCoord;\n varying vec4 outColor;\n void setOutput(float result) {\n if(isnan(result)) {\n gl_FragColor.r = 0.0;\n }else {\n gl_FragColor.r = result;\n }\n }\n",prefixUint:"\nprecision highp float;\nprecision highp int;\n\nvarying vec2 vCoord;\nvarying vec4 outColor;\n\nconst float FLOAT_MAX = 1.70141184e38;\nconst float FLOAT_MIN = 1.17549435e-38;\n\n#define isnan(value) isnan_custom(value)\nbool isnan_custom(float val) {\n return (val > 0. || val < 1. || val == 0.) ? 
false : true;\n}\n\nlowp vec4 encode_float(highp float v) {\n if (isnan(v)) {\n return vec4(255, 255, 255, 255);\n }\n\n highp float av = abs(v);\n\n if(av < FLOAT_MIN) {\n return vec4(0.0, 0.0, 0.0, 0.0);\n } else if(v > FLOAT_MAX) {\n return vec4(0.0, 0.0, 128.0, 127.0) / 255.0;\n } else if(v < -FLOAT_MAX) {\n return vec4(0.0, 0.0, 128.0, 255.0) / 255.0;\n }\n\n highp vec4 c = vec4(0,0,0,0);\n\n highp float e = floor(log2(av));\n highp float m = exp2(fract(log2(av))) - 1.0;\n\n c[2] = floor(128.0 * m);\n m -= c[2] / 128.0;\n c[1] = floor(32768.0 * m);\n m -= c[1] / 32768.0;\n c[0] = floor(8388608.0 * m);\n\n highp float ebias = e + 127.0;\n c[3] = floor(ebias / 2.0);\n ebias -= c[3] * 2.0;\n c[2] += floor(ebias) * 128.0;\n\n c[3] += 128.0 * step(0.0, -v);\n\n return c / 255.0;\n}\n\nvoid setOutput(float result) {\n gl_FragColor = encode_float(result);\n}\n",prefix2:"#version 300 es\n\n#ifdef GL_FRAGMENT_PRECISION_HIGH\n precision highp float;\n precision highp int;\n#else\n precision mediump float;\n precision mediump int;\n#endif\n\n// 顶点shader透传的材质坐标\n in vec2 vCoord;\n out vec4 outColor;\n void setOutput(float result) {\n outColor.r = result;\n }\n",suffix:"\nvec2 _2d_shape_texture_out = vec2(float(width_texture_out), float(height_texture_out));\nivec4 getOutputTensorPos() {\n // 获取原始长度\n vec2 outCoord = vCoord.xy * _2d_shape_texture_out;\n int x = int(outCoord.x / float(channel_out));\n int c = int(mod(outCoord.x, float(channel_out)));\n int y = int(mod(outCoord.y, float(height_shape_out)));\n int b = int(outCoord.y / float(height_shape_out));\n return ivec4(b, c, y, x);\n}\n\n\nivec4 getOutputTensorPosLimit() {\n // 获取原始长度\n vec2 outCoord = vCoord.xy * _2d_shape_texture_out;\n float offsetY = floor(outCoord.y / float(height_shape_out));\n int x = int(outCoord.x / float(channel_out));\n if (mod(offsetY, 4.0) > 0.0) {\n x += int(mod(offsetY, 4.0)) * int(ceil(float(width_shape_out) / 4.0));\n }\n int y = int(mod(outCoord.y, float(height_shape_out)));\n int c = int(mod(outCoord.x, float(channel_out)));\n int b = int(outCoord.y / float(4 * height_shape_out));\n return ivec4(b, c, y, x);\n}\n\nivec4 getOutputPackedTensorPos() {\n // 获取原始长度\n vec2 outCoord = vCoord.xy * _2d_shape_texture_out;\n int height = height_shape_out + offset_y_out;\n int x = int(outCoord.x);\n int c = int(outCoord.y / float(height / 2));\n int y = int(mod(outCoord.y, float(height / 2)));\n int b = 0;\n return ivec4(b, c, y, x);\n}\n",ivec56:"\nstruct ivec5 {\n int x;\n int y;\n int z;\n int w;\n int u;\n};\nstruct ivec6 {\n int x;\n int y;\n int z;\n int w;\n int u;\n int v;\n};\n"},ops:{conv2d_transpose:{params:"\n // conv2d的input数据\n\n // 常量\n // 卷积核\n const int length_shape_filter = LENGTH_SHAPE_FILTER;\n const int width_shape_filter = WIDTH_SHAPE_FILTER;\n const int height_shape_filter = HEIGHT_SHAPE_FILTER;\n const int width_texture_filter = WIDTH_TEXTURE_FILTER;\n const int height_texture_filter = HEIGHT_TEXTURE_FILTER;\n const int channel_filter = CHANNEL_FILTER;\n\n // 输入数据\n const int width_shape_origin = WIDTH_SHAPE_ORIGIN;\n const int height_shape_origin = HEIGHT_SHAPE_ORIGIN;\n const int length_shape_origin = LENGTH_SHAPE_ORIGIN;\n const int width_texture_origin = WIDTH_TEXTURE_ORIGIN;\n const int height_texture_origin = HEIGHT_TEXTURE_ORIGIN;\n const int channel_origin = CHANNEL_ORIGIN;\n\n // 计算相关\n // 拆分步长\n const int stride_h = int(STRIDES_X);\n const int stride_v = int(STRIDES_Y);\n // padding的数目\n const int padLeft = WIDTH_SHAPE_FILTER - PADDINGS_X - 1;\n const int padTop = HEIGHT_SHAPE_FILTER - 
PADDINGS_Y - 1;\n\n // dilation膨胀系数\n const int dilation_h = DILATIONS_X;\n const int dilation_v = DILATIONS_Y;\n // groups\n const int groups = GROUPS;\n\n // uniform变量\n // 卷积核\n uniform sampler2D texture_filter;\n\n // 输入数据\n uniform sampler2D texture_origin;\n",func:"\n // start函数\n void main(void) {\n ivec4 oPos = getOutputTensorPosLIMIT_OUT();\n int x = oPos.a;\n int c = oPos.g;\n int y = oPos.b;\n int b = oPos.r;\n float res = 0.0;\n int temp_x = 0;\n int temp_y = 0;\n float o = 0.0;\n float f = 0.0;\n\n // 获取output的坐标\n int oTensorChannel = int(c * groups / channel_out) * channel_origin;\n int oy = y - padTop;\n const int groupLen = int(channel_origin / groups);\n int groupIndex = int(c / groupLen);\n\n for (int fy = 0; fy < height_shape_filter; fy++) {\n if (oy < 0) {\n oy += dilation_v;\n continue;\n }\n int ox = x - padLeft;\n for (int fx = 0; fx < width_shape_filter; fx++) {\n\n if (ox < 0) {\n ox += dilation_h;\n continue;\n }\n // channel计算\n for (int j = 0; j < groupLen; j++) {\n int curIndex = j + b * groupLen;\n \tif (int(mod(float(ox), float(stride_h))) == 0 && int(mod(float(oy), float(stride_v))) == 0) {\n\t\t\t\t\t\ttemp_x = int(floor(float(ox) / float(stride_h)));\n\t\t\t\t\t\ttemp_y = int(floor(float(oy) / float(stride_v)));\n if (temp_x < width_shape_origin && temp_y < height_shape_origin){\n\t\t\t\t\t\t o = getValueFromTensorPosLIMIT_ORIGIN_origin(b, curIndex , temp_y, temp_x);\n f = getValueFromTensorPosLIMIT_FILTER_filter(curIndex, int(c / groups), height_shape_filter-1-fy, width_shape_filter-1-fx);\n res += f * o;\n }\n\t\t\t\t\t}\n }\n ox += dilation_h;\n }\n oy += dilation_v;\n }\n setOutput(float(res));\n }\n",confs:{dep:[{func:"getValueFromTensorPos",conf:{TENSOR_NAME:"origin"}},{func:"getValueFromTensorPos",conf:{TENSOR_NAME:"filter"}}],conf:["LENGTH_SHAPE_FILTER","WIDTH_SHAPE_FILTER","HEIGHT_SHAPE_FILTER","WIDTH_TEXTURE_FILTER","HEIGHT_TEXTURE_FILTER","CHANNEL_FILTER","WIDTH_SHAPE_ORIGIN","HEIGHT_SHAPE_ORIGIN","LENGTH_SHAPE_ORIGIN","WIDTH_TEXTURE_ORIGIN","HEIGHT_TEXTURE_ORIGIN","CHANNEL_ORIGIN","WIDTH_SHAPE_OUT","HEIGHT_SHAPE_OUT","WIDTH_TEXTURE_OUT","HEIGHT_TEXTURE_OUT","CHANNEL_OUT","OFFSET_Y_OUT","STRIDE_HORIZONTAL","STRIDE_VERTICAL","PAD_LEFT","PAD_TOP","DILATION_HORIZONTAL","DILATION_VERTICAL","GROUPS","MULTI_VALUE","BIAS_VALUE","ACTIVE_FUNCTION"],input:[{tensor:"filter",variable:"texture",setter:"initTexture",type:"texture"},{tensor:"origin",variable:"texture",setter:"initTexture",type:"texture"}]}},conv2d:{params:"\n // conv2d的input数据\n\n // 常量\n // 卷积核\n const int length_shape_filter = LENGTH_SHAPE_FILTER;\n const int width_shape_filter = WIDTH_SHAPE_FILTER;\n const int height_shape_filter = HEIGHT_SHAPE_FILTER;\n const int width_texture_filter = WIDTH_TEXTURE_FILTER;\n const int height_texture_filter = HEIGHT_TEXTURE_FILTER;\n const int channel_filter = CHANNEL_FILTER;\n\n // 输入数据\n const int width_shape_origin = WIDTH_SHAPE_ORIGIN;\n const int height_shape_origin = HEIGHT_SHAPE_ORIGIN;\n const int length_shape_origin = LENGTH_SHAPE_ORIGIN;\n const int width_texture_origin = WIDTH_TEXTURE_ORIGIN;\n const int height_texture_origin = HEIGHT_TEXTURE_ORIGIN;\n const int channel_origin = CHANNEL_ORIGIN;\n\n // bias\n const int width_shape_bias = WIDTH_SHAPE_BIAS;\n const int height_shape_bias = HEIGHT_SHAPE_BIAS;\n const int length_shape_bias = LENGTH_SHAPE_BIAS;\n const int width_texture_bias = WIDTH_TEXTURE_BIAS;\n const int height_texture_bias = HEIGHT_TEXTURE_BIAS;\n const int channel_bias = CHANNEL_BIAS;\n\n // 计算相关\n // 拆分步长\n const int stride_h = 
STRIDES_X;\n const int stride_v = STRIDES_Y;\n // padding的数目\n const int padLeft = PADDINGS_X;\n const int padTop = PADDINGS_Y;\n // dilation膨胀系数\n const int dilation_h = DILATIONS_X;\n const int dilation_v = DILATIONS_Y;\n // groups\n const int groups = GROUPS;\n\n // uniform变量\n // 卷积核\n uniform sampler2D texture_filter;\n\n // 输入数据\n uniform sampler2D texture_origin;\n\n\n // bias\n uniform sampler2D texture_bias;\n\n // 合并 channel 计算\n const int filter_nearest_vec4 = FILTER_NEAREST_VEC4;\n const int filter_remainder_vec4 = FILTER_REMAINDER_VEC4;\n",func:"\n // start函数\n void main(void) {\n ivec4 oPos = getOutputTensorPosLIMIT_OUT();\n int x = oPos.a;\n int c = oPos.g;\n int y = oPos.b;\n int b = oPos.r;\n float res = 0.0;\n\n // 获取output的坐标\n int oTensorChannel = (c / (channel_out / groups)) * channel_filter;\n int oy = y * stride_v - padTop;\n for (int fy = 0; fy < height_shape_filter; fy++) {\n if (oy >= height_shape_origin) {\n break;\n }\n if (oy < 0) {\n oy += dilation_v;\n continue;\n }\n int ox = x * stride_h - padLeft;\n for (int fx = 0; fx < width_shape_filter; fx++) {\n if (ox >= width_shape_origin) {\n break;\n }\n if (ox < 0) {\n ox += dilation_h;\n continue;\n }\n // channel计算\n for (int j = 0; j < filter_nearest_vec4; j += 4) {\n vec4 fValues = vec4(\n getValueFromTensorPosLIMIT_FILTER_filter(c, j, fy, fx),\n getValueFromTensorPosLIMIT_FILTER_filter(c, j + 1, fy, fx),\n getValueFromTensorPosLIMIT_FILTER_filter(c, j + 2, fy, fx),\n getValueFromTensorPosLIMIT_FILTER_filter(c, j + 3, fy, fx)\n );\n\n vec4 oValues = vec4(\n getValueFromTensorPosLIMIT_ORIGIN_origin(b, oTensorChannel + j, oy, ox),\n getValueFromTensorPosLIMIT_ORIGIN_origin(b, oTensorChannel + j + 1, oy, ox),\n getValueFromTensorPosLIMIT_ORIGIN_origin(b, oTensorChannel + j + 2, oy, ox),\n getValueFromTensorPosLIMIT_ORIGIN_origin(b, oTensorChannel + j + 3, oy, ox)\n );\n\n res += dot(fValues, oValues);\n }\n\n if (filter_remainder_vec4 == 1) {\n res += dot(\n getValueFromTensorPosLIMIT_FILTER_filter(c, filter_nearest_vec4, fy, fx),\n getValueFromTensorPosLIMIT_ORIGIN_origin(b, oTensorChannel + filter_nearest_vec4, oy, ox));\n } else if (filter_remainder_vec4 == 2) {\n vec2 fValues = vec2(\n getValueFromTensorPosLIMIT_FILTER_filter(c, filter_nearest_vec4, fy, fx),\n getValueFromTensorPosLIMIT_FILTER_filter(c, filter_nearest_vec4 + 1, fy, fx)\n );\n vec2 oValues = vec2(\n getValueFromTensorPosLIMIT_ORIGIN_origin(b, oTensorChannel + filter_nearest_vec4, oy, ox),\n getValueFromTensorPosLIMIT_ORIGIN_origin(b, oTensorChannel + filter_nearest_vec4 + 1, oy, ox)\n );\n res += dot(fValues, oValues);\n } else if (filter_remainder_vec4 == 3) {\n vec3 fValues = vec3(\n getValueFromTensorPosLIMIT_FILTER_filter(c, filter_nearest_vec4, fy, fx),\n getValueFromTensorPosLIMIT_FILTER_filter(c, filter_nearest_vec4 + 1, fy, fx),\n getValueFromTensorPosLIMIT_FILTER_filter(c, filter_nearest_vec4 + 2, fy, fx)\n );\n vec3 oValues = vec3(\n getValueFromTensorPosLIMIT_ORIGIN_origin(b, oTensorChannel + filter_nearest_vec4, oy, ox),\n getValueFromTensorPosLIMIT_ORIGIN_origin(b, oTensorChannel + filter_nearest_vec4 + 1, oy, ox),\n getValueFromTensorPosLIMIT_ORIGIN_origin(b, oTensorChannel + filter_nearest_vec4 + 2, oy, ox)\n );\n res += dot(fValues, oValues);\n }\n\n ox += dilation_h;\n }\n oy += dilation_v;\n }\n\n float bi = getValueFromTensorPosLIMIT_BIAS_bias(0, 0, 0, c);\n res += bi;\n if (fuse_relu) {\n res = max(0.0, res);\n }\n\n setOutput(res);\n 
}\n",confs:{dep:[{func:"getValueFromTensorPos",conf:{TENSOR_NAME:"origin"}},{func:"getValueFromTensorPos",conf:{TENSOR_NAME:"filter"}},{func:"getValueFromTensorPos",conf:{TENSOR_NAME:"bias"}},{func:"transferFromNHWCtoNCHW",conf:{}}],conf:["LENGTH_SHAPE_FILTER","WIDTH_SHAPE_FILTER","HEIGHT_SHAPE_FILTER","WIDTH_TEXTURE_FILTER","HEIGHT_TEXTURE_FILTER","CHANNEL_FILTER","WIDTH_SHAPE_BIAS","HEIGHT_SHAPE_BIAS","LENGTH_SHAPE_BIAS","WIDTH_TEXTURE_BIAS","HEIGHT_TEXTURE_BIAS","CHANNEL_BIAS","WIDTH_SHAPE_ORIGIN","HEIGHT_SHAPE_ORIGIN","LENGTH_SHAPE_ORIGIN","WIDTH_TEXTURE_ORIGIN","HEIGHT_TEXTURE_ORIGIN","CHANNEL_ORIGIN","WIDTH_SHAPE_OUT","HEIGHT_SHAPE_OUT","WIDTH_TEXTURE_OUT","HEIGHT_TEXTURE_OUT","CHANNEL_OUT","OFFSET_Y_OUT","STRIDE_HORIZONTAL","STRIDE_VERTICAL","PAD_LEFT","PAD_TOP","DILATION_HORIZONTAL","DILATION_VERTICAL","GROUPS","MULTI_VALUE","BIAS_VALUE","FUSE_RELU","ACTIVE_FUNCTION","FILTER_REMAINDER_VEC4","FILTER_NEAREST_VEC4"],input:[{tensor:"filter",variable:"texture",setter:"initTexture",type:"texture"},{tensor:"origin",variable:"texture",setter:"initTexture",type:"texture"},{tensor:"bias",variable:"texture",setter:"initTexture",type:"texture"}]}},conv2d_depthwise:{params:"\n // conv2d的input数据\n\n // 常量\n // 卷积核\n const int length_shape_filter = LENGTH_SHAPE_FILTER;\n const int width_shape_filter = WIDTH_SHAPE_FILTER;\n const int height_shape_filter = HEIGHT_SHAPE_FILTER;\n const int width_texture_filter = WIDTH_TEXTURE_FILTER;\n const int height_texture_filter = HEIGHT_TEXTURE_FILTER;\n const int channel_filter = CHANNEL_FILTER;\n\n // 输入数据\n const int width_shape_origin = WIDTH_SHAPE_ORIGIN;\n const int height_shape_origin = HEIGHT_SHAPE_ORIGIN;\n const int length_shape_origin = LENGTH_SHAPE_ORIGIN;\n const int width_texture_origin = WIDTH_TEXTURE_ORIGIN;\n const int height_texture_origin = HEIGHT_TEXTURE_ORIGIN;\n const int channel_origin = CHANNEL_ORIGIN;\n\n // bias\n const int width_shape_bias = WIDTH_SHAPE_BIAS;\n const int height_shape_bias = HEIGHT_SHAPE_BIAS;\n const int length_shape_bias = LENGTH_SHAPE_BIAS;\n const int width_texture_bias = WIDTH_TEXTURE_BIAS;\n const int height_texture_bias = HEIGHT_TEXTURE_BIAS;\n const int channel_bias = CHANNEL_BIAS;\n\n // 计算相关\n // 拆分步长\n const int stride_h = STRIDES_X;\n const int stride_v = STRIDES_Y;\n // padding的数目\n const int padLeft = PADDINGS_X;\n const int padTop = PADDINGS_Y;\n // dilation膨胀系数\n const int dilation_h = DILATIONS_X;\n const int dilation_v = DILATIONS_Y;\n\n // uniform变量\n // 卷积核\n uniform sampler2D texture_filter;\n\n // 输入数据\n uniform sampler2D texture_origin;\n\n // bias\n uniform sampler2D texture_bias;\n",func:"\n // start函数\n void main(void) {\n ivec4 oPos = getOutputTensorPosLIMIT_OUT();\n int x = oPos.a;\n int c = oPos.g;\n int y = oPos.b;\n int b = oPos.r;\n float res = 0.0;\n int top = y * stride_v - padTop;\n int left = x * stride_h - padLeft;\n for (int fy = 0; fy < height_shape_filter; fy++) {\n int oy = top + fy * dilation_v;\n if (oy >= height_shape_origin) {\n break;\n }\n if (oy < 0) {\n continue;\n }\n for (int fx = 0; fx < width_shape_filter; fx++) {\n int ox = left + fx * dilation_h;\n if (ox >= width_shape_origin) {\n break;\n }\n if (ox < 0) {\n continue;\n }\n // b默认是0\n float f = getValueFromTensorPosLIMIT_FILTER_filter(c, 0, fy, fx);\n float o = getValueFromTensorPosLIMIT_ORIGIN_origin(b, c, oy, ox);\n res += f * o;\n }\n }\n float bi = getValueFromTensorPosLIMIT_BIAS_bias(0, 0, 0, c);\n res += bi;\n if (fuse_relu) {\n res = max(0.0, res);\n }\n setOutput(res);\n 
}\n",confs:{dep:[{func:"getValueFromTensorPos",conf:{TENSOR_NAME:"origin"}},{func:"getValueFromTensorPos",conf:{TENSOR_NAME:"filter"}},{func:"getValueFromTensorPos",conf:{TENSOR_NAME:"bias"}},{func:"transferFromNHWCtoNCHW",conf:{}}],conf:["LENGTH_SHAPE_FILTER","WIDTH_SHAPE_FILTER","HEIGHT_SHAPE_FILTER","WIDTH_TEXTURE_FILTER","HEIGHT_TEXTURE_FILTER","CHANNEL_FILTER","WIDTH_SHAPE_BIAS","HEIGHT_SHAPE_BIAS","LENGTH_SHAPE_BIAS","WIDTH_TEXTURE_BIAS","HEIGHT_TEXTURE_BIAS","CHANNEL_BIAS","WIDTH_SHAPE_ORIGIN","HEIGHT_SHAPE_ORIGIN","LENGTH_SHAPE_ORIGIN","WIDTH_TEXTURE_ORIGIN","HEIGHT_TEXTURE_ORIGIN","CHANNEL_ORIGIN","WIDTH_SHAPE_OUT","HEIGHT_SHAPE_OUT","WIDTH_TEXTURE_OUT","HEIGHT_TEXTURE_OUT","CHANNEL_OUT","OFFSET_Y_OUT","STRIDE_HORIZONTAL","STRIDE_VERTICAL","PAD_LEFT","PAD_TOP","DILATION_HORIZONTAL","DILATION_VERTICAL","MULTI_VALUE","BIAS_VALUE","ACTIVE_FUNCTION"],input:[{tensor:"filter",variable:"texture",setter:"initTexture",type:"texture"},{tensor:"origin",variable:"texture",setter:"initTexture",type:"texture"},{tensor:"bias",variable:"texture",setter:"initTexture",type:"texture"}]}},conv2d_elementwise_add:{params:"\n // 卷积核\n const int length_shape_filter = LENGTH_SHAPE_FILTER;\n const int width_shape_filter = WIDTH_SHAPE_FILTER;\n const int height_shape_filter = HEIGHT_SHAPE_FILTER;\n const int width_texture_filter = WIDTH_TEXTURE_FILTER;\n const int height_texture_filter = HEIGHT_TEXTURE_FILTER;\n const int channel_filter = CHANNEL_FILTER;\n \n // 输入数据\n const int width_shape_origin = WIDTH_SHAPE_ORIGIN;\n const int height_shape_origin = HEIGHT_SHAPE_ORIGIN;\n const int length_shape_origin = LENGTH_SHAPE_ORIGIN;\n const int width_texture_origin = WIDTH_TEXTURE_ORIGIN;\n const int height_texture_origin = HEIGHT_TEXTURE_ORIGIN;\n const int channel_origin = CHANNEL_ORIGIN;\n \n // 计算相关\n // 拆分步长\n const int stride_h = STRIDES_X;\n const int stride_v = STRIDES_Y;\n // padding的数目\n const int padLeft = PADDINGS_X;\n const int padTop = PADDINGS_Y;\n // dilation膨胀系数\n const int dilation_h = DILATIONS_X;\n const int dilation_v = DILATIONS_Y;\n // groups\n const int groups = GROUPS;\n\n const int total_shape_out = TOTAL_SHAPE_OUT;\n // 加法\n const int axis = AXIS;\n \n // uniform变量\n // 卷积核\n uniform sampler2D texture_filter;\n \n // 输入数据\n uniform sampler2D texture_origin;\n \n // 加法\n uniform sampler2D texture_counter;\n // 加法用到的函数\n float getValueFromCounter(int index) {\n float xPos = float(index) / float(WIDTH_SHAPE_COUNTER);\n vec4 pixels = TEXTURE2D(texture_counter, vec2(xPos, 0.5));\n return pixels.r;\n }\n",func:"\n // start函数\n void main(void) {\n ivec4 oPos = getOutputTensorPosLIMIT_OUT();\n\t\t//int sumVal = oPos.g + oPos.a * channel_out + oPos.b * channel_out * width_shape_out;\n //ivec4 new_oPos = transferFromNHWCtoNCHW(sumVal, channel_out, width_shape_out, height_shape_out, total_shape_out);\n int x = oPos.a;\n int c = oPos.g;\n int y = oPos.b;\n int b = oPos.r;\n int addAxis = oPos[axis];\n float res = getValueFromCounter(addAxis);\n\n // 获取output的坐标\n int oTensorChannel = (c / (channel_out / groups)) * channel_filter;\n int oy = y * stride_v - padTop;\n for (int fy = 0; fy < height_shape_filter; fy++) {\n if (oy >= height_shape_origin) {\n break;\n }\n if (oy < 0) {\n oy += dilation_v;\n continue;\n }\n int ox = x * stride_h - padLeft;\n for (int fx = 0; fx < width_shape_filter; fx++) {\n if (ox >= width_shape_origin) {\n break;\n }\n if (ox < 0) {\n ox += dilation_h;\n continue;\n }\n // channel计算\n for (int j = 0; j < channel_filter; j++) {\n float f = 
getValueFromTensorPosLIMIT_FILTER_filter(c, j, fy, fx);\n float o = getValueFromTensorPosLIMIT_ORIGIN_origin(b, oTensorChannel + j, oy, ox);\n res += f * o;\n }\n ox += dilation_h;\n }\n oy += dilation_v;\n }\n setOutput(ACTIVE_FUNCTION(res, multi_value, bias_value));\n // outColor.r = float(b);\n // outColor.g = float(c);\n // outColor.b = float(y);\n // outColor.a = float(x);\n }\n",confs:{dep:[{func:"getValueFromTensorPos",conf:{TENSOR_NAME:"origin"}},{func:"getValueFromTensorPos",conf:{TENSOR_NAME:"filter"}},{func:"transferFromNHWCtoNCHW",conf:{}}],conf:["LENGTH_SHAPE_FILTER","WIDTH_SHAPE_FILTER","HEIGHT_SHAPE_FILTER","WIDTH_TEXTURE_FILTER","HEIGHT_TEXTURE_FILTER","CHANNEL_FILTER","WIDTH_SHAPE_ORIGIN","HEIGHT_SHAPE_ORIGIN","LENGTH_SHAPE_ORIGIN","WIDTH_TEXTURE_ORIGIN","HEIGHT_TEXTURE_ORIGIN","CHANNEL_ORIGIN","WIDTH_SHAPE_OUT","HEIGHT_SHAPE_OUT","WIDTH_TEXTURE_OUT","HEIGHT_TEXTURE_OUT","CHANNEL_OUT","OFFSET_Y_OUT","WIDTH_SHAPE_COUNTER","STRIDE_HORIZONTAL","STRIDE_VERTICAL","PAD_LEFT","PAD_TOP","DILATION_HORIZONTAL","DILATION_VERTICAL","GROUPS","AXIS","MULTI_VALUE","BIAS_VALUE","ACTIVE_FUNCTION"],input:[{tensor:"filter",variable:"texture",setter:"initTexture",type:"texture"},{tensor:"counter",variable:"texture",setter:"initTexture",type:"texture"},{tensor:"origin",variable:"texture",setter:"initTexture",type:"texture"}]}},conv2d_elementwise_add_winograd:{params:"\n // 卷积核\n const int length_shape_filter = LENGTH_SHAPE_FILTER;\n const int width_shape_filter = WIDTH_SHAPE_FILTER;\n const int height_shape_filter = HEIGHT_SHAPE_FILTER;\n const int width_texture_filter = WIDTH_TEXTURE_FILTER;\n const int height_texture_filter = HEIGHT_TEXTURE_FILTER;\n const int channel_filter = CHANNEL_FILTER;\n\n // 输入数据\n const int width_shape_origin = WIDTH_SHAPE_ORIGIN;\n const int height_shape_origin = HEIGHT_SHAPE_ORIGIN;\n const int length_shape_origin = LENGTH_SHAPE_ORIGIN;\n const int width_texture_origin = WIDTH_TEXTURE_ORIGIN;\n const int height_texture_origin = HEIGHT_TEXTURE_ORIGIN;\n const int channel_origin = CHANNEL_ORIGIN;\n \n // 计算相关\n // padding的数目\n const int padLeft = PADDINGS_X;\n const int padTop = PADDINGS_Y;\n \n // 加法\n const int axis = AXIS;\n uniform float data_counter[TOTAL_SHAPE_COUNTER];\n \n // uniform变量\n // 卷积核\n uniform sampler2D texture_filter;\n \n // 输入数据\n uniform sampler2D texture_origin;\n // 加法用到的函数\n float getValueFromCounter(int index) {\n for (int i = 0; i < TOTAL_SHAPE_COUNTER; i++) {\n if (i == index) {\n return data_counter[i];\n }\n }\n return 0.0;\n }\n",func:"\n // start函数\n void main(void) {\n ivec4 oPos = getOutputPackedTensorPos();\n int x = oPos.a;\n int c = oPos.g;\n int y = oPos.b;\n int b = oPos.r;\n // b = 0;\n // c = 1;\n // y = 0;\n // x = 0;\n int addAxis = oPos[axis];\n float res = getValueFromCounter(addAxis);\n // 输出结果\n vec4 v4 = vec4(res);\n\n float I[16];\n float B[16];\n float T[16];\n float f[16];\n for (int cl = 0; cl < channel_filter; cl++) {\n // 获取output的坐标\n int oy = 2*y - padTop;\n // 计算输入 4 * 4矩阵 和filter\n for (int fy = 0; fy < 4; fy++) {\n int ox = 2*x - padLeft;\n int index = fy * 4;\n for (int fx = 0; fx < 4; fx++) {\n if (oy < 0 || oy >= height_shape_origin || ox >= width_shape_origin || ox < 0) {\n I[index + fx] = 0.0;\n } else {\n I[index + fx] = getValueFromTensorPos_origin(b, cl, oy, ox);\n }\n f[index + fx] = getValueFromTensorPos_filter(c, cl, fy, fx);\n ox += 1;\n }\n oy += 1;\n }\n // input转化\n float tmp1 = I[2] - I[10];\n float tmp2 = I[9] - I[1];\n B[0] = I[0] - I[8] - tmp1;\n B[1] = tmp1 - tmp2;\n B[2] = tmp1 + 
tmp2;\n B[3] = I[3] - I[11] + tmp2;\n tmp1 = I[6] + I[10];\n tmp2 = I[5] + I[9];\n B[4] = I[4] + I[8] - tmp1;\n B[5] = tmp1 + tmp2;\n B[6] = tmp1 - tmp2;\n B[7] = I[7] + I[11] - tmp2;\n tmp1 = I[10] - I[6];\n tmp2 = I[5] - I[9];\n B[8] = I[8] - I[4] - tmp1;\n B[9] = tmp1 - tmp2;\n B[10] = tmp1 + tmp2;\n B[11] = tmp2 - I[7] + I[11];\n tmp1 = I[14] - I[6];\n tmp2 = I[5] - I[13];\n B[12] = I[12] - I[4] - tmp1;\n B[13] = tmp1 - tmp2;\n B[14] = tmp1 + tmp2;\n B[15] = tmp2 - I[7] + I[15];\n // 点乘\n for (int i = 0; i < 16; i++) {\n T[i] = B[i] * f[i];\n }\n // final output\n tmp1 = T[1] + T[5] + T[9];\n tmp2 = T[2] + T[6] + T[10];\n v4[0] += T[0] + T[4] + T[8] + tmp1 + tmp2;\n v4[1] += T[3] + T[7] + T[11] + tmp1 - tmp2;\n tmp1 = T[5] - T[9] + T[13];\n tmp2 = T[6] - T[10] + T[14];\n v4[2] += T[4] - T[8] + T[12] + tmp1 + tmp2;\n v4[3] += T[7] - T[11] + T[15] + tmp1 - tmp2;\n }\n outColor.r = ACTIVE_FUNCTION(v4[0], multi_value, bias_value);\n outColor.g = ACTIVE_FUNCTION(v4[1], multi_value, bias_value);\n outColor.b = ACTIVE_FUNCTION(v4[2], multi_value, bias_value);\n outColor.a = ACTIVE_FUNCTION(v4[3], multi_value, bias_value);\n // outColor = v4;\n // outColor.r = I[0];\n // outColor.g = I[1];\n // outColor.b = I[2];\n // outColor.a = I[3];\n // outColor.r = float(b);\n // outColor.g = float(c);\n // outColor.b = float(y);\n // outColor.a = float(x);\n }\n",confs:{dep:[{func:"getValueFromTensorPos",conf:{TENSOR_NAME:"origin"}},{func:"getValueFromTensorPos",conf:{TENSOR_NAME:"filter"}}],conf:["LENGTH_SHAPE_FILTER","WIDTH_SHAPE_FILTER","HEIGHT_SHAPE_FILTER","WIDTH_TEXTURE_FILTER","HEIGHT_TEXTURE_FILTER","CHANNEL_FILTER","WIDTH_SHAPE_ORIGIN","HEIGHT_SHAPE_ORIGIN","LENGTH_SHAPE_ORIGIN","WIDTH_TEXTURE_ORIGIN","HEIGHT_TEXTURE_ORIGIN","CHANNEL_ORIGIN","WIDTH_SHAPE_OUT","HEIGHT_SHAPE_OUT","WIDTH_TEXTURE_OUT","HEIGHT_TEXTURE_OUT","CHANNEL_OUT","OFFSET_Y_OUT","TOTAL_SHAPE_COUNTER","PAD_LEFT","PAD_TOP","AXIS","MULTI_VALUE","BIAS_VALUE","ACTIVE_FUNCTION"],input:[{tensor:"filter",variable:"texture",setter:"initTexture",type:"texture"},{tensor:"origin",variable:"texture",setter:"initTexture",type:"texture"},{tensor:"counter",variable:"data",setter:"uniform1fv",type:"uniform"}]}},test:{params:"\n // 输入数据\n const int width_shape_origin = WIDTH_SHAPE_ORIGIN;\n const int height_shape_origin = HEIGHT_SHAPE_ORIGIN;\n const int length_shape_origin = LENGTH_SHAPE_ORIGIN;\n const int width_texture_origin = WIDTH_TEXTURE_ORIGIN;\n const int height_texture_origin = HEIGHT_TEXTURE_ORIGIN;\n const int channel_origin = CHANNEL_ORIGIN;\n\n uniform sampler2D texture_origin;\n\n",func:"\n// start函数\nvoid main(void) {\n // 输出数据\n ivec4 oPos = getOutputTensorPosLIMIT_OUT();\n float o = getValueFromTensorPosLIMIT_ORIGIN_origin(oPos.r, oPos.g, oPos.b, oPos.a);\n\tsetOutput(float(o));\n}\n",confs:{dep:[{func:"getValueFromTensorPos",conf:{TENSOR_NAME:"origin"}}],conf:["WIDTH_SHAPE_ORIGIN","HEIGHT_SHAPE_ORIGIN","LENGTH_SHAPE_ORIGIN","WIDTH_TEXTURE_ORIGIN","HEIGHT_TEXTURE_ORIGIN","CHANNEL_ORIGIN","WIDTH_SHAPE_OUT","HEIGHT_SHAPE_OUT","WIDTH_TEXTURE_OUT","HEIGHT_TEXTURE_OUT","CHANNEL_OUT","OFFSET_Y_OUT"],input:[{tensor:"origin",variable:"texture",setter:"initTexture",type:"texture"}]}},dynamic:O,pool2d:{params:"\n// 常量\n// 池化大小\nconst int width_shape_pool = KSIZE_X;\nconst int height_shape_pool = KSIZE_Y;\nconst int type_pool = TYPE_POOL;\n// 输入数据\nconst int width_shape_origin = WIDTH_SHAPE_ORIGIN;\nconst int height_shape_origin = HEIGHT_SHAPE_ORIGIN;\nconst int length_shape_origin = LENGTH_SHAPE_ORIGIN;\nconst int width_texture_origin = 
WIDTH_TEXTURE_ORIGIN;\nconst int height_texture_origin = HEIGHT_TEXTURE_ORIGIN;\nconst int channel_origin = CHANNEL_ORIGIN;\n\n// 计算相关\n// 拆分步长\nconst int stride_h = STRIDES_X;\nconst int stride_v = STRIDES_Y;\n// padding的数目\nconst int padLeft = PADDINGS_X;\nconst int padTop = PADDINGS_Y;\n\n// uniform变量\nuniform sampler2D texture_origin;\n",func:"\n// start函数\nvoid main(void) {\n\tfloat res = 0.0;\n // 获取output的坐标\n ivec4 out_pos = getOutputTensorPosLIMIT_OUT();\n // X、Y方向的移动步长\n int count_pool = 0;\n int oy_base = out_pos[2] * stride_v - padTop;\n int ox_base = out_pos[3] * stride_h - padLeft;\n for (int fy = 0; fy < height_shape_pool; fy++) {\n int oy = oy_base + fy;\n if (oy >= height_shape_origin) {\n break;\n }\n if (oy < 0) {\n continue;\n }\n for (int fx = 0; fx < width_shape_pool; fx++) {\n int ox = ox_base + fx;\n if (ox >= width_shape_origin) {\n break;\n }\n if (ox < 0) {\n continue;\n }\n // origin数据\n float curr = getValueFromTensorPosLIMIT_ORIGIN_origin(out_pos[0], out_pos[1], oy, ox);\n if (type_pool == 1) {\n if (curr > res) {\n res = curr;\n }\n } else {\n res += curr;\n // 在平均池化模式忽略填充值(exclusive默认为true)\n count_pool++;\n }\n }\n }\n if (type_pool != 1) {\n res = res / float(count_pool);\n }\n setOutput(res);\n}\n",confs:{dep:[{func:"getValueFromTensorPos",conf:{TENSOR_NAME:"origin"}}],conf:["KSIZE_X","KSIZE_Y","TYPE_POOL","WIDTH_SHAPE_ORIGIN","HEIGHT_SHAPE_ORIGIN","LENGTH_SHAPE_ORIGIN","WIDTH_TEXTURE_ORIGIN","HEIGHT_TEXTURE_ORIGIN","CHANNEL_ORIGIN","WIDTH_SHAPE_OUT","HEIGHT_SHAPE_OUT","WIDTH_TEXTURE_OUT","HEIGHT_TEXTURE_OUT","CHANNEL_OUT","OFFSET_Y_OUT","STRIDES_X","STRIDES_Y","PADDING_X","PADDING_Y"],input:[{tensor:"origin",variable:"texture",setter:"initTexture",type:"texture"}]}},pool2d_max:{params:"\n// 常量\n// 池化大小\nconst int width_shape_pool = KSIZE_X;\nconst int height_shape_pool = KSIZE_Y;\n// 输入数据\nconst int width_shape_origin = WIDTH_SHAPE_ORIGIN;\nconst int height_shape_origin = HEIGHT_SHAPE_ORIGIN;\nconst int length_shape_origin = LENGTH_SHAPE_ORIGIN;\nconst int width_texture_origin = WIDTH_TEXTURE_ORIGIN;\nconst int height_texture_origin = HEIGHT_TEXTURE_ORIGIN;\nconst int channel_origin = CHANNEL_ORIGIN;\n\n// 计算相关\n// 拆分步长\nconst int stride_h = STRIDES_X;\nconst int stride_v = STRIDES_Y;\n// padding的数目\nconst int padLeft = PADDINGS_X;\nconst int padTop = PADDINGS_Y;\n\n\n// uniform变量\nuniform sampler2D texture_origin;\n",func:"\n// start函数\nvoid main(void) {\n float res = 0.0;\n // 获取output的坐标\n ivec4 out_pos = getOutputTensorPosLIMIT_OUT();\n int b = out_pos[0];\n int c = out_pos[1];\n int y = out_pos[2];\n int x = out_pos[3];\n // X、Y方向的移动步长\n int oy_base = out_pos[2] * stride_v - padTop;\n int ox_base = out_pos[3] * stride_h - padLeft;\n for (int fy = 0; fy < height_shape_pool; fy++) {\n int oy = oy_base + fy;\n if (oy >= height_shape_origin) {\n break;\n }\n if (oy < 0) {\n continue;\n }\n for (int fx = 0; fx < width_shape_pool; fx++) {\n int ox = ox_base + fx;\n if (ox >= width_shape_origin) {\n break;\n }\n if (ox < 0) {\n continue;\n }\n // origin数据\n float curr = getValueFromTensorPosLIMIT_ORIGIN_origin(out_pos[0], out_pos[1], oy, ox);\n res = max(res, curr);\n }\n } \n setOutput(res);\n // outColor.r = float(b);\n // outColor.g = float(c);\n // outColor.b = float(y);\n // outColor.a = 
float(x);\n}\n",confs:{dep:[{func:"getValueFromTensorPos",conf:{TENSOR_NAME:"origin"}}],conf:["KSIZE_X","KSIZE_Y","WIDTH_SHAPE_ORIGIN","HEIGHT_SHAPE_ORIGIN","LENGTH_SHAPE_ORIGIN","WIDTH_TEXTURE_ORIGIN","HEIGHT_TEXTURE_ORIGIN","CHANNEL_ORIGIN","WIDTH_SHAPE_OUT","HEIGHT_SHAPE_OUT","WIDTH_TEXTURE_OUT","HEIGHT_TEXTURE_OUT","CHANNEL_OUT","OFFSET_Y_OUT","STRIDES_X","STRIDES_Y","PADDING_X","PADDING_Y"],input:[{tensor:"origin",variable:"texture",setter:"initTexture",type:"texture"}]}},pool2d_winograd:{params:"\n// 常量\n// 池化大小\nconst int width_shape_pool = KSIZE_X;\nconst int height_shape_pool = KSIZE_Y;\nconst int type_pool = TYPE_POOL;\n// 输入数据\nconst int width_shape_origin = WIDTH_SHAPE_ORIGIN;\nconst int height_shape_origin = HEIGHT_SHAPE_ORIGIN;\nconst int length_shape_origin = LENGTH_SHAPE_ORIGIN;\nconst int width_texture_origin = WIDTH_TEXTURE_ORIGIN;\nconst int height_texture_origin = HEIGHT_TEXTURE_ORIGIN;\nconst int channel_origin = CHANNEL_ORIGIN;\nconst int offset_x_origin = OFFSET_X_ORIGIN;\nconst int offset_y_origin = OFFSET_Y_ORIGIN;\n\n\n// 计算相关\n// 拆分步长\nconst int stride_h = STRIDES_X;\nconst int stride_v = STRIDES_Y;\n// padding的数目\nconst int padLeft = PADDINGS_X;\nconst int padTop = PADDINGS_Y;\n\n\n// uniform变量\nuniform sampler2D texture_origin;\n",func:"\n// start函数\nvoid main(void) {\n float res = (-1.0 / exp(-20.0));\n // 获取output的坐标\n ivec4 out_pos = getOutputTensorPos();\n // int b = out_pos[0];\n // int c = out_pos[1];\n // int y = out_pos[2];\n // int x = out_pos[3];\n // X、Y方向的移动步长\n int count_pool = 0;\n int oy_base = out_pos[2] * stride_v - padTop;\n int ox_base = out_pos[3] * stride_h - padLeft;\n // int offset = 0;\n // vec4 v4 = texture(texture_origin, vec2((float(0) + 0.5) / float(width_texture_origin), (float(1 * height_shape_origin / 2 + 0) + 0.5) / float(height_texture_origin)));\n for (int fy = 0; fy < height_shape_pool; fy++) {\n int oy = oy_base + fy;\n if (oy >= height_shape_origin) {\n break;\n }\n if (oy < 0) {\n continue;\n }\n for (int fx = 0; fx < width_shape_pool; fx++) {\n int ox = ox_base + fx;\n if (ox >= width_shape_origin) {\n break;\n }\n if (ox < 0) {\n continue;\n }\n // origin数据\n float curr = getValueFromTensorPosPacked_origin(out_pos[0], out_pos[1], oy, ox);\n // y = oy;\n // x = ox;\n // v4[offset++] = curr;\n if (type_pool == 1) {\n if (curr > res) {\n res = curr;\n }\n } else {\n res += curr;\n // 在平均池化模式忽略填充值(exclusive默认为true)\n count_pool++;\n }\n }\n }\n if (type_pool != 1) {\n res = res / float(count_pool);\n }\n setOutput(res);\n // outColor = v4;\n // outColor.r = float(b);\n // outColor.g = float(c);\n // outColor.b = float(y);\n // outColor.a = float(x);\n}\n",confs:{dep:[{func:"getValueFromTensorPosPacked",conf:{TENSOR_NAME:"origin"}}],conf:["KSIZE_X","KSIZE_Y","TYPE_POOL","WIDTH_SHAPE_ORIGIN","HEIGHT_SHAPE_ORIGIN","LENGTH_SHAPE_ORIGIN","WIDTH_TEXTURE_ORIGIN","HEIGHT_TEXTURE_ORIGIN","CHANNEL_ORIGIN","OFFSET_X_ORIGIN","OFFSET_Y_ORIGIN","WIDTH_SHAPE_OUT","HEIGHT_SHAPE_OUT","WIDTH_TEXTURE_OUT","HEIGHT_TEXTURE_OUT","CHANNEL_OUT","OFFSET_Y_OUT","STRIDES_X","STRIDES_Y","PADDING_X","PADDING_Y"],input:[{tensor:"origin",variable:"texture",setter:"initTexture",type:"texture"}]}},elementwise_add:{params:"\n // 输入数据\n const int axis = AXIS;\n const int width_shape_origin = WIDTH_SHAPE_ORIGIN;\n const int height_shape_origin = HEIGHT_SHAPE_ORIGIN;\n const int length_shape_origin = LENGTH_SHAPE_ORIGIN;\n const int width_texture_origin = WIDTH_TEXTURE_ORIGIN;\n const int height_texture_origin = HEIGHT_TEXTURE_ORIGIN;\n const int channel_origin 
= CHANNEL_ORIGIN;\n\n const int height_shape_counter = HEIGHT_SHAPE_COUNTER;\n const int width_shape_counter = WIDTH_SHAPE_COUNTER;\n const int length_shape_counter = LENGTH_SHAPE_COUNTER;\n const int width_texture_counter = WIDTH_TEXTURE_COUNTER;\n const int height_texture_counter = HEIGHT_TEXTURE_COUNTER;\n const int channel_counter = CHANNEL_COUNTER;\n \n uniform sampler2D texture_origin;\n\tuniform sampler2D texture_counter;\n\n",func:"\n// start函数\nvoid main(void) {\n // 输出数据\n ivec4 oPos = getOutputTensorPosLIMIT_OUT();\n float o = getValueFromTensorPosLIMIT_ORIGIN_origin(oPos.r, oPos.g, oPos.b, oPos.a);\n\tivec4 pos_counter;\n\tfloat c = 0.0;\n\n\tif (axis == 1){\n c = getValueFromTensorPosLIMIT_COUNTER_counter(0, oPos.r, oPos.g, oPos.b);\n }\n else if (axis == 2){\n c = getValueFromTensorPosLIMIT_COUNTER_counter(0, 0, oPos.r, oPos.g);\n }\n else if (axis == 3){\n c = getValueFromTensorPosLIMIT_COUNTER_counter(0, 0, 0, oPos.r);\n }\n else {\n c = getValueFromTensorPosLIMIT_COUNTER_counter(oPos.r, oPos.g, oPos.b, oPos.a);\n }\n\tfloat res = c + o;\n\tsetOutput(float(res));\n}\n",confs:{dep:[{func:"getValueFromTensorPos",conf:{TENSOR_NAME:"origin"}},{func:"transferFromNHWCtoNCHW",conf:{}},{func:"getValueFromTensorPos",conf:{TENSOR_NAME:"counter"}}],conf:["WIDTH_SHAPE_ORIGIN","HEIGHT_SHAPE_ORIGIN","LENGTH_SHAPE_ORIGIN","WIDTH_TEXTURE_ORIGIN","HEIGHT_TEXTURE_ORIGIN","CHANNEL_ORIGIN","WIDTH_SHAPE_COUNTER","HEIGHT_SHAPE_COUNTER","LENGTH_SHAPE_COUNTER","WIDTH_TEXTURE_COUNTER","HEIGHT_TEXTURE_COUNTER","CHANNEL_COUNTER","WIDTH_SHAPE_OUT","HEIGHT_SHAPE_OUT","WIDTH_TEXTURE_OUT","HEIGHT_TEXTURE_OUT","CHANNEL_OUT","OFFSET_Y_OUT","AXIS"],input:[{tensor:"origin",variable:"texture",setter:"initTexture",type:"texture"},{tensor:"counter",variable:"texture",setter:"initTexture",type:"texture"}]}},mul:{params:"\n// mul的input数据\n// 常量\n// 输入数据\nconst int length_shape_counter = LENGTH_SHAPE_COUNTER;\nconst int width_shape_counter = WIDTH_SHAPE_COUNTER;\nconst int height_shape_counter = HEIGHT_SHAPE_COUNTER;\nconst int width_texture_counter = WIDTH_TEXTURE_COUNTER;\nconst int height_texture_counter = HEIGHT_TEXTURE_COUNTER;\nconst int channel_counter = CHANNEL_COUNTER;\n\nconst int width_shape_origin = WIDTH_SHAPE_ORIGIN;\nconst int height_shape_origin = HEIGHT_SHAPE_ORIGIN;\nconst int length_shape_origin = LENGTH_SHAPE_ORIGIN;\nconst int width_texture_origin = WIDTH_TEXTURE_ORIGIN;\nconst int height_texture_origin = HEIGHT_TEXTURE_ORIGIN;\nconst int channel_origin = CHANNEL_ORIGIN;\n\n// uniform变量\n// 输入数据\nuniform sampler2D texture_counter;\nuniform sampler2D texture_origin;\n",func:"\n// start函数\nvoid main(void) {\n float res = 0.0;\n // 获取output的坐标\n ivec4 out_pos = getOutputTensorPosLIMIT_OUT();\n for (int j = 0; j < width_shape_origin; j++) {\n float c = getValueFromTensorPosLIMIT_COUNTER_counter(out_pos[0], out_pos[1], j, out_pos[3]);\n float o = getValueFromTensorPosLIMIT_COUNTER_origin(out_pos[0], out_pos[1], out_pos[2], j);\n res += c * o;\n }\n 
setOutput(res);\n}\n",confs:{dep:[{func:"getValueFromTensorPos",conf:{TENSOR_NAME:"counter"}},{func:"getValueFromTensorPos",conf:{TENSOR_NAME:"origin"}}],conf:["LENGTH_SHAPE_COUNTER","WIDTH_SHAPE_COUNTER","HEIGHT_SHAPE_COUNTER","WIDTH_TEXTURE_COUNTER","HEIGHT_TEXTURE_COUNTER","CHANNEL_COUNTER","WIDTH_SHAPE_ORIGIN","HEIGHT_SHAPE_ORIGIN","LENGTH_SHAPE_ORIGIN","WIDTH_TEXTURE_ORIGIN","HEIGHT_TEXTURE_ORIGIN","CHANNEL_ORIGIN","WIDTH_SHAPE_OUT","HEIGHT_SHAPE_OUT","WIDTH_TEXTURE_OUT","HEIGHT_TEXTURE_OUT","CHANNEL_OUT","OFFSET_Y_OUT"],input:[{tensor:"counter",variable:"texture",setter:"initTexture",type:"texture"},{tensor:"origin",variable:"texture",setter:"initTexture",type:"texture"}]}},fc:{params:"\n// mul的input数据\n// 常量\n// 输入数据\n// weight\nconst int length_shape_weight = LENGTH_SHAPE_WEIGHT;\nconst int width_shape_weight = WIDTH_SHAPE_WEIGHT;\nconst int height_shape_weight = HEIGHT_SHAPE_WEIGHT;\nconst int width_texture_weight = WIDTH_TEXTURE_WEIGHT;\nconst int height_texture_weight = HEIGHT_TEXTURE_WEIGHT;\nconst int channel_weight = CHANNEL_WEIGHT;\n\n//input\nconst int width_shape_origin = WIDTH_SHAPE_ORIGIN;\nconst int height_shape_origin = HEIGHT_SHAPE_ORIGIN;\nconst int length_shape_origin = LENGTH_SHAPE_ORIGIN;\nconst int width_texture_origin = WIDTH_TEXTURE_ORIGIN;\nconst int height_texture_origin = HEIGHT_TEXTURE_ORIGIN;\nconst int channel_origin = CHANNEL_ORIGIN;\n\n// bias\nconst int width_shape_bias = WIDTH_SHAPE_BIAS;\nconst int height_shape_bias = HEIGHT_SHAPE_BIAS;\nconst int length_shape_bias = LENGTH_SHAPE_BIAS;\nconst int width_texture_bias = WIDTH_TEXTURE_BIAS;\nconst int height_texture_bias = HEIGHT_TEXTURE_BIAS;\nconst int channel_bias = CHANNEL_BIAS;\n\n\n// uniform变量\n// 输入数据\nuniform sampler2D texture_weight;\nuniform sampler2D texture_origin;\nuniform sampler2D texture_bias;\n",func:"\n // start函数\n void main(void) {\n float res = 0.0;\n ivec4 out_pos = getOutputTensorPosLIMIT_OUT();\n float bias = getValueFromTensorPosLIMIT_BIAS_bias(out_pos.r, out_pos.g, out_pos.b, out_pos.a);\n\n for (int j = 0; j < width_shape_origin; j++) {\n float w = getValueFromTensorPosLIMIT_WEIGHT_weight(out_pos[0], out_pos[1], j, out_pos[3]);\n float o = getValueFromTensorPosLIMIT_ORIGIN_origin(out_pos[0], out_pos[1], out_pos[2], j);\n res += w * o;\n }\n\n res = res + bias;\n setOutput(res);\n }\n",confs:{dep:[{func:"getValueFromTensorPos",conf:{TENSOR_NAME:"weight"}},{func:"getValueFromTensorPos",conf:{TENSOR_NAME:"origin"}},{func:"getValueFromTensorPos",conf:{TENSOR_NAME:"bias"}}],conf:[],input:[{tensor:"weight",variable:"texture",setter:"initTexture",type:"texture"},{tensor:"origin",variable:"texture",setter:"initTexture",type:"texture"},{tensor:"bias",variable:"texture",setter:"initTexture",type:"texture"}]}},concat:{params:"\n// mul的input数据\nconst int axis = AXIS;\n\n// 常量\n// 输入数据\nconst int length_shape_counter = LENGTH_SHAPE_COUNTER;\nconst int width_shape_counter = WIDTH_SHAPE_COUNTER;\nconst int height_shape_counter = HEIGHT_SHAPE_COUNTER;\nconst int width_texture_counter = WIDTH_TEXTURE_COUNTER;\nconst int height_texture_counter = HEIGHT_TEXTURE_COUNTER;\nconst int channel_counter = CHANNEL_COUNTER;\nconst int total_shape_counter = TOTAL_SHAPE_COUNTER;\n\nconst int width_shape_origin = WIDTH_SHAPE_ORIGIN;\nconst int height_shape_origin = HEIGHT_SHAPE_ORIGIN;\nconst int length_shape_origin = LENGTH_SHAPE_ORIGIN;\nconst int width_texture_origin = WIDTH_TEXTURE_ORIGIN;\nconst int height_texture_origin = HEIGHT_TEXTURE_ORIGIN;\nconst int channel_origin = CHANNEL_ORIGIN;\nconst int 
total_shape_origin = TOTAL_SHAPE_ORIGIN;\n\nconst int total_shape_out = TOTAL_SHAPE_OUT;\n\nconst int dim = DIM;\nconst int inputs_dim = INPUTS_DIM;\n\n\n// uniform变量\n// 输入数据\nuniform sampler2D texture_counter;\nuniform sampler2D texture_origin;\n",func:"\n// start函数\nvoid main(void) {\n ivec4 oPos = getOutputTensorPosLIMIT_OUT();\n // 输出坐标转换为输入坐标\n//\tint sumVal = oPos.g + oPos.a * channel_out + oPos.b * channel_out * width_shape_out + oPos.r * channel_out * width_shape_out * height_shape_out;\n // ivec4 new_oPos = transferFromNHWCtoNCHW(sumVal, channel_out, width_shape_out, height_shape_out, total_shape_out);\n float o = 0.0;\n if (oPos[dim] > inputs_dim - 1) {\n oPos[dim] = oPos[dim] - inputs_dim;\n o = getValueFromTensorPosLIMIT_COUNTER_counter(oPos.r, oPos.g, oPos.b, oPos.a);\n }\n else {\n o = getValueFromTensorPosLIMIT_ORIGIN_origin(oPos.r, oPos.g, oPos.b, oPos.a);\n }\n\tsetOutput(float(o));\n}\n",confs:{dep:[{func:"getValueFromTensorPos",conf:{TENSOR_NAME:"origin"}},{func:"getValueFromTensorPos",conf:{TENSOR_NAME:"counter"}},{func:"transferFromNHWCtoNCHW"}],conf:["LENGTH_SHAPE_COUNTER","WIDTH_SHAPE_COUNTER","HEIGHT_SHAPE_COUNTER","WIDTH_TEXTURE_COUNTER","HEIGHT_TEXTURE_COUNTER","CHANNEL_COUNTER","WIDTH_SHAPE_ORIGIN","HEIGHT_SHAPE_ORIGIN","LENGTH_SHAPE_ORIGIN","WIDTH_TEXTURE_ORIGIN","HEIGHT_TEXTURE_ORIGIN","CHANNEL_ORIGIN","WIDTH_SHAPE_OUT","HEIGHT_SHAPE_OUT","WIDTH_TEXTURE_OUT","HEIGHT_TEXTURE_OUT","CHANNEL_OUT","OFFSET_Y_OUT"],input:[{tensor:"origin",variable:"texture",setter:"initTexture",type:"texture"},{tensor:"counter",variable:"texture",setter:"initTexture",type:"texture"}]}},concat_multi:{params:"\n// mul的input数据\nconst int axis = AXIS;\n\n// 常量\n// 输入数据\nconst int length_shape_counter = LENGTH_SHAPE_COUNTER;\nconst int width_shape_counter = WIDTH_SHAPE_COUNTER;\nconst int height_shape_counter = HEIGHT_SHAPE_COUNTER;\nconst int width_texture_counter = WIDTH_TEXTURE_COUNTER;\nconst int height_texture_counter = HEIGHT_TEXTURE_COUNTER;\nconst int channel_counter = CHANNEL_COUNTER;\nconst int total_shape_counter = TOTAL_SHAPE_COUNTER;\n\nconst int length_shape_appender = LENGTH_SHAPE_APPENDER;\nconst int width_shape_appender = WIDTH_SHAPE_APPENDER;\nconst int height_shape_appender = HEIGHT_SHAPE_APPENDER;\nconst int width_texture_appender = WIDTH_TEXTURE_APPENDER;\nconst int height_texture_appender = HEIGHT_TEXTURE_APPENDER;\nconst int channel_appender = CHANNEL_APPENDER;\nconst int total_shape_appender = TOTAL_SHAPE_APPENDER;\n\nconst int width_shape_origin = WIDTH_SHAPE_ORIGIN;\nconst int height_shape_origin = HEIGHT_SHAPE_ORIGIN;\nconst int length_shape_origin = LENGTH_SHAPE_ORIGIN;\nconst int width_texture_origin = WIDTH_TEXTURE_ORIGIN;\nconst int height_texture_origin = HEIGHT_TEXTURE_ORIGIN;\nconst int channel_origin = CHANNEL_ORIGIN;\nconst int total_shape_origin = TOTAL_SHAPE_ORIGIN;\n\nconst int total_shape_out = TOTAL_SHAPE_OUT;\n\nconst int dim = DIM;\nconst int inputs_dim = INPUTS_DIM;\nconst int append_num = APPEND_NUM;\n\n\n// uniform变量\n// 输入数据\nuniform sampler2D texture_counter;\nuniform sampler2D texture_appender;\nuniform sampler2D texture_origin;\n",func:"\n// start函数\nvoid main(void) {\n ivec4 oPos = getOutputTensorPosLIMIT_OUT();\n // 输出坐标转换为输入坐标\n float o = 0.0;\n int dim_total = inputs_dim + append_num;\n\n if (oPos[dim] < inputs_dim) {\n o = getValueFromTensorPosLIMIT_ORIGIN_origin(oPos.r, oPos.g, oPos.b, oPos.a);\n }\n else if (oPos[dim] < dim_total) {\n o = getValueFromTensorPosLIMIT_COUNTER_counter(oPos.r, oPos.g - inputs_dim, oPos.b, oPos.a);\n }\n 
else {\n o = getValueFromTensorPosLIMIT_APPENDER_appender(oPos.r, oPos.g - dim_total, oPos.b, oPos.a);\n }\n\tsetOutput(float(o));\n}\n",confs:{dep:[{func:"getValueFromTensorPos",conf:{TENSOR_NAME:"origin"}},{func:"getValueFromTensorPos",conf:{TENSOR_NAME:"counter"}},{func:"getValueFromTensorPos",conf:{TENSOR_NAME:"appender"}},{func:"transferFromNHWCtoNCHW"}],conf:["LENGTH_SHAPE_COUNTER","WIDTH_SHAPE_COUNTER","HEIGHT_SHAPE_COUNTER","WIDTH_TEXTURE_COUNTER","HEIGHT_TEXTURE_COUNTER","CHANNEL_COUNTER","LENGTH_SHAPE_APPENDER","WIDTH_SHAPE_APPENDER","HEIGHT_SHAPE_APPENDER","WIDTH_TEXTURE_APPENDER","HEIGHT_TEXTURE_APPENDER","CHANNEL_APPENDER","WIDTH_SHAPE_ORIGIN","HEIGHT_SHAPE_ORIGIN","LENGTH_SHAPE_ORIGIN","WIDTH_TEXTURE_ORIGIN","HEIGHT_TEXTURE_ORIGIN","CHANNEL_ORIGIN","WIDTH_SHAPE_OUT","HEIGHT_SHAPE_OUT","WIDTH_TEXTURE_OUT","HEIGHT_TEXTURE_OUT","CHANNEL_OUT","OFFSET_Y_OUT"],input:[{tensor:"origin",variable:"texture",setter:"initTexture",type:"texture"},{tensor:"counter",variable:"texture",setter:"initTexture",type:"texture"},{tensor:"appender",variable:"texture",setter:"initTexture",type:"texture"}]}},split:{params:"\n// 常量\n\nconst int width_shape_origin = WIDTH_SHAPE_ORIGIN;\nconst int height_shape_origin = HEIGHT_SHAPE_ORIGIN;\nconst int length_shape_origin = LENGTH_SHAPE_ORIGIN;\nconst int width_texture_origin = WIDTH_TEXTURE_ORIGIN;\nconst int height_texture_origin = HEIGHT_TEXTURE_ORIGIN;\nconst int channel_origin = CHANNEL_ORIGIN;\nconst int total_shape_origin = TOTAL_SHAPE_ORIGIN;\n\nconst int total_shape_out = TOTAL_SHAPE_OUT;\n\nconst int dim = DIM;\nconst int num = NUM;\nconst int target_length = TARGET_LENGTH;\n\n\n// 输入数据\nuniform sampler2D texture_origin;\n",func:"\n// start函数\nvoid main(void) {\n int length = int(target_length / num);\n ivec4 oPos = getOutputTensorPos();\n // 输出坐标转换为输入坐标\n\t//int sumVal = oPos.g + oPos.a * channel_out + oPos.b * channel_out * width_shape_out + oPos.r * channel_out * width_shape_out * height_shape_out;\n //ivec4 new_oPos = transferFromNHWCtoNCHW(sumVal, channel_out, width_shape_out, height_shape_out, total_shape_out);\n oPos[dim] = oPos[dim] + layer_run_time * length;\n\tfloat o = getValueFromTensorPos_origin(oPos.r, oPos.g, oPos.b, oPos.a);\n\tsetOutput(float(o));\n}\n",confs:{dep:[{func:"getValueFromTensorPos",conf:{TENSOR_NAME:"origin"}},{func:"transferFromNHWCtoNCHW"}],conf:["WIDTH_SHAPE_ORIGIN","HEIGHT_SHAPE_ORIGIN","LENGTH_SHAPE_ORIGIN","WIDTH_TEXTURE_ORIGIN","HEIGHT_TEXTURE_ORIGIN","CHANNEL_ORIGIN","WIDTH_SHAPE_OUT","HEIGHT_SHAPE_OUT","WIDTH_TEXTURE_OUT","HEIGHT_TEXTURE_OUT","CHANNEL_OUT","OFFSET_Y_OUT"],input:[{tensor:"origin",variable:"texture",setter:"initTexture",type:"texture"}]}},relu:O,relu6:O,scale:O,sigmoid:O,softmax:{params:"\n// 输入数据\nconst int width_shape_origin = WIDTH_SHAPE_ORIGIN;\nconst int height_shape_origin = HEIGHT_SHAPE_ORIGIN;\nconst int width_texture_origin = WIDTH_TEXTURE_ORIGIN;\nconst int height_texture_origin = HEIGHT_TEXTURE_ORIGIN;\nconst int total_shape_origin = TOTAL_SHAPE_ORIGIN;\nconst int channel_origin = CHANNEL_ORIGIN;\nconst int axis = AXIS;\n// uniform变量\n// 输入数据\nuniform sampler2D texture_origin;\n",func:"\n// start函数\nvoid main(void) {\n ivec4 oPos = getOutputTensorPos();\n const int n = int(total_shape_origin/channel_origin/height_shape_origin/width_shape_origin);\n float o = getValueFromTensorPos_origin(oPos[0], oPos[1], oPos[2], oPos[3]);\n // 输出坐标转换为输入坐标\n float total = 0.0;\n float res = 0.0;\n if (axis == 0) {\n for (int i = 0; i < n; i++){\n float temp = getValueFromTensorPos_origin(i, oPos[1], 
oPos[2], oPos[3]);\n total += exp(temp);\n }\n res = exp(o) / total;\n }\n else if (axis == 1) {\n for (int i = 0; i < channel_origin; i++){\n float temp = getValueFromTensorPos_origin(oPos[0], i, oPos[2], oPos[3]);\n total += exp(temp);\n }\n res = exp(o) / total;\n }\n else if (axis == 2) {\n for (int i = 0; i < height_shape_origin; i++){\n float temp = getValueFromTensorPos_origin(oPos[0], oPos[1], i, oPos[3]);\n total += exp(temp);\n }\n res = exp(o) / total;\n }\n else {\n for (int i = 0; i < width_shape_origin; i++){\n float temp = getValueFromTensorPos_origin(oPos[0], oPos[1], oPos[2], i);\n total += exp(temp);\n }\n res = exp(o) / total;\n }\n setOutput(res);\n}\n",confs:{dep:[{func:"getValueFromTensorPos",conf:{TENSOR_NAME:"origin"}}],conf:["WIDTH_SHAPE_ORIGIN","HEIGHT_SHAPE_ORIGIN","LENGTH_SHAPE_ORIGIN","WIDTH_TEXTURE_ORIGIN","HEIGHT_TEXTURE_ORIGIN","CHANNEL_ORIGIN"],input:[{tensor:"origin",variable:"texture",setter:"initTexture",type:"texture"}]}},batchnorm:{params:"\n// 输入数据\nconst int width_shape_origin = WIDTH_SHAPE_ORIGIN;\nconst int height_shape_origin = HEIGHT_SHAPE_ORIGIN;\nconst int length_shape_origin = LENGTH_SHAPE_ORIGIN;\nconst int width_texture_origin = WIDTH_TEXTURE_ORIGIN;\nconst int height_texture_origin = HEIGHT_TEXTURE_ORIGIN;\nconst int channel_origin = CHANNEL_ORIGIN;\nconst int total_shape_origin = TOTAL_SHAPE_ORIGIN;\n// 计算数据\nconst float epsilon = float(EPSILON);\nconst int width_texture_bias = WIDTH_TEXTURE_BIAS;\nconst int height_texture_bias = HEIGHT_TEXTURE_BIAS;\nconst int width_texture_variance = WIDTH_TEXTURE_VARIANCE;\nconst int height_texture_variance = HEIGHT_TEXTURE_VARIANCE;\nconst int width_texture_mean = WIDTH_TEXTURE_MEAN;\nconst int height_texture_mean = HEIGHT_TEXTURE_MEAN;\nconst int width_texture_scale = WIDTH_TEXTURE_SCALE;\nconst int height_texture_scale = HEIGHT_TEXTURE_SCALE;\n// 输入数据\nuniform sampler2D texture_origin;\nuniform sampler2D texture_scale;\nuniform sampler2D texture_bias;\nuniform sampler2D texture_variance;\nuniform sampler2D texture_mean;\n",func:"\n// start函数\nvoid main(void) {\n // 输出数据\n ivec4 oPos = getOutputTensorPosLIMIT_OUT();\n float o = getValueFromTensorPosLIMIT_ORIGIN_origin(oPos.r, oPos.g, oPos.b, oPos.a);\n\n // 归一化数据\n vec4 scale = getPixelsFromTexturePos_texture_scale(vec2( float(oPos.g) / float(width_texture_scale) + 0.00001, 0.0));\n vec4 bias = getPixelsFromTexturePos_texture_bias(vec2( float(oPos.g) / float(width_texture_bias) + 0.00001, 0.0));\n vec4 mean = getPixelsFromTexturePos_texture_mean(vec2((float(oPos.g)) / float(width_texture_mean) + 0.00001, 0.0));\n vec4 variance = getPixelsFromTexturePos_texture_variance(vec2((float(oPos.g)) / float(width_texture_variance) + 0.00001, 0.0));\n\n float x = (o - mean[0]) / sqrt(variance[0] + epsilon);\n float res = scale[0] * x + bias[0];\n 
setOutput(res);\n}\n",confs:{dep:[{func:"getValueFromTensorPos",conf:{TENSOR_NAME:"origin"}},{func:"getPixelsFromTexturePos",conf:{TEXTURE_NAME:"texture_scale"}},{func:"getPixelsFromTexturePos",conf:{TEXTURE_NAME:"texture_bias"}},{func:"getPixelsFromTexturePos",conf:{TEXTURE_NAME:"texture_variance"}},{func:"getPixelsFromTexturePos",conf:{TEXTURE_NAME:"texture_mean"}},{func:"getPixelsFromTexturePos",conf:{TEXTURE_NAME:"texture_origin"}}],conf:["WIDTH_SHAPE_ORIGIN","HEIGHT_SHAPE_ORIGIN","LENGTH_SHAPE_ORIGIN","WIDTH_TEXTURE_ORIGIN","HEIGHT_TEXTURE_ORIGIN","CHANNEL_ORIGIN","TOTAL_SHAPE_ORIGIN","WIDTH_SHAPE_OUT","HEIGHT_SHAPE_OUT","WIDTH_TEXTURE_OUT","HEIGHT_TEXTURE_OUT","CHANNEL_OUT","OFFSET_Y_OUT","EPSILON","WIDTH_TEXTURE_SCALE","HEIGHT_TEXTURE_SCALE","WIDTH_TEXTURE_BIAS","HEIGHT_TEXTURE_BIAS","WIDTH_TEXTURE_MEAN","HEIGHT_TEXTURE_MEAN","WIDTH_TEXTURE_VARIANCE","HEIGHT_TEXTURE_VARIANCE","MULTI_VALUE","BIAS_VALUE","ACTIVE_FUNCTION"],input:[{tensor:"scale",variable:"texture",setter:"initTexture",type:"texture"},{tensor:"bias",variable:"texture",setter:"initTexture",type:"texture"},{tensor:"mean",variable:"texture",setter:"initTexture",type:"texture"},{tensor:"variance",variable:"texture",setter:"initTexture",type:"texture"},{tensor:"origin",variable:"texture",setter:"initTexture",type:"texture"}]}},reshape2:{params:"\n// 输入数据\nconst int width_shape_origin = WIDTH_SHAPE_ORIGIN;\nconst int height_shape_origin = HEIGHT_SHAPE_ORIGIN;\nconst int length_shape_origin = LENGTH_SHAPE_ORIGIN;\nconst int width_texture_origin = WIDTH_TEXTURE_ORIGIN;\nconst int height_texture_origin = HEIGHT_TEXTURE_ORIGIN;\nconst int channel_origin = CHANNEL_ORIGIN;\nconst int total_shape_origin = TOTAL_SHAPE_ORIGIN;\n\n\n// 输入数据\n uniform sampler2D texture_origin;\n",func:"\n// start函数\nvoid main(void) {\n // 输出数据\n\tivec4 oPos = getOutputTensorPos();\n // 输出坐标转换为输入坐标\n\tint sumVal = oPos.a + oPos.b * width_shape_out + oPos.g * height_shape_out * width_shape_out + oPos.r * channel_out * width_shape_out * height_shape_out;\n \tivec4 new_oPos = transferFromNHWCtoNCHW(sumVal, channel_origin, width_shape_origin, height_shape_origin, total_shape_origin);\n\tfloat o = getValueFromTensorPos_origin(new_oPos.r, new_oPos.g, new_oPos.b, new_oPos.a);\n\tsetOutput(float(o));\n}\n",confs:{dep:[{func:"getValueFromTensorPos",conf:{TENSOR_NAME:"origin"}},{func:"transferFromNHWCtoNCHW",conf:{}}],conf:["WIDTH_SHAPE_ORIGIN","HEIGHT_SHAPE_ORIGIN","LENGTH_SHAPE_ORIGIN","WIDTH_TEXTURE_ORIGIN","HEIGHT_TEXTURE_ORIGIN","CHANNEL_ORIGIN","WIDTH_SHAPE_OUT","HEIGHT_SHAPE_OUT","WIDTH_TEXTURE_OUT","HEIGHT_TEXTURE_OUT","CHANNEL_OUT","OFFSET_Y_OUT","MULTI_VALUE","BIAS_VALUE","ACTIVE_FUNCTION"],input:[{tensor:"origin",variable:"texture",setter:"initTexture",type:"texture"}]}},bilinear_interp:{params:"\n// 输入数据\nconst int width_shape_origin = WIDTH_SHAPE_ORIGIN;\nconst int height_shape_origin = HEIGHT_SHAPE_ORIGIN;\nconst int length_shape_origin = LENGTH_SHAPE_ORIGIN;\nconst int width_texture_origin = WIDTH_TEXTURE_ORIGIN;\nconst int height_texture_origin = HEIGHT_TEXTURE_ORIGIN;\nconst int channel_origin = CHANNEL_ORIGIN;\nconst int total_shape_origin = TOTAL_SHAPE_ORIGIN;\nconst int total_shape_out = TOTAL_SHAPE_OUT;\nconst bool align_corners = ALIGN_CORNERS;\nconst int align_mode = ALIGN_MODE;\n// 输入数据\n uniform sampler2D texture_origin;\n",func:"\n// start函数\n\nvec4 getData(float n, float scale, bool align_flag, int in_len) {\n float m = align_flag ? ((n + 0.5) / scale - 0.5) : (n / scale);\n int a1 = int(floor(m));\n a1 = a1 > 0 ? 
a1 : 0;\n int a2 = (a1 + 1) < (in_len - 1) ? (a1 + 1) : (in_len - 1);\n\n float idx_src = (n + 0.5) / scale - 0.5;\n idx_src = idx_src > 0.0 ? idx_src : 0.0;\n float b1 = align_flag ? (idx_src - float(a1)) : (n / scale - float(a1));\n float b2 = 1.0 - b1;\n return vec4(float(a1), float(a2), b1, b2);\n}\n\nvoid main(void) {\n // 输出数据\n\tivec4 oPos = getOutputTensorPosLIMIT_OUT();\n // 输出坐标转换为输入坐标\n\t//int sumVal = oPos.g + oPos.a * channel_out + oPos.b * channel_out * width_shape_out + oPos.r * channel_out * width_shape_out * height_shape_out;\n \t//oPos = transferFromNHWCtoNCHW(sumVal, channel_out, width_shape_out, height_shape_out, total_shape_out);\n\n bool align_flag = align_mode == 0 && !align_corners;\n\n float scale_x = 0.0;\n float scale_y = 0.0;\n if (align_corners) {\n scale_x = float(width_shape_out - 1) / float(width_shape_origin - 1);\n scale_y = float(height_shape_out - 1) / float(height_shape_origin - 1);\n }\n else {\n scale_x = float(width_shape_out) / float(width_shape_origin);\n scale_y = float(height_shape_out) / float(height_shape_origin);\n }\n\n vec4 vx = getData(float(oPos.a), scale_x, align_flag, width_shape_origin);\n vec4 vy = getData(float(oPos.b), scale_y, align_flag, height_shape_origin);\n\n int x1 = int(vx.r);\n int x2 = int(vx.g);\n float x3 = vx.b;\n float x4 = vx.a;\n int y1 = int(vy.r);\n int y2 = int(vy.g);\n float y3 = vy.b;\n float y4 = vy.a;\n\n float value11 = getValueFromTensorPosLIMIT_ORIGIN_origin(oPos.r, oPos.g, y1, x1);\n float value12 = getValueFromTensorPosLIMIT_ORIGIN_origin(oPos.r, oPos.g, y2, x1);\n float value21 = getValueFromTensorPosLIMIT_ORIGIN_origin(oPos.r, oPos.g, y1, x2);\n float value22 = getValueFromTensorPosLIMIT_ORIGIN_origin(oPos.r, oPos.g, y2, x2);\n float value = x4 * y4 * value11 + x4 * y3 * value12 + x3 * y4 * value21 + x3 * y3 * value22;\n setOutput(float(value));\n}\n",confs:{dep:[{func:"getValueFromTensorPos",conf:{TENSOR_NAME:"origin"}},{func:"transferFromNHWCtoNCHW",conf:{}}],conf:["WIDTH_SHAPE_ORIGIN","HEIGHT_SHAPE_ORIGIN","LENGTH_SHAPE_ORIGIN","WIDTH_TEXTURE_ORIGIN","HEIGHT_TEXTURE_ORIGIN","CHANNEL_ORIGIN","WIDTH_SHAPE_OUT","HEIGHT_SHAPE_OUT","WIDTH_TEXTURE_OUT","HEIGHT_TEXTURE_OUT","CHANNEL_OUT","OFFSET_Y_OUT","MULTI_VALUE","BIAS_VALUE","ACTIVE_FUNCTION","ALIGN_CORNERS","ALIGN_MODE"],input:[{tensor:"origin",variable:"texture",setter:"initTexture",type:"texture"}]}},nearest_interp:{params:"\n// 输入数据\nconst int width_shape_origin = WIDTH_SHAPE_ORIGIN;\nconst int height_shape_origin = HEIGHT_SHAPE_ORIGIN;\nconst int length_shape_origin = LENGTH_SHAPE_ORIGIN;\nconst int width_texture_origin = WIDTH_TEXTURE_ORIGIN;\nconst int height_texture_origin = HEIGHT_TEXTURE_ORIGIN;\nconst int channel_origin = CHANNEL_ORIGIN;\nconst int total_shape_origin = TOTAL_SHAPE_ORIGIN;\nconst int total_shape_out = TOTAL_SHAPE_OUT;\nconst bool align_corners = ALIGN_CORNERS;\nconst int align_mode = ALIGN_MODE;\n// 输入数据\n uniform sampler2D texture_origin;\n",func:"\n// start函数\n\nint getData(float n, float scale, bool align_corners) {\n float m = align_corners ? 
(n / scale + 0.5) : (n / scale);\n return int(floor(m));\n}\n\nvoid main(void) {\n // 输出数据\n\tivec4 oPos = getOutputTensorPosLIMIT_OUT();\n\n float scale_x = 0.0;\n float scale_y = 0.0;\n if (align_corners) {\n scale_x = float(width_shape_out - 1) / float(width_shape_origin - 1);\n scale_y = float(height_shape_out - 1) / float(height_shape_origin - 1);\n }\n else {\n scale_x = float(width_shape_out) / float(width_shape_origin);\n scale_y = float(height_shape_out) / float(height_shape_origin);\n }\n\n int vx = getData(float(oPos.a), scale_x, align_corners);\n int vy = getData(float(oPos.b), scale_y, align_corners);\n\n float o = getValueFromTensorPosLIMIT_ORIGIN_origin(oPos.r, oPos.g, vy, vx);\n setOutput(o);\n}\n",confs:{dep:[{func:"getValueFromTensorPos",conf:{TENSOR_NAME:"origin"}},{func:"transferFromNHWCtoNCHW",conf:{}}],conf:["WIDTH_SHAPE_ORIGIN","HEIGHT_SHAPE_ORIGIN","LENGTH_SHAPE_ORIGIN","WIDTH_TEXTURE_ORIGIN","HEIGHT_TEXTURE_ORIGIN","CHANNEL_ORIGIN","WIDTH_SHAPE_OUT","HEIGHT_SHAPE_OUT","WIDTH_TEXTURE_OUT","HEIGHT_TEXTURE_OUT","CHANNEL_OUT","OFFSET_Y_OUT","MULTI_VALUE","BIAS_VALUE","ACTIVE_FUNCTION","ALIGN_CORNERS","ALIGN_MODE"],input:[{tensor:"origin",variable:"texture",setter:"initTexture",type:"texture"}]}},transpose2:{params:"\n// 输入数据\nconst int width_shape_origin = WIDTH_SHAPE_ORIGIN;\nconst int height_shape_origin = HEIGHT_SHAPE_ORIGIN;\nconst int length_shape_origin = LENGTH_SHAPE_ORIGIN;\nconst int width_texture_origin = WIDTH_TEXTURE_ORIGIN;\nconst int height_texture_origin = HEIGHT_TEXTURE_ORIGIN;\nconst int channel_origin = CHANNEL_ORIGIN;\nconst int total_shape_origin = TOTAL_SHAPE_ORIGIN;\n\n\nconst int perm_size = PERM_SIZE;\nconst int perm_0 = PERM_0;\nconst int perm_1 = PERM_1;\nconst int perm_2 = PERM_2;\nconst int perm_3 = PERM_3;\n\n// 输入数据\n uniform sampler2D texture_origin;\n",func:"\n// start函数\nvoid main(void) {\n // 输出数据\n\tivec4 oPos = getOutputTensorPos();\n\t // 重排遍历顺序\n\t//int sumVal = oPos.g + oPos.a * channel_out + oPos.b * channel_out * width_shape_out + oPos.r * channel_out * width_shape_out * height_shape_out;\n\t//ivec4 new_oPos = transferFromNHWCtoNCHW(sumVal, channel_out, width_shape_out, height_shape_out, total_shape_origin);\n\n\t// 转置 坐标变换\n\t//oPos = new_oPos;\n\tfloat o = 0.0;\n\tif (perm_size == 1) {\n\t\to = getValueFromTensorPos_origin(oPos[0], oPos[1], oPos[2], oPos[3]);\n\t}\n\telse if (perm_size == 2) {\n\t\to = getValueFromTensorPos_origin(oPos[0], oPos[1], oPos[(2 + perm_0)>3?3:(2 + perm_0)], oPos[(2 + perm_1)>3?3:(2 + perm_1)]);\n\t}\n\telse if (perm_size == 3) {\n\t\to = getValueFromTensorPos_origin(oPos[0], oPos[(1 + perm_0)>3?3:(1 + perm_0)], oPos[(1 + perm_1)>3?3:(1 + perm_1)], oPos[(1 + perm_2)>3?3:(1 + perm_2)]);\n\t}\n\telse if (perm_size == 4) {\n\t\to = getValueFromTensorPos_origin(oPos[perm_0], oPos[perm_1], oPos[perm_2], oPos[perm_3]);\n\t}\n\n\n\tsetOutput(float(o));\n}\n",confs:{dep:[{func:"getValueFromTensorPos",conf:{TENSOR_NAME:"origin"}},{func:"transferFromNHWCtoNCHW",conf:{}}],conf:["WIDTH_SHAPE_ORIGIN","HEIGHT_SHAPE_ORIGIN","LENGTH_SHAPE_ORIGIN","WIDTH_TEXTURE_ORIGIN","HEIGHT_TEXTURE_ORIGIN","CHANNEL_ORIGIN","WIDTH_SHAPE_OUT","HEIGHT_SHAPE_OUT","WIDTH_TEXTURE_OUT","HEIGHT_TEXTURE_OUT","CHANNEL_OUT","OFFSET_Y_OUT","MULTI_VALUE","BIAS_VALUE","ACTIVE_FUNCTION"],input:[{tensor:"origin",variable:"texture",setter:"initTexture",type:"texture"}]}},box_coder_encode:{params:"\n// mul的input数据\n// 常量\nconst bool normalized = bool(NORMALIZED);\nconst int axis = AXIS;\n// 输入数据\nconst int width_shape_priorbox = 
WIDTH_SHAPE_PRIORBOX;\nconst int height_shape_priorbox = HEIGHT_SHAPE_PRIORBOX;\nconst int length_shape_priorbox = LENGTH_SHAPE_PRIORBOX;\nconst int width_texture_priorbox = WIDTH_TEXTURE_PRIORBOX;\nconst int height_texture_priorbox = HEIGHT_TEXTURE_PRIORBOX;\nconst int channel_priorbox = CHANNEL_PRIORBOX;\n\nconst int length_shape_boxvar = LENGTH_SHAPE_BOXVAR;\nconst int width_shape_boxvar = WIDTH_SHAPE_BOXVAR;\nconst int height_shape_boxvar = HEIGHT_SHAPE_BOXVAR;\nconst int width_texture_boxvar = WIDTH_TEXTURE_BOXVAR;\nconst int height_texture_boxvar = HEIGHT_TEXTURE_BOXVAR;\nconst int channel_boxvar = CHANNEL_BOXVAR;\n\nconst int length_shape_targetbox = LENGTH_SHAPE_TARGETBOX;\nconst int width_shape_targetbox = WIDTH_SHAPE_TARGETBOX;\nconst int height_shape_targetbox = HEIGHT_SHAPE_TARGETBOX;\nconst int width_texture_targetbox = WIDTH_TEXTURE_TARGETBOX;\nconst int height_texture_targetbox = HEIGHT_TEXTURE_TARGETBOX;\nconst int channel_targetbox = CHANNEL_TARGETBOX;\n\n// uniform变量\n// 输入数据\nuniform sampler2D texture_targetbox;\nuniform sampler2D texture_priorbox;\nuniform sampler2D texture_boxvar;\n",func:"\n// start函数\nvec2 getPriorBoxData(int r, int g, int b, int m, int n) {\n float start = getValueFromTensorPosLIMIT_PRIORBOX_priorbox(r, g, b, m);\n float end = getValueFromTensorPosLIMIT_PRIORBOX_priorbox(r, g, b, n);\n float len = end - start;\n return vec2(start + len / 2.0, len);\n}\nvec2 getBoxVarData(int r, int g, int b, int m, int n) {\n return vec2(\n getValueFromTensorPosLIMIT_BOXVAR_boxvar(r, g, b, m),\n getValueFromTensorPosLIMIT_BOXVAR_boxvar(r, g, b, n)\n );\n}\nvec2 getTargetBoxData(int r, int g, int b, int m, int n) {\n float start = getValueFromTensorPosLIMIT_TARGETBOX_targetbox(r, g, b, m);\n float end = getValueFromTensorPosLIMIT_TARGETBOX_targetbox(r, g, b, n);\n float len = end - start;\n return vec2(start + len / 2.0, len);\n}\n\nvoid main(void) {\n ivec4 oPos = getOutputTensorPosLIMIT_OUT();\n int r = int(oPos.r);\n int g = int(oPos.g);\n int b = int(oPos.b);\n int a = int(oPos.a);\n // 输出坐标转换为输入坐标\n float o = 0.0;\n\n int m = 0;\n int n = 0;\n if (a == 0 || a == 1) {\n m = 0;\n n = 2;\n }\n else {\n m = 1;\n n = 3;\n }\n vec2 priorbox = getPriorBoxData(r, g, b, m, n);\n vec2 boxvar = getBoxVarData(r, g, b, m, n);\n vec2 targetbox = getTargetBoxData(r, g, b, m, n);\n float p1 = priorbox.r;\n float p2 = priorbox.g;\n float t1 = targetbox.r;\n float t2 = targetbox.g;\n float v1 = boxvar.r;\n float v2 = boxvar.g;\n\n if (a == 0 || a == 1) {\n o = (t1 - p1) / p2 / v1;\n }\n else {\n o = log(abs(t2 / p2)) / v2;\n 
}\n\n\tsetOutput(float(o));\n}\n",confs:{dep:[{func:"getValueFromTensorPos",conf:{TENSOR_NAME:"priorbox"}},{func:"getValueFromTensorPos",conf:{TENSOR_NAME:"boxvar"}},{func:"getValueFromTensorPos",conf:{TENSOR_NAME:"targetbox"}}],conf:["LENGTH_SHAPE_PRIORBOX","WIDTH_SHAPE_PRIORBOX","HEIGHT_SHAPE_PRIORBOX","WIDTH_TEXTURE_PRIORBOX","HEIGHT_TEXTURE_PRIORBOX","CHANNEL_PRIORBOX","WIDTH_SHAPE_BOXVAR","HEIGHT_SHAPE_BOXVAR","LENGTH_SHAPE_BOXVAR","WIDTH_TEXTURE_BOXVAR","HEIGHT_TEXTURE_BOXVAR","CHANNEL_BOXVAR","WIDTH_SHAPE_TARGETBOX","HEIGHT_SHAPE_TARGETBOX","LENGTH_SHAPE_TARGETBOX","WIDTH_TEXTURE_TARGETBOX","HEIGHT_TEXTURE_TARGETBOX","CHANNEL_TARGETBOX","WIDTH_SHAPE_OUT","HEIGHT_SHAPE_OUT","WIDTH_TEXTURE_OUT","HEIGHT_TEXTURE_OUT","CHANNEL_OUT","OFFSET_Y_OUT"],input:[{tensor:"priorbox",variable:"texture",setter:"initTexture",type:"texture"},{tensor:"boxvar",variable:"texture",setter:"initTexture",type:"texture"},{tensor:"targetbox",variable:"texture",setter:"initTexture",type:"texture"}]}},box_coder_decode:{params:"\n// mul的input数据\n// 常量\nconst bool normalized = bool(NORMALIZED);\nconst int axis = AXIS;\n// 输入数据\nconst int width_shape_priorbox = WIDTH_SHAPE_PRIORBOX;\nconst int height_shape_priorbox = HEIGHT_SHAPE_PRIORBOX;\nconst int length_shape_priorbox = LENGTH_SHAPE_PRIORBOX;\nconst int width_texture_priorbox = WIDTH_TEXTURE_PRIORBOX;\nconst int height_texture_priorbox = HEIGHT_TEXTURE_PRIORBOX;\nconst int channel_priorbox = CHANNEL_PRIORBOX;\n\nconst int length_shape_boxvar = LENGTH_SHAPE_BOXVAR;\nconst int width_shape_boxvar = WIDTH_SHAPE_BOXVAR;\nconst int height_shape_boxvar = HEIGHT_SHAPE_BOXVAR;\nconst int width_texture_boxvar = WIDTH_TEXTURE_BOXVAR;\nconst int height_texture_boxvar = HEIGHT_TEXTURE_BOXVAR;\nconst int channel_boxvar = CHANNEL_BOXVAR;\n\nconst int length_shape_targetbox = LENGTH_SHAPE_TARGETBOX;\nconst int width_shape_targetbox = WIDTH_SHAPE_TARGETBOX;\nconst int height_shape_targetbox = HEIGHT_SHAPE_TARGETBOX;\nconst int width_texture_targetbox = WIDTH_TEXTURE_TARGETBOX;\nconst int height_texture_targetbox = HEIGHT_TEXTURE_TARGETBOX;\nconst int channel_targetbox = CHANNEL_TARGETBOX;\n\n// uniform变量\n// 输入数据\nuniform sampler2D texture_targetbox;\nuniform sampler2D texture_priorbox;\nuniform sampler2D texture_boxvar;\n",func:"\n// start函数\nvec2 getPriorBoxData(int r, int g, int b, int m, int n) {\n float start = getValueFromTensorPosLIMIT_PRIORBOX_priorbox(r, g, b, m);\n float end = getValueFromTensorPosLIMIT_PRIORBOX_priorbox(r, g, b, n);\n float len = end - start;\n return vec2(start + len / 2.0, len);\n}\nvec2 getBoxVarData(int r, int g, int b, int m, int n) {\n return vec2(\n getValueFromTensorPosLIMIT_BOXVAR_boxvar(r, g, b, m),\n getValueFromTensorPosLIMIT_BOXVAR_boxvar(r, g, b, n)\n );\n}\nvec2 getTargetBoxData(int r, int g, int b, int m, int n) {\n return vec2(\n getValueFromTensorPosLIMIT_TARGETBOX_targetbox(r, g, b, m),\n getValueFromTensorPosLIMIT_TARGETBOX_targetbox(r, g, b, n)\n );\n}\n\nvoid main(void) {\n ivec4 oPos = getOutputTensorPosLIMIT_OUT();\n int r = int(oPos.r);\n int g = int(oPos.g);\n int b = int(oPos.b);\n int a = int(oPos.a);\n // 输出坐标转换为输入坐标\n float o = 0.0;\n\n int m = 0;\n int n = 0;\n if (a == 0 || a == 2) {\n m = 0;\n n = 2;\n }\n else {\n m = 1;\n n = 3;\n }\n vec2 priorbox = getPriorBoxData(r, g, b, m, n);\n vec2 boxvar = getBoxVarData(r, g, b, m, n);\n vec2 targetbox = getTargetBoxData(r, g, b, m, n);\n float p1 = priorbox.r;\n float p2 = priorbox.g;\n float t1 = targetbox.r;\n float t2 = targetbox.g;\n float v1 = boxvar.r;\n float v2 = 
boxvar.g;\n\n float b1 = p2 * v1 * t1 + p1;\n float b2 = exp(v2 * t2) * p2;\n if (a == 0 || a == 1) {\n o = b1 - b2 / 2.0;\n }\n else {\n o = b1 + b2 / 2.0;\n }\n\n\tsetOutput(float(o));\n}\n",confs:{dep:[{func:"getValueFromTensorPos",conf:{TENSOR_NAME:"priorbox"}},{func:"getValueFromTensorPos",conf:{TENSOR_NAME:"boxvar"}},{func:"getValueFromTensorPos",conf:{TENSOR_NAME:"targetbox"}}],conf:["LENGTH_SHAPE_PRIORBOX","WIDTH_SHAPE_PRIORBOX","HEIGHT_SHAPE_PRIORBOX","WIDTH_TEXTURE_PRIORBOX","HEIGHT_TEXTURE_PRIORBOX","CHANNEL_PRIORBOX","WIDTH_SHAPE_BOXVAR","HEIGHT_SHAPE_BOXVAR","LENGTH_SHAPE_BOXVAR","WIDTH_TEXTURE_BOXVAR","HEIGHT_TEXTURE_BOXVAR","CHANNEL_BOXVAR","WIDTH_SHAPE_TARGETBOX","HEIGHT_SHAPE_TARGETBOX","LENGTH_SHAPE_TARGETBOX","WIDTH_TEXTURE_TARGETBOX","HEIGHT_TEXTURE_TARGETBOX","CHANNEL_TARGETBOX","WIDTH_SHAPE_OUT","HEIGHT_SHAPE_OUT","WIDTH_TEXTURE_OUT","HEIGHT_TEXTURE_OUT","CHANNEL_OUT","OFFSET_Y_OUT"],input:[{tensor:"priorbox",variable:"texture",setter:"initTexture",type:"texture"},{tensor:"boxvar",variable:"texture",setter:"initTexture",type:"texture"},{tensor:"targetbox",variable:"texture",setter:"initTexture",type:"texture"}]}},prior_box:{params:"\n// 常量\n\nMIN_SIZES_CODE\nconst int min_sizes_len = MIN_LEN;\n\nMAX_SIZES_CODE\nconst int max_sizes_len = MAX_LEN;\n\nASPECT_RATIOS_CODE\nconst int aspect_ratios_len = ASPECT_LEN;\n\nVARIANCE_CODE\n\nconst bool flip = FLIP;\nconst bool clip = CLIP;\nconst float offsets = OFFSETS;\nconst float step_w = STEP_W;\nconst float step_h = STEP_H;\nconst bool min_max_aspect_ratios_order = MIN_MAX_ASPECT_ORDER;\nconst float epsilon = EPSILON;\n\nconst int width_shape_origin = WIDTH_SHAPE_ORIGIN;\nconst int height_shape_origin = HEIGHT_SHAPE_ORIGIN;\nconst int length_shape_origin = LENGTH_SHAPE_ORIGIN;\nconst int width_texture_origin = WIDTH_TEXTURE_ORIGIN;\nconst int height_texture_origin = HEIGHT_TEXTURE_ORIGIN;\nconst int channel_origin = CHANNEL_ORIGIN;\nconst int total_shape_origin = TOTAL_SHAPE_ORIGIN;\n\nconst int width_shape_image = WIDTH_SHAPE_IMAGE;\nconst int height_shape_image = HEIGHT_SHAPE_IMAGE;\nconst int length_shape_image = LENGTH_SHAPE_IMAGE;\nconst int width_texture_image = WIDTH_TEXTURE_IMAGE;\nconst int height_texture_image = HEIGHT_TEXTURE_IMAGE;\nconst int channel_image = CHANNEL_IMAGE;\nconst int total_shape_image = TOTAL_SHAPE_IMAGE;\n\n\nconst int total_shape_out = TOTAL_SHAPE_OUT;\n\n// const int dim = DIM;\n// const int num = NUM;\n// const int target_length = TARGET_LENGTH;\n\n\n// 输入数据\nuniform sampler2D texture_origin;\nuniform sampler2D texture_image;\n",func:"\n// start函数\nvoid main(void) {\n ivec4 oPos = getOutputTensorPosLIMIT_OUT();\n\n float h_i = float(oPos.r);\n float w_i = float(oPos.g);\n float prior_i = float(oPos.b);\n int a = oPos.a;\n\n // layer_run_time为1时,输出Variance tensor; 为0时,输出Boxes tensor\n if (layer_run_time == 1) {\n // getVariance();\n setOutput(getVariance(a));\n return;\n }\n\n // 读取min_sizes, max_sizes, aspectRatios数组\n // getMinSizes();\n // getMaxSizes();\n // getAspectRatios();\n\n float o = 0.0;\n float cx = (w_i + offsets) * step_w;\n float cy = (h_i + offsets) * step_h;\n\n float min_max_aspect_len = float(min_sizes_len + max_sizes_len + aspect_ratios_len);\n\n int min_size_i = int(floor(prior_i / min_max_aspect_len));\n int min_max_aspect_i = int(mod(prior_i, min_max_aspect_len));\n\n float box_width = 0.0;\n float box_height = 0.0;\n float min_size = getMinSizes(min_size_i);\n if (bool(min_max_aspect_ratios_order)) {\n // 候选框的输出以[min, max, aspect_ratios]的顺序输出\n if (min_max_aspect_i < 
min_sizes_len) {\n box_width = box_height = min_size / 2.0;\n\n }\n else if (bool(max_sizes_len) && (min_max_aspect_i < (min_sizes_len + max_sizes_len))) {\n float max_size = getMaxSizes(min_size_i);\n box_width = box_height = sqrt(min_size * max_size)/ 2.0;\n }\n else {\n float ar = getAspectRatios(min_max_aspect_i - min_sizes_len - max_sizes_len);\n box_width = min_size * sqrt(ar) / 2.;\n box_height = min_size / sqrt(ar) / 2.;\n }\n\n float prefix = (a == 0 || a == 1) ? -1.0 : 1.0;\n if (a == 0 || a == 2) {\n o = (cx + prefix * box_width) / float(width_shape_image);\n }\n else {\n o = (cy + prefix * box_height) / float(height_shape_image);\n }\n }\n else {\n // 文档里没有写这种逻辑,paddle跟paddle-lite源码逻辑是先输出aspect_ratios在输出max_size,没有min_size,先补上吧…\n if (min_max_aspect_i < aspect_ratios_len) {\n float ar = getAspectRatios(min_max_aspect_i - min_sizes_len - max_sizes_len);\n box_width = min_size * sqrt(ar) / 2.;\n box_height = min_size / sqrt(ar) / 2.;\n\n }\n else if (bool(max_sizes_len) && (min_max_aspect_i < aspect_ratios_len + max_sizes_len)) {\n float max_size = getMaxSizes(min_size_i);\n box_width = box_height = sqrt(min_size * max_size)/ 2.0;\n }\n else {\n box_width = box_height = min_size / 2.0;\n }\n\n box_width = (a == 0 || a == 1) ? (box_width * -1.0) : box_width;\n if (a == 0 || a == 2) {\n o = (cx + box_width) / float(width_shape_image);\n }\n else {\n o = (cy + box_height) / float(height_shape_image);\n }\n }\n\n if (clip) {\n o = min(max(0.0, o), 1.0);\n }\n\n\tsetOutput(float(o));\n}\n",confs:{dep:[{func:"getValueFromTensorPos",conf:{TENSOR_NAME:"origin"}},{func:"getValueFromTensorPos",conf:{TENSOR_NAME:"image"}}],conf:["WIDTH_SHAPE_ORIGIN","HEIGHT_SHAPE_ORIGIN","LENGTH_SHAPE_ORIGIN","WIDTH_TEXTURE_ORIGIN","HEIGHT_TEXTURE_ORIGIN","CHANNEL_ORIGIN","WIDTH_SHAPE_IMAGE","HEIGHT_SHAPE_IMAGE","LENGTH_SHAPE_IMAGE","WIDTH_TEXTURE_IMAGE","HEIGHT_TEXTURE_IMAGE","CHANNEL_IMAGE","WIDTH_SHAPE_OUT","HEIGHT_SHAPE_OUT","WIDTH_TEXTURE_OUT","HEIGHT_TEXTURE_OUT","CHANNEL_OUT"],input:[{tensor:"origin",variable:"texture",setter:"initTexture",type:"texture"},{tensor:"image",variable:"texture",setter:"initTexture",type:"texture"}]}},connect:{params:"\n// mul的input数据\n// 常量\n// 输入数据\nNUMBERS_SHAPE_COUNTER\nNUMBERS_SHAPE_ORIGIN\nconst int length_shape_counter = LENGTH_SHAPE_COUNTER;\nconst int width_shape_counter = WIDTH_SHAPE_COUNTER;\nconst int height_shape_counter = HEIGHT_SHAPE_COUNTER;\nconst int width_texture_counter = WIDTH_TEXTURE_COUNTER;\nconst int height_texture_counter = HEIGHT_TEXTURE_COUNTER;\nconst int channel_counter = CHANNEL_COUNTER;\nconst int total_shape_counter = TOTAL_SHAPE_COUNTER;\n\nconst int width_shape_origin = WIDTH_SHAPE_ORIGIN;\nconst int height_shape_origin = HEIGHT_SHAPE_ORIGIN;\nconst int length_shape_origin = LENGTH_SHAPE_ORIGIN;\nconst int width_texture_origin = WIDTH_TEXTURE_ORIGIN;\nconst int height_texture_origin = HEIGHT_TEXTURE_ORIGIN;\nconst int channel_origin = CHANNEL_ORIGIN;\nconst int total_shape_origin = TOTAL_SHAPE_ORIGIN;\n\nconst int total_shape_out = TOTAL_SHAPE_OUT;\n\n\n\n// uniform变量\n// 输入数据\nuniform sampler2D texture_counter;\nuniform sampler2D texture_origin;\n",func:"\n// start函数\nvoid main(void) {\n ivec4 oPos = getOutputTensorPosLIMIT_OUT();\n float o = 0.0;\n ivec4 co;\n if (oPos.a < total_shape_origin) {\n // 取自origin\n getNumbersShapeOrigin();\n co = getTensorPosFromArrayIndex_origin(oPos.a);\n o = getValueFromTensorPosLIMIT_ORIGIN_origin(co.r, co.g, co.b, co.a);\n }\n else {\n // 取自counter\n getNumbersShapeCounter();\n co = 
getTensorPosFromArrayIndex_counter(oPos.a - total_shape_origin);\n o = getValueFromTensorPosLIMIT_COUNTER_counter(co.r, co.g, co.b, co.a);\n }\n\tsetOutput(float(o));\n}\n",confs:{dep:[{func:"getValueFromTensorPos",conf:{TENSOR_NAME:"origin"}},{func:"getTensorPosFromArrayIndex",conf:{TENSOR_NAME:"origin",TENSOR_TYPE:"vec4"}},{func:"getValueFromTensorPos",conf:{TENSOR_NAME:"counter"}},{func:"getTensorPosFromArrayIndex",conf:{TENSOR_NAME:"counter",TENSOR_TYPE:"vec4"}},{func:"transferFromNHWCtoNCHW"}],conf:["LENGTH_SHAPE_COUNTER","WIDTH_SHAPE_COUNTER","HEIGHT_SHAPE_COUNTER","WIDTH_TEXTURE_COUNTER","HEIGHT_TEXTURE_COUNTER","CHANNEL_COUNTER","NUMBERS_SHAPE_COUNTER","WIDTH_SHAPE_ORIGIN","HEIGHT_SHAPE_ORIGIN","LENGTH_SHAPE_ORIGIN","WIDTH_TEXTURE_ORIGIN","HEIGHT_TEXTURE_ORIGIN","CHANNEL_ORIGIN","NUMBERS_SHAPE_ORIGIN","WIDTH_SHAPE_OUT","HEIGHT_SHAPE_OUT","WIDTH_TEXTURE_OUT","HEIGHT_TEXTURE_OUT","CHANNEL_OUT","OFFSET_Y_OUT"],input:[{tensor:"origin",variable:"texture",setter:"initTexture",type:"texture"},{tensor:"counter",variable:"texture",setter:"initTexture",type:"texture"}]}}},atoms:{getArrayIndexFromTensorPos:"\n\nint getArrayIndexFromTensorPos_TENSOR_NAME(TENSOR_TYPE tensorPos) {\n int index = 0;\n for (int i = 0; i < length_shape_TENSOR_NAME; i++) {\n index += tensorPos[i] * numbers_shape_TENSOR_NAME[i];\n }\n return index;\n}\n",getArrayIndexFromTexturePos:"\nint getArrayIndexFromTexturePos_TEXTURE_NAME(vec3 pos) {\n int x = int(floor(pos.x));\n int y = int(floor(pos.y));\n int d = int(floor(pos.z));\n return (width_TEXTURE_NAME * y + x) * 4 + d;\n}\n",getTensorPosFromArrayIndex:"\niTENSOR_TYPE getTensorPosFromArrayIndex_TENSOR_NAME(int n) {\n iTENSOR_TYPE pos;\n pos[0] = n / numbers_shape_TENSOR_NAME[0];\n for (int i = 1; i < length_shape_TENSOR_NAME; i++) {\n n = int(mod(float(n), float(numbers_shape_TENSOR_NAME[i - 1])));\n pos[i] = n / numbers_shape_TENSOR_NAME[i];\n }\n return pos;\n}\n",getTexturePosFromArrayIndex:"\nvec3 getTexturePosFromArrayIndex_TEXTURE_NAME(int n) {\n vec3 pos;\n pos.z = mod(float(n), 4.0);\n n /= 4;\n int y = n / width_TEXTURE_NAME;\n float width = float(width_TEXTURE_NAME);\n float x = mod(float(n), width);\n pos.x = x / width;\n pos.y = float(y) / float(height_TEXTURE_NAME);\n return pos;\n}\n",getValueFromTexturePos:"\nfloat getValueFromTexturePos_TEXTURE_NAME(vec3 pos) {\n vec4 pixels = TEXTURE2D(TEXTURE_NAME, pos.xy);\n int d = int(pos.z);\n if (d == 0) {\n return pixels.r;\n } else if (d == 1) {\n return pixels.g;\n } else if (d == 2) {\n return pixels.b;\n }\n return pixels.a;\n}\n",getValueFromTensorPos:"\n// 根据tensor坐标获取这个tensor位置的值\nfloat getValueFromTensorPos_TENSOR_NAME(int r, int g, int b, int a) {\n vec4 pixels = TEXTURE2D(texture_TENSOR_NAME,\n vec2(\n (float(a * channel_TENSOR_NAME + g) + 0.5) / float(width_texture_TENSOR_NAME),\n (float(r * height_shape_TENSOR_NAME + b) + 0.5) / float(height_texture_TENSOR_NAME)\n )\n );\n // 只用了r通道\n return pixels.r;\n}\n\n// 超限布局根据tensor坐标获取这个tensor位置的值\nfloat getValueFromTensorPosLimit_TENSOR_NAME(int r, int g, int b, int a) {\n float pieceW = ceil(float(width_shape_TENSOR_NAME) / 4.0);\n int x = int(mod(float(a), pieceW));\n int offsetY = 0;\n\n if ((float(a) / pieceW) >= 3.0) {\n offsetY = 3 * height_shape_TENSOR_NAME;\n }\n else if (float(a) / pieceW >= 2.0) {\n offsetY = 2 * height_shape_TENSOR_NAME;\n }\n else if (float(a) >= pieceW) {\n offsetY = height_shape_TENSOR_NAME;\n }\n vec4 pixels = TEXTURE2D(texture_TENSOR_NAME,\n vec2(\n (float(x * channel_TENSOR_NAME + g) + 0.5) / 
float(width_texture_TENSOR_NAME),\n (float(r * 4 * height_shape_TENSOR_NAME + b + offsetY) + 0.5) / float(height_texture_TENSOR_NAME)\n )\n );\n return pixels.r;\n}\n\n",getValueFromTensorPosPacked:"\nfloat getValueFromTensorPosPacked_TENSOR_NAME(int r, int g, int b, int a) {\n int y = b / 2;\n int yOffset = int(mod(float(b), 2.0));\n int x = a / 2;\n int xOffset = int(mod(float(a), 2.0));\n int height = height_shape_TENSOR_NAME + offset_y_TENSOR_NAME;\n vec4 pixels = TEXTURE2D(texture_TENSOR_NAME, vec2((float(x) + 0.5) / float(width_texture_TENSOR_NAME), (float(g * height / 2 + y) + 0.5) / float(height_texture_TENSOR_NAME)));\n int index = 0;\n if (xOffset == 0 && yOffset == 0) {\n return pixels[0];\n } \n else if (xOffset == 1 && yOffset == 0) {\n return pixels[1];\n }\n else if (xOffset == 0 && yOffset == 1) {\n return pixels[2];\n }\n return pixels[3];\n}\n",moveTexture2PosToReal:"\n\n// vec2 moveTexture2PosToReal_TEXTURE_NAME(vec2 v) {\n// return v * _2d_shape_TEXTURE_NAME;\n// // vec2 v2;\n// // v2.x = v.x * float(width_TEXTURE_NAME);\n// // v2.y = v.y * float(height_TEXTURE_NAME);\n// // return v2;\n// }\n\nvec2 moveTexture2PosToReal_TEXTURE_NAME(vec2 v) {\n vec2 v2;\n v2.x = v.x * float(width_TEXTURE_NAME);\n v2.y = v.y * float(height_TEXTURE_NAME);\n return v2;\n}\n",getPixelsFromTexturePos:"\n#define getPixelsFromTexturePos_TEXTURE_NAME(pos) TEXTURE2D(TEXTURE_NAME, pos)\n",getRangeSumFromArrayIndex:"\nfloat getRangeSumFromArrayIndex_TEXTURE_NAME(int start) {\n float result = 0.0;\n for (int i = 0; i < (width_shape_TENSOR_NAME * height_shape_TENSOR_NAME); i++) {\n vec3 pos = getTexturePosFromArrayIndex_TEXTURE_NAME(i + start);\n result += getValueFromTexturePos_TEXTURE_NAME(pos); \n }\n return result;\n}\n",getRangePowSumFromArrayIndex:"\nfloat getRangePowSumFromArrayIndex_TEXTURE_NAME(int start, float p, float mean) {\n float result = 0.0;\n for (int i = 0; i < (width_shape_TENSOR_NAME * height_shape_TENSOR_NAME); i++) {\n vec3 pos = getTexturePosFromArrayIndex_TEXTURE_NAME(i + start);\n result += pow(getValueFromTexturePos_TEXTURE_NAME(pos) - mean, p); \n }\n return result;\n}\n",sigmoid:"\nfloat sigmoid(float x, float y, float z) {\n float result = 1.0 / (1.0 + exp(-x));\n return result;\n}\n",prelu:"\nfloat prelu(float x, float p, float b) {\n float result = x;\n if (x < 0.0) {\n result = x * p;\n }\n return result;\n}\n",scale:"\nfloat scale(float x, float p, float b) {\n float result = p * x + b;\n return result;\n}\n",softmax:"\nfloat softmax(float x, float p, float b) {\n float result = x;\n if (x < 0.0) {\n result = x * p;\n }\n return result;\n}\n",transferFromNHWCtoNCHW:"\nivec4 transferFromNHWCtoNCHW( int sumVal, const int channel, const int width_shape, const int height_shape, const int total_shape) {\n\n\tint n_origin = int(total_shape/(channel * width_shape * height_shape));\n\tint new_a = int(mod(float(sumVal), float(width_shape)));\n\tsumVal = int((sumVal - new_a) / width_shape);\n\tint new_b = int(mod(float(sumVal), float(height_shape)));\n\tsumVal = int((sumVal - new_b) / height_shape);\n\tint new_g = int(mod(float(sumVal), float(channel)));\n\tsumVal = int((sumVal - new_g) / channel);\n\tint new_r = int(mod(float(sumVal), float(n_origin)));\n\treturn ivec4(new_r,new_g,new_b,new_a);\n}\n"}},N=function(){function t(e){l()(this,t),this.defaultOpts=Object.assign({},e),this.webglVersion=2,this.isFrameBufferSupportFloat=!0,this.texture2d="texture"}return h()(t,[{key:"setWebglVersion",value:function(){var t=arguments.length>0&&void 
0!==arguments[0]?arguments[0]:0;this.webglVersion=t,1===t&&(this.texture2d="texture2D")}},{key:"setIsFrameBufferSupportFloat",value:function(){var t=!(arguments.length>0&&void 0!==arguments[0])||arguments[0];this.isFrameBufferSupportFloat=t}},{key:"setIsFloatTextureReadPixelsEnabled",value:function(){var t=!(arguments.length>0&&void 0!==arguments[0])||arguments[0];this.isFloatTextureReadPixelsEnabled=t}},{key:"buildShader",value:function(t,e){var n=arguments.length>2&&void 0!==arguments[2]?arguments[2]:void 0,r=arguments.length>3?arguments[3]:void 0,o="";return o=this.buildPrefix(r),o+=this.buildCommon(t),o+=void 0!==n?this.buildRuntime(n):"",o+=this.buildOp(t),e.texture2d=this.texture2d,o=this.populateData(o,e)}},{key:"buildPrefix",value:function(t){return 1===this.webglVersion?this.isFrameBufferSupportFloat?A.common.prefix:t&&!this.isFloatTextureReadPixelsEnabled?A.common.prefixUint:A.common.prefixHalf:A.common.prefix2}},{key:"buildCommon",value:function(t){return A.common.params+A.common.func}},{key:"buildRuntime",value:function(t){return"\n int layer_run_time = ".concat(t,";\n ")}},{key:"buildOp",value:function(t){var e=this,n=A.ops[t].params,r=A.atoms;return(A.ops[t].confs.dep||[]).map((function(t){var o=t.func,i=t.conf,a=r[o];n+=e.populateData(a,i)})),n+=this.buildSuffix(t),n+=A.ops[t].func}},{key:"buildSuffix",value:function(t){return A.common.suffix}},{key:"populateData",value:function(t,e){var n=t;for(var r in e)n=n.replace(new RegExp(r.toUpperCase(),"g"),void 0===e[r]?1:e[r]);return n}},{key:"getOpConfs",value:function(){var t={};for(var e in A.ops)A.ops.hasOwnProperty(e)&&(t[e]=A.ops[e].confs.input);return t}}]),t}(),y=function(){function t(){var e=arguments.length>0&&void 0!==arguments[0]?arguments[0]:{};l()(this,t),this.gpu=new H(e)}var e;return h()(t,[{key:"getWebglVersion",value:function(){return this.gpu.getWebglVersion()}},{key:"getWebglMaxTextureSize",value:function(){return this.gpu.getWebglMaxTextureSize()}},{key:"getWebglMaxTextureImageUnits",value:function(){return this.gpu.maxTextureImageUnits()}},{key:"getIsFrameBufferSupportFloat",value:function(){return this.gpu.getIsFrameBufferSupportFloat()}},{key:"getIsFloatTextureReadPixelsEnabled",value:function(){return this.gpu.getIsFloatTextureReadPixelsEnabled()}},{key:"run",value:function(t,e,n){var r=this;if(!e.isPass)return console.log("Skipping op: "+t),this;var o=this.gpu;e.program.forEach((function(t,i){var a=e.outputTensors[i],s=a.tensorId;o.setOutProps(a),o.attachFrameBuffer(e.iLayer,s),o.setProgram(t,n),r.gpu.render(e.renderData,e.iLayer,n,i)}))}},{key:"read2",value:function(){return this.gpu.frameBufferIsComplete().isComplete?this.gpu.compute():null}},{key:"read",value:(e=u()(a.a.mark((function t(){var e;return a.a.wrap((function(t){for(;;)switch(t.prev=t.next){case 0:return e=this.gpu.createPBO(),t.next=3,this.gpu.createAndWaitForFence();case 3:return t.abrupt("return",this.gpu.downloadFoat32TensorFromBuffer(e));case 4:case"end":return t.stop()}}),t,this)}))),function(){return e.apply(this,arguments)})},{key:"createProgram",value:function(t,e,n){var r=this.gpu.initShader(t,"fragment");return this.gpu.createProgram(r,e,n)}},{key:"dispose",value:function(){this.gpu.dispose()}}]),t}(),P=n(14),S=n.n(P);function b(t,e){var n;if("undefined"==typeof Symbol||null==t[Symbol.iterator]){if(Array.isArray(t)||(n=function(t,e){if(!t)return;if("string"==typeof t)return G(t,e);var n=Object.prototype.toString.call(t).slice(8,-1);"Object"===n&&t.constructor&&(n=t.constructor.name);if("Map"===n||"Set"===n)return 
Array.from(t);if("Arguments"===n||/^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(n))return G(t,e)}(t))||e&&t&&"number"==typeof t.length){n&&(t=n);var r=0,o=function(){};return{s:o,n:function(){return r>=t.length?{done:!0}:{done:!1,value:t[r++]}},e:function(t){throw t},f:o}}throw new TypeError("Invalid attempt to iterate non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method.")}var i,a=!0,s=!1;return{s:function(){n=t[Symbol.iterator]()},n:function(){var t=n.next();return a=t.done,t},e:function(t){s=!0,i=t},f:function(){try{a||null==n.return||n.return()}finally{if(s)throw i}}}}function G(t,e){(null==e||e>t.length)&&(e=t.length);for(var n=0,r=new Array(e);n<e;n++)r[n]=t[n];return r}var U=null,L={setTextureMaxSize:function(t){U=t},getQueryTime:function(t,e){return t.getQueryParameter(e,t.QUERY_RESULT)},beginQuery:function(t,e){if(2===e){var n=t.getExtension("EXT_disjoint_timer_query_webgl2");if(!n)return;var r=t.createQuery();return t.beginQuery(n.TIME_ELAPSED_EXT,r),r}return null},endQuery:function(t,e,n){if(2===e){var r=t.getExtension("EXT_disjoint_timer_query_webgl2");if(!r)return;t.endQuery(r.TIME_ELAPSED_EXT)}return n},getReshapeInPaddle:function(){var t=arguments.length>0&&void 0!==arguments[0]?arguments[0]:[],e=arguments.length>2&&void 0!==arguments[2]?arguments[2]:[],n=t.reduce((function(t,e){return t*e}));return 1===e.length?[1,n]:[e[0],n/e[0]]},getBroadcastShapeInPaddle:function(){var t=arguments.length>0&&void 0!==arguments[0]?arguments[0]:[],e=arguments.length>1&&void 0!==arguments[1]?arguments[1]:[],n=arguments.length>2&&void 0!==arguments[2]?arguments[2]:1,r=t,o=e;return t.length-e.length<0&&(r=e,o=t),o.concat(r.slice(n))},getBroadcastDims:function(){for(var t=arguments.length>0&&void 0!==arguments[0]?arguments[0]:[],e=arguments.length>1&&void 0!==arguments[1]?arguments[1]:[],n=t.length,r=[],o=0;o<n;o++){var i=n-1-o,a=t[i]||1,s=e[e.length-1-o]||1;s>1&&1===a&&r.unshift(i)}return r},getBroadcastShape:function(){for(var t=arguments.length>0&&void 0!==arguments[0]?arguments[0]:[],e=arguments.length>1&&void 0!==arguments[1]?arguments[1]:[],n=[],r=Math.max(t.length,e.length),o=0;o<r;o++){var i=t[t.length-o-1];null===i&&(i=1);var a=e[e.length-o-1];if(null===a&&(a=1),1===i)n.unshift(a);else if(1===a)n.unshift(i);else{if(i!==a)return null;n.unshift(i)}}return n},applyFilterWinograd:function(t,e){for(var n=o()(e,4),r=n[0],i=n[1],a=(n[2],n[3],0),s=0,u=new Float32Array(r*i*16),_=0;_<r;_++)for(var l=0;l<i;l++){var c=t.subarray(s,s+9),h=o()(c,9),f=h[0],p=h[1],T=h[2],E=h[3],d=h[4],g=h[5],I=h[6],m=h[7],x=h[8],v=[f,.5*f+.5*p+.5*T,.5*f-.5*p+.5*T,T,.5*f+.5*E+.5*I,.25*f+.25*p+.25*T+.25*E+.25*d+.25*g+.25*I+.25*m+.25*x,.25*f-.25*p+.25*T+.25*E-.25*d+.25*g+.25*I-.25*m+.25*x,.5*T+.5*g+.5*x,.5*f-.5*E+.5*I,.25*f+.25*p+.25*T-.25*E-.25*d-.25*g+.25*I+.25*m+.25*x,.25*f-.25*p+.25*T-.25*E+.25*d-.25*g+.25*I-.25*m+.25*x,.5*T-.5*g+.5*x,I,.5*I+.5*m+.5*x,.5*I-.5*m+.5*x,x];u.set(v,a),a+=16,s+=9}return u},getTextureInfoFromTensorShape:function(){var t=arguments.length>0&&void 0!==arguments[0]?arguments[0]:[],e=arguments.length>1&&void 0!==arguments[1]&&arguments[1],n=t[0],r=t[1],o=t[2],i=t[3],a=n*o,s=r*i,u=0,_=0,l=!1;if((a>U||s>U)&&(console.error("Size exceeds limit",t),a*=4,s=r*Math.ceil(i/4),l=!0,a>U||s>U)){var c="[".concat(s,"x").concat(a,"]"),h="[".concat(U,"x").concat(U,"]");throw new Error("Requested texture size "+c+" greater than WebGL maximum on this browser / GPU "+h+".")}return 
e&&(a=n*r*Math.ceil(o/2),s=Math.ceil(i/2),u=i%2,_=o%2),{offsetX:u,offsetY:_,exceedMax:l,shape:[4,a,s],zeroNumber:0}},getMaxItem:function(){var t=arguments.length>0&&void 0!==arguments[0]?arguments[0]:[],e=Math.max.apply(null,t),n=t.indexOf(e);return{value:e,index:n}},loadShader:function(t){var e=this;return u()(a.a.mark((function n(){var r;return a.a.wrap((function(n){for(;;)switch(n.prev=n.next){case 0:return n.next=2,fetch(e.getShaderFile(t));case 2:return r=n.sent,n.abrupt("return",r.text());case 4:case"end":return n.stop()}}),n)})))()},getShaderFile:function(t){var e=t.split("/");return"/"+e[e.length-1]},img2texture:function(){for(var t=arguments.length>0&&void 0!==arguments[0]?arguments[0]:{},e=t.height_texture,n=t.width_texture,r=t.shape,o=e*n*4,i=r[0],a=r[1],s=r[2],u=r[3],_=new Float32Array(i*a*s*u*4),l=0,c=0;c<o;c++){var h=c/(a*u)|0,f=c%(a*u),p=h/s|0,T=h%s,E=f%a,d=f/a|0,g=p*(a*s*u)+E*(s*u)+T*u+d;_[l]=t.data[g],l+=4}t.data=_},padToFourDimShape:function(t){var e=[];if(4==t.length)e=t;else if(t.length<4){for(var n=0;n<4-t.length;n++)e.push(1);e=e.concat(t)}return e},convertDataToMultiDim:function(t,e){var n,r=function(t,e){for(var n=[],r=0,o=Math.ceil(t.length/e);r<o;r++)n.push(t.slice(r*e,(r+1)*e));return n},o=t,i=b(e);try{for(i.s();!(n=i.n()).done;){o=r(o,n.value)}}catch(t){i.e(t)}finally{i.f()}return o},nhwc2nchw:function(t,e){for(var n=e[0],r=e[1],o=e[2],i=e[3],a=o*i,s=r*o*i,u=[],_=0;_<n;_++)for(var l=0;l<i;l++)for(var c=0;c<r;c++)for(var h=0;h<o;h++)u.push(t[_*s+c*a+h*i+l]);return u},nchw2nhwc:function(t,e){for(var n=e[0],r=e[1],o=e[2],i=e[3],a=o*i,s=r*o*i,u=[],_=0;_<n;_++)for(var l=0;l<o;l++)for(var c=0;c<i;c++)for(var h=0;h<r;h++)u.push(t[_*s+h*a+l*i+c]);return u},stridePrint:function(t){var e=arguments.length>1&&void 0!==arguments[1]?arguments[1]:20,n=e;if(t.length<=n)this.continuousPrint(t,n);else{var r=[],o=Math.floor(t.length/n);0==o&&(o=1),n=Math.floor(t.length/o);for(var i=0;i<n;i++)r.push(i*o+": "+t[i*o])}},continuousPrint:function(t){var e=arguments.length>1&&void 0!==arguments[1]?arguments[1]:100,n=[],r=e;t.length<=r&&(r=t.length);for(var o=0;o<r;o++)n.push(o+": "+t[o])},softmax:function(t){for(var e=new Float32Array(t.length),n=t[0],r=0,o=0,i=1;i<t.length;i++)n<(r=t[i])&&(n=r);for(var a=0;a<t.length;a++)r=Math.exp(t[a]-n),e[a]=r,o+=r;for(var s=0;s<t.length;s++)e[s]=e[s]/o;return e},formatReadData:function(t,e){if(e.length<4){for(var n=[],r=0;r<4-e.length;r++)n.push(1);e=n.concat(e)}var o=e[0],i=e[1],a=e[2],s=e[3];if(o*a<=U&&i*s<=U)return t;for(var u=0,_=[],l=Math.ceil(s/4),c=0;c<o;c++)for(var h=0;h<i;h++)for(var f=0;f<a;f++)for(var p=0;p<s;p++)u=Math.floor(p/l)*l*(a-1)+p+f*l,u+=c*i*a*s+h*a*s,_.push(t[u]);return _},toPercent:function(t){return Number(100*t).toFixed(3)+"%"},transfer2Vec:function(t){if(t){var e=t.map((function(t){return"float(".concat(t,")")}));return"vec".concat(e.length,"(")+e.join(", ")+")"}},getNumbersShape:function(t){for(var e=[],n=t.length,r=0;r<n-1;r++){var o=t.slice(r+1).reduce((function(t,e){return t*e}));e.push(o)}return e.push(1),e},batchShape:function(t){if(t.length<4){for(var e=[],n=0;n<4-t.length;n++)e.push(1);t=e.concat(t)}return t},getShapeLen:function(t){var e,n=0,r=b(t);try{for(r.s();!(e=r.n()).done;){n+=e.value.reduce((function(t,e){return t*e}))}}catch(t){r.e(t)}finally{r.f()}return n},calRealAxis:function(t,e,n){for(var r=0,o=t.length;r<o;r++)if(t[r]!==e[r]){n=r;break}return n}},w=function(){function t(){var e=arguments.length>0&&void 
0!==arguments[0]?arguments[0]:{};l()(this,t),this.opts=e,this.isPacked=this.isPacked||!1,this.name=e.name,this.tensorId=e.type;var n=this.shape=e.shape;if(this.total=n.reduce((function(t,e){return t*e})),e.needBatch&&n.length<4){for(var r=[],o=0;o<4-n.length;o++)r.push(1);n=r.concat(n),this.shape=n}var i,a=L.getTextureInfoFromTensorShape(n,e.isPacked),s=a.offsetX,u=a.offsetY,_=a.exceedMax,c=(a.zeroNumber,a.shape);if(this.shape_texture=c,this.exceedMax=_,this.offsetX=s,this.offsetY=u,"image"===e.type||"x"===e.type)this.data=e.data;else if(e.data&&e.data.length){if(i=new Float32Array(e.data.length),e.notCompressed)this.shape_texture=[4,1,this.total/4],this.data=new Float32Array(e.data);else{n[0];var h=n[1],f=n[2],p=n[3];if(p){for(var T=0;T<e.data.length;T++){var E=T/(h*p)|0,d=T%(h*p),g=E/f|0,I=E%f,m=d%h,x=d/h|0,v=g*(h*f*p)+m*(f*p)+I*p+x;i[T]=e.data[v]}this.data=i}}e.data=null}}return h()(t,[{key:"getValue",value:function(){for(var t=arguments.length>0&&void 0!==arguments[0]?arguments[0]:[],e=[].concat(t),n=e.length,r=this.shape.length,o=0;o<r-n;o++)e.unshift(0);for(var i=0,a=0;a<r;a++)i+=e[a]*this.shapeNumbers[a];return this.data[i]}},{key:"dispose",value:function(){this.data&&(this.data=null)}},{key:"width_texture",get:function(){var t=this.shape_texture.length;return this.shape_texture[t-1]}},{key:"height_texture",get:function(){var t=this.shape_texture.length;return this.shape_texture[t-2]}},{key:"width_shape",get:function(){var t=this.shape.length;return this.shape[t-1]}},{key:"height_shape",get:function(){var t=this.shape.length;return this.shape[t-2]}},{key:"channel",get:function(){var t=this.shape.length;return t>=3?this.shape[t-3]:0}},{key:"offset_x",get:function(){return this.offsetX}},{key:"offset_y",get:function(){return this.offsetY}},{key:"limit",get:function(){return this.exceedMax?"Limit":""}},{key:"length_shape",get:function(){return this.shape.length||0}},{key:"numbers_shape",get:function(){for(var t=[],e=this.shape.length,n=0;n<e-1;n++){var r=this.shape.slice(n+1).reduce((function(t,e){return t*e}));t.push(r)}return t.push(1),t}},{key:"total_shape",get:function(){return this.total}}]),t}();function F(t,e){var n;if("undefined"==typeof Symbol||null==t[Symbol.iterator]){if(Array.isArray(t)||(n=function(t,e){if(!t)return;if("string"==typeof t)return D(t,e);var n=Object.prototype.toString.call(t).slice(8,-1);"Object"===n&&t.constructor&&(n=t.constructor.name);if("Map"===n||"Set"===n)return Array.from(t);if("Arguments"===n||/^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(n))return D(t,e)}(t))||e&&t&&"number"==typeof t.length){n&&(t=n);var r=0,o=function(){};return{s:o,n:function(){return r>=t.length?{done:!0}:{done:!1,value:t[r++]}},e:function(t){throw t},f:o}}throw new TypeError("Invalid attempt to iterate non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method.")}var i,a=!0,s=!1;return{s:function(){n=t[Symbol.iterator]()},n:function(){var t=n.next();return a=t.done,t},e:function(t){s=!0,i=t},f:function(){try{a||null==n.return||n.return()}finally{if(s)throw i}}}}function D(t,e){(null==e||e>t.length)&&(e=t.length);for(var n=0,r=new Array(e);n<e;n++)r[n]=t[n];return r}var 
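/* Lookup tables used when building op shader params and tensors (inferred from the code below):
   C lists attr keys whose [x, y] array values are split into *_x / *_y shader params,
   X lists per-tensor shape/texture attributes copied into the fragment-shader data,
   M maps op-specific attr names onto shader uniform names,
   B maps model input/output slot names onto internal tensor names,
   and k lists the per-op preprocessing steps invoked from buildTensor. */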
C=["paddings","strides","dilations","ksize"],X=["length_shape","width_shape","height_shape","width_texture","height_texture","offset_x","offset_y","limit","channel","total_shape"],M={scale:{bias:"bias_value",scale:"multi_value"},pool2d:{pooling_type:"type_pool"},pool2d_winograd:{pooling_type:"type_pool"}},B={input:"origin",image:"image",x:"origin",filter:"filter",y:"counter",z:"appender",output:"out",out:"out",outputbox:"out",scale:"scale",bias:"bias",mean:"mean",variance:"variance",w:"weight",priorbox:"priorbox",priorboxvar:"boxvar",targetbox:"targetbox",aspect_ratios:"aspect_ratios",min_sizes:"min_sizes",max_sizes:"max_sizes",bboxes:"origin",scores:"counter"},k={conv2d:["needBatch","adaptPaddings","isApplySeparableConv","batchComputeConv2d"],conv2d_transpose:["needBatch"],batchnorm:["needBatch","mergeTensor"],test:["needBatch"],elementwise_add:["processAxis","needBatch"],conv2d_elementwise_add:["mergeAttrs","setActiveFunc","needBatch"],pool2d:["isMax","needBatch","setPacked","isGlobalPooling"],relu:["transToPrelu","needBatch"],relu6:["transToRelu6","needBatch"],leaky_relu:["transToLeakyrelu","needBatch"],mul:["reshape","needBatch"],bilinear_interp:["needBatch"],nearest_interp:["needBatch"],reshape2:["needBatch","inferShape"],transpose2:["needBatch","setPerm"],concat:["normalizeDim","needBatch"],concat_multi:["needBatch","normalizeDim","normalizeDim2"],split:["normalizeDim","needBatch"],softmax:["needBatch"],sigmoid:["needBatch","transToSigmoid"],scale:["needBatch"],fc:["flattenShape","needBatch"],box_coder:["setCodeType","needBatch"],prior_box:["processInputs","needBatch"],multiclass_nms:["replaceToConnectOp","needBatch"],connect:["replaceToConnectOp","needBatch"]},W=function(){function t(e){var n=arguments.length>1&&void 0!==arguments[1]?arguments[1]:{},r=arguments.length>2&&void 0!==arguments[2]?arguments[2]:{},o=arguments.length>3&&void 0!==arguments[3]?arguments[3]:{};l()(this,t),this.realName=e,this.name=e,this.attrs=o,this.checkIsMerge(),this.isPass=this.checkIsPass(),this.isPass&&(this.input=n,this.output=r,this.data={active_function:"scale",multi_value:"1.0",bias_value:"0.0",fuse_relu:!1},this.inputTensors=[],this.outputTensors=[],this.fShaderParams=[],this.buildTensor(),this.buildShaderParams())}return h()(t,[{key:"adaptPaddings",value:function(){for(var t in this.attrs)if(this.attrs.hasOwnProperty(t)&&"paddings"===t){var e=this.attrs[t],n=o()(e,2),r=n[0],i=n[1];return void(0===r&&1===i&&(this.attrs[t][1]=0))}}},{key:"inferShape",value:function(){if("reshape2"==this.name){var t=this.input.X[0].shape;this.attrs.shape&&(this.attrs.new_shape=this.attrs.shape,delete this.attrs.shape);for(var e=this.attrs.new_shape,n=0;n<e.length;n++)0==e[n]&&(e[n]=t[n]);for(var r=1,o=0;o<t.length;o++)r*=t[o];for(var i=-1,a=0;a<e.length;a++)-1!=e[a]?r/=e[a]:i=a;-1!=i&&(e[i]=r),this.output.Out[0].shape=e}}},{key:"buildTensor",value:function(){var t=this,e=[];for(var n in this.input){if(!n.length)break;if(this.input.hasOwnProperty(n))try{var r=this.input[n]||[{}];B[n.toLowerCase()]&&(r[0].tensorName=B[n.toLowerCase()],e.push(r[0]))}catch(t){console.log(t)}}delete this.output.Y;var o=function(n){if(t.output.hasOwnProperty(n))try{var r=t.output[n]||[{}];B[n.toLowerCase()]&&r.forEach((function(t){t.tensorName=B[n.toLowerCase()],e.push(t)}))}catch(t){}};for(var i in this.output)o(i);(k[this.name]||[]).forEach((function(n){t[n](e)})),e.forEach((function(e){if(e){var n=null,r=e.tensorName;n=e.notTensor?{name:r,data:new Float32Array(e.data),total_shape:e.data.length}:new 
w({type:e.name,name:r,shape:e.shape,data:e.data,needBatch:e.needBatch||!1,notCompressed:e.notCompressed||!1,isPacked:e.isPacked||!1}),"out"===r?t.outputTensors.push(n):t.inputTensors.push(n)}}))}},{key:"buildShaderParams",value:function(){var t=this;for(var e in this.attrs)if(this.attrs.hasOwnProperty(e)){var n=this.attrs[e];if(Array.isArray(n)&&C.indexOf(e)>-1)this.data[e+"_x"]=n[0],this.data[e+"_y"]=n[1];else{this.data[e]=n;var r=M[this.name];r&&r.hasOwnProperty(e)&&(this.data[r[e]]=n)}}this.inputTensors.forEach((function(e){X.forEach((function(n){t.data[n+"_"+e.name]=e[n]}))})),this.outputTensors.forEach((function(e){var n=JSON.parse(JSON.stringify(t.data));X.forEach((function(t){n[t+"_"+e.name]=e[t]})),t.fShaderParams.push(n)}))}},{key:"needBatch",value:function(){var t=arguments.length>0&&void 0!==arguments[0]?arguments[0]:[];t.forEach((function(t){return t.needBatch=!0}))}},{key:"setPerm",value:function(){var t=this.attrs.axis,e=t.length;if(3==e)t==[2,0,1]?t=[1,2,0]:t==[1,2,0]&&(t=[2,0,1]);else if(4==e){for(var n=[0,0,0,0],r=0;r<4;r++)n[[t[r]]]=r;t=n}this.data.perm_0=0,this.data.perm_1=0,this.data.perm_2=0,this.data.perm_3=0,e>=1&&(this.data.perm_0=t[0]),e>=2&&(this.data.perm_1=t[1]),e>=3&&(this.data.perm_2=t[2]),e>=4&&(this.data.perm_3=t[3]),this.data.perm_size=e}},{key:"calPadding",value:function(){var t=this.attrs,e=t.strides,n=t.ksize,r=t.padding_algorithm;if(r&&"same"===r.toLowerCase()){var i=L.padToFourDimShape(this.output.Out[0].shape),a=o()(i,4),s=a[2],u=a[3],_=Array.isArray(e)?e:[e,e]||!1,l=o()(_,2),c=l[0],h=l[1],f=Array.isArray(n)?n:[n,n]||!1,p=o()(f,2),T=p[0],E=p[1],d=Math.round(Math.max(((u+c-1)/c-1)*c+T-u,0)/2),g=Math.round(Math.max(((s+h-1)/h-1)*h+E-s,0)/2);this.attrs.paddings=[d,g]}}},{key:"isGlobalPooling",value:function(){var t=arguments.length>0&&void 0!==arguments[0]?arguments[0]:[],e=t.filter((function(t){return"origin"===t.tensorName}))[0]||{},n=e.shape&&e.shape.length||0;n>2&&this.attrs.global_pooling&&(this.attrs.ksize=[e.shape[n-2],e.shape[n-1]])}},{key:"mergeAttrs",value:function(){this.attrs=this.attrs.reduce((function(t,e){return Object.assign(t,e)}),{})}},{key:"isApplyWinoGrad",value:function(){var t=arguments.length>0&&void 0!==arguments[0]?arguments[0]:[],e=t.filter((function(t){var e=o()(t.shape,4),n=(e[0],e[1],e[2]),r=e[3];return 3===n&&3===r&&"filter"===t.tensorName}));e&&e.length&&(this.setPacked(t),this.applyWinograd(t),this.setOutputPacked(t),this.name+="_winograd")}},{key:"isApplySeparableConv",value:function(){var t,e=arguments.length>0&&void 0!==arguments[0]?arguments[0]:[],n=this.attrs.groups,r=!1,i=e.filter((function(e){var i=e.shape,a=e.tensorName;"bias"===a&&(r=!0);var s=o()(i,4),u=s[0],_=s[1];s[2],s[3];return r||t||"out"!==a||(t=_),u===n&&1===_&&"filter"===e.tensorName}));i&&i.length&&(this.name+="_depthwise"),!r&&e.push({name:"conv1_scale_offset",needBatch:!0,persistable:!0,shape:[t],data:Array.from(new Float32Array(t),(function(t){return 0})),tensorName:"bias"})}},{key:"batchComputeConv2d",value:function(){var t=this.input.Filter[0].shape[1];this.attrs.filter_nearest_vec4=4*Math.floor(t/4),this.attrs.filter_remainder_vec4=t%4}},{key:"setPacked",value:function(){var t=this,e=arguments.length>0&&void 0!==arguments[0]?arguments[0]:[],n=this.attrs.ispacked;e.forEach((function(e){"origin"===e.tensorName&&n&&(e.isPacked=!0,t.name.indexOf("pool")>-1&&(t.name+="_winograd"))}))}},{key:"applyWinograd",value:function(){var t=arguments.length>0&&void 0!==arguments[0]?arguments[0]:[];t.forEach((function(t){if("filter"===t.tensorName){var 
e=o()(t.shape,4),n=e[0],r=e[1];e[2],e[3];t.shape=[n,r,4,4],t.data=L.applyFilterWinograd(t.data,t.shape)}}))}},{key:"setOutputPacked",value:function(){var t=arguments.length>0&&void 0!==arguments[0]?arguments[0]:[];t.forEach((function(t){"out"===t.tensorName&&(t.isPacked=!0)}))}},{key:"isMax",value:function(){var t="max"===this.attrs.pooling_type?1:0;this.attrs.pooling_type=t,1===t&&(this.name+="_max")}},{key:"transToPrelu",value:function(){this.data.multi_value="0.0",this.data.active_function="prelu"}},{key:"transToRelu6",value:function(){this.data.multi_value=this.attrs.threshold,this.data.active_function="relu6"}},{key:"transToLeakyrelu",value:function(){this.data.multi_value=this.attrs.alpha,this.data.active_function="leakyRelu",this.name="relu"}},{key:"transToSigmoid",value:function(){this.data.active_function="sigmoid"}},{key:"setActiveFunc",value:function(){"leaky_relu"===this.realName.replace("conv2d-elementwise_add-","")&&(this.data.multi_value=this.attrs.alpha,this.data.active_function="leakyRelu")}},{key:"normalizeDim",value:function(){for(var t=L.padToFourDimShape(this.input.X[0].shape),e=L.padToFourDimShape(this.output.Out[0].shape),n=this.attrs.axis,r=0,o=t.length;r<o;r++)if(t[r]!==e[r]){n=r;break}for(var i=[],a=0;a<t[n];a++)i[a]=a;this.attrs.target_length=i.length,this.attrs.target_value=i,this.attrs.inputs_dim=t[n],this.attrs.dim=4-t.length+n}},{key:"normalizeDim2",value:function(){var t=L.padToFourDimShape(this.input.Y[0].shape),e=this.attrs.axis>-1?this.attrs.axis:t.length+this.attrs.axis;this.attrs.append_num=t[e]}},{key:"processDim",value:function(){if(-1!==this.attrs.axis){var t=this.input.X[0].shape;this.attrs.axis+=4-t.length}}},{key:"processAxis",value:function(){var t=this.input.X[0].shape,e=this.input.Y[0].shape,n=this.attrs.axis;this.attrs.axis=-1==n?t.length-e.length:4-e.length-n}},{key:"flattenShape",value:function(){var t=arguments.length>0&&void 0!==arguments[0]?arguments[0]:[],e=t.find((function(t){return t.shape.length>2}));if(e){var n=L.padToFourDimShape(e.shape);e.shape=[n[0]*n[2],n[1]*n[3]]}}},{key:"reshape",value:function(){var t=arguments.length>0&&void 0!==arguments[0]?arguments[0]:[],e=t.find((function(t){return"origin"===t.tensorName})),n=t.find((function(t){return"counter"===t.tensorName})),r=t.find((function(t){return"out"===t.tensorName||"output"===t.tensorName}));if(n.shape.length>e.shape.length&&(S()("input"),e=n,S()("counter"),n=e),e.shape.length>2&&2===n.shape.length){var o=L.getReshapeInPaddle(e.shape,n.shape,r.shape);e.shape=o}}},{key:"mergeTensor",value:function(){var t=arguments.length>0&&void 0!==arguments[0]?arguments[0]:[],e=["scale","bias","variance","mean"],n={};t.forEach((function(t,e){n[t.tensorName]=t,n[t.tensorName+"Index"]=e}));for(var r=0;r<e.length;r++)t[n[e[r]+"Index"]].data=n[e[r]].data}},{key:"setCodeType",value:function(){this.attrs.normalized=!!this.attrs.box_normalized,delete this.attrs.box_normalized,this.name="box_coder_"+(this.attrs.code_type&&"decode_center_size"===this.attrs.code_type?"decode":"encode")}},{key:"expandAspectRatios",value:function(t,e){var n,r=[1],o=F(t);try{for(o.s();!(n=o.n()).done;){var i,a=n.value,s=!1,u=F(r);try{for(u.s();!(i=u.n()).done;){var _=i.value;if(Math.abs(a-_)<1e-6){s=!0;break}}}catch(t){u.e(t)}finally{u.f()}s||(r.push(a),e&&r.push(1/a))}}catch(t){o.e(t)}finally{o.f()}return r.filter((function(t){return Math.abs(t-1)>=1e-6}))}},{key:"genArrCodeOfIndex",value:function(t,e){var n=arguments.length>2&&void 0!==arguments[2]?arguments[2]:"float",r="get"+e.split("_").map((function(t){return 
t[0].toUpperCase()+t.slice(1)})).join("");if(!t)return{code:"void ".concat(r,"() {}\n"),len:0};Array.isArray(t)||(t=[t]);var o=t.length,i="".concat(n," ").concat(e,"[").concat(o,"];")+"\n";i+="float ".concat(r,"(int index) {\n");for(var a=0;a<o;a++)i=i+"if (".concat(a," == index) { return ").concat(n,"(").concat(t[a],");}")+"\n";return{code:i+="}",len:o}}},{key:"genArrCode",value:function(t,e){var n=arguments.length>2&&void 0!==arguments[2]?arguments[2]:"float",r="get"+e.split("_").map((function(t){return t[0].toUpperCase()+t.slice(1)})).join("");if(!t)return{code:"void ".concat(r,"() {}\n"),len:0};Array.isArray(t)||(t=[t]);var o=t.length,i="".concat(n," ").concat(e,"[").concat(o,"];")+"\n";i+="void ".concat(r,"() {\n");for(var a=0;a<o;a++)i=i+"".concat(e,"[").concat(a,"] = ").concat(n,"(").concat(t[a],");")+"\n";return{code:i+="}",len:o}}},{key:"processInputs",value:function(){var t=this.attrs,e=t.min_sizes,n=t.max_sizes,r=t.aspect_ratios,o=t.step_w,i=t.step_h,a=t.step,s=t.min_max_aspect_ratios_order,u=t.variance,_=t.variances,l=t.offset,c=t.flip;r=this.expandAspectRatios(r,c);var h=this.genArrCodeOfIndex(e,"min_sizes"),f=h.code,p=h.len,T=this.genArrCodeOfIndex(n,"max_sizes"),E=T.code,d=T.len,g=this.genArrCodeOfIndex(r,"aspect_ratios"),I=g.code,m=g.len,x=this.genArrCodeOfIndex(u||_||[.1,.1,.2,.2],"variance").code;a&&2===a.length&&(o=a[0],i=a[1]);var v=this.input.Image[0].shape,R=v.length,H=this.input.Input[0].shape,O=H.length,A=[v[R-1],v[R-2]],N=A[0],y=A[1],P=[H[O-1],H[O-2]];o&&i||(o=N/P[0],i=y/P[1]),Object.assign(this.attrs,{min_len:p,min_sizes_code:f,max_len:d,max_sizes_code:E,aspect_len:m,aspect_ratios_code:I,min_max_aspect_order:s,step_w:"float(".concat(o||0,")"),step_h:"float(".concat(i||0,")"),offsets:"float(".concat(l||0,")"),variance_code:x,epsilon:1e-6}),delete this.attrs.min_max_aspect_ratios_order,delete this.attrs.min_sizes,delete this.attrs.max_sizes,delete this.attrs.aspect_ratios,delete this.attrs.variance,delete this.attrs.step,delete this.attrs.offset}},{key:"replaceToConnectOp",value:function(){for(var t=this.input,e=0,n=Object.keys(t);e<n.length;e++){var r=n[e],o=t[r]&&t[r][0],i=o.shape,a="numbers_shape_".concat(o.tensorName);this.attrs[a]=this.genArrCode(L.getNumbersShape(L.batchShape(i)),a,"int").code}this.name="connect"}},{key:"checkIsMerge",value:function(){return!!(this.name.indexOf("conv2d-elementwise_add")>-1&&Array.isArray(this.attrs))&&(this.name="conv2d_elementwise_add",!0)}},{key:"checkIsPass",value:function(){return"dropout"===this.name?"downgrade_in_infer"===this.attrs.dropout_implementation&&(this.name="scale",this.attrs.scale=this.attrs.dropout_prob,this.attrs.bias=0,!0):("depthwise_conv2d"===this.name&&(this.name="conv2d"),!0)}},{key:"dispose",value:function(){for(var t in this.input=null,this.output=null,this.attrs=null,this.tensor)this.tensor[t].dispose();this.tensor={}}}]),t}();function V(t,e){var n;if("undefined"==typeof Symbol||null==t[Symbol.iterator]){if(Array.isArray(t)||(n=function(t,e){if(!t)return;if("string"==typeof t)return j(t,e);var n=Object.prototype.toString.call(t).slice(8,-1);"Object"===n&&t.constructor&&(n=t.constructor.name);if("Map"===n||"Set"===n)return Array.from(t);if("Arguments"===n||/^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(n))return j(t,e)}(t))||e&&t&&"number"==typeof t.length){n&&(t=n);var r=0,o=function(){};return{s:o,n:function(){return r>=t.length?{done:!0}:{done:!1,value:t[r++]}},e:function(t){throw t},f:o}}throw new TypeError("Invalid attempt to iterate non-iterable instance.\nIn order to be iterable, non-array 
objects must have a [Symbol.iterator]() method.")}var i,a=!0,s=!1;return{s:function(){n=t[Symbol.iterator]()},n:function(){var t=n.next();return a=t.done,t},e:function(t){s=!0,i=t},f:function(){try{a||null==n.return||n.return()}finally{if(s)throw i}}}}function j(t,e){(null==e||e>t.length)&&(e=t.length);for(var n=0,r=new Array(e);n<e;n++)r[n]=t[n];return r}function z(t,e){var n=Object.keys(t);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(t);e&&(r=r.filter((function(e){return Object.getOwnPropertyDescriptor(t,e).enumerable}))),n.push.apply(n,r)}return n}function Y(t){for(var e=1;e<arguments.length;e++){var n=null!=arguments[e]?arguments[e]:{};e%2?z(Object(n),!0).forEach((function(e){p()(t,e,n[e])})):Object.getOwnPropertyDescriptors?Object.defineProperties(t,Object.getOwnPropertyDescriptors(n)):z(Object(n)).forEach((function(e){Object.defineProperty(t,e,Object.getOwnPropertyDescriptor(n,e))}))}return t}function q(t,e,n){for(var r=[],o=0,i=t.length;o<i;o++){var a=t[o];a>e&&r.push({score:a,i:o})}return r.sort((function(t,e){return t.score-e.score})).slice(0,n)}function Z(t){var e=o()(t,4),n=e[0],r=e[1];return(e[2]-n)*(e[3]-r)}function K(t,e,n){var r=Z(t),i=Z(e),a=function(t,e){var n=o()(t,4),r=n[0],i=n[1],a=n[2],s=n[3],u=o()(e,4),_=u[0],l=u[1],c=u[2],h=u[3],f=Math.max(r,_),p=Math.max(i,l);return(Math.min(a,c)-f)*(Math.min(s,h)-p)}(t,e);return a/(r+i-a)>n}var Q={multiclass_nms:function(t,e){for(var n=t.BBoxes,r=t.Scores,o=n[0],i=r[0],a=e.nms_top_k,s=void 0===a?100:a,u=(e.nms_eta,e.nms_threshold),_=void 0===u?.44999998807907104:u,l=e.keep_top_k,c=void 0===l?100:l,h=e.background_label,f=void 0===h?0:h,p=e.score_threshold,T=void 0===p?.25:p,E=[],d=0,g=i.length;d<g;d++)if(d!==f)for(var I=q(i[d],T,s),m=I.pop();I.length;){var v=o[m.i];E.push(Y(Y({},m),{},{box:v,label:d}));var R,H=V(x()(I));try{for(H.s();!(R=H.n()).done;){var O=R.value.i;K(v,o[O],_)&&I.splice(O,1)}}catch(t){H.e(t)}finally{H.f()}m=I.pop()}return E.sort((function(t,e){return e.score-t.score})).slice(0,c).map((function(t){return[t.label,t.score].concat(x()(t.box))}))}};function J(t,e){var n=Object.keys(t);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(t);e&&(r=r.filter((function(e){return Object.getOwnPropertyDescriptor(t,e).enumerable}))),n.push.apply(n,r)}return n}function $(t){for(var e=1;e<arguments.length;e++){var n=null!=arguments[e]?arguments[e]:{};e%2?J(Object(n),!0).forEach((function(e){p()(t,e,n[e])})):Object.getOwnPropertyDescriptors?Object.defineProperties(t,Object.getOwnPropertyDescriptors(n)):J(Object(n)).forEach((function(e){Object.defineProperty(t,e,Object.getOwnPropertyDescriptor(n,e))}))}return t}var tt=new N({}),et=tt.getOpConfs(),nt=!1,rt=function(){function t(e){l()(this,t),this.version="0.0.1",this.handler="io.IOHandler",this.weightMap="",this.options=e||{},this.feed=null,this.index=0,this.feedOp=null,this.fetchOp=null,this.postOps=[],this.multiOutputInfo=null,this.feedItem=null,this.test=!1,this.formatLayout="NCHW",this.isExecuted=!1,this.iLayer=0,this.queryList=[],this.glVersion=2,(nt=T.a.env().debug)&&(window.layerName=""),this.options&&this.options.options&&(!0===this.options.options.test&&(this.test=!0),this.options.options.formatLayout&&(this.formatLayout=this.options.options.formatLayout)),this.inst||(this.inst=new 
y(this.options.options),this.glVersion=this.inst.getWebglVersion(),tt.setWebglVersion(this.glVersion),tt.setIsFrameBufferSupportFloat(this.inst.getIsFrameBufferSupportFloat()),tt.setIsFloatTextureReadPixelsEnabled(this.inst.getIsFloatTextureReadPixelsEnabled()),L.setTextureMaxSize(this.inst.getWebglMaxTextureSize()))}return h()(t,[{key:"buildOpData",value:function(t,e){var n=this,r=this.constructExecutor(t),o=new W(t.type,r.inputs,r.outputs,r.attrs),i=o.name;o.program=[],o.program=o.outputTensors.map((function(t,r){var a=tt.buildShader(i,o.fShaderParams[r],r,e);return o.fsCode=a,n.inst.createProgram(a,t,e)})),o.renderData=et[i].map((function(e){var r=Object.assign({},e),i=o.inputTensors.find((function(t){return t.name===r.tensor}));return"texture"===r.type?(r.tensorId=i.opts.type,r.data=i.data,n.feedOp.id===t.id&&"origin"===r.tensor&&(r.shape=i.shape,n.feedItem=r),r.width_texture=i.width_texture,r.height_texture=i.height_texture,r.channel=i.channel):"uniform"===r.type&&(r.data=i[r.variable]),r})),o.iLayer=this.iLayer++,t.opData=o}},{key:"execute_",value:function(t){if("fetch"!==t.type){var e=this.inst.gpu.gl,n=L.beginQuery(e,this.glVersion);if(t.execute(this.inst,this.isExecuted),n&&(this.queryList.push({name:t.type,query:n,count:1}),n=L.endQuery(e,this.glVersion,n)),nt&&t&&t.opData&&t.opData.outputTensors&&t.opData.outputTensors[0]&&t.opData.outputTensors[0].tensorId&&t.opData.outputTensors[0].tensorId===(window&&window.layerName||""))console.log(t.type,window.layerName,"return!");else if(t.next){var r=t.next,o=this.getTensor(r);this.execute_(o[0])}}}},{key:"execute",value:function(t){this.feed=t;var e=this.getNetsStart(this.weightMap);return this.inst||(this.inst=y.init({width_raw_canvas:512,height_raw_canvas:512})),this.isExecuted&&this.updateFeed(),this.queryList=[],this.execute_(e[0]),this.isExecuted=!0,this.inst}},{key:"updateFeed",value:function(){this.feed.input&&this.feed.input[0]&&this.feed.input[0].data&&(this.feedItem.data=this.feed.input[0].data)}},{key:"predict",value:function(t,e){return this.execute_(t,!0,this.outputNodes)}},{key:"getShape",value:function(t){var e=this.getTensorAttr(t)[0].shape;return L.padToFourDimShape(e)}},{key:"getRealShape",value:function(t){return this.getTensorAttr(t)[0].shape}},{key:"getTensorAttr",value:function(t){return"fetch"===t?[{name:t}]:this.data.vars.filter((function(e,n){if(t===e.name)return e}))}},{key:"constructExecutor",value:function(t){var e=this,n=t.inputsName[0],r=t.inputs,i=t.outputs;return Object.keys(i).forEach((function(t){i[t].forEach((function(n,r){i[t][r]=e.getTensorAttr(n)[0]}))})),Object.keys(r).forEach((function(i){if(e.test)r[i]=e.getTensorAttr(r[i][0]),e.feedOp=t;else if("Input"===i&&"pixel"===n)r[i]=e.feed.input,e.feedOp=t;else if("Input"!==i||"image"!==n&&"x"!==n)if("X"===i&&r[i].length>1&&"connect"!==t.type){var a=o()(r[i],3),s=a[0],u=a[1],_=a[2];r.X=e.getTensorAttr(s),u&&(r.Y=e.getTensorAttr(u)),_&&(r.Z=e.getTensorAttr(_),t.type+="_multi")}else r[i]=e.getTensorAttr(r[i][0]);else r[i]=e.feed.input,e.feedOp=t})),{inputs:r,outputs:i,attrs:t.attrs,type:t.type,next:t.next}}},{key:"constructOpsMap",value:function(t){var e=this;return t.map((function(n,r){var o=n.outputsName[0],i=e.getNextExecutor(t,o);return i.length>0&&(n.next=i[0].id),n}))}},{key:"arrangeMap",value:function(t){var e={},n=[],r={},o=t;return 
o.forEach((function(t,n){t.outputsName.forEach((function(t,n){e[t]=!0}))})),o.forEach((function(t,o){n[o]=0,r[t.id]=o,t.inputsName.length>1?t.inputsName.forEach((function(t,r){1==e[t]&&n[o]++})):n[o]=t.inputsName.length})),this.topoSort(t,n,r),t}},{key:"topoSort",value:function(t,e,n){var r=[];r.push(t[0]);for(var o=t.slice(0),i=null,a=t[0];r.length>0;){null!=i&&(t[n[i.id]].next=a.id),i=a,a=r.pop();for(var s=0;s<a.outputsName.length;s++)for(var u=0;u<o.length;u++)for(var _=0;_<o[u].inputsName.length;_++)if(o[u].inputsName[_]==a.outputsName[s]&&(e[n[o[u].id]]--,0==e[n[o[u].id]])){r.push(t[n[o[u].id]]),o.splice(u,1),u--;break}}}},{key:"getNetsStart",value:function(t){return t.filter((function(t){if("feed"===t.type)return!0}))}},{key:"getNetsEnd",value:function(t){return t.filter((function(t){if("fetch"===t.type)return!0}))}},{key:"getTensor",value:function(t){return this.weightMap.filter((function(e,n){if(t===e.id)return e}))}},{key:"formatWeight",value:function(t){"NHWC"===this.formatLayout&&t.map((function(t){t.data&&t.shape&&(t.data=L.nhwc2nchw(t.data,t.shape))}))}},{key:"createOpsMap",value:function(t){var e=this,n=this.options.multiOutputConfig||{},r=n.deleteOpType,o=n.inputs,i=null,a=this.separateOps(t);if(a=t.map((function(t,n){t.idx=n,t.type===r&&(i=n);var o=new v(t);if(Q[t.type]){var a=e.configurePostOp(o);e.postOps.push(a)}return o})),i&&a.splice(i),o){var s=this.createMultiOutputOp(o,a.length);if(s)return a.concat(s)}return a}},{key:"separateOps",value:function(t){for(var e=["concat"],n=0,r=t.length;n<r;n++){var o=t[n];if(e.includes(o.type)){var i=Object.keys(o.inputs),a=Object.keys(o.outputs);if(1!==i.length||1!==a.length||!o.outputs[a[0]])continue;var s=o.inputs[i[0]],u=o.outputs[a[0]][0],_=s.length;if(_<3)continue;for(var l=[],c=s[0],h=x()(this.getRealShape(c)),f=1;f<_;f++){var T=s[f],E=u;if(f!==_-1){E=T+"_separate_tmp";var d=this.getRealShape(T),g=o.attrs.axis;g=g>-1?g:4+g,h.splice(g,1,h[g]+d[g]),this.data.vars.push({name:E,shape:x()(h)})}l.push($($({},o),{},{inputs:p()({},i,[c,T]),outputs:p()({},a,[E])})),c=E}t.splice.apply(t,[n,1].concat(l)),n=n+l.length-1,r=r+l.length-1}}return t}},{key:"createMultiOutputOp",value:function(t,e){if(!(t.length<2)){for(var n=x()(t),r=this.multiOutputInfo={},o=[],i=n.shift(),a=r[i]=this.getShape(i),s=0;n.length;){var u=n.shift(),_=n[0],l=_?_+"_connect_prefix":"fetchout",c={X:[i],Y:[u]};o.push(new v({attrs:{},inputs:$({},c),outputs:{Out:[l]},type:"connect",idx:s+e}));var h=this.getShape(u),f=L.getShapeLen([a,h]);r[u]=h,this.data.vars.push({name:l,shape:[1,1,1,f]}),i=l,a=[1,1,1,f],s++}return o}console.error("Only one output found; a multi-output op requires at least 2 outputs")}},{key:"configurePostOp",value:function(t){var e=this;function n(t){for(var n={},r=0,o=Object.keys(t);r<o.length;r++){var i=o[r],a=t[i][0];n[a]={shape:e.getTensorAttr(a)[0].shape,key:i}}return n}var r=t.inputs,o=t.outputs;return{inputs:n(r),outputs:n(o),attrs:t.attrs,type:t.type}}},{key:"getNextExecutor",value:function(t,e){return t.filter((function(t,n){for(var r=0;r<t.inputsName.length;r++)if(e===t.inputsName[r])return!0}))}},{key:"dispose",value:function(){this.executor.dispose()}}]),t}(),ot=function(){function t(e){l()(this,t),this.version="0.0.1",this.loader="",this.options=e,this.graph="",this.multipart=!1,this.feed=null,this.fetchAll=null,this.index=0,this.feedOp=null,this.feedItem=null,this.test=!1,this.isExecuted=!1,this.iLayer=0,this.params={type:"fetch"},this.environment=T.a}var e;return h()(t,[{key:"load",value:(e=u()(a.a.mark((function t(){var e,n,r,o,i;return 
a.a.wrap((function(t){for(;;)switch(t.prev=t.next){case 0:if(null!==this.options){t.next=2;break}throw new Error("modelConfig in loadGraphModel() cannot be null. Please provide a url or an IOHandler that loads the model");case 2:return e=T.a.env(),n=e.getCache,r=e.setCache,o=n&&n("artifacts"),i=o||new I(this.options.urlConf,this.options.options),t.next=8,i.load();case 8:return!o&&r&&r(i,"artifact"),this.preGraph(i),t.abrupt("return",this);case 11:case"end":return t.stop()}}),t,this)}))),function(){return e.apply(this,arguments)})},{key:"preGraph",value:function(t){var e=new rt(this.options);window&&(window.graph=e),this.graph=e,this.graph.data=t.data,this.graph.formatWeight(this.graph.data.vars);var n=this.graph.createOpsMap(this.graph.data.ops),r=this.graph.constructOpsMap(n),o=this.graph.arrangeMap(r);this.graph.weightMap=o}},{key:"execute",value:function(t){var e=this;if(this.feed=this.graph.feed=t,!this.graph.isExecuted){var n=this.graph.weightMap.length;this.graph.weightMap.forEach((function(t,r){var o=t.type;if("feed"!==o&&"fetch"!==o){var i=r===n-2;e.graph.buildOpData(t,i)}}))}return this.graph.execute(t),this.graph.inst}},{key:"updateFeed",value:function(){this.graph.feedItem.data=this.graph.feed.input[0].data}},{key:"dispose",value:function(){this.graph.dispose()}}]),t}(),it=n(10),at=n.n(it),st=n(11),ut=n.n(st),_t=n(9),lt=n.n(_t),ct=function(){function t(e){l()(this,t);var n=e.inputShape,r=e.outputShape,o=e.attrs;this.options=e,this.inputShape=n,this.outputShape=r,this.attrs=o,this.isEntry=!1,this.isLast=!1,this.inputTexture=null,this.outputTexture=null,this.inputTextureLoc=null,this.outputTextureLoc=null,this.program=null,this.pos=[1,1],this.scale=[1,1]}return h()(t,[{key:"getFshaderSource",value:function(t){var e=t;if(this.attrs)for(var n=0,r=Object.keys(this.attrs);n<r.length;n++){var o=r[n],i=this.attrs[o];e=e.replace(o.toUpperCase(),i)}return e}},{key:"update",value:function(){}}]),t}();function ht(t){var e=function(){if("undefined"==typeof Reflect||!Reflect.construct)return!1;if(Reflect.construct.sham)return!1;if("function"==typeof Proxy)return!0;try{return Date.prototype.toString.call(Reflect.construct(Date,[],(function(){}))),!0}catch(t){return!1}}();return function(){var n,r=lt()(t);if(e){var o=lt()(this).constructor;n=Reflect.construct(r,arguments,o)}else n=r.apply(this,arguments);return ut()(this,n)}}var ft=function(t){at()(n,t);var e=ht(n);function n(t){var r;return l()(this,n),(r=e.call(this,t)).name="resize",r.needScale=!0,r.scaleLoc=null,r.fshaderSource=r.getFshaderSource("\n #ifdef GL_ES\n precision mediump float;\n #endif\n uniform sampler2D entry;\n varying vec2 v_TexCoord;\n void main() {\n gl_FragColor = texture2D(entry, v_TexCoord);\n }"),r}return h()(n,[{key:"update",value:function(t,e){this.resetPos(t,e)}},{key:"resetPos",value:function(t,e){var n=this.options,r=n.targetSize,o=n.scale;n.inputShape,n.outputShape;o&&r?this.resizeAndFitTargetSize(t,e):r?this.fitToTargetSize(t,e):this.resize(t,e)}},{key:"resizeAndFitTargetSize",value:function(t,e){var n=this.options,r=n.targetSize,i=n.scale,a=n.outputShape,s=o()(a,2),u=s[0],_=s[1],l=t,c=e;t<e?(l=i,c=Math.round(l*e/t)):(c=i,l=Math.round(c*t/e));r.width,r.height;this.scale=[l/u,c/_]}},{key:"fitToTargetSize",value:function(t,e){var 
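/* fitToTargetSize: scales the source image (t = width, e = height) so it fits inside
   options.targetSize while keeping its aspect ratio, then stores the result as scale
   factors relative to outputShape (behaviour inferred from the code that follows). */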
n=this.options,r=n.targetSize,i=n.outputShape,a=r.width,s=r.height,u=o()(i,2),_=u[0],l=u[1],c=a,h=s;c/h*e/t>=1?(c=Math.round(h*t/e),Math.floor((a-c)/2)):(h=Math.round(c*e/t),Math.floor((s-h)/2)),this.scale=[c/_,h/l],this.options.center||(this.pos=this.scale)}},{key:"resize",value:function(t,e){var n=this.options,r=n.scale,i=n.outputShape,a=o()(i,2),s=a[0],u=a[1],_=t,l=e;t<e?(_=r||t,l=Math.round(_*e/t)):t>e?(l=r||e,_=Math.round(l*t/e)):_=l=r||t,this.scale=[_/s,l/u]}}]),n}(ct);function pt(t){var e=function(){if("undefined"==typeof Reflect||!Reflect.construct)return!1;if(Reflect.construct.sham)return!1;if("function"==typeof Proxy)return!0;try{return Date.prototype.toString.call(Reflect.construct(Date,[],(function(){}))),!0}catch(t){return!1}}();return function(){var n,r=lt()(t);if(e){var o=lt()(this).constructor;n=Reflect.construct(r,arguments,o)}else n=r.apply(this,arguments);return ut()(this,n)}}var Tt=function(t){at()(n,t);var e=pt(n);function n(t){var r;return l()(this,n),(r=e.call(this,t)).name="format",r.normalizeAttrs(),r.fshaderSource=r.getFshaderSource("\n #ifdef GL_ES\n precision mediump float;\n #endif\n uniform sampler2D entry;\n varying vec2 v_TexCoord;\n const int width = WIDTH;\n const vec4 mean = MEAN;\n const vec4 std = STD;\n\n float getData(vec4 color, int id) {\n for (int i = 0; i < 3; i++) {\n if (i == id) {\n return (color[i] - mean[i]) / std[i];\n // return (color[i] - mean[i]);\n }\n }\n }\n\n void main() {\n float outPadding = 1.0 / float(width) / 2.0;\n float inWidth = float(width) / 3.0;\n float inPadding = 1.0 / inWidth / 2.0;\n float x = ceil((v_TexCoord.x + outPadding) * inWidth) / inWidth - inPadding;\n vec4 color = texture2D(entry, vec2(x, v_TexCoord.y));\n\n int channel = int(mod(v_TexCoord.x * float(width), 3.0));\n gl_FragColor.r = getData(color, channel);\n }"),r}return h()(n,[{key:"processMeanOrStd",value:function(t,e){var n=t;return t||(n="mean"===e?[0,0,0,0]:[1,1,1,1]),3===t.length&&n.push("mean"===e?0:1),"vec4(".concat(n.map((function(t){return"float(".concat(t,")")})).join(","),")")}},{key:"normalizeAttrs",value:function(){var t=this.attrs;t.mean=this.processMeanOrStd(t.mean,"mean"),t.std=this.processMeanOrStd(t.std,"std")}}]),n}(ct);n(18);function Et(t,e){var n;if("undefined"==typeof Symbol||null==t[Symbol.iterator]){if(Array.isArray(t)||(n=function(t,e){if(!t)return;if("string"==typeof t)return dt(t,e);var n=Object.prototype.toString.call(t).slice(8,-1);"Object"===n&&t.constructor&&(n=t.constructor.name);if("Map"===n||"Set"===n)return Array.from(t);if("Arguments"===n||/^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(n))return dt(t,e)}(t))||e&&t&&"number"==typeof t.length){n&&(t=n);var r=0,o=function(){};return{s:o,n:function(){return r>=t.length?{done:!0}:{done:!1,value:t[r++]}},e:function(t){throw t},f:o}}throw new TypeError("Invalid attempt to iterate non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method.")}var i,a=!0,s=!1;return{s:function(){n=t[Symbol.iterator]()},n:function(){var t=n.next();return a=t.done,t},e:function(t){s=!0,i=t},f:function(){try{a||null==n.return||n.return()}finally{if(s)throw i}}}}function dt(t,e){(null==e||e>t.length)&&(e=t.length);for(var n=0,r=new Array(e);n<e;n++)r[n]=t[n];return r}var gt={resize:ft,format:Tt},It=function(){function t(e,n,r,o,i){l()(this,t),this.options=Object.assign({},i);var 
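/* Preprocessing pipeline (class It): renders the input through the configured WebGL
   stages (e.g. resize) plus a final "format" stage that applies mean/std normalization,
   producing the texture that is fed to the model (inferred from this constructor and
   initPipeLine below). */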
a=this.options,s=a.mean,u=a.std,_=a.shape;this.fillColor=i.fill?this.getFillColor(i.fill):[0,0,0,1],this.pipe=o,this.pipe.push({name:"format",options:{outputShape:_,attrs:{mean:s,std:u,width:3*_[0]}}}),this.gl=e,2===(this.glVersion=n)?(this.textureFloat=e.getExtension("EXT_color_buffer_float"),this.internalFormat=e.R16F,this.textureFormat=e.RED,this.downloadInternalFormat=e.RGBA16F):(this.internalFormat=e.RGBA,this.textureFormat=e.RGBA,this.downloadInternalFormat=e.RGBA,this.textureHalfFloat=e.getExtension("OES_texture_half_float")),this.frameBufferSupportFloat=r,this.initPipeLine()}var e;return h()(t,[{key:"initPipeLine",value:function(){var t=this,e=this.gl;this.framebuffer=e.createFramebuffer(),e.bindFramebuffer(e.FRAMEBUFFER,this.framebuffer);var n=this.pipe;if(n&&n.length){var r,o,i=n.length;this.pipeLine=n.map((function(e,n){var a,s=e.name,u=e.options;if(r=new gt[s](u),0===n?(r.isEntry=!0,o=r):n===i-1&&(r.isLast=!0),r.inputShape=o.outputShape,t.initProgram(r),a=t.initTexture(r,r.outputShape),r.outputTexture=a.texture,r.outputTextureLoc=a.loc,r.isLast&&(t.outputTexture=r.outputTexture),r.isEntry)a=t.initTexture(r,r.inputShape||[]),r.inputTexture=a.texture,r.inputTextureLoc=a.loc;else{var _=o,l=_.inputTexture,c=_.inputTextureLoc;r.inputTexture=l,r.inputTextureLoc=c}return o=r,r}))}}},{key:"initShaders",value:function(t,e){var n=this.gl,r=this.createProgram(t,e);return r?(n.useProgram(r),n.program=r,r):(console.log("Failed to create program"),!1)}},{key:"createProgram",value:function(t,e){var n=this.gl,r=this.loadShader(n,n.VERTEX_SHADER,t),o=this.loadShader(n,n.FRAGMENT_SHADER,e);if(!r||!o)return null;var i=n.createProgram();return i?(n.attachShader(i,r),n.attachShader(i,o),n.linkProgram(i),i):null}},{key:"initProgram",value:function(t){var e=this.gl,n=this.initShaders("\n attribute vec4 a_Position;\n varying vec2 v_TexCoord;\n uniform vec2 u_scale;\n uniform vec2 u_pos;\n void main() {\n gl_Position = a_Position * vec4(vec2(u_scale), 1, 1) - (1.0 - vec4(vec2(u_pos), 1, 1));\n v_TexCoord.x = (a_Position.x + 1.0) / 2.0;\n v_TexCoord.y = (a_Position.y + 1.0) / 2.0;\n }",t.fshaderSource);if(n){t.program=n,this.initVertexBuffers(e);var r=t.scaleLocation=e.getUniformLocation(n,"u_scale");e.uniform2fv(r,[1,1]);var o=t.posLocation=e.getUniformLocation(n,"u_pos");e.uniform2fv(o,[0,0])}else console.log("Failed to initialize shaders.")}},{key:"initVertexBuffers",value:function(t){var e=new Float32Array([-1,1,0,1,-1,-1,0,0,1,1,1,1,1,-1,1,0]),n=t.createBuffer();if(!n)return console.log("Failed to create the buffer object"),-1;t.bindBuffer(t.ARRAY_BUFFER,n),t.bufferData(t.ARRAY_BUFFER,e,t.STATIC_DRAW);var r=e.BYTES_PER_ELEMENT,o=t.getAttribLocation(t.program,"a_Position");if(o<0)return console.log("Failed to get the storage location of a_Position"),-1;t.vertexAttribPointer(o,2,t.FLOAT,!1,4*r,0),t.enableVertexAttribArray(o)}},{key:"initTexture",value:function(t,e){var n=this.gl,r=o()(e,2),i=r[0],a=void 0===i?1:i,s=r[1],u=void 0===s?1:s,_=n.createTexture();if(!_)return console.log("Failed to create the texture 
object"),!1;n.bindTexture(n.TEXTURE_2D,_),"format"===t.name&&t.isLast?n.texImage2D(n.TEXTURE_2D,0,this.downloadInternalFormat,3*a,u,0,n.RGBA,this.frameBufferSupportFloat?n.FLOAT:this.textureHalfFloat.HALF_FLOAT_OES,null):n.texImage2D(n.TEXTURE_2D,0,n.RGBA,a,u,0,n.RGBA,n.UNSIGNED_BYTE,null),n.texParameteri(n.TEXTURE_2D,n.TEXTURE_MAG_FILTER,n.NEAREST),n.texParameteri(n.TEXTURE_2D,n.TEXTURE_MIN_FILTER,n.NEAREST),n.texParameteri(n.TEXTURE_2D,n.TEXTURE_WRAP_S,n.CLAMP_TO_EDGE),n.texParameteri(n.TEXTURE_2D,n.TEXTURE_WRAP_T,n.CLAMP_TO_EDGE);var l=n.getUniformLocation(t.program,"entry");return n.bindTexture(n.TEXTURE_2D,null),{texture:_,loc:l}}},{key:"render",value:(e=u()(a.a.mark((function t(e){var n,r,i,s,u,_,l,c,h,f,p,T,E,d,g,I,m,v,R,H,O;return a.a.wrap((function(t){for(;;)switch(t.prev=t.next){case 0:n=e.input,r=e.width,i=e.height,s=this.gl,u=n instanceof Uint8Array||n instanceof Uint8ClampedArray||n instanceof Float32Array,_=this.pipeLine[0],l=_,c=Et(this.pipeLine);try{for(c.s();!(h=c.n()).done;)f=h.value,l=f,p=f.program,T=f.inputTexture,E=f.inputTextureLoc,d=f.outputTexture,s.useProgram(p),this.program=p,l.update(r,i),s.bindTexture(s.TEXTURE_2D,d),g=o()(l.outputShape,2),I=g[0],m=g[1],this.attachFrameBuffer(d,[l.isLast?3*I:I,m],l.pos),f.isEntry?(l.inputShape=[r,i],s.bindTexture(s.TEXTURE_2D,T),s.activeTexture(s.TEXTURE0),s.uniform1i(E,0),1!==this.glVersion||u?s.texImage2D(s.TEXTURE_2D,0,s.RGBA,r,i,0,s.RGBA,s.UNSIGNED_BYTE,n):s.texImage2D(s.TEXTURE_2D,0,s.RGBA,s.RGBA,s.UNSIGNED_BYTE,n)):(v=_.outputTexture,s.bindTexture(s.TEXTURE_2D,v),s.activeTexture(s.TEXTURE0),R=s.getUniformLocation(p,"entry"),s.uniform1i(R,0)),H=s.getUniformLocation(p,"u_scale"),s.uniform2fv(H,l.scale),O=s.getUniformLocation(p,"u_pos"),s.uniform2fv(O,l.pos),s.clearColor.apply(s,x()(this.fillColor)),s.clear(s.COLOR_BUFFER_BIT),s.drawArrays(s.TRIANGLE_STRIP,0,4)}catch(t){c.e(t)}finally{c.f()}case 7:case"end":return t.stop()}}),t,this)}))),function(t){return e.apply(this,arguments)})},{key:"read",value:function(){var t=this.gl;t.finish();var e=new Float32Array(1228800);t.readPixels(0,0,960,320,t.RGBA,t.FLOAT,e);var n=e.filter((function(t,e){return e%4==0}));console.log(new Array(n),"pixels===========")}},{key:"attachFrameBuffer",value:function(t,e,n){var r=o()(e,2),i=r[0],a=r[1],s=this.gl;return s.framebufferTexture2D(s.FRAMEBUFFER,s.COLOR_ATTACHMENT0,s.TEXTURE_2D,t,0),s.viewport(0,0,i,a),s.scissor(0,0,i,a),this.frameBuffer}},{key:"loadShader",value:function(t,e,n){var r=t.createShader(e);if(null==r)return console.log("unable to create shader"),null;if(t.shaderSource(r,n),t.compileShader(r),!t.getShaderParameter(r,t.COMPILE_STATUS)){var o=t.getShaderInfoLog(r);return console.log("Failed to compile shader: "+o),t.deleteShader(r),null}return r}},{key:"getFillColor",value:function(t){var e=0;0===t.indexOf("rgb")&&(e=0===t.indexOf("rgba")?1:2);var n=t.toLowerCase(),r=[];if(0===e){if(4===n.length){for(var o="#",i=1;i<4;i++)o+=n.slice(i,i+1).concat(n.slice(i,i+1));n=o}for(var a=1;a<7;a+=2)r.push(parseInt("0x"+n.slice(a,a+2)))}else r=n.slice(1===e?4:5,n.length-1).split(",").map((function(t){return parseInt(t)}));return 3===r.length&&r.push(1),r}}]),t}(),mt=function(){function t(e){l()(this,t);var n=e.inputs,r=e.type,o=e.attrs;this.options=e,this.type=r,this.attrs=o,this.handleInputs(n),this.computeFn=Q[r]}return h()(t,[{key:"handleInputs",value:function(t){for(var e=0,n=Object.keys(t);e<n.length;e++){t[n[e]].shape.reverse()}}},{key:"handleInputsData",value:function(t){for(var 
e=this.options.inputs,n=Object.keys(e),r=this.inputs={},o=0,i=n;o<i.length;o++){var a=i[o],s=e[a],u=s.shape,_=s.key,l=L.convertDataToMultiDim(t[a],u);r[_]=l}}},{key:"run",value:function(t){return this.handleInputsData(t),this.computeFn(this.inputs,this.attrs)}}]),t}(),xt=n(25),vt=n.n(xt),Rt=n(26),Ht=n.n(Rt);function Ot(t,e){var n;if("undefined"==typeof Symbol||null==t[Symbol.iterator]){if(Array.isArray(t)||(n=function(t,e){if(!t)return;if("string"==typeof t)return At(t,e);var n=Object.prototype.toString.call(t).slice(8,-1);"Object"===n&&t.constructor&&(n=t.constructor.name);if("Map"===n||"Set"===n)return Array.from(t);if("Arguments"===n||/^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(n))return At(t,e)}(t))||e&&t&&"number"==typeof t.length){n&&(t=n);var r=0,o=function(){};return{s:o,n:function(){return r>=t.length?{done:!0}:{done:!1,value:t[r++]}},e:function(t){throw t},f:o}}throw new TypeError("Invalid attempt to iterate non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method.")}var i,a=!0,s=!1;return{s:function(){n=t[Symbol.iterator]()},n:function(){var t=n.next();return a=t.done,t},e:function(t){s=!0,i=t},f:function(){try{a||null==n.return||n.return()}finally{if(s)throw i}}}}function At(t,e){(null==e||e>t.length)&&(e=t.length);for(var n=0,r=new Array(e);n<e;n++)r[n]=t[n];return r}var Nt=function(){function t(e){l()(this,t);var n={inputType:"image",needPreheat:!0,multiOutputConfig:null,usePipeLine:!!T.a.env().usePipeLine};this.modelConfig=Object.assign(n,e),this.flags={isRunning:!1,isPreheating:!1,runVideoPaused:!1},this.buffer=new Float32Array,this.io=new g,this.model=null,this.preheatFeed=null,this.utils=L,this.pipeLine=null,this.gpu=null,this.modelConfig.needPostProcess&&(this.postProcess=new PostProcess(e)),this.postOp=null,this.isMobile=navigator.userAgent.indexOf("iPhone")>=0||navigator.userAgent.indexOf("Android")>=0}var e,n,r,i,s,_,c,f;return h()(t,[{key:"loadModel",value:(f=u()(a.a.mark((function t(){var e,n,r,o,i,s,u,_,l,c,h,f,p,E,d,g,I,m,x,v,R,H;return a.a.wrap((function(t){for(;;)switch(t.prev=t.next){case 0:return e=this.modelConfig,n=e.fileCount,r=e.needPreheat,o=e.mean,i=void 0===o?[0,0,0]:o,s=e.std,u=void 0===s?[1,1,1]:s,_=e.feedShape,l=e.targetSize,c=e.scale,h=e.fill,f=e.usePipeLine,p=e.inputType,E=e.multiOutputConfig,d=_.fw,g=_.fh,"/"!==(I=this.modelConfig.modelPath).charAt(I.length-1)&&(I+="/"),m={dir:0===I.indexOf("http")?I:"/".concat(I),main:"model.json"},x=new ot({urlConf:m,options:{multipart:!0,dataType:"binary",options:{fileCount:n,getFileName:function(t){return"chunk_".concat(t,".dat")}},feedShape:_,mean:i,std:u,scale:c,targetSize:l,inputType:p,usePipeLine:f},multiOutputConfig:E}),t.next=8,x.load();case 8:if(this.model=t.sent,this.gpu=this.model.graph.inst.gpu,this.gl=this.gpu.gl,this.glVersion=this.gpu.version,v=this.gpu.frameBufferSupportFloat,f&&(this.pipeLine=new It(this.gl,this.glVersion,v,[{name:"resize",options:{outputShape:[d,g],targetSize:l,scale:c}}],{mean:i,std:u,fill:h,shape:[d,g]})),r){t.next=16;break}return t.abrupt("return");case 16:return T.a.env().debug&&!this.isMobile&&(R=new CustomEvent("loaded",{detail:[]}),window.dispatchEvent(R)),t.next=19,this.preheat();case 19:return H=t.sent,t.abrupt("return",H);case 21:case"end":return t.stop()}}),t,this)}))),function(){return f.apply(this,arguments)})},{key:"checkModelLoaded",value:(c=u()(a.a.mark((function t(){return a.a.wrap((function(t){for(;;)switch(t.prev=t.next){case 0:if(this.model){t.next=4;break}return console.info("It's better to preheat the 
model before running."),t.next=4,this.loadModel();case 4:case"end":return t.stop()}}),t,this)}))),function(){return c.apply(this,arguments)})},{key:"preheat",value:(_=u()(a.a.mark((function t(){var e,n,r,o,i,s,u,_,l,c=this;return a.a.wrap((function(t){for(;;)switch(t.prev=t.next){case 0:return t.next=2,this.checkModelLoaded();case 2:return this.model.graph.iLayer=0,this.flags.isPreheating=!0,e=this.modelConfig,n=e.feedShape,r=e.usePipeLine,o=n.fh,i=n.fw,s=this.preheatFeed=[{data:new Float32Array(3*o*i).fill(5),name:r?"feed":"image",shape:[1,3,o,i]}],u=this.model.execute({input:s}),this.model.graph.postOps&&this.model.graph.postOps.length&&(this.postOp=new mt(this.model.graph.postOps[0])),t.next=11,this.runAfter(u);case 11:return _=t.sent,T.a.env().debug&&(this.isMobile?(console.log(window.layerName,"window.layerName"),console.log(this.modelConfig.fetchShape,"shape"),vt()({method:"post",url:"http://172.24.205.49:8912/save",data:Ht.a.stringify({fileName:"mobile",result:JSON.stringify(_),ua:navigator.userAgent,layerName:window.layerName,shape:JSON.stringify(Array.from(this.modelConfig.fetchShape)||[])}),headers:{"content-type":"application/x-www-form-urlencoded;charset=utf-8"}}).then((function(t){if(console.log(t),t&&t.data){var e=t.data,n=e.status,r=e.layerName,o=e.shape;0===n&&(console.log(r,"nextLayerName"),console.log(o,"shape"),window.layerName=r,c.modelConfig.fetchShape=o,console.log(c.model.graph),c.preheat())}else console.log("The returned data format is invalid")}))):(l=new CustomEvent("preheat",{detail:{data:_}}),window.dispatchEvent(l))),this.flags.isPreheating=!1,t.abrupt("return",_);case 15:case"end":return t.stop()}}),t,this)}))),function(){return _.apply(this,arguments)})},{key:"run",value:(s=u()(a.a.mark((function t(e,n){var r,o,i,s,u,_,l,c,h,f,p,T,E,d,g,I,m,x,v,R,H,O;return a.a.wrap((function(t){for(;;)switch(t.prev=t.next){case 0:this.flags.isRunning=!0,r=this.modelConfig,o=r.feedShape,i=r.fill,s=r.targetSize,u=r.scale,_=r.mean,l=void 0===_?[0,0,0]:_,c=r.std,h=void 0===c?[1,1,1]:c,f=r.bgr,p=r.center,T=r.usePipeLine,E=r.normalizeType,d=o.fh,g=o.fw,I=r.inputType,x=e.data,v=e.width,R=e.height,T&&("[object HTMLImageElement]"===(H=Object.prototype.toString.call(e))||"[object HTMLVideoElement]"===H?I="htmlImageElement":"[object ImageData]"===H?I="ImageData":e.data&&(e.data instanceof Uint8Array||e.data instanceof Uint8ClampedArray||e.data instanceof Float32Array||e.data instanceof ArrayBuffer)&&(I="arraybuffer")),t.t0=I,t.next="video"===t.t0?10:"image"===t.t0?12:"htmlImageElement"===t.t0?15:"imageData"===t.t0?19:"arraybuffer"===t.t0?24:29;break;case 10:return m=[{data:e,shape:[1,3,d,g],name:"image"}],t.abrupt("break",30);case 12:return O={gapFillWith:i||"#000",mean:l||[0,0,0],std:h||[1,1,1],targetShape:[1,3,d,g],bgr:f,center:p,normalizeType:E,targetSize:s,scale:u},m=this.io.process({input:e,params:O}),t.abrupt("break",30);case 15:return R=e.naturalHeight||e.videoHeight,v=e.naturalWidth||e.videoWidth,m=[{data:{width:v,height:R,input:e},width:v,height:R,outShape:[1,3,d,g],shape:[1,4,R,v],name:"feed"}],t.abrupt("break",30);case 19:if(v&&R&&e){t.next=22;break}return console.log("For the imageData input type, an ImageData object must be passed"),t.abrupt("break",30);case 22:return m=[{data:{width:v,height:R,input:e},width:v,height:R,outShape:[1,3,d,g],shape:[1,4,R,v],name:"feed"}],t.abrupt("break",30);case 24:if(v&&R&&e){t.next=27;break}return console.log("For the arraybuffer input type, pass data along with the corresponding image width and height"),t.abrupt("break",30);case 27:return m=[{data:{width:v,height:R,input:new 
Uint8Array(x)},width:v,height:R,outShape:[1,3,d,g],shape:[1,4,R,v],name:"feed"}],t.abrupt("break",30);case 29:return t.abrupt("break",30);case 30:return T&&(this.pipeLine.render(m[0].data),this.model.graph.inst.gpu.texturesMap.image=this.pipeLine.outputTexture,m=null),t.next=33,this.runWithFeed(m,n);case 33:case"end":return t.stop()}}),t,this)}))),function(t,e){return s.apply(this,arguments)})},{key:"processData",value:function(t,e){var n=o()(e,4),r=n[0],i=n[1],a=[r,n[2],n[3],i],s=L.nhwc2nchw(t,a);return L.stridePrint(s),L.continuousPrint(s),s}},{key:"runAfter",value:(i=u()(a.a.mark((function t(e){var n,r,o,i,s,u,_,l,c;return a.a.wrap((function(t){for(;;)switch(t.prev=t.next){case 0:return t.next=2,e.read();case 2:if(n=t.sent,this.modelConfig.multiOutputConfig){t.next=5;break}return t.abrupt("return",this.processData(n,this.modelConfig.fetchShape));case 5:r=this.modelConfig.multiOutputConfig.inputs,o={},i=Ot(r);try{for(i.s();!(s=i.n()).done;)u=s.value,_=this.model.graph.multiOutputInfo[u],l=L.getShapeLen([_]),c=n.splice(0,l),o[u]=c}catch(t){i.e(t)}finally{i.f()}if(!this.postOp){t.next=11;break}return t.abrupt("return",this.postOp.run(o));case 11:return t.abrupt("return",o);case 12:case"end":return t.stop()}}),t,this)}))),function(t){return i.apply(this,arguments)})},{key:"runWithFeed",value:(r=u()(a.a.mark((function t(e,n){var r,o;return a.a.wrap((function(t){for(;;)switch(t.prev=t.next){case 0:return t.next=2,this.checkModelLoaded();case 2:return r=this.model.execute({input:e}),t.next=5,this.runAfter(r);case 5:return o=t.sent,t.next=8,n;case 8:if(t.t0=t.sent,!t.t0){t.next=11;break}n(o);case 11:this.flags.isRunning=!1;case 12:case"end":return t.stop()}}),t,this)}))),function(t,e){return r.apply(this,arguments)})},{key:"runStream",value:(n=u()(a.a.mark((function t(e,n){var r,o=this;return a.a.wrap((function(t){for(;;)switch(t.prev=t.next){case 0:return t.next=2,this.run(e,n);case 2:return r=t.sent,"video"!==this.modelConfig.inputType||this.flags.runVideoPaused||setTimeout(u()(a.a.mark((function t(){return a.a.wrap((function(t){for(;;)switch(t.prev=t.next){case 0:return t.next=2,o.runStream(e,n);case 2:case"end":return t.stop()}}),t)}))),0),t.abrupt("return",r);case 5:case"end":return t.stop()}}),t,this)}))),function(t,e){return n.apply(this,arguments)})},{key:"stopStream",value:function(){this.flags.runVideoPaused=!0}},{key:"predict",value:(e=u()(a.a.mark((function t(e,n){return a.a.wrap((function(t){for(;;)switch(t.prev=t.next){case 0:if(this.flags.runVideoPaused=!1,"function"!=typeof e){t.next=6;break}return t.next=4,this.runStream(e(),n);case 4:t.next=8;break;case 6:return t.next=8,this.runStream(e,n);case 8:case"end":return t.stop()}}),t,this)}))),function(t,n){return e.apply(this,arguments)})}]),t}();window&&(window.Paddlejs=Nt)},function(t,e,n){var r=n(17);t.exports=function(t,e){if(t){if("string"==typeof t)return r(t,e);var n=Object.prototype.toString.call(t).slice(8,-1);return"Object"===n&&t.constructor&&(n=t.constructor.name),"Map"===n||"Set"===n?Array.from(t):"Arguments"===n||/^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(n)?r(t,e):void 0}}},function(t,e){t.exports=function(t,e){(null==e||e>t.length)&&(e=t.length);for(var n=0,r=new Array(e);n<e;n++)r[n]=t[n];return r}},function(t,e){t.exports=function(t,e,n,r){var o=new Blob(void 0!==r?[r,t]:[t],{type:n||"application/octet-stream"});if(void 0!==window.navigator.msSaveBlob)window.navigator.msSaveBlob(o,e);else{var 
i=window.URL&&window.URL.createObjectURL?window.URL.createObjectURL(o):window.webkitURL.createObjectURL(o),a=document.createElement("a");a.style.display="none",a.href=i,a.setAttribute("download",e),void 0===a.download&&a.setAttribute("target","_blank"),document.body.appendChild(a),a.click(),setTimeout((function(){document.body.removeChild(a),window.URL.revokeObjectURL(i)}),200)}}},function(t,e,n){"use strict";t.exports=function(t,e){return function(){for(var n=new Array(arguments.length),r=0;r<n.length;r++)n[r]=arguments[r];return t.apply(e,n)}}},function(t,e,n){"use strict";var r=n(7),o=n(42),i=n(44),a=n(45),s=n(46),u=n(21),_="undefined"!=typeof window&&window.btoa&&window.btoa.bind(window)||n(47);t.exports=function(t){return new Promise((function(e,l){var c=t.data,h=t.headers;r.isFormData(c)&&delete h["Content-Type"];var f=new XMLHttpRequest,p="onreadystatechange",T=!1;if("undefined"==typeof window||!window.XDomainRequest||"withCredentials"in f||s(t.url)||(f=new window.XDomainRequest,p="onload",T=!0,f.onprogress=function(){},f.ontimeout=function(){}),t.auth){var E=t.auth.username||"",d=t.auth.password||"";h.Authorization="Basic "+_(E+":"+d)}if(f.open(t.method.toUpperCase(),i(t.url,t.params,t.paramsSerializer),!0),f.timeout=t.timeout,f[p]=function(){if(f&&(4===f.readyState||T)&&(0!==f.status||f.responseURL&&0===f.responseURL.indexOf("file:"))){var n="getAllResponseHeaders"in f?a(f.getAllResponseHeaders()):null,r={data:t.responseType&&"text"!==t.responseType?f.response:f.responseText,status:1223===f.status?204:f.status,statusText:1223===f.status?"No Content":f.statusText,headers:n,config:t,request:f};o(e,l,r),f=null}},f.onerror=function(){l(u("Network Error",t,null,f)),f=null},f.ontimeout=function(){l(u("timeout of "+t.timeout+"ms exceeded",t,"ECONNABORTED",f)),f=null},r.isStandardBrowserEnv()){var g=n(48),I=(t.withCredentials||s(t.url))&&t.xsrfCookieName?g.read(t.xsrfCookieName):void 0;I&&(h[t.xsrfHeaderName]=I)}if("setRequestHeader"in f&&r.forEach(h,(function(t,e){void 0===c&&"content-type"===e.toLowerCase()?delete h[e]:f.setRequestHeader(e,t)})),t.withCredentials&&(f.withCredentials=!0),t.responseType)try{f.responseType=t.responseType}catch(e){if("json"!==t.responseType)throw e}"function"==typeof t.onDownloadProgress&&f.addEventListener("progress",t.onDownloadProgress),"function"==typeof t.onUploadProgress&&f.upload&&f.upload.addEventListener("progress",t.onUploadProgress),t.cancelToken&&t.cancelToken.promise.then((function(t){f&&(f.abort(),l(t),f=null)})),void 0===c&&(c=null),f.send(c)}))}},function(t,e,n){"use strict";var r=n(43);t.exports=function(t,e,n,o,i){var a=new Error(t);return r(a,e,n,o,i)}},function(t,e,n){"use strict";t.exports=function(t){return!(!t||!t.__CANCEL__)}},function(t,e,n){"use strict";function r(t){this.message=t}r.prototype.toString=function(){return"Cancel"+(this.message?": "+this.message:"")},r.prototype.__CANCEL__=!0,t.exports=r},function(t,e,n){"use strict";var r=String.prototype.replace,o=/%20/g,i=n(13),a={RFC1738:"RFC1738",RFC3986:"RFC3986"};t.exports=i.assign({default:a.RFC3986,formatters:{RFC1738:function(t){return r.call(t,o,"+")},RFC3986:function(t){return String(t)}}},a)},function(t,e,n){t.exports=n(37)},function(t,e,n){"use strict";var r=n(56),o=n(57),i=n(24);t.exports={formats:i,parse:o,stringify:r}},function(t,e){t.exports=function(t){if(Array.isArray(t))return t}},function(t,e){t.exports=function(t,e){if("undefined"!=typeof Symbol&&Symbol.iterator in Object(t)){var n=[],r=!0,o=!1,i=void 0;try{for(var 
a,s=t[Symbol.iterator]();!(r=(a=s.next()).done)&&(n.push(a.value),!e||n.length!==e);r=!0);}catch(t){o=!0,i=t}finally{try{r||null==s.return||s.return()}finally{if(o)throw i}}return n}}},function(t,e){t.exports=function(){throw new TypeError("Invalid attempt to destructure non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method.")}},function(t,e,n){var r=function(t){"use strict";var e=Object.prototype,n=e.hasOwnProperty,r="function"==typeof Symbol?Symbol:{},o=r.iterator||"@@iterator",i=r.asyncIterator||"@@asyncIterator",a=r.toStringTag||"@@toStringTag";function s(t,e,n,r){var o=e&&e.prototype instanceof l?e:l,i=Object.create(o.prototype),a=new v(r||[]);return i._invoke=function(t,e,n){var r="suspendedStart";return function(o,i){if("executing"===r)throw new Error("Generator is already running");if("completed"===r){if("throw"===o)throw i;return H()}for(n.method=o,n.arg=i;;){var a=n.delegate;if(a){var s=I(a,n);if(s){if(s===_)continue;return s}}if("next"===n.method)n.sent=n._sent=n.arg;else if("throw"===n.method){if("suspendedStart"===r)throw r="completed",n.arg;n.dispatchException(n.arg)}else"return"===n.method&&n.abrupt("return",n.arg);r="executing";var l=u(t,e,n);if("normal"===l.type){if(r=n.done?"completed":"suspendedYield",l.arg===_)continue;return{value:l.arg,done:n.done}}"throw"===l.type&&(r="completed",n.method="throw",n.arg=l.arg)}}}(t,n,a),i}function u(t,e,n){try{return{type:"normal",arg:t.call(e,n)}}catch(t){return{type:"throw",arg:t}}}t.wrap=s;var _={};function l(){}function c(){}function h(){}var f={};f[o]=function(){return this};var p=Object.getPrototypeOf,T=p&&p(p(R([])));T&&T!==e&&n.call(T,o)&&(f=T);var E=h.prototype=l.prototype=Object.create(f);function d(t){["next","throw","return"].forEach((function(e){t[e]=function(t){return this._invoke(e,t)}}))}function g(t,e){var r;this._invoke=function(o,i){function a(){return new e((function(r,a){!function r(o,i,a,s){var _=u(t[o],t,i);if("throw"!==_.type){var l=_.arg,c=l.value;return c&&"object"==typeof c&&n.call(c,"__await")?e.resolve(c.__await).then((function(t){r("next",t,a,s)}),(function(t){r("throw",t,a,s)})):e.resolve(c).then((function(t){l.value=t,a(l)}),(function(t){return r("throw",t,a,s)}))}s(_.arg)}(o,i,r,a)}))}return r=r?r.then(a,a):a()}}function I(t,e){var n=t.iterator[e.method];if(void 0===n){if(e.delegate=null,"throw"===e.method){if(t.iterator.return&&(e.method="return",e.arg=void 0,I(t,e),"throw"===e.method))return _;e.method="throw",e.arg=new TypeError("The iterator does not provide a 'throw' method")}return _}var r=u(n,t.iterator,e.arg);if("throw"===r.type)return e.method="throw",e.arg=r.arg,e.delegate=null,_;var o=r.arg;return o?o.done?(e[t.resultName]=o.value,e.next=t.nextLoc,"return"!==e.method&&(e.method="next",e.arg=void 0),e.delegate=null,_):o:(e.method="throw",e.arg=new TypeError("iterator result is not an object"),e.delegate=null,_)}function m(t){var e={tryLoc:t[0]};1 in t&&(e.catchLoc=t[1]),2 in t&&(e.finallyLoc=t[2],e.afterLoc=t[3]),this.tryEntries.push(e)}function x(t){var e=t.completion||{};e.type="normal",delete e.arg,t.completion=e}function v(t){this.tryEntries=[{tryLoc:"root"}],t.forEach(m,this),this.reset(!0)}function R(t){if(t){var e=t[o];if(e)return e.call(t);if("function"==typeof t.next)return t;if(!isNaN(t.length)){var r=-1,i=function e(){for(;++r<t.length;)if(n.call(t,r))return e.value=t[r],e.done=!1,e;return e.value=void 0,e.done=!0,e};return i.next=i}}return{next:H}}function H(){return{value:void 0,done:!0}}return 
c.prototype=E.constructor=h,h.constructor=c,h[a]=c.displayName="GeneratorFunction",t.isGeneratorFunction=function(t){var e="function"==typeof t&&t.constructor;return!!e&&(e===c||"GeneratorFunction"===(e.displayName||e.name))},t.mark=function(t){return Object.setPrototypeOf?Object.setPrototypeOf(t,h):(t.__proto__=h,a in t||(t[a]="GeneratorFunction")),t.prototype=Object.create(E),t},t.awrap=function(t){return{__await:t}},d(g.prototype),g.prototype[i]=function(){return this},t.AsyncIterator=g,t.async=function(e,n,r,o,i){void 0===i&&(i=Promise);var a=new g(s(e,n,r,o),i);return t.isGeneratorFunction(n)?a:a.next().then((function(t){return t.done?t.value:a.next()}))},d(E),E[a]="Generator",E[o]=function(){return this},E.toString=function(){return"[object Generator]"},t.keys=function(t){var e=[];for(var n in t)e.push(n);return e.reverse(),function n(){for(;e.length;){var r=e.pop();if(r in t)return n.value=r,n.done=!1,n}return n.done=!0,n}},t.values=R,v.prototype={constructor:v,reset:function(t){if(this.prev=0,this.next=0,this.sent=this._sent=void 0,this.done=!1,this.delegate=null,this.method="next",this.arg=void 0,this.tryEntries.forEach(x),!t)for(var e in this)"t"===e.charAt(0)&&n.call(this,e)&&!isNaN(+e.slice(1))&&(this[e]=void 0)},stop:function(){this.done=!0;var t=this.tryEntries[0].completion;if("throw"===t.type)throw t.arg;return this.rval},dispatchException:function(t){if(this.done)throw t;var e=this;function r(n,r){return a.type="throw",a.arg=t,e.next=n,r&&(e.method="next",e.arg=void 0),!!r}for(var o=this.tryEntries.length-1;o>=0;--o){var i=this.tryEntries[o],a=i.completion;if("root"===i.tryLoc)return r("end");if(i.tryLoc<=this.prev){var s=n.call(i,"catchLoc"),u=n.call(i,"finallyLoc");if(s&&u){if(this.prev<i.catchLoc)return r(i.catchLoc,!0);if(this.prev<i.finallyLoc)return r(i.finallyLoc)}else if(s){if(this.prev<i.catchLoc)return r(i.catchLoc,!0)}else{if(!u)throw new Error("try statement without catch or finally");if(this.prev<i.finallyLoc)return r(i.finallyLoc)}}}},abrupt:function(t,e){for(var r=this.tryEntries.length-1;r>=0;--r){var o=this.tryEntries[r];if(o.tryLoc<=this.prev&&n.call(o,"finallyLoc")&&this.prev<o.finallyLoc){var i=o;break}}i&&("break"===t||"continue"===t)&&i.tryLoc<=e&&e<=i.finallyLoc&&(i=null);var a=i?i.completion:{};return a.type=t,a.arg=e,i?(this.method="next",this.next=i.finallyLoc,_):this.complete(a)},complete:function(t,e){if("throw"===t.type)throw t.arg;return"break"===t.type||"continue"===t.type?this.next=t.arg:"return"===t.type?(this.rval=this.arg=t.arg,this.method="return",this.next="end"):"normal"===t.type&&e&&(this.next=e),_},finish:function(t){for(var e=this.tryEntries.length-1;e>=0;--e){var n=this.tryEntries[e];if(n.finallyLoc===t)return this.complete(n.completion,n.afterLoc),x(n),_}},catch:function(t){for(var e=this.tryEntries.length-1;e>=0;--e){var n=this.tryEntries[e];if(n.tryLoc===t){var r=n.completion;if("throw"===r.type){var o=r.arg;x(n)}return o}}throw new Error("illegal catch attempt")},delegateYield:function(t,e,n){return this.delegate={iterator:R(t),resultName:e,nextLoc:n},"next"===this.method&&(this.arg=void 0),_}},t}(t.exports);try{regeneratorRuntime=r}catch(t){Function("r","regeneratorRuntime = r")(r)}},function(t,e,n){var r=n(17);t.exports=function(t){if(Array.isArray(t))return r(t)}},function(t,e){t.exports=function(t){if("undefined"!=typeof Symbol&&Symbol.iterator in Object(t))return Array.from(t)}},function(t,e){t.exports=function(){throw new TypeError("Invalid attempt to spread non-iterable instance.\nIn order to be iterable, non-array 
objects must have a [Symbol.iterator]() method.")}},function(t,e){function n(e,r){return t.exports=n=Object.setPrototypeOf||function(t,e){return t.__proto__=e,t},n(e,r)}t.exports=n},function(t,e){function n(e){return"function"==typeof Symbol&&"symbol"==typeof Symbol.iterator?t.exports=n=function(t){return typeof t}:t.exports=n=function(t){return t&&"function"==typeof Symbol&&t.constructor===Symbol&&t!==Symbol.prototype?"symbol":typeof t},n(e)}t.exports=n},function(t,e){t.exports=function(t){if(void 0===t)throw new ReferenceError("this hasn't been initialised - super() hasn't been called");return t}},function(t,e,n){"use strict";var r=n(7),o=n(19),i=n(39),a=n(12);function s(t){var e=new i(t),n=o(i.prototype.request,e);return r.extend(n,i.prototype,e),r.extend(n,e),n}var u=s(a);u.Axios=i,u.create=function(t){return s(r.merge(a,t))},u.Cancel=n(23),u.CancelToken=n(54),u.isCancel=n(22),u.all=function(t){return Promise.all(t)},u.spread=n(55),t.exports=u,t.exports.default=u},function(t,e){function n(t){return!!t.constructor&&"function"==typeof t.constructor.isBuffer&&t.constructor.isBuffer(t)}
/*!
 * Determine if an object is a Buffer
 *
 * @author Feross Aboukhadijeh <https://feross.org>
 * @license MIT
 */
t.exports=function(t){return null!=t&&(n(t)||function(t){return"function"==typeof t.readFloatLE&&"function"==typeof t.slice&&n(t.slice(0,0))}(t)||!!t._isBuffer)}},function(t,e,n){"use strict";var r=n(12),o=n(7),i=n(49),a=n(50);function s(t){this.defaults=t,this.interceptors={request:new i,response:new i}}s.prototype.request=function(t){"string"==typeof t&&(t=o.merge({url:arguments[0]},arguments[1])),(t=o.merge(r,this.defaults,{method:"get"},t)).method=t.method.toLowerCase();var e=[a,void 0],n=Promise.resolve(t);for(this.interceptors.request.forEach((function(t){e.unshift(t.fulfilled,t.rejected)})),this.interceptors.response.forEach((function(t){e.push(t.fulfilled,t.rejected)}));e.length;)n=n.then(e.shift(),e.shift());return n},o.forEach(["delete","get","head","options"],(function(t){s.prototype[t]=function(e,n){return this.request(o.merge(n||{},{method:t,url:e}))}})),o.forEach(["post","put","patch"],(function(t){s.prototype[t]=function(e,n,r){return this.request(o.merge(r||{},{method:t,url:e,data:n}))}})),t.exports=s},function(t,e){var n,r,o=t.exports={};function i(){throw new Error("setTimeout has not been defined")}function a(){throw new Error("clearTimeout has not been defined")}function s(t){if(n===setTimeout)return setTimeout(t,0);if((n===i||!n)&&setTimeout)return n=setTimeout,setTimeout(t,0);try{return n(t,0)}catch(e){try{return n.call(null,t,0)}catch(e){return n.call(this,t,0)}}}!function(){try{n="function"==typeof setTimeout?setTimeout:i}catch(t){n=i}try{r="function"==typeof clearTimeout?clearTimeout:a}catch(t){r=a}}();var u,_=[],l=!1,c=-1;function h(){l&&u&&(l=!1,u.length?_=u.concat(_):c=-1,_.length&&f())}function f(){if(!l){var t=s(h);l=!0;for(var e=_.length;e;){for(u=_,_=[];++c<e;)u&&u[c].run();c=-1,e=_.length}u=null,l=!1,function(t){if(r===clearTimeout)return clearTimeout(t);if((r===a||!r)&&clearTimeout)return r=clearTimeout,clearTimeout(t);try{r(t)}catch(e){try{return r.call(null,t)}catch(e){return r.call(this,t)}}}(t)}}function p(t,e){this.fun=t,this.array=e}function T(){}o.nextTick=function(t){var e=new Array(arguments.length-1);if(arguments.length>1)for(var n=1;n<arguments.length;n++)e[n-1]=arguments[n];_.push(new p(t,e)),1!==_.length||l||s(f)},p.prototype.run=function(){this.fun.apply(null,this.array)},o.title="browser",o.browser=!0,o.env={},o.argv=[],o.version="",o.versions={},o.on=T,o.addListener=T,o.once=T,o.off=T,o.removeListener=T,o.removeAllListeners=T,o.emit=T,o.prependListener=T,o.prependOnceListener=T,o.listeners=function(t){return[]},o.binding=function(t){throw new Error("process.binding is not supported")},o.cwd=function(){return"/"},o.chdir=function(t){throw new Error("process.chdir is not supported")},o.umask=function(){return 0}},function(t,e,n){"use strict";var r=n(7);t.exports=function(t,e){r.forEach(t,(function(n,r){r!==e&&r.toUpperCase()===e.toUpperCase()&&(t[e]=n,delete t[r])}))}},function(t,e,n){"use strict";var r=n(21);t.exports=function(t,e,n){var o=n.config.validateStatus;n.status&&o&&!o(n.status)?e(r("Request failed with status code "+n.status,n.config,null,n.request,n)):t(n)}},function(t,e,n){"use strict";t.exports=function(t,e,n,r,o){return t.config=e,n&&(t.code=n),t.request=r,t.response=o,t}},function(t,e,n){"use strict";var r=n(7);function o(t){return encodeURIComponent(t).replace(/%40/gi,"@").replace(/%3A/gi,":").replace(/%24/g,"$").replace(/%2C/gi,",").replace(/%20/g,"+").replace(/%5B/gi,"[").replace(/%5D/gi,"]")}t.exports=function(t,e,n){if(!e)return t;var i;if(n)i=n(e);else if(r.isURLSearchParams(e))i=e.toString();else{var 
a=[];r.forEach(e,(function(t,e){null!=t&&(r.isArray(t)&&(e+="[]"),r.isArray(t)||(t=[t]),r.forEach(t,(function(t){r.isDate(t)?t=t.toISOString():r.isObject(t)&&(t=JSON.stringify(t)),a.push(o(e)+"="+o(t))})))})),i=a.join("&")}return i&&(t+=(-1===t.indexOf("?")?"?":"&")+i),t}},function(t,e,n){"use strict";var r=n(7),o=["age","authorization","content-length","content-type","etag","expires","from","host","if-modified-since","if-unmodified-since","last-modified","location","max-forwards","proxy-authorization","referer","retry-after","user-agent"];t.exports=function(t){var e,n,i,a={};return t?(r.forEach(t.split("\n"),(function(t){if(i=t.indexOf(":"),e=r.trim(t.substr(0,i)).toLowerCase(),n=r.trim(t.substr(i+1)),e){if(a[e]&&o.indexOf(e)>=0)return;a[e]="set-cookie"===e?(a[e]?a[e]:[]).concat([n]):a[e]?a[e]+", "+n:n}})),a):a}},function(t,e,n){"use strict";var r=n(7);t.exports=r.isStandardBrowserEnv()?function(){var t,e=/(msie|trident)/i.test(navigator.userAgent),n=document.createElement("a");function o(t){var r=t;return e&&(n.setAttribute("href",r),r=n.href),n.setAttribute("href",r),{href:n.href,protocol:n.protocol?n.protocol.replace(/:$/,""):"",host:n.host,search:n.search?n.search.replace(/^\?/,""):"",hash:n.hash?n.hash.replace(/^#/,""):"",hostname:n.hostname,port:n.port,pathname:"/"===n.pathname.charAt(0)?n.pathname:"/"+n.pathname}}return t=o(window.location.href),function(e){var n=r.isString(e)?o(e):e;return n.protocol===t.protocol&&n.host===t.host}}():function(){return!0}},function(t,e,n){"use strict";function r(){this.message="String contains an invalid character"}r.prototype=new Error,r.prototype.code=5,r.prototype.name="InvalidCharacterError",t.exports=function(t){for(var e,n,o=String(t),i="",a=0,s="ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/=";o.charAt(0|a)||(s="=",a%1);i+=s.charAt(63&e>>8-a%1*8)){if((n=o.charCodeAt(a+=3/4))>255)throw new r;e=e<<8|n}return i}},function(t,e,n){"use strict";var r=n(7);t.exports=r.isStandardBrowserEnv()?{write:function(t,e,n,o,i,a){var s=[];s.push(t+"="+encodeURIComponent(e)),r.isNumber(n)&&s.push("expires="+new Date(n).toGMTString()),r.isString(o)&&s.push("path="+o),r.isString(i)&&s.push("domain="+i),!0===a&&s.push("secure"),document.cookie=s.join("; ")},read:function(t){var e=document.cookie.match(new RegExp("(^|;\\s*)("+t+")=([^;]*)"));return e?decodeURIComponent(e[3]):null},remove:function(t){this.write(t,"",Date.now()-864e5)}}:{write:function(){},read:function(){return null},remove:function(){}}},function(t,e,n){"use strict";var r=n(7);function o(){this.handlers=[]}o.prototype.use=function(t,e){return this.handlers.push({fulfilled:t,rejected:e}),this.handlers.length-1},o.prototype.eject=function(t){this.handlers[t]&&(this.handlers[t]=null)},o.prototype.forEach=function(t){r.forEach(this.handlers,(function(e){null!==e&&t(e)}))},t.exports=o},function(t,e,n){"use strict";var r=n(7),o=n(51),i=n(22),a=n(12),s=n(52),u=n(53);function _(t){t.cancelToken&&t.cancelToken.throwIfRequested()}t.exports=function(t){return _(t),t.baseURL&&!s(t.url)&&(t.url=u(t.baseURL,t.url)),t.headers=t.headers||{},t.data=o(t.data,t.headers,t.transformRequest),t.headers=r.merge(t.headers.common||{},t.headers[t.method]||{},t.headers||{}),r.forEach(["delete","get","head","post","put","patch","common"],(function(e){delete t.headers[e]})),(t.adapter||a.adapter)(t).then((function(e){return _(t),e.data=o(e.data,e.headers,t.transformResponse),e}),(function(e){return 
i(e)||(_(t),e&&e.response&&(e.response.data=o(e.response.data,e.response.headers,t.transformResponse))),Promise.reject(e)}))}},function(t,e,n){"use strict";var r=n(7);t.exports=function(t,e,n){return r.forEach(n,(function(n){t=n(t,e)})),t}},function(t,e,n){"use strict";t.exports=function(t){return/^([a-z][a-z\d\+\-\.]*:)?\/\//i.test(t)}},function(t,e,n){"use strict";t.exports=function(t,e){return e?t.replace(/\/+$/,"")+"/"+e.replace(/^\/+/,""):t}},function(t,e,n){"use strict";var r=n(23);function o(t){if("function"!=typeof t)throw new TypeError("executor must be a function.");var e;this.promise=new Promise((function(t){e=t}));var n=this;t((function(t){n.reason||(n.reason=new r(t),e(n.reason))}))}o.prototype.throwIfRequested=function(){if(this.reason)throw this.reason},o.source=function(){var t;return{token:new o((function(e){t=e})),cancel:t}},t.exports=o},function(t,e,n){"use strict";t.exports=function(t){return function(e){return t.apply(null,e)}}},function(t,e,n){"use strict";var r=n(13),o=n(24),i=Object.prototype.hasOwnProperty,a={brackets:function(t){return t+"[]"},comma:"comma",indices:function(t,e){return t+"["+e+"]"},repeat:function(t){return t}},s=Array.isArray,u=Array.prototype.push,_=function(t,e){u.apply(t,s(e)?e:[e])},l=Date.prototype.toISOString,c=o.default,h={addQueryPrefix:!1,allowDots:!1,charset:"utf-8",charsetSentinel:!1,delimiter:"&",encode:!0,encoder:r.encode,encodeValuesOnly:!1,format:c,formatter:o.formatters[c],indices:!1,serializeDate:function(t){return l.call(t)},skipNulls:!1,strictNullHandling:!1},f=function t(e,n,o,i,a,u,l,c,f,p,T,E,d){var g,I=e;if("function"==typeof l?I=l(n,I):I instanceof Date?I=p(I):"comma"===o&&s(I)&&(I=r.maybeMap(I,(function(t){return t instanceof Date?p(t):t})).join(",")),null===I){if(i)return u&&!E?u(n,h.encoder,d,"key"):n;I=""}if("string"==typeof(g=I)||"number"==typeof g||"boolean"==typeof g||"symbol"==typeof g||"bigint"==typeof g||r.isBuffer(I))return u?[T(E?n:u(n,h.encoder,d,"key"))+"="+T(u(I,h.encoder,d,"value"))]:[T(n)+"="+T(String(I))];var m,x=[];if(void 0===I)return x;if(s(l))m=l;else{var v=Object.keys(I);m=c?v.sort(c):v}for(var R=0;R<m.length;++R){var H=m[R],O=I[H];if(!a||null!==O){var A=s(I)?"function"==typeof o?o(n,H):n:n+(f?"."+H:"["+H+"]");_(x,t(O,A,o,i,a,u,l,c,f,p,T,E,d))}}return x};t.exports=function(t,e){var n,r=t,u=function(t){if(!t)return h;if(null!==t.encoder&&void 0!==t.encoder&&"function"!=typeof t.encoder)throw new TypeError("Encoder has to be a function.");var e=t.charset||h.charset;if(void 0!==t.charset&&"utf-8"!==t.charset&&"iso-8859-1"!==t.charset)throw new TypeError("The charset option must be either utf-8, iso-8859-1, or undefined");var n=o.default;if(void 0!==t.format){if(!i.call(o.formatters,t.format))throw new TypeError("Unknown format option provided.");n=t.format}var r=o.formatters[n],a=h.filter;return("function"==typeof t.filter||s(t.filter))&&(a=t.filter),{addQueryPrefix:"boolean"==typeof t.addQueryPrefix?t.addQueryPrefix:h.addQueryPrefix,allowDots:void 0===t.allowDots?h.allowDots:!!t.allowDots,charset:e,charsetSentinel:"boolean"==typeof t.charsetSentinel?t.charsetSentinel:h.charsetSentinel,delimiter:void 0===t.delimiter?h.delimiter:t.delimiter,encode:"boolean"==typeof t.encode?t.encode:h.encode,encoder:"function"==typeof t.encoder?t.encoder:h.encoder,encodeValuesOnly:"boolean"==typeof t.encodeValuesOnly?t.encodeValuesOnly:h.encodeValuesOnly,filter:a,formatter:r,serializeDate:"function"==typeof t.serializeDate?t.serializeDate:h.serializeDate,skipNulls:"boolean"==typeof 
t.skipNulls?t.skipNulls:h.skipNulls,sort:"function"==typeof t.sort?t.sort:null,strictNullHandling:"boolean"==typeof t.strictNullHandling?t.strictNullHandling:h.strictNullHandling}}(e);"function"==typeof u.filter?r=(0,u.filter)("",r):s(u.filter)&&(n=u.filter);var l,c=[];if("object"!=typeof r||null===r)return"";l=e&&e.arrayFormat in a?e.arrayFormat:e&&"indices"in e?e.indices?"indices":"repeat":"indices";var p=a[l];n||(n=Object.keys(r)),u.sort&&n.sort(u.sort);for(var T=0;T<n.length;++T){var E=n[T];u.skipNulls&&null===r[E]||_(c,f(r[E],E,p,u.strictNullHandling,u.skipNulls,u.encode?u.encoder:null,u.filter,u.sort,u.allowDots,u.serializeDate,u.formatter,u.encodeValuesOnly,u.charset))}var d=c.join(u.delimiter),g=!0===u.addQueryPrefix?"?":"";return u.charsetSentinel&&("iso-8859-1"===u.charset?g+="utf8=%26%2310003%3B&":g+="utf8=%E2%9C%93&"),d.length>0?g+d:""}},function(t,e,n){"use strict";var r=n(13),o=Object.prototype.hasOwnProperty,i=Array.isArray,a={allowDots:!1,allowPrototypes:!1,arrayLimit:20,charset:"utf-8",charsetSentinel:!1,comma:!1,decoder:r.decode,delimiter:"&",depth:5,ignoreQueryPrefix:!1,interpretNumericEntities:!1,parameterLimit:1e3,parseArrays:!0,plainObjects:!1,strictNullHandling:!1},s=function(t){return t.replace(/&#(\d+);/g,(function(t,e){return String.fromCharCode(parseInt(e,10))}))},u=function(t,e){return t&&"string"==typeof t&&e.comma&&t.indexOf(",")>-1?t.split(","):t},_=function(t,e,n,r){if(t){var i=n.allowDots?t.replace(/\.([^.[]+)/g,"[$1]"):t,a=/(\[[^[\]]*])/g,s=n.depth>0&&/(\[[^[\]]*])/.exec(i),_=s?i.slice(0,s.index):i,l=[];if(_){if(!n.plainObjects&&o.call(Object.prototype,_)&&!n.allowPrototypes)return;l.push(_)}for(var c=0;n.depth>0&&null!==(s=a.exec(i))&&c<n.depth;){if(c+=1,!n.plainObjects&&o.call(Object.prototype,s[1].slice(1,-1))&&!n.allowPrototypes)return;l.push(s[1])}return s&&l.push("["+i.slice(s.index)+"]"),function(t,e,n,r){for(var o=r?e:u(e,n),i=t.length-1;i>=0;--i){var a,s=t[i];if("[]"===s&&n.parseArrays)a=[].concat(o);else{a=n.plainObjects?Object.create(null):{};var _="["===s.charAt(0)&&"]"===s.charAt(s.length-1)?s.slice(1,-1):s,l=parseInt(_,10);n.parseArrays||""!==_?!isNaN(l)&&s!==_&&String(l)===_&&l>=0&&n.parseArrays&&l<=n.arrayLimit?(a=[])[l]=o:a[_]=o:a={0:o}}o=a}return o}(l,e,n,r)}};t.exports=function(t,e){var n=function(t){if(!t)return a;if(null!==t.decoder&&void 0!==t.decoder&&"function"!=typeof t.decoder)throw new TypeError("Decoder has to be a function.");if(void 0!==t.charset&&"utf-8"!==t.charset&&"iso-8859-1"!==t.charset)throw new TypeError("The charset option must be either utf-8, iso-8859-1, or undefined");var e=void 0===t.charset?a.charset:t.charset;return{allowDots:void 0===t.allowDots?a.allowDots:!!t.allowDots,allowPrototypes:"boolean"==typeof t.allowPrototypes?t.allowPrototypes:a.allowPrototypes,arrayLimit:"number"==typeof t.arrayLimit?t.arrayLimit:a.arrayLimit,charset:e,charsetSentinel:"boolean"==typeof t.charsetSentinel?t.charsetSentinel:a.charsetSentinel,comma:"boolean"==typeof t.comma?t.comma:a.comma,decoder:"function"==typeof t.decoder?t.decoder:a.decoder,delimiter:"string"==typeof t.delimiter||r.isRegExp(t.delimiter)?t.delimiter:a.delimiter,depth:"number"==typeof t.depth||!1===t.depth?+t.depth:a.depth,ignoreQueryPrefix:!0===t.ignoreQueryPrefix,interpretNumericEntities:"boolean"==typeof t.interpretNumericEntities?t.interpretNumericEntities:a.interpretNumericEntities,parameterLimit:"number"==typeof t.parameterLimit?t.parameterLimit:a.parameterLimit,parseArrays:!1!==t.parseArrays,plainObjects:"boolean"==typeof 
t.plainObjects?t.plainObjects:a.plainObjects,strictNullHandling:"boolean"==typeof t.strictNullHandling?t.strictNullHandling:a.strictNullHandling}}(e);if(""===t||null==t)return n.plainObjects?Object.create(null):{};for(var l="string"==typeof t?function(t,e){var n,_={},l=e.ignoreQueryPrefix?t.replace(/^\?/,""):t,c=e.parameterLimit===1/0?void 0:e.parameterLimit,h=l.split(e.delimiter,c),f=-1,p=e.charset;if(e.charsetSentinel)for(n=0;n<h.length;++n)0===h[n].indexOf("utf8=")&&("utf8=%E2%9C%93"===h[n]?p="utf-8":"utf8=%26%2310003%3B"===h[n]&&(p="iso-8859-1"),f=n,n=h.length);for(n=0;n<h.length;++n)if(n!==f){var T,E,d=h[n],g=d.indexOf("]="),I=-1===g?d.indexOf("="):g+1;-1===I?(T=e.decoder(d,a.decoder,p,"key"),E=e.strictNullHandling?null:""):(T=e.decoder(d.slice(0,I),a.decoder,p,"key"),E=r.maybeMap(u(d.slice(I+1),e),(function(t){return e.decoder(t,a.decoder,p,"value")}))),E&&e.interpretNumericEntities&&"iso-8859-1"===p&&(E=s(E)),d.indexOf("[]=")>-1&&(E=i(E)?[E]:E),o.call(_,T)?_[T]=r.combine(_[T],E):_[T]=E}return _}(t,n):t,c=n.plainObjects?Object.create(null):{},h=Object.keys(l),f=0;f<h.length;++f){var p=h[f],T=_(p,l[p],n,"string"==typeof t);c=r.merge(c,T,n)}return r.compact(c)}},function(t,e,n){t.exports=n(59)},function(t,e,n){"use strict";n.r(e);var r=n(5),o=n(15);e.default={environment:r.a,runner:o.a}}]).default}));
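
// A minimal usage sketch for the Paddlejs runner class defined above and exposed as
// window.Paddlejs. The method names (loadModel, predict, runStream, stopStream) and the
// option keys (modelPath, fileCount, feedShape, fetchShape, inputType, needPreheat) are the
// ones read from modelConfig in loadModel()/run(); the concrete values below are assumptions
// for illustration only, and the accepted input formats ultimately depend on the bundled
// io/feed modules. The function is declared but never invoked, so it has no effect at load time.
function paddlejsUsageSketch() {
    var runner = new window.Paddlejs({
        modelPath: 'model/mobilenet',      // directory containing model.json and chunk_*.dat (assumed path)
        fileCount: 4,                      // number of chunk_N.dat weight files (assumed)
        feedShape: { fw: 224, fh: 224 },   // width/height fed to the graph
        fetchShape: [1, 1000, 1, 1],       // output shape consumed by processData (assumed)
        inputType: 'image',
        needPreheat: true
    });
    return runner.loadModel().then(function () {
        var img = document.querySelector('#input');   // assumed <img> element in the host page
        // predict(input, callback): the input may also be a function that produces a frame;
        // with inputType 'video', runStream() re-runs on a timer until stopStream() is called.
        runner.predict(img, function (result) {
            console.log(result);
        });
    });
}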