From 0c3933dc8a56c6c45aca88566cdf0d235210f584 Mon Sep 17 00:00:00 2001 From: Adnane Belmadiaf Date: Mon, 21 Oct 2024 17:45:26 +0200 Subject: [PATCH] feat(GLTFImporter): add vtkGLTFImporter --- .../docs/gallery/GLTFImporterWithIcon.jpg | Bin 0 -> 11409 bytes Documentation/content/examples/index.md | 2 + .../IO/Geometry/GLTFImporter/Animations.js | 256 ++++++++ Sources/IO/Geometry/GLTFImporter/Constants.js | 85 +++ Sources/IO/Geometry/GLTFImporter/Decoder.js | 78 +++ .../IO/Geometry/GLTFImporter/Extensions.js | 103 +++ .../GLTFImporter/ORMTexture.worker.js | 35 + Sources/IO/Geometry/GLTFImporter/Parser.js | 505 +++++++++++++++ Sources/IO/Geometry/GLTFImporter/Reader.js | 613 ++++++++++++++++++ Sources/IO/Geometry/GLTFImporter/Utils.js | 195 ++++++ .../GLTFImporter/example/controller.html | 89 +++ .../IO/Geometry/GLTFImporter/example/index.js | 347 ++++++++++ Sources/IO/Geometry/GLTFImporter/index.d.ts | 266 ++++++++ Sources/IO/Geometry/GLTFImporter/index.js | 277 ++++++++ Sources/IO/Geometry/index.js | 2 + 15 files changed, 2853 insertions(+) create mode 100644 Documentation/content/docs/gallery/GLTFImporterWithIcon.jpg create mode 100644 Sources/IO/Geometry/GLTFImporter/Animations.js create mode 100644 Sources/IO/Geometry/GLTFImporter/Constants.js create mode 100644 Sources/IO/Geometry/GLTFImporter/Decoder.js create mode 100644 Sources/IO/Geometry/GLTFImporter/Extensions.js create mode 100644 Sources/IO/Geometry/GLTFImporter/ORMTexture.worker.js create mode 100644 Sources/IO/Geometry/GLTFImporter/Parser.js create mode 100644 Sources/IO/Geometry/GLTFImporter/Reader.js create mode 100644 Sources/IO/Geometry/GLTFImporter/Utils.js create mode 100644 Sources/IO/Geometry/GLTFImporter/example/controller.html create mode 100644 Sources/IO/Geometry/GLTFImporter/example/index.js create mode 100644 Sources/IO/Geometry/GLTFImporter/index.d.ts create mode 100644 Sources/IO/Geometry/GLTFImporter/index.js diff --git a/Documentation/content/docs/gallery/GLTFImporterWithIcon.jpg 
b/Documentation/content/docs/gallery/GLTFImporterWithIcon.jpg new file mode 100644 index 0000000000000000000000000000000000000000..88cd7c320e645f8cb2de311c92087c869c1c4c21 GIT binary patch literal 11409 zcmb7qWmp`|()Qv6cgVuxZb24zcUxS8I|O$U+!wb%kj35IHAsM$w*WYbvI?>QI5;?f>B|LpUIj=2aL_TZFwk(YFtPA( zaqvkPfh0skB%CyKAVxuM5n%ytem-$|BUN!}T^T-pbtetox8^psHlix7zAlzNMpiZ! ze;a|r#ls^ZBzXe_zOj(xm$dl*F3)`cTvRw#xH<$lY5+Vg90D%f^AG?C0Kg->boPG= z96SOd67mZsg#&;?1Rx;6VIm{L!^0y00B|qPxQKXENcfx*8fFB9)LalvSGVA#`unr4f>sq4+x*|5N@x;02+^{cC?|_T~B)hr@-(!>8gz z;L@O$a5Z~g0bsl!@VE%L05QN36}t22piIor=<@2vYEw#m;L6Xdb{>)x;|C5>UL1@n zXvQG-Xzzkrx855LQJSAdzZ>r@reJ=!jb6UKrqRWH`mdo03!*;(~J>^a<>!KN=fV#F-ShkEQ7(*X6ZR8(sfsh21{?bW-yRhVNcR!MVx5zW+i0 z`i?4eSd%No%GD)i{V2l;qx_H9Dxd;!!o|a=#OeQQdI9hyaZvw?Eg}^vA{F=*m4ZoO4q#NRmnVf*t% zFSitSvPv9mQM;6os>x*tjf*rN09|~)hu?Q6=PI*qRuIrbVpks+D_J4e%O5X@MVakV z2cxGQ3H8_y+H2Utrt}o+z(mz6T+zT$4&y)}%2AjH3An@0o3A7(-|xhMu2)R-9IfE1 z)*XgiB)ZW>i3vFA9>m923P9eb6zi5Nr0Uz8E`3X18nZg){=Ba=;y3Z85pGxIb|?wY z3eY(~-5+!E1uR#!fP0;=?Mr(S?*vk(g}P3yjssra*hAm08y#mwy?YiF8kGg23T$Lc z7Kk4xm@Ih9XuUU+*vRVyb_I#GhF~6P4_(usMPUrDiBYS3Wh3!Dv1k^6NZT@3HKskDnQta&<9E41WDqHr~I8~ zdlu^NfAW5d2tEoWLSx~q~@5VM_l{e zEb);h3v+|KI^y3ZwhrTIDdQ5tpV0d~DJXL^zY-~b1SMCrT34>s?d)#wW5l_Vex5tA zYG4|ant?3UTg#nuhH+o)ySFVbt{Zq!uC6U{Y9nN;Vj)$z%wu$Qf+0BDvJh$?wk;`R zi)lZ0|6etH?W=bRBm}@kAr z%0RLaNB~4oHAX{rumrXchQ=%A9uAl!^4o6N#S^`14D_V4?rX6v#WQy$9e2ovd0RK1 zh{hUY9mS6SK|?dfB3yJ8(@+&K-p1PKS(sWSRl{tgvQ!Da5|n$5px|!1o?>Idko@CL z+C=wAH|3Y*Z7*R28=tapr@Bg>!`kPFR>WzPX8_eB(+85V2l{6~${+UkLrYaFiQ{)W z82V&L$i_8!7ja^_-uJyLsQ>3fbN^XwjPG~eceQ)jH(!N*zdgnLML}Bo4B*_{eFoU_ zuitvMoypcPd_Hyp-Fkndz&m;dU^IMv(rlvj$vdSOWc@wYgMHh&nt15FykQw2f8N+N zdY>V8Z#kv2f_dob$^W+cIT73_*v=mwLLDL%1 zxqfeC7WX{ss|Bxkh z7EajzeOEy$s1nhh@4p6~eNYb=->a!z-t7K*jQvq<>@mKmah$Uzx;d>ra@}SAa_6F(Twa`9=ha75*A8!wYj`Q3b$5VdC zKUU;kle)`aeT%miM;Vj;0^#MRnmQGq;`xK~cJ1V<=ovuMQ+DR%3;B?=sco#Vuu1aB zkgi&DKQe>(D<@YgZ_j%HH1(BP?Mmnvmj3qq#s1pr=@}3gh@;lNysz*i7x)sD=6w@7 
zB=EbAqTJDkE8VL4A9{BwWcJ@LpO!Kr*|b&DAmWI*^?Xf4<#9Vej^9r)8M}e z>|f!hus3TNjdVjg~NvmVRlY%VbSbJZF1X(H9#bV?+4;-uVLNEU@0X<4mR zCiX@*d{_*V4r&l!{nL2V%5o;MZiyM%*uZr#v~5^QT<3O*p7xJNt4`kQZk@KPuUdYH z%-jEotV&|5mn;gV&4L3b2E$Orr<4EYQy`wo3jrfY;^ZH${0RK?YwaojA#Kd~U1Oeq z-aG|Y?cN`aZ^Ne9(W$~+brJg$lVtqYw-v^X=*=G0yenc~VBSLxrGy?eiM!}z-(SDB z@U{w#tvcwnHWAU0YI5}ra;8uMrV9nLQIuF*)N<50Ryaa8f!^524GtbW+xc_PfSscz zD_dd@=0$ckSZrE-?jeSID62rtbmE%~EYW>0Vp8GR=Gx#%#6(pHi_$?*?kn;DQs)Fa zFlO^edZ96q^+`!n8}vKv_pAgH!P`YP!nP9rL<5SNYaOoBZ(O0JSj^A^o+-L}Sq@oG z%=q(!E*C*c3oFdI0{3EfvRUTr39#qW%wskFHpW-G(uPc+TM)J=@-H7!{Ty~)c{0*a z2Rckz(8ecHBY&mtFUnAtv{J7fE^reUr>fvV=k}K@w1`R4={ppV1RJI;khG!)6f;4N zVaDJA#S>gjhkAM4GL!Q1Y^84%05G|wWscO|ifeA{#HE;E@zw_={qAHiMtP?N0)4+$ zyGy9xE1eYE5UuE9k(5VZpVg~s(6}O`-B}T zes*Sdt%~AwWvue@WLEL&MBJkUol4Z6=vUD*NCt3TnWS)DsU5upY8@6nz9|JpKt|#C zCar&>Odo%U!Ksi|7FJPAL%~6!tPVJSL!nW5BbDsT`Del;>cP3*@9B=e>6yo{*uh_$ zJ!h>+%0Rv0UCw-Qk9Vs2x1nzmLXkWU3@D$abVOh$?x6mGn2Q95{LH4K1Ho>Xf1kvb zZqn-XSxZqwQy@R<%=fz4cNa5C`%M%xuDnyDU$XHd-h~dIkabF~-oAG_o5B#baD8|F zt0pUSXxmk7P{Becm+iQ-ZhWi|+pJET)W*_kGJ2r3f9Dp<80O~Ema$z_U#Uj)(_r@t zO1@;xYkjcU2`^gK@vOY!bOq3!sjMU{GpK|}#!2cMHD4O)zF5L%?V4TQJA(#%e<}Hh2b{{ly z!{d!;vxM?2OB{V1#of`XclSE~EKp>L6X7Jg1T5aM3wAGK((NI7iDBj8syZvV*u# z>bh%dP(n`oTU<2ly4Lf~3EJN+u`@hyhb~(;XCZz9;V zvUQr1l)=k@vCiKenqh0CZW*G2Ug&d+(<0TvS+{uI7@t5CDoTv^8=TH#x3f{%N>G)> zaZ8HiQjgbSrwoPGrws9hmz%k={~IkYe8AFeS5c7G_&1GKvUzGYoyyLM&uOSmgX!$@ zZ56vyN%PWRittwHZb?! 
zdF3^|%L$!de?~e8FFDEzlCGUc!dG3Asodw5;1xJymK{#q z4mM!W(?YiCpl#8LFm6BDScSBV0@S@Gm}iEn99`b|EFx=0n+-y;+edKcyhoHidQ7#( z{Btjbb~+s@hyGC7^v$t`GI6fdyxM`CAxdj%BD}Dysgk+ApA4T2A0Z-!S;PM4Oa4>- zMaU2N*0rj%=HVd66Hx1XIt5KLGVlxAyeU&4aEr;Jm*4odG9o(IH(9A?!7wV;Fh|ao zaM?xZFHgN90g8F7xW4|A{5Y2$j)IDyY~086?+&ARsCh)HDA{KMG%}&Jp3ZaX$_scYLWts zN>GMUfeFHdsHvJH{{m3TD^CNfGNCX6QS<;l4>UvSF$YM0(}%LJl3(cg@R_FfKrxuK z%kHrWkaqV&*3XanJV&BaT`jMIt|G_04Ea)tu}y&yZ}kE)Oa^wF%js!YBlg@C8l)tv zp#;S|R#vtO4N0>?J@Vtf(La*7Z#dj(kI@vhgt!^+cM~v&8ZH$fYbekc1L6td%a}AO z4J|QInQs;0_-qPbyahVxqqCTaLI7p?&7plIgV_KEbqHI07x892C*DR}5pFFtKeMHm zW5+t0R74D810Xjw=+#!PNk>^($U&P(ScpJ)#VfrdqYZ@l%yJUzBi6 zDiJN`X2_Nd)ylY9Dw|9ZmvvhXLdX||2Hoa@Pr5?y6dyS1BS|lw0p_h8p;1S=+)jU1 z#NOT-kda9zK!ZMC4X6P1)(*RlcXnD-Cr%NXWvVdIz9iKMd*<@1gq*w0f>Mu|Xi5r0 zR7Ek%ojb;26|jg1Bx+=|mTfO}B2iqJVms>0ZERv`BAZp(ek);~8wIj(g0EJcp``NXAur-c+k>^d$*zYk{2auBIE4Qgw!a>5S#%MpAtFP)Df&(_W3}b4OLAOj?+d&X+$BcO zOBK>~p}a6c2VCg1zNUT8+&%lmXUFqdNYz zIn38Iq85KTYG^h;t!^kfIS{?$E!}m>cI3|r&+=prZK`)Z0E(c6pwLm+e|0eUPCc?y z`~gan(5ypC>4L_PFMB#tulCq`A*k!7Yxc=#5r4tPrJ#1er{<>oJ?HqEA6w|3*MX)T z5sPn3Lpw6Qp|?j`e{8QBD{m)Io?TjzC%)1~$b=iM_Q?kn_SLY@Wo}s6%+SZSY}FnE4doq-aa7o0UBgkyebv(n{AhJ3@IU&eNE zrh{eNXZL$yS|Zh8p%|inJq2YQ6{BE{2*mFp?HgkzTay^CO}Hyz@^MrXMwL=D?GKtO zJ6Vu1lJYs2AbXp;Or~wYzY}l!WpjS^Q}X!4I>VF(px=0{aPtgM$x2JHHO~tTQXoOh z(5Wor*oHpY75Fhuh7Rmxy{4qaw0#B?S6MXq4z?VMJ?{R|*lhnj|Dob|hnE*f#D6A- zAUl4|3HYvu4ZnppytFp{9oij*_<>nOL`=KO!o*9>bI$!ia1p3^Zh}3L;HzXeNI=Rg zE6LRaSE`~-w(<1!pzx2D|D6D$bK&A_B>`3?f1jD2J1?^fFJ@XT=Ib{-E5!sGYqbIqE$y8iaN+S?y~}Olx%+4(yK^tj*|{`KTTUoMT(W(We`e574jl{T9cc;!^&bTAe>k*;*+*D1iV++ zO$S-1qNI4G+~vl?ZthF}>Zq8`;5&hG$UJH@o)9jZaaWCk4I10LhGIMA8df=f8>xNM zh&Z`OIqG@LoDa!e4ywxVf*nS+U|D72wE`QJ+D^`jEG%JSuBLkbY>qx{9w{staGI#4SOx z`mmmOrrbrt+R=5qU#EOGqyxD!ht=Fn*gJwM*;9xNIXu4Kw3x|ek3isCDcmD*%+ zX*?FW!Py>f%2=a1I@I#oXe0}JOHF$vt$vbRw@IMOHY{U~e;?XOxf^MAn^tRcieh1M zae%y!5Uj^rHu(A5Fo7d>CtYC>WJ&s!RZlP-r|R;rU)}1Rn>H)28$MF)C?IUK3H=kl 
zH-<=^lVN=ul${6KdHVa88bZxnq%&bW@;Dd-8m%XCOl{*Ko)DF$}HN8*Uk$IHjTgYIRx0q@|gh`i*B#1AH7dI8_;Fa4479FgxTH1Ep- zzU2a|gbaMv8!b%{bGzEYs)hHs#t;kSbJBK$D-HaA#Ml0y2n!_Ob-SQaN7k;#(UHed-LIo*{{)QprF z_$4{FHr;mV+MBgH+GXpY{dKkEq8|I~F@I}xb{nR*F>Ogz@FQlC)8$=={0*x?^O~LB z+C3Ry`pAzFgk!|*+7#RvkPfAO1+hg)Aee)n?ziI)VFE`jAS!AV4^2bZ#(AV=gFxjB z!yHWT@)|B^T5(N|l(NAyL@^2|^2;-8__P-)`T0<-MMo(Xg$5o7BAmWlyJBjnq+Qej zqBeZUXL3b-QrnVnVnPyeUbOOk*a|w8ohw0^gPw`FXKu`AM{zt~CZ6VsmQcy)#Wea% zXhd1ta?_QAWf~a?x{3Y#_U|T7=E{NmH52S^fEXKYp8iE?L{YjMxhox`AhDIw?_@mzqRuQ;8dded| zP}MB<3l_;Cfa>>yGWxvhH{pg@Ejlu}FpoftwFXEZNg7kObB&{;t}h)QilFB{Y_omB z3I@DJT+$DGaqg_{=p}ODCVN-JgX2a zx@45fXQ?hTi3Ze`92(qJ+2(lJP{m$W#@JxarTCry;;7Lg_&w68gVHA--C~10AZm@y z)W8I19D8(s$1;sX!Fk`>Y;)nTb^=mUlSCoe;}%zfeZexwk-f7yUUu%==SEIww@RMm z_+cogD-pM`r;P3BlFNG%q*(`J`0dk1A1}XWK*RAQOUZkSQc4uM@zoDr35$PH$v+Tm zEe&6@U^z}P2Pu=0lVRNm$5Z#@ zgYFp1bMehKI+t82=D7=B`j8Lj#CPhP_sxc2w1ClI;U5IVWa#^iENkUeBBo8=K?30n z1`CR04bG9p17l=5mfQnjyzKLNX=_FVz|obH!30A!TFZNhw56m7b~Z5KQtsp&@6FnPljf_?AP)3BIsVI+;3?6n51u%~ct!>V^g%&s$4IHC2I!SeK+fKu2 zxaYnxR+HgO>U|uSmEDLd*4No!&~$Ld&@`^)Nge}Y&)E0l8T>ddR=PZDc}>k^`$XB{ zo50m>h<68h?DzO?h4+S_I_hvx*n>HS`8UsM_Gq}7>i6Nv9mDq#+^$>v zm91&G@S#4l(KE2ZNFF@&aBQNv^Jd$y#5?=}Ue+#OHef5poxATJqJVw~@Zhw!Q|`yp zm%>bTuT{$J!k^CoOM~mBS{(Im9sBw+gL+ZQ#jGh5aO}tqp+b;W)GOd|rL+uPsw-0l z7V()p?kJ^sd=QhB2!XrMbdr0bY9tp9BRO;aoc-r`dFd)o`9vRjAbhlQt1Sy#uOe5u zz(@hp)LvA$#+;TVyD(vOtf5OD3kaNOZhhe)&8m5X&)Cd9`!R;abdXIns($U z%*vz6bzr4;w~*Ek5{rQyDG!%-HiZ0C$vDzscq%c5Wed8;<(W7#l`O*dV<4xnMG~u; z*CEHw8${TRg%9|CgL@sr`ImA6BHZ_K1uZyAM)nEj>EA0x#hsem4R&wJgoD+7v+pix zJ6Sa1Atkada#Z=yC{FMvX2ZP;Coe4JsE$jF>JFbt6yFgD&)Ev~^OIb3V&Dd)?f$%} zK$l4oe@n=r;n0lXNaC|1KV@hsWMV}8hOPSJIvKK;>~~{QN3g3%bjY8w`_JP;&Cq;v zVNwaRU-TD~aX%2B0X%C+S7=eUdxlb0=H{uW_0)Vm!a1dH3QEmS<$0a^-HQcst-NHh z*ui2lD6kYSiceOygzqu&+Lrn=@V3~SY9*H&FWpW>J}sAodMWJp50>d)Vvz1C#ubbe zmoSoF7@Jy=JupVZo9XpUuhn>h$w7fkrm>l1a~VVL;{3`QCOI(G;HFtVDJ@CKaJbWm zS6Y-W;w#j=Vcx0>*`<+-`L<8dG3Zb-O&vURbC(u0BwSQcvnb^FXFa#~q+_+E#(E84 
zwp;t=R^4~P7MXXsTE}t77Oj_jXDQnRGUVQ!&ZuiIlR=acFh7WCFiz`_ZQt7QLc5jB z6J0*}O<@uApFe4CW}r72!`1W*i>zNN1KTs+LH_lU)AIfZU zX`w=}5L9j1oOBUSig>mw(xo-*dHtC7WvC0#+X+3KJhB9C{K`a8V)<aJu1UL$#>g*T3_1IdEfY4e6ceWSOxrvih=N@MD;hFNX6xzcZE56F+b z(eElX(^7Ia#tIMI8%)2)Uihl^Xapa5BQs6y=31JUEJ9X3?-FtV= z19NCxc^;(pI<`V5U0YCY=zhNJ=mB%dHSSxqShhXk(oJp}0+F;jQHQp5Pt0aB_-6Au zLMSnY)3X6MI-NSKae21&T#}|ZqlUh*m3fFQ0kbNhW=X@JCsQUZSEsPoaAt)a{t!9x z&b}vy!?!?$S6MMEJLh~LN1eCx?01at=4jyTANH;JJvK%EvTHs4jvg9@f#{V+dEzt~ zjYXCHnS%U6c;j@gHa%S zI^gzgv~e-l8x5^mtD#K+-^N0@n2nfP)NHkoL>CS9@w3Xjmh22n^4^47W1)946RVbF zU=NnTKS|{NuZeDXW8o8c5>0O!E?QN$zQwI;tN7c0C*7szIs;;JWf#O9!SyUu$@jfs z%b&#AnbdIOIJ>i{R|cryO4Xa3eu)oBD3)&0i6`IJ#2q6wB0CU_Y&PJ;m9>?fTdBl$ zH+H`0U{v6_G`(ehleLYR9 zzc(9BULGqo`^GXaELJPZm949|qnb#LHg;5{L>3`<=$u=xvxlh@CHf6JCH{BA!LfrR zA2Jh><5Qa|Yin}rV}rmWsxA8gv}tS8XTVZQJ#NB?z5QbxR?>(h&rMmhM>&sh`Zkyg z=yDgX*{WX#XRK3f!}Ce`AOyK54S|1A#UW(=wyxzk-A0yuRmr5nIPN4pz9fC^6}v_B zaYGN|hHqW95C4RtODEz{y!Y;mBe53OW4~u3c_p;oV3#0mA8Fps-KQ+MdcvntiGo!s+fGdSbLpVs@ zaxFOAzQ@4T^Rj=X!xMIiE(K$aPR4g~q4d*mP3jZ_=sQD$ZYtHfS4Dq#Z905dX%J*D zV4wd4BsB~3(*b{0-5wwo%(p@-QfsX1e?nx1vkfkhYll`%Q6dTGC+l)#vw;;?EC7At zs83R83na1F3|EvE2DVvbS`G1K<0xPwf#P^`X580gtCP}17xE##ctr-ji0G0rAbd>9 z;~>Uk4-I)9ciyEc;rB1}8t%?}xc+yoU@)+Ohm~gDp~OJmWiDBvytFnuZlN|2}sqj2HrLMUwEuh6vSNI7TlHxMRZuR z3lDID9N6Y5W+<_SXVSy)nyFAVbBUqQRQr|)06!uTUw)I3S>5?DAJYvRzifJQU-qI5 z9|!1zRGBL8emC>C=6Fq-&bo6~Hs2SLl&EoOo>~%{c{yULLgU6+x7(%k5e6z~QV&F9 zkzqb+X(<&6m&@kaeIx@5(tRA(A}2P_bnhgEfGx%w+bOOGY-$}P1&Dya~CuhQO zLOU9a?B`YDuUbcfwCL4AxWaCHNt?$(6_V{t5qwgS0}?EjXUR$`_m!+eqIP6FNJF8^ z_OwD3`idlsUex1UsbvK@snrBAjEFQ^_CdvytD(;T;L91MG4rX>Wc!`%pAXCEmwg}G zvrz1dny}1_Igkn-E8Ln0sxR&lBo6Q~$)@G^3y?L3s#>t#G9q@7tG=2O;GLUoLz0B; z)B0RPP^J@mS|SMFYO{uvzw*UU?@CBeC}UtfsYm2|1-%R~ z(#CmuG`1C}9WNdV!nv-c2xM$Udq`Sdj*f~!r!4d5LCD6?AcPBXF(}(N0TemeL4JaSJ=bfE4n}IOfQEG z;uP*X{v|VhLiV2V^5f{)iEggjUYycdw5G?bZiuH8OX>Pjt?-`rdzP;PuByzYo;)YY z<{Lg8i6b_hmnObrcY}n7pW1eDseapTj_t#~b^O_kxt@~q-idrVWEamhsM=W3;lpuU 
z21gnkdk+#HuEpP*|NfQ19dM=!+30e8VC;4%+$4LO=O(lpdwr%SoiWu-g){|1RT0!V z`|FvsiSYssep-}{{mQ`XS^ekYrT=v6_qD?Nk$!Uah^-^ig5S>0hkfrDm7=q}BEC?$ zAosEGzMr=NDT3_Ob`lgzvJ1M}i)R!L`?eIJG#!aoMR)86 zxfc%#hcyh+T6|z0aVOiVji#8|1vzftFdVjg2m&9H^<@{Uf8tvUwdC@OD;ym76ev^@ z_zXb%Gz|=)DUj&{`BKet7Tq(f6##dXmMKNT=z%Fli~fS;&YzVBV%R24*@*-pq4k6r zY{)lK23KKyXkAk$GM!}-*3q^w=UNgvo9smW0zV`*iXlzd{I6*}BixTgF4~FnT`wD% zR(`VC4Zbq%r6arv!ID_MkmRp3IwsiYvJS^t(I2s8@D3S&Oc!+Giz|KQAdnXxRZSL6 zB^1RuW6l^bTxtHkpmSJfXWy8V3C8lvU(&UKuE5!|+oTlGlaZ*tqAb5{FzGsy<972E38X3TRHTii)-YUe_OuN zlaEp2&Y-hufhJjpk@@N_dLBSIif znceNF5ML*uQLQAX-?x{@1Jm#%_f&|eVm~5K`a}TWfz2i7kuzrbPSRx-d2DI1vHe8+cY}daC-WfC<=jQ0pu#D{_B^_p!SGNezWqRs0c8)Vyizs5&xTsHT zR0=XefL-H5oiLcX0UI?ueJy{m0b_I&S_c6RKUMj=PZE^H-M+#QVSdJVDqK_FFBP)l zvq5lRZ78(aBddb!u+4`#UrC|4x(^ee+lr3AM0RlOuc=xSpnB0nrjvviPu*56l(~DU z2S108P}opXwey){#a`x?seuC%zFKU46by>N|Gcz9wvJg+!;n)Y>{!8x?Gfgt?kdoh z+8Tdc=426(U#7%px)_7_wTIbHv=N{#n|W|BPd=>goX%Kju*VW9R zVI6y`K?b3{eA>Y2rm7smxE)Eus9(py(XcVNAfJM!0c_e8lMi_~vGgSFgYZ g^)MxaB%&sNt)D0IM>trqY}>h1gf<1X63@&34}G04Z2$lO literal 0 HcmV?d00001 diff --git a/Documentation/content/examples/index.md b/Documentation/content/examples/index.md index c88f6b13c2a..fa5df2d007c 100644 --- a/Documentation/content/examples/index.md +++ b/Documentation/content/examples/index.md @@ -173,6 +173,7 @@ This will allow you to see the some live code running in your browser. 
Just pick [![PLYWriter Example][PLYWriterWithIcon]](./PLYWriter.html "PLY writer(ply)") [![STLReader Example][STLReaderWithIcon]](./STLReader.html "STL reader(stl)") [![STLWriter Example][STLWriterWithIcon]](./STLWriter.html "STL writer(stl)") +[![GLTFImporter Example][GLTFImporter]](./GLTFImporter.html "GLTF importer(gltf, glb)") [![PolyDataReader Example][PolyDataReaderWithIcon]](./PolyDataReader.html "VTK legacy reader(VTK)") [![ElevationReader Example][ElevationReaderWithIcon]](./ElevationReader.html "Elevation reader(CSV, JPG)") [![OBJReader Example][OBJReaderWithIcon]](./OBJReader.html "OBJ reader(OBJ, MTL, JPG)") @@ -195,6 +196,7 @@ This will allow you to see the some live code running in your browser. Just pick [PLYWriterWithIcon]: ../docs/gallery/PLYWriterWithIcon.jpg [STLReaderWithIcon]: ../docs/gallery/STLReaderWithIcon.jpg [STLWriterWithIcon]: ../docs/gallery/STLWriterWithIcon.jpg +[GLTFImporter]: ../docs/gallery/GLTFImporterWithIcon.jpg [PolyDataReaderWithIcon]: ../docs/gallery/VTKReaderWithIcon.jpg [ElevationReaderWithIcon]: ../docs/gallery/ElevationReaderWithIcon.jpg [OBJReaderWithIcon]: ../docs/gallery/OBJReaderWithIcon.jpg diff --git a/Sources/IO/Geometry/GLTFImporter/Animations.js b/Sources/IO/Geometry/GLTFImporter/Animations.js new file mode 100644 index 00000000000..8c044a9db63 --- /dev/null +++ b/Sources/IO/Geometry/GLTFImporter/Animations.js @@ -0,0 +1,256 @@ +import macro from 'vtk.js/Sources/macros'; +import * as vtkMath from 'vtk.js/Sources/Common/Core/Math'; +import { quat, vec3 } from 'gl-matrix'; + +const { vtkDebugMacro, vtkWarningMacro } = macro; + +/** + * Create an animation channel + * @param {glTFChannel} glTFChannel + * @param {glTFChannel[]} glTFSamplers + * @returns + */ +function createAnimationChannel(glTFChannel, glTFSamplers) { + const path = glTFChannel.target.path; + const node = glTFChannel.target.node; + + function applyAnimation(value) { + let axisAngle; + let w; + let nq; + switch (path) { + case 'translation': + 
node.setPosition(value[0], value[1], value[2]); + break; + case 'rotation': + // Convert quaternion to axis-angle representation + nq = quat.normalize(quat.create(), value); + axisAngle = new Float64Array(3); + w = quat.getAxisAngle(axisAngle, nq); + // Apply rotation using rotateWXYZ + node.rotateWXYZ( + vtkMath.degreesFromRadians(w), + axisAngle[0], + axisAngle[1], + axisAngle[2] + ); + break; + case 'scale': + node.setScale(value[0], value[1], value[2]); + break; + default: + vtkWarningMacro(`Unsupported animation path: ${path}`); + } + } + + function animate(currentTime) { + const sampler = glTFSamplers[glTFChannel.sampler]; + const value = sampler.evaluate(currentTime, path); + applyAnimation(value); + } + + return { ...glTFChannel, animate }; +} + +/** + * Create an animation sampler + * @param {glTFSampler} glTFSampler + * @returns + */ +function createAnimationSampler(glTFSampler) { + let lastKeyframeIndex = 0; + + function findKeyframes(time) { + let i1 = lastKeyframeIndex; + while (i1 < glTFSampler.input.length - 1 && glTFSampler.input[i1] <= time) { + i1++; + } + const i0 = Math.max(0, i1 - 1); + lastKeyframeIndex = i0; + return [glTFSampler.input[i0], glTFSampler.input[i1], i0, i1]; + } + + function stepInterpolate(path, i0) { + const startIndex = i0 * 3; + const v0 = new Array(3); + for (let i = 0; i < 3; ++i) { + v0[i] = glTFSampler.output[startIndex + i]; + } + + return v0; + } + + function linearInterpolate(path, t0, t1, i0, i1, t) { + const ratio = (t - t0) / (t1 - t0); + const startIndex = i0 * 4; + const endIndex = i1 * 4; + + const v0 = new Array(4); + const v1 = new Array(4); + for (let i = 0; i < 4; ++i) { + v0[i] = glTFSampler.output[startIndex + i]; + v1[i] = glTFSampler.output[endIndex + i]; + } + + switch (path) { + case 'translation': + case 'scale': + return vec3.lerp(vec3.create(), v0, v1, ratio); + case 'rotation': + return quat.slerp(quat.create(), v0, v1, ratio); + default: + vtkWarningMacro(`Unsupported animation path: ${path}`); + 
return null; + } + } + + function cubicSplineInterpolate(path, t0, t1, i0, i1, time) { + const dt = t1 - t0; + const t = (time - t0) / dt; + const t2 = t * t; + const t3 = t2 * t; + + const p0 = glTFSampler.output[i0 * 3 + 1]; + const m0 = dt * glTFSampler.output[i0 * 3 + 2]; + const p1 = glTFSampler.output[i1 * 3 + 1]; + const m1 = dt * glTFSampler.output[i1 * 3]; + + if (Array.isArray(p0)) { + return p0.map((v, j) => { + const a = 2 * t3 - 3 * t2 + 1; + const b = t3 - 2 * t2 + t; + const c = -2 * t3 + 3 * t2; + const d = t3 - t2; + return a * v + b * m0[j] + c * p1[j] + d * m1[j]; + }); + } + + const a = 2 * t3 - 3 * t2 + 1; + const b = t3 - 2 * t2 + t; + const c = -2 * t3 + 3 * t2; + const d = t3 - t2; + return a * p0 + b * m0 + c * p1 + d * m1; + } + + function evaluate(time, path) { + const [t0, t1, i0, i1] = findKeyframes(time); + + let result; + + switch (glTFSampler.interpolation) { + case 'STEP': + result = stepInterpolate(path, i0); + break; + case 'LINEAR': + result = linearInterpolate(path, t0, t1, i0, i1, time); + break; + case 'CUBICSPLINE': + result = cubicSplineInterpolate(path, t0, t1, i0, i1, time); + break; + default: + throw new Error( + `Unknown interpolation method: ${glTFSampler.interpolation}` + ); + } + return result; + } + + return { ...glTFSampler, evaluate }; +} + +/** + * Create an animation + * @param {glTFAnimation} glTFAnimation + * @param {Map} nodes + * @returns + */ +function createAnimation(glTFAnimation, nodes) { + glTFAnimation.samplers = glTFAnimation.samplers.map((sampler) => + createAnimationSampler(sampler) + ); + + glTFAnimation.channels = glTFAnimation.channels.map((channel) => { + channel.target.node = nodes.get(`node-${channel.target.node}`); + return createAnimationChannel(channel, glTFAnimation.samplers); + }); + + function update(currentTime) { + glTFAnimation.channels.forEach((channel) => channel.animate(currentTime)); + } + + return { ...glTFAnimation, update }; +} + +/** + * Create an animation mixer + * @param 
{Map} nodes + * @param {*} accessors + * @returns + */ +function createAnimationMixer(nodes, accessors) { + const animations = new Map(); + const activeAnimations = new Map(); + + function addAnimation(glTFAnimation) { + const annimation = createAnimation(glTFAnimation, nodes, accessors); + animations.set(glTFAnimation.id, annimation); + vtkDebugMacro(`Animation "${glTFAnimation.id}" added to mixer`); + } + + function play(name, weight = 1) { + if (!animations.has(name)) { + vtkWarningMacro(`Animation "${name}" not found in mixer`); + return; + } + activeAnimations.set(name, { + animation: animations.get(name), + weight, + time: 0, + }); + vtkDebugMacro(`Playing animation "${name}" with weight ${weight}`); + } + + function stop(name) { + if (activeAnimations.delete(name)) { + vtkWarningMacro(`Stopped animation "${name}"`); + } else { + vtkWarningMacro(`Animation "${name}" was not playing`); + } + } + + function stopAll() { + activeAnimations.clear(); + vtkWarningMacro('Stopped all animations'); + } + + function update(deltaTime) { + // Normalize weights + const totalWeight = Array.from(activeAnimations.values()).reduce( + (sum, { weight }) => sum + weight, + 0 + ); + + activeAnimations.forEach(({ animation, weight, time }, name) => { + const normalizedWeight = totalWeight > 0 ? 
weight / totalWeight : 0; + const newTime = time + deltaTime; + activeAnimations.set(name, { animation, weight, time: newTime }); + + vtkDebugMacro( + `Updating animation "${name}" at time ${newTime.toFixed( + 3 + )} with normalized weight ${normalizedWeight.toFixed(3)}` + ); + + animation.update(newTime, normalizedWeight); + }); + } + + return { addAnimation, play, stop, stopAll, update }; +} + +export { + createAnimation, + createAnimationChannel, + createAnimationMixer, + createAnimationSampler, +}; diff --git a/Sources/IO/Geometry/GLTFImporter/Constants.js b/Sources/IO/Geometry/GLTFImporter/Constants.js new file mode 100644 index 00000000000..e86353e40f2 --- /dev/null +++ b/Sources/IO/Geometry/GLTFImporter/Constants.js @@ -0,0 +1,85 @@ +export const BINARY_HEADER_MAGIC = 'glTF'; +export const BINARY_HEADER_LENGTH = 12; +export const BINARY_CHUNK_TYPES = { JSON: 0x4e4f534a, BIN: 0x004e4942 }; +export const BINARY_HEADER_INTS = 3; +export const BINARY_CHUNK_HEADER_INTS = 2; + +export const MIN_LIGHT_ATTENUATION = 0.01; + +export const COMPONENTS = { + SCALAR: 1, + VEC2: 2, + VEC3: 3, + VEC4: 4, + MAT2: 4, + MAT3: 9, + MAT4: 16, +}; + +export const BYTES = { + 5120: 1, // BYTE + 5121: 1, // UNSIGNED_BYTE + 5122: 2, // SHORT + 5123: 2, // UNSIGNED_SHORT + 5125: 4, // UNSIGNED_INT + 5126: 4, // FLOAT +}; + +export const MODES = { + GL_POINTS: 0, + GL_LINES: 1, + GL_LINE_LOOP: 2, + GL_LINE_STRIP: 3, + GL_TRIANGLES: 4, + GL_TRIANGLE_STRIP: 5, + GL_TRIANGLE_FAN: 6, +}; + +export const ARRAY_TYPES = { + 5120: Int8Array, + 5121: Uint8Array, + 5122: Int16Array, + 5123: Uint16Array, + 5125: Uint32Array, + 5126: Float32Array, +}; + +export const GL_SAMPLER = { + NEAREST: 9728, + LINEAR: 9729, + NEAREST_MIPMAP_NEAREST: 9984, + LINEAR_MIPMAP_NEAREST: 9985, + NEAREST_MIPMAP_LINEAR: 9986, + LINEAR_MIPMAP_LINEAR: 9987, + REPEAT: 10497, + CLAMP_TO_EDGE: 33071, + MIRRORED_REPEAT: 33648, + TEXTURE_MAG_FILTER: 10240, + TEXTURE_MIN_FILTER: 10241, + TEXTURE_WRAP_S: 10242, + 
TEXTURE_WRAP_T: 10243, +}; + +export const DEFAULT_SAMPLER = { + magFilter: GL_SAMPLER.NEAREST, + minFilter: GL_SAMPLER.LINEAR_MIPMAP_LINEAR, + wrapS: GL_SAMPLER.REPEAT, + wrapT: GL_SAMPLER.REPEAT, +}; + +export const SEMANTIC_ATTRIBUTE_MAP = { + NORMAL: 'normal', + POSITION: 'position', + TEXCOORD_0: 'texcoord0', + TEXCOORD_1: 'texcoord1', + WEIGHTS_0: 'weight', + JOINTS_0: 'joint', + COLOR_0: 'color', + TANGENT: 'tangent', +}; + +export const ALPHA_MODE = { + OPAQUE: 'OPAQUE', + MASK: 'MASK', + BLEND: 'BLEND', +}; diff --git a/Sources/IO/Geometry/GLTFImporter/Decoder.js b/Sources/IO/Geometry/GLTFImporter/Decoder.js new file mode 100644 index 00000000000..7a4011f1899 --- /dev/null +++ b/Sources/IO/Geometry/GLTFImporter/Decoder.js @@ -0,0 +1,78 @@ +import BinaryHelper from 'vtk.js/Sources/IO/Core/BinaryHelper'; +import { + BINARY_CHUNK_TYPES, + BINARY_CHUNK_HEADER_INTS, + BINARY_HEADER_INTS, + BINARY_HEADER_LENGTH, + BINARY_HEADER_MAGIC, +} from 'vtk.js/Sources/IO/Geometry/GLTFImporter/Constants'; + +function getChunkInfo(headerStart, data) { + const header = new Uint32Array(data, headerStart, BINARY_CHUNK_HEADER_INTS); + const chunkStart = headerStart + BINARY_CHUNK_HEADER_INTS * 4; + const chunkLength = header[0]; + const chunkType = header[1]; + return { start: chunkStart, length: chunkLength, type: chunkType }; +} + +function getAllChunkInfos(data) { + const infos = []; + let chunkStart = BINARY_HEADER_INTS * 4; + while (chunkStart < data.byteLength) { + const chunkInfo = getChunkInfo(chunkStart, data); + infos.push(chunkInfo); + chunkStart += chunkInfo.length + BINARY_CHUNK_HEADER_INTS * 4; + } + return infos; +} + +function getJsonFromChunk(chunkInfo, data) { + const chunkLength = chunkInfo.length; + const jsonStart = (BINARY_HEADER_INTS + BINARY_CHUNK_HEADER_INTS) * 4; + const jsonSlice = new Uint8Array(data, jsonStart, chunkLength); + const stringBuffer = BinaryHelper.arrayBufferToString(jsonSlice); + return JSON.parse(stringBuffer); +} + +function 
getBufferFromChunk(chunkInfo, data) { + return data.slice(chunkInfo.start, chunkInfo.start + chunkInfo.length); +} + +function parseGLB(data) { + let json; + const buffers = []; + + const headerView = new DataView(data, 0, BINARY_HEADER_LENGTH); + + const header = { + magic: BinaryHelper.arrayBufferToString(new Uint8Array(data, 0, 4)), + version: headerView.getUint32(4, true), + length: headerView.getUint32(8, true), + }; + + if (header.magic !== BINARY_HEADER_MAGIC) { + throw new Error('Unsupported glTF-Binary header.'); + } else if (header.version < 2.0) { + throw new Error('Unsupported legacy binary file detected.'); + } + + const chunkInfos = getAllChunkInfos(data); + + chunkInfos.forEach((chunkInfo) => { + if (chunkInfo.type === BINARY_CHUNK_TYPES.JSON && !json) { + json = getJsonFromChunk(chunkInfo, data); + } else if (chunkInfo.type === BINARY_CHUNK_TYPES.BIN) { + buffers.push(getBufferFromChunk(chunkInfo, data)); + } + }); + + if (!json) { + throw new Error('glTF-Binary: JSON content not found.'); + } + if (!buffers) { + throw new Error('glTF-Binary: Binary chunk not found.'); + } + return { json, buffers }; +} + +export default parseGLB; diff --git a/Sources/IO/Geometry/GLTFImporter/Extensions.js b/Sources/IO/Geometry/GLTFImporter/Extensions.js new file mode 100644 index 00000000000..3075fcc8187 --- /dev/null +++ b/Sources/IO/Geometry/GLTFImporter/Extensions.js @@ -0,0 +1,103 @@ +import macro from 'vtk.js/Sources/macros'; +import * as vtkMath from 'vtk.js/Sources/Common/Core/Math'; +import vtkDracoReader from 'vtk.js/Sources/IO/Geometry/DracoReader'; +import vtkLight from 'vtk.js/Sources/Rendering/Core/Light'; + +import { MIN_LIGHT_ATTENUATION } from 'vtk.js/Sources/IO/Geometry/GLTFImporter/Constants'; + +const { vtkWarningMacro } = macro; + +/** + * Handles the KHR_materials_unlit extension. + * + * @param {object} extension - The KHR_materials_unlit extension object. + * @param {vtkProperty} property - The vtkProperty instance to update. 
+ */ +export function handleKHRMaterialsUnlit(extension, property) { + property.setLighting(true); +} + +/** + * Handles the KHR_materials_ior extension. + * + * @param {object} extension - The KHR_materials_unlit extension object. + * @param {vtkProperty} property - The vtkProperty instance to update. + */ +export function handleKHRMaterialsIor(extension, property) { + property.setBaseIOR(extension.ior); +} + +/** + * Handles the KHR_materials_specular extension. + * @param {object} extension - The KHR_materials_specular extension object. + * @param {vtkProperty} property - The vtkProperty instance to update. + */ +export function handleKHRMaterialsSpecular(extension, property) { + property.setSpecular(extension.specularFactor); + property.setSpecularColor(extension.specularColorFactor); +} + +/** + * Handles the KHR_lights_punctual extension. + * + * @param {object} extension - The KHR_lights_punctual extension object. + * @param {vtkRenderer} renderer - The vtkRenderer instance to add the light to. 
+ */ +export function handleKHRLightsPunctual(extension, transformMatrix, model) { + const { light } = extension; + + const { color, intensity, range, spot, type } = light; + + const l = vtkLight.newInstance({ + color: color || [1, 1, 1], + intensity: intensity || 1.0, + }); + + // Apply the global transform to the light + l.setTransformMatrix(transformMatrix); + + // Handle range + if (range > 0) { + // Set quadratic values to get attenuation(range) ~= MIN_LIGHT_ATTENUATION + l.setAttenuationValues(1, 0, 1.0 / (range * range * MIN_LIGHT_ATTENUATION)); + } + + switch (type) { + case 'directional': + l.setPositional(false); + break; + case 'point': + l.setPositional(true); + l.setConeAngle(90); + break; + case 'spot': + l.setPositional(true); + l.setConeAngle(vtkMath.radiansFromDegrees(spot.outerConeAngle)); + break; + default: + vtkWarningMacro(`Unsupported light type: ${type}`); + } + + model.lights.set(light.name, l); +} + +/** + * Handles the KHR_draco_mesh_compression extension. + * + * @param {object} extension - The KHR_draco_mesh_compression extension object. + */ +export async function handleKHRDracoMeshCompression(extension) { + const reader = vtkDracoReader.newInstance(); + reader.parse(extension.bufferView); + return reader.getOutputData(); +} + +/** + * Handles the KHR_materials_variants extension. + * + * @param {object} extension - The KHR_materials_variants extension object. + * @param {object} model - The model object to update with variant information. 
+ */ +export function handleKHRMaterialsVariants(extension, model) { + model.variants = extension.variants.map((v) => v.name); +} diff --git a/Sources/IO/Geometry/GLTFImporter/ORMTexture.worker.js b/Sources/IO/Geometry/GLTFImporter/ORMTexture.worker.js new file mode 100644 index 00000000000..7c48f871149 --- /dev/null +++ b/Sources/IO/Geometry/GLTFImporter/ORMTexture.worker.js @@ -0,0 +1,35 @@ +import registerWebworker from 'webworker-promise/lib/register'; + +/** + * + * @param {ArrayBuffer} imageBuffer + * @param {string} mimeType + * @param {string} channel + * @returns {Promise} + */ +registerWebworker(async ({ imageBuffer, mimeType, channel }) => { + const channelsMap = { + r: 0, + g: 1, + b: 2, + }; + + const blob = new Blob([imageBuffer], { type: mimeType }); + const img = await createImageBitmap(blob); + const canvas = new OffscreenCanvas(img.width, img.height); + const ctx = canvas.getContext('2d'); + + ctx.drawImage(img, 0, 0, img.width, img.height); + const bitmap = ctx.getImageData(0, 0, img.width, img.height); + + if (channel) { + const idx = channelsMap[channel]; + for (let i = 0; i < bitmap.data.length; i += 4) { + const channelValue = bitmap.data[i + idx]; + bitmap.data[i] = channelValue; // red channel + bitmap.data[i + 1] = channelValue; // green channel + bitmap.data[i + 2] = channelValue; // blue channel + } + } + return { bitmap }; +}); diff --git a/Sources/IO/Geometry/GLTFImporter/Parser.js b/Sources/IO/Geometry/GLTFImporter/Parser.js new file mode 100644 index 00000000000..3aa87b00702 --- /dev/null +++ b/Sources/IO/Geometry/GLTFImporter/Parser.js @@ -0,0 +1,505 @@ +/* eslint-disable guard-for-in */ +/* eslint-disable no-restricted-syntax */ +/* eslint-disable class-methods-use-this */ +import macro from 'vtk.js/Sources/macros'; + +import { + ALPHA_MODE, + BYTES, + COMPONENTS, + DEFAULT_SAMPLER, + GL_SAMPLER, + MODES, + SEMANTIC_ATTRIBUTE_MAP, +} from 'vtk.js/Sources/IO/Geometry/GLTFImporter/Constants'; + +import { + 
getAccessorArrayTypeAndLength, + getGLEnumFromSamplerParameter, + resolveUrl, +} from 'vtk.js/Sources/IO/Geometry/GLTFImporter/Utils'; + +const { vtkDebugMacro, vtkWarningMacro } = macro; + +class GLTFParser { + constructor(glTF, options = {}) { + const { json, baseUri = '' } = glTF; + + this.glTF = glTF; + this.options = options; + this.baseUri = baseUri; + this.json = json; + this.extensions = json.extensions || {}; + this.extensionsUsed = json.extensionsUsed || []; + } + + async parse() { + const buffers = this.json.buffers || []; + this.buffers = new Array(buffers.length).fill(null); + + const images = this.json.images || []; + this.images = new Array(images.length).fill({}); + await this.loadBuffers(); + await this.loadImages(); + this.resolveTree(); + + return this.glTF.json; + } + + resolveTree() { + this.json.scenes = this.json.scenes?.map((scene, idx) => + this.resolveScene(scene, idx) + ); + + this.json.cameras = this.json.cameras?.map((camera, idx) => + this.resolveCamera(camera, idx) + ); + + this.json.bufferViews = this.json.bufferViews?.map((bufView, idx) => + this.resolveBufferView(bufView, idx) + ); + + this.json.images = this.json.images?.map((image, idx) => + this.resolveImage(image, idx) + ); + + this.json.samplers = this.json.samplers?.map((sampler, idx) => + this.resolveSampler(sampler, idx) + ); + + this.json.textures = this.json.textures?.map((texture, idx) => + this.resolveTexture(texture, idx) + ); + + this.json.accessors = this.json.accessors?.map((accessor, idx) => + this.resolveAccessor(accessor, idx) + ); + + this.json.materials = this.json.materials?.map((material, idx) => + this.resolveMaterial(material, idx) + ); + + this.json.meshes = this.json.meshes?.map((mesh, idx) => + this.resolveMesh(mesh, idx) + ); + + this.json.nodes = this.json.nodes?.map((node, idx) => + this.resolveNode(node, idx) + ); + + this.json.skins = this.json.skins?.map((skin, idx) => + this.resolveSkin(skin, idx) + ); + + this.json.animations = 
this.json.animations?.map((animation, idx) => + this.resolveAnimation(animation, idx) + ); + } + + get(array, index) { + // check if already resolved + if (typeof index === 'object') { + return index; + } + const object = this.json[array] && this.json[array][index]; + if (!object) { + vtkWarningMacro(`glTF file error: Could not find ${array}[${index}]`); + } + return object; + } + + resolveScene(scene, index) { + scene.id = scene.id || `scene-${index}`; + scene.nodes = (scene.nodes || []).map((node) => this.get('nodes', node)); + return scene; + } + + resolveNode(node, index) { + node.id = node.id || `node-${index}`; + if (node.children) { + node.children = node.children.map((child) => this.get('nodes', child)); + } + if (node.mesh !== undefined) { + node.mesh = this.get('meshes', node.mesh); + } else if (node.meshes !== undefined && node.meshes.length) { + node.mesh = node.meshes.reduce( + (accum, meshIndex) => { + const mesh = this.get('meshes', meshIndex); + accum.id = mesh.id; + accum.primitives = accum.primitives.concat(mesh.primitives); + return accum; + }, + { primitives: [] } + ); + } + if (node.camera !== undefined) { + node.camera = this.get('cameras', node.camera); + } + if (node.skin !== undefined) { + node.skin = this.get('skins', node.skin); + } + + // Fill punctual lights objects + if (node.extensions?.KHR_lights_punctual) { + node.extensions.KHR_lights_punctual.light = + this.extensions?.KHR_lights_punctual.lights[ + node.extensions.KHR_lights_punctual.light + ]; + } + return node; + } + + resolveSkin(skin, index) { + skin.id = skin.id || `skin-${index}`; + skin.inverseBindMatrices = this.get('accessors', skin.inverseBindMatrices); + return skin; + } + + resolveMesh(mesh, index) { + mesh.id = mesh.id || `mesh-${index}`; + if (mesh.primitives) { + mesh.primitives = mesh.primitives.map((primitive, idx) => { + const attributes = primitive.attributes; + primitive.name = `submesh-${idx}`; + primitive.attributes = {}; + for (const attribute in attributes) 
{ + const attr = SEMANTIC_ATTRIBUTE_MAP[attribute]; + primitive.attributes[attr] = this.get( + 'accessors', + attributes[attribute] + ); + } + if (primitive.indices !== undefined) { + primitive.indices = this.get('accessors', primitive.indices); + } + if (primitive.material !== undefined) { + primitive.material = this.get('materials', primitive.material); + } + if (primitive.mode === undefined) { + primitive.mode = MODES.GL_TRIANGLES; // Default one + } + + if (primitive.extensions?.KHR_draco_mesh_compression) { + vtkDebugMacro('Using Draco mesh compression'); + const bufferView = this.get( + 'bufferViews', + primitive.extensions.KHR_draco_mesh_compression.bufferView + ); + primitive.extensions.KHR_draco_mesh_compression.bufferView = + bufferView.data; + } + + return primitive; + }); + } + return mesh; + } + + resolveMaterial(material, index) { + material.id = material.id || `material-${index}`; + + if (material.alphaMode === undefined) + material.alphaMode = ALPHA_MODE.OPAQUE; + if (material.doubleSided === undefined) material.doubleSided = false; + if (material.alphaCutoff === undefined) material.alphaCutoff = 0.5; + + if (material.normalTexture) { + material.normalTexture = { ...material.normalTexture }; + material.normalTexture.texture = this.get( + 'textures', + material.normalTexture.index + ); + } + if (material.occlusionTexture) { + material.occlusionTexture = { ...material.occlusionTexture }; + material.occlusionTexture.texture = this.get( + 'textures', + material.occlusionTexture.index + ); + } + if (material.emissiveTexture) { + material.emissiveTexture = { ...material.emissiveTexture }; + material.emissiveTexture.texture = this.get( + 'textures', + material.emissiveTexture.index + ); + } + if (!material.emissiveFactor) { + material.emissiveFactor = material.emissiveTexture ? 
1 : 0; + } else material.emissiveFactor = material.emissiveFactor[0]; + + if (material.pbrMetallicRoughness) { + material.pbrMetallicRoughness = { ...material.pbrMetallicRoughness }; + const mr = material.pbrMetallicRoughness; + if (mr.baseColorTexture) { + mr.baseColorTexture = { ...mr.baseColorTexture }; + mr.baseColorTexture.texture = this.get( + 'textures', + mr.baseColorTexture.index + ); + } + if (mr.metallicRoughnessTexture) { + mr.metallicRoughnessTexture = { ...mr.metallicRoughnessTexture }; + mr.metallicRoughnessTexture.texture = this.get( + 'textures', + mr.metallicRoughnessTexture.index + ); + } + } else { + material.pbrMetallicRoughness = { + baseColorFactor: [1, 1, 1, 1], + metallicFactor: 1.0, + roughnessFactor: 1.0, + }; + } + return material; + } + + /** + * Take the values of a particular accessor from an interleaved buffer by + * gathering the various strided parts of the buffer + */ + getValueFromInterleavedBuffer( + buffer, + byteOffset, + byteStride, + bytesPerElement, + count + ) { + const result = new Uint8Array(count * bytesPerElement); + for (let i = 0; i < count; i++) { + const elementOffset = byteOffset + i * byteStride; + result.set( + new Uint8Array( + buffer.arrayBuffer.slice( + elementOffset, + elementOffset + bytesPerElement + ) + ), + i * bytesPerElement + ); + } + return result.buffer; + } + + resolveAccessor(accessor, index) { + accessor.id = accessor.id || `accessor-${index}`; + if (accessor.bufferView !== undefined) { + // Draco encoded meshes don't have bufferView + accessor.bufferView = this.get('bufferViews', accessor.bufferView); + } + + // Look up enums + accessor.bytesPerComponent = BYTES[accessor.componentType]; + accessor.components = COMPONENTS[accessor.type]; + accessor.bytesPerElement = accessor.bytesPerComponent * accessor.components; + + // Create TypedArray for the accessor + // Note: The canonical way to instantiate is to ignore this array and create + // WebGLBuffer's using the bufferViews. 
+ if (accessor.bufferView) { + const buffer = accessor.bufferView.buffer; + const { ArrayType, byteLength } = getAccessorArrayTypeAndLength( + accessor, + accessor.bufferView + ); + const byteOffset = + (accessor.bufferView.byteOffset || 0) + + (accessor.byteOffset || 0) + + buffer.byteOffset; + + let slicedBufffer = buffer.arrayBuffer.slice( + byteOffset, + byteOffset + byteLength + ); + + if (accessor.bufferView.byteStride) { + slicedBufffer = this.getValueFromInterleavedBuffer( + buffer, + byteOffset, + accessor.bufferView.byteStride, + accessor.bytesPerElement, + accessor.count + ); + } + accessor.value = new ArrayType(slicedBufffer); + } + + return accessor; + } + + resolveTexture(texture, index) { + texture.id = texture.id || `texture-${index}`; + texture.sampler = + 'sampler' in texture + ? this.get('samplers', texture.sampler) + : DEFAULT_SAMPLER; + + texture.source = this.get('images', texture.source); + + // Handle texture extensions sources + if (texture.extensions !== undefined) { + const extensionsNames = Object.keys(texture.extensions); + extensionsNames.forEach((extensionName) => { + const extension = texture.extensions[extensionName]; + switch (extensionName) { + case 'KHR_texture_basisu': + case 'EXT_texture_webp': + case 'EXT_texture_avif': + texture.source = this.get('images', extension.source); + break; + default: + vtkWarningMacro(`Unhandled extension: ${extensionName}`); + } + }); + } + return texture; + } + + resolveSampler(sampler, index) { + sampler.id = sampler.id || `sampler-${index}`; + + if (!Object.hasOwn(sampler, 'wrapS')) sampler.wrapS = GL_SAMPLER.REPEAT; + if (!Object.hasOwn(sampler, 'wrapT')) sampler.wrapT = GL_SAMPLER.REPEAT; + + if (!Object.hasOwn(sampler, 'minFilter')) + sampler.minFilter = GL_SAMPLER.LINEAR_MIPMAP_LINEAR; + if (!Object.hasOwn(sampler, 'magFilter')) + sampler.magFilter = GL_SAMPLER.NEAREST; + + // Map textual parameters to GL parameter values + sampler.parameters = {}; + for (const key in sampler) { + const 
glEnum = getGLEnumFromSamplerParameter(key); + if (glEnum !== undefined) { + sampler.parameters[glEnum] = sampler[key]; + } + } + return sampler; + } + + resolveImage(image, index) { + image.id = image.id || `image-${index}`; + if (image.bufferView !== undefined) { + image.bufferView = this.get('bufferViews', image.bufferView); + } + return image; + } + + resolveBufferView(bufferView, index) { + bufferView.id = bufferView.id || `bufferView-${index}`; + const bufferIndex = bufferView.buffer; + bufferView.buffer = this.buffers[bufferIndex]; + + const arrayBuffer = this.buffers[bufferIndex].arrayBuffer; + let byteOffset = this.buffers[bufferIndex].byteOffset || 0; + + if ('byteOffset' in bufferView) { + byteOffset += bufferView.byteOffset; + } + + bufferView.data = new Uint8Array( + arrayBuffer, + byteOffset, + bufferView.byteLength + ); + return bufferView; + } + + resolveCamera(camera, index) { + camera.id = camera.id || `camera-${index}`; + return camera; + } + + resolveAnimation(animation, index) { + animation.id = animation.id || `animation-${index}`; + animation.samplers.map((sampler) => { + sampler.input = this.get('accessors', sampler.input).value; + sampler.output = this.get('accessors', sampler.output).value; + return sampler; + }); + return animation; + } + + loadBuffers() { + const promises = this.json.buffers.map((buffer, idx) => + this.loadBuffer(buffer, idx).then(() => { + delete buffer.uri; + }) + ); + return Promise.all(promises); + } + + async loadBuffer(buffer, index) { + let arrayBuffer = buffer; + + if (buffer.uri) { + vtkDebugMacro('Loading uri', buffer.uri); + const uri = resolveUrl(buffer.uri, this.options.baseUri); + const response = await fetch(uri); + arrayBuffer = await response.arrayBuffer(); + } else if (this.glTF.glbBuffers) { + arrayBuffer = this.glTF.glbBuffers[index]; + } + + this.buffers[index] = { + arrayBuffer, + byteOffset: 0, + byteLength: arrayBuffer.byteLength, + }; + } + + loadImages() { + const images = this.json.images || 
[]; + const promises = []; + + return new Promise((resolve, reject) => { + for (let i = 0; i < images.length; ++i) { + promises.push( + Promise.resolve( + this.loadImage(images[i], i).then(() => { + vtkDebugMacro('Texture loaded ', images[i]); + }) + ) + ); + } + + Promise.all(promises).then(() => resolve(this.images)); + }); + } + + async loadImage(image, index) { + let arrayBuffer; + let buffer; + + if (image.uri) { + vtkDebugMacro('Loading texture', image.uri); + const uri = resolveUrl(image.uri, this.options.baseUri); + const response = await fetch(uri); + + arrayBuffer = await response.arrayBuffer(); + image.uri = uri; + image.bufferView = { + data: arrayBuffer, + }; + } else if (image.bufferView) { + const bufferView = this.get('bufferViews', image.bufferView); + buffer = this.get('buffers', bufferView.buffer); + + // GLB buffer + if (this.glTF.glbBuffers) { + buffer = this.glTF.glbBuffers[bufferView.buffer]; + arrayBuffer = buffer.slice( + bufferView.byteOffset, + bufferView.byteOffset + bufferView.byteLength + ); + } + + image.bufferView = { + data: arrayBuffer, + }; + } + } +} + +export default GLTFParser; diff --git a/Sources/IO/Geometry/GLTFImporter/Reader.js b/Sources/IO/Geometry/GLTFImporter/Reader.js new file mode 100644 index 00000000000..bcea47de781 --- /dev/null +++ b/Sources/IO/Geometry/GLTFImporter/Reader.js @@ -0,0 +1,613 @@ +import macro from 'vtk.js/Sources/macros'; +import * as vtkMath from 'vtk.js/Sources/Common/Core/Math'; + +import vtkActor from 'vtk.js/Sources/Rendering/Core/Actor'; +import vtkCamera from 'vtk.js/Sources/Rendering/Core/Camera'; +import vtkDataArray from 'vtk.js/Sources/Common/Core/DataArray'; +import vtkPolyData from 'vtk.js/Sources/Common/DataModel/PolyData'; +import vtkMapper from 'vtk.js/Sources/Rendering/Core/Mapper'; +import vtkCellArray from 'vtk.js/Sources/Common/Core/CellArray'; +import vtkTransform from 'vtk.js/Sources/Common/Transform/Transform'; +import GLTFParser from 
'vtk.js/Sources/IO/Geometry/GLTFImporter/Parser'; +import { + ALPHA_MODE, + MODES, + SEMANTIC_ATTRIBUTE_MAP, +} from 'vtk.js/Sources/IO/Geometry/GLTFImporter/Constants'; +import { + createVTKTextureFromGLTFTexture, + loadImage, +} from 'vtk.js/Sources/IO/Geometry/GLTFImporter/Utils'; +import { + handleKHRDracoMeshCompression, + handleKHRLightsPunctual, + handleKHRMaterialsIor, + handleKHRMaterialsSpecular, + handleKHRMaterialsUnlit, + handleKHRMaterialsVariants, +} from 'vtk.js/Sources/IO/Geometry/GLTFImporter/Extensions'; + +import { mat4, quat, vec3 } from 'gl-matrix'; + +const { vtkWarningMacro, vtkDebugMacro } = macro; + +/** + * Parses a GLTF objects + * @param {Object} gltf - The GLTF object to parse + * @returns {glTF} The parsed GLTF object + */ +async function parseGLTF(gltf, options) { + const parser = new GLTFParser(gltf, options); + const tree = await parser.parse(); + return tree; +} + +/** + * Creates VTK polydata from a GLTF mesh + * @param {GLTFMesh} mesh - The GLTF mesh + * @returns {vtkPolyData} The created VTK polydata + */ +async function createPolyDataFromGLTFMesh(mesh) { + const primitive = mesh.primitives[0]; // For simplicity, we'll just use the first primitive + + if (!primitive || !primitive.attributes) { + vtkWarningMacro('Mesh has no position data, skipping'); + return null; + } + + const mode = primitive.mode; + + if (primitive.extensions?.KHR_draco_mesh_compression) { + return handleKHRDracoMeshCompression( + primitive.extensions.KHR_draco_mesh_compression + ); + } + + const polyData = vtkPolyData.newInstance(); + const cells = vtkCellArray.newInstance(); + const pointData = polyData.getPointData(); + + const attrs = Object.entries(primitive.attributes); + attrs.forEach(async ([attributeName, accessor]) => { + switch (attributeName) { + case SEMANTIC_ATTRIBUTE_MAP.POSITION: { + const position = primitive.attributes.position.value; + polyData + .getPoints() + .setData(position, primitive.attributes.position.component); + break; + } + 
case SEMANTIC_ATTRIBUTE_MAP.NORMAL: { + const normals = primitive.attributes.normal.value; + pointData.setNormals( + vtkDataArray.newInstance({ + name: 'Normals', + values: normals, + numberOfComponents: primitive.attributes.normal.components, + }) + ); + break; + } + case SEMANTIC_ATTRIBUTE_MAP.COLOR_0: { + const color = primitive.attributes.color.value; + pointData.setScalars( + vtkDataArray.newInstance({ + name: 'Scalars', + values: color, + numberOfComponents: primitive.attributes.color.components, + }) + ); + break; + } + case SEMANTIC_ATTRIBUTE_MAP.TEXCOORD_0: { + const tcoords0 = primitive.attributes.texcoord0.value; + const da = vtkDataArray.newInstance({ + name: 'TEXCOORD_0', + values: tcoords0, + numberOfComponents: primitive.attributes.texcoord0.components, + }); + pointData.addArray(da); + pointData.setActiveTCoords(da.getName()); + break; + } + case SEMANTIC_ATTRIBUTE_MAP.TEXCOORD_1: { + const tcoords = primitive.attributes.texcoord1.value; + const dac = vtkDataArray.newInstance({ + name: 'TEXCOORD_1', + values: tcoords, + numberOfComponents: primitive.attributes.texcoord1.components, + }); + pointData.addArray(dac); + break; + } + case SEMANTIC_ATTRIBUTE_MAP.TANGENT: { + const tangent = primitive.attributes.tangent.value; + const dat = vtkDataArray.newInstance({ + name: 'Tangents', + values: tangent, + numberOfComponents: primitive.attributes.tangent.components, + }); + pointData.addArray(dat); + break; + } + default: + vtkWarningMacro(`Unhandled attribute: ${attributeName}`); + } + }); + + // Handle indices if available + if (primitive.indices !== undefined) { + const indices = primitive.indices.value; + const nCells = indices.length - 2; + switch (mode) { + case MODES.GL_LINE_STRIP: + case MODES.GL_TRIANGLE_STRIP: + case MODES.GL_LINE_LOOP: + vtkWarningMacro('GL_LINE_LOOP not implemented'); + break; + default: + cells.resize((4 * indices.length) / 3); + for (let cellId = 0; cellId < nCells; cellId += 3) { + const cell = indices.slice(cellId, cellId 
+ 3); + cells.insertNextCell(cell); + } + } + } + + switch (mode) { + case MODES.GL_TRIANGLES: + case MODES.GL_TRIANGLE_FAN: + polyData.setPolys(cells); + break; + case MODES.GL_LINES: + case MODES.GL_LINE_STRIP: + case MODES.GL_LINE_LOOP: + polyData.setLines(cells); + break; + case MODES.GL_POINTS: + polyData.setVerts(cells); + break; + case MODES.GL_TRIANGLE_STRIP: + polyData.setStrips(cells); + break; + default: + vtkWarningMacro('Invalid primitive draw mode. Ignoring connectivity.'); + } + + return polyData; +} + +/** + * Creates a VTK property from a GLTF material + * @param {*} model - The vtk model object + * @param {GLTFMaterial} material - The GLTF material + * @param {vtkActor} actor - The VTK actor + */ +async function createPropertyFromGLTFMaterial(model, material, actor) { + let metallicFactor = 1.0; + let roughnessFactor = 1.0; + const emissiveFactor = material.emissiveFactor; + + const property = actor.getProperty(); + const pbr = material.pbrMetallicRoughness; + + if (pbr !== undefined) { + if ( + !pbr?.metallicFactor || + pbr?.metallicFactor <= 0 || + pbr?.metallicFactor >= 1 + ) { + vtkWarningMacro( + 'Invalid material.pbrMetallicRoughness.metallicFactor value. Using default value instead.' + ); + } else metallicFactor = pbr.metallicFactor; + if ( + !pbr?.roughnessFactor || + pbr?.roughnessFactor <= 0 || + pbr?.roughnessFactor >= 1 + ) { + vtkWarningMacro( + 'Invalid material.pbrMetallicRoughness.roughnessFactor value. Using default value instead.' 
+ ); + } else roughnessFactor = pbr.roughnessFactor; + + const color = pbr.baseColorFactor; + + if (color !== undefined) { + property.setDiffuseColor(color[0], color[1], color[2]); + property.setOpacity(color[3]); + } + + property.setMetallic(metallicFactor); + property.setRoughness(roughnessFactor); + property.setEmission(emissiveFactor); + + if (pbr.baseColorTexture) { + const extensions = pbr.baseColorTexture.extensions; + const tex = pbr.baseColorTexture.texture; + + if (tex.extensions !== undefined) { + const extensionsNames = Object.keys(tex.extensions); + extensionsNames.forEach((extensionName) => { + // TODO: Handle KHR_texture_basisu extension + // const extension = tex.extensions[extensionName]; + switch (extensionName) { + default: + vtkWarningMacro(`Unhandled extension: ${extensionName}`); + } + }); + } + + const sampler = tex.sampler; + const image = await loadImage(tex.source); + const diffuseTex = createVTKTextureFromGLTFTexture( + image, + sampler, + extensions + ); + + // FIXME: Workaround for textures not showing up in WebGL + const viewAPI = model.renderer.getRenderWindow(); + const isWebGL = viewAPI.getViews()[0].isA('vtkOpenGLRenderWindow'); + if (isWebGL) { + actor.addTexture(diffuseTex); + } else { + property.setDiffuseTexture(diffuseTex); + } + } + + if (pbr.metallicRoughnessTexture) { + const extensions = pbr.metallicRoughnessTexture.extensions; + const tex = pbr.metallicRoughnessTexture.texture; + const sampler = tex.sampler; + const metallicImage = await loadImage(tex.source, 'b'); + const metallicTex = createVTKTextureFromGLTFTexture( + metallicImage, + sampler, + extensions + ); + property.setMetallicTexture(metallicTex); + + const roughnessImage = await loadImage(tex.source, 'g'); + const roughnessTex = createVTKTextureFromGLTFTexture( + roughnessImage, + sampler, + extensions + ); + property.setRoughnessTexture(roughnessTex); + } + + // Handle ambient occlusion texture (occlusionTexture) + if (material.occlusionTexture) { + const 
extensions = material.occlusionTexture.extensions; + const tex = material.occlusionTexture.texture; + const sampler = tex.sampler; + const aoImage = await loadImage(tex.source, 'r'); + const aoTex = createVTKTextureFromGLTFTexture( + aoImage, + sampler, + extensions + ); + property.setAmbientOcclusionTexture(aoTex); + } + + // Handle emissive texture (emissiveTexture) + if (material.emissiveTexture) { + const extensions = material.emissiveTexture.extensions; + const tex = material.emissiveTexture.texture; + const sampler = tex.sampler; + const emissiveImage = await loadImage(tex.source); + const emissiveTex = createVTKTextureFromGLTFTexture( + emissiveImage, + sampler, + extensions + ); + property.setEmissionTexture(emissiveTex); + + // Handle multiple UVs + if (material.emissiveTexture.texCoord !== undefined) { + const pd = actor.getMapper().getInputData().getPointData(); + pd.setActiveTCoords(`TEXCOORD_${material.emissiveTexture.texCoord}`); + } + } + + // Handle normal texture (normalTexture) + if (material.normalTexture) { + const extensions = material.normalTexture.extensions; + const tex = material.normalTexture.texture; + const sampler = tex.sampler; + const normalImage = await loadImage(tex.source); + const normalTex = createVTKTextureFromGLTFTexture( + normalImage, + sampler, + extensions + ); + property.setNormalTexture(normalTex); + + if (material.normalTexture.scale !== undefined) { + property.setNormalStrength(material.normalTexture.scale); + } + } + } + + // Material extensions + if (material.extensions !== undefined) { + const extensionsNames = Object.keys(material.extensions); + extensionsNames.forEach((extensionName) => { + const extension = material.extensions[extensionName]; + switch (extensionName) { + case 'KHR_materials_unlit': + handleKHRMaterialsUnlit(extension, property); + break; + case 'KHR_materials_ior': + handleKHRMaterialsIor(extension, property); + break; + case 'KHR_materials_specular': + handleKHRMaterialsSpecular(extension, 
property); + break; + default: + vtkWarningMacro(`Unhandled extension: ${extensionName}`); + } + }); + } + + if (material.alphaMode !== ALPHA_MODE.OPAQUE) { + actor.setForceTranslucent(true); + } + + property.setBackfaceCulling(!material.doubleSided); +} + +/** + * Handles primitive extensions + * @param {*} extensions The extensions object + * @param {*} model The vtk model object + * @param {GLTFNode} node The GLTF node + */ +function handlePrimitiveExtensions(extensions, model, node) { + const extensionsNames = Object.keys(extensions); + extensionsNames.forEach((extensionName) => { + const extension = extensions[extensionName]; + switch (extensionName) { + case 'KHR_materials_variants': + model.variantMappings.set(node.id, extension.mappings); + break; + default: + vtkWarningMacro(`Unhandled extension: ${extensionName}`); + } + }); +} + +/** + * Creates a VTK actor from a GLTF mesh + * @param {GLTFMesh} mesh - The GLTF mesh + * @returns {vtkActor} The created VTK actor + */ +async function createActorFromGTLFNode(model, node, worldMatrix) { + const actor = vtkActor.newInstance(); + const mapper = vtkMapper.newInstance(); + mapper.setColorModeToDirectScalars(); + actor.setMapper(mapper); + actor.setUserMatrix(worldMatrix); + + if (node.mesh !== undefined) { + const polyData = await createPolyDataFromGLTFMesh(node.mesh); + mapper.setInputData(polyData); + + const primitive = node.mesh.primitives[0]; // the first one for now + + // Support for materials + if (primitive.material !== undefined) { + await createPropertyFromGLTFMaterial(model, primitive.material, actor); + } + + if (primitive.extensions !== undefined) { + handlePrimitiveExtensions(primitive.extensions, model, node); + } + } else { + const polyData = vtkPolyData.newInstance(); + mapper.setInputData(polyData); + } + + return actor; +} + +/** + * + * @param {GLTFAnimation} animation + * @returns + */ +function createGLTFAnimation(animation) { + vtkDebugMacro('Creating animation:', animation); + return { + 
name: animation.name, + channels: animation.channels, + samplers: animation.samplers, + getChannelByTargetNode(nodeIndex) { + return this.channels.filter( + (channel) => channel.target.node === nodeIndex + ); + }, + }; +} + +/** + * Gets the transformation matrix for a GLTF node + * @param {GLTFNode} node - The GLTF node + * @returns {mat4} The transformation matrix + */ +function getTransformationMatrix(node) { + // TRS + const translation = node.translation ?? vec3.create(); + const rotation = node.rotation ?? quat.create(); + const scale = node.scale ?? vec3.fromValues(1.0, 1.0, 1.0); + + const matrix = + node.matrix !== undefined + ? mat4.clone(node.matrix) + : mat4.fromRotationTranslationScale( + mat4.create(), + rotation, + translation, + scale + ); + return matrix; +} + +/** + * Processes a GLTF node + * @param {GLTFnode} node - The GLTF node + * @param {object} model The model object + * @param {vtkActor} parentActor The parent actor + * @param {mat4} parentMatrix The parent matrix + */ +async function processNode( + node, + model, + parentActor = null, + parentMatrix = mat4.create() +) { + node.transform = getTransformationMatrix(node); + const worldMatrix = mat4.multiply( + mat4.create(), + parentMatrix, + node.transform + ); + + // Create actor for the current node + const actor = await createActorFromGTLFNode(model, node, worldMatrix); + if (actor) { + actor.setUserMatrix(worldMatrix); + if (parentActor) { + actor.setParentProp(parentActor); + } + model.actors.set(node.id, actor); + } + + // Handle KHRLightsPunctual extension + if (node.extensions?.KHR_lights_punctual) { + handleKHRLightsPunctual( + node.extensions.KHR_lights_punctual, + node.transform, + model + ); + } + + if ( + node.children && + Array.isArray(node.children) && + node.children.length > 0 + ) { + await Promise.all( + node.children.map(async (child) => { + const parent = model.actors.get(node.id); + await processNode(child, model, parent, worldMatrix); + }) + ); + } +} + +/** + * 
Creates VTK actors from a GLTF object + * @param {glTF} glTF - The GLTF object + * @param {number} sceneId - The scene index to create actors for + * @returns {vtkActor[]} The created VTK actors + */ +async function createVTKObjects(model) { + model.animations = model.glTFTree.animations?.map(createGLTFAnimation); + + const extensionsNames = Object.keys(model.glTFTree?.extensions || []); + extensionsNames.forEach((extensionName) => { + const extension = model.glTFTree.extensions[extensionName]; + switch (extensionName) { + case 'KHR_materials_variants': + handleKHRMaterialsVariants(extension, model); + break; + case 'KHR_draco_mesh_compression': + break; + default: + vtkWarningMacro(`Unhandled extension: ${extensionName}`); + } + }); + + // Get the sceneId to process + const sceneId = model.sceneId ?? model.glTFTree.scene; + if (model.glTFTree.scenes?.length && model.glTFTree.scenes[sceneId]?.nodes) { + await Promise.all( + model.glTFTree.scenes[sceneId].nodes.map(async (node) => { + if (node) { + await processNode(node, model); + } else { + vtkWarningMacro(`Node not found in glTF.nodes`); + } + }) + ); + } else { + vtkWarningMacro('No valid scenes found in the glTF data'); + } +} + +/** + * Sets up the camera for a vtk renderer based on the bounds of the given actors. 
+ * + * @param {GLTCamera} camera - The GLTF camera object + */ +function GLTFCameraToVTKCamera(glTFCamera) { + const camera = vtkCamera.newInstance(); + if (glTFCamera.type === 'perspective') { + const { yfov, znear, zfar } = glTFCamera.perspective; + camera.setClippingRange(znear, zfar); + camera.setParallelProjection(false); + camera.setViewAngle(vtkMath.degreesFromRadians(yfov)); + } else if (glTFCamera.type === 'orthographic') { + const { ymag, znear, zfar } = glTFCamera.orthographic; + camera.setClippingRange(znear, zfar); + camera.setParallelProjection(true); + camera.setParallelScale(ymag); + } else { + throw new Error('Unsupported camera type'); + } + + return camera; +} + +/** + * + * @param {vtkCamera} camera + * @param {*} transformMatrix + */ +function applyTransformToCamera(camera, transformMatrix) { + if (!camera || !transformMatrix) { + return; + } + + // At identity, camera position is origin, +y up, -z view direction + const position = [0, 0, 0]; + const viewUp = [0, 1, 0]; + const focus = [0, 0, -1]; + + const t = vtkTransform.newInstance(); + t.setMatrix(transformMatrix); + + // Transform position + t.transformPoint(position, position); + t.transformPoints(viewUp, viewUp); + t.transformPoints(focus, focus); + + focus[0] += position[0]; + focus[1] += position[1]; + focus[2] += position[2]; + + // Apply the transformed values to the camera + camera.setPosition(position); + camera.setFocalPoint(focus); + camera.setViewUp(viewUp); +} + +export { + applyTransformToCamera, + createPropertyFromGLTFMaterial, + parseGLTF, + createVTKObjects, + GLTFCameraToVTKCamera, +}; diff --git a/Sources/IO/Geometry/GLTFImporter/Utils.js b/Sources/IO/Geometry/GLTFImporter/Utils.js new file mode 100644 index 00000000000..5f98fdc91be --- /dev/null +++ b/Sources/IO/Geometry/GLTFImporter/Utils.js @@ -0,0 +1,195 @@ +import WebworkerPromise from 'webworker-promise'; +import macro from 'vtk.js/Sources/macros'; +import vtkTexture from 'vtk.js/Sources/Rendering/Core/Texture'; 
+import Worker from 'vtk.js/Sources/IO/Geometry/GLTFImporter/ORMTexture.worker'; +import { + BYTES, + COMPONENTS, + ARRAY_TYPES, + GL_SAMPLER, +} from 'vtk.js/Sources/IO/Geometry/GLTFImporter/Constants'; + +const { vtkWarningMacro, vtkErrorMacro } = macro; + +/** + * Get GL enum from sampler parameter + * @param {*} parameter The sampler parameter + * @returns The GL enum + */ +export function getGLEnumFromSamplerParameter(parameter) { + const GL_TEXTURE_MAG_FILTER = 0x2800; + const GL_TEXTURE_MIN_FILTER = 0x2801; + const GL_TEXTURE_WRAP_S = 0x2802; + const GL_TEXTURE_WRAP_T = 0x2803; + + const Mapping = { + magFilter: GL_TEXTURE_MAG_FILTER, + minFilter: GL_TEXTURE_MIN_FILTER, + wrapS: GL_TEXTURE_WRAP_S, + wrapT: GL_TEXTURE_WRAP_T, + }; + + return Mapping[parameter]; +} + +export function getAccessorArrayTypeAndLength(accessor, bufferView) { + const ArrayType = ARRAY_TYPES[accessor.componentType]; + const components = COMPONENTS[accessor.type]; + const bytesPerComponent = BYTES[accessor.componentType]; + const length = accessor.count * components; + const byteLength = accessor.count * components * bytesPerComponent; + return { ArrayType, length, byteLength }; +} + +/** + * Resolves a URL based on the original path + * @param {*} url The URL to resolve + * @param {*} originalPath The original path to resolve the URL against + * @returns The resolved URL or an empty string if the URL is invalid + */ +export function resolveUrl(url, originalPath) { + // Invalid URL + if (typeof url !== 'string' || url === '') return ''; + + try { + // Data URI + if (url.startsWith('data:')) return url; + + // Blob URL + if (url.startsWith('blob:')) return url; + + // Create URL object from the original path + const baseUrl = new URL(originalPath); + if (!baseUrl.pathname.includes('.') && !baseUrl.pathname.endsWith('/')) { + baseUrl.pathname += '/'; + } + + // Absolute URL (http://, https://, //) + if ( + url.startsWith('http:') || + url.startsWith('https:') || + url.startsWith('//') + 
) { + return new URL(url, baseUrl).href; + } + + // Host Relative URL + if (url.startsWith('/')) { + return new URL(url, baseUrl).href; + } + + // Relative URL + return new URL(url, baseUrl).href; + } catch (error) { + vtkErrorMacro('Error resolving URL:', error); + return ''; + } +} + +/** + * Loads image from buffer or URI + * @param {*} image + * @param {*} channel + * @returns + */ +export async function loadImage(image, channel, forceReLoad = false) { + // Initialize cache if it doesn't exist + if (!image.cache) { + image.cache = {}; + } + + // Return cached result for the channel if available and not forced to reload + if (!forceReLoad && image.cache[channel]) { + return image.cache[channel]; + } + + const worker = new WebworkerPromise(new Worker()); + + if (image.bufferView) { + return worker + .postMessage({ + imageBuffer: image.bufferView.data, + mimeType: image.mimeType, + channel, + }) + .then((result) => { + // Cache the bitmap based on the channel + image.cache[channel] = result.bitmap; + return result.bitmap; + }) + .finally(() => { + worker.terminate(); + }); + } + + if (image.uri) { + vtkWarningMacro('Falling back to image uri', image.uri); + return new Promise((resolve, reject) => { + const img = new Image(); + img.crossOrigin = 'Anonymous'; + img.onload = () => { + image.cache[channel] = img; // Cache the loaded image based on the channel + resolve(img); + }; + img.onerror = reject; + img.src = image.uri; + }); + } + + return null; +} + +/** + * + * @param {*} image + * @param {*} sampler + * @param {*} extensions + * @returns + */ +export function createVTKTextureFromGLTFTexture(image, sampler, extensions) { + const texture = vtkTexture.newInstance(); + // Apply sampler settings + if (sampler) { + if ( + ('wrapS' in sampler && 'wrapT' in sampler) || + ('minFilter' in sampler && 'magFilter' in sampler) + ) { + if ( + sampler.wrapS === GL_SAMPLER.CLAMP_TO_EDGE || + sampler.wrapT === GL_SAMPLER.CLAMP_TO_EDGE + ) { + texture.setRepeat(false); + 
texture.setEdgeClamp(true); + } else if ( + sampler.wrapS === GL_SAMPLER.REPEAT || + sampler.wrapT === GL_SAMPLER.REPEAT + ) { + texture.setRepeat(true); + texture.setEdgeClamp(false); + } else { + vtkWarningMacro('Mirrored texture wrapping is not supported!'); + } + + const linearFilters = [ + GL_SAMPLER.LINEAR, + GL_SAMPLER.LINEAR_MIPMAP_NEAREST, + GL_SAMPLER.NEAREST_MIPMAP_LINEAR, + GL_SAMPLER.LINEAR_MIPMAP_LINEAR, + ]; + + if ( + linearFilters.includes(sampler.minFilter) || + linearFilters.includes(sampler.magFilter) + ) { + texture.setInterpolate(true); + } + } else { + texture.MipmapOn(); + texture.setInterpolate(true); + texture.setEdgeClamp(true); + } + } + + texture.setJsImageData(image); + return texture; +} diff --git a/Sources/IO/Geometry/GLTFImporter/example/controller.html b/Sources/IO/Geometry/GLTFImporter/example/controller.html new file mode 100644 index 00000000000..0470eff0f47 --- /dev/null +++ b/Sources/IO/Geometry/GLTFImporter/example/controller.html @@ -0,0 +1,89 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+

Options

+
+ Renderer + + + + + +
Models + +
Flavor + +
Scenes + +
Cameras + +
+ + + + + + + + + + + + + + +
+

Environment Properties

+
+ + +
+ + + 1.00 +
+ + + 1.00 +
+ + + 30 +
diff --git a/Sources/IO/Geometry/GLTFImporter/example/index.js b/Sources/IO/Geometry/GLTFImporter/example/index.js new file mode 100644 index 00000000000..365628c5ff4 --- /dev/null +++ b/Sources/IO/Geometry/GLTFImporter/example/index.js @@ -0,0 +1,347 @@ +import '@kitware/vtk.js/Rendering/Profiles/Geometry'; + +// Enable data soure for DataAccessHelper +import '@kitware/vtk.js/IO/Core/DataAccessHelper/LiteHttpDataAccessHelper'; // Just need HTTP +// import '@kitware/vtk.js/IO/Core/DataAccessHelper/HttpDataAccessHelper'; // HTTP + zip +// import '@kitware/vtk.js/IO/Core/DataAccessHelper/HtmlDataAccessHelper'; // html + base64 + zip +// import '@kitware/vtk.js/IO/Core/DataAccessHelper/JSZipDataAccessHelper'; // zip + +import vtkFullScreenRenderWindow from '@kitware/vtk.js/Rendering/Misc/FullScreenRenderWindow'; +import vtkTexture from '@kitware/vtk.js/Rendering/Core/Texture'; +import vtkURLExtract from '@kitware/vtk.js/Common/Core/URLExtract'; +import vtkResourceLoader from '@kitware/vtk.js/IO/Core/ResourceLoader'; + +import vtkGLTFImporter from '@kitware/vtk.js/IO/Geometry/GLTFImporter'; + +import controlPanel from './controller.html'; + +// ---------------------------------------------------------------------------- +// Example code +// ---------------------------------------------------------------------------- +let mixer; +let selectedModel; +let selectedFlavor; +const userParms = vtkURLExtract.extractURLParameters(); +const selectedScene = userParms.scene || 0; +const viewAPI = userParms.viewAPI || 'WebGL'; + +const baseUrl = + 'https://raw.githubusercontent.com/KhronosGroup/glTF-Sample-Assets/main'; +const modelsFolder = 'Models'; +const modelsDictionary = {}; + +function createTextureWithMipmap(src, level) { + const img = new Image(); + img.crossOrigin = 'Anonymous'; + img.src = src; + const tex = vtkTexture.newInstance(); + tex.setMipLevel(level); + img.onload = () => { + tex.setInterpolate(true); + tex.setEdgeClamp(true); + tex.setImage(img); + }; + return 
tex; +} + +const fullScreenRenderer = vtkFullScreenRenderWindow.newInstance(); +fullScreenRenderer.addController(controlPanel); + +const renderer = fullScreenRenderer.getRenderer(); +const renderWindow = fullScreenRenderer.getRenderWindow(); + +// Workaround for the variant switch +const variantsModels = [ + 'MaterialsVariantsShoe', + 'GlamVelvetSofa', + 'SheenChair', +]; + +const environmentTex = createTextureWithMipmap( + '/Data/pbr/kiara_dawn_4k.jpg', + 8 +); +renderer.setUseEnvironmentTextureAsBackground(false); + +if (variantsModels.includes(userParms.model)) { + renderer.setEnvironmentTextureDiffuseStrength(0); + renderer.setEnvironmentTextureSpecularStrength(0); +} else { + renderer.setEnvironmentTexture(environmentTex); + renderer.setEnvironmentTextureDiffuseStrength(1); + renderer.setEnvironmentTextureSpecularStrength(1); +} + +const reader = vtkGLTFImporter.newInstance({ + renderer, +}); + +const rootContainer = document.querySelector('body'); +const modelSelector = document.querySelector('.models'); +const flavorSelector = document.querySelector('.flavor'); +const scenesSelector = document.querySelector('.scenes'); +const camerasSelector = document.querySelector('.cameras'); +const animationsSelector = document.querySelector('.animations'); +const variantsSelector = document.querySelector('.variants'); + +const eSpecularChange = document.querySelector('.e-specular'); +const eDiffuseChange = document.querySelector('.e-diffuse'); +const angleChange = document.querySelector('.angle'); +const useTextureBackgroundChange = document.querySelector('.use-background'); + +// add a loading svg to the container and remove once the reader is ready +const loading = document.createElement('div'); +loading.innerHTML = ` + + + + + +`; +// loading message should center in the window +loading.style.position = 'absolute'; +loading.style.left = '50%'; +loading.style.top = '50%'; +loading.style.transform = 'translate(-50%, -50%)'; + +// 
---------------------------------------------------------------------------- +function animateScene(lastTime = 0) { + const currentTime = performance.now(); + const dt = (currentTime - lastTime) / 1000; + + mixer.update(dt); + + renderWindow.render(); + requestAnimationFrame(() => animateScene(currentTime)); +} + +function ready() { + console.log('Ready'); + // remove loading message + loading.remove(); + + reader.importActors(); + reader.importCameras(); + reader.importLights(); + reader.importAnimations(); + + renderer.resetCamera(); + renderWindow.render(); + + // Play animations + const animations = reader.getAnimations(); + if (animations.length > 0) { + animations.forEach((animation, name) => { + const option = document.createElement('option'); + option.value = animation.id; + option.textContent = animation.id; + animationsSelector.appendChild(option); + }); + + // Play the first animation by default + const defaultAnimation = animations[0]; + mixer = reader.getAnimationMixer(); + mixer.play(defaultAnimation.id); + animateScene(); + document.querySelector('.animations-container').style.display = 'table-row'; + } + + const cameras = reader.getCameras(); + cameras.forEach((camera, name) => { + const option = document.createElement('option'); + option.value = name; + option.textContent = name; + camerasSelector.appendChild(option); + }); + + const scenes = reader.getScenes(); + if (scenes.length > 1) { + scenesSelector.innerHTML = ''; + scenes.forEach((scene, index) => { + const option = document.createElement('option'); + option.value = index; + option.textContent = `Scene ${index}`; + if (index === selectedScene) { + option.selected = true; + } + scenesSelector.appendChild(option); + }); + } + + const variants = reader.getVariants(); + if (variants.length > 1) { + variantsSelector.innerHTML = ''; + variants.forEach((variant, index) => { + console.log('Adding variant', variant); + const option = document.createElement('option'); + option.value = index; + 
option.textContent = variant; + variantsSelector.appendChild(option); + }); + document.querySelector('.variants-container').style.display = 'table-row'; + } +} + +// Convert the await fetch to a promise chain +fetch(`${baseUrl}/${modelsFolder}/model-index.json`) + .then((response) => response.json()) + .then((modelsJson) => { + modelsJson.forEach((entry) => { + if (entry.variants !== undefined && entry.name !== undefined) { + const variants = []; + + Object.keys(entry.variants).forEach((variant) => { + const fileName = entry.variants[variant]; + variants[ + variant + ] = `${modelsFolder}/${entry.name}/${variant}/${fileName}`; + }); + + modelsDictionary[entry.name] = variants; + } + }); + + const modelsNames = Object.keys(modelsDictionary); + modelsNames.forEach((modelName) => { + const option = document.createElement('option'); + option.value = modelName; + option.textContent = modelName; + if (userParms.model === modelName) { + option.selected = true; + } + modelSelector.appendChild(option); + }); + + selectedModel = userParms.model || modelsNames[0]; + const variants = Object.keys(modelsDictionary[selectedModel]); + + selectedFlavor = userParms.flavor || variants[0]; + variants.forEach((variant) => { + const option = document.createElement('option'); + option.value = variant; + option.textContent = variant; + if (variant === selectedFlavor) { + option.selected = true; + } + flavorSelector.appendChild(option); + }); + + const path = modelsDictionary[selectedModel][selectedFlavor]; + const url = `${baseUrl}/${path}`; + + if (selectedFlavor === 'glTF-Draco') { + vtkResourceLoader + .loadScript('https://unpkg.com/draco3d@1.3.4/draco_decoder_nodejs.js') + .then(() => { + // Set decoder function to the vtk reader + reader.setDracoDecoder(window.CreateDracoModule); + reader + .setUrl(url, { binary: true, sceneId: selectedScene }) + .then(reader.onReady(ready)); + }); + } else { + reader + .setUrl(url, { binary: true, sceneId: selectedScene }) + 
.then(reader.onReady(ready)); + } + }) + .catch((error) => { + console.error('Error fetching the model index:', error); + }); + +// ---------------------------------------------------------------------------- +// Use a file reader to load a local file +// ---------------------------------------------------------------------------- + +// Get the value of the radio button named 'renderer' and set the view API accordingly +document.querySelectorAll("input[name='viewAPI']").forEach((input) => { + if (input.value === viewAPI) { + input.checked = true; + } + input.addEventListener('change', (evt) => { + window.location = `?model=${selectedModel}&viewAPI=${evt.target.value}`; + }); +}); + +modelSelector.onchange = (evt) => { + window.location = `?model=${evt.target.value}&viewAPI=${viewAPI}`; +}; + +flavorSelector.onchange = (evt) => { + window.location = `?model=${selectedModel}&flavor=${evt.target.value}&scene=${selectedScene}&viewAPI=${viewAPI}`; +}; + +scenesSelector.onchange = (evt) => { + window.location = `?model=${selectedModel}&flavor=${selectedFlavor}&scene=${evt.target.value}&viewAPI=${viewAPI}`; +}; + +camerasSelector.onchange = (evt) => { + reader.setCamera(evt.target.value); + renderWindow.render(); +}; + +variantsSelector.onchange = async (evt) => { + console.log('Switching to variant', evt.target.value); + await reader.switchToVariant(Number(evt.target.value)); + renderWindow.render(); +}; + +useTextureBackgroundChange.addEventListener('input', (e) => { + const useTexturedBackground = Boolean(e.target.checked); + renderer.setUseEnvironmentTextureAsBackground(useTexturedBackground); + renderWindow.render(); +}); + +angleChange.addEventListener('input', (e) => { + const angle = Number(e.target.value); + renderer.getActiveCamera().setViewAngle(angle); + renderWindow.render(); +}); + +eSpecularChange.addEventListener('input', (e) => { + const specular = Number(e.target.value); + renderer.setEnvironmentTextureSpecularStrength(specular); + renderWindow.render(); 
+}); + +eDiffuseChange.addEventListener('input', (e) => { + const diffuse = Number(e.target.value); + renderer.setEnvironmentTextureDiffuseStrength(diffuse); + renderWindow.render(); +}); + +rootContainer.appendChild(loading); diff --git a/Sources/IO/Geometry/GLTFImporter/index.d.ts b/Sources/IO/Geometry/GLTFImporter/index.d.ts new file mode 100644 index 00000000000..8009302b4be --- /dev/null +++ b/Sources/IO/Geometry/GLTFImporter/index.d.ts @@ -0,0 +1,266 @@ +import { vtkAlgorithm, vtkObject } from '../../../interfaces'; +import HtmlDataAccessHelper from '../../Core/DataAccessHelper/HtmlDataAccessHelper'; +import HttpDataAccessHelper from '../../Core/DataAccessHelper/HttpDataAccessHelper'; +import JSZipDataAccessHelper from '../../Core/DataAccessHelper/JSZipDataAccessHelper'; +import LiteHttpDataAccessHelper from '../../Core/DataAccessHelper/LiteHttpDataAccessHelper'; + +import vtkActor from '../../../Rendering/Core/Actor'; +import vtkRenderer from '../../../Rendering/Core/Renderer'; +import vtkCamera from '../../../Rendering/Core/Camera'; + +interface IGLTFImporterOptions { + binary?: boolean; + compression?: string; + progressCallback?: any; +} + +export interface IGLTFAnimation { + id: string; + name: string; + channels: any[]; + samplers: any[]; +} + +export interface IGLTFAnimationMixer { + addAnimation: (glTFAnimation: object) => void; + play: (name: string, weight?: number) => void; + stop: (name: string) => void; + stopAll: () => void; + update: (deltaTime: number) => void; +} + +export interface IGLTFMaterialVariant { + material: number; + variants: number[]; +} + +/** + * + */ +export interface IGLTFImporterInitialValues {} + +type vtkGLTFImporterBase = vtkObject & + Omit< + vtkAlgorithm, + | 'getInputData' + | 'setInputData' + | 'setInputConnection' + | 'getInputConnection' + | 'addInputConnection' + | 'addInputData' + >; + +export interface vtkGLTFImporter extends vtkGLTFImporterBase { + /** + * Get the actors. 
+ */ + getActors(): Map; + + /** + * Get the animation mixer. + */ + getAnimationMixer(): IGLTFAnimationMixer; + + /** + * Get the animations. + */ + getAnimations(): IGLTFAnimation[]; + + /** + * Get the base url. + */ + getBaseURL(): string; + + /** + * Get the cameras. + */ + getCameras(): Map; + + /** + * + */ + getDataAccessHelper(): + | HtmlDataAccessHelper + | HttpDataAccessHelper + | JSZipDataAccessHelper + | LiteHttpDataAccessHelper; + + /** + * Get the url of the object to load. + */ + getUrl(): string; + + /** + * Get the variant array. + */ + getVariants(): string[]; + + /** + * Get the variant mappings. + */ + getVariantMappings(): Map; + + /** + * Import the actors. + */ + importActors(): void; + + /** + * Import the animations. + */ + importAnimations(): void; + + /** + * Import the cameras. + */ + importCameras(): void; + + /** + * Import the lights. + */ + importLights(): void; + + /** + * Invoke the ready event. + */ + invokeReady(): void; + + /** + * Load the object data. + * @param {IGLTFImporterOptions} [options] + */ + loadData(options?: IGLTFImporterOptions): Promise; + + /** + * + * @param callback + */ + onReady(callback: () => void): void; + + /** + * Parse data. + * @param {String | ArrayBuffer} content The content to parse. + */ + parse(content: string | ArrayBuffer): void; + + /** + * Parse data as ArrayBuffer. + * @param {ArrayBuffer} content The content to parse. + */ + parseAsArrayBuffer(content: ArrayBuffer): void; + + /** + * Parse data as text. + * @param {String} content The content to parse. + */ + parseAsText(content: string): void; + + /** + * + * @param inData + * @param outData + */ + requestData(inData: any, outData: any): void; + + /** + * + * @param dataAccessHelper + */ + setDataAccessHelper( + dataAccessHelper: + | HtmlDataAccessHelper + | HttpDataAccessHelper + | JSZipDataAccessHelper + | LiteHttpDataAccessHelper + ): boolean; + + /** + * Set the url of the object to load. 
+ * @param {String} url the url of the object to load. + * @param {IGLTFImporterOptions} [option] The Draco reader options. + */ + setUrl(url: string, option?: IGLTFImporterOptions): Promise; + + /** + * Set the camera id. + * @param cameraId + */ + setCamera(cameraId: string): void; + + /** + * Set the Draco decoder. + * @param mappings + */ + setDracoDecoder(decoder: any): void; + + /** + * Set the vtk Renderer. + * @param renderer + */ + setRenderer(renderer: vtkRenderer): void; + + /** + * Switch to a variant. + * @param variantIndex + */ + switchToVariant(variantIndex: number): void; +} + +/** + * Method used to decorate a given object (publicAPI+model) with vtkGLTFImporter characteristics. + * + * @param publicAPI object on which methods will be bounds (public) + * @param model object on which data structure will be bounds (protected) + * @param {IGLTFImporterInitialValues} [initialValues] (default: {}) + */ +export function extend( + publicAPI: object, + model: object, + initialValues?: IGLTFImporterInitialValues +): void; + +/** + * Method used to create a new instance of vtkGLTFImporter + * @param {IGLTFImporterInitialValues} [initialValues] for pre-setting some of its content + */ +export function newInstance( + initialValues?: IGLTFImporterInitialValues +): vtkGLTFImporter; + +/** + * Load the WASM decoder from url and set the decoderModule + * @param url + * @param binaryName + */ +export function setWasmBinary( + url: string, + binaryName: string +): Promise; + +/** + * vtkGLTFImporter can import glTF 2.0 files. + * + * The GL Transmission Format (glTF) is an API-neutral runtime asset delivery + * format. 
A glTF asset is represented by: + * * A JSON-formatted file (.gltf) containing a full scene description: node + * hierarchy, materials, cameras, as well as descriptor information for + * meshes, animations, and other constructs + * * Binary files (.bin) containing geometry and animation data, and other + * buffer-based data + * * Image files (.jpg, .png) for textures + * + * Supported extensions: + * * KHR_draco_mesh_compression + * * KHR_lights_punctual + * * KHR_materials_unlit + * * KHR_materials_ior + * * KHR_materials_specular + * * KHR_materials_variants + * * EXT_texture_webp + * * EXT_texture_avif + */ +export declare const vtkGLTFImporter: { + newInstance: typeof newInstance; + extend: typeof extend; +}; +export default vtkGLTFImporter; diff --git a/Sources/IO/Geometry/GLTFImporter/index.js b/Sources/IO/Geometry/GLTFImporter/index.js new file mode 100644 index 00000000000..553e60f680a --- /dev/null +++ b/Sources/IO/Geometry/GLTFImporter/index.js @@ -0,0 +1,277 @@ +import macro from 'vtk.js/Sources/macros'; + +import BinaryHelper from 'vtk.js/Sources/IO/Core/BinaryHelper'; +import DataAccessHelper from 'vtk.js/Sources/IO/Core/DataAccessHelper'; +import vtkDracoReader from 'vtk.js/Sources/IO/Geometry/DracoReader'; +import { + createVTKObjects, + parseGLTF, + GLTFCameraToVTKCamera, + applyTransformToCamera, + createPropertyFromGLTFMaterial, +} from 'vtk.js/Sources/IO/Geometry/GLTFImporter/Reader'; +import parseGLB from 'vtk.js/Sources/IO/Geometry/GLTFImporter/Decoder'; +import { createAnimationMixer } from 'vtk.js/Sources/IO/Geometry/GLTFImporter/Animations'; +import { BINARY_HEADER_MAGIC } from 'vtk.js/Sources/IO/Geometry/GLTFImporter/Constants'; + +const { vtkDebugMacro, vtkErrorMacro } = macro; + +// ---------------------------------------------------------------------------- +// vtkGLTFImporter methods +// ---------------------------------------------------------------------------- + +function vtkGLTFImporter(publicAPI, model) { + // Set our className + 
model.classHierarchy.push('vtkGLTFImporter'); + + // Create default dataAccessHelper if not available + if (!model.dataAccessHelper) { + model.dataAccessHelper = DataAccessHelper.get('http'); + } + + // Internal method to fetch Array + function fetchData(url, option = {}) { + const { compression, progressCallback } = model; + if (option.binary) { + return model.dataAccessHelper.fetchBinary(url, { + compression, + progressCallback, + }); + } + return model.dataAccessHelper.fetchText(publicAPI, url, { + compression, + progressCallback, + }); + } + + // Set DataSet url + publicAPI.setUrl = (url, option = { binary: true }) => { + model.url = url; + + // Remove the file in the URL + const path = url.split('/'); + path.pop(); + model.baseURL = path.join('/'); + + model.compression = option.compression; + model.sceneId = option.sceneId ? option.sceneId : 0; + + // Fetch metadata + return publicAPI.loadData({ + progressCallback: option.progressCallback, + binary: !!option.binary, + }); + }; + + // Fetch the actual data arrays + publicAPI.loadData = (option = {}) => { + const promise = fetchData(model.url, option); + promise.then(publicAPI.parse); + return promise; + }; + + publicAPI.parse = (content) => { + if (typeof content === 'string') { + publicAPI.parseAsText(content); + } else { + publicAPI.parseAsBinary(content); + } + }; + + publicAPI.parseAsBinary = async (content) => { + if (!content) { + return; + } + if (content !== model.parseData) { + publicAPI.modified(); + } else { + return; + } + + const glTF = {}; + const options = { + baseUri: model.baseURL, + }; + + const magic = BinaryHelper.arrayBufferToString( + new Uint8Array(content, 0, 4) + ); + + if (magic === BINARY_HEADER_MAGIC) { + const { json, buffers } = parseGLB(content); + vtkDebugMacro('Loaded GLB', json, buffers); + glTF.glbBuffers = buffers; + glTF.json = json; + } else { + glTF.json = JSON.parse(BinaryHelper.arrayBufferToString(content)); + } + + if (glTF.json.asset === undefined || 
glTF.json.asset.version[0] < 2) { + vtkErrorMacro('Unsupported asset. glTF versions >=2.0 are supported.'); + return; + } + + model.glTFTree = await parseGLTF(glTF, options); + + model.actors = new Map(); + model.cameras = new Map(); + model.lights = new Map(); + model.animations = []; + model.variants = []; + model.variantMappings = new Map(); + + await createVTKObjects(model); + + model.scenes = model.glTFTree.scenes; + + publicAPI.invokeReady(); + }; + + publicAPI.parseAsText = (content) => { + if (!content) { + return; + } + if (content !== model.parseData) { + publicAPI.modified(); + } else { + return; + } + + model.parseData = content; + }; + + publicAPI.requestData = (inData, outData) => { + publicAPI.parse(model.parseData); + }; + + publicAPI.setDracoDecoder = (decoder) => { + vtkDracoReader.setDracoDecoder(decoder); + }; + + publicAPI.importActors = () => { + // Add actors to renderer + model.actors.forEach((actor) => model.renderer.addActor(actor)); + }; + + publicAPI.importCameras = () => { + // Set up camera + model.glTFTree.cameras?.forEach((glTFcamera) => { + const camera = GLTFCameraToVTKCamera(glTFcamera); + model.cameras.set(glTFcamera.id, camera); + }); + + model.scenes.forEach((scene) => { + scene.nodes.forEach((node) => { + const camera = model.cameras.get(node.camera?.id); + if (camera) { + applyTransformToCamera(camera, node.transform); + } + }); + }); + }; + + publicAPI.importAnimations = () => { + // Set up animations + if (model.glTFTree.animations?.length > 0) { + model.animationMixer = createAnimationMixer( + model.actors, + model.glTFTree.accessors + ); + model.glTFTree.animations.forEach((animation) => { + model.animationMixer.addAnimation(animation); + }); + } + model.animations = model.glTFTree.animations || []; + }; + + publicAPI.importLights = () => { + // Set up lights + model.lights?.forEach((light) => { + vtkDebugMacro('Adding light', light); + model.renderer.addLight(light); + }); + }; + + publicAPI.setCamera = (cameraId) => { + 
const camera = model.cameras.get(cameraId); + + if (!camera) { + vtkErrorMacro(`Camera ${cameraId} not found`); + return; + } + vtkDebugMacro('Setting camera', camera); + model.renderer.setActiveCamera(camera); + }; + + publicAPI.switchToVariant = async (variantIndex) => { + const promises = Array.from(model.actors).map(async ([nodeId, actor]) => { + vtkDebugMacro('Switching to variant', variantIndex, 'for node', nodeId); + const variantMappings = model.variantMappings.get(nodeId); + + if (variantMappings) { + const mapping = variantMappings.find((m) => + m.variants.includes(variantIndex) + ); + if (mapping) { + const variantMaterial = model.glTFTree.materials[mapping.material]; + await createPropertyFromGLTFMaterial(model, variantMaterial, actor); + } + } + }); + + await Promise.all(promises); + }; +} + +// ---------------------------------------------------------------------------- +// Object factory +// ---------------------------------------------------------------------------- + +const DEFAULT_VALUES = { + // baseURL: null, + // dataAccessHelper: null, + // url: null, +}; + +// ---------------------------------------------------------------------------- + +export function extend(publicAPI, model, initialValues = {}) { + Object.assign(model, DEFAULT_VALUES, initialValues); + + // Build VTK API + macro.obj(publicAPI, model); + macro.get(publicAPI, model, [ + 'url', + 'baseURL', + 'actors', + 'scenes', + 'cameras', + 'animations', + 'animationMixer', + 'variants', + 'variantMappings', + ]); + macro.set(publicAPI, model, ['renderer', 'dracoDecoder']); + macro.event(publicAPI, model, 'ready'); + + // vtkGLTFImporter methods + vtkGLTFImporter(publicAPI, model); + + // To support destructuring + if (!model.compression) { + model.compression = null; + } + if (!model.progressCallback) { + model.progressCallback = null; + } +} +// ---------------------------------------------------------------------------- + +export const newInstance = macro.newInstance(extend, 
'vtkGLTFImporter'); + +// ---------------------------------------------------------------------------- + +export default { + extend, + newInstance, +}; diff --git a/Sources/IO/Geometry/index.js b/Sources/IO/Geometry/index.js index 084317488c5..d83543eb7ed 100644 --- a/Sources/IO/Geometry/index.js +++ b/Sources/IO/Geometry/index.js @@ -3,6 +3,7 @@ import vtkPLYReader from './PLYReader'; import vtkDracoReader from './DracoReader'; import vtkSTLWriter from './STLWriter'; import vtkPLYWriter from './PLYWriter'; +import vtkGLTFImporter from './GLTFImporter'; export default { vtkSTLReader, @@ -10,4 +11,5 @@ export default { vtkDracoReader, vtkSTLWriter, vtkPLYWriter, + vtkGLTFImporter, };