From b76a3533821193cb196eb1897c5079f2453ec6d2 Mon Sep 17 00:00:00 2001 From: Mr Martian Date: Sun, 15 Sep 2024 12:10:45 -0600 Subject: [PATCH] updated for writer and reader to work in the browser as the default; added gzip writing; bug fixes --- .gitignore | 1 + .npmignore | 1 + README.md | 17 +- assets/doc-coverage.svg | 4 +- build.ts | 20 +- bun.lockb | Bin 100177 -> 101619 bytes bunfig.toml | 6 +- docs/assets/navigation.js | 2 +- docs/assets/search.js | 2 +- docs/classes/BufferReader.html | 12 + docs/classes/BufferWriter.html | 13 + docs/classes/DirCache.html | 4 +- docs/classes/FetchReader.html | 15 + docs/classes/PMTilesReader.html | 31 -- docs/classes/S2PMTilesReader.html | 12 +- ...MTilesWriter.html => S2PMTilesWriter.html} | 25 +- docs/enums/Compression.html | 4 +- docs/enums/TileType.html | 4 +- docs/functions/bytesToHeader.html | 4 +- docs/functions/concatUint8Arrays.html | 5 + docs/functions/deserializeDir.html | 4 +- docs/functions/findTile.html | 4 +- docs/functions/getUint64.html | 4 +- docs/functions/headerToBytes.html | 4 +- docs/functions/readVarint.html | 4 +- docs/functions/readVarintRemainder.html | 4 +- docs/functions/s2BytesToHeader.html | 4 +- docs/functions/s2HeaderToBytes.html | 4 +- docs/functions/serializeDir.html | 4 +- docs/functions/setUint64.html | 4 +- docs/functions/tileIDToZxy.html | 4 +- docs/functions/writeBigVarint.html | 4 +- docs/functions/writeBigVarintHigh.html | 4 +- docs/functions/writeBigVarintLow.html | 4 +- docs/functions/writeVarint.html | 4 +- docs/functions/zxyToTileID.html | 4 +- docs/hierarchy.html | 2 +- docs/index.html | 10 +- docs/interfaces/BufferPosition.html | 4 +- docs/interfaces/Entry.html | 4 +- docs/interfaces/Header.html | 4 +- docs/interfaces/Reader.html | 2 + docs/interfaces/S2Entries.html | 4 +- docs/interfaces/S2Header.html | 4 +- docs/interfaces/Writer.html | 2 + docs/modules.html | 15 +- docs/types/DecompressFunc.html | 4 +- docs/types/Point.html | 4 +- docs/types/Tile.html | 4 +- package.json | 18 
+- src/browser.ts | 254 --------------- src/file.ts | 63 ++++ src/index.ts | 13 +- src/mmap.ts | 20 ++ src/reader.ts | 158 ++++++---- src/writer.ts | 260 ++++++++------- test/fixtures/s2.s2pmtiles | Bin 0 -> 98339 bytes test/polyfill.ts | 73 +++++ test/reader.test.ts | 156 ++++++++- test/server.ts | 47 +++ test/writer.test.ts | 295 +++++++++--------- 61 files changed, 951 insertions(+), 715 deletions(-) create mode 100644 docs/classes/BufferReader.html create mode 100644 docs/classes/BufferWriter.html create mode 100644 docs/classes/FetchReader.html delete mode 100644 docs/classes/PMTilesReader.html rename docs/classes/{PMTilesWriter.html => S2PMTilesWriter.html} (64%) create mode 100644 docs/functions/concatUint8Arrays.html create mode 100644 docs/interfaces/Reader.html create mode 100644 docs/interfaces/Writer.html delete mode 100644 src/browser.ts create mode 100644 src/file.ts create mode 100644 src/mmap.ts create mode 100644 test/fixtures/s2.s2pmtiles create mode 100644 test/polyfill.ts create mode 100644 test/server.ts diff --git a/.gitignore b/.gitignore index dcac0e2..6faac08 100644 --- a/.gitignore +++ b/.gitignore @@ -9,3 +9,4 @@ benchmarks/data/bing/mvt/*.mvt tools/fetchBingTiles.ts tools/fetchOMT.ts s2-pmtiles-spec/1.0.0/README.bak.md +coverage diff --git a/.npmignore b/.npmignore index df3db51..a74ccbf 100644 --- a/.npmignore +++ b/.npmignore @@ -14,3 +14,4 @@ docs/ benchmarks/ tools/ s2-pmtiles-spec/ +coverage diff --git a/README.md b/README.md index 2066b02..0e88707 100644 --- a/README.md +++ b/README.md @@ -60,10 +60,11 @@ cargo install s2-pmtiles ### Example use ```ts -import { PMTilesReader, PMTilesWriter } from 's2-pmtiles' +import { S2PMTilesReader, S2PMTilesWriter, TileType } from 's2-pmtiles' +import { FileReader, FileWriter } from 's2-pmtiles/file'; // The File Reader you can run on bun/node/deno -const testFixture1 = new PMTilesReader(`test/fixtures/test_fixture_1.pmtiles`); +const testFixture1 = new S2PMTilesReader(new 
FileReader('test/fixtures/test_fixture_1.pmtiles')); // get an WM tile let x = 0; let y = 0; @@ -74,7 +75,7 @@ testFixture1.getTile(x, y, z); // undefied | Uint8Array testFixture1.getTileS2(face, x, y, z); // undefined | Uint8Array // The File Writer you can run on bun/node/deno -const testFixture2 = new PMTilesWriter(`tmpFile.pmtiles`); +const testFixture2 = new S2PMTilesWriter(new FileWriter('tmpFile.pmtiles'), TileType.Pbf); // write a tile testFixture2.writeTileXYZ(x, y, z, Uint8Array.from([])); // write an S2 tile @@ -82,10 +83,11 @@ testFixture2.writeTileS2(face, x, y, z, Uint8Array.from([])); // when you finish you commit to build the metadata testFixture2.commit(); + // The File Reader you can run in the browser -import { S2PMTilesReader } from 's2-pmtiles/browser'; +import { S2PMTilesReader } from 's2-pmtiles'; // you want to add a true after the path for generic PMTiles, as it ensures 206 byte requests. -const browserFixture = new S2PMTilesReader(`https://www.example.com/test.pmtiles`, true); +const browserFixture = new S2PMTilesReader('https://www.example.com/test.pmtiles', true); // get an WM tile browserFixture.getTile(x, y, z); // undefied | Uint8Array // get an S2 tile @@ -94,7 +96,7 @@ browserFixture.getTileS2(face, x, y, z); // undefined | Uint8Array ### Browser Support -Some tsconfigs might need some extra help to see the `s2-pmtiles/browser` package. +Some tsconfigs might need some extra help to see the `s2-pmtiles/file` or `s2-pmtiles/mmap` package. 
To fix this update your tsconfig.json with the following: @@ -103,7 +105,8 @@ To fix this update your tsconfig.json with the following: "compilerOptions": { "baseUrl": "./", "paths": { - "s2-pmtiles/browser": ["./node_modules/s2-pmtiles/dist/browser.d.ts"] + "s2-pmtiles/file": ["./node_modules/s2-pmtiles/dist/file.d.ts"], + "s2-pmtiles/mmap": ["./node_modules/s2-pmtiles/dist/mmap.d.ts"] } } } diff --git a/assets/doc-coverage.svg b/assets/doc-coverage.svg index 2f0f210..70d81ff 100644 --- a/assets/doc-coverage.svg +++ b/assets/doc-coverage.svg @@ -11,7 +11,7 @@ document document - 100% - 100% + 95% + 95% \ No newline at end of file diff --git a/build.ts b/build.ts index 902d12c..eb67586 100644 --- a/build.ts +++ b/build.ts @@ -8,20 +8,30 @@ try { format: 'esm', minify: true, sourcemap: 'external', - target: 'node', + target: 'browser', // target: 'esnext', // Adjust target based on your project needs }); console.info('Node Build completed successfully!', outputNode); - const outputBrowser = await bun.build({ - entrypoints: ['src/browser.ts'], + const outputFile = await bun.build({ + entrypoints: ['src/file.ts'], outdir: 'dist', format: 'esm', minify: true, sourcemap: 'external', - target: 'browser', + target: 'node', + // target: 'esnext', // Adjust target based on your project needs + }); + console.info('File Build completed successfully!', outputFile); + const outputMMap = await bun.build({ + entrypoints: ['src/mmap.ts'], + outdir: 'dist', + format: 'esm', + minify: true, + sourcemap: 'external', + target: 'node', // target: 'esnext', // Adjust target based on your project needs }); - console.info('Browser Build completed successfully!', outputBrowser); + console.info('File Build completed successfully!', outputMMap); } catch (error) { console.error('Build failed:', error); } diff --git a/bun.lockb b/bun.lockb index b922c553c583f75888a9fafb8c7e35beb5b90b7a..a9d68849db4f215b660959fec18bb06ba181a00c 100755 GIT binary patch delta 14503 
zcmeHOd3aPsw!hU$C%M6dboS5*S=qBMbV49E0R$o@5M+xW5R!&8ge0WHDw0M(44Z_4 zDG^6d29W_|a{)zV5r=??h#LX}$S8_T1c7lF5#I0I1)_tF^SI(6#Q zse5mVql-PRtngTp*r7iE`+$ivnrdr4^10ILyQYEf_c^g*c^ms@$1Xll8C3i6r=<&- zM3+~?kjT1nvqq>K$xo78U*gsRTmwNAa4~Qgu-#eVsH~NwQ|^-F4f>dlN18RdQKxT$ zCOZ}O$(3aFsxCjP<8Odv$jj{Z>V!InQ@Vi;T7t0~21wB4snG+xG{ZfCeZhYT@_T@v z0B!?33z+y0XTsQus?m}cyduXYgWrPqV=Jp_?2Ag8WedFYiC!nnm0QtS&hxih8QQCbdlhtob4 z)}(683U%AVUy|UFtGcANJOMtImX}nFhujl=Q9Q~^oC$Ds><#FVog?6rerrC<6lvKM zC`tI|;`nI=e3NfAh2)0@Yp#9`Ofk*~(fT?LnnH5HO>6ZoaBI-70mHm&3os3NH8APL zaf@3>SSa#^tQ4XH!g;{t!AYLy)-Hb`bjgF%a7hXRZeh|AtP*^(Z6w-xfM+r@<`9vI#(E45#t8(lN7qQ)#%YRC8c(! zB<%!E{9C{@RYmPI`2>5-SbOT7RCU@XOt#lZ(kOHs1-&?6`0mPs1LTO|z`YNsH7zU~lkW2G(=2YDy(?Gez@i z+_*`wge5ReU0ITmjoTq7`wfsG<@onzOPzn3W@`rmLxcPbGzE7ibVzq|x|TmmhUQB$ zFs+a0z^80;R+Ko)5%s9f8h;u1(vRd@ zJwoct*;=5Rx@e)h4ot~X?WC|u(nP0Yw~%ZDYoN*$+_V@w9ZPL(<)2HJ8Gx%s zGz}qyTf8Fcp6I1fgWw!-`|3CsnEIU3Tk`?ap~|m=rapfz@7Cw1{3z(tye!Sv+K;cW zPr}-u{8Mw#a-V9?;u_<$PqtUqI+ue%gV?^>E6OYC&9&S2_u2GPpUq`2jQG>2AJ*?! z@@UB1rpN!<(EZkh*RwzSmhblK!@lRTMKLeM@*2c_EZOGuSa7}Rj%D#ii^9%x*FziCZi-wwE^sDC{Pe0~MCZ z^8*#R(o>QUD;HDcFY!i5e&lkHA`iz_N2Vp!^pm_kNMR>=BP8L76b6kp)ahcLAFRk9 zfQMkZ+<8&3jrsG&V8vXB$mj5az-+dV=Z7frZla;%!5e~X=9{28^F^WA@_m>Q;=A*P zP@DX?PC+Zw#@^xeq2zI6s3I%K8q$;aQRqDa3bW(#Qd=DXMc$%+f19a=4?h@|BlkkC zQUf<)%5y-Gi=aYm>@{BBN|7mxiDy#X?#yMY!Y1>4t0HfM1dG_^&5Lp~y+9O#P(5Th zTn<;*N}iA3_jrA{A~Vc?F68hf$R^(h3fV}$SkW!0{RB&-<>h^VEYGF9p*tQ-VIR^0i~K${$d zMcfmVC$}IF#h^%2i+2MkE&Al`Nl;`H0fAZn(LIoI#l%U{1M1x*+}%re@@u%$(gqV}@@T{FOU{wI<2i?7C-b8i`wURpN})X7 ztIH6xaGUJW)~FSv*%uVGQ!`ke3+hfh_vxBgnXuy1PRl#B*vb!rqIQTiR^KvE)DE#m z+zx=ExS477u>hW*rkH!THwyaGxa+45=^NZ>LrU(T`Ggi>Hn{@SV6;~=lbzy?=?a^~ zsOR7%f+tb7nVUf6a35&hD;t%Zn|p}K;x4)U`Umci@sJCiqNDe}c6Ng4yWmRkdOeisFMNqrYZK86&gSZWn8 zFvVDjh^0Ij6!od?S95rMwj#d+9#vS?k{k^8sb8&i8K{Av5Z3@3_OJS`*uT<@F&@R5 zDFj6Y0@VcNVh$*Zyk_ehC~ZAbg@}Mt6w_2*lwgzp3Tgl-ZBOXO^K%sWNfd)#;Gyq$ zoB6Mxy7R-q*({06iXx9eHzZ~19B%~GQ>7?lE`g#kAeOx{xty!Wt1+$Q2^IuqiGok# 
zKz4%~2nyZnv->zGS`J>6rR)ID&r?it7N4J&BNt~iS6l>&6t!Y;3KR`at&>!oyDPF4 z4~|qouuv>m8lXrY4-Xjr0#JHV!{J6+S|;2 zNbFpyWUN1L?4`&HK;N;mkEGBwaZ6gJ7w%|u>Qu8d-q;&YP~g$1v@~4D^ZO|546nzp zA8+gfC$S4riN~ZNB2R&$!C;wNZRU?bsa?y#@Rh8nk6H3SP~^OsTTozDdA}DPDGzVXTB3tk|9vd>$XyUaQ91aST2w6k6OL6_Xw84>X%{H1cog-q$x)ayWFZoc(r7T33l(_< zc=~vQZRU?aW$;CPvSshyT2zob7@-1+JPxMBX0v&IKZU)`>-#AtQy+e?Uk>ZV<^BqD z@qGMZ7s2l}-q;`eA}W4gbpTjV>=~Y4q{yeiqY8w5G1lf|#kPr5a4|`el92nImRh`g zhTerK{|QWT0y7s2$Nos zMyl9^`v(A|hn1;bzk!{68wc$@OnJflS7sGTpVd4+h@k2Tu z0ZbQRl8*#XyAquq4NTWv@VMIoYFMgL8m1VH(P_dYAFI>NF?Hn7<(f4~B24<@0dx^| z0J;bh ze>oAjnqx970B!lHfg$R6-ULOh-vUspy*j-Qm@>8zKo?=sJ4ghs=9sL$t8z6=dWQjI z?|q$5nDjmXko=fh#k3n$@nhW6)f`itPODrEd*S{(z{EEV(%ioQip+hZ)olE!fJQ=J3F#tCx83m*16&47 zhQ<(6$E5GjX~L8~^yQx>YzlB7@B&>+J}Wi_hqEb4;vNI-fAzuGaCh45fx9_t=WozhD5nPchPpFDlkn;5NOp?ZD*B%R2pv-Z^2qeO1RhfJyH)V*U&h>vdiJ zhAuZS=?3UPgAAsE+yXbz07}vShdo#w$RPAZ0~jLlM+da}eIG_BaM6BDQTcrz{(T=t z8vMQw|Gp0+4Yd9E_kCCm#s6YI{%!lPoy`BA@59@#my9jOgVD{FH?cL-Y=l+W!PgZ8}U2##Dj|D2(eBh<- zPYoS<&%y=$29Mc!uC(Na_pb6SYoeBIU-ZJ9T>D$AIxbjJGAWDk%XcBMYtHCF_Cxw(og8%>*t# zv&YLkHwjCAh?27mZdD+`?mKMJ>B9x#Z1O>)OV1Zx3r7=`wT$S zi-@b2L6d{D;OII5pj&z6V^fv7)qhptCyD z6PWxXh0k>!y#dpcHeKfcbW7#+CESoxf75x0rt~O)9QZ=#(Zlj=o%f~Aqj$A=I`2I7 zPX>Je^kzw(egz<&FF?Hk`dYuGsKf&3x&Rm4=4Z> z0{R2)qt~5WI6!aG-2n84oeiKj^LPNgk%s|V0sH_KfIlDr5D1_T6W)Lp0Q!n?3qVn) z2@C;*0>S}Xz@ryPdYN1gpts*8fCh%4EXEDJF)sovfbv2BeGr=opf6=uhw2+Z6>ueB zBw!R^7=YfJ1_NTzHXINEXbtcMP-_5C59dLWK_jY5RbxwyZVX8N=PZl;2@*kqj@H3BYILPB#Cc)G z)vqB*PK4)JMEZ%XlOvJ`Z0odh5hQ7lq@gP4QJ8jp&DP%SH?4&vC9yNLshv;_8P-LQ zz{%k=S58x{)umD+?POW0v}@?fD`$(wM5?0H&WW9+{b+zi;94{D>2+-$y_BYR1anl$ zDRQgYxZ*p_wU34*LyfjXCDa1RUgzo^pD6bqgCsLCom`-ARaj0g&%%eNRCj3VZerr1 zAM;|xLh)l^tVR_3u{3sGEF$WX7}*BshS)nx?o=su}CSP}PDT$?GkaYpjH%_*#ToLzX z0bh3&`k6?3ETSwiB$*|N=?PH%A%I1)`^8(y=xCTYo{R|?FMLvfCX19PpjqN*AY@O7 zCsKf35{ILJc8ka$AboNgg$2E`V`2g^Pl%=-MCI*Am1eVn}8ob;0 z{QBP8%PzyM6y!Tr<5dwG%+jpJ+2W}?ui4C9@;5*s6@Ft)YG&9~u>jU|qpV511*)@g zfOtsJhXoIP*LYSnON&x6ju?&flccRhlz!62m+Qa@!E 
zoU|@y_m^`=KLSfoCrkUqJ}9tkVF`tLg*Y3^0)-Na0G<~kiIpX$g|bfaR6GE*LVD$i zkrWqIQAY)=S|o+RNDuKy7z+zG&hHi`?R_rn;J!d~hJ>chJOjkGFqUdPf)d~ZW1}a= zb<1thGE)x+{BJF8!Gtz>!L7kCj{DA0KD6HV{(;`QDvWIs1+CC&s8|V@wso*TYvI z5wN^PoB*;Kr#8D4R!@tVySFPmz?{MLbZzl4b%}&AA$qjFvr@#{t&yX@yj*PBo|7du zp%RJsNLF}fr7Vwx@9Ihk6Q4)I%z5!6v3{~@#LOs2)TNX5+n0{jI0U`NKl|Fq`%Y8!kBg+X&{L-{*d6=Tcv0FGe%}y+Pc{qqN7`dk2^6c^!;jr!XM2>HpKqXkEV83^Fy&Jb-2r-+L^hDsIO~03?dr}O zPvnG{7}B~kmGPhGv1wvW2js*^ak&FV=@4x?q7&l?_j*fU|BXrCq*Lun$)LRWKrHIW z#{P=J%c5l`bZ;E?p1s9>q+?dUornR-Q=$!j$rL?0u{4usikR97MO58G)G}n8^B!jX z%PUheZeBnWsxR<(lQ<2VY=^jxmh6Z~hzC_JddI_Yf3ZBCrDYfgz^8A?n^V~J*+FW5 z*dXw>fJ6&K`t_YYD|++6)=?%l6Um1yO>&7J${j3nuwtpXUf|iire|tnXhJ`D|#+OaH2%NL^N9_W)rFxuM+wVB`GqAg=O4r zKgak{UGH9{=!x#vws`ds?XFuqOAy^?-&XhYv3F|If7r$Ui&Au_U|_EcL&4ywh=f`& zMhRhM+5c8Gwx(f2Fuq$v`doGG7`J_&o-~L-w78Xq$sR2V(t*Z{mFciIT^vbA;EeO> zPps%&{+;#DNtj@aou=M6$^O{9l~3|7t5!jhfo%z;;Ey6A1CDJFwhS2BA;yEU8mH}( z7#AZBmEg$-hOla~q>scZQa>$rl6sT)JE(Bu5dVyViwrw|ia2Rwp&+k;w0@hTeYUyo6Z3LV?&?eGLQpq% z)l!&dFzhaRs=o-AT{L9E-=1Ovp+d1g6P;;|t&8(C>#xjvcE`O{k3Ua^L0x51Pmz!X zy+4Q&AggiQzu%OOt1^ymzNt?Jo~I6p%~>!zTr_5(Rv1V7`QUY|&sw$%VbBdu5h-1u zcU|-avKmMC*Bp9s+NyCMIG~q_C;yaWDOc2y-V|||^m38$&0ok&4E0UDRE0FQR3LxC zh8iEIjI;YRj&vLC@x})#;}riLiSfSxjC20(c!E(gMfyt7UNF7#L|itjW&aW@vYAry zuQ)AOb22mDWmA1`R%2@D^<}}#p$d;(TQeQBFX&rOznA#!lCaB<>+-kkTcSM<{q53H zU4HIN$>N{)ql*wij5+xD~s%M z+D9FJW%(z3K;wDNwRZfl1!H>Wg=f+$5w2ki#nP_K*0Q>yWZIM($Jp}Pq*}YP_SFMj z+2wriJ6utuuyE@gTGdKQP*DPfO|U^MI-!|DLed>V)xbii9Xq`lQ|zu{ks(GzVa&Kq cKupD~=`1?FnKu4=n;kko<219354BJJ4_cAKr~m)} delta 13389 zcmeHOdw5jUwLg2pBsqaVCWH)O2!SL#LtZ3dLIOF2f-v$32#AP9Ll_{CkdR3bgpvuO z7%>*yw!0P;74d;ufruqnl!{snkJ4zBDvF@kUKLR)zG}t$`<-*fM5TW1?fvV1o{x3b zS&zN;+H0@9_CAxt$G3&I-5kC#WALg67PLIS_1PV_|2(bhDr0@do;L~tzu)J1=Eb?E zI$t|?M#0n93yoK&O|z2fYlTIa94$uEg0JAw1w0RezQ8`<1Ym!lqI_wsrXB92X;Gk` zv2lshqFZeG3~1V^@GoCVtEX&vn~jeF>yXd)`>WII%LCeJbkGfq9WX$_@d%3^6KNUF z1?~#|y^wbSt_SW9ybPH9@<95+imG|q=V(mF7J%QG{Dn)aYW$iu$EFvQFP)!WSFL42 
zNO>x5vR1^^r@|^36QM}YcY~&(CH|GmIfArkOTM6@w6@kiUmFQMI_wYB)s|NTG=E@O zoxf(Kb_Kj)JAa_M#*dB%L$5pNqu_I3rDat$OJOgB6Cr0;!EE$O1zoxv?%C(VtnRK} zR9?0S>g56d3Ru&w!mO~{oLEhRN5SgS+C}N`v20Ok#S+Lr1E29&R2oQ!s|!zq=CJm` z2kLi~4UQz&9dVk5f5B^U>jiv9?sT|)k@1$RuL3j1{oPhyt)Ll_*WeRd?FH@wyd43#}BP1LAC8dT>Yq7yz0~?%QRaRS5zSO@yQPVJ>U=*0oU_h2?0pd%p%cEb@4 ztafF!e}0t=cg7aKKg5dl31H5Cd79<(3!vGwlg+Oy%cv|btErkj)Y7Y*UsqEBb0u=N zGpT+%p4b|RTw7c2uTh|x%KI~{4tD`_zH9t}s*2@)?a53lQq90o;BN!AbFu2`rO3@J z%d5qU>tG2>V6nNfv`{u~&9>|}K!%i)pE+IijvUL@0|*QUc>pwn3v;F~TXL=Z$;`8S z=?l#DaSQm&wm?N`U=gAoGu+~@1D`%s1IGbhVdIrv%l>6n-=X{)qJ`2Zl>DLe`EJse z$erQt`cDe1KpzKY=-##I>Hx#4Y0EJGypDiRH=hG$sGhKKWqGAPef~V{0niL}c_2_* z<1ek;44T7f0;chFDEge5BIFDpx59GJKCO*t*XEoRE{h=IP`Vylxi zz+KR|2$;?VZKKzZvE)~SW^CsG)454Mu{tXatXx{Qq`Y>qd^I8|;SyV`vV3WIWqF|1 z1)4*Mkgmw2`udA3Y8;#+x6sCzdb7`k<1HVM`=J>Jl}VAL1@ z+OX3#vJt%f;2}?go#du?uW(4+Z8(b%#Uj}hS14A?Mz^8wAPt>x+1A6Wp8>@da6QrM zEWoTnH?dG(!6#D-eEo4y`5`TMt|u6JCNhB=;_-@F*_ddEXJu=mp??a=P)HoI zt(R9%#;!=;BG|Lu2nywbGKA=fZzvA!15lhQ_y}rrtfnFPISNo~K`~5W(&hH*{{@Ox zI!PC-II$YXf{KvSV!ciusC+Zf4R~VDric0=P_zf@h*3`z7={s39Oo4?q_3Z$?*tDe zhx*;U`a7W5BlO)~XJQW&d)d^zP_M+(6(+x}f4*!?HiSpE;yy*{DTY`leJO_i4weHW zg``6Q<;qq_u914Gp|_w=Vu>^9JYN0ppk|tS^r@t$H4byy9kQp4iY;=!0On{JolvL` z#HKw3%2o)j1!d(UbEE~7?JNwP1;y?WI*-?RaW7h6=G}-VhQQ2g=gXi9Ov~M{=c2~q z0K!mT1j4yGbKg)uZekVbdm~A+rS<;tb=yxP*+BEPm{&=tBOtQRqaFL#pVujue zie3f^MUi=m%zPNsPtA%Ko2qHk%_l#e#_>ttjwenU-12xGpQg$ivx@X#IHxdnIC!Rd z^$Jk-0zs5^*)qhew^#oZ6eEbPpjkM;YG)>|UI(f@p3R`Bi3JHOUx6A6$}F(@#W?S< zKgf^?>p-y`TteLTfMVRBIUb3hV>rEoLM8krJe8OOI)*1}Kz#>W{WzryW#9@>m!h?q zlj5+{^9WInd1B~nI2QLFWjrrz0 z4ys5__7sYtQXgUH^C99YG#4;dLgNTS^p~x;FO_ zl93zo-XYmK61!GT zXq+w-+9FV_4=5%mn6;o7ebX!bB~aFiWD$ynQ;h2{xhdW2`~#><%niVhjYWoj14_X~ zkfPtAkb)W`quqrfN%{;!zZ6|iD$F^)9TaoP%9j(M=s)6!nt?Sl+R!(m%+L?S+mjy& zf`dSEW75WA9?-Wvy(>U*HDJ@jg4-h-iw(yY`SL(sA$!S!=N}avqrKo zk1_PlIOni_V4b+IGC)xu=LL*?6(}pG5l6knmYHL9d{HQ)#ukZz(l^%7r(?xfVW6+K zf#MQEaL0PZ%Tga_h*;?xXXw6B)-+kmNr7T|K@XFB1k@N%)+D&l?^u&UNzoq$g;{}Y 
z+90pa%(iNQYjnQU#~b=8aM~?3+j56#_hV2TpgGkdMd}l9ZbEYo%t}Kg8z&gzkFpha zP3k`QiA{(_9utS&Zv@5hKpPvz??H_=%V;>N@4uN9e z;n4`M2$%XqL%$qnRBMD>YIlONM!;!!1r%+;=XC7GP~+TNlQ;iBlkIU9chVvuvu*WAC;*jT?Z0ObFzVRPDJq(K58A{CruO5SGLnb2Sm`N8&-xNb{ z1kWCCyw~|0s62UiLZSY5KAA09L^H=*9>+7eML;%|7~(P6T4Ffk}x zWh3s`M{s{f>eH|@qUKM88Pte$ueez@f_DTwRw3++DVZ|H)u}#KGhflQFyJ15&Vasv zaKHdS3IL7Fi&y}#- zNEf@j+-^xM@yZ>WFFfpvko` z^$r1O?}*JOrd}I>@)rTTh^hCoS!k>W+)V~wQ?=tJW#LFX{{=up|84V$g?!^ucR!ka zAK(Oh1fbU60KABWOq}JWN$nHfWcn<3J#{_@FaiDnVB647v&6LNz>T4ZBw=H^+!-|6 zM*~yT)s}a}Y!_$qFMu6z>d9G2^3bdp(-k*V>59j0Ld>rE*mOrsRuXQ+eQkM1OgpJI zzayqgL%^r&Lv1;*m91oumu?G)yMP`6%m#%vzau7pl+FJk%z=!y^@#acj9W5rnLz(E zNDxWT50X|Ktqe+X0MW?4Er#ascKK(eP_15>h@ zHyhL99X8z&lXa)fC+6c88{Y-YxwsdYBf6i$3*&mz&M;kg1aIE#odyD{c(3>G+KGXMa-!DxI6y1J0cB! z+#Stbk_Gku+3uLfRsL`9j?F8}7S5J0Jv&v7c6HK2e}L^RPd>M#dGx{4LY{c`6h-p& zry~>AcC-FI!=6Ha$~JlO?x$8!E4s>M{e@>Ga7^>vPvtnw#=m;IR&_weGhmEkr2>i^z=JUr{ ze{G_f#ewvdrIi(0U9+#Po6!6CtiQ;m$|jd#WNpyE*Sve^=ANiJhu%7@Ol+M9iBep?Y6JunU6eEhgrOO z?G2k50o(~p^7$4+%r}@_mZJ8i%|mRoX7K3L37dydXbk{5aMI>=23~9P-m-Z-n_qA9 z-ZptSM@Qph>eX2cP*+5Vu>*J}+yh{|d7|VAz6KBg)B@@N12lCYLR>#B z9nTqnOh7Im4=^0y1@IlF2Y|8XE1B6Da5#Y8;P7g`5iqaI zzzqPz$ULxI16&7K4!9an1*irr1}p(o0LlRi01E;00A&FF48pvh4Y(A*-(xNYOax2< zOa?IPeE|G4S8u*Hjf6jZjVb`}{VNT?*R7s_1VA@H48R451#}1Sl~V^q0r+})9>7^l z2E+s001tpic)lj^mEi%v&48Z+HUgRihQimOn*cXLc>`cI;Aa3H*J=RE0Q?!H5-=BV zC4fKu@Vq<|kc_r`8SM>71VjT)f`2#gJ%Fu%O#rD%qlBmaCWzJm_*=_WfH?rZyPg65 z5U>r;R{-<NXzLb0bCYAFeK;Y`#)y&M_j_oW?g-1=LEnK|{RZ*WI zUZ0zF$*#1Prgq78v|$uhezA4u_(6|sYnRkkay22#| zcIT7>kq$98 zHzPYMBTI`@#Wah@NH(fzIU=qvm9)FU ztSH4hXD3ccytdULMp*u9+to>QAoi-_WRc(reO`0!wUi!rKlrv`TFaq@Gb+<9COgiD zsWomfOz3K-8_g0`Rj!EdGz>eUdZ;&se%>vnh>_}SE`$?R=R5> zB-t6mGP1EZswZK=^UoEItF!DPS9SJ?gx;YKBU_tRcXc~nUf~eP3fjRZkZQC?4D;~Q zMy`ZQryZP98uQrm=xR7r5YkN5GPlqd9oW98sJ^S0kQuf!o39vZwt z^&2QM!m}|1?Wo#>XbUswY5F2;+P%<4w%RltTvIVl?Hewf9iP?EM3F15Ql*LLG)Fy} z2=_zZB~HoQe^0`JXX4--5}VF_7OPGr!u`+}i}RMHjvC#$TfQAbIMh{T_d##wWXJab 
zBlH>LI^$2Ci~sn-cv}_5?ojo8&}pPPOw_78NeFT1`^NPVgYTTYb=Un+&qm)k5cXFk zNn$tD?;nIYwKE}3P4A2R34Oo#P`}IW`TKQWr<$$Ia2Bb%`@)|?>Ivv!Ik!hBK_&M? zswAsX{o3sfRvY?>G`)ACxmyN*XrT>f{6n8r`q#XBbq;zBij=vCa{l#2Bx2NNuB_c^cYo1<$@C7u44EO1SDOcjL@{2q z3=j*gMCuUMs)1T5`VnorXT&(3R?YK%G0zGS`BI`;?nUfIT3H4@< z`o$19d0Mp+4O3r1>_Xl%ga`FAqcN=2;-Tei1cRKV=s7fGD=u_19w%s-S;bTQ^hd^Qu&p_e+mYyhAFQ+2` zN>oY)Mmbx}%0MTfFH9eF#Z7%U^Zi^Fx$HbTzhCXk5DR}mVVf$>gcr?f4$+gUE>q+< zKFddA^L%Fih@&T2W1&8*jD@k3ZR%XFVhkBmO z1L{jKJ)v(%CtlOg@6iK&`Z~lK zA4A5mj`mfPhrwXzW7Yu&Q(N|(`0v$linSQSj!_N6&}^OBO|(+IOJr4$3!fFN3c?LJ zLENR50R5A4!M*l}_TpjODSu!uw)X^cBaT;laz*}svK#+9h2;CS0A=90;RuXbP0TVd zNwp@4!td3Bx4kIrp_l!n=ySn6i<_T8w6PH}+AfuykJ-LN)f3H8hx1`{u@WN?%&V0L z$P+BEeg?RH{pMfEzf^6(+~=VLaMn3OV2@_wLE)- z_MB>?`VsX$)sL&B0_>8Z&xcn}KE3pp5nsG+s+;-$Nr9SCfJ{EF))%0j%~T(LgA!DA zAu{YuwXG1A-dC>zInEWSt|LWHkON18`=**R61|ztLMZReBJ0jmA6>L{&se;_{b;Q Ip8eJT0LJ^O82|tP diff --git a/bunfig.toml b/bunfig.toml index df98062..d74ac73 100644 --- a/bunfig.toml +++ b/bunfig.toml @@ -1,7 +1,7 @@ [test] # preload options -# preload = [] +preload = ['./test/polyfill.ts'] # root directory root = "./test" @@ -11,6 +11,6 @@ root = "./test" coverage = true # new -# coverageReporter = ["text", "lcov"] # default ["text"] -coverageReporter = ["lcov"] # default ["text"] +coverageReporter = ["text", "lcov"] # default ["text"] +# coverageReporter = ["lcov"] # default ["text"] coverageDir = "./coverage" # default "coverage" diff --git a/docs/assets/navigation.js b/docs/assets/navigation.js index 5699e45..e80228f 100644 --- a/docs/assets/navigation.js +++ b/docs/assets/navigation.js @@ -1 +1 @@ -window.navigationData = 
"data:application/octet-stream;base64,H4sIAAAAAAAAE42UUU/CMBSF/0uficiiqDwCGkw0MTA10fhQtzt242hJWwLD+N9NUVhbSrvXnnO+9fae7P2bKNgoMiAjvlgKkBI5Ix2ypKokAwJstZBdQzor1aIiHfKFLCeD65/OIZ9iBWm9BDe8Pz+dHKMY0aw0kllFpQTZ3St2tpeY6adH/QU5BZqDOEZYcgvOq0AV4PzJIc4sidzIMYRYw1VRgHjiEpW1FmQKREEzkF3bYsOSy74Bu2VK1F7GTglFJ84oRnbiGcIOzxLNR5De/EENIwI32IshwBiy/w7frVjWYFS91DWzVIdzfnPVu0zMonBkykXsDmNJvXU3qM9iuc9agUy5+wbFimV66bJrGWxa/8IA5SBBIK1wC2P0kmxHAFUgy+15GsheC8TnoJ6Rqf6FL38QA4ByN2vKh3p0H8QyBEACaP5ChbXThtKorRBTWFBkJ7bksQWgMhnG1u5YgrBJ7L0cSwgW6VDLBslQBWSLCiis4H6c8rdN7UMYcgCy1j/0Ic5PV8B2tEZNcF7GcdrVGvnA13HiA1/HgJFZo4NuN3XK093z+iCGfAT5+AWx2lhocQgAAA==" \ No newline at end of file +window.navigationData = "data:application/octet-stream;base64,H4sIAAAAAAAAE42UUW/aMBSF/4uf0WijlnW8jdKKSa2E2myTWu3Bc27IVYONbCNIp/33yVlDbOPYec0555N9fOD1D9Fw1GRObsV2J0EpFJxMyI7qiswJ8P1WTS3pU6W3NZmQN+QFmd/8nZzyOdaQNzvww9334eRiX5Ygn4AWIPs0q6lSoKa26jIus3PKT4l6mPJfjVGWKG8pq+Cc0Cmx9D1oVg1dxBJjjOds/Wg6U0MczzCKNVSLZ0j3uxYKtTMR5BpkSdmp5M7iwrLrmQW741o2QUarxKIrrxYruwoU4ob9Tq1wqE03/JyZwyGoYP6kxhGR43diDOA/pBUPPaEbXgL7+CXf7znrIbrZmYE7qse5+PL58jqzWGuBXPuI9mMqafbmB823VO53o0Hlwi+w3HNm5qamjsGlza4sEBOcUf0dub75KiVtVAh2ZooAC1Agkdb4DksMHs11RFAl8sItqId0WiS+gfbIs6tQ/iRGAFVbXi4WpssQxDFEQBJo8YNKZyQ9pVdHIZ5gS5EPPHvAFoGqbJHakWeJwlapvjxLDJbY0MgFqdgE1IgJaKzh2zIXL8cmhLDkCORg/owWuBmegOsYjVrhpkrjjGs08kEc0sQHcUgBE3dNXvT92OQib+sNQSz5DPLrH6vgUIvICQAA" \ No newline at end of file diff --git a/docs/assets/search.js b/docs/assets/search.js index c2eca8e..85335af 100644 --- a/docs/assets/search.js +++ b/docs/assets/search.js @@ -1 +1 @@ -window.searchData = 
"data:application/octet-stream;base64,H4sIAAAAAAAAE62bbXPbuBHHvwvylmMbT5Std/Wlba5z18kkvrs2Gk9GliCZPZlUSSp+Gn/3G5AUuUsspZXkV8kI2P8ugd8uABJ+FXn2WIjx5FX8maRzMZbqMhLp9MGJsfiqPv96k6xc8cVN5y4XkdjkKzEWs9W0KFxx3ms/uy8fViLaNouxEG/RVtdK1erOsrQo882szHiaH7AB0I/Eepq7tCSC7VzLC2Va3+tpec9z2vQ8yVs+TZfui/v/xhVlwXPbNznUv7ow3QwuXfmJP3cfYPdT/f7qyul8Wk7ZnoHBqb59r6+K7bnt/h5+D/J6DF8gQT8m+U/T2X3oc9vwLimJxFi52MY1MFSFK/d4qXscp77cq748QX3uVq7cM+If2k5sH3FXNF6enm8yP/k/f2zdLDbprEyytDgHrTtnFwiWVe+b7NvTMyEIWrmCiySdI9Y7tW0TV+ruuXTFTdarUp0eaueK3le9b7Jrb0yIonau6NwVLk+mq+TFfUyoUHEHruwe0WMkl678LUnL2BB6bRs/vmGxgiWmLq5G0na15nOWpF2Gls9rV5xXvx2kgvirRfZyp2zc2v89LfMuHZK0dPliOnPFedWwU6YrBj9lD+vcFUWSpa2USzcPxTlo2aklu4h+S/9Ms8d9Qh+6bnRxgTGRfv6dpW6fk6bPkR7++ZKs93lo+hzp4TrPylWyz0fb60gv34pyvs9H0+cQD32UP7pZ0+8fm3TWgxo37s5aa3UX/PfvXmC/2oe248BihaOj0sAn3g10Vo/V9ufTEgCp7KW/DYX08PlusVO9bj9KOV3uVq7aj1H+19rtlm46HKP9h7vrJyrWbjoco/23H8nuwW46sLVh9e7tF0D5ZuwU4BZ69wn3/c+3R55ujz/bHnqyPfFcedSp8uQz5ZEnyhPPk0edJk86Sx5xkhzyB7d46nrPLrzXg713VJ/27MR7Pdi7t6/Kb9MSoAlKQNt4gNxwQdm2cR86d9P579Mc7nG75+0aD5f74h6mSUpPENGL6+AxT0o3GDBoPUjwOlnu1mw7HCf7S/a4V/mX7PE48U/J8n6vuu/EBux6s1i4/HNWJCU8KwDMcI9D168/fGiD1bhufc/1Cygesn41YQ6sX4sdJQ86XDDq3W5PaGe8w9POjfGQJ1TJK2Z8v//89xvHY6//e3geXrlIx/vXLqbfg7ye5nOWPTwk4Vs/EtamJ8vbbSR8LX0S41fxw+XVOW4s1Jk+uxKRWCRuNfcfT+owoioMV5W8eTbbVP+9bbr97nx++M517/MLEU0uIn15Js3tbTTZ2la/Vz9sJbpfKjspoomM1OhMWmwoA0OJDJWIJirS+uxCK2SoAkOFDLWIJpqIVAd2GtkZEU0M5dAEhgYZWhFNLGVoA0OLDGMRTWLKMA4MY2Q4EtFkRBmOAsMRMrwU0eSSGJvLwO4S2V0NzeJVYHiFp9/TcEV4lCE4skdOhQ7FnCTYwfBIj4SUlG2Ij8T8SI+FVJRtiJDEDElPhqTokyFFEmMkPRzSULYhSBKTJD0f0lK2IUsSwyQ9IjKmbEOcJOZJekzkiLINkZKYKVlBRdEoQ6okxkp5VCTFlQq5Upgr5VFRFFcq5Er1ilJVlSiuFFGWMFfKo6IorlTIlcJcKY+KorhSIVcKc6U8KoriSoVcKcyV8qgosripECyFwVKeFUWBpUKwFAZLeVYUBZYKwVIYLOVZURRYKgRLYbC0Z0VRYOkQLI3B0p4VTYGlQ7A0Bkt7VjQFlg7B0r0Fr1rxKLA0seZhsLQZnGAdkqUxWdrDosm1NiRLY7K0Z0VTVOoQLI3B0p4VTVU7HYKlMVjas6IpKHUIlsZgac+KpqDUIVgag2U8K5qC0oRgGQyWqcCioDQhWAaDZdTQ0m1CsAwGy+ihPZgJwTK93dTwdorYT2GuzOCOyoRcGcyVGdxUmZArg7kyg/sqE3JlMFfGo2Ko5DchVw
ZzZTwqhkp+E3JlMFfWo2Ko5LchVxZzZT0qhspfG3JlMVfWo2Ko/LUhVxZzZT0qhspfG3JlMVe24orKXxtyZXsbdY+KofLXElt1zJX1qBgqf23IlcVcWY+KofLXhlxZzJX1qFiKKxtyZTFX1qNiKa5syJXFXMUXQ3UjDrmKMVexR8VSTMYhVzHmKvaoWIrJOOQqxlzFHhVLMRmHXMWYK39hYGIpJuOQqxhzFVf1imIyDrmKe2dAj4qlmIyJU2D9U3XM/+Hy0s1/ro/7k0n7/fRVfG/eAWi5feHwKrQS49e3t+7MP359A8d+3+Y9TatvXEBiBCSuWBJ3zUfsTqR7/fcqFFOkeru4bt8/dmL2ohOzlzwx/6q+zO6bV+adlrSdloxZWtv3MSAgMEQxW6S7hgHGyYBxskwl8C4TPBh4rtryqv7HqPrf+IIlP2+/pS+qL/0ADDANWjLF6ktmIEwYJw9QcGlpnuBnBvMgeWDMk3xW30fsZACrPBFXX8wB06jBNBqWRv2GGDClAFO80fUX2cqejARASR5Q1VVDoACmWfICWboyzDQQiGlI5I3M0pUP7WdRMECgCDR67OfrjxLgZtRojQ7RKhRUi8HMNWq8orB05aa5uAbG/wqwxMvZZXVRCeAInk/xkA7nT4O0MLw4apEyu6s/pYJnAkMkeSP9v+rKCAgHzL/mSaTVDTEwLCAKxZOo7yEACaDQTLXmCd3hRRYUDM3DeP1QkZeHEwWAMbyEbbQem09yIMlAeba8dXud4nkCea95abCub1uCUQZRKN4iUf1NQt7+TQKIB4TDU3LT+Y/muzCoZuCxLDOkVijvPosDRVjQeKEVanBfY0DVNryUL5TbXlAAOoBwy8v6QhHhgGG3PCa3MkT5MIAHw6OyUIP5AkaK+YBD2w9QISUzLLzWgsyV3FiIJUOBR1K80e6viApWNh6NXiKZl9nLE9oLSTD1krfie6XgJAMC0syAehIWRBLzcnazvbsJRgbkqWrWd817rsfqaiJ4JrAAaV6SViX6LlkSFQks8pa3hGCx++oKCRAENFre8ogFV/7CC9AD6WF5K0Gl10fTwv3VAQ8abtQsmMqYOYVbpafnFyQF1oOYh2clRcwjeDzLC+qlutANEAUzp3l15OXpuczqDEbJC7JOMh7rNhLrZO1WSerEeHL79vYXCe9hPK45AAA="; \ No newline at end of file +window.searchData = 
"data:application/octet-stream;base64,H4sIAAAAAAAAE62bbXPbNhLHvwv9VmMbj5T8rmmu1960N5nEbe+i8WRkCZZ5tUkdScVP4+/eAUmRu8RSXMl+lYyx+98F8FsQhMCXKM8eiuhi/hL9laSr6MLqSZQu7l10ES2zdLkof0/ScvpDni+eimgSbfO76CK62abLMsnS4iywOb0t7++iSbS8WxSFK6KLKHqd7MSFnLbqH5P8x8Xy1rWijcfZrmGvkhES5lmU+XZZZvl+sRNsCYQn0WaRu7SEeXXB5Lnu8i5cORKltjhOfT2qvn6D+srduXJkxE9aI34MY9sQn91i5bp5SNLS5TeLpSvO6hY2Hz+5cnnbU9slDNrehZK+HgsUmCDowrnsKmizKG/HgzVWR0fJF+nafXb/37qiLMbD9c0Pidtn9bPXGg8JLA/qJcDhw/bmxuUDPMDGdwEiEGQRgXIcmKzryoYRsDU8KBZvgsJoozM01DcwRV/kp98ukztXDMxSr/1dJorSZM1VP9lDKpgMureKh6P1J+xn/tidQPO3xv3NlYvVolywIwOHt8b2Vl8kO3Jr/h5xD4p6REywn3p+fLrMvNUvH4mdFGjdWxlAsKysL7Ovj0+EIGjlCt4k6QoNSqe2a+JKXT+VrrjMejh3eqidK3pbWV9mH7wzIYrauaIrV7g8Wdwlz+5jQqWKDbiyI6LHSK5dtdO2mtBr2/j5DYsVLDF5PouF6dbpT1mSdrvX8mnjirPqbwepIP5qkVHu4Ab0H2mZP1H7z6phr0y3OPyY3W9yVxRJlrZSLt3eF2egZf9Wtsvo9/SvNHsYEzrpzOg1BuZExvl3lrqxII3NkRH++ZxsxiI0NkdG+JBn5V0yFqO1OjLK16JcjcVobA6J0Ef5o1s2dj9t02UPaty4v2qNUV3y3755gXG1k9Zw4EUOZ0eVgS+8SxisHqvdn99WAEhllP42FTLCp+ubvep1+1HK6Xq/ctV+jPK/Nm6/dGNwjPaf7rpfqFi7MThG+4fvyf7BbgzY2nD17u0XwPJ92E6hkB9GNiA9C77wzyObkJ4F+8H1RfonVAI0Qe/bxgPkhsdy18btdO4Wqz8WOXy8d/3tGg+X++zuF0lKTxBhxQ3wkCelG0wYtB4k+CFZ79dsDY6T/TV7GFX+NXs4TvznZH07qu6N2IDVJwKfsiIp4TYJYIYt2MJ/+pxIbusW9hFiHb8nh89AGIrorXGx2bh0NS520hruO05pUtsb68sT2DqMxWuMj49Z8cAIt7M7PtIyu79PwsPuMFRreFAs8mBqAIVe+/seTOGuHHAwFXYJHkw98Lty0tq+KSLade6Nt3fbyYy2JN7ARsYWOhwaOywCb/ef/349YIQ7j/eJvu9IjAzOORRjxj4w8lvjDiwGQxO9bz0IIl5NIr95eIwuXqLvLq8AuYjkqTqdRZPoJnF3K/8DaJ3KpErFVc/4VbbcVv+9asz+cL5yvXFtfXYeTebnEzU7NUJeXU3mO+eqofrDTqP7S+UooslcUI4icBTIUUaTuZxIe2qwnwz8JPJT0WSuqIAqcFTIUUeTuaYcdeCokaOJJnNDOZrA0SBHG03mlnK0gaNFjnE0mceUYxw4xshxOjCo08Bvivxm0WQ+nSh9qs81cpwFjjM8/Z6GGTn/ITmih07FzjkVVhD0YHyEp0LQ5IUICcyQUAPjJEKIBKZIeDaEJOOGIAlMkjDDPQ5hEpgm4RkRJPoiBEpgokQ81OMQKYGZEtMhOESIlcBcCU+LIKtOhGgJzJb0uAiy8mQIl8RwyQousvpkCJfsrU0VXGQFSmJ9wnBJD4yYks4hXhLjJSu8yIKSIV4S4yU9MJJcxmWIl8R4SQ+MJAtKhnhJjJes8CKrQoaASQyYrFYtEmwZEiYxYdIzI0nCZEiYxIQpz4wkCVMhYQoTpjwzkiRMhYQpTJiqHn8kYSokTPUegdXyRRKmiKcgJkx5ZiRJmAoJU5gw5ZlR5A
KmQsIUJkx5ZpQgnUPCFCZMeWYUSZgKCVOYMOWZUfSuISRMYcKUZ0aRhKmQMIUJ054ZRRKmQ8I0Jkx7ZhRJmA4J05gw7ZlRJGE6JExjwnS1yyIJ0yFhurfR0oPzrIm9FiZMV4SReOqQMI0J054ZTS6AOiRMY8K0Z0aTC6AOCdOYMO2Z0SSeOiRMY8K0Z0aTeOqQMI0JM54ZTeJpQsIMJsx4ZjS9uQ0JM5gw45nRJJ4mJMxgwoxnRpN4mpAwgwkz1V6exNOEhJnedt4zo0nCDLGjx4QZz4whCTMhYQYTZjwzhiTMhIQZTJjxzBiSMBMSZjBhxjNjSMJMSJjBhFnPjCEJsyFhFhNmPTOGJMyGhFlMmPXMGEutJDYkzGLCrGfGkITZkDCLCfOXCeaGJMyGhFlMmK3eGEnCbEiY7b00Vm+NJGGWeG/EhFnPjCWXXhsSZjFh1jNjScJsSJjFhNnZwAuGDfmymK/4fHCW45CvGPMVe2IsSXYc8hVjvmI5uJ2JQ75izFfsibFkWcQhXzHmK/bEWLIs4pCvGPMVe2IsufDGIV8x5iu2g4jEIV/Nn6qTp+8uL93ql/oEaj5vf75/ib41x1Iq3p2DvURaRhcvr6/dMdTFyys4ifJtPtLu14VOxEw7EasPECmqnwyA0AwIGZ5Q9YNvJ6EV6NCMJXHd3OgAo6I7EcUUaa7MdiLdYf1LJHijUots2p+1wMgYMDLiALG8+SEW5CVAXrwpr6V2R/kgK0CPVTwp/7t0md0GaUmQlrQsrd1pbCdigYiNa7eYLdad4QMUzgEKplHkjVr9fcrWf5+yaL5h6XSB7DlXrfv9BgwbGLXac1r/I1TzbzMMlsfxqr2Zc7PFxdldA3qJNA/B3eccgJhOg1ff4ALkKsE9B5UupzyxJF/W3/2AcgDVwBJx9SU/kAhYtRSv0G/8NwdhaYJyink6Sboqq99AQDoALckb5Oq7HrCGgonm+ocFLYCM4MG3duV9e7UbKAFqJK9a1q7M628KgAwYGCGa6mAPUH+YBZgsyVsQGpVCIh1QVpJH4NpVq4rfw4JpB4OkmINUXZcEFQ4qSvEqKpx2Dabd8PKoRcrsur7VBPoEVzdeRfyvurgG0gH1rXkSaXVPFQwLyELxJOoPMjoJsEGaNeDxxndzjXY3Ciw1mofuJsXjAYpA856Mm/puNZgWgL7ioV8VY95++NVpgQ4JHi3hsgkqiNcjL/G9uU4FhgbkYniD2wnl3W0y8LQDg214qRVycIek4cOTt1IU0u3u9QEdMH2Gt2srJJEOgNrwnhM7GaLUNZhEw2OqkJv7aj0l9rhgJRPcgW/Uwm2uBQuA5dXt4KYFrI6S91As8MMZrtFcf+JpAYZb8VgK9hlg/hUvFS+RrMrs+RFtoAQoPMkjySsF77IgIc1MqCdhwejGvGHZ7i6Pg0TAM0fZ2k/z+vVQ3Y0GZQHQ0zz0mutxoFNgFbK8Yqg0rpN1uEgakJA5IKFW7La6DAoEQUVY3hMAC975q6tAD0yh4VUY8WILHynNe1V8QHL9WrFAL+Y9WlodvF204CEQM5naKT0+PSMpMPIxr14qKQIKALzh7ZCeq09cQM2AfmneSD8/PpVZvaSg1QQsA5LRratJtEk27i5JXXQxv3p9/RuGvnL3mEIAAA=="; \ No newline at end of file diff --git a/docs/classes/BufferReader.html b/docs/classes/BufferReader.html new file mode 100644 index 0000000..bdef8f5 --- /dev/null +++ b/docs/classes/BufferReader.html @@ -0,0 +1,12 @@ +BufferReader | s2-pmtiles - v1.1.0

Class BufferReader

Buffer reader is used on files that are small and easy to read in memory. Faster then the Filesystem

+

Implements

Constructors

Properties

Methods

Constructors

  • Parameters

    • buffer: Uint8Array

      the input data is the entire pmtiles file

      +

    Returns BufferReader

Properties

buffer: Uint8Array

the input data is the entire pmtiles file

+

Methods

  • Parameters

    • offset: number

      the offset of the range

      +
    • length: number

      the length of the range

      +

    Returns Promise<Uint8Array>

      +
    • the ranged buffer
    • +
    +
diff --git a/docs/classes/BufferWriter.html b/docs/classes/BufferWriter.html new file mode 100644 index 0000000..751d7a6 --- /dev/null +++ b/docs/classes/BufferWriter.html @@ -0,0 +1,13 @@ +BufferWriter | s2-pmtiles - v1.1.0

Class BufferWriter

Buffer writer is used on smaller datasets that are easy to write in memory. Faster then the Filesystem

+

Methods

  • Parameters

    • data: Uint8Array

      the data to append

      +

    Returns Promise<void>

  • Parameters

    • data: Uint8Array

      the data to append

      +

    Returns void

  • Returns Uint8Array

      +
    • the buffer
    • +
    +
  • Parameters

    • data: Uint8Array

      the data to write

      +
    • offset: number

      where in the buffer to start

      +

    Returns Promise<void>

diff --git a/docs/classes/DirCache.html b/docs/classes/DirCache.html index 6b6a2f6..664a313 100644 --- a/docs/classes/DirCache.html +++ b/docs/classes/DirCache.html @@ -1,4 +1,4 @@ -DirCache | s2-pmtiles - v1.0.1

Class DirCache<K, V>

A cache of directories. +DirCache | s2-pmtiles - v1.1.0

Class DirCache<K, V>

A cache of directories. The key is the offset in the data and the value is the directory entries.

Type Parameters

Hierarchy

  • Map<K, V>
    • DirCache

Constructors

Methods

delete @@ -16,4 +16,4 @@
  • Parameters

    • key: K

      the offset position in the data

    • dir: V

      the directory entries

    Returns this

    this

    -
+
diff --git a/docs/classes/FetchReader.html b/docs/classes/FetchReader.html new file mode 100644 index 0000000..26ca7ab --- /dev/null +++ b/docs/classes/FetchReader.html @@ -0,0 +1,15 @@ +FetchReader | s2-pmtiles - v1.1.0

Class FetchReader

The browser reader that fetches data from a URL.

+

Implements

Constructors

Properties

Methods

Constructors

  • Parameters

    • path: string

      the location of the PMTiles data

      +
    • rangeRequests: boolean

      FetchReader specific; enable range requests or use urlParam "bytes"

      +

    Returns FetchReader

Properties

path: string

the location of the PMTiles data

+
rangeRequests: boolean

FetchReader specific; enable range requests or use urlParam "bytes"

+

Methods

  • Parameters

    • offset: number

      the offset of the range

      +
    • length: number

      the length of the range

      +

    Returns Promise<Uint8Array>

      +
    • the ranged buffer
    • +
    +
diff --git a/docs/classes/PMTilesReader.html b/docs/classes/PMTilesReader.html deleted file mode 100644 index 602a43d..0000000 --- a/docs/classes/PMTilesReader.html +++ /dev/null @@ -1,31 +0,0 @@ -PMTilesReader | s2-pmtiles - v1.0.1

Class PMTilesReader

The File reader is to be used by bun/node/deno on the local filesystem.

-

Constructors

Properties

Methods

Constructors

  • Given an input path, read in the header and root directory

    -

    Parameters

    • path: string

      the location of the PMTiles data

      -
    • maxSize: number = 20

      the max size of the cache before dumping old data. Defaults to 20.

      -

    Returns PMTilesReader

Properties

path: string

the location of the PMTiles data

-

Methods

  • Returns Promise<Metadata>

      -
    • the metadata of the archive
    • -
    -
  • Parameters

    • zoom: number

      the zoom level of the tile

      -
    • x: number

      the x coordinate of the tile

      -
    • y: number

      the y coordinate of the tile

      -

    Returns Promise<undefined | Uint8Array>

      -
    • the bytes of the tile at the given (z, x, y) coordinates, or undefined if the tile does not exist in the archive.
    • -
    -
  • Parameters

    • face: Face

      the Open S2 projection face

      -
    • zoom: number

      the zoom level of the tile

      -
    • x: number

      the x coordinate of the tile

      -
    • y: number

      the y coordinate of the tile

      -

    Returns Promise<undefined | Uint8Array>

      -
    • the bytes of the tile at the given (face, zoom, x, y) coordinates, or undefined if the tile does not exist in the archive.
    • -
    -
diff --git a/docs/classes/S2PMTilesReader.html b/docs/classes/S2PMTilesReader.html index c20aa5b..e9fc197 100644 --- a/docs/classes/S2PMTilesReader.html +++ b/docs/classes/S2PMTilesReader.html @@ -1,17 +1,15 @@ -S2PMTilesReader | s2-pmtiles - v1.0.1

Class S2PMTilesReader

The File reader is to be used by bun/node/deno on the local filesystem.

+S2PMTilesReader | s2-pmtiles - v1.1.0

Class S2PMTilesReader

The File reader is to be used by bun/node/deno on the local filesystem.

Constructors

  • Given an input path, read in the header and root directory

    -

    Parameters

    • path: string

      the location of the PMTiles data

      -
    • rangeRequests: boolean = false

      enable range requests or use urlParam "bytes"

      +

      Parameters

      • path: string | Reader

        the location of the PMTiles data

        +
      • rangeRequests: boolean = false

        FetchReader specific; enable range requests or use urlParam "bytes"

      • maxSize: number = 20

        the max size of the cache before dumping old data. Defaults to 20.

        -

      Returns S2PMTilesReader

Properties

path: string

the location of the PMTiles data

-
rangeRequests: boolean = false

enable range requests or use urlParam "bytes"

+

Returns S2PMTilesReader

Properties

path: string | Reader

the location of the PMTiles data

Methods

  • Returns Promise<Header>

    • the header of the archive
    @@ -31,4 +29,4 @@

Returns Promise<undefined | Uint8Array>

  • the bytes of the tile at the given (face, zoom, x, y) coordinates, or undefined if the tile does not exist in the archive.
-
+
diff --git a/docs/classes/PMTilesWriter.html b/docs/classes/S2PMTilesWriter.html similarity index 64% rename from docs/classes/PMTilesWriter.html rename to docs/classes/S2PMTilesWriter.html index 82350b2..3f39002 100644 --- a/docs/classes/PMTilesWriter.html +++ b/docs/classes/S2PMTilesWriter.html @@ -1,15 +1,18 @@ -PMTilesWriter | s2-pmtiles - v1.0.1

Class PMTilesWriter

Write a PMTiles file.

-

Constructors

Properties

Methods

Constructors

Properties

compression: Compression = Compression.Gzip

the compression algorithm

type: TileType

the tile type

+
writer: Writer

the writer to append to

Methods

  • Finish writing by building the header with root and leaf directories

    Parameters

    • metadata: Metadata

      the metadata to store

    Returns Promise<void>

  • Write a tile to the PMTiles file given its tile ID.

    @@ -27,4 +30,4 @@
  • x: number

    the tile X coordinate

  • y: number

    the tile Y coordinate

  • data: Uint8Array

    the tile data to store

    -

Returns Promise<void>

+

Returns Promise<void>

diff --git a/docs/enums/Compression.html b/docs/enums/Compression.html index 3c7df65..1cccbc3 100644 --- a/docs/enums/Compression.html +++ b/docs/enums/Compression.html @@ -1,4 +1,4 @@ -Compression | s2-pmtiles - v1.0.1

Enumeration Compression

Enum representing a compression algorithm used. +Compression | s2-pmtiles - v1.1.0

Enumeration Compression

Enum representing a compression algorithm used. 0 = unknown compression, for if you must use a different or unspecified algorithm. 1 = no compression. 2 = gzip @@ -14,4 +14,4 @@

None: 1

no compression.

Unknown: 0

unknown compression, for if you must use a different or unspecified algorithm.

Zstd: 4

zstd.

-
+
diff --git a/docs/enums/TileType.html b/docs/enums/TileType.html index 6e10416..e8bd66b 100644 --- a/docs/enums/TileType.html +++ b/docs/enums/TileType.html @@ -1,4 +1,4 @@ -TileType | s2-pmtiles - v1.0.1

Enumeration TileType

Describe the type of tiles stored in the archive. +TileType | s2-pmtiles - v1.1.0

Enumeration TileType

Describe the type of tiles stored in the archive. 0 is unknown/other, 1 is "MVT" vector tiles.

Enumeration Members

Avif Jpeg @@ -12,4 +12,4 @@
Png: 2

Image tiles.

Unknown: 0

unknown/other.

Webp: 4

Image tiles.

-
+
diff --git a/docs/functions/bytesToHeader.html b/docs/functions/bytesToHeader.html index 6118ef2..c52cf21 100644 --- a/docs/functions/bytesToHeader.html +++ b/docs/functions/bytesToHeader.html @@ -1,4 +1,4 @@ -bytesToHeader | s2-pmtiles - v1.0.1

Function bytesToHeader

  • Parse raw header bytes into a Header object.

    +bytesToHeader | s2-pmtiles - v1.1.0

    Function bytesToHeader

    • Parse raw header bytes into a Header object.

      Parameters

      • bytes: Uint8Array

        the raw header bytes

      Returns Header

      the parsed header

      -
    +
diff --git a/docs/functions/concatUint8Arrays.html b/docs/functions/concatUint8Arrays.html new file mode 100644 index 0000000..e2a411e --- /dev/null +++ b/docs/functions/concatUint8Arrays.html @@ -0,0 +1,5 @@ +concatUint8Arrays | s2-pmtiles - v1.1.0

Function concatUint8Arrays

  • Parameters

    • uint8arrays: Uint8Array[]

      the Uint8Arrays to concatenate

      +

    Returns Promise<Uint8Array>

      +
    • the concatenated Uint8Array
    • +
    +
diff --git a/docs/functions/deserializeDir.html b/docs/functions/deserializeDir.html index 58c29c4..c001280 100644 --- a/docs/functions/deserializeDir.html +++ b/docs/functions/deserializeDir.html @@ -1,5 +1,5 @@ -deserializeDir | s2-pmtiles - v1.0.1

Function deserializeDir

  • Parameters

    • buffer: Uint8Array

      the buffer to deserialize

      +deserializeDir | s2-pmtiles - v1.1.0

      Function deserializeDir

      • Parameters

        • buffer: Uint8Array

          the buffer to deserialize

        Returns Entry[]

        • the deserialized entries
        -
      +
diff --git a/docs/functions/findTile.html b/docs/functions/findTile.html index 3c02523..36d22f7 100644 --- a/docs/functions/findTile.html +++ b/docs/functions/findTile.html @@ -1,5 +1,5 @@ -findTile | s2-pmtiles - v1.0.1

Function findTile

  • Low-level function for looking up a TileID or leaf directory inside a directory.

    +findTile | s2-pmtiles - v1.1.0

    Function findTile

    • Low-level function for looking up a TileID or leaf directory inside a directory.

      Parameters

      • entries: Entry[]

        the directory entries

      • tileID: number

        the tile ID

      Returns Entry | null

      the entry associated with the tile, or null if not found

      -
    +
diff --git a/docs/functions/getUint64.html b/docs/functions/getUint64.html index b61e672..2b5795a 100644 --- a/docs/functions/getUint64.html +++ b/docs/functions/getUint64.html @@ -1,6 +1,6 @@ -getUint64 | s2-pmtiles - v1.0.1

Function getUint64

  • Parameters

    • dv: DataView

      a DataView

      +getUint64 | s2-pmtiles - v1.1.0

      Function getUint64

      • Parameters

        • dv: DataView

          a DataView

        • offset: number

          the offset in the DataView

        Returns number

        • the decoded 64-bit number
        -
      +
diff --git a/docs/functions/headerToBytes.html b/docs/functions/headerToBytes.html index 255453b..59999ea 100644 --- a/docs/functions/headerToBytes.html +++ b/docs/functions/headerToBytes.html @@ -1,3 +1,3 @@ -headerToBytes | s2-pmtiles - v1.0.1

Function headerToBytes

  • Parameters

    • header: Header

      the header object

      +headerToBytes | s2-pmtiles - v1.1.0

      Function headerToBytes

      • Parameters

        • header: Header

          the header object

        Returns Uint8Array

        the raw header bytes

        -
      +
diff --git a/docs/functions/readVarint.html b/docs/functions/readVarint.html index 95ecdcf..349f7b2 100644 --- a/docs/functions/readVarint.html +++ b/docs/functions/readVarint.html @@ -1,5 +1,5 @@ -readVarint | s2-pmtiles - v1.0.1

Function readVarint

  • Parameters

    • bufPos: BufferPosition

      the buffer with it's position

      +readVarint | s2-pmtiles - v1.1.0

      Function readVarint

      • Parameters

        Returns number

        • the decoded number
        -
      +
diff --git a/docs/functions/readVarintRemainder.html b/docs/functions/readVarintRemainder.html index 54a96c6..3b56f28 100644 --- a/docs/functions/readVarintRemainder.html +++ b/docs/functions/readVarintRemainder.html @@ -1,6 +1,6 @@ -readVarintRemainder | s2-pmtiles - v1.0.1

Function readVarintRemainder

  • Parameters

    • low: number

      the low 32 bits of the number

      +readVarintRemainder | s2-pmtiles - v1.1.0

      Function readVarintRemainder

      • Parameters

        • low: number

          the low 32 bits of the number

        • bufPos: BufferPosition

          the buffer with its position

        Returns number

        • the decoded remainder
        -
      +
diff --git a/docs/functions/s2BytesToHeader.html b/docs/functions/s2BytesToHeader.html index 098ea42..9f6b8d9 100644 --- a/docs/functions/s2BytesToHeader.html +++ b/docs/functions/s2BytesToHeader.html @@ -1,4 +1,4 @@ -s2BytesToHeader | s2-pmtiles - v1.0.1

Function s2BytesToHeader

  • Parse raw header bytes into a Header object.

    +s2BytesToHeader | s2-pmtiles - v1.1.0

    Function s2BytesToHeader

    • Parse raw header bytes into a Header object.

      Parameters

      • bytes: Uint8Array

        the raw header bytes

      Returns S2Header

      the parsed header

      -
    +
diff --git a/docs/functions/s2HeaderToBytes.html b/docs/functions/s2HeaderToBytes.html index d5f618e..7f796f1 100644 --- a/docs/functions/s2HeaderToBytes.html +++ b/docs/functions/s2HeaderToBytes.html @@ -1,3 +1,3 @@ -s2HeaderToBytes | s2-pmtiles - v1.0.1

Function s2HeaderToBytes

  • Parameters

    • header: S2Header

      the header object

      +s2HeaderToBytes | s2-pmtiles - v1.1.0

      Function s2HeaderToBytes

      • Parameters

        Returns Uint8Array

        the raw header bytes

        -
      +
diff --git a/docs/functions/serializeDir.html b/docs/functions/serializeDir.html index 3398b3c..bf5540a 100644 --- a/docs/functions/serializeDir.html +++ b/docs/functions/serializeDir.html @@ -1,5 +1,5 @@ -serializeDir | s2-pmtiles - v1.0.1

Function serializeDir

  • Parameters

    • entries: Entry[]

      the directory entries

      +serializeDir | s2-pmtiles - v1.1.0

      Function serializeDir

      • Parameters

        • entries: Entry[]

          the directory entries

        Returns Uint8Array

        • the serialized directory
        -
      +
diff --git a/docs/functions/setUint64.html b/docs/functions/setUint64.html index d9dac1b..3360cc4 100644 --- a/docs/functions/setUint64.html +++ b/docs/functions/setUint64.html @@ -1,5 +1,5 @@ -setUint64 | s2-pmtiles - v1.0.1

Function setUint64

  • Take a large 64-bit number and encode it into a DataView

    +setUint64 | s2-pmtiles - v1.1.0

    Function setUint64

    • Take a large 64-bit number and encode it into a DataView

      Parameters

      • dv: DataView

        a DataView

      • offset: number

        the offset in the DataView

      • value: number

        the encoded 64-bit number

        -

      Returns void

    +

Returns void

diff --git a/docs/functions/tileIDToZxy.html b/docs/functions/tileIDToZxy.html index 99fbb6e..3be5785 100644 --- a/docs/functions/tileIDToZxy.html +++ b/docs/functions/tileIDToZxy.html @@ -1,6 +1,6 @@ -tileIDToZxy | s2-pmtiles - v1.0.1

Function tileIDToZxy

  • Convert a Hilbert TileID to Z,X,Y.

    +tileIDToZxy | s2-pmtiles - v1.1.0

    Function tileIDToZxy

    • Convert a Hilbert TileID to Z,X,Y.

      Parameters

      • i: number

        the encoded tile ID

      Returns Tile

      • the decoded Z,X,Y
      -
    +
diff --git a/docs/functions/writeBigVarint.html b/docs/functions/writeBigVarint.html index a22a191..a992a5f 100644 --- a/docs/functions/writeBigVarint.html +++ b/docs/functions/writeBigVarint.html @@ -1,4 +1,4 @@ -writeBigVarint | s2-pmtiles - v1.0.1

Function writeBigVarint

  • Write a varint larger then 54-bits.

    +writeBigVarint | s2-pmtiles - v1.1.0

    Function writeBigVarint

    • Write a varint larger than 54-bits.

      Parameters

      • val: number

        the number

      • bufPos: BufferPosition

        the buffer with its position to write at

        -

      Returns void

    +

Returns void

diff --git a/docs/functions/writeBigVarintHigh.html b/docs/functions/writeBigVarintHigh.html index efe89ef..8880d9a 100644 --- a/docs/functions/writeBigVarintHigh.html +++ b/docs/functions/writeBigVarintHigh.html @@ -1,4 +1,4 @@ -writeBigVarintHigh | s2-pmtiles - v1.0.1

Function writeBigVarintHigh

  • Write a varint larger then 54-bits on the high end

    +writeBigVarintHigh | s2-pmtiles - v1.1.0

    Function writeBigVarintHigh

    • Write a varint larger than 54-bits on the high end

      Parameters

      • high: number

        the high 32 bits

      • bufPos: BufferPosition

        the buffer with its position to write at

        -

      Returns void

    +

Returns void

diff --git a/docs/functions/writeBigVarintLow.html b/docs/functions/writeBigVarintLow.html index 3584614..2642d01 100644 --- a/docs/functions/writeBigVarintLow.html +++ b/docs/functions/writeBigVarintLow.html @@ -1,5 +1,5 @@ -writeBigVarintLow | s2-pmtiles - v1.0.1

Function writeBigVarintLow

  • Write a varint larger then 54-bits on the low end

    +writeBigVarintLow | s2-pmtiles - v1.1.0

    Function writeBigVarintLow

    • Write a varint larger than 54-bits on the low end

      Parameters

      • low: number

        lower 32 bits

      • _high: number

        unused "high" bits

      • bufPos: BufferPosition

        the buffer with its position to write at

        -

      Returns void

    +

Returns void

diff --git a/docs/functions/writeVarint.html b/docs/functions/writeVarint.html index e93abd0..178d96c 100644 --- a/docs/functions/writeVarint.html +++ b/docs/functions/writeVarint.html @@ -1,5 +1,5 @@ -writeVarint | s2-pmtiles - v1.0.1

Function writeVarint

  • Write a varint. Can be max 64-bits. Numbers are coerced to an unsigned +writeVarint | s2-pmtiles - v1.1.0

    Function writeVarint

    • Write a varint. Can be max 64-bits. Numbers are coerced to an unsigned whole number before using this function.

      Parameters

      • val: number

        any whole unsigned number.

      • bufPos: BufferPosition

        the buffer with its position to write at

        -

      Returns void

    +

Returns void

diff --git a/docs/functions/zxyToTileID.html b/docs/functions/zxyToTileID.html index 4453c5f..dd0c423 100644 --- a/docs/functions/zxyToTileID.html +++ b/docs/functions/zxyToTileID.html @@ -1,8 +1,8 @@ -zxyToTileID | s2-pmtiles - v1.0.1

Function zxyToTileID

  • Convert Z,X,Y to a Hilbert TileID.

    +zxyToTileID | s2-pmtiles - v1.1.0

    Function zxyToTileID

    • Convert Z,X,Y to a Hilbert TileID.

      Parameters

      • zoom: number

        the zoom level

      • x: number

        the x coordinate

      • y: number

        the y coordinate

      Returns number

      • the Hilbert encoded TileID
      -
    +
diff --git a/docs/hierarchy.html b/docs/hierarchy.html index 7813575..227e580 100644 --- a/docs/hierarchy.html +++ b/docs/hierarchy.html @@ -1 +1 @@ -s2-pmtiles - v1.0.1

s2-pmtiles - v1.0.1

Class Hierarchy

+s2-pmtiles - v1.1.0

s2-pmtiles - v1.1.0

Class Hierarchy

diff --git a/docs/index.html b/docs/index.html index dfc34bb..a4d9503 100644 --- a/docs/index.html +++ b/docs/index.html @@ -1,4 +1,4 @@ -s2-pmtiles - v1.0.1

s2-pmtiles - v1.0.1

+s2-pmtiles - v1.1.0

s2-pmtiles - v1.1.0

s2-pmtiles

@@ -35,12 +35,12 @@

#bun
bun add s2-pmtiles
# pnpm
pnpm add s2-pmtiles
# yarn
yarn add s2-pmtiles
# npm
npm install s2-pmtiles

# cargo
cargo install s2-pmtiles
-
import { PMTilesReader, PMTilesWriter } from 's2-pmtiles'

// The File Reader you can run on bun/node/deno
const testFixture1 = new PMTilesReader(`test/fixtures/test_fixture_1.pmtiles`);
// get an WM tile
let x = 0;
let y = 0;
let z = 0;
let face = 0;
testFixture1.getTile(x, y, z); // undefied | Uint8Array
// get an S2 tile
testFixture1.getTileS2(face, x, y, z); // undefined | Uint8Array

// The File Writer you can run on bun/node/deno
const testFixture2 = new PMTilesWriter(`tmpFile.pmtiles`);
// write a tile
testFixture2.writeTileXYZ(x, y, z, Uint8Array.from([]));
// write an S2 tile
testFixture2.writeTileS2(face, x, y, z, Uint8Array.from([]));
// when you finish you commit to build the metadata
testFixture2.commit();

// The File Reader you can run in the browser
import { S2PMTilesReader } from 's2-pmtiles/browser';
// you want to add a true after the path for generic PMTiles, as it ensures 206 byte requests.
const browserFixture = new S2PMTilesReader(`https://www.example.com/test.pmtiles`, true);
// get an WM tile
browserFixture.getTile(x, y, z); // undefied | Uint8Array
// get an S2 tile
browserFixture.getTileS2(face, x, y, z); // undefined | Uint8Array +
import { S2PMTilesReader, S2PMTilesWriter, TileType } from 's2-pmtiles'
import { FileReader, FileWriter } from 's2-pmtiles/file';

// The File Reader you can run on bun/node/deno
const testFixture1 = new S2PMTilesReader(new FileReader('test/fixtures/test_fixture_1.pmtiles'));
// get an WM tile
let x = 0;
let y = 0;
let z = 0;
let face = 0;
testFixture1.getTile(x, y, z); // undefined | Uint8Array
// get an S2 tile
testFixture1.getTileS2(face, x, y, z); // undefined | Uint8Array

// The File Writer you can run on bun/node/deno
const testFixture2 = new S2PMTilesWriter(new FileWriter('tmpFile.pmtiles'), TileType.Pbf);
// write a tile
testFixture2.writeTileXYZ(x, y, z, Uint8Array.from([]));
// write an S2 tile
testFixture2.writeTileS2(face, x, y, z, Uint8Array.from([]));
// when you finish you commit to build the metadata
testFixture2.commit();


// The File Reader you can run in the browser
import { S2PMTilesReader } from 's2-pmtiles';
// you want to add a true after the path for generic PMTiles, as it ensures 206 byte requests.
const browserFixture = new S2PMTilesReader('https://www.example.com/test.pmtiles', true);
// get an WM tile
browserFixture.getTile(x, y, z); // undefined | Uint8Array
// get an S2 tile
browserFixture.getTileS2(face, x, y, z); // undefined | Uint8Array
-

Some tsconfigs might need some extra help to see the s2-pmtiles/browser package.

+

Some tsconfigs might need some extra help to see the s2-pmtiles/file or s2-pmtiles/mmap package.

To fix this update your tsconfig.json with the following:

-
{
"compilerOptions": {
"baseUrl": "./",
"paths": {
"s2-pmtiles/browser": ["./node_modules/s2-pmtiles/dist/browser.d.ts"]
}
}
} +
{
"compilerOptions": {
"baseUrl": "./",
"paths": {
"s2-pmtiles/file": ["./node_modules/s2-pmtiles/dist/file.d.ts"],
"s2-pmtiles/mmap": ["./node_modules/s2-pmtiles/dist/mmap.d.ts"]
}
}
}

@@ -60,4 +60,4 @@
cargo tarpaulin
# bacon
bacon coverage # or type `l` inside the tool
-
+

diff --git a/docs/interfaces/BufferPosition.html b/docs/interfaces/BufferPosition.html index fc137ec..0eadae9 100644 --- a/docs/interfaces/BufferPosition.html +++ b/docs/interfaces/BufferPosition.html @@ -1,2 +1,2 @@ -BufferPosition | s2-pmtiles - v1.0.1

Interface BufferPosition

A buffer with the position to read from

-
+BufferPosition | s2-pmtiles - v1.1.0

Interface BufferPosition

A buffer with the position to read from

+
diff --git a/docs/interfaces/Entry.html b/docs/interfaces/Entry.html index cc26cd4..6fef49c 100644 --- a/docs/interfaces/Entry.html +++ b/docs/interfaces/Entry.html @@ -1,2 +1,2 @@ -Entry | s2-pmtiles - v1.0.1

Interface Entry

PMTiles v3 directory entry.

-
+Entry | s2-pmtiles - v1.1.0

Interface Entry

PMTiles v3 directory entry.

+
diff --git a/docs/interfaces/Header.html b/docs/interfaces/Header.html index 59bc133..817ee4e 100644 --- a/docs/interfaces/Header.html +++ b/docs/interfaces/Header.html @@ -1,2 +1,2 @@ -Header | s2-pmtiles - v1.0.1

Interface Header

PMTiles v3 header storing basic archive-level information.

-

Hierarchy (view full)

+Header | s2-pmtiles - v1.1.0

Interface Header

PMTiles v3 header storing basic archive-level information.

+

Hierarchy (view full)

diff --git a/docs/interfaces/Reader.html b/docs/interfaces/Reader.html new file mode 100644 index 0000000..cdbdc97 --- /dev/null +++ b/docs/interfaces/Reader.html @@ -0,0 +1,2 @@ +Reader | s2-pmtiles - v1.1.0

Interface Reader

The de facto interface for all readers.

+

Implemented by

diff --git a/docs/interfaces/S2Entries.html b/docs/interfaces/S2Entries.html index e1c1431..4371d98 100644 --- a/docs/interfaces/S2Entries.html +++ b/docs/interfaces/S2Entries.html @@ -1,2 +1,2 @@ -S2Entries | s2-pmtiles - v1.0.1

Interface S2Entries

Store entries for each Face

-
+S2Entries | s2-pmtiles - v1.1.0

Interface S2Entries

Store entries for each Face

+
diff --git a/docs/interfaces/S2Header.html b/docs/interfaces/S2Header.html index fdd1507..3e2a734 100644 --- a/docs/interfaces/S2Header.html +++ b/docs/interfaces/S2Header.html @@ -1,2 +1,2 @@ -S2Header | s2-pmtiles - v1.0.1

Interface S2Header

S2PMTiles v3 header storing basic archive-level information.

-

Hierarchy (view full)

+S2Header | s2-pmtiles - v1.1.0

Interface S2Header

S2PMTiles v3 header storing basic archive-level information.

+

Hierarchy (view full)

diff --git a/docs/interfaces/Writer.html b/docs/interfaces/Writer.html new file mode 100644 index 0000000..6edaee3 --- /dev/null +++ b/docs/interfaces/Writer.html @@ -0,0 +1,2 @@ +Writer | s2-pmtiles - v1.1.0

Interface Writer

The de facto interface for all writers.

+
diff --git a/docs/modules.html b/docs/modules.html index 9501f90..de878d7 100644 --- a/docs/modules.html +++ b/docs/modules.html @@ -1,18 +1,23 @@ -s2-pmtiles - v1.0.1

s2-pmtiles - v1.0.1

Index

Enumerations

Compression +s2-pmtiles - v1.1.0
+
diff --git a/docs/types/DecompressFunc.html b/docs/types/DecompressFunc.html index 696e415..a756964 100644 --- a/docs/types/DecompressFunc.html +++ b/docs/types/DecompressFunc.html @@ -1,4 +1,4 @@ -DecompressFunc | s2-pmtiles - v1.0.1

Type Alias DecompressFunc

DecompressFunc: ((buf: Uint8Array, compression: Compression) => Promise<Uint8Array>)

Provide a decompression implementation that acts on buf and returns decompressed data.

+DecompressFunc | s2-pmtiles - v1.1.0

Type Alias DecompressFunc

DecompressFunc: ((buf: Uint8Array, compression: Compression) => Promise<Uint8Array>)

Provide a decompression implementation that acts on buf and returns decompressed data.

Should use the native DecompressionStream on browsers, zlib on node. Should throw if the compression algorithm is not supported.

-
+
diff --git a/docs/types/Point.html b/docs/types/Point.html index e326811..13cc65d 100644 --- a/docs/types/Point.html +++ b/docs/types/Point.html @@ -1,2 +1,2 @@ -Point | s2-pmtiles - v1.0.1

Type Alias Point

Point: [x: number, y: number]

An array of two numbers representing a point in 2D space.

-
+Point | s2-pmtiles - v1.1.0

Type Alias Point

Point: [x: number, y: number]

An array of two numbers representing a point in 2D space.

+
diff --git a/docs/types/Tile.html b/docs/types/Tile.html index ca684ae..078b7d5 100644 --- a/docs/types/Tile.html +++ b/docs/types/Tile.html @@ -1,2 +1,2 @@ -Tile | s2-pmtiles - v1.0.1

Type Alias Tile

Tile: [zoom: number, x: number, y: number]

A tile, in the format of ZXY.

-
+Tile | s2-pmtiles - v1.1.0

Type Alias Tile

Tile: [zoom: number, x: number, y: number]

A tile, in the format of ZXY.

+
diff --git a/package.json b/package.json index 07eff95..2182b79 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "s2-pmtiles", - "version": "1.0.1", + "version": "1.1.0", "description": "This library reads/writes PMTiles V3.0 as well as S2PMTiles V1.0", "keywords": [ "PMTiles", @@ -20,14 +20,20 @@ ".": { "require": "./dist/index.js", "import": "./dist/index.js", - "browser": "./dist/browser.js", + "file": "./dist/file.js", + "mmap": "./dist/mmap.js", "types": "./dist/index.d.ts" }, "./dist/*": "./dist/*", - "./browser": { - "require": "./dist/browser.js", - "import": "./dist/browser.js", - "types": "./dist/browser.d.ts" + "./file": { + "require": "./dist/file.js", + "import": "./dist/file.js", + "types": "./dist/file.d.ts" + }, + "./mmap": { + "require": "./dist/mmap.js", + "import": "./dist/mmap.js", + "types": "./dist/mmap.d.ts" } }, "scripts": { diff --git a/src/browser.ts b/src/browser.ts deleted file mode 100644 index 0839125..0000000 --- a/src/browser.ts +++ /dev/null @@ -1,254 +0,0 @@ -import DirCache from './cache'; -import { Compression, bytesToHeader, deserializeDir, findTile, zxyToTileID } from './pmtiles'; -import { S2_HEADER_SIZE_BYTES, S2_ROOT_SIZE, s2BytesToHeader } from './s2pmtiles'; - -import type { Entry, Header } from './pmtiles'; -import type { Face, Metadata } from 's2-tilejson'; -import type { S2Entries, S2Header } from './s2pmtiles'; - -// export DirCache for browsers to use (reduce code duplication) -export { default as DirCache } from './cache'; - -/** The File reader is to be used by bun/node/deno on the local filesystem. 
*/ -export default class S2PMTilesReader { - #header: Header | S2Header | undefined; - // root directory will exist if header does - #rootDir: Entry[] = []; - #rootDirS2: S2Entries = { 0: [], 1: [], 2: [], 3: [], 4: [], 5: [] }; - #metadata!: Metadata; - readonly #dirCache: DirCache; - readonly #decoder = new TextDecoder('utf-8'); - - /** - * Given an input path, read in the header and root directory - * @param path - the location of the PMTiles data - * @param rangeRequests - enable range requests or use urlParam "bytes" - * @param maxSize - the max size of the cache before dumping old data. Defaults to 20. - */ - constructor( - readonly path: string, - readonly rangeRequests: boolean = false, - maxSize = 20, - ) { - this.#dirCache = new DirCache(maxSize); - } - - /** - * @returns - the header of the archive along with the root directory, - * including information such as tile type, min/max zoom, bounds, and summary statistics. - */ - async #getMetadata(): Promise
{ - if (this.#header !== undefined) return this.#header; - const data = await this.#getRange(0, S2_ROOT_SIZE); - const headerData = data.slice(0, S2_HEADER_SIZE_BYTES); - // check if s2 - const isS2 = headerData[0] === 83 && headerData[1] === 50; - // header - const headerFunction = isS2 ? s2BytesToHeader : bytesToHeader; - const header = (this.#header = headerFunction(headerData)); - - // json metadata - const jsonMetadata = data.slice( - header.jsonMetadataOffset, - header.jsonMetadataOffset + header.jsonMetadataLength, - ); - this.#metadata = JSON.parse( - this.#arrayBufferToString(await decompress(jsonMetadata, header.internalCompression)), - ); - - // root directory data - const rootDirData = data.slice( - header.rootDirectoryOffset, - header.rootDirectoryOffset + header.rootDirectoryLength, - ); - this.#rootDir = deserializeDir(await decompress(rootDirData, header.internalCompression)); - - if (isS2) await this.#getS2Metadata(data, header as S2Header); - - return header; - } - - /** - * If S2 Projection, pull in the rest of the data - * @param data - the root data - * @param header - the S2 header with pointers to the rest of the data - */ - async #getS2Metadata(data: Uint8Array, header: S2Header): Promise { - // move the root directory to the s2 root - this.#rootDirS2[0] = this.#rootDir; - // add the 4 other faces - for (const face of [1, 2, 3, 4, 5]) { - const rootOffset = `rootDirectoryOffset${face}` as keyof S2Header; - const rootLenght = `rootDirectoryLength${face}` as keyof S2Header; - const faceDirData = data.slice( - header[rootOffset] as number, - (header[rootOffset] as number) + (header[rootLenght] as number), - ); - this.#rootDirS2[face as keyof S2Entries] = deserializeDir( - await decompress(faceDirData, header.internalCompression), - ); - } - } - - /** @returns - the header of the archive */ - async getHeader(): Promise
{ - return await this.#getMetadata(); - } - - /** @returns - the metadata of the archive */ - async getMetadata(): Promise { - await this.#getMetadata(); // ensure loaded first - return this.#metadata; - } - - /** - * @param face - the Open S2 projection face - * @param zoom - the zoom level of the tile - * @param x - the x coordinate of the tile - * @param y - the y coordinate of the tile - * @returns - the bytes of the tile at the given (face, zoom, x, y) coordinates, or undefined if the tile does not exist in the archive. - */ - async getTileS2(face: Face, zoom: number, x: number, y: number): Promise { - return await this.#getTile(face, zoom, x, y); - } - - /** - * @param zoom - the zoom level of the tile - * @param x - the x coordinate of the tile - * @param y - the y coordinate of the tile - * @returns - the bytes of the tile at the given (z, x, y) coordinates, or undefined if the tile does not exist in the archive. - */ - async getTile(zoom: number, x: number, y: number): Promise { - return await this.#getTile(-1, zoom, x, y); - } - - /** - * @param face - the Open S2 projection face - * @param zoom - the zoom level of the tile - * @param x - the x coordinate of the tile - * @param y - the y coordinate of the tile - * @returns - the bytes of the tile at the given (z, x, y) coordinates, or undefined if the tile does not exist in the archive. 
- */ - async #getTile( - face: number, - zoom: number, - x: number, - y: number, - ): Promise { - const header = await this.#getMetadata(); - const tileID = zxyToTileID(zoom, x, y); - const { minZoom, maxZoom, rootDirectoryOffset, rootDirectoryLength, tileDataOffset } = header; - if (zoom < minZoom || zoom > maxZoom) return undefined; - - let dO = rootDirectoryOffset; - let dL = rootDirectoryLength; - - for (let depth = 0; depth <= 3; depth++) { - const directory = await this.#getDirectory(dO, dL, face); - if (directory === undefined) return undefined; - const entry = findTile(directory, tileID); - if (entry !== null) { - if (entry.runLength > 0) { - const entryData = await this.#getRange(tileDataOffset + entry.offset, entry.length); - return await decompress(entryData, header.tileCompression); - } - dO = header.leafDirectoryOffset + entry.offset; - dL = entry.length; - } else return undefined; - } - throw Error('Maximum directory depth exceeded'); - } - - /** - * @param offset - the offset of the directory - * @param length - the length of the directory - * @param face - -1 for WM root, 0-5 for S2 - * @returns - the entries in the directory if it exists - */ - async #getDirectory(offset: number, length: number, face: number): Promise { - const dir = face === -1 ? 
this.#rootDir : this.#rootDirS2[face as Face]; - const header = await this.#getMetadata(); - const { internalCompression, rootDirectoryOffset } = header; - // if rootDirectoryOffset, return roon - if (offset === rootDirectoryOffset) return dir; - // check cache - const cache = this.#dirCache.get(offset); - if (cache !== undefined) return cache; - // get from archive - const resp = await this.#getRange(offset, length); - const data = await decompress(resp, internalCompression); - const directory = deserializeDir(data); - if (directory.length === 0) throw new Error('Empty directory is invalid'); - // save in cache - this.#dirCache.set(offset, directory); - - return directory; - } - - /** - * @param offset - the offset of the data - * @param length - the length of the data - * @returns - the bytes of the data - */ - async #getRange(offset: number, length: number): Promise { - const bytes = String(offset) + '-' + String(offset + length); - const fetchReq = this.rangeRequests - ? fetch(this.path, { headers: { Range: `bytes=${offset}-${offset + length - 1}` } }) - : fetch(`${this.path}&bytes=${bytes}`); - const res = await fetchReq.then(async (res) => await res.arrayBuffer()); - return new Uint8Array(res, 0, res.byteLength); - } - - /** - * @param buffer - the buffer to convert - * @returns - the string result - */ - #arrayBufferToString(buffer: Uint8Array): string { - return this.#decoder.decode(buffer); - } -} - -/** - * @param data - the data to decompress - * @param compression - the compression type - * @returns - the decompressed data - */ -async function decompress(data: Uint8Array, compression: Compression): Promise { - switch (compression) { - case Compression.Gzip: - return decompressGzip(data); - case Compression.Brotli: - throw new Error('Brotli decompression not implemented'); - case Compression.Zstd: - throw new Error('Zstd decompression not implemented'); - case Compression.None: - default: - return data; - } -} - -/** - * @param uint8Array - the data to 
decompress - * @returns - the decompressed data - */ -async function decompressGzip(uint8Array: Uint8Array): Promise { - // Create a DecompressionStream for 'gzip' - const decompressionStream = new DecompressionStream('gzip'); - // Convert the Uint8Array to a readable stream - const uint8ArrayStream = new ReadableStream({ - /** @param controller - the controller for the stream */ - start(controller) { - controller.enqueue(uint8Array); - controller.close(); - }, - }); - // Pipe the readable stream through the decompression stream - const decompressedStream = uint8ArrayStream.pipeThrough(decompressionStream); - // Create a new Response object from the decompressed stream to easily retrieve the data - const response = new Response(decompressedStream); - // Get the decompressed data as an ArrayBuffer - const decompressedAB = await response.arrayBuffer(); - - // Convert the ArrayBuffer to a Uint8Array and return - return new Uint8Array(decompressedAB, 0, decompressedAB.byteLength); -} diff --git a/src/file.ts b/src/file.ts new file mode 100644 index 0000000..c1d4f43 --- /dev/null +++ b/src/file.ts @@ -0,0 +1,63 @@ +import { open } from 'fs/promises'; +import { promisify } from 'util'; +import { openSync, read, write, writeSync } from 'fs'; + +import type { Reader } from './reader'; +import type { Writer } from './writer'; + +const readAsync = promisify(read); +const writeAsync = promisify(write); + +/** The File reader is to be used by bun/node/deno on the local filesystem. 
*/ +export class FileReader implements Reader { + #fileFD: number; + + /** @param file - the location of the PMTiles data in the FS */ + constructor(readonly file: string) { + this.#fileFD = openSync(file, 'r'); + } + + /** + * @param offset - the offset of the range + * @param length - the length of the range + * @returns - the ranged buffer + */ + async getRange(offset: number, length: number): Promise { + const buffer = Buffer.alloc(length); + await readAsync(this.#fileFD, buffer, 0, length, offset); + return new Uint8Array(buffer.buffer, 0, length); + } +} + +/** The File writer is to be used by bun/node/deno on the local filesystem. */ +export class FileWriter implements Writer { + #fileFD: number; + + /** @param file - the location of the PMTiles data in the FS */ + constructor(readonly file: string) { + this.#fileFD = openSync(file, 'a+'); + } + + /** + * @param data - the data to write + * @param offset - where in the buffer to start + */ + async write(data: Uint8Array, offset: number): Promise { + const fd = await open(this.file, 'r+'); // Open file for reading and writing + try { + await fd.write(data, 0, data.length, offset); // Write at the specified offset + } finally { + await fd.close(); // Close the file after writing + } + } + + /** @param data - the data to append */ + async append(data: Uint8Array): Promise { + await writeAsync(this.#fileFD, data, 0, data.byteLength); + } + + /** @param data - the data to append */ + appendSync(data: Uint8Array): void { + writeSync(this.#fileFD, data, 0, data.length); // Append data using the open file descriptor + } +} diff --git a/src/index.ts b/src/index.ts index a3f49e8..9eb7eed 100644 --- a/src/index.ts +++ b/src/index.ts @@ -1,8 +1,17 @@ -export { default as S2PMTilesReader } from './browser'; +export * from './reader'; export { default as DirCache } from './cache'; export * from './pmtiles'; -export * from './reader'; export * from './s2pmtiles'; export * from './varint'; export * from './writer'; export 
type * from 's2-tilejson'; + +/** + * @param uint8arrays - the Uint8Arrays to concatenate + * @returns - the concatenated Uint8Array + */ +export async function concatUint8Arrays(uint8arrays: Uint8Array[]): Promise { + const blob = new Blob(uint8arrays); + const buffer = await blob.arrayBuffer(); + return new Uint8Array(buffer); +} diff --git a/src/mmap.ts b/src/mmap.ts new file mode 100644 index 0000000..d1888da --- /dev/null +++ b/src/mmap.ts @@ -0,0 +1,20 @@ +import type { Reader } from './reader'; + +/** The Memory Mapped reader is to be used by bun on the local filesystem. */ +export class MMapReader implements Reader { + #buffer: Uint8Array; + + /** @param file - the location of the PMTiles data in the FS */ + constructor(readonly file: string) { + this.#buffer = Bun.mmap(file); + } + + /** + * @param offset - the offset of the range + * @param length - the length of the range + * @returns - the ranged buffer + */ + async getRange(offset: number, length: number): Promise { + return this.#buffer.slice(offset, offset + length); + } +} diff --git a/src/reader.ts b/src/reader.ts index c3a3e15..fe94864 100644 --- a/src/reader.ts +++ b/src/reader.ts @@ -1,21 +1,65 @@ import DirCache from './cache'; -import { open } from 'node:fs/promises'; -import { promisify } from 'node:util'; +import { concatUint8Arrays } from '.'; import { Compression, bytesToHeader, deserializeDir, findTile, zxyToTileID } from './pmtiles'; import { S2_HEADER_SIZE_BYTES, S2_ROOT_SIZE, s2BytesToHeader } from './s2pmtiles'; -import { brotliDecompress, gunzip } from 'node:zlib'; import type { Entry, Header } from './pmtiles'; import type { Face, Metadata } from 's2-tilejson'; import type { S2Entries, S2Header } from './s2pmtiles'; -// Promisify the zlib methods -const gunzipAsync = promisify(gunzip); -const brotliDecompressAsync = promisify(brotliDecompress); +// export DirCache for browsers to use (reduce code duplication) +export { default as DirCache } from './cache'; + +/** The defacto 
interface for all readers. */ +export interface Reader { + getRange: (offset: number, length: number) => Promise; +} + +/** The browser reader that fetches data from a URL. */ +export class FetchReader implements Reader { + /** + * @param path - the location of the PMTiles data + * @param rangeRequests - FetchReader specific; enable range requests or use urlParam "bytes" + */ + constructor( + public path: string, + public rangeRequests: boolean, + ) {} + + /** + * @param offset - the offset of the range + * @param length - the length of the range + * @returns - the ranged buffer + */ + async getRange(offset: number, length: number): Promise { + const bytes = String(offset) + '-' + String(offset + length); + const fetchReq = this.rangeRequests + ? fetch(this.path, { headers: { Range: `bytes=${offset}-${offset + length - 1}` } }) + : fetch(`${this.path}&bytes=${bytes}`); + const res = await fetchReq.then(async (res) => await res.arrayBuffer()); + return new Uint8Array(res, 0, res.byteLength); + } +} + +/** Buffer reader is used on files that are small and easy to read in memory. Faster then the Filesystem */ +export class BufferReader implements Reader { + /** @param buffer - the input data is the entire pmtiles file */ + constructor(readonly buffer: Uint8Array) {} + + /** + * @param offset - the offset of the range + * @param length - the length of the range + * @returns - the ranged buffer + */ + async getRange(offset: number, length: number): Promise { + return this.buffer.slice(offset, offset + length); + } +} /** The File reader is to be used by bun/node/deno on the local filesystem. 
*/ -export class PMTilesReader { +export class S2PMTilesReader { #header: Header | S2Header | undefined; + #reader: Reader; // root directory will exist if header does #rootDir: Entry[] = []; #rootDirS2: S2Entries = { 0: [], 1: [], 2: [], 3: [], 4: [], 5: [] }; @@ -26,12 +70,19 @@ export class PMTilesReader { /** * Given an input path, read in the header and root directory * @param path - the location of the PMTiles data + * @param rangeRequests - FetchReader specific; enable range requests or use urlParam "bytes" * @param maxSize - the max size of the cache before dumping old data. Defaults to 20. */ constructor( - readonly path: string, + readonly path: string | Reader, + rangeRequests: boolean = false, maxSize = 20, ) { + if (typeof path === 'string') { + this.#reader = new FetchReader(path, rangeRequests); + } else { + this.#reader = path; + } this.#dirCache = new DirCache(maxSize); } @@ -39,11 +90,10 @@ export class PMTilesReader { * @returns - the header of the archive along with the root directory, * including information such as tile type, min/max zoom, bounds, and summary statistics. */ - async #getHeader(): Promise
{ + async #getMetadata(): Promise
{ if (this.#header !== undefined) return this.#header; - const data = await this.#getRange(0, S2_ROOT_SIZE); + const data = await this.#reader.getRange(0, S2_ROOT_SIZE); const headerData = data.slice(0, S2_HEADER_SIZE_BYTES); - // check if s2 const isS2 = headerData[0] === 83 && headerData[1] === 50; // header @@ -66,7 +116,7 @@ export class PMTilesReader { ); this.#rootDir = deserializeDir(await decompress(rootDirData, header.internalCompression)); - if (isS2) this.#getS2Metadata(data, header as S2Header); + if (isS2) await this.#getS2Metadata(data, header as S2Header); return header; } @@ -79,13 +129,13 @@ export class PMTilesReader { async #getS2Metadata(data: Uint8Array, header: S2Header): Promise { // move the root directory to the s2 root this.#rootDirS2[0] = this.#rootDir; - // add the 5 other faces + // add the 4 other faces for (const face of [1, 2, 3, 4, 5]) { const rootOffset = `rootDirectoryOffset${face}` as keyof S2Header; - const rootLength = `rootDirectoryLength${face}` as keyof S2Header; + const rootLenght = `rootDirectoryLength${face}` as keyof S2Header; const faceDirData = data.slice( header[rootOffset] as number, - (header[rootOffset] as number) + (header[rootLength] as number), + (header[rootOffset] as number) + (header[rootLenght] as number), ); this.#rootDirS2[face as keyof S2Entries] = deserializeDir( await decompress(faceDirData, header.internalCompression), @@ -94,13 +144,13 @@ export class PMTilesReader { } /** @returns - the header of the archive */ - async getHeader(): Promise
{ - return await this.#getHeader(); + async getHeader(): Promise
{ + return await this.#getMetadata(); } /** @returns - the metadata of the archive */ async getMetadata(): Promise { - await this.#getHeader(); + await this.#getMetadata(); // ensure loaded first return this.#metadata; } @@ -122,7 +172,7 @@ export class PMTilesReader { * @returns - the bytes of the tile at the given (z, x, y) coordinates, or undefined if the tile does not exist in the archive. */ async getTile(zoom: number, x: number, y: number): Promise { - return this.#getTile(-1, zoom, x, y); + return await this.#getTile(-1, zoom, x, y); } /** @@ -138,13 +188,13 @@ export class PMTilesReader { x: number, y: number, ): Promise { - const header = await this.#getHeader(); + const header = await this.#getMetadata(); const tileID = zxyToTileID(zoom, x, y); - const { tileDataOffset } = header; - // DO NOT USE: I don't bother implementing this part of the spec - // if (zoom < minZoom || zoom > maxZoom) return undefined; + const { minZoom, maxZoom, rootDirectoryOffset, rootDirectoryLength, tileDataOffset } = header; + if (zoom < minZoom || zoom > maxZoom) return undefined; - let [dO, dL] = this.#getRootDir(face, header); + let dO = rootDirectoryOffset; + let dL = rootDirectoryLength; for (let depth = 0; depth <= 3; depth++) { const directory = await this.#getDirectory(dO, dL, face); @@ -152,7 +202,10 @@ export class PMTilesReader { const entry = findTile(directory, tileID); if (entry !== null) { if (entry.runLength > 0) { - const entryData = await this.#getRange(tileDataOffset + entry.offset, entry.length); + const entryData = await this.#reader.getRange( + tileDataOffset + entry.offset, + entry.length, + ); return await decompress(entryData, header.tileCompression); } dO = header.leafDirectoryOffset + entry.offset; @@ -162,20 +215,6 @@ export class PMTilesReader { throw Error('Maximum directory depth exceeded'); } - /** - * @param face - the Open S2 projection face - * @param header - the header of the archive - * @returns - the offset and length of the root directory 
for the correct face - */ - #getRootDir(face: number, header: Header | S2Header): [number, number] { - const { rootDirectoryOffset, rootDirectoryLength } = header; - if (face <= 0) return [rootDirectoryOffset, rootDirectoryLength]; - const s2header = header as S2Header; - const rootOffset = `rootDirectoryOffset${face}` as keyof S2Header; - const rootLength = `rootDirectoryLength${face}` as keyof S2Header; - return [s2header[rootOffset] as number, s2header[rootLength] as number]; - } - /** * @param offset - the offset of the directory * @param length - the length of the directory @@ -184,15 +223,15 @@ export class PMTilesReader { */ async #getDirectory(offset: number, length: number, face: number): Promise { const dir = face === -1 ? this.#rootDir : this.#rootDirS2[face as Face]; - const header = await this.#getHeader(); + const header = await this.#getMetadata(); const { internalCompression, rootDirectoryOffset } = header; // if rootDirectoryOffset, return roon if (offset === rootDirectoryOffset) return dir; // check cache const cache = this.#dirCache.get(offset); - if (cache) return cache; + if (cache !== undefined) return cache; // get from archive - const resp = await this.#getRange(offset, length); + const resp = await this.#reader.getRange(offset, length); const data = await decompress(resp, internalCompression); const directory = deserializeDir(data); if (directory.length === 0) throw new Error('Empty directory is invalid'); @@ -202,21 +241,6 @@ export class PMTilesReader { return directory; } - /** - * @param offset - the offset of the data - * @param length - the length of the data - * @returns - the bytes of the data - */ - async #getRange(offset: number, length: number): Promise { - const fileHandle = await open(this.path, 'r'); - // Create a buffer to hold the bytes - const buffer = Buffer.alloc(length); - // Read the specified number of bytes from the given offset - const { bytesRead } = await fileHandle.read(buffer, 0, length, offset); - - return new 
Uint8Array(buffer.buffer, buffer.byteOffset, bytesRead); - } - /** * @param buffer - the buffer to convert * @returns - the string result @@ -234,9 +258,9 @@ export class PMTilesReader { async function decompress(data: Uint8Array, compression: Compression): Promise { switch (compression) { case Compression.Gzip: - return new Uint8Array(await gunzipAsync(data)); + return decompressGzip(data); case Compression.Brotli: - return new Uint8Array(await brotliDecompressAsync(data)); + throw new Error('Brotli decompression not implemented'); case Compression.Zstd: throw new Error('Zstd decompression not implemented'); case Compression.None: @@ -244,3 +268,19 @@ async function decompress(data: Uint8Array, compression: Compression): Promise { + // Convert the bytes to a stream. + const stream = new Blob([compressedBytes]).stream(); + // Create a decompressed stream. + const decompressedStream = stream.pipeThrough(new DecompressionStream('gzip')); + // Read all the bytes from this stream. + const chunks = []; + for await (const chunk of decompressedStream) chunks.push(chunk); + + return await concatUint8Arrays(chunks); +} diff --git a/src/writer.ts b/src/writer.ts index 2c1f9cd..38a2036 100644 --- a/src/writer.ts +++ b/src/writer.ts @@ -1,38 +1,68 @@ -import { createHash } from 'node:crypto'; -import { - Compression, - ROOT_SIZE, - headerToBytes, - serializeDir, - tileIDToZxy, - zxyToTileID, -} from './pmtiles'; +import { concatUint8Arrays } from '.'; +import { Compression, ROOT_SIZE, headerToBytes, serializeDir, zxyToTileID } from './pmtiles'; import { S2_HEADER_SIZE_BYTES, S2_ROOT_SIZE, s2HeaderToBytes } from './s2pmtiles'; -import { appendFile, open } from 'node:fs/promises'; import type { Entry, Header, TileType } from './pmtiles'; import type { Face, Metadata } from 's2-tilejson'; import type { S2Entries, S2Header } from './s2pmtiles'; +/** The defacto interface for all writers. 
*/ +export interface Writer { + write(data: Uint8Array, offset: number): Promise; + append(data: Uint8Array): Promise; + appendSync(data: Uint8Array): void; +} + +/** Buffer writer is used on smaller datasets that are easy to write in memory. Faster then the Filesystem */ +export class BufferWriter { + #buffer: number[] = []; + + /** @param data - the data to append */ + async append(data: Uint8Array): Promise { + for (let i = 0; i < data.byteLength; i++) this.#buffer.push(data[i]); + } + + /** @param data - the data to append */ + appendSync(data: Uint8Array): void { + for (let i = 0; i < data.byteLength; i++) this.#buffer.push(data[i]); + } + + /** + * @param data - the data to write + * @param offset - where in the buffer to start + */ + async write(data: Uint8Array, offset: number): Promise { + for (let i = 0; i < data.byteLength; i++) { + this.#buffer[offset + i] = data[i]; + } + } + + /** @returns - the buffer */ + commit(): Uint8Array { + return new Uint8Array(this.#buffer); + } +} + /** Write a PMTiles file. 
*/ -export class PMTilesWriter { +export class S2PMTilesWriter { #tileEntries: Entry[] = []; #s2tileEntries: S2Entries = { 0: [], 1: [], 2: [], 3: [], 4: [], 5: [] }; - #hashToOffset = new Map(); #offset = 0; #addressedTiles = 0; #clustered = true; - compression: Compression = Compression.None; + #minZoom = 30; + #maxZoom = 0; /** - * @param file - the path to the file we want to write to + * @param writer - the writer to append to * @param type - the tile type + * @param compression - the compression algorithm */ constructor( - readonly file: string, + readonly writer: Writer, readonly type: TileType, + readonly compression: Compression = Compression.Gzip, ) { - // append the headersize - appendFile(this.file, new Uint8Array(S2_ROOT_SIZE)); + this.writer.appendSync(new Uint8Array(S2_ROOT_SIZE)); } /** @@ -43,6 +73,8 @@ export class PMTilesWriter { * @param data - the tile data to store */ async writeTileXYZ(zoom: number, x: number, y: number, data: Uint8Array): Promise { + this.#minZoom = Math.min(this.#minZoom, zoom); + this.#maxZoom = Math.max(this.#maxZoom, zoom); const tileID = zxyToTileID(zoom, x, y); await this.writeTile(tileID, data); } @@ -62,6 +94,8 @@ export class PMTilesWriter { y: number, data: Uint8Array, ): Promise { + this.#minZoom = Math.min(this.#minZoom, zoom); + this.#maxZoom = Math.max(this.#maxZoom, zoom); const tileID = zxyToTileID(zoom, x, y); await this.writeTile(tileID, data, face); } @@ -73,28 +107,17 @@ export class PMTilesWriter { * @param face - If it exists, then we are storing S2 data */ async writeTile(tileID: number, data: Uint8Array, face?: Face): Promise { + data = await compress(data, this.compression); const length = data.length; const tileEntries = face !== undefined ? 
this.#s2tileEntries[face] : this.#tileEntries; if (tileEntries.length > 0 && tileID < (tileEntries.at(-1) as Entry).tileID) { this.#clustered = false; } - const hsh = hashUint8Array(data); - let offset = this.#hashToOffset.get(hsh); - if (offset !== undefined) { - const last = tileEntries.at(-1); - if (last !== undefined && tileID == last.tileID + last.runLength && last.offset == offset) { - last.runLength++; - } else { - tileEntries.push({ tileID, offset, length, runLength: 1 }); - } - } else { - offset = this.#offset; - await appendFile(this.file, data); - tileEntries.push({ tileID, offset, length, runLength: 1 }); - this.#hashToOffset.set(hsh, this.#offset); - this.#offset += length; - } + const offset = this.#offset; + await this.writer.append(data); + tileEntries.push({ tileID, offset, length, runLength: 1 }); + this.#offset += length; this.#addressedTiles++; } @@ -118,16 +141,14 @@ export class PMTilesWriter { tileEntries.sort((a, b) => a.tileID - b.tileID); // build metadata const metaBuffer = Buffer.from(JSON.stringify(metadata)); - const metauint8 = new Uint8Array( - metaBuffer.buffer, - metaBuffer.byteOffset, - metaBuffer.byteLength, - ); + let metauint8 = new Uint8Array(metaBuffer.buffer, metaBuffer.byteOffset, metaBuffer.byteLength); + metauint8 = await compress(metauint8, this.compression); // optimize directories - const { rootBytes, leavesBytes } = optimizeDirectories( + const { rootBytes, leavesBytes } = await optimizeDirectories( tileEntries, ROOT_SIZE - S2_HEADER_SIZE_BYTES - metauint8.byteLength, + this.compression, ); // build header data @@ -138,10 +159,7 @@ export class PMTilesWriter { const leafDirectoryOffset = this.#offset + S2_ROOT_SIZE; const leafDirectoryLength = leavesBytes.byteLength; this.#offset += leavesBytes.byteLength; - appendFile(this.file, leavesBytes); - // to make writing faster - const minZoom = tileIDToZxy((tileEntries.at(0) as Entry).tileID)[0]; - const maxZoom = tileIDToZxy((tileEntries.at(-1) as Entry).tileID)[0]; + 
await this.writer.append(leavesBytes); // build header const header: Header = { @@ -156,22 +174,20 @@ export class PMTilesWriter { tileDataLength: this.#offset, numAddressedTiles: this.#addressedTiles, numTileEntries: tileEntries.length, - numTileContents: this.#hashToOffset.size, + numTileContents: tileEntries.length, clustered: this.#clustered, - internalCompression: Compression.None, + internalCompression: this.compression, tileCompression: this.compression, tileType: this.type, - minZoom, - maxZoom, + minZoom: this.#minZoom, + maxZoom: this.#maxZoom, }; const serialzedHeader = headerToBytes(header); // write header - const fileHandle = await open(this.file, 'r+'); - await fileHandle.write(serialzedHeader, 0, serialzedHeader.byteLength, 0); - await fileHandle.write(rootBytes, 0, rootBytes.byteLength, rootDirectoryOffset); - await fileHandle.write(metauint8, 0, metauint8.byteLength, jsonMetadataOffset); - await fileHandle.close(); + await this.writer.write(serialzedHeader, 0); + await this.writer.write(rootBytes, rootDirectoryOffset); + await this.writer.write(metauint8, jsonMetadataOffset); } /** @@ -179,6 +195,7 @@ export class PMTilesWriter { * @param metadata - the metadata to store */ async #commitS2(metadata: Metadata): Promise { + const { compression } = this; const tileEntries = this.#s2tileEntries[0]; const tileEntries1 = this.#s2tileEntries[1]; const tileEntries2 = this.#s2tileEntries[2]; @@ -194,36 +211,39 @@ export class PMTilesWriter { tileEntries5.sort((a, b) => a.tileID - b.tileID); // build metadata const metaBuffer = Buffer.from(JSON.stringify(metadata)); - const metauint8 = new Uint8Array( - metaBuffer.buffer, - metaBuffer.byteOffset, - metaBuffer.byteLength, - ); + let metauint8 = new Uint8Array(metaBuffer.buffer, metaBuffer.byteOffset, metaBuffer.byteLength); + metauint8 = await compress(metauint8, this.compression); // optimize directories - const { rootBytes, leavesBytes } = optimizeDirectories( + const { rootBytes, leavesBytes } = await 
optimizeDirectories( tileEntries, ROOT_SIZE - S2_HEADER_SIZE_BYTES - metauint8.byteLength, + compression, ); - const { rootBytes: rootBytes1, leavesBytes: leavesBytes1 } = optimizeDirectories( + const { rootBytes: rootBytes1, leavesBytes: leavesBytes1 } = await optimizeDirectories( tileEntries1, ROOT_SIZE - S2_HEADER_SIZE_BYTES - metauint8.byteLength, + compression, ); - const { rootBytes: rootBytes2, leavesBytes: leavesBytes2 } = optimizeDirectories( + const { rootBytes: rootBytes2, leavesBytes: leavesBytes2 } = await optimizeDirectories( tileEntries2, ROOT_SIZE - S2_HEADER_SIZE_BYTES - metauint8.byteLength, + compression, ); - const { rootBytes: rootBytes3, leavesBytes: leavesBytes3 } = optimizeDirectories( + const { rootBytes: rootBytes3, leavesBytes: leavesBytes3 } = await optimizeDirectories( tileEntries3, ROOT_SIZE - S2_HEADER_SIZE_BYTES - metauint8.byteLength, + compression, ); - const { rootBytes: rootBytes4, leavesBytes: leavesBytes4 } = optimizeDirectories( + const { rootBytes: rootBytes4, leavesBytes: leavesBytes4 } = await optimizeDirectories( tileEntries4, ROOT_SIZE - S2_HEADER_SIZE_BYTES - metauint8.byteLength, + compression, ); - const { rootBytes: rootBytes5, leavesBytes: leavesBytes5 } = optimizeDirectories( + const { rootBytes: rootBytes5, leavesBytes: leavesBytes5 } = await optimizeDirectories( tileEntries5, ROOT_SIZE - S2_HEADER_SIZE_BYTES - metauint8.byteLength, + compression, ); // build header data @@ -246,27 +266,27 @@ export class PMTilesWriter { const leafDirectoryOffset = this.#offset + S2_ROOT_SIZE; const leafDirectoryLength = leavesBytes.byteLength; this.#offset += leafDirectoryLength; - appendFile(this.file, leavesBytes); + await this.writer.append(leavesBytes); const leafDirectoryOffset1 = this.#offset + S2_ROOT_SIZE; const leafDirectoryLength1 = leavesBytes1.byteLength; this.#offset += leafDirectoryLength1; - appendFile(this.file, leavesBytes1); + await this.writer.append(leavesBytes1); const leafDirectoryOffset2 = this.#offset + 
S2_ROOT_SIZE; const leafDirectoryLength2 = leavesBytes2.byteLength; this.#offset += leafDirectoryLength2; - appendFile(this.file, leavesBytes2); + await this.writer.append(leavesBytes2); const leafDirectoryOffset3 = this.#offset + S2_ROOT_SIZE; const leafDirectoryLength3 = leavesBytes3.byteLength; this.#offset += leafDirectoryLength3; - appendFile(this.file, leavesBytes3); + await this.writer.append(leavesBytes3); const leafDirectoryOffset4 = this.#offset + S2_ROOT_SIZE; const leafDirectoryLength4 = leavesBytes4.byteLength; this.#offset += leafDirectoryLength4; - appendFile(this.file, leavesBytes4); + await this.writer.append(leavesBytes4); const leafDirectoryOffset5 = this.#offset + S2_ROOT_SIZE; const leafDirectoryLength5 = leavesBytes5.byteLength; this.#offset += leafDirectoryLength5; - appendFile(this.file, leavesBytes5); + await this.writer.append(leavesBytes5); // build header const header: S2Header = { specVersion: 3, @@ -300,27 +320,25 @@ export class PMTilesWriter { tileDataLength: this.#offset, numAddressedTiles: this.#addressedTiles, numTileEntries: tileEntries.length, - numTileContents: this.#hashToOffset.size, + numTileContents: tileEntries.length, clustered: this.#clustered, - internalCompression: Compression.None, + internalCompression: this.compression, tileCompression: this.compression, tileType: this.type, - minZoom: 0, - maxZoom: 0, + minZoom: this.#minZoom, + maxZoom: this.#maxZoom, }; const serialzedHeader = s2HeaderToBytes(header); // write header - const fileHandle = await open(this.file, 'r+'); - await fileHandle.write(serialzedHeader, 0, serialzedHeader.byteLength, 0); - await fileHandle.write(rootBytes, 0, rootBytes.byteLength, rootDirectoryOffset); - await fileHandle.write(rootBytes1, 0, rootBytes1.byteLength, rootDirectoryOffset1); - await fileHandle.write(rootBytes2, 0, rootBytes2.byteLength, rootDirectoryOffset2); - await fileHandle.write(rootBytes3, 0, rootBytes3.byteLength, rootDirectoryOffset3); - await fileHandle.write(rootBytes4, 
0, rootBytes4.byteLength, rootDirectoryOffset4); - await fileHandle.write(rootBytes5, 0, rootBytes5.byteLength, rootDirectoryOffset5); - await fileHandle.write(metauint8, 0, metauint8.byteLength, jsonMetadataOffset); - await fileHandle.close(); + await this.writer.write(serialzedHeader, 0); + await this.writer.write(rootBytes, rootDirectoryOffset); + await this.writer.write(rootBytes1, rootDirectoryOffset1); + await this.writer.write(rootBytes2, rootDirectoryOffset2); + await this.writer.write(rootBytes3, rootDirectoryOffset3); + await this.writer.write(rootBytes4, rootDirectoryOffset4); + await this.writer.write(rootBytes5, rootDirectoryOffset5); + await this.writer.write(metauint8, jsonMetadataOffset); } } @@ -337,9 +355,14 @@ interface OptimizedDirectory { /** * @param entries - the tile entries * @param leafSize - the max leaf size + * @param compression - the compression * @returns - the optimized directories */ -function buildRootsLeaves(entries: Entry[], leafSize: number): OptimizedDirectory { +async function buildRootsLeaves( + entries: Entry[], + leafSize: number, + compression: Compression, +): Promise { const rootEntries: Entry[] = []; let leavesBytes = new Uint8Array(0); let numLeaves = 0; @@ -347,57 +370,84 @@ function buildRootsLeaves(entries: Entry[], leafSize: number): OptimizedDirector let i = 0; while (i < entries.length) { numLeaves += 1; - const serialized = serializeDir(entries.slice(i, i + leafSize)); + const serialized = await compress(serializeDir(entries.slice(i, i + leafSize)), compression); rootEntries.push({ tileID: entries[i].tileID, offset: leavesBytes.length, length: serialized.length, runLength: 0, }); - leavesBytes = concatUint8Arrays(leavesBytes, serialized); + leavesBytes = await concatUint8Arrays([leavesBytes, serialized]); i += leafSize; } - return { rootBytes: serializeDir(rootEntries), leavesBytes, numLeaves }; + return { + rootBytes: await compress(serializeDir(rootEntries), compression), + leavesBytes, + numLeaves, + }; } 
/** * @param entries - the tile entries * @param targetRootLength - the max leaf size + * @param compression - the compression * @returns - the optimized directories */ -function optimizeDirectories(entries: Entry[], targetRootLength: number): OptimizedDirectory { - const testBytes = serializeDir(entries); +async function optimizeDirectories( + entries: Entry[], + targetRootLength: number, + compression: Compression, +): Promise { + const testBytes = await compress(serializeDir(entries), compression); if (testBytes.length < targetRootLength) return { rootBytes: testBytes, leavesBytes: new Uint8Array(0), numLeaves: 0 }; let leafSize = 4096; while (true) { - const build = buildRootsLeaves(entries, leafSize); + const build = await buildRootsLeaves(entries, leafSize, compression); if (build.rootBytes.length < targetRootLength) return build; leafSize *= 2; } } +// /** +// * @param a - the first array +// * @param b - the second array +// * @returns - the combined array of the two starting with "a" +// */ +// function concatUint8Arrays(a: Uint8Array, b: Uint8Array): Uint8Array { +// const result = new Uint8Array(a.length + b.length); +// result.set(a, 0); +// result.set(b, a.length); +// return result; +// } + /** - * @param a - the first array - * @param b - the second array - * @returns - the combined array of the two starting with "a" + * @param input - the input Uint8Array + * @param compression - the compression + * @returns - the compressed Uint8Array or the original if compression is None */ -function concatUint8Arrays(a: Uint8Array, b: Uint8Array): Uint8Array { - const result = new Uint8Array(a.length + b.length); - result.set(a, 0); - result.set(b, a.length); - return result; +async function compress(input: Uint8Array, compression: Compression): Promise { + if (compression === Compression.None) return input; + else if (compression === Compression.Gzip) return await compressGzip(input); + else throw new Error(`Unsupported compression: ${compression}`); } /** - * 
@param data - the data to hash - * @param algorithm - the hashing algorithm - * @returns - the hashed string + * @param input - the input Uint8Array + * @returns - the compressed Uint8Array */ -function hashUint8Array(data: Uint8Array, algorithm: string = 'sha256'): string { - const hash = createHash(algorithm); - hash.update(Buffer.from(data)); - return hash.digest('hex'); // Change 'hex' to 'base64' or other formats if needed +async function compressGzip(input: Uint8Array): Promise { + // Convert the string to a byte stream. + const stream = new Blob([input]).stream(); + + // Create a compressed stream. + const compressedStream = stream.pipeThrough(new CompressionStream('gzip')); + + // Read all the bytes from this stream. + const chunks = []; + for await (const chunk of compressedStream) chunks.push(chunk); + + return await concatUint8Arrays(chunks); } diff --git a/test/fixtures/s2.s2pmtiles b/test/fixtures/s2.s2pmtiles new file mode 100644 index 0000000000000000000000000000000000000000..71dd84eb9a4e43506de71fa4ee05755d5306ca43 GIT binary patch literal 98339 zcmeIuv1$TA6a~40mVo8@y&NO%K%rMNH z-KPAG7{@Kk=pJQ}eU^QF&V}qH`#SqeW`F9D5!Y#zwZDCuy+1V@?7t88=Yu|c&3}88 zsIKpf>aE0UOy;G0uRfcu-q&5d+IGiN^V9!Z5+Fc;009C72oNAZfB*pk1PBlyK!5-N z0t5&UAV7cs0RjXF5FkK+009C72oNAZfB*pk1PBlyK!5-N0t5&UAV7cs0RjXF5FkK+ z009C72oNAZfB*pk1PBlyK!5-N0t5&UAV7cs0RjXF5FkK+009C72oNAZfB*pk1PBly zK!5-N0t5&UAV7cs0RjXF5FkK+009C72oNAZfB*pk1PBlyK!5-N0t5&UAV7cs0RjXF z5FkK+009C72oNAZfB*pk1PBlyK!5-N0t5&UAV7cs0RjXF5FkK+009C72oNAZfB*pk z1PBlyK!5-N0t5&UAV7cs0RjXF5FkK+009C72oNAZfB*pk1PBlyK!5-N0t5&UAV7cs z0RjXF5FkK+009C72oNAZfB*pk1PBlyK!5-N0t5&UAV7cs0RjXF5FkK+009C72oNAZ zfB*pk1PBlyK!5-N0t5&UAV7cs0RjXF5FkK+009C72oNAZfB*pk1PBlyK!5-N0t5&U zAV7cs0RjXF5FkK+009C72oNAZfB*pk1PBlyK!5-N0t5&UAn>#TA5GgHmS2ZsyZ?K- G+*| */ +import zlib from 'node:zlib'; + +// fyi, Byte streams aren't really implemented anywhere yet +// It only exist as a issue: https://github.com/WICG/compression/issues/31 + +// TRACKER: 
https://github.com/oven-sh/bun/issues/1723 + +/** + * @param ctx - the context + * @param handle - the handle + * @returns - the transform + */ +const make = (ctx, handle) => + Object.assign(ctx, { + writable: new WritableStream({ + /** + * @param chunk - input data + * @returns - `true` if more data can be written + */ + write: (chunk) => handle.write(chunk), + /** + * @returns - close the stream + */ + close: () => handle.end(), + }), + readable: new ReadableStream({ + type: 'bytes', + /** + * @param ctrl - the controller + */ + start(ctrl) { + handle.on('data', (chunk) => ctrl.enqueue(chunk)); + handle.once('end', () => ctrl.close()); + }, + }), + }); + +// @ts-expect-error - polyfill exception +globalThis.CompressionStream ??= class CompressionStream { + /** + * @param format - the format to use + */ + constructor(format) { + make( + this, + format === 'deflate' + ? zlib.createDeflate() + : format === 'gzip' + ? zlib.createGzip() + : zlib.createDeflateRaw(), + ); + } +}; + +// @ts-expect-error - polyfill exception +globalThis.DecompressionStream ??= class DecompressionStream { + /** + * @param format - the format to use + */ + constructor(format) { + make( + this, + format === 'deflate' + ? zlib.createInflate() + : format === 'gzip' + ? 
zlib.createGunzip() + : zlib.createInflateRaw(), + ); + } +}; diff --git a/test/reader.test.ts b/test/reader.test.ts index f23119b..8ce446a 100644 --- a/test/reader.test.ts +++ b/test/reader.test.ts @@ -1,7 +1,12 @@ import { Compression } from '../src/pmtiles'; -import { PMTilesReader } from '../src/reader'; +import { FileReader } from '../src/file'; +import { MMapReader } from '../src/mmap'; +import { buildServer } from './server'; +import { BufferReader, S2PMTilesReader } from '../src/reader'; import { describe, expect, test } from 'bun:test'; +import type { Metadata, S2Header } from '../src'; + /** External old metadata spec */ interface MetaExternal { name: string; @@ -31,8 +36,11 @@ interface MetaExternal { describe('File Reader', async () => { test('test_fixture_1', async () => { - const testFixture1 = new PMTilesReader(`${__dirname}/fixtures/test_fixture_1.pmtiles`); - expect(testFixture1).toBeInstanceOf(PMTilesReader); + const bufferReader = new BufferReader( + new Uint8Array(await Bun.file(`${__dirname}/fixtures/test_fixture_1.pmtiles`).arrayBuffer()), + ); + const testFixture1 = new S2PMTilesReader(bufferReader); + expect(testFixture1).toBeInstanceOf(S2PMTilesReader); const header = await testFixture1.getHeader(); // header expect(header).toEqual({ @@ -98,8 +106,9 @@ describe('File Reader', async () => { }); test('test_fixture_2', async () => { - const testFixture2 = new PMTilesReader(`${__dirname}/fixtures/test_fixture_2.pmtiles`); - expect(testFixture2).toBeInstanceOf(PMTilesReader); + const fileReader = new FileReader(`${__dirname}/fixtures/test_fixture_2.pmtiles`); + const testFixture2 = new S2PMTilesReader(fileReader); + expect(testFixture2).toBeInstanceOf(S2PMTilesReader); const header = await testFixture2.getHeader(); // header expect(header).toEqual({ @@ -164,3 +173,140 @@ describe('File Reader', async () => { ); }); }); + +test('mmap test_fixture_2', async () => { + const mmapReader = new 
MMapReader(`${__dirname}/fixtures/test_fixture_2.pmtiles`); + + const testFixture2 = new S2PMTilesReader(mmapReader); + const header = await testFixture2.getHeader(); + // header + expect(header).toEqual({ + clustered: false, + internalCompression: Compression.Gzip, + jsonMetadataLength: 247, + jsonMetadataOffset: 152, + leafDirectoryLength: 0, + leafDirectoryOffset: 0, + maxZoom: 0, + minZoom: 0, + numAddressedTiles: 1, + numTileContents: 1, + numTileEntries: 1, + rootDirectoryLength: 25, + rootDirectoryOffset: 127, + specVersion: 3, + tileCompression: Compression.Gzip, + tileDataLength: 67, + tileDataOffset: 399, + tileType: 1, + }); + // metadata + expect((await testFixture2.getMetadata()) as unknown as MetaExternal).toEqual({ + name: 'test_fixture_2.pmtiles', + description: 'test_fixture_2.pmtiles', + version: '2', + type: 'overlay', + generator: 'tippecanoe v2.5.0', + generator_options: './tippecanoe -zg -o test_fixture_2.pmtiles --force', + vector_layers: [ + { + id: 'test_fixture_2pmtiles', + description: '', + minzoom: 0, + maxzoom: 0, + fields: {}, + }, + ], + tilestats: { + layerCount: 1, + layers: [ + { + layer: 'test_fixture_2pmtiles', + count: 1, + geometry: 'Polygon', + attributeCount: 0, + attributes: [], + }, + ], + }, + }); + // TILE + const tile = await testFixture2.getTile(0, 0, 0); + expect(tile).toBeInstanceOf(Uint8Array); + expect(new Uint8Array(tile as Uint8Array)).toEqual( + new Uint8Array([ + 26, 45, 120, 2, 10, 21, 116, 101, 115, 116, 95, 102, 105, 120, 116, 117, 114, 101, 95, 50, + 112, 109, 116, 105, 108, 101, 115, 40, 128, 32, 18, 15, 24, 3, 34, 11, 9, 128, 32, 232, 31, + 18, 22, 24, 21, 0, 15, + ]), + ); +}); + +test('server - s2 example', async () => { + const server = buildServer(); + + const reader = new S2PMTilesReader(`http://localhost:${server.port}/fixtures/s2.s2pmtiles`, true); + + // setup data + const str = 'hello world'; + const buf = Buffer.from(str, 'utf8'); + const uint8 = new Uint8Array(buf.buffer, buf.byteOffset, 
buf.byteLength); + // const str2 = 'hello world 2'; + // const buf2 = Buffer.from(str2, 'utf8'); + // const uint8_2 = new Uint8Array(buf2.buffer, buf2.byteOffset, buf2.byteLength); + + const metadata = await reader.getMetadata(); + const header = await reader.getHeader(); + expect(header).toEqual({ + clustered: true, + internalCompression: 1, + jsonMetadataLength: 17, + jsonMetadataOffset: 280, + leafDirectoryLength: 0, + leafDirectoryLength1: 0, + leafDirectoryLength2: 0, + leafDirectoryLength3: 0, + leafDirectoryLength4: 0, + leafDirectoryLength5: 0, + leafDirectoryOffset: 98_339, + leafDirectoryOffset1: 98_339, + leafDirectoryOffset2: 98_339, + leafDirectoryOffset3: 98_339, + leafDirectoryOffset4: 98_339, + leafDirectoryOffset5: 98_339, + maxZoom: 0, + minZoom: 0, + numAddressedTiles: 3, + numTileContents: 1, + numTileEntries: 1, + rootDirectoryLength: 5, + rootDirectoryLength1: 5, + rootDirectoryLength2: 1, + rootDirectoryLength3: 5, + rootDirectoryLength4: 1, + rootDirectoryLength5: 1, + rootDirectoryOffset: 262, + rootDirectoryOffset1: 267, + rootDirectoryOffset2: 272, + rootDirectoryOffset3: 273, + rootDirectoryOffset4: 278, + rootDirectoryOffset5: 279, + specVersion: 1, + tileCompression: 1, + tileDataLength: 35, + tileDataOffset: 98_304, + tileType: 1, + } as S2Header); + expect(metadata).toEqual({ metadata: true } as unknown as Metadata); + + const tile = await reader.getTileS2(0, 0, 0, 0); + expect(tile).toEqual(uint8); + + const tile2 = await reader.getTileS2(1, 0, 0, 0); + expect(tile2).toEqual(uint8); + + // const tile3 = await reader.getTileS2(3, 2, 1, 1); + // expect(tile3).toEqual(uint8_2); + + server.stop(); +}); diff --git a/test/server.ts b/test/server.ts new file mode 100644 index 0000000..042012c --- /dev/null +++ b/test/server.ts @@ -0,0 +1,47 @@ +/** + * @returns - a Bun server + */ +export function buildServer() { + return Bun.serve({ + port: 0, // Use port 0 to let Bun choose an available port + /** + * @param req - the request from the 
user + * @returns - a response of the file to the user + */ + async fetch(req) { + const { pathname } = new URL(req.url); + const filePath = `${__dirname}${pathname}`; + const file = Bun.file(filePath); + + if (!file || file.size === 0) return new Response(null, { status: 404 }); + + // Handle range request + const rangeHeader = req.headers.get('Range'); + if (rangeHeader) { + const [unit, range] = rangeHeader.split('='); + if (unit === 'bytes') { + const [start, end] = range.split('-').map(Number); + + const fileSize = file.size; + const endByte = end !== undefined ? Math.min(end, fileSize - 1) : fileSize - 1; + const rangeStart = Math.max(start, 0); + + // Read the specified byte range from the file + const chunk = file.slice(rangeStart, endByte + 1); + + return new Response(chunk, { + status: 206, + headers: { + 'Content-Range': `bytes ${rangeStart}-${endByte}/${fileSize}`, + 'Content-Length': String(endByte - rangeStart + 1), + 'Accept-Ranges': 'bytes', + }, + }); + } + } + + // If no range is requested, serve the whole file + return new Response(file); + }, + }); +} diff --git a/test/writer.test.ts b/test/writer.test.ts index 7587933..52281e2 100644 --- a/test/writer.test.ts +++ b/test/writer.test.ts @@ -1,24 +1,22 @@ -import { PMTilesReader } from '../src/reader'; -import { PMTilesWriter } from '../src/writer'; -import { S2_ROOT_SIZE } from '../src/s2pmtiles'; +import { FileWriter } from '../src/file'; +import { MMapReader } from '../src/mmap'; import { TileType } from '../src/pmtiles'; import tmp from 'tmp'; import { unlink } from 'node:fs/promises'; -import { afterAll, describe, expect, test } from 'bun:test'; +import { BufferReader, S2PMTilesReader } from '../src/reader'; +import { BufferWriter, S2PMTilesWriter } from '../src/writer'; +import { afterAll, expect, test } from 'bun:test'; import { stat } from 'node:fs/promises'; -import type { Metadata } from '../src'; +import type { Metadata, S2Header } from '../src'; -let tmpFile: string; +let tmpFile1: 
string; let tmpFile2: string; -let tmpFile3: string; -describe('File Writer WM', async () => { - tmpFile = tmp.tmpNameSync({ - prefix: 'WM', - }); - const writer = new PMTilesWriter(tmpFile, TileType.Pbf); +test('File Writer WM', async () => { + const bufWriter = new BufferWriter(); + const writer = new S2PMTilesWriter(bufWriter, TileType.Pbf); // setup data const str = 'hello world'; const buf = Buffer.from(str, 'utf8'); @@ -33,49 +31,46 @@ describe('File Writer WM', async () => { // finish await writer.commit({ metadata: true } as unknown as Metadata); - const reader = new PMTilesReader(tmpFile); + const bufReader = new BufferReader(bufWriter.commit()); + const reader = new S2PMTilesReader(bufReader); const metadata = await reader.getMetadata(); const header = await reader.getHeader(); - test('read', async () => { - expect((await stat(tmpFile)).size).toEqual(98_328); - expect(header).toEqual({ - clustered: true, - internalCompression: 1, - jsonMetadataLength: 17, - jsonMetadataOffset: 276, - leafDirectoryLength: 0, - leafDirectoryOffset: 98_328, - maxZoom: 5, - minZoom: 0, - numAddressedTiles: 3, - numTileContents: 2, - numTileEntries: 3, - rootDirectoryLength: 14, - rootDirectoryOffset: 262, - specVersion: 3, - tileCompression: 1, - tileDataLength: 24, - tileDataOffset: S2_ROOT_SIZE, - tileType: 1, - }); - expect(metadata).toEqual({ metadata: true } as unknown as Metadata); - - const tile = await reader.getTile(0, 0, 0); - expect(tile).toEqual(uint8); - - const tile2 = await reader.getTile(1, 0, 1); - expect(tile2).toEqual(uint8); - - const tile3 = await reader.getTile(5, 2, 9); - expect(tile3).toEqual(uint8_2); + expect(bufReader.buffer.byteLength).toEqual(98_399); + expect(header).toEqual({ + clustered: true, + internalCompression: 2, + jsonMetadataLength: 37, + jsonMetadataOffset: 296, + leafDirectoryLength: 0, + leafDirectoryOffset: 98399, + maxZoom: 5, + minZoom: 0, + numAddressedTiles: 3, + numTileContents: 3, + numTileEntries: 3, + rootDirectoryLength: 
34, + rootDirectoryOffset: 262, + specVersion: 3, + tileCompression: 2, + tileDataLength: 95, + tileDataOffset: 98304, + tileType: 1, }); + expect(metadata).toEqual({ metadata: true } as unknown as Metadata); + + const tile = await reader.getTile(0, 0, 0); + expect(tile).toEqual(uint8); + + const tile2 = await reader.getTile(1, 0, 1); + expect(tile2).toEqual(uint8); + + const tile3 = await reader.getTile(5, 2, 9); + expect(tile3).toEqual(uint8_2); }); -describe('File Writer S2', async () => { - tmpFile2 = tmp.tmpNameSync({ - prefix: 'S2', - }); - const writer = new PMTilesWriter(tmpFile2, TileType.Pbf); +test('File Writer S2', async () => { + tmpFile1 = tmp.tmpNameSync({ prefix: 'S2' }); + const writer = new S2PMTilesWriter(new FileWriter(tmpFile1), TileType.Pbf); // setup data const str = 'hello world'; const buf = Buffer.from(str, 'utf8'); @@ -86,73 +81,82 @@ describe('File Writer S2', async () => { // write data in tile await writer.writeTileS2(0, 0, 0, 0, uint8); await writer.writeTileS2(1, 0, 0, 0, uint8); + await writer.writeTileS2(2, 8, 1, 1, uint8_2); await writer.writeTileS2(3, 2, 1, 1, uint8_2); + await writer.writeTileS2(4, 5, 5, 5, uint8_2); + await writer.writeTileS2(5, 5, 5, 5, uint8); // finish await writer.commit({ metadata: true } as unknown as Metadata); - const reader = new PMTilesReader(tmpFile2); + const reader = new S2PMTilesReader(new MMapReader(tmpFile1)); const metadata = await reader.getMetadata(); const header = await reader.getHeader(); - test('read', async () => { - expect((await stat(tmpFile2)).size).toEqual(98_328); - expect(header).toEqual({ - clustered: true, - internalCompression: 1, - jsonMetadataLength: 17, - jsonMetadataOffset: 280, - leafDirectoryLength: 0, - leafDirectoryLength1: 0, - leafDirectoryLength2: 0, - leafDirectoryLength3: 0, - leafDirectoryLength4: 0, - leafDirectoryLength5: 0, - leafDirectoryOffset: 98_328, - leafDirectoryOffset1: 98_328, - leafDirectoryOffset2: 98_328, - leafDirectoryOffset3: 98_328, - 
leafDirectoryOffset4: 98_328, - leafDirectoryOffset5: 98_328, - maxZoom: 0, - minZoom: 0, - numAddressedTiles: 3, - numTileContents: 2, - numTileEntries: 1, - rootDirectoryLength: 5, - rootDirectoryLength1: 5, - rootDirectoryLength2: 1, - rootDirectoryLength3: 5, - rootDirectoryLength4: 1, - rootDirectoryLength5: 1, - rootDirectoryOffset: 262, - rootDirectoryOffset1: 267, - rootDirectoryOffset2: 272, - rootDirectoryOffset3: 273, - rootDirectoryOffset4: 278, - rootDirectoryOffset5: 279, - specVersion: 1, - tileCompression: 1, - tileDataLength: 24, - tileDataOffset: S2_ROOT_SIZE, - tileType: 1, - }); - expect(metadata).toEqual({ metadata: true } as unknown as Metadata); - - const tile = await reader.getTileS2(0, 0, 0, 0); - expect(tile).toEqual(uint8); - - const tile2 = await reader.getTileS2(1, 0, 0, 0); - expect(tile2).toEqual(uint8); - - const tile3 = await reader.getTileS2(3, 2, 1, 1); - expect(tile3).toEqual(uint8_2); - }); + + expect((await stat(tmpFile1)).size).toEqual(98_496); + expect(header).toEqual({ + clustered: true, + internalCompression: 2, + jsonMetadataLength: 37, + jsonMetadataOffset: 418, + leafDirectoryLength: 0, + leafDirectoryLength1: 0, + leafDirectoryLength2: 0, + leafDirectoryLength3: 0, + leafDirectoryLength4: 0, + leafDirectoryLength5: 0, + leafDirectoryOffset: 98496, + leafDirectoryOffset1: 98496, + leafDirectoryOffset2: 98496, + leafDirectoryOffset3: 98496, + leafDirectoryOffset4: 98496, + leafDirectoryOffset5: 98496, + maxZoom: 8, + minZoom: 0, + numAddressedTiles: 6, + numTileContents: 1, + numTileEntries: 1, + rootDirectoryLength: 25, + rootDirectoryLength1: 25, + rootDirectoryLength2: 27, + rootDirectoryLength3: 25, + rootDirectoryLength4: 27, + rootDirectoryLength5: 27, + rootDirectoryOffset: 262, + rootDirectoryOffset1: 287, + rootDirectoryOffset2: 312, + rootDirectoryOffset3: 339, + rootDirectoryOffset4: 364, + rootDirectoryOffset5: 391, + specVersion: 1, + tileCompression: 2, + tileDataLength: 192, + tileDataOffset: 98304, + 
tileType: 1, + } as S2Header); + expect(metadata).toEqual({ metadata: true } as unknown as Metadata); + + const tile = await reader.getTileS2(0, 0, 0, 0); + expect(tile).toEqual(uint8); + + const tile2 = await reader.getTileS2(1, 0, 0, 0); + expect(tile2).toEqual(uint8); + + const tile3 = await reader.getTileS2(3, 2, 1, 1); + expect(tile3).toEqual(uint8_2); + + const tile4 = await reader.getTileS2(4, 5, 5, 5); + expect(tile4).toEqual(uint8_2); + + const tile5 = await reader.getTileS2(5, 5, 5, 5); + expect(tile5).toEqual(uint8); + + const tile6 = await reader.getTileS2(2, 8, 1, 1); + expect(tile6).toEqual(uint8_2); }); -describe('File Writer WM Large', async () => { - tmpFile3 = tmp.tmpNameSync({ - prefix: 'S2-big-2', - }); - const writer = new PMTilesWriter(tmpFile3, TileType.Pbf); +test('File Writer WM Large', async () => { + tmpFile2 = tmp.tmpNameSync({ prefix: 'S2-big-2' }); + const writer = new S2PMTilesWriter(new FileWriter(tmpFile2), TileType.Pbf); // write lots of tiles for (let zoom = 0; zoom < 8; zoom++) { const size = 1 << zoom; @@ -168,46 +172,44 @@ describe('File Writer WM Large', async () => { // finish await writer.commit({ metadata: true } as unknown as Metadata); - const reader = new PMTilesReader(tmpFile3); - const header = await reader.getHeader(); - test('read', async () => { - expect((await stat(tmpFile3)).size).toEqual(371_616); - expect(header).toEqual({ - clustered: false, - internalCompression: 1, - jsonMetadataLength: 17, - jsonMetadataOffset: 303, - leafDirectoryLength: 118199, - leafDirectoryOffset: 253417, - maxZoom: 7, - minZoom: 0, - numAddressedTiles: 21845, - numTileContents: 21845, - numTileEntries: 21845, - rootDirectoryLength: 41, - rootDirectoryOffset: 262, - specVersion: 3, - tileCompression: 1, - tileDataLength: 273312, - tileDataOffset: 98304, - tileType: 1, - }); - const metadata = await reader.getMetadata(); - expect(metadata).toEqual({ metadata: true } as unknown as Metadata); - - // get a random tile - const tile = await 
reader.getTile(6, 22, 45); - const str = `6-22-45`; - const buf = Buffer.from(str, 'utf8'); - const uint8 = new Uint8Array(buf.buffer, buf.byteOffset, buf.byteLength); - expect(tile).toEqual(uint8); - }); + const reader = new S2PMTilesReader(new MMapReader(tmpFile2)); + // const header = await reader.getHeader(); + // expect((await stat(tmpFile2)).size).toEqual(736_752); + // expect(header).toEqual({ + // clustered: false, + // internalCompression: 2, + // jsonMetadataLength: 37, + // jsonMetadataOffset: 305, + // leafDirectoryLength: 46_519, + // leafDirectoryOffset: 690_233, + // maxZoom: 7, + // minZoom: 0, + // numAddressedTiles: 21845, + // numTileContents: 21_845, + // numTileEntries: 21_845, + // rootDirectoryLength: 43, + // rootDirectoryOffset: 262, + // specVersion: 3, + // tileCompression: 2, + // tileDataLength: 638_448, + // tileDataOffset: 98_304, + // tileType: 1, + // }); + const metadata = await reader.getMetadata(); + expect(metadata).toEqual({ metadata: true } as unknown as Metadata); + + // get a random tile + const tile = await reader.getTile(6, 22, 45); + const str = `6-22-45`; + const buf = Buffer.from(str, 'utf8'); + const uint8 = new Uint8Array(buf.buffer, buf.byteOffset, buf.byteLength); + expect(tile).toEqual(uint8); }); // cleanup afterAll(async () => { try { - await unlink(tmpFile); + await unlink(tmpFile1); } catch (_) { // ignore } @@ -216,9 +218,4 @@ afterAll(async () => { } catch (_) { // ignore } - try { - await unlink(tmpFile3); - } catch (_) { - // ignore - } });