From 4ff4d9390cbf2d11a45aa8c6484304b537915435 Mon Sep 17 00:00:00 2001 From: JernKunpittaya <61564542+JernKunpittaya@users.noreply.github.com> Date: Wed, 24 Apr 2024 18:43:48 +0700 Subject: [PATCH 1/5] del submodule --- .gitmodules | 6 ------ zkstats/onnx2circom/keras2circom | 1 - zkstats/onnx2circom/onnx2keras | 1 - 3 files changed, 8 deletions(-) delete mode 100644 .gitmodules delete mode 160000 zkstats/onnx2circom/keras2circom delete mode 160000 zkstats/onnx2circom/onnx2keras diff --git a/.gitmodules b/.gitmodules deleted file mode 100644 index 88ded7f..0000000 --- a/.gitmodules +++ /dev/null @@ -1,6 +0,0 @@ -[submodule "zkstats/onnx2circom/onnx2keras"] - path = zkstats/onnx2circom/onnx2keras - url = git@github.com:JernKunpittaya/onnx2keras.git -[submodule "zkstats/onnx2circom/keras2circom"] - path = zkstats/onnx2circom/keras2circom - url = git@github.com:JernKunpittaya/keras2circom.git diff --git a/zkstats/onnx2circom/keras2circom b/zkstats/onnx2circom/keras2circom deleted file mode 160000 index d922440..0000000 --- a/zkstats/onnx2circom/keras2circom +++ /dev/null @@ -1 +0,0 @@ -Subproject commit d92244052d921d3bbb51f4219786aab5daacaf68 diff --git a/zkstats/onnx2circom/onnx2keras b/zkstats/onnx2circom/onnx2keras deleted file mode 160000 index e88f682..0000000 --- a/zkstats/onnx2circom/onnx2keras +++ /dev/null @@ -1 +0,0 @@ -Subproject commit e88f682892ee47a290f6c041c33ef9320fd37f07 From a94d64003fdb7e9e01ff6e9b4c5ed135e760f3e7 Mon Sep 17 00:00:00 2001 From: JernKunpittaya <61564542+JernKunpittaya@users.noreply.github.com> Date: Wed, 24 Apr 2024 18:44:23 +0700 Subject: [PATCH 2/5] hello --- zkstats/onnx2circom/keras2circom/hello.txt | 1 + 1 file changed, 1 insertion(+) create mode 100644 zkstats/onnx2circom/keras2circom/hello.txt diff --git a/zkstats/onnx2circom/keras2circom/hello.txt b/zkstats/onnx2circom/keras2circom/hello.txt new file mode 100644 index 0000000..b6fc4c6 --- /dev/null +++ b/zkstats/onnx2circom/keras2circom/hello.txt @@ -0,0 +1 @@ +hello \ No 
newline at end of file From 5163abee79b829dbdfb091cf5f26419428fbb496 Mon Sep 17 00:00:00 2001 From: JernKunpittaya <61564542+JernKunpittaya@users.noreply.github.com> Date: Wed, 24 Apr 2024 18:46:18 +0700 Subject: [PATCH 3/5] test keras2circom --- zkstats/onnx2circom/keras2circom/.gitignore | 199 + zkstats/onnx2circom/keras2circom/LICENSE | 21 + zkstats/onnx2circom/keras2circom/README.md | 115 + .../onnx2circom/keras2circom/environment.yml | 175 + .../example/MeanCheck/MeanCheck.circom | 13 + .../MeanCheck/gen_MeanCheck_keras.ipynb | 280 + .../example/MeanCheck/mean_keras.keras | Bin 0 -> 7654 bytes .../example/dense/dense_keras.ipynb | 255 + .../example/dense/dense_keras.keras | Bin 0 -> 12348 bytes zkstats/onnx2circom/keras2circom/hello.txt | 2 +- .../keras2circom/keras2circom/circom.py | 389 + .../keras2circom/keras2circom/model.py | 110 + .../keras2circom/keras2circom/script.py | 273 + .../keras2circom/keras2circom/transpiler.py | 292 + .../keras2circom/keras2circom/util.py | 116 + zkstats/onnx2circom/keras2circom/main.py | 26 + .../onnx2circom/keras2circom/models/model.h5 | Bin 0 -> 62520 bytes .../keras2circom/models/model.ipynb | 258 + .../keras2circom/package-lock.json | 2728 +++++ zkstats/onnx2circom/keras2circom/package.json | 26 + .../onnx2circom/keras2circom/requirements.txt | 3 + .../onnx2circom/keras2circom/setup-circom.sh | 10 + .../keras2circom/test/accuracy.ipynb | 10090 ++++++++++++++++ .../onnx2circom/keras2circom/test/circuit.js | 50 + .../keras2circom/test/load_input.ipynb | 80 + 25 files changed, 15510 insertions(+), 1 deletion(-) create mode 100644 zkstats/onnx2circom/keras2circom/.gitignore create mode 100644 zkstats/onnx2circom/keras2circom/LICENSE create mode 100644 zkstats/onnx2circom/keras2circom/README.md create mode 100644 zkstats/onnx2circom/keras2circom/environment.yml create mode 100644 zkstats/onnx2circom/keras2circom/example/MeanCheck/MeanCheck.circom create mode 100644 
zkstats/onnx2circom/keras2circom/example/MeanCheck/gen_MeanCheck_keras.ipynb create mode 100644 zkstats/onnx2circom/keras2circom/example/MeanCheck/mean_keras.keras create mode 100644 zkstats/onnx2circom/keras2circom/example/dense/dense_keras.ipynb create mode 100644 zkstats/onnx2circom/keras2circom/example/dense/dense_keras.keras create mode 100644 zkstats/onnx2circom/keras2circom/keras2circom/circom.py create mode 100644 zkstats/onnx2circom/keras2circom/keras2circom/model.py create mode 100644 zkstats/onnx2circom/keras2circom/keras2circom/script.py create mode 100644 zkstats/onnx2circom/keras2circom/keras2circom/transpiler.py create mode 100644 zkstats/onnx2circom/keras2circom/keras2circom/util.py create mode 100644 zkstats/onnx2circom/keras2circom/main.py create mode 100644 zkstats/onnx2circom/keras2circom/models/model.h5 create mode 100644 zkstats/onnx2circom/keras2circom/models/model.ipynb create mode 100644 zkstats/onnx2circom/keras2circom/package-lock.json create mode 100644 zkstats/onnx2circom/keras2circom/package.json create mode 100644 zkstats/onnx2circom/keras2circom/requirements.txt create mode 100644 zkstats/onnx2circom/keras2circom/setup-circom.sh create mode 100644 zkstats/onnx2circom/keras2circom/test/accuracy.ipynb create mode 100644 zkstats/onnx2circom/keras2circom/test/circuit.js create mode 100644 zkstats/onnx2circom/keras2circom/test/load_input.ipynb diff --git a/zkstats/onnx2circom/keras2circom/.gitignore b/zkstats/onnx2circom/keras2circom/.gitignore new file mode 100644 index 0000000..6cd2e88 --- /dev/null +++ b/zkstats/onnx2circom/keras2circom/.gitignore @@ -0,0 +1,199 @@ +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# C extensions +*.so + +# Distribution / packaging +.Python +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +wheels/ +pip-wheel-metadata/ +share/python-wheels/ +*.egg-info/ +.installed.cfg +*.egg +MANIFEST + +# PyInstaller +# Usually these files are 
written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. +*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.nox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +*.py,cover +.hypothesis/ +.pytest_cache/ + +# Translations +*.mo +*.pot + +# Django stuff: +*.log +local_settings.py +db.sqlite3 +db.sqlite3-journal + +# Flask stuff: +instance/ +.webassets-cache + +# Scrapy stuff: +.scrapy + +# Sphinx documentation +docs/_build/ + +# PyBuilder +target/ + +# Jupyter Notebook +.ipynb_checkpoints + +# IPython +profile_default/ +ipython_config.py + +# pyenv +.python-version + +# pipenv +# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. +# However, in case of collaboration, if having platform-specific dependencies or dependencies +# having no cross-platform support, pipenv may install dependencies that don't work, or not +# install all needed dependencies. +#Pipfile.lock + +# PEP 582; used by e.g. 
github.com/David-OConnor/pyflow +__pypackages__/ + +# Celery stuff +celerybeat-schedule +celerybeat.pid + +# SageMath parsed files +*.sage.py + +# Environments +.env +.venv +env/ +venv/ +ENV/ +env.bak/ +venv.bak/ + +# Spyder project settings +.spyderproject +.spyproject + +# Rope project settings +.ropeproject + +# mkdocs documentation +/site + +# mypy +.mypy_cache/ +.dmypy.json +dmypy.json + +# Pyre type checker +.pyre/ + +# Logs +logs +*.log +npm-debug.log* +yarn-debug.log* +yarn-error.log* + +# Runtime data +pids +*.pid +*.seed +*.pid.lock + +# Directory for instrumented libs generated by jscoverage/JSCover +lib-cov + +# Coverage directory used by tools like istanbul +coverage + +# nyc test coverage +.nyc_output + +# Grunt intermediate storage (http://gruntjs.com/creating-plugins#storing-task-files) +.grunt + +# Bower dependency directory (https://bower.io/) +bower_components + +# node-waf configuration +.lock-wscript + +# Compiled binary addons (https://nodejs.org/api/addons.html) +build/Release + +# Dependency directories +node_modules/ +jspm_packages/ + +# Typescript v1 declaration files +typings/ + +# Optional npm cache directory +.npm + +# Optional eslint cache +.eslintcache + +# Optional REPL history +.node_repl_history + +# Output of 'npm pack' +*.tgz + +# Yarn Integrity file +.yarn-integrity + +# dotenv environment variables file +.env + +# next.js build output +.next + +tmp + +.DS_Store + +output/ +X_test/ +y_test.json \ No newline at end of file diff --git a/zkstats/onnx2circom/keras2circom/LICENSE b/zkstats/onnx2circom/keras2circom/LICENSE new file mode 100644 index 0000000..8a2c6df --- /dev/null +++ b/zkstats/onnx2circom/keras2circom/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2022 drCathieSo.eth + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, 
copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/zkstats/onnx2circom/keras2circom/README.md b/zkstats/onnx2circom/keras2circom/README.md new file mode 100644 index 0000000..2aaf433 --- /dev/null +++ b/zkstats/onnx2circom/keras2circom/README.md @@ -0,0 +1,115 @@ +# keras2circom + +keras2circom is a python tool that transpiles a tf.keras model into a circom circuit. + +## Experimental Version for stats + +Install the dependencies. You can use pip: + +```bash +pip install -r requirements.txt +``` + +You will also need to install circom and snarkjs. You can run the following commands to install them: + +```bash +bash setup-circom.sh +``` + +Last but not least, run + +```bash +npm install +``` + +First, Look at their supported function in example/dense folder + +- Just run notebook in dense_keras.ipynb to generate keras file +- Then, run `python main.py ./example/dense/dense_keras.keras` to generate `output` folder, consisting of + - `circuit.circom` which contains circom file + - `circuit.json` which contains every predetermined value that we know before hand (like weight, bias, etc.) 
+ - `circuit.py` which contains the algorithm to calcualte (off-chain) the final value of functions we are interested in, so we can provide them as witness (as input signal) in circuit.circom as well. We run circuit.py by creating `input.json` (inside output folder) like { "in": ["4", "4", "7"] }, then run `python output/circuit.py output/circuit.json output/input.json`, and we will get output.json + - Briefly, with `input.json`, `circuit.json`, and `output.json` we can verify in `circuit.circom` and get output as well + +Now, we will look at our customed layer 'MeanCheck' + +Note that it doesnt really make sense to have this layer because we want customed layer for each operation, not for the set of operations like MeanCheck, but this to give an overall idea how to do it, and it's more trivial trying to write smaller operation. Btw, we still dont support Decimal point mean witness, but can do with adding dec like in other template examples + +First, since default implementation of this library install circom template in node_modules already, we will hand-code our MeanCheck template by copying example/MeanCheck/MeanCheck.circom into node_modules/circomlib-ml/circuits/MeanCheck.circom + +Then, the rest is just the same as above + +- Just run notebook in gen_MeanCheck.ipynb to generate keras file +- Then, run `python main.py ./example/MeanCheck/mean_keras.keras` to generate `output` folder, consisting of + - `circuit.circom` + - `circuit.json`, which nothing since it this circuit has 2 inputs: one is input so unknown, while the other is witness which is unknown at first as well + - `circuit.py` which allows us to calculate mean_check_out, Inside `output` folder, we create `input.json` like `{ "in": ["4", "4", "7"] }`, then run `python output/circuit.py output/circuit.json output/input.json`, and we will get `output.json` + - Briefly, with `input.json`, `circuit.json`, and `output.json` we can verify in circuit.circom and get `output` as well + +## 
============================================================== + + ORIGINAL README BELOW + +## ============================================================== + +## Installation + +First, clone the repository: + +```bash +git clone https://github.com/socathie/keras2circom.git +``` + +Then, install the dependencies. You can use pip: + +```bash +pip install -r requirements.txt +``` + +If you use conda, you can also create a new environment with the following command: + +```bash +conda env create -f environment.yml +``` + +You will also need to install circom and snarkjs. You can run the following commands to install them: + +```bash +bash setup-circom.sh +``` + +Last but not least, run + +```bash +npm install +``` + +## Usage + +To use the package, you can run the following command: + +```bash +python main.py [-o ] [--raw] +``` + +For example, to transpile the model in `models/model.h5` into a circom circuit, you can run: + +```bash +python main.py models/model.h5 +``` + +The output will be in the `output` directory. + +If you want to transpile the model into a circom circuit with "raw" output, i.e. 
no ArgMax at the end, you can run: + +```bash +python main.py models/model.h5 --raw +``` + +## Testing + +To test the package, you can run the following command: + +```bash +npm test +``` diff --git a/zkstats/onnx2circom/keras2circom/environment.yml b/zkstats/onnx2circom/keras2circom/environment.yml new file mode 100644 index 0000000..bc9b83c --- /dev/null +++ b/zkstats/onnx2circom/keras2circom/environment.yml @@ -0,0 +1,175 @@ +name: keras2circom +channels: + - defaults + - conda-forge +dependencies: + - _ipython_minor_entry_point=8.7.0=h8cf3c4a_0 + - absl-py=1.3.0=py39hca03da5_0 + - aiohttp=3.8.3=py39h80987f9_0 + - aiosignal=1.2.0=pyhd3eb1b0_0 + - appnope=0.1.3=pyhd8ed1ab_0 + - asttokens=2.2.1=pyhd8ed1ab_0 + - astunparse=1.6.3=py_0 + - async-timeout=4.0.2=py39hca03da5_0 + - attrs=22.1.0=py39hca03da5_0 + - backcall=0.2.0=pyh9f0ad1d_0 + - backports=1.0=pyhd8ed1ab_3 + - backports.functools_lru_cache=1.6.4=pyhd8ed1ab_0 + - blas=1.0=openblas + - blinker=1.4=py39hca03da5_0 + - brotli=1.0.9=h1a28f6b_7 + - brotli-bin=1.0.9=h1a28f6b_7 + - brotlipy=0.7.0=py39h1a28f6b_1002 + - bzip2=1.0.8=h620ffc9_4 + - c-ares=1.18.1=h1a28f6b_0 + - ca-certificates=2022.10.11=hca03da5_0 + - cachetools=4.2.2=pyhd3eb1b0_0 + - certifi=2022.9.24=py39hca03da5_0 + - cffi=1.15.1=py39h80987f9_3 + - charset-normalizer=2.0.4=pyhd3eb1b0_0 + - click=8.0.4=py39hca03da5_0 + - comm=0.1.2=pyhd8ed1ab_0 + - contourpy=1.0.5=py39h525c30c_0 + - cryptography=38.0.1=py39h834c97f_0 + - cycler=0.11.0=pyhd3eb1b0_0 + - dataclasses=0.8=pyh6d0b6a4_7 + - debugpy=1.6.4=py39h23fbdae_0 + - decorator=5.1.1=pyhd8ed1ab_0 + - docopt=0.6.2=py39hca03da5_1 + - entrypoints=0.4=pyhd8ed1ab_0 + - executing=1.2.0=pyhd8ed1ab_0 + - flatbuffers=2.0.0=hc377ac9_0 + - flit-core=3.6.0=pyhd3eb1b0_0 + - fonttools=4.25.0=pyhd3eb1b0_0 + - freetype=2.11.0=h1192e45_0 + - frozenlist=1.3.3=py39h80987f9_0 + - gast=0.4.0=pyhd3eb1b0_0 + - giflib=5.2.1=h1a28f6b_0 + - google-auth=2.6.0=pyhd3eb1b0_0 + - google-auth-oauthlib=0.4.4=pyhd3eb1b0_0 + - 
google-pasta=0.2.0=pyhd3eb1b0_0 + - grpc-cpp=1.47.1=h503f348_6 + - grpcio=1.47.1=py39h13431ec_6 + - h5py=3.7.0=py39h7fe8675_0 + - hdf5=1.12.1=h160e8cb_2 + - icu=70.1=h6b3803e_0 + - idna=3.4=py39hca03da5_0 + - importlib-metadata=4.11.3=py39hca03da5_0 + - ipykernel=6.19.2=pyh736e0ef_0 + - ipython=8.7.0=pyhd1c38e8_0 + - jedi=0.18.2=pyhd8ed1ab_0 + - jpeg=9e=h1a28f6b_0 + - jupyter_client=7.4.8=pyhd8ed1ab_0 + - jupyter_core=5.1.0=py39h2804cbe_0 + - keras=2.10.0=pyhd8ed1ab_0 + - keras-preprocessing=1.1.2=pyhd3eb1b0_0 + - kiwisolver=1.4.2=py39hc377ac9_0 + - krb5=1.19.2=h3b8d789_0 + - lcms2=2.12=hba8e193_0 + - lerc=3.0=hc377ac9_0 + - libabseil=20220623.0=cxx17_h28b99d4_6 + - libblas=3.9.0=16_osxarm64_openblas + - libbrotlicommon=1.0.9=h1a28f6b_7 + - libbrotlidec=1.0.9=h1a28f6b_7 + - libbrotlienc=1.0.9=h1a28f6b_7 + - libcblas=3.9.0=16_osxarm64_openblas + - libcurl=7.84.0=hc6d1d07_0 + - libcxx=14.0.6=h848a8c0_0 + - libdeflate=1.8=h1a28f6b_5 + - libedit=3.1.20210910=h1a28f6b_0 + - libev=4.33=h1a28f6b_1 + - libffi=3.4.2=hca03da5_6 + - libgfortran=5.0.0=11_3_0_hca03da5_28 + - libgfortran5=11.3.0=h009349e_28 + - liblapack=3.9.0=16_osxarm64_openblas + - libnghttp2=1.46.0=h95c9599_0 + - libopenblas=0.3.21=h269037a_0 + - libpng=1.6.38=h76d750c_0 + - libprotobuf=3.21.7=hb5ab8b9_0 + - libsodium=1.0.18=h27ca646_1 + - libsqlite=3.39.4=h76d750c_0 + - libssh2=1.10.0=hf27765b_0 + - libtiff=4.4.0=had003b8_0 + - libwebp=1.2.4=h68602c7_0 + - libwebp-base=1.2.4=h1a28f6b_0 + - libzlib=1.2.12=h03a7124_3 + - llvm-openmp=14.0.6=hc6e5704_0 + - lz4-c=1.9.4=h313beb8_0 + - markdown=3.3.4=py39hca03da5_0 + - matplotlib=3.6.2=py39hca03da5_0 + - matplotlib-base=3.6.2=py39h8bbb115_0 + - matplotlib-inline=0.1.6=pyhd8ed1ab_0 + - multidict=6.0.2=py39h1a28f6b_0 + - munkres=1.1.4=py_0 + - ncurses=6.3=h1a28f6b_3 + - nest-asyncio=1.5.6=pyhd8ed1ab_0 + - nomkl=3.0=0 + - numpy=1.23.4=py39h1398885_0 + - numpy-base=1.23.4=py39h90707a3_0 + - oauthlib=3.2.1=py39hca03da5_0 + - openblas=0.3.21=hca03da5_0 + - 
openblas-devel=0.3.21=hca03da5_0 + - openssl=1.1.1s=h1a28f6b_0 + - opt_einsum=3.3.0=pyhd3eb1b0_1 + - packaging=21.3=pyhd3eb1b0_0 + - parso=0.8.3=pyhd8ed1ab_0 + - pexpect=4.8.0=pyh1a96a4e_2 + - pickleshare=0.7.5=py_1003 + - pillow=9.2.0=py39h4d1bdd5_1 + - pip=22.3.1=py39hca03da5_0 + - pipreqs=0.4.11=pyhd8ed1ab_0 + - platformdirs=2.6.0=pyhd8ed1ab_0 + - prompt-toolkit=3.0.36=pyha770c72_0 + - protobuf=4.21.7=py39h23fbdae_0 + - psutil=5.9.4=py39h02fc5c5_0 + - ptyprocess=0.7.0=pyhd3deb0d_0 + - pure_eval=0.2.2=pyhd8ed1ab_0 + - pyasn1=0.4.8=pyhd3eb1b0_0 + - pyasn1-modules=0.2.8=py_0 + - pycparser=2.21=pyhd3eb1b0_0 + - pygments=2.13.0=pyhd8ed1ab_0 + - pyjwt=2.4.0=py39hca03da5_0 + - pyopenssl=22.0.0=pyhd3eb1b0_0 + - pyparsing=3.0.9=py39hca03da5_0 + - pysocks=1.7.1=py39hca03da5_0 + - python=3.9.13=hc596b02_0_cpython + - python-dateutil=2.8.2=pyhd8ed1ab_0 + - python-flatbuffers=2.0=pyhd3eb1b0_0 + - python_abi=3.9=3_cp39 + - pyzmq=24.0.1=py39h0553236_1 + - re2=2022.06.01=h9a09cb3_1 + - readline=8.2=h1a28f6b_0 + - requests=2.28.1=py39hca03da5_0 + - requests-oauthlib=1.3.0=py_0 + - rsa=4.7.2=pyhd3eb1b0_1 + - scipy=1.9.3=py39h18313fe_2 + - setuptools=65.5.0=py39hca03da5_0 + - six=1.16.0=pyhd3eb1b0_1 + - snappy=1.1.9=hc377ac9_0 + - sqlite=3.39.3=h1058600_0 + - stack_data=0.6.2=pyhd8ed1ab_0 + - tensorboard=2.10.1=pyhd8ed1ab_0 + - tensorboard-data-server=0.6.1=py39haa0b8cc_4 + - tensorboard-plugin-wit=1.6.0=py_0 + - tensorflow=2.10.0=cpu_py39h2839aeb_0 + - tensorflow-base=2.10.0=cpu_py39h0d4f425_0 + - tensorflow-estimator=2.10.0=cpu_py39h63f9d84_0 + - termcolor=2.1.0=py39hca03da5_0 + - tk=8.6.12=hb8d0fd4_0 + - tornado=6.2=py39h02fc5c5_1 + - traitlets=5.7.1=pyhd8ed1ab_0 + - typing_extensions=4.4.0=py39hca03da5_0 + - tzdata=2022g=h04d1e81_0 + - urllib3=1.26.13=py39hca03da5_0 + - wcwidth=0.2.5=pyh9f0ad1d_2 + - werkzeug=2.0.3=pyhd3eb1b0_0 + - wheel=0.37.1=pyhd3eb1b0_0 + - wrapt=1.14.1=py39h1a28f6b_0 + - xz=5.2.8=h80987f9_0 + - yarg=0.1.9=py_1 + - yarl=1.8.1=py39h1a28f6b_0 + - 
zeromq=4.3.4=hbdafb3b_1 + - zipp=3.8.0=py39hca03da5_0 + - zlib=1.2.12=h5a0b063_3 + - zstd=1.5.2=h8574219_0 +prefix: ~/keras2circom diff --git a/zkstats/onnx2circom/keras2circom/example/MeanCheck/MeanCheck.circom b/zkstats/onnx2circom/keras2circom/example/MeanCheck/MeanCheck.circom new file mode 100644 index 0000000..f0560c3 --- /dev/null +++ b/zkstats/onnx2circom/keras2circom/example/MeanCheck/MeanCheck.circom @@ -0,0 +1,13 @@ +pragma circom 2.0.0; + +template MeanCheck (nInputs) { + signal input in[1][nInputs]; + signal input out[1]; + + signal sum_till[nInputs]; + sum_till[0] <== in[0][0]; + for (var i = 1; i>\n" + ] + }, + { + "data": { + "text/html": [ + "
Model: \"functional_1\"\n",
+       "
\n" + ], + "text/plain": [ + "\u001b[1mModel: \"functional_1\"\u001b[0m\n" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "text/html": [ + "
┏━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━┓\n",
+       "┃ Layer (type)                     Output Shape                  Param # ┃\n",
+       "┡━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━┩\n",
+       "│ input_layer (InputLayer)        │ (None, 1, 3)           │             0 │\n",
+       "├─────────────────────────────────┼────────────────────────┼───────────────┤\n",
+       "│ mean_check (MeanCheck)          │ (None, 1)              │             0 │\n",
+       "└─────────────────────────────────┴────────────────────────┴───────────────┘\n",
+       "
\n" + ], + "text/plain": [ + "┏━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━┓\n", + "┃\u001b[1m \u001b[0m\u001b[1mLayer (type) \u001b[0m\u001b[1m \u001b[0m┃\u001b[1m \u001b[0m\u001b[1mOutput Shape \u001b[0m\u001b[1m \u001b[0m┃\u001b[1m \u001b[0m\u001b[1m Param #\u001b[0m\u001b[1m \u001b[0m┃\n", + "┡━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━┩\n", + "│ input_layer (\u001b[38;5;33mInputLayer\u001b[0m) │ (\u001b[38;5;45mNone\u001b[0m, \u001b[38;5;34m1\u001b[0m, \u001b[38;5;34m3\u001b[0m) │ \u001b[38;5;34m0\u001b[0m │\n", + "├─────────────────────────────────┼────────────────────────┼───────────────┤\n", + "│ mean_check (\u001b[38;5;33mMeanCheck\u001b[0m) │ (\u001b[38;5;45mNone\u001b[0m, \u001b[38;5;34m1\u001b[0m) │ \u001b[38;5;34m0\u001b[0m │\n", + "└─────────────────────────────────┴────────────────────────┴───────────────┘\n" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "text/html": [ + "
 Total params: 0 (0.00 B)\n",
+       "
\n" + ], + "text/plain": [ + "\u001b[1m Total params: \u001b[0m\u001b[38;5;34m0\u001b[0m (0.00 B)\n" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "text/html": [ + "
 Trainable params: 0 (0.00 B)\n",
+       "
\n" + ], + "text/plain": [ + "\u001b[1m Trainable params: \u001b[0m\u001b[38;5;34m0\u001b[0m (0.00 B)\n" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "text/html": [ + "
 Non-trainable params: 0 (0.00 B)\n",
+       "
\n" + ], + "text/plain": [ + "\u001b[1m Non-trainable params: \u001b[0m\u001b[38;5;34m0\u001b[0m (0.00 B)\n" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Layerrss: [, ]\n" + ] + } + ], + "source": [ + "from keras import backend as K\n", + "K.clear_session()\n", + "def get_model():\n", + " inputs = keras.Input(shape = (1,3))\n", + " # let's use 4, 4, 7\n", + " layer = MeanCheck(3)\n", + " # print('config: ', layer.get_config())\n", + " # print('weight: ', layer.get_weights())\n", + " # outputs = keras.layers.Dense(64, activation=\"relu\", name=\"dense_1\")(inputs)\n", + " outputs = layer(inputs)\n", + " model = keras.Model(inputs, outputs)\n", + " return model\n", + "\n", + "model = get_model()\n", + "model.summary()\n", + "print('Layerrss: ',model.layers)\n", + "model.save('mean_keras.keras')" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "input: >\n" + ] + }, + { + "data": { + "text/html": [ + "
Model: \"functional_1\"\n",
+       "
\n" + ], + "text/plain": [ + "\u001b[1mModel: \"functional_1\"\u001b[0m\n" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "text/html": [ + "
┏━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━┓\n",
+       "┃ Layer (type)                     Output Shape                  Param # ┃\n",
+       "┡━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━┩\n",
+       "│ input_layer (InputLayer)        │ (None, 1, 3)           │             0 │\n",
+       "├─────────────────────────────────┼────────────────────────┼───────────────┤\n",
+       "│ mean_check_1 (MeanCheck)        │ (None, 1)              │             0 │\n",
+       "└─────────────────────────────────┴────────────────────────┴───────────────┘\n",
+       "
\n" + ], + "text/plain": [ + "┏━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━┓\n", + "┃\u001b[1m \u001b[0m\u001b[1mLayer (type) \u001b[0m\u001b[1m \u001b[0m┃\u001b[1m \u001b[0m\u001b[1mOutput Shape \u001b[0m\u001b[1m \u001b[0m┃\u001b[1m \u001b[0m\u001b[1m Param #\u001b[0m\u001b[1m \u001b[0m┃\n", + "┡━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━┩\n", + "│ input_layer (\u001b[38;5;33mInputLayer\u001b[0m) │ (\u001b[38;5;45mNone\u001b[0m, \u001b[38;5;34m1\u001b[0m, \u001b[38;5;34m3\u001b[0m) │ \u001b[38;5;34m0\u001b[0m │\n", + "├─────────────────────────────────┼────────────────────────┼───────────────┤\n", + "│ mean_check_1 (\u001b[38;5;33mMeanCheck\u001b[0m) │ (\u001b[38;5;45mNone\u001b[0m, \u001b[38;5;34m1\u001b[0m) │ \u001b[38;5;34m0\u001b[0m │\n", + "└─────────────────────────────────┴────────────────────────┴───────────────┘\n" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "text/html": [ + "
 Total params: 0 (0.00 B)\n",
+       "
\n" + ], + "text/plain": [ + "\u001b[1m Total params: \u001b[0m\u001b[38;5;34m0\u001b[0m (0.00 B)\n" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "text/html": [ + "
 Trainable params: 0 (0.00 B)\n",
+       "
\n" + ], + "text/plain": [ + "\u001b[1m Trainable params: \u001b[0m\u001b[38;5;34m0\u001b[0m (0.00 B)\n" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "text/html": [ + "
 Non-trainable params: 0 (0.00 B)\n",
+       "
\n" + ], + "text/plain": [ + "\u001b[1m Non-trainable params: \u001b[0m\u001b[38;5;34m0\u001b[0m (0.00 B)\n" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "keras.saving.get_custom_objects().clear()\n", + "custom_objects = {\"MeanCheck\": MeanCheck}\n", + "with keras.saving.custom_object_scope(custom_objects):\n", + " model = keras.models.load_model('mean_keras.keras')\n", + "model.summary()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "base", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.11.4" + }, + "orig_nbformat": 4 + }, + "nbformat": 4, + "nbformat_minor": 2 +} diff --git a/zkstats/onnx2circom/keras2circom/example/MeanCheck/mean_keras.keras b/zkstats/onnx2circom/keras2circom/example/MeanCheck/mean_keras.keras new file mode 100644 index 0000000000000000000000000000000000000000..906f4eb1995e9eec063591a4bc6e75b798a11005 GIT binary patch literal 7654 zcmeHM%WD%s7@xFNYkg3qU=ftKCxyD?p&~tK#kN-JBdCXhgms#19(B80b~n`$0v;7F z{sZ276fb)8B6#=c#iKVpdeiTlnXk!CHUwK0((a_YvorI(=QrOBox$V@%J=d5{8+kKpW{s(|0YtVH@O8V4-MLKe z!uiaF^V#f7E;oJg(scfk(JDkVeZ2B5_j;Hyd=7!8lIK?HRi$UuD>ob)B?*mY;FnD3 zvz@@KG~7}M+PQ;jOAZeL%jK(rcs@ENB!ZBpK?H4POIRKHyzcU4p(^wnHb`{%2Dk@Y z51WQ=h8pYAd(&NOgtw4U*wog$%)?U63Tk`}?R)4p9A_$>ol5767_x9fbQ=}N<6%CB zyuljx1Ej5RCqQ$^@;a)pgaJki`hB}v4?^3w%jCTj9?Jmqb$8ioxIhe16y6mj zR;!4lPn0=LXp5eX`4(ozG9#t;Dgf`{dUsbtLtkion9$wE{Z4q`W3P zeNs51V5~!W#&5Y3AfjZMQT3lzz3{sLyCRmF!llG-dwuBmmDI_5uSP(}FXgf43O^JV z!m9u~b?%_Pf9LLOiVbaL^|J@0uq{2Ry>?LclU-?rZ=%!jz5|m6lC@VJC}{EPw~uWe zmhMyj8pvQIMsGU;dLB5dB)wAWj|6udFGW}T$S5gjU1o@(Pu&Z@+D-qD~tKh&23O^JFI)@QX7ieIcHSJ@ByKic}kbE7-lwW@ylM0D0eO$I@ 
z=wB+^v}aJ+dgX!1qtc_xYC@Cq?4Zu@|FK)o1GFxeC?O~_Xx$b5l36kjq_*~-ANWsD zD5OWnu^dg_dIWmq0X;wVI}bcPAtlrN>w1#*?v8+-2Wa^}()QQCeIDqA->;-s;WF9B zb~pZZ>Ya{X%42DTACgb|%fH&k-YeXa9v#PIM^Z;4P*_Y2O$^hb@4g=;>SrRbk=s(8 pRlo3Tv#!8r$Mt6?W857}n=v!eedAJC+_xVohT-=X>eHvN^&9s-Lrnkx literal 0 HcmV?d00001 diff --git a/zkstats/onnx2circom/keras2circom/example/dense/dense_keras.ipynb b/zkstats/onnx2circom/keras2circom/example/dense/dense_keras.ipynb new file mode 100644 index 0000000..20a7f37 --- /dev/null +++ b/zkstats/onnx2circom/keras2circom/example/dense/dense_keras.ipynb @@ -0,0 +1,255 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": 1, + "metadata": {}, + "outputs": [], + "source": [ + "from keras.models import load_model\n", + "import keras\n", + "import tensorflow as tf\n", + "import torch.nn as nn\n", + "from keras import layers\n", + "from keras import backend as K\n", + "import numpy as np\n" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "/Users/jernkun/anaconda3/lib/python3.11/site-packages/keras/src/layers/core/dense.py:88: UserWarning: Do not pass an `input_shape`/`input_dim` argument to a layer. When using Sequential models, prefer using an `Input(shape)` object as the first layer in the model instead.\n", + " super().__init__(activity_regularizer=activity_regularizer, **kwargs)\n" + ] + }, + { + "data": { + "text/html": [ + "
Model: \"functional_1\"\n",
+       "
\n" + ], + "text/plain": [ + "\u001b[1mModel: \"functional_1\"\u001b[0m\n" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "text/html": [ + "
┏━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━┓\n",
+       "┃ Layer (type)                     Output Shape                  Param # ┃\n",
+       "┡━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━┩\n",
+       "│ input_layer (InputLayer)        │ (None, 1, 3)           │             0 │\n",
+       "├─────────────────────────────────┼────────────────────────┼───────────────┤\n",
+       "│ dense (Dense)                   │ (None, 1, 1)           │             4 │\n",
+       "├─────────────────────────────────┼────────────────────────┼───────────────┤\n",
+       "│ dense_1 (Dense)                 │ (None, 1, 2)           │             4 │\n",
+       "└─────────────────────────────────┴────────────────────────┴───────────────┘\n",
+       "
\n" + ], + "text/plain": [ + "┏━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━┓\n", + "┃\u001b[1m \u001b[0m\u001b[1mLayer (type) \u001b[0m\u001b[1m \u001b[0m┃\u001b[1m \u001b[0m\u001b[1mOutput Shape \u001b[0m\u001b[1m \u001b[0m┃\u001b[1m \u001b[0m\u001b[1m Param #\u001b[0m\u001b[1m \u001b[0m┃\n", + "┡━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━┩\n", + "│ input_layer (\u001b[38;5;33mInputLayer\u001b[0m) │ (\u001b[38;5;45mNone\u001b[0m, \u001b[38;5;34m1\u001b[0m, \u001b[38;5;34m3\u001b[0m) │ \u001b[38;5;34m0\u001b[0m │\n", + "├─────────────────────────────────┼────────────────────────┼───────────────┤\n", + "│ dense (\u001b[38;5;33mDense\u001b[0m) │ (\u001b[38;5;45mNone\u001b[0m, \u001b[38;5;34m1\u001b[0m, \u001b[38;5;34m1\u001b[0m) │ \u001b[38;5;34m4\u001b[0m │\n", + "├─────────────────────────────────┼────────────────────────┼───────────────┤\n", + "│ dense_1 (\u001b[38;5;33mDense\u001b[0m) │ (\u001b[38;5;45mNone\u001b[0m, \u001b[38;5;34m1\u001b[0m, \u001b[38;5;34m2\u001b[0m) │ \u001b[38;5;34m4\u001b[0m │\n", + "└─────────────────────────────────┴────────────────────────┴───────────────┘\n" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "text/html": [ + "
 Total params: 8 (32.00 B)\n",
+       "
\n" + ], + "text/plain": [ + "\u001b[1m Total params: \u001b[0m\u001b[38;5;34m8\u001b[0m (32.00 B)\n" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "text/html": [ + "
 Trainable params: 8 (32.00 B)\n",
+       "
\n" + ], + "text/plain": [ + "\u001b[1m Trainable params: \u001b[0m\u001b[38;5;34m8\u001b[0m (32.00 B)\n" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "text/html": [ + "
 Non-trainable params: 0 (0.00 B)\n",
+       "
\n" + ], + "text/plain": [ + "\u001b[1m Non-trainable params: \u001b[0m\u001b[38;5;34m0\u001b[0m (0.00 B)\n" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "\n", + "K.clear_session()\n", + "def get_model():\n", + " inputs = keras.Input(shape = (1,3))\n", + " layer1 = keras.layers.Dense(1, input_shape = (1,3))(inputs)\n", + " layer2 = keras.layers.Dense(2, input_shape = (1,1))\n", + " outputs = layer2(layer1)\n", + " \n", + " model = keras.Model(inputs, outputs)\n", + " return model\n", + "\n", + "model = get_model()\n", + "model.summary()\n", + "\n", + "model.save('dense_keras.keras')\n", + "\n", + "\n" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
Model: \"functional_1\"\n",
+       "
\n" + ], + "text/plain": [ + "\u001b[1mModel: \"functional_1\"\u001b[0m\n" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "text/html": [ + "
┏━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━┓\n",
+       "┃ Layer (type)                     Output Shape                  Param # ┃\n",
+       "┡━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━┩\n",
+       "│ input_layer (InputLayer)        │ (None, 1, 3)           │             0 │\n",
+       "├─────────────────────────────────┼────────────────────────┼───────────────┤\n",
+       "│ dense (Dense)                   │ (None, 1, 1)           │             4 │\n",
+       "├─────────────────────────────────┼────────────────────────┼───────────────┤\n",
+       "│ dense_1 (Dense)                 │ (None, 1, 2)           │             4 │\n",
+       "└─────────────────────────────────┴────────────────────────┴───────────────┘\n",
+       "
\n" + ], + "text/plain": [ + "┏━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━┓\n", + "┃\u001b[1m \u001b[0m\u001b[1mLayer (type) \u001b[0m\u001b[1m \u001b[0m┃\u001b[1m \u001b[0m\u001b[1mOutput Shape \u001b[0m\u001b[1m \u001b[0m┃\u001b[1m \u001b[0m\u001b[1m Param #\u001b[0m\u001b[1m \u001b[0m┃\n", + "┡━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━┩\n", + "│ input_layer (\u001b[38;5;33mInputLayer\u001b[0m) │ (\u001b[38;5;45mNone\u001b[0m, \u001b[38;5;34m1\u001b[0m, \u001b[38;5;34m3\u001b[0m) │ \u001b[38;5;34m0\u001b[0m │\n", + "├─────────────────────────────────┼────────────────────────┼───────────────┤\n", + "│ dense (\u001b[38;5;33mDense\u001b[0m) │ (\u001b[38;5;45mNone\u001b[0m, \u001b[38;5;34m1\u001b[0m, \u001b[38;5;34m1\u001b[0m) │ \u001b[38;5;34m4\u001b[0m │\n", + "├─────────────────────────────────┼────────────────────────┼───────────────┤\n", + "│ dense_1 (\u001b[38;5;33mDense\u001b[0m) │ (\u001b[38;5;45mNone\u001b[0m, \u001b[38;5;34m1\u001b[0m, \u001b[38;5;34m2\u001b[0m) │ \u001b[38;5;34m4\u001b[0m │\n", + "└─────────────────────────────────┴────────────────────────┴───────────────┘\n" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "text/html": [ + "
 Total params: 8 (32.00 B)\n",
+       "
\n" + ], + "text/plain": [ + "\u001b[1m Total params: \u001b[0m\u001b[38;5;34m8\u001b[0m (32.00 B)\n" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "text/html": [ + "
 Trainable params: 8 (32.00 B)\n",
+       "
\n" + ], + "text/plain": [ + "\u001b[1m Trainable params: \u001b[0m\u001b[38;5;34m8\u001b[0m (32.00 B)\n" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "text/html": [ + "
 Non-trainable params: 0 (0.00 B)\n",
+       "
\n" + ], + "text/plain": [ + "\u001b[1m Non-trainable params: \u001b[0m\u001b[38;5;34m0\u001b[0m (0.00 B)\n" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "model = load_model('dense_keras.keras')\n", + "model.summary()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "base", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.11.4" + }, + "orig_nbformat": 4 + }, + "nbformat": 4, + "nbformat_minor": 2 +} diff --git a/zkstats/onnx2circom/keras2circom/example/dense/dense_keras.keras b/zkstats/onnx2circom/keras2circom/example/dense/dense_keras.keras new file mode 100644 index 0000000000000000000000000000000000000000..c516dc1eb44736a8d103fab3005d01fee6fe69c7 GIT binary patch literal 12348 zcmeHNO>7%g5PpsuFomE5RV4ff+-*TQq{enqs8XQ>DJelkVDTeGMA2qrZ{lq?yV34C zKaHaH7{s9*df>z4iMKW6=^4JhVtT&iI@)Mf z8_pGS1#Kv+0gH_m4mT-^nyqxKIaa_$06S;~ z?B$BV_Y*oa+j2}p*s$iC`n+XuGTavcf@9jc1(PkqwyuD!`Od0a)HLo#0rIx(dT#Ks zV=cJelA?}p3hZ(-#v7QAaJlv<$0w%eO7ZGgx%Zl=J?whTHayfGcU;KRas+3=p7~;L z0x((gq{u+L5M1w?Wmohj07y)>LO16_$gRU80>KT<@D|b4!j7!#!teC}pyhhHu5Q)+ zJCx*81z&N!bx?l>JfY!9L)kq1&u(tcg20zoQJf8Eg9cG>#rc0*?}$BZ?&TjvkSXyG z9u>H!$3v5k94h}PaCC+JLjq(!J)|%TYqo-0)I-Q)V||0OYH}mt!Yv((6S}pa?r=yc z77!|nC4@Pm0Zj{s^EStiIJcW}jGnJQn;!i6MtSX3#`vd~XEQv7$gP^zVm0t{)w7>Y zOy<{yyZd(tY&7 zOc!jO}t-esQ9IGYzy&2+7ey`*r{_5<%jRRKapZx^;3QDamlQ{ z4z`z{ko_cA7V%AVD&F@X(xIFhcMg;k`?ZT>H=hdI`A`}o5qj4aP|txu(rE+$^qh?!_?O9Ja5$B>M@+f202qKh-ag-Jvhy z&VhlarE2GJLz8^=&~Np=DxjVNN(mjA6V;`oN52nb&~ecwt#vLko(Wk52~pCgU$!y@Oy{WMO?PS@1M^}0ZHzo zy<3TPss&X1(sRWu;)m$-SSesh@n)O|c37%$Y#VV)d{rFpkbTFHr&za){b;xp8x*>y 
zx?~4F#;WHB(e{8C>&w0jzkTp4%lb2{|Ej1n;GPlVU0sr42Jb-9soQvRC^tTA4QD=@ z;@MwtsyQ3JBhD@a-#t?c<7~3tTM)2-b zBC%oatw!~L)Z*f=dVk$6{%)U?3K2#Rh*{G~JKOME^r>z6I0i}znt`li`SQCBgo`k+894_<@zB*NXjlMDU5>(~Oy5gn!=83J|jcZK& Y4UDM1DObAK{TQc6!{4`12Cwnf{~RVE3;+NC literal 0 HcmV?d00001 diff --git a/zkstats/onnx2circom/keras2circom/hello.txt b/zkstats/onnx2circom/keras2circom/hello.txt index b6fc4c6..88caeb4 100644 --- a/zkstats/onnx2circom/keras2circom/hello.txt +++ b/zkstats/onnx2circom/keras2circom/hello.txt @@ -1 +1 @@ -hello \ No newline at end of file +hellosd \ No newline at end of file diff --git a/zkstats/onnx2circom/keras2circom/keras2circom/circom.py b/zkstats/onnx2circom/keras2circom/keras2circom/circom.py new file mode 100644 index 0000000..5f39548 --- /dev/null +++ b/zkstats/onnx2circom/keras2circom/keras2circom/circom.py @@ -0,0 +1,389 @@ +# Ref: https://github.com/zk-ml/uchikoma/blob/main/python/uchikoma/circom.py +from __future__ import annotations + +import typing + +import os +from os import path +import json +from dataclasses import dataclass + +import re +import numpy as np + +class SafeDict(dict): + def __missing__(self, key): + return '{' + key + '}' + +# template string for circom +circom_template_string = '''pragma circom 2.0.0; + +{include} +template Model() {brace_left} +{signal} +{component} +{main} +{brace_right} + +component main = Model(); +''' + +templates: typing.Dict[str, Template] = { + +} + +def parse_shape(shape: typing.List[int]) -> str: + '''parse shape to integers enclosed by []''' + shape_str = '' + for dim in shape: + shape_str += '[{}]'.format(dim) + return shape_str + +def parse_index(shape: typing.List[int]) -> str: + '''parse shape to indices enclosed by []''' + index_str = '' + for i in range(len(shape)): + index_str += '[i{}]'.format(i) + return index_str + +@dataclass +class Template: + op_name: str + fpath: str + + args: typing.Dict[str] + + input_names: typing.List[str] = None + input_dims: typing.List[int] = None + output_names: 
typing.List[str] = None + output_dims: typing.List[int] = None + + def __str__(self) -> str: + args_str = ', '.join(self.args) + args_str = '(' + args_str + ')' + return '{:>20}{:30} {}{}{}{} \t<-- {}'.format( + self.op_name, args_str, + self.input_names, self.input_dims, + self.output_names, self.output_dims, + self.fpath) + +def file_parse(fpath): + '''parse circom file and register templates''' + with open(fpath, 'r') as f: + lines = f.read().split('\n') + + lines = [l for l in lines if not l.strip().startswith('//')] + lines = ' '.join(lines) + + lines = re.sub('/\*.*?\*/', 'IGN', lines) + + funcs = re.findall('template (\w+) ?\((.*?)\) ?\{(.*?)\}', lines) + for func in funcs: + op_name = func[0].strip() + args = func[1].split(',') + main = func[2].strip() + assert op_name not in templates, \ + 'duplicated template: {} in {} vs. {}'.format( + op_name, templates[op_name].fpath, fpath) + + signals = re.findall('signal (\w+) (\w+)(.*?);', main) + infos = [[] for i in range(4)] + for sig in signals: + sig_types = ['input', 'output'] + assert sig[0] in sig_types, sig[1] + ' | ' + main + idx = sig_types.index(sig[0]) + infos[idx*2+0].append(sig[1]) + + sig_dim = sig[2].count('[') + infos[idx*2+1].append(sig_dim) + templates[op_name] = Template( + op_name, fpath, + [a.strip() for a in args], + *infos) + + +def dir_parse(dir_path, skips=[]): + '''parse circom files in a directory''' + names = os.listdir(dir_path) + for name in names: + if name in skips: + continue + + fpath = path.join(dir_path, name) + if os.path.isdir(fpath): + dir_parse(fpath) + elif os.path.isfile(fpath): + if fpath.endswith('.circom'): + file_parse(fpath) + +@dataclass +class Signal: + name: str + shape: typing.List[int] + value: typing.Any = None + + def inject_signal(self, comp_name: str) -> str: + '''inject signal into the beginning of the circuit''' + if self.value is not None or self.name == 'out' or self.name == 'remainder': + return 'signal input {}_{}{};\n'.format( + comp_name, self.name, 
parse_shape(self.shape)) + return '' + + def inject_main(self, comp_name: str, prev_comp_name: str = None, prev_signal: Signal = None) -> str: + '''inject signal into main''' + inject_str = '' + if self.value is not None or self.name == 'out' or self.name == 'remainder': + if comp_name.endswith('softmax') and self.name == 'out': + inject_str += '{}.out <== {}_out[0];\n'.format( + comp_name, comp_name) + return inject_str + for i in range(len(self.shape)): + inject_str += '{}for (var i{} = 0; i{} < {}; i{}++) {{\n'.format( + ' '*i*4, i, i, self.shape[i], i) + if 'activation' in comp_name or 're_lu' in comp_name: + inject_str += '{}{}{}.{} <== {}_{}{};\n'.format(' '*(i+1)*4, + comp_name, parse_index(self.shape), self.name, + comp_name, self.name, parse_index(self.shape)) + else: + inject_str += '{}{}.{}{} <== {}_{}{};\n'.format(' '*(i+1)*4, + comp_name, self.name, parse_index(self.shape), + comp_name, self.name, parse_index(self.shape)) + inject_str += '}'*len(self.shape)+'\n' + return inject_str + + if self.shape != prev_signal.shape: + raise ValueError('shape mismatch: {} vs. 
{}'.format(self.shape, prev_signal.shape)) + + for i in range(len(self.shape)): + inject_str += '{}for (var i{} = 0; i{} < {}; i{}++) {{\n'.format( + ' '*i*4, i, i, self.shape[i], i) + + if 'activation' in comp_name or 're_lu' in comp_name: + inject_str += '{}{}{}.{} <== {}.{}{};\n'.format(' '*(i+1)*4, + comp_name, parse_index(self.shape), self.name, + prev_comp_name, prev_signal.name, parse_index(self.shape)) + elif 'activation' in prev_comp_name or 're_lu' in prev_comp_name: + inject_str += '{}{}.{}{} <== {}{}.{};\n'.format(' '*(i+1)*4, + comp_name, self.name, parse_index(self.shape), + prev_comp_name, parse_index(self.shape), prev_signal.name) + else: + inject_str += '{}{}.{}{} <== {}.{}{};\n'.format(' '*(i+1)*4, + comp_name, self.name, parse_index(self.shape), + prev_comp_name, prev_signal.name, parse_index(self.shape)) + inject_str += '}'*len(self.shape)+'\n' + return inject_str + + def inject_input_signal(self) -> str: + '''inject the circuit input signal''' + if self.value is not None: + raise ValueError('input signal should not have value') + return 'signal input in{};\n'.format(parse_shape(self.shape)) + + def inject_output_signal(self) -> str: + '''inject the circuit output signal''' + if self.value is not None: + raise ValueError('output signal should not have value') + return 'signal output out{};\n'.format(parse_shape(self.shape)) + + def inject_input_main(self, comp_name: str) -> str: + '''inject the circuit input signal into main''' + if self.value is not None: + raise ValueError('input signal should not have value') + inject_str = '' + for i in range(len(self.shape)): + inject_str += '{}for (var i{} = 0; i{} < {}; i{}++) {{\n'.format( + ' '*i*4, i, i, self.shape[i], i) + inject_str += '{}{}.{}{} <== in{};\n'.format(' '*(i+1)*4, + comp_name, self.name, parse_index(self.shape), + parse_index(self.shape)) + inject_str += '}'*len(self.shape)+'\n' + return inject_str + + def inject_output_main(self, prev_comp_name: str, prev_signal: Signal) -> str: + 
'''inject the circuit output signal into main''' + if self.value is not None: + raise ValueError('output signal should not have value') + if self.shape != prev_signal.shape: + raise ValueError('shape mismatch: {} vs. {}'.format(self.shape, prev_signal.shape)) + + if 'softmax' in prev_comp_name: + return 'out[0] <== {}.out;\n'.format(prev_comp_name) + + inject_str = '' + + for i in range(len(self.shape)): + inject_str += '{}for (var i{} = 0; i{} < {}; i{}++) {{\n'.format( + ' '*i*4, i, i, self.shape[i], i) + + if 're_lu' in prev_comp_name: + inject_str += '{}out{} <== {}{}.{};\n'.format(' '*(i+1)*4, + parse_index(self.shape), + prev_comp_name, parse_index(self.shape), prev_signal.name) + else: + inject_str += '{}out{} <== {}.{}{};\n'.format(' '*(i+1)*4, + parse_index(self.shape), + prev_comp_name, prev_signal.name, parse_index(self.shape)) + inject_str += '}'*len(self.shape)+'\n' + return inject_str + +@dataclass +class Component: + name: str + template: Template + inputs: typing.List[Signal] + outputs: typing.List[Signal] + # optional args + args: typing.Dict[str, typing.Any] = None + + def inject_include(self) -> str: + '''include the component template''' + return 'include "{}";\n'.format(self.template.fpath) + + def inject_signal(self, prev_comp: Component = None, last_comp: bool = False) -> str: + '''inject the component signals''' + inject_str = '' + for signal in self.inputs: + if signal.name == 'out' or signal.name == 'remainder': + inject_str += signal.inject_signal(self.name) + if last_comp is True and signal.name == 'out': + inject_str += signal.inject_output_signal() + elif signal.value is None and prev_comp is None: + inject_str += signal.inject_input_signal() + elif signal.value is not None: + inject_str += signal.inject_signal(self.name) + return inject_str + + def inject_component(self) -> str: + '''inject the component declaration''' + if self.template.op_name == 'ReLU': + for signal in self.inputs: + if signal.name == 'out': + output_signal = 
signal + break + inject_str = 'component {}{};\n'.format(self.name, parse_shape(output_signal.shape)) + for i in range(len(output_signal.shape)): + inject_str += '{}for (var i{} = 0; i{} < {}; i{}++) {{\n'.format( + ' '*i*4, i, i, output_signal.shape[i], i) + inject_str += '{}{}{} = ReLU();\n'.format(' '*(i+1)*4, + self.name, parse_index(output_signal.shape)) + inject_str += '}'*len(output_signal.shape)+'\n' + return inject_str + + return 'component {} = {}({});\n'.format( + self.name, self.template.op_name, self.parse_args(self.template.args, self.args)) + + def inject_main(self, prev_comp: Component = None, last_comp: bool = False) -> str: + '''inject the component main''' + inject_str = '' + for signal in self.inputs: + if signal.value is not None or signal.name == 'out' or signal.name == 'remainder': + inject_str += signal.inject_main(self.name) + elif prev_comp is None: + inject_str += signal.inject_input_main(self.name) + else: + for sig in prev_comp.inputs: + if sig.name == 'out': + output_signal = sig + break + if output_signal is None: + output_signal = prev_comp.outputs[0] + inject_str += signal.inject_main(self.name, prev_comp.name, output_signal) + print + if last_comp: + for signal in self.inputs: + if signal.name == 'out': + inject_str += signal.inject_output_main(self.name, signal) + break + for signal in self.outputs: + inject_str += signal.inject_output_main(self.name, signal) + return inject_str + + def to_json(self, dec: int) -> typing.Dict[str, typing.Any]: + '''convert the component params to json format''' + json_dict = {} + for signal in self.inputs: + if signal.value is not None: + if signal.name == 'bias' or signal.name == 'b': + scaling = float(10**(2*dec)) + else: + scaling = float(10**dec) + value = [str(int(v*scaling)) for v in signal.value.flatten().tolist()] + # reshape the value to match the circom shape + if len(signal.shape) > 1: + value = np.array(value).reshape(signal.shape).tolist() + 
json_dict.update({f'{self.name}_{signal.name}': value}) + return json_dict + + @staticmethod + def parse_args(template_args: typing.List[str], args: typing.Dict[str, typing.Any]) -> str: + '''parse the args to a format string, ready to be injected''' + args_str = '{'+'}, {'.join(template_args)+'}' + return args_str.format(**args) + +@dataclass +class Circuit: + components: typing.List[Component] + + def __init__(self): + self.components = [] + + def add_component(self, component: Component): + self.components.append(component) + + def add_components(self, components: typing.List[Component]): + self.components.extend(components) + + def inject_include(self) -> str: + '''inject the include statements''' + inject_str = [] + for component in self.components: + inject_str.append(component.inject_include()) + return ''.join(set(inject_str)) + + def inject_signal(self) -> str: + '''inject the signal declarations''' + # edit: make it ok for just one layer + if (len(self.components)==1): + inject_str = self.components[0].inject_signal(last_comp= True) + else: + inject_str = self.components[0].inject_signal() + for i in range(1, len(self.components)): + inject_str += self.components[i].inject_signal(self.components[i-1], i==len(self.components)-1) + return inject_str + + def inject_component(self) -> str: + '''inject the component declarations''' + inject_str = '' + for component in self.components: + inject_str += component.inject_component() + return inject_str + + def inject_main(self) -> str: + '''inject the main template''' + # edit: make it work in case 1 layer + if (len(self.components)==1): + inject_str = self.components[0].inject_main(last_comp = True) + else: + inject_str = self.components[0].inject_main() + for i in range(1, len(self.components)): + inject_str += self.components[i].inject_main(self.components[i-1], i==len(self.components)-1) + return inject_str + def to_circom(self) -> str: + '''convert the circuit to a circom file''' + return 
circom_template_string.format(**{ + 'include': self.inject_include(), + 'brace_left': '{', + 'signal': self.inject_signal(), + 'component': self.inject_component(), + 'main': self.inject_main(), + 'brace_right': '}', + }) + + def to_json(self, dec: int) -> str: + '''convert the model weights to json format''' + json_dict = {} + + for component in self.components: + json_dict.update(component.to_json(dec)) + + return json.dumps(json_dict) \ No newline at end of file diff --git a/zkstats/onnx2circom/keras2circom/keras2circom/model.py b/zkstats/onnx2circom/keras2circom/keras2circom/model.py new file mode 100644 index 0000000..ba4a2c9 --- /dev/null +++ b/zkstats/onnx2circom/keras2circom/keras2circom/model.py @@ -0,0 +1,110 @@ +# Read keras model into list of parameters like op, input, output, weight, bias +from __future__ import annotations +from dataclasses import dataclass +import typing +from tensorflow.keras.models import load_model +from tensorflow.keras.layers import Layer as KerasLayer +import numpy as np +import keras + +from zkstats.onnx2circom.onnx2keras.layers import ( + TFReciprocal, + TFSqrt, + TFExp, + TFLog, + TFReduceSum, + TFReduceMean, + TFReduceMax, + TFReduceMin, + # TFArgMax, + # TFArgMin, + # TFErf, +) + +onnx2circom_ops_raw = [ + TFLog, # log_e(n) + TFReduceSum, # sum(n) + TFReduceMean, + TFReduceMax, + TFReduceMin, + # TFArgMax, + # TFArgMin, + TFReciprocal, # 1/n + TFSqrt, # sqrt(n) + TFExp, # e^n + # TFErf, +] +onnx2circom_ops = [str(op.__name__) for op in onnx2circom_ops_raw] + +keras2circom_ops = [ + 'Activation', + 'AveragePooling2D', + 'BatchNormalization', + 'Conv2D', + 'Dense', + 'Flatten', + 'GlobalAveragePooling2D', + 'GlobalMaxPooling2D', + 'MaxPooling2D', + 'ReLU', + 'Softmax', +] + + +supported_ops = keras2circom_ops + onnx2circom_ops + + +skip_ops = [ + 'Dropout', + 'InputLayer', +] + + +# read each layer in a model and convert it to a class called Layer +@dataclass +class Layer: + ''' A single layer in a Keras model. 
''' + op: str + name: str + input: typing.List[int] + output: typing.List[int] + config: typing.Dict[str, typing.Any] + weights: typing.List[np.ndarray] + + def __init__(self, layer: KerasLayer): + self.op = layer.__class__.__name__ + self.name = layer.name + self.input = layer.input.shape[1:] + self.output = layer.output.shape[1:] + # FIXME: this only works for data shape in [1, N, 1] + # Add "nInputs" to `self.config` + shape = layer.input.shape + if len(shape) != 3 or shape[0] != 1 or shape[2] != 1: + raise Exception(f'Unsupported input shape: {self.op=}, {shape=}') + n_inputs = shape[1] + self.config = {**layer.get_config(), **{"nInputs": n_inputs}} + self.weights = layer.get_weights() + + +class Model: + layers: typing.List[Layer] + + def __init__(self, filename: str, raw: bool = False): + ''' Load a Keras model from a file. ''' + # edit : allow reading customed layer + keras.saving.get_custom_objects().clear() + # Only if the torch model name is in this custom_objects, model.summary() will print the mapped name in keras + # E.g. 
without this line, the model.summary() will print the layer name as `tf_reduce_sum (TFReduceSum)` + # with `TFReduceSum: SumCheck`, the model.summary() will print the layer name as `sum_check (SumCheck)` + custom_objects = {op.__name__: op for op in onnx2circom_ops_raw} + with keras.saving.custom_object_scope(custom_objects): + model = load_model(filename) + self.layers = [Layer(layer) for layer in model.layers if self._for_transpilation(layer.__class__.__name__)] + + @staticmethod + def _for_transpilation(op: str) -> bool: + if op in skip_ops: + return False + if op in supported_ops: + return True + raise NotImplementedError(f'Unsupported op: {op}') \ No newline at end of file diff --git a/zkstats/onnx2circom/keras2circom/keras2circom/script.py b/zkstats/onnx2circom/keras2circom/keras2circom/script.py new file mode 100644 index 0000000..379fb4a --- /dev/null +++ b/zkstats/onnx2circom/keras2circom/keras2circom/script.py @@ -0,0 +1,273 @@ +from .circom import Circuit, Component + +# template string for circuit.py +python_template_string = '''""" Make an interger-only circuit of the corresponding CIRCOM circuit. + +Usage: + circuit.py [-o ] + circuit.py (-h | --help) + +Options: + -h --help Show this screen. + -o --output= Output directory [default: output]. 
+ +""" + +from docopt import docopt +import json + +try: + from keras2circom.util import * +except: + import sys + import os + # add parent directory to sys.path + sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) + from keras2circom.util import * + +def inference(input, circuit): + out = input['in'] + output = {brackets} + +{components} + return out, output + + +def main(): + """ Main entry point of the app """ + args = docopt(__doc__) + + # parse input.json + with open(args['']) as f: + input = json.load(f) + + # parse circuit.json + with open(args['']) as f: + circuit = json.load(f) + + out, output = inference(input, circuit) + + # write output.json + with open(args['--output'] + '/output.json', 'w') as f: + json.dump(output, f) + +if __name__ == "__main__": + """ This is executed when run from the command line """ + main() +''' + + +def to_py(circuit: Circuit, dec: int) -> str: + comp_str = "" + + for component in circuit.components: + comp_str += transpile_component(component, dec) + + return python_template_string.format( + brackets="{}", + components=comp_str, + ) + +def transpile_component(component: Component, dec: int) -> str: + comp_str = "" + if component.template.op_name == "AveragePooling2D": + comp_str += " out, remainder = AveragePooling2DInt({nRows}, {nCols}, {nChannels}, {poolSize}, {strides}, {input})\n".format( + nRows=component.args["nRows"], + nCols=component.args["nCols"], + nChannels=component.args["nChannels"], + poolSize=component.args["poolSize"], + strides=component.args["strides"], + input="out" + ) + comp_str += " output['{name}_out'] = out\n".format( + name=component.name, + ) + comp_str += " output['{name}_remainder'] = remainder\n".format( + name=component.name, + ) + return comp_str+"\n" + + elif component.template.op_name == "BatchNormalization2D": + comp_str += " out, remainder = BatchNormalizationInt({nRows}, {nCols}, {nChannels}, {n}, {input}, {a}, {b})\n".format( + nRows=component.args["nRows"], + 
nCols=component.args["nCols"], + nChannels=component.args["nChannels"], + n=component.args["n"], + input="out", + a="circuit['{name}_a']".format(name=component.name), + b="circuit['{name}_b']".format(name=component.name), + ) + comp_str += " output['{name}_out'] = out\n".format( + name=component.name, + ) + comp_str += " output['{name}_remainder'] = remainder\n".format( + name=component.name, + ) + return comp_str+"\n" + + elif component.template.op_name == "Conv1D": + comp_str += " out, remainder = Conv1DInt({nInputs}, {nChannels}, {nFilters}, {kernelSize}, {strides}, {n}, {input}, {weights}, {bias})\n".format( + nInputs=component.args["nInputs"], + nChannels=component.args["nChannels"], + nFilters=component.args["nFilters"], + kernelSize=component.args["kernelSize"], + strides=component.args["strides"], + n=component.args["n"], + input="out", + weights="circuit['{name}_weights']".format(name=component.name), + bias="circuit['{name}_bias']".format(name=component.name), + ) + comp_str += " output['{name}_out'] = out\n".format( + name=component.name, + ) + comp_str += " output['{name}_remainder'] = remainder\n".format( + name=component.name, + ) + return comp_str+"\n" + + elif component.template.op_name == "Conv2D": + comp_str += " out, remainder = Conv2DInt({nRows}, {nCols}, {nChannels}, {nFilters}, {kernelSize}, {strides}, {n}, {input}, {weights}, {bias})\n".format( + nRows=component.args["nRows"], + nCols=component.args["nCols"], + nChannels=component.args["nChannels"], + nFilters=component.args["nFilters"], + kernelSize=component.args["kernelSize"], + strides=component.args["strides"], + n=component.args["n"], + input="out", + weights="circuit['{name}_weights']".format(name=component.name), + bias="circuit['{name}_bias']".format(name=component.name), + ) + comp_str += " output['{name}_out'] = out\n".format( + name=component.name, + ) + comp_str += " output['{name}_remainder'] = remainder\n".format( + name=component.name, + ) + return comp_str+"\n" + + elif 
component.template.op_name == "Dense": + comp_str += " out, remainder = DenseInt({nInputs}, {nOutputs}, {n}, {input}, {weights}, {bias})\n".format( + nInputs=component.args["nInputs"], + nOutputs=component.args["nOutputs"], + n=component.args["n"], + input="out", + weights="circuit['{name}_weights']".format(name=component.name), + bias="circuit['{name}_bias']".format(name=component.name), + ) + comp_str += " output['{name}_out'] = out\n".format( + name=component.name, + ) + comp_str += " output['{name}_remainder'] = remainder\n".format( + name=component.name, + ) + return comp_str+"\n" + # edit: add MeanCheck + elif component.template.op_name == "TFReduceMean": + comp_str += " out = TFReduceMeanInt({nInputs}, {input})\n".format( + nInputs=component.args["nInputs"], + input="out", + ) + comp_str += " output['{name}_out'] = out\n".format( + name=component.name, + ) + + return comp_str+"\n" + + elif component.template.op_name == "TFReduceSum": + comp_str += " out = TFReduceSumInt({nInputs}, {input})\n".format( + nInputs=component.args["nInputs"], + input="out", + ) + comp_str += " output['{name}_out'] = out\n".format( + name=component.name, + ) + + return comp_str+"\n" + + elif component.template.op_name == "TFLog": + comp_str += " out = TFLogInt({e}, {input})\n".format( + e=component.args["e"], + input="out", + ) + comp_str += " output['{name}_out'] = out\n".format( + name=component.name, + ) + + return comp_str+"\n" + + elif component.template.op_name == "GlobalAveragePooling2D": + comp_str += " out, remainder = GlobalAveragePooling2DInt({nRows}, {nCols}, {nChannels}, {input})\n".format( + nRows=component.args["nRows"], + nCols=component.args["nCols"], + nChannels=component.args["nChannels"], + input="out" + ) + comp_str += " output['{name}_out'] = out\n".format( + name=component.name, + ) + comp_str += " output['{name}_remainder'] = remainder\n".format( + name=component.name, + ) + return comp_str+"\n" + + elif component.template.op_name == "GlobalMaxPooling2D": + 
comp_str += " out = GlobalMaxPooling2DInt({nRows}, {nCols}, {nChannels}, {input})\n".format( + nRows=component.args["nRows"], + nCols=component.args["nCols"], + nChannels=component.args["nChannels"], + input="out" + ) + comp_str += " output['{name}_out'] = out\n".format( + name=component.name, + ) + return comp_str+"\n" + + elif component.template.op_name == "MaxPooling2D": + comp_str += " out = MaxPooling2DInt({nRows}, {nCols}, {nChannels}, {poolSize}, {strides}, {input})\n".format( + nRows=component.args["nRows"], + nCols=component.args["nCols"], + nChannels=component.args["nChannels"], + poolSize=component.args["poolSize"], + strides=component.args["strides"], + input="out" + ) + comp_str += " output['{name}_out'] = out\n".format( + name=component.name, + ) + return comp_str+"\n" + + elif component.template.op_name == "Flatten2D": + comp_str += " out = Flatten2DInt({nRows}, {nCols}, {nChannels}, {input})\n".format( + nRows=component.args["nRows"], + nCols=component.args["nCols"], + nChannels=component.args["nChannels"], + input="out" + ) + comp_str += " output['{name}_out'] = out\n".format( + name=component.name, + ) + return comp_str+"\n" + + elif component.template.op_name == "ReLU": + nRows, nCols, nChannels = component.inputs[0].shape + comp_str += " out = ReLUInt({nRows}, {nCols}, {nChannels}, {input})\n".format( + nRows=nRows, + nCols=nCols, + nChannels=nChannels, + input="out" + ) + comp_str += " output['{name}_out'] = out\n".format( + name=component.name, + ) + return comp_str+"\n" + + elif component.template.op_name == "ArgMax": + comp_str += " out = ArgMaxInt(out)\n" + comp_str += " output['{name}_out'] = out\n".format( + name=component.name, + ) + return comp_str+"\n" + + else: + raise ValueError("Unknown component type: {}".format(component.template.op_name)) \ No newline at end of file diff --git a/zkstats/onnx2circom/keras2circom/keras2circom/transpiler.py b/zkstats/onnx2circom/keras2circom/keras2circom/transpiler.py new file mode 100644 index 
from .circom import *
from .model import *
from .script import *

import os


def transpile(filename: str, output_dir: str = 'output', raw: bool = False, dec: int = 18) -> 'Circuit':
    '''Transpile a Keras model to a CIRCOM circuit.

    Writes ``circuit.circom``, ``circuit.json`` and ``circuit.py`` into
    *output_dir* and returns the assembled Circuit.

    Args:
        filename: path of the Keras model file.
        output_dir: directory the generated artifacts are written to.
        raw: emit the raw model outputs instead of a trailing ArgMax.
        dec: number of decimals of fixed-point precision (scale 10**dec).
    '''
    model = Model(filename, raw)

    circuit = Circuit()
    # Every layer but the last is transpiled normally; the final layer is
    # flagged so a terminal softmax can be lowered to an ArgMax component.
    for layer in model.layers[:-1]:
        circuit.add_components(transpile_layer(layer, dec))
    circuit.add_components(transpile_layer(model.layers[-1], dec, True))

    if raw:
        # Raw mode exposes the model outputs themselves, so drop an ArgMax.
        if circuit.components[-1].template.op_name == 'ArgMax':
            circuit.components.pop()

    # exist_ok avoids a race between the check and the creation
    os.makedirs(output_dir, exist_ok=True)

    with open(output_dir + '/circuit.circom', 'w') as f:
        f.write(circuit.to_circom())

    with open(output_dir + '/circuit.json', 'w') as f:
        f.write(circuit.to_json(int(dec)))

    with open(output_dir + '/circuit.py', 'w') as f:
        f.write(to_py(circuit, int(dec)))

    return circuit


def transpile_layer(layer: 'Layer', dec: int = 18, last: bool = False) -> 'typing.List[Component]':
    '''Transpile a single Keras layer to CIRCOM component(s).

    Raises:
        ValueError: if softmax appears anywhere but the last layer.
        NotImplementedError: for unsupported layers or activations.
    '''
    if layer.op == 'Activation':
        if layer.config['activation'] == 'softmax':
            if last:
                return transpile_ArgMax(layer)
            raise ValueError('Softmax must be the last layer')
        if layer.config['activation'] == 'relu':
            return transpile_ReLU(layer)
        if layer.config['activation'] == 'linear':
            # linear activation is the identity: nothing to emit
            return []
        raise NotImplementedError(f'Activation {layer.config["activation"]} not implemented')

    if layer.op == 'Softmax':
        if last:
            return transpile_ArgMax(layer)
        raise ValueError('Softmax must be the last layer')

    if layer.op == 'ReLU':
        return transpile_ReLU(layer)

    if layer.op == 'AveragePooling2D':
        return transpile_AveragePooling2D(layer)

    if layer.op == 'BatchNormalization':
        return transpile_BatchNormalization2D(layer, dec)

    if layer.op == 'Conv2D':
        return transpile_Conv2D(layer, dec)

    if layer.op == 'Dense':
        return transpile_Dense(layer, dec, last)

    if layer.op == 'Flatten':
        return transpile_Flatten2D(layer)

    if layer.op == 'GlobalAveragePooling2D':
        return transpile_GlobalAveragePooling2D(layer)

    if layer.op == 'GlobalMaxPooling2D':
        return transpile_GlobalMaxPooling2D(layer)

    if layer.op == 'MaxPooling2D':
        return transpile_MaxPooling2D(layer)

    if layer.op == 'TFReduceMean':
        return transpile_TFReduceMean(layer)

    if layer.op == 'TFReduceSum':
        return transpile_TFReduceSum(layer)

    if layer.op == 'TFLog':
        return transpile_TFLog(layer)

    raise NotImplementedError(f'Layer {layer.op} is not supported yet.')


def transpile_ArgMax(layer: 'Layer') -> 'typing.List[Component]':
    '''ArgMax over the layer's (flat) output vector; emits a single index.'''
    return [Component(layer.name, templates['ArgMax'], [Signal('in', layer.output), Signal('out', (1,))], [], {'n': layer.output[0]})]


def transpile_ReLU(layer: 'Layer') -> 'typing.List[Component]':
    '''Element-wise ReLU; output shape equals input shape.'''
    return [Component(layer.name, templates['ReLU'], [Signal('in', layer.output), Signal('out', layer.output)], [])]


def transpile_TFReduceMean(layer: 'Layer') -> 'typing.List[Component]':
    '''Reduce-mean over a flat input vector to a scalar output.'''
    return [Component(layer.name, templates['TFReduceMean'], [Signal('in', layer.input), Signal('out', (1,))], [], {'nInputs': layer.config['nInputs']})]


def transpile_TFReduceSum(layer: 'Layer') -> 'typing.List[Component]':
    '''Reduce-sum over a flat input vector to a scalar output.'''
    return [Component(layer.name, templates['TFReduceSum'], [Signal('in', layer.input), Signal('out', (1,))], [], {'nInputs': layer.config['nInputs']})]


def transpile_TFLog(layer: 'Layer') -> 'typing.List[Component]':
    '''Natural-log layer; currently hard-codes base parameter e=2 (TODO confirm).'''
    print(f"!@# layer.output={layer.output}")
    return [Component(layer.name, templates['TFLog'], [Signal('in', layer.input), Signal('out', (1,))], [], {'e': 2})]


def transpile_AveragePooling2D(layer: 'Layer') -> 'typing.List[Component]':
    '''Average pooling; only channels-last, valid padding, square pool/stride.'''
    if layer.config['data_format'] != 'channels_last':
        raise NotImplementedError('Only data_format="channels_last" is supported')
    if layer.config['padding'] != 'valid':
        raise NotImplementedError('Only padding="valid" is supported')
    if layer.config['pool_size'][0] != layer.config['pool_size'][1]:
        raise NotImplementedError('Only pool_size[0] == pool_size[1] is supported')
    if layer.config['strides'][0] != layer.config['strides'][1]:
        raise NotImplementedError('Only strides[0] == strides[1] is supported')

    return [Component(layer.name, templates['AveragePooling2D'], [Signal('in', layer.input), Signal('out', layer.output), Signal('remainder', layer.output)], [], {
        'nRows': layer.input[0],
        'nCols': layer.input[1],
        'nChannels': layer.input[2],
        'poolSize': layer.config['pool_size'][0],
        'strides': layer.config['strides'][0],
    })]


def transpile_BatchNormalization2D(layer: 'Layer', dec: int) -> 'typing.List[Component]':
    '''Fold BN weights into an affine transform a*x + b in fixed point.'''
    if len(layer.input) != 3:
        raise NotImplementedError('Only 2D inputs are supported')
    if layer.config['axis'][0] != 3:
        raise NotImplementedError('Only axis=3 is supported')
    if layer.config['center'] != True:
        raise NotImplementedError('Only center=True is supported')
    if layer.config['scale'] != True:
        raise NotImplementedError('Only scale=True is supported')

    gamma = layer.weights[0]
    beta = layer.weights[1]
    moving_mean = layer.weights[2]
    moving_var = layer.weights[3]
    epsilon = layer.config['epsilon']

    # Fold (gamma, beta, mean, var, eps) into a single affine pair (a, b).
    a = gamma / (moving_var + epsilon) ** .5
    b = beta - gamma * moving_mean / (moving_var + epsilon) ** .5

    return [Component(layer.name, templates['BatchNormalization2D'], [
        Signal('in', layer.input),
        Signal('a', a.shape, a),
        Signal('b', b.shape, b),
        Signal('out', layer.output),
        Signal('remainder', layer.output),
    ], [], {
        'nRows': layer.input[0],
        'nCols': layer.input[1],
        'nChannels': layer.input[2],
        # f-string instead of '10**'+dec: string concatenation raised a
        # TypeError whenever dec was passed as the annotated int default.
        'n': f'10**{dec}',
    })]


def transpile_Conv2D(layer: 'Layer', dec: int) -> 'typing.List[Component]':
    '''2D convolution; square kernels/strides, no dilation/groups, optional ReLU.'''
    if layer.config['data_format'] != 'channels_last':
        raise NotImplementedError('Only data_format="channels_last" is supported')
    if layer.config['padding'] != 'valid':
        raise NotImplementedError('Only padding="valid" is supported')
    if layer.config['strides'][0] != layer.config['strides'][1]:
        raise NotImplementedError('Only strides[0] == strides[1] is supported')
    if layer.config['kernel_size'][0] != layer.config['kernel_size'][1]:
        raise NotImplementedError('Only kernel_size[0] == kernel_size[1] is supported')
    if layer.config['dilation_rate'][0] != 1:
        raise NotImplementedError('Only dilation_rate[0] == 1 is supported')
    if layer.config['dilation_rate'][1] != 1:
        raise NotImplementedError('Only dilation_rate[1] == 1 is supported')
    if layer.config['groups'] != 1:
        raise NotImplementedError('Only groups == 1 is supported')
    if layer.config['activation'] not in ['linear', 'relu']:
        raise NotImplementedError(f'Activation {layer.config["activation"]} is not supported')

    if layer.config['use_bias'] == False:
        # Circuit always expects a bias signal; synthesize zeros.
        layer.weights.append(np.zeros(layer.weights[0].shape[-1]))

    conv = Component(layer.name, templates['Conv2D'], [
        Signal('in', layer.input),
        Signal('weights', layer.weights[0].shape, layer.weights[0]),
        Signal('bias', layer.weights[1].shape, layer.weights[1]),
        Signal('out', layer.output),
        Signal('remainder', layer.output),
    ], [], {
        'nRows': layer.input[0],
        'nCols': layer.input[1],
        'nChannels': layer.input[2],
        'nFilters': layer.config['filters'],
        'kernelSize': layer.config['kernel_size'][0],
        'strides': layer.config['strides'][0],
        # see transpile_BatchNormalization2D: '10**'+dec broke for int dec
        'n': f'10**{dec}',
    })

    if layer.config['activation'] == 'relu':
        activation = Component(layer.name + '_re_lu', templates['ReLU'], [Signal('in', layer.output), Signal('out', layer.output)], [])
        return [conv, activation]

    return [conv]


def transpile_Dense(layer: 'Layer', dec: int, last: bool = False) -> 'typing.List[Component]':
    '''Fully-connected layer; softmax allowed only on the last layer.'''
    if not last and layer.config['activation'] == 'softmax':
        raise NotImplementedError('Softmax is only supported as last layer')
    if layer.config['activation'] not in ['linear', 'relu', 'softmax']:
        raise NotImplementedError(f'Activation {layer.config["activation"]} is not supported')
    if layer.config['use_bias'] == False:
        # Circuit always expects a bias signal; synthesize zeros.
        layer.weights.append(np.zeros(layer.weights[0].shape[-1]))

    dense = Component(layer.name, templates['Dense'], [
        Signal('in', layer.input),
        Signal('weights', layer.weights[0].shape, layer.weights[0]),
        Signal('bias', layer.weights[1].shape, layer.weights[1]),
        Signal('out', layer.output),
        Signal('remainder', layer.output),
    ], [], {
        'nInputs': layer.input[0],
        'nOutputs': layer.output[0],
        # see transpile_BatchNormalization2D: '10**'+dec broke for int dec
        'n': f'10**{dec}',
    })

    if layer.config['activation'] == 'relu':
        activation = Component(layer.name + '_re_lu', templates['ReLU'], [Signal('in', layer.output), Signal('out', layer.output)], [])
        return [dense, activation]

    if layer.config['activation'] == 'softmax':
        # Softmax is monotone, so in the circuit it reduces to an ArgMax.
        activation = Component(layer.name + '_softmax', templates['ArgMax'], [Signal('in', layer.output), Signal('out', (1,))], [], {'n': layer.output[0]})
        return [dense, activation]

    return [dense]


def transpile_Flatten2D(layer: 'Layer') -> 'typing.List[Component]':
    '''Flatten an HxWxC tensor to a vector.'''
    if len(layer.input) != 3:
        raise NotImplementedError('Only 2D inputs are supported')

    return [Component(layer.name, templates['Flatten2D'], [
        Signal('in', layer.input),
        Signal('out', layer.output),
    ], [], {
        'nRows': layer.input[0],
        'nCols': layer.input[1],
        'nChannels': layer.input[2],
    })]


def transpile_GlobalAveragePooling2D(layer: 'Layer') -> 'typing.List[Component]':
    '''Per-channel mean over spatial dims; channels-last, keepdims=False only.'''
    if layer.config['data_format'] != 'channels_last':
        raise NotImplementedError('Only data_format="channels_last" is supported')
    if layer.config['keepdims']:
        raise NotImplementedError('Only keepdims=False is supported')

    return [Component(layer.name, templates['GlobalAveragePooling2D'], [
        Signal('in', layer.input),
        Signal('out', layer.output),
        Signal('remainder', layer.output),
    ], [], {
        'nRows': layer.input[0],
        'nCols': layer.input[1],
        'nChannels': layer.input[2],
    })]
def transpile_GlobalMaxPooling2D(layer: 'Layer') -> 'typing.List[Component]':
    '''Per-channel max over spatial dims; channels-last, keepdims=False only.'''
    if layer.config['data_format'] != 'channels_last':
        raise NotImplementedError('Only data_format="channels_last" is supported')
    if layer.config['keepdims']:
        raise NotImplementedError('Only keepdims=False is supported')

    return [Component(layer.name, templates['GlobalMaxPooling2D'], [
        Signal('in', layer.input),
        Signal('out', layer.output),
    ], [], {
        'nRows': layer.input[0],
        'nCols': layer.input[1],
        'nChannels': layer.input[2],
    })]


def transpile_MaxPooling2D(layer: 'Layer') -> 'typing.List[Component]':
    '''Max pooling; only channels-last, valid padding, square pool/stride.'''
    if layer.config['data_format'] != 'channels_last':
        raise NotImplementedError('Only data_format="channels_last" is supported')
    if layer.config['padding'] != 'valid':
        raise NotImplementedError('Only padding="valid" is supported')
    if layer.config['pool_size'][0] != layer.config['pool_size'][1]:
        raise NotImplementedError('Only pool_size[0] == pool_size[1] is supported')
    if layer.config['strides'][0] != layer.config['strides'][1]:
        raise NotImplementedError('Only strides[0] == strides[1] is supported')

    return [Component(layer.name, templates['MaxPooling2D'], [Signal('in', layer.input), Signal('out', layer.output)], [], {
        'nRows': layer.input[0],
        'nCols': layer.input[1],
        'nChannels': layer.input[2],
        'poolSize': layer.config['pool_size'][0],
        'strides': layer.config['strides'][0],
    })]


# --- keras2circom/util.py ---------------------------------------------------
# Reference (Python) implementations of the circuit operations.  Inputs are
# assumed to be (nested lists of) decimal digit strings unless noted; the
# integer ops return (out, remainder) so quotient and remainder can both be
# checked against the circuit's witness.
import torch


def AveragePooling2DInt(nRows, nCols, nChannels, poolSize, strides, input):
    '''Integer average pooling: quotient strings plus per-cell remainders.'''
    outRows = (nRows - poolSize) // strides + 1
    outCols = (nCols - poolSize) // strides + 1
    out = [[[0 for _ in range(nChannels)] for _ in range(outCols)] for _ in range(outRows)]
    remainder = [[[None for _ in range(nChannels)] for _ in range(outCols)] for _ in range(outRows)]
    for i in range(outRows):
        for j in range(outCols):
            for k in range(nChannels):
                for x in range(poolSize):
                    for y in range(poolSize):
                        out[i][j][k] += int(input[i*strides + x][j*strides + y][k])
                # divide by the window area, keeping the remainder exact
                remainder[i][j][k] = str(out[i][j][k] % poolSize**2)
                out[i][j][k] = str(out[i][j][k] // poolSize**2)
    return out, remainder


def BatchNormalizationInt(nRows, nCols, nChannels, n, X_in, a_in, b_in):
    '''Fixed-point affine a*x + b per channel; n is the 10**dec scale.'''
    out = [[[None for _ in range(nChannels)] for _ in range(nCols)] for _ in range(nRows)]
    remainder = [[[None for _ in range(nChannels)] for _ in range(nCols)] for _ in range(nRows)]
    for i in range(nRows):
        for j in range(nCols):
            for k in range(nChannels):
                out[i][j][k] = int(X_in[i][j][k]) * int(a_in[k]) + int(b_in[k])
                remainder[i][j][k] = str(out[i][j][k] % n)
                out[i][j][k] = str(out[i][j][k] // n)
    return out, remainder


def Conv1DInt(nInputs, nChannels, nFilters, kernelSize, strides, n, input, weights, bias):
    '''Integer 1D convolution with bias; rescaled by n with exact remainder.'''
    outLen = (nInputs - kernelSize) // strides + 1
    out = [[0 for _ in range(nFilters)] for _ in range(outLen)]
    remainder = [[None for _ in range(nFilters)] for _ in range(outLen)]
    for i in range(outLen):
        for j in range(nFilters):
            for k in range(kernelSize):
                for l in range(nChannels):
                    out[i][j] += int(input[i*strides + k][l]) * int(weights[k][l][j])
            out[i][j] += int(bias[j])
            remainder[i][j] = str(out[i][j] % n)
            out[i][j] = str(out[i][j] // n)
    return out, remainder


def Conv2DInt(nRows, nCols, nChannels, nFilters, kernelSize, strides, n, input, weights, bias):
    '''Integer 2D convolution with bias; rescaled by n with exact remainder.'''
    outRows = (nRows - kernelSize) // strides + 1
    outCols = (nCols - kernelSize) // strides + 1
    out = [[[0 for _ in range(nFilters)] for _ in range(outCols)] for _ in range(outRows)]
    remainder = [[[None for _ in range(nFilters)] for _ in range(outCols)] for _ in range(outRows)]
    for i in range(outRows):
        for j in range(outCols):
            for m in range(nFilters):
                for k in range(nChannels):
                    for x in range(kernelSize):
                        for y in range(kernelSize):
                            out[i][j][m] += int(input[i*strides + x][j*strides + y][k]) * int(weights[x][y][k][m])
                out[i][j][m] += int(bias[m])
                remainder[i][j][m] = str(out[i][j][m] % n)
                out[i][j][m] = str(out[i][j][m] // n)
    return out, remainder


def DenseInt(nInputs, nOutputs, n, input, weights, bias):
    '''Integer matrix-vector product with bias; rescaled by n with remainder.'''
    out = [0 for _ in range(nOutputs)]
    remainder = [None for _ in range(nOutputs)]
    for j in range(nOutputs):
        for i in range(nInputs):
            out[j] += int(input[i]) * int(weights[i][j])
        out[j] += int(bias[j])
        remainder[j] = str(out[j] % n)
        out[j] = str(out[j] // n)
    return out, remainder


# edit: add MeanCheck
def TFReduceMeanInt(nInputs, input):
    '''Mean of the first nInputs entries; returns a float (true division).'''
    result = 0
    print("TFReduceMeanInt: nInputs=", nInputs)
    for i in range(nInputs):
        result += int(input[i])
    return result / nInputs


def TFReduceSumInt(nInputs, input):
    '''Integer sum of the first nInputs entries.'''
    result = 0
    print("TFReduceSumInt: nInputs=", nInputs)
    for i in range(nInputs):
        result += int(input[i])
    return result


def TFLogInt(e, input):
    '''Natural log via torch; the base parameter e is currently unused.'''
    print(f"TFLogInt: {e=}, {input=}")
    # TODO: now return the first item only
    return torch.log(torch.Tensor(input)).tolist()[0]


def GlobalAveragePooling2DInt(nRows, nCols, nChannels, input):
    '''Per-channel spatial mean; quotient strings plus remainders.'''
    out = [0 for _ in range(nChannels)]
    remainder = [None for _ in range(nChannels)]
    for k in range(nChannels):
        for i in range(nRows):
            for j in range(nCols):
                out[k] += int(input[i][j][k])
        remainder[k] = str(out[k] % (nRows * nCols))
        out[k] = str(out[k] // (nRows * nCols))
    return out, remainder


def GlobalMaxPooling2DInt(nRows, nCols, nChannels, input):
    '''Per-channel spatial max, returned as ints.'''
    return [max(int(input[i][j][k]) for i in range(nRows) for j in range(nCols))
            for k in range(nChannels)]


def MaxPooling2DInt(nRows, nCols, nChannels, poolSize, strides, input):
    '''Max pooling; window maxima returned as digit strings.'''
    return [[[str(max(int(input[i*strides + x][j*strides + y][k])
                      for x in range(poolSize) for y in range(poolSize)))
              for k in range(nChannels)]
             for j in range((nCols - poolSize) // strides + 1)]
            for i in range((nRows - poolSize) // strides + 1)]


def Flatten2DInt(nRows, nCols, nChannels, input):
    '''Row-major flatten of an HxWxC tensor to a list of digit strings.'''
    return [str(int(input[i][j][k]))
            for i in range(nRows) for j in range(nCols) for k in range(nChannels)]


def ReLUInt(nRows, nCols, nChannels, input):
    '''Element-wise max(x, 0), returned as digit strings.'''
    return [[[str(max(int(input[i][j][k]), 0)) for k in range(nChannels)]
             for j in range(nCols)]
            for i in range(nRows)]


def ArgMaxInt(input):
    '''Index of the (first) maximum element, comparing values as ints.

    The previous implementation matched str(max(...)) back against the list,
    which raised ValueError for non-canonical digit strings such as '007';
    selecting the index by integer key gives the same first-max tie-breaking
    on canonical input and is robust otherwise.
    '''
    return [max(range(len(input)), key=lambda i: int(input[i]))]
+ +""" +from docopt import docopt + +from keras2circom import circom, transpiler + +def main(): + """ Main entry point of the app """ + args = docopt(__doc__) + circom.dir_parse('node_modules/circomlib-ml/circuits/', skips=['util.circom', 'circomlib-matrix', 'circomlib', 'crypto']) + transpiler.transpile(args[''], args['--output'], args['--raw'], args['--decimals']) + +if __name__ == "__main__": + """ This is executed when run from the command line """ + main() \ No newline at end of file diff --git a/zkstats/onnx2circom/keras2circom/models/model.h5 b/zkstats/onnx2circom/keras2circom/models/model.h5 new file mode 100644 index 0000000000000000000000000000000000000000..bea33285b248693fc1e7710be77c475cfb1d3e9b GIT binary patch literal 62520 zcmeHw2V4}p^Z0=@r7DVuD2QM|5KxfYWG{ybirBDYr8lKXv7%x_P!v#_2#Tm!P%K#B zc5jbnZ>Xr)u_5-Z*#CQXyH7Yp-}C>z@B6+#xsQ@fGMUUwW+sztb~o11zMpzCy=ENo zPep~Jz|oRivVXoXKkh4O37co@-IxnGCXHdzE%MR}IgS!XT&K&FYcco@n@9_snT8{W zJ2-G?NESbvGR-)gF48L&&mZND7r%uB936T)iy3ucxV2=`BaEIgSx#WEmv?~cbZ>uO zzwj`+FgPUKKhS@ccW9k5F)XXEj9RhHze$@j`YNl}Y8*!itQ<#!(~_gYnc^Ml9_BjD zJ2cEcIEb!kBPS{z;1@^=ceDNrfC;D8}dE?m5Lf$m^$gB!iaTgS6Ckk~e(v|ctiy0WHKag+GAE=m1;T@$3$8VPOB7w;_CjYl3 z!1@C>MqjqD>@JXA$kAFRu446!wc|7=X??*U7OqJ>I94D1Mo8;pq{$zDKkw zZe`islEY!$tTa&IaKgQV!h%D60)nSY+cDEQN8&%|qS;2C0q$X8u0igB-bTRCs9!{o zXE^N|-2;rw42`7DgD#(K!~#ehmAEdzeJ1Ti>ALZhlqO|9~2P~U}o5ZEB$XZkw)zmK2uU>MAPja-m`}oO<0I~XqdQxk9$CvH(e0w z?(6IARi}WZP>SEmXbue|TSgc82YCcX1bMjz(KC>Q{E0N6EDL>ugQoScmm!};MoVQ{ z4=-`0#LK#S1c>Ph4~-C$^*!2s`~$+Jva+Tj=qV_Op8LY+*-p%VPcy@wVy45wL;V?w ztjr80atLwv@}hk>4P`XVJ-}bmrI&lSyQ@!dXrOzzxZKmvJ&48@=1PxZxR@j_{{VOK z0J?^{hfDCY$b3VCBSORs(*;KE^dL`@z=@Fr_y>8rO9m?<%-hw&-(3vG%9$aTo=w92 zX(H$;ny!?USO0+E(BSY^T!Beox!_i0xvCcuZ;eN=Jp36OEe=1|-5}HBV|lL9&p4QVrc__)8RC zqHck~f!;yk5rH&*%kCIP_valF<{uCoBq^}86l;km4IvqBMz_N}-Rr8lhc~T?zc!G* z?ty_nYdbWEw$FNUkmx{Q@HBB;6X@+8^oyM{7^cx0*M?$_1|ao$E*x?pWz%FO#3n42EBHm`wZ6*2Ij9gZ*iMHoOKU+mJ&Pb?|J4r4x9!1 zp|h?#xZe=>J;Bry-H+wb_viDM#4r8XLH#;|(ew15n88>9d{0h46bg;~Z=1mwuhlq# 
z$;@nZ6;J9aq`L4uos(6}AFJJP??I#LyX-%FNvO9g?Fs(88xeO*2R#wu7>1dSgS>s| z$#I&uYgj>>UTtv`;tt848t7aXKP0xj4!9L-%T8lfX6K6MiDanF>w#NGLcij>HV-UVK-e1+B z?kG>Br7VrVzb;f|Bl3x2px}sbag<&citSE}j}F-9(CcLJo|yP2*(amZ*+v1uVd8N% z@}$>UzQLjXbZqSENiSQ%=%6_?IAo@Hm;=4(MV+T4=>BGvJeUq(2TSV5gt~`>e6v_) zV}5r}Pcf?Rt}DgCdOZSY;D}In&zVMZ=%ohTO1i=7?L~*nEG3LH6_d~IwSIN6U$+w*d%4T5@K{pWssMU5DcO8v)}XXkwzRU8{;X}SF$6|1uwvuGInB-^M%gTmpW!*!(7LnBUkkrz3Vq8`|xVF~4EPIMEeO(k1SJ zVZNd6f9wy;k&GgT-qn=;*nJ*$5@7dH*m{Wzq(Rwjpyo`wZ=8z^J{F!)N={Rzp2aIs zExL@|pJCIloP>Komkjkn{nDP?O6Wm&_xSLO1~JqJp=b0`WazLks2^??6^qq)fEQQw;=<^ z>PHChEcj9$##(BR79AhyutHR^!YLIk6kabsbpQv43>E9 zCK(ncFJ6;N)+bC}y!Ml{WAfr_$-14%i(_rcC6gELC5ZpC${@xqS!Xi!;=b$lFD|YN zFAmKcKF9Xsb7ytw6t9iHo;z#keBF=n)s>4DjZb=>jMLzGG8&}u_Zv$<>IW5A)#q4B z6rDZ0$>G$PN`>RXl(T-YTn4UyIR{qBEL+*tYk`%tJ?k&4m^{Z?TJFfmepkQxaFtBH zmIYwo{;dh?2W92;)t<%YQgu1f4dkzmu(=_hk-Z(qT9sq{g*i|r$M`{6ejDvsRw9l0 zx)NyU{ADd`kPI+ukIr=)|9@Pw`Y-DT{%-wm=$z*a#-D6qc#zsj!_QSz*-8P$b9v+M z_mhCEoftAAFl6+ImB@Ft^d(iS{cu^pKbf+2B0CSNabg&}-=8Ob7B4g5_aFCFCHE&7 zg2`ukQzEi?r0+h{3}v+y~I)()%V9_vHI5q`!O7=zX|p~hGYF`mcx(X{_V9{8!GuD zA{6Z&>d4!zjeNq-NNmTS5`JD-NF(i)=9=lJtn)#6XKE&`&o1Nr! z@EOF@SJU9N)@eZw->?Tc4t`F)n$Z5R0S22n+tw#a7YMap?>6!l?+J8wvb zj+AXuBAOtuL3f%aqD4CQsAcJ8RP#6OkioV+D9&Li?)1=}dU1Llug|TKXla|dXi9Y? z)wyI4)z)Vgl`#E`aGjkG@22HDG;z)q;pH+v{>}CY!i)zMqD^g=kxN6*Q}5Tsp`@1? 
zAZ&;mzx%%Cl!9Lz@_1l{QgziS>#LEX>=sLTDFGCZJH$#f_nHy)B;p!XbgmZ7vK_`N zdp#CCJFiA*EIv%Z08O-eq!~({=t!A7TuoK4eu@U4Swodfz)+513>xNrMEDX_p!lcZ z!ZZARqDrfAsCjBMy0g0()#_F|)U8KH{+DO7NO@m*6yIVrDj$ldt2^IN25H&oYWzuF zliSTiq8aX#-J3&X9yCNjshfBc3ODf%k)O$>_m)%3=3644_V=m7$xDUCxeKVw)t`BT zCl5hKbUyLCIGMa*-cFSIp#G@SenVk*hi;+~${*4EK{L_*7rTUBYtQkULzHswA55kw?Amn4tFbxN@}4MISA92tXN~yCZqmINqr@I-(K%b|L5SO8f=Bn^C8bJLp=-1zz}Df=V~* zC9G~sipF%xp^}3-Q~nE;_zK4@sFSBW(apCl_)5YY-WbhlDx}>8G|u@9S`~4gQX8fq z((AO1T9w}wHEpBJhj%>D_OVfDpzBJ}*mw?l2i4KR7dU^(({5;}ax^vM+GXLr={Ha- z(gU@b)*TO>b6>a=kaB$y=;Y?zQX4>zbUX`Mmb1zk(^X z!B~#pe9K7md~|z}lDZbZAh$oRTjP&5&-2A&HeKYI^;=1ahF4LFv1+2BQL22e!V+P- z$NQtm5nsX zjY##5GF2Oyf;7)2Qmc&ts?G8ib_&^xn!SFHwihhqJ=&}#Iuz%GPL^X-RFW$SG|?7D z?u|pAL_2VEYO64GY%V!O=NeUZr&>sLD5WkRNuktzx}lF(LQqB98eWr0LgCH!%DmHk z$5TtXo1x3J9xf}oM6J2+T4j^(J8;v8g|Kc9Lp)Zh#Dry@VyI9`r0jp}{02CWxr^1E2hMgzm!@Qs`F z5IHqFgFckEqCN+-;P>!&&MP)kqWDpr`FE=#gsYs+qsXEH^yS?e;qa6-)RAS;)Xcl_ zl>OFM=(XuRJSX`grQ!L6$`4pfX{v1kVxvi&q_ zm(hjVtRO(EZ4RLqw-)k@bM*Q4CPVO7_vRzL;=X+Qw9`oSbpmoFwu0FLhPqO9f?kzV-*=#92Jx$LA!IM^ze3b!%fT?>@X9R1>! 
zk~cRCaYv6aWS2c2Jjb>7!I|x*cvGG&=@QJtUnQQ0Kpw-5W3Q0at;_Kb^UKKA_#r&^ zvw$oq)FbmsTjLg%oA6sLMv;Z*_6e+1H{qo>YVpXm?Fq{&JN!t`u7v$KMZBHIX7W`; zFVbU4Cf?<^Jw7G+74al(3h`q3aePT@fBZ^pD()CLjkuT`O;k1whT%`liIcDFa3hz# zQ1ewgoIAi1a`fM0IZ3lf{Z?hftQHw${^n=I=gN4HK57^~w`ZK7^Zcbar)`1lNYIWb zt<4i?o#B9eXEY#6!H8)4rvl3<1BtUUXF`162H3mRHh8gP0B#a?4_`gw08cQm9Bi4C zh@a;!!qI^VywApVutqHoZyDx_`-VI5*7POdoNgKT?baJ`wKoC0V)=1!WPB>Fr!xmP zDU9HeL$^U};Cy`4ge3d|Sj>x0R>4g*Yw?8Pf%x*UI9{I0Qy3etA6K|3z&)H|ctxU4 zcu41De3FJbZ^@~pyb{xQFmHWd{9wvO*bI%q(|HqM%Cc1W@q{8g+(C&~otq7`Zh2r~ z9#=p`|9J3ew<}1+2r#m`A9z8o21Cwl;I==pnrjlNk9|qRv8es(n8yPhjC{0?J1<2O zd*(hHtF;~Wlj!8{k0$=PxH92?BCh=BIX zkFgvq1!UJHOtZ%1H<5tsehj;g)n(2#uwr1(wg0hS!dCoZ%I3X!unhf#50+lp`;cnPn|S2t69wP9pZBwUTDE&Om7IrXj8S|R`)BqWGgL}=qy7G5 z0RN9x%i1ryZ&b;=*M(*K>k|#q5?0^rSb)En%I>FDN)DQQJtrXhewn|B{5m;ZcAu3kXVY)%e|7)0h>3#|WX8fpCja+lW%X*tkiQrI?}F`T z^Tw{xQb{z<8^4Q2)aSFIc|&VNeK_{HI<}PcFLrFfUrgCJs@z%H@Y!%_s>bn?T>NSr z#lDw|P3xZ56hA9iH?9&tgVoUeY~7Jkdbo^z{LGG2#!BHE?MNzz-|erg9T_qLaAEX^ zm4|Hn#XQC;x%VlC;j(~#GL@YtpD}jJ+NZ1@|4FpJ9oS!uzu35owRbk|V$*Nye>MJc z#-)Q1#iC@!=AVea3Bw^R1t|6rPpaf0wJq-v4O{$j-Oye&vyg^(kTB1H#US zj`gema*mB#W5!FHZejZSyW>{Ldng#?7QdT9^44!_%9SvxXh1fa$ES5_YX>9}EbUpMnZgmX#F&(Vm+vV|NxZkZdcB(56 z=dVSrat0%!XED{YSAwvMdl9wzRucBq}BjSDPS%|C;m!a##807NB zlB$JIg%idOra0%{qe%UOROO(nl(m@;#Z^_N7L4K{>#3vh@RQ|~T}o3b#-yAYv~D;G z8Bt5#iqxPEu5CiyF!w`MUFFbHxrJ!d2urGZQZ-eQGENlja1eD~wVVQ3+QNP09V+x{ zHq}SR5-ppkBJ7oYp3JVO5oWgvKouV=&|!FUdh1Kyt3OZsEda2LcLa<__}B% zK5j=SW&H87aKhRVyi|Ejs?FR%)T7R)g|k}ZAZ*JKYTK(eqWCgB{z*3j{^-3Q&<>?X zX#D%mRAJ{+BHxxd)YwC=yrVt=)PA!J)JDG(8oRA4atT|G9yWDmeT$F4%pwyZ(hrrbc?3+JQPx9zDgw++Nm^<%_{GIi0M zAu+;U3-Wmd&*o7axP>>;Vwn)UDc}vdm_qEGyN)njHvmm?Sb#D#B81*Ui;@5F87L!G zS(G}c0`)VDB~>Qq;I`2zyt2Lv(YjaK{PE#-XyME->S&sp=tO^i)D0s@LseC(>*rah zQ<4>>V`YK{g+)@D=eJQqKjrW)WYtg+?^D5wMFMK0cLEjLbP@IV_(p1gZ3P-%)Ey1q zn9ei1rbLbK+fP0pREVc}mhqOa)TP`xJ4lbYeW`o;$!J5tOe9=~(5Rg$=(v_1x`PX; z%M;}IK8~s)EL1>cYrjA}M?OZ8{6$pvqK`ab=LOWlHl`w%+QT?cRZZk~ySLC$KT@c) 
zcMIvnbwRJsRq(dN51__H4Mna_H+hdOPotR~MZyxLRcLG?SM>DCNW5v!_tf4HS_bm& zqEX}I(KF*|yt5~k@Yb1(qrByIP*$_LQ8qb8dCNO+QA>{<=yB{J>atlD3OG2Dsysgl zX)3)##S`68!IMSE_K>lt)MNeYO7-E)S$i8(RY=Uar^a%A90qtR1rlg_s|e2C0>VlO*-)1LBfzTykr%Ht@~qmpN#t%l@A#8bojJg3itA458+9Z{e3I4au9me=A+ z9&e9>U7phm59(mZtgMx|+M8P;8nY8sl$L?q#gE^Z~{I<48r0`Pc_v)xHa^hyv zvF{`_V6OHMMVhbg5G)m7%LlO*QnTG$wwa?iD7X+6p5S*?I-#wkj9#P2W(fjvf`>>^hyN zn`%lO8?hYq=Mu!7iXKqIzYN76Fcyur)E7-xT7Yz0h6%3)okT^Wx1qoZpQ&`O#k@7X z6y={)fL3-qOZm^)!E0797`>XKBT_USh$<~Agl)WD3k7#fsk=E(gg3@b6m3qD<140& zMI5D*)Y8ZTVP6GBk&WDVQTgyl)U*`ixrS_^xGR>S08MAK=yNeXuGui;S1zKaUQ-gC zwX8(*rr+jG`1l#MzVMJL66FY6+&2}qb>B;wzj=;-9&1b~s4nMa@TVX-lb6)dD+(0) z7*9>u$U!^To~OdlGE_ADA->Qp54CBtp9;L(lyAK%7y~Y|DQ{yR%D}%bx_-A18J=B6 zWo#VAd%mhhSg|xy_{6z2@3;xcn}7HKvZcD9XhRZ>5Ns0Rchy8kM~_8rPm|C!D=rlr zaFW-FJD&gcfHHrePfJlGbx3%^B}4d3C0umO`~n)jx0YJ=lAs=yRzvLcLZs5s8FgsY zo;UGS1ttI5iRvw1g|r?kizaw03f;DTpvGJ>6S>bhi!#>d32#rzMyi7bqXDYz`0W;% ziPE>vKyfeSMRW5L$^3r1sWX@I(T46;RR8l)Xxr^J_|Q%#h>qrAq9;1~)WlvIqQ{qu zc`+LR>eVrdm)Ld~Z~OsWQEYlGj&qMu!QE1+ZM`<5{aY^xsr2#Glgs*O+y@>S+GM0~ z?&-JagLMqCXHY!vjbd9eDg)suZ)2(H-Q@XtnWem!P5X%M&a|Pt6pstfee$9D9T+Bb z%P&Vp>aL<2pYwSk=AVVv$G#9w3UZ*lT%Qxep5;=7{nqfDw2x8Mf*7jT$Vfc>Y)ig- z%Z}6|-C5}Jo+Bt{d;-Ov`vvzqDiEH0sEMuyED<)Tz9qcYSC?AZ=7jL_uF*ohcap4>!RTW1S{ zKjx#=%Z1ebd*_AM^}C6RFH>mPU_+r{`&w$-2L-gWmoW<08!6g4-V1e*+k{5V{zPs2 zY%DZ)jzaHj6GYnc_VIEvjYJ`~S9zmKZ=qN@E$YF&8KUwBblmXqDivaviMD?5L_SHz zRL#L*#EchCyaiY(HRpaLKI-sC;hVB{qLBXb{6oqnqH&$B@K#-UhW56$#19q|)F=JD zyo1r(s0)D<(l9zi+<4l78s#&Uipc!Td+Dc1o!x#O<%V>k#=eddnhn>duBmTA7q+HS zcAH&LSj86MqS>*?`ceY5yDU|B|4vUdO1m>J$uf&qJn}JbXH6vUL((*A*p&jx@Jv_H zwVGY%kdY_3$vqP7tlCGOp2bJQ2OOYdfV0$|evi-sg(2wl12g`pw(p5SS(zx}Nh&YP z>@<~^ehiu2*hr2hT8Oqh3Z<^j6H!Zs2T^MV?m?ADdHgAwwxY5QxoC`O6Vc5a3(9`< z4&n8#%|rq-Z#>{aOOcYCx5#9Cu1M|JR-~PBL)b_27#hE{kg^N7!h6}Z6Tj{J6y!N# zG4*J1HZn^%hECx7aRb%nD8q0ERiRGLS)=pGeP<4#cQY1IGp=@`7ELWj?=>L5Z)hgv z6TJ)FGdN2<3@N3Wt|}9b+-of|&uovU7wklt)sv_udFO>imd+?eKbDHS?TDs0Z$p=^ 
zZ=;-nqG-s)3c|PuWQb^Bh^Dov=GpT|?01XU?*YkvcZmHiQh0EPYk>DO`a8zbFV{=o6B_o% zdEo+<;Qx{IXXlZ$mP#2>`5)n@k<7nW08A|`J_=R4G???hiqc9EHS zab4Z^;AW`mpT%!XU)F^ezx%One{Jb}-TuViq^jGW_*+wT+qb9lb^8;4 zJF0GfhIGE6bBZ&T(3SKrEmoTT%rE%GN#PjQq?yLQzl{WB{emOIc>$w8tRUD=WU=$_ zkLPDsE4CdwFH4M_E@b_V?DI9wjNY7-MB9(lsmM&!H8#@-@FQ&5bQ3#{AEM8eX{)^~;8}M)ABS#7E|NZyB z7&38|j3Z>;clDndAgNw`{SMoVq#yUh`n`gRAH)4_wGGWLvhn55<`?G-spQyr@OP8r z@6K;S^9z?*pRn?ge6NAdh`-k$E@S5bJ8|Y0ehukw0n=~g8Y$AB>F<`cQaIL6`~oSB zP5y@^(2)MtGWx@^_MeH5?oPRet{_gxXG{3koew~%i-#x$lbAGYtf6_1Rl94|<4s6QWYvl#Dq%jpspdr5~ zdmrIvelcd7RGw^H(#Yv?tasoSA$`fwY|UuSXdD zX5nP@ou&78NxzMc93`wbw6D&tA6a>_@AG8SZ|i^cy`Ek2q+{^kd9NqSUwQ72HG$Q? znu|Y%``v1rXo>^p@W*hJ`j#gyrypg!(Z6jij zb{}%ROGiP(WFBtd7f+seV~JP2-b@ILdO-(+QDntE3sS@Ii@?`>Dt@EKO=8*W8hGSY zZ&Gjl2-2hY0NGNxH)sdjlea4l;k7y|h~DQyals`AGREgK5xZ|C+2>UP*j%%QXhOWj zCzW&|i^)^?>n@9kve(N6mps&oLH%}Pc$zsZJQ0kq|71gI1R=baPb!?;avov40FdQ= zCy99qF>r(K0{q#t!&tk?x8bWElLQV%wj|YTH+kZcg}^1`z992SIi6>F1u7`5$5$5` zk&hnjC!S6|M=p^YMhu;Jm?*4K;FT}uLV5lJe9YmN?D{iuldpR^p$Do-P>ni%0cwM=5tyvF-0S;|luPk}<>%lu+tv7|f)3<`IGh~(U;{b&r6-|ny^>HZRpxCB9ZMRXS&pkX zIFeCQ7Q^{lHxs+L`9ur8nSfdoj&pKmlU46*aP2+wu+VmPVEU0kWb4`E$*?P^(DCwiz$R&kRr}^7nMcw^ghl3a2a}{U^L2 z?+(<2y>eC(*<(K8Ri~SQM@r>*fciN>+0*Al(ZEh*pTgIIm2E|Y4krRXQhuJ8yUzx{ zH98G0>hg*>U}{OO2+(CeewM(ZWBIlOv$dPugN7(`@<_EcMzj5e#D~(w8yu%zKieZc#=3ja2HWA zAeJ;c@d=I&DQTZjAOF@i~fie%zSH}Y!7SlInp9I^PpN__Yj59q$D zJ$`6aC^>thIeBBxZqiL&3CB5MMAw!Zajt%67 zn7`>P-ZJkK0n*pv;fnfX#uEz0_3BS{>vfs9xjmM!S{RIfhLpN!AGfstu@*T7zbSCZq@A3=LlKt}3c#gzgN03WX? 
z95b9v_*~(W`cy5sB?br12eJrf>@vQk`YveuDF+YfZUJNNe<0c~G$l)BZ6@SnDhc1z zk@(!J>V#?WOnk%1{)MBBPGT!A4JNxfL2|Tud&2l>5`JxeYdF$+87=?^N&D65f~6gk z$SA98wl2bcL_+>ad|zxSTsk=ppD|%Lx%;dmd2pJ#Fz=QQUY^yG@a0tELzcCmZ9X3# ztU$|T&l*B*OAg7;FMwA@#R-z!g!uZopWukZL_GRz4^mwThiQ2`$bD22(QeOT;Lx!c zAGoCn-YYpD7u0+q&kk-w-0fUK_A?A2T4x#%r@Fkr-*)eb$KKeA|8D-ak>5rM{F@}u z(E3s~F8|s3vbIpl6&q*$n|N%bV@-x?YD%TmYMYi*Tm&F`IW$S=yCEB~2aEGm-9ksXiUTdKbw z_YL{QEzENXtVI5)ed8{av`5DxY5Ft$)h(66v3^3DYyA7WNT4D8^((3`fxmlRXXi0i z@3Kxy`(yWmk1+X~Gt%-u{41m1ES#*q*Qfgbe(u}&sEdT@|NMLP-O8lW{dc}spVb$Y zk{{~;tA8rDehl}!)jsn)fX#d9K`b5}B3M?mA55xIh0hbF5u>fu2x~7yMAc4%3m5Ez zH?O?qULR%&mET+vlq@qOR*%1r4OiGG*jgnoIM1C-BviOUJAWJKr{oJtymCRMofZ)^ za43;v(F~ru{fRg+$AAbeQ6`@)a>cu~D27!h-(Y7a%n@*s&9S*JmI3t*-Qc^dqr^M& zB*8ly1@fZ7Be?Qo2D}!NhK-{<;gA(1q2Y}Qrd0I+JH{pwBTnTB(5`{RlV(P^THA%d zpmGoptpRX~<9orx-NOaF%HCl8caVhLh;0JBxC-LMkgoU}^`~G{!cuO(=Lw*X=@}5S zF_)X0u(IHD-wg-uF3l`B#Bn2x-lcQDl$wF5`%J)gvJD&u9|(Hd_b2pp=7FO2DMZT5 z^@0yOwh}u8CipAjF4%S0y)fDFG}yBC2B8qq4H`7P4^VGIB6w3AVK**Ukgl~EDotJp z$t|0(l}DFDlRz7C+0Nxe>uft1qkn=p`Och(-{M5JkLr$>+(^N*l;c2crxZaz3ZI-D z8BH2hIpUU&Uy+fg>?ba6v={H`=H&YrR0n^ zXNbIktH_wJ3%JAVQr!830@PAnO}c9}^GU>IwGtSTVhLBgE-N%HEF=zEEfSa&?uPT` zT7siF9O8}*k35ka3}!Uj3B!UmLYFLMGU;(Nk-fc!*rs+5K1>L~z1=xv)J!GvdaqhI z{j3~bv-2bD5wwMz7*;H1XeDvA*~0z zAq<3ka;tq8+&pk3Zt3%+@a?!|f}vr>g_*|-fo=ybteMuIFn2d6HatooxTz|z^&|m& z^S+Sq-#?jnV4MUatv zpCy>j{gI$oD;zHE-;-F>H-v~acn;i_l)^g~<%md?eS}GyWl*hiI-F~|7@jbVA||fb zL=1hmnV9cA2FAtifF}}H7pCsMD=1$VE{Jp#f$s4y0qLwjBwS4sxIFbB;+s0dW&wlY z39pxeS2-mDUDptx(Lah9JVFt^yoX5LPlp6qHQNtt=BeOYC+pzrqf|+Q%XbLNPnm_D zhv!1Qgpcsdg(l>~3uZ)AK>%T;`2kj>Sm7U)@`=M*CyCOPj|-zd?}3l27C_A%^N2%x z?-n|(DIh9~@4)N&1bmiZLAakwC48bI2)B`fg5XOJV4ofe6bn_-G#w zJTve5UT4f$l}sP(C4!& z9vPh`Fl;(opxdMgc_eQSv1fP-{L8XH^4zW0#P0k{f{eYx@xFIf;8O>-AQug2L&|f? 
z!QsIdq36b8SU#sMIcG{VQTfiA9I#*?o*H`vE*hIhtkH}hTYRu3b@r*_gUqz?KI{fnM0nu^AP$~ zh2ynIp7?TY8#$%tXCe*H5-9Rk;}?rolM8eQk?tQzGHSa4PvL$IK4W)pxa{$qLbD#7 zN&Sya$Ok1`q2H!xvf5<>(aA{;>SlXG>!lZQ=bYu>%t>F;`bIm_`BoH|J!2IvXcb3R z_nJ!Xa9mA3Sg--drm67~&W<87o6jUIa!v5Oh3#M!=!rjk)`PT8Iza3{wS}~+=!O@$ z@5Z;^I}0kNo08$=QGxTvQfySdFH|m+C!GwqM9|H9#5FGiJZNPQzUYGr%xQKElIK-P zy(aTP67e4Uyf24H**uxh(N!v=2qG(XVT?lVy>639SW8t~c2f@wtz627E zBF_3fDcHmGp&Jy($OaZ!O?1>sM8zR^x$Eb zz~5RJxVi^C61<=IXlVeWGmRjr)0R}O&Lj>xsgfV(W#S5V9s`YGs^IFL6k`My^9x|_E8b+)0{|Tu5J!%cO}A+ z>e^sQ%Z)_l*($*VbQrE(vlxauoF{VMt|L7Cdy%HdFkPxx%A25Sa4 zgXe=f!#e@ZNwZC6g0=Y@$bBjOV4uh$JmhW`z^9KUK29GdxO!!yV0~s&XcsCZI<2b_ ztnRj(7%b`m3+FlFk;blYW%C%K!i6J9s1CvSyBuLtohgLj%?zSOem1!xs1$CV5{0MC zSS;`!98UlPV*zH83ENCfgldb6i5?nDiA_sl$oxSmFlpWzJOsBBBzaj8=WASuFDu&O zbNcp&oSAvVP`Q0XSY9Bpw8<)%NX9}ve-Q|C(IA|UG!;ae^FhRwD7ejdKC#GhCQ-R( zDLJ-vf4JT%8CN34Lv9htLDh#iOo#O6DyYxMTObq{_a)K?Nb!q=Uy6| zW-L#x(B_gAzzJ4{%i)7^zYtAFDwD1b%A_FM4e!0&7tgRh0;*kVh$+i^lAMmGxh?V! z0JHKU;^DgtqO9#M;(aSe{N$W;+7<6!ct@OgPG|0TB^< zK*2mSLPM7{juL@1bk338=VQ+^ zvgaGw^xOJhJ?E&~M>;YIG6UoylmC0O-^!!Dow0ng>95<_7KT4oGg&_VU3T`*$)n(Z zR31OJ7gkSLKgHS!n|@pWtM*b`Db=8V_FV8M+z}dOA~5%zFX-Jt1xEVj0gDI)JZ(}Y ze7Z3Weo21{G@|suZnGVj*R3mHPtVOkSM+(&7>Ded$Kv?4%7#EsbC|Z%^QJ zI}gm()P{)zy1_x*TBshq04D8CfePF7VQhIPP&IW5HfH!iuqY3JbenW|*oFs9RvSRR z^K_7H-x8dkaRgLW6k}sDJA!>h^I@w_*3e*Q3%De1EvD?#0a%`KfbDLlV-LBb!240d zVc62mpfu71s)383nOZWKHW|VPmzu-5uL9vo!!n3J34nelm&2^d$zadu4xkD?#GWLj zg5@K=fKW0C>dg8CDq5Mt^lrIW&n3Db?%G8FY%{PscQSw?76m^js=;}j7Laf`iX9qi z3*OE1fHxM#U^b?+LH2PT98fL*5&k`4H)R#5o*x7rzU&OgBqaeuvsK{q^k(?RD+zGx z{58-oO&M<7Yz!7_YlDJr2SB>haBf8L3|JO>9ymRF1tvz=fNB0Fz-FF4ymxp%P%kgT zB1hUlMeh)BZh$_NxAMgNqk96U1w&xR+d-gJyPaU;n^Ew7?0QT!tUcV+J`Ox_O9Ndq zr@>8eJ!#&I;O35s_?LatVE@^<5T1SwcJ^%x%+syF&1g5U_3A265wjMC+-w8A{5rt9 znZ1BhbbBytjT#(hz5+aOngvun{NSOke^iGG9&)oqwTW#RTWK8Qi$s2WD051YH+* z1+QIw;Im^l!OUsuJTm_CJSdvsv- zuymMxdJot(!xRiCSPP5?M}w&FN(>}AK%R9FXc=(>Y}9kdUF9FGNH zxt$pd?6w$FdC&qn-WUPm+HA*;=~}{vPN^Vb^cgVsbU$coVTv7oPp`Y?DS#biOM#bF 
zA$fI|!D0RPK_k^DP}#)*ob){bT6gFN+?JmM=K~Yrr&c5w9NZZy z=L`Z#IW2+r({|AGp#bbmCcxBtzVOJK2rw(O4=gS!2bO2EG3#^o@PO(;aASZ7BupF( ztF2FBvphB7442iQqT3Eo)#NVhqh13725Ljy;!}`&YcL$=Py(a7pT_Q#bpW4tYr@ZC z27ni7RltQC2WN!jfzeknc-E~Y2t1|<3PL+V)f@XkvN{E#FSx<2)+fP^g$T;;*M^hK zwqt@v1EKkZSD>fTesJhRDUqXhnW3H&TRWaAz-{q=bH2ooO~ z$}kxtlmG8#zm-Q92|@q)`~S5yQqKQf_y50@NB#8_yI;YkzpfuOjDASv{Z$M8UHb8r zuEwiI2{cNeQ3C(l5?IWw=8g@Y$2BbKfMuO@#5T3zVq5HbVY6!vb2rTHf^|{O#L)3< zY@wMycg~Y2?&(Mh^G-N}B@hd_=1WFlr?Q4|`R6uZbFVGIc<=MMO?=IP&F&?bqg!+6 zyWb3+JU#;0=bQy@>O$;HT1TjJxCIPcb`^|hzZ+Y>t2q{=JPTapM*;0=pRr-KrT}+0 z$EL?5gQ^35;Nx}|5Z!YvR=Y|A+ql#U7`F8Us`j5SjSM~TEP%w`xod;ZD;9&P6(fNA zgEB0u<25e!CG>Db89CRl3uY*2J%Eog3!uwG`xZR01)hjsRT`9e6~w0Cce04Ci~?ZeUoPto$EXa%^*W~FGdv^leiIB~WLY-YyDkHM*!UXyIGMuKpkg@ey#P$u zw+==VI{4?2H{r!j#{hSBCD;X8!pJ~FnB1ffH0xsn*B2VX;=2W4Kv*XHpgtd7&YKQV z%XV;`M^oJ6nKNGga0b*H+7e$p(h}}JeFMIKI0YX&zZ+h$Aq?~EvJ+E}+X(z;#el1r zJQicV1KZr;GIyJkE>KeR0gfYcF!)IgUIyF2p+1qYm5TrxJ!uJh987`F?Oy?Jde6zU z{R`M_-Yyt&sDhjPau#;icp9cyvj*E=J{fC2|0o7-@5RcqOS!(oE@R`SHpfifs(}mc ztFY#dGr(&49;L_H{n%Y=FJQfHFP7?d9&4j`3fPwq2b!HV;J}xa*bvQ5&^QT)a<2rS zY2rLM(<2p3p4kH4*Di*;Y~A7J(oI0qCIed;ng^yE9s%oB62Q)OO(8dE1n9eFEy!)= z0(A>&L74nJZ2iLNSl4-LG4&HWu+Djdf#uk{n5{}GW_Q038`sevWMbXKY~Qo#SbRY| zsJT~-P1^Kx~z4t1~UYcC$e4 zQkMtVf^aQhzBd^R<4yoMj!i&{um`w%Ck5OfOMwIK1x#Ac!#?e31{Z*5K+j+g2&S^Z zgY>6ZmZCGL>USEPg(eO`Tm_t5T_A(w^XgL5HyYS#?%gNyGa4oF)cJAe7X($~ zVOjG@aHH)tuui=>R323dGnz!fT_w}NL;E9G#gs+R{ALK;YS!H);W;@!(!@lg9vkgc2tlG|(opS^scevkr`=gz11QqzEc*9TzL@U^h{ zI(OK^VlF&OX~Gbb(=d40OZa*Z{ha>bGT72M8%9K|fTm#>?t8}se?;ryYmbG%u1iN~ zJWCOpp3s4c+P%Sv=5H}4{f=;A?bPL%_l9gBgDr4?9ZlMKeqosD^| z9*uQ4osC_4kOdw*oB%qUQwDhG0^oD2HAr;Q0(XN}0sZPV*p>DcVC%3!+|6NTV5R<9 zOly(`(9)QRZN9n^(>Z$!`}{}&yw~aua+@@P%AN=dGV2aC`szT3fJ+7qEP)J$E^w3EJB_0;S?hnCgkiAXIG=<~E$f-jpl>@w?lDWoMDL@AW37j71Y?iqgBvy>n6S?T@YH1`7C&YM z_V&t7&}MWEb}94$cVpUpY+@<_`u1!JlamHuK522FsNYoX!E8ft=Jg1WG{qjY6W-v4 zcD%|BGF^n7c}nk}XRBj&Jq}=@VNMnn{dg}}nAempzrekJyBfTmtO+jozr^Cy 
zhhUcPRswuRGB}v&gcU~fz^(nt7&)^Xi>b;7>T0b)mD@y+o0bNWx8Db$K0841%0i$N zu?S@4Hv{p5--6NpR zvX2@ZTq*z~mp{Y;2HXX%3$kG4`i0o61IwVru8Cj{^$y!`p&4|`%cb|;Z(-aRJM0za z0=k#p2X|MUP$@0PqS8Y_-vBp|mB)dcWk#_3t39CY zv=r=B_-Lr-+!Ssbo(k{9_(6O^E2vUl1l7;9!?muez^ism@b@KAaL+wsSgG3|f6vK+ z#epZle9;AvSHXo^QvozcZUI&0^`YB{_E0tBI7qMD2M5q+(UiZa!o2XOpsC?fh$0iA z=`B@g%%21Uc^5!o4;R?FHwWjpi-%cHPJ_yW&oPr)1sK*h3_A9{4*HJl3ztWffL^PP zf*hp)*mR>U+@ReHn{#;-=KHECINVwryc0A9BJwt7Wj+ooNfTgh1@@r((J`P`S|+#X z##HRxjK`R-cV|pJ!IKNK+hf5C&v74&9K{VR+=0#a4ddPz*q=N0#SyN7_AIP-3laDB z5CPZZ&2H}Mk73-k+jnu(j*P(eCSBn!dG?-b5}#x{yXi*m_8}{==_W$%+{>FWi{ca9 zD|`h^V|5PJ?8OM|%iSZ`8nZHNc98(6SLcBB)keTgE)jT8?ZKF)J1~ta6=3KGJ7Czf zKep_yDHg)D1;_5(!a`nXf;D~xSX#wo%;(+;Y_pOIn8?)wpK})gw~JiNdHZT`e~1!H by*m&Pwb#Js>T%%luAacYvKBO*wG;e5&;RT* literal 0 HcmV?d00001 diff --git a/zkstats/onnx2circom/keras2circom/models/model.ipynb b/zkstats/onnx2circom/keras2circom/models/model.ipynb new file mode 100644 index 0000000..4593c01 --- /dev/null +++ b/zkstats/onnx2circom/keras2circom/models/model.ipynb @@ -0,0 +1,258 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": 1, + "metadata": {}, + "outputs": [], + "source": [ + "# list of supported layers\n", + "from tensorflow.keras.layers import (\n", + " Input,\n", + " Activation,\n", + " AveragePooling2D,\n", + " BatchNormalization,\n", + " Conv2D,\n", + " Dense,\n", + " Dropout,\n", + " Flatten,\n", + " GlobalAveragePooling2D,\n", + " GlobalMaxPooling2D,\n", + " MaxPooling2D,\n", + " ReLU,\n", + " Softmax,\n", + " )\n", + "from tensorflow.keras import Model\n", + "from tensorflow.keras.datasets import mnist\n", + "from tensorflow.keras.utils import to_categorical\n", + "import numpy as np\n", + "import matplotlib.pyplot as plt\n", + "import tensorflow as tf" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": {}, + "outputs": [], + "source": [ + "# load MNIST dataset\n", + "(X_train, y_train), (X_test, y_test) = mnist.load_data()" + ] + }, + { + "cell_type": "code", + 
"execution_count": 3, + "metadata": {}, + "outputs": [], + "source": [ + "# convert y_train and y_test to one-hot encoding\n", + "y_train = to_categorical(y_train)\n", + "y_test = to_categorical(y_test)" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": {}, + "outputs": [], + "source": [ + "# reshape X_train and X_test to 4D tensor\n", + "X_train = X_train.reshape(X_train.shape[0], 28, 28, 1)\n", + "X_test = X_test.reshape(X_test.shape[0], 28, 28, 1)\n", + "\n", + "#normalizing\n", + "X_train = X_train.astype('float32')\n", + "X_test = X_test.astype('float32')\n", + "X_train /= 255.0\n", + "X_test /= 255.0" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": {}, + "outputs": [], + "source": [ + "inputs = Input(shape=(28,28,1))\n", + "out = Conv2D(4, 3, use_bias=False)(inputs)\n", + "out = BatchNormalization()(out)\n", + "out = Activation('relu')(out)\n", + "out = MaxPooling2D()(out)\n", + "out = Conv2D(8, 3, use_bias=True, strides=2)(out)\n", + "out = ReLU()(out)\n", + "out = AveragePooling2D()(out)\n", + "out = Flatten()(out)\n", + "# out = Dropout(0.5)(out)\n", + "out = Dense(10, activation=\"softmax\")(out)\n", + "model = Model(inputs, out)" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Model: \"model\"\n", + "_________________________________________________________________\n", + " Layer (type) Output Shape Param # \n", + "=================================================================\n", + " input_1 (InputLayer) [(None, 28, 28, 1)] 0 \n", + " \n", + " conv2d (Conv2D) (None, 26, 26, 4) 36 \n", + " \n", + " batch_normalization (BatchN (None, 26, 26, 4) 16 \n", + " ormalization) \n", + " \n", + " activation (Activation) (None, 26, 26, 4) 0 \n", + " \n", + " max_pooling2d (MaxPooling2D (None, 13, 13, 4) 0 \n", + " ) \n", + " \n", + " conv2d_1 (Conv2D) (None, 6, 6, 8) 296 \n", + " \n", + " re_lu (ReLU) 
(None, 6, 6, 8) 0 \n", + " \n", + " average_pooling2d (AverageP (None, 3, 3, 8) 0 \n", + " ooling2D) \n", + " \n", + " flatten (Flatten) (None, 72) 0 \n", + " \n", + " dense (Dense) (None, 10) 730 \n", + " \n", + "=================================================================\n", + "Total params: 1,078\n", + "Trainable params: 1,070\n", + "Non-trainable params: 8\n", + "_________________________________________________________________\n" + ] + } + ], + "source": [ + "model.summary()" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "metadata": {}, + "outputs": [], + "source": [ + "model.compile(\n", + " loss='categorical_crossentropy',\n", + " optimizer='adam',\n", + " metrics=['acc']\n", + " )" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Epoch 1/15\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "2023-11-26 21:47:52.776729: W tensorflow/core/platform/profile_utils/cpu_utils.cc:128] Failed to get CPU frequency: 0 Hz\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "1875/1875 [==============================] - 11s 6ms/step - loss: 0.5203 - acc: 0.8386 - val_loss: 0.2099 - val_acc: 0.9363\n", + "Epoch 2/15\n", + "1875/1875 [==============================] - 11s 6ms/step - loss: 0.1926 - acc: 0.9419 - val_loss: 0.1497 - val_acc: 0.9543\n", + "Epoch 3/15\n", + "1875/1875 [==============================] - 10s 5ms/step - loss: 0.1551 - acc: 0.9522 - val_loss: 0.1263 - val_acc: 0.9591\n", + "Epoch 4/15\n", + "1875/1875 [==============================] - 10s 5ms/step - loss: 0.1361 - acc: 0.9580 - val_loss: 0.1139 - val_acc: 0.9628\n", + "Epoch 5/15\n", + "1875/1875 [==============================] - 10s 5ms/step - loss: 0.1253 - acc: 0.9617 - val_loss: 0.1031 - val_acc: 0.9679\n", + "Epoch 6/15\n", + "1875/1875 [==============================] - 11s 6ms/step - loss: 0.1168 - acc: 0.9636 
- val_loss: 0.0976 - val_acc: 0.9697\n", + "Epoch 7/15\n", + "1875/1875 [==============================] - 10s 5ms/step - loss: 0.1113 - acc: 0.9650 - val_loss: 0.0923 - val_acc: 0.9711\n", + "Epoch 8/15\n", + "1875/1875 [==============================] - 10s 5ms/step - loss: 0.1072 - acc: 0.9673 - val_loss: 0.0884 - val_acc: 0.9732\n", + "Epoch 9/15\n", + "1875/1875 [==============================] - 12s 7ms/step - loss: 0.1026 - acc: 0.9683 - val_loss: 0.0879 - val_acc: 0.9725\n", + "Epoch 10/15\n", + "1875/1875 [==============================] - 11s 6ms/step - loss: 0.0999 - acc: 0.9691 - val_loss: 0.0928 - val_acc: 0.9719\n", + "Epoch 11/15\n", + "1875/1875 [==============================] - 10s 5ms/step - loss: 0.0968 - acc: 0.9702 - val_loss: 0.0954 - val_acc: 0.9699\n", + "Epoch 12/15\n", + "1875/1875 [==============================] - 10s 5ms/step - loss: 0.0945 - acc: 0.9706 - val_loss: 0.0841 - val_acc: 0.9740\n", + "Epoch 13/15\n", + "1875/1875 [==============================] - 10s 5ms/step - loss: 0.0926 - acc: 0.9718 - val_loss: 0.0826 - val_acc: 0.9748\n", + "Epoch 14/15\n", + "1875/1875 [==============================] - 10s 5ms/step - loss: 0.0893 - acc: 0.9723 - val_loss: 0.0803 - val_acc: 0.9751\n", + "Epoch 15/15\n", + "1875/1875 [==============================] - 10s 5ms/step - loss: 0.0892 - acc: 0.9723 - val_loss: 0.0767 - val_acc: 0.9757\n" + ] + }, + { + "data": { + "text/plain": [ + "" + ] + }, + "execution_count": 8, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "model.fit(X_train, y_train, epochs=15, batch_size=32, validation_data=(X_test, y_test))" + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "metadata": {}, + "outputs": [], + "source": [ + "model.save('model.h5')" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "keras2circom", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + 
"file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.9.13" + }, + "orig_nbformat": 4, + "vscode": { + "interpreter": { + "hash": "71414dc221f26c27f268040756e42b4f7499507456a67f7434828e3314a20678" + } + } + }, + "nbformat": 4, + "nbformat_minor": 2 +} diff --git a/zkstats/onnx2circom/keras2circom/package-lock.json b/zkstats/onnx2circom/keras2circom/package-lock.json new file mode 100644 index 0000000..71cd19e --- /dev/null +++ b/zkstats/onnx2circom/keras2circom/package-lock.json @@ -0,0 +1,2728 @@ +{ + "name": "keras2circom", + "version": "2.0.0", + "lockfileVersion": 2, + "requires": true, + "packages": { + "": { + "name": "keras2circom", + "version": "2.0.0", + "license": "MIT", + "devDependencies": { + "await-exec": "^0.1.2", + "chai": "^4.3.7", + "circom_tester": "^0.0.19", + "circomlib-ml": "^2.1.0", + "mocha": "^10.2.0" + } + }, + "node_modules/@iden3/bigarray": { + "version": "0.0.2", + "resolved": "https://registry.npmjs.org/@iden3/bigarray/-/bigarray-0.0.2.tgz", + "integrity": "sha512-Xzdyxqm1bOFF6pdIsiHLLl3HkSLjbhqJHVyqaTxXt3RqXBEnmsUmEW47H7VOi/ak7TdkRpNkxjyK5Zbkm+y52g==", + "dev": true + }, + "node_modules/@iden3/binfileutils": { + "version": "0.0.11", + "resolved": "https://registry.npmjs.org/@iden3/binfileutils/-/binfileutils-0.0.11.tgz", + "integrity": "sha512-LylnJoZ0CTdgErnKY8OxohvW4K+p6UHD3sxt+3P9AmMyBQjYR4IpoqoYZZ+9aMj89cmCQ21UvdhndAx04er3NA==", + "dev": true, + "dependencies": { + "fastfile": "0.0.20", + "ffjavascript": "^0.2.48" + } + }, + "node_modules/ansi-colors": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/ansi-colors/-/ansi-colors-4.1.1.tgz", + "integrity": "sha512-JoX0apGbHaUJBNl6yF+p6JAFYZ666/hhCGKN5t9QFjbJQKUU/g8MNbFDbvfrgKXvI1QpZplPOnwIo99lX/AAmA==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/ansi-regex": { + "version": "5.0.1", + "resolved": 
"https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", + "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/anymatch": { + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/anymatch/-/anymatch-3.1.3.tgz", + "integrity": "sha512-KMReFUr0B4t+D+OBkjR3KYqvocp2XaSzO55UcB6mgQMd3KbcE+mWTyvVV7D/zsdEbNnV6acZUutkiHQXvTr1Rw==", + "dev": true, + "dependencies": { + "normalize-path": "^3.0.0", + "picomatch": "^2.0.4" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/argparse": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz", + "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==", + "dev": true + }, + "node_modules/assertion-error": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/assertion-error/-/assertion-error-1.1.0.tgz", + "integrity": "sha512-jgsaNduz+ndvGyFt3uSuWqvy4lCnIJiovtouQN5JZHOKCS2QuhEdbcQHFhVksz2N2U9hXJo8odG7ETyWlEeuDw==", + "dev": true, + "engines": { + "node": "*" + } + }, + "node_modules/async": { + "version": "3.2.4", + "resolved": "https://registry.npmjs.org/async/-/async-3.2.4.tgz", + "integrity": "sha512-iAB+JbDEGXhyIUavoDl9WP/Jj106Kz9DEn1DPgYw5ruDn0e3Wgi3sKFm55sASdGBNOQB8F59d9qQ7deqrHA8wQ==", + "dev": true + }, + "node_modules/available-typed-arrays": { + "version": "1.0.5", + "resolved": 
"https://registry.npmjs.org/available-typed-arrays/-/available-typed-arrays-1.0.5.tgz", + "integrity": "sha512-DMD0KiN46eipeziST1LPP/STfDU0sufISXmjSgvVsoU2tqxctQeASejWcfNtxYKqETM1UxQ8sp2OrSBWpHY6sw==", + "dev": true, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/await-exec": { + "version": "0.1.2", + "resolved": "https://registry.npmjs.org/await-exec/-/await-exec-0.1.2.tgz", + "integrity": "sha512-BQUiyBLScS0+YPnnCZZGjb78mZ8sQ8aKgxarDPNw05rpbaCS7VIQSLy2tgjZKct9Dn1xLbKMXOpA98OWei90zA==", + "dev": true + }, + "node_modules/b4a": { + "version": "1.6.1", + "resolved": "https://registry.npmjs.org/b4a/-/b4a-1.6.1.tgz", + "integrity": "sha512-AsKjNhz72yxteo/0EtQEiwkMUgk/tGmycXlbG4g3Ard2/ULtNLUykGOkeK0egmN27h0xMAhb76jYccW+XTBExA==", + "dev": true + }, + "node_modules/balanced-match": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", + "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==", + "dev": true + }, + "node_modules/bfj": { + "version": "7.0.2", + "resolved": "https://registry.npmjs.org/bfj/-/bfj-7.0.2.tgz", + "integrity": "sha512-+e/UqUzwmzJamNF50tBV6tZPTORow7gQ96iFow+8b562OdMpEK0BcJEq2OSPEDmAbSMBQ7PKZ87ubFkgxpYWgw==", + "dev": true, + "dependencies": { + "bluebird": "^3.5.5", + "check-types": "^11.1.1", + "hoopy": "^0.1.4", + "tryer": "^1.0.1" + }, + "engines": { + "node": ">= 8.0.0" + } + }, + "node_modules/binary-extensions": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/binary-extensions/-/binary-extensions-2.2.0.tgz", + "integrity": "sha512-jDctJ/IVQbZoJykoeHbhXpOlNBqGNcwXJKJog42E5HDPUwQTSdjCHdihjj0DlnheQ7blbT6dHOafNAiS8ooQKA==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/blake2b-wasm": { + "version": "2.4.0", + "resolved": "https://registry.npmjs.org/blake2b-wasm/-/blake2b-wasm-2.4.0.tgz", + "integrity": 
"sha512-S1kwmW2ZhZFFFOghcx73+ZajEfKBqhP82JMssxtLVMxlaPea1p9uoLiUZ5WYyHn0KddwbLc+0vh4wR0KBNoT5w==", + "dev": true, + "dependencies": { + "b4a": "^1.0.1", + "nanoassert": "^2.0.0" + } + }, + "node_modules/bluebird": { + "version": "3.7.2", + "resolved": "https://registry.npmjs.org/bluebird/-/bluebird-3.7.2.tgz", + "integrity": "sha512-XpNj6GDQzdfW+r2Wnn7xiSAd7TM3jzkxGXBGTtWKuSXv1xUV+azxAm8jdWZN06QTQk+2N2XB9jRDkvbmQmcRtg==", + "dev": true + }, + "node_modules/brace-expansion": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", + "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", + "dev": true, + "dependencies": { + "balanced-match": "^1.0.0" + } + }, + "node_modules/braces": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz", + "integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==", + "dev": true, + "dependencies": { + "fill-range": "^7.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/browser-stdout": { + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/browser-stdout/-/browser-stdout-1.3.1.tgz", + "integrity": "sha512-qhAVI1+Av2X7qelOfAIYwXONood6XlZE/fXaBSmW/T5SzLAmCgzi+eiWE7fUvbHaeNBQH13UftjpXxsfLkMpgw==", + "dev": true + }, + "node_modules/call-bind": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/call-bind/-/call-bind-1.0.2.tgz", + "integrity": "sha512-7O+FbCihrB5WGbFYesctwmTKae6rOiIzmz1icreWJ+0aA7LJfuqhEso2T9ncpcFtzMQtzXf2QGGueWJGTYsqrA==", + "dev": true, + "dependencies": { + "function-bind": "^1.1.1", + "get-intrinsic": "^1.0.2" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/camelcase": { + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-6.3.0.tgz", + "integrity": 
"sha512-Gmy6FhYlCY7uOElZUSbxo2UCDH8owEk996gkbrpsgGtrJLM3J7jGxl9Ic7Qwwj4ivOE5AWZWRMecDdF7hqGjFA==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/chai": { + "version": "4.3.7", + "resolved": "https://registry.npmjs.org/chai/-/chai-4.3.7.tgz", + "integrity": "sha512-HLnAzZ2iupm25PlN0xFreAlBA5zaBSv3og0DdeGA4Ar6h6rJ3A0rolRUKJhSF2V10GZKDgWF/VmAEsNWjCRB+A==", + "dev": true, + "dependencies": { + "assertion-error": "^1.1.0", + "check-error": "^1.0.2", + "deep-eql": "^4.1.2", + "get-func-name": "^2.0.0", + "loupe": "^2.3.1", + "pathval": "^1.1.1", + "type-detect": "^4.0.5" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + "dependencies": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, + "node_modules/chalk/node_modules/supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/check-error": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/check-error/-/check-error-1.0.2.tgz", + "integrity": "sha512-BrgHpW9NURQgzoNyjfq0Wu6VFO6D7IZEmJNdtgNqpzGG8RuNFHt2jQxWlAs4HMe119chBnv+34syEZtc6IhLtA==", + "dev": true, + "engines": { + "node": "*" + } + }, + "node_modules/check-types": { + "version": "11.2.2", + "resolved": "https://registry.npmjs.org/check-types/-/check-types-11.2.2.tgz", + "integrity": 
"sha512-HBiYvXvn9Z70Z88XKjz3AEKd4HJhBXsa3j7xFnITAzoS8+q6eIGi8qDB8FKPBAjtuxjI/zFpwuiCb8oDtKOYrA==", + "dev": true + }, + "node_modules/child_process": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/child_process/-/child_process-1.0.2.tgz", + "integrity": "sha512-Wmza/JzL0SiWz7kl6MhIKT5ceIlnFPJX+lwUGj7Clhy5MMldsSoJR0+uvRzOS5Kv45Mq7t1PoE8TsOA9bzvb6g==", + "dev": true + }, + "node_modules/chokidar": { + "version": "3.5.3", + "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-3.5.3.tgz", + "integrity": "sha512-Dr3sfKRP6oTcjf2JmUmFJfeVMvXBdegxB0iVQ5eb2V10uFJUCAS8OByZdVAyVb8xXNz3GjjTgj9kLWsZTqE6kw==", + "dev": true, + "funding": [ + { + "type": "individual", + "url": "https://paulmillr.com/funding/" + } + ], + "dependencies": { + "anymatch": "~3.1.2", + "braces": "~3.0.2", + "glob-parent": "~5.1.2", + "is-binary-path": "~2.1.0", + "is-glob": "~4.0.1", + "normalize-path": "~3.0.0", + "readdirp": "~3.6.0" + }, + "engines": { + "node": ">= 8.10.0" + }, + "optionalDependencies": { + "fsevents": "~2.3.2" + } + }, + "node_modules/circom_runtime": { + "version": "0.1.21", + "resolved": "https://registry.npmjs.org/circom_runtime/-/circom_runtime-0.1.21.tgz", + "integrity": "sha512-qTkud630B/GK8y76hnOaaS1aNuF6prfV0dTrkeRsiJKnlP1ryQbP2FWLgDOPqn6aKyaPlam+Z+DTbBhkEzh8dA==", + "dev": true, + "dependencies": { + "ffjavascript": "0.2.56" + }, + "bin": { + "calcwit": "calcwit.js" + } + }, + "node_modules/circom_runtime/node_modules/ffjavascript": { + "version": "0.2.56", + "resolved": "https://registry.npmjs.org/ffjavascript/-/ffjavascript-0.2.56.tgz", + "integrity": "sha512-em6G5Lrj7ucIqj4TYEgyoHs/j99Urwwqa4+YxEVY2hggnpRimVj+noX5pZQTxI1pvtiekZI4rG65JBf0xraXrg==", + "dev": true, + "dependencies": { + "wasmbuilder": "0.0.16", + "wasmcurves": "0.2.0", + "web-worker": "^1.2.0" + } + }, + "node_modules/circom_tester": { + "version": "0.0.19", + "resolved": "https://registry.npmjs.org/circom_tester/-/circom_tester-0.0.19.tgz", + "integrity": 
"sha512-SNHaBsGxcBH6XsVWfsRbRPA7NF8m8AMKJI9dtJJCFGUtOTT2+zsoIqAwi50z6XCnO4TtjyXq7AeXa1PLHqT0tw==", + "dev": true, + "dependencies": { + "chai": "^4.3.6", + "child_process": "^1.0.2", + "ffjavascript": "^0.2.56", + "fnv-plus": "^1.3.1", + "r1csfile": "^0.0.41", + "snarkjs": "0.5.0", + "tmp-promise": "^3.0.3", + "util": "^0.12.4" + } + }, + "node_modules/circomlib-ml": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/circomlib-ml/-/circomlib-ml-2.1.0.tgz", + "integrity": "sha512-ROoT/siaxxqkXHHCNMtBoo0ekkzbwmK1IKfHyMoOC14vNiZeqCivAGIOQ0IF47+F4HlQVF1QS9FexaHYQxMIHA==", + "dev": true + }, + "node_modules/cliui": { + "version": "7.0.4", + "resolved": "https://registry.npmjs.org/cliui/-/cliui-7.0.4.tgz", + "integrity": "sha512-OcRE68cOsVMXp1Yvonl/fzkQOyjLSu/8bhPDfQt0e0/Eb283TKP20Fs2MqoPsr9SwA595rRCA+QMzYc9nBP+JQ==", + "dev": true, + "dependencies": { + "string-width": "^4.2.0", + "strip-ansi": "^6.0.0", + "wrap-ansi": "^7.0.0" + } + }, + "node_modules/color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "dependencies": { + "color-name": "~1.1.4" + }, + "engines": { + "node": ">=7.0.0" + } + }, + "node_modules/color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true + }, + "node_modules/concat-map": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", + "integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==", + "dev": true + }, + "node_modules/debug": { + "version": "4.3.4", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", + "integrity": 
"sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==", + "dev": true, + "dependencies": { + "ms": "2.1.2" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + "node_modules/debug/node_modules/ms": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==", + "dev": true + }, + "node_modules/decamelize": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/decamelize/-/decamelize-4.0.0.tgz", + "integrity": "sha512-9iE1PgSik9HeIIw2JO94IidnE3eBoQrFJ3w7sFuzSX4DpmZ3v5sZpUiV5Swcf6mQEF+Y0ru8Neo+p+nyh2J+hQ==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/deep-eql": { + "version": "4.1.3", + "resolved": "https://registry.npmjs.org/deep-eql/-/deep-eql-4.1.3.tgz", + "integrity": "sha512-WaEtAOpRA1MQ0eohqZjpGD8zdI0Ovsm8mmFhaDN8dvDZzyoUMcYDnf5Y6iu7HTXxf8JDS23qWa4a+hKCDyOPzw==", + "dev": true, + "dependencies": { + "type-detect": "^4.0.0" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/diff": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/diff/-/diff-5.0.0.tgz", + "integrity": "sha512-/VTCrvm5Z0JGty/BWHljh+BAiw3IK+2j87NGMu8Nwc/f48WoDAC395uomO9ZD117ZOBaHmkX1oyLvkVM/aIT3w==", + "dev": true, + "engines": { + "node": ">=0.3.1" + } + }, + "node_modules/ejs": { + "version": "3.1.8", + "resolved": "https://registry.npmjs.org/ejs/-/ejs-3.1.8.tgz", + "integrity": "sha512-/sXZeMlhS0ArkfX2Aw780gJzXSMPnKjtspYZv+f3NiKLlubezAHDU5+9xz6gd3/NhG3txQCo6xlglmTS+oTGEQ==", + "dev": true, + "dependencies": { + "jake": "^10.8.5" + }, + "bin": { + "ejs": "bin/cli.js" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/emoji-regex": { + "version": "8.0.0", + "resolved": 
"https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", + "dev": true + }, + "node_modules/escalade": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.1.1.tgz", + "integrity": "sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/escape-string-regexp": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz", + "integrity": "sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/fastfile": { + "version": "0.0.20", + "resolved": "https://registry.npmjs.org/fastfile/-/fastfile-0.0.20.tgz", + "integrity": "sha512-r5ZDbgImvVWCP0lA/cGNgQcZqR+aYdFx3u+CtJqUE510pBUVGMn4ulL/iRTI4tACTYsNJ736uzFxEBXesPAktA==", + "dev": true + }, + "node_modules/ffjavascript": { + "version": "0.2.57", + "resolved": "https://registry.npmjs.org/ffjavascript/-/ffjavascript-0.2.57.tgz", + "integrity": "sha512-V+vxZ/zPNcthrWmqfe/1YGgqdkTamJeXiED0tsk7B84g40DKlrTdx47IqZuiygqAVG6zMw4qYuvXftIJWsmfKQ==", + "dev": true, + "dependencies": { + "wasmbuilder": "0.0.16", + "wasmcurves": "0.2.0", + "web-worker": "^1.2.0" + } + }, + "node_modules/filelist": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/filelist/-/filelist-1.0.4.tgz", + "integrity": "sha512-w1cEuf3S+DrLCQL7ET6kz+gmlJdbq9J7yXCSjK/OZCPA+qEN1WyF4ZAf0YYJa4/shHJra2t/d/r8SV4Ji+x+8Q==", + "dev": true, + "dependencies": { + "minimatch": "^5.0.1" + } + }, + "node_modules/fill-range": { + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz", + "integrity": 
"sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==", + "dev": true, + "dependencies": { + "to-regex-range": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/find-up": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-5.0.0.tgz", + "integrity": "sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng==", + "dev": true, + "dependencies": { + "locate-path": "^6.0.0", + "path-exists": "^4.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/flat": { + "version": "5.0.2", + "resolved": "https://registry.npmjs.org/flat/-/flat-5.0.2.tgz", + "integrity": "sha512-b6suED+5/3rTpUBdG1gupIl8MPFCAMA0QXwmljLhvCUKcUvdE4gWky9zpuGCcXHOsz4J9wPGNWq6OKpmIzz3hQ==", + "dev": true, + "bin": { + "flat": "cli.js" + } + }, + "node_modules/fnv-plus": { + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/fnv-plus/-/fnv-plus-1.3.1.tgz", + "integrity": "sha512-Gz1EvfOneuFfk4yG458dJ3TLJ7gV19q3OM/vVvvHf7eT02Hm1DleB4edsia6ahbKgAYxO9gvyQ1ioWZR+a00Yw==", + "dev": true + }, + "node_modules/for-each": { + "version": "0.3.3", + "resolved": "https://registry.npmjs.org/for-each/-/for-each-0.3.3.tgz", + "integrity": "sha512-jqYfLp7mo9vIyQf8ykW2v7A+2N4QjeCeI5+Dz9XraiO1ign81wjiH7Fb9vSOWvQfNtmSa4H2RoQTrrXivdUZmw==", + "dev": true, + "dependencies": { + "is-callable": "^1.1.3" + } + }, + "node_modules/fs.realpath": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", + "integrity": "sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==", + "dev": true + }, + "node_modules/fsevents": { + "version": "2.3.2", + "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.2.tgz", + "integrity": 
"sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA==", + "dev": true, + "hasInstallScript": true, + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": "^8.16.0 || ^10.6.0 || >=11.0.0" + } + }, + "node_modules/function-bind": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.1.tgz", + "integrity": "sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A==", + "dev": true + }, + "node_modules/get-caller-file": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz", + "integrity": "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==", + "dev": true, + "engines": { + "node": "6.* || 8.* || >= 10.*" + } + }, + "node_modules/get-func-name": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/get-func-name/-/get-func-name-2.0.0.tgz", + "integrity": "sha512-Hm0ixYtaSZ/V7C8FJrtZIuBBI+iSgL+1Aq82zSu8VQNB4S3Gk8e7Qs3VwBDJAhmRZcFqkl3tQu36g/Foh5I5ig==", + "dev": true, + "engines": { + "node": "*" + } + }, + "node_modules/get-intrinsic": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.1.3.tgz", + "integrity": "sha512-QJVz1Tj7MS099PevUG5jvnt9tSkXN8K14dxQlikJuPt4uD9hHAHjLyLBiLR5zELelBdD9QNRAXZzsJx0WaDL9A==", + "dev": true, + "dependencies": { + "function-bind": "^1.1.1", + "has": "^1.0.3", + "has-symbols": "^1.0.3" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/glob": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.0.tgz", + "integrity": "sha512-lmLf6gtyrPq8tTjSmrO94wBeQbFR3HbLHbuyD69wuyQkImp2hWqMGB47OX65FBkPffO641IP9jWa1z4ivqG26Q==", + "dev": true, + "dependencies": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.0.4", + "once": "^1.3.0", + 
"path-is-absolute": "^1.0.0" + }, + "engines": { + "node": "*" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/glob-parent": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", + "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", + "dev": true, + "dependencies": { + "is-glob": "^4.0.1" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/glob/node_modules/brace-expansion": { + "version": "1.1.11", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", + "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "dev": true, + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "node_modules/glob/node_modules/minimatch": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", + "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "dev": true, + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" + } + }, + "node_modules/gopd": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/gopd/-/gopd-1.0.1.tgz", + "integrity": "sha512-d65bNlIadxvpb/A2abVdlqKqV563juRnZ1Wtk6s1sIR8uNsXR70xqIzVqxVf1eTqDunwT2MkczEeaezCKTZhwA==", + "dev": true, + "dependencies": { + "get-intrinsic": "^1.1.3" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/has": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/has/-/has-1.0.3.tgz", + "integrity": "sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw==", + "dev": true, + "dependencies": { + "function-bind": "^1.1.1" + }, + "engines": { + "node": ">= 0.4.0" + } + }, + "node_modules/has-flag": { + "version": "4.0.0", + 
"resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/has-symbols": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.0.3.tgz", + "integrity": "sha512-l3LCuF6MgDNwTDKkdYGEihYjt5pRPbEg46rtlmnSPlUbgmB8LOIrKJbYYFBSbnPaJexMKtiPO8hmeRjRz2Td+A==", + "dev": true, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/has-tostringtag": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/has-tostringtag/-/has-tostringtag-1.0.0.tgz", + "integrity": "sha512-kFjcSNhnlGV1kyoGk7OXKSawH5JOb/LzUc5w9B02hOTO0dfFRjbHQKvg1d6cf3HbeUmtU9VbbV3qzZ2Teh97WQ==", + "dev": true, + "dependencies": { + "has-symbols": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/he": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/he/-/he-1.2.0.tgz", + "integrity": "sha512-F/1DnUGPopORZi0ni+CvrCgHQ5FyEAHRLSApuYWMmrbSwoN2Mn/7k+Gl38gJnR7yyDZk6WLXwiGod1JOWNDKGw==", + "dev": true, + "bin": { + "he": "bin/he" + } + }, + "node_modules/hoopy": { + "version": "0.1.4", + "resolved": "https://registry.npmjs.org/hoopy/-/hoopy-0.1.4.tgz", + "integrity": "sha512-HRcs+2mr52W0K+x8RzcLzuPPmVIKMSv97RGHy0Ea9y/mpcaK+xTrjICA04KAHi4GRzxliNqNJEFYWHghy3rSfQ==", + "dev": true, + "engines": { + "node": ">= 6.0.0" + } + }, + "node_modules/inflight": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", + "integrity": "sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==", + "dev": true, + "dependencies": { + "once": "^1.3.0", + "wrappy": "1" + } + }, + "node_modules/inherits": { + "version": "2.0.4", + "resolved": 
"https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", + "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==", + "dev": true + }, + "node_modules/is-arguments": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/is-arguments/-/is-arguments-1.1.1.tgz", + "integrity": "sha512-8Q7EARjzEnKpt/PCD7e1cgUS0a6X8u5tdSiMqXhojOdoV9TsMsiO+9VLC5vAmO8N7/GmXn7yjR8qnA6bVAEzfA==", + "dev": true, + "dependencies": { + "call-bind": "^1.0.2", + "has-tostringtag": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-binary-path": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/is-binary-path/-/is-binary-path-2.1.0.tgz", + "integrity": "sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw==", + "dev": true, + "dependencies": { + "binary-extensions": "^2.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/is-callable": { + "version": "1.2.7", + "resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.2.7.tgz", + "integrity": "sha512-1BC0BVFhS/p0qtw6enp8e+8OD0UrK0oFLztSjNzhcKA3WDuJxxAPXzPuPtKkjEY9UUoEWlX/8fgKeu2S8i9JTA==", + "dev": true, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-extglob": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", + "integrity": "sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-fullwidth-code-point": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", + "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", + "dev": true, + 
"engines": { + "node": ">=8" + } + }, + "node_modules/is-generator-function": { + "version": "1.0.10", + "resolved": "https://registry.npmjs.org/is-generator-function/-/is-generator-function-1.0.10.tgz", + "integrity": "sha512-jsEjy9l3yiXEQ+PsXdmBwEPcOxaXWLspKdplFUVI9vq1iZgIekeC0L167qeu86czQaxed3q/Uzuw0swL0irL8A==", + "dev": true, + "dependencies": { + "has-tostringtag": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-glob": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz", + "integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==", + "dev": true, + "dependencies": { + "is-extglob": "^2.1.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-number": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", + "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", + "dev": true, + "engines": { + "node": ">=0.12.0" + } + }, + "node_modules/is-plain-obj": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/is-plain-obj/-/is-plain-obj-2.1.0.tgz", + "integrity": "sha512-YWnfyRwxL/+SsrWYfOpUtz5b3YD+nyfkHvjbcanzk8zgyO4ASD67uVMRt8k5bM4lLMDnXfriRhOpemw+NfT1eA==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/is-typed-array": { + "version": "1.1.10", + "resolved": "https://registry.npmjs.org/is-typed-array/-/is-typed-array-1.1.10.tgz", + "integrity": "sha512-PJqgEHiWZvMpaFZ3uTc8kHPM4+4ADTlDniuQL7cU/UDA0Ql7F70yGfHph3cLNe+c9toaigv+DFzTJKhc2CtO6A==", + "dev": true, + "dependencies": { + "available-typed-arrays": "^1.0.5", + "call-bind": "^1.0.2", + "for-each": "^0.3.3", + "gopd": "^1.0.1", + "has-tostringtag": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + 
"node_modules/is-unicode-supported": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/is-unicode-supported/-/is-unicode-supported-0.1.0.tgz", + "integrity": "sha512-knxG2q4UC3u8stRGyAVJCOdxFmv5DZiRcdlIaAQXAbSfJya+OhopNotLQrstBhququ4ZpuKbDc/8S6mgXgPFPw==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/jake": { + "version": "10.8.5", + "resolved": "https://registry.npmjs.org/jake/-/jake-10.8.5.tgz", + "integrity": "sha512-sVpxYeuAhWt0OTWITwT98oyV0GsXyMlXCF+3L1SuafBVUIr/uILGRB+NqwkzhgXKvoJpDIpQvqkUALgdmQsQxw==", + "dev": true, + "dependencies": { + "async": "^3.2.3", + "chalk": "^4.0.2", + "filelist": "^1.0.1", + "minimatch": "^3.0.4" + }, + "bin": { + "jake": "bin/cli.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/jake/node_modules/brace-expansion": { + "version": "1.1.11", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", + "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "dev": true, + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "node_modules/jake/node_modules/minimatch": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", + "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "dev": true, + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" + } + }, + "node_modules/js-sha3": { + "version": "0.8.0", + "resolved": "https://registry.npmjs.org/js-sha3/-/js-sha3-0.8.0.tgz", + "integrity": "sha512-gF1cRrHhIzNfToc802P800N8PpXS+evLLXfsVpowqmAFR9uwbi89WvXg2QspOmXL8QL86J4T1EpFu+yUkwJY3Q==", + "dev": true + }, + "node_modules/js-yaml": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.0.tgz", + "integrity": 
"sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==", + "dev": true, + "dependencies": { + "argparse": "^2.0.1" + }, + "bin": { + "js-yaml": "bin/js-yaml.js" + } + }, + "node_modules/locate-path": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-6.0.0.tgz", + "integrity": "sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw==", + "dev": true, + "dependencies": { + "p-locate": "^5.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/log-symbols": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/log-symbols/-/log-symbols-4.1.0.tgz", + "integrity": "sha512-8XPvpAA8uyhfteu8pIvQxpJZ7SYYdpUivZpGy6sFsBuKRY/7rQGavedeB8aK+Zkyq6upMFVL/9AW6vOYzfRyLg==", + "dev": true, + "dependencies": { + "chalk": "^4.1.0", + "is-unicode-supported": "^0.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/logplease": { + "version": "1.2.15", + "resolved": "https://registry.npmjs.org/logplease/-/logplease-1.2.15.tgz", + "integrity": "sha512-jLlHnlsPSJjpwUfcNyUxXCl33AYg2cHhIf9QhGL2T4iPT0XPB+xP1LRKFPgIg1M/sg9kAJvy94w9CzBNrfnstA==", + "dev": true + }, + "node_modules/loupe": { + "version": "2.3.6", + "resolved": "https://registry.npmjs.org/loupe/-/loupe-2.3.6.tgz", + "integrity": "sha512-RaPMZKiMy8/JruncMU5Bt6na1eftNoo++R4Y+N2FrxkDVTrGvcyzFTsaGif4QTeKESheMGegbhw6iUAq+5A8zA==", + "dev": true, + "dependencies": { + "get-func-name": "^2.0.0" + } + }, + "node_modules/minimatch": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-5.0.1.tgz", + "integrity": "sha512-nLDxIFRyhDblz3qMuq+SoRZED4+miJ/G+tdDrjkkkRnjAsBexeGpgjLEQ0blJy7rHhR2b93rhQY4SvyWu9v03g==", + "dev": true, + "dependencies": { + "brace-expansion": "^2.0.1" + }, + "engines": { + "node": 
">=10" + } + }, + "node_modules/mocha": { + "version": "10.2.0", + "resolved": "https://registry.npmjs.org/mocha/-/mocha-10.2.0.tgz", + "integrity": "sha512-IDY7fl/BecMwFHzoqF2sg/SHHANeBoMMXFlS9r0OXKDssYE1M5O43wUY/9BVPeIvfH2zmEbBfseqN9gBQZzXkg==", + "dev": true, + "dependencies": { + "ansi-colors": "4.1.1", + "browser-stdout": "1.3.1", + "chokidar": "3.5.3", + "debug": "4.3.4", + "diff": "5.0.0", + "escape-string-regexp": "4.0.0", + "find-up": "5.0.0", + "glob": "7.2.0", + "he": "1.2.0", + "js-yaml": "4.1.0", + "log-symbols": "4.1.0", + "minimatch": "5.0.1", + "ms": "2.1.3", + "nanoid": "3.3.3", + "serialize-javascript": "6.0.0", + "strip-json-comments": "3.1.1", + "supports-color": "8.1.1", + "workerpool": "6.2.1", + "yargs": "16.2.0", + "yargs-parser": "20.2.4", + "yargs-unparser": "2.0.0" + }, + "bin": { + "_mocha": "bin/_mocha", + "mocha": "bin/mocha.js" + }, + "engines": { + "node": ">= 14.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/mochajs" + } + }, + "node_modules/ms": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", + "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==", + "dev": true + }, + "node_modules/nanoassert": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/nanoassert/-/nanoassert-2.0.0.tgz", + "integrity": "sha512-7vO7n28+aYO4J+8w96AzhmU8G+Y/xpPDJz/se19ICsqj/momRbb9mh9ZUtkoJ5X3nTnPdhEJyc0qnM6yAsHBaA==", + "dev": true + }, + "node_modules/nanoid": { + "version": "3.3.3", + "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.3.3.tgz", + "integrity": "sha512-p1sjXuopFs0xg+fPASzQ28agW1oHD7xDsd9Xkf3T15H3c/cifrFHVwrh74PdoklAPi+i7MdRsE47vm2r6JoB+w==", + "dev": true, + "bin": { + "nanoid": "bin/nanoid.cjs" + }, + "engines": { + "node": "^10 || ^12 || ^13.7 || ^14 || >=15.0.1" + } + }, + "node_modules/normalize-path": { + "version": "3.0.0", + "resolved": 
"https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz", + "integrity": "sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/once": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", + "integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==", + "dev": true, + "dependencies": { + "wrappy": "1" + } + }, + "node_modules/p-limit": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz", + "integrity": "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==", + "dev": true, + "dependencies": { + "yocto-queue": "^0.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/p-locate": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-5.0.0.tgz", + "integrity": "sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw==", + "dev": true, + "dependencies": { + "p-limit": "^3.0.2" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/path-exists": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz", + "integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/path-is-absolute": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", + "integrity": "sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + 
"node_modules/pathval": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/pathval/-/pathval-1.1.1.tgz", + "integrity": "sha512-Dp6zGqpTdETdR63lehJYPeIOqpiNBNtc7BpWSLrOje7UaIsE5aY92r/AunQA7rsXvet3lrJ3JnZX29UPTKXyKQ==", + "dev": true, + "engines": { + "node": "*" + } + }, + "node_modules/picomatch": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz", + "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==", + "dev": true, + "engines": { + "node": ">=8.6" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } + }, + "node_modules/r1csfile": { + "version": "0.0.41", + "resolved": "https://registry.npmjs.org/r1csfile/-/r1csfile-0.0.41.tgz", + "integrity": "sha512-Q1WDF3u1vYeAwjHo4YuddkA8Aq0TulbKjmGm99+Atn13Lf5fTsMZBnBV9T741w8iSyPFG6Uh6sapQby77sREqA==", + "dev": true, + "dependencies": { + "@iden3/bigarray": "0.0.2", + "@iden3/binfileutils": "0.0.11", + "fastfile": "0.0.20", + "ffjavascript": "0.2.56" + } + }, + "node_modules/r1csfile/node_modules/ffjavascript": { + "version": "0.2.56", + "resolved": "https://registry.npmjs.org/ffjavascript/-/ffjavascript-0.2.56.tgz", + "integrity": "sha512-em6G5Lrj7ucIqj4TYEgyoHs/j99Urwwqa4+YxEVY2hggnpRimVj+noX5pZQTxI1pvtiekZI4rG65JBf0xraXrg==", + "dev": true, + "dependencies": { + "wasmbuilder": "0.0.16", + "wasmcurves": "0.2.0", + "web-worker": "^1.2.0" + } + }, + "node_modules/randombytes": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/randombytes/-/randombytes-2.1.0.tgz", + "integrity": "sha512-vYl3iOX+4CKUWuxGi9Ukhie6fsqXqS9FE2Zaic4tNFD2N2QQaXOMFbuKK4QmDHC0JO6B1Zp41J0LpT0oR68amQ==", + "dev": true, + "dependencies": { + "safe-buffer": "^5.1.0" + } + }, + "node_modules/readdirp": { + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-3.6.0.tgz", + "integrity": 
"sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA==", + "dev": true, + "dependencies": { + "picomatch": "^2.2.1" + }, + "engines": { + "node": ">=8.10.0" + } + }, + "node_modules/require-directory": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz", + "integrity": "sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/rimraf": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-3.0.2.tgz", + "integrity": "sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==", + "dev": true, + "dependencies": { + "glob": "^7.1.3" + }, + "bin": { + "rimraf": "bin.js" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/safe-buffer": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", + "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ] + }, + "node_modules/serialize-javascript": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/serialize-javascript/-/serialize-javascript-6.0.0.tgz", + "integrity": "sha512-Qr3TosvguFt8ePWqsvRfrKyQXIiW+nGbYpy8XK24NQHE83caxWt+mIymTT19DGFbNWNLfEwsrkSmN64lVWB9ag==", + "dev": true, + "dependencies": { + "randombytes": "^2.1.0" + } + }, + "node_modules/snarkjs": { + "version": "0.5.0", + "resolved": "https://registry.npmjs.org/snarkjs/-/snarkjs-0.5.0.tgz", + "integrity": 
"sha512-KWz8mZ2Y+6wvn6GGkQo6/ZlKwETdAGohd40Lzpwp5TUZCn6N6O4Az1SuX1rw/qREGL6Im+ycb19suCFE8/xaKA==", + "dev": true, + "dependencies": { + "@iden3/binfileutils": "0.0.11", + "bfj": "^7.0.2", + "blake2b-wasm": "^2.4.0", + "circom_runtime": "0.1.21", + "ejs": "^3.1.6", + "fastfile": "0.0.20", + "ffjavascript": "0.2.56", + "js-sha3": "^0.8.0", + "logplease": "^1.2.15", + "r1csfile": "0.0.41" + }, + "bin": { + "snarkjs": "build/cli.cjs" + } + }, + "node_modules/snarkjs/node_modules/ffjavascript": { + "version": "0.2.56", + "resolved": "https://registry.npmjs.org/ffjavascript/-/ffjavascript-0.2.56.tgz", + "integrity": "sha512-em6G5Lrj7ucIqj4TYEgyoHs/j99Urwwqa4+YxEVY2hggnpRimVj+noX5pZQTxI1pvtiekZI4rG65JBf0xraXrg==", + "dev": true, + "dependencies": { + "wasmbuilder": "0.0.16", + "wasmcurves": "0.2.0", + "web-worker": "^1.2.0" + } + }, + "node_modules/string-width": { + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "dev": true, + "dependencies": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/strip-ansi": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "dev": true, + "dependencies": { + "ansi-regex": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/strip-json-comments": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.1.1.tgz", + "integrity": "sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==", + "dev": true, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" 
+ } + }, + "node_modules/supports-color": { + "version": "8.1.1", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-8.1.1.tgz", + "integrity": "sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q==", + "dev": true, + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/supports-color?sponsor=1" + } + }, + "node_modules/tmp": { + "version": "0.2.1", + "resolved": "https://registry.npmjs.org/tmp/-/tmp-0.2.1.tgz", + "integrity": "sha512-76SUhtfqR2Ijn+xllcI5P1oyannHNHByD80W1q447gU3mp9G9PSpGdWmjUOHRDPiHYacIk66W7ubDTuPF3BEtQ==", + "dev": true, + "dependencies": { + "rimraf": "^3.0.0" + }, + "engines": { + "node": ">=8.17.0" + } + }, + "node_modules/tmp-promise": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/tmp-promise/-/tmp-promise-3.0.3.tgz", + "integrity": "sha512-RwM7MoPojPxsOBYnyd2hy0bxtIlVrihNs9pj5SUvY8Zz1sQcQG2tG1hSr8PDxfgEB8RNKDhqbIlroIarSNDNsQ==", + "dev": true, + "dependencies": { + "tmp": "^0.2.0" + } + }, + "node_modules/to-regex-range": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", + "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", + "dev": true, + "dependencies": { + "is-number": "^7.0.0" + }, + "engines": { + "node": ">=8.0" + } + }, + "node_modules/tryer": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/tryer/-/tryer-1.0.1.tgz", + "integrity": "sha512-c3zayb8/kWWpycWYg87P71E1S1ZL6b6IJxfb5fvsUgsf0S2MVGaDhDXXjDMpdCpfWXqptc+4mXwmiy1ypXqRAA==", + "dev": true + }, + "node_modules/type-detect": { + "version": "4.0.8", + "resolved": "https://registry.npmjs.org/type-detect/-/type-detect-4.0.8.tgz", + "integrity": "sha512-0fr/mIH1dlO+x7TlcMy+bIDqKPsw/70tVyeHW787goQjhmqaZe10uwLujubK9q9Lg6Fiho1KUKDYz0Z7k7g5/g==", + "dev": true, + "engines": { + "node": ">=4" + 
} + }, + "node_modules/util": { + "version": "0.12.5", + "resolved": "https://registry.npmjs.org/util/-/util-0.12.5.tgz", + "integrity": "sha512-kZf/K6hEIrWHI6XqOFUiiMa+79wE/D8Q+NCNAWclkyg3b4d2k7s0QGepNjiABc+aR3N1PAyHL7p6UcLY6LmrnA==", + "dev": true, + "dependencies": { + "inherits": "^2.0.3", + "is-arguments": "^1.0.4", + "is-generator-function": "^1.0.7", + "is-typed-array": "^1.1.3", + "which-typed-array": "^1.1.2" + } + }, + "node_modules/wasmbuilder": { + "version": "0.0.16", + "resolved": "https://registry.npmjs.org/wasmbuilder/-/wasmbuilder-0.0.16.tgz", + "integrity": "sha512-Qx3lEFqaVvp1cEYW7Bfi+ebRJrOiwz2Ieu7ZG2l7YyeSJIok/reEQCQCuicj/Y32ITIJuGIM9xZQppGx5LrQdA==", + "dev": true + }, + "node_modules/wasmcurves": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/wasmcurves/-/wasmcurves-0.2.0.tgz", + "integrity": "sha512-3e2rbxdujOwaod657gxgmdhZNn+i1qKdHO3Y/bK+8E7bV8ttV/fu5FO4/WLBACF375cK0QDLOP+65Na63qYuWA==", + "dev": true, + "dependencies": { + "wasmbuilder": "0.0.16" + } + }, + "node_modules/web-worker": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/web-worker/-/web-worker-1.2.0.tgz", + "integrity": "sha512-PgF341avzqyx60neE9DD+XS26MMNMoUQRz9NOZwW32nPQrF6p77f1htcnjBSEV8BGMKZ16choqUG4hyI0Hx7mA==", + "dev": true + }, + "node_modules/which-typed-array": { + "version": "1.1.9", + "resolved": "https://registry.npmjs.org/which-typed-array/-/which-typed-array-1.1.9.tgz", + "integrity": "sha512-w9c4xkx6mPidwp7180ckYWfMmvxpjlZuIudNtDf4N/tTAUB8VJbX25qZoAsrtGuYNnGw3pa0AXgbGKRB8/EceA==", + "dev": true, + "dependencies": { + "available-typed-arrays": "^1.0.5", + "call-bind": "^1.0.2", + "for-each": "^0.3.3", + "gopd": "^1.0.1", + "has-tostringtag": "^1.0.0", + "is-typed-array": "^1.1.10" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/workerpool": { + "version": "6.2.1", + "resolved": "https://registry.npmjs.org/workerpool/-/workerpool-6.2.1.tgz", + 
"integrity": "sha512-ILEIE97kDZvF9Wb9f6h5aXK4swSlKGUcOEGiIYb2OOu/IrDU9iwj0fD//SsA6E5ibwJxpEvhullJY4Sl4GcpAw==", + "dev": true + }, + "node_modules/wrap-ansi": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", + "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", + "dev": true, + "dependencies": { + "ansi-styles": "^4.0.0", + "string-width": "^4.1.0", + "strip-ansi": "^6.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/wrap-ansi?sponsor=1" + } + }, + "node_modules/wrappy": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", + "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==", + "dev": true + }, + "node_modules/y18n": { + "version": "5.0.8", + "resolved": "https://registry.npmjs.org/y18n/-/y18n-5.0.8.tgz", + "integrity": "sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA==", + "dev": true, + "engines": { + "node": ">=10" + } + }, + "node_modules/yargs": { + "version": "16.2.0", + "resolved": "https://registry.npmjs.org/yargs/-/yargs-16.2.0.tgz", + "integrity": "sha512-D1mvvtDG0L5ft/jGWkLpG1+m0eQxOfaBvTNELraWj22wSVUMWxZUvYgJYcKh6jGGIkJFhH4IZPQhR4TKpc8mBw==", + "dev": true, + "dependencies": { + "cliui": "^7.0.2", + "escalade": "^3.1.1", + "get-caller-file": "^2.0.5", + "require-directory": "^2.1.1", + "string-width": "^4.2.0", + "y18n": "^5.0.5", + "yargs-parser": "^20.2.2" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/yargs-parser": { + "version": "20.2.4", + "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-20.2.4.tgz", + "integrity": "sha512-WOkpgNhPTlE73h4VFAFsOnomJVaovO8VqLDzy5saChRBFQFBoMYirowyW+Q9HB4HFF4Z7VZTiG3iSzJJA29yRA==", + "dev": true, + "engines": { + "node": ">=10" + } + }, + "node_modules/yargs-unparser": { + "version": 
"2.0.0", + "resolved": "https://registry.npmjs.org/yargs-unparser/-/yargs-unparser-2.0.0.tgz", + "integrity": "sha512-7pRTIA9Qc1caZ0bZ6RYRGbHJthJWuakf+WmHK0rVeLkNrrGhfoabBNdue6kdINI6r4if7ocq9aD/n7xwKOdzOA==", + "dev": true, + "dependencies": { + "camelcase": "^6.0.0", + "decamelize": "^4.0.0", + "flat": "^5.0.2", + "is-plain-obj": "^2.1.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/yocto-queue": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-0.1.0.tgz", + "integrity": "sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + } + }, + "dependencies": { + "@iden3/bigarray": { + "version": "0.0.2", + "resolved": "https://registry.npmjs.org/@iden3/bigarray/-/bigarray-0.0.2.tgz", + "integrity": "sha512-Xzdyxqm1bOFF6pdIsiHLLl3HkSLjbhqJHVyqaTxXt3RqXBEnmsUmEW47H7VOi/ak7TdkRpNkxjyK5Zbkm+y52g==", + "dev": true + }, + "@iden3/binfileutils": { + "version": "0.0.11", + "resolved": "https://registry.npmjs.org/@iden3/binfileutils/-/binfileutils-0.0.11.tgz", + "integrity": "sha512-LylnJoZ0CTdgErnKY8OxohvW4K+p6UHD3sxt+3P9AmMyBQjYR4IpoqoYZZ+9aMj89cmCQ21UvdhndAx04er3NA==", + "dev": true, + "requires": { + "fastfile": "0.0.20", + "ffjavascript": "^0.2.48" + } + }, + "ansi-colors": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/ansi-colors/-/ansi-colors-4.1.1.tgz", + "integrity": "sha512-JoX0apGbHaUJBNl6yF+p6JAFYZ666/hhCGKN5t9QFjbJQKUU/g8MNbFDbvfrgKXvI1QpZplPOnwIo99lX/AAmA==", + "dev": true + }, + "ansi-regex": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", + "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", + "dev": true + }, + "ansi-styles": { + "version": "4.3.0", + "resolved": 
"https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "requires": { + "color-convert": "^2.0.1" + } + }, + "anymatch": { + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/anymatch/-/anymatch-3.1.3.tgz", + "integrity": "sha512-KMReFUr0B4t+D+OBkjR3KYqvocp2XaSzO55UcB6mgQMd3KbcE+mWTyvVV7D/zsdEbNnV6acZUutkiHQXvTr1Rw==", + "dev": true, + "requires": { + "normalize-path": "^3.0.0", + "picomatch": "^2.0.4" + } + }, + "argparse": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz", + "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==", + "dev": true + }, + "assertion-error": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/assertion-error/-/assertion-error-1.1.0.tgz", + "integrity": "sha512-jgsaNduz+ndvGyFt3uSuWqvy4lCnIJiovtouQN5JZHOKCS2QuhEdbcQHFhVksz2N2U9hXJo8odG7ETyWlEeuDw==", + "dev": true + }, + "async": { + "version": "3.2.4", + "resolved": "https://registry.npmjs.org/async/-/async-3.2.4.tgz", + "integrity": "sha512-iAB+JbDEGXhyIUavoDl9WP/Jj106Kz9DEn1DPgYw5ruDn0e3Wgi3sKFm55sASdGBNOQB8F59d9qQ7deqrHA8wQ==", + "dev": true + }, + "available-typed-arrays": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/available-typed-arrays/-/available-typed-arrays-1.0.5.tgz", + "integrity": "sha512-DMD0KiN46eipeziST1LPP/STfDU0sufISXmjSgvVsoU2tqxctQeASejWcfNtxYKqETM1UxQ8sp2OrSBWpHY6sw==", + "dev": true + }, + "await-exec": { + "version": "0.1.2", + "resolved": "https://registry.npmjs.org/await-exec/-/await-exec-0.1.2.tgz", + "integrity": "sha512-BQUiyBLScS0+YPnnCZZGjb78mZ8sQ8aKgxarDPNw05rpbaCS7VIQSLy2tgjZKct9Dn1xLbKMXOpA98OWei90zA==", + "dev": true + }, + "b4a": { + "version": "1.6.1", + "resolved": "https://registry.npmjs.org/b4a/-/b4a-1.6.1.tgz", + "integrity": 
"sha512-AsKjNhz72yxteo/0EtQEiwkMUgk/tGmycXlbG4g3Ard2/ULtNLUykGOkeK0egmN27h0xMAhb76jYccW+XTBExA==", + "dev": true + }, + "balanced-match": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", + "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==", + "dev": true + }, + "bfj": { + "version": "7.0.2", + "resolved": "https://registry.npmjs.org/bfj/-/bfj-7.0.2.tgz", + "integrity": "sha512-+e/UqUzwmzJamNF50tBV6tZPTORow7gQ96iFow+8b562OdMpEK0BcJEq2OSPEDmAbSMBQ7PKZ87ubFkgxpYWgw==", + "dev": true, + "requires": { + "bluebird": "^3.5.5", + "check-types": "^11.1.1", + "hoopy": "^0.1.4", + "tryer": "^1.0.1" + } + }, + "binary-extensions": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/binary-extensions/-/binary-extensions-2.2.0.tgz", + "integrity": "sha512-jDctJ/IVQbZoJykoeHbhXpOlNBqGNcwXJKJog42E5HDPUwQTSdjCHdihjj0DlnheQ7blbT6dHOafNAiS8ooQKA==", + "dev": true + }, + "blake2b-wasm": { + "version": "2.4.0", + "resolved": "https://registry.npmjs.org/blake2b-wasm/-/blake2b-wasm-2.4.0.tgz", + "integrity": "sha512-S1kwmW2ZhZFFFOghcx73+ZajEfKBqhP82JMssxtLVMxlaPea1p9uoLiUZ5WYyHn0KddwbLc+0vh4wR0KBNoT5w==", + "dev": true, + "requires": { + "b4a": "^1.0.1", + "nanoassert": "^2.0.0" + } + }, + "bluebird": { + "version": "3.7.2", + "resolved": "https://registry.npmjs.org/bluebird/-/bluebird-3.7.2.tgz", + "integrity": "sha512-XpNj6GDQzdfW+r2Wnn7xiSAd7TM3jzkxGXBGTtWKuSXv1xUV+azxAm8jdWZN06QTQk+2N2XB9jRDkvbmQmcRtg==", + "dev": true + }, + "brace-expansion": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", + "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", + "dev": true, + "requires": { + "balanced-match": "^1.0.0" + } + }, + "braces": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz", + 
"integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==", + "dev": true, + "requires": { + "fill-range": "^7.0.1" + } + }, + "browser-stdout": { + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/browser-stdout/-/browser-stdout-1.3.1.tgz", + "integrity": "sha512-qhAVI1+Av2X7qelOfAIYwXONood6XlZE/fXaBSmW/T5SzLAmCgzi+eiWE7fUvbHaeNBQH13UftjpXxsfLkMpgw==", + "dev": true + }, + "call-bind": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/call-bind/-/call-bind-1.0.2.tgz", + "integrity": "sha512-7O+FbCihrB5WGbFYesctwmTKae6rOiIzmz1icreWJ+0aA7LJfuqhEso2T9ncpcFtzMQtzXf2QGGueWJGTYsqrA==", + "dev": true, + "requires": { + "function-bind": "^1.1.1", + "get-intrinsic": "^1.0.2" + } + }, + "camelcase": { + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-6.3.0.tgz", + "integrity": "sha512-Gmy6FhYlCY7uOElZUSbxo2UCDH8owEk996gkbrpsgGtrJLM3J7jGxl9Ic7Qwwj4ivOE5AWZWRMecDdF7hqGjFA==", + "dev": true + }, + "chai": { + "version": "4.3.7", + "resolved": "https://registry.npmjs.org/chai/-/chai-4.3.7.tgz", + "integrity": "sha512-HLnAzZ2iupm25PlN0xFreAlBA5zaBSv3og0DdeGA4Ar6h6rJ3A0rolRUKJhSF2V10GZKDgWF/VmAEsNWjCRB+A==", + "dev": true, + "requires": { + "assertion-error": "^1.1.0", + "check-error": "^1.0.2", + "deep-eql": "^4.1.2", + "get-func-name": "^2.0.0", + "loupe": "^2.3.1", + "pathval": "^1.1.1", + "type-detect": "^4.0.5" + } + }, + "chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + "requires": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "dependencies": { + "supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": 
"sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "requires": { + "has-flag": "^4.0.0" + } + } + } + }, + "check-error": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/check-error/-/check-error-1.0.2.tgz", + "integrity": "sha512-BrgHpW9NURQgzoNyjfq0Wu6VFO6D7IZEmJNdtgNqpzGG8RuNFHt2jQxWlAs4HMe119chBnv+34syEZtc6IhLtA==", + "dev": true + }, + "check-types": { + "version": "11.2.2", + "resolved": "https://registry.npmjs.org/check-types/-/check-types-11.2.2.tgz", + "integrity": "sha512-HBiYvXvn9Z70Z88XKjz3AEKd4HJhBXsa3j7xFnITAzoS8+q6eIGi8qDB8FKPBAjtuxjI/zFpwuiCb8oDtKOYrA==", + "dev": true + }, + "child_process": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/child_process/-/child_process-1.0.2.tgz", + "integrity": "sha512-Wmza/JzL0SiWz7kl6MhIKT5ceIlnFPJX+lwUGj7Clhy5MMldsSoJR0+uvRzOS5Kv45Mq7t1PoE8TsOA9bzvb6g==", + "dev": true + }, + "chokidar": { + "version": "3.5.3", + "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-3.5.3.tgz", + "integrity": "sha512-Dr3sfKRP6oTcjf2JmUmFJfeVMvXBdegxB0iVQ5eb2V10uFJUCAS8OByZdVAyVb8xXNz3GjjTgj9kLWsZTqE6kw==", + "dev": true, + "requires": { + "anymatch": "~3.1.2", + "braces": "~3.0.2", + "fsevents": "~2.3.2", + "glob-parent": "~5.1.2", + "is-binary-path": "~2.1.0", + "is-glob": "~4.0.1", + "normalize-path": "~3.0.0", + "readdirp": "~3.6.0" + } + }, + "circom_runtime": { + "version": "0.1.21", + "resolved": "https://registry.npmjs.org/circom_runtime/-/circom_runtime-0.1.21.tgz", + "integrity": "sha512-qTkud630B/GK8y76hnOaaS1aNuF6prfV0dTrkeRsiJKnlP1ryQbP2FWLgDOPqn6aKyaPlam+Z+DTbBhkEzh8dA==", + "dev": true, + "requires": { + "ffjavascript": "0.2.56" + }, + "dependencies": { + "ffjavascript": { + "version": "0.2.56", + "resolved": "https://registry.npmjs.org/ffjavascript/-/ffjavascript-0.2.56.tgz", + "integrity": "sha512-em6G5Lrj7ucIqj4TYEgyoHs/j99Urwwqa4+YxEVY2hggnpRimVj+noX5pZQTxI1pvtiekZI4rG65JBf0xraXrg==", + "dev": true, + 
"requires": { + "wasmbuilder": "0.0.16", + "wasmcurves": "0.2.0", + "web-worker": "^1.2.0" + } + } + } + }, + "circom_tester": { + "version": "0.0.19", + "resolved": "https://registry.npmjs.org/circom_tester/-/circom_tester-0.0.19.tgz", + "integrity": "sha512-SNHaBsGxcBH6XsVWfsRbRPA7NF8m8AMKJI9dtJJCFGUtOTT2+zsoIqAwi50z6XCnO4TtjyXq7AeXa1PLHqT0tw==", + "dev": true, + "requires": { + "chai": "^4.3.6", + "child_process": "^1.0.2", + "ffjavascript": "^0.2.56", + "fnv-plus": "^1.3.1", + "r1csfile": "^0.0.41", + "snarkjs": "0.5.0", + "tmp-promise": "^3.0.3", + "util": "^0.12.4" + } + }, + "circomlib-ml": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/circomlib-ml/-/circomlib-ml-2.1.0.tgz", + "integrity": "sha512-ROoT/siaxxqkXHHCNMtBoo0ekkzbwmK1IKfHyMoOC14vNiZeqCivAGIOQ0IF47+F4HlQVF1QS9FexaHYQxMIHA==", + "dev": true + }, + "cliui": { + "version": "7.0.4", + "resolved": "https://registry.npmjs.org/cliui/-/cliui-7.0.4.tgz", + "integrity": "sha512-OcRE68cOsVMXp1Yvonl/fzkQOyjLSu/8bhPDfQt0e0/Eb283TKP20Fs2MqoPsr9SwA595rRCA+QMzYc9nBP+JQ==", + "dev": true, + "requires": { + "string-width": "^4.2.0", + "strip-ansi": "^6.0.0", + "wrap-ansi": "^7.0.0" + } + }, + "color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "requires": { + "color-name": "~1.1.4" + } + }, + "color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true + }, + "concat-map": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", + "integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==", + "dev": true + }, + "debug": { 
+ "version": "4.3.4", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", + "integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==", + "dev": true, + "requires": { + "ms": "2.1.2" + }, + "dependencies": { + "ms": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==", + "dev": true + } + } + }, + "decamelize": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/decamelize/-/decamelize-4.0.0.tgz", + "integrity": "sha512-9iE1PgSik9HeIIw2JO94IidnE3eBoQrFJ3w7sFuzSX4DpmZ3v5sZpUiV5Swcf6mQEF+Y0ru8Neo+p+nyh2J+hQ==", + "dev": true + }, + "deep-eql": { + "version": "4.1.3", + "resolved": "https://registry.npmjs.org/deep-eql/-/deep-eql-4.1.3.tgz", + "integrity": "sha512-WaEtAOpRA1MQ0eohqZjpGD8zdI0Ovsm8mmFhaDN8dvDZzyoUMcYDnf5Y6iu7HTXxf8JDS23qWa4a+hKCDyOPzw==", + "dev": true, + "requires": { + "type-detect": "^4.0.0" + } + }, + "diff": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/diff/-/diff-5.0.0.tgz", + "integrity": "sha512-/VTCrvm5Z0JGty/BWHljh+BAiw3IK+2j87NGMu8Nwc/f48WoDAC395uomO9ZD117ZOBaHmkX1oyLvkVM/aIT3w==", + "dev": true + }, + "ejs": { + "version": "3.1.8", + "resolved": "https://registry.npmjs.org/ejs/-/ejs-3.1.8.tgz", + "integrity": "sha512-/sXZeMlhS0ArkfX2Aw780gJzXSMPnKjtspYZv+f3NiKLlubezAHDU5+9xz6gd3/NhG3txQCo6xlglmTS+oTGEQ==", + "dev": true, + "requires": { + "jake": "^10.8.5" + } + }, + "emoji-regex": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", + "dev": true + }, + "escalade": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.1.1.tgz", + "integrity": 
"sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw==", + "dev": true + }, + "escape-string-regexp": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz", + "integrity": "sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==", + "dev": true + }, + "fastfile": { + "version": "0.0.20", + "resolved": "https://registry.npmjs.org/fastfile/-/fastfile-0.0.20.tgz", + "integrity": "sha512-r5ZDbgImvVWCP0lA/cGNgQcZqR+aYdFx3u+CtJqUE510pBUVGMn4ulL/iRTI4tACTYsNJ736uzFxEBXesPAktA==", + "dev": true + }, + "ffjavascript": { + "version": "0.2.57", + "resolved": "https://registry.npmjs.org/ffjavascript/-/ffjavascript-0.2.57.tgz", + "integrity": "sha512-V+vxZ/zPNcthrWmqfe/1YGgqdkTamJeXiED0tsk7B84g40DKlrTdx47IqZuiygqAVG6zMw4qYuvXftIJWsmfKQ==", + "dev": true, + "requires": { + "wasmbuilder": "0.0.16", + "wasmcurves": "0.2.0", + "web-worker": "^1.2.0" + } + }, + "filelist": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/filelist/-/filelist-1.0.4.tgz", + "integrity": "sha512-w1cEuf3S+DrLCQL7ET6kz+gmlJdbq9J7yXCSjK/OZCPA+qEN1WyF4ZAf0YYJa4/shHJra2t/d/r8SV4Ji+x+8Q==", + "dev": true, + "requires": { + "minimatch": "^5.0.1" + } + }, + "fill-range": { + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz", + "integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==", + "dev": true, + "requires": { + "to-regex-range": "^5.0.1" + } + }, + "find-up": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-5.0.0.tgz", + "integrity": "sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng==", + "dev": true, + "requires": { + "locate-path": "^6.0.0", + "path-exists": "^4.0.0" + } + }, + "flat": { + "version": "5.0.2", + "resolved": 
"https://registry.npmjs.org/flat/-/flat-5.0.2.tgz", + "integrity": "sha512-b6suED+5/3rTpUBdG1gupIl8MPFCAMA0QXwmljLhvCUKcUvdE4gWky9zpuGCcXHOsz4J9wPGNWq6OKpmIzz3hQ==", + "dev": true + }, + "fnv-plus": { + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/fnv-plus/-/fnv-plus-1.3.1.tgz", + "integrity": "sha512-Gz1EvfOneuFfk4yG458dJ3TLJ7gV19q3OM/vVvvHf7eT02Hm1DleB4edsia6ahbKgAYxO9gvyQ1ioWZR+a00Yw==", + "dev": true + }, + "for-each": { + "version": "0.3.3", + "resolved": "https://registry.npmjs.org/for-each/-/for-each-0.3.3.tgz", + "integrity": "sha512-jqYfLp7mo9vIyQf8ykW2v7A+2N4QjeCeI5+Dz9XraiO1ign81wjiH7Fb9vSOWvQfNtmSa4H2RoQTrrXivdUZmw==", + "dev": true, + "requires": { + "is-callable": "^1.1.3" + } + }, + "fs.realpath": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", + "integrity": "sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==", + "dev": true + }, + "fsevents": { + "version": "2.3.2", + "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.2.tgz", + "integrity": "sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA==", + "dev": true, + "optional": true + }, + "function-bind": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.1.tgz", + "integrity": "sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A==", + "dev": true + }, + "get-caller-file": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz", + "integrity": "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==", + "dev": true + }, + "get-func-name": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/get-func-name/-/get-func-name-2.0.0.tgz", + "integrity": 
"sha512-Hm0ixYtaSZ/V7C8FJrtZIuBBI+iSgL+1Aq82zSu8VQNB4S3Gk8e7Qs3VwBDJAhmRZcFqkl3tQu36g/Foh5I5ig==", + "dev": true + }, + "get-intrinsic": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.1.3.tgz", + "integrity": "sha512-QJVz1Tj7MS099PevUG5jvnt9tSkXN8K14dxQlikJuPt4uD9hHAHjLyLBiLR5zELelBdD9QNRAXZzsJx0WaDL9A==", + "dev": true, + "requires": { + "function-bind": "^1.1.1", + "has": "^1.0.3", + "has-symbols": "^1.0.3" + } + }, + "glob": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.0.tgz", + "integrity": "sha512-lmLf6gtyrPq8tTjSmrO94wBeQbFR3HbLHbuyD69wuyQkImp2hWqMGB47OX65FBkPffO641IP9jWa1z4ivqG26Q==", + "dev": true, + "requires": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.0.4", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + }, + "dependencies": { + "brace-expansion": { + "version": "1.1.11", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", + "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "dev": true, + "requires": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "minimatch": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", + "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "dev": true, + "requires": { + "brace-expansion": "^1.1.7" + } + } + } + }, + "glob-parent": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", + "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", + "dev": true, + "requires": { + "is-glob": "^4.0.1" + } + }, + "gopd": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/gopd/-/gopd-1.0.1.tgz", + "integrity": 
"sha512-d65bNlIadxvpb/A2abVdlqKqV563juRnZ1Wtk6s1sIR8uNsXR70xqIzVqxVf1eTqDunwT2MkczEeaezCKTZhwA==", + "dev": true, + "requires": { + "get-intrinsic": "^1.1.3" + } + }, + "has": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/has/-/has-1.0.3.tgz", + "integrity": "sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw==", + "dev": true, + "requires": { + "function-bind": "^1.1.1" + } + }, + "has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true + }, + "has-symbols": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.0.3.tgz", + "integrity": "sha512-l3LCuF6MgDNwTDKkdYGEihYjt5pRPbEg46rtlmnSPlUbgmB8LOIrKJbYYFBSbnPaJexMKtiPO8hmeRjRz2Td+A==", + "dev": true + }, + "has-tostringtag": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/has-tostringtag/-/has-tostringtag-1.0.0.tgz", + "integrity": "sha512-kFjcSNhnlGV1kyoGk7OXKSawH5JOb/LzUc5w9B02hOTO0dfFRjbHQKvg1d6cf3HbeUmtU9VbbV3qzZ2Teh97WQ==", + "dev": true, + "requires": { + "has-symbols": "^1.0.2" + } + }, + "he": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/he/-/he-1.2.0.tgz", + "integrity": "sha512-F/1DnUGPopORZi0ni+CvrCgHQ5FyEAHRLSApuYWMmrbSwoN2Mn/7k+Gl38gJnR7yyDZk6WLXwiGod1JOWNDKGw==", + "dev": true + }, + "hoopy": { + "version": "0.1.4", + "resolved": "https://registry.npmjs.org/hoopy/-/hoopy-0.1.4.tgz", + "integrity": "sha512-HRcs+2mr52W0K+x8RzcLzuPPmVIKMSv97RGHy0Ea9y/mpcaK+xTrjICA04KAHi4GRzxliNqNJEFYWHghy3rSfQ==", + "dev": true + }, + "inflight": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", + "integrity": "sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==", + "dev": true, + "requires": { + "once": "^1.3.0", + 
"wrappy": "1" + } + }, + "inherits": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", + "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==", + "dev": true + }, + "is-arguments": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/is-arguments/-/is-arguments-1.1.1.tgz", + "integrity": "sha512-8Q7EARjzEnKpt/PCD7e1cgUS0a6X8u5tdSiMqXhojOdoV9TsMsiO+9VLC5vAmO8N7/GmXn7yjR8qnA6bVAEzfA==", + "dev": true, + "requires": { + "call-bind": "^1.0.2", + "has-tostringtag": "^1.0.0" + } + }, + "is-binary-path": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/is-binary-path/-/is-binary-path-2.1.0.tgz", + "integrity": "sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw==", + "dev": true, + "requires": { + "binary-extensions": "^2.0.0" + } + }, + "is-callable": { + "version": "1.2.7", + "resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.2.7.tgz", + "integrity": "sha512-1BC0BVFhS/p0qtw6enp8e+8OD0UrK0oFLztSjNzhcKA3WDuJxxAPXzPuPtKkjEY9UUoEWlX/8fgKeu2S8i9JTA==", + "dev": true + }, + "is-extglob": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", + "integrity": "sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==", + "dev": true + }, + "is-fullwidth-code-point": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", + "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", + "dev": true + }, + "is-generator-function": { + "version": "1.0.10", + "resolved": "https://registry.npmjs.org/is-generator-function/-/is-generator-function-1.0.10.tgz", + "integrity": "sha512-jsEjy9l3yiXEQ+PsXdmBwEPcOxaXWLspKdplFUVI9vq1iZgIekeC0L167qeu86czQaxed3q/Uzuw0swL0irL8A==", + "dev": true, + 
"requires": { + "has-tostringtag": "^1.0.0" + } + }, + "is-glob": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz", + "integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==", + "dev": true, + "requires": { + "is-extglob": "^2.1.1" + } + }, + "is-number": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", + "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", + "dev": true + }, + "is-plain-obj": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/is-plain-obj/-/is-plain-obj-2.1.0.tgz", + "integrity": "sha512-YWnfyRwxL/+SsrWYfOpUtz5b3YD+nyfkHvjbcanzk8zgyO4ASD67uVMRt8k5bM4lLMDnXfriRhOpemw+NfT1eA==", + "dev": true + }, + "is-typed-array": { + "version": "1.1.10", + "resolved": "https://registry.npmjs.org/is-typed-array/-/is-typed-array-1.1.10.tgz", + "integrity": "sha512-PJqgEHiWZvMpaFZ3uTc8kHPM4+4ADTlDniuQL7cU/UDA0Ql7F70yGfHph3cLNe+c9toaigv+DFzTJKhc2CtO6A==", + "dev": true, + "requires": { + "available-typed-arrays": "^1.0.5", + "call-bind": "^1.0.2", + "for-each": "^0.3.3", + "gopd": "^1.0.1", + "has-tostringtag": "^1.0.0" + } + }, + "is-unicode-supported": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/is-unicode-supported/-/is-unicode-supported-0.1.0.tgz", + "integrity": "sha512-knxG2q4UC3u8stRGyAVJCOdxFmv5DZiRcdlIaAQXAbSfJya+OhopNotLQrstBhququ4ZpuKbDc/8S6mgXgPFPw==", + "dev": true + }, + "jake": { + "version": "10.8.5", + "resolved": "https://registry.npmjs.org/jake/-/jake-10.8.5.tgz", + "integrity": "sha512-sVpxYeuAhWt0OTWITwT98oyV0GsXyMlXCF+3L1SuafBVUIr/uILGRB+NqwkzhgXKvoJpDIpQvqkUALgdmQsQxw==", + "dev": true, + "requires": { + "async": "^3.2.3", + "chalk": "^4.0.2", + "filelist": "^1.0.1", + "minimatch": "^3.0.4" + }, + "dependencies": { + "brace-expansion": { + "version": "1.1.11", + "resolved": 
"https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", + "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "dev": true, + "requires": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "minimatch": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", + "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "dev": true, + "requires": { + "brace-expansion": "^1.1.7" + } + } + } + }, + "js-sha3": { + "version": "0.8.0", + "resolved": "https://registry.npmjs.org/js-sha3/-/js-sha3-0.8.0.tgz", + "integrity": "sha512-gF1cRrHhIzNfToc802P800N8PpXS+evLLXfsVpowqmAFR9uwbi89WvXg2QspOmXL8QL86J4T1EpFu+yUkwJY3Q==", + "dev": true + }, + "js-yaml": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.0.tgz", + "integrity": "sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==", + "dev": true, + "requires": { + "argparse": "^2.0.1" + } + }, + "locate-path": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-6.0.0.tgz", + "integrity": "sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw==", + "dev": true, + "requires": { + "p-locate": "^5.0.0" + } + }, + "log-symbols": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/log-symbols/-/log-symbols-4.1.0.tgz", + "integrity": "sha512-8XPvpAA8uyhfteu8pIvQxpJZ7SYYdpUivZpGy6sFsBuKRY/7rQGavedeB8aK+Zkyq6upMFVL/9AW6vOYzfRyLg==", + "dev": true, + "requires": { + "chalk": "^4.1.0", + "is-unicode-supported": "^0.1.0" + } + }, + "logplease": { + "version": "1.2.15", + "resolved": "https://registry.npmjs.org/logplease/-/logplease-1.2.15.tgz", + "integrity": "sha512-jLlHnlsPSJjpwUfcNyUxXCl33AYg2cHhIf9QhGL2T4iPT0XPB+xP1LRKFPgIg1M/sg9kAJvy94w9CzBNrfnstA==", + "dev": true + }, + 
"loupe": { + "version": "2.3.6", + "resolved": "https://registry.npmjs.org/loupe/-/loupe-2.3.6.tgz", + "integrity": "sha512-RaPMZKiMy8/JruncMU5Bt6na1eftNoo++R4Y+N2FrxkDVTrGvcyzFTsaGif4QTeKESheMGegbhw6iUAq+5A8zA==", + "dev": true, + "requires": { + "get-func-name": "^2.0.0" + } + }, + "minimatch": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-5.0.1.tgz", + "integrity": "sha512-nLDxIFRyhDblz3qMuq+SoRZED4+miJ/G+tdDrjkkkRnjAsBexeGpgjLEQ0blJy7rHhR2b93rhQY4SvyWu9v03g==", + "dev": true, + "requires": { + "brace-expansion": "^2.0.1" + } + }, + "mocha": { + "version": "10.2.0", + "resolved": "https://registry.npmjs.org/mocha/-/mocha-10.2.0.tgz", + "integrity": "sha512-IDY7fl/BecMwFHzoqF2sg/SHHANeBoMMXFlS9r0OXKDssYE1M5O43wUY/9BVPeIvfH2zmEbBfseqN9gBQZzXkg==", + "dev": true, + "requires": { + "ansi-colors": "4.1.1", + "browser-stdout": "1.3.1", + "chokidar": "3.5.3", + "debug": "4.3.4", + "diff": "5.0.0", + "escape-string-regexp": "4.0.0", + "find-up": "5.0.0", + "glob": "7.2.0", + "he": "1.2.0", + "js-yaml": "4.1.0", + "log-symbols": "4.1.0", + "minimatch": "5.0.1", + "ms": "2.1.3", + "nanoid": "3.3.3", + "serialize-javascript": "6.0.0", + "strip-json-comments": "3.1.1", + "supports-color": "8.1.1", + "workerpool": "6.2.1", + "yargs": "16.2.0", + "yargs-parser": "20.2.4", + "yargs-unparser": "2.0.0" + } + }, + "ms": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", + "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==", + "dev": true + }, + "nanoassert": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/nanoassert/-/nanoassert-2.0.0.tgz", + "integrity": "sha512-7vO7n28+aYO4J+8w96AzhmU8G+Y/xpPDJz/se19ICsqj/momRbb9mh9ZUtkoJ5X3nTnPdhEJyc0qnM6yAsHBaA==", + "dev": true + }, + "nanoid": { + "version": "3.3.3", + "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.3.3.tgz", + "integrity": 
"sha512-p1sjXuopFs0xg+fPASzQ28agW1oHD7xDsd9Xkf3T15H3c/cifrFHVwrh74PdoklAPi+i7MdRsE47vm2r6JoB+w==", + "dev": true + }, + "normalize-path": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz", + "integrity": "sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==", + "dev": true + }, + "once": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", + "integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==", + "dev": true, + "requires": { + "wrappy": "1" + } + }, + "p-limit": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz", + "integrity": "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==", + "dev": true, + "requires": { + "yocto-queue": "^0.1.0" + } + }, + "p-locate": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-5.0.0.tgz", + "integrity": "sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw==", + "dev": true, + "requires": { + "p-limit": "^3.0.2" + } + }, + "path-exists": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz", + "integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==", + "dev": true + }, + "path-is-absolute": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", + "integrity": "sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg==", + "dev": true + }, + "pathval": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/pathval/-/pathval-1.1.1.tgz", + "integrity": "sha512-Dp6zGqpTdETdR63lehJYPeIOqpiNBNtc7BpWSLrOje7UaIsE5aY92r/AunQA7rsXvet3lrJ3JnZX29UPTKXyKQ==", + "dev": true + }, + 
"picomatch": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz", + "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==", + "dev": true + }, + "r1csfile": { + "version": "0.0.41", + "resolved": "https://registry.npmjs.org/r1csfile/-/r1csfile-0.0.41.tgz", + "integrity": "sha512-Q1WDF3u1vYeAwjHo4YuddkA8Aq0TulbKjmGm99+Atn13Lf5fTsMZBnBV9T741w8iSyPFG6Uh6sapQby77sREqA==", + "dev": true, + "requires": { + "@iden3/bigarray": "0.0.2", + "@iden3/binfileutils": "0.0.11", + "fastfile": "0.0.20", + "ffjavascript": "0.2.56" + }, + "dependencies": { + "ffjavascript": { + "version": "0.2.56", + "resolved": "https://registry.npmjs.org/ffjavascript/-/ffjavascript-0.2.56.tgz", + "integrity": "sha512-em6G5Lrj7ucIqj4TYEgyoHs/j99Urwwqa4+YxEVY2hggnpRimVj+noX5pZQTxI1pvtiekZI4rG65JBf0xraXrg==", + "dev": true, + "requires": { + "wasmbuilder": "0.0.16", + "wasmcurves": "0.2.0", + "web-worker": "^1.2.0" + } + } + } + }, + "randombytes": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/randombytes/-/randombytes-2.1.0.tgz", + "integrity": "sha512-vYl3iOX+4CKUWuxGi9Ukhie6fsqXqS9FE2Zaic4tNFD2N2QQaXOMFbuKK4QmDHC0JO6B1Zp41J0LpT0oR68amQ==", + "dev": true, + "requires": { + "safe-buffer": "^5.1.0" + } + }, + "readdirp": { + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-3.6.0.tgz", + "integrity": "sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA==", + "dev": true, + "requires": { + "picomatch": "^2.2.1" + } + }, + "require-directory": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz", + "integrity": "sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q==", + "dev": true + }, + "rimraf": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-3.0.2.tgz", + "integrity": 
"sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==", + "dev": true, + "requires": { + "glob": "^7.1.3" + } + }, + "safe-buffer": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", + "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==", + "dev": true + }, + "serialize-javascript": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/serialize-javascript/-/serialize-javascript-6.0.0.tgz", + "integrity": "sha512-Qr3TosvguFt8ePWqsvRfrKyQXIiW+nGbYpy8XK24NQHE83caxWt+mIymTT19DGFbNWNLfEwsrkSmN64lVWB9ag==", + "dev": true, + "requires": { + "randombytes": "^2.1.0" + } + }, + "snarkjs": { + "version": "0.5.0", + "resolved": "https://registry.npmjs.org/snarkjs/-/snarkjs-0.5.0.tgz", + "integrity": "sha512-KWz8mZ2Y+6wvn6GGkQo6/ZlKwETdAGohd40Lzpwp5TUZCn6N6O4Az1SuX1rw/qREGL6Im+ycb19suCFE8/xaKA==", + "dev": true, + "requires": { + "@iden3/binfileutils": "0.0.11", + "bfj": "^7.0.2", + "blake2b-wasm": "^2.4.0", + "circom_runtime": "0.1.21", + "ejs": "^3.1.6", + "fastfile": "0.0.20", + "ffjavascript": "0.2.56", + "js-sha3": "^0.8.0", + "logplease": "^1.2.15", + "r1csfile": "0.0.41" + }, + "dependencies": { + "ffjavascript": { + "version": "0.2.56", + "resolved": "https://registry.npmjs.org/ffjavascript/-/ffjavascript-0.2.56.tgz", + "integrity": "sha512-em6G5Lrj7ucIqj4TYEgyoHs/j99Urwwqa4+YxEVY2hggnpRimVj+noX5pZQTxI1pvtiekZI4rG65JBf0xraXrg==", + "dev": true, + "requires": { + "wasmbuilder": "0.0.16", + "wasmcurves": "0.2.0", + "web-worker": "^1.2.0" + } + } + } + }, + "string-width": { + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "dev": true, + "requires": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" 
+ } + }, + "strip-ansi": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "dev": true, + "requires": { + "ansi-regex": "^5.0.1" + } + }, + "strip-json-comments": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.1.1.tgz", + "integrity": "sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==", + "dev": true + }, + "supports-color": { + "version": "8.1.1", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-8.1.1.tgz", + "integrity": "sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q==", + "dev": true, + "requires": { + "has-flag": "^4.0.0" + } + }, + "tmp": { + "version": "0.2.1", + "resolved": "https://registry.npmjs.org/tmp/-/tmp-0.2.1.tgz", + "integrity": "sha512-76SUhtfqR2Ijn+xllcI5P1oyannHNHByD80W1q447gU3mp9G9PSpGdWmjUOHRDPiHYacIk66W7ubDTuPF3BEtQ==", + "dev": true, + "requires": { + "rimraf": "^3.0.0" + } + }, + "tmp-promise": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/tmp-promise/-/tmp-promise-3.0.3.tgz", + "integrity": "sha512-RwM7MoPojPxsOBYnyd2hy0bxtIlVrihNs9pj5SUvY8Zz1sQcQG2tG1hSr8PDxfgEB8RNKDhqbIlroIarSNDNsQ==", + "dev": true, + "requires": { + "tmp": "^0.2.0" + } + }, + "to-regex-range": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", + "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", + "dev": true, + "requires": { + "is-number": "^7.0.0" + } + }, + "tryer": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/tryer/-/tryer-1.0.1.tgz", + "integrity": "sha512-c3zayb8/kWWpycWYg87P71E1S1ZL6b6IJxfb5fvsUgsf0S2MVGaDhDXXjDMpdCpfWXqptc+4mXwmiy1ypXqRAA==", + "dev": true + }, 
+ "type-detect": { + "version": "4.0.8", + "resolved": "https://registry.npmjs.org/type-detect/-/type-detect-4.0.8.tgz", + "integrity": "sha512-0fr/mIH1dlO+x7TlcMy+bIDqKPsw/70tVyeHW787goQjhmqaZe10uwLujubK9q9Lg6Fiho1KUKDYz0Z7k7g5/g==", + "dev": true + }, + "util": { + "version": "0.12.5", + "resolved": "https://registry.npmjs.org/util/-/util-0.12.5.tgz", + "integrity": "sha512-kZf/K6hEIrWHI6XqOFUiiMa+79wE/D8Q+NCNAWclkyg3b4d2k7s0QGepNjiABc+aR3N1PAyHL7p6UcLY6LmrnA==", + "dev": true, + "requires": { + "inherits": "^2.0.3", + "is-arguments": "^1.0.4", + "is-generator-function": "^1.0.7", + "is-typed-array": "^1.1.3", + "which-typed-array": "^1.1.2" + } + }, + "wasmbuilder": { + "version": "0.0.16", + "resolved": "https://registry.npmjs.org/wasmbuilder/-/wasmbuilder-0.0.16.tgz", + "integrity": "sha512-Qx3lEFqaVvp1cEYW7Bfi+ebRJrOiwz2Ieu7ZG2l7YyeSJIok/reEQCQCuicj/Y32ITIJuGIM9xZQppGx5LrQdA==", + "dev": true + }, + "wasmcurves": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/wasmcurves/-/wasmcurves-0.2.0.tgz", + "integrity": "sha512-3e2rbxdujOwaod657gxgmdhZNn+i1qKdHO3Y/bK+8E7bV8ttV/fu5FO4/WLBACF375cK0QDLOP+65Na63qYuWA==", + "dev": true, + "requires": { + "wasmbuilder": "0.0.16" + } + }, + "web-worker": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/web-worker/-/web-worker-1.2.0.tgz", + "integrity": "sha512-PgF341avzqyx60neE9DD+XS26MMNMoUQRz9NOZwW32nPQrF6p77f1htcnjBSEV8BGMKZ16choqUG4hyI0Hx7mA==", + "dev": true + }, + "which-typed-array": { + "version": "1.1.9", + "resolved": "https://registry.npmjs.org/which-typed-array/-/which-typed-array-1.1.9.tgz", + "integrity": "sha512-w9c4xkx6mPidwp7180ckYWfMmvxpjlZuIudNtDf4N/tTAUB8VJbX25qZoAsrtGuYNnGw3pa0AXgbGKRB8/EceA==", + "dev": true, + "requires": { + "available-typed-arrays": "^1.0.5", + "call-bind": "^1.0.2", + "for-each": "^0.3.3", + "gopd": "^1.0.1", + "has-tostringtag": "^1.0.0", + "is-typed-array": "^1.1.10" + } + }, + "workerpool": { + "version": "6.2.1", + "resolved": 
"https://registry.npmjs.org/workerpool/-/workerpool-6.2.1.tgz", + "integrity": "sha512-ILEIE97kDZvF9Wb9f6h5aXK4swSlKGUcOEGiIYb2OOu/IrDU9iwj0fD//SsA6E5ibwJxpEvhullJY4Sl4GcpAw==", + "dev": true + }, + "wrap-ansi": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", + "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", + "dev": true, + "requires": { + "ansi-styles": "^4.0.0", + "string-width": "^4.1.0", + "strip-ansi": "^6.0.0" + } + }, + "wrappy": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", + "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==", + "dev": true + }, + "y18n": { + "version": "5.0.8", + "resolved": "https://registry.npmjs.org/y18n/-/y18n-5.0.8.tgz", + "integrity": "sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA==", + "dev": true + }, + "yargs": { + "version": "16.2.0", + "resolved": "https://registry.npmjs.org/yargs/-/yargs-16.2.0.tgz", + "integrity": "sha512-D1mvvtDG0L5ft/jGWkLpG1+m0eQxOfaBvTNELraWj22wSVUMWxZUvYgJYcKh6jGGIkJFhH4IZPQhR4TKpc8mBw==", + "dev": true, + "requires": { + "cliui": "^7.0.2", + "escalade": "^3.1.1", + "get-caller-file": "^2.0.5", + "require-directory": "^2.1.1", + "string-width": "^4.2.0", + "y18n": "^5.0.5", + "yargs-parser": "^20.2.2" + } + }, + "yargs-parser": { + "version": "20.2.4", + "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-20.2.4.tgz", + "integrity": "sha512-WOkpgNhPTlE73h4VFAFsOnomJVaovO8VqLDzy5saChRBFQFBoMYirowyW+Q9HB4HFF4Z7VZTiG3iSzJJA29yRA==", + "dev": true + }, + "yargs-unparser": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/yargs-unparser/-/yargs-unparser-2.0.0.tgz", + "integrity": "sha512-7pRTIA9Qc1caZ0bZ6RYRGbHJthJWuakf+WmHK0rVeLkNrrGhfoabBNdue6kdINI6r4if7ocq9aD/n7xwKOdzOA==", + "dev": true, + "requires": { + 
"camelcase": "^6.0.0", + "decamelize": "^4.0.0", + "flat": "^5.0.2", + "is-plain-obj": "^2.1.0" + } + }, + "yocto-queue": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-0.1.0.tgz", + "integrity": "sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==", + "dev": true + } + } +} diff --git a/zkstats/onnx2circom/keras2circom/package.json b/zkstats/onnx2circom/keras2circom/package.json new file mode 100644 index 0000000..2459899 --- /dev/null +++ b/zkstats/onnx2circom/keras2circom/package.json @@ -0,0 +1,26 @@ +{ + "name": "keras2circom", + "version": "2.0.0", + "description": "keras2circom circuit tests", + "main": "index.js", + "scripts": { + "test": "mocha --max-old-space-size=4000" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/socathie/keras2circom.git" + }, + "author": "Cathie So, PhD", + "license": "MIT", + "bugs": { + "url": "https://github.com/socathie/keras2circom/issues" + }, + "homepage": "https://github.com/socathie/keras2circom#readme", + "devDependencies": { + "await-exec": "^0.1.2", + "chai": "^4.3.7", + "circom_tester": "^0.0.19", + "circomlib-ml": "^2.1.0", + "mocha": "^10.2.0" + } +} diff --git a/zkstats/onnx2circom/keras2circom/requirements.txt b/zkstats/onnx2circom/keras2circom/requirements.txt new file mode 100644 index 0000000..cbcaf96 --- /dev/null +++ b/zkstats/onnx2circom/keras2circom/requirements.txt @@ -0,0 +1,3 @@ +docopt==0.6.2 +numpy==1.26.4 +tensorflow==2.16.1 diff --git a/zkstats/onnx2circom/keras2circom/setup-circom.sh b/zkstats/onnx2circom/keras2circom/setup-circom.sh new file mode 100644 index 0000000..27f4eae --- /dev/null +++ b/zkstats/onnx2circom/keras2circom/setup-circom.sh @@ -0,0 +1,10 @@ +curl --proto '=https' --tlsv1.2 https://sh.rustup.rs -sSf | sh + +git clone https://github.com/iden3/circom.git + +cd circom + +cargo build --release +cargo install --path circom + +npm install -g snarkjs@latest diff --git 
a/zkstats/onnx2circom/keras2circom/test/accuracy.ipynb b/zkstats/onnx2circom/keras2circom/test/accuracy.ipynb new file mode 100644 index 0000000..d8c0aa0 --- /dev/null +++ b/zkstats/onnx2circom/keras2circom/test/accuracy.ipynb @@ -0,0 +1,10090 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "!cd .. && python main.py models/model.h5" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "metadata": {}, + "outputs": [], + "source": [ + "import sys\n", + "import os\n", + "# add parent directory to sys.path\n", + "sys.path.append(os.path.dirname((os.getcwd())))\n", + "from output.circuit import inference\n", + "import json" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "metadata": {}, + "outputs": [], + "source": [ + "with open('../output/circuit.json') as f:\n", + " circuit = json.load(f)\n", + "\n", + "with open('y_test.json') as f:\n", + " y_test = json.load(f)" + ] + }, + { + "cell_type": "code", + "execution_count": 10, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "#0: 100.00%\n", + "#1: 100.00%\n", + "#2: 100.00%\n", + "#3: 100.00%\n", + "#4: 100.00%\n", + "#5: 100.00%\n", + "#6: 100.00%\n", + "#7: 100.00%\n", + "#8: 100.00%\n", + "#9: 100.00%\n", + "#10: 100.00%\n", + "#11: 100.00%\n", + "#12: 100.00%\n", + "#13: 100.00%\n", + "#14: 100.00%\n", + "#15: 100.00%\n", + "#16: 100.00%\n", + "#17: 100.00%\n", + "#18: 94.74%\n", + "#19: 95.00%\n", + "#20: 95.24%\n", + "#21: 95.45%\n", + "#22: 95.65%\n", + "#23: 95.83%\n", + "#24: 96.00%\n", + "#25: 96.15%\n", + "#26: 96.30%\n", + "#27: 96.43%\n", + "#28: 96.55%\n", + "#29: 96.67%\n", + "#30: 96.77%\n", + "#31: 96.88%\n", + "#32: 96.97%\n", + "#33: 97.06%\n", + "#34: 97.14%\n", + "#35: 97.22%\n", + "#36: 97.30%\n", + "#37: 97.37%\n", + "#38: 97.44%\n", + "#39: 97.50%\n", + "#40: 97.56%\n", + "#41: 97.62%\n", + "#42: 97.67%\n", + "#43: 97.73%\n", + "#44: 97.78%\n", + 
"#45: 97.83%\n", + "#46: 97.87%\n", + "#47: 97.92%\n", + "#48: 97.96%\n", + "#49: 98.00%\n", + "#50: 98.04%\n", + "#51: 98.08%\n", + "#52: 98.11%\n", + "#53: 98.15%\n", + "#54: 98.18%\n", + "#55: 98.21%\n", + "#56: 98.25%\n", + "#57: 98.28%\n", + "#58: 98.31%\n", + "#59: 98.33%\n", + "#60: 98.36%\n", + "#61: 98.39%\n", + "#62: 98.41%\n", + "#63: 98.44%\n", + "#64: 98.46%\n", + "#65: 98.48%\n", + "#66: 98.51%\n", + "#67: 98.53%\n", + "#68: 98.55%\n", + "#69: 98.57%\n", + "#70: 98.59%\n", + "#71: 98.61%\n", + "#72: 98.63%\n", + "#73: 98.65%\n", + "#74: 98.67%\n", + "#75: 98.68%\n", + "#76: 98.70%\n", + "#77: 98.72%\n", + "#78: 98.73%\n", + "#79: 98.75%\n", + "#80: 98.77%\n", + "#81: 98.78%\n", + "#82: 98.80%\n", + "#83: 98.81%\n", + "#84: 98.82%\n", + "#85: 98.84%\n", + "#86: 98.85%\n", + "#87: 98.86%\n", + "#88: 98.88%\n", + "#89: 98.89%\n", + "#90: 98.90%\n", + "#91: 98.91%\n", + "#92: 98.92%\n", + "#93: 98.94%\n", + "#94: 98.95%\n", + "#95: 98.96%\n", + "#96: 98.97%\n", + "#97: 98.98%\n", + "#98: 98.99%\n", + "#99: 99.00%\n", + "#100: 99.01%\n", + "#101: 99.02%\n", + "#102: 99.03%\n", + "#103: 99.04%\n", + "#104: 99.05%\n", + "#105: 99.06%\n", + "#106: 99.07%\n", + "#107: 99.07%\n", + "#108: 99.08%\n", + "#109: 99.09%\n", + "#110: 99.10%\n", + "#111: 99.11%\n", + "#112: 99.12%\n", + "#113: 99.12%\n", + "#114: 99.13%\n", + "#115: 99.14%\n", + "#116: 99.15%\n", + "#117: 99.15%\n", + "#118: 99.16%\n", + "#119: 99.17%\n", + "#120: 99.17%\n", + "#121: 99.18%\n", + "#122: 99.19%\n", + "#123: 99.19%\n", + "#124: 99.20%\n", + "#125: 99.21%\n", + "#126: 99.21%\n", + "#127: 99.22%\n", + "#128: 99.22%\n", + "#129: 99.23%\n", + "#130: 99.24%\n", + "#131: 99.24%\n", + "#132: 99.25%\n", + "#133: 99.25%\n", + "#134: 99.26%\n", + "#135: 99.26%\n", + "#136: 99.27%\n", + "#137: 99.28%\n", + "#138: 99.28%\n", + "#139: 99.29%\n", + "#140: 99.29%\n", + "#141: 99.30%\n", + "#142: 99.30%\n", + "#143: 99.31%\n", + "#144: 99.31%\n", + "#145: 99.32%\n", + "#146: 99.32%\n", + "#147: 
99.32%\n", + "#148: 99.33%\n", + "#149: 99.33%\n", + "#150: 99.34%\n", + "#151: 98.68%\n", + "#152: 98.69%\n", + "#153: 98.70%\n", + "#154: 98.71%\n", + "#155: 98.72%\n", + "#156: 98.73%\n", + "#157: 98.73%\n", + "#158: 98.74%\n", + "#159: 98.75%\n", + "#160: 98.76%\n", + "#161: 98.77%\n", + "#162: 98.77%\n", + "#163: 98.78%\n", + "#164: 98.79%\n", + "#165: 98.80%\n", + "#166: 98.80%\n", + "#167: 98.81%\n", + "#168: 98.82%\n", + "#169: 98.82%\n", + "#170: 98.83%\n", + "#171: 98.84%\n", + "#172: 98.84%\n", + "#173: 98.85%\n", + "#174: 98.86%\n", + "#175: 98.86%\n", + "#176: 98.87%\n", + "#177: 98.88%\n", + "#178: 98.88%\n", + "#179: 98.89%\n", + "#180: 98.90%\n", + "#181: 98.90%\n", + "#182: 98.91%\n", + "#183: 98.91%\n", + "#184: 98.92%\n", + "#185: 98.92%\n", + "#186: 98.93%\n", + "#187: 98.94%\n", + "#188: 98.94%\n", + "#189: 98.95%\n", + "#190: 98.95%\n", + "#191: 98.96%\n", + "#192: 98.96%\n", + "#193: 98.97%\n", + "#194: 98.97%\n", + "#195: 98.98%\n", + "#196: 98.98%\n", + "#197: 98.99%\n", + "#198: 98.99%\n", + "#199: 99.00%\n", + "#200: 99.00%\n", + "#201: 99.01%\n", + "#202: 99.01%\n", + "#203: 99.02%\n", + "#204: 99.02%\n", + "#205: 99.03%\n", + "#206: 99.03%\n", + "#207: 99.04%\n", + "#208: 99.04%\n", + "#209: 99.05%\n", + "#210: 99.05%\n", + "#211: 99.06%\n", + "#212: 99.06%\n", + "#213: 99.07%\n", + "#214: 99.07%\n", + "#215: 99.07%\n", + "#216: 99.08%\n", + "#217: 99.08%\n", + "#218: 99.09%\n", + "#219: 99.09%\n", + "#220: 99.10%\n", + "#221: 99.10%\n", + "#222: 99.10%\n", + "#223: 99.11%\n", + "#224: 99.11%\n", + "#225: 99.12%\n", + "#226: 99.12%\n", + "#227: 99.12%\n", + "#228: 99.13%\n", + "#229: 99.13%\n", + "#230: 99.13%\n", + "#231: 99.14%\n", + "#232: 99.14%\n", + "#233: 99.15%\n", + "#234: 99.15%\n", + "#235: 99.15%\n", + "#236: 99.16%\n", + "#237: 99.16%\n", + "#238: 99.16%\n", + "#239: 99.17%\n", + "#240: 99.17%\n", + "#241: 98.76%\n", + "#242: 98.77%\n", + "#243: 98.77%\n", + "#244: 98.78%\n", + "#245: 98.78%\n", + "#246: 98.79%\n", + "#247: 
98.39%\n", + "#248: 98.39%\n", + "#249: 98.40%\n", + "#250: 98.41%\n", + "#251: 98.41%\n", + "#252: 98.42%\n", + "#253: 98.43%\n", + "#254: 98.43%\n", + "#255: 98.44%\n", + "#256: 98.44%\n", + "#257: 98.45%\n", + "#258: 98.46%\n", + "#259: 98.08%\n", + "#260: 98.08%\n", + "#261: 98.09%\n", + "#262: 98.10%\n", + "#263: 98.11%\n", + "#264: 97.74%\n", + "#265: 97.74%\n", + "#266: 97.38%\n", + "#267: 97.39%\n", + "#268: 97.40%\n", + "#269: 97.41%\n", + "#270: 97.42%\n", + "#271: 97.43%\n", + "#272: 97.44%\n", + "#273: 97.45%\n", + "#274: 97.45%\n", + "#275: 97.46%\n", + "#276: 97.47%\n", + "#277: 97.48%\n", + "#278: 97.49%\n", + "#279: 97.50%\n", + "#280: 97.51%\n", + "#281: 97.52%\n", + "#282: 97.17%\n", + "#283: 97.18%\n", + "#284: 97.19%\n", + "#285: 97.20%\n", + "#286: 97.21%\n", + "#287: 97.22%\n", + "#288: 97.23%\n", + "#289: 97.24%\n", + "#290: 97.25%\n", + "#291: 97.26%\n", + "#292: 97.27%\n", + "#293: 97.28%\n", + "#294: 97.29%\n", + "#295: 97.30%\n", + "#296: 97.31%\n", + "#297: 97.32%\n", + "#298: 97.32%\n", + "#299: 97.33%\n", + "#300: 97.34%\n", + "#301: 97.35%\n", + "#302: 97.36%\n", + "#303: 97.37%\n", + "#304: 97.38%\n", + "#305: 97.39%\n", + "#306: 97.39%\n", + "#307: 97.40%\n", + "#308: 97.41%\n", + "#309: 97.42%\n", + "#310: 97.43%\n", + "#311: 97.44%\n", + "#312: 97.44%\n", + "#313: 97.45%\n", + "#314: 97.46%\n", + "#315: 97.47%\n", + "#316: 97.48%\n", + "#317: 97.48%\n", + "#318: 97.49%\n", + "#319: 97.50%\n", + "#320: 97.20%\n", + "#321: 97.20%\n", + "#322: 97.21%\n", + "#323: 97.22%\n", + "#324: 97.23%\n", + "#325: 97.24%\n", + "#326: 97.25%\n", + "#327: 97.26%\n", + "#328: 97.26%\n", + "#329: 97.27%\n", + "#330: 97.28%\n", + "#331: 97.29%\n", + "#332: 97.30%\n", + "#333: 97.31%\n", + "#334: 97.31%\n", + "#335: 97.32%\n", + "#336: 97.33%\n", + "#337: 97.34%\n", + "#338: 97.35%\n", + "#339: 97.35%\n", + "#340: 97.36%\n", + "#341: 97.37%\n", + "#342: 97.38%\n", + "#343: 97.38%\n", + "#344: 97.39%\n", + "#345: 97.40%\n", + "#346: 97.41%\n", + "#347: 
97.41%\n", + "#348: 97.42%\n", + "#349: 97.43%\n", + "#350: 97.44%\n", + "#351: 97.44%\n", + "#352: 97.45%\n", + "#353: 97.46%\n", + "#354: 97.46%\n", + "#355: 97.47%\n", + "#356: 97.48%\n", + "#357: 97.49%\n", + "#358: 97.21%\n", + "#359: 97.22%\n", + "#360: 97.23%\n", + "#361: 97.24%\n", + "#362: 97.25%\n", + "#363: 97.25%\n", + "#364: 97.26%\n", + "#365: 97.27%\n", + "#366: 97.28%\n", + "#367: 97.28%\n", + "#368: 97.29%\n", + "#369: 97.30%\n", + "#370: 97.30%\n", + "#371: 97.31%\n", + "#372: 97.32%\n", + "#373: 97.33%\n", + "#374: 97.33%\n", + "#375: 97.34%\n", + "#376: 97.35%\n", + "#377: 97.35%\n", + "#378: 97.36%\n", + "#379: 97.37%\n", + "#380: 97.38%\n", + "#381: 97.38%\n", + "#382: 97.39%\n", + "#383: 97.40%\n", + "#384: 97.40%\n", + "#385: 97.41%\n", + "#386: 97.42%\n", + "#387: 97.42%\n", + "#388: 97.43%\n", + "#389: 97.44%\n", + "#390: 97.44%\n", + "#391: 97.45%\n", + "#392: 97.46%\n", + "#393: 97.46%\n", + "#394: 97.47%\n", + "#395: 97.47%\n", + "#396: 97.48%\n", + "#397: 97.49%\n", + "#398: 97.49%\n", + "#399: 97.50%\n", + "#400: 97.51%\n", + "#401: 97.51%\n", + "#402: 97.52%\n", + "#403: 97.28%\n", + "#404: 97.28%\n", + "#405: 97.29%\n", + "#406: 97.30%\n", + "#407: 97.30%\n", + "#408: 97.31%\n", + "#409: 97.32%\n", + "#410: 97.32%\n", + "#411: 97.33%\n", + "#412: 97.34%\n", + "#413: 97.34%\n", + "#414: 97.35%\n", + "#415: 97.36%\n", + "#416: 97.36%\n", + "#417: 97.37%\n", + "#418: 97.37%\n", + "#419: 97.38%\n", + "#420: 97.39%\n", + "#421: 97.39%\n", + "#422: 97.40%\n", + "#423: 97.41%\n", + "#424: 97.41%\n", + "#425: 97.42%\n", + "#426: 97.42%\n", + "#427: 97.43%\n", + "#428: 97.44%\n", + "#429: 97.44%\n", + "#430: 97.45%\n", + "#431: 97.45%\n", + "#432: 97.46%\n", + "#433: 97.47%\n", + "#434: 97.47%\n", + "#435: 97.25%\n", + "#436: 97.25%\n", + "#437: 97.26%\n", + "#438: 97.27%\n", + "#439: 97.27%\n", + "#440: 97.28%\n", + "#441: 97.29%\n", + "#442: 97.29%\n", + "#443: 97.30%\n", + "#444: 97.30%\n", + "#445: 97.09%\n", + "#446: 97.09%\n", + "#447: 
97.10%\n", + "#448: 97.10%\n", + "#449: 97.11%\n", + "#450: 97.12%\n", + "#451: 97.12%\n", + "#452: 97.13%\n", + "#453: 97.14%\n", + "#454: 97.14%\n", + "#455: 97.15%\n", + "#456: 97.16%\n", + "#457: 97.16%\n", + "#458: 97.17%\n", + "#459: 97.17%\n", + "#460: 97.18%\n", + "#461: 97.19%\n", + "#462: 97.19%\n", + "#463: 97.20%\n", + "#464: 96.99%\n", + "#465: 97.00%\n", + "#466: 97.00%\n", + "#467: 97.01%\n", + "#468: 97.01%\n", + "#469: 97.02%\n", + "#470: 97.03%\n", + "#471: 97.03%\n", + "#472: 97.04%\n", + "#473: 97.05%\n", + "#474: 97.05%\n", + "#475: 97.06%\n", + "#476: 97.06%\n", + "#477: 97.07%\n", + "#478: 97.08%\n", + "#479: 97.08%\n", + "#480: 97.09%\n", + "#481: 97.10%\n", + "#482: 97.10%\n", + "#483: 97.11%\n", + "#484: 97.11%\n", + "#485: 97.12%\n", + "#486: 97.13%\n", + "#487: 97.13%\n", + "#488: 97.14%\n", + "#489: 97.14%\n", + "#490: 97.15%\n", + "#491: 97.15%\n", + "#492: 97.16%\n", + "#493: 97.17%\n", + "#494: 97.17%\n", + "#495: 96.98%\n", + "#496: 96.98%\n", + "#497: 96.99%\n", + "#498: 96.99%\n", + "#499: 97.00%\n", + "#500: 97.01%\n", + "#501: 97.01%\n", + "#502: 97.02%\n", + "#503: 97.02%\n", + "#504: 97.03%\n", + "#505: 97.04%\n", + "#506: 97.04%\n", + "#507: 97.05%\n", + "#508: 97.05%\n", + "#509: 97.06%\n", + "#510: 97.06%\n", + "#511: 97.07%\n", + "#512: 97.08%\n", + "#513: 97.08%\n", + "#514: 97.09%\n", + "#515: 97.09%\n", + "#516: 97.10%\n", + "#517: 97.10%\n", + "#518: 97.11%\n", + "#519: 97.12%\n", + "#520: 97.12%\n", + "#521: 97.13%\n", + "#522: 97.13%\n", + "#523: 97.14%\n", + "#524: 97.14%\n", + "#525: 97.15%\n", + "#526: 97.15%\n", + "#527: 97.16%\n", + "#528: 97.16%\n", + "#529: 97.17%\n", + "#530: 97.18%\n", + "#531: 97.18%\n", + "#532: 97.19%\n", + "#533: 97.19%\n", + "#534: 97.20%\n", + "#535: 97.20%\n", + "#536: 97.21%\n", + "#537: 97.21%\n", + "#538: 97.22%\n", + "#539: 97.22%\n", + "#540: 97.23%\n", + "#541: 97.23%\n", + "#542: 97.05%\n", + "#543: 97.06%\n", + "#544: 97.06%\n", + "#545: 97.07%\n", + "#546: 97.07%\n", + "#547: 
97.08%\n", + "#548: 97.09%\n", + "#549: 97.09%\n", + "#550: 97.10%\n", + "#551: 97.10%\n", + "#552: 97.11%\n", + "#553: 97.11%\n", + "#554: 97.12%\n", + "#555: 97.12%\n", + "#556: 97.13%\n", + "#557: 97.13%\n", + "#558: 97.14%\n", + "#559: 97.14%\n", + "#560: 97.15%\n", + "#561: 97.15%\n", + "#562: 97.16%\n", + "#563: 97.16%\n", + "#564: 97.17%\n", + "#565: 97.17%\n", + "#566: 97.18%\n", + "#567: 97.18%\n", + "#568: 97.19%\n", + "#569: 97.19%\n", + "#570: 97.20%\n", + "#571: 97.20%\n", + "#572: 97.21%\n", + "#573: 97.21%\n", + "#574: 97.22%\n", + "#575: 97.22%\n", + "#576: 97.23%\n", + "#577: 97.23%\n", + "#578: 97.24%\n", + "#579: 97.24%\n", + "#580: 97.25%\n", + "#581: 97.25%\n", + "#582: 97.08%\n", + "#583: 97.09%\n", + "#584: 97.09%\n", + "#585: 97.10%\n", + "#586: 97.10%\n", + "#587: 97.11%\n", + "#588: 97.11%\n", + "#589: 97.12%\n", + "#590: 97.12%\n", + "#591: 97.13%\n", + "#592: 97.13%\n", + "#593: 97.14%\n", + "#594: 97.14%\n", + "#595: 97.15%\n", + "#596: 97.15%\n", + "#597: 97.16%\n", + "#598: 97.16%\n", + "#599: 97.17%\n", + "#600: 97.17%\n", + "#601: 97.18%\n", + "#602: 97.18%\n", + "#603: 97.19%\n", + "#604: 97.19%\n", + "#605: 97.03%\n", + "#606: 97.03%\n", + "#607: 97.04%\n", + "#608: 97.04%\n", + "#609: 97.05%\n", + "#610: 97.05%\n", + "#611: 97.06%\n", + "#612: 97.06%\n", + "#613: 97.07%\n", + "#614: 97.07%\n", + "#615: 97.08%\n", + "#616: 97.08%\n", + "#617: 97.09%\n", + "#618: 97.09%\n", + "#619: 97.10%\n", + "#620: 97.10%\n", + "#621: 97.11%\n", + "#622: 97.11%\n", + "#623: 97.12%\n", + "#624: 97.12%\n", + "#625: 97.12%\n", + "#626: 97.13%\n", + "#627: 97.13%\n", + "#628: 97.14%\n", + "#629: 97.14%\n", + "#630: 97.15%\n", + "#631: 97.15%\n", + "#632: 97.16%\n", + "#633: 97.16%\n", + "#634: 97.17%\n", + "#635: 97.17%\n", + "#636: 97.17%\n", + "#637: 97.18%\n", + "#638: 97.18%\n", + "#639: 97.19%\n", + "#640: 97.19%\n", + "#641: 97.20%\n", + "#642: 97.20%\n", + "#643: 97.20%\n", + "#644: 97.21%\n", + "#645: 97.21%\n", + "#646: 97.22%\n", + "#647: 
97.22%\n", + "#648: 97.23%\n", + "#649: 97.23%\n", + "#650: 97.24%\n", + "#651: 97.24%\n", + "#652: 97.24%\n", + "#653: 97.25%\n", + "#654: 97.25%\n", + "#655: 97.26%\n", + "#656: 97.26%\n", + "#657: 97.26%\n", + "#658: 97.27%\n", + "#659: 97.27%\n", + "#660: 97.28%\n", + "#661: 97.28%\n", + "#662: 97.29%\n", + "#663: 97.29%\n", + "#664: 97.29%\n", + "#665: 97.30%\n", + "#666: 97.30%\n", + "#667: 97.31%\n", + "#668: 97.31%\n", + "#669: 97.31%\n", + "#670: 97.32%\n", + "#671: 97.32%\n", + "#672: 97.33%\n", + "#673: 97.33%\n", + "#674: 97.33%\n", + "#675: 97.34%\n", + "#676: 97.34%\n", + "#677: 97.35%\n", + "#678: 97.35%\n", + "#679: 97.35%\n", + "#680: 97.36%\n", + "#681: 97.36%\n", + "#682: 97.36%\n", + "#683: 97.37%\n", + "#684: 97.23%\n", + "#685: 97.23%\n", + "#686: 97.23%\n", + "#687: 97.24%\n", + "#688: 97.24%\n", + "#689: 97.10%\n", + "#690: 97.11%\n", + "#691: 97.11%\n", + "#692: 97.11%\n", + "#693: 97.12%\n", + "#694: 97.12%\n", + "#695: 97.13%\n", + "#696: 97.13%\n", + "#697: 97.13%\n", + "#698: 97.14%\n", + "#699: 97.14%\n", + "#700: 97.15%\n", + "#701: 97.15%\n", + "#702: 97.16%\n", + "#703: 97.16%\n", + "#704: 97.16%\n", + "#705: 97.17%\n", + "#706: 97.17%\n", + "#707: 97.18%\n", + "#708: 97.18%\n", + "#709: 97.18%\n", + "#710: 97.19%\n", + "#711: 97.19%\n", + "#712: 97.19%\n", + "#713: 97.20%\n", + "#714: 97.20%\n", + "#715: 97.21%\n", + "#716: 97.21%\n", + "#717: 97.08%\n", + "#718: 97.08%\n", + "#719: 97.08%\n", + "#720: 97.09%\n", + "#721: 97.09%\n", + "#722: 97.10%\n", + "#723: 97.10%\n", + "#724: 97.10%\n", + "#725: 97.11%\n", + "#726: 96.97%\n", + "#727: 96.98%\n", + "#728: 96.98%\n", + "#729: 96.99%\n", + "#730: 96.99%\n", + "#731: 96.99%\n", + "#732: 97.00%\n", + "#733: 97.00%\n", + "#734: 97.01%\n", + "#735: 97.01%\n", + "#736: 97.01%\n", + "#737: 97.02%\n", + "#738: 97.02%\n", + "#739: 97.03%\n", + "#740: 97.03%\n", + "#741: 97.04%\n", + "#742: 97.04%\n", + "#743: 97.04%\n", + "#744: 97.05%\n", + "#745: 97.05%\n", + "#746: 97.05%\n", + "#747: 
97.06%\n", + "#748: 97.06%\n", + "#749: 97.07%\n", + "#750: 97.07%\n", + "#751: 97.07%\n", + "#752: 97.08%\n", + "#753: 97.08%\n", + "#754: 97.09%\n", + "#755: 97.09%\n", + "#756: 97.09%\n", + "#757: 97.10%\n", + "#758: 97.10%\n", + "#759: 97.11%\n", + "#760: 97.11%\n", + "#761: 97.11%\n", + "#762: 97.12%\n", + "#763: 97.12%\n", + "#764: 97.12%\n", + "#765: 97.13%\n", + "#766: 97.13%\n", + "#767: 97.14%\n", + "#768: 97.14%\n", + "#769: 97.14%\n", + "#770: 97.15%\n", + "#771: 97.15%\n", + "#772: 97.15%\n", + "#773: 97.16%\n", + "#774: 97.16%\n", + "#775: 97.16%\n", + "#776: 97.17%\n", + "#777: 97.17%\n", + "#778: 97.18%\n", + "#779: 97.18%\n", + "#780: 97.18%\n", + "#781: 97.19%\n", + "#782: 97.19%\n", + "#783: 97.19%\n", + "#784: 97.20%\n", + "#785: 97.20%\n", + "#786: 97.20%\n", + "#787: 97.21%\n", + "#788: 97.21%\n", + "#789: 97.22%\n", + "#790: 97.22%\n", + "#791: 97.22%\n", + "#792: 97.23%\n", + "#793: 97.23%\n", + "#794: 97.23%\n", + "#795: 97.24%\n", + "#796: 97.24%\n", + "#797: 97.24%\n", + "#798: 97.25%\n", + "#799: 97.25%\n", + "#800: 97.25%\n", + "#801: 97.26%\n", + "#802: 97.26%\n", + "#803: 97.26%\n", + "#804: 97.27%\n", + "#805: 97.27%\n", + "#806: 97.27%\n", + "#807: 97.28%\n", + "#808: 97.28%\n", + "#809: 97.28%\n", + "#810: 97.29%\n", + "#811: 97.29%\n", + "#812: 97.29%\n", + "#813: 97.17%\n", + "#814: 97.18%\n", + "#815: 97.18%\n", + "#816: 97.18%\n", + "#817: 97.19%\n", + "#818: 97.19%\n", + "#819: 97.20%\n", + "#820: 97.20%\n", + "#821: 97.20%\n", + "#822: 97.21%\n", + "#823: 97.21%\n", + "#824: 97.21%\n", + "#825: 97.22%\n", + "#826: 97.22%\n", + "#827: 97.22%\n", + "#828: 97.23%\n", + "#829: 97.23%\n", + "#830: 97.23%\n", + "#831: 97.24%\n", + "#832: 97.24%\n", + "#833: 97.24%\n", + "#834: 97.25%\n", + "#835: 97.25%\n", + "#836: 97.25%\n", + "#837: 97.26%\n", + "#838: 97.26%\n", + "#839: 97.26%\n", + "#840: 97.27%\n", + "#841: 97.27%\n", + "#842: 97.27%\n", + "#843: 97.27%\n", + "#844: 97.28%\n", + "#845: 97.28%\n", + "#846: 97.28%\n", + "#847: 
97.29%\n", + "#848: 97.29%\n", + "#849: 97.29%\n", + "#850: 97.30%\n", + "#851: 97.30%\n", + "#852: 97.30%\n", + "#853: 97.31%\n", + "#854: 97.31%\n", + "#855: 97.31%\n", + "#856: 97.32%\n", + "#857: 97.32%\n", + "#858: 97.32%\n", + "#859: 97.33%\n", + "#860: 97.33%\n", + "#861: 97.33%\n", + "#862: 97.33%\n", + "#863: 97.34%\n", + "#864: 97.34%\n", + "#865: 97.34%\n", + "#866: 97.35%\n", + "#867: 97.35%\n", + "#868: 97.35%\n", + "#869: 97.36%\n", + "#870: 97.36%\n", + "#871: 97.36%\n", + "#872: 97.37%\n", + "#873: 97.37%\n", + "#874: 97.37%\n", + "#875: 97.37%\n", + "#876: 97.38%\n", + "#877: 97.38%\n", + "#878: 97.38%\n", + "#879: 97.39%\n", + "#880: 97.39%\n", + "#881: 97.39%\n", + "#882: 97.40%\n", + "#883: 97.40%\n", + "#884: 97.40%\n", + "#885: 97.40%\n", + "#886: 97.41%\n", + "#887: 97.41%\n", + "#888: 97.41%\n", + "#889: 97.42%\n", + "#890: 97.42%\n", + "#891: 97.42%\n", + "#892: 97.42%\n", + "#893: 97.43%\n", + "#894: 97.43%\n", + "#895: 97.43%\n", + "#896: 97.44%\n", + "#897: 97.44%\n", + "#898: 97.33%\n", + "#899: 97.33%\n", + "#900: 97.34%\n", + "#901: 97.34%\n", + "#902: 97.34%\n", + "#903: 97.35%\n", + "#904: 97.35%\n", + "#905: 97.35%\n", + "#906: 97.35%\n", + "#907: 97.36%\n", + "#908: 97.36%\n", + "#909: 97.36%\n", + "#910: 97.37%\n", + "#911: 97.37%\n", + "#912: 97.37%\n", + "#913: 97.37%\n", + "#914: 97.38%\n", + "#915: 97.38%\n", + "#916: 97.38%\n", + "#917: 97.39%\n", + "#918: 97.39%\n", + "#919: 97.39%\n", + "#920: 97.39%\n", + "#921: 97.40%\n", + "#922: 97.40%\n", + "#923: 97.40%\n", + "#924: 97.41%\n", + "#925: 97.41%\n", + "#926: 97.41%\n", + "#927: 97.41%\n", + "#928: 97.42%\n", + "#929: 97.42%\n", + "#930: 97.42%\n", + "#931: 97.42%\n", + "#932: 97.43%\n", + "#933: 97.43%\n", + "#934: 97.43%\n", + "#935: 97.44%\n", + "#936: 97.44%\n", + "#937: 97.44%\n", + "#938: 97.44%\n", + "#939: 97.45%\n", + "#940: 97.45%\n", + "#941: 97.45%\n", + "#942: 97.45%\n", + "#943: 97.46%\n", + "#944: 97.46%\n", + "#945: 97.46%\n", + "#946: 97.47%\n", + "#947: 
97.36%\n", + "#948: 97.37%\n", + "#949: 97.37%\n", + "#950: 97.37%\n", + "#951: 97.37%\n", + "#952: 97.38%\n", + "#953: 97.38%\n", + "#954: 97.38%\n", + "#955: 97.38%\n", + "#956: 97.28%\n", + "#957: 97.29%\n", + "#958: 97.29%\n", + "#959: 97.29%\n", + "#960: 97.29%\n", + "#961: 97.30%\n", + "#962: 97.30%\n", + "#963: 97.30%\n", + "#964: 97.31%\n", + "#965: 97.20%\n", + "#966: 97.21%\n", + "#967: 97.21%\n", + "#968: 97.21%\n", + "#969: 97.22%\n", + "#970: 97.22%\n", + "#971: 97.22%\n", + "#972: 97.23%\n", + "#973: 97.23%\n", + "#974: 97.23%\n", + "#975: 97.23%\n", + "#976: 97.24%\n", + "#977: 97.24%\n", + "#978: 97.24%\n", + "#979: 97.24%\n", + "#980: 97.25%\n", + "#981: 97.25%\n", + "#982: 97.25%\n", + "#983: 97.26%\n", + "#984: 97.26%\n", + "#985: 97.26%\n", + "#986: 97.26%\n", + "#987: 97.27%\n", + "#988: 97.27%\n", + "#989: 97.27%\n", + "#990: 97.28%\n", + "#991: 97.28%\n", + "#992: 97.28%\n", + "#993: 97.28%\n", + "#994: 97.29%\n", + "#995: 97.29%\n", + "#996: 97.29%\n", + "#997: 97.29%\n", + "#998: 97.30%\n", + "#999: 97.30%\n", + "#1000: 97.30%\n", + "#1001: 97.31%\n", + "#1002: 97.31%\n", + "#1003: 97.31%\n", + "#1004: 97.31%\n", + "#1005: 97.32%\n", + "#1006: 97.32%\n", + "#1007: 97.32%\n", + "#1008: 97.32%\n", + "#1009: 97.33%\n", + "#1010: 97.33%\n", + "#1011: 97.33%\n", + "#1012: 97.33%\n", + "#1013: 97.34%\n", + "#1014: 97.24%\n", + "#1015: 97.24%\n", + "#1016: 97.25%\n", + "#1017: 97.25%\n", + "#1018: 97.25%\n", + "#1019: 97.25%\n", + "#1020: 97.26%\n", + "#1021: 97.26%\n", + "#1022: 97.26%\n", + "#1023: 97.27%\n", + "#1024: 97.27%\n", + "#1025: 97.27%\n", + "#1026: 97.27%\n", + "#1027: 97.28%\n", + "#1028: 97.28%\n", + "#1029: 97.28%\n", + "#1030: 97.28%\n", + "#1031: 97.29%\n", + "#1032: 97.29%\n", + "#1033: 97.29%\n", + "#1034: 97.29%\n", + "#1035: 97.30%\n", + "#1036: 97.30%\n", + "#1037: 97.30%\n", + "#1038: 97.31%\n", + "#1039: 97.21%\n", + "#1040: 97.21%\n", + "#1041: 97.22%\n", + "#1042: 97.22%\n", + "#1043: 97.22%\n", + "#1044: 97.22%\n", + 
"#1045: 97.23%\n", + "#1046: 97.23%\n", + "#1047: 97.23%\n", + "#1048: 97.24%\n", + "#1049: 97.24%\n", + "#1050: 97.24%\n", + "#1051: 97.24%\n", + "#1052: 97.25%\n", + "#1053: 97.25%\n", + "#1054: 97.25%\n", + "#1055: 97.25%\n", + "#1056: 97.26%\n", + "#1057: 97.26%\n", + "#1058: 97.26%\n", + "#1059: 97.26%\n", + "#1060: 97.27%\n", + "#1061: 97.27%\n", + "#1062: 97.18%\n", + "#1063: 97.18%\n", + "#1064: 97.18%\n", + "#1065: 97.19%\n", + "#1066: 97.19%\n", + "#1067: 97.19%\n", + "#1068: 97.10%\n", + "#1069: 97.10%\n", + "#1070: 97.11%\n", + "#1071: 97.11%\n", + "#1072: 97.11%\n", + "#1073: 97.11%\n", + "#1074: 97.12%\n", + "#1075: 97.12%\n", + "#1076: 97.12%\n", + "#1077: 97.12%\n", + "#1078: 97.13%\n", + "#1079: 97.13%\n", + "#1080: 97.13%\n", + "#1081: 97.13%\n", + "#1082: 97.14%\n", + "#1083: 97.14%\n", + "#1084: 97.14%\n", + "#1085: 97.15%\n", + "#1086: 97.15%\n", + "#1087: 97.15%\n", + "#1088: 97.15%\n", + "#1089: 97.16%\n", + "#1090: 97.16%\n", + "#1091: 97.16%\n", + "#1092: 97.16%\n", + "#1093: 97.17%\n", + "#1094: 97.17%\n", + "#1095: 97.17%\n", + "#1096: 97.17%\n", + "#1097: 97.18%\n", + "#1098: 97.18%\n", + "#1099: 97.18%\n", + "#1100: 97.18%\n", + "#1101: 97.19%\n", + "#1102: 97.19%\n", + "#1103: 97.19%\n", + "#1104: 97.19%\n", + "#1105: 97.20%\n", + "#1106: 97.20%\n", + "#1107: 97.11%\n", + "#1108: 97.11%\n", + "#1109: 97.12%\n", + "#1110: 97.12%\n", + "#1111: 97.12%\n", + "#1112: 97.04%\n", + "#1113: 97.04%\n", + "#1114: 96.95%\n", + "#1115: 96.95%\n", + "#1116: 96.96%\n", + "#1117: 96.96%\n", + "#1118: 96.96%\n", + "#1119: 96.96%\n", + "#1120: 96.97%\n", + "#1121: 96.97%\n", + "#1122: 96.97%\n", + "#1123: 96.98%\n", + "#1124: 96.98%\n", + "#1125: 96.89%\n", + "#1126: 96.89%\n", + "#1127: 96.90%\n", + "#1128: 96.90%\n", + "#1129: 96.90%\n", + "#1130: 96.91%\n", + "#1131: 96.91%\n", + "#1132: 96.91%\n", + "#1133: 96.91%\n", + "#1134: 96.92%\n", + "#1135: 96.92%\n", + "#1136: 96.92%\n", + "#1137: 96.92%\n", + "#1138: 96.93%\n", + "#1139: 96.93%\n", + 
"#1140: 96.93%\n", + "#1141: 96.94%\n", + "#1142: 96.94%\n", + "#1143: 96.85%\n", + "#1144: 96.86%\n", + "#1145: 96.86%\n", + "#1146: 96.86%\n", + "#1147: 96.86%\n", + "#1148: 96.87%\n", + "#1149: 96.87%\n", + "#1150: 96.87%\n", + "#1151: 96.88%\n", + "#1152: 96.88%\n", + "#1153: 96.88%\n", + "#1154: 96.88%\n", + "#1155: 96.89%\n", + "#1156: 96.89%\n", + "#1157: 96.89%\n", + "#1158: 96.89%\n", + "#1159: 96.90%\n", + "#1160: 96.90%\n", + "#1161: 96.90%\n", + "#1162: 96.90%\n", + "#1163: 96.91%\n", + "#1164: 96.91%\n", + "#1165: 96.91%\n", + "#1166: 96.92%\n", + "#1167: 96.92%\n", + "#1168: 96.92%\n", + "#1169: 96.92%\n", + "#1170: 96.93%\n", + "#1171: 96.93%\n", + "#1172: 96.93%\n", + "#1173: 96.93%\n", + "#1174: 96.94%\n", + "#1175: 96.94%\n", + "#1176: 96.94%\n", + "#1177: 96.94%\n", + "#1178: 96.95%\n", + "#1179: 96.95%\n", + "#1180: 96.95%\n", + "#1181: 96.95%\n", + "#1182: 96.87%\n", + "#1183: 96.88%\n", + "#1184: 96.88%\n", + "#1185: 96.88%\n", + "#1186: 96.88%\n", + "#1187: 96.89%\n", + "#1188: 96.89%\n", + "#1189: 96.89%\n", + "#1190: 96.89%\n", + "#1191: 96.81%\n", + "#1192: 96.81%\n", + "#1193: 96.82%\n", + "#1194: 96.82%\n", + "#1195: 96.82%\n", + "#1196: 96.83%\n", + "#1197: 96.83%\n", + "#1198: 96.83%\n", + "#1199: 96.83%\n", + "#1200: 96.84%\n", + "#1201: 96.84%\n", + "#1202: 96.84%\n", + "#1203: 96.84%\n", + "#1204: 96.85%\n", + "#1205: 96.85%\n", + "#1206: 96.85%\n", + "#1207: 96.85%\n", + "#1208: 96.86%\n", + "#1209: 96.86%\n", + "#1210: 96.86%\n", + "#1211: 96.86%\n", + "#1212: 96.87%\n", + "#1213: 96.87%\n", + "#1214: 96.87%\n", + "#1215: 96.88%\n", + "#1216: 96.88%\n", + "#1217: 96.88%\n", + "#1218: 96.88%\n", + "#1219: 96.89%\n", + "#1220: 96.89%\n", + "#1221: 96.89%\n", + "#1222: 96.89%\n", + "#1223: 96.90%\n", + "#1224: 96.90%\n", + "#1225: 96.90%\n", + "#1226: 96.82%\n", + "#1227: 96.82%\n", + "#1228: 96.83%\n", + "#1229: 96.83%\n", + "#1230: 96.83%\n", + "#1231: 96.83%\n", + "#1232: 96.84%\n", + "#1233: 96.84%\n", + "#1234: 96.84%\n", + 
"#1235: 96.84%\n", + "#1236: 96.85%\n", + "#1237: 96.85%\n", + "#1238: 96.85%\n", + "#1239: 96.85%\n", + "#1240: 96.86%\n", + "#1241: 96.86%\n", + "#1242: 96.86%\n", + "#1243: 96.86%\n", + "#1244: 96.87%\n", + "#1245: 96.87%\n", + "#1246: 96.87%\n", + "#1247: 96.79%\n", + "#1248: 96.80%\n", + "#1249: 96.80%\n", + "#1250: 96.80%\n", + "#1251: 96.81%\n", + "#1252: 96.81%\n", + "#1253: 96.81%\n", + "#1254: 96.81%\n", + "#1255: 96.82%\n", + "#1256: 96.82%\n", + "#1257: 96.82%\n", + "#1258: 96.82%\n", + "#1259: 96.83%\n", + "#1260: 96.75%\n", + "#1261: 96.75%\n", + "#1262: 96.75%\n", + "#1263: 96.76%\n", + "#1264: 96.76%\n", + "#1265: 96.76%\n", + "#1266: 96.76%\n", + "#1267: 96.77%\n", + "#1268: 96.77%\n", + "#1269: 96.77%\n", + "#1270: 96.77%\n", + "#1271: 96.78%\n", + "#1272: 96.78%\n", + "#1273: 96.78%\n", + "#1274: 96.78%\n", + "#1275: 96.79%\n", + "#1276: 96.79%\n", + "#1277: 96.79%\n", + "#1278: 96.79%\n", + "#1279: 96.80%\n", + "#1280: 96.80%\n", + "#1281: 96.80%\n", + "#1282: 96.80%\n", + "#1283: 96.81%\n", + "#1284: 96.81%\n", + "#1285: 96.81%\n", + "#1286: 96.81%\n", + "#1287: 96.82%\n", + "#1288: 96.82%\n", + "#1289: 96.82%\n", + "#1290: 96.75%\n", + "#1291: 96.75%\n", + "#1292: 96.75%\n", + "#1293: 96.75%\n", + "#1294: 96.76%\n", + "#1295: 96.76%\n", + "#1296: 96.76%\n", + "#1297: 96.76%\n", + "#1298: 96.77%\n", + "#1299: 96.69%\n", + "#1300: 96.69%\n", + "#1301: 96.70%\n", + "#1302: 96.70%\n", + "#1303: 96.70%\n", + "#1304: 96.70%\n", + "#1305: 96.71%\n", + "#1306: 96.71%\n", + "#1307: 96.71%\n", + "#1308: 96.72%\n", + "#1309: 96.72%\n", + "#1310: 96.72%\n", + "#1311: 96.72%\n", + "#1312: 96.73%\n", + "#1313: 96.73%\n", + "#1314: 96.73%\n", + "#1315: 96.73%\n", + "#1316: 96.74%\n", + "#1317: 96.74%\n", + "#1318: 96.74%\n", + "#1319: 96.67%\n", + "#1320: 96.67%\n", + "#1321: 96.67%\n", + "#1322: 96.67%\n", + "#1323: 96.68%\n", + "#1324: 96.68%\n", + "#1325: 96.68%\n", + "#1326: 96.68%\n", + "#1327: 96.69%\n", + "#1328: 96.69%\n", + "#1329: 96.69%\n", + 
"#1330: 96.69%\n", + "#1331: 96.70%\n", + "#1332: 96.70%\n", + "#1333: 96.70%\n", + "#1334: 96.70%\n", + "#1335: 96.71%\n", + "#1336: 96.71%\n", + "#1337: 96.71%\n", + "#1338: 96.71%\n", + "#1339: 96.72%\n", + "#1340: 96.72%\n", + "#1341: 96.72%\n", + "#1342: 96.72%\n", + "#1343: 96.73%\n", + "#1344: 96.73%\n", + "#1345: 96.73%\n", + "#1346: 96.73%\n", + "#1347: 96.74%\n", + "#1348: 96.74%\n", + "#1349: 96.74%\n", + "#1350: 96.74%\n", + "#1351: 96.75%\n", + "#1352: 96.75%\n", + "#1353: 96.75%\n", + "#1354: 96.75%\n", + "#1355: 96.76%\n", + "#1356: 96.76%\n", + "#1357: 96.76%\n", + "#1358: 96.76%\n", + "#1359: 96.76%\n", + "#1360: 96.77%\n", + "#1361: 96.77%\n", + "#1362: 96.77%\n", + "#1363: 96.77%\n", + "#1364: 96.70%\n", + "#1365: 96.71%\n", + "#1366: 96.71%\n", + "#1367: 96.71%\n", + "#1368: 96.71%\n", + "#1369: 96.72%\n", + "#1370: 96.72%\n", + "#1371: 96.72%\n", + "#1372: 96.72%\n", + "#1373: 96.72%\n", + "#1374: 96.73%\n", + "#1375: 96.73%\n", + "#1376: 96.73%\n", + "#1377: 96.73%\n", + "#1378: 96.74%\n", + "#1379: 96.74%\n", + "#1380: 96.74%\n", + "#1381: 96.74%\n", + "#1382: 96.75%\n", + "#1383: 96.68%\n", + "#1384: 96.68%\n", + "#1385: 96.68%\n", + "#1386: 96.68%\n", + "#1387: 96.69%\n", + "#1388: 96.69%\n", + "#1389: 96.69%\n", + "#1390: 96.69%\n", + "#1391: 96.70%\n", + "#1392: 96.70%\n", + "#1393: 96.70%\n", + "#1394: 96.70%\n", + "#1395: 96.70%\n", + "#1396: 96.71%\n", + "#1397: 96.71%\n", + "#1398: 96.71%\n", + "#1399: 96.71%\n", + "#1400: 96.72%\n", + "#1401: 96.72%\n", + "#1402: 96.72%\n", + "#1403: 96.72%\n", + "#1404: 96.73%\n", + "#1405: 96.73%\n", + "#1406: 96.73%\n", + "#1407: 96.73%\n", + "#1408: 96.74%\n", + "#1409: 96.74%\n", + "#1410: 96.74%\n", + "#1411: 96.74%\n", + "#1412: 96.74%\n", + "#1413: 96.75%\n", + "#1414: 96.75%\n", + "#1415: 96.68%\n", + "#1416: 96.68%\n", + "#1417: 96.69%\n", + "#1418: 96.69%\n", + "#1419: 96.69%\n", + "#1420: 96.69%\n", + "#1421: 96.69%\n", + "#1422: 96.70%\n", + "#1423: 96.70%\n", + "#1424: 96.70%\n", + 
"#1425: 96.70%\n", + "#1426: 96.71%\n", + "#1427: 96.71%\n", + "#1428: 96.71%\n", + "#1429: 96.71%\n", + "#1430: 96.72%\n", + "#1431: 96.72%\n", + "#1432: 96.72%\n", + "#1433: 96.72%\n", + "#1434: 96.72%\n", + "#1435: 96.73%\n", + "#1436: 96.73%\n", + "#1437: 96.73%\n", + "#1438: 96.73%\n", + "#1439: 96.74%\n", + "#1440: 96.67%\n", + "#1441: 96.67%\n", + "#1442: 96.67%\n", + "#1443: 96.68%\n", + "#1444: 96.68%\n", + "#1445: 96.68%\n", + "#1446: 96.68%\n", + "#1447: 96.69%\n", + "#1448: 96.69%\n", + "#1449: 96.69%\n", + "#1450: 96.69%\n", + "#1451: 96.69%\n", + "#1452: 96.70%\n", + "#1453: 96.70%\n", + "#1454: 96.70%\n", + "#1455: 96.70%\n", + "#1456: 96.71%\n", + "#1457: 96.71%\n", + "#1458: 96.71%\n", + "#1459: 96.71%\n", + "#1460: 96.71%\n", + "#1461: 96.72%\n", + "#1462: 96.72%\n", + "#1463: 96.72%\n", + "#1464: 96.72%\n", + "#1465: 96.73%\n", + "#1466: 96.73%\n", + "#1467: 96.73%\n", + "#1468: 96.73%\n", + "#1469: 96.73%\n", + "#1470: 96.74%\n", + "#1471: 96.74%\n", + "#1472: 96.74%\n", + "#1473: 96.74%\n", + "#1474: 96.75%\n", + "#1475: 96.75%\n", + "#1476: 96.75%\n", + "#1477: 96.75%\n", + "#1478: 96.75%\n", + "#1479: 96.76%\n", + "#1480: 96.76%\n", + "#1481: 96.76%\n", + "#1482: 96.76%\n", + "#1483: 96.77%\n", + "#1484: 96.77%\n", + "#1485: 96.77%\n", + "#1486: 96.77%\n", + "#1487: 96.77%\n", + "#1488: 96.78%\n", + "#1489: 96.78%\n", + "#1490: 96.78%\n", + "#1491: 96.78%\n", + "#1492: 96.78%\n", + "#1493: 96.79%\n", + "#1494: 96.79%\n", + "#1495: 96.79%\n", + "#1496: 96.79%\n", + "#1497: 96.80%\n", + "#1498: 96.80%\n", + "#1499: 96.80%\n", + "#1500: 96.80%\n", + "#1501: 96.80%\n", + "#1502: 96.81%\n", + "#1503: 96.81%\n", + "#1504: 96.81%\n", + "#1505: 96.81%\n", + "#1506: 96.81%\n", + "#1507: 96.82%\n", + "#1508: 96.82%\n", + "#1509: 96.82%\n", + "#1510: 96.82%\n", + "#1511: 96.83%\n", + "#1512: 96.83%\n", + "#1513: 96.83%\n", + "#1514: 96.83%\n", + "#1515: 96.83%\n", + "#1516: 96.84%\n", + "#1517: 96.84%\n", + "#1518: 96.84%\n", + "#1519: 96.84%\n", + 
"#1520: 96.84%\n", + "#1521: 96.85%\n", + "#1522: 96.85%\n", + "#1523: 96.85%\n", + "#1524: 96.85%\n", + "#1525: 96.85%\n", + "#1526: 96.86%\n", + "#1527: 96.79%\n", + "#1528: 96.80%\n", + "#1529: 96.80%\n", + "#1530: 96.73%\n", + "#1531: 96.74%\n", + "#1532: 96.74%\n", + "#1533: 96.74%\n", + "#1534: 96.74%\n", + "#1535: 96.74%\n", + "#1536: 96.75%\n", + "#1537: 96.75%\n", + "#1538: 96.75%\n", + "#1539: 96.75%\n", + "#1540: 96.76%\n", + "#1541: 96.76%\n", + "#1542: 96.76%\n", + "#1543: 96.76%\n", + "#1544: 96.76%\n", + "#1545: 96.77%\n", + "#1546: 96.77%\n", + "#1547: 96.77%\n", + "#1548: 96.77%\n", + "#1549: 96.77%\n", + "#1550: 96.78%\n", + "#1551: 96.78%\n", + "#1552: 96.78%\n", + "#1553: 96.72%\n", + "#1554: 96.72%\n", + "#1555: 96.72%\n", + "#1556: 96.72%\n", + "#1557: 96.73%\n", + "#1558: 96.73%\n", + "#1559: 96.67%\n", + "#1560: 96.67%\n", + "#1561: 96.67%\n", + "#1562: 96.67%\n", + "#1563: 96.68%\n", + "#1564: 96.68%\n", + "#1565: 96.68%\n", + "#1566: 96.68%\n", + "#1567: 96.68%\n", + "#1568: 96.69%\n", + "#1569: 96.69%\n", + "#1570: 96.69%\n", + "#1571: 96.69%\n", + "#1572: 96.69%\n", + "#1573: 96.70%\n", + "#1574: 96.70%\n", + "#1575: 96.70%\n", + "#1576: 96.70%\n", + "#1577: 96.70%\n", + "#1578: 96.71%\n", + "#1579: 96.71%\n", + "#1580: 96.71%\n", + "#1581: 96.71%\n", + "#1582: 96.72%\n", + "#1583: 96.72%\n", + "#1584: 96.72%\n", + "#1585: 96.72%\n", + "#1586: 96.72%\n", + "#1587: 96.73%\n", + "#1588: 96.73%\n", + "#1589: 96.73%\n", + "#1590: 96.73%\n", + "#1591: 96.73%\n", + "#1592: 96.74%\n", + "#1593: 96.74%\n", + "#1594: 96.74%\n", + "#1595: 96.74%\n", + "#1596: 96.74%\n", + "#1597: 96.75%\n", + "#1598: 96.75%\n", + "#1599: 96.75%\n", + "#1600: 96.75%\n", + "#1601: 96.75%\n", + "#1602: 96.76%\n", + "#1603: 96.76%\n", + "#1604: 96.76%\n", + "#1605: 96.76%\n", + "#1606: 96.76%\n", + "#1607: 96.70%\n", + "#1608: 96.71%\n", + "#1609: 96.71%\n", + "#1610: 96.71%\n", + "#1611: 96.65%\n", + "#1612: 96.65%\n", + "#1613: 96.65%\n", + "#1614: 96.66%\n", + 
"#1615: 96.66%\n", + "#1616: 96.66%\n", + "#1617: 96.66%\n", + "#1618: 96.66%\n", + "#1619: 96.67%\n", + "#1620: 96.67%\n", + "#1621: 96.61%\n", + "#1622: 96.61%\n", + "#1623: 96.61%\n", + "#1624: 96.62%\n", + "#1625: 96.62%\n", + "#1626: 96.62%\n", + "#1627: 96.62%\n", + "#1628: 96.62%\n", + "#1629: 96.63%\n", + "#1630: 96.63%\n", + "#1631: 96.63%\n", + "#1632: 96.63%\n", + "#1633: 96.63%\n", + "#1634: 96.64%\n", + "#1635: 96.64%\n", + "#1636: 96.64%\n", + "#1637: 96.64%\n", + "#1638: 96.64%\n", + "#1639: 96.65%\n", + "#1640: 96.59%\n", + "#1641: 96.59%\n", + "#1642: 96.59%\n", + "#1643: 96.59%\n", + "#1644: 96.60%\n", + "#1645: 96.60%\n", + "#1646: 96.60%\n", + "#1647: 96.60%\n", + "#1648: 96.60%\n", + "#1649: 96.61%\n", + "#1650: 96.61%\n", + "#1651: 96.61%\n", + "#1652: 96.61%\n", + "#1653: 96.61%\n", + "#1654: 96.62%\n", + "#1655: 96.62%\n", + "#1656: 96.62%\n", + "#1657: 96.62%\n", + "#1658: 96.62%\n", + "#1659: 96.63%\n", + "#1660: 96.63%\n", + "#1661: 96.63%\n", + "#1662: 96.63%\n", + "#1663: 96.63%\n", + "#1664: 96.64%\n", + "#1665: 96.64%\n", + "#1666: 96.64%\n", + "#1667: 96.64%\n", + "#1668: 96.64%\n", + "#1669: 96.65%\n", + "#1670: 96.65%\n", + "#1671: 96.65%\n", + "#1672: 96.65%\n", + "#1673: 96.65%\n", + "#1674: 96.66%\n", + "#1675: 96.66%\n", + "#1676: 96.66%\n", + "#1677: 96.66%\n", + "#1678: 96.66%\n", + "#1679: 96.67%\n", + "#1680: 96.67%\n", + "#1681: 96.61%\n", + "#1682: 96.61%\n", + "#1683: 96.62%\n", + "#1684: 96.62%\n", + "#1685: 96.62%\n", + "#1686: 96.56%\n", + "#1687: 96.56%\n", + "#1688: 96.57%\n", + "#1689: 96.57%\n", + "#1690: 96.57%\n", + "#1691: 96.57%\n", + "#1692: 96.57%\n", + "#1693: 96.58%\n", + "#1694: 96.58%\n", + "#1695: 96.58%\n", + "#1696: 96.58%\n", + "#1697: 96.58%\n", + "#1698: 96.59%\n", + "#1699: 96.59%\n", + "#1700: 96.59%\n", + "#1701: 96.59%\n", + "#1702: 96.59%\n", + "#1703: 96.60%\n", + "#1704: 96.60%\n", + "#1705: 96.60%\n", + "#1706: 96.60%\n", + "#1707: 96.60%\n", + "#1708: 96.61%\n", + "#1709: 96.55%\n", + 
"#1710: 96.55%\n", + "#1711: 96.55%\n", + "#1712: 96.56%\n", + "#1713: 96.56%\n", + "#1714: 96.56%\n", + "#1715: 96.56%\n", + "#1716: 96.56%\n", + "#1717: 96.51%\n", + "#1718: 96.51%\n", + "#1719: 96.51%\n", + "#1720: 96.51%\n", + "#1721: 96.52%\n", + "#1722: 96.52%\n", + "#1723: 96.52%\n", + "#1724: 96.52%\n", + "#1725: 96.52%\n", + "#1726: 96.53%\n", + "#1727: 96.53%\n", + "#1728: 96.53%\n", + "#1729: 96.53%\n", + "#1730: 96.53%\n", + "#1731: 96.54%\n", + "#1732: 96.54%\n", + "#1733: 96.54%\n", + "#1734: 96.54%\n", + "#1735: 96.54%\n", + "#1736: 96.55%\n", + "#1737: 96.49%\n", + "#1738: 96.49%\n", + "#1739: 96.49%\n", + "#1740: 96.50%\n", + "#1741: 96.50%\n", + "#1742: 96.50%\n", + "#1743: 96.50%\n", + "#1744: 96.50%\n", + "#1745: 96.51%\n", + "#1746: 96.51%\n", + "#1747: 96.45%\n", + "#1748: 96.46%\n", + "#1749: 96.46%\n", + "#1750: 96.46%\n", + "#1751: 96.46%\n", + "#1752: 96.46%\n", + "#1753: 96.47%\n", + "#1754: 96.41%\n", + "#1755: 96.41%\n", + "#1756: 96.41%\n", + "#1757: 96.42%\n", + "#1758: 96.42%\n", + "#1759: 96.42%\n", + "#1760: 96.42%\n", + "#1761: 96.42%\n", + "#1762: 96.43%\n", + "#1763: 96.43%\n", + "#1764: 96.43%\n", + "#1765: 96.43%\n", + "#1766: 96.43%\n", + "#1767: 96.44%\n", + "#1768: 96.44%\n", + "#1769: 96.44%\n", + "#1770: 96.44%\n", + "#1771: 96.44%\n", + "#1772: 96.45%\n", + "#1773: 96.45%\n", + "#1774: 96.45%\n", + "#1775: 96.45%\n", + "#1776: 96.45%\n", + "#1777: 96.46%\n", + "#1778: 96.46%\n", + "#1779: 96.46%\n", + "#1780: 96.46%\n", + "#1781: 96.46%\n", + "#1782: 96.47%\n", + "#1783: 96.47%\n", + "#1784: 96.47%\n", + "#1785: 96.47%\n", + "#1786: 96.47%\n", + "#1787: 96.48%\n", + "#1788: 96.48%\n", + "#1789: 96.48%\n", + "#1790: 96.48%\n", + "#1791: 96.48%\n", + "#1792: 96.49%\n", + "#1793: 96.49%\n", + "#1794: 96.49%\n", + "#1795: 96.49%\n", + "#1796: 96.49%\n", + "#1797: 96.50%\n", + "#1798: 96.50%\n", + "#1799: 96.50%\n", + "#1800: 96.50%\n", + "#1801: 96.50%\n", + "#1802: 96.51%\n", + "#1803: 96.51%\n", + "#1804: 96.51%\n", + 
"#1805: 96.51%\n", + "#1806: 96.51%\n", + "#1807: 96.52%\n", + "#1808: 96.52%\n", + "#1809: 96.52%\n", + "#1810: 96.52%\n", + "#1811: 96.52%\n", + "#1812: 96.53%\n", + "#1813: 96.53%\n", + "#1814: 96.53%\n", + "#1815: 96.53%\n", + "#1816: 96.53%\n", + "#1817: 96.53%\n", + "#1818: 96.54%\n", + "#1819: 96.54%\n", + "#1820: 96.54%\n", + "#1821: 96.54%\n", + "#1822: 96.54%\n", + "#1823: 96.55%\n", + "#1824: 96.55%\n", + "#1825: 96.55%\n", + "#1826: 96.55%\n", + "#1827: 96.55%\n", + "#1828: 96.56%\n", + "#1829: 96.56%\n", + "#1830: 96.56%\n", + "#1831: 96.56%\n", + "#1832: 96.56%\n", + "#1833: 96.56%\n", + "#1834: 96.57%\n", + "#1835: 96.57%\n", + "#1836: 96.57%\n", + "#1837: 96.57%\n", + "#1838: 96.57%\n", + "#1839: 96.58%\n", + "#1840: 96.58%\n", + "#1841: 96.58%\n", + "#1842: 96.58%\n", + "#1843: 96.58%\n", + "#1844: 96.59%\n", + "#1845: 96.59%\n", + "#1846: 96.59%\n", + "#1847: 96.59%\n", + "#1848: 96.59%\n", + "#1849: 96.59%\n", + "#1850: 96.60%\n", + "#1851: 96.60%\n", + "#1852: 96.60%\n", + "#1853: 96.60%\n", + "#1854: 96.60%\n", + "#1855: 96.61%\n", + "#1856: 96.61%\n", + "#1857: 96.61%\n", + "#1858: 96.61%\n", + "#1859: 96.61%\n", + "#1860: 96.61%\n", + "#1861: 96.62%\n", + "#1862: 96.62%\n", + "#1863: 96.62%\n", + "#1864: 96.62%\n", + "#1865: 96.62%\n", + "#1866: 96.63%\n", + "#1867: 96.63%\n", + "#1868: 96.63%\n", + "#1869: 96.63%\n", + "#1870: 96.63%\n", + "#1871: 96.63%\n", + "#1872: 96.64%\n", + "#1873: 96.64%\n", + "#1874: 96.64%\n", + "#1875: 96.64%\n", + "#1876: 96.64%\n", + "#1877: 96.65%\n", + "#1878: 96.65%\n", + "#1879: 96.65%\n", + "#1880: 96.65%\n", + "#1881: 96.65%\n", + "#1882: 96.65%\n", + "#1883: 96.60%\n", + "#1884: 96.60%\n", + "#1885: 96.61%\n", + "#1886: 96.61%\n", + "#1887: 96.61%\n", + "#1888: 96.61%\n", + "#1889: 96.61%\n", + "#1890: 96.62%\n", + "#1891: 96.62%\n", + "#1892: 96.62%\n", + "#1893: 96.62%\n", + "#1894: 96.62%\n", + "#1895: 96.62%\n", + "#1896: 96.63%\n", + "#1897: 96.63%\n", + "#1898: 96.63%\n", + "#1899: 96.63%\n", + 
"#1900: 96.63%\n", + "#1901: 96.58%\n", + "#1902: 96.58%\n", + "#1903: 96.59%\n", + "#1904: 96.59%\n", + "#1905: 96.59%\n", + "#1906: 96.59%\n", + "#1907: 96.59%\n", + "#1908: 96.60%\n", + "#1909: 96.60%\n", + "#1910: 96.60%\n", + "#1911: 96.60%\n", + "#1912: 96.60%\n", + "#1913: 96.60%\n", + "#1914: 96.61%\n", + "#1915: 96.61%\n", + "#1916: 96.61%\n", + "#1917: 96.61%\n", + "#1918: 96.61%\n", + "#1919: 96.61%\n", + "#1920: 96.62%\n", + "#1921: 96.62%\n", + "#1922: 96.62%\n", + "#1923: 96.62%\n", + "#1924: 96.62%\n", + "#1925: 96.63%\n", + "#1926: 96.63%\n", + "#1927: 96.63%\n", + "#1928: 96.63%\n", + "#1929: 96.63%\n", + "#1930: 96.63%\n", + "#1931: 96.64%\n", + "#1932: 96.64%\n", + "#1933: 96.64%\n", + "#1934: 96.64%\n", + "#1935: 96.64%\n", + "#1936: 96.64%\n", + "#1937: 96.65%\n", + "#1938: 96.65%\n", + "#1939: 96.65%\n", + "#1940: 96.65%\n", + "#1941: 96.65%\n", + "#1942: 96.65%\n", + "#1943: 96.66%\n", + "#1944: 96.66%\n", + "#1945: 96.66%\n", + "#1946: 96.66%\n", + "#1947: 96.66%\n", + "#1948: 96.66%\n", + "#1949: 96.67%\n", + "#1950: 96.67%\n", + "#1951: 96.67%\n", + "#1952: 96.67%\n", + "#1953: 96.67%\n", + "#1954: 96.68%\n", + "#1955: 96.63%\n", + "#1956: 96.63%\n", + "#1957: 96.63%\n", + "#1958: 96.63%\n", + "#1959: 96.63%\n", + "#1960: 96.63%\n", + "#1961: 96.64%\n", + "#1962: 96.64%\n", + "#1963: 96.64%\n", + "#1964: 96.64%\n", + "#1965: 96.64%\n", + "#1966: 96.64%\n", + "#1967: 96.65%\n", + "#1968: 96.65%\n", + "#1969: 96.65%\n", + "#1970: 96.65%\n", + "#1971: 96.65%\n", + "#1972: 96.65%\n", + "#1973: 96.66%\n", + "#1974: 96.66%\n", + "#1975: 96.66%\n", + "#1976: 96.66%\n", + "#1977: 96.66%\n", + "#1978: 96.66%\n", + "#1979: 96.67%\n", + "#1980: 96.67%\n", + "#1981: 96.67%\n", + "#1982: 96.67%\n", + "#1983: 96.67%\n", + "#1984: 96.68%\n", + "#1985: 96.68%\n", + "#1986: 96.68%\n", + "#1987: 96.68%\n", + "#1988: 96.68%\n", + "#1989: 96.68%\n", + "#1990: 96.69%\n", + "#1991: 96.69%\n", + "#1992: 96.69%\n", + "#1993: 96.69%\n", + "#1994: 96.69%\n", + 
"#1995: 96.69%\n", + "#1996: 96.70%\n", + "#1997: 96.70%\n", + "#1998: 96.70%\n", + "#1999: 96.70%\n", + "#2000: 96.70%\n", + "#2001: 96.70%\n", + "#2002: 96.70%\n", + "#2003: 96.71%\n", + "#2004: 96.71%\n", + "#2005: 96.71%\n", + "#2006: 96.71%\n", + "#2007: 96.71%\n", + "#2008: 96.71%\n", + "#2009: 96.72%\n", + "#2010: 96.72%\n", + "#2011: 96.72%\n", + "#2012: 96.72%\n", + "#2013: 96.72%\n", + "#2014: 96.72%\n", + "#2015: 96.73%\n", + "#2016: 96.73%\n", + "#2017: 96.73%\n", + "#2018: 96.68%\n", + "#2019: 96.68%\n", + "#2020: 96.68%\n", + "#2021: 96.69%\n", + "#2022: 96.69%\n", + "#2023: 96.69%\n", + "#2024: 96.69%\n", + "#2025: 96.64%\n", + "#2026: 96.65%\n", + "#2027: 96.65%\n", + "#2028: 96.65%\n", + "#2029: 96.65%\n", + "#2030: 96.65%\n", + "#2031: 96.65%\n", + "#2032: 96.66%\n", + "#2033: 96.66%\n", + "#2034: 96.66%\n", + "#2035: 96.66%\n", + "#2036: 96.66%\n", + "#2037: 96.66%\n", + "#2038: 96.67%\n", + "#2039: 96.67%\n", + "#2040: 96.67%\n", + "#2041: 96.67%\n", + "#2042: 96.67%\n", + "#2043: 96.62%\n", + "#2044: 96.63%\n", + "#2045: 96.63%\n", + "#2046: 96.63%\n", + "#2047: 96.63%\n", + "#2048: 96.63%\n", + "#2049: 96.63%\n", + "#2050: 96.64%\n", + "#2051: 96.64%\n", + "#2052: 96.64%\n", + "#2053: 96.64%\n", + "#2054: 96.64%\n", + "#2055: 96.64%\n", + "#2056: 96.65%\n", + "#2057: 96.65%\n", + "#2058: 96.65%\n", + "#2059: 96.65%\n", + "#2060: 96.65%\n", + "#2061: 96.65%\n", + "#2062: 96.66%\n", + "#2063: 96.66%\n", + "#2064: 96.66%\n", + "#2065: 96.66%\n", + "#2066: 96.66%\n", + "#2067: 96.66%\n", + "#2068: 96.67%\n", + "#2069: 96.67%\n", + "#2070: 96.62%\n", + "#2071: 96.62%\n", + "#2072: 96.62%\n", + "#2073: 96.62%\n", + "#2074: 96.63%\n", + "#2075: 96.63%\n", + "#2076: 96.63%\n", + "#2077: 96.63%\n", + "#2078: 96.63%\n", + "#2079: 96.63%\n", + "#2080: 96.64%\n", + "#2081: 96.64%\n", + "#2082: 96.64%\n", + "#2083: 96.64%\n", + "#2084: 96.64%\n", + "#2085: 96.64%\n", + "#2086: 96.65%\n", + "#2087: 96.65%\n", + "#2088: 96.65%\n", + "#2089: 96.65%\n", + 
"#2090: 96.65%\n", + "#2091: 96.65%\n", + "#2092: 96.66%\n", + "#2093: 96.66%\n", + "#2094: 96.66%\n", + "#2095: 96.66%\n", + "#2096: 96.66%\n", + "#2097: 96.66%\n", + "#2098: 96.62%\n", + "#2099: 96.62%\n", + "#2100: 96.62%\n", + "#2101: 96.62%\n", + "#2102: 96.62%\n", + "#2103: 96.63%\n", + "#2104: 96.63%\n", + "#2105: 96.63%\n", + "#2106: 96.63%\n", + "#2107: 96.63%\n", + "#2108: 96.63%\n", + "#2109: 96.59%\n", + "#2110: 96.59%\n", + "#2111: 96.59%\n", + "#2112: 96.59%\n", + "#2113: 96.59%\n", + "#2114: 96.60%\n", + "#2115: 96.60%\n", + "#2116: 96.60%\n", + "#2117: 96.60%\n", + "#2118: 96.55%\n", + "#2119: 96.56%\n", + "#2120: 96.56%\n", + "#2121: 96.56%\n", + "#2122: 96.56%\n", + "#2123: 96.56%\n", + "#2124: 96.56%\n", + "#2125: 96.57%\n", + "#2126: 96.57%\n", + "#2127: 96.57%\n", + "#2128: 96.57%\n", + "#2129: 96.57%\n", + "#2130: 96.53%\n", + "#2131: 96.53%\n", + "#2132: 96.53%\n", + "#2133: 96.53%\n", + "#2134: 96.53%\n", + "#2135: 96.49%\n", + "#2136: 96.49%\n", + "#2137: 96.49%\n", + "#2138: 96.49%\n", + "#2139: 96.50%\n", + "#2140: 96.50%\n", + "#2141: 96.50%\n", + "#2142: 96.50%\n", + "#2143: 96.50%\n", + "#2144: 96.50%\n", + "#2145: 96.51%\n", + "#2146: 96.51%\n", + "#2147: 96.51%\n", + "#2148: 96.51%\n", + "#2149: 96.51%\n", + "#2150: 96.51%\n", + "#2151: 96.51%\n", + "#2152: 96.52%\n", + "#2153: 96.52%\n", + "#2154: 96.52%\n", + "#2155: 96.52%\n", + "#2156: 96.52%\n", + "#2157: 96.52%\n", + "#2158: 96.53%\n", + "#2159: 96.53%\n", + "#2160: 96.53%\n", + "#2161: 96.53%\n", + "#2162: 96.53%\n", + "#2163: 96.53%\n", + "#2164: 96.54%\n", + "#2165: 96.54%\n", + "#2166: 96.54%\n", + "#2167: 96.54%\n", + "#2168: 96.54%\n", + "#2169: 96.54%\n", + "#2170: 96.55%\n", + "#2171: 96.55%\n", + "#2172: 96.55%\n", + "#2173: 96.55%\n", + "#2174: 96.55%\n", + "#2175: 96.55%\n", + "#2176: 96.55%\n", + "#2177: 96.56%\n", + "#2178: 96.56%\n", + "#2179: 96.56%\n", + "#2180: 96.56%\n", + "#2181: 96.56%\n", + "#2182: 96.56%\n", + "#2183: 96.57%\n", + "#2184: 96.57%\n", + 
"#2185: 96.52%\n", + "#2186: 96.48%\n", + "#2187: 96.48%\n", + "#2188: 96.48%\n", + "#2189: 96.44%\n", + "#2190: 96.44%\n", + "#2191: 96.44%\n", + "#2192: 96.44%\n", + "#2193: 96.44%\n", + "#2194: 96.45%\n", + "#2195: 96.45%\n", + "#2196: 96.45%\n", + "#2197: 96.45%\n", + "#2198: 96.45%\n", + "#2199: 96.45%\n", + "#2200: 96.46%\n", + "#2201: 96.46%\n", + "#2202: 96.46%\n", + "#2203: 96.46%\n", + "#2204: 96.46%\n", + "#2205: 96.46%\n", + "#2206: 96.47%\n", + "#2207: 96.47%\n", + "#2208: 96.47%\n", + "#2209: 96.47%\n", + "#2210: 96.47%\n", + "#2211: 96.47%\n", + "#2212: 96.48%\n", + "#2213: 96.48%\n", + "#2214: 96.48%\n", + "#2215: 96.48%\n", + "#2216: 96.48%\n", + "#2217: 96.48%\n", + "#2218: 96.48%\n", + "#2219: 96.49%\n", + "#2220: 96.49%\n", + "#2221: 96.49%\n", + "#2222: 96.49%\n", + "#2223: 96.49%\n", + "#2224: 96.49%\n", + "#2225: 96.50%\n", + "#2226: 96.50%\n", + "#2227: 96.50%\n", + "#2228: 96.50%\n", + "#2229: 96.50%\n", + "#2230: 96.50%\n", + "#2231: 96.51%\n", + "#2232: 96.51%\n", + "#2233: 96.51%\n", + "#2234: 96.51%\n", + "#2235: 96.51%\n", + "#2236: 96.51%\n", + "#2237: 96.51%\n", + "#2238: 96.52%\n", + "#2239: 96.52%\n", + "#2240: 96.52%\n", + "#2241: 96.52%\n", + "#2242: 96.52%\n", + "#2243: 96.52%\n", + "#2244: 96.53%\n", + "#2245: 96.53%\n", + "#2246: 96.53%\n", + "#2247: 96.53%\n", + "#2248: 96.53%\n", + "#2249: 96.53%\n", + "#2250: 96.53%\n", + "#2251: 96.54%\n", + "#2252: 96.54%\n", + "#2253: 96.54%\n", + "#2254: 96.54%\n", + "#2255: 96.54%\n", + "#2256: 96.54%\n", + "#2257: 96.55%\n", + "#2258: 96.55%\n", + "#2259: 96.55%\n", + "#2260: 96.55%\n", + "#2261: 96.55%\n", + "#2262: 96.55%\n", + "#2263: 96.55%\n", + "#2264: 96.56%\n", + "#2265: 96.56%\n", + "#2266: 96.52%\n", + "#2267: 96.52%\n", + "#2268: 96.52%\n", + "#2269: 96.52%\n", + "#2270: 96.52%\n", + "#2271: 96.52%\n", + "#2272: 96.52%\n", + "#2273: 96.53%\n", + "#2274: 96.53%\n", + "#2275: 96.53%\n", + "#2276: 96.53%\n", + "#2277: 96.53%\n", + "#2278: 96.53%\n", + "#2279: 96.54%\n", + 
"#2280: 96.49%\n", + "#2281: 96.49%\n", + "#2282: 96.50%\n", + "#2283: 96.50%\n", + "#2284: 96.50%\n", + "#2285: 96.50%\n", + "#2286: 96.50%\n", + "#2287: 96.50%\n", + "#2288: 96.51%\n", + "#2289: 96.51%\n", + "#2290: 96.51%\n", + "#2291: 96.51%\n", + "#2292: 96.51%\n", + "#2293: 96.51%\n", + "#2294: 96.51%\n", + "#2295: 96.52%\n", + "#2296: 96.52%\n", + "#2297: 96.52%\n", + "#2298: 96.52%\n", + "#2299: 96.52%\n", + "#2300: 96.52%\n", + "#2301: 96.52%\n", + "#2302: 96.53%\n", + "#2303: 96.53%\n", + "#2304: 96.53%\n", + "#2305: 96.53%\n", + "#2306: 96.53%\n", + "#2307: 96.53%\n", + "#2308: 96.49%\n", + "#2309: 96.49%\n", + "#2310: 96.50%\n", + "#2311: 96.50%\n", + "#2312: 96.50%\n", + "#2313: 96.50%\n", + "#2314: 96.50%\n", + "#2315: 96.50%\n", + "#2316: 96.50%\n", + "#2317: 96.51%\n", + "#2318: 96.51%\n", + "#2319: 96.51%\n", + "#2320: 96.51%\n", + "#2321: 96.51%\n", + "#2322: 96.51%\n", + "#2323: 96.51%\n", + "#2324: 96.52%\n", + "#2325: 96.52%\n", + "#2326: 96.52%\n", + "#2327: 96.52%\n", + "#2328: 96.52%\n", + "#2329: 96.52%\n", + "#2330: 96.53%\n", + "#2331: 96.53%\n", + "#2332: 96.53%\n", + "#2333: 96.53%\n", + "#2334: 96.53%\n", + "#2335: 96.53%\n", + "#2336: 96.53%\n", + "#2337: 96.54%\n", + "#2338: 96.54%\n", + "#2339: 96.54%\n", + "#2340: 96.54%\n", + "#2341: 96.54%\n", + "#2342: 96.54%\n", + "#2343: 96.54%\n", + "#2344: 96.55%\n", + "#2345: 96.55%\n", + "#2346: 96.55%\n", + "#2347: 96.55%\n", + "#2348: 96.55%\n", + "#2349: 96.55%\n", + "#2350: 96.55%\n", + "#2351: 96.56%\n", + "#2352: 96.56%\n", + "#2353: 96.56%\n", + "#2354: 96.56%\n", + "#2355: 96.56%\n", + "#2356: 96.56%\n", + "#2357: 96.56%\n", + "#2358: 96.57%\n", + "#2359: 96.57%\n", + "#2360: 96.57%\n", + "#2361: 96.57%\n", + "#2362: 96.57%\n", + "#2363: 96.57%\n", + "#2364: 96.58%\n", + "#2365: 96.58%\n", + "#2366: 96.58%\n", + "#2367: 96.58%\n", + "#2368: 96.58%\n", + "#2369: 96.58%\n", + "#2370: 96.58%\n", + "#2371: 96.59%\n", + "#2372: 96.59%\n", + "#2373: 96.59%\n", + "#2374: 96.59%\n", + 
"#2375: 96.59%\n", + "#2376: 96.59%\n", + "#2377: 96.59%\n", + "#2378: 96.60%\n", + "#2379: 96.60%\n", + "#2380: 96.60%\n", + "#2381: 96.60%\n", + "#2382: 96.60%\n", + "#2383: 96.60%\n", + "#2384: 96.60%\n", + "#2385: 96.61%\n", + "#2386: 96.61%\n", + "#2387: 96.61%\n", + "#2388: 96.61%\n", + "#2389: 96.61%\n", + "#2390: 96.61%\n", + "#2391: 96.61%\n", + "#2392: 96.62%\n", + "#2393: 96.62%\n", + "#2394: 96.62%\n", + "#2395: 96.62%\n", + "#2396: 96.62%\n", + "#2397: 96.62%\n", + "#2398: 96.62%\n", + "#2399: 96.62%\n", + "#2400: 96.63%\n", + "#2401: 96.63%\n", + "#2402: 96.63%\n", + "#2403: 96.63%\n", + "#2404: 96.63%\n", + "#2405: 96.63%\n", + "#2406: 96.63%\n", + "#2407: 96.64%\n", + "#2408: 96.64%\n", + "#2409: 96.64%\n", + "#2410: 96.64%\n", + "#2411: 96.64%\n", + "#2412: 96.64%\n", + "#2413: 96.64%\n", + "#2414: 96.65%\n", + "#2415: 96.65%\n", + "#2416: 96.65%\n", + "#2417: 96.65%\n", + "#2418: 96.65%\n", + "#2419: 96.65%\n", + "#2420: 96.65%\n", + "#2421: 96.66%\n", + "#2422: 96.66%\n", + "#2423: 96.66%\n", + "#2424: 96.66%\n", + "#2425: 96.66%\n", + "#2426: 96.66%\n", + "#2427: 96.66%\n", + "#2428: 96.67%\n", + "#2429: 96.67%\n", + "#2430: 96.67%\n", + "#2431: 96.67%\n", + "#2432: 96.67%\n", + "#2433: 96.67%\n", + "#2434: 96.67%\n", + "#2435: 96.67%\n", + "#2436: 96.68%\n", + "#2437: 96.68%\n", + "#2438: 96.68%\n", + "#2439: 96.68%\n", + "#2440: 96.68%\n", + "#2441: 96.68%\n", + "#2442: 96.68%\n", + "#2443: 96.69%\n", + "#2444: 96.69%\n", + "#2445: 96.69%\n", + "#2446: 96.69%\n", + "#2447: 96.69%\n", + "#2448: 96.69%\n", + "#2449: 96.69%\n", + "#2450: 96.70%\n", + "#2451: 96.70%\n", + "#2452: 96.70%\n", + "#2453: 96.70%\n", + "#2454: 96.66%\n", + "#2455: 96.66%\n", + "#2456: 96.66%\n", + "#2457: 96.66%\n", + "#2458: 96.67%\n", + "#2459: 96.67%\n", + "#2460: 96.67%\n", + "#2461: 96.67%\n", + "#2462: 96.63%\n", + "#2463: 96.63%\n", + "#2464: 96.63%\n", + "#2465: 96.63%\n", + "#2466: 96.64%\n", + "#2467: 96.64%\n", + "#2468: 96.64%\n", + "#2469: 96.64%\n", + 
"#2470: 96.64%\n", + "#2471: 96.64%\n", + "#2472: 96.64%\n", + "#2473: 96.65%\n", + "#2474: 96.65%\n", + "#2475: 96.65%\n", + "#2476: 96.65%\n", + "#2477: 96.65%\n", + "#2478: 96.65%\n", + "#2479: 96.65%\n", + "#2480: 96.65%\n", + "#2481: 96.66%\n", + "#2482: 96.66%\n", + "#2483: 96.66%\n", + "#2484: 96.66%\n", + "#2485: 96.66%\n", + "#2486: 96.66%\n", + "#2487: 96.66%\n", + "#2488: 96.63%\n", + "#2489: 96.63%\n", + "#2490: 96.63%\n", + "#2491: 96.63%\n", + "#2492: 96.63%\n", + "#2493: 96.63%\n", + "#2494: 96.63%\n", + "#2495: 96.63%\n", + "#2496: 96.64%\n", + "#2497: 96.64%\n", + "#2498: 96.64%\n", + "#2499: 96.64%\n", + "#2500: 96.64%\n", + "#2501: 96.64%\n", + "#2502: 96.64%\n", + "#2503: 96.65%\n", + "#2504: 96.65%\n", + "#2505: 96.65%\n", + "#2506: 96.65%\n", + "#2507: 96.65%\n", + "#2508: 96.65%\n", + "#2509: 96.65%\n", + "#2510: 96.65%\n", + "#2511: 96.66%\n", + "#2512: 96.66%\n", + "#2513: 96.66%\n", + "#2514: 96.66%\n", + "#2515: 96.66%\n", + "#2516: 96.66%\n", + "#2517: 96.66%\n", + "#2518: 96.67%\n", + "#2519: 96.67%\n", + "#2520: 96.67%\n", + "#2521: 96.67%\n", + "#2522: 96.67%\n", + "#2523: 96.67%\n", + "#2524: 96.67%\n", + "#2525: 96.67%\n", + "#2526: 96.68%\n", + "#2527: 96.68%\n", + "#2528: 96.68%\n", + "#2529: 96.68%\n", + "#2530: 96.68%\n", + "#2531: 96.68%\n", + "#2532: 96.68%\n", + "#2533: 96.65%\n", + "#2534: 96.61%\n", + "#2535: 96.61%\n", + "#2536: 96.61%\n", + "#2537: 96.61%\n", + "#2538: 96.61%\n", + "#2539: 96.61%\n", + "#2540: 96.62%\n", + "#2541: 96.62%\n", + "#2542: 96.62%\n", + "#2543: 96.62%\n", + "#2544: 96.62%\n", + "#2545: 96.62%\n", + "#2546: 96.62%\n", + "#2547: 96.62%\n", + "#2548: 96.63%\n", + "#2549: 96.63%\n", + "#2550: 96.63%\n", + "#2551: 96.63%\n", + "#2552: 96.63%\n", + "#2553: 96.63%\n", + "#2554: 96.63%\n", + "#2555: 96.64%\n", + "#2556: 96.64%\n", + "#2557: 96.64%\n", + "#2558: 96.64%\n", + "#2559: 96.64%\n", + "#2560: 96.60%\n", + "#2561: 96.60%\n", + "#2562: 96.61%\n", + "#2563: 96.61%\n", + "#2564: 96.61%\n", + 
"#2565: 96.61%\n", + "#2566: 96.61%\n", + "#2567: 96.61%\n", + "#2568: 96.61%\n", + "#2569: 96.61%\n", + "#2570: 96.62%\n", + "#2571: 96.62%\n", + "#2572: 96.62%\n", + "#2573: 96.62%\n", + "#2574: 96.62%\n", + "#2575: 96.62%\n", + "#2576: 96.62%\n", + "#2577: 96.63%\n", + "#2578: 96.63%\n", + "#2579: 96.63%\n", + "#2580: 96.63%\n", + "#2581: 96.63%\n", + "#2582: 96.63%\n", + "#2583: 96.63%\n", + "#2584: 96.63%\n", + "#2585: 96.64%\n", + "#2586: 96.64%\n", + "#2587: 96.64%\n", + "#2588: 96.64%\n", + "#2589: 96.64%\n", + "#2590: 96.64%\n", + "#2591: 96.64%\n", + "#2592: 96.64%\n", + "#2593: 96.65%\n", + "#2594: 96.61%\n", + "#2595: 96.61%\n", + "#2596: 96.61%\n", + "#2597: 96.57%\n", + "#2598: 96.58%\n", + "#2599: 96.58%\n", + "#2600: 96.58%\n", + "#2601: 96.58%\n", + "#2602: 96.58%\n", + "#2603: 96.58%\n", + "#2604: 96.58%\n", + "#2605: 96.58%\n", + "#2606: 96.59%\n", + "#2607: 96.59%\n", + "#2608: 96.59%\n", + "#2609: 96.59%\n", + "#2610: 96.59%\n", + "#2611: 96.59%\n", + "#2612: 96.59%\n", + "#2613: 96.60%\n", + "#2614: 96.60%\n", + "#2615: 96.60%\n", + "#2616: 96.60%\n", + "#2617: 96.60%\n", + "#2618: 96.60%\n", + "#2619: 96.60%\n", + "#2620: 96.60%\n", + "#2621: 96.61%\n", + "#2622: 96.61%\n", + "#2623: 96.61%\n", + "#2624: 96.61%\n", + "#2625: 96.61%\n", + "#2626: 96.61%\n", + "#2627: 96.61%\n", + "#2628: 96.61%\n", + "#2629: 96.62%\n", + "#2630: 96.62%\n", + "#2631: 96.62%\n", + "#2632: 96.62%\n", + "#2633: 96.62%\n", + "#2634: 96.62%\n", + "#2635: 96.62%\n", + "#2636: 96.59%\n", + "#2637: 96.59%\n", + "#2638: 96.59%\n", + "#2639: 96.59%\n", + "#2640: 96.59%\n", + "#2641: 96.59%\n", + "#2642: 96.59%\n", + "#2643: 96.60%\n", + "#2644: 96.60%\n", + "#2645: 96.60%\n", + "#2646: 96.60%\n", + "#2647: 96.60%\n", + "#2648: 96.60%\n", + "#2649: 96.60%\n", + "#2650: 96.61%\n", + "#2651: 96.61%\n", + "#2652: 96.61%\n", + "#2653: 96.61%\n", + "#2654: 96.57%\n", + "#2655: 96.57%\n", + "#2656: 96.58%\n", + "#2657: 96.58%\n", + "#2658: 96.58%\n", + "#2659: 96.58%\n", + 
"#2660: 96.58%\n", + "#2661: 96.58%\n", + "#2662: 96.58%\n", + "#2663: 96.58%\n", + "#2664: 96.59%\n", + "#2665: 96.59%\n", + "#2666: 96.59%\n", + "#2667: 96.59%\n", + "#2668: 96.59%\n", + "#2669: 96.59%\n", + "#2670: 96.59%\n", + "#2671: 96.59%\n", + "#2672: 96.60%\n", + "#2673: 96.60%\n", + "#2674: 96.60%\n", + "#2675: 96.60%\n", + "#2676: 96.60%\n", + "#2677: 96.60%\n", + "#2678: 96.60%\n", + "#2679: 96.60%\n", + "#2680: 96.61%\n", + "#2681: 96.61%\n", + "#2682: 96.61%\n", + "#2683: 96.61%\n", + "#2684: 96.61%\n", + "#2685: 96.61%\n", + "#2686: 96.61%\n", + "#2687: 96.61%\n", + "#2688: 96.62%\n", + "#2689: 96.62%\n", + "#2690: 96.62%\n", + "#2691: 96.62%\n", + "#2692: 96.62%\n", + "#2693: 96.62%\n", + "#2694: 96.62%\n", + "#2695: 96.62%\n", + "#2696: 96.63%\n", + "#2697: 96.63%\n", + "#2698: 96.63%\n", + "#2699: 96.63%\n", + "#2700: 96.63%\n", + "#2701: 96.63%\n", + "#2702: 96.63%\n", + "#2703: 96.63%\n", + "#2704: 96.64%\n", + "#2705: 96.64%\n", + "#2706: 96.64%\n", + "#2707: 96.64%\n", + "#2708: 96.64%\n", + "#2709: 96.64%\n", + "#2710: 96.64%\n", + "#2711: 96.64%\n", + "#2712: 96.65%\n", + "#2713: 96.65%\n", + "#2714: 96.65%\n", + "#2715: 96.65%\n", + "#2716: 96.65%\n", + "#2717: 96.65%\n", + "#2718: 96.65%\n", + "#2719: 96.65%\n", + "#2720: 96.66%\n", + "#2721: 96.66%\n", + "#2722: 96.66%\n", + "#2723: 96.66%\n", + "#2724: 96.66%\n", + "#2725: 96.66%\n", + "#2726: 96.66%\n", + "#2727: 96.66%\n", + "#2728: 96.67%\n", + "#2729: 96.67%\n", + "#2730: 96.67%\n", + "#2731: 96.67%\n", + "#2732: 96.67%\n", + "#2733: 96.67%\n", + "#2734: 96.67%\n", + "#2735: 96.67%\n", + "#2736: 96.68%\n", + "#2737: 96.68%\n", + "#2738: 96.68%\n", + "#2739: 96.68%\n", + "#2740: 96.68%\n", + "#2741: 96.68%\n", + "#2742: 96.68%\n", + "#2743: 96.68%\n", + "#2744: 96.68%\n", + "#2745: 96.69%\n", + "#2746: 96.69%\n", + "#2747: 96.69%\n", + "#2748: 96.69%\n", + "#2749: 96.69%\n", + "#2750: 96.69%\n", + "#2751: 96.69%\n", + "#2752: 96.69%\n", + "#2753: 96.70%\n", + "#2754: 96.70%\n", + 
"#2755: 96.70%\n", + "#2756: 96.70%\n", + "#2757: 96.70%\n", + "#2758: 96.70%\n", + "#2759: 96.70%\n", + "#2760: 96.67%\n", + "#2761: 96.67%\n", + "#2762: 96.67%\n", + "#2763: 96.67%\n", + "#2764: 96.67%\n", + "#2765: 96.67%\n", + "#2766: 96.68%\n", + "#2767: 96.68%\n", + "#2768: 96.68%\n", + "#2769: 96.68%\n", + "#2770: 96.64%\n", + "#2771: 96.65%\n", + "#2772: 96.65%\n", + "#2773: 96.65%\n", + "#2774: 96.65%\n", + "#2775: 96.65%\n", + "#2776: 96.65%\n", + "#2777: 96.65%\n", + "#2778: 96.62%\n", + "#2779: 96.62%\n", + "#2780: 96.58%\n", + "#2781: 96.59%\n", + "#2782: 96.59%\n", + "#2783: 96.59%\n", + "#2784: 96.59%\n", + "#2785: 96.59%\n", + "#2786: 96.59%\n", + "#2787: 96.59%\n", + "#2788: 96.59%\n", + "#2789: 96.59%\n", + "#2790: 96.60%\n", + "#2791: 96.60%\n", + "#2792: 96.60%\n", + "#2793: 96.60%\n", + "#2794: 96.60%\n", + "#2795: 96.60%\n", + "#2796: 96.60%\n", + "#2797: 96.60%\n", + "#2798: 96.61%\n", + "#2799: 96.61%\n", + "#2800: 96.61%\n", + "#2801: 96.61%\n", + "#2802: 96.61%\n", + "#2803: 96.61%\n", + "#2804: 96.61%\n", + "#2805: 96.61%\n", + "#2806: 96.62%\n", + "#2807: 96.62%\n", + "#2808: 96.62%\n", + "#2809: 96.62%\n", + "#2810: 96.62%\n", + "#2811: 96.62%\n", + "#2812: 96.62%\n", + "#2813: 96.62%\n", + "#2814: 96.63%\n", + "#2815: 96.63%\n", + "#2816: 96.63%\n", + "#2817: 96.63%\n", + "#2818: 96.63%\n", + "#2819: 96.63%\n", + "#2820: 96.63%\n", + "#2821: 96.63%\n", + "#2822: 96.63%\n", + "#2823: 96.60%\n", + "#2824: 96.60%\n", + "#2825: 96.60%\n", + "#2826: 96.60%\n", + "#2827: 96.61%\n", + "#2828: 96.61%\n", + "#2829: 96.61%\n", + "#2830: 96.61%\n", + "#2831: 96.61%\n", + "#2832: 96.61%\n", + "#2833: 96.61%\n", + "#2834: 96.61%\n", + "#2835: 96.61%\n", + "#2836: 96.62%\n", + "#2837: 96.62%\n", + "#2838: 96.62%\n", + "#2839: 96.62%\n", + "#2840: 96.62%\n", + "#2841: 96.62%\n", + "#2842: 96.62%\n", + "#2843: 96.62%\n", + "#2844: 96.63%\n", + "#2845: 96.63%\n", + "#2846: 96.63%\n", + "#2847: 96.63%\n", + "#2848: 96.63%\n", + "#2849: 96.63%\n", + 
"#2850: 96.63%\n", + "#2851: 96.63%\n", + "#2852: 96.64%\n", + "#2853: 96.64%\n", + "#2854: 96.64%\n", + "#2855: 96.64%\n", + "#2856: 96.64%\n", + "#2857: 96.64%\n", + "#2858: 96.64%\n", + "#2859: 96.64%\n", + "#2860: 96.64%\n", + "#2861: 96.65%\n", + "#2862: 96.65%\n", + "#2863: 96.65%\n", + "#2864: 96.65%\n", + "#2865: 96.65%\n", + "#2866: 96.65%\n", + "#2867: 96.65%\n", + "#2868: 96.65%\n", + "#2869: 96.66%\n", + "#2870: 96.66%\n", + "#2871: 96.66%\n", + "#2872: 96.66%\n", + "#2873: 96.66%\n", + "#2874: 96.66%\n", + "#2875: 96.66%\n", + "#2876: 96.66%\n", + "#2877: 96.66%\n", + "#2878: 96.67%\n", + "#2879: 96.67%\n", + "#2880: 96.67%\n", + "#2881: 96.67%\n", + "#2882: 96.67%\n", + "#2883: 96.67%\n", + "#2884: 96.67%\n", + "#2885: 96.67%\n", + "#2886: 96.67%\n", + "#2887: 96.68%\n", + "#2888: 96.68%\n", + "#2889: 96.68%\n", + "#2890: 96.68%\n", + "#2891: 96.68%\n", + "#2892: 96.68%\n", + "#2893: 96.68%\n", + "#2894: 96.68%\n", + "#2895: 96.69%\n", + "#2896: 96.65%\n", + "#2897: 96.65%\n", + "#2898: 96.65%\n", + "#2899: 96.66%\n", + "#2900: 96.66%\n", + "#2901: 96.66%\n", + "#2902: 96.66%\n", + "#2903: 96.66%\n", + "#2904: 96.66%\n", + "#2905: 96.66%\n", + "#2906: 96.66%\n", + "#2907: 96.66%\n", + "#2908: 96.67%\n", + "#2909: 96.67%\n", + "#2910: 96.67%\n", + "#2911: 96.67%\n", + "#2912: 96.67%\n", + "#2913: 96.67%\n", + "#2914: 96.67%\n", + "#2915: 96.67%\n", + "#2916: 96.67%\n", + "#2917: 96.68%\n", + "#2918: 96.68%\n", + "#2919: 96.68%\n", + "#2920: 96.68%\n", + "#2921: 96.68%\n", + "#2922: 96.68%\n", + "#2923: 96.68%\n", + "#2924: 96.68%\n", + "#2925: 96.68%\n", + "#2926: 96.69%\n", + "#2927: 96.65%\n", + "#2928: 96.65%\n", + "#2929: 96.66%\n", + "#2930: 96.66%\n", + "#2931: 96.66%\n", + "#2932: 96.66%\n", + "#2933: 96.66%\n", + "#2934: 96.66%\n", + "#2935: 96.66%\n", + "#2936: 96.66%\n", + "#2937: 96.66%\n", + "#2938: 96.67%\n", + "#2939: 96.63%\n", + "#2940: 96.63%\n", + "#2941: 96.63%\n", + "#2942: 96.64%\n", + "#2943: 96.64%\n", + "#2944: 96.64%\n", + 
"#2945: 96.64%\n", + "#2946: 96.64%\n", + "#2947: 96.64%\n", + "#2948: 96.64%\n", + "#2949: 96.64%\n", + "#2950: 96.65%\n", + "#2951: 96.65%\n", + "#2952: 96.61%\n", + "#2953: 96.61%\n", + "#2954: 96.62%\n", + "#2955: 96.62%\n", + "#2956: 96.62%\n", + "#2957: 96.62%\n", + "#2958: 96.62%\n", + "#2959: 96.62%\n", + "#2960: 96.62%\n", + "#2961: 96.62%\n", + "#2962: 96.63%\n", + "#2963: 96.63%\n", + "#2964: 96.63%\n", + "#2965: 96.63%\n", + "#2966: 96.63%\n", + "#2967: 96.63%\n", + "#2968: 96.63%\n", + "#2969: 96.63%\n", + "#2970: 96.63%\n", + "#2971: 96.64%\n", + "#2972: 96.64%\n", + "#2973: 96.64%\n", + "#2974: 96.64%\n", + "#2975: 96.64%\n", + "#2976: 96.64%\n", + "#2977: 96.64%\n", + "#2978: 96.64%\n", + "#2979: 96.64%\n", + "#2980: 96.65%\n", + "#2981: 96.65%\n", + "#2982: 96.65%\n", + "#2983: 96.65%\n", + "#2984: 96.65%\n", + "#2985: 96.65%\n", + "#2986: 96.65%\n", + "#2987: 96.65%\n", + "#2988: 96.65%\n", + "#2989: 96.66%\n", + "#2990: 96.66%\n", + "#2991: 96.66%\n", + "#2992: 96.66%\n", + "#2993: 96.66%\n", + "#2994: 96.66%\n", + "#2995: 96.63%\n", + "#2996: 96.63%\n", + "#2997: 96.63%\n", + "#2998: 96.63%\n", + "#2999: 96.63%\n", + "#3000: 96.63%\n", + "#3001: 96.64%\n", + "#3002: 96.64%\n", + "#3003: 96.64%\n", + "#3004: 96.64%\n", + "#3005: 96.61%\n", + "#3006: 96.61%\n", + "#3007: 96.61%\n", + "#3008: 96.61%\n", + "#3009: 96.61%\n", + "#3010: 96.61%\n", + "#3011: 96.61%\n", + "#3012: 96.61%\n", + "#3013: 96.62%\n", + "#3014: 96.62%\n", + "#3015: 96.62%\n", + "#3016: 96.62%\n", + "#3017: 96.62%\n", + "#3018: 96.62%\n", + "#3019: 96.62%\n", + "#3020: 96.62%\n", + "#3021: 96.62%\n", + "#3022: 96.63%\n", + "#3023: 96.63%\n", + "#3024: 96.63%\n", + "#3025: 96.63%\n", + "#3026: 96.63%\n", + "#3027: 96.63%\n", + "#3028: 96.63%\n", + "#3029: 96.63%\n", + "#3030: 96.60%\n", + "#3031: 96.60%\n", + "#3032: 96.60%\n", + "#3033: 96.61%\n", + "#3034: 96.61%\n", + "#3035: 96.61%\n", + "#3036: 96.61%\n", + "#3037: 96.61%\n", + "#3038: 96.61%\n", + "#3039: 96.61%\n", + 
"#3040: 96.61%\n", + "#3041: 96.61%\n", + "#3042: 96.62%\n", + "#3043: 96.62%\n", + "#3044: 96.62%\n", + "#3045: 96.62%\n", + "#3046: 96.62%\n", + "#3047: 96.62%\n", + "#3048: 96.62%\n", + "#3049: 96.62%\n", + "#3050: 96.62%\n", + "#3051: 96.63%\n", + "#3052: 96.63%\n", + "#3053: 96.63%\n", + "#3054: 96.63%\n", + "#3055: 96.63%\n", + "#3056: 96.63%\n", + "#3057: 96.63%\n", + "#3058: 96.63%\n", + "#3059: 96.63%\n", + "#3060: 96.60%\n", + "#3061: 96.60%\n", + "#3062: 96.60%\n", + "#3063: 96.61%\n", + "#3064: 96.61%\n", + "#3065: 96.61%\n", + "#3066: 96.61%\n", + "#3067: 96.61%\n", + "#3068: 96.61%\n", + "#3069: 96.61%\n", + "#3070: 96.61%\n", + "#3071: 96.61%\n", + "#3072: 96.62%\n", + "#3073: 96.58%\n", + "#3074: 96.59%\n", + "#3075: 96.59%\n", + "#3076: 96.59%\n", + "#3077: 96.59%\n", + "#3078: 96.59%\n", + "#3079: 96.59%\n", + "#3080: 96.59%\n", + "#3081: 96.59%\n", + "#3082: 96.59%\n", + "#3083: 96.60%\n", + "#3084: 96.60%\n", + "#3085: 96.60%\n", + "#3086: 96.60%\n", + "#3087: 96.60%\n", + "#3088: 96.60%\n", + "#3089: 96.60%\n", + "#3090: 96.60%\n", + "#3091: 96.60%\n", + "#3092: 96.61%\n", + "#3093: 96.61%\n", + "#3094: 96.61%\n", + "#3095: 96.61%\n", + "#3096: 96.61%\n", + "#3097: 96.61%\n", + "#3098: 96.61%\n", + "#3099: 96.61%\n", + "#3100: 96.61%\n", + "#3101: 96.62%\n", + "#3102: 96.62%\n", + "#3103: 96.62%\n", + "#3104: 96.62%\n", + "#3105: 96.62%\n", + "#3106: 96.62%\n", + "#3107: 96.62%\n", + "#3108: 96.62%\n", + "#3109: 96.62%\n", + "#3110: 96.62%\n", + "#3111: 96.63%\n", + "#3112: 96.63%\n", + "#3113: 96.63%\n", + "#3114: 96.60%\n", + "#3115: 96.60%\n", + "#3116: 96.60%\n", + "#3117: 96.60%\n", + "#3118: 96.60%\n", + "#3119: 96.60%\n", + "#3120: 96.60%\n", + "#3121: 96.60%\n", + "#3122: 96.61%\n", + "#3123: 96.61%\n", + "#3124: 96.61%\n", + "#3125: 96.61%\n", + "#3126: 96.61%\n", + "#3127: 96.61%\n", + "#3128: 96.61%\n", + "#3129: 96.61%\n", + "#3130: 96.61%\n", + "#3131: 96.62%\n", + "#3132: 96.62%\n", + "#3133: 96.62%\n", + "#3134: 96.62%\n", + 
"#3135: 96.62%\n", + "#3136: 96.62%\n", + "#3137: 96.62%\n", + "#3138: 96.62%\n", + "#3139: 96.62%\n", + "#3140: 96.63%\n", + "#3141: 96.63%\n", + "#3142: 96.63%\n", + "#3143: 96.63%\n", + "#3144: 96.63%\n", + "#3145: 96.63%\n", + "#3146: 96.63%\n", + "#3147: 96.63%\n", + "#3148: 96.63%\n", + "#3149: 96.63%\n", + "#3150: 96.64%\n", + "#3151: 96.64%\n", + "#3152: 96.64%\n", + "#3153: 96.64%\n", + "#3154: 96.64%\n", + "#3155: 96.64%\n", + "#3156: 96.64%\n", + "#3157: 96.64%\n", + "#3158: 96.64%\n", + "#3159: 96.65%\n", + "#3160: 96.65%\n", + "#3161: 96.65%\n", + "#3162: 96.65%\n", + "#3163: 96.65%\n", + "#3164: 96.65%\n", + "#3165: 96.65%\n", + "#3166: 96.65%\n", + "#3167: 96.65%\n", + "#3168: 96.66%\n", + "#3169: 96.66%\n", + "#3170: 96.66%\n", + "#3171: 96.66%\n", + "#3172: 96.66%\n", + "#3173: 96.66%\n", + "#3174: 96.66%\n", + "#3175: 96.66%\n", + "#3176: 96.66%\n", + "#3177: 96.66%\n", + "#3178: 96.67%\n", + "#3179: 96.67%\n", + "#3180: 96.67%\n", + "#3181: 96.67%\n", + "#3182: 96.67%\n", + "#3183: 96.67%\n", + "#3184: 96.67%\n", + "#3185: 96.67%\n", + "#3186: 96.67%\n", + "#3187: 96.68%\n", + "#3188: 96.68%\n", + "#3189: 96.65%\n", + "#3190: 96.65%\n", + "#3191: 96.65%\n", + "#3192: 96.65%\n", + "#3193: 96.65%\n", + "#3194: 96.65%\n", + "#3195: 96.65%\n", + "#3196: 96.65%\n", + "#3197: 96.65%\n", + "#3198: 96.66%\n", + "#3199: 96.66%\n", + "#3200: 96.66%\n", + "#3201: 96.66%\n", + "#3202: 96.66%\n", + "#3203: 96.66%\n", + "#3204: 96.66%\n", + "#3205: 96.66%\n", + "#3206: 96.66%\n", + "#3207: 96.66%\n", + "#3208: 96.67%\n", + "#3209: 96.67%\n", + "#3210: 96.67%\n", + "#3211: 96.67%\n", + "#3212: 96.67%\n", + "#3213: 96.67%\n", + "#3214: 96.67%\n", + "#3215: 96.67%\n", + "#3216: 96.67%\n", + "#3217: 96.67%\n", + "#3218: 96.68%\n", + "#3219: 96.68%\n", + "#3220: 96.68%\n", + "#3221: 96.68%\n", + "#3222: 96.68%\n", + "#3223: 96.68%\n", + "#3224: 96.68%\n", + "#3225: 96.68%\n", + "#3226: 96.68%\n", + "#3227: 96.69%\n", + "#3228: 96.69%\n", + "#3229: 96.69%\n", + 
"#3230: 96.69%\n", + "#3231: 96.69%\n", + "#3232: 96.69%\n", + "#3233: 96.69%\n", + "#3234: 96.69%\n", + "#3235: 96.69%\n", + "#3236: 96.69%\n", + "#3237: 96.70%\n", + "#3238: 96.70%\n", + "#3239: 96.67%\n", + "#3240: 96.67%\n", + "#3241: 96.67%\n", + "#3242: 96.67%\n", + "#3243: 96.67%\n", + "#3244: 96.67%\n", + "#3245: 96.67%\n", + "#3246: 96.64%\n", + "#3247: 96.64%\n", + "#3248: 96.65%\n", + "#3249: 96.65%\n", + "#3250: 96.65%\n", + "#3251: 96.65%\n", + "#3252: 96.65%\n", + "#3253: 96.65%\n", + "#3254: 96.65%\n", + "#3255: 96.65%\n", + "#3256: 96.65%\n", + "#3257: 96.65%\n", + "#3258: 96.66%\n", + "#3259: 96.66%\n", + "#3260: 96.66%\n", + "#3261: 96.66%\n", + "#3262: 96.63%\n", + "#3263: 96.63%\n", + "#3264: 96.63%\n", + "#3265: 96.63%\n", + "#3266: 96.63%\n", + "#3267: 96.63%\n", + "#3268: 96.64%\n", + "#3269: 96.64%\n", + "#3270: 96.64%\n", + "#3271: 96.64%\n", + "#3272: 96.64%\n", + "#3273: 96.64%\n", + "#3274: 96.64%\n", + "#3275: 96.64%\n", + "#3276: 96.64%\n", + "#3277: 96.64%\n", + "#3278: 96.65%\n", + "#3279: 96.65%\n", + "#3280: 96.65%\n", + "#3281: 96.65%\n", + "#3282: 96.65%\n", + "#3283: 96.65%\n", + "#3284: 96.65%\n", + "#3285: 96.65%\n", + "#3286: 96.65%\n", + "#3287: 96.65%\n", + "#3288: 96.66%\n", + "#3289: 96.63%\n", + "#3290: 96.63%\n", + "#3291: 96.63%\n", + "#3292: 96.63%\n", + "#3293: 96.63%\n", + "#3294: 96.63%\n", + "#3295: 96.63%\n", + "#3296: 96.63%\n", + "#3297: 96.63%\n", + "#3298: 96.64%\n", + "#3299: 96.64%\n", + "#3300: 96.64%\n", + "#3301: 96.64%\n", + "#3302: 96.64%\n", + "#3303: 96.64%\n", + "#3304: 96.64%\n", + "#3305: 96.64%\n", + "#3306: 96.64%\n", + "#3307: 96.64%\n", + "#3308: 96.65%\n", + "#3309: 96.65%\n", + "#3310: 96.65%\n", + "#3311: 96.65%\n", + "#3312: 96.65%\n", + "#3313: 96.65%\n", + "#3314: 96.65%\n", + "#3315: 96.65%\n", + "#3316: 96.62%\n", + "#3317: 96.62%\n", + "#3318: 96.63%\n", + "#3319: 96.60%\n", + "#3320: 96.60%\n", + "#3321: 96.60%\n", + "#3322: 96.60%\n", + "#3323: 96.60%\n", + "#3324: 96.60%\n", + 
"#3325: 96.60%\n", + "#3326: 96.60%\n", + "#3327: 96.60%\n", + "#3328: 96.61%\n", + "#3329: 96.61%\n", + "#3330: 96.61%\n", + "#3331: 96.61%\n", + "#3332: 96.61%\n", + "#3333: 96.58%\n", + "#3334: 96.58%\n", + "#3335: 96.58%\n", + "#3336: 96.58%\n", + "#3337: 96.58%\n", + "#3338: 96.59%\n", + "#3339: 96.59%\n", + "#3340: 96.59%\n", + "#3341: 96.59%\n", + "#3342: 96.59%\n", + "#3343: 96.59%\n", + "#3344: 96.59%\n", + "#3345: 96.59%\n", + "#3346: 96.59%\n", + "#3347: 96.59%\n", + "#3348: 96.60%\n", + "#3349: 96.60%\n", + "#3350: 96.60%\n", + "#3351: 96.60%\n", + "#3352: 96.60%\n", + "#3353: 96.60%\n", + "#3354: 96.60%\n", + "#3355: 96.60%\n", + "#3356: 96.60%\n", + "#3357: 96.61%\n", + "#3358: 96.61%\n", + "#3359: 96.61%\n", + "#3360: 96.61%\n", + "#3361: 96.61%\n", + "#3362: 96.61%\n", + "#3363: 96.61%\n", + "#3364: 96.61%\n", + "#3365: 96.61%\n", + "#3366: 96.61%\n", + "#3367: 96.62%\n", + "#3368: 96.62%\n", + "#3369: 96.62%\n", + "#3370: 96.62%\n", + "#3371: 96.62%\n", + "#3372: 96.62%\n", + "#3373: 96.62%\n", + "#3374: 96.62%\n", + "#3375: 96.62%\n", + "#3376: 96.62%\n", + "#3377: 96.63%\n", + "#3378: 96.63%\n", + "#3379: 96.63%\n", + "#3380: 96.63%\n", + "#3381: 96.63%\n", + "#3382: 96.63%\n", + "#3383: 96.63%\n", + "#3384: 96.63%\n", + "#3385: 96.63%\n", + "#3386: 96.63%\n", + "#3387: 96.64%\n", + "#3388: 96.64%\n", + "#3389: 96.64%\n", + "#3390: 96.64%\n", + "#3391: 96.64%\n", + "#3392: 96.64%\n", + "#3393: 96.64%\n", + "#3394: 96.64%\n", + "#3395: 96.64%\n", + "#3396: 96.64%\n", + "#3397: 96.65%\n", + "#3398: 96.65%\n", + "#3399: 96.65%\n", + "#3400: 96.65%\n", + "#3401: 96.65%\n", + "#3402: 96.65%\n", + "#3403: 96.65%\n", + "#3404: 96.65%\n", + "#3405: 96.65%\n", + "#3406: 96.65%\n", + "#3407: 96.65%\n", + "#3408: 96.66%\n", + "#3409: 96.66%\n", + "#3410: 96.66%\n", + "#3411: 96.66%\n", + "#3412: 96.66%\n", + "#3413: 96.66%\n", + "#3414: 96.66%\n", + "#3415: 96.66%\n", + "#3416: 96.66%\n", + "#3417: 96.66%\n", + "#3418: 96.67%\n", + "#3419: 96.67%\n", + 
"#3420: 96.67%\n", + "#3421: 96.67%\n", + "#3422: 96.67%\n", + "#3423: 96.67%\n", + "#3424: 96.67%\n", + "#3425: 96.67%\n", + "#3426: 96.67%\n", + "#3427: 96.67%\n", + "#3428: 96.68%\n", + "#3429: 96.68%\n", + "#3430: 96.68%\n", + "#3431: 96.68%\n", + "#3432: 96.68%\n", + "#3433: 96.68%\n", + "#3434: 96.68%\n", + "#3435: 96.68%\n", + "#3436: 96.68%\n", + "#3437: 96.68%\n", + "#3438: 96.69%\n", + "#3439: 96.69%\n", + "#3440: 96.69%\n", + "#3441: 96.69%\n", + "#3442: 96.69%\n", + "#3443: 96.69%\n", + "#3444: 96.69%\n", + "#3445: 96.69%\n", + "#3446: 96.69%\n", + "#3447: 96.69%\n", + "#3448: 96.69%\n", + "#3449: 96.70%\n", + "#3450: 96.70%\n", + "#3451: 96.70%\n", + "#3452: 96.70%\n", + "#3453: 96.70%\n", + "#3454: 96.70%\n", + "#3455: 96.70%\n", + "#3456: 96.70%\n", + "#3457: 96.70%\n", + "#3458: 96.70%\n", + "#3459: 96.71%\n", + "#3460: 96.71%\n", + "#3461: 96.71%\n", + "#3462: 96.71%\n", + "#3463: 96.71%\n", + "#3464: 96.71%\n", + "#3465: 96.71%\n", + "#3466: 96.71%\n", + "#3467: 96.71%\n", + "#3468: 96.71%\n", + "#3469: 96.71%\n", + "#3470: 96.72%\n", + "#3471: 96.72%\n", + "#3472: 96.72%\n", + "#3473: 96.72%\n", + "#3474: 96.72%\n", + "#3475: 96.72%\n", + "#3476: 96.72%\n", + "#3477: 96.72%\n", + "#3478: 96.72%\n", + "#3479: 96.72%\n", + "#3480: 96.73%\n", + "#3481: 96.73%\n", + "#3482: 96.73%\n", + "#3483: 96.73%\n", + "#3484: 96.73%\n", + "#3485: 96.73%\n", + "#3486: 96.73%\n", + "#3487: 96.73%\n", + "#3488: 96.73%\n", + "#3489: 96.73%\n", + "#3490: 96.73%\n", + "#3491: 96.74%\n", + "#3492: 96.74%\n", + "#3493: 96.74%\n", + "#3494: 96.74%\n", + "#3495: 96.74%\n", + "#3496: 96.74%\n", + "#3497: 96.74%\n", + "#3498: 96.74%\n", + "#3499: 96.74%\n", + "#3500: 96.74%\n", + "#3501: 96.74%\n", + "#3502: 96.75%\n", + "#3503: 96.72%\n", + "#3504: 96.72%\n", + "#3505: 96.72%\n", + "#3506: 96.72%\n", + "#3507: 96.72%\n", + "#3508: 96.72%\n", + "#3509: 96.72%\n", + "#3510: 96.72%\n", + "#3511: 96.73%\n", + "#3512: 96.73%\n", + "#3513: 96.73%\n", + "#3514: 96.73%\n", + 
"#3515: 96.73%\n", + "#3516: 96.73%\n", + "#3517: 96.73%\n", + "#3518: 96.73%\n", + "#3519: 96.73%\n", + "#3520: 96.71%\n", + "#3521: 96.71%\n", + "#3522: 96.71%\n", + "#3523: 96.71%\n", + "#3524: 96.71%\n", + "#3525: 96.71%\n", + "#3526: 96.71%\n", + "#3527: 96.71%\n", + "#3528: 96.71%\n", + "#3529: 96.71%\n", + "#3530: 96.71%\n", + "#3531: 96.72%\n", + "#3532: 96.72%\n", + "#3533: 96.72%\n", + "#3534: 96.72%\n", + "#3535: 96.72%\n", + "#3536: 96.72%\n", + "#3537: 96.72%\n", + "#3538: 96.72%\n", + "#3539: 96.72%\n", + "#3540: 96.72%\n", + "#3541: 96.73%\n", + "#3542: 96.73%\n", + "#3543: 96.73%\n", + "#3544: 96.73%\n", + "#3545: 96.73%\n", + "#3546: 96.73%\n", + "#3547: 96.73%\n", + "#3548: 96.73%\n", + "#3549: 96.73%\n", + "#3550: 96.73%\n", + "#3551: 96.73%\n", + "#3552: 96.74%\n", + "#3553: 96.74%\n", + "#3554: 96.74%\n", + "#3555: 96.74%\n", + "#3556: 96.74%\n", + "#3557: 96.74%\n", + "#3558: 96.74%\n", + "#3559: 96.71%\n", + "#3560: 96.71%\n", + "#3561: 96.72%\n", + "#3562: 96.72%\n", + "#3563: 96.72%\n", + "#3564: 96.72%\n", + "#3565: 96.72%\n", + "#3566: 96.72%\n", + "#3567: 96.72%\n", + "#3568: 96.72%\n", + "#3569: 96.72%\n", + "#3570: 96.72%\n", + "#3571: 96.72%\n", + "#3572: 96.73%\n", + "#3573: 96.73%\n", + "#3574: 96.73%\n", + "#3575: 96.73%\n", + "#3576: 96.73%\n", + "#3577: 96.73%\n", + "#3578: 96.73%\n", + "#3579: 96.73%\n", + "#3580: 96.73%\n", + "#3581: 96.73%\n", + "#3582: 96.73%\n", + "#3583: 96.74%\n", + "#3584: 96.74%\n", + "#3585: 96.74%\n", + "#3586: 96.74%\n", + "#3587: 96.74%\n", + "#3588: 96.74%\n", + "#3589: 96.74%\n", + "#3590: 96.74%\n", + "#3591: 96.74%\n", + "#3592: 96.74%\n", + "#3593: 96.74%\n", + "#3594: 96.75%\n", + "#3595: 96.75%\n", + "#3596: 96.75%\n", + "#3597: 96.72%\n", + "#3598: 96.72%\n", + "#3599: 96.72%\n", + "#3600: 96.72%\n", + "#3601: 96.72%\n", + "#3602: 96.72%\n", + "#3603: 96.73%\n", + "#3604: 96.73%\n", + "#3605: 96.73%\n", + "#3606: 96.73%\n", + "#3607: 96.73%\n", + "#3608: 96.73%\n", + "#3609: 96.73%\n", + 
"#3610: 96.73%\n", + "#3611: 96.73%\n", + "#3612: 96.73%\n", + "#3613: 96.73%\n", + "#3614: 96.74%\n", + "#3615: 96.74%\n", + "#3616: 96.74%\n", + "#3617: 96.74%\n", + "#3618: 96.74%\n", + "#3619: 96.74%\n", + "#3620: 96.74%\n", + "#3621: 96.74%\n", + "#3622: 96.74%\n", + "#3623: 96.74%\n", + "#3624: 96.74%\n", + "#3625: 96.75%\n", + "#3626: 96.75%\n", + "#3627: 96.75%\n", + "#3628: 96.75%\n", + "#3629: 96.75%\n", + "#3630: 96.75%\n", + "#3631: 96.75%\n", + "#3632: 96.75%\n", + "#3633: 96.75%\n", + "#3634: 96.75%\n", + "#3635: 96.75%\n", + "#3636: 96.76%\n", + "#3637: 96.76%\n", + "#3638: 96.76%\n", + "#3639: 96.76%\n", + "#3640: 96.76%\n", + "#3641: 96.76%\n", + "#3642: 96.76%\n", + "#3643: 96.76%\n", + "#3644: 96.76%\n", + "#3645: 96.76%\n", + "#3646: 96.76%\n", + "#3647: 96.77%\n", + "#3648: 96.77%\n", + "#3649: 96.77%\n", + "#3650: 96.77%\n", + "#3651: 96.77%\n", + "#3652: 96.77%\n", + "#3653: 96.77%\n", + "#3654: 96.77%\n", + "#3655: 96.77%\n", + "#3656: 96.77%\n", + "#3657: 96.77%\n", + "#3658: 96.78%\n", + "#3659: 96.78%\n", + "#3660: 96.78%\n", + "#3661: 96.78%\n", + "#3662: 96.78%\n", + "#3663: 96.78%\n", + "#3664: 96.78%\n", + "#3665: 96.78%\n", + "#3666: 96.78%\n", + "#3667: 96.78%\n", + "#3668: 96.78%\n", + "#3669: 96.78%\n", + "#3670: 96.79%\n", + "#3671: 96.79%\n", + "#3672: 96.79%\n", + "#3673: 96.79%\n", + "#3674: 96.79%\n", + "#3675: 96.79%\n", + "#3676: 96.79%\n", + "#3677: 96.79%\n", + "#3678: 96.79%\n", + "#3679: 96.79%\n", + "#3680: 96.79%\n", + "#3681: 96.80%\n", + "#3682: 96.80%\n", + "#3683: 96.80%\n", + "#3684: 96.80%\n", + "#3685: 96.80%\n", + "#3686: 96.80%\n", + "#3687: 96.80%\n", + "#3688: 96.80%\n", + "#3689: 96.80%\n", + "#3690: 96.80%\n", + "#3691: 96.80%\n", + "#3692: 96.80%\n", + "#3693: 96.81%\n", + "#3694: 96.81%\n", + "#3695: 96.81%\n", + "#3696: 96.81%\n", + "#3697: 96.81%\n", + "#3698: 96.81%\n", + "#3699: 96.81%\n", + "#3700: 96.81%\n", + "#3701: 96.81%\n", + "#3702: 96.81%\n", + "#3703: 96.81%\n", + "#3704: 96.82%\n", + 
"#3705: 96.82%\n", + "#3706: 96.82%\n", + "#3707: 96.82%\n", + "#3708: 96.82%\n", + "#3709: 96.82%\n", + "#3710: 96.82%\n", + "#3711: 96.82%\n", + "#3712: 96.82%\n", + "#3713: 96.82%\n", + "#3714: 96.82%\n", + "#3715: 96.82%\n", + "#3716: 96.83%\n", + "#3717: 96.83%\n", + "#3718: 96.83%\n", + "#3719: 96.83%\n", + "#3720: 96.83%\n", + "#3721: 96.83%\n", + "#3722: 96.83%\n", + "#3723: 96.83%\n", + "#3724: 96.83%\n", + "#3725: 96.83%\n", + "#3726: 96.81%\n", + "#3727: 96.81%\n", + "#3728: 96.81%\n", + "#3729: 96.81%\n", + "#3730: 96.81%\n", + "#3731: 96.81%\n", + "#3732: 96.81%\n", + "#3733: 96.81%\n", + "#3734: 96.81%\n", + "#3735: 96.81%\n", + "#3736: 96.82%\n", + "#3737: 96.82%\n", + "#3738: 96.79%\n", + "#3739: 96.79%\n", + "#3740: 96.79%\n", + "#3741: 96.79%\n", + "#3742: 96.79%\n", + "#3743: 96.79%\n", + "#3744: 96.80%\n", + "#3745: 96.80%\n", + "#3746: 96.80%\n", + "#3747: 96.80%\n", + "#3748: 96.80%\n", + "#3749: 96.80%\n", + "#3750: 96.80%\n", + "#3751: 96.80%\n", + "#3752: 96.80%\n", + "#3753: 96.80%\n", + "#3754: 96.80%\n", + "#3755: 96.81%\n", + "#3756: 96.81%\n", + "#3757: 96.78%\n", + "#3758: 96.78%\n", + "#3759: 96.78%\n", + "#3760: 96.78%\n", + "#3761: 96.78%\n", + "#3762: 96.78%\n", + "#3763: 96.79%\n", + "#3764: 96.79%\n", + "#3765: 96.79%\n", + "#3766: 96.79%\n", + "#3767: 96.76%\n", + "#3768: 96.76%\n", + "#3769: 96.76%\n", + "#3770: 96.76%\n", + "#3771: 96.77%\n", + "#3772: 96.77%\n", + "#3773: 96.77%\n", + "#3774: 96.77%\n", + "#3775: 96.77%\n", + "#3776: 96.77%\n", + "#3777: 96.77%\n", + "#3778: 96.77%\n", + "#3779: 96.77%\n", + "#3780: 96.75%\n", + "#3781: 96.75%\n", + "#3782: 96.75%\n", + "#3783: 96.75%\n", + "#3784: 96.75%\n", + "#3785: 96.75%\n", + "#3786: 96.75%\n", + "#3787: 96.75%\n", + "#3788: 96.75%\n", + "#3789: 96.75%\n", + "#3790: 96.76%\n", + "#3791: 96.76%\n", + "#3792: 96.76%\n", + "#3793: 96.76%\n", + "#3794: 96.76%\n", + "#3795: 96.76%\n", + "#3796: 96.76%\n", + "#3797: 96.76%\n", + "#3798: 96.76%\n", + "#3799: 96.76%\n", + 
"#3800: 96.76%\n", + "#3801: 96.76%\n", + "#3802: 96.77%\n", + "#3803: 96.77%\n", + "#3804: 96.77%\n", + "#3805: 96.77%\n", + "#3806: 96.77%\n", + "#3807: 96.77%\n", + "#3808: 96.74%\n", + "#3809: 96.75%\n", + "#3810: 96.75%\n", + "#3811: 96.75%\n", + "#3812: 96.75%\n", + "#3813: 96.75%\n", + "#3814: 96.75%\n", + "#3815: 96.75%\n", + "#3816: 96.75%\n", + "#3817: 96.75%\n", + "#3818: 96.75%\n", + "#3819: 96.75%\n", + "#3820: 96.75%\n", + "#3821: 96.76%\n", + "#3822: 96.76%\n", + "#3823: 96.76%\n", + "#3824: 96.76%\n", + "#3825: 96.76%\n", + "#3826: 96.76%\n", + "#3827: 96.76%\n", + "#3828: 96.76%\n", + "#3829: 96.76%\n", + "#3830: 96.76%\n", + "#3831: 96.76%\n", + "#3832: 96.76%\n", + "#3833: 96.77%\n", + "#3834: 96.77%\n", + "#3835: 96.77%\n", + "#3836: 96.77%\n", + "#3837: 96.77%\n", + "#3838: 96.77%\n", + "#3839: 96.77%\n", + "#3840: 96.77%\n", + "#3841: 96.77%\n", + "#3842: 96.77%\n", + "#3843: 96.77%\n", + "#3844: 96.78%\n", + "#3845: 96.78%\n", + "#3846: 96.78%\n", + "#3847: 96.78%\n", + "#3848: 96.78%\n", + "#3849: 96.78%\n", + "#3850: 96.75%\n", + "#3851: 96.75%\n", + "#3852: 96.76%\n", + "#3853: 96.76%\n", + "#3854: 96.76%\n", + "#3855: 96.73%\n", + "#3856: 96.73%\n", + "#3857: 96.73%\n", + "#3858: 96.73%\n", + "#3859: 96.74%\n", + "#3860: 96.74%\n", + "#3861: 96.74%\n", + "#3862: 96.74%\n", + "#3863: 96.74%\n", + "#3864: 96.74%\n", + "#3865: 96.74%\n", + "#3866: 96.74%\n", + "#3867: 96.74%\n", + "#3868: 96.74%\n", + "#3869: 96.74%\n", + "#3870: 96.75%\n", + "#3871: 96.75%\n", + "#3872: 96.75%\n", + "#3873: 96.75%\n", + "#3874: 96.75%\n", + "#3875: 96.75%\n", + "#3876: 96.75%\n", + "#3877: 96.75%\n", + "#3878: 96.75%\n", + "#3879: 96.75%\n", + "#3880: 96.75%\n", + "#3881: 96.75%\n", + "#3882: 96.76%\n", + "#3883: 96.76%\n", + "#3884: 96.76%\n", + "#3885: 96.76%\n", + "#3886: 96.76%\n", + "#3887: 96.76%\n", + "#3888: 96.76%\n", + "#3889: 96.76%\n", + "#3890: 96.76%\n", + "#3891: 96.76%\n", + "#3892: 96.76%\n", + "#3893: 96.76%\n", + "#3894: 96.77%\n", + 
"#3895: 96.77%\n", + "#3896: 96.77%\n", + "#3897: 96.77%\n", + "#3898: 96.77%\n", + "#3899: 96.77%\n", + "#3900: 96.77%\n", + "#3901: 96.77%\n", + "#3902: 96.77%\n", + "#3903: 96.77%\n", + "#3904: 96.77%\n", + "#3905: 96.77%\n", + "#3906: 96.75%\n", + "#3907: 96.75%\n", + "#3908: 96.75%\n", + "#3909: 96.75%\n", + "#3910: 96.75%\n", + "#3911: 96.75%\n", + "#3912: 96.75%\n", + "#3913: 96.76%\n", + "#3914: 96.76%\n", + "#3915: 96.76%\n", + "#3916: 96.76%\n", + "#3917: 96.76%\n", + "#3918: 96.76%\n", + "#3919: 96.76%\n", + "#3920: 96.76%\n", + "#3921: 96.76%\n", + "#3922: 96.76%\n", + "#3923: 96.76%\n", + "#3924: 96.76%\n", + "#3925: 96.77%\n", + "#3926: 96.77%\n", + "#3927: 96.77%\n", + "#3928: 96.77%\n", + "#3929: 96.77%\n", + "#3930: 96.77%\n", + "#3931: 96.77%\n", + "#3932: 96.77%\n", + "#3933: 96.77%\n", + "#3934: 96.77%\n", + "#3935: 96.77%\n", + "#3936: 96.77%\n", + "#3937: 96.78%\n", + "#3938: 96.78%\n", + "#3939: 96.78%\n", + "#3940: 96.78%\n", + "#3941: 96.78%\n", + "#3942: 96.78%\n", + "#3943: 96.75%\n", + "#3944: 96.76%\n", + "#3945: 96.76%\n", + "#3946: 96.76%\n", + "#3947: 96.76%\n", + "#3948: 96.76%\n", + "#3949: 96.76%\n", + "#3950: 96.76%\n", + "#3951: 96.76%\n", + "#3952: 96.76%\n", + "#3953: 96.76%\n", + "#3954: 96.76%\n", + "#3955: 96.76%\n", + "#3956: 96.77%\n", + "#3957: 96.77%\n", + "#3958: 96.77%\n", + "#3959: 96.77%\n", + "#3960: 96.77%\n", + "#3961: 96.77%\n", + "#3962: 96.77%\n", + "#3963: 96.77%\n", + "#3964: 96.77%\n", + "#3965: 96.77%\n", + "#3966: 96.77%\n", + "#3967: 96.77%\n", + "#3968: 96.78%\n", + "#3969: 96.78%\n", + "#3970: 96.78%\n", + "#3971: 96.78%\n", + "#3972: 96.78%\n", + "#3973: 96.78%\n", + "#3974: 96.78%\n", + "#3975: 96.78%\n", + "#3976: 96.76%\n", + "#3977: 96.76%\n", + "#3978: 96.76%\n", + "#3979: 96.76%\n", + "#3980: 96.76%\n", + "#3981: 96.76%\n", + "#3982: 96.76%\n", + "#3983: 96.76%\n", + "#3984: 96.76%\n", + "#3985: 96.76%\n", + "#3986: 96.76%\n", + "#3987: 96.77%\n", + "#3988: 96.77%\n", + "#3989: 96.77%\n", + 
"#3990: 96.77%\n", + "#3991: 96.77%\n", + "#3992: 96.77%\n", + "#3993: 96.77%\n", + "#3994: 96.77%\n", + "#3995: 96.77%\n", + "#3996: 96.77%\n", + "#3997: 96.77%\n", + "#3998: 96.77%\n", + "#3999: 96.78%\n", + "#4000: 96.78%\n", + "#4001: 96.78%\n", + "#4002: 96.78%\n", + "#4003: 96.78%\n", + "#4004: 96.78%\n", + "#4005: 96.78%\n", + "#4006: 96.78%\n", + "#4007: 96.78%\n", + "#4008: 96.78%\n", + "#4009: 96.78%\n", + "#4010: 96.78%\n", + "#4011: 96.78%\n", + "#4012: 96.79%\n", + "#4013: 96.79%\n", + "#4014: 96.79%\n", + "#4015: 96.79%\n", + "#4016: 96.79%\n", + "#4017: 96.79%\n", + "#4018: 96.79%\n", + "#4019: 96.79%\n", + "#4020: 96.79%\n", + "#4021: 96.79%\n", + "#4022: 96.79%\n", + "#4023: 96.79%\n", + "#4024: 96.80%\n", + "#4025: 96.80%\n", + "#4026: 96.80%\n", + "#4027: 96.80%\n", + "#4028: 96.80%\n", + "#4029: 96.80%\n", + "#4030: 96.80%\n", + "#4031: 96.80%\n", + "#4032: 96.80%\n", + "#4033: 96.80%\n", + "#4034: 96.80%\n", + "#4035: 96.80%\n", + "#4036: 96.80%\n", + "#4037: 96.81%\n", + "#4038: 96.81%\n", + "#4039: 96.81%\n", + "#4040: 96.81%\n", + "#4041: 96.81%\n", + "#4042: 96.81%\n", + "#4043: 96.81%\n", + "#4044: 96.81%\n", + "#4045: 96.81%\n", + "#4046: 96.81%\n", + "#4047: 96.81%\n", + "#4048: 96.81%\n", + "#4049: 96.81%\n", + "#4050: 96.82%\n", + "#4051: 96.82%\n", + "#4052: 96.82%\n", + "#4053: 96.82%\n", + "#4054: 96.82%\n", + "#4055: 96.82%\n", + "#4056: 96.82%\n", + "#4057: 96.82%\n", + "#4058: 96.82%\n", + "#4059: 96.82%\n", + "#4060: 96.82%\n", + "#4061: 96.82%\n", + "#4062: 96.83%\n", + "#4063: 96.80%\n", + "#4064: 96.80%\n", + "#4065: 96.80%\n", + "#4066: 96.80%\n", + "#4067: 96.80%\n", + "#4068: 96.81%\n", + "#4069: 96.81%\n", + "#4070: 96.81%\n", + "#4071: 96.81%\n", + "#4072: 96.81%\n", + "#4073: 96.81%\n", + "#4074: 96.81%\n", + "#4075: 96.81%\n", + "#4076: 96.81%\n", + "#4077: 96.81%\n", + "#4078: 96.79%\n", + "#4079: 96.79%\n", + "#4080: 96.79%\n", + "#4081: 96.79%\n", + "#4082: 96.79%\n", + "#4083: 96.79%\n", + "#4084: 96.79%\n", + 
"#4085: 96.79%\n", + "#4086: 96.79%\n", + "#4087: 96.80%\n", + "#4088: 96.80%\n", + "#4089: 96.80%\n", + "#4090: 96.80%\n", + "#4091: 96.80%\n", + "#4092: 96.80%\n", + "#4093: 96.80%\n", + "#4094: 96.80%\n", + "#4095: 96.80%\n", + "#4096: 96.80%\n", + "#4097: 96.80%\n", + "#4098: 96.80%\n", + "#4099: 96.80%\n", + "#4100: 96.81%\n", + "#4101: 96.81%\n", + "#4102: 96.81%\n", + "#4103: 96.81%\n", + "#4104: 96.81%\n", + "#4105: 96.81%\n", + "#4106: 96.81%\n", + "#4107: 96.81%\n", + "#4108: 96.81%\n", + "#4109: 96.81%\n", + "#4110: 96.81%\n", + "#4111: 96.81%\n", + "#4112: 96.81%\n", + "#4113: 96.82%\n", + "#4114: 96.82%\n", + "#4115: 96.82%\n", + "#4116: 96.79%\n", + "#4117: 96.79%\n", + "#4118: 96.80%\n", + "#4119: 96.80%\n", + "#4120: 96.80%\n", + "#4121: 96.80%\n", + "#4122: 96.80%\n", + "#4123: 96.77%\n", + "#4124: 96.78%\n", + "#4125: 96.78%\n", + "#4126: 96.78%\n", + "#4127: 96.78%\n", + "#4128: 96.78%\n", + "#4129: 96.78%\n", + "#4130: 96.78%\n", + "#4131: 96.78%\n", + "#4132: 96.78%\n", + "#4133: 96.78%\n", + "#4134: 96.78%\n", + "#4135: 96.78%\n", + "#4136: 96.79%\n", + "#4137: 96.79%\n", + "#4138: 96.79%\n", + "#4139: 96.79%\n", + "#4140: 96.79%\n", + "#4141: 96.79%\n", + "#4142: 96.79%\n", + "#4143: 96.79%\n", + "#4144: 96.79%\n", + "#4145: 96.79%\n", + "#4146: 96.79%\n", + "#4147: 96.79%\n", + "#4148: 96.79%\n", + "#4149: 96.80%\n", + "#4150: 96.80%\n", + "#4151: 96.80%\n", + "#4152: 96.80%\n", + "#4153: 96.80%\n", + "#4154: 96.80%\n", + "#4155: 96.80%\n", + "#4156: 96.80%\n", + "#4157: 96.80%\n", + "#4158: 96.80%\n", + "#4159: 96.80%\n", + "#4160: 96.80%\n", + "#4161: 96.80%\n", + "#4162: 96.81%\n", + "#4163: 96.78%\n", + "#4164: 96.78%\n", + "#4165: 96.78%\n", + "#4166: 96.78%\n", + "#4167: 96.79%\n", + "#4168: 96.79%\n", + "#4169: 96.79%\n", + "#4170: 96.79%\n", + "#4171: 96.79%\n", + "#4172: 96.79%\n", + "#4173: 96.79%\n", + "#4174: 96.79%\n", + "#4175: 96.79%\n", + "#4176: 96.77%\n", + "#4177: 96.77%\n", + "#4178: 96.77%\n", + "#4179: 96.77%\n", + 
"#4180: 96.77%\n", + "#4181: 96.77%\n", + "#4182: 96.77%\n", + "#4183: 96.77%\n", + "#4184: 96.77%\n", + "#4185: 96.77%\n", + "#4186: 96.78%\n", + "#4187: 96.78%\n", + "#4188: 96.78%\n", + "#4189: 96.78%\n", + "#4190: 96.78%\n", + "#4191: 96.78%\n", + "#4192: 96.78%\n", + "#4193: 96.78%\n", + "#4194: 96.78%\n", + "#4195: 96.78%\n", + "#4196: 96.78%\n", + "#4197: 96.78%\n", + "#4198: 96.78%\n", + "#4199: 96.79%\n", + "#4200: 96.79%\n", + "#4201: 96.79%\n", + "#4202: 96.79%\n", + "#4203: 96.79%\n", + "#4204: 96.79%\n", + "#4205: 96.77%\n", + "#4206: 96.77%\n", + "#4207: 96.77%\n", + "#4208: 96.77%\n", + "#4209: 96.77%\n", + "#4210: 96.77%\n", + "#4211: 96.77%\n", + "#4212: 96.77%\n", + "#4213: 96.77%\n", + "#4214: 96.77%\n", + "#4215: 96.77%\n", + "#4216: 96.77%\n", + "#4217: 96.78%\n", + "#4218: 96.78%\n", + "#4219: 96.78%\n", + "#4220: 96.78%\n", + "#4221: 96.78%\n", + "#4222: 96.78%\n", + "#4223: 96.78%\n", + "#4224: 96.78%\n", + "#4225: 96.78%\n", + "#4226: 96.78%\n", + "#4227: 96.78%\n", + "#4228: 96.78%\n", + "#4229: 96.78%\n", + "#4230: 96.79%\n", + "#4231: 96.79%\n", + "#4232: 96.79%\n", + "#4233: 96.79%\n", + "#4234: 96.79%\n", + "#4235: 96.79%\n", + "#4236: 96.79%\n", + "#4237: 96.79%\n", + "#4238: 96.79%\n", + "#4239: 96.79%\n", + "#4240: 96.79%\n", + "#4241: 96.79%\n", + "#4242: 96.79%\n", + "#4243: 96.80%\n", + "#4244: 96.80%\n", + "#4245: 96.80%\n", + "#4246: 96.80%\n", + "#4247: 96.80%\n", + "#4248: 96.80%\n", + "#4249: 96.80%\n", + "#4250: 96.80%\n", + "#4251: 96.80%\n", + "#4252: 96.80%\n", + "#4253: 96.80%\n", + "#4254: 96.80%\n", + "#4255: 96.80%\n", + "#4256: 96.78%\n", + "#4257: 96.78%\n", + "#4258: 96.78%\n", + "#4259: 96.78%\n", + "#4260: 96.78%\n", + "#4261: 96.79%\n", + "#4262: 96.79%\n", + "#4263: 96.79%\n", + "#4264: 96.79%\n", + "#4265: 96.77%\n", + "#4266: 96.77%\n", + "#4267: 96.77%\n", + "#4268: 96.77%\n", + "#4269: 96.77%\n", + "#4270: 96.77%\n", + "#4271: 96.77%\n", + "#4272: 96.77%\n", + "#4273: 96.77%\n", + "#4274: 96.77%\n", + 
"#4275: 96.77%\n", + "#4276: 96.77%\n", + "#4277: 96.77%\n", + "#4278: 96.77%\n", + "#4279: 96.78%\n", + "#4280: 96.78%\n", + "#4281: 96.78%\n", + "#4282: 96.78%\n", + "#4283: 96.78%\n", + "#4284: 96.78%\n", + "#4285: 96.78%\n", + "#4286: 96.78%\n", + "#4287: 96.78%\n", + "#4288: 96.78%\n", + "#4289: 96.78%\n", + "#4290: 96.78%\n", + "#4291: 96.78%\n", + "#4292: 96.79%\n", + "#4293: 96.79%\n", + "#4294: 96.79%\n", + "#4295: 96.79%\n", + "#4296: 96.79%\n", + "#4297: 96.79%\n", + "#4298: 96.79%\n", + "#4299: 96.79%\n", + "#4300: 96.79%\n", + "#4301: 96.79%\n", + "#4302: 96.79%\n", + "#4303: 96.79%\n", + "#4304: 96.79%\n", + "#4305: 96.80%\n", + "#4306: 96.77%\n", + "#4307: 96.77%\n", + "#4308: 96.77%\n", + "#4309: 96.77%\n", + "#4310: 96.78%\n", + "#4311: 96.78%\n", + "#4312: 96.78%\n", + "#4313: 96.78%\n", + "#4314: 96.78%\n", + "#4315: 96.78%\n", + "#4316: 96.78%\n", + "#4317: 96.78%\n", + "#4318: 96.78%\n", + "#4319: 96.78%\n", + "#4320: 96.78%\n", + "#4321: 96.78%\n", + "#4322: 96.78%\n", + "#4323: 96.79%\n", + "#4324: 96.79%\n", + "#4325: 96.79%\n", + "#4326: 96.79%\n", + "#4327: 96.79%\n", + "#4328: 96.79%\n", + "#4329: 96.79%\n", + "#4330: 96.79%\n", + "#4331: 96.79%\n", + "#4332: 96.79%\n", + "#4333: 96.79%\n", + "#4334: 96.79%\n", + "#4335: 96.79%\n", + "#4336: 96.80%\n", + "#4337: 96.80%\n", + "#4338: 96.80%\n", + "#4339: 96.80%\n", + "#4340: 96.80%\n", + "#4341: 96.80%\n", + "#4342: 96.80%\n", + "#4343: 96.80%\n", + "#4344: 96.80%\n", + "#4345: 96.80%\n", + "#4346: 96.80%\n", + "#4347: 96.80%\n", + "#4348: 96.80%\n", + "#4349: 96.80%\n", + "#4350: 96.81%\n", + "#4351: 96.81%\n", + "#4352: 96.81%\n", + "#4353: 96.81%\n", + "#4354: 96.81%\n", + "#4355: 96.81%\n", + "#4356: 96.81%\n", + "#4357: 96.81%\n", + "#4358: 96.81%\n", + "#4359: 96.81%\n", + "#4360: 96.81%\n", + "#4361: 96.81%\n", + "#4362: 96.81%\n", + "#4363: 96.81%\n", + "#4364: 96.82%\n", + "#4365: 96.82%\n", + "#4366: 96.82%\n", + "#4367: 96.82%\n", + "#4368: 96.82%\n", + "#4369: 96.82%\n", + 
"#4370: 96.82%\n", + "#4371: 96.82%\n", + "#4372: 96.82%\n", + "#4373: 96.82%\n", + "#4374: 96.82%\n", + "#4375: 96.82%\n", + "#4376: 96.82%\n", + "#4377: 96.83%\n", + "#4378: 96.83%\n", + "#4379: 96.83%\n", + "#4380: 96.83%\n", + "#4381: 96.83%\n", + "#4382: 96.83%\n", + "#4383: 96.83%\n", + "#4384: 96.83%\n", + "#4385: 96.83%\n", + "#4386: 96.83%\n", + "#4387: 96.83%\n", + "#4388: 96.83%\n", + "#4389: 96.83%\n", + "#4390: 96.83%\n", + "#4391: 96.81%\n", + "#4392: 96.81%\n", + "#4393: 96.81%\n", + "#4394: 96.81%\n", + "#4395: 96.82%\n", + "#4396: 96.82%\n", + "#4397: 96.82%\n", + "#4398: 96.82%\n", + "#4399: 96.82%\n", + "#4400: 96.80%\n", + "#4401: 96.80%\n", + "#4402: 96.80%\n", + "#4403: 96.80%\n", + "#4404: 96.80%\n", + "#4405: 96.80%\n", + "#4406: 96.80%\n", + "#4407: 96.80%\n", + "#4408: 96.80%\n", + "#4409: 96.80%\n", + "#4410: 96.80%\n", + "#4411: 96.80%\n", + "#4412: 96.80%\n", + "#4413: 96.81%\n", + "#4414: 96.81%\n", + "#4415: 96.81%\n", + "#4416: 96.81%\n", + "#4417: 96.81%\n", + "#4418: 96.81%\n", + "#4419: 96.81%\n", + "#4420: 96.81%\n", + "#4421: 96.81%\n", + "#4422: 96.81%\n", + "#4423: 96.81%\n", + "#4424: 96.81%\n", + "#4425: 96.81%\n", + "#4426: 96.81%\n", + "#4427: 96.82%\n", + "#4428: 96.82%\n", + "#4429: 96.82%\n", + "#4430: 96.82%\n", + "#4431: 96.82%\n", + "#4432: 96.82%\n", + "#4433: 96.82%\n", + "#4434: 96.82%\n", + "#4435: 96.82%\n", + "#4436: 96.82%\n", + "#4437: 96.82%\n", + "#4438: 96.82%\n", + "#4439: 96.82%\n", + "#4440: 96.83%\n", + "#4441: 96.83%\n", + "#4442: 96.83%\n", + "#4443: 96.80%\n", + "#4444: 96.81%\n", + "#4445: 96.81%\n", + "#4446: 96.81%\n", + "#4447: 96.81%\n", + "#4448: 96.81%\n", + "#4449: 96.81%\n", + "#4450: 96.81%\n", + "#4451: 96.81%\n", + "#4452: 96.81%\n", + "#4453: 96.81%\n", + "#4454: 96.81%\n", + "#4455: 96.81%\n", + "#4456: 96.81%\n", + "#4457: 96.81%\n", + "#4458: 96.82%\n", + "#4459: 96.82%\n", + "#4460: 96.82%\n", + "#4461: 96.82%\n", + "#4462: 96.82%\n", + "#4463: 96.82%\n", + "#4464: 96.82%\n", + 
"#4465: 96.82%\n", + "#4466: 96.82%\n", + "#4467: 96.82%\n", + "#4468: 96.82%\n", + "#4469: 96.82%\n", + "#4470: 96.82%\n", + "#4471: 96.82%\n", + "#4472: 96.83%\n", + "#4473: 96.83%\n", + "#4474: 96.83%\n", + "#4475: 96.83%\n", + "#4476: 96.83%\n", + "#4477: 96.83%\n", + "#4478: 96.83%\n", + "#4479: 96.83%\n", + "#4480: 96.83%\n", + "#4481: 96.83%\n", + "#4482: 96.83%\n", + "#4483: 96.83%\n", + "#4484: 96.83%\n", + "#4485: 96.83%\n", + "#4486: 96.84%\n", + "#4487: 96.84%\n", + "#4488: 96.84%\n", + "#4489: 96.84%\n", + "#4490: 96.84%\n", + "#4491: 96.84%\n", + "#4492: 96.84%\n", + "#4493: 96.84%\n", + "#4494: 96.84%\n", + "#4495: 96.84%\n", + "#4496: 96.84%\n", + "#4497: 96.82%\n", + "#4498: 96.82%\n", + "#4499: 96.82%\n", + "#4500: 96.80%\n", + "#4501: 96.80%\n", + "#4502: 96.80%\n", + "#4503: 96.80%\n", + "#4504: 96.80%\n", + "#4505: 96.78%\n", + "#4506: 96.78%\n", + "#4507: 96.78%\n", + "#4508: 96.78%\n", + "#4509: 96.78%\n", + "#4510: 96.79%\n", + "#4511: 96.79%\n", + "#4512: 96.79%\n", + "#4513: 96.79%\n", + "#4514: 96.79%\n", + "#4515: 96.79%\n", + "#4516: 96.79%\n", + "#4517: 96.79%\n", + "#4518: 96.79%\n", + "#4519: 96.79%\n", + "#4520: 96.79%\n", + "#4521: 96.79%\n", + "#4522: 96.79%\n", + "#4523: 96.79%\n", + "#4524: 96.80%\n", + "#4525: 96.80%\n", + "#4526: 96.80%\n", + "#4527: 96.80%\n", + "#4528: 96.80%\n", + "#4529: 96.80%\n", + "#4530: 96.80%\n", + "#4531: 96.80%\n", + "#4532: 96.80%\n", + "#4533: 96.80%\n", + "#4534: 96.80%\n", + "#4535: 96.80%\n", + "#4536: 96.78%\n", + "#4537: 96.78%\n", + "#4538: 96.78%\n", + "#4539: 96.78%\n", + "#4540: 96.78%\n", + "#4541: 96.79%\n", + "#4542: 96.79%\n", + "#4543: 96.79%\n", + "#4544: 96.79%\n", + "#4545: 96.79%\n", + "#4546: 96.79%\n", + "#4547: 96.79%\n", + "#4548: 96.79%\n", + "#4549: 96.79%\n", + "#4550: 96.79%\n", + "#4551: 96.79%\n", + "#4552: 96.79%\n", + "#4553: 96.79%\n", + "#4554: 96.79%\n", + "#4555: 96.80%\n", + "#4556: 96.80%\n", + "#4557: 96.80%\n", + "#4558: 96.80%\n", + "#4559: 96.80%\n", + 
"#4560: 96.80%\n", + "#4561: 96.80%\n", + "#4562: 96.80%\n", + "#4563: 96.80%\n", + "#4564: 96.80%\n", + "#4565: 96.80%\n", + "#4566: 96.80%\n", + "#4567: 96.80%\n", + "#4568: 96.80%\n", + "#4569: 96.81%\n", + "#4570: 96.81%\n", + "#4571: 96.78%\n", + "#4572: 96.79%\n", + "#4573: 96.79%\n", + "#4574: 96.79%\n", + "#4575: 96.79%\n", + "#4576: 96.79%\n", + "#4577: 96.79%\n", + "#4578: 96.77%\n", + "#4579: 96.77%\n", + "#4580: 96.77%\n", + "#4581: 96.77%\n", + "#4582: 96.77%\n", + "#4583: 96.77%\n", + "#4584: 96.77%\n", + "#4585: 96.77%\n", + "#4586: 96.77%\n", + "#4587: 96.77%\n", + "#4588: 96.77%\n", + "#4589: 96.78%\n", + "#4590: 96.78%\n", + "#4591: 96.78%\n", + "#4592: 96.78%\n", + "#4593: 96.78%\n", + "#4594: 96.78%\n", + "#4595: 96.78%\n", + "#4596: 96.78%\n", + "#4597: 96.78%\n", + "#4598: 96.78%\n", + "#4599: 96.78%\n", + "#4600: 96.78%\n", + "#4601: 96.78%\n", + "#4602: 96.78%\n", + "#4603: 96.79%\n", + "#4604: 96.79%\n", + "#4605: 96.79%\n", + "#4606: 96.79%\n", + "#4607: 96.79%\n", + "#4608: 96.79%\n", + "#4609: 96.79%\n", + "#4610: 96.79%\n", + "#4611: 96.79%\n", + "#4612: 96.79%\n", + "#4613: 96.79%\n", + "#4614: 96.79%\n", + "#4615: 96.77%\n", + "#4616: 96.77%\n", + "#4617: 96.77%\n", + "#4618: 96.77%\n", + "#4619: 96.77%\n", + "#4620: 96.78%\n", + "#4621: 96.78%\n", + "#4622: 96.78%\n", + "#4623: 96.78%\n", + "#4624: 96.78%\n", + "#4625: 96.78%\n", + "#4626: 96.78%\n", + "#4627: 96.78%\n", + "#4628: 96.78%\n", + "#4629: 96.78%\n", + "#4630: 96.78%\n", + "#4631: 96.78%\n", + "#4632: 96.78%\n", + "#4633: 96.78%\n", + "#4634: 96.79%\n", + "#4635: 96.79%\n", + "#4636: 96.79%\n", + "#4637: 96.79%\n", + "#4638: 96.79%\n", + "#4639: 96.77%\n", + "#4640: 96.77%\n", + "#4641: 96.77%\n", + "#4642: 96.77%\n", + "#4643: 96.77%\n", + "#4644: 96.77%\n", + "#4645: 96.77%\n", + "#4646: 96.77%\n", + "#4647: 96.77%\n", + "#4648: 96.77%\n", + "#4649: 96.77%\n", + "#4650: 96.77%\n", + "#4651: 96.78%\n", + "#4652: 96.78%\n", + "#4653: 96.78%\n", + "#4654: 96.78%\n", + 
"#4655: 96.78%\n", + "#4656: 96.78%\n", + "#4657: 96.78%\n", + "#4658: 96.78%\n", + "#4659: 96.78%\n", + "#4660: 96.78%\n", + "#4661: 96.78%\n", + "#4662: 96.78%\n", + "#4663: 96.78%\n", + "#4664: 96.78%\n", + "#4665: 96.79%\n", + "#4666: 96.79%\n", + "#4667: 96.79%\n", + "#4668: 96.79%\n", + "#4669: 96.79%\n", + "#4670: 96.79%\n", + "#4671: 96.79%\n", + "#4672: 96.79%\n", + "#4673: 96.79%\n", + "#4674: 96.79%\n", + "#4675: 96.79%\n", + "#4676: 96.79%\n", + "#4677: 96.79%\n", + "#4678: 96.79%\n", + "#4679: 96.79%\n", + "#4680: 96.80%\n", + "#4681: 96.80%\n", + "#4682: 96.80%\n", + "#4683: 96.80%\n", + "#4684: 96.80%\n", + "#4685: 96.80%\n", + "#4686: 96.80%\n", + "#4687: 96.80%\n", + "#4688: 96.80%\n", + "#4689: 96.80%\n", + "#4690: 96.80%\n", + "#4691: 96.80%\n", + "#4692: 96.80%\n", + "#4693: 96.80%\n", + "#4694: 96.81%\n", + "#4695: 96.81%\n", + "#4696: 96.81%\n", + "#4697: 96.81%\n", + "#4698: 96.81%\n", + "#4699: 96.81%\n", + "#4700: 96.81%\n", + "#4701: 96.81%\n", + "#4702: 96.81%\n", + "#4703: 96.81%\n", + "#4704: 96.81%\n", + "#4705: 96.81%\n", + "#4706: 96.81%\n", + "#4707: 96.81%\n", + "#4708: 96.81%\n", + "#4709: 96.82%\n", + "#4710: 96.82%\n", + "#4711: 96.82%\n", + "#4712: 96.82%\n", + "#4713: 96.82%\n", + "#4714: 96.82%\n", + "#4715: 96.82%\n", + "#4716: 96.82%\n", + "#4717: 96.82%\n", + "#4718: 96.82%\n", + "#4719: 96.82%\n", + "#4720: 96.82%\n", + "#4721: 96.82%\n", + "#4722: 96.82%\n", + "#4723: 96.82%\n", + "#4724: 96.83%\n", + "#4725: 96.83%\n", + "#4726: 96.83%\n", + "#4727: 96.83%\n", + "#4728: 96.83%\n", + "#4729: 96.83%\n", + "#4730: 96.83%\n", + "#4731: 96.83%\n", + "#4732: 96.83%\n", + "#4733: 96.83%\n", + "#4734: 96.83%\n", + "#4735: 96.83%\n", + "#4736: 96.83%\n", + "#4737: 96.81%\n", + "#4738: 96.81%\n", + "#4739: 96.81%\n", + "#4740: 96.79%\n", + "#4741: 96.79%\n", + "#4742: 96.80%\n", + "#4743: 96.80%\n", + "#4744: 96.80%\n", + "#4745: 96.80%\n", + "#4746: 96.80%\n", + "#4747: 96.80%\n", + "#4748: 96.80%\n", + "#4749: 96.80%\n", + 
"#4750: 96.80%\n", + "#4751: 96.78%\n", + "#4752: 96.78%\n", + "#4753: 96.78%\n", + "#4754: 96.78%\n", + "#4755: 96.78%\n", + "#4756: 96.78%\n", + "#4757: 96.78%\n", + "#4758: 96.79%\n", + "#4759: 96.79%\n", + "#4760: 96.79%\n", + "#4761: 96.77%\n", + "#4762: 96.77%\n", + "#4763: 96.77%\n", + "#4764: 96.77%\n", + "#4765: 96.77%\n", + "#4766: 96.77%\n", + "#4767: 96.77%\n", + "#4768: 96.77%\n", + "#4769: 96.77%\n", + "#4770: 96.77%\n", + "#4771: 96.77%\n", + "#4772: 96.77%\n", + "#4773: 96.77%\n", + "#4774: 96.77%\n", + "#4775: 96.78%\n", + "#4776: 96.78%\n", + "#4777: 96.78%\n", + "#4778: 96.78%\n", + "#4779: 96.78%\n", + "#4780: 96.78%\n", + "#4781: 96.78%\n", + "#4782: 96.78%\n", + "#4783: 96.76%\n", + "#4784: 96.76%\n", + "#4785: 96.76%\n", + "#4786: 96.76%\n", + "#4787: 96.76%\n", + "#4788: 96.76%\n", + "#4789: 96.76%\n", + "#4790: 96.76%\n", + "#4791: 96.77%\n", + "#4792: 96.77%\n", + "#4793: 96.77%\n", + "#4794: 96.77%\n", + "#4795: 96.77%\n", + "#4796: 96.77%\n", + "#4797: 96.77%\n", + "#4798: 96.77%\n", + "#4799: 96.77%\n", + "#4800: 96.77%\n", + "#4801: 96.77%\n", + "#4802: 96.77%\n", + "#4803: 96.77%\n", + "#4804: 96.77%\n", + "#4805: 96.77%\n", + "#4806: 96.78%\n", + "#4807: 96.76%\n", + "#4808: 96.74%\n", + "#4809: 96.74%\n", + "#4810: 96.74%\n", + "#4811: 96.74%\n", + "#4812: 96.74%\n", + "#4813: 96.74%\n", + "#4814: 96.74%\n", + "#4815: 96.74%\n", + "#4816: 96.74%\n", + "#4817: 96.74%\n", + "#4818: 96.74%\n", + "#4819: 96.74%\n", + "#4820: 96.74%\n", + "#4821: 96.74%\n", + "#4822: 96.74%\n", + "#4823: 96.75%\n", + "#4824: 96.75%\n", + "#4825: 96.75%\n", + "#4826: 96.75%\n", + "#4827: 96.75%\n", + "#4828: 96.75%\n", + "#4829: 96.75%\n", + "#4830: 96.75%\n", + "#4831: 96.75%\n", + "#4832: 96.75%\n", + "#4833: 96.75%\n", + "#4834: 96.75%\n", + "#4835: 96.75%\n", + "#4836: 96.75%\n", + "#4837: 96.75%\n", + "#4838: 96.73%\n", + "#4839: 96.74%\n", + "#4840: 96.74%\n", + "#4841: 96.74%\n", + "#4842: 96.74%\n", + "#4843: 96.74%\n", + "#4844: 96.74%\n", + 
"#4845: 96.74%\n", + "#4846: 96.74%\n", + "#4847: 96.74%\n", + "#4848: 96.74%\n", + "#4849: 96.74%\n", + "#4850: 96.74%\n", + "#4851: 96.74%\n", + "#4852: 96.72%\n", + "#4853: 96.72%\n", + "#4854: 96.73%\n", + "#4855: 96.73%\n", + "#4856: 96.73%\n", + "#4857: 96.73%\n", + "#4858: 96.73%\n", + "#4859: 96.73%\n", + "#4860: 96.71%\n", + "#4861: 96.71%\n", + "#4862: 96.71%\n", + "#4863: 96.71%\n", + "#4864: 96.71%\n", + "#4865: 96.71%\n", + "#4866: 96.71%\n", + "#4867: 96.71%\n", + "#4868: 96.71%\n", + "#4869: 96.71%\n", + "#4870: 96.72%\n", + "#4871: 96.72%\n", + "#4872: 96.72%\n", + "#4873: 96.72%\n", + "#4874: 96.70%\n", + "#4875: 96.70%\n", + "#4876: 96.70%\n", + "#4877: 96.70%\n", + "#4878: 96.70%\n", + "#4879: 96.68%\n", + "#4880: 96.68%\n", + "#4881: 96.68%\n", + "#4882: 96.68%\n", + "#4883: 96.68%\n", + "#4884: 96.68%\n", + "#4885: 96.68%\n", + "#4886: 96.69%\n", + "#4887: 96.69%\n", + "#4888: 96.69%\n", + "#4889: 96.69%\n", + "#4890: 96.67%\n", + "#4891: 96.67%\n", + "#4892: 96.67%\n", + "#4893: 96.67%\n", + "#4894: 96.67%\n", + "#4895: 96.67%\n", + "#4896: 96.67%\n", + "#4897: 96.67%\n", + "#4898: 96.67%\n", + "#4899: 96.67%\n", + "#4900: 96.67%\n", + "#4901: 96.67%\n", + "#4902: 96.68%\n", + "#4903: 96.68%\n", + "#4904: 96.68%\n", + "#4905: 96.68%\n", + "#4906: 96.68%\n", + "#4907: 96.68%\n", + "#4908: 96.68%\n", + "#4909: 96.68%\n", + "#4910: 96.68%\n", + "#4911: 96.68%\n", + "#4912: 96.68%\n", + "#4913: 96.68%\n", + "#4914: 96.68%\n", + "#4915: 96.68%\n", + "#4916: 96.68%\n", + "#4917: 96.69%\n", + "#4918: 96.69%\n", + "#4919: 96.69%\n", + "#4920: 96.69%\n", + "#4921: 96.69%\n", + "#4922: 96.69%\n", + "#4923: 96.69%\n", + "#4924: 96.69%\n", + "#4925: 96.69%\n", + "#4926: 96.69%\n", + "#4927: 96.69%\n", + "#4928: 96.69%\n", + "#4929: 96.69%\n", + "#4930: 96.69%\n", + "#4931: 96.70%\n", + "#4932: 96.70%\n", + "#4933: 96.70%\n", + "#4934: 96.70%\n", + "#4935: 96.70%\n", + "#4936: 96.70%\n", + "#4937: 96.70%\n", + "#4938: 96.70%\n", + "#4939: 96.70%\n", + 
"#4940: 96.70%\n", + "#4941: 96.70%\n", + "#4942: 96.70%\n", + "#4943: 96.70%\n", + "#4944: 96.70%\n", + "#4945: 96.70%\n", + "#4946: 96.71%\n", + "#4947: 96.71%\n", + "#4948: 96.71%\n", + "#4949: 96.71%\n", + "#4950: 96.71%\n", + "#4951: 96.71%\n", + "#4952: 96.71%\n", + "#4953: 96.71%\n", + "#4954: 96.71%\n", + "#4955: 96.71%\n", + "#4956: 96.69%\n", + "#4957: 96.69%\n", + "#4958: 96.69%\n", + "#4959: 96.69%\n", + "#4960: 96.69%\n", + "#4961: 96.69%\n", + "#4962: 96.70%\n", + "#4963: 96.70%\n", + "#4964: 96.70%\n", + "#4965: 96.70%\n", + "#4966: 96.70%\n", + "#4967: 96.70%\n", + "#4968: 96.70%\n", + "#4969: 96.70%\n", + "#4970: 96.70%\n", + "#4971: 96.70%\n", + "#4972: 96.70%\n", + "#4973: 96.70%\n", + "#4974: 96.70%\n", + "#4975: 96.70%\n", + "#4976: 96.70%\n", + "#4977: 96.71%\n", + "#4978: 96.69%\n", + "#4979: 96.69%\n", + "#4980: 96.69%\n", + "#4981: 96.69%\n", + "#4982: 96.69%\n", + "#4983: 96.69%\n", + "#4984: 96.69%\n", + "#4985: 96.69%\n", + "#4986: 96.69%\n", + "#4987: 96.69%\n", + "#4988: 96.69%\n", + "#4989: 96.69%\n", + "#4990: 96.69%\n", + "#4991: 96.69%\n", + "#4992: 96.70%\n", + "#4993: 96.70%\n", + "#4994: 96.70%\n", + "#4995: 96.70%\n", + "#4996: 96.70%\n", + "#4997: 96.70%\n", + "#4998: 96.70%\n", + "#4999: 96.70%\n", + "#5000: 96.70%\n", + "#5001: 96.70%\n", + "#5002: 96.70%\n", + "#5003: 96.70%\n", + "#5004: 96.70%\n", + "#5005: 96.70%\n", + "#5006: 96.70%\n", + "#5007: 96.71%\n", + "#5008: 96.71%\n", + "#5009: 96.71%\n", + "#5010: 96.71%\n", + "#5011: 96.71%\n", + "#5012: 96.71%\n", + "#5013: 96.71%\n", + "#5014: 96.71%\n", + "#5015: 96.71%\n", + "#5016: 96.71%\n", + "#5017: 96.71%\n", + "#5018: 96.71%\n", + "#5019: 96.71%\n", + "#5020: 96.71%\n", + "#5021: 96.71%\n", + "#5022: 96.72%\n", + "#5023: 96.72%\n", + "#5024: 96.72%\n", + "#5025: 96.72%\n", + "#5026: 96.72%\n", + "#5027: 96.72%\n", + "#5028: 96.72%\n", + "#5029: 96.72%\n", + "#5030: 96.72%\n", + "#5031: 96.72%\n", + "#5032: 96.72%\n", + "#5033: 96.72%\n", + "#5034: 96.72%\n", + 
"#5035: 96.72%\n", + "#5036: 96.72%\n", + "#5037: 96.72%\n", + "#5038: 96.73%\n", + "#5039: 96.73%\n", + "#5040: 96.73%\n", + "#5041: 96.73%\n", + "#5042: 96.73%\n", + "#5043: 96.73%\n", + "#5044: 96.73%\n", + "#5045: 96.73%\n", + "#5046: 96.73%\n", + "#5047: 96.73%\n", + "#5048: 96.73%\n", + "#5049: 96.73%\n", + "#5050: 96.73%\n", + "#5051: 96.73%\n", + "#5052: 96.73%\n", + "#5053: 96.74%\n", + "#5054: 96.74%\n", + "#5055: 96.74%\n", + "#5056: 96.74%\n", + "#5057: 96.74%\n", + "#5058: 96.74%\n", + "#5059: 96.74%\n", + "#5060: 96.74%\n", + "#5061: 96.74%\n", + "#5062: 96.74%\n", + "#5063: 96.74%\n", + "#5064: 96.74%\n", + "#5065: 96.74%\n", + "#5066: 96.74%\n", + "#5067: 96.74%\n", + "#5068: 96.74%\n", + "#5069: 96.75%\n", + "#5070: 96.75%\n", + "#5071: 96.75%\n", + "#5072: 96.75%\n", + "#5073: 96.75%\n", + "#5074: 96.75%\n", + "#5075: 96.75%\n", + "#5076: 96.75%\n", + "#5077: 96.75%\n", + "#5078: 96.75%\n", + "#5079: 96.75%\n", + "#5080: 96.75%\n", + "#5081: 96.75%\n", + "#5082: 96.75%\n", + "#5083: 96.75%\n", + "#5084: 96.76%\n", + "#5085: 96.76%\n", + "#5086: 96.76%\n", + "#5087: 96.76%\n", + "#5088: 96.76%\n", + "#5089: 96.76%\n", + "#5090: 96.76%\n", + "#5091: 96.76%\n", + "#5092: 96.76%\n", + "#5093: 96.76%\n", + "#5094: 96.76%\n", + "#5095: 96.76%\n", + "#5096: 96.76%\n", + "#5097: 96.76%\n", + "#5098: 96.76%\n", + "#5099: 96.76%\n", + "#5100: 96.77%\n", + "#5101: 96.77%\n", + "#5102: 96.77%\n", + "#5103: 96.77%\n", + "#5104: 96.77%\n", + "#5105: 96.77%\n", + "#5106: 96.77%\n", + "#5107: 96.77%\n", + "#5108: 96.77%\n", + "#5109: 96.77%\n", + "#5110: 96.77%\n", + "#5111: 96.77%\n", + "#5112: 96.77%\n", + "#5113: 96.77%\n", + "#5114: 96.77%\n", + "#5115: 96.77%\n", + "#5116: 96.78%\n", + "#5117: 96.78%\n", + "#5118: 96.78%\n", + "#5119: 96.78%\n", + "#5120: 96.78%\n", + "#5121: 96.78%\n", + "#5122: 96.78%\n", + "#5123: 96.78%\n", + "#5124: 96.78%\n", + "#5125: 96.78%\n", + "#5126: 96.78%\n", + "#5127: 96.78%\n", + "#5128: 96.78%\n", + "#5129: 96.78%\n", + 
"#5130: 96.78%\n", + "#5131: 96.78%\n", + "#5132: 96.79%\n", + "#5133: 96.79%\n", + "#5134: 96.79%\n", + "#5135: 96.79%\n", + "#5136: 96.79%\n", + "#5137: 96.79%\n", + "#5138: 96.79%\n", + "#5139: 96.79%\n", + "#5140: 96.77%\n", + "#5141: 96.77%\n", + "#5142: 96.77%\n", + "#5143: 96.77%\n", + "#5144: 96.77%\n", + "#5145: 96.77%\n", + "#5146: 96.77%\n", + "#5147: 96.78%\n", + "#5148: 96.78%\n", + "#5149: 96.78%\n", + "#5150: 96.78%\n", + "#5151: 96.78%\n", + "#5152: 96.78%\n", + "#5153: 96.78%\n", + "#5154: 96.78%\n", + "#5155: 96.78%\n", + "#5156: 96.78%\n", + "#5157: 96.78%\n", + "#5158: 96.78%\n", + "#5159: 96.76%\n", + "#5160: 96.76%\n", + "#5161: 96.76%\n", + "#5162: 96.77%\n", + "#5163: 96.77%\n", + "#5164: 96.77%\n", + "#5165: 96.77%\n", + "#5166: 96.77%\n", + "#5167: 96.77%\n", + "#5168: 96.77%\n", + "#5169: 96.77%\n", + "#5170: 96.77%\n", + "#5171: 96.77%\n", + "#5172: 96.77%\n", + "#5173: 96.77%\n", + "#5174: 96.77%\n", + "#5175: 96.77%\n", + "#5176: 96.75%\n", + "#5177: 96.76%\n", + "#5178: 96.76%\n", + "#5179: 96.76%\n", + "#5180: 96.76%\n", + "#5181: 96.76%\n", + "#5182: 96.76%\n", + "#5183: 96.74%\n", + "#5184: 96.74%\n", + "#5185: 96.74%\n", + "#5186: 96.74%\n", + "#5187: 96.74%\n", + "#5188: 96.74%\n", + "#5189: 96.74%\n", + "#5190: 96.74%\n", + "#5191: 96.74%\n", + "#5192: 96.75%\n", + "#5193: 96.75%\n", + "#5194: 96.75%\n", + "#5195: 96.75%\n", + "#5196: 96.75%\n", + "#5197: 96.75%\n", + "#5198: 96.75%\n", + "#5199: 96.75%\n", + "#5200: 96.75%\n", + "#5201: 96.75%\n", + "#5202: 96.75%\n", + "#5203: 96.75%\n", + "#5204: 96.75%\n", + "#5205: 96.75%\n", + "#5206: 96.75%\n", + "#5207: 96.75%\n", + "#5208: 96.76%\n", + "#5209: 96.76%\n", + "#5210: 96.76%\n", + "#5211: 96.76%\n", + "#5212: 96.76%\n", + "#5213: 96.76%\n", + "#5214: 96.76%\n", + "#5215: 96.76%\n", + "#5216: 96.76%\n", + "#5217: 96.76%\n", + "#5218: 96.76%\n", + "#5219: 96.76%\n", + "#5220: 96.76%\n", + "#5221: 96.76%\n", + "#5222: 96.76%\n", + "#5223: 96.76%\n", + "#5224: 96.77%\n", + 
"#5225: 96.77%\n", + "#5226: 96.77%\n", + "#5227: 96.77%\n", + "#5228: 96.77%\n", + "#5229: 96.77%\n", + "#5230: 96.77%\n", + "#5231: 96.77%\n", + "#5232: 96.77%\n", + "#5233: 96.77%\n", + "#5234: 96.77%\n", + "#5235: 96.77%\n", + "#5236: 96.77%\n", + "#5237: 96.77%\n", + "#5238: 96.77%\n", + "#5239: 96.77%\n", + "#5240: 96.78%\n", + "#5241: 96.78%\n", + "#5242: 96.78%\n", + "#5243: 96.78%\n", + "#5244: 96.78%\n", + "#5245: 96.78%\n", + "#5246: 96.78%\n", + "#5247: 96.78%\n", + "#5248: 96.78%\n", + "#5249: 96.78%\n", + "#5250: 96.78%\n", + "#5251: 96.78%\n", + "#5252: 96.78%\n", + "#5253: 96.78%\n", + "#5254: 96.78%\n", + "#5255: 96.78%\n", + "#5256: 96.79%\n", + "#5257: 96.79%\n", + "#5258: 96.79%\n", + "#5259: 96.79%\n", + "#5260: 96.79%\n", + "#5261: 96.79%\n", + "#5262: 96.79%\n", + "#5263: 96.79%\n", + "#5264: 96.79%\n", + "#5265: 96.79%\n", + "#5266: 96.79%\n", + "#5267: 96.79%\n", + "#5268: 96.79%\n", + "#5269: 96.79%\n", + "#5270: 96.79%\n", + "#5271: 96.79%\n", + "#5272: 96.79%\n", + "#5273: 96.80%\n", + "#5274: 96.80%\n", + "#5275: 96.80%\n", + "#5276: 96.80%\n", + "#5277: 96.80%\n", + "#5278: 96.80%\n", + "#5279: 96.80%\n", + "#5280: 96.80%\n", + "#5281: 96.80%\n", + "#5282: 96.80%\n", + "#5283: 96.80%\n", + "#5284: 96.80%\n", + "#5285: 96.80%\n", + "#5286: 96.80%\n", + "#5287: 96.80%\n", + "#5288: 96.80%\n", + "#5289: 96.81%\n", + "#5290: 96.81%\n", + "#5291: 96.81%\n", + "#5292: 96.81%\n", + "#5293: 96.81%\n", + "#5294: 96.81%\n", + "#5295: 96.81%\n", + "#5296: 96.81%\n", + "#5297: 96.81%\n", + "#5298: 96.81%\n", + "#5299: 96.81%\n", + "#5300: 96.81%\n", + "#5301: 96.81%\n", + "#5302: 96.81%\n", + "#5303: 96.81%\n", + "#5304: 96.81%\n", + "#5305: 96.81%\n", + "#5306: 96.82%\n", + "#5307: 96.82%\n", + "#5308: 96.82%\n", + "#5309: 96.82%\n", + "#5310: 96.82%\n", + "#5311: 96.82%\n", + "#5312: 96.82%\n", + "#5313: 96.82%\n", + "#5314: 96.82%\n", + "#5315: 96.82%\n", + "#5316: 96.82%\n", + "#5317: 96.82%\n", + "#5318: 96.82%\n", + "#5319: 96.82%\n", + 
"#5320: 96.82%\n", + "#5321: 96.82%\n", + "#5322: 96.83%\n", + "#5323: 96.83%\n", + "#5324: 96.83%\n", + "#5325: 96.83%\n", + "#5326: 96.83%\n", + "#5327: 96.83%\n", + "#5328: 96.83%\n", + "#5329: 96.83%\n", + "#5330: 96.83%\n", + "#5331: 96.83%\n", + "#5332: 96.83%\n", + "#5333: 96.83%\n", + "#5334: 96.83%\n", + "#5335: 96.83%\n", + "#5336: 96.83%\n", + "#5337: 96.83%\n", + "#5338: 96.83%\n", + "#5339: 96.84%\n", + "#5340: 96.84%\n", + "#5341: 96.84%\n", + "#5342: 96.84%\n", + "#5343: 96.84%\n", + "#5344: 96.84%\n", + "#5345: 96.84%\n", + "#5346: 96.84%\n", + "#5347: 96.84%\n", + "#5348: 96.84%\n", + "#5349: 96.84%\n", + "#5350: 96.84%\n", + "#5351: 96.84%\n", + "#5352: 96.84%\n", + "#5353: 96.84%\n", + "#5354: 96.84%\n", + "#5355: 96.84%\n", + "#5356: 96.85%\n", + "#5357: 96.85%\n", + "#5358: 96.85%\n", + "#5359: 96.85%\n", + "#5360: 96.85%\n", + "#5361: 96.85%\n", + "#5362: 96.85%\n", + "#5363: 96.85%\n", + "#5364: 96.85%\n", + "#5365: 96.85%\n", + "#5366: 96.85%\n", + "#5367: 96.85%\n", + "#5368: 96.85%\n", + "#5369: 96.85%\n", + "#5370: 96.85%\n", + "#5371: 96.85%\n", + "#5372: 96.85%\n", + "#5373: 96.86%\n", + "#5374: 96.86%\n", + "#5375: 96.86%\n", + "#5376: 96.86%\n", + "#5377: 96.86%\n", + "#5378: 96.86%\n", + "#5379: 96.86%\n", + "#5380: 96.86%\n", + "#5381: 96.86%\n", + "#5382: 96.86%\n", + "#5383: 96.86%\n", + "#5384: 96.86%\n", + "#5385: 96.86%\n", + "#5386: 96.86%\n", + "#5387: 96.86%\n", + "#5388: 96.86%\n", + "#5389: 96.86%\n", + "#5390: 96.87%\n", + "#5391: 96.87%\n", + "#5392: 96.87%\n", + "#5393: 96.87%\n", + "#5394: 96.87%\n", + "#5395: 96.87%\n", + "#5396: 96.87%\n", + "#5397: 96.87%\n", + "#5398: 96.87%\n", + "#5399: 96.87%\n", + "#5400: 96.87%\n", + "#5401: 96.87%\n", + "#5402: 96.87%\n", + "#5403: 96.87%\n", + "#5404: 96.87%\n", + "#5405: 96.87%\n", + "#5406: 96.87%\n", + "#5407: 96.88%\n", + "#5408: 96.88%\n", + "#5409: 96.88%\n", + "#5410: 96.88%\n", + "#5411: 96.88%\n", + "#5412: 96.88%\n", + "#5413: 96.88%\n", + "#5414: 96.88%\n", + 
"#5415: 96.88%\n", + "#5416: 96.88%\n", + "#5417: 96.88%\n", + "#5418: 96.88%\n", + "#5419: 96.88%\n", + "#5420: 96.88%\n", + "#5421: 96.88%\n", + "#5422: 96.88%\n", + "#5423: 96.88%\n", + "#5424: 96.88%\n", + "#5425: 96.89%\n", + "#5426: 96.89%\n", + "#5427: 96.89%\n", + "#5428: 96.89%\n", + "#5429: 96.89%\n", + "#5430: 96.89%\n", + "#5431: 96.89%\n", + "#5432: 96.89%\n", + "#5433: 96.89%\n", + "#5434: 96.89%\n", + "#5435: 96.89%\n", + "#5436: 96.89%\n", + "#5437: 96.89%\n", + "#5438: 96.89%\n", + "#5439: 96.89%\n", + "#5440: 96.89%\n", + "#5441: 96.89%\n", + "#5442: 96.90%\n", + "#5443: 96.90%\n", + "#5444: 96.90%\n", + "#5445: 96.90%\n", + "#5446: 96.90%\n", + "#5447: 96.90%\n", + "#5448: 96.90%\n", + "#5449: 96.90%\n", + "#5450: 96.90%\n", + "#5451: 96.90%\n", + "#5452: 96.90%\n", + "#5453: 96.90%\n", + "#5454: 96.90%\n", + "#5455: 96.90%\n", + "#5456: 96.90%\n", + "#5457: 96.90%\n", + "#5458: 96.90%\n", + "#5459: 96.90%\n", + "#5460: 96.91%\n", + "#5461: 96.91%\n", + "#5462: 96.91%\n", + "#5463: 96.91%\n", + "#5464: 96.91%\n", + "#5465: 96.91%\n", + "#5466: 96.91%\n", + "#5467: 96.91%\n", + "#5468: 96.91%\n", + "#5469: 96.91%\n", + "#5470: 96.91%\n", + "#5471: 96.91%\n", + "#5472: 96.91%\n", + "#5473: 96.91%\n", + "#5474: 96.91%\n", + "#5475: 96.91%\n", + "#5476: 96.91%\n", + "#5477: 96.91%\n", + "#5478: 96.92%\n", + "#5479: 96.92%\n", + "#5480: 96.92%\n", + "#5481: 96.92%\n", + "#5482: 96.92%\n", + "#5483: 96.92%\n", + "#5484: 96.92%\n", + "#5485: 96.92%\n", + "#5486: 96.92%\n", + "#5487: 96.92%\n", + "#5488: 96.92%\n", + "#5489: 96.92%\n", + "#5490: 96.92%\n", + "#5491: 96.92%\n", + "#5492: 96.92%\n", + "#5493: 96.92%\n", + "#5494: 96.92%\n", + "#5495: 96.93%\n", + "#5496: 96.93%\n", + "#5497: 96.93%\n", + "#5498: 96.93%\n", + "#5499: 96.93%\n", + "#5500: 96.93%\n", + "#5501: 96.93%\n", + "#5502: 96.93%\n", + "#5503: 96.93%\n", + "#5504: 96.93%\n", + "#5505: 96.93%\n", + "#5506: 96.93%\n", + "#5507: 96.93%\n", + "#5508: 96.93%\n", + "#5509: 96.93%\n", + 
"#5510: 96.93%\n", + "#5511: 96.93%\n", + "#5512: 96.93%\n", + "#5513: 96.94%\n", + "#5514: 96.94%\n", + "#5515: 96.94%\n", + "#5516: 96.94%\n", + "#5517: 96.94%\n", + "#5518: 96.94%\n", + "#5519: 96.94%\n", + "#5520: 96.94%\n", + "#5521: 96.94%\n", + "#5522: 96.94%\n", + "#5523: 96.94%\n", + "#5524: 96.94%\n", + "#5525: 96.94%\n", + "#5526: 96.94%\n", + "#5527: 96.94%\n", + "#5528: 96.94%\n", + "#5529: 96.94%\n", + "#5530: 96.94%\n", + "#5531: 96.95%\n", + "#5532: 96.95%\n", + "#5533: 96.95%\n", + "#5534: 96.95%\n", + "#5535: 96.95%\n", + "#5536: 96.95%\n", + "#5537: 96.95%\n", + "#5538: 96.95%\n", + "#5539: 96.95%\n", + "#5540: 96.95%\n", + "#5541: 96.95%\n", + "#5542: 96.95%\n", + "#5543: 96.95%\n", + "#5544: 96.95%\n", + "#5545: 96.95%\n", + "#5546: 96.95%\n", + "#5547: 96.95%\n", + "#5548: 96.95%\n", + "#5549: 96.95%\n", + "#5550: 96.96%\n", + "#5551: 96.96%\n", + "#5552: 96.96%\n", + "#5553: 96.96%\n", + "#5554: 96.96%\n", + "#5555: 96.96%\n", + "#5556: 96.96%\n", + "#5557: 96.96%\n", + "#5558: 96.96%\n", + "#5559: 96.96%\n", + "#5560: 96.96%\n", + "#5561: 96.96%\n", + "#5562: 96.96%\n", + "#5563: 96.96%\n", + "#5564: 96.96%\n", + "#5565: 96.96%\n", + "#5566: 96.96%\n", + "#5567: 96.96%\n", + "#5568: 96.97%\n", + "#5569: 96.97%\n", + "#5570: 96.97%\n", + "#5571: 96.97%\n", + "#5572: 96.97%\n", + "#5573: 96.97%\n", + "#5574: 96.97%\n", + "#5575: 96.97%\n", + "#5576: 96.97%\n", + "#5577: 96.97%\n", + "#5578: 96.97%\n", + "#5579: 96.97%\n", + "#5580: 96.97%\n", + "#5581: 96.97%\n", + "#5582: 96.97%\n", + "#5583: 96.97%\n", + "#5584: 96.97%\n", + "#5585: 96.97%\n", + "#5586: 96.98%\n", + "#5587: 96.98%\n", + "#5588: 96.98%\n", + "#5589: 96.98%\n", + "#5590: 96.98%\n", + "#5591: 96.98%\n", + "#5592: 96.98%\n", + "#5593: 96.98%\n", + "#5594: 96.98%\n", + "#5595: 96.98%\n", + "#5596: 96.98%\n", + "#5597: 96.98%\n", + "#5598: 96.98%\n", + "#5599: 96.98%\n", + "#5600: 96.96%\n", + "#5601: 96.97%\n", + "#5602: 96.97%\n", + "#5603: 96.97%\n", + "#5604: 96.97%\n", + 
"#5605: 96.97%\n", + "#5606: 96.97%\n", + "#5607: 96.97%\n", + "#5608: 96.97%\n", + "#5609: 96.97%\n", + "#5610: 96.97%\n", + "#5611: 96.97%\n", + "#5612: 96.97%\n", + "#5613: 96.97%\n", + "#5614: 96.97%\n", + "#5615: 96.97%\n", + "#5616: 96.97%\n", + "#5617: 96.97%\n", + "#5618: 96.97%\n", + "#5619: 96.98%\n", + "#5620: 96.98%\n", + "#5621: 96.98%\n", + "#5622: 96.98%\n", + "#5623: 96.98%\n", + "#5624: 96.98%\n", + "#5625: 96.98%\n", + "#5626: 96.98%\n", + "#5627: 96.98%\n", + "#5628: 96.98%\n", + "#5629: 96.98%\n", + "#5630: 96.98%\n", + "#5631: 96.98%\n", + "#5632: 96.98%\n", + "#5633: 96.98%\n", + "#5634: 96.98%\n", + "#5635: 96.98%\n", + "#5636: 96.98%\n", + "#5637: 96.98%\n", + "#5638: 96.99%\n", + "#5639: 96.99%\n", + "#5640: 96.99%\n", + "#5641: 96.99%\n", + "#5642: 96.99%\n", + "#5643: 96.99%\n", + "#5644: 96.99%\n", + "#5645: 96.99%\n", + "#5646: 96.99%\n", + "#5647: 96.99%\n", + "#5648: 96.99%\n", + "#5649: 96.99%\n", + "#5650: 96.99%\n", + "#5651: 96.99%\n", + "#5652: 96.99%\n", + "#5653: 96.99%\n", + "#5654: 96.99%\n", + "#5655: 96.99%\n", + "#5656: 96.99%\n", + "#5657: 97.00%\n", + "#5658: 97.00%\n", + "#5659: 97.00%\n", + "#5660: 97.00%\n", + "#5661: 97.00%\n", + "#5662: 97.00%\n", + "#5663: 97.00%\n", + "#5664: 97.00%\n", + "#5665: 97.00%\n", + "#5666: 97.00%\n", + "#5667: 97.00%\n", + "#5668: 97.00%\n", + "#5669: 97.00%\n", + "#5670: 97.00%\n", + "#5671: 97.00%\n", + "#5672: 97.00%\n", + "#5673: 97.00%\n", + "#5674: 97.00%\n", + "#5675: 97.00%\n", + "#5676: 97.01%\n", + "#5677: 97.01%\n", + "#5678: 97.01%\n", + "#5679: 97.01%\n", + "#5680: 97.01%\n", + "#5681: 97.01%\n", + "#5682: 97.01%\n", + "#5683: 97.01%\n", + "#5684: 97.01%\n", + "#5685: 97.01%\n", + "#5686: 97.01%\n", + "#5687: 97.01%\n", + "#5688: 97.01%\n", + "#5689: 97.01%\n", + "#5690: 97.01%\n", + "#5691: 97.01%\n", + "#5692: 97.01%\n", + "#5693: 97.01%\n", + "#5694: 97.01%\n", + "#5695: 97.02%\n", + "#5696: 97.02%\n", + "#5697: 97.02%\n", + "#5698: 97.02%\n", + "#5699: 97.02%\n", + 
"#5700: 97.02%\n", + "#5701: 97.02%\n", + "#5702: 97.02%\n", + "#5703: 97.02%\n", + "#5704: 97.02%\n", + "#5705: 97.02%\n", + "#5706: 97.02%\n", + "#5707: 97.02%\n", + "#5708: 97.02%\n", + "#5709: 97.02%\n", + "#5710: 97.02%\n", + "#5711: 97.02%\n", + "#5712: 97.02%\n", + "#5713: 97.02%\n", + "#5714: 97.03%\n", + "#5715: 97.03%\n", + "#5716: 97.03%\n", + "#5717: 97.03%\n", + "#5718: 97.03%\n", + "#5719: 97.03%\n", + "#5720: 97.03%\n", + "#5721: 97.03%\n", + "#5722: 97.03%\n", + "#5723: 97.03%\n", + "#5724: 97.03%\n", + "#5725: 97.03%\n", + "#5726: 97.03%\n", + "#5727: 97.03%\n", + "#5728: 97.03%\n", + "#5729: 97.03%\n", + "#5730: 97.03%\n", + "#5731: 97.03%\n", + "#5732: 97.03%\n", + "#5733: 97.04%\n", + "#5734: 97.02%\n", + "#5735: 97.02%\n", + "#5736: 97.02%\n", + "#5737: 97.02%\n", + "#5738: 97.02%\n", + "#5739: 97.02%\n", + "#5740: 97.02%\n", + "#5741: 97.02%\n", + "#5742: 97.02%\n", + "#5743: 97.02%\n", + "#5744: 97.02%\n", + "#5745: 97.02%\n", + "#5746: 97.02%\n", + "#5747: 97.03%\n", + "#5748: 97.03%\n", + "#5749: 97.03%\n", + "#5750: 97.03%\n", + "#5751: 97.03%\n", + "#5752: 97.03%\n", + "#5753: 97.03%\n", + "#5754: 97.03%\n", + "#5755: 97.03%\n", + "#5756: 97.03%\n", + "#5757: 97.03%\n", + "#5758: 97.03%\n", + "#5759: 97.03%\n", + "#5760: 97.03%\n", + "#5761: 97.03%\n", + "#5762: 97.03%\n", + "#5763: 97.03%\n", + "#5764: 97.03%\n", + "#5765: 97.03%\n", + "#5766: 97.03%\n", + "#5767: 97.04%\n", + "#5768: 97.04%\n", + "#5769: 97.04%\n", + "#5770: 97.04%\n", + "#5771: 97.04%\n", + "#5772: 97.04%\n", + "#5773: 97.04%\n", + "#5774: 97.04%\n", + "#5775: 97.04%\n", + "#5776: 97.04%\n", + "#5777: 97.04%\n", + "#5778: 97.04%\n", + "#5779: 97.04%\n", + "#5780: 97.04%\n", + "#5781: 97.04%\n", + "#5782: 97.04%\n", + "#5783: 97.04%\n", + "#5784: 97.04%\n", + "#5785: 97.04%\n", + "#5786: 97.05%\n", + "#5787: 97.05%\n", + "#5788: 97.05%\n", + "#5789: 97.05%\n", + "#5790: 97.05%\n", + "#5791: 97.05%\n", + "#5792: 97.05%\n", + "#5793: 97.05%\n", + "#5794: 97.05%\n", + 
"#5795: 97.05%\n", + "#5796: 97.05%\n", + "#5797: 97.05%\n", + "#5798: 97.05%\n", + "#5799: 97.05%\n", + "#5800: 97.05%\n", + "#5801: 97.05%\n", + "#5802: 97.05%\n", + "#5803: 97.05%\n", + "#5804: 97.05%\n", + "#5805: 97.05%\n", + "#5806: 97.06%\n", + "#5807: 97.06%\n", + "#5808: 97.06%\n", + "#5809: 97.06%\n", + "#5810: 97.06%\n", + "#5811: 97.06%\n", + "#5812: 97.06%\n", + "#5813: 97.06%\n", + "#5814: 97.06%\n", + "#5815: 97.06%\n", + "#5816: 97.06%\n", + "#5817: 97.06%\n", + "#5818: 97.06%\n", + "#5819: 97.06%\n", + "#5820: 97.06%\n", + "#5821: 97.06%\n", + "#5822: 97.06%\n", + "#5823: 97.06%\n", + "#5824: 97.06%\n", + "#5825: 97.06%\n", + "#5826: 97.07%\n", + "#5827: 97.07%\n", + "#5828: 97.07%\n", + "#5829: 97.07%\n", + "#5830: 97.07%\n", + "#5831: 97.07%\n", + "#5832: 97.07%\n", + "#5833: 97.07%\n", + "#5834: 97.07%\n", + "#5835: 97.07%\n", + "#5836: 97.07%\n", + "#5837: 97.07%\n", + "#5838: 97.07%\n", + "#5839: 97.07%\n", + "#5840: 97.07%\n", + "#5841: 97.06%\n", + "#5842: 97.04%\n", + "#5843: 97.04%\n", + "#5844: 97.04%\n", + "#5845: 97.04%\n", + "#5846: 97.04%\n", + "#5847: 97.04%\n", + "#5848: 97.04%\n", + "#5849: 97.04%\n", + "#5850: 97.04%\n", + "#5851: 97.04%\n", + "#5852: 97.04%\n", + "#5853: 97.04%\n", + "#5854: 97.05%\n", + "#5855: 97.05%\n", + "#5856: 97.05%\n", + "#5857: 97.05%\n", + "#5858: 97.05%\n", + "#5859: 97.05%\n", + "#5860: 97.05%\n", + "#5861: 97.05%\n", + "#5862: 97.05%\n", + "#5863: 97.05%\n", + "#5864: 97.05%\n", + "#5865: 97.05%\n", + "#5866: 97.05%\n", + "#5867: 97.05%\n", + "#5868: 97.05%\n", + "#5869: 97.05%\n", + "#5870: 97.05%\n", + "#5871: 97.05%\n", + "#5872: 97.05%\n", + "#5873: 97.05%\n", + "#5874: 97.06%\n", + "#5875: 97.06%\n", + "#5876: 97.06%\n", + "#5877: 97.06%\n", + "#5878: 97.06%\n", + "#5879: 97.06%\n", + "#5880: 97.06%\n", + "#5881: 97.06%\n", + "#5882: 97.06%\n", + "#5883: 97.06%\n", + "#5884: 97.06%\n", + "#5885: 97.06%\n", + "#5886: 97.06%\n", + "#5887: 97.04%\n", + "#5888: 97.05%\n", + "#5889: 97.05%\n", + 
"#5890: 97.05%\n", + "#5891: 97.05%\n", + "#5892: 97.05%\n", + "#5893: 97.05%\n", + "#5894: 97.05%\n", + "#5895: 97.05%\n", + "#5896: 97.05%\n", + "#5897: 97.05%\n", + "#5898: 97.05%\n", + "#5899: 97.05%\n", + "#5900: 97.05%\n", + "#5901: 97.05%\n", + "#5902: 97.05%\n", + "#5903: 97.05%\n", + "#5904: 97.05%\n", + "#5905: 97.05%\n", + "#5906: 97.05%\n", + "#5907: 97.05%\n", + "#5908: 97.06%\n", + "#5909: 97.06%\n", + "#5910: 97.06%\n", + "#5911: 97.06%\n", + "#5912: 97.06%\n", + "#5913: 97.06%\n", + "#5914: 97.06%\n", + "#5915: 97.06%\n", + "#5916: 97.06%\n", + "#5917: 97.06%\n", + "#5918: 97.06%\n", + "#5919: 97.06%\n", + "#5920: 97.06%\n", + "#5921: 97.06%\n", + "#5922: 97.06%\n", + "#5923: 97.06%\n", + "#5924: 97.06%\n", + "#5925: 97.06%\n", + "#5926: 97.06%\n", + "#5927: 97.06%\n", + "#5928: 97.07%\n", + "#5929: 97.07%\n", + "#5930: 97.07%\n", + "#5931: 97.07%\n", + "#5932: 97.07%\n", + "#5933: 97.07%\n", + "#5934: 97.07%\n", + "#5935: 97.07%\n", + "#5936: 97.07%\n", + "#5937: 97.07%\n", + "#5938: 97.07%\n", + "#5939: 97.07%\n", + "#5940: 97.07%\n", + "#5941: 97.07%\n", + "#5942: 97.07%\n", + "#5943: 97.07%\n", + "#5944: 97.07%\n", + "#5945: 97.07%\n", + "#5946: 97.07%\n", + "#5947: 97.07%\n", + "#5948: 97.08%\n", + "#5949: 97.08%\n", + "#5950: 97.08%\n", + "#5951: 97.08%\n", + "#5952: 97.08%\n", + "#5953: 97.08%\n", + "#5954: 97.08%\n", + "#5955: 97.06%\n", + "#5956: 97.06%\n", + "#5957: 97.06%\n", + "#5958: 97.06%\n", + "#5959: 97.06%\n", + "#5960: 97.06%\n", + "#5961: 97.06%\n", + "#5962: 97.07%\n", + "#5963: 97.07%\n", + "#5964: 97.07%\n", + "#5965: 97.07%\n", + "#5966: 97.07%\n", + "#5967: 97.07%\n", + "#5968: 97.07%\n", + "#5969: 97.07%\n", + "#5970: 97.07%\n", + "#5971: 97.07%\n", + "#5972: 97.07%\n", + "#5973: 97.05%\n", + "#5974: 97.05%\n", + "#5975: 97.05%\n", + "#5976: 97.06%\n", + "#5977: 97.06%\n", + "#5978: 97.06%\n", + "#5979: 97.06%\n", + "#5980: 97.06%\n", + "#5981: 97.06%\n", + "#5982: 97.06%\n", + "#5983: 97.06%\n", + "#5984: 97.06%\n", + 
"#5985: 97.06%\n", + "#5986: 97.06%\n", + "#5987: 97.06%\n", + "#5988: 97.06%\n", + "#5989: 97.06%\n", + "#5990: 97.06%\n", + "#5991: 97.06%\n", + "#5992: 97.06%\n", + "#5993: 97.06%\n", + "#5994: 97.06%\n", + "#5995: 97.06%\n", + "#5996: 97.07%\n", + "#5997: 97.05%\n", + "#5998: 97.05%\n", + "#5999: 97.05%\n", + "#6000: 97.05%\n", + "#6001: 97.05%\n", + "#6002: 97.05%\n", + "#6003: 97.05%\n", + "#6004: 97.05%\n", + "#6005: 97.05%\n", + "#6006: 97.05%\n", + "#6007: 97.05%\n", + "#6008: 97.05%\n", + "#6009: 97.05%\n", + "#6010: 97.06%\n", + "#6011: 97.06%\n", + "#6012: 97.06%\n", + "#6013: 97.06%\n", + "#6014: 97.06%\n", + "#6015: 97.06%\n", + "#6016: 97.06%\n", + "#6017: 97.06%\n", + "#6018: 97.06%\n", + "#6019: 97.06%\n", + "#6020: 97.06%\n", + "#6021: 97.06%\n", + "#6022: 97.06%\n", + "#6023: 97.05%\n", + "#6024: 97.05%\n", + "#6025: 97.05%\n", + "#6026: 97.05%\n", + "#6027: 97.05%\n", + "#6028: 97.05%\n", + "#6029: 97.05%\n", + "#6030: 97.05%\n", + "#6031: 97.05%\n", + "#6032: 97.05%\n", + "#6033: 97.05%\n", + "#6034: 97.05%\n", + "#6035: 97.05%\n", + "#6036: 97.05%\n", + "#6037: 97.05%\n", + "#6038: 97.05%\n", + "#6039: 97.05%\n", + "#6040: 97.05%\n", + "#6041: 97.05%\n", + "#6042: 97.05%\n", + "#6043: 97.05%\n", + "#6044: 97.06%\n", + "#6045: 97.06%\n", + "#6046: 97.06%\n", + "#6047: 97.06%\n", + "#6048: 97.06%\n", + "#6049: 97.06%\n", + "#6050: 97.06%\n", + "#6051: 97.06%\n", + "#6052: 97.06%\n", + "#6053: 97.06%\n", + "#6054: 97.06%\n", + "#6055: 97.06%\n", + "#6056: 97.06%\n", + "#6057: 97.06%\n", + "#6058: 97.06%\n", + "#6059: 97.05%\n", + "#6060: 97.05%\n", + "#6061: 97.05%\n", + "#6062: 97.05%\n", + "#6063: 97.05%\n", + "#6064: 97.05%\n", + "#6065: 97.03%\n", + "#6066: 97.03%\n", + "#6067: 97.03%\n", + "#6068: 97.03%\n", + "#6069: 97.03%\n", + "#6070: 97.04%\n", + "#6071: 97.04%\n", + "#6072: 97.04%\n", + "#6073: 97.04%\n", + "#6074: 97.04%\n", + "#6075: 97.04%\n", + "#6076: 97.04%\n", + "#6077: 97.04%\n", + "#6078: 97.04%\n", + "#6079: 97.04%\n", + 
"#6080: 97.04%\n", + "#6081: 97.04%\n", + "#6082: 97.04%\n", + "#6083: 97.04%\n", + "#6084: 97.04%\n", + "#6085: 97.03%\n", + "#6086: 97.03%\n", + "#6087: 97.03%\n", + "#6088: 97.03%\n", + "#6089: 97.03%\n", + "#6090: 97.03%\n", + "#6091: 97.01%\n", + "#6092: 97.01%\n", + "#6093: 97.01%\n", + "#6094: 97.01%\n", + "#6095: 97.01%\n", + "#6096: 97.01%\n", + "#6097: 97.02%\n", + "#6098: 97.02%\n", + "#6099: 97.02%\n", + "#6100: 97.02%\n", + "#6101: 97.02%\n", + "#6102: 97.02%\n", + "#6103: 97.02%\n", + "#6104: 97.02%\n", + "#6105: 97.02%\n", + "#6106: 97.02%\n", + "#6107: 97.02%\n", + "#6108: 97.02%\n", + "#6109: 97.02%\n", + "#6110: 97.02%\n", + "#6111: 97.02%\n", + "#6112: 97.02%\n", + "#6113: 97.02%\n", + "#6114: 97.02%\n", + "#6115: 97.02%\n", + "#6116: 97.02%\n", + "#6117: 97.03%\n", + "#6118: 97.03%\n", + "#6119: 97.03%\n", + "#6120: 97.03%\n", + "#6121: 97.03%\n", + "#6122: 97.03%\n", + "#6123: 97.03%\n", + "#6124: 97.03%\n", + "#6125: 97.03%\n", + "#6126: 97.03%\n", + "#6127: 97.03%\n", + "#6128: 97.03%\n", + "#6129: 97.03%\n", + "#6130: 97.03%\n", + "#6131: 97.03%\n", + "#6132: 97.03%\n", + "#6133: 97.03%\n", + "#6134: 97.03%\n", + "#6135: 97.03%\n", + "#6136: 97.03%\n", + "#6137: 97.03%\n", + "#6138: 97.04%\n", + "#6139: 97.04%\n", + "#6140: 97.04%\n", + "#6141: 97.04%\n", + "#6142: 97.04%\n", + "#6143: 97.04%\n", + "#6144: 97.04%\n", + "#6145: 97.04%\n", + "#6146: 97.04%\n", + "#6147: 97.04%\n", + "#6148: 97.04%\n", + "#6149: 97.04%\n", + "#6150: 97.04%\n", + "#6151: 97.04%\n", + "#6152: 97.04%\n", + "#6153: 97.04%\n", + "#6154: 97.04%\n", + "#6155: 97.04%\n", + "#6156: 97.04%\n", + "#6157: 97.04%\n", + "#6158: 97.04%\n", + "#6159: 97.05%\n", + "#6160: 97.05%\n", + "#6161: 97.05%\n", + "#6162: 97.05%\n", + "#6163: 97.05%\n", + "#6164: 97.05%\n", + "#6165: 97.05%\n", + "#6166: 97.05%\n", + "#6167: 97.05%\n", + "#6168: 97.05%\n", + "#6169: 97.05%\n", + "#6170: 97.05%\n", + "#6171: 97.05%\n", + "#6172: 97.04%\n", + "#6173: 97.04%\n", + "#6174: 97.04%\n", + 
"#6175: 97.04%\n", + "#6176: 97.04%\n", + "#6177: 97.04%\n", + "#6178: 97.04%\n", + "#6179: 97.04%\n", + "#6180: 97.04%\n", + "#6181: 97.04%\n", + "#6182: 97.04%\n", + "#6183: 97.04%\n", + "#6184: 97.04%\n", + "#6185: 97.04%\n", + "#6186: 97.04%\n", + "#6187: 97.04%\n", + "#6188: 97.04%\n", + "#6189: 97.04%\n", + "#6190: 97.04%\n", + "#6191: 97.04%\n", + "#6192: 97.05%\n", + "#6193: 97.05%\n", + "#6194: 97.05%\n", + "#6195: 97.05%\n", + "#6196: 97.05%\n", + "#6197: 97.05%\n", + "#6198: 97.05%\n", + "#6199: 97.05%\n", + "#6200: 97.05%\n", + "#6201: 97.05%\n", + "#6202: 97.05%\n", + "#6203: 97.05%\n", + "#6204: 97.05%\n", + "#6205: 97.05%\n", + "#6206: 97.05%\n", + "#6207: 97.05%\n", + "#6208: 97.05%\n", + "#6209: 97.05%\n", + "#6210: 97.05%\n", + "#6211: 97.05%\n", + "#6212: 97.05%\n", + "#6213: 97.06%\n", + "#6214: 97.06%\n", + "#6215: 97.06%\n", + "#6216: 97.06%\n", + "#6217: 97.06%\n", + "#6218: 97.06%\n", + "#6219: 97.06%\n", + "#6220: 97.06%\n", + "#6221: 97.06%\n", + "#6222: 97.06%\n", + "#6223: 97.06%\n", + "#6224: 97.06%\n", + "#6225: 97.06%\n", + "#6226: 97.06%\n", + "#6227: 97.06%\n", + "#6228: 97.06%\n", + "#6229: 97.06%\n", + "#6230: 97.06%\n", + "#6231: 97.06%\n", + "#6232: 97.06%\n", + "#6233: 97.06%\n", + "#6234: 97.06%\n", + "#6235: 97.07%\n", + "#6236: 97.07%\n", + "#6237: 97.07%\n", + "#6238: 97.07%\n", + "#6239: 97.07%\n", + "#6240: 97.07%\n", + "#6241: 97.07%\n", + "#6242: 97.07%\n", + "#6243: 97.07%\n", + "#6244: 97.07%\n", + "#6245: 97.07%\n", + "#6246: 97.07%\n", + "#6247: 97.07%\n", + "#6248: 97.07%\n", + "#6249: 97.07%\n", + "#6250: 97.07%\n", + "#6251: 97.07%\n", + "#6252: 97.07%\n", + "#6253: 97.07%\n", + "#6254: 97.07%\n", + "#6255: 97.07%\n", + "#6256: 97.08%\n", + "#6257: 97.08%\n", + "#6258: 97.08%\n", + "#6259: 97.08%\n", + "#6260: 97.08%\n", + "#6261: 97.08%\n", + "#6262: 97.08%\n", + "#6263: 97.08%\n", + "#6264: 97.08%\n", + "#6265: 97.08%\n", + "#6266: 97.08%\n", + "#6267: 97.08%\n", + "#6268: 97.08%\n", + "#6269: 97.08%\n", + 
"#6270: 97.08%\n", + "#6271: 97.08%\n", + "#6272: 97.08%\n", + "#6273: 97.08%\n", + "#6274: 97.08%\n", + "#6275: 97.08%\n", + "#6276: 97.08%\n", + "#6277: 97.09%\n", + "#6278: 97.09%\n", + "#6279: 97.09%\n", + "#6280: 97.09%\n", + "#6281: 97.09%\n", + "#6282: 97.09%\n", + "#6283: 97.09%\n", + "#6284: 97.09%\n", + "#6285: 97.09%\n", + "#6286: 97.09%\n", + "#6287: 97.09%\n", + "#6288: 97.09%\n", + "#6289: 97.09%\n", + "#6290: 97.09%\n", + "#6291: 97.09%\n", + "#6292: 97.09%\n", + "#6293: 97.09%\n", + "#6294: 97.09%\n", + "#6295: 97.09%\n", + "#6296: 97.09%\n", + "#6297: 97.09%\n", + "#6298: 97.09%\n", + "#6299: 97.10%\n", + "#6300: 97.10%\n", + "#6301: 97.10%\n", + "#6302: 97.10%\n", + "#6303: 97.10%\n", + "#6304: 97.10%\n", + "#6305: 97.10%\n", + "#6306: 97.10%\n", + "#6307: 97.10%\n", + "#6308: 97.10%\n", + "#6309: 97.10%\n", + "#6310: 97.10%\n", + "#6311: 97.10%\n", + "#6312: 97.10%\n", + "#6313: 97.10%\n", + "#6314: 97.10%\n", + "#6315: 97.10%\n", + "#6316: 97.10%\n", + "#6317: 97.10%\n", + "#6318: 97.10%\n", + "#6319: 97.10%\n", + "#6320: 97.10%\n", + "#6321: 97.11%\n", + "#6322: 97.11%\n", + "#6323: 97.11%\n", + "#6324: 97.11%\n", + "#6325: 97.11%\n", + "#6326: 97.11%\n", + "#6327: 97.11%\n", + "#6328: 97.11%\n", + "#6329: 97.11%\n", + "#6330: 97.11%\n", + "#6331: 97.11%\n", + "#6332: 97.11%\n", + "#6333: 97.11%\n", + "#6334: 97.11%\n", + "#6335: 97.11%\n", + "#6336: 97.11%\n", + "#6337: 97.11%\n", + "#6338: 97.11%\n", + "#6339: 97.11%\n", + "#6340: 97.11%\n", + "#6341: 97.11%\n", + "#6342: 97.11%\n", + "#6343: 97.12%\n", + "#6344: 97.12%\n", + "#6345: 97.12%\n", + "#6346: 97.12%\n", + "#6347: 97.12%\n", + "#6348: 97.12%\n", + "#6349: 97.12%\n", + "#6350: 97.12%\n", + "#6351: 97.12%\n", + "#6352: 97.12%\n", + "#6353: 97.12%\n", + "#6354: 97.12%\n", + "#6355: 97.12%\n", + "#6356: 97.12%\n", + "#6357: 97.12%\n", + "#6358: 97.12%\n", + "#6359: 97.12%\n", + "#6360: 97.12%\n", + "#6361: 97.12%\n", + "#6362: 97.12%\n", + "#6363: 97.12%\n", + "#6364: 97.12%\n", + 
"#6365: 97.13%\n", + "#6366: 97.13%\n", + "#6367: 97.13%\n", + "#6368: 97.13%\n", + "#6369: 97.13%\n", + "#6370: 97.13%\n", + "#6371: 97.13%\n", + "#6372: 97.13%\n", + "#6373: 97.13%\n", + "#6374: 97.13%\n", + "#6375: 97.13%\n", + "#6376: 97.13%\n", + "#6377: 97.13%\n", + "#6378: 97.13%\n", + "#6379: 97.13%\n", + "#6380: 97.13%\n", + "#6381: 97.13%\n", + "#6382: 97.13%\n", + "#6383: 97.13%\n", + "#6384: 97.13%\n", + "#6385: 97.13%\n", + "#6386: 97.13%\n", + "#6387: 97.14%\n", + "#6388: 97.14%\n", + "#6389: 97.14%\n", + "#6390: 97.14%\n", + "#6391: 97.14%\n", + "#6392: 97.14%\n", + "#6393: 97.14%\n", + "#6394: 97.14%\n", + "#6395: 97.14%\n", + "#6396: 97.14%\n", + "#6397: 97.14%\n", + "#6398: 97.14%\n", + "#6399: 97.14%\n", + "#6400: 97.14%\n", + "#6401: 97.14%\n", + "#6402: 97.14%\n", + "#6403: 97.14%\n", + "#6404: 97.14%\n", + "#6405: 97.14%\n", + "#6406: 97.14%\n", + "#6407: 97.14%\n", + "#6408: 97.14%\n", + "#6409: 97.15%\n", + "#6410: 97.15%\n", + "#6411: 97.15%\n", + "#6412: 97.15%\n", + "#6413: 97.15%\n", + "#6414: 97.15%\n", + "#6415: 97.15%\n", + "#6416: 97.15%\n", + "#6417: 97.15%\n", + "#6418: 97.15%\n", + "#6419: 97.15%\n", + "#6420: 97.15%\n", + "#6421: 97.15%\n", + "#6422: 97.15%\n", + "#6423: 97.15%\n", + "#6424: 97.15%\n", + "#6425: 97.15%\n", + "#6426: 97.15%\n", + "#6427: 97.15%\n", + "#6428: 97.15%\n", + "#6429: 97.15%\n", + "#6430: 97.15%\n", + "#6431: 97.15%\n", + "#6432: 97.16%\n", + "#6433: 97.16%\n", + "#6434: 97.16%\n", + "#6435: 97.16%\n", + "#6436: 97.16%\n", + "#6437: 97.16%\n", + "#6438: 97.16%\n", + "#6439: 97.16%\n", + "#6440: 97.16%\n", + "#6441: 97.16%\n", + "#6442: 97.16%\n", + "#6443: 97.16%\n", + "#6444: 97.16%\n", + "#6445: 97.16%\n", + "#6446: 97.16%\n", + "#6447: 97.16%\n", + "#6448: 97.16%\n", + "#6449: 97.16%\n", + "#6450: 97.16%\n", + "#6451: 97.16%\n", + "#6452: 97.16%\n", + "#6453: 97.16%\n", + "#6454: 97.16%\n", + "#6455: 97.17%\n", + "#6456: 97.17%\n", + "#6457: 97.17%\n", + "#6458: 97.17%\n", + "#6459: 97.17%\n", + 
"#6460: 97.17%\n", + "#6461: 97.17%\n", + "#6462: 97.17%\n", + "#6463: 97.17%\n", + "#6464: 97.17%\n", + "#6465: 97.17%\n", + "#6466: 97.17%\n", + "#6467: 97.17%\n", + "#6468: 97.17%\n", + "#6469: 97.17%\n", + "#6470: 97.17%\n", + "#6471: 97.17%\n", + "#6472: 97.17%\n", + "#6473: 97.17%\n", + "#6474: 97.17%\n", + "#6475: 97.17%\n", + "#6476: 97.17%\n", + "#6477: 97.18%\n", + "#6478: 97.18%\n", + "#6479: 97.18%\n", + "#6480: 97.18%\n", + "#6481: 97.18%\n", + "#6482: 97.18%\n", + "#6483: 97.18%\n", + "#6484: 97.18%\n", + "#6485: 97.18%\n", + "#6486: 97.18%\n", + "#6487: 97.18%\n", + "#6488: 97.18%\n", + "#6489: 97.18%\n", + "#6490: 97.18%\n", + "#6491: 97.18%\n", + "#6492: 97.18%\n", + "#6493: 97.18%\n", + "#6494: 97.18%\n", + "#6495: 97.18%\n", + "#6496: 97.18%\n", + "#6497: 97.18%\n", + "#6498: 97.18%\n", + "#6499: 97.18%\n", + "#6500: 97.19%\n", + "#6501: 97.19%\n", + "#6502: 97.19%\n", + "#6503: 97.19%\n", + "#6504: 97.19%\n", + "#6505: 97.17%\n", + "#6506: 97.17%\n", + "#6507: 97.17%\n", + "#6508: 97.17%\n", + "#6509: 97.17%\n", + "#6510: 97.17%\n", + "#6511: 97.17%\n", + "#6512: 97.17%\n", + "#6513: 97.18%\n", + "#6514: 97.18%\n", + "#6515: 97.18%\n", + "#6516: 97.18%\n", + "#6517: 97.18%\n", + "#6518: 97.18%\n", + "#6519: 97.18%\n", + "#6520: 97.18%\n", + "#6521: 97.18%\n", + "#6522: 97.18%\n", + "#6523: 97.18%\n", + "#6524: 97.18%\n", + "#6525: 97.18%\n", + "#6526: 97.18%\n", + "#6527: 97.18%\n", + "#6528: 97.18%\n", + "#6529: 97.18%\n", + "#6530: 97.18%\n", + "#6531: 97.18%\n", + "#6532: 97.18%\n", + "#6533: 97.18%\n", + "#6534: 97.18%\n", + "#6535: 97.18%\n", + "#6536: 97.19%\n", + "#6537: 97.19%\n", + "#6538: 97.19%\n", + "#6539: 97.19%\n", + "#6540: 97.19%\n", + "#6541: 97.19%\n", + "#6542: 97.19%\n", + "#6543: 97.19%\n", + "#6544: 97.19%\n", + "#6545: 97.19%\n", + "#6546: 97.19%\n", + "#6547: 97.19%\n", + "#6548: 97.19%\n", + "#6549: 97.19%\n", + "#6550: 97.19%\n", + "#6551: 97.19%\n", + "#6552: 97.19%\n", + "#6553: 97.19%\n", + "#6554: 97.19%\n", + 
"#6555: 97.18%\n", + "#6556: 97.18%\n", + "#6557: 97.18%\n", + "#6558: 97.18%\n", + "#6559: 97.18%\n", + "#6560: 97.18%\n", + "#6561: 97.18%\n", + "#6562: 97.18%\n", + "#6563: 97.18%\n", + "#6564: 97.18%\n", + "#6565: 97.18%\n", + "#6566: 97.18%\n", + "#6567: 97.18%\n", + "#6568: 97.18%\n", + "#6569: 97.18%\n", + "#6570: 97.18%\n", + "#6571: 97.19%\n", + "#6572: 97.17%\n", + "#6573: 97.17%\n", + "#6574: 97.17%\n", + "#6575: 97.17%\n", + "#6576: 97.17%\n", + "#6577: 97.17%\n", + "#6578: 97.16%\n", + "#6579: 97.16%\n", + "#6580: 97.16%\n", + "#6581: 97.16%\n", + "#6582: 97.16%\n", + "#6583: 97.16%\n", + "#6584: 97.16%\n", + "#6585: 97.16%\n", + "#6586: 97.16%\n", + "#6587: 97.16%\n", + "#6588: 97.16%\n", + "#6589: 97.16%\n", + "#6590: 97.16%\n", + "#6591: 97.16%\n", + "#6592: 97.16%\n", + "#6593: 97.16%\n", + "#6594: 97.16%\n", + "#6595: 97.16%\n", + "#6596: 97.17%\n", + "#6597: 97.15%\n", + "#6598: 97.15%\n", + "#6599: 97.14%\n", + "#6600: 97.14%\n", + "#6601: 97.14%\n", + "#6602: 97.14%\n", + "#6603: 97.14%\n", + "#6604: 97.14%\n", + "#6605: 97.14%\n", + "#6606: 97.14%\n", + "#6607: 97.14%\n", + "#6608: 97.14%\n", + "#6609: 97.14%\n", + "#6610: 97.14%\n", + "#6611: 97.14%\n", + "#6612: 97.14%\n", + "#6613: 97.14%\n", + "#6614: 97.14%\n", + "#6615: 97.14%\n", + "#6616: 97.14%\n", + "#6617: 97.14%\n", + "#6618: 97.14%\n", + "#6619: 97.15%\n", + "#6620: 97.15%\n", + "#6621: 97.15%\n", + "#6622: 97.15%\n", + "#6623: 97.15%\n", + "#6624: 97.15%\n", + "#6625: 97.15%\n", + "#6626: 97.15%\n", + "#6627: 97.15%\n", + "#6628: 97.15%\n", + "#6629: 97.15%\n", + "#6630: 97.15%\n", + "#6631: 97.15%\n", + "#6632: 97.15%\n", + "#6633: 97.15%\n", + "#6634: 97.15%\n", + "#6635: 97.15%\n", + "#6636: 97.15%\n", + "#6637: 97.15%\n", + "#6638: 97.15%\n", + "#6639: 97.15%\n", + "#6640: 97.15%\n", + "#6641: 97.15%\n", + "#6642: 97.15%\n", + "#6643: 97.16%\n", + "#6644: 97.16%\n", + "#6645: 97.16%\n", + "#6646: 97.16%\n", + "#6647: 97.16%\n", + "#6648: 97.16%\n", + "#6649: 97.16%\n", + 
"#6650: 97.16%\n", + "#6651: 97.16%\n", + "#6652: 97.16%\n", + "#6653: 97.16%\n", + "#6654: 97.16%\n", + "#6655: 97.16%\n", + "#6656: 97.16%\n", + "#6657: 97.16%\n", + "#6658: 97.16%\n", + "#6659: 97.16%\n", + "#6660: 97.16%\n", + "#6661: 97.16%\n", + "#6662: 97.16%\n", + "#6663: 97.16%\n", + "#6664: 97.16%\n", + "#6665: 97.16%\n", + "#6666: 97.17%\n", + "#6667: 97.17%\n", + "#6668: 97.17%\n", + "#6669: 97.17%\n", + "#6670: 97.17%\n", + "#6671: 97.17%\n", + "#6672: 97.17%\n", + "#6673: 97.17%\n", + "#6674: 97.17%\n", + "#6675: 97.17%\n", + "#6676: 97.17%\n", + "#6677: 97.17%\n", + "#6678: 97.17%\n", + "#6679: 97.17%\n", + "#6680: 97.17%\n", + "#6681: 97.17%\n", + "#6682: 97.17%\n", + "#6683: 97.17%\n", + "#6684: 97.17%\n", + "#6685: 97.17%\n", + "#6686: 97.17%\n", + "#6687: 97.17%\n", + "#6688: 97.17%\n", + "#6689: 97.17%\n", + "#6690: 97.18%\n", + "#6691: 97.18%\n", + "#6692: 97.18%\n", + "#6693: 97.18%\n", + "#6694: 97.18%\n", + "#6695: 97.18%\n", + "#6696: 97.18%\n", + "#6697: 97.18%\n", + "#6698: 97.18%\n", + "#6699: 97.18%\n", + "#6700: 97.18%\n", + "#6701: 97.18%\n", + "#6702: 97.18%\n", + "#6703: 97.18%\n", + "#6704: 97.18%\n", + "#6705: 97.18%\n", + "#6706: 97.18%\n", + "#6707: 97.18%\n", + "#6708: 97.18%\n", + "#6709: 97.18%\n", + "#6710: 97.18%\n", + "#6711: 97.18%\n", + "#6712: 97.18%\n", + "#6713: 97.18%\n", + "#6714: 97.19%\n", + "#6715: 97.19%\n", + "#6716: 97.19%\n", + "#6717: 97.19%\n", + "#6718: 97.19%\n", + "#6719: 97.19%\n", + "#6720: 97.19%\n", + "#6721: 97.19%\n", + "#6722: 97.19%\n", + "#6723: 97.19%\n", + "#6724: 97.19%\n", + "#6725: 97.19%\n", + "#6726: 97.19%\n", + "#6727: 97.19%\n", + "#6728: 97.19%\n", + "#6729: 97.19%\n", + "#6730: 97.19%\n", + "#6731: 97.19%\n", + "#6732: 97.19%\n", + "#6733: 97.19%\n", + "#6734: 97.19%\n", + "#6735: 97.19%\n", + "#6736: 97.19%\n", + "#6737: 97.20%\n", + "#6738: 97.20%\n", + "#6739: 97.20%\n", + "#6740: 97.18%\n", + "#6741: 97.18%\n", + "#6742: 97.18%\n", + "#6743: 97.18%\n", + "#6744: 97.18%\n", + 
"#6745: 97.18%\n", + "#6746: 97.18%\n", + "#6747: 97.18%\n", + "#6748: 97.18%\n", + "#6749: 97.19%\n", + "#6750: 97.19%\n", + "#6751: 97.19%\n", + "#6752: 97.19%\n", + "#6753: 97.19%\n", + "#6754: 97.19%\n", + "#6755: 97.17%\n", + "#6756: 97.16%\n", + "#6757: 97.16%\n", + "#6758: 97.16%\n", + "#6759: 97.16%\n", + "#6760: 97.16%\n", + "#6761: 97.16%\n", + "#6762: 97.16%\n", + "#6763: 97.16%\n", + "#6764: 97.16%\n", + "#6765: 97.16%\n", + "#6766: 97.16%\n", + "#6767: 97.16%\n", + "#6768: 97.16%\n", + "#6769: 97.16%\n", + "#6770: 97.16%\n", + "#6771: 97.16%\n", + "#6772: 97.17%\n", + "#6773: 97.17%\n", + "#6774: 97.17%\n", + "#6775: 97.17%\n", + "#6776: 97.17%\n", + "#6777: 97.17%\n", + "#6778: 97.17%\n", + "#6779: 97.17%\n", + "#6780: 97.17%\n", + "#6781: 97.17%\n", + "#6782: 97.17%\n", + "#6783: 97.16%\n", + "#6784: 97.16%\n", + "#6785: 97.16%\n", + "#6786: 97.16%\n", + "#6787: 97.16%\n", + "#6788: 97.16%\n", + "#6789: 97.16%\n", + "#6790: 97.16%\n", + "#6791: 97.16%\n", + "#6792: 97.16%\n", + "#6793: 97.16%\n", + "#6794: 97.16%\n", + "#6795: 97.16%\n", + "#6796: 97.16%\n", + "#6797: 97.16%\n", + "#6798: 97.16%\n", + "#6799: 97.16%\n", + "#6800: 97.16%\n", + "#6801: 97.16%\n", + "#6802: 97.16%\n", + "#6803: 97.16%\n", + "#6804: 97.16%\n", + "#6805: 97.16%\n", + "#6806: 97.16%\n", + "#6807: 97.17%\n", + "#6808: 97.17%\n", + "#6809: 97.17%\n", + "#6810: 97.17%\n", + "#6811: 97.17%\n", + "#6812: 97.17%\n", + "#6813: 97.17%\n", + "#6814: 97.17%\n", + "#6815: 97.17%\n", + "#6816: 97.17%\n", + "#6817: 97.17%\n", + "#6818: 97.17%\n", + "#6819: 97.17%\n", + "#6820: 97.17%\n", + "#6821: 97.17%\n", + "#6822: 97.17%\n", + "#6823: 97.17%\n", + "#6824: 97.17%\n", + "#6825: 97.17%\n", + "#6826: 97.17%\n", + "#6827: 97.17%\n", + "#6828: 97.17%\n", + "#6829: 97.17%\n", + "#6830: 97.17%\n", + "#6831: 97.18%\n", + "#6832: 97.18%\n", + "#6833: 97.18%\n", + "#6834: 97.18%\n", + "#6835: 97.18%\n", + "#6836: 97.18%\n", + "#6837: 97.18%\n", + "#6838: 97.18%\n", + "#6839: 97.18%\n", + 
"#6840: 97.18%\n", + "#6841: 97.18%\n", + "#6842: 97.18%\n", + "#6843: 97.18%\n", + "#6844: 97.18%\n", + "#6845: 97.18%\n", + "#6846: 97.18%\n", + "#6847: 97.18%\n", + "#6848: 97.18%\n", + "#6849: 97.18%\n", + "#6850: 97.18%\n", + "#6851: 97.18%\n", + "#6852: 97.18%\n", + "#6853: 97.18%\n", + "#6854: 97.18%\n", + "#6855: 97.18%\n", + "#6856: 97.19%\n", + "#6857: 97.19%\n", + "#6858: 97.19%\n", + "#6859: 97.19%\n", + "#6860: 97.19%\n", + "#6861: 97.19%\n", + "#6862: 97.19%\n", + "#6863: 97.19%\n", + "#6864: 97.19%\n", + "#6865: 97.19%\n", + "#6866: 97.19%\n", + "#6867: 97.19%\n", + "#6868: 97.19%\n", + "#6869: 97.19%\n", + "#6870: 97.19%\n", + "#6871: 97.19%\n", + "#6872: 97.19%\n", + "#6873: 97.19%\n", + "#6874: 97.19%\n", + "#6875: 97.19%\n", + "#6876: 97.19%\n", + "#6877: 97.19%\n", + "#6878: 97.19%\n", + "#6879: 97.19%\n", + "#6880: 97.20%\n", + "#6881: 97.20%\n", + "#6882: 97.20%\n", + "#6883: 97.20%\n", + "#6884: 97.20%\n", + "#6885: 97.20%\n", + "#6886: 97.20%\n", + "#6887: 97.20%\n", + "#6888: 97.20%\n", + "#6889: 97.20%\n", + "#6890: 97.20%\n", + "#6891: 97.20%\n", + "#6892: 97.20%\n", + "#6893: 97.20%\n", + "#6894: 97.20%\n", + "#6895: 97.20%\n", + "#6896: 97.20%\n", + "#6897: 97.20%\n", + "#6898: 97.20%\n", + "#6899: 97.20%\n", + "#6900: 97.20%\n", + "#6901: 97.20%\n", + "#6902: 97.20%\n", + "#6903: 97.20%\n", + "#6904: 97.20%\n", + "#6905: 97.21%\n", + "#6906: 97.21%\n", + "#6907: 97.21%\n", + "#6908: 97.21%\n", + "#6909: 97.21%\n", + "#6910: 97.21%\n", + "#6911: 97.21%\n", + "#6912: 97.21%\n", + "#6913: 97.21%\n", + "#6914: 97.21%\n", + "#6915: 97.21%\n", + "#6916: 97.21%\n", + "#6917: 97.21%\n", + "#6918: 97.21%\n", + "#6919: 97.21%\n", + "#6920: 97.21%\n", + "#6921: 97.21%\n", + "#6922: 97.21%\n", + "#6923: 97.21%\n", + "#6924: 97.21%\n", + "#6925: 97.21%\n", + "#6926: 97.21%\n", + "#6927: 97.21%\n", + "#6928: 97.21%\n", + "#6929: 97.22%\n", + "#6930: 97.22%\n", + "#6931: 97.22%\n", + "#6932: 97.22%\n", + "#6933: 97.22%\n", + "#6934: 97.22%\n", + 
"#6935: 97.22%\n", + "#6936: 97.22%\n", + "#6937: 97.22%\n", + "#6938: 97.22%\n", + "#6939: 97.22%\n", + "#6940: 97.22%\n", + "#6941: 97.22%\n", + "#6942: 97.22%\n", + "#6943: 97.22%\n", + "#6944: 97.22%\n", + "#6945: 97.22%\n", + "#6946: 97.22%\n", + "#6947: 97.22%\n", + "#6948: 97.22%\n", + "#6949: 97.22%\n", + "#6950: 97.22%\n", + "#6951: 97.22%\n", + "#6952: 97.22%\n", + "#6953: 97.22%\n", + "#6954: 97.23%\n", + "#6955: 97.23%\n", + "#6956: 97.23%\n", + "#6957: 97.23%\n", + "#6958: 97.23%\n", + "#6959: 97.23%\n", + "#6960: 97.23%\n", + "#6961: 97.23%\n", + "#6962: 97.23%\n", + "#6963: 97.23%\n", + "#6964: 97.23%\n", + "#6965: 97.23%\n", + "#6966: 97.23%\n", + "#6967: 97.23%\n", + "#6968: 97.23%\n", + "#6969: 97.23%\n", + "#6970: 97.23%\n", + "#6971: 97.23%\n", + "#6972: 97.23%\n", + "#6973: 97.23%\n", + "#6974: 97.23%\n", + "#6975: 97.23%\n", + "#6976: 97.23%\n", + "#6977: 97.23%\n", + "#6978: 97.23%\n", + "#6979: 97.23%\n", + "#6980: 97.24%\n", + "#6981: 97.24%\n", + "#6982: 97.24%\n", + "#6983: 97.24%\n", + "#6984: 97.24%\n", + "#6985: 97.24%\n", + "#6986: 97.24%\n", + "#6987: 97.24%\n", + "#6988: 97.24%\n", + "#6989: 97.24%\n", + "#6990: 97.24%\n", + "#6991: 97.24%\n", + "#6992: 97.24%\n", + "#6993: 97.24%\n", + "#6994: 97.24%\n", + "#6995: 97.24%\n", + "#6996: 97.24%\n", + "#6997: 97.24%\n", + "#6998: 97.24%\n", + "#6999: 97.24%\n", + "#7000: 97.24%\n", + "#7001: 97.24%\n", + "#7002: 97.24%\n", + "#7003: 97.24%\n", + "#7004: 97.24%\n", + "#7005: 97.25%\n", + "#7006: 97.25%\n", + "#7007: 97.25%\n", + "#7008: 97.25%\n", + "#7009: 97.25%\n", + "#7010: 97.25%\n", + "#7011: 97.25%\n", + "#7012: 97.25%\n", + "#7013: 97.25%\n", + "#7014: 97.25%\n", + "#7015: 97.25%\n", + "#7016: 97.25%\n", + "#7017: 97.25%\n", + "#7018: 97.25%\n", + "#7019: 97.25%\n", + "#7020: 97.25%\n", + "#7021: 97.25%\n", + "#7022: 97.25%\n", + "#7023: 97.25%\n", + "#7024: 97.25%\n", + "#7025: 97.25%\n", + "#7026: 97.25%\n", + "#7027: 97.25%\n", + "#7028: 97.25%\n", + "#7029: 97.25%\n", + 
"#7030: 97.26%\n", + "#7031: 97.26%\n", + "#7032: 97.26%\n", + "#7033: 97.26%\n", + "#7034: 97.26%\n", + "#7035: 97.26%\n", + "#7036: 97.26%\n", + "#7037: 97.26%\n", + "#7038: 97.26%\n", + "#7039: 97.26%\n", + "#7040: 97.24%\n", + "#7041: 97.25%\n", + "#7042: 97.25%\n", + "#7043: 97.25%\n", + "#7044: 97.25%\n", + "#7045: 97.25%\n", + "#7046: 97.25%\n", + "#7047: 97.25%\n", + "#7048: 97.25%\n", + "#7049: 97.25%\n", + "#7050: 97.25%\n", + "#7051: 97.25%\n", + "#7052: 97.25%\n", + "#7053: 97.25%\n", + "#7054: 97.25%\n", + "#7055: 97.25%\n", + "#7056: 97.25%\n", + "#7057: 97.25%\n", + "#7058: 97.25%\n", + "#7059: 97.25%\n", + "#7060: 97.25%\n", + "#7061: 97.25%\n", + "#7062: 97.25%\n", + "#7063: 97.25%\n", + "#7064: 97.25%\n", + "#7065: 97.25%\n", + "#7066: 97.25%\n", + "#7067: 97.26%\n", + "#7068: 97.26%\n", + "#7069: 97.26%\n", + "#7070: 97.26%\n", + "#7071: 97.26%\n", + "#7072: 97.26%\n", + "#7073: 97.26%\n", + "#7074: 97.26%\n", + "#7075: 97.26%\n", + "#7076: 97.26%\n", + "#7077: 97.26%\n", + "#7078: 97.26%\n", + "#7079: 97.26%\n", + "#7080: 97.26%\n", + "#7081: 97.26%\n", + "#7082: 97.26%\n", + "#7083: 97.26%\n", + "#7084: 97.26%\n", + "#7085: 97.26%\n", + "#7086: 97.26%\n", + "#7087: 97.26%\n", + "#7088: 97.26%\n", + "#7089: 97.26%\n", + "#7090: 97.26%\n", + "#7091: 97.26%\n", + "#7092: 97.26%\n", + "#7093: 97.27%\n", + "#7094: 97.27%\n", + "#7095: 97.27%\n", + "#7096: 97.27%\n", + "#7097: 97.27%\n", + "#7098: 97.27%\n", + "#7099: 97.27%\n", + "#7100: 97.27%\n", + "#7101: 97.27%\n", + "#7102: 97.27%\n", + "#7103: 97.27%\n", + "#7104: 97.27%\n", + "#7105: 97.27%\n", + "#7106: 97.27%\n", + "#7107: 97.27%\n", + "#7108: 97.27%\n", + "#7109: 97.27%\n", + "#7110: 97.27%\n", + "#7111: 97.27%\n", + "#7112: 97.27%\n", + "#7113: 97.27%\n", + "#7114: 97.27%\n", + "#7115: 97.27%\n", + "#7116: 97.27%\n", + "#7117: 97.27%\n", + "#7118: 97.27%\n", + "#7119: 97.28%\n", + "#7120: 97.28%\n", + "#7121: 97.26%\n", + "#7122: 97.26%\n", + "#7123: 97.26%\n", + "#7124: 97.26%\n", + 
"#7125: 97.26%\n", + "#7126: 97.26%\n", + "#7127: 97.26%\n", + "#7128: 97.26%\n", + "#7129: 97.27%\n", + "#7130: 97.27%\n", + "#7131: 97.27%\n", + "#7132: 97.27%\n", + "#7133: 97.27%\n", + "#7134: 97.27%\n", + "#7135: 97.27%\n", + "#7136: 97.27%\n", + "#7137: 97.27%\n", + "#7138: 97.27%\n", + "#7139: 97.27%\n", + "#7140: 97.27%\n", + "#7141: 97.27%\n", + "#7142: 97.27%\n", + "#7143: 97.27%\n", + "#7144: 97.27%\n", + "#7145: 97.27%\n", + "#7146: 97.27%\n", + "#7147: 97.27%\n", + "#7148: 97.27%\n", + "#7149: 97.27%\n", + "#7150: 97.27%\n", + "#7151: 97.27%\n", + "#7152: 97.27%\n", + "#7153: 97.27%\n", + "#7154: 97.27%\n", + "#7155: 97.28%\n", + "#7156: 97.28%\n", + "#7157: 97.28%\n", + "#7158: 97.28%\n", + "#7159: 97.28%\n", + "#7160: 97.28%\n", + "#7161: 97.28%\n", + "#7162: 97.28%\n", + "#7163: 97.28%\n", + "#7164: 97.28%\n", + "#7165: 97.28%\n", + "#7166: 97.28%\n", + "#7167: 97.28%\n", + "#7168: 97.28%\n", + "#7169: 97.28%\n", + "#7170: 97.28%\n", + "#7171: 97.28%\n", + "#7172: 97.28%\n", + "#7173: 97.28%\n", + "#7174: 97.28%\n", + "#7175: 97.28%\n", + "#7176: 97.28%\n", + "#7177: 97.28%\n", + "#7178: 97.28%\n", + "#7179: 97.28%\n", + "#7180: 97.28%\n", + "#7181: 97.28%\n", + "#7182: 97.29%\n", + "#7183: 97.29%\n", + "#7184: 97.29%\n", + "#7185: 97.29%\n", + "#7186: 97.29%\n", + "#7187: 97.29%\n", + "#7188: 97.29%\n", + "#7189: 97.29%\n", + "#7190: 97.29%\n", + "#7191: 97.29%\n", + "#7192: 97.29%\n", + "#7193: 97.29%\n", + "#7194: 97.29%\n", + "#7195: 97.29%\n", + "#7196: 97.29%\n", + "#7197: 97.29%\n", + "#7198: 97.29%\n", + "#7199: 97.29%\n", + "#7200: 97.29%\n", + "#7201: 97.29%\n", + "#7202: 97.29%\n", + "#7203: 97.29%\n", + "#7204: 97.29%\n", + "#7205: 97.29%\n", + "#7206: 97.29%\n", + "#7207: 97.29%\n", + "#7208: 97.30%\n", + "#7209: 97.30%\n", + "#7210: 97.30%\n", + "#7211: 97.30%\n", + "#7212: 97.30%\n", + "#7213: 97.30%\n", + "#7214: 97.30%\n", + "#7215: 97.30%\n", + "#7216: 97.30%\n", + "#7217: 97.30%\n", + "#7218: 97.30%\n", + "#7219: 97.30%\n", + 
"#7220: 97.30%\n", + "#7221: 97.30%\n", + "#7222: 97.30%\n", + "#7223: 97.30%\n", + "#7224: 97.30%\n", + "#7225: 97.30%\n", + "#7226: 97.30%\n", + "#7227: 97.30%\n", + "#7228: 97.30%\n", + "#7229: 97.30%\n", + "#7230: 97.30%\n", + "#7231: 97.30%\n", + "#7232: 97.30%\n", + "#7233: 97.30%\n", + "#7234: 97.30%\n", + "#7235: 97.31%\n", + "#7236: 97.31%\n", + "#7237: 97.31%\n", + "#7238: 97.31%\n", + "#7239: 97.31%\n", + "#7240: 97.31%\n", + "#7241: 97.31%\n", + "#7242: 97.31%\n", + "#7243: 97.31%\n", + "#7244: 97.31%\n", + "#7245: 97.31%\n", + "#7246: 97.31%\n", + "#7247: 97.31%\n", + "#7248: 97.31%\n", + "#7249: 97.31%\n", + "#7250: 97.31%\n", + "#7251: 97.31%\n", + "#7252: 97.31%\n", + "#7253: 97.31%\n", + "#7254: 97.31%\n", + "#7255: 97.31%\n", + "#7256: 97.31%\n", + "#7257: 97.31%\n", + "#7258: 97.31%\n", + "#7259: 97.31%\n", + "#7260: 97.31%\n", + "#7261: 97.31%\n", + "#7262: 97.32%\n", + "#7263: 97.32%\n", + "#7264: 97.32%\n", + "#7265: 97.32%\n", + "#7266: 97.32%\n", + "#7267: 97.32%\n", + "#7268: 97.32%\n", + "#7269: 97.32%\n", + "#7270: 97.32%\n", + "#7271: 97.32%\n", + "#7272: 97.32%\n", + "#7273: 97.32%\n", + "#7274: 97.32%\n", + "#7275: 97.32%\n", + "#7276: 97.32%\n", + "#7277: 97.32%\n", + "#7278: 97.32%\n", + "#7279: 97.32%\n", + "#7280: 97.32%\n", + "#7281: 97.32%\n", + "#7282: 97.32%\n", + "#7283: 97.32%\n", + "#7284: 97.32%\n", + "#7285: 97.32%\n", + "#7286: 97.32%\n", + "#7287: 97.32%\n", + "#7288: 97.32%\n", + "#7289: 97.33%\n", + "#7290: 97.33%\n", + "#7291: 97.33%\n", + "#7292: 97.33%\n", + "#7293: 97.33%\n", + "#7294: 97.33%\n", + "#7295: 97.33%\n", + "#7296: 97.33%\n", + "#7297: 97.33%\n", + "#7298: 97.33%\n", + "#7299: 97.33%\n", + "#7300: 97.33%\n", + "#7301: 97.33%\n", + "#7302: 97.33%\n", + "#7303: 97.33%\n", + "#7304: 97.33%\n", + "#7305: 97.33%\n", + "#7306: 97.33%\n", + "#7307: 97.33%\n", + "#7308: 97.33%\n", + "#7309: 97.33%\n", + "#7310: 97.33%\n", + "#7311: 97.33%\n", + "#7312: 97.33%\n", + "#7313: 97.33%\n", + "#7314: 97.33%\n", + 
"#7315: 97.33%\n", + "#7316: 97.33%\n", + "#7317: 97.34%\n", + "#7318: 97.34%\n", + "#7319: 97.34%\n", + "#7320: 97.34%\n", + "#7321: 97.34%\n", + "#7322: 97.34%\n", + "#7323: 97.34%\n", + "#7324: 97.34%\n", + "#7325: 97.34%\n", + "#7326: 97.34%\n", + "#7327: 97.34%\n", + "#7328: 97.34%\n", + "#7329: 97.34%\n", + "#7330: 97.34%\n", + "#7331: 97.34%\n", + "#7332: 97.34%\n", + "#7333: 97.34%\n", + "#7334: 97.34%\n", + "#7335: 97.34%\n", + "#7336: 97.34%\n", + "#7337: 97.34%\n", + "#7338: 97.34%\n", + "#7339: 97.34%\n", + "#7340: 97.34%\n", + "#7341: 97.34%\n", + "#7342: 97.34%\n", + "#7343: 97.34%\n", + "#7344: 97.35%\n", + "#7345: 97.35%\n", + "#7346: 97.35%\n", + "#7347: 97.35%\n", + "#7348: 97.35%\n", + "#7349: 97.35%\n", + "#7350: 97.35%\n", + "#7351: 97.35%\n", + "#7352: 97.35%\n", + "#7353: 97.35%\n", + "#7354: 97.35%\n", + "#7355: 97.35%\n", + "#7356: 97.35%\n", + "#7357: 97.35%\n", + "#7358: 97.35%\n", + "#7359: 97.35%\n", + "#7360: 97.35%\n", + "#7361: 97.35%\n", + "#7362: 97.35%\n", + "#7363: 97.35%\n", + "#7364: 97.35%\n", + "#7365: 97.35%\n", + "#7366: 97.35%\n", + "#7367: 97.35%\n", + "#7368: 97.35%\n", + "#7369: 97.35%\n", + "#7370: 97.35%\n", + "#7371: 97.35%\n", + "#7372: 97.36%\n", + "#7373: 97.36%\n", + "#7374: 97.36%\n", + "#7375: 97.36%\n", + "#7376: 97.36%\n", + "#7377: 97.36%\n", + "#7378: 97.36%\n", + "#7379: 97.36%\n", + "#7380: 97.36%\n", + "#7381: 97.36%\n", + "#7382: 97.36%\n", + "#7383: 97.36%\n", + "#7384: 97.36%\n", + "#7385: 97.36%\n", + "#7386: 97.36%\n", + "#7387: 97.36%\n", + "#7388: 97.36%\n", + "#7389: 97.36%\n", + "#7390: 97.36%\n", + "#7391: 97.36%\n", + "#7392: 97.36%\n", + "#7393: 97.36%\n", + "#7394: 97.36%\n", + "#7395: 97.36%\n", + "#7396: 97.36%\n", + "#7397: 97.36%\n", + "#7398: 97.36%\n", + "#7399: 97.36%\n", + "#7400: 97.37%\n", + "#7401: 97.37%\n", + "#7402: 97.37%\n", + "#7403: 97.37%\n", + "#7404: 97.37%\n", + "#7405: 97.37%\n", + "#7406: 97.37%\n", + "#7407: 97.37%\n", + "#7408: 97.37%\n", + "#7409: 97.37%\n", + 
"#7410: 97.37%\n", + "#7411: 97.37%\n", + "#7412: 97.37%\n", + "#7413: 97.37%\n", + "#7414: 97.37%\n", + "#7415: 97.37%\n", + "#7416: 97.37%\n", + "#7417: 97.37%\n", + "#7418: 97.37%\n", + "#7419: 97.37%\n", + "#7420: 97.37%\n", + "#7421: 97.37%\n", + "#7422: 97.37%\n", + "#7423: 97.37%\n", + "#7424: 97.37%\n", + "#7425: 97.37%\n", + "#7426: 97.37%\n", + "#7427: 97.37%\n", + "#7428: 97.38%\n", + "#7429: 97.38%\n", + "#7430: 97.38%\n", + "#7431: 97.38%\n", + "#7432: 97.38%\n", + "#7433: 97.38%\n", + "#7434: 97.38%\n", + "#7435: 97.38%\n", + "#7436: 97.38%\n", + "#7437: 97.38%\n", + "#7438: 97.38%\n", + "#7439: 97.38%\n", + "#7440: 97.38%\n", + "#7441: 97.38%\n", + "#7442: 97.38%\n", + "#7443: 97.38%\n", + "#7444: 97.38%\n", + "#7445: 97.38%\n", + "#7446: 97.38%\n", + "#7447: 97.38%\n", + "#7448: 97.38%\n", + "#7449: 97.38%\n", + "#7450: 97.38%\n", + "#7451: 97.37%\n", + "#7452: 97.37%\n", + "#7453: 97.37%\n", + "#7454: 97.37%\n", + "#7455: 97.37%\n", + "#7456: 97.37%\n", + "#7457: 97.37%\n", + "#7458: 97.37%\n", + "#7459: 97.37%\n", + "#7460: 97.37%\n", + "#7461: 97.37%\n", + "#7462: 97.37%\n", + "#7463: 97.37%\n", + "#7464: 97.37%\n", + "#7465: 97.37%\n", + "#7466: 97.38%\n", + "#7467: 97.38%\n", + "#7468: 97.38%\n", + "#7469: 97.38%\n", + "#7470: 97.38%\n", + "#7471: 97.38%\n", + "#7472: 97.38%\n", + "#7473: 97.38%\n", + "#7474: 97.38%\n", + "#7475: 97.38%\n", + "#7476: 97.38%\n", + "#7477: 97.38%\n", + "#7478: 97.38%\n", + "#7479: 97.38%\n", + "#7480: 97.38%\n", + "#7481: 97.38%\n", + "#7482: 97.38%\n", + "#7483: 97.38%\n", + "#7484: 97.38%\n", + "#7485: 97.38%\n", + "#7486: 97.38%\n", + "#7487: 97.38%\n", + "#7488: 97.38%\n", + "#7489: 97.38%\n", + "#7490: 97.38%\n", + "#7491: 97.38%\n", + "#7492: 97.38%\n", + "#7493: 97.38%\n", + "#7494: 97.38%\n", + "#7495: 97.39%\n", + "#7496: 97.39%\n", + "#7497: 97.39%\n", + "#7498: 97.39%\n", + "#7499: 97.39%\n", + "#7500: 97.39%\n", + "#7501: 97.39%\n", + "#7502: 97.39%\n", + "#7503: 97.39%\n", + "#7504: 97.39%\n", + 
"#7505: 97.39%\n", + "#7506: 97.39%\n", + "#7507: 97.39%\n", + "#7508: 97.39%\n", + "#7509: 97.39%\n", + "#7510: 97.39%\n", + "#7511: 97.39%\n", + "#7512: 97.39%\n", + "#7513: 97.39%\n", + "#7514: 97.38%\n", + "#7515: 97.38%\n", + "#7516: 97.38%\n", + "#7517: 97.38%\n", + "#7518: 97.38%\n", + "#7519: 97.38%\n", + "#7520: 97.38%\n", + "#7521: 97.38%\n", + "#7522: 97.38%\n", + "#7523: 97.38%\n", + "#7524: 97.38%\n", + "#7525: 97.38%\n", + "#7526: 97.38%\n", + "#7527: 97.38%\n", + "#7528: 97.38%\n", + "#7529: 97.38%\n", + "#7530: 97.38%\n", + "#7531: 97.38%\n", + "#7532: 97.38%\n", + "#7533: 97.39%\n", + "#7534: 97.39%\n", + "#7535: 97.39%\n", + "#7536: 97.39%\n", + "#7537: 97.39%\n", + "#7538: 97.39%\n", + "#7539: 97.39%\n", + "#7540: 97.39%\n", + "#7541: 97.39%\n", + "#7542: 97.39%\n", + "#7543: 97.39%\n", + "#7544: 97.39%\n", + "#7545: 97.39%\n", + "#7546: 97.39%\n", + "#7547: 97.39%\n", + "#7548: 97.39%\n", + "#7549: 97.39%\n", + "#7550: 97.39%\n", + "#7551: 97.39%\n", + "#7552: 97.39%\n", + "#7553: 97.39%\n", + "#7554: 97.39%\n", + "#7555: 97.39%\n", + "#7556: 97.39%\n", + "#7557: 97.39%\n", + "#7558: 97.39%\n", + "#7559: 97.39%\n", + "#7560: 97.39%\n", + "#7561: 97.39%\n", + "#7562: 97.40%\n", + "#7563: 97.40%\n", + "#7564: 97.40%\n", + "#7565: 97.40%\n", + "#7566: 97.40%\n", + "#7567: 97.40%\n", + "#7568: 97.40%\n", + "#7569: 97.40%\n", + "#7570: 97.40%\n", + "#7571: 97.40%\n", + "#7572: 97.40%\n", + "#7573: 97.40%\n", + "#7574: 97.40%\n", + "#7575: 97.40%\n", + "#7576: 97.40%\n", + "#7577: 97.40%\n", + "#7578: 97.40%\n", + "#7579: 97.40%\n", + "#7580: 97.40%\n", + "#7581: 97.40%\n", + "#7582: 97.40%\n", + "#7583: 97.40%\n", + "#7584: 97.40%\n", + "#7585: 97.40%\n", + "#7586: 97.40%\n", + "#7587: 97.40%\n", + "#7588: 97.40%\n", + "#7589: 97.40%\n", + "#7590: 97.40%\n", + "#7591: 97.41%\n", + "#7592: 97.41%\n", + "#7593: 97.41%\n", + "#7594: 97.41%\n", + "#7595: 97.41%\n", + "#7596: 97.41%\n", + "#7597: 97.41%\n", + "#7598: 97.41%\n", + "#7599: 97.41%\n", + 
"#7600: 97.41%\n", + "#7601: 97.41%\n", + "#7602: 97.41%\n", + "#7603: 97.41%\n", + "#7604: 97.41%\n", + "#7605: 97.41%\n", + "#7606: 97.41%\n", + "#7607: 97.41%\n", + "#7608: 97.41%\n", + "#7609: 97.41%\n", + "#7610: 97.41%\n", + "#7611: 97.41%\n", + "#7612: 97.41%\n", + "#7613: 97.41%\n", + "#7614: 97.41%\n", + "#7615: 97.41%\n", + "#7616: 97.41%\n", + "#7617: 97.41%\n", + "#7618: 97.41%\n", + "#7619: 97.41%\n", + "#7620: 97.42%\n", + "#7621: 97.42%\n", + "#7622: 97.42%\n", + "#7623: 97.42%\n", + "#7624: 97.42%\n", + "#7625: 97.42%\n", + "#7626: 97.42%\n", + "#7627: 97.42%\n", + "#7628: 97.42%\n", + "#7629: 97.42%\n", + "#7630: 97.42%\n", + "#7631: 97.42%\n", + "#7632: 97.42%\n", + "#7633: 97.42%\n", + "#7634: 97.42%\n", + "#7635: 97.42%\n", + "#7636: 97.42%\n", + "#7637: 97.42%\n", + "#7638: 97.42%\n", + "#7639: 97.42%\n", + "#7640: 97.42%\n", + "#7641: 97.42%\n", + "#7642: 97.42%\n", + "#7643: 97.42%\n", + "#7644: 97.42%\n", + "#7645: 97.42%\n", + "#7646: 97.42%\n", + "#7647: 97.42%\n", + "#7648: 97.42%\n", + "#7649: 97.42%\n", + "#7650: 97.43%\n", + "#7651: 97.43%\n", + "#7652: 97.43%\n", + "#7653: 97.43%\n", + "#7654: 97.43%\n", + "#7655: 97.43%\n", + "#7656: 97.43%\n", + "#7657: 97.43%\n", + "#7658: 97.43%\n", + "#7659: 97.43%\n", + "#7660: 97.43%\n", + "#7661: 97.43%\n", + "#7662: 97.43%\n", + "#7663: 97.43%\n", + "#7664: 97.43%\n", + "#7665: 97.43%\n", + "#7666: 97.43%\n", + "#7667: 97.43%\n", + "#7668: 97.43%\n", + "#7669: 97.43%\n", + "#7670: 97.43%\n", + "#7671: 97.43%\n", + "#7672: 97.43%\n", + "#7673: 97.43%\n", + "#7674: 97.43%\n", + "#7675: 97.43%\n", + "#7676: 97.43%\n", + "#7677: 97.43%\n", + "#7678: 97.43%\n", + "#7679: 97.43%\n", + "#7680: 97.44%\n", + "#7681: 97.44%\n", + "#7682: 97.44%\n", + "#7683: 97.44%\n", + "#7684: 97.44%\n", + "#7685: 97.44%\n", + "#7686: 97.44%\n", + "#7687: 97.44%\n", + "#7688: 97.44%\n", + "#7689: 97.44%\n", + "#7690: 97.44%\n", + "#7691: 97.44%\n", + "#7692: 97.44%\n", + "#7693: 97.44%\n", + "#7694: 97.44%\n", + 
"#7695: 97.44%\n", + "#7696: 97.44%\n", + "#7697: 97.44%\n", + "#7698: 97.44%\n", + "#7699: 97.44%\n", + "#7700: 97.44%\n", + "#7701: 97.44%\n", + "#7702: 97.44%\n", + "#7703: 97.44%\n", + "#7704: 97.44%\n", + "#7705: 97.44%\n", + "#7706: 97.44%\n", + "#7707: 97.44%\n", + "#7708: 97.44%\n", + "#7709: 97.44%\n", + "#7710: 97.45%\n", + "#7711: 97.45%\n", + "#7712: 97.45%\n", + "#7713: 97.45%\n", + "#7714: 97.45%\n", + "#7715: 97.45%\n", + "#7716: 97.45%\n", + "#7717: 97.45%\n", + "#7718: 97.45%\n", + "#7719: 97.45%\n", + "#7720: 97.45%\n", + "#7721: 97.45%\n", + "#7722: 97.45%\n", + "#7723: 97.45%\n", + "#7724: 97.45%\n", + "#7725: 97.45%\n", + "#7726: 97.45%\n", + "#7727: 97.45%\n", + "#7728: 97.45%\n", + "#7729: 97.45%\n", + "#7730: 97.45%\n", + "#7731: 97.45%\n", + "#7732: 97.45%\n", + "#7733: 97.45%\n", + "#7734: 97.45%\n", + "#7735: 97.45%\n", + "#7736: 97.45%\n", + "#7737: 97.45%\n", + "#7738: 97.45%\n", + "#7739: 97.45%\n", + "#7740: 97.46%\n", + "#7741: 97.46%\n", + "#7742: 97.46%\n", + "#7743: 97.46%\n", + "#7744: 97.46%\n", + "#7745: 97.46%\n", + "#7746: 97.46%\n", + "#7747: 97.46%\n", + "#7748: 97.46%\n", + "#7749: 97.46%\n", + "#7750: 97.46%\n", + "#7751: 97.46%\n", + "#7752: 97.46%\n", + "#7753: 97.46%\n", + "#7754: 97.46%\n", + "#7755: 97.46%\n", + "#7756: 97.46%\n", + "#7757: 97.46%\n", + "#7758: 97.46%\n", + "#7759: 97.46%\n", + "#7760: 97.46%\n", + "#7761: 97.46%\n", + "#7762: 97.46%\n", + "#7763: 97.46%\n", + "#7764: 97.46%\n", + "#7765: 97.46%\n", + "#7766: 97.46%\n", + "#7767: 97.46%\n", + "#7768: 97.46%\n", + "#7769: 97.46%\n", + "#7770: 97.46%\n", + "#7771: 97.47%\n", + "#7772: 97.47%\n", + "#7773: 97.47%\n", + "#7774: 97.47%\n", + "#7775: 97.47%\n", + "#7776: 97.47%\n", + "#7777: 97.47%\n", + "#7778: 97.47%\n", + "#7779: 97.47%\n", + "#7780: 97.47%\n", + "#7781: 97.47%\n", + "#7782: 97.47%\n", + "#7783: 97.47%\n", + "#7784: 97.47%\n", + "#7785: 97.47%\n", + "#7786: 97.47%\n", + "#7787: 97.47%\n", + "#7788: 97.47%\n", + "#7789: 97.47%\n", + 
"#7790: 97.47%\n", + "#7791: 97.47%\n", + "#7792: 97.47%\n", + "#7793: 97.47%\n", + "#7794: 97.47%\n", + "#7795: 97.47%\n", + "#7796: 97.47%\n", + "#7797: 97.47%\n", + "#7798: 97.47%\n", + "#7799: 97.47%\n", + "#7800: 97.47%\n", + "#7801: 97.48%\n", + "#7802: 97.48%\n", + "#7803: 97.48%\n", + "#7804: 97.48%\n", + "#7805: 97.48%\n", + "#7806: 97.48%\n", + "#7807: 97.48%\n", + "#7808: 97.48%\n", + "#7809: 97.48%\n", + "#7810: 97.48%\n", + "#7811: 97.48%\n", + "#7812: 97.48%\n", + "#7813: 97.48%\n", + "#7814: 97.48%\n", + "#7815: 97.48%\n", + "#7816: 97.48%\n", + "#7817: 97.48%\n", + "#7818: 97.48%\n", + "#7819: 97.48%\n", + "#7820: 97.48%\n", + "#7821: 97.48%\n", + "#7822: 97.48%\n", + "#7823: 97.48%\n", + "#7824: 97.48%\n", + "#7825: 97.48%\n", + "#7826: 97.48%\n", + "#7827: 97.48%\n", + "#7828: 97.48%\n", + "#7829: 97.48%\n", + "#7830: 97.48%\n", + "#7831: 97.48%\n", + "#7832: 97.48%\n", + "#7833: 97.49%\n", + "#7834: 97.49%\n", + "#7835: 97.49%\n", + "#7836: 97.49%\n", + "#7837: 97.49%\n", + "#7838: 97.49%\n", + "#7839: 97.49%\n", + "#7840: 97.49%\n", + "#7841: 97.49%\n", + "#7842: 97.49%\n", + "#7843: 97.49%\n", + "#7844: 97.49%\n", + "#7845: 97.48%\n", + "#7846: 97.48%\n", + "#7847: 97.48%\n", + "#7848: 97.48%\n", + "#7849: 97.48%\n", + "#7850: 97.48%\n", + "#7851: 97.48%\n", + "#7852: 97.48%\n", + "#7853: 97.48%\n", + "#7854: 97.48%\n", + "#7855: 97.48%\n", + "#7856: 97.48%\n", + "#7857: 97.48%\n", + "#7858: 97.48%\n", + "#7859: 97.48%\n", + "#7860: 97.48%\n", + "#7861: 97.48%\n", + "#7862: 97.48%\n", + "#7863: 97.48%\n", + "#7864: 97.48%\n", + "#7865: 97.48%\n", + "#7866: 97.48%\n", + "#7867: 97.48%\n", + "#7868: 97.48%\n", + "#7869: 97.48%\n", + "#7870: 97.48%\n", + "#7871: 97.48%\n", + "#7872: 97.49%\n", + "#7873: 97.49%\n", + "#7874: 97.49%\n", + "#7875: 97.49%\n", + "#7876: 97.49%\n", + "#7877: 97.49%\n", + "#7878: 97.49%\n", + "#7879: 97.49%\n", + "#7880: 97.49%\n", + "#7881: 97.49%\n", + "#7882: 97.49%\n", + "#7883: 97.49%\n", + "#7884: 97.49%\n", + 
"#7885: 97.49%\n", + "#7886: 97.48%\n", + "#7887: 97.48%\n", + "#7888: 97.48%\n", + "#7889: 97.48%\n", + "#7890: 97.48%\n", + "#7891: 97.48%\n", + "#7892: 97.48%\n", + "#7893: 97.48%\n", + "#7894: 97.48%\n", + "#7895: 97.48%\n", + "#7896: 97.48%\n", + "#7897: 97.48%\n", + "#7898: 97.48%\n", + "#7899: 97.48%\n", + "#7900: 97.48%\n", + "#7901: 97.48%\n", + "#7902: 97.48%\n", + "#7903: 97.48%\n", + "#7904: 97.48%\n", + "#7905: 97.48%\n", + "#7906: 97.48%\n", + "#7907: 97.48%\n", + "#7908: 97.48%\n", + "#7909: 97.48%\n", + "#7910: 97.48%\n", + "#7911: 97.48%\n", + "#7912: 97.49%\n", + "#7913: 97.49%\n", + "#7914: 97.49%\n", + "#7915: 97.47%\n", + "#7916: 97.47%\n", + "#7917: 97.47%\n", + "#7918: 97.47%\n", + "#7919: 97.47%\n", + "#7920: 97.48%\n", + "#7921: 97.48%\n", + "#7922: 97.48%\n", + "#7923: 97.48%\n", + "#7924: 97.48%\n", + "#7925: 97.48%\n", + "#7926: 97.48%\n", + "#7927: 97.46%\n", + "#7928: 97.47%\n", + "#7929: 97.47%\n", + "#7930: 97.47%\n", + "#7931: 97.47%\n", + "#7932: 97.47%\n", + "#7933: 97.47%\n", + "#7934: 97.47%\n", + "#7935: 97.47%\n", + "#7936: 97.47%\n", + "#7937: 97.47%\n", + "#7938: 97.47%\n", + "#7939: 97.47%\n", + "#7940: 97.47%\n", + "#7941: 97.47%\n", + "#7942: 97.47%\n", + "#7943: 97.47%\n", + "#7944: 97.47%\n", + "#7945: 97.47%\n", + "#7946: 97.47%\n", + "#7947: 97.47%\n", + "#7948: 97.47%\n", + "#7949: 97.47%\n", + "#7950: 97.47%\n", + "#7951: 97.47%\n", + "#7952: 97.47%\n", + "#7953: 97.47%\n", + "#7954: 97.47%\n", + "#7955: 97.47%\n", + "#7956: 97.47%\n", + "#7957: 97.47%\n", + "#7958: 97.47%\n", + "#7959: 97.47%\n", + "#7960: 97.48%\n", + "#7961: 97.48%\n", + "#7962: 97.48%\n", + "#7963: 97.48%\n", + "#7964: 97.48%\n", + "#7965: 97.48%\n", + "#7966: 97.48%\n", + "#7967: 97.48%\n", + "#7968: 97.48%\n", + "#7969: 97.48%\n", + "#7970: 97.48%\n", + "#7971: 97.48%\n", + "#7972: 97.48%\n", + "#7973: 97.48%\n", + "#7974: 97.48%\n", + "#7975: 97.48%\n", + "#7976: 97.48%\n", + "#7977: 97.48%\n", + "#7978: 97.48%\n", + "#7979: 97.48%\n", + 
"#7980: 97.48%\n", + "#7981: 97.48%\n", + "#7982: 97.48%\n", + "#7983: 97.48%\n", + "#7984: 97.48%\n", + "#7985: 97.48%\n", + "#7986: 97.48%\n", + "#7987: 97.48%\n", + "#7988: 97.48%\n", + "#7989: 97.48%\n", + "#7990: 97.48%\n", + "#7991: 97.48%\n", + "#7992: 97.49%\n", + "#7993: 97.49%\n", + "#7994: 97.49%\n", + "#7995: 97.49%\n", + "#7996: 97.49%\n", + "#7997: 97.49%\n", + "#7998: 97.49%\n", + "#7999: 97.49%\n", + "#8000: 97.49%\n", + "#8001: 97.49%\n", + "#8002: 97.49%\n", + "#8003: 97.49%\n", + "#8004: 97.49%\n", + "#8005: 97.49%\n", + "#8006: 97.49%\n", + "#8007: 97.49%\n", + "#8008: 97.49%\n", + "#8009: 97.49%\n", + "#8010: 97.49%\n", + "#8011: 97.49%\n", + "#8012: 97.49%\n", + "#8013: 97.49%\n", + "#8014: 97.49%\n", + "#8015: 97.49%\n", + "#8016: 97.49%\n", + "#8017: 97.49%\n", + "#8018: 97.49%\n", + "#8019: 97.49%\n", + "#8020: 97.49%\n", + "#8021: 97.49%\n", + "#8022: 97.49%\n", + "#8023: 97.50%\n", + "#8024: 97.50%\n", + "#8025: 97.50%\n", + "#8026: 97.50%\n", + "#8027: 97.50%\n", + "#8028: 97.50%\n", + "#8029: 97.50%\n", + "#8030: 97.50%\n", + "#8031: 97.50%\n", + "#8032: 97.50%\n", + "#8033: 97.50%\n", + "#8034: 97.50%\n", + "#8035: 97.50%\n", + "#8036: 97.50%\n", + "#8037: 97.50%\n", + "#8038: 97.50%\n", + "#8039: 97.50%\n", + "#8040: 97.50%\n", + "#8041: 97.50%\n", + "#8042: 97.50%\n", + "#8043: 97.50%\n", + "#8044: 97.50%\n", + "#8045: 97.50%\n", + "#8046: 97.50%\n", + "#8047: 97.49%\n", + "#8048: 97.49%\n", + "#8049: 97.49%\n", + "#8050: 97.49%\n", + "#8051: 97.49%\n", + "#8052: 97.49%\n", + "#8053: 97.49%\n", + "#8054: 97.49%\n", + "#8055: 97.49%\n", + "#8056: 97.49%\n", + "#8057: 97.49%\n", + "#8058: 97.49%\n", + "#8059: 97.48%\n", + "#8060: 97.48%\n", + "#8061: 97.48%\n", + "#8062: 97.48%\n", + "#8063: 97.48%\n", + "#8064: 97.48%\n", + "#8065: 97.48%\n", + "#8066: 97.48%\n", + "#8067: 97.48%\n", + "#8068: 97.48%\n", + "#8069: 97.48%\n", + "#8070: 97.48%\n", + "#8071: 97.49%\n", + "#8072: 97.49%\n", + "#8073: 97.49%\n", + "#8074: 97.49%\n", + 
"#8075: 97.49%\n", + "#8076: 97.49%\n", + "#8077: 97.49%\n", + "#8078: 97.49%\n", + "#8079: 97.49%\n", + "#8080: 97.49%\n", + "#8081: 97.49%\n", + "#8082: 97.49%\n", + "#8083: 97.49%\n", + "#8084: 97.49%\n", + "#8085: 97.49%\n", + "#8086: 97.49%\n", + "#8087: 97.49%\n", + "#8088: 97.49%\n", + "#8089: 97.49%\n", + "#8090: 97.49%\n", + "#8091: 97.48%\n", + "#8092: 97.48%\n", + "#8093: 97.48%\n", + "#8094: 97.47%\n", + "#8095: 97.47%\n", + "#8096: 97.47%\n", + "#8097: 97.47%\n", + "#8098: 97.47%\n", + "#8099: 97.47%\n", + "#8100: 97.47%\n", + "#8101: 97.47%\n", + "#8102: 97.47%\n", + "#8103: 97.47%\n", + "#8104: 97.47%\n", + "#8105: 97.47%\n", + "#8106: 97.47%\n", + "#8107: 97.47%\n", + "#8108: 97.47%\n", + "#8109: 97.47%\n", + "#8110: 97.47%\n", + "#8111: 97.47%\n", + "#8112: 97.47%\n", + "#8113: 97.47%\n", + "#8114: 97.47%\n", + "#8115: 97.47%\n", + "#8116: 97.47%\n", + "#8117: 97.47%\n", + "#8118: 97.48%\n", + "#8119: 97.48%\n", + "#8120: 97.48%\n", + "#8121: 97.48%\n", + "#8122: 97.48%\n", + "#8123: 97.48%\n", + "#8124: 97.48%\n", + "#8125: 97.48%\n", + "#8126: 97.48%\n", + "#8127: 97.48%\n", + "#8128: 97.48%\n", + "#8129: 97.48%\n", + "#8130: 97.48%\n", + "#8131: 97.48%\n", + "#8132: 97.48%\n", + "#8133: 97.48%\n", + "#8134: 97.48%\n", + "#8135: 97.48%\n", + "#8136: 97.48%\n", + "#8137: 97.48%\n", + "#8138: 97.48%\n", + "#8139: 97.48%\n", + "#8140: 97.48%\n", + "#8141: 97.48%\n", + "#8142: 97.48%\n", + "#8143: 97.48%\n", + "#8144: 97.48%\n", + "#8145: 97.48%\n", + "#8146: 97.48%\n", + "#8147: 97.48%\n", + "#8148: 97.48%\n", + "#8149: 97.48%\n", + "#8150: 97.48%\n", + "#8151: 97.49%\n", + "#8152: 97.49%\n", + "#8153: 97.49%\n", + "#8154: 97.49%\n", + "#8155: 97.49%\n", + "#8156: 97.49%\n", + "#8157: 97.49%\n", + "#8158: 97.49%\n", + "#8159: 97.49%\n", + "#8160: 97.49%\n", + "#8161: 97.49%\n", + "#8162: 97.49%\n", + "#8163: 97.49%\n", + "#8164: 97.49%\n", + "#8165: 97.49%\n", + "#8166: 97.49%\n", + "#8167: 97.49%\n", + "#8168: 97.49%\n", + "#8169: 97.49%\n", + 
"#8170: 97.49%\n", + "#8171: 97.49%\n", + "#8172: 97.49%\n", + "#8173: 97.49%\n", + "#8174: 97.49%\n", + "#8175: 97.49%\n", + "#8176: 97.49%\n", + "#8177: 97.49%\n", + "#8178: 97.49%\n", + "#8179: 97.49%\n", + "#8180: 97.49%\n", + "#8181: 97.49%\n", + "#8182: 97.49%\n", + "#8183: 97.50%\n", + "#8184: 97.50%\n", + "#8185: 97.50%\n", + "#8186: 97.50%\n", + "#8187: 97.50%\n", + "#8188: 97.50%\n", + "#8189: 97.50%\n", + "#8190: 97.50%\n", + "#8191: 97.50%\n", + "#8192: 97.50%\n", + "#8193: 97.50%\n", + "#8194: 97.50%\n", + "#8195: 97.50%\n", + "#8196: 97.50%\n", + "#8197: 97.50%\n", + "#8198: 97.50%\n", + "#8199: 97.50%\n", + "#8200: 97.50%\n", + "#8201: 97.50%\n", + "#8202: 97.50%\n", + "#8203: 97.50%\n", + "#8204: 97.50%\n", + "#8205: 97.50%\n", + "#8206: 97.50%\n", + "#8207: 97.50%\n", + "#8208: 97.50%\n", + "#8209: 97.50%\n", + "#8210: 97.50%\n", + "#8211: 97.50%\n", + "#8212: 97.50%\n", + "#8213: 97.50%\n", + "#8214: 97.50%\n", + "#8215: 97.50%\n", + "#8216: 97.51%\n", + "#8217: 97.51%\n", + "#8218: 97.51%\n", + "#8219: 97.51%\n", + "#8220: 97.51%\n", + "#8221: 97.51%\n", + "#8222: 97.51%\n", + "#8223: 97.51%\n", + "#8224: 97.51%\n", + "#8225: 97.51%\n", + "#8226: 97.51%\n", + "#8227: 97.51%\n", + "#8228: 97.51%\n", + "#8229: 97.51%\n", + "#8230: 97.51%\n", + "#8231: 97.51%\n", + "#8232: 97.51%\n", + "#8233: 97.51%\n", + "#8234: 97.51%\n", + "#8235: 97.51%\n", + "#8236: 97.51%\n", + "#8237: 97.51%\n", + "#8238: 97.51%\n", + "#8239: 97.51%\n", + "#8240: 97.51%\n", + "#8241: 97.51%\n", + "#8242: 97.51%\n", + "#8243: 97.51%\n", + "#8244: 97.51%\n", + "#8245: 97.51%\n", + "#8246: 97.51%\n", + "#8247: 97.51%\n", + "#8248: 97.51%\n", + "#8249: 97.52%\n", + "#8250: 97.52%\n", + "#8251: 97.52%\n", + "#8252: 97.52%\n", + "#8253: 97.52%\n", + "#8254: 97.52%\n", + "#8255: 97.52%\n", + "#8256: 97.52%\n", + "#8257: 97.52%\n", + "#8258: 97.52%\n", + "#8259: 97.52%\n", + "#8260: 97.52%\n", + "#8261: 97.52%\n", + "#8262: 97.52%\n", + "#8263: 97.52%\n", + "#8264: 97.52%\n", + 
"#8265: 97.52%\n", + "#8266: 97.52%\n", + "#8267: 97.52%\n", + "#8268: 97.52%\n", + "#8269: 97.52%\n", + "#8270: 97.52%\n", + "#8271: 97.52%\n", + "#8272: 97.52%\n", + "#8273: 97.52%\n", + "#8274: 97.52%\n", + "#8275: 97.52%\n", + "#8276: 97.52%\n", + "#8277: 97.52%\n", + "#8278: 97.51%\n", + "#8279: 97.51%\n", + "#8280: 97.51%\n", + "#8281: 97.51%\n", + "#8282: 97.51%\n", + "#8283: 97.51%\n", + "#8284: 97.51%\n", + "#8285: 97.51%\n", + "#8286: 97.51%\n", + "#8287: 97.51%\n", + "#8288: 97.51%\n", + "#8289: 97.52%\n", + "#8290: 97.52%\n", + "#8291: 97.52%\n", + "#8292: 97.52%\n", + "#8293: 97.52%\n", + "#8294: 97.52%\n", + "#8295: 97.52%\n", + "#8296: 97.52%\n", + "#8297: 97.52%\n", + "#8298: 97.52%\n", + "#8299: 97.52%\n", + "#8300: 97.52%\n", + "#8301: 97.52%\n", + "#8302: 97.52%\n", + "#8303: 97.52%\n", + "#8304: 97.52%\n", + "#8305: 97.52%\n", + "#8306: 97.52%\n", + "#8307: 97.52%\n", + "#8308: 97.52%\n", + "#8309: 97.52%\n", + "#8310: 97.52%\n", + "#8311: 97.52%\n", + "#8312: 97.52%\n", + "#8313: 97.52%\n", + "#8314: 97.52%\n", + "#8315: 97.52%\n", + "#8316: 97.51%\n", + "#8317: 97.51%\n", + "#8318: 97.51%\n", + "#8319: 97.51%\n", + "#8320: 97.51%\n", + "#8321: 97.51%\n", + "#8322: 97.50%\n", + "#8323: 97.50%\n", + "#8324: 97.50%\n", + "#8325: 97.49%\n", + "#8326: 97.49%\n", + "#8327: 97.49%\n", + "#8328: 97.49%\n", + "#8329: 97.49%\n", + "#8330: 97.49%\n", + "#8331: 97.49%\n", + "#8332: 97.49%\n", + "#8333: 97.49%\n", + "#8334: 97.49%\n", + "#8335: 97.49%\n", + "#8336: 97.49%\n", + "#8337: 97.49%\n", + "#8338: 97.49%\n", + "#8339: 97.49%\n", + "#8340: 97.49%\n", + "#8341: 97.49%\n", + "#8342: 97.49%\n", + "#8343: 97.50%\n", + "#8344: 97.50%\n", + "#8345: 97.50%\n", + "#8346: 97.50%\n", + "#8347: 97.50%\n", + "#8348: 97.50%\n", + "#8349: 97.50%\n", + "#8350: 97.50%\n", + "#8351: 97.50%\n", + "#8352: 97.50%\n", + "#8353: 97.49%\n", + "#8354: 97.49%\n", + "#8355: 97.49%\n", + "#8356: 97.49%\n", + "#8357: 97.49%\n", + "#8358: 97.49%\n", + "#8359: 97.49%\n", + 
"#8360: 97.49%\n", + "#8361: 97.49%\n", + "#8362: 97.49%\n", + "#8363: 97.49%\n", + "#8364: 97.49%\n", + "#8365: 97.49%\n", + "#8366: 97.49%\n", + "#8367: 97.49%\n", + "#8368: 97.49%\n", + "#8369: 97.49%\n", + "#8370: 97.49%\n", + "#8371: 97.49%\n", + "#8372: 97.49%\n", + "#8373: 97.49%\n", + "#8374: 97.49%\n", + "#8375: 97.48%\n", + "#8376: 97.48%\n", + "#8377: 97.48%\n", + "#8378: 97.48%\n", + "#8379: 97.48%\n", + "#8380: 97.48%\n", + "#8381: 97.48%\n", + "#8382: 97.48%\n", + "#8383: 97.48%\n", + "#8384: 97.48%\n", + "#8385: 97.48%\n", + "#8386: 97.48%\n", + "#8387: 97.48%\n", + "#8388: 97.48%\n", + "#8389: 97.49%\n", + "#8390: 97.49%\n", + "#8391: 97.49%\n", + "#8392: 97.49%\n", + "#8393: 97.49%\n", + "#8394: 97.49%\n", + "#8395: 97.49%\n", + "#8396: 97.49%\n", + "#8397: 97.49%\n", + "#8398: 97.49%\n", + "#8399: 97.49%\n", + "#8400: 97.49%\n", + "#8401: 97.49%\n", + "#8402: 97.49%\n", + "#8403: 97.49%\n", + "#8404: 97.49%\n", + "#8405: 97.49%\n", + "#8406: 97.49%\n", + "#8407: 97.49%\n", + "#8408: 97.48%\n", + "#8409: 97.48%\n", + "#8410: 97.47%\n", + "#8411: 97.47%\n", + "#8412: 97.47%\n", + "#8413: 97.47%\n", + "#8414: 97.47%\n", + "#8415: 97.47%\n", + "#8416: 97.47%\n", + "#8417: 97.47%\n", + "#8418: 97.47%\n", + "#8419: 97.47%\n", + "#8420: 97.47%\n", + "#8421: 97.47%\n", + "#8422: 97.47%\n", + "#8423: 97.47%\n", + "#8424: 97.47%\n", + "#8425: 97.47%\n", + "#8426: 97.47%\n", + "#8427: 97.47%\n", + "#8428: 97.47%\n", + "#8429: 97.47%\n", + "#8430: 97.47%\n", + "#8431: 97.47%\n", + "#8432: 97.47%\n", + "#8433: 97.47%\n", + "#8434: 97.47%\n", + "#8435: 97.48%\n", + "#8436: 97.48%\n", + "#8437: 97.48%\n", + "#8438: 97.48%\n", + "#8439: 97.48%\n", + "#8440: 97.48%\n", + "#8441: 97.48%\n", + "#8442: 97.48%\n", + "#8443: 97.48%\n", + "#8444: 97.48%\n", + "#8445: 97.48%\n", + "#8446: 97.48%\n", + "#8447: 97.48%\n", + "#8448: 97.48%\n", + "#8449: 97.48%\n", + "#8450: 97.48%\n", + "#8451: 97.48%\n", + "#8452: 97.48%\n", + "#8453: 97.48%\n", + "#8454: 97.48%\n", + 
"#8455: 97.48%\n", + "#8456: 97.48%\n", + "#8457: 97.48%\n", + "#8458: 97.48%\n", + "#8459: 97.48%\n", + "#8460: 97.48%\n", + "#8461: 97.48%\n", + "#8462: 97.48%\n", + "#8463: 97.48%\n", + "#8464: 97.48%\n", + "#8465: 97.48%\n", + "#8466: 97.48%\n", + "#8467: 97.48%\n", + "#8468: 97.48%\n", + "#8469: 97.49%\n", + "#8470: 97.49%\n", + "#8471: 97.49%\n", + "#8472: 97.49%\n", + "#8473: 97.49%\n", + "#8474: 97.49%\n", + "#8475: 97.49%\n", + "#8476: 97.49%\n", + "#8477: 97.49%\n", + "#8478: 97.49%\n", + "#8479: 97.49%\n", + "#8480: 97.49%\n", + "#8481: 97.49%\n", + "#8482: 97.49%\n", + "#8483: 97.49%\n", + "#8484: 97.49%\n", + "#8485: 97.49%\n", + "#8486: 97.49%\n", + "#8487: 97.49%\n", + "#8488: 97.49%\n", + "#8489: 97.49%\n", + "#8490: 97.49%\n", + "#8491: 97.49%\n", + "#8492: 97.49%\n", + "#8493: 97.49%\n", + "#8494: 97.49%\n", + "#8495: 97.49%\n", + "#8496: 97.49%\n", + "#8497: 97.49%\n", + "#8498: 97.49%\n", + "#8499: 97.49%\n", + "#8500: 97.49%\n", + "#8501: 97.49%\n", + "#8502: 97.50%\n", + "#8503: 97.50%\n", + "#8504: 97.50%\n", + "#8505: 97.50%\n", + "#8506: 97.50%\n", + "#8507: 97.50%\n", + "#8508: 97.50%\n", + "#8509: 97.50%\n", + "#8510: 97.50%\n", + "#8511: 97.50%\n", + "#8512: 97.50%\n", + "#8513: 97.50%\n", + "#8514: 97.50%\n", + "#8515: 97.50%\n", + "#8516: 97.50%\n", + "#8517: 97.50%\n", + "#8518: 97.50%\n", + "#8519: 97.50%\n", + "#8520: 97.50%\n", + "#8521: 97.50%\n", + "#8522: 97.49%\n", + "#8523: 97.49%\n", + "#8524: 97.49%\n", + "#8525: 97.49%\n", + "#8526: 97.49%\n", + "#8527: 97.49%\n", + "#8528: 97.49%\n", + "#8529: 97.49%\n", + "#8530: 97.49%\n", + "#8531: 97.49%\n", + "#8532: 97.49%\n", + "#8533: 97.49%\n", + "#8534: 97.49%\n", + "#8535: 97.49%\n", + "#8536: 97.49%\n", + "#8537: 97.49%\n", + "#8538: 97.49%\n", + "#8539: 97.49%\n", + "#8540: 97.49%\n", + "#8541: 97.49%\n", + "#8542: 97.50%\n", + "#8543: 97.50%\n", + "#8544: 97.50%\n", + "#8545: 97.50%\n", + "#8546: 97.50%\n", + "#8547: 97.50%\n", + "#8548: 97.50%\n", + "#8549: 97.50%\n", + 
"#8550: 97.50%\n", + "#8551: 97.50%\n", + "#8552: 97.50%\n", + "#8553: 97.50%\n", + "#8554: 97.50%\n", + "#8555: 97.50%\n", + "#8556: 97.50%\n", + "#8557: 97.50%\n", + "#8558: 97.50%\n", + "#8559: 97.50%\n", + "#8560: 97.50%\n", + "#8561: 97.50%\n", + "#8562: 97.50%\n", + "#8563: 97.50%\n", + "#8564: 97.50%\n", + "#8565: 97.50%\n", + "#8566: 97.50%\n", + "#8567: 97.50%\n", + "#8568: 97.50%\n", + "#8569: 97.50%\n", + "#8570: 97.50%\n", + "#8571: 97.50%\n", + "#8572: 97.50%\n", + "#8573: 97.50%\n", + "#8574: 97.50%\n", + "#8575: 97.50%\n", + "#8576: 97.50%\n", + "#8577: 97.51%\n", + "#8578: 97.51%\n", + "#8579: 97.51%\n", + "#8580: 97.51%\n", + "#8581: 97.51%\n", + "#8582: 97.51%\n", + "#8583: 97.51%\n", + "#8584: 97.51%\n", + "#8585: 97.51%\n", + "#8586: 97.51%\n", + "#8587: 97.51%\n", + "#8588: 97.51%\n", + "#8589: 97.51%\n", + "#8590: 97.51%\n", + "#8591: 97.51%\n", + "#8592: 97.51%\n", + "#8593: 97.51%\n", + "#8594: 97.51%\n", + "#8595: 97.51%\n", + "#8596: 97.51%\n", + "#8597: 97.51%\n", + "#8598: 97.51%\n", + "#8599: 97.51%\n", + "#8600: 97.51%\n", + "#8601: 97.51%\n", + "#8602: 97.51%\n", + "#8603: 97.51%\n", + "#8604: 97.51%\n", + "#8605: 97.51%\n", + "#8606: 97.51%\n", + "#8607: 97.51%\n", + "#8608: 97.51%\n", + "#8609: 97.51%\n", + "#8610: 97.51%\n", + "#8611: 97.52%\n", + "#8612: 97.52%\n", + "#8613: 97.52%\n", + "#8614: 97.52%\n", + "#8615: 97.52%\n", + "#8616: 97.52%\n", + "#8617: 97.52%\n", + "#8618: 97.52%\n", + "#8619: 97.52%\n", + "#8620: 97.52%\n", + "#8621: 97.52%\n", + "#8622: 97.52%\n", + "#8623: 97.52%\n", + "#8624: 97.52%\n", + "#8625: 97.52%\n", + "#8626: 97.52%\n", + "#8627: 97.52%\n", + "#8628: 97.52%\n", + "#8629: 97.52%\n", + "#8630: 97.52%\n", + "#8631: 97.52%\n", + "#8632: 97.52%\n", + "#8633: 97.52%\n", + "#8634: 97.52%\n", + "#8635: 97.52%\n", + "#8636: 97.52%\n", + "#8637: 97.52%\n", + "#8638: 97.52%\n", + "#8639: 97.52%\n", + "#8640: 97.52%\n", + "#8641: 97.52%\n", + "#8642: 97.52%\n", + "#8643: 97.52%\n", + "#8644: 97.52%\n", + 
"#8645: 97.52%\n", + "#8646: 97.53%\n", + "#8647: 97.53%\n", + "#8648: 97.53%\n", + "#8649: 97.53%\n", + "#8650: 97.53%\n", + "#8651: 97.53%\n", + "#8652: 97.53%\n", + "#8653: 97.53%\n", + "#8654: 97.53%\n", + "#8655: 97.53%\n", + "#8656: 97.53%\n", + "#8657: 97.53%\n", + "#8658: 97.53%\n", + "#8659: 97.53%\n", + "#8660: 97.53%\n", + "#8661: 97.53%\n", + "#8662: 97.53%\n", + "#8663: 97.53%\n", + "#8664: 97.53%\n", + "#8665: 97.53%\n", + "#8666: 97.53%\n", + "#8667: 97.53%\n", + "#8668: 97.53%\n", + "#8669: 97.53%\n", + "#8670: 97.53%\n", + "#8671: 97.53%\n", + "#8672: 97.53%\n", + "#8673: 97.53%\n", + "#8674: 97.53%\n", + "#8675: 97.53%\n", + "#8676: 97.53%\n", + "#8677: 97.53%\n", + "#8678: 97.53%\n", + "#8679: 97.53%\n", + "#8680: 97.53%\n", + "#8681: 97.54%\n", + "#8682: 97.54%\n", + "#8683: 97.54%\n", + "#8684: 97.54%\n", + "#8685: 97.54%\n", + "#8686: 97.54%\n", + "#8687: 97.54%\n", + "#8688: 97.54%\n", + "#8689: 97.54%\n", + "#8690: 97.54%\n", + "#8691: 97.54%\n", + "#8692: 97.54%\n", + "#8693: 97.54%\n", + "#8694: 97.54%\n", + "#8695: 97.54%\n", + "#8696: 97.54%\n", + "#8697: 97.54%\n", + "#8698: 97.54%\n", + "#8699: 97.54%\n", + "#8700: 97.54%\n", + "#8701: 97.54%\n", + "#8702: 97.54%\n", + "#8703: 97.54%\n", + "#8704: 97.54%\n", + "#8705: 97.54%\n", + "#8706: 97.54%\n", + "#8707: 97.54%\n", + "#8708: 97.54%\n", + "#8709: 97.54%\n", + "#8710: 97.54%\n", + "#8711: 97.54%\n", + "#8712: 97.54%\n", + "#8713: 97.54%\n", + "#8714: 97.54%\n", + "#8715: 97.54%\n", + "#8716: 97.55%\n", + "#8717: 97.55%\n", + "#8718: 97.55%\n", + "#8719: 97.55%\n", + "#8720: 97.55%\n", + "#8721: 97.55%\n", + "#8722: 97.55%\n", + "#8723: 97.55%\n", + "#8724: 97.55%\n", + "#8725: 97.55%\n", + "#8726: 97.55%\n", + "#8727: 97.55%\n", + "#8728: 97.55%\n", + "#8729: 97.55%\n", + "#8730: 97.55%\n", + "#8731: 97.55%\n", + "#8732: 97.55%\n", + "#8733: 97.55%\n", + "#8734: 97.55%\n", + "#8735: 97.55%\n", + "#8736: 97.55%\n", + "#8737: 97.55%\n", + "#8738: 97.55%\n", + "#8739: 97.55%\n", + 
"#8740: 97.55%\n", + "#8741: 97.55%\n", + "#8742: 97.55%\n", + "#8743: 97.55%\n", + "#8744: 97.55%\n", + "#8745: 97.55%\n", + "#8746: 97.55%\n", + "#8747: 97.55%\n", + "#8748: 97.55%\n", + "#8749: 97.55%\n", + "#8750: 97.55%\n", + "#8751: 97.55%\n", + "#8752: 97.56%\n", + "#8753: 97.56%\n", + "#8754: 97.56%\n", + "#8755: 97.56%\n", + "#8756: 97.56%\n", + "#8757: 97.56%\n", + "#8758: 97.56%\n", + "#8759: 97.56%\n", + "#8760: 97.56%\n", + "#8761: 97.56%\n", + "#8762: 97.56%\n", + "#8763: 97.56%\n", + "#8764: 97.56%\n", + "#8765: 97.56%\n", + "#8766: 97.56%\n", + "#8767: 97.56%\n", + "#8768: 97.56%\n", + "#8769: 97.56%\n", + "#8770: 97.56%\n", + "#8771: 97.56%\n", + "#8772: 97.56%\n", + "#8773: 97.56%\n", + "#8774: 97.56%\n", + "#8775: 97.56%\n", + "#8776: 97.56%\n", + "#8777: 97.56%\n", + "#8778: 97.56%\n", + "#8779: 97.56%\n", + "#8780: 97.56%\n", + "#8781: 97.56%\n", + "#8782: 97.56%\n", + "#8783: 97.56%\n", + "#8784: 97.56%\n", + "#8785: 97.56%\n", + "#8786: 97.56%\n", + "#8787: 97.56%\n", + "#8788: 97.57%\n", + "#8789: 97.57%\n", + "#8790: 97.57%\n", + "#8791: 97.57%\n", + "#8792: 97.57%\n", + "#8793: 97.57%\n", + "#8794: 97.57%\n", + "#8795: 97.57%\n", + "#8796: 97.57%\n", + "#8797: 97.57%\n", + "#8798: 97.57%\n", + "#8799: 97.57%\n", + "#8800: 97.57%\n", + "#8801: 97.57%\n", + "#8802: 97.57%\n", + "#8803: 97.57%\n", + "#8804: 97.57%\n", + "#8805: 97.57%\n", + "#8806: 97.57%\n", + "#8807: 97.57%\n", + "#8808: 97.57%\n", + "#8809: 97.57%\n", + "#8810: 97.57%\n", + "#8811: 97.57%\n", + "#8812: 97.57%\n", + "#8813: 97.57%\n", + "#8814: 97.57%\n", + "#8815: 97.57%\n", + "#8816: 97.57%\n", + "#8817: 97.57%\n", + "#8818: 97.57%\n", + "#8819: 97.57%\n", + "#8820: 97.57%\n", + "#8821: 97.57%\n", + "#8822: 97.57%\n", + "#8823: 97.57%\n", + "#8824: 97.58%\n", + "#8825: 97.58%\n", + "#8826: 97.58%\n", + "#8827: 97.58%\n", + "#8828: 97.58%\n", + "#8829: 97.58%\n", + "#8830: 97.58%\n", + "#8831: 97.58%\n", + "#8832: 97.58%\n", + "#8833: 97.58%\n", + "#8834: 97.58%\n", + 
"#8835: 97.58%\n", + "#8836: 97.58%\n", + "#8837: 97.58%\n", + "#8838: 97.58%\n", + "#8839: 97.58%\n", + "#8840: 97.58%\n", + "#8841: 97.58%\n", + "#8842: 97.58%\n", + "#8843: 97.58%\n", + "#8844: 97.58%\n", + "#8845: 97.58%\n", + "#8846: 97.58%\n", + "#8847: 97.58%\n", + "#8848: 97.58%\n", + "#8849: 97.58%\n", + "#8850: 97.58%\n", + "#8851: 97.58%\n", + "#8852: 97.58%\n", + "#8853: 97.58%\n", + "#8854: 97.58%\n", + "#8855: 97.58%\n", + "#8856: 97.58%\n", + "#8857: 97.58%\n", + "#8858: 97.58%\n", + "#8859: 97.58%\n", + "#8860: 97.58%\n", + "#8861: 97.59%\n", + "#8862: 97.59%\n", + "#8863: 97.59%\n", + "#8864: 97.59%\n", + "#8865: 97.59%\n", + "#8866: 97.59%\n", + "#8867: 97.59%\n", + "#8868: 97.59%\n", + "#8869: 97.59%\n", + "#8870: 97.59%\n", + "#8871: 97.59%\n", + "#8872: 97.59%\n", + "#8873: 97.59%\n", + "#8874: 97.59%\n", + "#8875: 97.59%\n", + "#8876: 97.59%\n", + "#8877: 97.59%\n", + "#8878: 97.59%\n", + "#8879: 97.59%\n", + "#8880: 97.59%\n", + "#8881: 97.59%\n", + "#8882: 97.59%\n", + "#8883: 97.59%\n", + "#8884: 97.59%\n", + "#8885: 97.59%\n", + "#8886: 97.59%\n", + "#8887: 97.59%\n", + "#8888: 97.59%\n", + "#8889: 97.59%\n", + "#8890: 97.59%\n", + "#8891: 97.59%\n", + "#8892: 97.59%\n", + "#8893: 97.59%\n", + "#8894: 97.59%\n", + "#8895: 97.59%\n", + "#8896: 97.59%\n", + "#8897: 97.59%\n", + "#8898: 97.60%\n", + "#8899: 97.60%\n", + "#8900: 97.60%\n", + "#8901: 97.60%\n", + "#8902: 97.60%\n", + "#8903: 97.60%\n", + "#8904: 97.60%\n", + "#8905: 97.60%\n", + "#8906: 97.60%\n", + "#8907: 97.60%\n", + "#8908: 97.60%\n", + "#8909: 97.60%\n", + "#8910: 97.60%\n", + "#8911: 97.60%\n", + "#8912: 97.60%\n", + "#8913: 97.60%\n", + "#8914: 97.60%\n", + "#8915: 97.60%\n", + "#8916: 97.60%\n", + "#8917: 97.60%\n", + "#8918: 97.60%\n", + "#8919: 97.60%\n", + "#8920: 97.60%\n", + "#8921: 97.60%\n", + "#8922: 97.60%\n", + "#8923: 97.60%\n", + "#8924: 97.60%\n", + "#8925: 97.60%\n", + "#8926: 97.60%\n", + "#8927: 97.60%\n", + "#8928: 97.60%\n", + "#8929: 97.60%\n", + 
"#8930: 97.60%\n", + "#8931: 97.60%\n", + "#8932: 97.60%\n", + "#8933: 97.60%\n", + "#8934: 97.60%\n", + "#8935: 97.61%\n", + "#8936: 97.61%\n", + "#8937: 97.61%\n", + "#8938: 97.61%\n", + "#8939: 97.61%\n", + "#8940: 97.61%\n", + "#8941: 97.61%\n", + "#8942: 97.61%\n", + "#8943: 97.61%\n", + "#8944: 97.61%\n", + "#8945: 97.61%\n", + "#8946: 97.61%\n", + "#8947: 97.61%\n", + "#8948: 97.61%\n", + "#8949: 97.61%\n", + "#8950: 97.61%\n", + "#8951: 97.61%\n", + "#8952: 97.61%\n", + "#8953: 97.61%\n", + "#8954: 97.61%\n", + "#8955: 97.61%\n", + "#8956: 97.61%\n", + "#8957: 97.61%\n", + "#8958: 97.61%\n", + "#8959: 97.61%\n", + "#8960: 97.61%\n", + "#8961: 97.61%\n", + "#8962: 97.61%\n", + "#8963: 97.61%\n", + "#8964: 97.61%\n", + "#8965: 97.61%\n", + "#8966: 97.61%\n", + "#8967: 97.61%\n", + "#8968: 97.61%\n", + "#8969: 97.61%\n", + "#8970: 97.61%\n", + "#8971: 97.61%\n", + "#8972: 97.62%\n", + "#8973: 97.62%\n", + "#8974: 97.62%\n", + "#8975: 97.62%\n", + "#8976: 97.62%\n", + "#8977: 97.62%\n", + "#8978: 97.62%\n", + "#8979: 97.62%\n", + "#8980: 97.62%\n", + "#8981: 97.62%\n", + "#8982: 97.62%\n", + "#8983: 97.62%\n", + "#8984: 97.62%\n", + "#8985: 97.62%\n", + "#8986: 97.62%\n", + "#8987: 97.62%\n", + "#8988: 97.62%\n", + "#8989: 97.62%\n", + "#8990: 97.62%\n", + "#8991: 97.62%\n", + "#8992: 97.62%\n", + "#8993: 97.62%\n", + "#8994: 97.62%\n", + "#8995: 97.62%\n", + "#8996: 97.62%\n", + "#8997: 97.62%\n", + "#8998: 97.62%\n", + "#8999: 97.62%\n", + "#9000: 97.62%\n", + "#9001: 97.62%\n", + "#9002: 97.62%\n", + "#9003: 97.62%\n", + "#9004: 97.62%\n", + "#9005: 97.62%\n", + "#9006: 97.62%\n", + "#9007: 97.62%\n", + "#9008: 97.62%\n", + "#9009: 97.61%\n", + "#9010: 97.61%\n", + "#9011: 97.61%\n", + "#9012: 97.61%\n", + "#9013: 97.61%\n", + "#9014: 97.62%\n", + "#9015: 97.60%\n", + "#9016: 97.60%\n", + "#9017: 97.60%\n", + "#9018: 97.61%\n", + "#9019: 97.59%\n", + "#9020: 97.59%\n", + "#9021: 97.59%\n", + "#9022: 97.60%\n", + "#9023: 97.60%\n", + "#9024: 97.58%\n", + 
"#9025: 97.58%\n", + "#9026: 97.59%\n", + "#9027: 97.59%\n", + "#9028: 97.59%\n", + "#9029: 97.59%\n", + "#9030: 97.59%\n", + "#9031: 97.59%\n", + "#9032: 97.59%\n", + "#9033: 97.59%\n", + "#9034: 97.59%\n", + "#9035: 97.59%\n", + "#9036: 97.58%\n", + "#9037: 97.58%\n", + "#9038: 97.58%\n", + "#9039: 97.58%\n", + "#9040: 97.58%\n", + "#9041: 97.58%\n", + "#9042: 97.58%\n", + "#9043: 97.58%\n", + "#9044: 97.58%\n", + "#9045: 97.58%\n", + "#9046: 97.58%\n", + "#9047: 97.58%\n", + "#9048: 97.58%\n", + "#9049: 97.58%\n", + "#9050: 97.58%\n", + "#9051: 97.58%\n", + "#9052: 97.58%\n", + "#9053: 97.58%\n", + "#9054: 97.58%\n", + "#9055: 97.58%\n", + "#9056: 97.58%\n", + "#9057: 97.58%\n", + "#9058: 97.58%\n", + "#9059: 97.58%\n", + "#9060: 97.58%\n", + "#9061: 97.58%\n", + "#9062: 97.58%\n", + "#9063: 97.58%\n", + "#9064: 97.58%\n", + "#9065: 97.58%\n", + "#9066: 97.58%\n", + "#9067: 97.58%\n", + "#9068: 97.59%\n", + "#9069: 97.59%\n", + "#9070: 97.59%\n", + "#9071: 97.57%\n", + "#9072: 97.58%\n", + "#9073: 97.58%\n", + "#9074: 97.58%\n", + "#9075: 97.58%\n", + "#9076: 97.58%\n", + "#9077: 97.58%\n", + "#9078: 97.58%\n", + "#9079: 97.58%\n", + "#9080: 97.58%\n", + "#9081: 97.58%\n", + "#9082: 97.58%\n", + "#9083: 97.58%\n", + "#9084: 97.58%\n", + "#9085: 97.58%\n", + "#9086: 97.58%\n", + "#9087: 97.58%\n", + "#9088: 97.58%\n", + "#9089: 97.58%\n", + "#9090: 97.58%\n", + "#9091: 97.58%\n", + "#9092: 97.58%\n", + "#9093: 97.58%\n", + "#9094: 97.58%\n", + "#9095: 97.58%\n", + "#9096: 97.58%\n", + "#9097: 97.58%\n", + "#9098: 97.58%\n", + "#9099: 97.58%\n", + "#9100: 97.58%\n", + "#9101: 97.58%\n", + "#9102: 97.58%\n", + "#9103: 97.58%\n", + "#9104: 97.58%\n", + "#9105: 97.58%\n", + "#9106: 97.58%\n", + "#9107: 97.58%\n", + "#9108: 97.58%\n", + "#9109: 97.59%\n", + "#9110: 97.59%\n", + "#9111: 97.59%\n", + "#9112: 97.59%\n", + "#9113: 97.59%\n", + "#9114: 97.59%\n", + "#9115: 97.59%\n", + "#9116: 97.59%\n", + "#9117: 97.59%\n", + "#9118: 97.59%\n", + "#9119: 97.59%\n", + 
"#9120: 97.59%\n", + "#9121: 97.59%\n", + "#9122: 97.59%\n", + "#9123: 97.59%\n", + "#9124: 97.59%\n", + "#9125: 97.59%\n", + "#9126: 97.59%\n", + "#9127: 97.59%\n", + "#9128: 97.59%\n", + "#9129: 97.59%\n", + "#9130: 97.59%\n", + "#9131: 97.59%\n", + "#9132: 97.59%\n", + "#9133: 97.59%\n", + "#9134: 97.59%\n", + "#9135: 97.59%\n", + "#9136: 97.59%\n", + "#9137: 97.59%\n", + "#9138: 97.59%\n", + "#9139: 97.59%\n", + "#9140: 97.59%\n", + "#9141: 97.59%\n", + "#9142: 97.59%\n", + "#9143: 97.59%\n", + "#9144: 97.59%\n", + "#9145: 97.59%\n", + "#9146: 97.59%\n", + "#9147: 97.60%\n", + "#9148: 97.60%\n", + "#9149: 97.60%\n", + "#9150: 97.60%\n", + "#9151: 97.60%\n", + "#9152: 97.60%\n", + "#9153: 97.60%\n", + "#9154: 97.60%\n", + "#9155: 97.60%\n", + "#9156: 97.60%\n", + "#9157: 97.60%\n", + "#9158: 97.60%\n", + "#9159: 97.60%\n", + "#9160: 97.60%\n", + "#9161: 97.60%\n", + "#9162: 97.60%\n", + "#9163: 97.60%\n", + "#9164: 97.60%\n", + "#9165: 97.60%\n", + "#9166: 97.60%\n", + "#9167: 97.60%\n", + "#9168: 97.60%\n", + "#9169: 97.60%\n", + "#9170: 97.60%\n", + "#9171: 97.60%\n", + "#9172: 97.60%\n", + "#9173: 97.60%\n", + "#9174: 97.60%\n", + "#9175: 97.60%\n", + "#9176: 97.60%\n", + "#9177: 97.60%\n", + "#9178: 97.60%\n", + "#9179: 97.60%\n", + "#9180: 97.60%\n", + "#9181: 97.60%\n", + "#9182: 97.60%\n", + "#9183: 97.60%\n", + "#9184: 97.60%\n", + "#9185: 97.61%\n", + "#9186: 97.61%\n", + "#9187: 97.61%\n", + "#9188: 97.61%\n", + "#9189: 97.61%\n", + "#9190: 97.61%\n", + "#9191: 97.61%\n", + "#9192: 97.61%\n", + "#9193: 97.61%\n", + "#9194: 97.61%\n", + "#9195: 97.61%\n", + "#9196: 97.61%\n", + "#9197: 97.61%\n", + "#9198: 97.61%\n", + "#9199: 97.61%\n", + "#9200: 97.61%\n", + "#9201: 97.61%\n", + "#9202: 97.60%\n", + "#9203: 97.60%\n", + "#9204: 97.60%\n", + "#9205: 97.60%\n", + "#9206: 97.60%\n", + "#9207: 97.60%\n", + "#9208: 97.60%\n", + "#9209: 97.60%\n", + "#9210: 97.60%\n", + "#9211: 97.60%\n", + "#9212: 97.60%\n", + "#9213: 97.60%\n", + "#9214: 97.60%\n", + 
"#9215: 97.60%\n", + "#9216: 97.60%\n", + "#9217: 97.60%\n", + "#9218: 97.60%\n", + "#9219: 97.60%\n", + "#9220: 97.60%\n", + "#9221: 97.60%\n", + "#9222: 97.60%\n", + "#9223: 97.60%\n", + "#9224: 97.60%\n", + "#9225: 97.60%\n", + "#9226: 97.60%\n", + "#9227: 97.61%\n", + "#9228: 97.61%\n", + "#9229: 97.61%\n", + "#9230: 97.61%\n", + "#9231: 97.61%\n", + "#9232: 97.61%\n", + "#9233: 97.61%\n", + "#9234: 97.61%\n", + "#9235: 97.61%\n", + "#9236: 97.61%\n", + "#9237: 97.61%\n", + "#9238: 97.61%\n", + "#9239: 97.61%\n", + "#9240: 97.61%\n", + "#9241: 97.61%\n", + "#9242: 97.61%\n", + "#9243: 97.61%\n", + "#9244: 97.61%\n", + "#9245: 97.61%\n", + "#9246: 97.61%\n", + "#9247: 97.61%\n", + "#9248: 97.61%\n", + "#9249: 97.61%\n", + "#9250: 97.61%\n", + "#9251: 97.61%\n", + "#9252: 97.61%\n", + "#9253: 97.61%\n", + "#9254: 97.61%\n", + "#9255: 97.61%\n", + "#9256: 97.61%\n", + "#9257: 97.61%\n", + "#9258: 97.61%\n", + "#9259: 97.61%\n", + "#9260: 97.61%\n", + "#9261: 97.61%\n", + "#9262: 97.61%\n", + "#9263: 97.61%\n", + "#9264: 97.61%\n", + "#9265: 97.61%\n", + "#9266: 97.62%\n", + "#9267: 97.62%\n", + "#9268: 97.62%\n", + "#9269: 97.62%\n", + "#9270: 97.62%\n", + "#9271: 97.62%\n", + "#9272: 97.62%\n", + "#9273: 97.62%\n", + "#9274: 97.62%\n", + "#9275: 97.62%\n", + "#9276: 97.62%\n", + "#9277: 97.62%\n", + "#9278: 97.62%\n", + "#9279: 97.62%\n", + "#9280: 97.62%\n", + "#9281: 97.62%\n", + "#9282: 97.62%\n", + "#9283: 97.62%\n", + "#9284: 97.62%\n", + "#9285: 97.62%\n", + "#9286: 97.62%\n", + "#9287: 97.62%\n", + "#9288: 97.62%\n", + "#9289: 97.62%\n", + "#9290: 97.62%\n", + "#9291: 97.62%\n", + "#9292: 97.62%\n", + "#9293: 97.62%\n", + "#9294: 97.62%\n", + "#9295: 97.62%\n", + "#9296: 97.62%\n", + "#9297: 97.62%\n", + "#9298: 97.62%\n", + "#9299: 97.62%\n", + "#9300: 97.62%\n", + "#9301: 97.62%\n", + "#9302: 97.62%\n", + "#9303: 97.62%\n", + "#9304: 97.62%\n", + "#9305: 97.63%\n", + "#9306: 97.63%\n", + "#9307: 97.63%\n", + "#9308: 97.63%\n", + "#9309: 97.63%\n", + 
"#9310: 97.63%\n", + "#9311: 97.63%\n", + "#9312: 97.63%\n", + "#9313: 97.63%\n", + "#9314: 97.63%\n", + "#9315: 97.63%\n", + "#9316: 97.63%\n", + "#9317: 97.63%\n", + "#9318: 97.63%\n", + "#9319: 97.63%\n", + "#9320: 97.63%\n", + "#9321: 97.63%\n", + "#9322: 97.63%\n", + "#9323: 97.63%\n", + "#9324: 97.63%\n", + "#9325: 97.63%\n", + "#9326: 97.63%\n", + "#9327: 97.63%\n", + "#9328: 97.63%\n", + "#9329: 97.63%\n", + "#9330: 97.63%\n", + "#9331: 97.63%\n", + "#9332: 97.63%\n", + "#9333: 97.63%\n", + "#9334: 97.63%\n", + "#9335: 97.63%\n", + "#9336: 97.63%\n", + "#9337: 97.63%\n", + "#9338: 97.63%\n", + "#9339: 97.63%\n", + "#9340: 97.63%\n", + "#9341: 97.63%\n", + "#9342: 97.63%\n", + "#9343: 97.63%\n", + "#9344: 97.64%\n", + "#9345: 97.64%\n", + "#9346: 97.64%\n", + "#9347: 97.64%\n", + "#9348: 97.64%\n", + "#9349: 97.64%\n", + "#9350: 97.64%\n", + "#9351: 97.64%\n", + "#9352: 97.64%\n", + "#9353: 97.64%\n", + "#9354: 97.64%\n", + "#9355: 97.64%\n", + "#9356: 97.64%\n", + "#9357: 97.64%\n", + "#9358: 97.64%\n", + "#9359: 97.64%\n", + "#9360: 97.64%\n", + "#9361: 97.64%\n", + "#9362: 97.64%\n", + "#9363: 97.64%\n", + "#9364: 97.64%\n", + "#9365: 97.64%\n", + "#9366: 97.64%\n", + "#9367: 97.64%\n", + "#9368: 97.64%\n", + "#9369: 97.64%\n", + "#9370: 97.64%\n", + "#9371: 97.64%\n", + "#9372: 97.64%\n", + "#9373: 97.64%\n", + "#9374: 97.64%\n", + "#9375: 97.64%\n", + "#9376: 97.64%\n", + "#9377: 97.64%\n", + "#9378: 97.64%\n", + "#9379: 97.64%\n", + "#9380: 97.64%\n", + "#9381: 97.64%\n", + "#9382: 97.64%\n", + "#9383: 97.64%\n", + "#9384: 97.65%\n", + "#9385: 97.65%\n", + "#9386: 97.65%\n", + "#9387: 97.65%\n", + "#9388: 97.65%\n", + "#9389: 97.65%\n", + "#9390: 97.65%\n", + "#9391: 97.65%\n", + "#9392: 97.65%\n", + "#9393: 97.65%\n", + "#9394: 97.65%\n", + "#9395: 97.65%\n", + "#9396: 97.65%\n", + "#9397: 97.65%\n", + "#9398: 97.65%\n", + "#9399: 97.65%\n", + "#9400: 97.65%\n", + "#9401: 97.65%\n", + "#9402: 97.65%\n", + "#9403: 97.65%\n", + "#9404: 97.65%\n", + 
"#9405: 97.65%\n", + "#9406: 97.65%\n", + "#9407: 97.65%\n", + "#9408: 97.65%\n", + "#9409: 97.65%\n", + "#9410: 97.65%\n", + "#9411: 97.65%\n", + "#9412: 97.65%\n", + "#9413: 97.65%\n", + "#9414: 97.65%\n", + "#9415: 97.65%\n", + "#9416: 97.65%\n", + "#9417: 97.65%\n", + "#9418: 97.65%\n", + "#9419: 97.65%\n", + "#9420: 97.65%\n", + "#9421: 97.65%\n", + "#9422: 97.65%\n", + "#9423: 97.65%\n", + "#9424: 97.66%\n", + "#9425: 97.66%\n", + "#9426: 97.66%\n", + "#9427: 97.66%\n", + "#9428: 97.66%\n", + "#9429: 97.66%\n", + "#9430: 97.66%\n", + "#9431: 97.66%\n", + "#9432: 97.66%\n", + "#9433: 97.66%\n", + "#9434: 97.66%\n", + "#9435: 97.66%\n", + "#9436: 97.66%\n", + "#9437: 97.66%\n", + "#9438: 97.66%\n", + "#9439: 97.66%\n", + "#9440: 97.66%\n", + "#9441: 97.66%\n", + "#9442: 97.66%\n", + "#9443: 97.66%\n", + "#9444: 97.66%\n", + "#9445: 97.66%\n", + "#9446: 97.66%\n", + "#9447: 97.66%\n", + "#9448: 97.66%\n", + "#9449: 97.66%\n", + "#9450: 97.66%\n", + "#9451: 97.66%\n", + "#9452: 97.66%\n", + "#9453: 97.66%\n", + "#9454: 97.66%\n", + "#9455: 97.66%\n", + "#9456: 97.66%\n", + "#9457: 97.66%\n", + "#9458: 97.66%\n", + "#9459: 97.66%\n", + "#9460: 97.66%\n", + "#9461: 97.66%\n", + "#9462: 97.66%\n", + "#9463: 97.66%\n", + "#9464: 97.67%\n", + "#9465: 97.67%\n", + "#9466: 97.67%\n", + "#9467: 97.67%\n", + "#9468: 97.67%\n", + "#9469: 97.67%\n", + "#9470: 97.67%\n", + "#9471: 97.67%\n", + "#9472: 97.67%\n", + "#9473: 97.67%\n", + "#9474: 97.67%\n", + "#9475: 97.67%\n", + "#9476: 97.67%\n", + "#9477: 97.67%\n", + "#9478: 97.67%\n", + "#9479: 97.67%\n", + "#9480: 97.67%\n", + "#9481: 97.67%\n", + "#9482: 97.67%\n", + "#9483: 97.67%\n", + "#9484: 97.67%\n", + "#9485: 97.67%\n", + "#9486: 97.67%\n", + "#9487: 97.67%\n", + "#9488: 97.67%\n", + "#9489: 97.67%\n", + "#9490: 97.67%\n", + "#9491: 97.67%\n", + "#9492: 97.67%\n", + "#9493: 97.67%\n", + "#9494: 97.67%\n", + "#9495: 97.67%\n", + "#9496: 97.67%\n", + "#9497: 97.67%\n", + "#9498: 97.67%\n", + "#9499: 97.67%\n", + 
"#9500: 97.67%\n", + "#9501: 97.67%\n", + "#9502: 97.67%\n", + "#9503: 97.67%\n", + "#9504: 97.67%\n", + "#9505: 97.68%\n", + "#9506: 97.68%\n", + "#9507: 97.68%\n", + "#9508: 97.68%\n", + "#9509: 97.68%\n", + "#9510: 97.68%\n", + "#9511: 97.68%\n", + "#9512: 97.68%\n", + "#9513: 97.68%\n", + "#9514: 97.68%\n", + "#9515: 97.68%\n", + "#9516: 97.68%\n", + "#9517: 97.68%\n", + "#9518: 97.68%\n", + "#9519: 97.68%\n", + "#9520: 97.68%\n", + "#9521: 97.68%\n", + "#9522: 97.68%\n", + "#9523: 97.68%\n", + "#9524: 97.68%\n", + "#9525: 97.68%\n", + "#9526: 97.68%\n", + "#9527: 97.68%\n", + "#9528: 97.68%\n", + "#9529: 97.68%\n", + "#9530: 97.67%\n", + "#9531: 97.67%\n", + "#9532: 97.67%\n", + "#9533: 97.67%\n", + "#9534: 97.67%\n", + "#9535: 97.67%\n", + "#9536: 97.67%\n", + "#9537: 97.67%\n", + "#9538: 97.67%\n", + "#9539: 97.67%\n", + "#9540: 97.66%\n", + "#9541: 97.66%\n", + "#9542: 97.66%\n", + "#9543: 97.66%\n", + "#9544: 97.66%\n", + "#9545: 97.66%\n", + "#9546: 97.66%\n", + "#9547: 97.66%\n", + "#9548: 97.66%\n", + "#9549: 97.66%\n", + "#9550: 97.67%\n", + "#9551: 97.67%\n", + "#9552: 97.67%\n", + "#9553: 97.67%\n", + "#9554: 97.67%\n", + "#9555: 97.67%\n", + "#9556: 97.67%\n", + "#9557: 97.67%\n", + "#9558: 97.67%\n", + "#9559: 97.67%\n", + "#9560: 97.67%\n", + "#9561: 97.67%\n", + "#9562: 97.67%\n", + "#9563: 97.67%\n", + "#9564: 97.67%\n", + "#9565: 97.67%\n", + "#9566: 97.67%\n", + "#9567: 97.67%\n", + "#9568: 97.67%\n", + "#9569: 97.67%\n", + "#9570: 97.67%\n", + "#9571: 97.67%\n", + "#9572: 97.67%\n", + "#9573: 97.67%\n", + "#9574: 97.67%\n", + "#9575: 97.67%\n", + "#9576: 97.67%\n", + "#9577: 97.67%\n", + "#9578: 97.67%\n", + "#9579: 97.67%\n", + "#9580: 97.67%\n", + "#9581: 97.67%\n", + "#9582: 97.67%\n", + "#9583: 97.67%\n", + "#9584: 97.67%\n", + "#9585: 97.67%\n", + "#9586: 97.67%\n", + "#9587: 97.66%\n", + "#9588: 97.66%\n", + "#9589: 97.66%\n", + "#9590: 97.66%\n", + "#9591: 97.66%\n", + "#9592: 97.66%\n", + "#9593: 97.67%\n", + "#9594: 97.67%\n", + 
"#9595: 97.67%\n", + "#9596: 97.67%\n", + "#9597: 97.67%\n", + "#9598: 97.67%\n", + "#9599: 97.67%\n", + "#9600: 97.67%\n", + "#9601: 97.67%\n", + "#9602: 97.67%\n", + "#9603: 97.67%\n", + "#9604: 97.67%\n", + "#9605: 97.67%\n", + "#9606: 97.67%\n", + "#9607: 97.67%\n", + "#9608: 97.67%\n", + "#9609: 97.67%\n", + "#9610: 97.67%\n", + "#9611: 97.67%\n", + "#9612: 97.67%\n", + "#9613: 97.67%\n", + "#9614: 97.66%\n", + "#9615: 97.66%\n", + "#9616: 97.66%\n", + "#9617: 97.66%\n", + "#9618: 97.66%\n", + "#9619: 97.66%\n", + "#9620: 97.66%\n", + "#9621: 97.66%\n", + "#9622: 97.66%\n", + "#9623: 97.66%\n", + "#9624: 97.66%\n", + "#9625: 97.66%\n", + "#9626: 97.66%\n", + "#9627: 97.66%\n", + "#9628: 97.66%\n", + "#9629: 97.66%\n", + "#9630: 97.66%\n", + "#9631: 97.66%\n", + "#9632: 97.66%\n", + "#9633: 97.66%\n", + "#9634: 97.66%\n", + "#9635: 97.67%\n", + "#9636: 97.67%\n", + "#9637: 97.67%\n", + "#9638: 97.67%\n", + "#9639: 97.67%\n", + "#9640: 97.67%\n", + "#9641: 97.67%\n", + "#9642: 97.66%\n", + "#9643: 97.66%\n", + "#9644: 97.66%\n", + "#9645: 97.66%\n", + "#9646: 97.66%\n", + "#9647: 97.66%\n", + "#9648: 97.66%\n", + "#9649: 97.66%\n", + "#9650: 97.66%\n", + "#9651: 97.66%\n", + "#9652: 97.66%\n", + "#9653: 97.66%\n", + "#9654: 97.66%\n", + "#9655: 97.66%\n", + "#9656: 97.66%\n", + "#9657: 97.66%\n", + "#9658: 97.66%\n", + "#9659: 97.66%\n", + "#9660: 97.66%\n", + "#9661: 97.66%\n", + "#9662: 97.66%\n", + "#9663: 97.66%\n", + "#9664: 97.66%\n", + "#9665: 97.66%\n", + "#9666: 97.66%\n", + "#9667: 97.66%\n", + "#9668: 97.66%\n", + "#9669: 97.66%\n", + "#9670: 97.66%\n", + "#9671: 97.66%\n", + "#9672: 97.66%\n", + "#9673: 97.66%\n", + "#9674: 97.66%\n", + "#9675: 97.66%\n", + "#9676: 97.66%\n", + "#9677: 97.66%\n", + "#9678: 97.67%\n", + "#9679: 97.67%\n", + "#9680: 97.67%\n", + "#9681: 97.67%\n", + "#9682: 97.67%\n", + "#9683: 97.67%\n", + "#9684: 97.67%\n", + "#9685: 97.67%\n", + "#9686: 97.67%\n", + "#9687: 97.67%\n", + "#9688: 97.67%\n", + "#9689: 97.67%\n", + 
"#9690: 97.67%\n", + "#9691: 97.67%\n", + "#9692: 97.67%\n", + "#9693: 97.67%\n", + "#9694: 97.67%\n", + "#9695: 97.67%\n", + "#9696: 97.67%\n", + "#9697: 97.67%\n", + "#9698: 97.66%\n", + "#9699: 97.66%\n", + "#9700: 97.66%\n", + "#9701: 97.66%\n", + "#9702: 97.66%\n", + "#9703: 97.66%\n", + "#9704: 97.66%\n", + "#9705: 97.66%\n", + "#9706: 97.66%\n", + "#9707: 97.66%\n", + "#9708: 97.66%\n", + "#9709: 97.66%\n", + "#9710: 97.66%\n", + "#9711: 97.66%\n", + "#9712: 97.66%\n", + "#9713: 97.66%\n", + "#9714: 97.66%\n", + "#9715: 97.66%\n", + "#9716: 97.66%\n", + "#9717: 97.66%\n", + "#9718: 97.66%\n", + "#9719: 97.66%\n", + "#9720: 97.66%\n", + "#9721: 97.67%\n", + "#9722: 97.67%\n", + "#9723: 97.67%\n", + "#9724: 97.67%\n", + "#9725: 97.67%\n", + "#9726: 97.67%\n", + "#9727: 97.67%\n", + "#9728: 97.67%\n", + "#9729: 97.66%\n", + "#9730: 97.66%\n", + "#9731: 97.66%\n", + "#9732: 97.66%\n", + "#9733: 97.66%\n", + "#9734: 97.66%\n", + "#9735: 97.66%\n", + "#9736: 97.66%\n", + "#9737: 97.66%\n", + "#9738: 97.66%\n", + "#9739: 97.66%\n", + "#9740: 97.66%\n", + "#9741: 97.66%\n", + "#9742: 97.66%\n", + "#9743: 97.66%\n", + "#9744: 97.66%\n", + "#9745: 97.65%\n", + "#9746: 97.65%\n", + "#9747: 97.65%\n", + "#9748: 97.65%\n", + "#9749: 97.65%\n", + "#9750: 97.65%\n", + "#9751: 97.65%\n", + "#9752: 97.65%\n", + "#9753: 97.65%\n", + "#9754: 97.65%\n", + "#9755: 97.65%\n", + "#9756: 97.65%\n", + "#9757: 97.65%\n", + "#9758: 97.65%\n", + "#9759: 97.65%\n", + "#9760: 97.65%\n", + "#9761: 97.65%\n", + "#9762: 97.64%\n", + "#9763: 97.64%\n", + "#9764: 97.64%\n", + "#9765: 97.64%\n", + "#9766: 97.65%\n", + "#9767: 97.65%\n", + "#9768: 97.64%\n", + "#9769: 97.64%\n", + "#9770: 97.63%\n", + "#9771: 97.63%\n", + "#9772: 97.63%\n", + "#9773: 97.63%\n", + "#9774: 97.63%\n", + "#9775: 97.63%\n", + "#9776: 97.63%\n", + "#9777: 97.63%\n", + "#9778: 97.63%\n", + "#9779: 97.63%\n", + "#9780: 97.63%\n", + "#9781: 97.63%\n", + "#9782: 97.62%\n", + "#9783: 97.62%\n", + "#9784: 97.62%\n", + 
"#9785: 97.62%\n", + "#9786: 97.62%\n", + "#9787: 97.62%\n", + "#9788: 97.62%\n", + "#9789: 97.62%\n", + "#9790: 97.62%\n", + "#9791: 97.62%\n", + "#9792: 97.61%\n", + "#9793: 97.61%\n", + "#9794: 97.61%\n", + "#9795: 97.61%\n", + "#9796: 97.61%\n", + "#9797: 97.61%\n", + "#9798: 97.61%\n", + "#9799: 97.61%\n", + "#9800: 97.61%\n", + "#9801: 97.61%\n", + "#9802: 97.61%\n", + "#9803: 97.61%\n", + "#9804: 97.61%\n", + "#9805: 97.61%\n", + "#9806: 97.61%\n", + "#9807: 97.61%\n", + "#9808: 97.61%\n", + "#9809: 97.61%\n", + "#9810: 97.61%\n", + "#9811: 97.60%\n", + "#9812: 97.61%\n", + "#9813: 97.61%\n", + "#9814: 97.61%\n", + "#9815: 97.61%\n", + "#9816: 97.61%\n", + "#9817: 97.61%\n", + "#9818: 97.61%\n", + "#9819: 97.61%\n", + "#9820: 97.61%\n", + "#9821: 97.61%\n", + "#9822: 97.61%\n", + "#9823: 97.61%\n", + "#9824: 97.61%\n", + "#9825: 97.61%\n", + "#9826: 97.61%\n", + "#9827: 97.61%\n", + "#9828: 97.61%\n", + "#9829: 97.61%\n", + "#9830: 97.61%\n", + "#9831: 97.61%\n", + "#9832: 97.61%\n", + "#9833: 97.61%\n", + "#9834: 97.61%\n", + "#9835: 97.61%\n", + "#9836: 97.61%\n", + "#9837: 97.61%\n", + "#9838: 97.61%\n", + "#9839: 97.60%\n", + "#9840: 97.60%\n", + "#9841: 97.60%\n", + "#9842: 97.60%\n", + "#9843: 97.60%\n", + "#9844: 97.60%\n", + "#9845: 97.60%\n", + "#9846: 97.60%\n", + "#9847: 97.60%\n", + "#9848: 97.60%\n", + "#9849: 97.60%\n", + "#9850: 97.60%\n", + "#9851: 97.60%\n", + "#9852: 97.60%\n", + "#9853: 97.61%\n", + "#9854: 97.61%\n", + "#9855: 97.61%\n", + "#9856: 97.60%\n", + "#9857: 97.60%\n", + "#9858: 97.60%\n", + "#9859: 97.60%\n", + "#9860: 97.60%\n", + "#9861: 97.60%\n", + "#9862: 97.60%\n", + "#9863: 97.60%\n", + "#9864: 97.60%\n", + "#9865: 97.60%\n", + "#9866: 97.60%\n", + "#9867: 97.59%\n", + "#9868: 97.59%\n", + "#9869: 97.59%\n", + "#9870: 97.59%\n", + "#9871: 97.59%\n", + "#9872: 97.59%\n", + "#9873: 97.59%\n", + "#9874: 97.59%\n", + "#9875: 97.59%\n", + "#9876: 97.59%\n", + "#9877: 97.59%\n", + "#9878: 97.59%\n", + "#9879: 97.59%\n", + 
"#9880: 97.59%\n", + "#9881: 97.59%\n", + "#9882: 97.59%\n", + "#9883: 97.59%\n", + "#9884: 97.59%\n", + "#9885: 97.59%\n", + "#9886: 97.59%\n", + "#9887: 97.59%\n", + "#9888: 97.59%\n", + "#9889: 97.59%\n", + "#9890: 97.59%\n", + "#9891: 97.59%\n", + "#9892: 97.58%\n", + "#9893: 97.57%\n", + "#9894: 97.57%\n", + "#9895: 97.57%\n", + "#9896: 97.58%\n", + "#9897: 97.58%\n", + "#9898: 97.58%\n", + "#9899: 97.58%\n", + "#9900: 97.58%\n", + "#9901: 97.58%\n", + "#9902: 97.58%\n", + "#9903: 97.58%\n", + "#9904: 97.58%\n", + "#9905: 97.57%\n", + "#9906: 97.57%\n", + "#9907: 97.57%\n", + "#9908: 97.57%\n", + "#9909: 97.57%\n", + "#9910: 97.57%\n", + "#9911: 97.57%\n", + "#9912: 97.57%\n", + "#9913: 97.57%\n", + "#9914: 97.57%\n", + "#9915: 97.57%\n", + "#9916: 97.57%\n", + "#9917: 97.57%\n", + "#9918: 97.57%\n", + "#9919: 97.57%\n", + "#9920: 97.57%\n", + "#9921: 97.57%\n", + "#9922: 97.57%\n", + "#9923: 97.57%\n", + "#9924: 97.57%\n", + "#9925: 97.57%\n", + "#9926: 97.57%\n", + "#9927: 97.57%\n", + "#9928: 97.57%\n", + "#9929: 97.57%\n", + "#9930: 97.57%\n", + "#9931: 97.57%\n", + "#9932: 97.57%\n", + "#9933: 97.57%\n", + "#9934: 97.57%\n", + "#9935: 97.57%\n", + "#9936: 97.57%\n", + "#9937: 97.57%\n", + "#9938: 97.58%\n", + "#9939: 97.58%\n", + "#9940: 97.58%\n", + "#9941: 97.58%\n", + "#9942: 97.57%\n", + "#9943: 97.57%\n", + "#9944: 97.57%\n", + "#9945: 97.57%\n", + "#9946: 97.57%\n", + "#9947: 97.57%\n", + "#9948: 97.57%\n", + "#9949: 97.57%\n", + "#9950: 97.57%\n", + "#9951: 97.57%\n", + "#9952: 97.57%\n", + "#9953: 97.57%\n", + "#9954: 97.57%\n", + "#9955: 97.57%\n", + "#9956: 97.57%\n", + "#9957: 97.57%\n", + "#9958: 97.57%\n", + "#9959: 97.57%\n", + "#9960: 97.57%\n", + "#9961: 97.57%\n", + "#9962: 97.57%\n", + "#9963: 97.57%\n", + "#9964: 97.57%\n", + "#9965: 97.57%\n", + "#9966: 97.57%\n", + "#9967: 97.57%\n", + "#9968: 97.57%\n", + "#9969: 97.57%\n", + "#9970: 97.57%\n", + "#9971: 97.57%\n", + "#9972: 97.57%\n", + "#9973: 97.57%\n", + "#9974: 97.57%\n", + 
"#9975: 97.57%\n", + "#9976: 97.57%\n", + "#9977: 97.57%\n", + "#9978: 97.57%\n", + "#9979: 97.58%\n", + "#9980: 97.58%\n", + "#9981: 97.58%\n", + "#9982: 97.57%\n", + "#9983: 97.57%\n", + "#9984: 97.57%\n", + "#9985: 97.57%\n", + "#9986: 97.57%\n", + "#9987: 97.57%\n", + "#9988: 97.57%\n", + "#9989: 97.57%\n", + "#9990: 97.57%\n", + "#9991: 97.57%\n", + "#9992: 97.57%\n", + "#9993: 97.57%\n", + "#9994: 97.57%\n", + "#9995: 97.57%\n", + "#9996: 97.57%\n", + "#9997: 97.57%\n", + "#9998: 97.57%\n", + "#9999: 97.57%\n" + ] + } + ], + "source": [ + "correct = 0\n", + "for i in range(10000):\n", + " with open(f\"X_test/{i}.json\", \"r\") as f:\n", + " input = json.load(f)\n", + " out, _ = inference(input, circuit)\n", + " correct += 1 if out[0] == y_test[i] else 0\n", + " print(f\"#{i}: {correct / (i + 1) * 100:.2f}%\")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "keras2circom", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.9.13" + } + }, + "nbformat": 4, + "nbformat_minor": 2 +} diff --git a/zkstats/onnx2circom/keras2circom/test/circuit.js b/zkstats/onnx2circom/keras2circom/test/circuit.js new file mode 100644 index 0000000..388609d --- /dev/null +++ b/zkstats/onnx2circom/keras2circom/test/circuit.js @@ -0,0 +1,50 @@ +const chai = require('chai'); +const fs = require('fs'); + +const wasm_tester = require('circom_tester').wasm; + +const F1Field = require('ffjavascript').F1Field; +const Scalar = require('ffjavascript').Scalar; +exports.p = Scalar.fromString('21888242871839275222246405745257275088548364400416034343698204186575808495617'); +const Fr = new F1Field(exports.p); + +const assert = 
chai.assert; + +const exec = require('await-exec'); + +const input = require('../test/X_test/0.json'); + +describe('keras2circom test', function () { + this.timeout(100000000); + + describe('softmax output', async () => { + it('softmax output test', async () => { + await exec('python main.py models/model.h5 && python output/circuit.py output/circuit.json test/X_test/0.json'); + + const model = JSON.parse(fs.readFileSync('./output/circuit.json')); + const output = JSON.parse(fs.readFileSync('./output/output.json')); + + const INPUT = {...model, ...input, ...output}; + + const circuit = await wasm_tester('./output/circuit.circom'); + const witness = await circuit.calculateWitness(INPUT, true); + assert(Fr.eq(Fr.e(witness[0]),Fr.e(1))); + assert(Fr.eq(Fr.e(witness[1]),Fr.e(7))); + }); + }); + + describe('raw output', async () => { + it('raw output test', async () => { + await exec('python main.py models/model.h5 --raw && python output/circuit.py output/circuit.json test/X_test/0.json'); + + const model = JSON.parse(fs.readFileSync('./output/circuit.json')); + const output = JSON.parse(fs.readFileSync('./output/output.json')); + + const INPUT = {...model, ...input, ...output}; + + const circuit = await wasm_tester('./output/circuit.circom'); + const witness = await circuit.calculateWitness(INPUT, true); + assert(Fr.eq(Fr.e(witness[0]),Fr.e(1))); + }); + }); +}); \ No newline at end of file diff --git a/zkstats/onnx2circom/keras2circom/test/load_input.ipynb b/zkstats/onnx2circom/keras2circom/test/load_input.ipynb new file mode 100644 index 0000000..b6a9790 --- /dev/null +++ b/zkstats/onnx2circom/keras2circom/test/load_input.ipynb @@ -0,0 +1,80 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": 1, + "metadata": {}, + "outputs": [], + "source": [ + "from tensorflow.keras.datasets import mnist\n", + "import json\n", + "import numpy as np" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": {}, + "outputs": [], + "source": [ + "# load 
MNIST dataset\n", + "_, (X_test, y_test) = mnist.load_data()" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": {}, + "outputs": [], + "source": [ + "#normalizing\n", + "X_test = X_test.astype('float32')\n", + "X_test /= 255.0" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": {}, + "outputs": [], + "source": [ + "for i in range(len(X_test)):\n", + " X = [str(int(x * float(10**18))) for x in X_test[i].flatten().tolist()]\n", + " X = np.array(X).reshape(28, 28, 1).tolist()\n", + " with open(f'X_test/{i}.json', 'w') as f:\n", + " json.dump({\"in\": X}, f)" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": {}, + "outputs": [], + "source": [ + "with open('y_test.json', 'w') as f:\n", + " json.dump(y_test.tolist(), f)" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "keras2circom", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.9.13" + } + }, + "nbformat": 4, + "nbformat_minor": 2 +} From ab856da044839a7946e9b6350599beb10509b826 Mon Sep 17 00:00:00 2001 From: JernKunpittaya <61564542+JernKunpittaya@users.noreply.github.com> Date: Thu, 25 Apr 2024 00:56:09 +0700 Subject: [PATCH 4/5] finish del_sub --- tests/onnx2circom/test_onnx_to_circom.py | 6 +- zkstats/onnx2circom/README.md | 77 ++- zkstats/onnx2circom/keras2circom/hello.txt | 1 - zkstats/onnx2circom/onnx2keras/.gitignore | 8 + zkstats/onnx2circom/onnx2keras/LICENSE | 201 ++++++ zkstats/onnx2circom/onnx2keras/__init__.py | 0 zkstats/onnx2circom/onnx2keras/converter.py | 100 +++ .../onnx2circom/onnx2keras/example/README.md | 28 + .../onnx2keras/example/example.keras | Bin 0 -> 8237 bytes .../onnx2keras/example/gen_onnx.ipynb | 243 +++++++ .../onnx2keras/example/read_keras.ipynb | 227 
+++++++ .../onnx2circom/onnx2keras/layers/__init__.py | 6 + .../onnx2keras/layers/activations_layers.py | 179 ++++++ .../onnx2keras/layers/common_layers.py | 323 ++++++++++ .../onnx2keras/layers/conv_layers.py | 308 +++++++++ .../onnx2keras/layers/deformation_layers.py | 306 +++++++++ .../onnx2keras/layers/dimension_utils.py | 40 ++ .../onnx2keras/layers/mathematics_layers.py | 593 ++++++++++++++++++ zkstats/onnx2circom/onnx2keras/readme.md | 164 +++++ .../onnx2circom/onnx2keras/requirements.txt | 6 + .../onnx2circom/onnx2keras/torchvison_test.py | 39 ++ .../onnx2circom/onnx2keras/utils/__init__.py | 5 + .../onnx2circom/onnx2keras/utils/builder.py | 168 +++++ .../onnx2keras/utils/dataloader.py | 72 +++ .../onnx2keras/utils/onnx_loader.py | 77 +++ .../onnx2keras/utils/op_registry.py | 40 ++ .../onnx2keras/utils/output_check.py | 92 +++ 27 files changed, 3286 insertions(+), 23 deletions(-) delete mode 100644 zkstats/onnx2circom/keras2circom/hello.txt create mode 100644 zkstats/onnx2circom/onnx2keras/.gitignore create mode 100644 zkstats/onnx2circom/onnx2keras/LICENSE create mode 100644 zkstats/onnx2circom/onnx2keras/__init__.py create mode 100644 zkstats/onnx2circom/onnx2keras/converter.py create mode 100644 zkstats/onnx2circom/onnx2keras/example/README.md create mode 100644 zkstats/onnx2circom/onnx2keras/example/example.keras create mode 100644 zkstats/onnx2circom/onnx2keras/example/gen_onnx.ipynb create mode 100644 zkstats/onnx2circom/onnx2keras/example/read_keras.ipynb create mode 100644 zkstats/onnx2circom/onnx2keras/layers/__init__.py create mode 100644 zkstats/onnx2circom/onnx2keras/layers/activations_layers.py create mode 100644 zkstats/onnx2circom/onnx2keras/layers/common_layers.py create mode 100644 zkstats/onnx2circom/onnx2keras/layers/conv_layers.py create mode 100644 zkstats/onnx2circom/onnx2keras/layers/deformation_layers.py create mode 100644 zkstats/onnx2circom/onnx2keras/layers/dimension_utils.py create mode 100644 
zkstats/onnx2circom/onnx2keras/layers/mathematics_layers.py create mode 100644 zkstats/onnx2circom/onnx2keras/readme.md create mode 100644 zkstats/onnx2circom/onnx2keras/requirements.txt create mode 100644 zkstats/onnx2circom/onnx2keras/torchvison_test.py create mode 100644 zkstats/onnx2circom/onnx2keras/utils/__init__.py create mode 100644 zkstats/onnx2circom/onnx2keras/utils/builder.py create mode 100644 zkstats/onnx2circom/onnx2keras/utils/dataloader.py create mode 100644 zkstats/onnx2circom/onnx2keras/utils/onnx_loader.py create mode 100644 zkstats/onnx2circom/onnx2keras/utils/op_registry.py create mode 100644 zkstats/onnx2circom/onnx2keras/utils/output_check.py diff --git a/tests/onnx2circom/test_onnx_to_circom.py b/tests/onnx2circom/test_onnx_to_circom.py index abfedbd..5afbec5 100644 --- a/tests/onnx2circom/test_onnx_to_circom.py +++ b/tests/onnx2circom/test_onnx_to_circom.py @@ -14,8 +14,8 @@ # NOTE: Change the path to your own path -CIRCOM_2_ARITHC_PROJECT_ROOT = Path('/path/to/circom-2-arithc-project-root') -MP_SPDZ_PROJECT_ROOT = Path('/path/to/mp-spdz-project-root') +CIRCOM_2_ARITHC_PROJECT_ROOT = Path('/Users/jernkun/circom-2-arithc') +MP_SPDZ_PROJECT_ROOT = Path('/Users/jernkun/MP-SPDZ') def test_onnx_to_circom(tmp_path): @@ -97,6 +97,7 @@ def compile_and_check(model_type: Type[nn.Module], data: torch.Tensor, tmp_path: # for convenience (which input is from which party). Now just put every input to party 0. 
# Assume the input data is a 1-d tensor user_config_path = MP_SPDZ_PROJECT_ROOT / f"Configs/{model_name}.json" + user_config_path.parent.mkdir(parents=True, exist_ok=True) with open(user_config_path, 'w') as f: json.dump({"inputs_from": { "0": input_names, @@ -107,6 +108,7 @@ def compile_and_check(model_type: Type[nn.Module], data: torch.Tensor, tmp_path: # Prepare data for party 0 data_list = data.reshape(-1) input_0_path = MP_SPDZ_PROJECT_ROOT / 'Player-Data/Input-P0-0' + input_0_path.parent.mkdir(parents=True, exist_ok=True) with open(input_0_path, 'w') as f: # TODO: change int to float f.write(' '.join([str(int(x)) for x in data_list.tolist()])) diff --git a/zkstats/onnx2circom/README.md b/zkstats/onnx2circom/README.md index 2c0e516..318dc15 100644 --- a/zkstats/onnx2circom/README.md +++ b/zkstats/onnx2circom/README.md @@ -1,37 +1,74 @@ -# onnx2circom +# Steps to run -## Submodules -- onnx2keras -- keras2circom -- circomlib-ml +## Test onnx2keras + +Run the test: + +```bash +pytest -s tests/onnx2circom/test_onnx_to_keras.py +``` + +## Test onnx2circom + +### circom-2-arithc + +Clone circom-2-arithc. Use a fork for now. Will change to the official repo soon. + +```bash +cd .. +git clone https://github.com/mhchia/circom-2-arithc.git +cd circom-2-arithc +git checkout mpcstats +cp .env.example .env +circom_2_arithc_project_root=$(pwd) +``` + +Build the compiler: -## Clone and sync submodules ```bash -git submodule init +cargo build --release ``` +### MP-SPDZ + +Clone the repo + ```bash -git submodule update +cd .. 
+git clone https://github.com/data61/MP-SPDZ +cd MP-SPDZ +git remote add kevin_mpc https://github.com/mhchia/MP-SPDZ.git +git fetch kevin_mpc +git checkout arith-executor +mp_spdz_project_root=$(pwd) ``` -## Run onnx2circom +Build the MPC vm for `semi` protocol + ```bash -$ python3 main.py model.onnx --circom_path model.circom +make -j8 semi-party.x +# Make sure `semi-party.x` exists +ls semi-party.x ``` -See circom code in `model.circom` + +### Run the test + +Modify the configs in `tests/onnx2circom/test_onnx_to_circom.py` to point to the correct paths. Just fill in the paths to the two projects you just cloned. + ```bash -$ ls model.circom -model.circom +# NOTE: Change the path to your own path +CIRCOM_2_ARITHC_PROJECT_ROOT = Path('/path/to/circom-2-arithc-project-root') +MP_SPDZ_PROJECT_ROOT = Path('/path/to/mp-spdz-project-root') ``` -## Import +Go back to the zkstats library project root -```python -from zkstats.onnx2circom import onnx_to_circom +```bash +cd ../zk-stats-lib +``` -... 
+Run the test: -model_path = "model.onnx" -circom_path = "model.circom" -onnx_to_circom(model_path, circom_path) +```bash +pytest -s tests/onnx2circom/test_onnx_to_circom.py ``` diff --git a/zkstats/onnx2circom/keras2circom/hello.txt b/zkstats/onnx2circom/keras2circom/hello.txt deleted file mode 100644 index 88caeb4..0000000 --- a/zkstats/onnx2circom/keras2circom/hello.txt +++ /dev/null @@ -1 +0,0 @@ -hellosd \ No newline at end of file diff --git a/zkstats/onnx2circom/onnx2keras/.gitignore b/zkstats/onnx2circom/onnx2keras/.gitignore new file mode 100644 index 0000000..cc2292c --- /dev/null +++ b/zkstats/onnx2circom/onnx2keras/.gitignore @@ -0,0 +1,8 @@ +*.onnx +*.tflite +__pycache__/ +.ipynb_checkpoints/ +test.py +gen_model.py +models/ +unit_test/ \ No newline at end of file diff --git a/zkstats/onnx2circom/onnx2keras/LICENSE b/zkstats/onnx2circom/onnx2keras/LICENSE new file mode 100644 index 0000000..343a0ab --- /dev/null +++ b/zkstats/onnx2circom/onnx2keras/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. 
+ + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright [MPolaris] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/zkstats/onnx2circom/onnx2keras/__init__.py b/zkstats/onnx2circom/onnx2keras/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/zkstats/onnx2circom/onnx2keras/converter.py b/zkstats/onnx2circom/onnx2keras/converter.py new file mode 100644 index 0000000..31b8f21 --- /dev/null +++ b/zkstats/onnx2circom/onnx2keras/converter.py @@ -0,0 +1,100 @@ +from pathlib import Path +import os +import sys +import logging +import argparse + +# add .. 
to the PYTHONPATH to make the import `onnx2circom` work +file_path = Path(__file__).resolve() +sys.path.append(str(file_path.parent.parent)) +print('pathhh: ', str(file_path.parent.parent)) + +from onnx2keras.utils import load_onnx_modelproto, keras_builder, tflite_builder, get_elements_error +__version__ = __VERSION__ = "1.2.0" + +logging.basicConfig(level=logging.INFO) +LOG = logging.getLogger("converter running:") + +def onnx_converter(onnx_model_path:str, output_path:str=None, + input_node_names:list=None, output_node_names:list=None, + need_simplify:bool=True, target_formats:list = ['keras', 'tflite'], + native_groupconv:bool=False, + weight_quant:bool=False, int8_model:bool=False, image_root:str=None, + int8_mean:list or float = [123.675, 116.28, 103.53], int8_std:list or float = [58.395, 57.12, 57.375])->float: + if not isinstance(target_formats, list) and 'keras' not in target_formats and 'tflite' not in target_formats: + raise KeyError("'keras' or 'tflite' should in list") + + model_proto = load_onnx_modelproto(onnx_model_path, input_node_names, output_node_names, need_simplify) + + keras_model = keras_builder(model_proto, native_groupconv) + + onnx_path, model_name = os.path.split(onnx_model_path) + if output_path is None: + output_path = onnx_path + output_path = os.path.join(output_path, model_name.split('.')[0]) + + keras_model_path = None + if 'keras' in target_formats: + keras_model_path = output_path + ".keras" + keras_model.save(keras_model_path) + LOG.info(f"keras model saved in {keras_model_path}") + + convert_result = {"keras":keras_model_path, "keras_error":0} + # ignore quantization model + if int8_model: + return convert_result + + error_dict = {} + try: + error_dict = get_elements_error(model_proto, keras_model_path) + keras_error = error_dict.get("keras", None) + if keras_error: + if keras_error > 1e-2: + LOG.error("h5 model elements' max error has reached {:^.4E}, but convert is done, please check {} carefully!".format(keras_error, 
keras_model_path)) + elif keras_error > 1e-4: + LOG.warning("h5 model elements' max error is {:^.4E}, pass, h5 saved in {}".format(keras_error, keras_model_path)) + else: + LOG.info("h5 model elements' max error is {:^.4E}, pass, h5 saved in {}".format(keras_error, keras_model_path)) + except: + LOG.warning("convert is successed, but model running is failed, please check carefully!") + + convert_result["keras_error"] = error_dict.get("keras", None) + convert_result["tflite_error"] = error_dict.get("tflite", None) + return convert_result + +def parse_opt(): + parser = argparse.ArgumentParser() + parser.add_argument('--weights', type=str, required=True, help='onnx model path') + parser.add_argument('--outpath', type=str, default=None, help='tflite model save path') + parser.add_argument('--input-node-names', nargs="+", default=None, help='which inputs is you want, support middle layers, None will using onnx orignal inputs') + parser.add_argument('--output-node-names', nargs="+", default=None, help='which outputs is you want, support middle layers, None will using onnx orignal outputs') + parser.add_argument('--nosimplify', default=False, action='store_true', help='do not simplify model') + parser.add_argument("--native-groupconv", default=False, action='store_true', help='using native method for groupconv, only support for tflite version >= 2.9') + parser.add_argument('--weigthquant', default=False, action='store_true', help='tflite weigth int8 quant') + parser.add_argument('--int8', default=False, action='store_true', help='tflite weigth int8 quant, include input output') + parser.add_argument('--imgroot', type=str, default=None, help='when int8=True, imgroot should give for calculating running_mean and running_norm') + parser.add_argument('--int8mean', type=float, nargs='+', default=[123.675, 116.28, 103.53], help='int8 image preprocesses mean, float or list') + parser.add_argument('--int8std', type=float, nargs='+', default=[58.395, 57.12, 57.375], help='int8 
image preprocesses std, float or list') + parser.add_argument('--formats', nargs='+', default=['keras', 'tflite'], help='available formats are (h5, tflite)') + opt = parser.parse_args() + return opt + +def run(): + opt = parse_opt() + onnx_converter( + onnx_model_path = opt.weights, + need_simplify = not opt.nosimplify, + input_node_names = opt.input_node_names, + output_node_names = opt.output_node_names, + output_path = opt.outpath, + target_formats = opt.formats, + native_groupconv = opt.native_groupconv, + weight_quant=opt.weigthquant, + int8_model=opt.int8, + int8_mean=opt.int8mean, + int8_std=opt.int8std, + image_root=opt.imgroot + ) + +if __name__ == "__main__": + run() \ No newline at end of file diff --git a/zkstats/onnx2circom/onnx2keras/example/README.md b/zkstats/onnx2circom/onnx2keras/example/README.md new file mode 100644 index 0000000..3599099 --- /dev/null +++ b/zkstats/onnx2circom/onnx2keras/example/README.md @@ -0,0 +1,28 @@ +This branch differs a lot from original already, so instead of explaining how it's different, let's just explain how to use this + +-Edit mathematics_layers some of defomration_layers, and some of common_layers.py make each math function become their own layer because 'keras2circom' will only look at model.layers, so need to make sure our math computation is included in 'layer' +-The flow start from generating onnx file in gen_onnx.ipynb file, generating example.onnx +-Then in command line, run +`python converter.py --weights "./example/example.onnx" --outpath "./example/" --formats "keras"`which will convert from example.onnx into example.keras +-Then in read_keras.ipynb notebook, it downloads this keras format and run in over same input, seeing that it get the same result as onnx. Carefully look at model.layers which is the function that keras2circom uses to extract layer for circom template, and for each layer we can call .get_config() as shown in read_keras.ipynb notebook. 
+ +Now we support the following operations which are enough for all pytorch function used for original zk-stats-lib, we can support more operations once we found out alternative operations that result in easier circom template or due to a change of implementation in zkstats lib itself. + +- \*, +, -, / +- ==, <, > +- torch.where +- torch.logical_and, torch.logical_or, NOT +- torch.abs, reciprocal, sort, exp, log +- torch.floor, torch.ceil +- torch.min, torch.max +- torch.sum, torch.mean +- .size() +- array indexing, eg. x = x[0] +- .float() +- @ (matrix multiplication) +- torch.transpose +- torch.ones_like +- torch.tensor(). Note that this will treat everything as constant, so shouldnt put variable in there +- For-loop —> Split & Squeeze +- Unsqueeze +- torch.cat diff --git a/zkstats/onnx2circom/onnx2keras/example/example.keras b/zkstats/onnx2circom/onnx2keras/example/example.keras new file mode 100644 index 0000000000000000000000000000000000000000..c4649541d32c55ca1da8addf2c76500b628f3ed1 GIT binary patch literal 8237 zcmeHM&2Jk;6dxxoghEk^DjIQN*%LzPVmm2Bkr1h9qCgN>&_g84+K%nDz3pb#+L=un zSw@^txgc(c|DZ?2i6h7U89i{|$O&~O)j~2MBn);HGPbNiJsOYeO1!FRu%XAIAm!BQiMTVdPS*-x5T z6rd($(GzLIgRvkIUMq_m60GwG%{C$~gdg*LCA>ZUOlbrqJ(mQAlYQm2OnDgdx-!+` zkqAI##E&38m{#q%wjbu%gyE0lZYDp)iOQ*=;X0R%jxRdAiw;&xE6(bQQ>r0g@<@%j zttjDgwTu%*m!|>+$rtiFME$<9?)E`24ceiQK^inE_Be|ocra}O42N+&$zl)&3{_|~ z&@vic636$-8fzV+x6frKfJ@jA+OU|!`_}f2X48t8JdiD)>xa`oo)*0fVv(eNJLO%q zXewxRH@?cguW4xHeanHz1zRiW0FFs|go^_x4N+ckittzK_YrP{%|8snc1I$5J@?&@ zl2}L{OFzjZPAbCi`(AM%tW+tawN~_T%A^`8=TfF&J(IZjJ=D?)Qvuq$kZ>`kRL`RN zvjqVh5&>jr^Ituhu^r8Zt^Bo2GrzW)^>U z7PY31;icy9Fh6YmKB?VY{%j#&^Ox$_eZwD$miVg3PJ<_Czx?d;n+3KosOoROsFe-u zn11&qJx+c(hTkM-^Zf)aoyw`R>OjT9Z{9xk;1wM{)vr?tM(*^n5wPokV>Eql@{bgc zyjrRu{kk~Z&(=xsG22=Ey`^eZ{f7{w$pZ5E}EdD+syM~wfK6bYGchcx={!$%t41Y*J?Jxh;KK6^@ zE!nYoOn&6;I0BX3!ornBO8Uvq>?HlQ0(R`9yFJzZZa$oP2X05cqK{6T_KjsY@$iMo VFD{kc^DxTTBK&@aHh2?z{{X1m{Y?M> literal 0 HcmV?d00001 diff --git 
a/zkstats/onnx2circom/onnx2keras/example/gen_onnx.ipynb b/zkstats/onnx2circom/onnx2keras/example/gen_onnx.ipynb new file mode 100644 index 0000000..c72b05d --- /dev/null +++ b/zkstats/onnx2circom/onnx2keras/example/gen_onnx.ipynb @@ -0,0 +1,243 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": 6, + "metadata": {}, + "outputs": [], + "source": [ + "import torch\n", + "import torch.nn as nn\n", + "import torch.nn.functional as F\n", + "import numpy as np" + ] + }, + { + "cell_type": "code", + "execution_count": 11, + "metadata": {}, + "outputs": [], + "source": [ + "class MyModel(nn.Module):\n", + " def __init__(self):\n", + " super(MyModel, self).__init__() \n", + "\n", + " def forward(self,x):\n", + " # return [torch.ones_like(x)]\n", + " # return [x.size()]\n", + " # return [x==y]\n", + " # return [torch.mean(x)+4]\n", + "# (x[:,:,0] ==MagicNumber)\n", + " # print(\"hey: \", x[:,0,:])\n", + " # return [torch.cat((x, torch.ones_like(x)))]\n", + " return(x[:,0,:].unsqueeze(-1))\n", + " count_equal = 1\n", + " count_all = 0\n", + " for ele in x[0]:\n", + " print('eleee: ', ele)\n", + " count_all+=(torch.sum((x==ele[0]).float())<=count_equal).float()\n", + " return count_all\n", + " return [torch.sum(torch.tensor([3.0]*x.size()[1]))]\n", + "\n", + " return (torch.transpose(x, 1,2))\n", + "\n", + " # return [torch.where(x>40, 3, x)]\n", + " # return [torch.sum(x, axis = [0,1,2])]\n", + " # return [x.unsqueeze(-1)]\n", + " # return torch.cat((x,y), dim = 1)\n", + " # return [x@y]\n", + " return [torch.ones_like(x)]\n", + " return [torch.ones_like(x)+torch.tensor(8)]\n", + "\n", + " # return [x==40]\n", + " return [torch.logical_or(x>20, y<10)]\n", + " # return [x-torch.log(y)+5]\n", + " return [torch.where(x>20, 2, 1)]\n", + " return [x+torch.log(y)+z]" + ] + }, + { + "cell_type": "code", + "execution_count": 12, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "og x: tensor([[[10.],\n", + " [40.],\n", + 
" [50.]]])\n", + "result: tensor([[[10.]]])\n" + ] + } + ], + "source": [ + "model = MyModel()\n", + "input1 = torch.tensor([10, 40, 50], dtype = torch.float32).reshape(1,-1,1)\n", + "input2 = torch.tensor([13, 4, 7], dtype = torch.float32).reshape(1,-1,1)\n", + "# input3 = torch.tensor([3, 14, 7], dtype = torch.float32).reshape(1,-1,1)\n", + "x = torch.tensor([10, 40, 50], dtype = torch.float32).reshape(1,-1,1)\n", + "y =torch.tensor([13, 4, 7], dtype = torch.float32).reshape(1,-1,1)\n", + "print('og x: ', x)\n", + "# print('y: ', y)\n", + "# print('result: ', x@y)\n", + "print('result: ', model.forward(x))\n", + "\n", + "torch.onnx.export(model, # model being run\n", + " (x) , # model input (or a tuple for multiple inputs)\n", + " 'example.onnx', # where to save the model (can be a file or file-like object)\n", + " export_params=True, # store the trained parameter weights inside the model file\n", + " opset_version=11, # the ONNX version to export the model to\n", + " do_constant_folding=True, # whether to execute constant folding for optimization\n", + " input_names = ['input'], # the model's input names\n", + " output_names = ['output'], # the model's output names\n", + " dynamic_axes={'input' : {0 : 'batch_size'}, # variable length axes\n", + " 'output' : {0 : 'batch_size'}})\n" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [] + }, + { + "cell_type": "code", + "execution_count": 284, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "tensor([[[10.],\n", + " [40.],\n", + " [50.]]])\n", + "tensor(100.)\n", + "tensor([[10.],\n", + " [40.],\n", + " [50.]])\n", + "tensor([[100.]])\n", + "tensor([[10., 40., 50.]])\n", + "tensor(100.)\n" + ] + } + ], + "source": [ + "print(input1)\n", + "print(torch.sum(input1))\n", + "print(torch.sum(input1, axis=0))\n", + "print(torch.sum(input1, axis=1))\n", + "print(torch.sum(input1, axis=2))\n", + "print(torch.sum(input1, axis=[0,1,2]))" 
+ ] + }, + { + "cell_type": "code", + "execution_count": 410, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "og size: torch.Size([1, 2, 3, 4, 5])\n", + "torch.Size([1, 2, 3, 4, 5, 1])\n" + ] + } + ], + "source": [ + "x = torch.zeros(1, 2, 3, 4, 5)\n", + "print('og size: ', x.size())\n", + "# print(torch.squeeze(x,4).size())\n", + "print(torch.unsqueeze(x, -1).size())" + ] + }, + { + "cell_type": "code", + "execution_count": 418, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "res: tf.Tensor(\n", + "[[3 4]\n", + " [1 2]\n", + " [3 4]], shape=(3, 2), dtype=int32)\n", + "shappee (3, 2)\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "2024-04-21 20:27:33.894869: W tensorflow/core/framework/local_rendezvous.cc:404] Local rendezvous is aborting with status: INVALID_ARGUMENT: Expected begin and size arguments to be 1-D tensors of size 2, but got shapes [3] and [2] instead.\n" + ] + }, + { + "ename": "InvalidArgumentError", + "evalue": "{{function_node __wrapped__Slice_device_/job:localhost/replica:0/task:0/device:CPU:0}} Expected begin and size arguments to be 1-D tensors of size 2, but got shapes [3] and [2] instead. 
[Op:Slice]", + "output_type": "error", + "traceback": [ + "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", + "\u001b[0;31mInvalidArgumentError\u001b[0m Traceback (most recent call last)", + "Cell \u001b[0;32mIn[418], line 14\u001b[0m\n\u001b[1;32m 11\u001b[0m \u001b[39mprint\u001b[39m(\u001b[39m'\u001b[39m\u001b[39mres: \u001b[39m\u001b[39m'\u001b[39m,res)\n\u001b[1;32m 12\u001b[0m \u001b[39mprint\u001b[39m(\u001b[39m'\u001b[39m\u001b[39mshappee\u001b[39m\u001b[39m'\u001b[39m, res\u001b[39m.\u001b[39mshape)\n\u001b[0;32m---> 14\u001b[0m res2\u001b[39m=\u001b[39m keras\u001b[39m.\u001b[39;49mops\u001b[39m.\u001b[39;49mslice(data, indices, res\u001b[39m.\u001b[39;49mshape)\n", + "File \u001b[0;32m/Library/Frameworks/Python.framework/Versions/3.10/lib/python3.10/site-packages/keras/src/ops/core.py:163\u001b[0m, in \u001b[0;36mslice\u001b[0;34m(inputs, start_indices, shape)\u001b[0m\n\u001b[1;32m 161\u001b[0m \u001b[39mif\u001b[39;00m any_symbolic_tensors((inputs, start_indices, shape)):\n\u001b[1;32m 162\u001b[0m \u001b[39mreturn\u001b[39;00m Slice()\u001b[39m.\u001b[39msymbolic_call(inputs, start_indices, shape)\n\u001b[0;32m--> 163\u001b[0m \u001b[39mreturn\u001b[39;00m backend\u001b[39m.\u001b[39;49mcore\u001b[39m.\u001b[39;49mslice(inputs, start_indices, shape)\n", + "File \u001b[0;32m/Library/Frameworks/Python.framework/Versions/3.10/lib/python3.10/site-packages/keras/src/backend/tensorflow/core.py:226\u001b[0m, in \u001b[0;36mslice\u001b[0;34m(inputs, start_indices, shape)\u001b[0m\n\u001b[1;32m 225\u001b[0m \u001b[39mdef\u001b[39;00m \u001b[39mslice\u001b[39m(inputs, start_indices, shape):\n\u001b[0;32m--> 226\u001b[0m \u001b[39mreturn\u001b[39;00m tf\u001b[39m.\u001b[39;49mslice(inputs, start_indices, shape)\n", + "File \u001b[0;32m/Library/Frameworks/Python.framework/Versions/3.10/lib/python3.10/site-packages/tensorflow/python/util/traceback_utils.py:153\u001b[0m, in 
\u001b[0;36mfilter_traceback..error_handler\u001b[0;34m(*args, **kwargs)\u001b[0m\n\u001b[1;32m 151\u001b[0m \u001b[39mexcept\u001b[39;00m \u001b[39mException\u001b[39;00m \u001b[39mas\u001b[39;00m e:\n\u001b[1;32m 152\u001b[0m filtered_tb \u001b[39m=\u001b[39m _process_traceback_frames(e\u001b[39m.\u001b[39m__traceback__)\n\u001b[0;32m--> 153\u001b[0m \u001b[39mraise\u001b[39;00m e\u001b[39m.\u001b[39mwith_traceback(filtered_tb) \u001b[39mfrom\u001b[39;00m \u001b[39mNone\u001b[39;00m\n\u001b[1;32m 154\u001b[0m \u001b[39mfinally\u001b[39;00m:\n\u001b[1;32m 155\u001b[0m \u001b[39mdel\u001b[39;00m filtered_tb\n", + "File \u001b[0;32m/Library/Frameworks/Python.framework/Versions/3.10/lib/python3.10/site-packages/tensorflow/python/eager/execute.py:53\u001b[0m, in \u001b[0;36mquick_execute\u001b[0;34m(op_name, num_outputs, inputs, attrs, ctx, name)\u001b[0m\n\u001b[1;32m 51\u001b[0m \u001b[39mtry\u001b[39;00m:\n\u001b[1;32m 52\u001b[0m ctx\u001b[39m.\u001b[39mensure_initialized()\n\u001b[0;32m---> 53\u001b[0m tensors \u001b[39m=\u001b[39m pywrap_tfe\u001b[39m.\u001b[39mTFE_Py_Execute(ctx\u001b[39m.\u001b[39m_handle, device_name, op_name,\n\u001b[1;32m 54\u001b[0m inputs, attrs, num_outputs)\n\u001b[1;32m 55\u001b[0m \u001b[39mexcept\u001b[39;00m core\u001b[39m.\u001b[39m_NotOkStatusException \u001b[39mas\u001b[39;00m e:\n\u001b[1;32m 56\u001b[0m \u001b[39mif\u001b[39;00m name \u001b[39mis\u001b[39;00m \u001b[39mnot\u001b[39;00m \u001b[39mNone\u001b[39;00m:\n", + "\u001b[0;31mInvalidArgumentError\u001b[0m: {{function_node __wrapped__Slice_device_/job:localhost/replica:0/task:0/device:CPU:0}} Expected begin and size arguments to be 1-D tensors of size 2, but got shapes [3] and [2] instead. 
[Op:Slice]" + ] + } + ], + "source": [ + "import tensorflow as tf\n", + "import keras\n", + "# Initializing the input\n", + "data = tf.constant([[1, 2], [3, 4], [5, 6]])\n", + "indices = tf.constant([1, 0, 1])\n", + " \n", + "# Calculating result\n", + "res = tf.gather(data, indices, 2)\n", + " \n", + "# Printing the result\n", + "print('res: ',res)\n", + "print('shappee', res.shape)\n", + "\n", + "res2= keras.ops.slice(data, indices, res.shape)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.10.0" + }, + "orig_nbformat": 4 + }, + "nbformat": 4, + "nbformat_minor": 2 +} diff --git a/zkstats/onnx2circom/onnx2keras/example/read_keras.ipynb b/zkstats/onnx2circom/onnx2keras/example/read_keras.ipynb new file mode 100644 index 0000000..fdd143a --- /dev/null +++ b/zkstats/onnx2circom/onnx2keras/example/read_keras.ipynb @@ -0,0 +1,227 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": 18, + "metadata": {}, + "outputs": [], + "source": [ + "from keras.models import load_model\n", + "import keras\n", + "import tensorflow as tf\n", + "import numpy as np" + ] + }, + { + "cell_type": "code", + "execution_count": 19, + "metadata": {}, + "outputs": [], + "source": [ + "import sys\n", + "sys.path.append('../')\n", + "# import all layers we support\n", + "from layers.mathematics_layers import TFAdd, TFSub, TFMul, TFDiv, TFEqual, TFLess, TFGreater,TFWhere, TFNot, TFAnd, TFOr, TFAbs, TFReciprocal, TFSqrt, TFExp, TFLog, TFFloor, TFCeil, TFReduceMax, TFReduceMin, TFReduceSum, TFReduceMean, TFShape, TFConstantOfShape, TFMatMul\n", + "from 
layers.deformation_layers import TFTranspose, TFGather,TFConcat, TFSplit,TFUnsqueeze, TFSqueeze\n", + "from layers.common_layers import TFCast" + ] + }, + { + "cell_type": "code", + "execution_count": 20, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
Model: \"functional_1\"\n",
+       "
\n" + ], + "text/plain": [ + "\u001b[1mModel: \"functional_1\"\u001b[0m\n" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "text/html": [ + "
┏━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━┓\n",
+       "┃ Layer (type)                     Output Shape                  Param # ┃\n",
+       "┡━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━┩\n",
+       "│ input_layer (InputLayer)        │ (1, 3, 1)              │             0 │\n",
+       "├─────────────────────────────────┼────────────────────────┼───────────────┤\n",
+       "│ tf_gather (TFGather)            │ (1, 1)                 │             0 │\n",
+       "├─────────────────────────────────┼────────────────────────┼───────────────┤\n",
+       "│ tf_unsqueeze (TFUnsqueeze)      │ (1, 1, 1)              │             0 │\n",
+       "└─────────────────────────────────┴────────────────────────┴───────────────┘\n",
+       "
\n" + ], + "text/plain": [ + "┏━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━┓\n", + "┃\u001b[1m \u001b[0m\u001b[1mLayer (type) \u001b[0m\u001b[1m \u001b[0m┃\u001b[1m \u001b[0m\u001b[1mOutput Shape \u001b[0m\u001b[1m \u001b[0m┃\u001b[1m \u001b[0m\u001b[1m Param #\u001b[0m\u001b[1m \u001b[0m┃\n", + "┡━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━┩\n", + "│ input_layer (\u001b[38;5;33mInputLayer\u001b[0m) │ (\u001b[38;5;34m1\u001b[0m, \u001b[38;5;34m3\u001b[0m, \u001b[38;5;34m1\u001b[0m) │ \u001b[38;5;34m0\u001b[0m │\n", + "├─────────────────────────────────┼────────────────────────┼───────────────┤\n", + "│ tf_gather (\u001b[38;5;33mTFGather\u001b[0m) │ (\u001b[38;5;34m1\u001b[0m, \u001b[38;5;34m1\u001b[0m) │ \u001b[38;5;34m0\u001b[0m │\n", + "├─────────────────────────────────┼────────────────────────┼───────────────┤\n", + "│ tf_unsqueeze (\u001b[38;5;33mTFUnsqueeze\u001b[0m) │ (\u001b[38;5;34m1\u001b[0m, \u001b[38;5;34m1\u001b[0m, \u001b[38;5;34m1\u001b[0m) │ \u001b[38;5;34m0\u001b[0m │\n", + "└─────────────────────────────────┴────────────────────────┴───────────────┘\n" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "text/html": [ + "
 Total params: 0 (0.00 B)\n",
+       "
\n" + ], + "text/plain": [ + "\u001b[1m Total params: \u001b[0m\u001b[38;5;34m0\u001b[0m (0.00 B)\n" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "text/html": [ + "
 Trainable params: 0 (0.00 B)\n",
+       "
\n" + ], + "text/plain": [ + "\u001b[1m Trainable params: \u001b[0m\u001b[38;5;34m0\u001b[0m (0.00 B)\n" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "text/html": [ + "
 Non-trainable params: 0 (0.00 B)\n",
+       "
\n" + ], + "text/plain": [ + "\u001b[1m Non-trainable params: \u001b[0m\u001b[38;5;34m0\u001b[0m (0.00 B)\n" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "All Layer: \n", + "\n", + "\n", + "{'batch_shape': (1, 3, 1), 'dtype': 'float32', 'sparse': False, 'name': 'input_layer'}\n", + "\n", + "{'name': 'tf_gather', 'trainable': True, 'dtype': 'float32', 'tensor_grap': {'input': {'class_name': '__keras_tensor__', 'config': {'shape': [1, 3, 1], 'dtype': 'float32', 'keras_history': ['input_layer', 0, 0]}}, '/Constant_output_0': 0}, 'node_weights': {}, 'node_inputs': ['input', '/Constant_output_0'], 'node_attribute': {'axis': 1}, 'indices': 0, 'axis': 1}\n", + "\n", + "{'name': 'tf_unsqueeze', 'trainable': True, 'dtype': 'float32', 'tensor_grap': {'input': {'class_name': '__keras_tensor__', 'config': {'shape': [1, 3, 1], 'dtype': 'float32', 'keras_history': ['input_layer', 0, 0]}}, '/Constant_output_0': 0, '/Gather_output_0': {'class_name': '__keras_tensor__', 'config': {'shape': [1, 1], 'dtype': 'float32', 'keras_history': ['tf_gather', 0, 0]}}}, 'node_weights': {}, 'node_inputs': ['/Gather_output_0'], 'node_attribute': {'axes': [-1]}, 'axis': -1}\n" + ] + } + ], + "source": [ + "# @keras.saving.register_keras_serializable()\n", + "\n", + "from keras import backend as K\n", + "K.clear_session()\n", + "keras.saving.get_custom_objects().clear()\n", + "custom_objects = {\"TFAdd\":TFAdd,\"TFSub\": TFSub,\"TFMul\": TFMul,\"TFDiv\": TFDiv,\"TFEqual\": TFEqual,\"TFLess\": TFLess,\"TFGreater\": TFGreater,\"TFWhere\":TFWhere, \"TFNot\":TFNot, \"TFAnd\":TFAnd,\"TFOr\": TFOr,\"TFAbs\": TFAbs, \"TFReciprocal\":TFReciprocal,\"TFSqrt\": TFSqrt,\"TFExp\": TFExp,\"TFLog\": TFLog,\"TFFloor\": TFFloor,\"TFCeil\": TFCeil,\"TFReduceMax\": TFReduceMax, \"TFReduceMin\":TFReduceMin,\"TFReduceSum\": TFReduceSum,\"TFReduceMean\": TFReduceMean,\"TFShape\": TFShape,\"TFConstantOfShape\": 
TFConstantOfShape,\"TFMatMul\": TFMatMul,\"TFTranspose\": TFTranspose,\"TFGather\": TFGather,\"TFConcat\":TFConcat,\"TFSplit\": TFSplit,\"TFUnsqueeze\":TFUnsqueeze,\"TFSqueeze\":TFSqueeze, \"TFCast\":TFCast}\n", + "\n", + "\n", + "with keras.saving.custom_object_scope(custom_objects):\n", + " model = load_model(\"example.keras\")\n", + " model.summary()\n", + " print(\"All Layer: \\n\")\n", + " for layer in model.layers:\n", + " # print layer\n", + " print(layer)\n", + " # print detailed config of each layer\n", + " print(layer.get_config())\n", + "\n" + ] + }, + { + "cell_type": "code", + "execution_count": 21, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "x: tensor([[[10.],\n", + " [40.],\n", + " [50.]]])\n", + "y: tensor([[[13.],\n", + " [ 4.],\n", + " [ 7.]]])\n", + "WARNING:tensorflow:5 out of the last 5 calls to .one_step_on_data_distributed at 0x31301ff40> triggered tf.function retracing. Tracing is expensive and the excessive number of tracings could be due to (1) creating @tf.function repeatedly in a loop, (2) passing tensors with different shapes, (3) passing Python objects instead of tensors. For (1), please define your @tf.function outside of the loop. For (2), @tf.function has reduce_retracing=True option that can avoid unnecessary retracing. 
For (3), please refer to https://www.tensorflow.org/guide/function#controlling_retracing and https://www.tensorflow.org/api_docs/python/tf/function for more details.\n", + "\u001b[1m1/1\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 19ms/step\n" + ] + }, + { + "data": { + "text/plain": [ + "array([[[10.]]], dtype=float32)" + ] + }, + "execution_count": 21, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "# To make sure still get the sam result as onnx.\n", + "import torch\n", + "input1 = torch.tensor([10, 40, 50], dtype = torch.float32).reshape(1,-1,1)\n", + "input2 = torch.tensor([13, 4, 7], dtype = torch.float32).reshape(1,-1,1)\n", + "# input3 = torch.tensor([3, 14, 7], dtype = torch.float32).reshape(1,-1,1)\n", + "x = torch.tensor([10, 40, 50], dtype = torch.float32).reshape(1,-1,1)\n", + "y =torch.tensor([13, 4, 7], dtype = torch.float32).reshape(1,-1,1)\n", + "print(\"x: \", x)\n", + "print(\"y: \", y)\n", + "# print('new y: ', np.expand_dims(y, axis=1))\n", + "# print('actual: ', x@y)\n", + "# model.forward(x,y)\n", + "model.predict((x))" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.10.0" + }, + "orig_nbformat": 4 + }, + "nbformat": 4, + "nbformat_minor": 2 +} diff --git a/zkstats/onnx2circom/onnx2keras/layers/__init__.py b/zkstats/onnx2circom/onnx2keras/layers/__init__.py new file mode 100644 index 0000000..e86d8f8 --- /dev/null +++ b/zkstats/onnx2circom/onnx2keras/layers/__init__.py @@ -0,0 +1,6 @@ +from .conv_layers import * +from .dimension_utils import * 
+from .common_layers import * +from .activations_layers import * +from .mathematics_layers import * +from .deformation_layers import * \ No newline at end of file diff --git a/zkstats/onnx2circom/onnx2keras/layers/activations_layers.py b/zkstats/onnx2circom/onnx2keras/layers/activations_layers.py new file mode 100644 index 0000000..e18e892 --- /dev/null +++ b/zkstats/onnx2circom/onnx2keras/layers/activations_layers.py @@ -0,0 +1,179 @@ +import numpy as np +import tensorflow as tf +from tensorflow import keras + +from .dimension_utils import channel_to_last_dimension, tensor_NCD_to_NDC_format +from ..utils.op_registry import OPERATOR + +@OPERATOR.register_operator("Relu") +class TFRelu(): + def __init__(self, *args, **kwargs) -> None: + super().__init__() + + def __call__(self, inputs): + return keras.activations.relu(inputs) + +@OPERATOR.register_operator("HardSigmoid") +class TFHardSigmoid(): + def __init__(self, tensor_grap, node_weights, node_inputs, node_attribute, *args, **kwargs) -> None: + super().__init__() + self.alpha = node_attribute.get("alpha", 0.2) + self.beta = node_attribute.get("beta", 0.5) + + def __call__(self, inputs): + return tf.clip_by_value(self.alpha*inputs+self.beta, 0, 1) + +@OPERATOR.register_operator("HardSwish") +class TFHardSwish(): + def __init__(self, *args, **kwargs) -> None: + super().__init__() + + def __call__(self, inputs): + return inputs*tf.clip_by_value(inputs/6+0.5, 0, 1) + +@OPERATOR.register_operator("Mish") +class TFMish(): + def __init__(self, *args, **kwargs) -> None: + super().__init__() + + def __call__(self, inputs): + return inputs*tf.tanh(tf.math.log(tf.math.exp(inputs)+1)) + +@OPERATOR.register_operator("Sigmoid") +class TFSigmoid(): + def __init__(self, *args, **kwargs) -> None: + super().__init__() + + def __call__(self, inputs): + return keras.activations.sigmoid(inputs) + +@OPERATOR.register_operator("LeakyRelu") +class TFLeakyRelu(): + def __init__(self, tensor_grap, node_weights, node_inputs, 
node_attribute, *args, **kwargs) -> None: + super().__init__() + self.alpha = node_attribute.get('alpha', 0.01) + + def __call__(self, inputs): + return keras.activations.relu(inputs, alpha=self.alpha) + +@OPERATOR.register_operator("PRelu") +class TFPRelu(): + def __init__(self, tensor_grap, node_weights, node_inputs, node_attribute, *args, **kwargs) -> None: + super().__init__() + if 'slope' in node_attribute: + self.slope = node_attribute['slope'] + elif node_inputs[1] in node_weights: + self.slope = node_weights[node_inputs[1]] + else: + self.slope = tensor_grap[node_inputs[1]] + input_tensor_shape = tensor_grap[node_inputs[0]].shape + if isinstance(self.slope, np.ndarray): + while self.slope.ndim < input_tensor_shape.ndims: + self.slope = self.slope[np.newaxis, :] + self.slope = tensor_NCD_to_NDC_format(self.slope) + if self.slope.ndim > 1: + # remove batchsize + self.slope = self.slope[0] + + self.PRelu = tf.keras.layers.PReLU(weights=[self.slope], shared_axes = [i for i in range(1, input_tensor_shape.ndims-1)]) + + def __call__(self, inputs): + return self.PRelu(inputs) + +@OPERATOR.register_operator("Sin") +class TFSin(): + def __init__(self, *args, **kwargs) -> None: + super().__init__() + + def __call__(self, inputs): + return tf.sin(inputs) + +@OPERATOR.register_operator("Sinh") +class TFSinh(): + def __init__(self, *args, **kwargs) -> None: + super().__init__() + + def __call__(self, inputs): + return tf.sinh(inputs) + +@OPERATOR.register_operator("Cos") +class TFCos(): + def __init__(self, *args, **kwargs) -> None: + super().__init__() + + def __call__(self, inputs): + return tf.cos(inputs) + +@OPERATOR.register_operator("Cosh") +class TFCosh(): + def __init__(self, *args, **kwargs) -> None: + super().__init__() + + def __call__(self, inputs): + return tf.cosh(inputs) + +@OPERATOR.register_operator("Tan") +class TFTan(): + def __init__(self, *args, **kwargs) -> None: + super().__init__() + + def __call__(self, inputs): + return tf.tan(inputs) + 
+@OPERATOR.register_operator("Tanh") +class TFTanh(): + def __init__(self, *args, **kwargs) -> None: + super().__init__() + + def __call__(self, inputs): + return tf.tanh(inputs) + +@OPERATOR.register_operator("Softmax") +class TFSoftmax(): + def __init__(self, tensor_grap, node_weights, node_inputs, node_attribute, *args, **kwargs) -> None: + super().__init__() + self.axis = channel_to_last_dimension(node_attribute.get('axis', -1)) + + def __call__(self, inputs): + return keras.activations.softmax(inputs, axis=self.axis) + +@OPERATOR.register_operator("Softplus") +class TFSoftplus(): + def __init__(self, tensor_grap, node_weights, node_inputs, node_attribute, *args, **kwargs) -> None: + super().__init__() + + def __call__(self, inputs): + return keras.activations.softplus(inputs) + +@OPERATOR.register_operator("Softsign") +class TFSoftsign(): + def __init__(self, tensor_grap, node_weights, node_inputs, node_attribute, *args, **kwargs) -> None: + super().__init__() + + def __call__(self, inputs): + return keras.activations.softsign(inputs) + +@OPERATOR.register_operator("Selu") +class TFSelu(): + def __init__(self, tensor_grap, node_weights, node_inputs, node_attribute, *args, **kwargs) -> None: + super().__init__() + + def __call__(self, inputs): + return keras.activations.selu(inputs) + +@OPERATOR.register_operator("Elu") +class TFElu(): + def __init__(self, tensor_grap, node_weights, node_inputs, node_attribute, *args, **kwargs) -> None: + super().__init__() + + def __call__(self, inputs): + return keras.activations.elu(inputs) + +@OPERATOR.register_operator("Celu") +class TFCelu(): + def __init__(self, tensor_grap, node_weights, node_inputs, node_attribute, *args, **kwargs) -> None: + super().__init__() + self.alpha = node_attribute.get("alpha", 1.0) + + def __call__(self, inputs): + return tf.maximum(inputs, 0) + tf.minimum(0, self.alpha*(tf.exp(inputs/self.alpha)-1)) \ No newline at end of file diff --git 
"""ONNX -> Keras/TensorFlow translations for common (non-convolution) operators.

Each class below is registered with OPERATOR under its ONNX op type.  A handler
is constructed with (tensor_grap, node_weights, node_inputs, node_attribute)
and the instance is then called like a Keras layer on the incoming tensor(s).
Tensors are assumed to already be in channel-last (NHWC) layout.
"""
import math
import logging
import numpy as np
import tensorflow as tf
from tensorflow import keras

from ..utils.op_registry import OPERATOR
from .dimension_utils import intfloat_to_list

LOG = logging.getLogger("common_layers :")


def _pool_pad_mode(input_shape, kernel_shape, strides, dilations, pads, ceil_mode):
    """Return "SAME" when the explicit ONNX pads reproduce same-size pooling,
    otherwise "VALID".

    Keras pooling layers only understand SAME/VALID; when the ONNX pads are not
    expressible as SAME, the caller applies an explicit ZeroPadding2D and uses
    VALID.  (Shared by TFAveragePool and TFMaxPool, which previously duplicated
    this loop verbatim.)
    """
    round_fn = math.floor if ceil_mode == 0 else math.ceil
    for i in range(len(input_shape) - 2):
        pad_total = pads[i] + pads[i + 2]
        out_raw = (input_shape[1 + i] + pad_total - ((kernel_shape[i] - 1) * dilations[i] + 1)) / strides[i] + 1
        if round_fn(out_raw) != input_shape[1 + i]:
            return "VALID"
    return "SAME"


@OPERATOR.register_operator("BatchNormalization")
class TFBatchNormalization():
    """ONNX BatchNormalization; node inputs are X, scale, bias, mean, variance."""

    def __init__(self, tensor_grap, node_weights, node_inputs, node_attribute, *args, **kwargs):
        super().__init__()
        epsilon = node_attribute.get("epsilon", 1e-5)
        momentum = node_attribute.get("momentum", 0.9)

        # Bake the ONNX initializers into the keras layer via constant
        # initializers; the layer is used in inference mode only.
        self.bn = keras.layers.BatchNormalization(
            gamma_initializer=keras.initializers.Constant(node_weights[node_inputs[1]]),
            beta_initializer=keras.initializers.Constant(node_weights[node_inputs[2]]),
            moving_mean_initializer=keras.initializers.Constant(node_weights[node_inputs[3]]),
            moving_variance_initializer=keras.initializers.Constant(node_weights[node_inputs[4]]),
            epsilon=epsilon,
            momentum=momentum)

    def __call__(self, inputs):
        return self.bn(inputs)


@OPERATOR.register_operator("InstanceNormalization")
class TFInstanceNormalization():
    """ONNX InstanceNormalization, computed directly from per-instance stats."""

    def __init__(self, tensor_grap, node_weights, node_inputs, node_attribute, *args, **kwargs):
        super().__init__()
        self.epsilon = node_attribute.get("epsilon", 1e-5)
        self.scale = node_weights[node_inputs[1]]
        self.bias = node_weights[node_inputs[2]]

    def __call__(self, inputs):
        # Normalize over the spatial axes only (exclude batch and channel).
        axes = tuple(range(1, len(inputs.shape) - 1))
        mean = tf.reduce_mean(inputs, axis=axes, keepdims=True)
        var = tf.math.reduce_variance(inputs, axis=axes, keepdims=True)
        return self.scale * (inputs - mean) / tf.sqrt(var + self.epsilon) + self.bias


@OPERATOR.register_operator("Pad")
class TFPad():
    """ONNX Pad; pads come from an attribute (older opsets) or a second input."""

    def __init__(self, tensor_grap, node_weights, node_inputs, node_attribute, *args, **kwargs):
        super().__init__()
        if node_attribute.get("pads") is not None:
            pads = node_attribute['pads']
        elif node_inputs[1] in node_weights:
            pads = node_weights[node_inputs[1]]
        else:
            pads = tensor_grap[node_inputs[1]]
        # Reorder ONNX NCHW begin/end pads into NHWC (begin, end) pairs.
        self.pad = [[pads[0], pads[4]], [pads[2], pads[6]], [pads[3], pads[7]], [pads[1], pads[5]]]
        # Renamed from `self.model` — this is the padding mode string, not a model.
        # NOTE(review): tf.pad accepts CONSTANT/REFLECT/SYMMETRIC; an ONNX
        # "edge" mode would become "EDGE" and raise — confirm upstream inputs.
        self.mode = node_attribute.get("mode", "constant").upper()

    def __call__(self, inputs):
        return tf.pad(inputs, self.pad, mode=self.mode)


@OPERATOR.register_operator("Clip")
class TFClip():
    """ONNX Clip; min/max come from attributes (older opsets) or extra inputs."""

    def __init__(self, tensor_grap, node_weights, node_inputs, node_attribute, *args, **kwargs):
        super().__init__()
        if "min" in node_attribute:
            self.min = node_attribute.get("min")
        else:
            self.min = tensor_grap[node_inputs[1]] if node_inputs[1] in tensor_grap else node_weights[node_inputs[1]]
        if "max" in node_attribute:
            self.max = node_attribute.get("max")
        else:
            self.max = tensor_grap[node_inputs[2]] if node_inputs[2] in tensor_grap else node_weights[node_inputs[2]]

    def __call__(self, inputs):
        # Special-case clip(0, 6) so the converted graph uses the fused ReLU6 op.
        if float(self.min) == 0 and float(self.max) == 6:
            return tf.nn.relu6(inputs)
        return tf.clip_by_value(inputs, self.min, self.max)


@OPERATOR.register_operator("GlobalMaxPool")
class TFGlobalMaxPool():
    """ONNX GlobalMaxPool: max over all spatial axes, keeping dims.

    Bugfix: this was registered under "TFGlobalMaxPool", which is not an ONNX
    op type (the sibling class uses "GlobalAveragePool"), so the handler could
    never be dispatched.
    """

    def __init__(self, *args, **kwargs) -> None:
        super().__init__()

    def __call__(self, inputs):
        spatial_axes = list(range(1, len(inputs.shape) - 1))
        return tf.reduce_max(inputs, axis=spatial_axes, keepdims=True)


@OPERATOR.register_operator("GlobalAveragePool")
class TFGlobalAveragePool():
    """ONNX GlobalAveragePool: mean over all spatial axes, keeping dims."""

    def __init__(self, *args, **kwargs) -> None:
        super().__init__()

    def __call__(self, inputs):
        spatial_axes = list(range(1, len(inputs.shape) - 1))
        return tf.reduce_mean(inputs, axis=spatial_axes, keepdims=True)


@OPERATOR.register_operator("AveragePool")
class TFAveragePool():
    """ONNX AveragePool mapped onto keras.layers.AveragePooling2D."""

    def __init__(self, tensor_grap, node_weights, node_inputs, node_attribute, *args, **kwargs) -> None:
        super().__init__()
        kernel_shape = intfloat_to_list(node_attribute.get("kernel_shape", [2, 2]), 2)
        strides = intfloat_to_list(node_attribute.get("strides", [1, 1]), 2)
        dilations = intfloat_to_list(node_attribute.get("dilations", [1, 1]), 2)
        ceil_mode = node_attribute.get("ceil_mode", 0)
        pads = intfloat_to_list(node_attribute.get("pads", [0, 0, 0, 0]), 4)

        input_shape = tensor_grap[node_inputs[0]].shape
        pad_mode = _pool_pad_mode(input_shape, kernel_shape, strides, dilations, pads, ceil_mode)

        self.avg_pool = keras.layers.AveragePooling2D(pool_size=kernel_shape, strides=strides, padding=pad_mode)

        # Explicit zero padding only when SAME cannot express the ONNX pads.
        self.pad = None
        if pad_mode == "VALID" and pads is not None and np.sum(pads) > 0:
            self.pad = keras.layers.ZeroPadding2D(padding=((pads[0], pads[2]), (pads[1], pads[3])))

    def __call__(self, inputs):
        if self.pad:
            inputs = self.pad(inputs)
        return self.avg_pool(inputs)


@OPERATOR.register_operator("MaxPool")
class TFMaxPool():
    """ONNX MaxPool mapped onto keras.layers.MaxPool2D."""

    def __init__(self, tensor_grap, node_weights, node_inputs, node_attribute, *args, **kwargs) -> None:
        super().__init__()
        kernel_shape = intfloat_to_list(node_attribute.get("kernel_shape", [2, 2]), 2)
        strides = intfloat_to_list(node_attribute.get("strides", [1, 1]), 2)
        dilations = intfloat_to_list(node_attribute.get("dilations", [1, 1]), 2)
        ceil_mode = node_attribute.get("ceil_mode", 0)
        pads = intfloat_to_list(node_attribute.get("pads", [0, 0, 0, 0]), 4)

        input_shape = tensor_grap[node_inputs[0]].shape
        pad_mode = _pool_pad_mode(input_shape, kernel_shape, strides, dilations, pads, ceil_mode)

        self.max_pool = keras.layers.MaxPool2D(pool_size=kernel_shape, strides=strides, padding=pad_mode)

        # Explicit zero padding only when SAME cannot express the ONNX pads.
        self.pad = None
        if pad_mode == "VALID" and pads is not None and np.sum(pads) > 0:
            self.pad = keras.layers.ZeroPadding2D(padding=((pads[0], pads[2]), (pads[1], pads[3])))

    def __call__(self, inputs):
        if self.pad:
            inputs = self.pad(inputs)
        return self.max_pool(inputs)


@OPERATOR.register_operator("Upsample")
class TFUpsample():
    """ONNX Upsample; scale factors come from the second (weight) input."""

    def __init__(self, tensor_grap, node_weights, node_inputs, node_attribute, *args, **kwargs):
        super().__init__()
        _, h, w, _ = tensor_grap[node_inputs[0]].shape
        scale = node_weights[node_inputs[1]]

        # scale is in NCHW order; indices 2 and 3 are the spatial factors.
        self.scale = (int(h * scale[2]), int(w * scale[3]))
        if node_attribute.get("mode", "nearest").lower() == 'nearest':
            self.method = tf.image.ResizeMethod.NEAREST_NEIGHBOR
        else:
            self.method = tf.image.ResizeMethod.BILINEAR

    def __call__(self, inputs):
        return tf.image.resize(inputs, self.scale, method=self.method)


@OPERATOR.register_operator("Constant")
class TFConstant():
    """ONNX Constant: returns the node's `value` attribute unchanged."""

    def __init__(self, tensor_grap, node_weights, node_inputs, node_attribute, *args, **kwargs):
        super().__init__()
        self.val = node_attribute['value']

    def __call__(self, *args, **kwargs):
        return self.val


@OPERATOR.register_operator("ScatterND")
class TFScatterND():
    """ONNX ScatterND, applied in channel-first layout and transposed back."""

    def __init__(self, tensor_grap, node_weights, node_inputs, node_attribute, *args, **kwargs):
        super().__init__()
        self.indices = node_weights[node_inputs[1]]
        shape_len = len(tensor_grap[node_inputs[0]].shape)
        # NHWC -> NCHW and back: the stored indices address channel-first data.
        self.trans_in = [0, shape_len - 1] + [n for n in range(1, shape_len - 1)]
        self.trans_out = [0] + [n for n in range(2, shape_len)] + [1]
        if node_inputs[2] in tensor_grap:
            self.updates = tf.transpose(tensor_grap[node_inputs[2]], perm=self.trans_in)
        else:
            self.updates = node_weights[node_inputs[2]]

    def __call__(self, inputs):
        inputs = tf.transpose(inputs, perm=self.trans_in)
        inputs = tf.tensor_scatter_nd_update(inputs, self.indices, self.updates)
        return tf.transpose(inputs, perm=self.trans_out)


@OPERATOR.register_operator("Resize")
class TFResize():
    """ONNX Resize via tf.image.resize (sizes or scales form)."""

    def __init__(self, tensor_grap, node_weights, node_inputs, node_attribute, *args, **kwargs):
        super().__init__()
        if node_inputs[-1] in node_weights:
            _, _, nh, nw = node_weights[node_inputs[-1]]
            if len(node_inputs) != 4:
                # 3-input form: the last input holds scale factors, not sizes.
                _, h, w, _ = tensor_grap[node_inputs[0]].shape
                nh, nw = int(h * nh), int(w * nw)
            self.scale = (nh, nw)
        else:
            # NOTE(review): multiplies a shape slice by a tensor slice — looks
            # fragile; confirm against a model that actually hits this branch.
            scales = tensor_grap[node_inputs[0]].shape[1:3] * tensor_grap[node_inputs[2]][2:3]
            self.scale = scales

        if node_attribute.get("mode", "nearest").lower() == 'nearest':
            self.method = tf.image.ResizeMethod.NEAREST_NEIGHBOR
        else:
            self.method = tf.image.ResizeMethod.BILINEAR

    def __call__(self, inputs):
        return tf.image.resize(inputs, self.scale, method=self.method)


@OPERATOR.register_operator("Gemm")
class TFGemm():
    '''
    Fully connected layer: the ONNX Gemm emitted for torch.nn.Linear /
    tf.layers.dense maps onto keras.layers.Dense (weight transposed,
    optional bias from the third input).
    '''
    def __init__(self, tensor_grap, node_weights, node_inputs, node_attribute, *args, **kwargs) -> None:
        super().__init__()
        if len(node_inputs) > 2:
            weights = [node_weights[node_inputs[1]].T, node_weights[node_inputs[2]]]
        else:
            weights = [node_weights[node_inputs[1]].T]

        self.dense = keras.layers.Dense(weights[0].shape[1],
                                        weights=weights,
                                        use_bias=len(weights) == 2)

    def __call__(self, inputs):
        return self.dense(inputs)


@OPERATOR.register_operator("Identity")
class TFIdentity():
    """ONNX Identity: pass-through."""

    def __init__(self, *args, **kwargs):
        super().__init__()

    def __call__(self, inputs):
        return inputs


@OPERATOR.register_operator("Dropout")
class TFDropout():
    '''
    Dropout is a no-op at inference time, so it is ignored in deployment.
    '''
    def __init__(self, *args, **kwargs):
        super().__init__()

    def __call__(self, inputs):
        return inputs


@OPERATOR.register_operator("Cast")
class TFCast(keras.layers.Layer):
    """ONNX Cast.

    Implemented as a real keras Layer so the cast survives model
    serialization via get_config.  Numpy inputs are cast eagerly with numpy;
    graph tensors are cast with keras.ops.cast.
    """

    def __init__(self, tensor_grap, node_weights, node_inputs, node_attribute, *args, **kwargs):
        super().__init__()
        # Kept for get_config round-trips.
        self.tensor_grap = tensor_grap
        self.node_weights = node_weights
        self.node_inputs = node_inputs
        self.node_attribute = node_attribute

        # ONNX TensorProto dtype enum value.
        self.cast_to = int(node_attribute.get("to", 1))
        assert self.cast_to > 0 and self.cast_to < 12, f"Unknown cast type [{self.cast_to}]"
        # NOTE(review): enum values 4 (uint16) and 8 (string) pass the assert
        # but are absent from both maps and would raise KeyError in call().
        self.np_cast_map = {
            1: np.float32,
            2: np.uint8,
            3: np.int8,
            5: np.int16,
            6: np.int32,
            7: np.int64,
            9: np.bool_,
            10: np.float16,
            11: np.double,
        }
        self.tf_cast_map = {
            1: tf.float32,
            2: tf.uint8,
            3: tf.int8,
            5: tf.int16,
            6: tf.int32,
            7: tf.int64,
            9: tf.bool,
            10: tf.float16,
            11: tf.double,
        }

    def call(self, inputs):
        if isinstance(inputs, list):
            for i in range(len(inputs)):
                if isinstance(inputs[i], (np.ndarray, np.generic)):
                    inputs[i] = self.np_cast_map[self.cast_to](inputs[i])
                else:
                    # Bugfix: was `input[i]` (indexing the builtin), which
                    # raised TypeError whenever a tensor element was cast.
                    inputs[i] = keras.ops.cast(inputs[i], dtype=self.tf_cast_map[self.cast_to])
        else:
            if isinstance(inputs, (np.ndarray, np.generic)):
                inputs = self.np_cast_map[self.cast_to](inputs)
            else:
                inputs = keras.ops.cast(inputs, dtype=self.tf_cast_map[self.cast_to])

        return inputs

    def get_config(self):
        config = super().get_config()
        config.update({
            "tensor_grap": self.tensor_grap,
            'node_weights': self.node_weights,
            'node_inputs': self.node_inputs,
            'node_attribute': self.node_attribute,
            "cast_to": self.cast_to
        })
        return config
'''
    Author: MPolaris && yutaka329 && lkdci

    Thanks to yutaka329 for the pad tricks:
    https://github.com/MPolaris/onnx2tflite/issues/5

    Thanks to lkdci for the native method of group conv:
    https://github.com/MPolaris/onnx2tflite/issues/19
'''
import logging
import tensorflow as tf
from tensorflow import keras
from ..utils.op_registry import OPERATOR

LOG = logging.getLogger("convolution_layers :")

# Whether to implement grouped convolution with the native keras.layers.Conv2D
# `groups != 1` argument.  That form needs tflite >= 2.9; when False, grouped
# conv is emulated with one conv per group plus a concat, which also works on
# older tflite.  The native form yields a simpler, usually faster graph.
# See https://github.com/MPolaris/onnx2tflite/issues/19 for details.
USE_NATIVE_GROUP_CONV = False

@OPERATOR.register_operator("ConvTranspose")
class TFConvTranspose():
    """ONNX ConvTranspose mapped onto keras.layers.Conv2DTranspose.

    ONNX output pads are applied afterwards as a Cropping2D (transpose conv
    pads shrink the output rather than grow it).
    """

    def __init__(self, tensor_grap, node_weights, node_inputs, node_attribute, *args, **kwargs) -> None:
        super().__init__()
        # `group` and `kernel_shape` attributes are intentionally not read:
        # the kernel size comes from the weight tensor and groups are not
        # supported by this handler.
        dilations = node_attribute.get('dilations', 1)
        pads = node_attribute['pads'] if "pads" in node_attribute else None
        strides = node_attribute.get('strides', 1)

        # ONNX (in, out, kH, kW) -> keras (kH, kW, out, in).
        weights = node_weights[node_inputs[1]].transpose(2, 3, 1, 0)
        bias = node_weights[node_inputs[2]] if len(node_inputs) == 3 else None
        height, width, n_filters, channels = weights.shape

        self.pad = None
        self.conv = keras.layers.Conv2DTranspose(
            filters=n_filters, kernel_size=(height, width), strides=strides, padding='VALID',
            use_bias=False if bias is None else True,
            weights=[weights] if bias is None else [weights, bias],
            dilation_rate=dilations)
        if pads is not None and max(pads) != 0:
            padding = None
            if len(pads) == 2 and (pads[0] > 0 or pads[1] > 0):
                padding = (pads[0], pads[1])
            elif len(pads) == 4 and (pads[0] > 0 or pads[1] > 0 or pads[2] > 0 or pads[3] > 0):
                padding = ((pads[0], pads[2]), (pads[1], pads[3]))
            self.pad = keras.layers.Cropping2D(padding)

    def __call__(self, inputs):
        inputs = self.conv(inputs)
        if self.pad:
            inputs = self.pad(inputs)
        return inputs

@OPERATOR.register_operator("Conv")
class Convlution():
    """ONNX Conv dispatcher: standard, depthwise, or grouped convolution."""

    def __init__(self, tensor_grap, node_weights, node_inputs, node_attribute, *args, **kwargs) -> None:
        super().__init__()
        dilations, group = node_attribute.get('dilations', 1), node_attribute.get('group', 1)
        pads = node_attribute['pads'] if "pads" in node_attribute else None
        kernel_shape, strides = node_attribute.get('kernel_shape', 1), node_attribute.get('strides', 1)

        # Bugfix: the channel counts were previously read from
        # node_weights[node_inputs[1]] unconditionally *before* this
        # conditional fetch, raising KeyError whenever the kernel tensor comes
        # from the graph rather than the initializers.
        weights = node_weights[node_inputs[1]] if node_inputs[1] in node_weights else tensor_grap[node_inputs[1]]
        out_channel, in_channel = weights.shape[:2]

        # ONNX (out, in, spatial...) -> keras (spatial..., in, out).
        channel_sequence = [2 + i for i in range(len(weights.shape) - 2)] + [1, 0]
        weights = weights.transpose(*channel_sequence)

        bias = None
        if len(node_inputs) == 3:
            bias = node_weights[node_inputs[2]] if node_inputs[2] in node_weights else tensor_grap[node_inputs[2]]

        if group == 1:
            self.conv = TFConv(in_channel, out_channel, kernel_shape, strides, dilations, pads, weights, bias)
        elif group == out_channel:
            self.conv = TFDepthwiseConv(kernel_shape, strides, dilations, pads, weights, bias)
        else:
            if USE_NATIVE_GROUP_CONV:
                self.conv = TFConv(in_channel, out_channel, kernel_shape, strides, dilations, pads, weights, bias, group=group)
                LOG.warning(f"Group Convolution is detected, using native method, only supported tflite version >= 2.9, \
                            if compatibility error occurs and please make USE_NATIVE_GROUP_CONV=False!")
            else:
                self.conv = TFGroupConv(in_channel, out_channel, kernel_shape, strides, dilations, pads, weights, bias, group=group)

    def __call__(self, inputs):
        return self.conv(inputs)

class TFConv():
    """Standard convolution (1D/2D; 3D not implemented)."""

    def __init__(self, in_channel_num, out_channel_num, kernel_size=1,
                 strides=1, dilations=1, pads=None, weights=None, bias=None, group=1):
        super().__init__()

        # Dispatch on weight rank: 3 -> Conv1D, 4 -> Conv2D, 5 -> Conv3D.
        if len(weights.shape) == 3:
            self.conv1d_init(in_channel_num, out_channel_num, kernel_size, strides, dilations, pads, weights, bias, group)
        elif len(weights.shape) == 4:
            self.conv2d_init(in_channel_num, out_channel_num, kernel_size, strides, dilations, pads, weights, bias, group)
        elif len(weights.shape) == 5:
            self.conv3d_init(in_channel_num, out_channel_num, kernel_size, strides, dilations, pads, weights, bias, group)
        else:
            raise NotImplementedError(f"Conv{len(weights.shape)-2}d is not implemented")

    def conv1d_init(self, in_channel_num, out_channel_num, kernel_size=1,
                    strides=1, dilations=1, pads=None, weights=None, bias=None, group=1):
        self.pad = None
        # Heuristic: all-ones pads with unit strides are exactly SAME padding.
        # NOTE(review): `max(strides)` assumes strides is a sequence here —
        # an int stride would raise TypeError; confirm callers always pass lists.
        if pads is not None and max(pads) == 1 and max(strides) == 1:
            self.conv = keras.layers.Conv1D(
                out_channel_num, kernel_size, strides, "SAME", use_bias=False if bias is None else True,
                weights=[weights] if bias is None else [weights, bias],
                dilation_rate=dilations, groups=group)
        else:
            self.conv = keras.layers.Conv1D(
                out_channel_num, kernel_size, strides, "VALID", use_bias=False if bias is None else True,
                weights=[weights] if bias is None else [weights, bias],
                dilation_rate=dilations, groups=group)
            if pads is not None and max(pads) != 0:
                self.pad = keras.layers.ZeroPadding1D(padding=pads)

    def conv2d_init(self, in_channel_num, out_channel_num, kernel_size=1,
                    strides=1, dilations=1, pads=None, weights=None, bias=None, group=1):
        if isinstance(dilations, int):
            dilations = (dilations, dilations)
        if isinstance(strides, int):
            strides = (strides, strides)
        if dilations[0] != 1 and strides[0] != 1:
            raise Exception("Currently, specifying any dilation_rate value != 1 is incompatible with specifying any stride value != 1.")

        self.pad = None
        if pads is not None and max(pads) == 1 and max(strides) == 1:
            self.conv = keras.layers.Conv2D(
                out_channel_num, kernel_size, strides, "SAME", use_bias=False if bias is None else True,
                weights=[weights] if bias is None else [weights, bias],
                dilation_rate=dilations, groups=group)
        else:
            self.conv = keras.layers.Conv2D(
                out_channel_num, kernel_size, strides, "VALID", use_bias=False if bias is None else True,
                weights=[weights] if bias is None else [weights, bias],
                dilation_rate=dilations, groups=group)
            if pads is not None and max(pads) != 0:
                padding = None
                if len(pads) == 2 and (pads[0] > 0 or pads[1] > 0):
                    padding = (pads[0], pads[1])
                elif len(pads) == 4 and (pads[0] > 0 or pads[1] > 0 or pads[2] > 0 or pads[3] > 0):
                    padding = ((pads[0], pads[2]), (pads[1], pads[3]))
                self.pad = keras.layers.ZeroPadding2D(padding=padding)

    def conv3d_init(self, in_channel_num, out_channel_num, kernel_size=1,
                    strides=1, dilations=1, pads=None, weights=None, bias=None, group=1):
        raise NotImplementedError("Conv3d is not implemented")

    def __call__(self, inputs):
        if self.pad:
            inputs = self.pad(inputs)
        return self.conv(inputs)

class TFGroupConv():
    '''
    Group Convolution, emulated with per-group convs + concat (not native).
    '''
    def __init__(self, in_channel_num, out_channel_num, kernel_size=1,
                 strides=1, dilations=1, pads=None, weights=None, bias=None, group=1):
        super().__init__()

        if len(weights.shape) == 3:
            self.groupconv1d_init(in_channel_num, out_channel_num, kernel_size, strides, dilations, pads, weights, bias, group)
        elif len(weights.shape) == 4:
            self.groupconv2d_init(in_channel_num, out_channel_num, kernel_size, strides, dilations, pads, weights, bias, group)
        else:
            raise NotImplementedError(f"GroupConv{len(weights.shape)-2}d is not implemented")

    def groupconv1d_init(self, in_channel_num, out_channel_num, kernel_size=1,
                         strides=1, dilations=1, pads=None, weights=None, bias=None, group=1):
        self.cin = in_channel_num
        self.groups = group
        # Per-group output channel count; weights/bias are sliced per group.
        out_channel_num = int(out_channel_num // group)
        self.convs = []
        for i in range(group):
            if pads is not None and max(pads) == 1 and max(strides) == 1:
                self.convs.append(keras.layers.Conv1D(
                    out_channel_num, kernel_size, strides, 'SAME', use_bias=False if bias is None else True,
                    dilation_rate=dilations,
                    weights=[weights[:, :, i*out_channel_num:(i+1)*out_channel_num]] if bias is None else [weights[:, :, i*out_channel_num:(i+1)*out_channel_num], bias[i*out_channel_num:(i+1)*out_channel_num]]))
            else:
                self.convs.append(keras.layers.Conv1D(
                    out_channel_num, kernel_size, strides, 'VALID', use_bias=False if bias is None else True,
                    dilation_rate=dilations,
                    weights=[weights[:, :, i*out_channel_num:(i+1)*out_channel_num]] if bias is None else [weights[:, :, i*out_channel_num:(i+1)*out_channel_num], bias[i*out_channel_num:(i+1)*out_channel_num]]))
        self.pad = None
        if pads is not None and (max(pads) != 0 and not (max(pads) == 1 and max(strides) == 1)):
            self.pad = keras.layers.ZeroPadding1D(padding=pads)

    def groupconv2d_init(self, in_channel_num, out_channel_num, kernel_size=1,
                         strides=1, dilations=1, pads=None, weights=None, bias=None, group=1):
        if isinstance(dilations, int):
            dilations = (dilations, dilations)
        if isinstance(strides, int):
            strides = (strides, strides)
        if dilations[0] != 1 and strides[0] != 1:
            raise Exception("Currently, specifying any dilation_rate value != 1 is incompatible with specifying any stride value != 1.")
        self.cin = in_channel_num
        self.groups = group
        out_channel_num = int(out_channel_num // group)

        self.convs = []
        for i in range(group):
            if pads is not None and max(pads) == 1 and max(strides) == 1:
                self.convs.append(keras.layers.Conv2D(
                    out_channel_num, kernel_size, strides, 'SAME', use_bias=False if bias is None else True,
                    dilation_rate=dilations,
                    weights=[weights[:, :, :, i*out_channel_num:(i+1)*out_channel_num]] if bias is None else [weights[:, :, :, i*out_channel_num:(i+1)*out_channel_num], bias[i*out_channel_num:(i+1)*out_channel_num]]))
            else:
                self.convs.append(keras.layers.Conv2D(
                    out_channel_num, kernel_size, strides, 'VALID', use_bias=False if bias is None else True,
                    dilation_rate=dilations,
                    weights=[weights[:, :, :, i*out_channel_num:(i+1)*out_channel_num]] if bias is None else [weights[:, :, :, i*out_channel_num:(i+1)*out_channel_num], bias[i*out_channel_num:(i+1)*out_channel_num]]))
        self.pad = None
        if pads is not None and (max(pads) != 0 and not (max(pads) == 1 and max(strides) == 1)):
            padding = None
            if len(pads) == 2 and (pads[0] > 0 or pads[1] > 0):
                padding = (pads[0], pads[1])
            elif len(pads) == 4 and (pads[0] > 0 or pads[1] > 0 or pads[2] > 0 or pads[3] > 0):
                padding = ((pads[0], pads[2]), (pads[1], pads[3]))
            self.pad = keras.layers.ZeroPadding2D(padding=padding)

    def __call__(self, inputs):
        if self.pad is not None:
            inputs = self.pad(inputs)
        # Split channels into groups, convolve each, and concatenate back.
        outs = []
        in_s = tf.split(inputs, num_or_size_splits=self.groups, axis=-1)
        for i in range(self.groups):
            outs.append(self.convs[i](in_s[i]))
        return tf.concat(outs, axis=-1)

class TFDepthwiseConv():
    """Depthwise convolution (ONNX Conv with group == out_channels)."""

    def __init__(self, kernel_size=1, strides=1, dilations=1, pads=None, weights=None, bias=None) -> None:
        super().__init__()
        if len(weights.shape) == 3:
            weights = weights.transpose(0, 2, 1)
            self.dwconv1d_init(kernel_size, strides, dilations, pads, weights, bias)
        elif len(weights.shape) == 4:
            weights = weights.transpose(0, 1, 3, 2)
            self.dwconv2d_init(kernel_size, strides, dilations, pads, weights, bias)
        else:
            raise NotImplementedError(f"DepthwiseConv{len(weights.shape)-2}d is not implemented")

    def dwconv1d_init(self, kernel_size=1, strides=1, dilations=1, pads=None, weights=None, bias=None):
        self.pad = None
        # The zeros initializers are placeholders; the `weights=` argument
        # overwrites them with the converted ONNX kernel/bias.
        if pads is not None and max(pads) == 1 and max(strides) == 1:
            self.conv = keras.layers.DepthwiseConv1D(
                kernel_size, strides, "SAME", use_bias=False if bias is None else True,
                weights=[weights] if bias is None else [weights, bias],
                dilation_rate=dilations,
                activation=None,
                kernel_initializer='zeros',
                bias_initializer='zeros'
            )
        else:
            self.conv = keras.layers.DepthwiseConv1D(
                kernel_size, strides, "VALID", use_bias=False if bias is None else True,
                weights=[weights] if bias is None else [weights, bias],
                dilation_rate=dilations,
                activation=None,
                kernel_initializer='zeros',
                bias_initializer='zeros'
            )
            if pads is not None and max(pads) != 0:
                self.pad = keras.layers.ZeroPadding1D(padding=pads)

    def dwconv2d_init(self, kernel_size=1, strides=1, dilations=1, pads=None, weights=None, bias=None):
        if isinstance(dilations, int):
            dilations = (dilations, dilations)
        if isinstance(strides, int):
            strides = (strides, strides)

        self.pad = None
        if pads is not None and max(pads) == 1 and max(strides) == 1:
            self.conv = keras.layers.DepthwiseConv2D(
                kernel_size, strides, "SAME", use_bias=False if bias is None else True,
                weights=[weights] if bias is None else [weights, bias],
                dilation_rate=dilations,
                activation=None,
                kernel_initializer='zeros',
                bias_initializer='zeros'
            )
        else:
            self.conv = keras.layers.DepthwiseConv2D(
                kernel_size, strides, "VALID", use_bias=False if bias is None else True,
                weights=[weights] if bias is None else [weights, bias],
                dilation_rate=dilations,
                activation=None,
                kernel_initializer='zeros',
                bias_initializer='zeros'
            )
            if pads is not None and max(pads) != 0:
                padding = None
                if len(pads) == 2 and (pads[0] > 0 or pads[1] > 0):
                    padding = (pads[0], pads[1])
                elif len(pads) == 4 and (pads[0] > 0 or pads[1] > 0 or pads[2] > 0 or pads[3] > 0):
                    padding = ((pads[0], pads[2]), (pads[1], pads[3]))
                self.pad = keras.layers.ZeroPadding2D(padding=padding)

    def __call__(self, inputs):
        if self.pad:
            inputs = self.pad(inputs)
        return self.conv(inputs)
"""ONNX -> Keras/TensorFlow translations of shape/layout ("deformation") ops."""
import logging
import tensorflow as tf

from ..utils.op_registry import OPERATOR
from . import dimension_utils
import keras

LOG = logging.getLogger("deformation_layers :")

@OPERATOR.register_operator("Transpose")
class TFTranspose(keras.layers.Layer):
    """ONNX Transpose; the `perm` attribute is applied directly (no NCHW/NHWC
    remapping — earlier remapping logic was removed in this revision)."""

    def __init__(self, tensor_grap, node_weights, node_inputs, node_attribute, *args, **kwargs) -> None:
        super().__init__()
        # Kept for get_config round-trips.
        self.tensor_grap = tensor_grap
        self.node_weights = node_weights
        self.node_inputs = node_inputs
        self.node_attribute = node_attribute

        self.perm_list = node_attribute['perm']

    def call(self, inputs):
        return keras.ops.transpose(inputs, self.perm_list)

    def get_config(self):
        config = super().get_config()
        config.update({
            "tensor_grap": self.tensor_grap,
            'node_weights': self.node_weights,
            'node_inputs': self.node_inputs,
            'node_attribute': self.node_attribute,
            'perm_list': self.perm_list,
        })
        return config

@OPERATOR.register_operator("Slice")
class TFSlice():
    """ONNX Slice; attribute form (1 input) or inputs form (starts/ends/axes/steps)."""

    def __init__(self, tensor_grap, node_weights, node_inputs, node_attribute, *args, **kwargs) -> None:
        super().__init__()
        if len(node_inputs) == 1:
            # Opset < 10: starts/ends/axes are node attributes.
            self.starts = node_attribute['starts'][0]
            self.ends = node_attribute['ends'][0]
            self.axis = dimension_utils.channel_to_last_dimension(node_attribute['axes'][0])
            self.steps = 1
        else:
            self.starts = node_weights[node_inputs[1]][0] if node_inputs[1] in node_weights else tensor_grap[node_inputs[1]][0]
            self.axis = node_weights[node_inputs[3]][0] if node_inputs[3] in node_weights else tensor_grap[node_inputs[3]][0]
            self.axis = dimension_utils.channel_to_last_dimension(self.axis)
            self.ends = node_weights[node_inputs[2]][0] if node_inputs[2] in node_weights else tensor_grap[node_inputs[2]][0]
            # Clamp INT64_MAX-style "to the end" values to the real extent.
            self.ends = min(self.ends, tensor_grap[node_inputs[0]].shape[self.axis])
            if len(node_inputs) < 5:
                self.steps = 1
            else:
                self.steps = node_weights[node_inputs[4]][0] if node_inputs[4] in node_weights else tensor_grap[node_inputs[4]][0]

    def __call__(self, inputs):
        indices = tf.keras.backend.arange(self.starts, self.ends, step=self.steps)
        return tf.gather(inputs, indices, axis=self.axis)


@OPERATOR.register_operator("Gather")
class TFGather(keras.layers.Layer):
    """ONNX Gather via keras.ops.take."""

    def __init__(self, tensor_grap, node_weights, node_inputs, node_attribute, *args, **kwargs) -> None:
        super().__init__()
        self.tensor_grap = tensor_grap
        self.node_weights = node_weights
        self.node_inputs = node_inputs
        self.node_attribute = node_attribute

        self.axis = node_attribute.get('axis', 0)
        self.indices = tensor_grap[node_inputs[1]] if node_inputs[1] in tensor_grap else node_weights[node_inputs[1]]

    def call(self, inputs, *args):
        return keras.ops.take(inputs, self.indices, axis=self.axis)

    def get_config(self):
        config = super().get_config()
        config.update({
            "tensor_grap": self.tensor_grap,
            'node_weights': self.node_weights,
            'node_inputs': self.node_inputs,
            'node_attribute': self.node_attribute,
            'indices': self.indices,
            'axis': self.axis
        })
        return config


@OPERATOR.register_operator("Concat")
class TFConcat(keras.layers.Layer):
    """ONNX Concat; concatenates the call-time inputs along `axis`."""

    def __init__(self, tensor_grap, node_weights, node_inputs, node_attribute, *args, **kwargs):
        super().__init__()
        self.tensor_grap = tensor_grap
        self.node_weights = node_weights
        self.node_inputs = node_inputs
        self.node_attribute = node_attribute

        self._axis = node_attribute['axis']
        # Build-time snapshot of the operands (graph tensors or weights);
        # kept for get_config, but call() concatenates the runtime args.
        self._gather = [tensor_grap[x] if x in tensor_grap else node_weights[x] for x in node_inputs]

    def call(self, *args, **kwargs):
        # Cleaned up: removed leftover debug prints and an unreachable
        # second `return tf.concat(self._gather, ...)` after this return.
        return keras.ops.concatenate(args, axis=self._axis)

    def get_config(self):
        config = super().get_config()
        config.update({
            "tensor_grap": self.tensor_grap,
            'node_weights': self.node_weights,
            'node_inputs': self.node_inputs,
            'node_attribute': self.node_attribute,
            'axis': self._axis,
            'gather': self._gather
        })
        return config

@OPERATOR.register_operator("Reshape")
class TFReshape():
    """ONNX Reshape: transpose back to NCHW, reshape, transpose to NHWC."""

    def __init__(self, tensor_grap, node_weights, node_inputs, node_attribute, *args, **kwargs):
        super().__init__()
        self.out_shape = node_weights[node_inputs[1]]
        self.trans_in, self.trans_out = None, None
        LOG.info("Reshape will process tensor after change back to NCHW format.")
        shape_len = len(tensor_grap[node_inputs[0]].shape)
        self.trans_in = [0, shape_len - 1] + [n for n in range(1, shape_len - 1)]
        self.trans_out = [0] + [n for n in range(2, len(self.out_shape))] + [1]

    def __call__(self, inputs):
        inputs = tf.transpose(inputs, perm=self.trans_in)
        inputs = tf.reshape(inputs, shape=self.out_shape)
        return tf.transpose(inputs, perm=self.trans_out)

@OPERATOR.register_operator("Flatten")
class TFFlatten():
    """ONNX Flatten via keras Flatten, with a layout-fixing transpose when needed."""

    def __init__(self, tensor_grap, node_weights, node_inputs, node_attribute, *args, **kwargs) -> None:
        super().__init__()
        num_elements = int(tensor_grap[node_inputs[0]].shape.num_elements() / tensor_grap[node_inputs[0]].shape[0])
        input_shape = tensor_grap[node_inputs[0]].shape
        self.flat = tf.keras.layers.Flatten()
        '''
        Ensure memory order matches, for example:
            onnx   = (B, 2, 3, 4).reshape(B, -1)
            tflite = (B, 3, 4, 2).reshape(B, -1)
        onnx.shape == tflite.shape, but np.sum(onnx - tflite) != 0 because the
        memory order differs; we must transpose tflite back to onnx order.
        The exception (no transpose needed) is when all non-batch size lives on
        one axis, e.g. (B, 512, 1, 1) vs (B, 1, 1, 512) — same memory order.
        '''
        self.perm = None
        if num_elements != max(input_shape[1:]):
            self.perm = [0, len(input_shape) - 1]
            for i in range(len(input_shape) - 2):
                self.perm.append(i + 1)

    def __call__(self, inputs):
        if self.perm:
            inputs = tf.transpose(inputs, perm=self.perm)
        return self.flat(inputs)

@OPERATOR.register_operator("Split")
class TFSplit(keras.layers.Layer):
    """ONNX Split; each instance extracts the output selected by kwargs['index']."""

    def __init__(self, tensor_grap, node_weights, node_inputs, node_attribute, *args, **kwargs) -> None:
        super().__init__()
        self.tensor_grap = tensor_grap
        self.node_weights = node_weights
        self.node_inputs = node_inputs
        self.node_attribute = node_attribute

        # Sum the sizes of the preceding splits to locate this output's slice.
        index = kwargs.get('index', 0)
        start = 0
        for i in range(index):
            start += int(node_attribute['split'][i])
        end = start + node_attribute['split'][index]
        self.indices = keras.ops.arange(start, end, 1)
        self.axis = node_attribute.get("axis", 0)

    def call(self, inputs):
        return keras.ops.take(inputs, indices=self.indices, axis=self.axis)

    def get_config(self):
        config = super().get_config()
        config.update({
            "tensor_grap": self.tensor_grap,
            'node_weights': self.node_weights,
            'node_inputs': self.node_inputs,
            'node_attribute': self.node_attribute,
            "indices": self.indices,
            "axis": self.axis
        })
        return config

@OPERATOR.register_operator("Expand")
class TFExpand():
    """ONNX Expand, emulated with tf.repeat along each axis."""

    def __init__(self, tensor_grap, node_weights, node_inputs, node_attribute, *args, **kwargs) -> None:
        super().__init__()
        self.shape = dimension_utils.shape_NCD_to_NDC_format(node_weights[node_inputs[1]])

    def __call__(self, inputs):
        # NOTE(review): repeat-based expand assumes integer multiples; the
        # second branch's semantics look off for true broadcasting — confirm.
        for i in range(len(self.shape)):
            if int(self.shape[i] // inputs.shape[i]) > 1:
                inputs = tf.repeat(inputs, repeats=int(self.shape[i] // inputs.shape[i]), axis=i)
            elif self.shape[i] < inputs.shape[i] and self.shape[i] != 1:
                inputs = tf.repeat(inputs, repeats=int(self.shape[i]), axis=i)
        return inputs

@OPERATOR.register_operator("Unsqueeze")
class TFUnsqueeze(keras.layers.Layer):
    """ONNX Unsqueeze: insert a size-1 axis at `axes[0]`."""

    def __init__(self, tensor_grap, node_weights, node_inputs, node_attribute, *args, **kwargs) -> None:
        super().__init__()
        self.tensor_grap = tensor_grap
        self.node_weights = node_weights
        self.node_inputs = node_inputs
        self.node_attribute = node_attribute

        self.axis = node_attribute['axes'][0]

    def call(self, inputs):
        return keras.ops.expand_dims(inputs, self.axis)

    def get_config(self):
        config = super().get_config()
        config.update({
            "tensor_grap": self.tensor_grap,
            'node_weights': self.node_weights,
            'node_inputs': self.node_inputs,
            'node_attribute': self.node_attribute,
            "axis": self.axis
        })
        return config

@OPERATOR.register_operator("Squeeze")
class TFSqueeze(keras.layers.Layer):
    """ONNX Squeeze: drop the size-1 axis at `axes[0]`."""

    def __init__(self, tensor_grap, node_weights, node_inputs, node_attribute, *args, **kwargs) -> None:
        super().__init__()
        self.tensor_grap = tensor_grap
        self.node_weights = node_weights
        self.node_inputs = node_inputs
        self.node_attribute = node_attribute

        self.axis = node_attribute['axes'][0]

    def call(self, inputs):
        return keras.ops.squeeze(inputs, self.axis)

    def get_config(self):
        config = super().get_config()
        config.update({
            "tensor_grap": self.tensor_grap,
            'node_weights': self.node_weights,
            'node_inputs': self.node_inputs,
            'node_attribute': self.node_attribute,
            "axis": self.axis
        })
        return config

@OPERATOR.register_operator("DepthToSpace")
class TFDepthToSpace():
    """ONNX DepthToSpace; DCR uses the native op, CRD is emulated by reshapes."""

    def __init__(self, tensor_grap, node_weights, node_inputs, node_attribute, *args, **kwargs) -> None:
        super().__init__()
        self.block_size = node_attribute.get("blocksize", 2)
        self.mode = node_attribute.get("mode", "DCR")

    def __call__(self, inputs):
        if self.mode == "DCR":
            return tf.nn.depth_to_space(inputs, self.block_size)
        elif self.mode == "CRD":
            # Native tensorflow does not support CRD mode; this emulation
            # introduces a temporary 6-D tensor.
            b, h, w, c = inputs.shape
            tmp = tf.reshape(inputs, [b, h, w, c // (self.block_size * self.block_size), self.block_size, self.block_size])
            tmp = tf.transpose(tmp, perm=[0, 1, 4, 2, 5, 3])
            tmp = tf.reshape(tmp, [b, h * self.block_size, w * self.block_size, c // (self.block_size * self.block_size)])
            return tmp
        else:
            raise KeyError(f"For DepthToSpace, mode must be [DCR, CRD], not {self.mode}")


# ---------------------------------------------------------------------------
# dimension_utils.py
# ---------------------------------------------------------------------------
'''
    Shape and axis transform helper functions.
'''
from typing import Union


def channel_to_last_dimension(axis):
    """Map a channel-first axis index to its channel-last equivalent.

    Batch (0) stays 0, channel (1) becomes -1, spatial axes shift down by one.
    """
    if axis == 0:
        axis = 0
    elif axis == 1:
        axis = -1
    else:
        axis -= 1
    return axis

def shape_NCD_to_NDC_format(shape: Union[list, tuple]):
    """Reorder a channel-first shape (N, C, D...) to channel-last (N, D..., C).

    (Annotation fixed: `list or tuple` evaluated to just `list` at def time.)
    """
    if len(shape) <= 2:
        return tuple(shape)
    new_shape = [shape[0], *shape[2:], shape[1]]
    return tuple(new_shape)

def tensor_NCD_to_NDC_format(tensor):
    """Transpose a channel-first tensor to channel-last layout."""
    if len(tensor.shape) > 2:
        shape = [i for i in range(len(tensor.shape))]
        shape = shape_NCD_to_NDC_format(shape)
        tensor = tf.transpose(tensor, perm=shape)
    return tensor

def intfloat_to_list(x: Union[int, float, list, tuple], lens: int):
    """Broadcast a scalar to a list of length `lens`; pass sequences through."""
    if isinstance(x, (int, float)):
        return [x] * lens
    else:
        return x


# ---------------------------------------------------------------------------
# mathematics_layers.py (head; continues beyond this view)
# ---------------------------------------------------------------------------
import logging
import numpy as np
import tensorflow as tf

from ..utils.op_registry import OPERATOR
from . import dimension_utils
import keras

LOG = logging.getLogger("calculations_layers :")

def np2tf(x):
    """Return (tensor, was_already_tensor): numpy arrays become float32 tensors."""
    if isinstance(x, np.ndarray):
        x = tf.convert_to_tensor(x, dtype=tf.float32)
        return x, False
    return x, True

def match_tensor(x1:tf.Tensor or np.ndarray, x2:tf.Tensor or np.ndarray):

    x1, f1 = np2tf(x1)
    x2, f2 = np2tf(x2)

    # no need to transpose if all var are tensor, we assume tensor are computed by gragh.
+ if f1 and f2: + return x1, x2 + + # ensure tensor is set to x1, weights set to x2 + if f2: + x1, x2 = x2, x1 + + # if x1.shape.ndims != x2.shape.ndims: + # while x2.shape.ndims < x1.shape.ndims: + # x2 = tf.expand_dims(x2, axis=0) + if len(x1.shape) != len(x2.shape): + while len(x2.shape) < len(x1.shape): + x2 = tf.expand_dims(x2, axis=0) + + # new_shape = dimension_utils.shape_NCD_to_NDC_format([i for i in range(len(x2.shape))]) + # x2 = tf.transpose(x2, new_shape) + return (x2, x1) if f2 else (x1, x2) + + +@OPERATOR.register_operator("Add") +class TFAdd(keras.layers.Layer): + def __init__(self,tensor_grap, node_weights, node_inputs, node_attribute, *args, **kwargs): + super().__init__() + self.tensor_grap = tensor_grap + self.node_weights = node_weights + self.node_inputs = node_inputs + self.node_attribute = node_attribute + self.first_operand = tensor_grap[node_inputs[0]] if node_inputs[0] in tensor_grap else node_weights[node_inputs[0]] + self.second_operand = tensor_grap[node_inputs[1]] if node_inputs[1] in tensor_grap else node_weights[node_inputs[1]] + self.first_operand, self.second_operand = match_tensor(self.first_operand, self.second_operand) + + def call(self, first_operand, second_operand,*args, **kwargs): + return keras.ops.add(first_operand, second_operand) + + def get_config(self): + config = super().get_config() + config.update({ + "tensor_grap":self.tensor_grap, + 'node_weights':self.node_weights, + 'node_inputs':self.node_inputs, + 'node_attribute':self.node_attribute, + "first_operand": self.first_operand, + "second_operand": self.second_operand, + }) + return config + +@OPERATOR.register_operator("Sub") +class TFSub(keras.layers.Layer): + def __init__(self,tensor_grap, node_weights, node_inputs, node_attribute, *args, **kwargs): + super().__init__() + self.tensor_grap = tensor_grap + self.node_weights = node_weights + self.node_inputs = node_inputs + self.node_attribute = node_attribute + self.first_operand = tensor_grap[node_inputs[0]] if 
node_inputs[0] in tensor_grap else node_weights[node_inputs[0]] + self.second_operand = tensor_grap[node_inputs[1]] if node_inputs[1] in tensor_grap else node_weights[node_inputs[1]] + self.first_operand, self.second_operand = match_tensor(self.first_operand, self.second_operand) + + def call(self, first_operand, second_operand,*args, **kwargs): + return keras.ops.subtract(first_operand, second_operand) + + def get_config(self): + config = super().get_config() + config.update({ + "tensor_grap":self.tensor_grap, + 'node_weights':self.node_weights, + 'node_inputs':self.node_inputs, + 'node_attribute':self.node_attribute, + "first_operand": self.first_operand, + "second_operand": self.second_operand, + }) + return config + +@OPERATOR.register_operator("Mul") +class TFMul(keras.layers.Layer): + def __init__(self,tensor_grap, node_weights, node_inputs, node_attribute, *args, **kwargs): + super().__init__() + self.tensor_grap = tensor_grap + self.node_weights = node_weights + self.node_inputs = node_inputs + self.node_attribute = node_attribute + self.first_operand = tensor_grap[node_inputs[0]] if node_inputs[0] in tensor_grap else node_weights[node_inputs[0]] + self.second_operand = tensor_grap[node_inputs[1]] if node_inputs[1] in tensor_grap else node_weights[node_inputs[1]] + self.first_operand, self.second_operand = match_tensor(self.first_operand, self.second_operand) + + def call(self, first_operand, second_operand,*args, **kwargs): + return keras.ops.multiply(first_operand, second_operand) + + def get_config(self): + config = super().get_config() + config.update({ + "tensor_grap":self.tensor_grap, + 'node_weights':self.node_weights, + 'node_inputs':self.node_inputs, + 'node_attribute':self.node_attribute, + "first_operand": self.first_operand, + "second_operand": self.second_operand, + }) + return config + +@OPERATOR.register_operator("Div") +class TFDiv(keras.layers.Layer): + def __init__(self,tensor_grap, node_weights, node_inputs, node_attribute, *args, 
**kwargs): + super().__init__() + self.tensor_grap = tensor_grap + self.node_weights = node_weights + self.node_inputs = node_inputs + self.node_attribute = node_attribute + self.first_operand = tensor_grap[node_inputs[0]] if node_inputs[0] in tensor_grap else node_weights[node_inputs[0]] + self.second_operand = tensor_grap[node_inputs[1]] if node_inputs[1] in tensor_grap else node_weights[node_inputs[1]] + self.first_operand, self.second_operand = match_tensor(self.first_operand, self.second_operand) + + + def call(self,first_operand, second_operand, *args, **kwargs): + return keras.ops.divide(first_operand, second_operand) + + + def get_config(self): + config = super().get_config() + config.update({ + "tensor_grap":self.tensor_grap, + 'node_weights':self.node_weights, + 'node_inputs':self.node_inputs, + 'node_attribute':self.node_attribute, + "first_operand": self.first_operand, + "second_operand": self.second_operand, + }) + return config + +@OPERATOR.register_operator("Equal") +class TFEqual(keras.layers.Layer): + def __init__(self,tensor_grap, node_weights, node_inputs, node_attribute, *args, **kwargs): + super().__init__() + self.tensor_grap = tensor_grap + self.node_weights = node_weights + self.node_inputs = node_inputs + self.node_attribute = node_attribute + self.first_operand = tensor_grap[node_inputs[0]] if node_inputs[0] in tensor_grap else node_weights[node_inputs[0]] + self.second_operand = tensor_grap[node_inputs[1]] if node_inputs[1] in tensor_grap else node_weights[node_inputs[1]] + self.first_operand, self.second_operand = match_tensor(self.first_operand, self.second_operand) + + def call(self, first_operand, second_operand,*args, **kwargs): + return keras.ops.equal(first_operand, second_operand) + + + def get_config(self): + config = super().get_config() + config.update({ + "tensor_grap":self.tensor_grap, + 'node_weights':self.node_weights, + 'node_inputs':self.node_inputs, + 'node_attribute':self.node_attribute, + "first_operand": 
self.first_operand, + "second_operand": self.second_operand, + }) + return config + +@OPERATOR.register_operator("Less") +class TFLess(keras.layers.Layer): + def __init__(self,tensor_grap, node_weights, node_inputs, node_attribute, *args, **kwargs): + super().__init__() + self.tensor_grap = tensor_grap + self.node_weights = node_weights + self.node_inputs = node_inputs + self.node_attribute = node_attribute + self.first_operand = tensor_grap[node_inputs[0]] if node_inputs[0] in tensor_grap else node_weights[node_inputs[0]] + self.second_operand = tensor_grap[node_inputs[1]] if node_inputs[1] in tensor_grap else node_weights[node_inputs[1]] + self.first_operand, self.second_operand = match_tensor(self.first_operand, self.second_operand) + + + def call(self,first_operand, second_operand, *args, **kwargs): + return keras.ops.less(first_operand, second_operand) + + + def get_config(self): + config = super().get_config() + config.update({ + "tensor_grap":self.tensor_grap, + 'node_weights':self.node_weights, + 'node_inputs':self.node_inputs, + 'node_attribute':self.node_attribute, + "first_operand": self.first_operand, + "second_operand": self.second_operand, + }) + return config + +@OPERATOR.register_operator("Greater") +class TFGreater(keras.layers.Layer): + def __init__(self,tensor_grap, node_weights, node_inputs, node_attribute, *args, **kwargs): + super().__init__() + self.tensor_grap = tensor_grap + self.node_weights = node_weights + self.node_inputs = node_inputs + self.node_attribute = node_attribute + self.first_operand = tensor_grap[node_inputs[0]] if node_inputs[0] in tensor_grap else node_weights[node_inputs[0]] + self.second_operand = tensor_grap[node_inputs[1]] if node_inputs[1] in tensor_grap else node_weights[node_inputs[1]] + self.first_operand, self.second_operand = match_tensor(self.first_operand, self.second_operand) + + + def call(self,first_operand, second_operand, *args, **kwargs): + return keras.ops.greater(first_operand, second_operand) + + + def 
get_config(self): + config = super().get_config() + config.update({ + "tensor_grap":self.tensor_grap, + 'node_weights':self.node_weights, + 'node_inputs':self.node_inputs, + 'node_attribute':self.node_attribute, + "first_operand": self.first_operand, + "second_operand": self.second_operand, + + }) + return config + + +@OPERATOR.register_operator("Where") +class TFWhere(keras.layers.Layer): + def __init__(self,tensor_grap, node_weights, node_inputs, node_attribute, *args, **kwargs): + super().__init__() + self.tensor_grap = tensor_grap + self.node_weights = node_weights + self.node_inputs = node_inputs + self.node_attribute = node_attribute + self.true_value = tensor_grap[node_inputs[1]] if node_inputs[1] in tensor_grap else node_weights[node_inputs[1]] + self.false_value = tensor_grap[node_inputs[2]] if node_inputs[2] in tensor_grap else node_weights[node_inputs[2]] + self.true_value, self.false_value = match_tensor(self.true_value, self.false_value) + + + def call(self, condition, true_value, false_value, *args,**kwargs): + return keras.ops.where(condition, true_value, false_value) + + def get_config(self): + config = super().get_config() + config.update({ + "tensor_grap":self.tensor_grap, + 'node_weights':self.node_weights, + 'node_inputs':self.node_inputs, + 'node_attribute':self.node_attribute, + "true_value": self.true_value, + "false_value": self.false_value + }) + return config + +@OPERATOR.register_operator("Not") +class TFNot(keras.layers.Layer): + def __init__(self, *args, **kwargs): + super().__init__() + + + def call(self,input, *args, **kwargs): + return keras.ops.logical_not(input) + +@OPERATOR.register_operator("And") +class TFAnd(keras.layers.Layer): + def __init__(self, *args, **kwargs): + super().__init__() + + def call(self, *args, **kwargs): + return keras.ops.logical_and(args[0], args[1]) + +@OPERATOR.register_operator("Or") +class TFOr(keras.layers.Layer): + def __init__(self, *args, **kwargs): + super().__init__() + + def call(self, *args, 
**kwargs): + return keras.ops.logical_or(args[0], args[1]) + + +@OPERATOR.register_operator("Abs") +class TFAbs(keras.layers.Layer): + def __init__(self, *args, **kwargs): + super().__init__() + + def call(self,input, *args, **kwargs): + return keras.ops.absolute(input) + +@OPERATOR.register_operator("Reciprocal") +class TFReciprocal(keras.layers.Layer): + def __init__(self, *args, **kwargs): + super().__init__() + + def __call__(self, inputs, *args, **kwargs): + return keras.ops.reciprocal(inputs) + +@OPERATOR.register_operator("Sqrt") +class TFSqrt(keras.layers.Layer): + def __init__(self, *args, **kwargs): + super().__init__() + + def __call__(self, inputs, *args, **kwargs): + return keras.ops.sqrt(inputs) + +@OPERATOR.register_operator("Exp") +class TFExp(keras.layers.Layer): + def __init__(self, *args, **kwargs): + super().__init__() + + def cal(self, inputs, *args, **kwargs): + return keras.ops.exp(inputs) + + +@OPERATOR.register_operator("Log") +class TFLog(keras.layers.Layer): + def __init__(self, *args, **kwargs): + super().__init__() + + def call(self, inputs, *args, **kwargs): + return keras.ops.log(inputs) + +@OPERATOR.register_operator("Floor") +class TFFloor(keras.layers.Layer): + def __init__(self, *args, **kwargs): + super().__init__() + + def call(self, inputs, *args, **kwargs): + return keras.ops.floor(inputs) + +@OPERATOR.register_operator("Ceil") +class TFCeil(keras.layers.Layer): + def __init__(self, *args, **kwargs): + super().__init__() + + def call(self, inputs, *args, **kwargs): + return keras.ops.ceil(inputs) + + +@OPERATOR.register_operator("ReduceMax") +class TFReduceMax(keras.layers.Layer): + def __init__(self, tensor_grap, node_weights, node_inputs, node_attribute, *args, **kwargs): + super().__init__() + self.tensor_grap = tensor_grap + self.node_weights = node_weights + self.node_inputs = node_inputs + self.node_attribute = node_attribute + + self.keep_dims = node_attribute.get("keepdims", 1) == 1 + self.axes = 
node_attribute.get("axes", None) + self.initial = node_attribute.get("initial", None) + + def call(self, inputs, *args, **kwargs): + return keras.ops.max(inputs, axis=self.axes, keepdims=self.keep_dims, initial= self.initial) + + def get_config(self): + config = super().get_config() + config.update({ + "tensor_grap":self.tensor_grap, + 'node_weights':self.node_weights, + 'node_inputs':self.node_inputs, + 'node_attribute':self.node_attribute + }) + return config + +@OPERATOR.register_operator("ReduceMin") +class TFReduceMin(keras.layers.Layer): + def __init__(self, tensor_grap, node_weights, node_inputs, node_attribute, *args, **kwargs): + super().__init__() + self.tensor_grap = tensor_grap + self.node_weights = node_weights + self.node_inputs = node_inputs + self.node_attribute = node_attribute + + self.keep_dims = node_attribute.get("keepdims", 1) == 1 + self.axes = node_attribute.get("axes", None) + self.initial = node_attribute.get("initial", None) + + def call(self, inputs, *args, **kwargs): + return keras.ops.min(inputs, axis=self.axes, keepdims=self.keep_dims, initial = self.initial) + + def get_config(self): + config = super().get_config() + config.update({ + "tensor_grap":self.tensor_grap, + 'node_weights':self.node_weights, + 'node_inputs':self.node_inputs, + 'node_attribute':self.node_attribute + }) + return config + + +@OPERATOR.register_operator("ReduceSum") +class TFReduceSum(keras.layers.Layer): + def __init__(self, tensor_grap, node_weights, node_inputs, node_attribute, *args, **kwargs): + super().__init__() + self.tensor_grap = tensor_grap + self.node_weights = node_weights + self.node_inputs = node_inputs + self.node_attribute = node_attribute + + self.keep_dims = node_attribute.get("keepdims", 1) == 1 + self.axes = node_attribute.get("axes", None) + + def call(self, inputs, *args, **kwargs): + return keras.ops.sum(inputs, axis = self.axes, keepdims=self.keep_dims) + + def get_config(self): + config = super().get_config() + config.update({ + 
"tensor_grap":self.tensor_grap, + 'node_weights':self.node_weights, + 'node_inputs':self.node_inputs, + 'node_attribute':self.node_attribute + }) + return config + + +@OPERATOR.register_operator("ReduceMean") +class TFReduceMean(keras.layers.Layer): + def __init__(self, tensor_grap, node_weights, node_inputs, node_attribute, *args, **kwargs): + super().__init__() + self.tensor_grap = tensor_grap + self.node_weights = node_weights + self.node_inputs = node_inputs + self.node_attribute = node_attribute + + self.keep_dims = node_attribute.get("keepdims", 1) == 1 + self.axes = node_attribute.get("axes", None) + + def call(self, inputs, *args, **kwargs): + return keras.ops.mean(inputs, axis = self.axes, keepdims=self.keep_dims) + + def get_config(self): + config = super().get_config() + config.update({ + "tensor_grap":self.tensor_grap, + 'node_weights':self.node_weights, + 'node_inputs':self.node_inputs, + 'node_attribute':self.node_attribute + }) + return config + + +@OPERATOR.register_operator("Shape") +class TFShape(keras.layers.Layer): + def __init__(self, *args, **kwargs): + super().__init__() + + def call(self, inputs, *args, **kwargs): + return keras.ops.array([*keras.ops.shape(inputs)]) + + +@OPERATOR.register_operator("ConstantOfShape") +class TFConstantOfShape(keras.layers.Layer): + def __init__(self,tensor_grap,node_weights, node_inputs,node_attribute,*args, **kwargs): + super().__init__() + self.tensor_grap = tensor_grap + self.node_weights = node_weights + self.node_inputs = node_inputs + self.node_attribute = node_attribute + self.value = self.node_attribute['value'] + + def call(self, inputs,*args, **kwargs): + # print("should be one:::: ", self.node_attribute['value'][0]) + # print('type : ', type(self.node_attribute['value'][0])) + # print('hey: ', self.value) + # print('typppp: ', type(self.value)) + if 'config' in self.value: + # print("configggg") + fill_in = self.value['config']['value'][0] + else: + # print("numpy float") + fill_in = self.value[0] 
+ + # hey: {'class_name': '__numpy__', 'config': {'value': [1.0], 'dtype': 'float32'}} + print('inpuuutt size: ', inputs) + + print('shapeyy const size: ', keras.ops.full(inputs.shape, fill_in).shape) + return keras.ops.full(inputs, fill_in) + + + def get_config(self): + config = super().get_config() + config.update({ + "tensor_grap":self.tensor_grap, + 'node_weights':self.node_weights, + 'node_inputs':self.node_inputs, + 'node_attribute':self.node_attribute, + 'value': self.value + }) + return config + + + +@OPERATOR.register_operator("MatMul") +class TFMatMul(keras.layers.Layer): + def __init__(self, tensor_grap, node_weights, node_inputs, *args, **kwargs): + super().__init__() + self.tensor_grap = tensor_grap + self.node_weights = node_weights + self.node_inputs = node_inputs + + self.first_operand = tensor_grap[node_inputs[0]] if node_inputs[0] in tensor_grap else node_weights[node_inputs[0]] + self.second_operand = tensor_grap[node_inputs[1]] if node_inputs[1] in tensor_grap else node_weights[node_inputs[1]] + + def call(self,first_operand, second_operand, *args, **kwargs): + return keras.ops.matmul(first_operand, second_operand) + + + def get_config(self): + config = super().get_config() + config.update({ + "tensor_grap":self.tensor_grap, + 'node_weights':self.node_weights, + 'node_inputs':self.node_inputs, + "first_operand": self.first_operand, + "second_operand": self.second_operand + }) + return config + + +# TO SUPPORT LATER + +# @OPERATOR.register_operator("Pow") +# class TFPow(keras.layers.Layer): +# def __init__(self, tensor_grap, node_weights, node_inputs, *args, **kwargs): +# super().__init__() +# self.tensor_grap = tensor_grap +# self.node_weights = node_weights +# self.node_inputs = node_inputs +# self.power_index = node_weights[node_inputs[1]] + +# def call(self, inputs, *args, **kwargs): +# return keras.ops.power(inputs, self.power_index) + +# def get_config(self): +# config = super().get_config() +# config.update({ +# 
"tensor_grap":self.tensor_grap, +# 'node_weights':self.node_weights, +# 'node_inputs':self.node_inputs +# }) +# return config + + + +# @OPERATOR.register_operator("ArgMax") +# class TFArgMax(): +# def __init__(self, tensor_grap, node_weights, node_inputs, node_attribute, *args, **kwargs): +# super().__init__() +# self.axis = dimension_utils.channel_to_last_dimension(node_attribute.get('axis', 0)) +# self.keepdims = node_attribute.get("keepdims", 1) == 1 + +# def __call__(self, inputs, *args, **kwargs): +# _inputs = tf.argmax(inputs, axis=self.axis) +# if self.keepdims: +# _inputs = tf.expand_dims(_inputs, axis=self.axis) +# return _inputs + +# @OPERATOR.register_operator("ArgMin") +# class TFArgMin(): +# def __init__(self, tensor_grap, node_weights, node_inputs, node_attribute, *args, **kwargs): +# super().__init__() +# self.axis = dimension_utils.channel_to_last_dimension(node_attribute.get('axis', 0)) +# self.keepdims = node_attribute.get("keepdims", 1) == 1 + +# def __call__(self, inputs, *args, **kwargs): +# _inputs = tf.argmax(inputs, axis=self.axis) +# if self.keepdims: +# _inputs = tf.expand_dims(_inputs, axis=self.axis) +# return _inputs + +# @OPERATOR.register_operator("Erf") +# class TFErf(): +# def __init__(self, *args, **kwargs) -> None: +# pass + +# def __call__(self, inputs): +# inputs = tf.math.erf(inputs) +# return inputs \ No newline at end of file diff --git a/zkstats/onnx2circom/onnx2keras/readme.md b/zkstats/onnx2circom/onnx2keras/readme.md new file mode 100644 index 0000000..71fa528 --- /dev/null +++ b/zkstats/onnx2circom/onnx2keras/readme.md @@ -0,0 +1,164 @@ +# ONNX->Keras and ONNX->TFLite tools +## Welcome +If you have some good ideas, welcome to discuss or give project PRs. 
+ +## How to use +```cmd +pip install -r requirements.txt +``` +```python +# base +python converter.py --weights "./your_model.onnx" + +# give save path +python converter.py --weights "./your_model.onnx" --outpath "./save_path" + +# save tflite model +python converter.py --weights "./your_model.onnx" --outpath "./save_path" --formats "tflite" + +# save keras and tflite model +python converter.py --weights "./your_model.onnx" --outpath "./save_path" --formats "tflite" "keras" + +# cutoff model, redefine inputs and outputs, support middle layers +python converter.py --weights "./your_model.onnx" --outpath "./save_path" --formats "tflite" --input-node-names "layer_inputname" --output-node-names "layer_outname1" "layer_outname2" + +# quantify model weight, only weight +python converter.py --weights "./your_model.onnx" --formats "tflite" --weigthquant + +# quantify model weight, include input and output +## recommend +python converter.py --weights "./your_model.onnx" --formats "tflite" --int8 --imgroot "./dataset_path" --int8mean 0 0 0 --int8std 255 255 255 +## generate random data, instead of read from image file +python converter.py --weights "./your_model.onnx" --formats "tflite" --int8 +``` +--- +## Features +- High Consistency. Compare to ONNX outputs, average error less than 1e-5 per elements. +- More Faster. Output tensorflow-lite model 30% faster than [onnx_tf](https://github.com/onnx/onnx-tensorflow). +- Auto Channel Align. Auto convert pytorch format(NCWH) to tensorflow format(NWHC). +- Deployment Support. Support output quantitative model, include fp16 quantization and uint8 quantization. +- Code Friendly. I've been trying to keep the code structure simple and clear. 
+--- + +## Pytorch -> ONNX -> Tensorflow-Keras -> Tensorflow-Lite + +- ### From torchvision to tensorflow-lite +```python +import torch +import torchvision +_input = torch.randn(1, 3, 224, 224) +model = torchvision.models.mobilenet_v2(True) +# use default settings is ok +torch.onnx.export(model, _input, './mobilenetV2.onnx', opset_version=11)# or opset_version=13 + +from converter import onnx_converter +onnx_converter( + onnx_model_path = "./mobilenetV2.onnx", + need_simplify = True, + output_path = "./", + target_formats = ['tflite'], # or ['keras'], ['keras', 'tflite'] + weight_quant = False, + int8_model = False, + int8_mean = None, + int8_std = None, + image_root = None +) +``` +- ### From custom pytorch model to tensorflow-lite-int8 +```python +import torch +import torch.nn as nn +import torch.nn.functional as F + +class MyModel(nn.Module): + def __init__(self): + self.conv = nn.Sequential( + nn.Conv2d(3, 64, kernel_size=3, padding=1), + nn.BatchNorm2d(64), + nn.ReLU(inplace=True), + ) + + def forward(self, x): + return self.conv(x) + +model = MyModel() +model.load_state_dict(torch.load("model_checkpoint.pth", map_location="cpu")) + +_input = torch.randn(1, 3, 224, 224) +torch.onnx.export(model, _input, './mymodel.onnx', opset_version=11)# or opset_version=13 + +from converter import onnx_converter +onnx_converter( + onnx_model_path = "./mymodel.onnx", + need_simplify = True, + output_path = "./", + target_formats = ['tflite'], #or ['keras'], ['keras', 'tflite'] + weight_quant = False, + int8_model = True, # do quantification + int8_mean = [123.675, 116.28, 103.53], # give mean of image preprocessing + int8_std = [58.395, 57.12, 57.375], # give std of image preprocessing + image_root = "./dataset/train" # give image folder of train +) +``` +--- +## Validated models +- [SSD](https://github.com/qfgaohao/pytorch-ssd) +- [HRNet](HRNet-Facial-Landmark-Detection) +- [YOLOX](https://github.com/Megvii-BaseDetection/YOLOX) +- 
[YOLOV3](https://github.com/ultralytics/yolov3) +- [YOLOV4](https://github.com/Tianxiaomo/pytorch-YOLOv4) +- [YOLOV5](https://github.com/ultralytics/yolov5) +- [YOLOV6](https://github.com/meituan/YOLOv6) +- [YOLOV7](https://github.com/WongKinYiu/yolov7) +- [MoveNet](https://github.com/fire717/movenet.pytorch) +- [UNet\FPN](https://github.com/bigmb/Unet-Segmentation-Pytorch-Nest-of-Unets) +- MLP(custom) +- DCGAN(custom) +- [AutoEncoder/VAE](https://github.com/AntixK/PyTorch-VAE) +- all torchvision classification models +- some segmation models in torchvision +- 1D or 2D CNN without special operators(custom) +--- +## Add operator by yourself +When you counter unspported operator, you can choose to add it by yourself or make an issue.
+It's very simple to implement a new operator parser by following these steps below.
+Step 0: Select a corresponding layer code file in [layers folder](./layers/), such as activations_layers.py for 'HardSigmoid'.
+Step 1: Open it, and edit it: +```python +# all operators regist through OPERATOR register. +# regist operator's name is onnx operator name. +@OPERATOR.register_operator("HardSigmoid") +class TFHardSigmoid(): + def __init__(self, tensor_grap, node_weights, node_inputs, node_attribute, *args, **kwargs) -> None: + ''' + :param tensor_grap: dict, key is node name, value is tensorflow-keras node output tensor. + :param node_weights: dict, key is node name, value is static data, such as weight/bias/constant, weight should be transfom by dimension_utils.tensor_NCD_to_NDC_format at most time. + :param node_inputs: List[str], stored node input names, indicates which nodes the input comes from, tensor_grap and node_weights are possible. + :param node_attribute: dict, key is attribute name, such as 'axis' or 'perm'. value type is indeterminate, such as List[int] or int or float. notice that type of 'axis' value should be adjusted form NCHW to NHWC by dimension_utils.channel_to_last_dimension or dimension_utils.shape_NCD_to_NDC_format. + ''' + super().__init__() + self.alpha = node_attribute.get("alpha", 0.2) + self.beta = node_attribute.get("beta", 0.5) + + def __call__(self, inputs): + return tf.clip_by_value(self.alpha*inputs+self.beta, 0, 1) +``` +Step 2: Make it work without error.
+Step 3: Convert model to tflite without any quantification.
+## TODO +- [ ] support Transofomer, VIT\Swin Trasnformer etc... +- [x] support cutoff onnx model and specify output layer +- [x] optimize comfirm_acc.py(removed, The output checker will run automatically.) + +--- +## Limitation +- The number of operators can not cover all models. +- Friendly to 1D/2D vision CNN, and not support 3D CNN. +- Bad support for some math or channel change operators(such as Squeeze\MatMul). +--- + +## Emmmmmmm +It's too disgusting for first(batch) or second(channel) axis change. There are always circumstances that have not been taken into account. + +# License +This software is covered by Apache-2.0 license. diff --git a/zkstats/onnx2circom/onnx2keras/requirements.txt b/zkstats/onnx2circom/onnx2keras/requirements.txt new file mode 100644 index 0000000..bc87c8c --- /dev/null +++ b/zkstats/onnx2circom/onnx2keras/requirements.txt @@ -0,0 +1,6 @@ +onnx +onnxruntime +onnx-simplifier +numpy +tensorflow>=2.5 +opencv-python \ No newline at end of file diff --git a/zkstats/onnx2circom/onnx2keras/torchvison_test.py b/zkstats/onnx2circom/onnx2keras/torchvison_test.py new file mode 100644 index 0000000..107c073 --- /dev/null +++ b/zkstats/onnx2circom/onnx2keras/torchvison_test.py @@ -0,0 +1,39 @@ +''' + unit test for torchvision models +''' +import os +import pytest + +import torch +import torchvision +from converter import onnx_converter + +MODEL_ROOT = "./unit_test" +os.makedirs(MODEL_ROOT, exist_ok=True) + +@pytest.mark.filterwarnings('ignore::UserWarning') +@pytest.mark.filterwarnings('ignore::DeprecationWarning') +def test_resnet(): + model = torchvision.models.resnet18(False) + onnx_model_path = os.path.join(MODEL_ROOT, "resnet18.onnx") + torch.onnx.export(model, torch.randn(1, 3, 224, 224), onnx_model_path, opset_version=13) + error = onnx_converter(onnx_model_path, need_simplify = True, output_path = MODEL_ROOT, target_formats = ['tflite'])['tflite_error'] + assert error < 1e-3 + +@pytest.mark.filterwarnings('ignore::UserWarning') 
+@pytest.mark.filterwarnings('ignore::DeprecationWarning') +def test_mobilenet(): + model = torchvision.models.mobilenet_v2(False) + onnx_model_path = os.path.join(MODEL_ROOT, "mobilenet_v2.onnx") + torch.onnx.export(model, torch.randn(1, 3, 224, 224), onnx_model_path, opset_version=13) + error = onnx_converter(onnx_model_path, need_simplify = True, output_path = MODEL_ROOT, target_formats = ['tflite'])['tflite_error'] + assert error < 1e-3 + +@pytest.mark.filterwarnings('ignore::UserWarning') +@pytest.mark.filterwarnings('ignore::DeprecationWarning') +def test_deeplabv3(): + model = torchvision.models.segmentation.deeplabv3_resnet50(False) + onnx_model_path = os.path.join(MODEL_ROOT, "deeplabv3_resnet50.onnx") + torch.onnx.export(model, torch.randn(1, 3, 512, 1024), onnx_model_path, opset_version=13) + error = onnx_converter(onnx_model_path, need_simplify = True, output_path = MODEL_ROOT, target_formats = ['tflite'])['tflite_error'] + assert error < 1e-3 \ No newline at end of file diff --git a/zkstats/onnx2circom/onnx2keras/utils/__init__.py b/zkstats/onnx2circom/onnx2keras/utils/__init__.py new file mode 100644 index 0000000..b3068eb --- /dev/null +++ b/zkstats/onnx2circom/onnx2keras/utils/__init__.py @@ -0,0 +1,5 @@ +from .output_check import get_elements_error +from .onnx_loader import load_onnx_modelproto +from .builder import keras_builder, tflite_builder + +__all__ = ['load_onnx_modelproto', 'keras_builder', 'tflite_builder', 'get_elements_error'] \ No newline at end of file diff --git a/zkstats/onnx2circom/onnx2keras/utils/builder.py b/zkstats/onnx2circom/onnx2keras/utils/builder.py new file mode 100644 index 0000000..e3361ab --- /dev/null +++ b/zkstats/onnx2circom/onnx2keras/utils/builder.py @@ -0,0 +1,168 @@ +import os +os.environ['TF_CPP_MIN_LOG_LEVEL'] = '2' + +import tensorflow as tf +from tensorflow import keras +from onnx import numpy_helper +from .op_registry import OPERATOR +from .dataloader import RandomLoader, ImageLoader + +from ..layers import 
conv_layers + +# copy from https://github.com/gmalivenko/onnx2keras +def decode_node_attribute(node)->dict: + """ + Parse ONNX attributes to Python dictionary + :param args: ONNX attributes object + :return: Python dictionary + """ + def onnx_attribute_to_dict(onnx_attr): + """ + Parse ONNX attribute + :param onnx_attr: ONNX attribute + :return: Python data type + """ + if onnx_attr.HasField('t'): + return numpy_helper.to_array(getattr(onnx_attr, 't')) + + for attr_type in ['f', 'i']: + if onnx_attr.HasField(attr_type): + return getattr(onnx_attr, attr_type) + + # s need to be decode, bytes to string + if onnx_attr.HasField('s'): + return getattr(onnx_attr, 's').decode() + + for attr_type in ['floats', 'ints', 'strings']: + if getattr(onnx_attr, attr_type): + return list(getattr(onnx_attr, attr_type)) + return {arg.name: onnx_attribute_to_dict(arg) for arg in node.attribute} + +def keras_builder(onnx_model, native_groupconv:bool=False): + + conv_layers.USE_NATIVE_GROUP_CONV = native_groupconv + + model_graph = onnx_model.graph + + ''' + init onnx model's build-in tensors + ''' + onnx_weights = dict() + for initializer in model_graph.initializer: + onnx_weights[initializer.name] = numpy_helper.to_array(initializer) + print('onnx weights: ', onnx_weights) + # NOT needed, since we get rid of simplify from other libraries + # print('INITIALIZER: ', model_graph.initializer) + # for init in model_graph.initializer: + # tf_tensor[init.name] = keras.backend.constant(init.raw_data, name = init.name, dtype = init.data_type) + ''' + build input nodes + ''' + tf_tensor, input_shape = {}, [] + # print('\n\n inputt: ', model_graph.input) + # print('\n\n\n') + inputs_name = [] + for inp in model_graph.input: + input_shape = [x.dim_value for x in inp.type.tensor_type.shape.dim] + if input_shape == []: + continue + batch_size = 1 if input_shape[0] <= 0 else input_shape[0] + # why original code flip this dimension + # input_shape = input_shape[2:] + input_shape[1:2] + input_shape = 
input_shape[1:] + inputs_name.append(inp.name) + # print("INPUT NAMEME: ", inp.name) + tf_tensor[inp.name] = keras.Input(shape=input_shape, batch_size=batch_size) + # print('builddd shape: ', tf_tensor[inp.name].shape) + + ''' + build model inline node by iterate onnx nodes. + ''' + + # print('+++++++===== input: ', model_graph) + # print('NODE model graph: ', model_graph.node) + # print('======NODE model graph: ', model_graph.node[0].input) + # node = what happens to inputs + for node in model_graph.node: + op_name, node_inputs, node_outputs = node.op_type, node.input, node.output + print("\n\nop name: ", op_name) + new_node_inputs = [] + for ele in node_inputs: + new_node_inputs.append(ele) + node_inputs = new_node_inputs + op_attr = decode_node_attribute(node) + print('op_attr::: ', op_attr) + tf_operator = OPERATOR.get(op_name) + if tf_operator is None: + raise KeyError(f"{op_name} not implemented yet") + # _inputs = None + # if len(node_inputs) > 0: + # _inputs = tf_tensor[node_inputs[0]] if node_inputs[0] in tf_tensor else onnx_weights[node_inputs[0]] + # print('First inputt: ', _inputs) + # if len(node_inputs)>1: + # print('Another input: ',tf_tensor[node_inputs[1]] ) + print('node outpussss: ', node_outputs) + for index in range(len(node_outputs)): + # all inputs to this op + print('node_inputs: ', node_inputs) + # print('deserialize: ', *node_inputs) + print('tf operator: ', tf_operator) + # output = tf_operator(tf_tensor, onnx_weights, node_inputs, op_attr, index=index)(_inputs) + _inputs = [] + for inner in node_inputs: + if inner in tf_tensor: + _inputs.append(tf_tensor[inner]) + elif inner in onnx_weights: + _inputs.append(onnx_weights[inner]) + else: + raise KeyError('NO info about this input') + + # print("BEFORE: INPUTT: ", _inputs) + output = tf_operator(tf_tensor, onnx_weights, node_inputs, op_attr, index=index)(*_inputs) + print("outputt: ", output) + tf_tensor[node_outputs[index]] = output + # print("tffff updated: ", tf_tensor) + + ''' + build 
keras model + ''' + input_nodes = [tf_tensor[x.name] for x in model_graph.input] + # print("FINAL inputs: ", input_nodes) + outputs_nodes = [tf_tensor[x.name] for x in model_graph.output] + # print("FINAL outputs: ", outputs_nodes) + keras_model = keras.Model(inputs=input_nodes, outputs=outputs_nodes) + keras_model.trainable = False + keras_model.summary() + print("All Layers: ", keras_model.layers) + print('\n\n\n') + print("Config ALL Layers: ") + for layer in keras_model.layers: + print('Name: ',layer.name) + print(layer.get_config()) + # for layer in keras_model.layers: + # layer.trainable = True + # print('Later All Layers: ',keras_model.layers ) + + return keras_model + +def tflite_builder(keras_model, weight_quant:bool=False, int8_model:bool=False, image_root:str=None, + int8_mean:list or float = [123.675, 116.28, 103.53], int8_std:list or float = [58.395, 57.12, 57.375]): + converter = tf.lite.TFLiteConverter.from_keras_model(keras_model) + converter.target_spec.supported_ops = [tf.lite.OpsSet.TFLITE_BUILTINS, tf.lite.OpsSet.SELECT_TF_OPS] + if weight_quant or int8_model: + converter.experimental_new_converter = True + converter.optimizations = [tf.lite.Optimize.DEFAULT] + + if int8_model: + assert len(keras_model.inputs) == 1, f"help want, only support single input model." 
+ shape = list(keras_model.inputs[0].shape) + dataset = RandomLoader(shape) if image_root is None else ImageLoader(image_root, shape, int8_mean, int8_std) + converter.representative_dataset = lambda: dataset + converter.target_spec.supported_ops = [tf.lite.OpsSet.TFLITE_BUILTINS_INT8, tf.lite.OpsSet.SELECT_TF_OPS] + converter.target_spec.supported_types = [] + converter.inference_input_type = tf.uint8 + converter.inference_output_type = tf.uint8 + converter.experimental_new_converter = True + + tflite_model = converter.convert() + return tflite_model \ No newline at end of file diff --git a/zkstats/onnx2circom/onnx2keras/utils/dataloader.py b/zkstats/onnx2circom/onnx2keras/utils/dataloader.py new file mode 100644 index 0000000..5282a16 --- /dev/null +++ b/zkstats/onnx2circom/onnx2keras/utils/dataloader.py @@ -0,0 +1,72 @@ +import os +import cv2 +import logging +import numpy as np + +LOG = logging.getLogger("Quantization DataLoder :") + +class RandomLoader(object): + def __init__(self, target_size): + self.target_size = target_size + LOG.warning(f"Generate quantization data from random, it's will lead to accuracy problem!") + + def __iter__(self): + self.index = 0 + return self + + def __next__(self): + if self.index > 5: + raise StopIteration() + self.index += 1 + return [np.random.randn(*self.target_size).astype(np.float32)] + +class ImageLoader(object): + ''' + generate data for quantization from image datas. + img_quan_data = (img - mean)/std, it's important for accuracy of model. + ''' + VALID_FORMAT = ['.jpg', '.png', '.jpeg'] + + def __init__(self, img_root, target_size, mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375]) -> None: + assert os.path.exists(img_root), F"{img_root} is not exists, please check!" + self.fns = os.listdir(img_root) + self.fns = list(filter(lambda fn: os.path.splitext(fn)[-1].lower() in self.VALID_FORMAT, self.fns)) + self.nums = len(self.fns) + assert self.nums > 0, f"No images detected in {img_root}." 
+ if self.nums > 100: + LOG.warning(f"{self.nums} images detected, the number of recommended images is less than 100.") + else: + LOG.info(f"{self.nums} images detected.") + self.fns = [os.path.join(img_root, fn) for fn in self.fns] + + self.batch, self.size = target_size[0], target_size[1:-1] + if isinstance(mean, list): + mean = np.array(mean, dtype=np.float32) + if isinstance(std, list): + std = np.array(std, dtype=np.float32) + self.mean, self.std = mean, std + + def __iter__(self): + self.index = 0 + return self + + def __next__(self): + if self.index >= self.nums: + raise StopIteration() + + _input = cv2.imread(self.fns[self.index]) + _input = cv2.resize(_input, self.size)[:, :, ::-1]#BGR->RGB + _input = _input.astype(np.float32) + + if self.mean is not None: + _input = (_input - self.mean) + if self.std is not None: + _input = _input/self.std + + _input = np.expand_dims(_input, axis=0) + if self.batch > 1: + _input = np.repeat(_input, self.batch, axis=0).astype(np.float32) + + self.index += 1 + return [_input] + \ No newline at end of file diff --git a/zkstats/onnx2circom/onnx2keras/utils/onnx_loader.py b/zkstats/onnx2circom/onnx2keras/utils/onnx_loader.py new file mode 100644 index 0000000..15558f3 --- /dev/null +++ b/zkstats/onnx2circom/onnx2keras/utils/onnx_loader.py @@ -0,0 +1,77 @@ +import os +import onnx +import logging +from onnxsim import simplify + +LOG = logging.getLogger("onnx_loader running:") +LOG.setLevel(logging.INFO) + +def clean_model_input(model_proto): + inputs = model_proto.graph.input + name_to_input = {} + for input in inputs: + name_to_input[input.name] = input + + names = [] + for initializer in model_proto.graph.initializer: + if initializer.name in name_to_input: + inputs.remove(name_to_input[initializer.name]) + names.append(initializer.name) + + if len(names) > 0: + LOG.warning(f"[{len(names)}] redundant input nodes are removed.\n \ + nodes name : {','.join(names)}") + +def get_onnx_submodel(onnx_model_path:str, 
input_node_names:list=None, output_node_names:list=None): + ''' + cutoff onnx model + ''' + model_proto = onnx.load(onnx_model_path) + if input_node_names is None: + input_node_names = [] + for inp in model_proto.graph.input: + input_node_names.append(inp.name) + + if output_node_names is None: + output_node_names = [] + for oup in model_proto.graph.output: + output_node_names.append(oup.name) + del model_proto + + new_model_path = os.path.splitext(onnx_model_path)[0] + "_sub.onnx" + onnx.utils.extract_model(onnx_model_path, new_model_path, input_node_names, output_node_names) + model_proto = onnx.load(new_model_path) + return model_proto + +def get_proto(onnx_model_path:str, input_node_names:list=None, output_node_names:list=None): + if input_node_names is None and output_node_names is None: + return onnx.load(onnx_model_path) + else: + return get_onnx_submodel(onnx_model_path, input_node_names, output_node_names) + +def load_onnx_modelproto(onnx_model_path:str, input_node_names:list=None, output_node_names:list=None, need_simplify:bool=True): + if not os.path.exists(onnx_model_path): + LOG.error(f"{onnx_model_path} is not exists.") + raise FileExistsError(f"{onnx_model_path} is not exists.") + model_proto = get_proto(onnx_model_path, input_node_names, output_node_names) + dynamic_input = False + # print("FIRST onnx loader proto:: ", model_proto.graph) + # for inp in model_proto.graph.input: + # for x in inp.type.tensor_type.shape.dim: + # if x.dim_value <= 0: + # dynamic_input = True + # break + +# ==== simplify lib --> make Constant Layer becomes Initializer: weird! 
+ + if need_simplify: + success = False + try: + model_proto, success = simplify(model_proto, check_n=1) + except: + success = False + if not success: + LOG.warning(f"onnxsim is failed, maybe make convert fails.") + model_proto = onnx.load(onnx_model_path) + clean_model_input(model_proto) + return model_proto \ No newline at end of file diff --git a/zkstats/onnx2circom/onnx2keras/utils/op_registry.py b/zkstats/onnx2circom/onnx2keras/utils/op_registry.py new file mode 100644 index 0000000..c96a5f3 --- /dev/null +++ b/zkstats/onnx2circom/onnx2keras/utils/op_registry.py @@ -0,0 +1,40 @@ +class Registry(object): + def __init__(self, name) -> None: + self._name = name + self._operator_dict = dict() + + def __len__(self): + return len(self._operator_dict) + + @property + def name(self): + return self._name + + @property + def operator_dict(self): + return self._operator_dict + + def get(self, key): + return self._operator_dict.get(key, None) + + def _register_operator(self, op_class, op_name=None): + if (not isinstance(op_name, str)) or op_name is None: + op_name = op_class.__name__ + + if self._operator_dict.get(op_name, None): + raise KeyError(f'{op_name} is already registered in {self._name}') + + self._operator_dict[op_name] = op_class + + def register_operator(self, name=None, op_class=None): + if op_class is not None: + self._register_operator(op_class, name) + return op_class + + def _register(cls): + self._register_operator(cls, name) + return cls + + return _register + +OPERATOR = Registry("TensorflowOP") \ No newline at end of file diff --git a/zkstats/onnx2circom/onnx2keras/utils/output_check.py b/zkstats/onnx2circom/onnx2keras/utils/output_check.py new file mode 100644 index 0000000..65afd53 --- /dev/null +++ b/zkstats/onnx2circom/onnx2keras/utils/output_check.py @@ -0,0 +1,92 @@ +import os +import numpy as np +import tensorflow as tf +import onnxruntime as ort + +def tflite_run(model_path:str) -> np.ndarray: + ''' + tflite runtime + ''' + tflite_runtime = 
tf.lite.Interpreter(model_path, num_threads=4) + tflite_runtime.allocate_tensors() + input_details, output_details = tflite_runtime.get_input_details(), tflite_runtime.get_output_details() + for i in range(len(input_details)): + tflite_runtime.set_tensor(input_details[i]['index'], np.ones(input_details[i]['shape'], dtype=np.float32)) + tflite_runtime.invoke() + + # only compare one output is ok. + tflite_output = tflite_runtime.get_tensor(output_details[0]['index']) + if len(tflite_output.shape) > 2: + shape = [i for i in range(len(tflite_output.shape))] + newshape = [shape[0], shape[-1], *shape[1:-1]] + tflite_output = tflite_output.transpose(*newshape) + + return tflite_output + +def keras_run(model_path:str) -> np.ndarray: + ''' + keras runtime + ''' + keras_runtime = tf.keras.models.load_model(model_path) + _input = [] + for inp in keras_runtime.inputs: + _input.append(np.ones(list(inp.shape), dtype=np.float32)) + + keras_output = keras_runtime.predict(_input) + # only compare one output is ok. + if isinstance(keras_output, list): + keras_output = keras_output[0] + + if len(keras_output.shape) > 2: + shape = [i for i in range(len(keras_output.shape))] + newshape = [shape[0], shape[-1], *shape[1:-1]] + keras_output = keras_output.transpose(*newshape) + + return keras_output + + +def get_elements_error(onnx_proto, keras_model_path:str, tflite_model_path:str) -> dict: + ''' + use ones input arr to check model. 
+ ''' + result = {} + # test onnx + onnx_runtime = ort.InferenceSession(onnx_proto.SerializeToString()) + onnx_inputs = {} + for inp in onnx_runtime.get_inputs(): + shape = inp.shape + if isinstance(shape[0], str) or shape[0] < 1: + shape[0] = 1 + onnx_inputs[inp.name] = np.ones(shape, dtype=np.float32) + if len(shape) > 2: + _transpose_index = [i for i in range(len(shape))] + _transpose_index = _transpose_index[0:1] + _transpose_index[2:] + _transpose_index[1:2] + onnx_outputs = onnx_runtime.run([], onnx_inputs) + + if keras_model_path is not None: + # test keras model + keras_output = keras_run(keras_model_path) + # get max error + keras_max_error = 1000 + for onnx_output in onnx_outputs: + if onnx_output.shape != keras_output.shape: + continue + diff = np.abs(onnx_output - keras_output) + max_diff = np.max(diff) + keras_max_error = min(keras_max_error, max_diff) + result['keras'] = keras_max_error + + if tflite_model_path is not None: + # test tflite + tflite_output = tflite_run(tflite_model_path) + # get max error + tflite_max_error = 1000 + for onnx_output in onnx_outputs: + if onnx_output.shape != tflite_output.shape: + continue + diff = np.abs(onnx_output - tflite_output) + max_diff = np.max(diff) + tflite_max_error = min(tflite_max_error, max_diff) + result['tflite'] = tflite_max_error + + return result \ No newline at end of file From 110f9e8ace06617242f7758d3f1db25f84efe4f0 Mon Sep 17 00:00:00 2001 From: JernKunpittaya <61564542+JernKunpittaya@users.noreply.github.com> Date: Thu, 25 Apr 2024 16:45:32 +0700 Subject: [PATCH 5/5] clean path + readme update --- tests/onnx2circom/README.md | 24 +++++--- tests/onnx2circom/test_onnx_to_circom.py | 4 +- zkstats/onnx2circom/README.md | 75 ++++++------------------ 3 files changed, 34 insertions(+), 69 deletions(-) diff --git a/tests/onnx2circom/README.md b/tests/onnx2circom/README.md index b3abb46..318dc15 100644 --- a/tests/onnx2circom/README.md +++ b/tests/onnx2circom/README.md @@ -2,31 +2,29 @@ ## Test 
onnx2keras -Follow [the instructions](../../zkstats/onnx2circom/README.md) to sync submodules: - -Go to the root of the repo and sync the submodules: -```bash -cd ../../ -git submodule init -git submodule update -``` - Run the test: + ```bash pytest -s tests/onnx2circom/test_onnx_to_keras.py ``` ## Test onnx2circom + ### circom-2-arithc + Clone circom-2-arithc. Use a fork for now. Will change to the official repo soon. + ```bash cd .. git clone https://github.com/mhchia/circom-2-arithc.git cd circom-2-arithc +git checkout mpcstats +cp .env.example .env circom_2_arithc_project_root=$(pwd) ``` Build the compiler: + ```bash cargo build --release ``` @@ -34,14 +32,19 @@ cargo build --release ### MP-SPDZ Clone the repo + ```bash cd .. git clone https://github.com/data61/MP-SPDZ cd MP-SPDZ +git remote add kevin_mpc https://github.com/mhchia/MP-SPDZ.git +git fetch kevin_mpc +git checkout arith-executor mp_spdz_project_root=$(pwd) ``` Build the MPC vm for `semi` protocol + ```bash make -j8 semi-party.x # Make sure `semi-party.x` exists @@ -51,6 +54,7 @@ ls semi-party.x ### Run the test Modify the configs in `tests/onnx2circom/test_onnx_to_circom.py` to point to the correct paths. Just fill in the paths to the two projects you just cloned. 
+ ```bash # NOTE: Change the path to your own path CIRCOM_2_ARITHC_PROJECT_ROOT = Path('/path/to/circom-2-arithc-project-root') @@ -58,11 +62,13 @@ MP_SPDZ_PROJECT_ROOT = Path('/path/to/mp-spdz-project-root') ``` Go back to the zkstats library project root + ```bash cd ../zk-stats-lib ``` Run the test: + ```bash pytest -s tests/onnx2circom/test_onnx_to_circom.py ``` diff --git a/tests/onnx2circom/test_onnx_to_circom.py b/tests/onnx2circom/test_onnx_to_circom.py index 5afbec5..382fe47 100644 --- a/tests/onnx2circom/test_onnx_to_circom.py +++ b/tests/onnx2circom/test_onnx_to_circom.py @@ -14,8 +14,8 @@ # NOTE: Change the path to your own path -CIRCOM_2_ARITHC_PROJECT_ROOT = Path('/Users/jernkun/circom-2-arithc') -MP_SPDZ_PROJECT_ROOT = Path('/Users/jernkun/MP-SPDZ') +CIRCOM_2_ARITHC_PROJECT_ROOT = Path('/path/to/circom-2-arithc-project-root') +MP_SPDZ_PROJECT_ROOT = Path('/path/to/mp-spdz-project-root') def test_onnx_to_circom(tmp_path): diff --git a/zkstats/onnx2circom/README.md b/zkstats/onnx2circom/README.md index 318dc15..c1c63a0 100644 --- a/zkstats/onnx2circom/README.md +++ b/zkstats/onnx2circom/README.md @@ -1,74 +1,33 @@ -# Steps to run +# onnx2circom -## Test onnx2keras - -Run the test: +## Run onnx2circom ```bash -pytest -s tests/onnx2circom/test_onnx_to_keras.py +$ python3 main.py model.onnx --circom_path model.circom ``` -## Test onnx2circom - -### circom-2-arithc - -Clone circom-2-arithc. Use a fork for now. Will change to the official repo soon. +See circom code in `model.circom` ```bash -cd .. -git clone https://github.com/mhchia/circom-2-arithc.git -cd circom-2-arithc -git checkout mpcstats -cp .env.example .env -circom_2_arithc_project_root=$(pwd) +$ ls model.circom +model.circom ``` -Build the compiler: - -```bash -cargo build --release -``` +## Import -### MP-SPDZ +```python +from zkstats.onnx2circom import onnx_to_circom -Clone the repo +... -```bash -cd .. 
-git clone https://github.com/data61/MP-SPDZ -cd MP-SPDZ -git remote add kevin_mpc https://github.com/mhchia/MP-SPDZ.git -git fetch kevin_mpc -git checkout arith-executor -mp_spdz_project_root=$(pwd) -``` - -Build the MPC vm for `semi` protocol - -```bash -make -j8 semi-party.x -# Make sure `semi-party.x` exists -ls semi-party.x +model_path = "model.onnx" +circom_path = "model.circom" +onnx_to_circom(model_path, circom_path) ``` -### Run the test +Note that this onnx2circom originally comes from two modified forked repos as follows -Modify the configs in `tests/onnx2circom/test_onnx_to_circom.py` to point to the correct paths. Just fill in the paths to the two projects you just cloned. +- https://github.com/JernKunpittaya/onnx2keras/tree/stats_onnx2keras +- https://github.com/JernKunpittaya/keras2circom/tree/stats_keras2circom -```bash -# NOTE: Change the path to your own path -CIRCOM_2_ARITHC_PROJECT_ROOT = Path('/path/to/circom-2-arithc-project-root') -MP_SPDZ_PROJECT_ROOT = Path('/path/to/mp-spdz-project-root') -``` - -Go back to the zkstats library project root - -```bash -cd ../zk-stats-lib -``` - -Run the test: - -```bash -pytest -s tests/onnx2circom/test_onnx_to_circom.py -``` +Our implementation for zkstats can make onnx2circom diverge a lot from these fork repos, so we migrate all codes here without using submodules anymore.