From 06dab189cb517cabe81503caa755849d99c23b97 Mon Sep 17 00:00:00 2001
From: Ilyas Moutawwakil <57442720+IlyasMoutawwakil@users.noreply.github.com>
Date: Mon, 19 Feb 2024 09:42:13 +0100
Subject: [PATCH] moved complex examples (#127)

---
 .gitignore                       |   5 +-
 README.md                        |   6 +-
 examples/api_launch.py           |   1 -
 examples/openvino_diffusion.yaml |   3 +-
 examples/fast-mteb/              | deleted: README.md, configs/, report.py, script.sh, artifacts/ (plots, CSV reports, SVG table) and Hydra experiment outputs under experiments/bge_{batch_size,seq_len}_sweep_{baseline,ort_cuda_o4,ort_trt_fp16}_batch_size(...)_sequence_length(...)/ (.hydra/ configs, hydra_config.yaml, inference_results.csv, multirun.yaml)
 examples/running-llamas/         | deleted: README.md, configs/, report.py, artifacts/Llama-{7b,13b,65b}/ (plots and CSV reports)
 examples/running-mistrals/       | deleted: README.md, configs/, report.py, artifacts/A100-80GB/ and Hydra experiment outputs
 examples/running-vicunas/        | deleted: README.md, configs/, report.py, artifacts/A100-80GB/ and Hydra experiment outputs
 examples/training-llamas/        | deleted: README.md, configs/, report.py, artifacts/Llama-2-{7b,13b}-hf/ (plots and CSV reports)
 examples/whisper/                | deleted: README.md, configs/, scripts/, reports/cuda_*/ and Hydra experiment outputs for whisper_baseline and whisper_auto_opt(O1)-(O4)
 750 files changed, 6 insertions(+), 48109 deletions(-)
 delete mode 100644 for every removed file under the examples/ directories listed above
delete mode 100644 examples/fast-mteb/experiments/bge_seq_len_sweep_ort_trt_fp16_batch_size(1)_sequence_length(64)/2/.hydra/config.yaml delete mode 100644 examples/fast-mteb/experiments/bge_seq_len_sweep_ort_trt_fp16_batch_size(1)_sequence_length(64)/2/.hydra/hydra.yaml delete mode 100644 examples/fast-mteb/experiments/bge_seq_len_sweep_ort_trt_fp16_batch_size(1)_sequence_length(64)/2/.hydra/overrides.yaml delete mode 100644 examples/fast-mteb/experiments/bge_seq_len_sweep_ort_trt_fp16_batch_size(1)_sequence_length(64)/2/hydra_config.yaml delete mode 100644 examples/fast-mteb/experiments/bge_seq_len_sweep_ort_trt_fp16_batch_size(1)_sequence_length(64)/2/inference_results.csv delete mode 100644 examples/fast-mteb/experiments/bge_seq_len_sweep_ort_trt_fp16_batch_size(None)_sequence_length(None)/multirun.yaml delete mode 100644 examples/fast-mteb/report.py delete mode 100644 examples/fast-mteb/script.sh delete mode 100644 examples/running-llamas/README.md delete mode 100644 examples/running-llamas/artifacts/Llama-13b/decode_throughput_bar_plot.png delete mode 100644 examples/running-llamas/artifacts/Llama-13b/decode_throughput_line_plot.png delete mode 100644 examples/running-llamas/artifacts/Llama-13b/full_report.csv delete mode 100644 examples/running-llamas/artifacts/Llama-13b/generate_max_memory_allocated_bar_plot.png delete mode 100644 examples/running-llamas/artifacts/Llama-13b/generate_max_memory_allocated_line_plot.png delete mode 100644 examples/running-llamas/artifacts/Llama-13b/generate_max_memory_reserved_bar_plot.png delete mode 100644 examples/running-llamas/artifacts/Llama-13b/generate_max_memory_reserved_line_plot.png delete mode 100644 examples/running-llamas/artifacts/Llama-13b/peak_decode_throughput_bar_plot.png delete mode 100644 examples/running-llamas/artifacts/Llama-13b/prefill_latency_bar_plot.png delete mode 100644 examples/running-llamas/artifacts/Llama-13b/prefill_latency_line_plot.png delete mode 100644 examples/running-llamas/artifacts/Llama-13b/short_report.csv delete mode 100644 examples/running-llamas/artifacts/Llama-65b/decode_throughput_bar_plot.png delete mode 100644 examples/running-llamas/artifacts/Llama-65b/decode_throughput_line_plot.png delete mode 100644 examples/running-llamas/artifacts/Llama-65b/full_report.csv delete mode 100644 examples/running-llamas/artifacts/Llama-65b/generate_max_memory_allocated_bar_plot.png delete mode 100644 examples/running-llamas/artifacts/Llama-65b/generate_max_memory_allocated_line_plot.png delete mode 100644 examples/running-llamas/artifacts/Llama-65b/generate_max_memory_reserved_bar_plot.png delete mode 100644 examples/running-llamas/artifacts/Llama-65b/generate_max_memory_reserved_line_plot.png delete mode 100644 examples/running-llamas/artifacts/Llama-65b/peak_decode_throughput_bar_plot.png delete mode 100644 examples/running-llamas/artifacts/Llama-65b/prefill_latency_bar_plot.png delete mode 100644 examples/running-llamas/artifacts/Llama-65b/prefill_latency_line_plot.png delete mode 100644 examples/running-llamas/artifacts/Llama-65b/short_report.csv delete mode 100644 examples/running-llamas/artifacts/Llama-7b/decode_throughput_bar_plot.png delete mode 100644 examples/running-llamas/artifacts/Llama-7b/decode_throughput_line_plot.png delete mode 100644 examples/running-llamas/artifacts/Llama-7b/full_report.csv delete mode 100644 examples/running-llamas/artifacts/Llama-7b/generate_max_memory_allocated_bar_plot.png delete mode 100644 examples/running-llamas/artifacts/Llama-7b/generate_max_memory_allocated_line_plot.png 
delete mode 100644 examples/running-llamas/artifacts/Llama-7b/generate_max_memory_reserved_bar_plot.png delete mode 100644 examples/running-llamas/artifacts/Llama-7b/generate_max_memory_reserved_line_plot.png delete mode 100644 examples/running-llamas/artifacts/Llama-7b/peak_decode_throughput_bar_plot.png delete mode 100644 examples/running-llamas/artifacts/Llama-7b/prefill_latency_bar_plot.png delete mode 100644 examples/running-llamas/artifacts/Llama-7b/prefill_latency_line_plot.png delete mode 100644 examples/running-llamas/artifacts/Llama-7b/short_report.csv delete mode 100644 examples/running-llamas/configs/_base_.yaml delete mode 100644 examples/running-llamas/configs/fp16+bt+tp=2.yaml delete mode 100644 examples/running-llamas/configs/fp16+bt.yaml delete mode 100644 examples/running-llamas/configs/fp16+dp=2.yaml delete mode 100644 examples/running-llamas/configs/fp16+fa2+tp=2.yaml delete mode 100644 examples/running-llamas/configs/fp16+fa2.yaml delete mode 100644 examples/running-llamas/configs/fp16+gptq+exllamav1+dp=2.yaml delete mode 100644 examples/running-llamas/configs/fp16+gptq+exllamav1.yaml delete mode 100644 examples/running-llamas/configs/fp16+gptq+exllamav2+dp=2.yaml delete mode 100644 examples/running-llamas/configs/fp16+gptq+exllamav2.yaml delete mode 100644 examples/running-llamas/configs/fp16+tp=2.yaml delete mode 100644 examples/running-llamas/configs/fp16.yaml delete mode 100644 examples/running-llamas/report.py delete mode 100644 examples/running-mistrals/README.md delete mode 100644 examples/running-mistrals/artifacts/A100-80GB/forward_latency_plot.png delete mode 100644 examples/running-mistrals/artifacts/A100-80GB/forward_memory_plot.png delete mode 100644 examples/running-mistrals/artifacts/A100-80GB/full_report.csv delete mode 100644 examples/running-mistrals/artifacts/A100-80GB/generate_memory_plot.png delete mode 100644 examples/running-mistrals/artifacts/A100-80GB/generate_throughput_plot.png delete mode 100644 examples/running-mistrals/artifacts/A100-80GB/rich_table.svg delete mode 100644 examples/running-mistrals/artifacts/A100-80GB/short_report.csv delete mode 100644 examples/running-mistrals/configs/_base_.yaml delete mode 100644 examples/running-mistrals/configs/awq.yaml delete mode 100644 examples/running-mistrals/configs/bnb.yaml delete mode 100644 examples/running-mistrals/configs/gptq.yaml delete mode 100644 examples/running-mistrals/experiments/A100-80GB/awq-batch_size(1)-sequence_length(512)-new_tokens(1000)/0/.hydra/config.yaml delete mode 100644 examples/running-mistrals/experiments/A100-80GB/awq-batch_size(1)-sequence_length(512)-new_tokens(1000)/0/.hydra/hydra.yaml delete mode 100644 examples/running-mistrals/experiments/A100-80GB/awq-batch_size(1)-sequence_length(512)-new_tokens(1000)/0/.hydra/overrides.yaml delete mode 100644 examples/running-mistrals/experiments/A100-80GB/awq-batch_size(1)-sequence_length(512)-new_tokens(1000)/0/hydra_config.yaml delete mode 100644 examples/running-mistrals/experiments/A100-80GB/awq-batch_size(1)-sequence_length(512)-new_tokens(1000)/0/inference_results.csv delete mode 100644 examples/running-mistrals/experiments/A100-80GB/awq-batch_size(16)-sequence_length(512)-new_tokens(1000)/4/.hydra/config.yaml delete mode 100644 examples/running-mistrals/experiments/A100-80GB/awq-batch_size(16)-sequence_length(512)-new_tokens(1000)/4/.hydra/hydra.yaml delete mode 100644 examples/running-mistrals/experiments/A100-80GB/awq-batch_size(16)-sequence_length(512)-new_tokens(1000)/4/.hydra/overrides.yaml delete mode 100644 
examples/running-mistrals/experiments/A100-80GB/awq-batch_size(16)-sequence_length(512)-new_tokens(1000)/4/hydra_config.yaml delete mode 100644 examples/running-mistrals/experiments/A100-80GB/awq-batch_size(16)-sequence_length(512)-new_tokens(1000)/4/inference_results.csv delete mode 100644 examples/running-mistrals/experiments/A100-80GB/awq-batch_size(2)-sequence_length(512)-new_tokens(1000)/1/.hydra/config.yaml delete mode 100644 examples/running-mistrals/experiments/A100-80GB/awq-batch_size(2)-sequence_length(512)-new_tokens(1000)/1/.hydra/hydra.yaml delete mode 100644 examples/running-mistrals/experiments/A100-80GB/awq-batch_size(2)-sequence_length(512)-new_tokens(1000)/1/.hydra/overrides.yaml delete mode 100644 examples/running-mistrals/experiments/A100-80GB/awq-batch_size(2)-sequence_length(512)-new_tokens(1000)/1/hydra_config.yaml delete mode 100644 examples/running-mistrals/experiments/A100-80GB/awq-batch_size(2)-sequence_length(512)-new_tokens(1000)/1/inference_results.csv delete mode 100644 examples/running-mistrals/experiments/A100-80GB/awq-batch_size(4)-sequence_length(512)-new_tokens(1000)/2/.hydra/config.yaml delete mode 100644 examples/running-mistrals/experiments/A100-80GB/awq-batch_size(4)-sequence_length(512)-new_tokens(1000)/2/.hydra/hydra.yaml delete mode 100644 examples/running-mistrals/experiments/A100-80GB/awq-batch_size(4)-sequence_length(512)-new_tokens(1000)/2/.hydra/overrides.yaml delete mode 100644 examples/running-mistrals/experiments/A100-80GB/awq-batch_size(4)-sequence_length(512)-new_tokens(1000)/2/hydra_config.yaml delete mode 100644 examples/running-mistrals/experiments/A100-80GB/awq-batch_size(4)-sequence_length(512)-new_tokens(1000)/2/inference_results.csv delete mode 100644 examples/running-mistrals/experiments/A100-80GB/awq-batch_size(8)-sequence_length(512)-new_tokens(1000)/3/.hydra/config.yaml delete mode 100644 examples/running-mistrals/experiments/A100-80GB/awq-batch_size(8)-sequence_length(512)-new_tokens(1000)/3/.hydra/hydra.yaml delete mode 100644 examples/running-mistrals/experiments/A100-80GB/awq-batch_size(8)-sequence_length(512)-new_tokens(1000)/3/.hydra/overrides.yaml delete mode 100644 examples/running-mistrals/experiments/A100-80GB/awq-batch_size(8)-sequence_length(512)-new_tokens(1000)/3/hydra_config.yaml delete mode 100644 examples/running-mistrals/experiments/A100-80GB/awq-batch_size(8)-sequence_length(512)-new_tokens(1000)/3/inference_results.csv delete mode 100644 examples/running-mistrals/experiments/A100-80GB/bnb-batch_size(1)-sequence_length(512)-new_tokens(1000)/0/.hydra/config.yaml delete mode 100644 examples/running-mistrals/experiments/A100-80GB/bnb-batch_size(1)-sequence_length(512)-new_tokens(1000)/0/.hydra/hydra.yaml delete mode 100644 examples/running-mistrals/experiments/A100-80GB/bnb-batch_size(1)-sequence_length(512)-new_tokens(1000)/0/.hydra/overrides.yaml delete mode 100644 examples/running-mistrals/experiments/A100-80GB/bnb-batch_size(1)-sequence_length(512)-new_tokens(1000)/0/hydra_config.yaml delete mode 100644 examples/running-mistrals/experiments/A100-80GB/bnb-batch_size(1)-sequence_length(512)-new_tokens(1000)/0/inference_results.csv delete mode 100644 examples/running-mistrals/experiments/A100-80GB/bnb-batch_size(16)-sequence_length(512)-new_tokens(1000)/4/.hydra/config.yaml delete mode 100644 examples/running-mistrals/experiments/A100-80GB/bnb-batch_size(16)-sequence_length(512)-new_tokens(1000)/4/.hydra/hydra.yaml delete mode 100644 
examples/running-mistrals/experiments/A100-80GB/bnb-batch_size(16)-sequence_length(512)-new_tokens(1000)/4/.hydra/overrides.yaml delete mode 100644 examples/running-mistrals/experiments/A100-80GB/bnb-batch_size(16)-sequence_length(512)-new_tokens(1000)/4/hydra_config.yaml delete mode 100644 examples/running-mistrals/experiments/A100-80GB/bnb-batch_size(16)-sequence_length(512)-new_tokens(1000)/4/inference_results.csv delete mode 100644 examples/running-mistrals/experiments/A100-80GB/bnb-batch_size(2)-sequence_length(512)-new_tokens(1000)/1/.hydra/config.yaml delete mode 100644 examples/running-mistrals/experiments/A100-80GB/bnb-batch_size(2)-sequence_length(512)-new_tokens(1000)/1/.hydra/hydra.yaml delete mode 100644 examples/running-mistrals/experiments/A100-80GB/bnb-batch_size(2)-sequence_length(512)-new_tokens(1000)/1/.hydra/overrides.yaml delete mode 100644 examples/running-mistrals/experiments/A100-80GB/bnb-batch_size(2)-sequence_length(512)-new_tokens(1000)/1/hydra_config.yaml delete mode 100644 examples/running-mistrals/experiments/A100-80GB/bnb-batch_size(2)-sequence_length(512)-new_tokens(1000)/1/inference_results.csv delete mode 100644 examples/running-mistrals/experiments/A100-80GB/bnb-batch_size(4)-sequence_length(512)-new_tokens(1000)/2/.hydra/config.yaml delete mode 100644 examples/running-mistrals/experiments/A100-80GB/bnb-batch_size(4)-sequence_length(512)-new_tokens(1000)/2/.hydra/hydra.yaml delete mode 100644 examples/running-mistrals/experiments/A100-80GB/bnb-batch_size(4)-sequence_length(512)-new_tokens(1000)/2/.hydra/overrides.yaml delete mode 100644 examples/running-mistrals/experiments/A100-80GB/bnb-batch_size(4)-sequence_length(512)-new_tokens(1000)/2/hydra_config.yaml delete mode 100644 examples/running-mistrals/experiments/A100-80GB/bnb-batch_size(4)-sequence_length(512)-new_tokens(1000)/2/inference_results.csv delete mode 100644 examples/running-mistrals/experiments/A100-80GB/bnb-batch_size(8)-sequence_length(512)-new_tokens(1000)/3/.hydra/config.yaml delete mode 100644 examples/running-mistrals/experiments/A100-80GB/bnb-batch_size(8)-sequence_length(512)-new_tokens(1000)/3/.hydra/hydra.yaml delete mode 100644 examples/running-mistrals/experiments/A100-80GB/bnb-batch_size(8)-sequence_length(512)-new_tokens(1000)/3/.hydra/overrides.yaml delete mode 100644 examples/running-mistrals/experiments/A100-80GB/bnb-batch_size(8)-sequence_length(512)-new_tokens(1000)/3/hydra_config.yaml delete mode 100644 examples/running-mistrals/experiments/A100-80GB/bnb-batch_size(8)-sequence_length(512)-new_tokens(1000)/3/inference_results.csv delete mode 100644 examples/running-mistrals/experiments/A100-80GB/fp16-batch_size(1)-sequence_length(512)-new_tokens(1000)/0/.hydra/config.yaml delete mode 100644 examples/running-mistrals/experiments/A100-80GB/fp16-batch_size(1)-sequence_length(512)-new_tokens(1000)/0/.hydra/hydra.yaml delete mode 100644 examples/running-mistrals/experiments/A100-80GB/fp16-batch_size(1)-sequence_length(512)-new_tokens(1000)/0/.hydra/overrides.yaml delete mode 100644 examples/running-mistrals/experiments/A100-80GB/fp16-batch_size(1)-sequence_length(512)-new_tokens(1000)/0/hydra_config.yaml delete mode 100644 examples/running-mistrals/experiments/A100-80GB/fp16-batch_size(1)-sequence_length(512)-new_tokens(1000)/0/inference_results.csv delete mode 100644 examples/running-mistrals/experiments/A100-80GB/fp16-batch_size(16)-sequence_length(512)-new_tokens(1000)/4/.hydra/config.yaml delete mode 100644 
examples/running-mistrals/experiments/A100-80GB/fp16-batch_size(16)-sequence_length(512)-new_tokens(1000)/4/.hydra/hydra.yaml delete mode 100644 examples/running-mistrals/experiments/A100-80GB/fp16-batch_size(16)-sequence_length(512)-new_tokens(1000)/4/.hydra/overrides.yaml delete mode 100644 examples/running-mistrals/experiments/A100-80GB/fp16-batch_size(16)-sequence_length(512)-new_tokens(1000)/4/hydra_config.yaml delete mode 100644 examples/running-mistrals/experiments/A100-80GB/fp16-batch_size(16)-sequence_length(512)-new_tokens(1000)/4/inference_results.csv delete mode 100644 examples/running-mistrals/experiments/A100-80GB/fp16-batch_size(2)-sequence_length(512)-new_tokens(1000)/1/.hydra/config.yaml delete mode 100644 examples/running-mistrals/experiments/A100-80GB/fp16-batch_size(2)-sequence_length(512)-new_tokens(1000)/1/.hydra/hydra.yaml delete mode 100644 examples/running-mistrals/experiments/A100-80GB/fp16-batch_size(2)-sequence_length(512)-new_tokens(1000)/1/.hydra/overrides.yaml delete mode 100644 examples/running-mistrals/experiments/A100-80GB/fp16-batch_size(2)-sequence_length(512)-new_tokens(1000)/1/hydra_config.yaml delete mode 100644 examples/running-mistrals/experiments/A100-80GB/fp16-batch_size(2)-sequence_length(512)-new_tokens(1000)/1/inference_results.csv delete mode 100644 examples/running-mistrals/experiments/A100-80GB/fp16-batch_size(4)-sequence_length(512)-new_tokens(1000)/2/.hydra/config.yaml delete mode 100644 examples/running-mistrals/experiments/A100-80GB/fp16-batch_size(4)-sequence_length(512)-new_tokens(1000)/2/.hydra/hydra.yaml delete mode 100644 examples/running-mistrals/experiments/A100-80GB/fp16-batch_size(4)-sequence_length(512)-new_tokens(1000)/2/.hydra/overrides.yaml delete mode 100644 examples/running-mistrals/experiments/A100-80GB/fp16-batch_size(4)-sequence_length(512)-new_tokens(1000)/2/hydra_config.yaml delete mode 100644 examples/running-mistrals/experiments/A100-80GB/fp16-batch_size(4)-sequence_length(512)-new_tokens(1000)/2/inference_results.csv delete mode 100644 examples/running-mistrals/experiments/A100-80GB/fp16-batch_size(8)-sequence_length(512)-new_tokens(1000)/3/.hydra/config.yaml delete mode 100644 examples/running-mistrals/experiments/A100-80GB/fp16-batch_size(8)-sequence_length(512)-new_tokens(1000)/3/.hydra/hydra.yaml delete mode 100644 examples/running-mistrals/experiments/A100-80GB/fp16-batch_size(8)-sequence_length(512)-new_tokens(1000)/3/.hydra/overrides.yaml delete mode 100644 examples/running-mistrals/experiments/A100-80GB/fp16-batch_size(8)-sequence_length(512)-new_tokens(1000)/3/hydra_config.yaml delete mode 100644 examples/running-mistrals/experiments/A100-80GB/fp16-batch_size(8)-sequence_length(512)-new_tokens(1000)/3/inference_results.csv delete mode 100644 examples/running-mistrals/experiments/A100-80GB/fp16-batch_size(None)-sequence_length(512)-new_tokens(1000)/multirun.yaml delete mode 100644 examples/running-mistrals/experiments/A100-80GB/gptq-batch_size(1)-sequence_length(512)-new_tokens(1000)/0/.hydra/config.yaml delete mode 100644 examples/running-mistrals/experiments/A100-80GB/gptq-batch_size(1)-sequence_length(512)-new_tokens(1000)/0/.hydra/hydra.yaml delete mode 100644 examples/running-mistrals/experiments/A100-80GB/gptq-batch_size(1)-sequence_length(512)-new_tokens(1000)/0/.hydra/overrides.yaml delete mode 100644 examples/running-mistrals/experiments/A100-80GB/gptq-batch_size(1)-sequence_length(512)-new_tokens(1000)/0/hydra_config.yaml delete mode 100644 
examples/running-mistrals/experiments/A100-80GB/gptq-batch_size(1)-sequence_length(512)-new_tokens(1000)/0/inference_results.csv delete mode 100644 examples/running-mistrals/experiments/A100-80GB/gptq-batch_size(16)-sequence_length(512)-new_tokens(1000)/4/.hydra/config.yaml delete mode 100644 examples/running-mistrals/experiments/A100-80GB/gptq-batch_size(16)-sequence_length(512)-new_tokens(1000)/4/.hydra/hydra.yaml delete mode 100644 examples/running-mistrals/experiments/A100-80GB/gptq-batch_size(16)-sequence_length(512)-new_tokens(1000)/4/.hydra/overrides.yaml delete mode 100644 examples/running-mistrals/experiments/A100-80GB/gptq-batch_size(16)-sequence_length(512)-new_tokens(1000)/4/hydra_config.yaml delete mode 100644 examples/running-mistrals/experiments/A100-80GB/gptq-batch_size(16)-sequence_length(512)-new_tokens(1000)/4/inference_results.csv delete mode 100644 examples/running-mistrals/experiments/A100-80GB/gptq-batch_size(2)-sequence_length(512)-new_tokens(1000)/1/.hydra/config.yaml delete mode 100644 examples/running-mistrals/experiments/A100-80GB/gptq-batch_size(2)-sequence_length(512)-new_tokens(1000)/1/.hydra/hydra.yaml delete mode 100644 examples/running-mistrals/experiments/A100-80GB/gptq-batch_size(2)-sequence_length(512)-new_tokens(1000)/1/.hydra/overrides.yaml delete mode 100644 examples/running-mistrals/experiments/A100-80GB/gptq-batch_size(2)-sequence_length(512)-new_tokens(1000)/1/hydra_config.yaml delete mode 100644 examples/running-mistrals/experiments/A100-80GB/gptq-batch_size(2)-sequence_length(512)-new_tokens(1000)/1/inference_results.csv delete mode 100644 examples/running-mistrals/experiments/A100-80GB/gptq-batch_size(4)-sequence_length(512)-new_tokens(1000)/2/.hydra/config.yaml delete mode 100644 examples/running-mistrals/experiments/A100-80GB/gptq-batch_size(4)-sequence_length(512)-new_tokens(1000)/2/.hydra/hydra.yaml delete mode 100644 examples/running-mistrals/experiments/A100-80GB/gptq-batch_size(4)-sequence_length(512)-new_tokens(1000)/2/.hydra/overrides.yaml delete mode 100644 examples/running-mistrals/experiments/A100-80GB/gptq-batch_size(4)-sequence_length(512)-new_tokens(1000)/2/hydra_config.yaml delete mode 100644 examples/running-mistrals/experiments/A100-80GB/gptq-batch_size(4)-sequence_length(512)-new_tokens(1000)/2/inference_results.csv delete mode 100644 examples/running-mistrals/experiments/A100-80GB/gptq-batch_size(8)-sequence_length(512)-new_tokens(1000)/3/.hydra/config.yaml delete mode 100644 examples/running-mistrals/experiments/A100-80GB/gptq-batch_size(8)-sequence_length(512)-new_tokens(1000)/3/.hydra/hydra.yaml delete mode 100644 examples/running-mistrals/experiments/A100-80GB/gptq-batch_size(8)-sequence_length(512)-new_tokens(1000)/3/.hydra/overrides.yaml delete mode 100644 examples/running-mistrals/experiments/A100-80GB/gptq-batch_size(8)-sequence_length(512)-new_tokens(1000)/3/hydra_config.yaml delete mode 100644 examples/running-mistrals/experiments/A100-80GB/gptq-batch_size(8)-sequence_length(512)-new_tokens(1000)/3/inference_results.csv delete mode 100644 examples/running-mistrals/report.py delete mode 100644 examples/running-vicunas/README.md delete mode 100644 examples/running-vicunas/artifacts/A100-80GB/forward_latency_plot.png delete mode 100644 examples/running-vicunas/artifacts/A100-80GB/forward_memory_plot.png delete mode 100644 examples/running-vicunas/artifacts/A100-80GB/full_report.csv delete mode 100644 examples/running-vicunas/artifacts/A100-80GB/generate_memory_plot.png delete mode 100644 
examples/running-vicunas/artifacts/A100-80GB/generate_throughput_plot.png delete mode 100644 examples/running-vicunas/artifacts/A100-80GB/rich_table.svg delete mode 100644 examples/running-vicunas/artifacts/A100-80GB/short_report.csv delete mode 100644 examples/running-vicunas/configs/_base_.yaml delete mode 100644 examples/running-vicunas/configs/awq+gemm.yaml delete mode 100644 examples/running-vicunas/configs/awq+gemv.yaml delete mode 100644 examples/running-vicunas/configs/bnb.yaml delete mode 100644 examples/running-vicunas/configs/gptq.yaml delete mode 100644 examples/running-vicunas/experiments/A100-80GB/awq+gemm-batch_size(1)-sequence_length(128)-new_tokens(256)/0/.hydra/config.yaml delete mode 100644 examples/running-vicunas/experiments/A100-80GB/awq+gemm-batch_size(1)-sequence_length(128)-new_tokens(256)/0/.hydra/hydra.yaml delete mode 100644 examples/running-vicunas/experiments/A100-80GB/awq+gemm-batch_size(1)-sequence_length(128)-new_tokens(256)/0/.hydra/overrides.yaml delete mode 100644 examples/running-vicunas/experiments/A100-80GB/awq+gemm-batch_size(1)-sequence_length(128)-new_tokens(256)/0/hydra_config.yaml delete mode 100644 examples/running-vicunas/experiments/A100-80GB/awq+gemm-batch_size(1)-sequence_length(128)-new_tokens(256)/0/inference_results.csv delete mode 100644 examples/running-vicunas/experiments/A100-80GB/awq+gemm-batch_size(16)-sequence_length(128)-new_tokens(256)/4/.hydra/config.yaml delete mode 100644 examples/running-vicunas/experiments/A100-80GB/awq+gemm-batch_size(16)-sequence_length(128)-new_tokens(256)/4/.hydra/hydra.yaml delete mode 100644 examples/running-vicunas/experiments/A100-80GB/awq+gemm-batch_size(16)-sequence_length(128)-new_tokens(256)/4/.hydra/overrides.yaml delete mode 100644 examples/running-vicunas/experiments/A100-80GB/awq+gemm-batch_size(16)-sequence_length(128)-new_tokens(256)/4/hydra_config.yaml delete mode 100644 examples/running-vicunas/experiments/A100-80GB/awq+gemm-batch_size(16)-sequence_length(128)-new_tokens(256)/4/inference_results.csv delete mode 100644 examples/running-vicunas/experiments/A100-80GB/awq+gemm-batch_size(2)-sequence_length(128)-new_tokens(256)/1/.hydra/config.yaml delete mode 100644 examples/running-vicunas/experiments/A100-80GB/awq+gemm-batch_size(2)-sequence_length(128)-new_tokens(256)/1/.hydra/hydra.yaml delete mode 100644 examples/running-vicunas/experiments/A100-80GB/awq+gemm-batch_size(2)-sequence_length(128)-new_tokens(256)/1/.hydra/overrides.yaml delete mode 100644 examples/running-vicunas/experiments/A100-80GB/awq+gemm-batch_size(2)-sequence_length(128)-new_tokens(256)/1/hydra_config.yaml delete mode 100644 examples/running-vicunas/experiments/A100-80GB/awq+gemm-batch_size(2)-sequence_length(128)-new_tokens(256)/1/inference_results.csv delete mode 100644 examples/running-vicunas/experiments/A100-80GB/awq+gemm-batch_size(4)-sequence_length(128)-new_tokens(256)/2/.hydra/config.yaml delete mode 100644 examples/running-vicunas/experiments/A100-80GB/awq+gemm-batch_size(4)-sequence_length(128)-new_tokens(256)/2/.hydra/hydra.yaml delete mode 100644 examples/running-vicunas/experiments/A100-80GB/awq+gemm-batch_size(4)-sequence_length(128)-new_tokens(256)/2/.hydra/overrides.yaml delete mode 100644 examples/running-vicunas/experiments/A100-80GB/awq+gemm-batch_size(4)-sequence_length(128)-new_tokens(256)/2/hydra_config.yaml delete mode 100644 examples/running-vicunas/experiments/A100-80GB/awq+gemm-batch_size(4)-sequence_length(128)-new_tokens(256)/2/inference_results.csv delete mode 100644 
examples/running-vicunas/experiments/A100-80GB/awq+gemm-batch_size(8)-sequence_length(128)-new_tokens(256)/3/.hydra/config.yaml delete mode 100644 examples/running-vicunas/experiments/A100-80GB/awq+gemm-batch_size(8)-sequence_length(128)-new_tokens(256)/3/.hydra/hydra.yaml delete mode 100644 examples/running-vicunas/experiments/A100-80GB/awq+gemm-batch_size(8)-sequence_length(128)-new_tokens(256)/3/.hydra/overrides.yaml delete mode 100644 examples/running-vicunas/experiments/A100-80GB/awq+gemm-batch_size(8)-sequence_length(128)-new_tokens(256)/3/hydra_config.yaml delete mode 100644 examples/running-vicunas/experiments/A100-80GB/awq+gemm-batch_size(8)-sequence_length(128)-new_tokens(256)/3/inference_results.csv delete mode 100644 examples/running-vicunas/experiments/A100-80GB/awq+gemv-batch_size(1)-sequence_length(128)-new_tokens(256)/0/.hydra/config.yaml delete mode 100644 examples/running-vicunas/experiments/A100-80GB/awq+gemv-batch_size(1)-sequence_length(128)-new_tokens(256)/0/.hydra/hydra.yaml delete mode 100644 examples/running-vicunas/experiments/A100-80GB/awq+gemv-batch_size(1)-sequence_length(128)-new_tokens(256)/0/.hydra/overrides.yaml delete mode 100644 examples/running-vicunas/experiments/A100-80GB/awq+gemv-batch_size(1)-sequence_length(128)-new_tokens(256)/0/hydra_config.yaml delete mode 100644 examples/running-vicunas/experiments/A100-80GB/awq+gemv-batch_size(1)-sequence_length(128)-new_tokens(256)/0/inference_results.csv delete mode 100644 examples/running-vicunas/experiments/A100-80GB/awq+gemv-batch_size(16)-sequence_length(128)-new_tokens(256)/4/.hydra/config.yaml delete mode 100644 examples/running-vicunas/experiments/A100-80GB/awq+gemv-batch_size(16)-sequence_length(128)-new_tokens(256)/4/.hydra/hydra.yaml delete mode 100644 examples/running-vicunas/experiments/A100-80GB/awq+gemv-batch_size(16)-sequence_length(128)-new_tokens(256)/4/.hydra/overrides.yaml delete mode 100644 examples/running-vicunas/experiments/A100-80GB/awq+gemv-batch_size(16)-sequence_length(128)-new_tokens(256)/4/hydra_config.yaml delete mode 100644 examples/running-vicunas/experiments/A100-80GB/awq+gemv-batch_size(16)-sequence_length(128)-new_tokens(256)/4/inference_results.csv delete mode 100644 examples/running-vicunas/experiments/A100-80GB/awq+gemv-batch_size(2)-sequence_length(128)-new_tokens(256)/1/.hydra/config.yaml delete mode 100644 examples/running-vicunas/experiments/A100-80GB/awq+gemv-batch_size(2)-sequence_length(128)-new_tokens(256)/1/.hydra/hydra.yaml delete mode 100644 examples/running-vicunas/experiments/A100-80GB/awq+gemv-batch_size(2)-sequence_length(128)-new_tokens(256)/1/.hydra/overrides.yaml delete mode 100644 examples/running-vicunas/experiments/A100-80GB/awq+gemv-batch_size(2)-sequence_length(128)-new_tokens(256)/1/hydra_config.yaml delete mode 100644 examples/running-vicunas/experiments/A100-80GB/awq+gemv-batch_size(2)-sequence_length(128)-new_tokens(256)/1/inference_results.csv delete mode 100644 examples/running-vicunas/experiments/A100-80GB/awq+gemv-batch_size(4)-sequence_length(128)-new_tokens(256)/2/.hydra/config.yaml delete mode 100644 examples/running-vicunas/experiments/A100-80GB/awq+gemv-batch_size(4)-sequence_length(128)-new_tokens(256)/2/.hydra/hydra.yaml delete mode 100644 examples/running-vicunas/experiments/A100-80GB/awq+gemv-batch_size(4)-sequence_length(128)-new_tokens(256)/2/.hydra/overrides.yaml delete mode 100644 examples/running-vicunas/experiments/A100-80GB/awq+gemv-batch_size(4)-sequence_length(128)-new_tokens(256)/2/hydra_config.yaml delete mode 100644 
examples/running-vicunas/experiments/A100-80GB/awq+gemv-batch_size(4)-sequence_length(128)-new_tokens(256)/2/inference_results.csv delete mode 100644 examples/running-vicunas/experiments/A100-80GB/awq+gemv-batch_size(8)-sequence_length(128)-new_tokens(256)/3/.hydra/config.yaml delete mode 100644 examples/running-vicunas/experiments/A100-80GB/awq+gemv-batch_size(8)-sequence_length(128)-new_tokens(256)/3/.hydra/hydra.yaml delete mode 100644 examples/running-vicunas/experiments/A100-80GB/awq+gemv-batch_size(8)-sequence_length(128)-new_tokens(256)/3/.hydra/overrides.yaml delete mode 100644 examples/running-vicunas/experiments/A100-80GB/awq+gemv-batch_size(8)-sequence_length(128)-new_tokens(256)/3/hydra_config.yaml delete mode 100644 examples/running-vicunas/experiments/A100-80GB/awq+gemv-batch_size(8)-sequence_length(128)-new_tokens(256)/3/inference_results.csv delete mode 100644 examples/running-vicunas/experiments/A100-80GB/bnb-batch_size(1)-sequence_length(128)-new_tokens(256)/0/.hydra/config.yaml delete mode 100644 examples/running-vicunas/experiments/A100-80GB/bnb-batch_size(1)-sequence_length(128)-new_tokens(256)/0/.hydra/hydra.yaml delete mode 100644 examples/running-vicunas/experiments/A100-80GB/bnb-batch_size(1)-sequence_length(128)-new_tokens(256)/0/.hydra/overrides.yaml delete mode 100644 examples/running-vicunas/experiments/A100-80GB/bnb-batch_size(1)-sequence_length(128)-new_tokens(256)/0/hydra_config.yaml delete mode 100644 examples/running-vicunas/experiments/A100-80GB/bnb-batch_size(1)-sequence_length(128)-new_tokens(256)/0/inference_results.csv delete mode 100644 examples/running-vicunas/experiments/A100-80GB/bnb-batch_size(16)-sequence_length(128)-new_tokens(256)/4/.hydra/config.yaml delete mode 100644 examples/running-vicunas/experiments/A100-80GB/bnb-batch_size(16)-sequence_length(128)-new_tokens(256)/4/.hydra/hydra.yaml delete mode 100644 examples/running-vicunas/experiments/A100-80GB/bnb-batch_size(16)-sequence_length(128)-new_tokens(256)/4/.hydra/overrides.yaml delete mode 100644 examples/running-vicunas/experiments/A100-80GB/bnb-batch_size(16)-sequence_length(128)-new_tokens(256)/4/hydra_config.yaml delete mode 100644 examples/running-vicunas/experiments/A100-80GB/bnb-batch_size(16)-sequence_length(128)-new_tokens(256)/4/inference_results.csv delete mode 100644 examples/running-vicunas/experiments/A100-80GB/bnb-batch_size(2)-sequence_length(128)-new_tokens(256)/1/.hydra/config.yaml delete mode 100644 examples/running-vicunas/experiments/A100-80GB/bnb-batch_size(2)-sequence_length(128)-new_tokens(256)/1/.hydra/hydra.yaml delete mode 100644 examples/running-vicunas/experiments/A100-80GB/bnb-batch_size(2)-sequence_length(128)-new_tokens(256)/1/.hydra/overrides.yaml delete mode 100644 examples/running-vicunas/experiments/A100-80GB/bnb-batch_size(2)-sequence_length(128)-new_tokens(256)/1/hydra_config.yaml delete mode 100644 examples/running-vicunas/experiments/A100-80GB/bnb-batch_size(2)-sequence_length(128)-new_tokens(256)/1/inference_results.csv delete mode 100644 examples/running-vicunas/experiments/A100-80GB/bnb-batch_size(4)-sequence_length(128)-new_tokens(256)/2/.hydra/config.yaml delete mode 100644 examples/running-vicunas/experiments/A100-80GB/bnb-batch_size(4)-sequence_length(128)-new_tokens(256)/2/.hydra/hydra.yaml delete mode 100644 examples/running-vicunas/experiments/A100-80GB/bnb-batch_size(4)-sequence_length(128)-new_tokens(256)/2/.hydra/overrides.yaml delete mode 100644 
examples/running-vicunas/experiments/A100-80GB/bnb-batch_size(4)-sequence_length(128)-new_tokens(256)/2/hydra_config.yaml delete mode 100644 examples/running-vicunas/experiments/A100-80GB/bnb-batch_size(4)-sequence_length(128)-new_tokens(256)/2/inference_results.csv delete mode 100644 examples/running-vicunas/experiments/A100-80GB/bnb-batch_size(8)-sequence_length(128)-new_tokens(256)/3/.hydra/config.yaml delete mode 100644 examples/running-vicunas/experiments/A100-80GB/bnb-batch_size(8)-sequence_length(128)-new_tokens(256)/3/.hydra/hydra.yaml delete mode 100644 examples/running-vicunas/experiments/A100-80GB/bnb-batch_size(8)-sequence_length(128)-new_tokens(256)/3/.hydra/overrides.yaml delete mode 100644 examples/running-vicunas/experiments/A100-80GB/bnb-batch_size(8)-sequence_length(128)-new_tokens(256)/3/hydra_config.yaml delete mode 100644 examples/running-vicunas/experiments/A100-80GB/bnb-batch_size(8)-sequence_length(128)-new_tokens(256)/3/inference_results.csv delete mode 100644 examples/running-vicunas/experiments/A100-80GB/fp16-batch_size(1)-sequence_length(128)-new_tokens(256)/0/.hydra/config.yaml delete mode 100644 examples/running-vicunas/experiments/A100-80GB/fp16-batch_size(1)-sequence_length(128)-new_tokens(256)/0/.hydra/hydra.yaml delete mode 100644 examples/running-vicunas/experiments/A100-80GB/fp16-batch_size(1)-sequence_length(128)-new_tokens(256)/0/.hydra/overrides.yaml delete mode 100644 examples/running-vicunas/experiments/A100-80GB/fp16-batch_size(1)-sequence_length(128)-new_tokens(256)/0/hydra_config.yaml delete mode 100644 examples/running-vicunas/experiments/A100-80GB/fp16-batch_size(1)-sequence_length(128)-new_tokens(256)/0/inference_results.csv delete mode 100644 examples/running-vicunas/experiments/A100-80GB/fp16-batch_size(16)-sequence_length(128)-new_tokens(256)/4/.hydra/config.yaml delete mode 100644 examples/running-vicunas/experiments/A100-80GB/fp16-batch_size(16)-sequence_length(128)-new_tokens(256)/4/.hydra/hydra.yaml delete mode 100644 examples/running-vicunas/experiments/A100-80GB/fp16-batch_size(16)-sequence_length(128)-new_tokens(256)/4/.hydra/overrides.yaml delete mode 100644 examples/running-vicunas/experiments/A100-80GB/fp16-batch_size(16)-sequence_length(128)-new_tokens(256)/4/hydra_config.yaml delete mode 100644 examples/running-vicunas/experiments/A100-80GB/fp16-batch_size(16)-sequence_length(128)-new_tokens(256)/4/inference_results.csv delete mode 100644 examples/running-vicunas/experiments/A100-80GB/fp16-batch_size(2)-sequence_length(128)-new_tokens(256)/1/.hydra/config.yaml delete mode 100644 examples/running-vicunas/experiments/A100-80GB/fp16-batch_size(2)-sequence_length(128)-new_tokens(256)/1/.hydra/hydra.yaml delete mode 100644 examples/running-vicunas/experiments/A100-80GB/fp16-batch_size(2)-sequence_length(128)-new_tokens(256)/1/.hydra/overrides.yaml delete mode 100644 examples/running-vicunas/experiments/A100-80GB/fp16-batch_size(2)-sequence_length(128)-new_tokens(256)/1/hydra_config.yaml delete mode 100644 examples/running-vicunas/experiments/A100-80GB/fp16-batch_size(2)-sequence_length(128)-new_tokens(256)/1/inference_results.csv delete mode 100644 examples/running-vicunas/experiments/A100-80GB/fp16-batch_size(4)-sequence_length(128)-new_tokens(256)/2/.hydra/config.yaml delete mode 100644 examples/running-vicunas/experiments/A100-80GB/fp16-batch_size(4)-sequence_length(128)-new_tokens(256)/2/.hydra/hydra.yaml delete mode 100644 
examples/running-vicunas/experiments/A100-80GB/fp16-batch_size(4)-sequence_length(128)-new_tokens(256)/2/.hydra/overrides.yaml delete mode 100644 examples/running-vicunas/experiments/A100-80GB/fp16-batch_size(4)-sequence_length(128)-new_tokens(256)/2/hydra_config.yaml delete mode 100644 examples/running-vicunas/experiments/A100-80GB/fp16-batch_size(4)-sequence_length(128)-new_tokens(256)/2/inference_results.csv delete mode 100644 examples/running-vicunas/experiments/A100-80GB/fp16-batch_size(8)-sequence_length(128)-new_tokens(256)/3/.hydra/config.yaml delete mode 100644 examples/running-vicunas/experiments/A100-80GB/fp16-batch_size(8)-sequence_length(128)-new_tokens(256)/3/.hydra/hydra.yaml delete mode 100644 examples/running-vicunas/experiments/A100-80GB/fp16-batch_size(8)-sequence_length(128)-new_tokens(256)/3/.hydra/overrides.yaml delete mode 100644 examples/running-vicunas/experiments/A100-80GB/fp16-batch_size(8)-sequence_length(128)-new_tokens(256)/3/hydra_config.yaml delete mode 100644 examples/running-vicunas/experiments/A100-80GB/fp16-batch_size(8)-sequence_length(128)-new_tokens(256)/3/inference_results.csv delete mode 100644 examples/running-vicunas/experiments/A100-80GB/gptq-batch_size(1)-sequence_length(128)-new_tokens(256)/0/.hydra/config.yaml delete mode 100644 examples/running-vicunas/experiments/A100-80GB/gptq-batch_size(1)-sequence_length(128)-new_tokens(256)/0/.hydra/hydra.yaml delete mode 100644 examples/running-vicunas/experiments/A100-80GB/gptq-batch_size(1)-sequence_length(128)-new_tokens(256)/0/.hydra/overrides.yaml delete mode 100644 examples/running-vicunas/experiments/A100-80GB/gptq-batch_size(1)-sequence_length(128)-new_tokens(256)/0/hydra_config.yaml delete mode 100644 examples/running-vicunas/experiments/A100-80GB/gptq-batch_size(1)-sequence_length(128)-new_tokens(256)/0/inference_results.csv delete mode 100644 examples/running-vicunas/experiments/A100-80GB/gptq-batch_size(16)-sequence_length(128)-new_tokens(256)/4/.hydra/config.yaml delete mode 100644 examples/running-vicunas/experiments/A100-80GB/gptq-batch_size(16)-sequence_length(128)-new_tokens(256)/4/.hydra/hydra.yaml delete mode 100644 examples/running-vicunas/experiments/A100-80GB/gptq-batch_size(16)-sequence_length(128)-new_tokens(256)/4/.hydra/overrides.yaml delete mode 100644 examples/running-vicunas/experiments/A100-80GB/gptq-batch_size(16)-sequence_length(128)-new_tokens(256)/4/hydra_config.yaml delete mode 100644 examples/running-vicunas/experiments/A100-80GB/gptq-batch_size(16)-sequence_length(128)-new_tokens(256)/4/inference_results.csv delete mode 100644 examples/running-vicunas/experiments/A100-80GB/gptq-batch_size(2)-sequence_length(128)-new_tokens(256)/1/.hydra/config.yaml delete mode 100644 examples/running-vicunas/experiments/A100-80GB/gptq-batch_size(2)-sequence_length(128)-new_tokens(256)/1/.hydra/hydra.yaml delete mode 100644 examples/running-vicunas/experiments/A100-80GB/gptq-batch_size(2)-sequence_length(128)-new_tokens(256)/1/.hydra/overrides.yaml delete mode 100644 examples/running-vicunas/experiments/A100-80GB/gptq-batch_size(2)-sequence_length(128)-new_tokens(256)/1/hydra_config.yaml delete mode 100644 examples/running-vicunas/experiments/A100-80GB/gptq-batch_size(2)-sequence_length(128)-new_tokens(256)/1/inference_results.csv delete mode 100644 examples/running-vicunas/experiments/A100-80GB/gptq-batch_size(4)-sequence_length(128)-new_tokens(256)/2/.hydra/config.yaml delete mode 100644 
examples/running-vicunas/experiments/A100-80GB/gptq-batch_size(4)-sequence_length(128)-new_tokens(256)/2/.hydra/hydra.yaml delete mode 100644 examples/running-vicunas/experiments/A100-80GB/gptq-batch_size(4)-sequence_length(128)-new_tokens(256)/2/.hydra/overrides.yaml delete mode 100644 examples/running-vicunas/experiments/A100-80GB/gptq-batch_size(4)-sequence_length(128)-new_tokens(256)/2/hydra_config.yaml delete mode 100644 examples/running-vicunas/experiments/A100-80GB/gptq-batch_size(4)-sequence_length(128)-new_tokens(256)/2/inference_results.csv delete mode 100644 examples/running-vicunas/experiments/A100-80GB/gptq-batch_size(8)-sequence_length(128)-new_tokens(256)/3/.hydra/config.yaml delete mode 100644 examples/running-vicunas/experiments/A100-80GB/gptq-batch_size(8)-sequence_length(128)-new_tokens(256)/3/.hydra/hydra.yaml delete mode 100644 examples/running-vicunas/experiments/A100-80GB/gptq-batch_size(8)-sequence_length(128)-new_tokens(256)/3/.hydra/overrides.yaml delete mode 100644 examples/running-vicunas/experiments/A100-80GB/gptq-batch_size(8)-sequence_length(128)-new_tokens(256)/3/hydra_config.yaml delete mode 100644 examples/running-vicunas/experiments/A100-80GB/gptq-batch_size(8)-sequence_length(128)-new_tokens(256)/3/inference_results.csv delete mode 100644 examples/running-vicunas/report.py delete mode 100644 examples/training-llamas/README.md delete mode 100644 examples/training-llamas/artifacts/Llama-2-13b-hf/full_report.csv delete mode 100644 examples/training-llamas/artifacts/Llama-2-13b-hf/peak_training_throughput.png delete mode 100644 examples/training-llamas/artifacts/Llama-2-13b-hf/short_report.csv delete mode 100644 examples/training-llamas/artifacts/Llama-2-13b-hf/training_throughput_bar_plot.png delete mode 100644 examples/training-llamas/artifacts/Llama-2-13b-hf/training_throughput_line_plot.png delete mode 100644 examples/training-llamas/artifacts/Llama-2-7b-hf/full_report.csv delete mode 100644 examples/training-llamas/artifacts/Llama-2-7b-hf/peak_training_throughput.png delete mode 100644 examples/training-llamas/artifacts/Llama-2-7b-hf/short_report.csv delete mode 100644 examples/training-llamas/artifacts/Llama-2-7b-hf/training_throughput_bar_plot.png delete mode 100644 examples/training-llamas/artifacts/Llama-2-7b-hf/training_throughput_line_plot.png delete mode 100644 examples/training-llamas/configs/_base_.yaml delete mode 100644 examples/training-llamas/configs/fp16+dp=2.yaml delete mode 100644 examples/training-llamas/configs/fp16+fsdp=2.yaml delete mode 100644 examples/training-llamas/configs/fp16+peft+bnb-4bit+dp=2.yaml delete mode 100644 examples/training-llamas/configs/fp16+peft+bnb-4bit.yaml delete mode 100644 examples/training-llamas/configs/fp16+peft+dp=2+zero3.yaml delete mode 100644 examples/training-llamas/configs/fp16+peft+dp=2.yaml delete mode 100644 examples/training-llamas/configs/fp16+peft+gptq-4bit+dp=2.yaml delete mode 100644 examples/training-llamas/configs/fp16+peft+gptq-4bit.yaml delete mode 100644 examples/training-llamas/configs/fp16+peft.yaml delete mode 100644 examples/training-llamas/configs/fp16.yaml delete mode 100644 examples/training-llamas/report.py delete mode 100644 examples/whisper/README.md delete mode 100644 examples/whisper/configs/whisper_auto_opt+qnt.yaml delete mode 100644 examples/whisper/configs/whisper_auto_opt.yaml delete mode 100644 examples/whisper/configs/whisper_auto_qnt.yaml delete mode 100644 examples/whisper/configs/whisper_baseline.yaml delete mode 100644 
examples/whisper/experiments/cuda_onnxruntime_128_10/whisper_auto_opt(None)/5/.hydra/config.yaml delete mode 100644 examples/whisper/experiments/cuda_onnxruntime_128_10/whisper_auto_opt(None)/5/.hydra/hydra.yaml delete mode 100644 examples/whisper/experiments/cuda_onnxruntime_128_10/whisper_auto_opt(None)/5/.hydra/overrides.yaml delete mode 100644 examples/whisper/experiments/cuda_onnxruntime_128_10/whisper_auto_opt(None)/5/hydra_config.yaml delete mode 100644 examples/whisper/experiments/cuda_onnxruntime_128_10/whisper_auto_opt(None)/5/inference_results.csv delete mode 100644 examples/whisper/experiments/cuda_onnxruntime_128_10/whisper_auto_opt(O1)/6/.hydra/config.yaml delete mode 100644 examples/whisper/experiments/cuda_onnxruntime_128_10/whisper_auto_opt(O1)/6/.hydra/hydra.yaml delete mode 100644 examples/whisper/experiments/cuda_onnxruntime_128_10/whisper_auto_opt(O1)/6/.hydra/overrides.yaml delete mode 100644 examples/whisper/experiments/cuda_onnxruntime_128_10/whisper_auto_opt(O1)/6/hydra_config.yaml delete mode 100644 examples/whisper/experiments/cuda_onnxruntime_128_10/whisper_auto_opt(O1)/6/inference_results.csv delete mode 100644 examples/whisper/experiments/cuda_onnxruntime_128_10/whisper_auto_opt(O2)/7/.hydra/config.yaml delete mode 100644 examples/whisper/experiments/cuda_onnxruntime_128_10/whisper_auto_opt(O2)/7/.hydra/hydra.yaml delete mode 100644 examples/whisper/experiments/cuda_onnxruntime_128_10/whisper_auto_opt(O2)/7/.hydra/overrides.yaml delete mode 100644 examples/whisper/experiments/cuda_onnxruntime_128_10/whisper_auto_opt(O2)/7/hydra_config.yaml delete mode 100644 examples/whisper/experiments/cuda_onnxruntime_128_10/whisper_auto_opt(O2)/7/inference_results.csv delete mode 100644 examples/whisper/experiments/cuda_onnxruntime_128_10/whisper_auto_opt(O3)/8/.hydra/config.yaml delete mode 100644 examples/whisper/experiments/cuda_onnxruntime_128_10/whisper_auto_opt(O3)/8/.hydra/hydra.yaml delete mode 100644 examples/whisper/experiments/cuda_onnxruntime_128_10/whisper_auto_opt(O3)/8/.hydra/overrides.yaml delete mode 100644 examples/whisper/experiments/cuda_onnxruntime_128_10/whisper_auto_opt(O3)/8/hydra_config.yaml delete mode 100644 examples/whisper/experiments/cuda_onnxruntime_128_10/whisper_auto_opt(O3)/8/inference_results.csv delete mode 100644 examples/whisper/experiments/cuda_onnxruntime_128_10/whisper_auto_opt(O4)/9/.hydra/config.yaml delete mode 100644 examples/whisper/experiments/cuda_onnxruntime_128_10/whisper_auto_opt(O4)/9/.hydra/hydra.yaml delete mode 100644 examples/whisper/experiments/cuda_onnxruntime_128_10/whisper_auto_opt(O4)/9/.hydra/overrides.yaml delete mode 100644 examples/whisper/experiments/cuda_onnxruntime_128_10/whisper_auto_opt(O4)/9/hydra_config.yaml delete mode 100644 examples/whisper/experiments/cuda_onnxruntime_128_10/whisper_auto_opt(O4)/9/inference_results.csv delete mode 100644 examples/whisper/experiments/cuda_onnxruntime_128_100/whisper_auto_opt(None)/15/.hydra/config.yaml delete mode 100644 examples/whisper/experiments/cuda_onnxruntime_128_100/whisper_auto_opt(None)/15/.hydra/hydra.yaml delete mode 100644 examples/whisper/experiments/cuda_onnxruntime_128_100/whisper_auto_opt(None)/15/.hydra/overrides.yaml delete mode 100644 examples/whisper/experiments/cuda_onnxruntime_128_100/whisper_auto_opt(None)/15/hydra_config.yaml delete mode 100644 examples/whisper/experiments/cuda_onnxruntime_128_100/whisper_auto_opt(None)/15/inference_results.csv delete mode 100644 
examples/whisper/experiments/cuda_onnxruntime_128_100/whisper_auto_opt(O1)/16/.hydra/config.yaml
delete mode 100644 examples/whisper/experiments/cuda_onnxruntime_128_100/whisper_auto_opt(O1)/16/.hydra/hydra.yaml
delete mode 100644 examples/whisper/experiments/cuda_onnxruntime_128_100/whisper_auto_opt(O1)/16/.hydra/overrides.yaml
delete mode 100644 examples/whisper/experiments/cuda_onnxruntime_128_100/whisper_auto_opt(O1)/16/hydra_config.yaml
delete mode 100644 examples/whisper/experiments/cuda_onnxruntime_128_100/whisper_auto_opt(O1)/16/inference_results.csv
delete mode 100644 examples/whisper/experiments/cuda_onnxruntime_128_100/whisper_auto_opt(O2)/17/.hydra/config.yaml
delete mode 100644 examples/whisper/experiments/cuda_onnxruntime_128_100/whisper_auto_opt(O2)/17/.hydra/hydra.yaml
delete mode 100644 examples/whisper/experiments/cuda_onnxruntime_128_100/whisper_auto_opt(O2)/17/.hydra/overrides.yaml
delete mode 100644 examples/whisper/experiments/cuda_onnxruntime_128_100/whisper_auto_opt(O2)/17/hydra_config.yaml
delete mode 100644 examples/whisper/experiments/cuda_onnxruntime_128_100/whisper_auto_opt(O2)/17/inference_results.csv
delete mode 100644 examples/whisper/experiments/cuda_onnxruntime_128_100/whisper_auto_opt(O3)/18/.hydra/config.yaml
delete mode 100644 examples/whisper/experiments/cuda_onnxruntime_128_100/whisper_auto_opt(O3)/18/.hydra/hydra.yaml
delete mode 100644 examples/whisper/experiments/cuda_onnxruntime_128_100/whisper_auto_opt(O3)/18/.hydra/overrides.yaml
delete mode 100644 examples/whisper/experiments/cuda_onnxruntime_128_100/whisper_auto_opt(O3)/18/hydra_config.yaml
delete mode 100644 examples/whisper/experiments/cuda_onnxruntime_128_100/whisper_auto_opt(O3)/18/inference_results.csv
delete mode 100644 examples/whisper/experiments/cuda_onnxruntime_128_100/whisper_auto_opt(O4)/19/.hydra/config.yaml
delete mode 100644 examples/whisper/experiments/cuda_onnxruntime_128_100/whisper_auto_opt(O4)/19/.hydra/hydra.yaml
delete mode 100644 examples/whisper/experiments/cuda_onnxruntime_128_100/whisper_auto_opt(O4)/19/.hydra/overrides.yaml
delete mode 100644 examples/whisper/experiments/cuda_onnxruntime_128_100/whisper_auto_opt(O4)/19/hydra_config.yaml
delete mode 100644 examples/whisper/experiments/cuda_onnxruntime_128_100/whisper_auto_opt(O4)/19/inference_results.csv
delete mode 100644 examples/whisper/experiments/cuda_onnxruntime_64_10/whisper_auto_opt(None)/0/.hydra/config.yaml
delete mode 100644 examples/whisper/experiments/cuda_onnxruntime_64_10/whisper_auto_opt(None)/0/.hydra/hydra.yaml
delete mode 100644 examples/whisper/experiments/cuda_onnxruntime_64_10/whisper_auto_opt(None)/0/.hydra/overrides.yaml
delete mode 100644 examples/whisper/experiments/cuda_onnxruntime_64_10/whisper_auto_opt(None)/0/hydra_config.yaml
delete mode 100644 examples/whisper/experiments/cuda_onnxruntime_64_10/whisper_auto_opt(None)/0/inference_results.csv
delete mode 100644 examples/whisper/experiments/cuda_onnxruntime_64_10/whisper_auto_opt(O1)/1/.hydra/config.yaml
delete mode 100644 examples/whisper/experiments/cuda_onnxruntime_64_10/whisper_auto_opt(O1)/1/.hydra/hydra.yaml
delete mode 100644 examples/whisper/experiments/cuda_onnxruntime_64_10/whisper_auto_opt(O1)/1/.hydra/overrides.yaml
delete mode 100644 examples/whisper/experiments/cuda_onnxruntime_64_10/whisper_auto_opt(O1)/1/hydra_config.yaml
delete mode 100644 examples/whisper/experiments/cuda_onnxruntime_64_10/whisper_auto_opt(O1)/1/inference_results.csv
delete mode 100644 examples/whisper/experiments/cuda_onnxruntime_64_10/whisper_auto_opt(O2)/2/.hydra/config.yaml
delete mode 100644 examples/whisper/experiments/cuda_onnxruntime_64_10/whisper_auto_opt(O2)/2/.hydra/hydra.yaml
delete mode 100644 examples/whisper/experiments/cuda_onnxruntime_64_10/whisper_auto_opt(O2)/2/.hydra/overrides.yaml
delete mode 100644 examples/whisper/experiments/cuda_onnxruntime_64_10/whisper_auto_opt(O2)/2/hydra_config.yaml
delete mode 100644 examples/whisper/experiments/cuda_onnxruntime_64_10/whisper_auto_opt(O2)/2/inference_results.csv
delete mode 100644 examples/whisper/experiments/cuda_onnxruntime_64_10/whisper_auto_opt(O3)/3/.hydra/config.yaml
delete mode 100644 examples/whisper/experiments/cuda_onnxruntime_64_10/whisper_auto_opt(O3)/3/.hydra/hydra.yaml
delete mode 100644 examples/whisper/experiments/cuda_onnxruntime_64_10/whisper_auto_opt(O3)/3/.hydra/overrides.yaml
delete mode 100644 examples/whisper/experiments/cuda_onnxruntime_64_10/whisper_auto_opt(O3)/3/hydra_config.yaml
delete mode 100644 examples/whisper/experiments/cuda_onnxruntime_64_10/whisper_auto_opt(O3)/3/inference_results.csv
delete mode 100644 examples/whisper/experiments/cuda_onnxruntime_64_10/whisper_auto_opt(O4)/4/.hydra/config.yaml
delete mode 100644 examples/whisper/experiments/cuda_onnxruntime_64_10/whisper_auto_opt(O4)/4/.hydra/hydra.yaml
delete mode 100644 examples/whisper/experiments/cuda_onnxruntime_64_10/whisper_auto_opt(O4)/4/.hydra/overrides.yaml
delete mode 100644 examples/whisper/experiments/cuda_onnxruntime_64_10/whisper_auto_opt(O4)/4/hydra_config.yaml
delete mode 100644 examples/whisper/experiments/cuda_onnxruntime_64_10/whisper_auto_opt(O4)/4/inference_results.csv
delete mode 100644 examples/whisper/experiments/cuda_onnxruntime_64_100/whisper_auto_opt(None)/10/.hydra/config.yaml
delete mode 100644 examples/whisper/experiments/cuda_onnxruntime_64_100/whisper_auto_opt(None)/10/.hydra/hydra.yaml
delete mode 100644 examples/whisper/experiments/cuda_onnxruntime_64_100/whisper_auto_opt(None)/10/.hydra/overrides.yaml
delete mode 100644 examples/whisper/experiments/cuda_onnxruntime_64_100/whisper_auto_opt(None)/10/hydra_config.yaml
delete mode 100644 examples/whisper/experiments/cuda_onnxruntime_64_100/whisper_auto_opt(None)/10/inference_results.csv
delete mode 100644 examples/whisper/experiments/cuda_onnxruntime_64_100/whisper_auto_opt(O1)/11/.hydra/config.yaml
delete mode 100644 examples/whisper/experiments/cuda_onnxruntime_64_100/whisper_auto_opt(O1)/11/.hydra/hydra.yaml
delete mode 100644 examples/whisper/experiments/cuda_onnxruntime_64_100/whisper_auto_opt(O1)/11/.hydra/overrides.yaml
delete mode 100644 examples/whisper/experiments/cuda_onnxruntime_64_100/whisper_auto_opt(O1)/11/hydra_config.yaml
delete mode 100644 examples/whisper/experiments/cuda_onnxruntime_64_100/whisper_auto_opt(O1)/11/inference_results.csv
delete mode 100644 examples/whisper/experiments/cuda_onnxruntime_64_100/whisper_auto_opt(O2)/12/.hydra/config.yaml
delete mode 100644 examples/whisper/experiments/cuda_onnxruntime_64_100/whisper_auto_opt(O2)/12/.hydra/hydra.yaml
delete mode 100644 examples/whisper/experiments/cuda_onnxruntime_64_100/whisper_auto_opt(O2)/12/.hydra/overrides.yaml
delete mode 100644 examples/whisper/experiments/cuda_onnxruntime_64_100/whisper_auto_opt(O2)/12/hydra_config.yaml
delete mode 100644 examples/whisper/experiments/cuda_onnxruntime_64_100/whisper_auto_opt(O2)/12/inference_results.csv
delete mode 100644 examples/whisper/experiments/cuda_onnxruntime_64_100/whisper_auto_opt(O3)/13/.hydra/config.yaml
delete mode 100644 examples/whisper/experiments/cuda_onnxruntime_64_100/whisper_auto_opt(O3)/13/.hydra/hydra.yaml
delete mode 100644 examples/whisper/experiments/cuda_onnxruntime_64_100/whisper_auto_opt(O3)/13/.hydra/overrides.yaml
delete mode 100644 examples/whisper/experiments/cuda_onnxruntime_64_100/whisper_auto_opt(O3)/13/hydra_config.yaml
delete mode 100644 examples/whisper/experiments/cuda_onnxruntime_64_100/whisper_auto_opt(O3)/13/inference_results.csv
delete mode 100644 examples/whisper/experiments/cuda_onnxruntime_64_100/whisper_auto_opt(O4)/14/.hydra/config.yaml
delete mode 100644 examples/whisper/experiments/cuda_onnxruntime_64_100/whisper_auto_opt(O4)/14/.hydra/hydra.yaml
delete mode 100644 examples/whisper/experiments/cuda_onnxruntime_64_100/whisper_auto_opt(O4)/14/.hydra/overrides.yaml
delete mode 100644 examples/whisper/experiments/cuda_onnxruntime_64_100/whisper_auto_opt(O4)/14/hydra_config.yaml
delete mode 100644 examples/whisper/experiments/cuda_onnxruntime_64_100/whisper_auto_opt(O4)/14/inference_results.csv
delete mode 100644 examples/whisper/experiments/cuda_pytorch_128_10/whisper_baseline/1/.hydra/config.yaml
delete mode 100644 examples/whisper/experiments/cuda_pytorch_128_10/whisper_baseline/1/.hydra/hydra.yaml
delete mode 100644 examples/whisper/experiments/cuda_pytorch_128_10/whisper_baseline/1/.hydra/overrides.yaml
delete mode 100644 examples/whisper/experiments/cuda_pytorch_128_10/whisper_baseline/1/hydra_config.yaml
delete mode 100644 examples/whisper/experiments/cuda_pytorch_128_10/whisper_baseline/1/inference_results.csv
delete mode 100644 examples/whisper/experiments/cuda_pytorch_128_100/whisper_baseline/3/.hydra/config.yaml
delete mode 100644 examples/whisper/experiments/cuda_pytorch_128_100/whisper_baseline/3/.hydra/hydra.yaml
delete mode 100644 examples/whisper/experiments/cuda_pytorch_128_100/whisper_baseline/3/.hydra/overrides.yaml
delete mode 100644 examples/whisper/experiments/cuda_pytorch_128_100/whisper_baseline/3/hydra_config.yaml
delete mode 100644 examples/whisper/experiments/cuda_pytorch_128_100/whisper_baseline/3/inference_results.csv
delete mode 100644 examples/whisper/experiments/cuda_pytorch_64_10/whisper_baseline/0/.hydra/config.yaml
delete mode 100644 examples/whisper/experiments/cuda_pytorch_64_10/whisper_baseline/0/.hydra/hydra.yaml
delete mode 100644 examples/whisper/experiments/cuda_pytorch_64_10/whisper_baseline/0/.hydra/overrides.yaml
delete mode 100644 examples/whisper/experiments/cuda_pytorch_64_10/whisper_baseline/0/hydra_config.yaml
delete mode 100644 examples/whisper/experiments/cuda_pytorch_64_10/whisper_baseline/0/inference_results.csv
delete mode 100644 examples/whisper/experiments/cuda_pytorch_64_100/whisper_baseline/2/.hydra/config.yaml
delete mode 100644 examples/whisper/experiments/cuda_pytorch_64_100/whisper_baseline/2/.hydra/hydra.yaml
delete mode 100644 examples/whisper/experiments/cuda_pytorch_64_100/whisper_baseline/2/.hydra/overrides.yaml
delete mode 100644 examples/whisper/experiments/cuda_pytorch_64_100/whisper_baseline/2/hydra_config.yaml
delete mode 100644 examples/whisper/experiments/cuda_pytorch_64_100/whisper_baseline/2/inference_results.csv
delete mode 100644 examples/whisper/reports/cuda_128_10/forward_throughput.png
delete mode 100644 examples/whisper/reports/cuda_128_10/generate_throughput.png
delete mode 100644 examples/whisper/reports/cuda_128_10/inference_report.csv
delete mode 100644 examples/whisper/reports/cuda_128_10/rich_table.svg
delete mode 100644 examples/whisper/reports/cuda_128_100/forward_throughput.png
delete mode 100644 examples/whisper/reports/cuda_128_100/generate_throughput.png
delete mode 100644 examples/whisper/reports/cuda_128_100/inference_report.csv
delete mode 100644 examples/whisper/reports/cuda_128_100/rich_table.svg
delete mode 100644 examples/whisper/reports/cuda_64_10/forward_throughput.png
delete mode 100644 examples/whisper/reports/cuda_64_10/generate_throughput.png
delete mode 100644 examples/whisper/reports/cuda_64_10/inference_report.csv
delete mode 100644 examples/whisper/reports/cuda_64_10/rich_table.svg
delete mode 100644 examples/whisper/reports/cuda_64_100/forward_throughput.png
delete mode 100644 examples/whisper/reports/cuda_64_100/generate_throughput.png
delete mode 100644 examples/whisper/reports/cuda_64_100/inference_report.csv
delete mode 100644 examples/whisper/reports/cuda_64_100/rich_table.svg
delete mode 100644 examples/whisper/scripts/benchmark.sh
delete mode 100644 examples/whisper/scripts/report.sh
diff --git a/.gitignore b/.gitignore
index a8e86c83..ff31100b 100644
--- a/.gitignore
+++ b/.gitignore
@@ -167,8 +167,7 @@
 sweeps/
 data/
 version.txt
+.engine/
 actions-runner/
 experiments/
-examples/
-.engine/
-amdsmi
\ No newline at end of file
+amdsmi/
diff --git a/README.md b/README.md
index 49889327..8b5e7368 100644
--- a/README.md
+++ b/README.md
@@ -1,8 +1,8 @@

Optimum-Benchmark Logo

-All benchmarks are wrong, some will cost you less than the others.
+All benchmarks are wrong, some will cost you less than others.

Optimum-Benchmark 🏋️

-Optimum-Benchmark is a unified [multi-backend & multi-device](#backends--devices-) utility for benchmarking [Transformers](https://github.com/huggingface/transformers), [Diffusers](https://github.com/huggingface/diffusers), [PEFT](https://github.com/huggingface/peft), [TIMM](https://github.com/huggingface/pytorch-image-models) and [Optimum](https://github.com/huggingface/optimum) flavors, along with all their supported [optimizations & quantization schemes](#backend-features-), for [inference & training](#benchmark-features-%EF%B8%8F), in [distributed & non-distributed settings](#backend-features-), in the most correct and scalable way possible (no need to even download model weights).
+Optimum-Benchmark is a unified [multi-backend & multi-device](#backends--devices-) utility for benchmarking [Transformers](https://github.com/huggingface/transformers), [Diffusers](https://github.com/huggingface/diffusers), [PEFT](https://github.com/huggingface/peft), [TIMM](https://github.com/huggingface/pytorch-image-models) and [Optimum](https://github.com/huggingface/optimum) flavors, along with all their supported [optimizations & quantization schemes](#backend-features-), for [inference & training](#benchmark-features-%EF%B8%8F), in [distributed & non-distributed settings](#backend-features-), in the most correct, efficient and scalable way possible (you don't even need to download the weights).
 
 *News* 📰
 
 - PYPI release soon.
 
@@ -128,7 +128,7 @@ optimum-benchmark --config-dir examples --config-name pytorch_bert -m backend.de
 
 ### Configurations structure 📁
 
-You can create custom configuration files following the [examples here]([examples](https://github.com/IlyasMoutawwakil/optimum-benchmark-examples)).
+You can create custom and more complex configuration files following these [examples]([examples](https://github.com/IlyasMoutawwakil/optimum-benchmark-examples)).
 
 ## Features 🎨
 
diff --git a/examples/api_launch.py b/examples/api_launch.py
index 987ec8c9..734dfd3c 100644
--- a/examples/api_launch.py
+++ b/examples/api_launch.py
@@ -4,7 +4,6 @@ from optimum_benchmark.launchers.torchrun.config import TorchrunConfig
 from optimum_benchmark.logging_utils import setup_logging
 
-
 if __name__ == "__main__":
     setup_logging(level="INFO")
     launcher_config = TorchrunConfig(nproc_per_node=2)
diff --git a/examples/fast-mteb/README.md b/examples/fast-mteb/README.md
deleted file mode 100644
index fee12ad6..00000000
--- a/examples/fast-mteb/README.md
+++ /dev/null
@@ -1,44 +0,0 @@
-# Optimum-Benchmark x MTEB
-
-A set of performance benchmarks using [`BAAI/bge-base-en-v1.5`](https://huggingface.co/BAAI/bge-base-en-v1.5), the number one embedding model on the [`Massive Text Embedding Benchmark (MTEB) Leaderboard`](https://huggingface.co/spaces/mteb/leaderboard).
-
-For these benchmarks in particular, we recommend using a docker image with TensorRT installed, for example [`nvcr.io/nvidia/tensorrt:22.12-py3`](../../docker/gpu.dockerfile), or removing the `TensorrtExecutionProvider` experiments from `configs/`.
-
-Just run `script.sh` from this directory:
-
-```bash
-sh script.sh
-```
-
-This script will run sweeps over batch sizes and sequence lengths for each backend using the config files in `configs/`. The results of the sweeps will be saved in `experiments/`. Then run the reporting script `report.py`:
-
-```bash
-python report.py -e experiments/
-```
-
-This will generate the plots and csv files in `artifacts/`.
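The sweep-then-report workflow above is driven by Hydra-style multirun overrides, in the same spirit as the `optimum-benchmark --config-dir ... --config-name ... -m` invocation shown earlier. A minimal sketch of such a sweep, assuming a hypothetical config name (`my_sweep_config`) and illustrative override keys (`benchmark.input_shapes.*`) that may not match the exact schema of these configs:

```bash
# Hypothetical multirun sweep over batch sizes and sequence lengths.
# The config name and the override keys below are illustrative assumptions.
optimum-benchmark --config-dir configs --config-name my_sweep_config -m \
  benchmark.input_shapes.batch_size=1,16,64,128 \
  benchmark.input_shapes.sequence_length=64,256
```

Each parameter combination would then typically land in its own numbered subdirectory under `experiments/`, which is what `report.py -e experiments/` aggregates.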
-
-## Results
-
-### Latency
-
-For latency, we consider the case of a server processing requests one at a time (i.e. batch size of 1).
-We achieve 1 to 2 milliseconds latency for the forward pass of the embedding model using either `CUDAExecutionProvider` with `O4` optimization level or `TensorrtExecutionProvider` with `fp16` precision. This can be seen as a 5x to 7x speedup over the baseline pytorch model.
-
-

- Latency -
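To put the latency claim above in perspective, a 5x to 7x speedup over a 1 to 2 millisecond optimized forward pass implies a PyTorch baseline somewhere in the 5 to 14 millisecond range per request. A quick check of that implied range (the bounds are derived from the figures quoted above, not measured here):

```bash
# Implied baseline range: 1 ms * 5 = 5 ms (lower bound), 2 ms * 7 = 14 ms (upper bound).
python3 -c "print(f'implied baseline: {1 * 5}-{2 * 7} ms')"
```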

-
-### Throughput
-
-For throughput, we consider the case of a server processing requests of average length (i.e. sequence length of 256).
-We achieve a throughput of +2000 samples per second for the forward pass of the embedding model at an optimal batch size of 128 using either `CUDAExecutionProvider` with `O4` optimization level or `TensorrtExecutionProvider` with `fp16` precision. This can be seen as a 7.5x increase over the baseline pytorch model.
-
-

- Throughput -
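As a sanity check on the throughput figure above: at the optimal batch size of 128, roughly 2000 samples per second corresponds to about 64 ms per forward pass over a full batch (the values come from the text; the arithmetic is only a back-of-the-envelope check):

```bash
# ~2000 samples/s at batch size 128 -> about 64 ms per forward pass over a batch.
python3 -c "print(f'{128 / 2000 * 1000:.0f} ms per batch')"
```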

-
-### Notes
-
-- The `TensorrtExecutionProvider` requires engine building, which can take a few minutes during model loading and the first forward pass. It also rebuilds the engine every time the sequence length changes. This is why I think that `CUDAExecutionProvider` with `O4` optimization level is the sweetspot for GPU inference.
-- Some other cases, such as processing big batches of short sequences, can demonstrate even higher speedups (~15x). We don't study them here.
diff --git a/examples/fast-mteb/artifacts/forward_latency_plot.png b/examples/fast-mteb/artifacts/forward_latency_plot.png
deleted file mode 100644
index c4aecc6dbbe596441079c48856f52f2201347ab5..0000000000000000000000000000000000000000
GIT binary patch
[binary PNG data omitted]
z@qygPa3xS;)z}@%B4WYcjg9nfu_-wqic01>h)Nhvt@IEcBK=7i&=J(gaSsy#l9b2+ z@Kr75O+_#Q7XrRxbL9!+-Wbp>P>G~YHsCY-g6~Dw=`ppjExi{Vm%FqfKg`Fc^y#DFlCGE6B#=pi!mEaH$bta_YL>;~bxEv51Ue>~ z^t^kQo^GNej`9d5BERcG49G4NA}T5n>O1kqFw~*Pm3v2yG6h?5fk{5a?}Mu% zO4(AJeC~J?sJbnw*1!wJ+5^3VJcdvlq!7-ARc0u$*v|;wYb<-DQzKy0I4m8HDP^;7 z^IDTvGZbB9OCL`{Kwp^=f%T4Tpr46&0HsD0_6iK}r!g7lZIS#WkZiHdG6)T~QCT*- zqlL$g+0Q|8fZg={>XSGF!%rg#^^G_HGH|7kTMJMKR%ooop&T?+cqM?192jgY5_Quu^MDodJ4|{>xxD1nS zEj$l_Wo2cs^qPe#J4@KZPKkA17NXALJ6T{QuvuuqJwan1^=q(>PxUKE2=T z{CvQj5cvr~?kp67X)x&LArQTkc@XJ>!SM)1PJjdJ@@l8cM`0mK#=>b_w=P_6>1;LE zC4rJH7-NJEZb1lw?mgxfI`9(VW=g;HWSG8wMge+6XBh|vIwrZ z6ZnWV2VWTLiE$^UR*uaMM6<4+yRtxBAkqq}o;Z95KnIlY4`2vomx;|J`5Vm*PGQnI z0*X$ji=*g|<1&W#L!2^V+raFJ7>TLl)ea{|$H=bFo;`0iVCLO~wHV(HlYxg>O1c{I zX_w_4t0Uv%#o>t}0T$#rB4DX>!Dn$B0v0RKA;NE$W+d#PYyzfH+JIsPW_c3ARV2m1Ol4~F ze5)#gC>F?;ID=D`^KuD6Xn?i<9X$|`=ODTRgC&GkMWVRC`eEf$bU;-?BHTATffPpk z2)JG%i7G@>*d5)04yI;pE#Oo@xFos+_IC6032fK6u&|!L&^1OJJq#v4=rxeiw+9}C zN_@09)9RA_0M04!47z&>E|=EYgMAgqpjZpv90+S-@OEtTk-m)=Pkp+m2@cXs#bIhn z-Y<^SuVJ$=8@7Gj*o~Yl*>IqP+q=1iQ;q>mB=o?Tt3^(YXn>f@>tQ_jde$NZWmhsi z68IDEFW>2b43H=jW92h#DwtFASFDi2@s26*p83Y3`23aeZpseA1V%}5fCnn^I}#@2 z&&f&H%!@;3gOZNGc?wd#9Y#X|*qwGeItCCm1ZUA_NSDB;lV*K`$ei@d;j=*GYAT}O zl#|0wwf%iPi-;I;Nhgkd$*+nU& zg@MIn0p4s4tMhucXy$WR4rz9RKQ0RvZ!+8#EpVFY2baE_=8wEcj`wuS6sXa*7!d{l zd;-rMb1hyoXHy&VZgA{|&!B7kT~i~Y;KDE{{J}`2h2l37L3N0-ju5B)GTbhOZngU; zB`G-%>j~fx$O?WG&!3m!e5qvCvq)NI-~KwCRWl&{$X8DIJl zX~Ie?f^-5QK_*(R`5MU=xK5AU83!21o9Ca*e1Fv!*?OvfAuiEUE;GdoCR`Z*k5F3b zS_)%+c%A~`*{B-#r>#lYv!yY%Pr`9a=>#BDaKLCr&Z=YcCP!!eh_`>^aWYm!oolt{wNyV-Mv5UU^Px_xEGv6IhqcESke{yx36}4{Uf*;rdH{w zRHF}2VxJ~N2AH@gVu;>t$3b3@r-1id(1?-lZvHpvb}<11V5&^(M*@`671$K1(GQWx zGuVr>;N+tqCbmW#M~ULLq#EgOIXodc%3;WH(X|gJw>v22Mc3zheY65#@h1LE%e#*Y z_YFzqE)}Drqu9&DCuH9symgo7@PU!|OnuuRisYf4+2D)03SYlK0#6oP1xzfAi+BeC zPo6x{Z1o=ebov`+7u$a2)c` z9|wEdcWKw@*om|pEoxZAX_6wD%`k9LorZ=@bDaJPE?sf}xIBkcZV%8E2%rWCCOSAe z(m-}8NB$Nmh5b01lUeBX>(^bSI);4_(|{GvL)vgFy2HSZrFb61e~b+!Y&vN}2ueuf z_(mPIT&N)j2Ulfu&cdn{LVc_FBY1-$H~?;}!K%l@rW@^mEqf6)29VUpcD56+rqA8Y z25INuSElxhV)B77YDqG;5!gr@(i!cGq}yCyLw&u4NrJZR+j5y-cbW;zL+`6>4?8?8 z9;g*a_ncq7k$Mn`$W#1Tn8}Cd(Pa#Gbzufn*~Nj9$+#tw$H}-8%r#uuWcZ%c+HK%r z?91`7L7ny?JZ!Yx!WB+)@FZN75EHg0^?i6g9fyZXAOdRf-mvf!r;sm6T{G|>C$O#> zYXu=ZL3uW)1p*9AW6n=9BKY2MDlXeHG(ouzg}iE(ZDsDFfoNpR)kzGHJsCE|1$_;F z#}*lX3@J_TJ`sS;wiTZ~{$b-T%4{HZ4&S?lRU?*1&+J@09Aov}ZBrPs#=^D{-O}hA zpKzg5^=}+zp{oL4yi_DGSgaaJP9pNryM`r>cw@K%^KcUX3`hXhF2#$Sn5Dx%79Gbi z=a{6vi&e!wAM$aN z;ktb?cvR}Nau%bT#FF-oyPWZRWU`f-6#GZ4Q;&TzaU44EG7>JqX55}|nmLh@l5+nr z&PRk=!mag8`Y5hb#w)PU8_VtiyP*X`gdkT;BK9P>36;e-z zHx(@*%)pBX##w!_Z6R8qEdnZI8NTU!T;6Vj4;{5kuqrH8-7_5lEoq$UiEfOiYJjg@ zD-v-~%IMLQ`GJzuZlR?*yc~}{1i!+DPAu97k_O&xV$8IuiyjnlMMWcA+1BHD$-xli zz)2s1!@j8e*bR|4c)x$t4}wQ7pi7YBI-FVOqf#~x#T06O#lr+Tff^Nm&r?)wh?OFs zDzHVMFZ{Qo~$=FXb&5OhNVViCd%?#svP{UInC z&ZVPq5?~)hy22p>opXW?mBc7Wx%@;D=-ebkE2$iSUULgCT)2=2>H^Kdj?EnOn;^xK u`T>_psZfdiAC=EsS82$FwLpn|Ms#Eiu&=V2=Y*@S5Fh}41_`d+|5FSwg diff --git a/examples/fast-mteb/artifacts/forward_throughput_plot.png b/examples/fast-mteb/artifacts/forward_throughput_plot.png deleted file mode 100644 index 4be441a3b46e3a556fc724ed012759de571eb665..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 44934 zcmeFZ^;cGF)Hb>vItA%Q5K+28Pyq==LXegYk&*@pDUpy8kOrlsk#3|@zyLu)NdXZ= zT4_9Ux!?C2XN>QM^A8-yaBTO(!&>XU?-|#e^SV~J#_gM=L<~d-f{@-)QqV*YtUv_8 z%n{(hCt|}>i|`*wcSU`7EvNhLp60Grh?=>(^8+XM2euaH9$LA&**ZCj@JsLu^PaPD zcXxJ^5)g3sKVRT?a3lx;d~fHfH^*Ma)FVfVMgi;Qo85Gl=Yq-Q2-tK&8=84! 
z3E1F&254{t|ND&O|NjS@&i}_I_{s$Z1)ZY^C#Ry~__1|*dgOHdnci52v>#ENQm~l) zSc0dH65+(;~V#oALW%4WijUtI1Y}EgokQGIw`(je1ApcPT>HgoK2w9`96=`OV=QN~)?< zbybQ9&z}c{5Yu+{^xz&I9v14BGNg&x2KC2ZTv!@RUedfMH;{hcTQ4y7Q_a18iixjZ zcj_7&8WOY|xI>w*JlxE1nr&r%{P;2V*OxC|kh8FuBPM@uxn~+I&aKeTbpTN%nUe{d|g_KL3X~sFWC+__q_NCmhASeo$YNo_ykwR z(aw&0_xEr3hdG_4lDc|WS=stcR?CSB^X6r}GNbEeW?2Idkqc*#kx!qp3JUacEXmES ztgYqwljRK!ugnUI*^SQmvEGPbA;reVetowZFNWos+;o%2U`}z+{@!eW%h4Z)wJ){A z@}VU2zf{!KZ{EMpxo4!MMVXf7Zpfj7?SYBs{+~GV4JU3>R2G(cWSmEEWF$>IH{r!a4_-7U~jt&m8;hZWe zDgqVQA6Hk4h&z1A)s$J9C|Kx=dF8$HedzP&p7OR+BE;+TSn}}CFJeDGKYe|Dv(?d} zw{PE)Z~vNqyX4AiSVscJ6#VSjnWl%oBKl%jcLv;7M+y1)`5U}L zFE-NM*Jn9ec&DPK2K_pNgkzM$*Sal}%-Ma#rOVO*0Rc0A+D;FNKYaKwG(7w`GLrZV z(`AH}l@*M%s3mmP|A^FMb@Z_U`}If~KIfA|j~RdXAsrpv9GKGP=4N1Pt2CTlB^Z%l zZ-lWr{P6gA=OeN|y^edwf9B%t7qao3YFV#dVOv{U!w+P*ttcreg{%gsCMG6i+2h(f zIxswXWx+=LdPO5?V)hmOCqCMGU%%|x$p7RR}PrlUJ%ip|-42|;9B!EMKV7K7RLGFbM7v_Ke9B+2`T%w1MytwL3VPax}UY&-| zAb4+McA+5CU+&GrU(P?r_!4Em4GOd=fZ0|09=Hnj4uQEmRbYQ{#q&%wgBTX_LMDL4 zqLPl5-)PQcZ}l?|>ME3+s&jP?UupS=B?9{NAADAP!A6er-<58pvPm`cjkq(pTXPum}-k2qNhC; zs+rQvw{G3iueGBNJ;NMPqF25>(Bi#=2MeGRwIL5BWyDiaQE3kN%komnqZ5o%$h;E= z!o&g?XAt-`1stOH_M*l3^4qxcbeEmaF^E4PFf}!8_B*0^bt761)-;+l+ww?arsYu> z{0;-SLWypvT$}&N=3+mkjjgTA(mi4NtJkg#H+k5r_|;AHCankG8ND7o>iYPR%+&wj0u~n5P^qB=5|N}MAtBN1wS{FG za3W^+xi|!kg&$r>J`E4=IQ;$7yKi@AN8ZOrs`nW)akv7Oh)6Zo&)CGoo|Cp)>gs*J zzSdVh{AJ+2HW4S0%PuHLuAXtVdvL%%*;{=m#h6FIp#-___qOv`@{nV9Q%qVr)S>pHf$Sv#`sU48%kM6#)WKT@%| z>D)_a`S*B-47+`Osv*~8N2CJMLHyUGw6p;x zCMG)M;rDl`oipqo_O}-9`nM6m0dygP7cX8+ii_)Tn5{6!#%`CBlWTBrbaW&^zBYST ze`$i7zy(rM`@huKc9D>f=)1ZKZy0(1WTB_;M+9Lxj)q;Kq<7z^iA}!o+-PvPe*HQL zQe9iCEcUue8x=)D?fi$hmo;vo5(!+^B_MvyCdh-D{l{B zVPT7{t*xG7`I(#G#uGos%e}2W`^D_7O?Dv_Zp$BBzUr2nyhkeDzEv`rycxqHRH^?? z8Zsnje81PxqHCk_OIKDAY3Ynx929@1J!h%CuG!k!z8fBnJ96CL->;B4-G9>4(^Hh7 zb87nWBV_oL%1R+HEXKhPAMQs9)q&Td2Y0Zu>}@6CH1iIrfUHWRFC#ZV3^_^t%SV~N zQSI%rGuViQg#|ei6A9RriM>z=Vn&u%w(p`DT zTdY_9!9gbA^eW_NuXuIWn0c)_&8Ei2d5G*v%F5qfcFoMNz@KIZSXoG7BK6_k#z%*d zk&!Mi!pP$psy^@$FUwvj_a6x2wY^AK{z8Er^&9O~?nGrFh)Is=Nl7%EYH7g<2~;nx zJ*w09@IKhS045;lyYCD+#QE#=)YMRgxqNLoJ$N@HQQ}e=#6+c3iK8=^kTNzkH9Y>J zI!#Fi>)@9!3@A+4n|k|}peu|F_1UwGjKs3TC)3&T_$g^=-yjfAbxlt*3#XDSe|WjT zyb~$VI%hByk)uv3AZKP~{_G#xxU#zX4Q$FtqNc4)=0-FFE)Nfn z>Wj;jZ>MXDii-9j&6(TTMO=IIM^;`QJMdezv#YBd-~jJT#bkbC1hM|`g6q>3A(#Cq zuBx;xpJAQ$6+#YfZX#sv$B)9QkAM{*qwA&qi1QQ|$|B$pcbbWU4Fpt$G zWZ8?Jr_Fd7^9G*h);C37qXzP10n-}>1`Hc15ZDF9#rZdz@7%fL?Ch-3c&qpg3)^V1 zPUM~uy=sf!QBr*iJT`YcaZXMS3L+C3o8#jt>)S3TEHN`NY5$SDe7TVHYJ@>RrG6Bo z-JLo}aAb&=<(P7O(?>B+=3GrKmTNx2;E~@MM|=AEb}~wi&QRz2{9bJ?`v_jwIXry! 
zWizNWwr%#`@=cy!Xlp2UR9)a7S z0Cn+WP;5jHmaJTa()7Epe|~+%M!?R)E~;l_-L1YqG(LVa>9+_j5hZ63qOWm>DN0lJCO(YFZ0x+054AcdD9e<_jL!_O@nQt}nd|5G3QOcPDWc zi?lBRGL@T^Ge-GVMxXaBzy%%Y@n(-gt%5Uv^R+oICFHNIJwPII2Lf|`@1n=r>?K6Q ztKXjZX!l3lfc?%RRsguzjYiKZDg+bTi)&KT(sFLUlp-J{<*S&2_$TCd=&qV7j4K~@ zMrYEr;5PGAqZ_{Qn}>Ni3QBo$mdLHddsbE<5Ngc6);TmYdc#_7zqtGujFjp?pIOSi zePyJ;<<}Q6e&hjcDG4cQCBUE`tE-)#KPU2(Kj?ou?HZSmfFmQ*#-3RX;m!-PYLS?M$@?P&P$p}7Cy%0&oa?eE`{2C`R-3=L_}xBUTxx0wNZ z*9BGA*LtVrptA^C6+y&PHkyrQlyH?1>8)^CxPihSJ~PGAi^zJ+({HZ>x3-s5m@Yd9 zKFWCUA__v=E6CqQ+({U0yt(bA!OQ-~4Ff*`HaD87DS;EPB3UjAy~t=l45sj%EZ%ki zD_SUkt6oq^Dy*UNSN-`~%5#0{PCpKE3F{Bk8dza!-QC@0?SUAZ+16{2vP$dT1!Ax_ zzx5CwpB`;+AI-D{#Eg!P#*|-W%X?}7yIJe=+s@0|I~x)totPa}GG6b*M1ww$g!_*n z3}nI}C5~@R!D6DKLv;>@OfvpV0204NNB*2?xdP~CxI|9?g`-NhZgoH~Jwr@P47aCR zd12Wb)zKTxC=q5n%C!>U8FcdNZ zmw5RhVB_H6w154|2!7ECFxYwV?OC)iTIltnF31>Y5BQXO3oT}66#ZrQALPCHv_+UA6|9iTV^Y?f3HTL6Y zAWuRP?1QY@zqHAb+sRdb&=;J*ZpDxSFo#xueX&3OHF%h+o?bFNJ$@SyZ-@Z)% zoY03&Os%UEx1XqB+o)A8H)*jj_1(fnFOi?0zXoN({p{>)?q!RpO2{u&uvsGE3Y|tq zMiUU;CTLB4J66Zb%QZj3{YqXCBZg39>`i_BnvP<=r%wsSS{`}k7ZmjI^6^b}h7jv$ zCQ#L8OioTd_|_f%Zffd9xI&JcR_7&|4Tx&hkTr$i;^+mpPepnsAOlXkkaSCV@&tQi zZ0tU|B&Y(c?8jc4r>3Tcm;_w+kS*I4u&7MAtFK=HvAX(Su`V5O`oiz@VEJ8lcQ8F8 zW3@or@$(kn1I?utE7iEBe7K7$FHMQ>$9l)!I5Vz=2XRI?sFlq2A>R!b9o+}J3Yyk2nG*Vh*{@#I3>%3L_n zxo_Ws07oSoGDC|HI~v0gtSxo-?p^MMDd>7RLzw|hh$3HZMFO@&g8zOhVA34;=FJtf zoV2&MM{8QB+e5Xvbb12ML7&;_dkIC9VF3 zw~idfjIi1FfC4J{C1XRzW+WA`f9G%;m4Q81^xc4(MLr^4K#|QH|2_0 zT)YRp3g^A8;=)c`=y$a4FJ}fgLsEqNk%QKJjjk7<>c$|@hC?e@H1YiSVUv=c{tCKi zs4GWBM>j4M{(L@NZB6FH_9rXroNBr_>E8PEljvyD@9)#`Sy?mL$925VK%gK({ylg; zH`#2lGxIDwUJsO0E=vQ{03XHp0ig4$-c8tkT@>l4fp-~j>>iQL8W^i}6 z0>oIVBFFyzet?}fpth<-C}4LJai-gN%!tiQz{U^+ExHHk=;>WLGC#qJ?zH%Lp#2!b zdPl`|{XIx?&YqsjiF=!y?Qn!U*VJ#^zZyW(}eY;NJZWRtR9v^39u%IX4)OJF{#l?j-K#+6d;xtJ~NzLQ>1_tbslJpz1 zZO}t38))?P^Q-jQvUptf4DEX7`fC3`oR9an0Cx01-)D4P{r2q}uzB-qYiNPL_2+%2 zOy*G|peZFawI0Y6jmV}YAV2((e6@v_nV;JKU8Sn1!nhq;H|_IXVJ;ACYbAXkR^zLy ztJ5(sd~1JK^9kXYrsr;(8hiQ$ovPvC#_fiE5OQ5q zLfakzrw13GF-dCjkE{K&FkNmVhLq#y!iNV39esWHP{aS2YOMYu3`?!3u9laTMWAu; z1R~=TAv}D1{3u3=GXNFlOAlzNsDg{M3fOsh(SBz)WSPiBg0%cN%I%f7e)=*+&U2^6 zQoJM{L!V|y7`_#!^CrcrpGA!h(N7(seHyE-B5OlJ^OrU>CD+|CJ+bl(%^%m$f=C_7 zu_$tOj-~*;b|DyUXtH3*SqeHX^O7I{NpN-#T5ulu1P_&RT;cWf*Tj}NlSD!e;Kc|hqnK*q63axFGDCNOx82$xC9udQ5 z4U6v0O1_zti(LL6nCJ5D-J|tkswB9iOtL@zetG}rcQ-m^(cyPvRD5fy!&4tfqV3aU z^$g_$dDw$vtFA+Y%$2iwqn1fS$;Vwk0rBy`j4E@ zaQ@e7R5Co7>8rL~4!`NUIrB=G@d*$-3N_J6q26CV6q*O$_D+VCJO7#IK?3vjagrH& zjbtaL#;87%aCElN!wLnJv9+*7qhG|vBxhWZhdrR3Yf<%$N$GZPRz2LjdQHXThf9rE zkCBG8)oYSfX&kSy-X=x6ms%gN@Y;#75}8}lL&csHL{e8-xlYCSg)ypAI$vAmbG)i1 zYvdX_uw|z=esjkNlkMkm;=A*L0)|r4ZCnWN*+ZB+=AI0&sKT6$a zDc2H?bmw)GC!`%MBHk=su_va{_fut&?WN0T^NV4;L)dVteU@B0O63lho|>uQS@JJw z*^Mu`81xzC4dcJhpVegfajJ$f3kMU@)+wTo2&Wd<#_@u)JI~3*EEK--Hi3G^r=XxT zu7pPyA68N^VEewrJcqcb@(f3s)s?#2PZMP+Muc5>4*AMt{36rUXW93?*@deQHo~4I z(WyO9Z3wJLE#-=GuL*9Zl2elYKX>zuxaS(W@3~@U{hmAXV#`PF-OCnd&1@4&c*HLj zYT?B^bsTjSYNPP-Nh zv1N)k1!y7q?7O(^iP5pPccR=yy*sAE2x%h1Lb7oT7)E=Fp27YtdJqYmi}H!j*ZlBB zAOHQYcMbg)68uBS&4*6XvDzuJ1;~qRf|Vn4xM(`K3slAo-oL&J#zUv+nV0o#HgDm( zmSX)@bRo{vqtw?FPqhx|<$|y8IT0Z$!L71A=esA0Bg#a}g`5nimb~7?s6|>B%LeLA z%Wn7ki6^Sr-4I{DsCFrim3Mojl~{9W z0NJn{$?+xFl*aLv%U5Khrp?P_TU;3apA~A%!`A=dQQtA>*%tmZ+}&N~!B&=gJ@SRn zvDfO^nLJwQx-!AlPdSV0X|j9iuH`uy@jR%%HsV?0eQ^1OiNR5_0 zi++;cp|2ii5u}U7wm71&Vy$E)|G$RGntSh_4kp!NS>{|bW~4)8+buO zr%jfAQkZvtkLAy_7asCsyaq36&z^vp?{M%1IXelMiL#bXOTh9CK)2dG0vg9}+@rpF z6HC0Bk>kbs?j>pNj{oQ|M1_D$r*Y)tl-D7R1d)==f}Q=qKMPfOS?u-qLnH)&LOau} z*yUSvZx-+@o_{hgd=;LFC!u)pX5@Am4O?!6vhrZ(`yXT&HaHPQozLsDtJ)1RkpQq@ 
zBuxy!rxVbn-_f5Q7*}~&eHs}N@;~;{F3}~0p4k4kNC9#vvUw<(nWLd_kyBSEgHk3% z$Py2xLEctSTnxRwD*})P%H#Ig*|r*i@=sE9$5j$*GG(OSy*`J%iNB!wyCyc{Lf_}N zI1jGw&`JA`4|{D9dX>q99sCXbX0}NT$=7ry`MOeLp)+4#Mn(o&s2kMSJceEt+Cz|s zE)y&(tGt{Hp_6cA0?2~}164p#me9T?kL+pi+G0m!IpedTodXb0zr{xk8ue)N8-|v6 zfZp*Dg&Q}}5j}M2Nnt=8tyOT_91!kh*F}P>SFc8Bj~8#!1_So^-i`f$C;~y;ABy&c zcBrig*lgPpUBSV5hxOdlf8zc&d$dRWxw-J&u z4s8K5bMwd5)*o{$`<_96KL~2PhZ{4|rzc0)2s9AMU{GV!>PruWWG?{f%}MLbc|hXO z1i?Wb%rw)(kO=K9WR;diLql~Q#(n^kmp(wLevX5all|PeEG3Vl(KgxsmAffb5_AO{ zeb`8=OUV;@mS?+>alUDtXD|V0@B_rG0}Y40+aD5P$zZ@+kj&GQHJTcl-tPI~Fn)H6o0>vRNY{Uuz%PC5(uSRmuPZ&d2cc3pHVH50Yh7T9 zBNKIm5#Bv(q>7}cZIWL0K7;c;2JI&@iw`IKiP*~l&ckcXxJVVgp;^K2-GhC1Lc^=gXKjQwC z$ykP!x%&As8|{(Y<`SK)JA39G_a=GCl5^ z3jX3`$hZzJ_H$EYSr?nM4M+7C#wCPNPE9(fRe{ohx%gG7ya#d&X((sro6qgT)H&;L z0{^6^e6;MCOP@Y2W;m##p;9XwyM1e+@4Toe6^tG)Z4@IKsv38jy5Jw9mpF-_6S^vKBvt43-}WO5Uh!X>KB;$G&&BhpgGF zL)-s|h^Sjwdr!!F{x_c+yR6pD;lXsB;C|$KYeZO*wAl)YeiM>JirC7xchV0Pr5WGK zq);OjGP0xV$_o|p{W7@>8KD2C_Eqnf5p9mr__FY@OIqgG?Szz^XjkyE=Q@+!*l|VK zYi({t;BvsM+PgH+#l;1D&?pt!{aeopUR-$bMs+5&3qBPT7S{ZFh>y5^f7bz3Qo}OT zu@#k-2m6t{H!;~l`T+MK;59ll8CW!b)SSv~b1x0wknw!15a0BM=t44=!70(I^i?VB z!gYeT)e&@FMfRV$?TGa=WM8vW*kdtJ$cl{1pU=$P@hii&vEw=YJj8#`bbYz;+4(UX z1@@i?vksZmCQj8-vY*_}@lbpD8*jaR->rL=#PxADv6;oq(vZXyC61uzRF((tb~nAX zdRml>4^t(IQZJvS38OTRI3hw*pm7K>LT`!kYJk2G2n`05(evj`Qe{woeE9!v@YKcuTuF0PmZ=H54M+;D#Q@cRHv4ih}rplDpDTZO2r z#E}>oFFs!57n(CRRi|2K8lQ{gHr0>8L-@5Ni`69emQ z?=~=`2A0GT!id}&;IAvj98KGTcV;JKmF^ZgR|mG8{4qp1M)+hb!LMGOub-ZnNU=|a zdX?!J%QYf2TGP{;L#Gc31j+zrKNhbSCXHyL7JL=$nntl)Gq^zd4vOXfEpDO5dbl{! z{&tb%mtMyA+T$fFtJhD6tDA-%%uZE_JeG(FY!EzmYVx6NURKk6I3_!f*oKkjlT@O_ z1h<8X?)9UD$0?!WwF=b1KYQ`_ubZ8<#uQw*gw;7Q^rBsm>S0M zv39(1;apANuE@Y-Z-RgRu6FNH{Sc%q{Z#ENnC-bo)M|BAnDlw`#GG$z`RenFwR)g&EWT&1)We~9;#_+ZG7 ziI%9jshqB>NDe1uGT?`F()TC<(Mp|Np6AM3d?Jh;!w?ROQjc}4FP*5a6h-YhW#2*c zu@bk4wJ#VVA|rw0WUfsPZn%%QTvi7oYZ13TqYe}7p0*oe+imknHAk`JcWE}I3Tg%U zIW7t|7tqNRKkNP>Z=YSoGucTvG$!!Xhw}PjvOlMQvdsCY0k6L_P~~5Mk&!I)4j5;( zo(2^78$Blotf_j2Nf-F>P&=-KFxe-cjnDG?yK z&4)uCubC|OS6eM^G!mUP(@mF|S6IcO`>J7Yej(oUFtz#8$-BP)_e#3y{xoJZv>mozgAU+MAb)jND>`~EwG)liY= zactGOLCq+Y_t)k*#Ho~`^-N7HUf3I%KOwUuuKhhwHidu7 zT*Fka!b$Toxl~b8jYc74lxdc{r-{XZ=7R~mXjG~MibiMzUCic?Z*F`_Z ztb~6bk(SEfcnz49H=WN?R2eEd*|N?+V?%_3^d)PbGpo zZ$D_b)AJY-CU%&AvH!i~p(exosNjK+Xt{c62z83^NtA28i;kL%R_gj}UI=<1yrx<8 zMX5jP)_GyCEPR)xQs(cpt=(qXmZ%8Oo?p7EhbisVw8vtjCbf*XrPin?8BVI>&QoGv zOB?>!&f|dcb}oNlHucHhvcLpzh zX|@3>wdTr@>xG^c6!+H@#>)!+f$!NMs<}Wf6jBFc#(gJAJs3snd6rUZ=EEMJpY_wPAN9bQYb6*GJJ1M4?@Gn_vn8iC)^LfV zphxi99Y61u)nb)d9Zww5+QFJH!Ro30H|WV}#7^E#hHXKkRM-EmC9YvnG5c)WAQ8HO zciLn(K2;C-O-pujk^kgd~&~}V??teob=V=f7z5da#3vy*Trgt`Gt!`BVGd+0u z(V8@XvO9ySWFl9YRF*lSF!i3u#qV5A{yk~3Z~A5XQbkK*$k}=0lqwZox~OGk^o9LO zwQVoiG!lt(V_CT+$wM@~zEeZ1WovEi|7sV`F6_*rTG-+M>upQ_ys zNouG#! 
zreK-+V`(~8OMvEa2z0=$6b;D==Zn2A3E^>E(|Z|y%qdrSJRL6*5%yf4SkQA(z|VS; zTu@Np=x33fs93jikvR!;ST`N*@q6ix1+Mjc``bz6=WzUHS8g!26S7i&5hE`#!u(T* z_Edo5uDJ8T#d9{eu`kcqtYR5|$CS2WM1Pbh5ELN_?Csq*ww!klE3S#~Sx0XfQQrQx zj-kKB8r1Wq@tLJrQPZyRZ=b*$J!%e`so@IjqscU-JY$`)KB>X7p1+iN&xzjnWp)?2 z{C86f+Tl3qLLtc=?b+e74?Z}({qpkh$1%+PKb=gOJlJKdq z)Udc7Z!;-y$BWdMdK=^|so$#Ts7ke!jTLY{uvTQ(W2S5$p|!c1qA5TwC#{xR6fxF0 z_1}W}R|grDbdp<3zRQ+|uwI|6z!bh`P^k{YthRPJJ@y7h5gQ*L>G$v7 zZNAiSO-)amTUws)wpHREXQ$jIJw+bl4N6Ov(EZ#K*W8*~B37J{PP=uJ;2^>FC$?BR z4~udXCVUvpIPMoNUcHms=EFpMRr(;iIkb-5+haX%BbO9&&8~&}DRi;LN$vB6Yi@Bn zv&d?8BAdX3+Mf?&t*{;8OJ_)z zUQ#Q#^U~t2>YDMX3rVTAHM^c$mdj|PSCf8rdUcJq9v0gL667b)8-Q~@2UFdfFkQ~d z%lk6LOidjEVp!HnqITJiWx|3urkDER==^@7RFOKE{=4l|!tH)); z$jjnMh1&#dxXK)UW!qC-Ep7%WpYc&v2yjaf12}<4p zkGIotjpoemHj?OXbUGajs#p;C?r8|w!-!95C2sI>cR^!QjGF-o<*U);@lg!#3u)EM zl@GEA&UmOnfJ-#?o^9j7>cDCT{w?r(viu(PP?!PDRmA`AiRa}jR~BIS8(Oqdu($M@ zz%(mxh)(s38)Ir?RRL{-&BH}+v#-`6tP@9qX0zAZ(_je7RkeFh@n=@m(Pe(^VV{)5 z;@0Ln@5wr{Tsy<>Iv6-zW2K%yHJ-t-znKBuF5NOCLWF>jumeT$C}#_1YANCl3^33Q zmZI+O8R;S7^vQl2kQ{D1y_y~J@@~fyE3I!459e}{&ZDC$=JP$|FQ&Sj7hQWU%QBi7<34T+27NX{k7_k6~IoP zYVshy=Cez_D#{U4^X>s;LL7v|$|MQ*G{w1W4`<46N^J15B_0R>QzElac36Fo<-sMSE|{u-x@-Z9T74c?9Xd-qBSPG3AC)|ZFrGj(WLcuv?w zPk|VteEs2IM#Gd5ih4F9pIwb!BLhC9WFU?48d40PK2c2-QlG`Jne{!>sO8>1m zIr&P)lYVJj@w)-B(M~V9NPNu*K z>lhlUQ0Th~Jr*F6MFX9Kos$y}rqZ&rt$vLs>~V+mWl%r=C(C&F@Zr;lZ_9VKfB)_Q z(tP7GkTL$z89+z^{0K`ZyirL-_^QcR0$<=m#a~#h6}N@VCUP)CHs?;l*n#|!QO0F9 zZnCBEd@xYrEbEw@ZXF$Go5mu)Y7`a=(}9KwGz$#g<`vo!=u7czzyE%*EwT8jo+eph zAZ(Ge_cjiCN8kZf08awy-dE6q{sH`>!`+o9&{d>9N&&*I%~XRBO2Y)URnb~Zc=-G^ znTKn1aC{*Gt@}ase*MocH0Le5;^#wS=m${TAazuF@7xa#4mMkS8wcIJ4wMjWyu6*m zgsq=T)1FJ)UOP)A^F|!}#B;}a34QufHvK(5R;B%8ID=6*f`6%uUvm!(wa zZ#g#Cn*;Puly&-rB7R}lX^~$dzut?8+l>-{j-s>KduJ0R2!gl>IINuz6y|^Y_{aZ( z#o5d>d&fOnURuHx5GeQEQ_0hu*?}H=4zSZvC5qD<;6^Fm%>y@J6A_^VwnYfA1sFta z0_Wz;fZtX32`A7tb@Hjo2Lzd*wjwc$6#yhLQHQVRD48R)#BZEo1+yecPXNBonhi9199hwA*5E9qZ7p{t9s=DvJk z)hzJx@&cAWzSq&8(l2gkvs6+tb?=gTk1}Rw6+_;vvQ>R#aW2!*9c)~IoFOf+@%&JQ z48U5tL)uN_%Yq^lo2O)(o49EtxUU(o87R*2;Ko|fILqe$uzS{|t*f_ri{~?S9w-8a z+5!SltpX$f>vpc84YaAyFK1oTOG5VVXy-UiLW>#C|GC}|i0?nFpS%e4(L z;FwHFd#H>b93EN#9WcnWt1p84!+x8V3c`uM8h|%Zb7?6J_LS{lNCbQB3O9>#)q}Z# zkFs+?ou=wMI)ri+H0_BzdajLs5tOVcC?X;c!UvbVbz|UQv`x zZQ~!c^*Y^OAVak>z<3QwB0X~^RG&9ABm@`9f)I-~dO=y(APh=HXLon)uP26Qskw09 zy^=k*nq%RsSRiSoErPBwrJEr_TL961B5P2jY!ieV2*g37WE&u!fG=Z^D51Cas)9_4 zQEGTcM+d=~GkEA=hW+woI?zxVp`H5U-{YoshJX1@Tcbc|umIF@RPpfe*Ow6Br&xd# z(a2fi-dl2DkE3ckRAm4eCybO)+zif$)$L#@XNXGBRnnQ;`kh5dWuOpW|4n!7Sev(otf z>lZ0>FOhbj7In~dp@JS0fjFfK&kYk)R0@pE3K(eu)7rb^C5TE;MijL2yFP!W2LI{{ z!X=h2r4nQd;L4v-iF*7e@nQuK?KDFq_jGf8vJ+*M6Q2s@^UvR|w6d}?2kyBSP~w0K zI8mEJ20lEi3!FKKw25r` zNCTj3lwAixHc-MYa{8yFq?o~P%DoI-yd;SGZYf&!3u9Iq_RwyLa2Y-NYur+hyfgg| z34!Ev<@<-1#H7(=fcPbxo`K3L`?kF6VmcP*1(t@IJ6mqa2N8>D@H+%-jRZ6Z*tjdD zjBC6S2k^nD0WIBZokk5lG*|7~AFAubd|`Q+3tIUO)^RCeWluCkb}trVwU)5+zM zXFvq zVN*j4-#mphWX@>d(A~wZa~N!jK<`&Hhhxh54Y=n7WMudVz#VA;#d%Px2yeUnhN|=p82S^kYuW>gaemA(UPyolY(c#&% zXZtG!nL{9Y%D%0&q}ZbCj&ZX`2A3k-1(2a#p7!qi7j`%PaRwoq$bz5+gb_s#lKbz$ zk<+Y->SR>m4w=v+`Q2FCyt26%I)g;Z|E%;b7<8FQ3jG1$l-BZRJWf4o6g@^b{b#KdSwIzZYEk)rAk6UL1S&!B;!Jr!fT zE0!$75k$%0Sau3tWcSY>npF)B)@CX}*_!In1v0=z_V zsIBu%nTYP(IGybO`4$ZV!ITo@fv5pk9_RHdu%EproT^b!{2X{cKh>!$FN$@u!%&C8 z;%saJC8A0Ek6;|3RDl55HLg&voCVE9Kp>R^GPcJLZh`-8w`od2?NmzJK(&QOEaMhb zP=y=Y`;Y#b`xMHM%&Xo(Q2%#zbX+f#r??c<=5od9@zNwqdcrgBH$ihnJ1l_&f;oAA z{sjurpd}1wK6yX^aDH^2fNHuG8hToUr$`{Ckj7Qe7gK0PqCh=|l z`Evt^2tbI1V!r3=>>G(vHA5rlX*4*Rde}WR!5}j$`W=O@QCc=IqSe8yfozLX6P__& 
z$*+pxeC+#T-}3-JpgoMH{a$ziNT}e8RHCcdAJ-XHd3TKxPd^g;`ZVuegxQWdjMPj zjV3U34GIq@L_s!)15HwaNS_1Z9#n%4oM-?RI^UXzi~j2r#m@TLrN7`ev;XR2rN{Lz zF8DwWhoIh-Dq@WzsS9*yRM?IR1ZRbTrvb9xd6q|i*5-e)LeK)GLKw_4nWTN`KvIQ@ z7aLFh2gyHhaB$iAuJ#(t9+i>w4|aRuo7uP;s(k<6f5C;<(Tj|M z3=SO=134I#EhgSN2h%|A?aJon=DDRM9W#0m7Z+(4KQ#67x(vVx)ys=I%`hRGBkD5Q zAS*)EY^WUDew_c_{reW6Hyj!py8(QB5N}3-0&E+rXZ!n#MTlBgntAW6=NA99^soG4 z&)ctOAord$1^!D4yg?tZ#03T0(XQ5?wYxLYaZ1{+k%~*ek^zDT2DrcOE1z(o2)zbM zIy_k+wjK}}5T7FLpp689Ih;)ov@o6l`{WfAr$b|>vC=(VUDzPP?}nNggp0+lAEKg2 z07E)|jE|4UZOK1|782X8B87NHxNKO{=Piee_%#U;*_8&BqD)a-`2RV|ZGdqwNH3J9 z9vB&+gtK`9keHpFeY5TF2^l2g2Hyi=QBhHH5N*Pr8GBJNG4uQPLnkKm5m}hEuKoJu zZI!S<3p&5gcqqvp6k;x1#yTCEBGi<&3}b>f(xpBJro4$@_W6u)Xu&eMg?dcqMSVm+ z2}tPOCsS}9qEIlw+X}90Xpn;j&%kc^rHJ*Je6{r6Ye3`aBpP@;9iRGT&l7xXD&%hR`%@=H_OkiG+P ziXdp`pjMTIcPAX}ZS(;HXzu&>P>`Hd>Gvt$x^>;zSup6y6U?PvS|DGamy%*YY4gyH z0=;_>-1QrP$R8sGWB@_u=I1d8h*^UQ3NAvI@Cj_l{I{OS6hU(=I0GC|cp&YuZ1N~Y z2$fI)G@Yn-VikWd(f;#WsgkC4Q{XGAziAUmTG8m?Y2F2R6B-s~i8h0%DEUEzMg}kF z-~}E(7$fgkrQ=k?V>rDWfC)$-+RK5z`W0pvFe7KEiFQE}57MBn!aJ|#R#zjSG}&pg z_5?%@$pk$sNW1u7>YY9gZdpHg5cQwpiUMf=`|EEuD$F{9h!U|lC6?%9`K z!}*CdHa@=Gdjg^sHjvxGYZnlt+5eoP!F%kG29tP8>lG4BY-oj_{Y zs_(%gz%{^c_VTK6yC@-wIKZg%kqLOWfl-4q=JxH|=+m?wtzUtYn1eud9eh7TN-gm> z|M_vc_9&`3zaR_z|8y=&La;9&1x+%v2W4*$s-kQ4I|7;RwB}2U5w&1%1HTz|IA?4^ zyxW~Vx*;L%zJE1kt}tXtsOZJ_s^Og+AR6W!%`R!)yDR+G&8M5xUG%a3>M4MHxwn0k z4=CP(6>NgVuOC3v5rnsK%ybiPyCf|9?PizNQ(5Q&=>AWLLaylxkrrNz(gBecxs;^o z=IPmo{)*?i5ge34;fVIF4cENDv*8M&*eU3;FR<$)K=+FP=`R*?dc319`e1?_WY3+j zqPdL?fl=FHjzlWK8WJWne<;gB*x=!b9PIMkUc7~#D5BBk?+0>00*^Fqu30L<|G6yd z^$Mf{-j^`Z>@D&kQzixiI;0aU#H7_X9A5Z92;W{Yp3#vlB>wLy&Os&Mx}=PX3egG~ zkBlV>&>uDeI*D!%()gGz6-<2MncQx!heN{7=y@Fw1m!(mfv=jh`A0*+my(v|`W2)w z#PB8+fIk(lWYHK3ij9MJ0ktFTt%cbTS_t(nb# zNm;p20W$|)f$DeAx)pUo|D#_L92YKNU?_C<#mkp~Kp%t558g|2E0#!4u~}5G+!oJS z=X~;DTx#mwna}3{DaZqLh=G9tlu<;b-ImA&3PPwDLZKYQAW-pb!b?U_{-%L}!G*GO z{qcCAO@So|@7}3G;SBQmWKSLNC`}xgMbps9HRZMd5NeA16>($ z3Rb&1vn2_94Yn{4GJm&4N0sDDg3;a0p)6-Mv402Vx z%TbIR%DojYTts#N#$Ev}1FxO$(F1=#HZ@viYzQ7C2^0$4a{c?>UOd=DXBU^V z$G)ua`iN(H8H;}Kj)??M>PX%;*Y&%{b4Y7rW98kdgY>G!p(}!?56r;v8@O{WYgM7CD
#K+NiQ7D&~{@t5vI6 z+&yd9ZTW9K(^41&+E13RoXZbJRAi-SWeIi~gPRaZ21@YIK57w=M9v4cd^dI|@pd6+ zA~+-@L`xwZu3SV7$Zn}|tT)os)GR(|huygbACv^UK)6anEn6#xh@94p1QtvE+}R0Z zuC3XYNC@cIBFu-YQY1)(sk)=1@YqIoXBQU&R3rN7C1F=!=jty|^wP}R@8IBIVsd2K ztodxr$#3D?DLI&YiJOXo?Rxvx;qWw_+1@}Lylj6Cv}x2LOD#~714@on>m^Hs8UcAh}c!V0W-`YF;#hm5~)Pxjub;l#;ls!v}Zb8-V?k$;ilv)Rfp( z+X0Nn#Kah%I^`{}$A@2BJSZjDP{KDoT@nRcLP|;i9$_y$-*XQeDmyw>BdZDjv#_wB zC=`7*1}Mx3h6)QAmi{W(PY1F%Ks*D>peMyUFawfCF)B|{IOodo)tlJZEX8kcN;Tki zcX#iY10qA-Nq&Alfr}C66M*i)=Ec(u4#@T(JqI*?lk%hOImq}xwU zth&0HLwIQP*d$OyB1(f9tyiyG{cpDi__iieUArWZ-f;#gVT9r0$ z@x~#);<^ev#1|Aq`Jg~XgX8iQPet_eS&@6dgc&>#T^0Ue#|gs^HK&*ZBdrqH%?f=p zd+f%HAH_z8rNbEh3})|N*kZvhe=ms z{`f)xVLs+KeFLQ{P%l=VYe}nvj{7B#nH4yiT5^!hFQ;~42bu+DLK6=7dnbbjj*jcn z6|JaAco6+HVlcV|i5?b|?x|Dj%>t!yn%qY#3~09vSZy0B>L{d}tU^qiPA~vwDL65? z;E#nAUl-fw5-WaoAT|YF`mgYRe}!YcwzL!u=)6pnc+bF?WTM{AM3IrnDXM+`=Wiq? z=%K&S19}|b=Vy#{c}Orc1De~cvF~_Ck*TTH32{1ImI^?pHzPp^H-VU-;MLgec2{w> zW)dT5R|B@eQ*iDiYa$bZ0BNkkUjuck@Fihe5(~Bo0+vqW3xnZ11OXO)(iOUDYD^@w z2*2A*KtSLt>hjvIuDi~zt`{mNfBmw+y8D`vUTg}&M@&{W%$08h9(+8(HXOmN@PfCY zE{?+OF?sie>*NjT3J`FsqUD_JJzM4=6Mc%zvJ8B|j#J|%UCLwj6=4*_1YZ}m z+4q{S@4~lX)0v+M{QE+B98h{{UVgp|g1bg@Nh%dNhg@AoJ9cagS8JP}n^fp>{qR78 zuj?n25z%meihZ0c%xDk+HKeDn{}hY=d}VDxT|+}K4D=UfCP&&U0nD_+Ltb!YUOpN+ zhRw3FrvPLPR3nw)7FL*D+;@B%u;s7(J~??hP&(`5wFt@MSK>H-?0POX`jOMrzyM z`zE8F^gmS$@0Ps8)_Sh(!0lQ2XR4LAYWnx4O@hsFqR}~lZp56@g@KkVcHrr~zh3v- zTbP)X(pyxY{E=rXh0kb4=>j##8Ntzg=-|NsVl_T{?%Y$-*3>WY_&hWu1?ev0pz@-9 zWsHT-QH&nD-De|3zavv`Gg7)n^YWvu+-XQGU!Dy5XBM(JyR+Sp!%Yk$mOT$ zx_yC~uEofn3yQjP=P5aN+X3!H>WW!pW@ZLTm(L*8>b$I*`0;RjSzjqx~7^;QI#fiT0_Z{AUikesL3ZQpPF{@S*g!DF}6xW|S~ zn@ElW{bgs)pSzONM)T+h9wo)f%F4B7P2Gsj93Mf$%?`0sUp{}%M8%#~3s;RUX^;o= zUPL3hhMs{z2Cr~mBd&g=VdgG(|Fb)C&N9O|QFG=Y=*F%+-x52?+m7V1V#vh1erBJ0 z@6Sy&KRW6=(p40#WAXWI@)OWZftj-}n4KZKk}73LMtRHRAe6}Au;jy8X~p8rZ7V}& zY5npSwO$gcxf5zXE`N|=kiL?B2NxY%bj%H(0CkQ>ms^)F8#>E9)!;TE@}S zp{k1>9ji7U*U{M*^MwnhOWlUuMU;F}8GW>Swkt0m2dj*@eqn~JA;n29Qc zgb#q{e}mW;N)gKyup0}O<>*ghd4bdlRjKjWvrDKmsK`F7j>OR`nK75gx@KqCDx3Iw za`F_~s-9qlIS*go#RN%5sr$Xl7iG$LmqIeaIEa%7B_`NvaeFuUefSMlR#wJl-%Tds zAW}h48KRzU;H087FRog}%WH4m5m{kJBIj5uLEY?CQCUQgNTh0=H;3iyxb{lX`b(Y<(NG&cJ$1ly zhFeURzvqX|B zw^i4RjJs*x$lD2;9HB9#qMJfhbK@>NxRdAdQw03OHQYHi(e|?jN(foXK#swW;T8?^ zH~>N)rUz5gBOq9T^Xb!vD*sw`YRpM(`-2HzF1OP{Q)tm;?y zdJ1#Y#Py#mA_j;ski?q>MTp0X7gsWalkUx?A+&Oga09j{yWY#m-!JZ?1LcN5)bYEg z&CLa5WR~B#bBBaQ(R=Wfl{_7nDHq-^vc*W8)m!|VkXD;z=#ueo1t-qzm}eJnd|N!O zEoGFND4BE&@n08*n_1B;Gv8N_trma1h{8$Knq|rDvy4yT%*|#{oxM2nNdyzzblro4 zzU=Gv^%)eK*W5<@NjVb0fNnEL@C04I?)m7EFm?G@mpt+Uv)b6ys@vLD0?h*&c>cnL znjFJX<3W%?jZIBrvQ%{n@H%*Dlrm37M8HK?SXA_BWF!bpgE3Gsg7Kn62G)T{rDLNE z?nq}t;HFKFl3iHe?iV0(AoE&?@g$Bq#N1_Aw{NBVCru#0LK?!+U0wyaVMz`wcDt0z zk2B$a_WoAqOdbV@M0i*jBrCFL!GQ}%OS2=Ht~~#Qo7*2#x3s-~o{%U{>>Uzl0{h_* zc9&;mAr_16&1T$0xpCs+;wIMCK?p@77<_r2oYovkWUA>^D=`14~0cLt+5Li}2b-FA`eImikG8O~BT=Yi6$ z0QSBg#}ENs5erF2`TvL&c{r*zIy#E8RDe8KRHB~{#z*=*T&ikC%+DXjg+H}HAF>Ba z$2s+_^WnR4D~ZA68D8t)h<-|< zmeG<|NL9o?sz3#I@VjiBL?%Oh^W0Jo(^{^R^{j0d==tyOOApN+qTN` z0B(ip4-@(b)fs4rTLvC{`;Hx@ARPAJSnXL~ zmG^=6iO6Db*e4M3qp^|bCjfmPffRQ&6hCL)B+?W30d1ox3KYKMAF-%<0GpH|w*}2; zea-htBwm$OSNl&AW|-Q`pO~H=3~MT#X5bZr;>$aV=cX-55^ZyHGr3jqkn=B%-2ifC zisen5p!t^o^9PggZ0rILG$ss7S62lUh*n>E9G@?Wo{A*0K%X+ynz;@w%#i{|>#yxO zV@X}4ce^VNoaK2J1~ZWMMgrLYtPrdt!bkx*Lnsc)(7}B} zS}z>UBw3U6pip&PVnLA%G4;0_!^6WhtNO5`i~{)-)?!!q;xyc-9^;3t!})ERRq@UL5OMgAGS8 zW)f107l3$#v3QZjfx?3g6?V=C<veg)#3vW&J<6H|N&9Fukh7LlgGyHk)a^JQPU`-E82xGfhSx(U3RWav z7AQ{?;7UO5!{1OV&Z`Up7R|++hOvG7TkZXMUFCBqR8VBu$@Cm;;_k9QiP7e?qAm 
z36>4bbgrHXu3r&4weV+mkdz1-4=SV%mn4tGRe8^o?}^eXc79bUXZVL#P`+A6i`$DF zIY5NTlCn|yf?Xc2KT(4=l!ZqMvMej~`8l@#LFIOW(lJD%15{;TtvP#lam`tprusw2 ztQyl#e1W$FOvi)6VDF(ZodUE&GDRi0yTOT$qk=X@>>m_CNi1_Hk=S^6bce+5{&zK- zE+@aljTcvaCY;@S83z9m(j{1Z7CZcCWm}^Mi+r6@ZG5MlthOH&QI23@H~+Si!@? zxPO1;<;wX3>u=JoFud&(7HAbIxM47gu`!j$Y{dfe>kM8lSFm)6^7CZg3*G*2wm9-UlZ?o2@nk%TKe{7-{}m035@ z>>Hzz0~4s6dBdT%g7xBPN0h6)^#@tX1DV$(4;}124TTv5FI_uvJTyK!x(PrUQVO|o zND~+n03s2Djc)f4jIBjvyA+rr#0$ZAaBFUg=@`s?KkWBav<^Iv`OvFWcU*!@Kw)%DxrO`L8hN zzm;pkBbr#sWwb34W7;Y>sO4Ip2|kKE`CPY|>}tN$zIVV`Hj(sJ@FkLCkHd5UV&V%7 z#J+iaPygt|o%-wU5~?P1tD*UnjR~G4jP=xzuHa@wWRae4m`o* z(0nl4Cw#O-b<0uCRS?F76Ecn*HiY7VNFFy8IijFfIXd6~Vy&YLw==5i5-cftg<#yG zIabLx!IfaoA(2mT>bIH*g1~U$?nT#~kd#FC=ok|s#7WwI2j2$(kLT#32affDYP7aTG3>GNmdsw`#!AtB5&P~9+&?8qw_ z7e8w$HDsN6BRIMng|IhDF_hi@VPRn}Pa4t!J>CmJY-fRj5(Iuo`dZ>3bhFpUN6_uY zH472`#qk=LZ^8KQ0EU`VD@#c@FZydDuK5L38&3a@f=kPA<(Hs>IFHCX95CVM)=W%% z|Lz6K8F6smkO6w$>^fGa?&HDtjg3{?o++MXVqzkZ^bx=L(?P1BX9HXZ38siQFdU}E=DJuo4Xy+bp&Uf!mVzH{7MOS1xjWA{$6sdfz~E! zTP)r_*TRl!7s0Mdq0}))$TiM`tUPu4bT7zn0@$Nv9!Fl@*HV%i7d47-Mc-w_edH> zS}o!^NFgOrc^Ux-!0`RS<$V8g%G4D46n|_f6JQ%vIDWAesfV_&mq1vHOjlYVXi9PC zkrI{g{GOgP__Elc|51FS{0w~xYLgJlM@yKPdeQ{8Z;!l`hWDNX@&WA%$+l+W;|oSz zRSH-dM-mx~LC}P0jI5ZNo_>iqaRB#3Z9j*H6Fe6e1AhYv zu8*guQf9F50!$7y0jsQKGt2zU$NBSUyjJgWWC7|}_!QtP*=ABU9ozAc7bDoOVbUp& zulo9Bgv_Bo9boO%gUnw{Oe{C-1d{y9fXW-=OMr?SQ2qW5{9XADqEN$A5bhwv+osl~ ztASN>QC*&bPQR`Lv^3yLz@h&UIHF!==INftzWQ^Uu3Bem$ zd-zMPgF7=tf7KD`WFGJp)ta%b?J{s4l)qvD2tz3;DG$J~3@kh)MG62fyrGjq-RnH4 zhsH z00_CRL{EF{hLT!@F^w_cMv`DaP8G~D0h3#}ShzSWhO6XUPs~PlBt6-Auc4h=y>$JH zK3k0&Scu8|QV=IX=~9{V(VZEzOP4$r3qTZ8Kt6`ty~`A4J)jQng(7zf+>|uFbU6TZ zllfSqqzA)8+XtYK)Ci*%6C`ST%N9z|tMa<8{Ky~d)nX4mHIM!(GVf`k2X_e@>p=XQhdGsAY&u?neKS)(faq1SD>ndNHE zkE^nlnN_$yOc&A#>wWzXL=Ko4*`ir9Xqs!N3>?Z9mXwB&rXBgtbyyBOW`W5M9&83V>-hQYhOW&0LA>RVn>_jDCVydog^a2wkc2{v z#uy0P@d5=Btxgz`@DW0K5Psrr3UbVGWXJ%482ydPClXy$O?9;?CP3gH0#mGT4MGux zjB(@X>FGkU$8%*+TR#DDWCfo{ZChJZ(8|p|?Kv12F)%b#)t6UXfd5XSo54HBfa*cp z21+FD!_L7!9e-IJ7HSvXp4p*4lRnZFz!O>_^<8Nexcl-{2#qmXx+j^D*tN$r-dzPjEy(xiUZ3;SMy3i;u6tH4Z`Gx^Qvk9R58&2vh-FfGf#px0wgT z;xzwa;zTdbEO!-|kst=O01!A9wwEwO*!d?&b!L)=T$KR4#n{0Z6*)8)JN2t4~4 z4lm5BKVr+TgIr}ttgh`!9P5b&YDIRM|%xU=M5ItK-_7v*R zS@e|UYg6z@D;(BY_gt=~;3c!K)E7<`VcUAlBB+jW(5wj3Tk45sJKRd+s| zSUSsfT}9;Cz6%jY7?$VNpgh3q0)XT#TtAU_AX$r`e*oBVFcZ}rBG!noHB(obcjM%k zl!IktO3HBMTvv_jVfvzF)lqkW{U|{T;nZ)h;a|1;#0Y|UBez&`ftSyf zExQpN5<aS!+-@(s!;;qbFUIZn%xl*&NmS<*#+55*#3 zXeoMOE|7UI5rX9`uDpSy_>;_S+=U=-7vq$J7Q*jxO%Ct|unfA{Rx61(lRgQ+Pse(% zj?!aC&RSaT*?7yXX$!}z{|y2E{1*W~wn`C{J`1lb<1WXqB_uMvg696cdqiIf7#}{Y zSD<~4-ykh0kUK8@NtCJUuqM%J!X|c7`O#Af%N#<&wqX%5TOZGeZy)Hwm(+FBXo<$>{T%U>U)l)2~~<( zWDxuZTI&)>u!%Yyuem^^G?|%7I94*;W*oh*UHb$a5K>lA5=Ec269_mbwp=N$A;QLC@&(Z^ zL6ua2E5y07wyKH`54zG$zTa0tQl52wpMbo z7jQnsq0|B}f^Y{QYDzHW3sr5ud=5%b0t1lOA=*z&TUtaQ>dr9}V&35gW3#c8mOXneQ04JhmtzL{4I1yJo8Jk8B zQ4DP`#^DX+Bm*vq??>W{j^q^-B!MX<^&=k1HC*8)P%DEH>UDVWYq(_@YRfVNYc(QW zJ!^$m7@2j1+<<>(48Fu|=^VF`64T8fcJxXVjAWh|-jf-APGpQv_~eFjG_xE}V{~MZ z@Izv!!&v9v$Dg6EHNL}BFh;ZW?8=gN<&S4YR7DnNGIZx@Nw;$gQh5b8?XT`=;7q=~ zECX)}zn+22gMhMyc#EL7g2%ZW+me1JN$P(=-pSY}q*Pu_FP>(?YOBfF@*3DVzz%X0 zkYj{2Q7E+ckP%X7p2^6asfilNfa7uCM)`pTU~BIO3j#cAoo;Xyx@4lzLh&sJt2Hha z!^h#lMAi>+t1&#iqzPx|zW~gWptK^L2F{am`t6tssfdCzk4wqc))s;)GhF5*(VVFL z=l&=>kFf>vitpiSN{T3MR|tSafDgyEpF}T8nlJ3s3QVUUX`bO1w|NNm*J1g8Luw}R zS(CAY*fa-skHw<9!{H(gCW*M2;lO}FrLe8vqi0zlAox$HH_4C@W;qKi*>5-(&0uIF zDtBQ2#ISdw$MPRST!fxs!rq0`pjCk1m`i~( z@CfVP@xcy*NdM)^s`SSi$21ehK8A-Hn^EwBytm=Lu7Y6ttEbk_HyH&STF^Xi)LQT9 zUc*vKA@8SG{}k3+K2{;kGO6t-I-l3uam7VTn(7Ql2+oMKLbNy4-A 
z$Bz+qEz({a&q%F^RPyX+d>x@nmocW);gpiX zH-k;ggh3maP5FCBS~PAa^PcxM@@e#JKJ+9F1JN5IAejM6VU4+?N~X)TMz^F4B8p5F zB(4U6x1&2pFs{7*hv?F3-Nguh?+9^w7ekG+ZoW#=j~9!NoYme!lE z=N$55a?ZQrmSSJCM!ZQ_VFz4eT)(vXEVLZ&i=L)=VeZdW0)p%;EkR3_)nIXMtf=rY zFy5Vmobm5}|3Zf_o9LX(gDA;r}O;R@;(PE8k*YosBO!XNM?ZGHOi)@fZ6d<`L zx|zyVd$iB$$?RyMXW9K1z87S{cMHskGxr6yGK`SbRX5GcWFOG8O~0?nk@3EqIQ!?1 zG<*dRblhH$g4g0Nm?(%4ix-*SV@Ukl_p(w&LQj^5xT z+&quTQwKIXL8Jwwaf?*AB9EuM5yk@!Iz5?qnyM^Tf#!S3KUBZP2JXDh*`m!aA%3IX z;?e2rZQ`fPI-84*HH>`zyaeJ5+{@jlK#6Vw^LeDuJYGJz8Q(XY2lfp!KZh4{b8|y7 z?cHu+V{@W!CoElWWx|kI;bVN(80V8SnaP!eOOQ+mL6L#nk;g1iF{P2h_LL0o{C@eD zuOHruwjza`I1w(`h6gq&T zi2VqKV&P6QzzO04f=i+02db?0^AG$(6YGc8vpJyi`G)T8t$f|4O$Rp)-u6sd!oP^( zF>{LhQ>zp*>Jc(2zq=R-O^|>rZD=J zw2kup7xwHayL?jeFRB5REkQL1775UitY+NpcKsw83G1~NvS3fBz@ec5ZAO%{5c91d z9xoJdva<4qm-O{{p=lxPALhxCnZV=>2TOK5(Q>1Ah`$>Ai8%TT2Lq*{Fei5sW~yC> z?N)}2BsP4)60_ICkRfa$S6nNzsKGs0yTD+HV`1*|?aw>gvsKO0m*z3;7B33kxy+1( z#Hd0qKu+?@zxyN3v}cRsyE4YyF|=kR<^rP%I|QAdxrwo{ z2dFf(S1c=K7GWt|g)xXkf8bCw0pQ-6cVQD8kj!-)sN@JX4Y_L+gzU(FIq)g0^)Tb@1p4f6)gZsSnifTIn)t@4`3Ul0)E zp-1r?v%U7OB$Ce3TKsAkm zQs5zt04h{=&~tFrO8jv850abT#c62ON%0Apg|4wEKb-0tz-KaepeRU00|GyY2S!2> zmKjJPv$Ma3LDL5Q-{v&*JR!5nM{b1Bp7GlvRnU)-c*c?tQ|O7G4tJ3+nFXwz2xkUy z-x1XS4u7s`UQjutcmjV`4!(x08x}tK)zG(}0Yy}tjtMP$0HK0#I>`?^AOI~Q(ue!h zyKp@y=wo1Mw4S05Mrf7js$n8x1MM_Np1umhB<1Xe(!OQ<)Jiwql`{S1JP zB?K*o+X00yxgEguvvF}%1nu~NiR0j~-2eR4ftt|dl^;8~bGZXIZrVilDM5+gr(|2c z{3_ZjqQgc#Z58?@Dkg>nzEvO#rBziAt6Cz`tGdkC+uw5!ht3d&#W6QJIQ*$1|yceR}~fZ z+v3mx_oB-#$N3s?Y+LCIMf)hzY+HMlc#94$e%in%*xs@X8BA;<)RQydZ-}@8Dj71& z>bVx1z`bLLvXm;#_pr9IBGZM!N6?G*C0h$ULSKeo@UrvIp2=`13xQ5tgUgtXLTCyL zIMJ?Lek?*r;S7sh!bRU<^52OXFTfJ$Oyi$EF^Xch6w)eE?34|O8N4ehG_@}L1IT*f z22@RcOB-s5DV@IC1PLj_BTbx&?*50K#p#Rl( zWM8v?cd%ATh?)&pMBV3SjyUUhp}LSob%8cQNhG|7l$E%imMBU~oCiFC?lf5Qfv^rF z3B)&5gLqeu^a?_#hahYp4_img?}MltaPL zuYqui(5+Wji~c)HCq8nD9sYLMV{p8Xe0(gv-8eAF6l-#nVM%u!G6DLHy2=#fAJKI~ zG3$pr&={G7SicMuxxpZVA!iP*b6>Q$XvyEHlvBX0WYte8w2)`D>g2zuPF;e=?CP_lZ;1U|RE9xbK zRuIIS88l*K!WRyf(8x$X6p`!r`8~GPxuW|ZtS}ky3S0{5V6>z?&H32cO5|oGC}4;T z3soPP9=x}nM)yNs6yL^ZO=jcw_t4U!P$NwXDp|GwdZS!lTwn(Rr4h*@00B5o)DFIP z-WnNl9`v10WRiW(xGbmAS{3sI@ty$Q(*=m_ve0#F51h*$x);AB~O$ zCnXUN7f>G8w9vCv$HmihLxD=RI9!-1&l!O=9Ira+Hldp>NY!##dhJ_CxlmEX6-=~) zH#Zzy@TuzQJsP>NY44^6twXe-*Rz;~7o7T9a`SZd>aXmE%_RIG^Y+Qd zD=~nV^!UIEuLCiAbgJtF*wF*($A4CFm{I+_kaUn9NEbmeIKb*$yHHC}iU`cG-F28z zSl&*ZIX>o}-<5%RDyZ?)%$%7&iEAM?gYi&?UWtk>pFVYVi4=aXH{-z&l}3YXDdm%c- zP_Dmjb$p_=bgDb%)O`2V^C+_awT}ChoZ&i8pZq$^sN`RtdntwC4Z1^%VCBBE6F52fzi3*u>XQ&#a$5@gN5u(9L@4*?S ztxlBxztA67J@BC|VNzrEm-gUY{x^2TF9VqAJ*Wfr*3&rw z-MAU^Sj{9*>7@+C9-nX+^U2T~dmm4vGVowJ8a{viOycUham9mipeUk*#WnZ|k&5C+ zHl*CK+To=V_Kx=9VWSe#-Lq@MtJ@)LOs1xQ2P1k05p!sxwQs^ui(Ym84x0*5AkzJUdELM~55t$+0$~+8!Ee+@9T2Rr{V+ z)$H$Te7-z=QKBr`TGE$6)2zIJNsvelKseG@<}MSbI7$cQgX`%iKOxqI44_3Db;%T9 zD`I&>m01GyJPn1)l4x)NOL&v_5gRr4>H5Z0`a^#=!&`cIw1a?)$D>pyGE$uXaKw^* zc3W*6CHmq})q4}9FQwuEtBcH#q`fX_GTY(VX${nQWR@dzG>VRn+1(C*c-o@I6y)B- zQ-2@)@_Y1}Py_(q)!Mb>o~$7(wJ>M7!uIHoJ^l$2o4#!Di3|XoKHVMu?52BE2QzGd zorBmQ6g;{f@=e0-L(pAwdsh0_^_sZR8a?j&7zaHkPe*`S6tn0pDol)h*M3;WS+!sW z4Ra-$G<0vCu%7AcLpPyQQB~#FehD-_U}h6Ql4RmFIz6H_nuUM~43^j}OCdk82h7`7 z==U2EWuo_G|3#K3CSae%MZ>;)IjxS4&d~7i_g^?83?3f+Mv_(VN+dYd_V`U;=Hzt= zCxZdmkbyR2(IPZpC8F89p@P!s0`LyRvkU+Ui8sp{tp0Ch!LhUhx8Kpn2;5Xm4|%%q zxym=_QV$JB%;0wsnSsqRy+&IV?gmMa37>j<%dCmZ5Iq%9*+ZjXiq@12-N2|>xX3apZ5}keVl0G(!pQFPh~O5Gq2_%1rLZY-kJY z6)+l{4K+E@wgI-+L`NDd-$EijAha8GwzrpoTSk7`>Gu&wn$y>x(<^KxTTC_it_d3= zenD{Bi@W|li~F7GMk7lM7Z9xA#;y=u!OgvxOriX}cE4b#qM+~}y*#mizuO{gtrA@c 
zoD6u9To9>>+!7|ISl=Ux0zU^%MiC4nvlutVPE!90E+Lw9P_B^OP!vu+687`#*|YAS zViL~kRHXR*e-R{jckb{>tV13L zzEN!V-DK`NnNE(CPllBeofD;qhflf#f^1`9lkpvps`%n#MjT*1X`)-tQ)2Z`7?dy@ za!~;Py}`U?GAK->dx_mIREa<&li&6%!dMa>~PYUmjU|AZXBWb6`IvV0!hQ3;gZ_{tMT zrt{EffF67K>eV7pCqB@DBYcYq^#=&MlYZ}x3H=MvT`t*ReBFDEa)m;#!0WjXQ=gT3 zd;0R0pO=U|wv#;G<60t3qlWnr<3brD_5`~Y*%%stcQX`x_w0&} zI~lU!#iE%D+u0l+#)vEgg=t&|x{Fbv{t!4O;Q#=;QHFrP@#&wIUtwI9kdRQ09eB4P z;?tiioB2&xOxdqQ@OZ>X-<#h?m;ck1)-=feksw)T@~n`$q1KOyHZQ=o&hv_oB}-ZVL#fU85)+k0OkLQVomJF&mRCwlxZH@k?V$FoQKzd;cCV|$ z4=0zeG3v2jgjd8JedPT1^7_q6^hb{z5>O8GR+U{T^~X*}V)O8%Z`}<~ntdw=US96n zDQPiQWRaT1?fPr(>8}&6HT_QTX_6ut7n2zdjRBGSExdevzT1rA=sg^j;@zhYx15%e zuWER?1sfr;tHNrM#ws--f#&3uKU?5pzX4Ajk1``r%E$tql+BZF-b*>cdcSLQmGidD z@AdeaFFWr)6!5c7qdT;6P{2JbzrCN0oqdN)*W$)NFU6&6<@``o;@}kAwvBtTZ#p8n{oU@f}fhO#SvKB}-hoZ*^#sJ!8_-2rwVojK94Qa*?iqOP^MP_ZyQ z0k6TWMMjv9|NOaSfA-eY2Xj{&zbvv>OHX?GYvXLu=*y{6x9*NNe@vi(7|PDT@2Bbh zI6rrZ0BA5n6Rd&!ax@EgAZAnNkJ_9!y^>3VU(&|6_J!*ZZwc9d@TqQxhaBB0@mZnE zjK;_AWq`Zp)D@@Mt-V`3D1L868ygACyteDBVkd&Zov};Z-B;(Sk$)swi)2Ocm=L?| zKYW-K5(75u_8k`n_(epj1=1F;$MO1<+k%uCyGrua$ORL&BB?{0;iz_ zUzUL}2)88*$7$W?exJU;VRm#%>MY**Oj&QvN(~wkE&?gp$bc`xj(|%4`*+e1;=Rjd zIMwm{N0(u7T>4QdJPV(BBR&Yp?gU{#z7~Kblm|LjlC_J}s%<*b~fjHWI z>eFJm>7k^Dj~^=^Kh6nw!zVFO@X#IE*M{X-*z8BPIk>V_VN>{ld)w9bPTq-Kn4LZJ z&!=zgEW16(DGhI6@@LTCNjUcl+8jmDuF6E!cE5L_s`2qdV`2h<7AYra@(T+;GK#IP zu7<*!G0$)*1hQfV*(^9}ckA1dcW%%)LSR6hibg7XsI!Nb8V(Re;pA8NNUzIqGvCtmB( zhr0B%vrSyOc7fM*{G!LA!E+~jtPm0fy`qFQ`J15)4qwAY3K%tq;w4(k)xIfJHU*H3!(2c>jJyH|(=2r)={(>4dyXj%w_t2g*a` zq`g45w-)MQ*0(TI7ZmJ0fbS~g&K-a37koJjY%73BXt(3voIxNN1D2>)gHX8I8Ck@p z?@N+qocmGpvu5X0I5gqks<#?eBEN>Ub18*8&01CX?w$H#{<`zM~Ybb}c4mwU^wFB<~<2ori;?qY^(aF30a_6Mfdn_~Fi%FJCmZ zv}m*RgGho0l69fzym_0OlQZGTlS3}b2cT9YfnUe8wbjnT-$X2;z_N7<=kPXEZl8)s z@&KLaj}}`Hur1zgDOdnf!mQkEQGz(l2D$Gn#J4E#HeG@m2xc)wyyC_~krWA>E(i5I z{%VDWG4bJ^kwpm97Rck<%E)Idx@*MXLG&1SDFPRxiu$zd$`l=u(}2~YN>seOH-agH#-Cv#Y#%ld`iO<}&lcrmBVQXSyg7TdQVWQvOPDQXG z->v)(`;MjV%f$C=^k!>L^DcV)=#eK5(Z?w%czV_&UqZs|9+-cWk+3kf8!%<$8C`B3 zLPKrcZ#ys+`p{SJi)&TyHo6jw@Jz8?k*6o&5%-;2vSob~pi^#yT}FQPkH*HZBl z`!)NMn4H6MES zw&oN=|A88V2e!9--%MxS*_v&`@VWhoPP@!5E53E#4bn9pj9A@UlYcIt{=>+FJ8|cG zs;^628Pi#ncBeR0)0cph(F$DT{Hb?deiL8?2Cfg&blSBUi=^9)t5b<{%GgB z*pPV<&zkU54O=nWB!}`5zOO;4(V6iNem*aKDnXmyJbb9A=4{wW+r`f637|<<@X&#Is@Bv7kc*+w*ujg)bey%OW$oWiXv5oj2I$wvVHy z-Y}JFeZIvpPy0;W+kIH5=Z`CXavbjO|3a^!qw%@uG?&L|PZRNIOMyzzX+28xp>>ou`ap=c#?Fw*o%zMZCVD;(S4(q>PaI~*D*n9G2 zPi=XKlEt`d)8x)no)=e3`Hn2>nQVNzohf&fl{rVKu$Sna=ZCi&I`YilnpeoxbV#U? 
zqm!fKzZ9ON*Ai%9{XI2qx+VSHKyhok-av|~nA=QTWXv52ui@@oZm z^2zx8%nyAN@_{oM?~Z6IGoHDYZKTNioj>lBt|y5i*XXMZRg3XIKrKiS11Xa1wr%SmVEy^i=hCt`cuK2lYLZ(}+TAnP zPvycpf|ctfm~)bs0Ia|i;>_D8VwXB>z1n;%I%!+)i=_EgS8`{TX-O?#!Q)l;Ci;`) zw_b0+Zi`_@*!ii&+_^HHO9q^x`M<$12-_YZ5X+}&1-TRQPZ zs~Z)GGWY6LT69OX#&dWR$&=)brM!=X5MNDA4GPzb`^_O}rvZ99tnGH##Hz{mv{Tt3 zT;U;NhoKfj7nBweRaPYvB@Ghb$`qmjM}ISY`lht6p^nt_+@dwNqSpM=Ga-J>NCVSk|9Rcz;&hXbV2Y7jqu!gJ0TOx=sA?XB)dw z(=j*ulNVWs7kSxKSrT>U2LAeR9e#WfA064=w_j3?`^KfulUEfcuR2^=lEq5g5v9_(wk2-c4V<}91iaK3t=!~Ez8XDg>bX+A zdOf4gHG=|PylB(GZyZu6ghRbdC{_Hu&zOfI4Z#i`1`qsIWvLGmO8+nZ^QT*i`AUp5 z)Wb)QGVKrJeN-Kr?#-UtKI#W2)E;YV@rCg(+G$JhVx4IpKIl(PP3ac!Y74Bmv3kz& zmimJ-bdd-gr;j>haa+#v+&L-u-P5c=v{GZU;WQNGqTmO>8`@=N*33JN}lImm3(6b zHXDJzp@ztDJnpg~+qS`M)ub5yfVfF6JJY(GpN*`-#aJbO|NM{D|EdYt2+7#F+~G8` z7(LN%S?avDNrszX=T+PeYn}VpIXF32-jg5Md=hZ=VuxBjf`lwg`KgxSlb-q|QGp%lhZfpN!1RWgx(;FaE~qGd>V)+XM9j2?ioF zd2o3kwd1JT<_iOdVs>3x3I>fF#-K~3(Vrhoy?S`~u^SD)ynL6jYge&bL8*PVZ6$t3 z2u^*F0fNw^kZwOHXs?@FQP}+yx2yjFlL*?P{dyqsZ3P7hJ--j}5Ofa);6Z*-?& zL*Ps3eTSW~nvah4*j|~5mW3Wz4XY~vmVRadZ{Qgm)GI(V$+9Qie<#Dw)B{;qwr=N$ zQu{AXWk1MR0sxNXmNjI!*=m@ZCsX|RaNcb9M^^kA`zVP8?!7N5^ljny`du&E+YMy5 zZq;|B|Ek!&l}p(V@O}VBqG#e~gTw=Ndu%0&b8x?y=@cm20Q)i=F@Qd@P3yk4|7W0t zVPp-oK(OvTEgDlidsbMMH%Q5KdTNSaP*C|?13o8Q&pq%s7lnb`X8`55-Qi1E`LIz9imahHw%$(j#lbd1K4Y{jvgf#^TdQb zNyLFJR9Q)hMwXWq08#hwu;uv5WUVDZ`^;jQ+NIs1SGK$VtigYTkeBs#L2hn5oDx7Av9b5-cvtV+(#Y`8&|5U7SP?66y08i}8vHK%49hAA1!2_Ju zYEVKX6bg4OsjL9pkqPZod+S>sN-I4qB!s_+4-;!}cp#a>Peb7k2QF&uCoA8KxPHy7 zyhmyj%ubPozbhmv>N-3yx|tTgHX#B5m?=ZcqFc@mO%2SCYO(@(8HI0ImstH??_ zsPpdAH;xQeA_oJ%+8(q$<>5mQwdAK-sV?0A#)7aMQE38J3|HdKo{W8yqj$PJT`l?C zw)jWM$@GwNo{s+L3D_(iFsG1&1VhQME$?Ab427K;4*hs&T;EFzlLs|9eReCpIaPRnN3ruXj??%i8-E{m4{ z!zn2#O0ItqrN9xl#qcq(gsYi1-X#zmmQvGQ6h(kQUqTaiPu6!CpO5;3TT5wAc6O$WGuAu{60iD@q&ajOay3T4dZBmjh%hlTm!nj?hJ3)%GeR<*sANwwI3EyJzUPLDt{EsD?^5u|nP#gM70swy`8BTgJkD{zs7O&HHOe#KS64G#kJCPx=w zT2OexhjnJW5mQORb=g;~5(T^b3j5kSE-nt@?PPNPU3^~wppcYhBoB%`IXT%BE*?`D zt_4Lz)T+I#t@KcGR4L5=)*ve8PXhxR;R%;SZ*z7;q%9Ta7;~1sIJVj=9O2>!rIbLO zoZOEoJ8S^buTFK2xo_&T&vW|mX8&Gi=R8hJt9Dp*1gYn)b$53gFxm}kOQjc1_@ixj z+1Y6X8P#5LHTBs6;Da}ZB|%fj8=){xz(4m|Tc>h1SNyuzT#P*+xOJ;G5Wpp1$-x8& z0qcD?8oUQLL130RmFY07EgU2~S29n%UjC+|!|GF3=3%2OJ3YN9 zKY#z_IGWg@Mkm<@rnmtU^YcIO^JQoDS78!|{huIG0-drhT)+hXU$DT$lgXffTUuJa z{)V5>7)mgUh6ukQb!niA(GyKaHIz2qI*<-nUJ!Dr-@kw7ga>c%O55%F%NQ|wU}rp` z?;?6S9N20#z*f(V$LI+PTAHC&+qwxqf;(48QnC`e4zVdnS^E~;oOb&N@!>=H7K&;d zR+32reZ|u5g_{ zrzjCc{3Es|Pm1FrK8`6|MISEYNWekjg-(dm*e88XKTFv{Pm)&+MX#_0Cj=OaN@on?=%U^s=U{s~P@4F-wCZ6c0rz;e@|EVU0jb}q1Z0USsx)as&d3+qwfuOyFFA+UBIuA@9XauK*PFY#}2=M08odtkTD7+ zw?Su*R5V7c98|FQC`pibzPtQ=Vq&3b_7ci7l)&@|bh$_L^x8~Z0KAj@Alxk#kP>2J zXJ9vygf;Bu*ycez;eC*V{u=FM#%`rb5Y`cZi-#u!%T)=VPPYwj;&`4dO5vlF!{;#HP+#93`WKizB~ACYGPCH(D-hfU+#~_UV z<1-<3EccidF@ImP{58l!Jr6vgl8xLQ9V>kdNyF%i;l^iAIvtJ<4<%>8>C7bM-x7GBwc*6y00%AWNfTO7HJ2y zn>GB{JnKY|SdKg_bSt8r8^TxEew`=YZz$ku{#C+@4j~Af-%_P2AFaBkZ7H=E9t08# zBQU+i?NNN}l^EejK`w`3Evf6gY1?$vI7l}P<_h{NEnP$hdwkJINaR8`mZkt&;#`^4 z8cl^&`oxH9LL7G$)&7d3yc0G+8V!r&v-_|ZC~Gu$ADOlI581~@Ny6;3{Ht$icj3GD bW6P0S-PBBvso}}HSjF|)T2V4tT - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
-Inference Report (rich table rendering of the benchmark results; columns: Experiment Name, Backend, Provider, Batch Size, Sequence Length, Forward Latency (s), Forward Throughput (samples/s); the same rows are listed in short_report.csv below)
diff --git a/examples/fast-mteb/artifacts/short_report.csv b/examples/fast-mteb/artifacts/short_report.csv
deleted file mode 100644
index c6255ea7..00000000
--- a/examples/fast-mteb/artifacts/short_report.csv
+++ /dev/null
@@ -1,52 +0,0 @@
-experiment_name,Backend,Provider,Batch Size,Sequence Length,Forward Latency (s),Forward Throughput (samples/s)
-bge_seq_len_sweep_ort_trt_fp16,OnnxRuntime,TensorrtExecutionProvider,1,16,0.000995,1010.0
-bge_seq_len_sweep_ort_cuda_o4,OnnxRuntime,CUDAExecutionProvider,1,16,0.0011,909.0
-bge_seq_len_sweep_baseline,PyTorch,,1,16,0.00729,137.0
-bge_seq_len_sweep_ort_trt_fp16,OnnxRuntime,TensorrtExecutionProvider,1,32,0.00105,952.0
-bge_seq_len_sweep_ort_cuda_o4,OnnxRuntime,CUDAExecutionProvider,1,32,0.00114,877.0
-bge_seq_len_sweep_baseline,PyTorch,,1,32,0.00669,149.0
-bge_seq_len_sweep_ort_trt_fp16,OnnxRuntime,TensorrtExecutionProvider,1,64,0.0011,909.0
-bge_seq_len_sweep_ort_cuda_o4,OnnxRuntime,CUDAExecutionProvider,1,64,0.00113,885.0
-bge_seq_len_sweep_baseline,PyTorch,,1,64,0.00689,145.0
-bge_seq_len_sweep_ort_trt_fp16,OnnxRuntime,TensorrtExecutionProvider,1,128,0.00124,806.0
-bge_seq_len_sweep_ort_cuda_o4,OnnxRuntime,CUDAExecutionProvider,1,128,0.00132,758.0
-bge_seq_len_sweep_baseline,PyTorch,,1,128,0.00689,145.0
-bge_batch_size_sweep_ort_cuda_o4,OnnxRuntime,CUDAExecutionProvider,1,256,0.00145,690.0
-bge_batch_size_sweep_ort_trt_fp16,OnnxRuntime,TensorrtExecutionProvider,1,256,0.00146,685.0
-bge_seq_len_sweep_ort_trt_fp16,OnnxRuntime,TensorrtExecutionProvider,1,256,0.00148,676.0
-bge_seq_len_sweep_ort_cuda_o4,OnnxRuntime,CUDAExecutionProvider,1,256,0.0015,667.0 -bge_batch_size_sweep_baseline,PyTorch,,1,256,0.00649,154.0 -bge_seq_len_sweep_baseline,PyTorch,,1,256,0.00687,146.0 -bge_seq_len_sweep_ort_trt_fp16,OnnxRuntime,TensorrtExecutionProvider,1,512,0.00197,508.0 -bge_seq_len_sweep_ort_cuda_o4,OnnxRuntime,CUDAExecutionProvider,1,512,0.00201,498.0 -bge_seq_len_sweep_baseline,PyTorch,,1,512,0.0096,104.0 -bge_batch_size_sweep_ort_trt_fp16,OnnxRuntime,TensorrtExecutionProvider,2,256,0.00176,1140.0 -bge_batch_size_sweep_ort_cuda_o4,OnnxRuntime,CUDAExecutionProvider,2,256,0.00195,1030.0 -bge_batch_size_sweep_baseline,PyTorch,,2,256,0.00901,222.0 -bge_batch_size_sweep_ort_trt_fp16,OnnxRuntime,TensorrtExecutionProvider,4,256,0.00259,1540.0 -bge_batch_size_sweep_ort_cuda_o4,OnnxRuntime,CUDAExecutionProvider,4,256,0.00307,1300.0 -bge_batch_size_sweep_baseline,PyTorch,,4,256,0.0155,258.0 -bge_batch_size_sweep_ort_trt_fp16,OnnxRuntime,TensorrtExecutionProvider,8,256,0.00434,1840.0 -bge_batch_size_sweep_ort_cuda_o4,OnnxRuntime,CUDAExecutionProvider,8,256,0.00486,1650.0 -bge_batch_size_sweep_baseline,PyTorch,,8,256,0.0302,265.0 -bge_batch_size_sweep_ort_trt_fp16,OnnxRuntime,TensorrtExecutionProvider,16,256,0.00811,1970.0 -bge_batch_size_sweep_ort_cuda_o4,OnnxRuntime,CUDAExecutionProvider,16,256,0.00883,1810.0 -bge_batch_size_sweep_baseline,PyTorch,,16,256,0.0537,298.0 -bge_batch_size_sweep_ort_trt_fp16,OnnxRuntime,TensorrtExecutionProvider,32,256,0.0152,2110.0 -bge_batch_size_sweep_ort_cuda_o4,OnnxRuntime,CUDAExecutionProvider,32,256,0.0167,1920.0 -bge_batch_size_sweep_baseline,PyTorch,,32,256,0.109,294.0 -bge_batch_size_sweep_ort_trt_fp16,OnnxRuntime,TensorrtExecutionProvider,64,256,0.0303,2110.0 -bge_batch_size_sweep_ort_cuda_o4,OnnxRuntime,CUDAExecutionProvider,64,256,0.0324,1980.0 -bge_batch_size_sweep_baseline,PyTorch,,64,256,0.204,314.0 -bge_batch_size_sweep_ort_trt_fp16,OnnxRuntime,TensorrtExecutionProvider,128,256,0.0584,2190.0 -bge_batch_size_sweep_ort_cuda_o4,OnnxRuntime,CUDAExecutionProvider,128,256,0.0639,2000.0 -bge_batch_size_sweep_baseline,PyTorch,,128,256,0.403,318.0 -bge_batch_size_sweep_ort_trt_fp16,OnnxRuntime,TensorrtExecutionProvider,256,256,0.137,1870.0 -bge_batch_size_sweep_ort_cuda_o4,OnnxRuntime,CUDAExecutionProvider,256,256,0.146,1750.0 -bge_batch_size_sweep_baseline,PyTorch,,256,256,0.792,323.0 -bge_batch_size_sweep_ort_cuda_o4,OnnxRuntime,CUDAExecutionProvider,512,256,0.294,1740.0 -bge_batch_size_sweep_baseline,PyTorch,,512,256,1.58,324.0 -bge_batch_size_sweep_ort_cuda_o4,OnnxRuntime,CUDAExecutionProvider,1024,256,0.595,1720.0 -bge_batch_size_sweep_baseline,PyTorch,,1024,256,3.17,323.0 -bge_batch_size_sweep_ort_cuda_o4,OnnxRuntime,CUDAExecutionProvider,2048,256,1.2,1710.0 -bge_batch_size_sweep_baseline,PyTorch,,2048,256,6.34,323.0 diff --git a/examples/fast-mteb/configs/bge_base_config.yaml b/examples/fast-mteb/configs/bge_base_config.yaml deleted file mode 100644 index 96b60263..00000000 --- a/examples/fast-mteb/configs/bge_base_config.yaml +++ /dev/null @@ -1,31 +0,0 @@ -defaults: - - benchmark: inference # default benchmark - - experiment # inheriting from experiment config - - _self_ # for hydra 1.1 compatibility - - override hydra/job_logging: colorlog # colorful logging - - override hydra/hydra_logging: colorlog # colorful logging - -hydra: - run: - dir: experiments/${experiment_name}_batch_size(${benchmark.input_shapes.batch_size})_sequence_length(${benchmark.input_shapes.sequence_length}) - sweep: - dir: 
experiments/${experiment_name}_batch_size(${benchmark.input_shapes.batch_size})_sequence_length(${benchmark.input_shapes.sequence_length}) - job: - chdir: true - env_set: - CUDA_VISIBLE_DEVICES: 0 - -experiment_name: bge_seq_len_sweep_pytorch -model: BAAI/bge-base-en-v1.5 -task: feature-extraction -device: cuda - -backend: - initial_isolation_check: false - continous_isolation_check: false - -benchmark: - memory: true - input_shapes: - batch_size: null - sequence_length: null diff --git a/examples/fast-mteb/configs/bge_batch_size_sweep_baseline.yaml b/examples/fast-mteb/configs/bge_batch_size_sweep_baseline.yaml deleted file mode 100644 index 6fd462aa..00000000 --- a/examples/fast-mteb/configs/bge_batch_size_sweep_baseline.yaml +++ /dev/null @@ -1,12 +0,0 @@ -defaults: - - backend: pytorch - - bge_base_config - - _self_ - -hydra: - sweeper: - params: - benchmark.input_shapes.batch_size: 1,2,4,8,16,32,64,128,256,512,1024,2048 - benchmark.input_shapes.sequence_length: 256 - -experiment_name: bge_batch_size_sweep_baseline diff --git a/examples/fast-mteb/configs/bge_batch_size_sweep_ort_cuda_o4.yaml b/examples/fast-mteb/configs/bge_batch_size_sweep_ort_cuda_o4.yaml deleted file mode 100644 index 8d496384..00000000 --- a/examples/fast-mteb/configs/bge_batch_size_sweep_ort_cuda_o4.yaml +++ /dev/null @@ -1,10 +0,0 @@ -defaults: - - bge_batch_size_sweep_baseline - - _self_ - - override backend: onnxruntime - -experiment_name: bge_batch_size_sweep_ort_cuda_o4 - -backend: - provider: CUDAExecutionProvider - auto_optimization: O4 diff --git a/examples/fast-mteb/configs/bge_batch_size_sweep_ort_trt_fp16.yaml b/examples/fast-mteb/configs/bge_batch_size_sweep_ort_trt_fp16.yaml deleted file mode 100644 index 743daa4d..00000000 --- a/examples/fast-mteb/configs/bge_batch_size_sweep_ort_trt_fp16.yaml +++ /dev/null @@ -1,11 +0,0 @@ -defaults: - - bge_batch_size_sweep_baseline - - _self_ - - override backend: onnxruntime - -experiment_name: bge_batch_size_sweep_ort_trt_fp16 - -backend: - provider: TensorrtExecutionProvider - provider_options: - trt_fp16_enable: true diff --git a/examples/fast-mteb/configs/bge_seq_len_sweep_baseline.yaml b/examples/fast-mteb/configs/bge_seq_len_sweep_baseline.yaml deleted file mode 100644 index 3c0efe2d..00000000 --- a/examples/fast-mteb/configs/bge_seq_len_sweep_baseline.yaml +++ /dev/null @@ -1,12 +0,0 @@ -defaults: - - backend: pytorch - - bge_base_config - - _self_ - -hydra: - sweeper: - params: - benchmark.input_shapes.batch_size: 1 - benchmark.input_shapes.sequence_length: 16,32,64,128,256,512 - -experiment_name: bge_seq_len_sweep_baseline diff --git a/examples/fast-mteb/configs/bge_seq_len_sweep_ort_cuda_o4.yaml b/examples/fast-mteb/configs/bge_seq_len_sweep_ort_cuda_o4.yaml deleted file mode 100644 index d9a053b2..00000000 --- a/examples/fast-mteb/configs/bge_seq_len_sweep_ort_cuda_o4.yaml +++ /dev/null @@ -1,10 +0,0 @@ -defaults: - - bge_seq_len_sweep_baseline - - _self_ - - override backend: onnxruntime - -experiment_name: bge_seq_len_sweep_ort_cuda_o4 - -backend: - provider: CUDAExecutionProvider - auto_optimization: O4 diff --git a/examples/fast-mteb/configs/bge_seq_len_sweep_ort_trt_fp16.yaml b/examples/fast-mteb/configs/bge_seq_len_sweep_ort_trt_fp16.yaml deleted file mode 100644 index 191d6614..00000000 --- a/examples/fast-mteb/configs/bge_seq_len_sweep_ort_trt_fp16.yaml +++ /dev/null @@ -1,11 +0,0 @@ -defaults: - - bge_seq_len_sweep_baseline - - _self_ - - override backend: onnxruntime - -experiment_name: bge_seq_len_sweep_ort_trt_fp16 - -backend: - provider: 
TensorrtExecutionProvider - provider_options: - trt_fp16_enable: true diff --git a/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(1)_sequence_length(256)/0/.hydra/config.yaml b/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(1)_sequence_length(256)/0/.hydra/config.yaml deleted file mode 100644 index 4a0d3af7..00000000 --- a/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(1)_sequence_length(256)/0/.hydra/config.yaml +++ /dev/null @@ -1,69 +0,0 @@ -backend: - name: pytorch - version: ${pytorch_version:} - _target_: optimum_benchmark.backends.pytorch.backend.PyTorchBackend - seed: 42 - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: false - continous_isolation_check: false - delete_cache: false - no_weights: false - device_map: null - torch_dtype: null - disable_grad: ${is_inference:${benchmark.name}} - eval_mode: ${is_inference:${benchmark.name}} - amp_autocast: false - amp_dtype: null - torch_compile: false - torch_compile_config: {} - bettertransformer: false - quantization_scheme: null - quantization_config: {} - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 1 - sequence_length: 256 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: null - can_diffuse: ${can_diffuse:${task}} - can_generate: ${can_generate:${task}} - forward_kwargs: {} - generate_kwargs: {} -experiment_name: bge_batch_size_sweep_baseline -model: BAAI/bge-base-en-v1.5 -device: cuda -task: feature-extraction -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.0 - transformers_version: 4.34.0.dev0 - accelerate_version: 0.23.0.dev0 - diffusers_version: 0.21.0.dev0 - python_version: 3.8.10 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB diff --git a/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(1)_sequence_length(256)/0/.hydra/hydra.yaml b/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(1)_sequence_length(256)/0/.hydra/hydra.yaml deleted file mode 100644 index 7a1a42b2..00000000 --- a/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(1)_sequence_length(256)/0/.hydra/hydra.yaml +++ /dev/null @@ -1,175 +0,0 @@ -hydra: - run: - dir: experiments/${experiment_name}_batch_size(${benchmark.input_shapes.batch_size})_sequence_length(${benchmark.input_shapes.sequence_length}) - sweep: - dir: experiments/${experiment_name}_batch_size(${benchmark.input_shapes.batch_size})_sequence_length(${benchmark.input_shapes.sequence_length}) - subdir: ${hydra.job.num} - launcher: - _target_: hydra._internal.core_plugins.basic_launcher.BasicLauncher - sweeper: - _target_: hydra._internal.core_plugins.basic_sweeper.BasicSweeper - max_batch_size: null - params: - benchmark.input_shapes.batch_size: 1,2,4,8,16,32,64,128,256,512,1024,2048 - benchmark.input_shapes.sequence_length: '256' - help: - app_name: ${hydra.job.name} - header: '${hydra.help.app_name} is powered by Hydra. 
- - ' - footer: 'Powered by Hydra (https://hydra.cc) - - Use --hydra-help to view Hydra specific help - - ' - template: '${hydra.help.header} - - == Configuration groups == - - Compose your configuration from those groups (group=option) - - - $APP_CONFIG_GROUPS - - - == Config == - - Override anything in the config (foo.bar=value) - - - $CONFIG - - - ${hydra.help.footer} - - ' - hydra_help: - template: 'Hydra (${hydra.runtime.version}) - - See https://hydra.cc for more info. - - - == Flags == - - $FLAGS_HELP - - - == Configuration groups == - - Compose your configuration from those groups (For example, append hydra/job_logging=disabled - to command line) - - - $HYDRA_CONFIG_GROUPS - - - Use ''--cfg hydra'' to Show the Hydra config. - - ' - hydra_help: ??? - hydra_logging: - version: 1 - formatters: - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s' - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - root: - level: INFO - handlers: - - console - disable_existing_loggers: false - job_logging: - version: 1 - formatters: - simple: - format: '[%(asctime)s][%(name)s][%(levelname)s] - %(message)s' - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s] - - %(message)s' - log_colors: - DEBUG: purple - INFO: green - WARNING: yellow - ERROR: red - CRITICAL: red - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - file: - class: logging.FileHandler - formatter: simple - filename: ${hydra.job.name}.log - root: - level: INFO - handlers: - - console - - file - disable_existing_loggers: false - env: {} - mode: MULTIRUN - searchpath: [] - callbacks: {} - output_subdir: .hydra - overrides: - hydra: - - hydra.mode=MULTIRUN - task: - - benchmark.input_shapes.batch_size=1 - - benchmark.input_shapes.sequence_length=256 - job: - name: experiment - chdir: true - override_dirname: benchmark.input_shapes.batch_size=1,benchmark.input_shapes.sequence_length=256 - id: '0' - num: 0 - config_name: bge_batch_size_sweep_baseline.yaml - env_set: - CUDA_VISIBLE_DEVICES: '3' - env_copy: [] - config: - override_dirname: - kv_sep: '=' - item_sep: ',' - exclude_keys: [] - runtime: - version: 1.3.2 - version_base: '1.3' - cwd: /workspace/optimum-benchmark/examples/fast-embedders - config_sources: - - path: hydra.conf - schema: pkg - provider: hydra - - path: optimum_benchmark - schema: pkg - provider: main - - path: hydra_plugins.hydra_colorlog.conf - schema: pkg - provider: hydra-colorlog - - path: /workspace/optimum-benchmark/examples/fast-embedders/configs - schema: file - provider: command-line - - path: '' - schema: structured - provider: schema - output_dir: /workspace/optimum-benchmark/examples/fast-embedders/experiments/bge_batch_size_sweep_baseline_batch_size(1)_sequence_length(256)/0 - choices: - benchmark: inference - backend: pytorch - hydra/env: default - hydra/callbacks: null - hydra/job_logging: colorlog - hydra/hydra_logging: colorlog - hydra/hydra_help: default - hydra/help: default - hydra/sweeper: basic - hydra/launcher: basic - hydra/output: default - verbose: false diff --git a/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(1)_sequence_length(256)/0/.hydra/overrides.yaml b/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(1)_sequence_length(256)/0/.hydra/overrides.yaml deleted file mode 100644 
index 68453b03..00000000 --- a/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(1)_sequence_length(256)/0/.hydra/overrides.yaml +++ /dev/null @@ -1,2 +0,0 @@ -- benchmark.input_shapes.batch_size=1 -- benchmark.input_shapes.sequence_length=256 diff --git a/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(1)_sequence_length(256)/0/hydra_config.yaml b/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(1)_sequence_length(256)/0/hydra_config.yaml deleted file mode 100644 index a8374b0b..00000000 --- a/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(1)_sequence_length(256)/0/hydra_config.yaml +++ /dev/null @@ -1,69 +0,0 @@ -backend: - name: pytorch - version: 2.0.1 - _target_: optimum_benchmark.backends.pytorch.backend.PyTorchBackend - seed: 42 - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: false - continous_isolation_check: false - delete_cache: false - no_weights: false - device_map: null - torch_dtype: null - disable_grad: true - eval_mode: true - amp_autocast: false - amp_dtype: null - torch_compile: false - torch_compile_config: {} - bettertransformer: false - quantization_scheme: null - quantization_config: {} - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 1 - sequence_length: 256 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: null - can_diffuse: false - can_generate: false - forward_kwargs: {} - generate_kwargs: {} -experiment_name: bge_batch_size_sweep_baseline -model: BAAI/bge-base-en-v1.5 -device: cuda -task: feature-extraction -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.0 - transformers_version: 4.34.0.dev0 - accelerate_version: 0.23.0.dev0 - diffusers_version: 0.21.0.dev0 - python_version: 3.8.10 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB diff --git a/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(1)_sequence_length(256)/0/inference_results.csv b/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(1)_sequence_length(256)/0/inference_results.csv deleted file mode 100644 index 64e5148b..00000000 --- a/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(1)_sequence_length(256)/0/inference_results.csv +++ /dev/null @@ -1,2 +0,0 @@ -,forward.latency(s),forward.throughput(samples/s),forward.peak_memory(MB) -0,0.00649,154.0,1890 diff --git a/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(1024)_sequence_length(256)/10/.hydra/config.yaml b/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(1024)_sequence_length(256)/10/.hydra/config.yaml deleted file mode 100644 index dfa93908..00000000 --- a/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(1024)_sequence_length(256)/10/.hydra/config.yaml +++ /dev/null @@ -1,69 +0,0 @@ -backend: - name: pytorch - version: ${pytorch_version:} - _target_: optimum_benchmark.backends.pytorch.backend.PyTorchBackend - seed: 42 - inter_op_num_threads: null - 
intra_op_num_threads: null - initial_isolation_check: false - continous_isolation_check: false - delete_cache: false - no_weights: false - device_map: null - torch_dtype: null - disable_grad: ${is_inference:${benchmark.name}} - eval_mode: ${is_inference:${benchmark.name}} - amp_autocast: false - amp_dtype: null - torch_compile: false - torch_compile_config: {} - bettertransformer: false - quantization_scheme: null - quantization_config: {} - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 1024 - sequence_length: 256 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: null - can_diffuse: ${can_diffuse:${task}} - can_generate: ${can_generate:${task}} - forward_kwargs: {} - generate_kwargs: {} -experiment_name: bge_batch_size_sweep_baseline -model: BAAI/bge-base-en-v1.5 -device: cuda -task: feature-extraction -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.0 - transformers_version: 4.34.0.dev0 - accelerate_version: 0.23.0.dev0 - diffusers_version: 0.21.0.dev0 - python_version: 3.8.10 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB diff --git a/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(1024)_sequence_length(256)/10/.hydra/hydra.yaml b/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(1024)_sequence_length(256)/10/.hydra/hydra.yaml deleted file mode 100644 index 6245d526..00000000 --- a/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(1024)_sequence_length(256)/10/.hydra/hydra.yaml +++ /dev/null @@ -1,175 +0,0 @@ -hydra: - run: - dir: experiments/${experiment_name}_batch_size(${benchmark.input_shapes.batch_size})_sequence_length(${benchmark.input_shapes.sequence_length}) - sweep: - dir: experiments/${experiment_name}_batch_size(${benchmark.input_shapes.batch_size})_sequence_length(${benchmark.input_shapes.sequence_length}) - subdir: ${hydra.job.num} - launcher: - _target_: hydra._internal.core_plugins.basic_launcher.BasicLauncher - sweeper: - _target_: hydra._internal.core_plugins.basic_sweeper.BasicSweeper - max_batch_size: null - params: - benchmark.input_shapes.batch_size: 1,2,4,8,16,32,64,128,256,512,1024,2048 - benchmark.input_shapes.sequence_length: '256' - help: - app_name: ${hydra.job.name} - header: '${hydra.help.app_name} is powered by Hydra. - - ' - footer: 'Powered by Hydra (https://hydra.cc) - - Use --hydra-help to view Hydra specific help - - ' - template: '${hydra.help.header} - - == Configuration groups == - - Compose your configuration from those groups (group=option) - - - $APP_CONFIG_GROUPS - - - == Config == - - Override anything in the config (foo.bar=value) - - - $CONFIG - - - ${hydra.help.footer} - - ' - hydra_help: - template: 'Hydra (${hydra.runtime.version}) - - See https://hydra.cc for more info. - - - == Flags == - - $FLAGS_HELP - - - == Configuration groups == - - Compose your configuration from those groups (For example, append hydra/job_logging=disabled - to command line) - - - $HYDRA_CONFIG_GROUPS - - - Use ''--cfg hydra'' to Show the Hydra config. - - ' - hydra_help: ??? 
- hydra_logging: - version: 1 - formatters: - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s' - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - root: - level: INFO - handlers: - - console - disable_existing_loggers: false - job_logging: - version: 1 - formatters: - simple: - format: '[%(asctime)s][%(name)s][%(levelname)s] - %(message)s' - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s] - - %(message)s' - log_colors: - DEBUG: purple - INFO: green - WARNING: yellow - ERROR: red - CRITICAL: red - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - file: - class: logging.FileHandler - formatter: simple - filename: ${hydra.job.name}.log - root: - level: INFO - handlers: - - console - - file - disable_existing_loggers: false - env: {} - mode: MULTIRUN - searchpath: [] - callbacks: {} - output_subdir: .hydra - overrides: - hydra: - - hydra.mode=MULTIRUN - task: - - benchmark.input_shapes.batch_size=1024 - - benchmark.input_shapes.sequence_length=256 - job: - name: experiment - chdir: true - override_dirname: benchmark.input_shapes.batch_size=1024,benchmark.input_shapes.sequence_length=256 - id: '10' - num: 10 - config_name: bge_batch_size_sweep_baseline.yaml - env_set: - CUDA_VISIBLE_DEVICES: '3' - env_copy: [] - config: - override_dirname: - kv_sep: '=' - item_sep: ',' - exclude_keys: [] - runtime: - version: 1.3.2 - version_base: '1.3' - cwd: /workspace/optimum-benchmark/examples/fast-embedders - config_sources: - - path: hydra.conf - schema: pkg - provider: hydra - - path: optimum_benchmark - schema: pkg - provider: main - - path: hydra_plugins.hydra_colorlog.conf - schema: pkg - provider: hydra-colorlog - - path: /workspace/optimum-benchmark/examples/fast-embedders/configs - schema: file - provider: command-line - - path: '' - schema: structured - provider: schema - output_dir: /workspace/optimum-benchmark/examples/fast-embedders/experiments/bge_batch_size_sweep_baseline_batch_size(1024)_sequence_length(256)/10 - choices: - benchmark: inference - backend: pytorch - hydra/env: default - hydra/callbacks: null - hydra/job_logging: colorlog - hydra/hydra_logging: colorlog - hydra/hydra_help: default - hydra/help: default - hydra/sweeper: basic - hydra/launcher: basic - hydra/output: default - verbose: false diff --git a/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(1024)_sequence_length(256)/10/.hydra/overrides.yaml b/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(1024)_sequence_length(256)/10/.hydra/overrides.yaml deleted file mode 100644 index 874d4c51..00000000 --- a/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(1024)_sequence_length(256)/10/.hydra/overrides.yaml +++ /dev/null @@ -1,2 +0,0 @@ -- benchmark.input_shapes.batch_size=1024 -- benchmark.input_shapes.sequence_length=256 diff --git a/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(1024)_sequence_length(256)/10/hydra_config.yaml b/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(1024)_sequence_length(256)/10/hydra_config.yaml deleted file mode 100644 index 176cbe80..00000000 --- a/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(1024)_sequence_length(256)/10/hydra_config.yaml +++ /dev/null @@ -1,69 +0,0 @@ -backend: 
- name: pytorch - version: 2.0.1 - _target_: optimum_benchmark.backends.pytorch.backend.PyTorchBackend - seed: 42 - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: false - continous_isolation_check: false - delete_cache: false - no_weights: false - device_map: null - torch_dtype: null - disable_grad: true - eval_mode: true - amp_autocast: false - amp_dtype: null - torch_compile: false - torch_compile_config: {} - bettertransformer: false - quantization_scheme: null - quantization_config: {} - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 1024 - sequence_length: 256 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: null - can_diffuse: false - can_generate: false - forward_kwargs: {} - generate_kwargs: {} -experiment_name: bge_batch_size_sweep_baseline -model: BAAI/bge-base-en-v1.5 -device: cuda -task: feature-extraction -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.0 - transformers_version: 4.34.0.dev0 - accelerate_version: 0.23.0.dev0 - diffusers_version: 0.21.0.dev0 - python_version: 3.8.10 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB diff --git a/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(1024)_sequence_length(256)/10/inference_results.csv b/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(1024)_sequence_length(256)/10/inference_results.csv deleted file mode 100644 index 13937065..00000000 --- a/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(1024)_sequence_length(256)/10/inference_results.csv +++ /dev/null @@ -1,2 +0,0 @@ -,forward.latency(s),forward.throughput(samples/s),forward.peak_memory(MB) -0,3.17,323.0,15576 diff --git a/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(128)_sequence_length(256)/7/.hydra/config.yaml b/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(128)_sequence_length(256)/7/.hydra/config.yaml deleted file mode 100644 index dcf20c08..00000000 --- a/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(128)_sequence_length(256)/7/.hydra/config.yaml +++ /dev/null @@ -1,69 +0,0 @@ -backend: - name: pytorch - version: ${pytorch_version:} - _target_: optimum_benchmark.backends.pytorch.backend.PyTorchBackend - seed: 42 - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: false - continous_isolation_check: false - delete_cache: false - no_weights: false - device_map: null - torch_dtype: null - disable_grad: ${is_inference:${benchmark.name}} - eval_mode: ${is_inference:${benchmark.name}} - amp_autocast: false - amp_dtype: null - torch_compile: false - torch_compile_config: {} - bettertransformer: false - quantization_scheme: null - quantization_config: {} - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 128 - 
sequence_length: 256 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: null - can_diffuse: ${can_diffuse:${task}} - can_generate: ${can_generate:${task}} - forward_kwargs: {} - generate_kwargs: {} -experiment_name: bge_batch_size_sweep_baseline -model: BAAI/bge-base-en-v1.5 -device: cuda -task: feature-extraction -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.0 - transformers_version: 4.34.0.dev0 - accelerate_version: 0.23.0.dev0 - diffusers_version: 0.21.0.dev0 - python_version: 3.8.10 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB diff --git a/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(128)_sequence_length(256)/7/.hydra/hydra.yaml b/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(128)_sequence_length(256)/7/.hydra/hydra.yaml deleted file mode 100644 index 0af85235..00000000 --- a/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(128)_sequence_length(256)/7/.hydra/hydra.yaml +++ /dev/null @@ -1,175 +0,0 @@ -hydra: - run: - dir: experiments/${experiment_name}_batch_size(${benchmark.input_shapes.batch_size})_sequence_length(${benchmark.input_shapes.sequence_length}) - sweep: - dir: experiments/${experiment_name}_batch_size(${benchmark.input_shapes.batch_size})_sequence_length(${benchmark.input_shapes.sequence_length}) - subdir: ${hydra.job.num} - launcher: - _target_: hydra._internal.core_plugins.basic_launcher.BasicLauncher - sweeper: - _target_: hydra._internal.core_plugins.basic_sweeper.BasicSweeper - max_batch_size: null - params: - benchmark.input_shapes.batch_size: 1,2,4,8,16,32,64,128,256,512,1024,2048 - benchmark.input_shapes.sequence_length: '256' - help: - app_name: ${hydra.job.name} - header: '${hydra.help.app_name} is powered by Hydra. - - ' - footer: 'Powered by Hydra (https://hydra.cc) - - Use --hydra-help to view Hydra specific help - - ' - template: '${hydra.help.header} - - == Configuration groups == - - Compose your configuration from those groups (group=option) - - - $APP_CONFIG_GROUPS - - - == Config == - - Override anything in the config (foo.bar=value) - - - $CONFIG - - - ${hydra.help.footer} - - ' - hydra_help: - template: 'Hydra (${hydra.runtime.version}) - - See https://hydra.cc for more info. - - - == Flags == - - $FLAGS_HELP - - - == Configuration groups == - - Compose your configuration from those groups (For example, append hydra/job_logging=disabled - to command line) - - - $HYDRA_CONFIG_GROUPS - - - Use ''--cfg hydra'' to Show the Hydra config. - - ' - hydra_help: ??? 
- hydra_logging: - version: 1 - formatters: - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s' - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - root: - level: INFO - handlers: - - console - disable_existing_loggers: false - job_logging: - version: 1 - formatters: - simple: - format: '[%(asctime)s][%(name)s][%(levelname)s] - %(message)s' - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s] - - %(message)s' - log_colors: - DEBUG: purple - INFO: green - WARNING: yellow - ERROR: red - CRITICAL: red - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - file: - class: logging.FileHandler - formatter: simple - filename: ${hydra.job.name}.log - root: - level: INFO - handlers: - - console - - file - disable_existing_loggers: false - env: {} - mode: MULTIRUN - searchpath: [] - callbacks: {} - output_subdir: .hydra - overrides: - hydra: - - hydra.mode=MULTIRUN - task: - - benchmark.input_shapes.batch_size=128 - - benchmark.input_shapes.sequence_length=256 - job: - name: experiment - chdir: true - override_dirname: benchmark.input_shapes.batch_size=128,benchmark.input_shapes.sequence_length=256 - id: '7' - num: 7 - config_name: bge_batch_size_sweep_baseline.yaml - env_set: - CUDA_VISIBLE_DEVICES: '3' - env_copy: [] - config: - override_dirname: - kv_sep: '=' - item_sep: ',' - exclude_keys: [] - runtime: - version: 1.3.2 - version_base: '1.3' - cwd: /workspace/optimum-benchmark/examples/fast-embedders - config_sources: - - path: hydra.conf - schema: pkg - provider: hydra - - path: optimum_benchmark - schema: pkg - provider: main - - path: hydra_plugins.hydra_colorlog.conf - schema: pkg - provider: hydra-colorlog - - path: /workspace/optimum-benchmark/examples/fast-embedders/configs - schema: file - provider: command-line - - path: '' - schema: structured - provider: schema - output_dir: /workspace/optimum-benchmark/examples/fast-embedders/experiments/bge_batch_size_sweep_baseline_batch_size(128)_sequence_length(256)/7 - choices: - benchmark: inference - backend: pytorch - hydra/env: default - hydra/callbacks: null - hydra/job_logging: colorlog - hydra/hydra_logging: colorlog - hydra/hydra_help: default - hydra/help: default - hydra/sweeper: basic - hydra/launcher: basic - hydra/output: default - verbose: false diff --git a/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(128)_sequence_length(256)/7/.hydra/overrides.yaml b/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(128)_sequence_length(256)/7/.hydra/overrides.yaml deleted file mode 100644 index 459be207..00000000 --- a/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(128)_sequence_length(256)/7/.hydra/overrides.yaml +++ /dev/null @@ -1,2 +0,0 @@ -- benchmark.input_shapes.batch_size=128 -- benchmark.input_shapes.sequence_length=256 diff --git a/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(128)_sequence_length(256)/7/hydra_config.yaml b/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(128)_sequence_length(256)/7/hydra_config.yaml deleted file mode 100644 index 967d4978..00000000 --- a/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(128)_sequence_length(256)/7/hydra_config.yaml +++ /dev/null @@ -1,69 +0,0 @@ -backend: - name: pytorch - 
version: 2.0.1 - _target_: optimum_benchmark.backends.pytorch.backend.PyTorchBackend - seed: 42 - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: false - continous_isolation_check: false - delete_cache: false - no_weights: false - device_map: null - torch_dtype: null - disable_grad: true - eval_mode: true - amp_autocast: false - amp_dtype: null - torch_compile: false - torch_compile_config: {} - bettertransformer: false - quantization_scheme: null - quantization_config: {} - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 128 - sequence_length: 256 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: null - can_diffuse: false - can_generate: false - forward_kwargs: {} - generate_kwargs: {} -experiment_name: bge_batch_size_sweep_baseline -model: BAAI/bge-base-en-v1.5 -device: cuda -task: feature-extraction -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.0 - transformers_version: 4.34.0.dev0 - accelerate_version: 0.23.0.dev0 - diffusers_version: 0.21.0.dev0 - python_version: 3.8.10 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB diff --git a/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(128)_sequence_length(256)/7/inference_results.csv b/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(128)_sequence_length(256)/7/inference_results.csv deleted file mode 100644 index f1aea2af..00000000 --- a/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(128)_sequence_length(256)/7/inference_results.csv +++ /dev/null @@ -1,2 +0,0 @@ -,forward.latency(s),forward.throughput(samples/s),forward.peak_memory(MB) -0,0.403,318.0,3494 diff --git a/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(16)_sequence_length(256)/4/.hydra/config.yaml b/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(16)_sequence_length(256)/4/.hydra/config.yaml deleted file mode 100644 index d1415b57..00000000 --- a/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(16)_sequence_length(256)/4/.hydra/config.yaml +++ /dev/null @@ -1,69 +0,0 @@ -backend: - name: pytorch - version: ${pytorch_version:} - _target_: optimum_benchmark.backends.pytorch.backend.PyTorchBackend - seed: 42 - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: false - continous_isolation_check: false - delete_cache: false - no_weights: false - device_map: null - torch_dtype: null - disable_grad: ${is_inference:${benchmark.name}} - eval_mode: ${is_inference:${benchmark.name}} - amp_autocast: false - amp_dtype: null - torch_compile: false - torch_compile_config: {} - bettertransformer: false - quantization_scheme: null - quantization_config: {} - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 16 - sequence_length: 256 - num_choices: 
1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: null - can_diffuse: ${can_diffuse:${task}} - can_generate: ${can_generate:${task}} - forward_kwargs: {} - generate_kwargs: {} -experiment_name: bge_batch_size_sweep_baseline -model: BAAI/bge-base-en-v1.5 -device: cuda -task: feature-extraction -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.0 - transformers_version: 4.34.0.dev0 - accelerate_version: 0.23.0.dev0 - diffusers_version: 0.21.0.dev0 - python_version: 3.8.10 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB diff --git a/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(16)_sequence_length(256)/4/.hydra/hydra.yaml b/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(16)_sequence_length(256)/4/.hydra/hydra.yaml deleted file mode 100644 index ee269ee2..00000000 --- a/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(16)_sequence_length(256)/4/.hydra/hydra.yaml +++ /dev/null @@ -1,175 +0,0 @@ -hydra: - run: - dir: experiments/${experiment_name}_batch_size(${benchmark.input_shapes.batch_size})_sequence_length(${benchmark.input_shapes.sequence_length}) - sweep: - dir: experiments/${experiment_name}_batch_size(${benchmark.input_shapes.batch_size})_sequence_length(${benchmark.input_shapes.sequence_length}) - subdir: ${hydra.job.num} - launcher: - _target_: hydra._internal.core_plugins.basic_launcher.BasicLauncher - sweeper: - _target_: hydra._internal.core_plugins.basic_sweeper.BasicSweeper - max_batch_size: null - params: - benchmark.input_shapes.batch_size: 1,2,4,8,16,32,64,128,256,512,1024,2048 - benchmark.input_shapes.sequence_length: '256' - help: - app_name: ${hydra.job.name} - header: '${hydra.help.app_name} is powered by Hydra. - - ' - footer: 'Powered by Hydra (https://hydra.cc) - - Use --hydra-help to view Hydra specific help - - ' - template: '${hydra.help.header} - - == Configuration groups == - - Compose your configuration from those groups (group=option) - - - $APP_CONFIG_GROUPS - - - == Config == - - Override anything in the config (foo.bar=value) - - - $CONFIG - - - ${hydra.help.footer} - - ' - hydra_help: - template: 'Hydra (${hydra.runtime.version}) - - See https://hydra.cc for more info. - - - == Flags == - - $FLAGS_HELP - - - == Configuration groups == - - Compose your configuration from those groups (For example, append hydra/job_logging=disabled - to command line) - - - $HYDRA_CONFIG_GROUPS - - - Use ''--cfg hydra'' to Show the Hydra config. - - ' - hydra_help: ??? 
- hydra_logging: - version: 1 - formatters: - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s' - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - root: - level: INFO - handlers: - - console - disable_existing_loggers: false - job_logging: - version: 1 - formatters: - simple: - format: '[%(asctime)s][%(name)s][%(levelname)s] - %(message)s' - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s] - - %(message)s' - log_colors: - DEBUG: purple - INFO: green - WARNING: yellow - ERROR: red - CRITICAL: red - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - file: - class: logging.FileHandler - formatter: simple - filename: ${hydra.job.name}.log - root: - level: INFO - handlers: - - console - - file - disable_existing_loggers: false - env: {} - mode: MULTIRUN - searchpath: [] - callbacks: {} - output_subdir: .hydra - overrides: - hydra: - - hydra.mode=MULTIRUN - task: - - benchmark.input_shapes.batch_size=16 - - benchmark.input_shapes.sequence_length=256 - job: - name: experiment - chdir: true - override_dirname: benchmark.input_shapes.batch_size=16,benchmark.input_shapes.sequence_length=256 - id: '4' - num: 4 - config_name: bge_batch_size_sweep_baseline.yaml - env_set: - CUDA_VISIBLE_DEVICES: '3' - env_copy: [] - config: - override_dirname: - kv_sep: '=' - item_sep: ',' - exclude_keys: [] - runtime: - version: 1.3.2 - version_base: '1.3' - cwd: /workspace/optimum-benchmark/examples/fast-embedders - config_sources: - - path: hydra.conf - schema: pkg - provider: hydra - - path: optimum_benchmark - schema: pkg - provider: main - - path: hydra_plugins.hydra_colorlog.conf - schema: pkg - provider: hydra-colorlog - - path: /workspace/optimum-benchmark/examples/fast-embedders/configs - schema: file - provider: command-line - - path: '' - schema: structured - provider: schema - output_dir: /workspace/optimum-benchmark/examples/fast-embedders/experiments/bge_batch_size_sweep_baseline_batch_size(16)_sequence_length(256)/4 - choices: - benchmark: inference - backend: pytorch - hydra/env: default - hydra/callbacks: null - hydra/job_logging: colorlog - hydra/hydra_logging: colorlog - hydra/hydra_help: default - hydra/help: default - hydra/sweeper: basic - hydra/launcher: basic - hydra/output: default - verbose: false diff --git a/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(16)_sequence_length(256)/4/.hydra/overrides.yaml b/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(16)_sequence_length(256)/4/.hydra/overrides.yaml deleted file mode 100644 index 9e742a69..00000000 --- a/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(16)_sequence_length(256)/4/.hydra/overrides.yaml +++ /dev/null @@ -1,2 +0,0 @@ -- benchmark.input_shapes.batch_size=16 -- benchmark.input_shapes.sequence_length=256 diff --git a/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(16)_sequence_length(256)/4/hydra_config.yaml b/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(16)_sequence_length(256)/4/hydra_config.yaml deleted file mode 100644 index c77066b1..00000000 --- a/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(16)_sequence_length(256)/4/hydra_config.yaml +++ /dev/null @@ -1,69 +0,0 @@ -backend: - name: pytorch - version: 
2.0.1 - _target_: optimum_benchmark.backends.pytorch.backend.PyTorchBackend - seed: 42 - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: false - continous_isolation_check: false - delete_cache: false - no_weights: false - device_map: null - torch_dtype: null - disable_grad: true - eval_mode: true - amp_autocast: false - amp_dtype: null - torch_compile: false - torch_compile_config: {} - bettertransformer: false - quantization_scheme: null - quantization_config: {} - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 16 - sequence_length: 256 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: null - can_diffuse: false - can_generate: false - forward_kwargs: {} - generate_kwargs: {} -experiment_name: bge_batch_size_sweep_baseline -model: BAAI/bge-base-en-v1.5 -device: cuda -task: feature-extraction -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.0 - transformers_version: 4.34.0.dev0 - accelerate_version: 0.23.0.dev0 - diffusers_version: 0.21.0.dev0 - python_version: 3.8.10 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB diff --git a/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(16)_sequence_length(256)/4/inference_results.csv b/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(16)_sequence_length(256)/4/inference_results.csv deleted file mode 100644 index d5d39469..00000000 --- a/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(16)_sequence_length(256)/4/inference_results.csv +++ /dev/null @@ -1,2 +0,0 @@ -,forward.latency(s),forward.throughput(samples/s),forward.peak_memory(MB) -0,0.0537,298.0,2083 diff --git a/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(2)_sequence_length(256)/1/.hydra/config.yaml b/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(2)_sequence_length(256)/1/.hydra/config.yaml deleted file mode 100644 index e7e7cc9d..00000000 --- a/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(2)_sequence_length(256)/1/.hydra/config.yaml +++ /dev/null @@ -1,69 +0,0 @@ -backend: - name: pytorch - version: ${pytorch_version:} - _target_: optimum_benchmark.backends.pytorch.backend.PyTorchBackend - seed: 42 - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: false - continous_isolation_check: false - delete_cache: false - no_weights: false - device_map: null - torch_dtype: null - disable_grad: ${is_inference:${benchmark.name}} - eval_mode: ${is_inference:${benchmark.name}} - amp_autocast: false - amp_dtype: null - torch_compile: false - torch_compile_config: {} - bettertransformer: false - quantization_scheme: null - quantization_config: {} - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 2 - sequence_length: 256 - num_choices: 1 - 
feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: null - can_diffuse: ${can_diffuse:${task}} - can_generate: ${can_generate:${task}} - forward_kwargs: {} - generate_kwargs: {} -experiment_name: bge_batch_size_sweep_baseline -model: BAAI/bge-base-en-v1.5 -device: cuda -task: feature-extraction -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.0 - transformers_version: 4.34.0.dev0 - accelerate_version: 0.23.0.dev0 - diffusers_version: 0.21.0.dev0 - python_version: 3.8.10 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB diff --git a/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(2)_sequence_length(256)/1/.hydra/hydra.yaml b/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(2)_sequence_length(256)/1/.hydra/hydra.yaml deleted file mode 100644 index 03514e6a..00000000 --- a/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(2)_sequence_length(256)/1/.hydra/hydra.yaml +++ /dev/null @@ -1,175 +0,0 @@ -hydra: - run: - dir: experiments/${experiment_name}_batch_size(${benchmark.input_shapes.batch_size})_sequence_length(${benchmark.input_shapes.sequence_length}) - sweep: - dir: experiments/${experiment_name}_batch_size(${benchmark.input_shapes.batch_size})_sequence_length(${benchmark.input_shapes.sequence_length}) - subdir: ${hydra.job.num} - launcher: - _target_: hydra._internal.core_plugins.basic_launcher.BasicLauncher - sweeper: - _target_: hydra._internal.core_plugins.basic_sweeper.BasicSweeper - max_batch_size: null - params: - benchmark.input_shapes.batch_size: 1,2,4,8,16,32,64,128,256,512,1024,2048 - benchmark.input_shapes.sequence_length: '256' - help: - app_name: ${hydra.job.name} - header: '${hydra.help.app_name} is powered by Hydra. - - ' - footer: 'Powered by Hydra (https://hydra.cc) - - Use --hydra-help to view Hydra specific help - - ' - template: '${hydra.help.header} - - == Configuration groups == - - Compose your configuration from those groups (group=option) - - - $APP_CONFIG_GROUPS - - - == Config == - - Override anything in the config (foo.bar=value) - - - $CONFIG - - - ${hydra.help.footer} - - ' - hydra_help: - template: 'Hydra (${hydra.runtime.version}) - - See https://hydra.cc for more info. - - - == Flags == - - $FLAGS_HELP - - - == Configuration groups == - - Compose your configuration from those groups (For example, append hydra/job_logging=disabled - to command line) - - - $HYDRA_CONFIG_GROUPS - - - Use ''--cfg hydra'' to Show the Hydra config. - - ' - hydra_help: ??? 
- hydra_logging: - version: 1 - formatters: - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s' - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - root: - level: INFO - handlers: - - console - disable_existing_loggers: false - job_logging: - version: 1 - formatters: - simple: - format: '[%(asctime)s][%(name)s][%(levelname)s] - %(message)s' - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s] - - %(message)s' - log_colors: - DEBUG: purple - INFO: green - WARNING: yellow - ERROR: red - CRITICAL: red - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - file: - class: logging.FileHandler - formatter: simple - filename: ${hydra.job.name}.log - root: - level: INFO - handlers: - - console - - file - disable_existing_loggers: false - env: {} - mode: MULTIRUN - searchpath: [] - callbacks: {} - output_subdir: .hydra - overrides: - hydra: - - hydra.mode=MULTIRUN - task: - - benchmark.input_shapes.batch_size=2 - - benchmark.input_shapes.sequence_length=256 - job: - name: experiment - chdir: true - override_dirname: benchmark.input_shapes.batch_size=2,benchmark.input_shapes.sequence_length=256 - id: '1' - num: 1 - config_name: bge_batch_size_sweep_baseline.yaml - env_set: - CUDA_VISIBLE_DEVICES: '3' - env_copy: [] - config: - override_dirname: - kv_sep: '=' - item_sep: ',' - exclude_keys: [] - runtime: - version: 1.3.2 - version_base: '1.3' - cwd: /workspace/optimum-benchmark/examples/fast-embedders - config_sources: - - path: hydra.conf - schema: pkg - provider: hydra - - path: optimum_benchmark - schema: pkg - provider: main - - path: hydra_plugins.hydra_colorlog.conf - schema: pkg - provider: hydra-colorlog - - path: /workspace/optimum-benchmark/examples/fast-embedders/configs - schema: file - provider: command-line - - path: '' - schema: structured - provider: schema - output_dir: /workspace/optimum-benchmark/examples/fast-embedders/experiments/bge_batch_size_sweep_baseline_batch_size(2)_sequence_length(256)/1 - choices: - benchmark: inference - backend: pytorch - hydra/env: default - hydra/callbacks: null - hydra/job_logging: colorlog - hydra/hydra_logging: colorlog - hydra/hydra_help: default - hydra/help: default - hydra/sweeper: basic - hydra/launcher: basic - hydra/output: default - verbose: false diff --git a/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(2)_sequence_length(256)/1/.hydra/overrides.yaml b/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(2)_sequence_length(256)/1/.hydra/overrides.yaml deleted file mode 100644 index bf46a09f..00000000 --- a/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(2)_sequence_length(256)/1/.hydra/overrides.yaml +++ /dev/null @@ -1,2 +0,0 @@ -- benchmark.input_shapes.batch_size=2 -- benchmark.input_shapes.sequence_length=256 diff --git a/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(2)_sequence_length(256)/1/hydra_config.yaml b/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(2)_sequence_length(256)/1/hydra_config.yaml deleted file mode 100644 index 6526e0c2..00000000 --- a/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(2)_sequence_length(256)/1/hydra_config.yaml +++ /dev/null @@ -1,69 +0,0 @@ -backend: - name: pytorch - version: 2.0.1 - 
_target_: optimum_benchmark.backends.pytorch.backend.PyTorchBackend - seed: 42 - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: false - continous_isolation_check: false - delete_cache: false - no_weights: false - device_map: null - torch_dtype: null - disable_grad: true - eval_mode: true - amp_autocast: false - amp_dtype: null - torch_compile: false - torch_compile_config: {} - bettertransformer: false - quantization_scheme: null - quantization_config: {} - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 2 - sequence_length: 256 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: null - can_diffuse: false - can_generate: false - forward_kwargs: {} - generate_kwargs: {} -experiment_name: bge_batch_size_sweep_baseline -model: BAAI/bge-base-en-v1.5 -device: cuda -task: feature-extraction -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.0 - transformers_version: 4.34.0.dev0 - accelerate_version: 0.23.0.dev0 - diffusers_version: 0.21.0.dev0 - python_version: 3.8.10 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB diff --git a/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(2)_sequence_length(256)/1/inference_results.csv b/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(2)_sequence_length(256)/1/inference_results.csv deleted file mode 100644 index 1326276e..00000000 --- a/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(2)_sequence_length(256)/1/inference_results.csv +++ /dev/null @@ -1,2 +0,0 @@ -,forward.latency(s),forward.throughput(samples/s),forward.peak_memory(MB) -0,0.00901,222.0,1902 diff --git a/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(2048)_sequence_length(256)/11/.hydra/config.yaml b/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(2048)_sequence_length(256)/11/.hydra/config.yaml deleted file mode 100644 index 5d10c040..00000000 --- a/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(2048)_sequence_length(256)/11/.hydra/config.yaml +++ /dev/null @@ -1,69 +0,0 @@ -backend: - name: pytorch - version: ${pytorch_version:} - _target_: optimum_benchmark.backends.pytorch.backend.PyTorchBackend - seed: 42 - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: false - continous_isolation_check: false - delete_cache: false - no_weights: false - device_map: null - torch_dtype: null - disable_grad: ${is_inference:${benchmark.name}} - eval_mode: ${is_inference:${benchmark.name}} - amp_autocast: false - amp_dtype: null - torch_compile: false - torch_compile_config: {} - bettertransformer: false - quantization_scheme: null - quantization_config: {} - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 2048 - sequence_length: 256 - num_choices: 1 - 
feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: null - can_diffuse: ${can_diffuse:${task}} - can_generate: ${can_generate:${task}} - forward_kwargs: {} - generate_kwargs: {} -experiment_name: bge_batch_size_sweep_baseline -model: BAAI/bge-base-en-v1.5 -device: cuda -task: feature-extraction -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.0 - transformers_version: 4.34.0.dev0 - accelerate_version: 0.23.0.dev0 - diffusers_version: 0.21.0.dev0 - python_version: 3.8.10 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB diff --git a/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(2048)_sequence_length(256)/11/.hydra/hydra.yaml b/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(2048)_sequence_length(256)/11/.hydra/hydra.yaml deleted file mode 100644 index eaa83b80..00000000 --- a/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(2048)_sequence_length(256)/11/.hydra/hydra.yaml +++ /dev/null @@ -1,175 +0,0 @@ -hydra: - run: - dir: experiments/${experiment_name}_batch_size(${benchmark.input_shapes.batch_size})_sequence_length(${benchmark.input_shapes.sequence_length}) - sweep: - dir: experiments/${experiment_name}_batch_size(${benchmark.input_shapes.batch_size})_sequence_length(${benchmark.input_shapes.sequence_length}) - subdir: ${hydra.job.num} - launcher: - _target_: hydra._internal.core_plugins.basic_launcher.BasicLauncher - sweeper: - _target_: hydra._internal.core_plugins.basic_sweeper.BasicSweeper - max_batch_size: null - params: - benchmark.input_shapes.batch_size: 1,2,4,8,16,32,64,128,256,512,1024,2048 - benchmark.input_shapes.sequence_length: '256' - help: - app_name: ${hydra.job.name} - header: '${hydra.help.app_name} is powered by Hydra. - - ' - footer: 'Powered by Hydra (https://hydra.cc) - - Use --hydra-help to view Hydra specific help - - ' - template: '${hydra.help.header} - - == Configuration groups == - - Compose your configuration from those groups (group=option) - - - $APP_CONFIG_GROUPS - - - == Config == - - Override anything in the config (foo.bar=value) - - - $CONFIG - - - ${hydra.help.footer} - - ' - hydra_help: - template: 'Hydra (${hydra.runtime.version}) - - See https://hydra.cc for more info. - - - == Flags == - - $FLAGS_HELP - - - == Configuration groups == - - Compose your configuration from those groups (For example, append hydra/job_logging=disabled - to command line) - - - $HYDRA_CONFIG_GROUPS - - - Use ''--cfg hydra'' to Show the Hydra config. - - ' - hydra_help: ??? 
- hydra_logging: - version: 1 - formatters: - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s' - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - root: - level: INFO - handlers: - - console - disable_existing_loggers: false - job_logging: - version: 1 - formatters: - simple: - format: '[%(asctime)s][%(name)s][%(levelname)s] - %(message)s' - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s] - - %(message)s' - log_colors: - DEBUG: purple - INFO: green - WARNING: yellow - ERROR: red - CRITICAL: red - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - file: - class: logging.FileHandler - formatter: simple - filename: ${hydra.job.name}.log - root: - level: INFO - handlers: - - console - - file - disable_existing_loggers: false - env: {} - mode: MULTIRUN - searchpath: [] - callbacks: {} - output_subdir: .hydra - overrides: - hydra: - - hydra.mode=MULTIRUN - task: - - benchmark.input_shapes.batch_size=2048 - - benchmark.input_shapes.sequence_length=256 - job: - name: experiment - chdir: true - override_dirname: benchmark.input_shapes.batch_size=2048,benchmark.input_shapes.sequence_length=256 - id: '11' - num: 11 - config_name: bge_batch_size_sweep_baseline.yaml - env_set: - CUDA_VISIBLE_DEVICES: '3' - env_copy: [] - config: - override_dirname: - kv_sep: '=' - item_sep: ',' - exclude_keys: [] - runtime: - version: 1.3.2 - version_base: '1.3' - cwd: /workspace/optimum-benchmark/examples/fast-embedders - config_sources: - - path: hydra.conf - schema: pkg - provider: hydra - - path: optimum_benchmark - schema: pkg - provider: main - - path: hydra_plugins.hydra_colorlog.conf - schema: pkg - provider: hydra-colorlog - - path: /workspace/optimum-benchmark/examples/fast-embedders/configs - schema: file - provider: command-line - - path: '' - schema: structured - provider: schema - output_dir: /workspace/optimum-benchmark/examples/fast-embedders/experiments/bge_batch_size_sweep_baseline_batch_size(2048)_sequence_length(256)/11 - choices: - benchmark: inference - backend: pytorch - hydra/env: default - hydra/callbacks: null - hydra/job_logging: colorlog - hydra/hydra_logging: colorlog - hydra/hydra_help: default - hydra/help: default - hydra/sweeper: basic - hydra/launcher: basic - hydra/output: default - verbose: false diff --git a/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(2048)_sequence_length(256)/11/.hydra/overrides.yaml b/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(2048)_sequence_length(256)/11/.hydra/overrides.yaml deleted file mode 100644 index c0074884..00000000 --- a/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(2048)_sequence_length(256)/11/.hydra/overrides.yaml +++ /dev/null @@ -1,2 +0,0 @@ -- benchmark.input_shapes.batch_size=2048 -- benchmark.input_shapes.sequence_length=256 diff --git a/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(2048)_sequence_length(256)/11/hydra_config.yaml b/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(2048)_sequence_length(256)/11/hydra_config.yaml deleted file mode 100644 index a0d85fbe..00000000 --- a/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(2048)_sequence_length(256)/11/hydra_config.yaml +++ /dev/null @@ -1,69 +0,0 @@ -backend: 
- name: pytorch - version: 2.0.1 - _target_: optimum_benchmark.backends.pytorch.backend.PyTorchBackend - seed: 42 - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: false - continous_isolation_check: false - delete_cache: false - no_weights: false - device_map: null - torch_dtype: null - disable_grad: true - eval_mode: true - amp_autocast: false - amp_dtype: null - torch_compile: false - torch_compile_config: {} - bettertransformer: false - quantization_scheme: null - quantization_config: {} - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 2048 - sequence_length: 256 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: null - can_diffuse: false - can_generate: false - forward_kwargs: {} - generate_kwargs: {} -experiment_name: bge_batch_size_sweep_baseline -model: BAAI/bge-base-en-v1.5 -device: cuda -task: feature-extraction -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.0 - transformers_version: 4.34.0.dev0 - accelerate_version: 0.23.0.dev0 - diffusers_version: 0.21.0.dev0 - python_version: 3.8.10 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB diff --git a/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(2048)_sequence_length(256)/11/inference_results.csv b/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(2048)_sequence_length(256)/11/inference_results.csv deleted file mode 100644 index 12f5a323..00000000 --- a/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(2048)_sequence_length(256)/11/inference_results.csv +++ /dev/null @@ -1,2 +0,0 @@ -,forward.latency(s),forward.throughput(samples/s),forward.peak_memory(MB) -0,6.34,323.0,29283 diff --git a/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(256)_sequence_length(256)/8/.hydra/config.yaml b/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(256)_sequence_length(256)/8/.hydra/config.yaml deleted file mode 100644 index 6df59894..00000000 --- a/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(256)_sequence_length(256)/8/.hydra/config.yaml +++ /dev/null @@ -1,69 +0,0 @@ -backend: - name: pytorch - version: ${pytorch_version:} - _target_: optimum_benchmark.backends.pytorch.backend.PyTorchBackend - seed: 42 - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: false - continous_isolation_check: false - delete_cache: false - no_weights: false - device_map: null - torch_dtype: null - disable_grad: ${is_inference:${benchmark.name}} - eval_mode: ${is_inference:${benchmark.name}} - amp_autocast: false - amp_dtype: null - torch_compile: false - torch_compile_config: {} - bettertransformer: false - quantization_scheme: null - quantization_config: {} - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 256 - 
sequence_length: 256 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: null - can_diffuse: ${can_diffuse:${task}} - can_generate: ${can_generate:${task}} - forward_kwargs: {} - generate_kwargs: {} -experiment_name: bge_batch_size_sweep_baseline -model: BAAI/bge-base-en-v1.5 -device: cuda -task: feature-extraction -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.0 - transformers_version: 4.34.0.dev0 - accelerate_version: 0.23.0.dev0 - diffusers_version: 0.21.0.dev0 - python_version: 3.8.10 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB diff --git a/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(256)_sequence_length(256)/8/.hydra/hydra.yaml b/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(256)_sequence_length(256)/8/.hydra/hydra.yaml deleted file mode 100644 index 047bec6d..00000000 --- a/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(256)_sequence_length(256)/8/.hydra/hydra.yaml +++ /dev/null @@ -1,175 +0,0 @@ -hydra: - run: - dir: experiments/${experiment_name}_batch_size(${benchmark.input_shapes.batch_size})_sequence_length(${benchmark.input_shapes.sequence_length}) - sweep: - dir: experiments/${experiment_name}_batch_size(${benchmark.input_shapes.batch_size})_sequence_length(${benchmark.input_shapes.sequence_length}) - subdir: ${hydra.job.num} - launcher: - _target_: hydra._internal.core_plugins.basic_launcher.BasicLauncher - sweeper: - _target_: hydra._internal.core_plugins.basic_sweeper.BasicSweeper - max_batch_size: null - params: - benchmark.input_shapes.batch_size: 1,2,4,8,16,32,64,128,256,512,1024,2048 - benchmark.input_shapes.sequence_length: '256' - help: - app_name: ${hydra.job.name} - header: '${hydra.help.app_name} is powered by Hydra. - - ' - footer: 'Powered by Hydra (https://hydra.cc) - - Use --hydra-help to view Hydra specific help - - ' - template: '${hydra.help.header} - - == Configuration groups == - - Compose your configuration from those groups (group=option) - - - $APP_CONFIG_GROUPS - - - == Config == - - Override anything in the config (foo.bar=value) - - - $CONFIG - - - ${hydra.help.footer} - - ' - hydra_help: - template: 'Hydra (${hydra.runtime.version}) - - See https://hydra.cc for more info. - - - == Flags == - - $FLAGS_HELP - - - == Configuration groups == - - Compose your configuration from those groups (For example, append hydra/job_logging=disabled - to command line) - - - $HYDRA_CONFIG_GROUPS - - - Use ''--cfg hydra'' to Show the Hydra config. - - ' - hydra_help: ??? 
- hydra_logging: - version: 1 - formatters: - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s' - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - root: - level: INFO - handlers: - - console - disable_existing_loggers: false - job_logging: - version: 1 - formatters: - simple: - format: '[%(asctime)s][%(name)s][%(levelname)s] - %(message)s' - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s] - - %(message)s' - log_colors: - DEBUG: purple - INFO: green - WARNING: yellow - ERROR: red - CRITICAL: red - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - file: - class: logging.FileHandler - formatter: simple - filename: ${hydra.job.name}.log - root: - level: INFO - handlers: - - console - - file - disable_existing_loggers: false - env: {} - mode: MULTIRUN - searchpath: [] - callbacks: {} - output_subdir: .hydra - overrides: - hydra: - - hydra.mode=MULTIRUN - task: - - benchmark.input_shapes.batch_size=256 - - benchmark.input_shapes.sequence_length=256 - job: - name: experiment - chdir: true - override_dirname: benchmark.input_shapes.batch_size=256,benchmark.input_shapes.sequence_length=256 - id: '8' - num: 8 - config_name: bge_batch_size_sweep_baseline.yaml - env_set: - CUDA_VISIBLE_DEVICES: '3' - env_copy: [] - config: - override_dirname: - kv_sep: '=' - item_sep: ',' - exclude_keys: [] - runtime: - version: 1.3.2 - version_base: '1.3' - cwd: /workspace/optimum-benchmark/examples/fast-embedders - config_sources: - - path: hydra.conf - schema: pkg - provider: hydra - - path: optimum_benchmark - schema: pkg - provider: main - - path: hydra_plugins.hydra_colorlog.conf - schema: pkg - provider: hydra-colorlog - - path: /workspace/optimum-benchmark/examples/fast-embedders/configs - schema: file - provider: command-line - - path: '' - schema: structured - provider: schema - output_dir: /workspace/optimum-benchmark/examples/fast-embedders/experiments/bge_batch_size_sweep_baseline_batch_size(256)_sequence_length(256)/8 - choices: - benchmark: inference - backend: pytorch - hydra/env: default - hydra/callbacks: null - hydra/job_logging: colorlog - hydra/hydra_logging: colorlog - hydra/hydra_help: default - hydra/help: default - hydra/sweeper: basic - hydra/launcher: basic - hydra/output: default - verbose: false diff --git a/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(256)_sequence_length(256)/8/.hydra/overrides.yaml b/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(256)_sequence_length(256)/8/.hydra/overrides.yaml deleted file mode 100644 index 827e72de..00000000 --- a/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(256)_sequence_length(256)/8/.hydra/overrides.yaml +++ /dev/null @@ -1,2 +0,0 @@ -- benchmark.input_shapes.batch_size=256 -- benchmark.input_shapes.sequence_length=256 diff --git a/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(256)_sequence_length(256)/8/hydra_config.yaml b/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(256)_sequence_length(256)/8/hydra_config.yaml deleted file mode 100644 index 1480d48d..00000000 --- a/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(256)_sequence_length(256)/8/hydra_config.yaml +++ /dev/null @@ -1,69 +0,0 @@ -backend: - name: pytorch - 
version: 2.0.1 - _target_: optimum_benchmark.backends.pytorch.backend.PyTorchBackend - seed: 42 - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: false - continous_isolation_check: false - delete_cache: false - no_weights: false - device_map: null - torch_dtype: null - disable_grad: true - eval_mode: true - amp_autocast: false - amp_dtype: null - torch_compile: false - torch_compile_config: {} - bettertransformer: false - quantization_scheme: null - quantization_config: {} - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 256 - sequence_length: 256 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: null - can_diffuse: false - can_generate: false - forward_kwargs: {} - generate_kwargs: {} -experiment_name: bge_batch_size_sweep_baseline -model: BAAI/bge-base-en-v1.5 -device: cuda -task: feature-extraction -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.0 - transformers_version: 4.34.0.dev0 - accelerate_version: 0.23.0.dev0 - diffusers_version: 0.21.0.dev0 - python_version: 3.8.10 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB diff --git a/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(256)_sequence_length(256)/8/inference_results.csv b/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(256)_sequence_length(256)/8/inference_results.csv deleted file mode 100644 index 91898ab3..00000000 --- a/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(256)_sequence_length(256)/8/inference_results.csv +++ /dev/null @@ -1,2 +0,0 @@ -,forward.latency(s),forward.throughput(samples/s),forward.peak_memory(MB) -0,0.792,323.0,5107 diff --git a/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(32)_sequence_length(256)/5/.hydra/config.yaml b/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(32)_sequence_length(256)/5/.hydra/config.yaml deleted file mode 100644 index 64249ed0..00000000 --- a/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(32)_sequence_length(256)/5/.hydra/config.yaml +++ /dev/null @@ -1,69 +0,0 @@ -backend: - name: pytorch - version: ${pytorch_version:} - _target_: optimum_benchmark.backends.pytorch.backend.PyTorchBackend - seed: 42 - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: false - continous_isolation_check: false - delete_cache: false - no_weights: false - device_map: null - torch_dtype: null - disable_grad: ${is_inference:${benchmark.name}} - eval_mode: ${is_inference:${benchmark.name}} - amp_autocast: false - amp_dtype: null - torch_compile: false - torch_compile_config: {} - bettertransformer: false - quantization_scheme: null - quantization_config: {} - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 32 - sequence_length: 256 - num_choices: 
1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: null - can_diffuse: ${can_diffuse:${task}} - can_generate: ${can_generate:${task}} - forward_kwargs: {} - generate_kwargs: {} -experiment_name: bge_batch_size_sweep_baseline -model: BAAI/bge-base-en-v1.5 -device: cuda -task: feature-extraction -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.0 - transformers_version: 4.34.0.dev0 - accelerate_version: 0.23.0.dev0 - diffusers_version: 0.21.0.dev0 - python_version: 3.8.10 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB diff --git a/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(32)_sequence_length(256)/5/.hydra/hydra.yaml b/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(32)_sequence_length(256)/5/.hydra/hydra.yaml deleted file mode 100644 index 8fc2114f..00000000 --- a/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(32)_sequence_length(256)/5/.hydra/hydra.yaml +++ /dev/null @@ -1,175 +0,0 @@ -hydra: - run: - dir: experiments/${experiment_name}_batch_size(${benchmark.input_shapes.batch_size})_sequence_length(${benchmark.input_shapes.sequence_length}) - sweep: - dir: experiments/${experiment_name}_batch_size(${benchmark.input_shapes.batch_size})_sequence_length(${benchmark.input_shapes.sequence_length}) - subdir: ${hydra.job.num} - launcher: - _target_: hydra._internal.core_plugins.basic_launcher.BasicLauncher - sweeper: - _target_: hydra._internal.core_plugins.basic_sweeper.BasicSweeper - max_batch_size: null - params: - benchmark.input_shapes.batch_size: 1,2,4,8,16,32,64,128,256,512,1024,2048 - benchmark.input_shapes.sequence_length: '256' - help: - app_name: ${hydra.job.name} - header: '${hydra.help.app_name} is powered by Hydra. - - ' - footer: 'Powered by Hydra (https://hydra.cc) - - Use --hydra-help to view Hydra specific help - - ' - template: '${hydra.help.header} - - == Configuration groups == - - Compose your configuration from those groups (group=option) - - - $APP_CONFIG_GROUPS - - - == Config == - - Override anything in the config (foo.bar=value) - - - $CONFIG - - - ${hydra.help.footer} - - ' - hydra_help: - template: 'Hydra (${hydra.runtime.version}) - - See https://hydra.cc for more info. - - - == Flags == - - $FLAGS_HELP - - - == Configuration groups == - - Compose your configuration from those groups (For example, append hydra/job_logging=disabled - to command line) - - - $HYDRA_CONFIG_GROUPS - - - Use ''--cfg hydra'' to Show the Hydra config. - - ' - hydra_help: ??? 
- hydra_logging: - version: 1 - formatters: - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s' - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - root: - level: INFO - handlers: - - console - disable_existing_loggers: false - job_logging: - version: 1 - formatters: - simple: - format: '[%(asctime)s][%(name)s][%(levelname)s] - %(message)s' - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s] - - %(message)s' - log_colors: - DEBUG: purple - INFO: green - WARNING: yellow - ERROR: red - CRITICAL: red - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - file: - class: logging.FileHandler - formatter: simple - filename: ${hydra.job.name}.log - root: - level: INFO - handlers: - - console - - file - disable_existing_loggers: false - env: {} - mode: MULTIRUN - searchpath: [] - callbacks: {} - output_subdir: .hydra - overrides: - hydra: - - hydra.mode=MULTIRUN - task: - - benchmark.input_shapes.batch_size=32 - - benchmark.input_shapes.sequence_length=256 - job: - name: experiment - chdir: true - override_dirname: benchmark.input_shapes.batch_size=32,benchmark.input_shapes.sequence_length=256 - id: '5' - num: 5 - config_name: bge_batch_size_sweep_baseline.yaml - env_set: - CUDA_VISIBLE_DEVICES: '3' - env_copy: [] - config: - override_dirname: - kv_sep: '=' - item_sep: ',' - exclude_keys: [] - runtime: - version: 1.3.2 - version_base: '1.3' - cwd: /workspace/optimum-benchmark/examples/fast-embedders - config_sources: - - path: hydra.conf - schema: pkg - provider: hydra - - path: optimum_benchmark - schema: pkg - provider: main - - path: hydra_plugins.hydra_colorlog.conf - schema: pkg - provider: hydra-colorlog - - path: /workspace/optimum-benchmark/examples/fast-embedders/configs - schema: file - provider: command-line - - path: '' - schema: structured - provider: schema - output_dir: /workspace/optimum-benchmark/examples/fast-embedders/experiments/bge_batch_size_sweep_baseline_batch_size(32)_sequence_length(256)/5 - choices: - benchmark: inference - backend: pytorch - hydra/env: default - hydra/callbacks: null - hydra/job_logging: colorlog - hydra/hydra_logging: colorlog - hydra/hydra_help: default - hydra/help: default - hydra/sweeper: basic - hydra/launcher: basic - hydra/output: default - verbose: false diff --git a/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(32)_sequence_length(256)/5/.hydra/overrides.yaml b/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(32)_sequence_length(256)/5/.hydra/overrides.yaml deleted file mode 100644 index 3e2c3635..00000000 --- a/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(32)_sequence_length(256)/5/.hydra/overrides.yaml +++ /dev/null @@ -1,2 +0,0 @@ -- benchmark.input_shapes.batch_size=32 -- benchmark.input_shapes.sequence_length=256 diff --git a/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(32)_sequence_length(256)/5/hydra_config.yaml b/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(32)_sequence_length(256)/5/hydra_config.yaml deleted file mode 100644 index d0d51aaf..00000000 --- a/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(32)_sequence_length(256)/5/hydra_config.yaml +++ /dev/null @@ -1,69 +0,0 @@ -backend: - name: pytorch - version: 
2.0.1 - _target_: optimum_benchmark.backends.pytorch.backend.PyTorchBackend - seed: 42 - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: false - continous_isolation_check: false - delete_cache: false - no_weights: false - device_map: null - torch_dtype: null - disable_grad: true - eval_mode: true - amp_autocast: false - amp_dtype: null - torch_compile: false - torch_compile_config: {} - bettertransformer: false - quantization_scheme: null - quantization_config: {} - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 32 - sequence_length: 256 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: null - can_diffuse: false - can_generate: false - forward_kwargs: {} - generate_kwargs: {} -experiment_name: bge_batch_size_sweep_baseline -model: BAAI/bge-base-en-v1.5 -device: cuda -task: feature-extraction -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.0 - transformers_version: 4.34.0.dev0 - accelerate_version: 0.23.0.dev0 - diffusers_version: 0.21.0.dev0 - python_version: 3.8.10 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB diff --git a/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(32)_sequence_length(256)/5/inference_results.csv b/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(32)_sequence_length(256)/5/inference_results.csv deleted file mode 100644 index a0c77f7c..00000000 --- a/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(32)_sequence_length(256)/5/inference_results.csv +++ /dev/null @@ -1,2 +0,0 @@ -,forward.latency(s),forward.throughput(samples/s),forward.peak_memory(MB) -0,0.109,294.0,2286 diff --git a/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(4)_sequence_length(256)/2/.hydra/config.yaml b/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(4)_sequence_length(256)/2/.hydra/config.yaml deleted file mode 100644 index 4a4c4aa5..00000000 --- a/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(4)_sequence_length(256)/2/.hydra/config.yaml +++ /dev/null @@ -1,69 +0,0 @@ -backend: - name: pytorch - version: ${pytorch_version:} - _target_: optimum_benchmark.backends.pytorch.backend.PyTorchBackend - seed: 42 - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: false - continous_isolation_check: false - delete_cache: false - no_weights: false - device_map: null - torch_dtype: null - disable_grad: ${is_inference:${benchmark.name}} - eval_mode: ${is_inference:${benchmark.name}} - amp_autocast: false - amp_dtype: null - torch_compile: false - torch_compile_config: {} - bettertransformer: false - quantization_scheme: null - quantization_config: {} - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 4 - sequence_length: 256 - num_choices: 1 - feature_size: 
80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: null - can_diffuse: ${can_diffuse:${task}} - can_generate: ${can_generate:${task}} - forward_kwargs: {} - generate_kwargs: {} -experiment_name: bge_batch_size_sweep_baseline -model: BAAI/bge-base-en-v1.5 -device: cuda -task: feature-extraction -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.0 - transformers_version: 4.34.0.dev0 - accelerate_version: 0.23.0.dev0 - diffusers_version: 0.21.0.dev0 - python_version: 3.8.10 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB diff --git a/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(4)_sequence_length(256)/2/.hydra/hydra.yaml b/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(4)_sequence_length(256)/2/.hydra/hydra.yaml deleted file mode 100644 index a43fdac6..00000000 --- a/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(4)_sequence_length(256)/2/.hydra/hydra.yaml +++ /dev/null @@ -1,175 +0,0 @@ -hydra: - run: - dir: experiments/${experiment_name}_batch_size(${benchmark.input_shapes.batch_size})_sequence_length(${benchmark.input_shapes.sequence_length}) - sweep: - dir: experiments/${experiment_name}_batch_size(${benchmark.input_shapes.batch_size})_sequence_length(${benchmark.input_shapes.sequence_length}) - subdir: ${hydra.job.num} - launcher: - _target_: hydra._internal.core_plugins.basic_launcher.BasicLauncher - sweeper: - _target_: hydra._internal.core_plugins.basic_sweeper.BasicSweeper - max_batch_size: null - params: - benchmark.input_shapes.batch_size: 1,2,4,8,16,32,64,128,256,512,1024,2048 - benchmark.input_shapes.sequence_length: '256' - help: - app_name: ${hydra.job.name} - header: '${hydra.help.app_name} is powered by Hydra. - - ' - footer: 'Powered by Hydra (https://hydra.cc) - - Use --hydra-help to view Hydra specific help - - ' - template: '${hydra.help.header} - - == Configuration groups == - - Compose your configuration from those groups (group=option) - - - $APP_CONFIG_GROUPS - - - == Config == - - Override anything in the config (foo.bar=value) - - - $CONFIG - - - ${hydra.help.footer} - - ' - hydra_help: - template: 'Hydra (${hydra.runtime.version}) - - See https://hydra.cc for more info. - - - == Flags == - - $FLAGS_HELP - - - == Configuration groups == - - Compose your configuration from those groups (For example, append hydra/job_logging=disabled - to command line) - - - $HYDRA_CONFIG_GROUPS - - - Use ''--cfg hydra'' to Show the Hydra config. - - ' - hydra_help: ??? 
- hydra_logging: - version: 1 - formatters: - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s' - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - root: - level: INFO - handlers: - - console - disable_existing_loggers: false - job_logging: - version: 1 - formatters: - simple: - format: '[%(asctime)s][%(name)s][%(levelname)s] - %(message)s' - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s] - - %(message)s' - log_colors: - DEBUG: purple - INFO: green - WARNING: yellow - ERROR: red - CRITICAL: red - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - file: - class: logging.FileHandler - formatter: simple - filename: ${hydra.job.name}.log - root: - level: INFO - handlers: - - console - - file - disable_existing_loggers: false - env: {} - mode: MULTIRUN - searchpath: [] - callbacks: {} - output_subdir: .hydra - overrides: - hydra: - - hydra.mode=MULTIRUN - task: - - benchmark.input_shapes.batch_size=4 - - benchmark.input_shapes.sequence_length=256 - job: - name: experiment - chdir: true - override_dirname: benchmark.input_shapes.batch_size=4,benchmark.input_shapes.sequence_length=256 - id: '2' - num: 2 - config_name: bge_batch_size_sweep_baseline.yaml - env_set: - CUDA_VISIBLE_DEVICES: '3' - env_copy: [] - config: - override_dirname: - kv_sep: '=' - item_sep: ',' - exclude_keys: [] - runtime: - version: 1.3.2 - version_base: '1.3' - cwd: /workspace/optimum-benchmark/examples/fast-embedders - config_sources: - - path: hydra.conf - schema: pkg - provider: hydra - - path: optimum_benchmark - schema: pkg - provider: main - - path: hydra_plugins.hydra_colorlog.conf - schema: pkg - provider: hydra-colorlog - - path: /workspace/optimum-benchmark/examples/fast-embedders/configs - schema: file - provider: command-line - - path: '' - schema: structured - provider: schema - output_dir: /workspace/optimum-benchmark/examples/fast-embedders/experiments/bge_batch_size_sweep_baseline_batch_size(4)_sequence_length(256)/2 - choices: - benchmark: inference - backend: pytorch - hydra/env: default - hydra/callbacks: null - hydra/job_logging: colorlog - hydra/hydra_logging: colorlog - hydra/hydra_help: default - hydra/help: default - hydra/sweeper: basic - hydra/launcher: basic - hydra/output: default - verbose: false diff --git a/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(4)_sequence_length(256)/2/.hydra/overrides.yaml b/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(4)_sequence_length(256)/2/.hydra/overrides.yaml deleted file mode 100644 index 3c675c88..00000000 --- a/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(4)_sequence_length(256)/2/.hydra/overrides.yaml +++ /dev/null @@ -1,2 +0,0 @@ -- benchmark.input_shapes.batch_size=4 -- benchmark.input_shapes.sequence_length=256 diff --git a/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(4)_sequence_length(256)/2/hydra_config.yaml b/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(4)_sequence_length(256)/2/hydra_config.yaml deleted file mode 100644 index b2ccf7d5..00000000 --- a/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(4)_sequence_length(256)/2/hydra_config.yaml +++ /dev/null @@ -1,69 +0,0 @@ -backend: - name: pytorch - version: 2.0.1 - 
_target_: optimum_benchmark.backends.pytorch.backend.PyTorchBackend - seed: 42 - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: false - continous_isolation_check: false - delete_cache: false - no_weights: false - device_map: null - torch_dtype: null - disable_grad: true - eval_mode: true - amp_autocast: false - amp_dtype: null - torch_compile: false - torch_compile_config: {} - bettertransformer: false - quantization_scheme: null - quantization_config: {} - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 4 - sequence_length: 256 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: null - can_diffuse: false - can_generate: false - forward_kwargs: {} - generate_kwargs: {} -experiment_name: bge_batch_size_sweep_baseline -model: BAAI/bge-base-en-v1.5 -device: cuda -task: feature-extraction -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.0 - transformers_version: 4.34.0.dev0 - accelerate_version: 0.23.0.dev0 - diffusers_version: 0.21.0.dev0 - python_version: 3.8.10 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB diff --git a/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(4)_sequence_length(256)/2/inference_results.csv b/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(4)_sequence_length(256)/2/inference_results.csv deleted file mode 100644 index 54f6d8d9..00000000 --- a/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(4)_sequence_length(256)/2/inference_results.csv +++ /dev/null @@ -1,2 +0,0 @@ -,forward.latency(s),forward.throughput(samples/s),forward.peak_memory(MB) -0,0.0155,258.0,1936 diff --git a/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(512)_sequence_length(256)/9/.hydra/config.yaml b/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(512)_sequence_length(256)/9/.hydra/config.yaml deleted file mode 100644 index 6e4fd05a..00000000 --- a/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(512)_sequence_length(256)/9/.hydra/config.yaml +++ /dev/null @@ -1,69 +0,0 @@ -backend: - name: pytorch - version: ${pytorch_version:} - _target_: optimum_benchmark.backends.pytorch.backend.PyTorchBackend - seed: 42 - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: false - continous_isolation_check: false - delete_cache: false - no_weights: false - device_map: null - torch_dtype: null - disable_grad: ${is_inference:${benchmark.name}} - eval_mode: ${is_inference:${benchmark.name}} - amp_autocast: false - amp_dtype: null - torch_compile: false - torch_compile_config: {} - bettertransformer: false - quantization_scheme: null - quantization_config: {} - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 512 - sequence_length: 256 - num_choices: 1 - feature_size: 80 
- nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: null - can_diffuse: ${can_diffuse:${task}} - can_generate: ${can_generate:${task}} - forward_kwargs: {} - generate_kwargs: {} -experiment_name: bge_batch_size_sweep_baseline -model: BAAI/bge-base-en-v1.5 -device: cuda -task: feature-extraction -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.0 - transformers_version: 4.34.0.dev0 - accelerate_version: 0.23.0.dev0 - diffusers_version: 0.21.0.dev0 - python_version: 3.8.10 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB diff --git a/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(512)_sequence_length(256)/9/.hydra/hydra.yaml b/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(512)_sequence_length(256)/9/.hydra/hydra.yaml deleted file mode 100644 index 456b45d7..00000000 --- a/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(512)_sequence_length(256)/9/.hydra/hydra.yaml +++ /dev/null @@ -1,175 +0,0 @@ -hydra: - run: - dir: experiments/${experiment_name}_batch_size(${benchmark.input_shapes.batch_size})_sequence_length(${benchmark.input_shapes.sequence_length}) - sweep: - dir: experiments/${experiment_name}_batch_size(${benchmark.input_shapes.batch_size})_sequence_length(${benchmark.input_shapes.sequence_length}) - subdir: ${hydra.job.num} - launcher: - _target_: hydra._internal.core_plugins.basic_launcher.BasicLauncher - sweeper: - _target_: hydra._internal.core_plugins.basic_sweeper.BasicSweeper - max_batch_size: null - params: - benchmark.input_shapes.batch_size: 1,2,4,8,16,32,64,128,256,512,1024,2048 - benchmark.input_shapes.sequence_length: '256' - help: - app_name: ${hydra.job.name} - header: '${hydra.help.app_name} is powered by Hydra. - - ' - footer: 'Powered by Hydra (https://hydra.cc) - - Use --hydra-help to view Hydra specific help - - ' - template: '${hydra.help.header} - - == Configuration groups == - - Compose your configuration from those groups (group=option) - - - $APP_CONFIG_GROUPS - - - == Config == - - Override anything in the config (foo.bar=value) - - - $CONFIG - - - ${hydra.help.footer} - - ' - hydra_help: - template: 'Hydra (${hydra.runtime.version}) - - See https://hydra.cc for more info. - - - == Flags == - - $FLAGS_HELP - - - == Configuration groups == - - Compose your configuration from those groups (For example, append hydra/job_logging=disabled - to command line) - - - $HYDRA_CONFIG_GROUPS - - - Use ''--cfg hydra'' to Show the Hydra config. - - ' - hydra_help: ??? 
- hydra_logging: - version: 1 - formatters: - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s' - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - root: - level: INFO - handlers: - - console - disable_existing_loggers: false - job_logging: - version: 1 - formatters: - simple: - format: '[%(asctime)s][%(name)s][%(levelname)s] - %(message)s' - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s] - - %(message)s' - log_colors: - DEBUG: purple - INFO: green - WARNING: yellow - ERROR: red - CRITICAL: red - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - file: - class: logging.FileHandler - formatter: simple - filename: ${hydra.job.name}.log - root: - level: INFO - handlers: - - console - - file - disable_existing_loggers: false - env: {} - mode: MULTIRUN - searchpath: [] - callbacks: {} - output_subdir: .hydra - overrides: - hydra: - - hydra.mode=MULTIRUN - task: - - benchmark.input_shapes.batch_size=512 - - benchmark.input_shapes.sequence_length=256 - job: - name: experiment - chdir: true - override_dirname: benchmark.input_shapes.batch_size=512,benchmark.input_shapes.sequence_length=256 - id: '9' - num: 9 - config_name: bge_batch_size_sweep_baseline.yaml - env_set: - CUDA_VISIBLE_DEVICES: '3' - env_copy: [] - config: - override_dirname: - kv_sep: '=' - item_sep: ',' - exclude_keys: [] - runtime: - version: 1.3.2 - version_base: '1.3' - cwd: /workspace/optimum-benchmark/examples/fast-embedders - config_sources: - - path: hydra.conf - schema: pkg - provider: hydra - - path: optimum_benchmark - schema: pkg - provider: main - - path: hydra_plugins.hydra_colorlog.conf - schema: pkg - provider: hydra-colorlog - - path: /workspace/optimum-benchmark/examples/fast-embedders/configs - schema: file - provider: command-line - - path: '' - schema: structured - provider: schema - output_dir: /workspace/optimum-benchmark/examples/fast-embedders/experiments/bge_batch_size_sweep_baseline_batch_size(512)_sequence_length(256)/9 - choices: - benchmark: inference - backend: pytorch - hydra/env: default - hydra/callbacks: null - hydra/job_logging: colorlog - hydra/hydra_logging: colorlog - hydra/hydra_help: default - hydra/help: default - hydra/sweeper: basic - hydra/launcher: basic - hydra/output: default - verbose: false diff --git a/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(512)_sequence_length(256)/9/.hydra/overrides.yaml b/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(512)_sequence_length(256)/9/.hydra/overrides.yaml deleted file mode 100644 index 830422b4..00000000 --- a/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(512)_sequence_length(256)/9/.hydra/overrides.yaml +++ /dev/null @@ -1,2 +0,0 @@ -- benchmark.input_shapes.batch_size=512 -- benchmark.input_shapes.sequence_length=256 diff --git a/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(512)_sequence_length(256)/9/hydra_config.yaml b/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(512)_sequence_length(256)/9/hydra_config.yaml deleted file mode 100644 index d0416dc0..00000000 --- a/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(512)_sequence_length(256)/9/hydra_config.yaml +++ /dev/null @@ -1,69 +0,0 @@ -backend: - name: pytorch - 
version: 2.0.1 - _target_: optimum_benchmark.backends.pytorch.backend.PyTorchBackend - seed: 42 - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: false - continous_isolation_check: false - delete_cache: false - no_weights: false - device_map: null - torch_dtype: null - disable_grad: true - eval_mode: true - amp_autocast: false - amp_dtype: null - torch_compile: false - torch_compile_config: {} - bettertransformer: false - quantization_scheme: null - quantization_config: {} - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 512 - sequence_length: 256 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: null - can_diffuse: false - can_generate: false - forward_kwargs: {} - generate_kwargs: {} -experiment_name: bge_batch_size_sweep_baseline -model: BAAI/bge-base-en-v1.5 -device: cuda -task: feature-extraction -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.0 - transformers_version: 4.34.0.dev0 - accelerate_version: 0.23.0.dev0 - diffusers_version: 0.21.0.dev0 - python_version: 3.8.10 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB diff --git a/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(512)_sequence_length(256)/9/inference_results.csv b/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(512)_sequence_length(256)/9/inference_results.csv deleted file mode 100644 index e0ede21a..00000000 --- a/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(512)_sequence_length(256)/9/inference_results.csv +++ /dev/null @@ -1,2 +0,0 @@ -,forward.latency(s),forward.throughput(samples/s),forward.peak_memory(MB) -0,1.58,324.0,8328 diff --git a/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(64)_sequence_length(256)/6/.hydra/config.yaml b/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(64)_sequence_length(256)/6/.hydra/config.yaml deleted file mode 100644 index 078ebee2..00000000 --- a/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(64)_sequence_length(256)/6/.hydra/config.yaml +++ /dev/null @@ -1,69 +0,0 @@ -backend: - name: pytorch - version: ${pytorch_version:} - _target_: optimum_benchmark.backends.pytorch.backend.PyTorchBackend - seed: 42 - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: false - continous_isolation_check: false - delete_cache: false - no_weights: false - device_map: null - torch_dtype: null - disable_grad: ${is_inference:${benchmark.name}} - eval_mode: ${is_inference:${benchmark.name}} - amp_autocast: false - amp_dtype: null - torch_compile: false - torch_compile_config: {} - bettertransformer: false - quantization_scheme: null - quantization_config: {} - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 64 - sequence_length: 256 - num_choices: 1 
- feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: null - can_diffuse: ${can_diffuse:${task}} - can_generate: ${can_generate:${task}} - forward_kwargs: {} - generate_kwargs: {} -experiment_name: bge_batch_size_sweep_baseline -model: BAAI/bge-base-en-v1.5 -device: cuda -task: feature-extraction -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.0 - transformers_version: 4.34.0.dev0 - accelerate_version: 0.23.0.dev0 - diffusers_version: 0.21.0.dev0 - python_version: 3.8.10 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB diff --git a/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(64)_sequence_length(256)/6/.hydra/hydra.yaml b/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(64)_sequence_length(256)/6/.hydra/hydra.yaml deleted file mode 100644 index 4d33e000..00000000 --- a/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(64)_sequence_length(256)/6/.hydra/hydra.yaml +++ /dev/null @@ -1,175 +0,0 @@ -hydra: - run: - dir: experiments/${experiment_name}_batch_size(${benchmark.input_shapes.batch_size})_sequence_length(${benchmark.input_shapes.sequence_length}) - sweep: - dir: experiments/${experiment_name}_batch_size(${benchmark.input_shapes.batch_size})_sequence_length(${benchmark.input_shapes.sequence_length}) - subdir: ${hydra.job.num} - launcher: - _target_: hydra._internal.core_plugins.basic_launcher.BasicLauncher - sweeper: - _target_: hydra._internal.core_plugins.basic_sweeper.BasicSweeper - max_batch_size: null - params: - benchmark.input_shapes.batch_size: 1,2,4,8,16,32,64,128,256,512,1024,2048 - benchmark.input_shapes.sequence_length: '256' - help: - app_name: ${hydra.job.name} - header: '${hydra.help.app_name} is powered by Hydra. - - ' - footer: 'Powered by Hydra (https://hydra.cc) - - Use --hydra-help to view Hydra specific help - - ' - template: '${hydra.help.header} - - == Configuration groups == - - Compose your configuration from those groups (group=option) - - - $APP_CONFIG_GROUPS - - - == Config == - - Override anything in the config (foo.bar=value) - - - $CONFIG - - - ${hydra.help.footer} - - ' - hydra_help: - template: 'Hydra (${hydra.runtime.version}) - - See https://hydra.cc for more info. - - - == Flags == - - $FLAGS_HELP - - - == Configuration groups == - - Compose your configuration from those groups (For example, append hydra/job_logging=disabled - to command line) - - - $HYDRA_CONFIG_GROUPS - - - Use ''--cfg hydra'' to Show the Hydra config. - - ' - hydra_help: ??? 
- hydra_logging: - version: 1 - formatters: - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s' - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - root: - level: INFO - handlers: - - console - disable_existing_loggers: false - job_logging: - version: 1 - formatters: - simple: - format: '[%(asctime)s][%(name)s][%(levelname)s] - %(message)s' - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s] - - %(message)s' - log_colors: - DEBUG: purple - INFO: green - WARNING: yellow - ERROR: red - CRITICAL: red - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - file: - class: logging.FileHandler - formatter: simple - filename: ${hydra.job.name}.log - root: - level: INFO - handlers: - - console - - file - disable_existing_loggers: false - env: {} - mode: MULTIRUN - searchpath: [] - callbacks: {} - output_subdir: .hydra - overrides: - hydra: - - hydra.mode=MULTIRUN - task: - - benchmark.input_shapes.batch_size=64 - - benchmark.input_shapes.sequence_length=256 - job: - name: experiment - chdir: true - override_dirname: benchmark.input_shapes.batch_size=64,benchmark.input_shapes.sequence_length=256 - id: '6' - num: 6 - config_name: bge_batch_size_sweep_baseline.yaml - env_set: - CUDA_VISIBLE_DEVICES: '3' - env_copy: [] - config: - override_dirname: - kv_sep: '=' - item_sep: ',' - exclude_keys: [] - runtime: - version: 1.3.2 - version_base: '1.3' - cwd: /workspace/optimum-benchmark/examples/fast-embedders - config_sources: - - path: hydra.conf - schema: pkg - provider: hydra - - path: optimum_benchmark - schema: pkg - provider: main - - path: hydra_plugins.hydra_colorlog.conf - schema: pkg - provider: hydra-colorlog - - path: /workspace/optimum-benchmark/examples/fast-embedders/configs - schema: file - provider: command-line - - path: '' - schema: structured - provider: schema - output_dir: /workspace/optimum-benchmark/examples/fast-embedders/experiments/bge_batch_size_sweep_baseline_batch_size(64)_sequence_length(256)/6 - choices: - benchmark: inference - backend: pytorch - hydra/env: default - hydra/callbacks: null - hydra/job_logging: colorlog - hydra/hydra_logging: colorlog - hydra/hydra_help: default - hydra/help: default - hydra/sweeper: basic - hydra/launcher: basic - hydra/output: default - verbose: false diff --git a/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(64)_sequence_length(256)/6/.hydra/overrides.yaml b/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(64)_sequence_length(256)/6/.hydra/overrides.yaml deleted file mode 100644 index 31aa8a55..00000000 --- a/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(64)_sequence_length(256)/6/.hydra/overrides.yaml +++ /dev/null @@ -1,2 +0,0 @@ -- benchmark.input_shapes.batch_size=64 -- benchmark.input_shapes.sequence_length=256 diff --git a/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(64)_sequence_length(256)/6/hydra_config.yaml b/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(64)_sequence_length(256)/6/hydra_config.yaml deleted file mode 100644 index 78e0093d..00000000 --- a/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(64)_sequence_length(256)/6/hydra_config.yaml +++ /dev/null @@ -1,69 +0,0 @@ -backend: - name: pytorch - version: 
2.0.1 - _target_: optimum_benchmark.backends.pytorch.backend.PyTorchBackend - seed: 42 - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: false - continous_isolation_check: false - delete_cache: false - no_weights: false - device_map: null - torch_dtype: null - disable_grad: true - eval_mode: true - amp_autocast: false - amp_dtype: null - torch_compile: false - torch_compile_config: {} - bettertransformer: false - quantization_scheme: null - quantization_config: {} - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 64 - sequence_length: 256 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: null - can_diffuse: false - can_generate: false - forward_kwargs: {} - generate_kwargs: {} -experiment_name: bge_batch_size_sweep_baseline -model: BAAI/bge-base-en-v1.5 -device: cuda -task: feature-extraction -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.0 - transformers_version: 4.34.0.dev0 - accelerate_version: 0.23.0.dev0 - diffusers_version: 0.21.0.dev0 - python_version: 3.8.10 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB diff --git a/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(64)_sequence_length(256)/6/inference_results.csv b/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(64)_sequence_length(256)/6/inference_results.csv deleted file mode 100644 index af286bcb..00000000 --- a/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(64)_sequence_length(256)/6/inference_results.csv +++ /dev/null @@ -1,2 +0,0 @@ -,forward.latency(s),forward.throughput(samples/s),forward.peak_memory(MB) -0,0.204,314.0,2689 diff --git a/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(8)_sequence_length(256)/3/.hydra/config.yaml b/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(8)_sequence_length(256)/3/.hydra/config.yaml deleted file mode 100644 index 812a7865..00000000 --- a/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(8)_sequence_length(256)/3/.hydra/config.yaml +++ /dev/null @@ -1,69 +0,0 @@ -backend: - name: pytorch - version: ${pytorch_version:} - _target_: optimum_benchmark.backends.pytorch.backend.PyTorchBackend - seed: 42 - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: false - continous_isolation_check: false - delete_cache: false - no_weights: false - device_map: null - torch_dtype: null - disable_grad: ${is_inference:${benchmark.name}} - eval_mode: ${is_inference:${benchmark.name}} - amp_autocast: false - amp_dtype: null - torch_compile: false - torch_compile_config: {} - bettertransformer: false - quantization_scheme: null - quantization_config: {} - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 8 - sequence_length: 256 - num_choices: 1 - feature_size: 
80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: null - can_diffuse: ${can_diffuse:${task}} - can_generate: ${can_generate:${task}} - forward_kwargs: {} - generate_kwargs: {} -experiment_name: bge_batch_size_sweep_baseline -model: BAAI/bge-base-en-v1.5 -device: cuda -task: feature-extraction -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.0 - transformers_version: 4.34.0.dev0 - accelerate_version: 0.23.0.dev0 - diffusers_version: 0.21.0.dev0 - python_version: 3.8.10 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB diff --git a/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(8)_sequence_length(256)/3/.hydra/hydra.yaml b/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(8)_sequence_length(256)/3/.hydra/hydra.yaml deleted file mode 100644 index 783acfbc..00000000 --- a/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(8)_sequence_length(256)/3/.hydra/hydra.yaml +++ /dev/null @@ -1,175 +0,0 @@ -hydra: - run: - dir: experiments/${experiment_name}_batch_size(${benchmark.input_shapes.batch_size})_sequence_length(${benchmark.input_shapes.sequence_length}) - sweep: - dir: experiments/${experiment_name}_batch_size(${benchmark.input_shapes.batch_size})_sequence_length(${benchmark.input_shapes.sequence_length}) - subdir: ${hydra.job.num} - launcher: - _target_: hydra._internal.core_plugins.basic_launcher.BasicLauncher - sweeper: - _target_: hydra._internal.core_plugins.basic_sweeper.BasicSweeper - max_batch_size: null - params: - benchmark.input_shapes.batch_size: 1,2,4,8,16,32,64,128,256,512,1024,2048 - benchmark.input_shapes.sequence_length: '256' - help: - app_name: ${hydra.job.name} - header: '${hydra.help.app_name} is powered by Hydra. - - ' - footer: 'Powered by Hydra (https://hydra.cc) - - Use --hydra-help to view Hydra specific help - - ' - template: '${hydra.help.header} - - == Configuration groups == - - Compose your configuration from those groups (group=option) - - - $APP_CONFIG_GROUPS - - - == Config == - - Override anything in the config (foo.bar=value) - - - $CONFIG - - - ${hydra.help.footer} - - ' - hydra_help: - template: 'Hydra (${hydra.runtime.version}) - - See https://hydra.cc for more info. - - - == Flags == - - $FLAGS_HELP - - - == Configuration groups == - - Compose your configuration from those groups (For example, append hydra/job_logging=disabled - to command line) - - - $HYDRA_CONFIG_GROUPS - - - Use ''--cfg hydra'' to Show the Hydra config. - - ' - hydra_help: ??? 
- hydra_logging: - version: 1 - formatters: - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s' - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - root: - level: INFO - handlers: - - console - disable_existing_loggers: false - job_logging: - version: 1 - formatters: - simple: - format: '[%(asctime)s][%(name)s][%(levelname)s] - %(message)s' - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s] - - %(message)s' - log_colors: - DEBUG: purple - INFO: green - WARNING: yellow - ERROR: red - CRITICAL: red - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - file: - class: logging.FileHandler - formatter: simple - filename: ${hydra.job.name}.log - root: - level: INFO - handlers: - - console - - file - disable_existing_loggers: false - env: {} - mode: MULTIRUN - searchpath: [] - callbacks: {} - output_subdir: .hydra - overrides: - hydra: - - hydra.mode=MULTIRUN - task: - - benchmark.input_shapes.batch_size=8 - - benchmark.input_shapes.sequence_length=256 - job: - name: experiment - chdir: true - override_dirname: benchmark.input_shapes.batch_size=8,benchmark.input_shapes.sequence_length=256 - id: '3' - num: 3 - config_name: bge_batch_size_sweep_baseline.yaml - env_set: - CUDA_VISIBLE_DEVICES: '3' - env_copy: [] - config: - override_dirname: - kv_sep: '=' - item_sep: ',' - exclude_keys: [] - runtime: - version: 1.3.2 - version_base: '1.3' - cwd: /workspace/optimum-benchmark/examples/fast-embedders - config_sources: - - path: hydra.conf - schema: pkg - provider: hydra - - path: optimum_benchmark - schema: pkg - provider: main - - path: hydra_plugins.hydra_colorlog.conf - schema: pkg - provider: hydra-colorlog - - path: /workspace/optimum-benchmark/examples/fast-embedders/configs - schema: file - provider: command-line - - path: '' - schema: structured - provider: schema - output_dir: /workspace/optimum-benchmark/examples/fast-embedders/experiments/bge_batch_size_sweep_baseline_batch_size(8)_sequence_length(256)/3 - choices: - benchmark: inference - backend: pytorch - hydra/env: default - hydra/callbacks: null - hydra/job_logging: colorlog - hydra/hydra_logging: colorlog - hydra/hydra_help: default - hydra/help: default - hydra/sweeper: basic - hydra/launcher: basic - hydra/output: default - verbose: false diff --git a/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(8)_sequence_length(256)/3/.hydra/overrides.yaml b/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(8)_sequence_length(256)/3/.hydra/overrides.yaml deleted file mode 100644 index 854c6355..00000000 --- a/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(8)_sequence_length(256)/3/.hydra/overrides.yaml +++ /dev/null @@ -1,2 +0,0 @@ -- benchmark.input_shapes.batch_size=8 -- benchmark.input_shapes.sequence_length=256 diff --git a/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(8)_sequence_length(256)/3/hydra_config.yaml b/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(8)_sequence_length(256)/3/hydra_config.yaml deleted file mode 100644 index 511e3988..00000000 --- a/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(8)_sequence_length(256)/3/hydra_config.yaml +++ /dev/null @@ -1,69 +0,0 @@ -backend: - name: pytorch - version: 2.0.1 - 
_target_: optimum_benchmark.backends.pytorch.backend.PyTorchBackend - seed: 42 - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: false - continous_isolation_check: false - delete_cache: false - no_weights: false - device_map: null - torch_dtype: null - disable_grad: true - eval_mode: true - amp_autocast: false - amp_dtype: null - torch_compile: false - torch_compile_config: {} - bettertransformer: false - quantization_scheme: null - quantization_config: {} - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 8 - sequence_length: 256 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: null - can_diffuse: false - can_generate: false - forward_kwargs: {} - generate_kwargs: {} -experiment_name: bge_batch_size_sweep_baseline -model: BAAI/bge-base-en-v1.5 -device: cuda -task: feature-extraction -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.0 - transformers_version: 4.34.0.dev0 - accelerate_version: 0.23.0.dev0 - diffusers_version: 0.21.0.dev0 - python_version: 3.8.10 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB diff --git a/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(8)_sequence_length(256)/3/inference_results.csv b/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(8)_sequence_length(256)/3/inference_results.csv deleted file mode 100644 index 202cc59b..00000000 --- a/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(8)_sequence_length(256)/3/inference_results.csv +++ /dev/null @@ -1,2 +0,0 @@ -,forward.latency(s),forward.throughput(samples/s),forward.peak_memory(MB) -0,0.0302,265.0,1995 diff --git a/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(None)_sequence_length(None)/multirun.yaml b/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(None)_sequence_length(None)/multirun.yaml deleted file mode 100644 index 8ff2f6cb..00000000 --- a/examples/fast-mteb/experiments/bge_batch_size_sweep_baseline_batch_size(None)_sequence_length(None)/multirun.yaml +++ /dev/null @@ -1,242 +0,0 @@ -hydra: - run: - dir: experiments/${experiment_name}_batch_size(${benchmark.input_shapes.batch_size})_sequence_length(${benchmark.input_shapes.sequence_length}) - sweep: - dir: experiments/${experiment_name}_batch_size(${benchmark.input_shapes.batch_size})_sequence_length(${benchmark.input_shapes.sequence_length}) - subdir: ${hydra.job.num} - launcher: - _target_: hydra._internal.core_plugins.basic_launcher.BasicLauncher - sweeper: - _target_: hydra._internal.core_plugins.basic_sweeper.BasicSweeper - max_batch_size: null - params: - benchmark.input_shapes.batch_size: 1,2,4,8,16,32,64,128,256,512,1024,2048 - benchmark.input_shapes.sequence_length: '256' - help: - app_name: ${hydra.job.name} - header: '${hydra.help.app_name} is powered by Hydra. 
- - ' - footer: 'Powered by Hydra (https://hydra.cc) - - Use --hydra-help to view Hydra specific help - - ' - template: '${hydra.help.header} - - == Configuration groups == - - Compose your configuration from those groups (group=option) - - - $APP_CONFIG_GROUPS - - - == Config == - - Override anything in the config (foo.bar=value) - - - $CONFIG - - - ${hydra.help.footer} - - ' - hydra_help: - template: 'Hydra (${hydra.runtime.version}) - - See https://hydra.cc for more info. - - - == Flags == - - $FLAGS_HELP - - - == Configuration groups == - - Compose your configuration from those groups (For example, append hydra/job_logging=disabled - to command line) - - - $HYDRA_CONFIG_GROUPS - - - Use ''--cfg hydra'' to Show the Hydra config. - - ' - hydra_help: ??? - hydra_logging: - version: 1 - formatters: - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s' - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - root: - level: INFO - handlers: - - console - disable_existing_loggers: false - job_logging: - version: 1 - formatters: - simple: - format: '[%(asctime)s][%(name)s][%(levelname)s] - %(message)s' - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s] - - %(message)s' - log_colors: - DEBUG: purple - INFO: green - WARNING: yellow - ERROR: red - CRITICAL: red - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - file: - class: logging.FileHandler - formatter: simple - filename: ${hydra.job.name}.log - root: - level: INFO - handlers: - - console - - file - disable_existing_loggers: false - env: {} - mode: MULTIRUN - searchpath: [] - callbacks: {} - output_subdir: .hydra - overrides: - hydra: - - hydra.mode=MULTIRUN - task: [] - job: - name: experiment - chdir: true - override_dirname: '' - id: ??? - num: ??? - config_name: bge_batch_size_sweep_baseline.yaml - env_set: - CUDA_VISIBLE_DEVICES: '3' - env_copy: [] - config: - override_dirname: - kv_sep: '=' - item_sep: ',' - exclude_keys: [] - runtime: - version: 1.3.2 - version_base: '1.3' - cwd: /workspace/optimum-benchmark/examples/fast-embedders - config_sources: - - path: hydra.conf - schema: pkg - provider: hydra - - path: optimum_benchmark - schema: pkg - provider: main - - path: hydra_plugins.hydra_colorlog.conf - schema: pkg - provider: hydra-colorlog - - path: /workspace/optimum-benchmark/examples/fast-embedders/configs - schema: file - provider: command-line - - path: '' - schema: structured - provider: schema - output_dir: ??? 
- choices: - benchmark: inference - backend: pytorch - hydra/env: default - hydra/callbacks: null - hydra/job_logging: colorlog - hydra/hydra_logging: colorlog - hydra/hydra_help: default - hydra/help: default - hydra/sweeper: basic - hydra/launcher: basic - hydra/output: default - verbose: false -backend: - name: pytorch - version: ${pytorch_version:} - _target_: optimum_benchmark.backends.pytorch.backend.PyTorchBackend - seed: 42 - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: false - continous_isolation_check: false - delete_cache: false - no_weights: false - device_map: null - torch_dtype: null - disable_grad: ${is_inference:${benchmark.name}} - eval_mode: ${is_inference:${benchmark.name}} - amp_autocast: false - amp_dtype: null - torch_compile: false - torch_compile_config: {} - bettertransformer: false - quantization_scheme: null - quantization_config: {} - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: null - sequence_length: null - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: null - can_diffuse: ${can_diffuse:${task}} - can_generate: ${can_generate:${task}} - forward_kwargs: {} - generate_kwargs: {} -experiment_name: bge_batch_size_sweep_baseline -model: BAAI/bge-base-en-v1.5 -device: cuda -task: feature-extraction -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.0 - transformers_version: 4.34.0.dev0 - accelerate_version: 0.23.0.dev0 - diffusers_version: 0.21.0.dev0 - python_version: 3.8.10 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB diff --git a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(1)_sequence_length(256)/0/.hydra/config.yaml b/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(1)_sequence_length(256)/0/.hydra/config.yaml deleted file mode 100644 index 07de7b09..00000000 --- a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(1)_sequence_length(256)/0/.hydra/config.yaml +++ /dev/null @@ -1,79 +0,0 @@ -backend: - name: onnxruntime - version: ${onnxruntime_version:} - _target_: optimum_benchmark.backends.onnxruntime.backend.ORTBackend - seed: 42 - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: false - continous_isolation_check: false - delete_cache: false - no_weights: false - export: true - use_cache: true - use_merged: false - torch_dtype: null - provider: CUDAExecutionProvider - provider_options: - device_id: ${infer_device_id:${device}} - use_io_binding: ${io_bind:${device}} - session_options: - enable_profiling: ${is_profiling:${benchmark.name}} - optimization: false - optimization_config: {} - quantization: false - quantization_config: {} - calibration: false - calibration_config: {} - auto_optimization: O4 - auto_optimization_config: {} - auto_quantization: null - auto_quantization_config: {} - use_inference_session: ${is_inference:${benchmark.name}} - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: 
optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 1 - sequence_length: 256 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: null - can_diffuse: ${can_diffuse:${task}} - can_generate: ${can_generate:${task}} - forward_kwargs: {} - generate_kwargs: {} -experiment_name: bge_batch_size_sweep_ort_cuda_o4 -model: BAAI/bge-base-en-v1.5 -device: cuda -task: feature-extraction -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.0 - transformers_version: 4.34.0.dev0 - accelerate_version: 0.23.0.dev0 - diffusers_version: 0.21.0.dev0 - python_version: 3.8.10 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB diff --git a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(1)_sequence_length(256)/0/.hydra/hydra.yaml b/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(1)_sequence_length(256)/0/.hydra/hydra.yaml deleted file mode 100644 index 77b4d0d4..00000000 --- a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(1)_sequence_length(256)/0/.hydra/hydra.yaml +++ /dev/null @@ -1,175 +0,0 @@ -hydra: - run: - dir: experiments/${experiment_name}_batch_size(${benchmark.input_shapes.batch_size})_sequence_length(${benchmark.input_shapes.sequence_length}) - sweep: - dir: experiments/${experiment_name}_batch_size(${benchmark.input_shapes.batch_size})_sequence_length(${benchmark.input_shapes.sequence_length}) - subdir: ${hydra.job.num} - launcher: - _target_: hydra._internal.core_plugins.basic_launcher.BasicLauncher - sweeper: - _target_: hydra._internal.core_plugins.basic_sweeper.BasicSweeper - max_batch_size: null - params: - benchmark.input_shapes.batch_size: 1,2,4,8,16,32,64,128,256,512,1024,2048 - benchmark.input_shapes.sequence_length: '256' - help: - app_name: ${hydra.job.name} - header: '${hydra.help.app_name} is powered by Hydra. - - ' - footer: 'Powered by Hydra (https://hydra.cc) - - Use --hydra-help to view Hydra specific help - - ' - template: '${hydra.help.header} - - == Configuration groups == - - Compose your configuration from those groups (group=option) - - - $APP_CONFIG_GROUPS - - - == Config == - - Override anything in the config (foo.bar=value) - - - $CONFIG - - - ${hydra.help.footer} - - ' - hydra_help: - template: 'Hydra (${hydra.runtime.version}) - - See https://hydra.cc for more info. - - - == Flags == - - $FLAGS_HELP - - - == Configuration groups == - - Compose your configuration from those groups (For example, append hydra/job_logging=disabled - to command line) - - - $HYDRA_CONFIG_GROUPS - - - Use ''--cfg hydra'' to Show the Hydra config. - - ' - hydra_help: ??? 
- hydra_logging: - version: 1 - formatters: - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s' - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - root: - level: INFO - handlers: - - console - disable_existing_loggers: false - job_logging: - version: 1 - formatters: - simple: - format: '[%(asctime)s][%(name)s][%(levelname)s] - %(message)s' - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s] - - %(message)s' - log_colors: - DEBUG: purple - INFO: green - WARNING: yellow - ERROR: red - CRITICAL: red - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - file: - class: logging.FileHandler - formatter: simple - filename: ${hydra.job.name}.log - root: - level: INFO - handlers: - - console - - file - disable_existing_loggers: false - env: {} - mode: MULTIRUN - searchpath: [] - callbacks: {} - output_subdir: .hydra - overrides: - hydra: - - hydra.mode=MULTIRUN - task: - - benchmark.input_shapes.batch_size=1 - - benchmark.input_shapes.sequence_length=256 - job: - name: experiment - chdir: true - override_dirname: benchmark.input_shapes.batch_size=1,benchmark.input_shapes.sequence_length=256 - id: '0' - num: 0 - config_name: bge_batch_size_sweep_ort_cuda_o4.yaml - env_set: - CUDA_VISIBLE_DEVICES: '3' - env_copy: [] - config: - override_dirname: - kv_sep: '=' - item_sep: ',' - exclude_keys: [] - runtime: - version: 1.3.2 - version_base: '1.3' - cwd: /workspace/optimum-benchmark/examples/fast-embedders - config_sources: - - path: hydra.conf - schema: pkg - provider: hydra - - path: optimum_benchmark - schema: pkg - provider: main - - path: hydra_plugins.hydra_colorlog.conf - schema: pkg - provider: hydra-colorlog - - path: /workspace/optimum-benchmark/examples/fast-embedders/configs - schema: file - provider: command-line - - path: '' - schema: structured - provider: schema - output_dir: /workspace/optimum-benchmark/examples/fast-embedders/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(1)_sequence_length(256)/0 - choices: - benchmark: inference - backend: onnxruntime - hydra/env: default - hydra/callbacks: null - hydra/job_logging: colorlog - hydra/hydra_logging: colorlog - hydra/hydra_help: default - hydra/help: default - hydra/sweeper: basic - hydra/launcher: basic - hydra/output: default - verbose: false diff --git a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(1)_sequence_length(256)/0/.hydra/overrides.yaml b/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(1)_sequence_length(256)/0/.hydra/overrides.yaml deleted file mode 100644 index 68453b03..00000000 --- a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(1)_sequence_length(256)/0/.hydra/overrides.yaml +++ /dev/null @@ -1,2 +0,0 @@ -- benchmark.input_shapes.batch_size=1 -- benchmark.input_shapes.sequence_length=256 diff --git a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(1)_sequence_length(256)/0/hydra_config.yaml b/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(1)_sequence_length(256)/0/hydra_config.yaml deleted file mode 100644 index 8a12a256..00000000 --- a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(1)_sequence_length(256)/0/hydra_config.yaml +++ /dev/null @@ -1,80 +0,0 @@ -backend: - name: 
onnxruntime - version: ort-gpu:1.15.1 - _target_: optimum_benchmark.backends.onnxruntime.backend.ORTBackend - seed: 42 - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: false - continous_isolation_check: false - delete_cache: false - no_weights: false - export: true - use_cache: true - use_merged: false - torch_dtype: null - provider: CUDAExecutionProvider - provider_options: - device_id: 0 - use_io_binding: false - session_options: - enable_profiling: false - optimization: false - optimization_config: {} - quantization: false - quantization_config: {} - calibration: false - calibration_config: {} - auto_optimization: O4 - auto_optimization_config: - disable_shape_inference: false - auto_quantization: null - auto_quantization_config: {} - use_inference_session: true - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 1 - sequence_length: 256 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: null - can_diffuse: false - can_generate: false - forward_kwargs: {} - generate_kwargs: {} -experiment_name: bge_batch_size_sweep_ort_cuda_o4 -model: BAAI/bge-base-en-v1.5 -device: cuda -task: feature-extraction -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.0 - transformers_version: 4.34.0.dev0 - accelerate_version: 0.23.0.dev0 - diffusers_version: 0.21.0.dev0 - python_version: 3.8.10 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB diff --git a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(1)_sequence_length(256)/0/inference_results.csv b/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(1)_sequence_length(256)/0/inference_results.csv deleted file mode 100644 index 095db4b2..00000000 --- a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(1)_sequence_length(256)/0/inference_results.csv +++ /dev/null @@ -1,2 +0,0 @@ -,forward.latency(s),forward.throughput(samples/s),forward.peak_memory(MB) -0,0.00145,690.0,3498 diff --git a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(1024)_sequence_length(256)/10/.hydra/config.yaml b/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(1024)_sequence_length(256)/10/.hydra/config.yaml deleted file mode 100644 index 7d772929..00000000 --- a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(1024)_sequence_length(256)/10/.hydra/config.yaml +++ /dev/null @@ -1,79 +0,0 @@ -backend: - name: onnxruntime - version: ${onnxruntime_version:} - _target_: optimum_benchmark.backends.onnxruntime.backend.ORTBackend - seed: 42 - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: false - continous_isolation_check: false - delete_cache: false - no_weights: false - export: true - use_cache: true - use_merged: false - torch_dtype: null - provider: CUDAExecutionProvider - provider_options: - device_id: ${infer_device_id:${device}} - use_io_binding: ${io_bind:${device}} - session_options: - enable_profiling: ${is_profiling:${benchmark.name}} - 
optimization: false - optimization_config: {} - quantization: false - quantization_config: {} - calibration: false - calibration_config: {} - auto_optimization: O4 - auto_optimization_config: {} - auto_quantization: null - auto_quantization_config: {} - use_inference_session: ${is_inference:${benchmark.name}} - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 1024 - sequence_length: 256 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: null - can_diffuse: ${can_diffuse:${task}} - can_generate: ${can_generate:${task}} - forward_kwargs: {} - generate_kwargs: {} -experiment_name: bge_batch_size_sweep_ort_cuda_o4 -model: BAAI/bge-base-en-v1.5 -device: cuda -task: feature-extraction -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.0 - transformers_version: 4.34.0.dev0 - accelerate_version: 0.23.0.dev0 - diffusers_version: 0.21.0.dev0 - python_version: 3.8.10 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB diff --git a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(1024)_sequence_length(256)/10/.hydra/hydra.yaml b/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(1024)_sequence_length(256)/10/.hydra/hydra.yaml deleted file mode 100644 index c836f4da..00000000 --- a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(1024)_sequence_length(256)/10/.hydra/hydra.yaml +++ /dev/null @@ -1,175 +0,0 @@ -hydra: - run: - dir: experiments/${experiment_name}_batch_size(${benchmark.input_shapes.batch_size})_sequence_length(${benchmark.input_shapes.sequence_length}) - sweep: - dir: experiments/${experiment_name}_batch_size(${benchmark.input_shapes.batch_size})_sequence_length(${benchmark.input_shapes.sequence_length}) - subdir: ${hydra.job.num} - launcher: - _target_: hydra._internal.core_plugins.basic_launcher.BasicLauncher - sweeper: - _target_: hydra._internal.core_plugins.basic_sweeper.BasicSweeper - max_batch_size: null - params: - benchmark.input_shapes.batch_size: 1,2,4,8,16,32,64,128,256,512,1024,2048 - benchmark.input_shapes.sequence_length: '256' - help: - app_name: ${hydra.job.name} - header: '${hydra.help.app_name} is powered by Hydra. - - ' - footer: 'Powered by Hydra (https://hydra.cc) - - Use --hydra-help to view Hydra specific help - - ' - template: '${hydra.help.header} - - == Configuration groups == - - Compose your configuration from those groups (group=option) - - - $APP_CONFIG_GROUPS - - - == Config == - - Override anything in the config (foo.bar=value) - - - $CONFIG - - - ${hydra.help.footer} - - ' - hydra_help: - template: 'Hydra (${hydra.runtime.version}) - - See https://hydra.cc for more info. - - - == Flags == - - $FLAGS_HELP - - - == Configuration groups == - - Compose your configuration from those groups (For example, append hydra/job_logging=disabled - to command line) - - - $HYDRA_CONFIG_GROUPS - - - Use ''--cfg hydra'' to Show the Hydra config. - - ' - hydra_help: ??? 
- hydra_logging: - version: 1 - formatters: - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s' - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - root: - level: INFO - handlers: - - console - disable_existing_loggers: false - job_logging: - version: 1 - formatters: - simple: - format: '[%(asctime)s][%(name)s][%(levelname)s] - %(message)s' - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s] - - %(message)s' - log_colors: - DEBUG: purple - INFO: green - WARNING: yellow - ERROR: red - CRITICAL: red - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - file: - class: logging.FileHandler - formatter: simple - filename: ${hydra.job.name}.log - root: - level: INFO - handlers: - - console - - file - disable_existing_loggers: false - env: {} - mode: MULTIRUN - searchpath: [] - callbacks: {} - output_subdir: .hydra - overrides: - hydra: - - hydra.mode=MULTIRUN - task: - - benchmark.input_shapes.batch_size=1024 - - benchmark.input_shapes.sequence_length=256 - job: - name: experiment - chdir: true - override_dirname: benchmark.input_shapes.batch_size=1024,benchmark.input_shapes.sequence_length=256 - id: '10' - num: 10 - config_name: bge_batch_size_sweep_ort_cuda_o4.yaml - env_set: - CUDA_VISIBLE_DEVICES: '3' - env_copy: [] - config: - override_dirname: - kv_sep: '=' - item_sep: ',' - exclude_keys: [] - runtime: - version: 1.3.2 - version_base: '1.3' - cwd: /workspace/optimum-benchmark/examples/fast-embedders - config_sources: - - path: hydra.conf - schema: pkg - provider: hydra - - path: optimum_benchmark - schema: pkg - provider: main - - path: hydra_plugins.hydra_colorlog.conf - schema: pkg - provider: hydra-colorlog - - path: /workspace/optimum-benchmark/examples/fast-embedders/configs - schema: file - provider: command-line - - path: '' - schema: structured - provider: schema - output_dir: /workspace/optimum-benchmark/examples/fast-embedders/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(1024)_sequence_length(256)/10 - choices: - benchmark: inference - backend: onnxruntime - hydra/env: default - hydra/callbacks: null - hydra/job_logging: colorlog - hydra/hydra_logging: colorlog - hydra/hydra_help: default - hydra/help: default - hydra/sweeper: basic - hydra/launcher: basic - hydra/output: default - verbose: false diff --git a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(1024)_sequence_length(256)/10/.hydra/overrides.yaml b/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(1024)_sequence_length(256)/10/.hydra/overrides.yaml deleted file mode 100644 index 874d4c51..00000000 --- a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(1024)_sequence_length(256)/10/.hydra/overrides.yaml +++ /dev/null @@ -1,2 +0,0 @@ -- benchmark.input_shapes.batch_size=1024 -- benchmark.input_shapes.sequence_length=256 diff --git a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(1024)_sequence_length(256)/10/hydra_config.yaml b/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(1024)_sequence_length(256)/10/hydra_config.yaml deleted file mode 100644 index 1c9acd28..00000000 --- a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(1024)_sequence_length(256)/10/hydra_config.yaml +++ /dev/null 
@@ -1,80 +0,0 @@ -backend: - name: onnxruntime - version: ort-gpu:1.15.1 - _target_: optimum_benchmark.backends.onnxruntime.backend.ORTBackend - seed: 42 - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: false - continous_isolation_check: false - delete_cache: false - no_weights: false - export: true - use_cache: true - use_merged: false - torch_dtype: null - provider: CUDAExecutionProvider - provider_options: - device_id: 0 - use_io_binding: false - session_options: - enable_profiling: false - optimization: false - optimization_config: {} - quantization: false - quantization_config: {} - calibration: false - calibration_config: {} - auto_optimization: O4 - auto_optimization_config: - disable_shape_inference: false - auto_quantization: null - auto_quantization_config: {} - use_inference_session: true - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 1024 - sequence_length: 256 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: null - can_diffuse: false - can_generate: false - forward_kwargs: {} - generate_kwargs: {} -experiment_name: bge_batch_size_sweep_ort_cuda_o4 -model: BAAI/bge-base-en-v1.5 -device: cuda -task: feature-extraction -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.0 - transformers_version: 4.34.0.dev0 - accelerate_version: 0.23.0.dev0 - diffusers_version: 0.21.0.dev0 - python_version: 3.8.10 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB diff --git a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(1024)_sequence_length(256)/10/inference_results.csv b/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(1024)_sequence_length(256)/10/inference_results.csv deleted file mode 100644 index 7bdbdf81..00000000 --- a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(1024)_sequence_length(256)/10/inference_results.csv +++ /dev/null @@ -1,2 +0,0 @@ -,forward.latency(s),forward.throughput(samples/s),forward.peak_memory(MB) -0,0.595,1720.0,23700 diff --git a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(128)_sequence_length(256)/7/.hydra/config.yaml b/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(128)_sequence_length(256)/7/.hydra/config.yaml deleted file mode 100644 index 58a6fccd..00000000 --- a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(128)_sequence_length(256)/7/.hydra/config.yaml +++ /dev/null @@ -1,79 +0,0 @@ -backend: - name: onnxruntime - version: ${onnxruntime_version:} - _target_: optimum_benchmark.backends.onnxruntime.backend.ORTBackend - seed: 42 - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: false - continous_isolation_check: false - delete_cache: false - no_weights: false - export: true - use_cache: true - use_merged: false - torch_dtype: null - provider: CUDAExecutionProvider - provider_options: - device_id: ${infer_device_id:${device}} - use_io_binding: ${io_bind:${device}} - session_options: - enable_profiling: 
${is_profiling:${benchmark.name}} - optimization: false - optimization_config: {} - quantization: false - quantization_config: {} - calibration: false - calibration_config: {} - auto_optimization: O4 - auto_optimization_config: {} - auto_quantization: null - auto_quantization_config: {} - use_inference_session: ${is_inference:${benchmark.name}} - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 128 - sequence_length: 256 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: null - can_diffuse: ${can_diffuse:${task}} - can_generate: ${can_generate:${task}} - forward_kwargs: {} - generate_kwargs: {} -experiment_name: bge_batch_size_sweep_ort_cuda_o4 -model: BAAI/bge-base-en-v1.5 -device: cuda -task: feature-extraction -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.0 - transformers_version: 4.34.0.dev0 - accelerate_version: 0.23.0.dev0 - diffusers_version: 0.21.0.dev0 - python_version: 3.8.10 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB diff --git a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(128)_sequence_length(256)/7/.hydra/hydra.yaml b/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(128)_sequence_length(256)/7/.hydra/hydra.yaml deleted file mode 100644 index 34efa3ac..00000000 --- a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(128)_sequence_length(256)/7/.hydra/hydra.yaml +++ /dev/null @@ -1,175 +0,0 @@ -hydra: - run: - dir: experiments/${experiment_name}_batch_size(${benchmark.input_shapes.batch_size})_sequence_length(${benchmark.input_shapes.sequence_length}) - sweep: - dir: experiments/${experiment_name}_batch_size(${benchmark.input_shapes.batch_size})_sequence_length(${benchmark.input_shapes.sequence_length}) - subdir: ${hydra.job.num} - launcher: - _target_: hydra._internal.core_plugins.basic_launcher.BasicLauncher - sweeper: - _target_: hydra._internal.core_plugins.basic_sweeper.BasicSweeper - max_batch_size: null - params: - benchmark.input_shapes.batch_size: 1,2,4,8,16,32,64,128,256,512,1024,2048 - benchmark.input_shapes.sequence_length: '256' - help: - app_name: ${hydra.job.name} - header: '${hydra.help.app_name} is powered by Hydra. - - ' - footer: 'Powered by Hydra (https://hydra.cc) - - Use --hydra-help to view Hydra specific help - - ' - template: '${hydra.help.header} - - == Configuration groups == - - Compose your configuration from those groups (group=option) - - - $APP_CONFIG_GROUPS - - - == Config == - - Override anything in the config (foo.bar=value) - - - $CONFIG - - - ${hydra.help.footer} - - ' - hydra_help: - template: 'Hydra (${hydra.runtime.version}) - - See https://hydra.cc for more info. - - - == Flags == - - $FLAGS_HELP - - - == Configuration groups == - - Compose your configuration from those groups (For example, append hydra/job_logging=disabled - to command line) - - - $HYDRA_CONFIG_GROUPS - - - Use ''--cfg hydra'' to Show the Hydra config. - - ' - hydra_help: ??? 
- hydra_logging: - version: 1 - formatters: - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s' - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - root: - level: INFO - handlers: - - console - disable_existing_loggers: false - job_logging: - version: 1 - formatters: - simple: - format: '[%(asctime)s][%(name)s][%(levelname)s] - %(message)s' - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s] - - %(message)s' - log_colors: - DEBUG: purple - INFO: green - WARNING: yellow - ERROR: red - CRITICAL: red - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - file: - class: logging.FileHandler - formatter: simple - filename: ${hydra.job.name}.log - root: - level: INFO - handlers: - - console - - file - disable_existing_loggers: false - env: {} - mode: MULTIRUN - searchpath: [] - callbacks: {} - output_subdir: .hydra - overrides: - hydra: - - hydra.mode=MULTIRUN - task: - - benchmark.input_shapes.batch_size=128 - - benchmark.input_shapes.sequence_length=256 - job: - name: experiment - chdir: true - override_dirname: benchmark.input_shapes.batch_size=128,benchmark.input_shapes.sequence_length=256 - id: '7' - num: 7 - config_name: bge_batch_size_sweep_ort_cuda_o4.yaml - env_set: - CUDA_VISIBLE_DEVICES: '3' - env_copy: [] - config: - override_dirname: - kv_sep: '=' - item_sep: ',' - exclude_keys: [] - runtime: - version: 1.3.2 - version_base: '1.3' - cwd: /workspace/optimum-benchmark/examples/fast-embedders - config_sources: - - path: hydra.conf - schema: pkg - provider: hydra - - path: optimum_benchmark - schema: pkg - provider: main - - path: hydra_plugins.hydra_colorlog.conf - schema: pkg - provider: hydra-colorlog - - path: /workspace/optimum-benchmark/examples/fast-embedders/configs - schema: file - provider: command-line - - path: '' - schema: structured - provider: schema - output_dir: /workspace/optimum-benchmark/examples/fast-embedders/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(128)_sequence_length(256)/7 - choices: - benchmark: inference - backend: onnxruntime - hydra/env: default - hydra/callbacks: null - hydra/job_logging: colorlog - hydra/hydra_logging: colorlog - hydra/hydra_help: default - hydra/help: default - hydra/sweeper: basic - hydra/launcher: basic - hydra/output: default - verbose: false diff --git a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(128)_sequence_length(256)/7/.hydra/overrides.yaml b/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(128)_sequence_length(256)/7/.hydra/overrides.yaml deleted file mode 100644 index 459be207..00000000 --- a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(128)_sequence_length(256)/7/.hydra/overrides.yaml +++ /dev/null @@ -1,2 +0,0 @@ -- benchmark.input_shapes.batch_size=128 -- benchmark.input_shapes.sequence_length=256 diff --git a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(128)_sequence_length(256)/7/hydra_config.yaml b/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(128)_sequence_length(256)/7/hydra_config.yaml deleted file mode 100644 index b7c8696e..00000000 --- a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(128)_sequence_length(256)/7/hydra_config.yaml +++ /dev/null @@ -1,80 +0,0 @@ 
-backend: - name: onnxruntime - version: ort-gpu:1.15.1 - _target_: optimum_benchmark.backends.onnxruntime.backend.ORTBackend - seed: 42 - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: false - continous_isolation_check: false - delete_cache: false - no_weights: false - export: true - use_cache: true - use_merged: false - torch_dtype: null - provider: CUDAExecutionProvider - provider_options: - device_id: 0 - use_io_binding: false - session_options: - enable_profiling: false - optimization: false - optimization_config: {} - quantization: false - quantization_config: {} - calibration: false - calibration_config: {} - auto_optimization: O4 - auto_optimization_config: - disable_shape_inference: false - auto_quantization: null - auto_quantization_config: {} - use_inference_session: true - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 128 - sequence_length: 256 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: null - can_diffuse: false - can_generate: false - forward_kwargs: {} - generate_kwargs: {} -experiment_name: bge_batch_size_sweep_ort_cuda_o4 -model: BAAI/bge-base-en-v1.5 -device: cuda -task: feature-extraction -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.0 - transformers_version: 4.34.0.dev0 - accelerate_version: 0.23.0.dev0 - diffusers_version: 0.21.0.dev0 - python_version: 3.8.10 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB diff --git a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(128)_sequence_length(256)/7/inference_results.csv b/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(128)_sequence_length(256)/7/inference_results.csv deleted file mode 100644 index 52458f7e..00000000 --- a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(128)_sequence_length(256)/7/inference_results.csv +++ /dev/null @@ -1,2 +0,0 @@ -,forward.latency(s),forward.throughput(samples/s),forward.peak_memory(MB) -0,0.0639,2000.0,5583 diff --git a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(16)_sequence_length(256)/4/.hydra/config.yaml b/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(16)_sequence_length(256)/4/.hydra/config.yaml deleted file mode 100644 index 6d8ae445..00000000 --- a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(16)_sequence_length(256)/4/.hydra/config.yaml +++ /dev/null @@ -1,79 +0,0 @@ -backend: - name: onnxruntime - version: ${onnxruntime_version:} - _target_: optimum_benchmark.backends.onnxruntime.backend.ORTBackend - seed: 42 - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: false - continous_isolation_check: false - delete_cache: false - no_weights: false - export: true - use_cache: true - use_merged: false - torch_dtype: null - provider: CUDAExecutionProvider - provider_options: - device_id: ${infer_device_id:${device}} - use_io_binding: ${io_bind:${device}} - session_options: - enable_profiling: 
${is_profiling:${benchmark.name}} - optimization: false - optimization_config: {} - quantization: false - quantization_config: {} - calibration: false - calibration_config: {} - auto_optimization: O4 - auto_optimization_config: {} - auto_quantization: null - auto_quantization_config: {} - use_inference_session: ${is_inference:${benchmark.name}} - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 16 - sequence_length: 256 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: null - can_diffuse: ${can_diffuse:${task}} - can_generate: ${can_generate:${task}} - forward_kwargs: {} - generate_kwargs: {} -experiment_name: bge_batch_size_sweep_ort_cuda_o4 -model: BAAI/bge-base-en-v1.5 -device: cuda -task: feature-extraction -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.0 - transformers_version: 4.34.0.dev0 - accelerate_version: 0.23.0.dev0 - diffusers_version: 0.21.0.dev0 - python_version: 3.8.10 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB diff --git a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(16)_sequence_length(256)/4/.hydra/hydra.yaml b/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(16)_sequence_length(256)/4/.hydra/hydra.yaml deleted file mode 100644 index c91dd995..00000000 --- a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(16)_sequence_length(256)/4/.hydra/hydra.yaml +++ /dev/null @@ -1,175 +0,0 @@ -hydra: - run: - dir: experiments/${experiment_name}_batch_size(${benchmark.input_shapes.batch_size})_sequence_length(${benchmark.input_shapes.sequence_length}) - sweep: - dir: experiments/${experiment_name}_batch_size(${benchmark.input_shapes.batch_size})_sequence_length(${benchmark.input_shapes.sequence_length}) - subdir: ${hydra.job.num} - launcher: - _target_: hydra._internal.core_plugins.basic_launcher.BasicLauncher - sweeper: - _target_: hydra._internal.core_plugins.basic_sweeper.BasicSweeper - max_batch_size: null - params: - benchmark.input_shapes.batch_size: 1,2,4,8,16,32,64,128,256,512,1024,2048 - benchmark.input_shapes.sequence_length: '256' - help: - app_name: ${hydra.job.name} - header: '${hydra.help.app_name} is powered by Hydra. - - ' - footer: 'Powered by Hydra (https://hydra.cc) - - Use --hydra-help to view Hydra specific help - - ' - template: '${hydra.help.header} - - == Configuration groups == - - Compose your configuration from those groups (group=option) - - - $APP_CONFIG_GROUPS - - - == Config == - - Override anything in the config (foo.bar=value) - - - $CONFIG - - - ${hydra.help.footer} - - ' - hydra_help: - template: 'Hydra (${hydra.runtime.version}) - - See https://hydra.cc for more info. - - - == Flags == - - $FLAGS_HELP - - - == Configuration groups == - - Compose your configuration from those groups (For example, append hydra/job_logging=disabled - to command line) - - - $HYDRA_CONFIG_GROUPS - - - Use ''--cfg hydra'' to Show the Hydra config. - - ' - hydra_help: ??? 
- hydra_logging: - version: 1 - formatters: - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s' - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - root: - level: INFO - handlers: - - console - disable_existing_loggers: false - job_logging: - version: 1 - formatters: - simple: - format: '[%(asctime)s][%(name)s][%(levelname)s] - %(message)s' - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s] - - %(message)s' - log_colors: - DEBUG: purple - INFO: green - WARNING: yellow - ERROR: red - CRITICAL: red - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - file: - class: logging.FileHandler - formatter: simple - filename: ${hydra.job.name}.log - root: - level: INFO - handlers: - - console - - file - disable_existing_loggers: false - env: {} - mode: MULTIRUN - searchpath: [] - callbacks: {} - output_subdir: .hydra - overrides: - hydra: - - hydra.mode=MULTIRUN - task: - - benchmark.input_shapes.batch_size=16 - - benchmark.input_shapes.sequence_length=256 - job: - name: experiment - chdir: true - override_dirname: benchmark.input_shapes.batch_size=16,benchmark.input_shapes.sequence_length=256 - id: '4' - num: 4 - config_name: bge_batch_size_sweep_ort_cuda_o4.yaml - env_set: - CUDA_VISIBLE_DEVICES: '3' - env_copy: [] - config: - override_dirname: - kv_sep: '=' - item_sep: ',' - exclude_keys: [] - runtime: - version: 1.3.2 - version_base: '1.3' - cwd: /workspace/optimum-benchmark/examples/fast-embedders - config_sources: - - path: hydra.conf - schema: pkg - provider: hydra - - path: optimum_benchmark - schema: pkg - provider: main - - path: hydra_plugins.hydra_colorlog.conf - schema: pkg - provider: hydra-colorlog - - path: /workspace/optimum-benchmark/examples/fast-embedders/configs - schema: file - provider: command-line - - path: '' - schema: structured - provider: schema - output_dir: /workspace/optimum-benchmark/examples/fast-embedders/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(16)_sequence_length(256)/4 - choices: - benchmark: inference - backend: onnxruntime - hydra/env: default - hydra/callbacks: null - hydra/job_logging: colorlog - hydra/hydra_logging: colorlog - hydra/hydra_help: default - hydra/help: default - hydra/sweeper: basic - hydra/launcher: basic - hydra/output: default - verbose: false diff --git a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(16)_sequence_length(256)/4/.hydra/overrides.yaml b/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(16)_sequence_length(256)/4/.hydra/overrides.yaml deleted file mode 100644 index 9e742a69..00000000 --- a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(16)_sequence_length(256)/4/.hydra/overrides.yaml +++ /dev/null @@ -1,2 +0,0 @@ -- benchmark.input_shapes.batch_size=16 -- benchmark.input_shapes.sequence_length=256 diff --git a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(16)_sequence_length(256)/4/hydra_config.yaml b/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(16)_sequence_length(256)/4/hydra_config.yaml deleted file mode 100644 index d346a163..00000000 --- a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(16)_sequence_length(256)/4/hydra_config.yaml +++ /dev/null @@ -1,80 +0,0 @@ -backend: - 
name: onnxruntime - version: ort-gpu:1.15.1 - _target_: optimum_benchmark.backends.onnxruntime.backend.ORTBackend - seed: 42 - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: false - continous_isolation_check: false - delete_cache: false - no_weights: false - export: true - use_cache: true - use_merged: false - torch_dtype: null - provider: CUDAExecutionProvider - provider_options: - device_id: 0 - use_io_binding: false - session_options: - enable_profiling: false - optimization: false - optimization_config: {} - quantization: false - quantization_config: {} - calibration: false - calibration_config: {} - auto_optimization: O4 - auto_optimization_config: - disable_shape_inference: false - auto_quantization: null - auto_quantization_config: {} - use_inference_session: true - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 16 - sequence_length: 256 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: null - can_diffuse: false - can_generate: false - forward_kwargs: {} - generate_kwargs: {} -experiment_name: bge_batch_size_sweep_ort_cuda_o4 -model: BAAI/bge-base-en-v1.5 -device: cuda -task: feature-extraction -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.0 - transformers_version: 4.34.0.dev0 - accelerate_version: 0.23.0.dev0 - diffusers_version: 0.21.0.dev0 - python_version: 3.8.10 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB diff --git a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(16)_sequence_length(256)/4/inference_results.csv b/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(16)_sequence_length(256)/4/inference_results.csv deleted file mode 100644 index 1324094c..00000000 --- a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(16)_sequence_length(256)/4/inference_results.csv +++ /dev/null @@ -1,2 +0,0 @@ -,forward.latency(s),forward.throughput(samples/s),forward.peak_memory(MB) -0,0.00883,1810.0,3767 diff --git a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(2)_sequence_length(256)/1/.hydra/config.yaml b/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(2)_sequence_length(256)/1/.hydra/config.yaml deleted file mode 100644 index 60bc9d90..00000000 --- a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(2)_sequence_length(256)/1/.hydra/config.yaml +++ /dev/null @@ -1,79 +0,0 @@ -backend: - name: onnxruntime - version: ${onnxruntime_version:} - _target_: optimum_benchmark.backends.onnxruntime.backend.ORTBackend - seed: 42 - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: false - continous_isolation_check: false - delete_cache: false - no_weights: false - export: true - use_cache: true - use_merged: false - torch_dtype: null - provider: CUDAExecutionProvider - provider_options: - device_id: ${infer_device_id:${device}} - use_io_binding: ${io_bind:${device}} - session_options: - enable_profiling: ${is_profiling:${benchmark.name}} - optimization: 
false - optimization_config: {} - quantization: false - quantization_config: {} - calibration: false - calibration_config: {} - auto_optimization: O4 - auto_optimization_config: {} - auto_quantization: null - auto_quantization_config: {} - use_inference_session: ${is_inference:${benchmark.name}} - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 2 - sequence_length: 256 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: null - can_diffuse: ${can_diffuse:${task}} - can_generate: ${can_generate:${task}} - forward_kwargs: {} - generate_kwargs: {} -experiment_name: bge_batch_size_sweep_ort_cuda_o4 -model: BAAI/bge-base-en-v1.5 -device: cuda -task: feature-extraction -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.0 - transformers_version: 4.34.0.dev0 - accelerate_version: 0.23.0.dev0 - diffusers_version: 0.21.0.dev0 - python_version: 3.8.10 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB diff --git a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(2)_sequence_length(256)/1/.hydra/hydra.yaml b/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(2)_sequence_length(256)/1/.hydra/hydra.yaml deleted file mode 100644 index 2f2b897e..00000000 --- a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(2)_sequence_length(256)/1/.hydra/hydra.yaml +++ /dev/null @@ -1,175 +0,0 @@ -hydra: - run: - dir: experiments/${experiment_name}_batch_size(${benchmark.input_shapes.batch_size})_sequence_length(${benchmark.input_shapes.sequence_length}) - sweep: - dir: experiments/${experiment_name}_batch_size(${benchmark.input_shapes.batch_size})_sequence_length(${benchmark.input_shapes.sequence_length}) - subdir: ${hydra.job.num} - launcher: - _target_: hydra._internal.core_plugins.basic_launcher.BasicLauncher - sweeper: - _target_: hydra._internal.core_plugins.basic_sweeper.BasicSweeper - max_batch_size: null - params: - benchmark.input_shapes.batch_size: 1,2,4,8,16,32,64,128,256,512,1024,2048 - benchmark.input_shapes.sequence_length: '256' - help: - app_name: ${hydra.job.name} - header: '${hydra.help.app_name} is powered by Hydra. - - ' - footer: 'Powered by Hydra (https://hydra.cc) - - Use --hydra-help to view Hydra specific help - - ' - template: '${hydra.help.header} - - == Configuration groups == - - Compose your configuration from those groups (group=option) - - - $APP_CONFIG_GROUPS - - - == Config == - - Override anything in the config (foo.bar=value) - - - $CONFIG - - - ${hydra.help.footer} - - ' - hydra_help: - template: 'Hydra (${hydra.runtime.version}) - - See https://hydra.cc for more info. - - - == Flags == - - $FLAGS_HELP - - - == Configuration groups == - - Compose your configuration from those groups (For example, append hydra/job_logging=disabled - to command line) - - - $HYDRA_CONFIG_GROUPS - - - Use ''--cfg hydra'' to Show the Hydra config. - - ' - hydra_help: ??? 
- hydra_logging: - version: 1 - formatters: - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s' - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - root: - level: INFO - handlers: - - console - disable_existing_loggers: false - job_logging: - version: 1 - formatters: - simple: - format: '[%(asctime)s][%(name)s][%(levelname)s] - %(message)s' - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s] - - %(message)s' - log_colors: - DEBUG: purple - INFO: green - WARNING: yellow - ERROR: red - CRITICAL: red - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - file: - class: logging.FileHandler - formatter: simple - filename: ${hydra.job.name}.log - root: - level: INFO - handlers: - - console - - file - disable_existing_loggers: false - env: {} - mode: MULTIRUN - searchpath: [] - callbacks: {} - output_subdir: .hydra - overrides: - hydra: - - hydra.mode=MULTIRUN - task: - - benchmark.input_shapes.batch_size=2 - - benchmark.input_shapes.sequence_length=256 - job: - name: experiment - chdir: true - override_dirname: benchmark.input_shapes.batch_size=2,benchmark.input_shapes.sequence_length=256 - id: '1' - num: 1 - config_name: bge_batch_size_sweep_ort_cuda_o4.yaml - env_set: - CUDA_VISIBLE_DEVICES: '3' - env_copy: [] - config: - override_dirname: - kv_sep: '=' - item_sep: ',' - exclude_keys: [] - runtime: - version: 1.3.2 - version_base: '1.3' - cwd: /workspace/optimum-benchmark/examples/fast-embedders - config_sources: - - path: hydra.conf - schema: pkg - provider: hydra - - path: optimum_benchmark - schema: pkg - provider: main - - path: hydra_plugins.hydra_colorlog.conf - schema: pkg - provider: hydra-colorlog - - path: /workspace/optimum-benchmark/examples/fast-embedders/configs - schema: file - provider: command-line - - path: '' - schema: structured - provider: schema - output_dir: /workspace/optimum-benchmark/examples/fast-embedders/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(2)_sequence_length(256)/1 - choices: - benchmark: inference - backend: onnxruntime - hydra/env: default - hydra/callbacks: null - hydra/job_logging: colorlog - hydra/hydra_logging: colorlog - hydra/hydra_help: default - hydra/help: default - hydra/sweeper: basic - hydra/launcher: basic - hydra/output: default - verbose: false diff --git a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(2)_sequence_length(256)/1/.hydra/overrides.yaml b/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(2)_sequence_length(256)/1/.hydra/overrides.yaml deleted file mode 100644 index bf46a09f..00000000 --- a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(2)_sequence_length(256)/1/.hydra/overrides.yaml +++ /dev/null @@ -1,2 +0,0 @@ -- benchmark.input_shapes.batch_size=2 -- benchmark.input_shapes.sequence_length=256 diff --git a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(2)_sequence_length(256)/1/hydra_config.yaml b/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(2)_sequence_length(256)/1/hydra_config.yaml deleted file mode 100644 index 5e598330..00000000 --- a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(2)_sequence_length(256)/1/hydra_config.yaml +++ /dev/null @@ -1,80 +0,0 @@ -backend: - name: 
onnxruntime - version: ort-gpu:1.15.1 - _target_: optimum_benchmark.backends.onnxruntime.backend.ORTBackend - seed: 42 - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: false - continous_isolation_check: false - delete_cache: false - no_weights: false - export: true - use_cache: true - use_merged: false - torch_dtype: null - provider: CUDAExecutionProvider - provider_options: - device_id: 0 - use_io_binding: false - session_options: - enable_profiling: false - optimization: false - optimization_config: {} - quantization: false - quantization_config: {} - calibration: false - calibration_config: {} - auto_optimization: O4 - auto_optimization_config: - disable_shape_inference: false - auto_quantization: null - auto_quantization_config: {} - use_inference_session: true - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 2 - sequence_length: 256 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: null - can_diffuse: false - can_generate: false - forward_kwargs: {} - generate_kwargs: {} -experiment_name: bge_batch_size_sweep_ort_cuda_o4 -model: BAAI/bge-base-en-v1.5 -device: cuda -task: feature-extraction -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.0 - transformers_version: 4.34.0.dev0 - accelerate_version: 0.23.0.dev0 - diffusers_version: 0.21.0.dev0 - python_version: 3.8.10 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB diff --git a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(2)_sequence_length(256)/1/inference_results.csv b/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(2)_sequence_length(256)/1/inference_results.csv deleted file mode 100644 index 56c0418f..00000000 --- a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(2)_sequence_length(256)/1/inference_results.csv +++ /dev/null @@ -1,2 +0,0 @@ -,forward.latency(s),forward.throughput(samples/s),forward.peak_memory(MB) -0,0.00195,1030.0,3500 diff --git a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(2048)_sequence_length(256)/11/.hydra/config.yaml b/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(2048)_sequence_length(256)/11/.hydra/config.yaml deleted file mode 100644 index 427d25f3..00000000 --- a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(2048)_sequence_length(256)/11/.hydra/config.yaml +++ /dev/null @@ -1,79 +0,0 @@ -backend: - name: onnxruntime - version: ${onnxruntime_version:} - _target_: optimum_benchmark.backends.onnxruntime.backend.ORTBackend - seed: 42 - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: false - continous_isolation_check: false - delete_cache: false - no_weights: false - export: true - use_cache: true - use_merged: false - torch_dtype: null - provider: CUDAExecutionProvider - provider_options: - device_id: ${infer_device_id:${device}} - use_io_binding: ${io_bind:${device}} - session_options: - enable_profiling: ${is_profiling:${benchmark.name}} - 
optimization: false - optimization_config: {} - quantization: false - quantization_config: {} - calibration: false - calibration_config: {} - auto_optimization: O4 - auto_optimization_config: {} - auto_quantization: null - auto_quantization_config: {} - use_inference_session: ${is_inference:${benchmark.name}} - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 2048 - sequence_length: 256 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: null - can_diffuse: ${can_diffuse:${task}} - can_generate: ${can_generate:${task}} - forward_kwargs: {} - generate_kwargs: {} -experiment_name: bge_batch_size_sweep_ort_cuda_o4 -model: BAAI/bge-base-en-v1.5 -device: cuda -task: feature-extraction -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.0 - transformers_version: 4.34.0.dev0 - accelerate_version: 0.23.0.dev0 - diffusers_version: 0.21.0.dev0 - python_version: 3.8.10 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB diff --git a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(2048)_sequence_length(256)/11/.hydra/hydra.yaml b/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(2048)_sequence_length(256)/11/.hydra/hydra.yaml deleted file mode 100644 index 3376a3de..00000000 --- a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(2048)_sequence_length(256)/11/.hydra/hydra.yaml +++ /dev/null @@ -1,175 +0,0 @@ -hydra: - run: - dir: experiments/${experiment_name}_batch_size(${benchmark.input_shapes.batch_size})_sequence_length(${benchmark.input_shapes.sequence_length}) - sweep: - dir: experiments/${experiment_name}_batch_size(${benchmark.input_shapes.batch_size})_sequence_length(${benchmark.input_shapes.sequence_length}) - subdir: ${hydra.job.num} - launcher: - _target_: hydra._internal.core_plugins.basic_launcher.BasicLauncher - sweeper: - _target_: hydra._internal.core_plugins.basic_sweeper.BasicSweeper - max_batch_size: null - params: - benchmark.input_shapes.batch_size: 1,2,4,8,16,32,64,128,256,512,1024,2048 - benchmark.input_shapes.sequence_length: '256' - help: - app_name: ${hydra.job.name} - header: '${hydra.help.app_name} is powered by Hydra. - - ' - footer: 'Powered by Hydra (https://hydra.cc) - - Use --hydra-help to view Hydra specific help - - ' - template: '${hydra.help.header} - - == Configuration groups == - - Compose your configuration from those groups (group=option) - - - $APP_CONFIG_GROUPS - - - == Config == - - Override anything in the config (foo.bar=value) - - - $CONFIG - - - ${hydra.help.footer} - - ' - hydra_help: - template: 'Hydra (${hydra.runtime.version}) - - See https://hydra.cc for more info. - - - == Flags == - - $FLAGS_HELP - - - == Configuration groups == - - Compose your configuration from those groups (For example, append hydra/job_logging=disabled - to command line) - - - $HYDRA_CONFIG_GROUPS - - - Use ''--cfg hydra'' to Show the Hydra config. - - ' - hydra_help: ??? 
- hydra_logging: - version: 1 - formatters: - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s' - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - root: - level: INFO - handlers: - - console - disable_existing_loggers: false - job_logging: - version: 1 - formatters: - simple: - format: '[%(asctime)s][%(name)s][%(levelname)s] - %(message)s' - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s] - - %(message)s' - log_colors: - DEBUG: purple - INFO: green - WARNING: yellow - ERROR: red - CRITICAL: red - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - file: - class: logging.FileHandler - formatter: simple - filename: ${hydra.job.name}.log - root: - level: INFO - handlers: - - console - - file - disable_existing_loggers: false - env: {} - mode: MULTIRUN - searchpath: [] - callbacks: {} - output_subdir: .hydra - overrides: - hydra: - - hydra.mode=MULTIRUN - task: - - benchmark.input_shapes.batch_size=2048 - - benchmark.input_shapes.sequence_length=256 - job: - name: experiment - chdir: true - override_dirname: benchmark.input_shapes.batch_size=2048,benchmark.input_shapes.sequence_length=256 - id: '11' - num: 11 - config_name: bge_batch_size_sweep_ort_cuda_o4.yaml - env_set: - CUDA_VISIBLE_DEVICES: '3' - env_copy: [] - config: - override_dirname: - kv_sep: '=' - item_sep: ',' - exclude_keys: [] - runtime: - version: 1.3.2 - version_base: '1.3' - cwd: /workspace/optimum-benchmark/examples/fast-embedders - config_sources: - - path: hydra.conf - schema: pkg - provider: hydra - - path: optimum_benchmark - schema: pkg - provider: main - - path: hydra_plugins.hydra_colorlog.conf - schema: pkg - provider: hydra-colorlog - - path: /workspace/optimum-benchmark/examples/fast-embedders/configs - schema: file - provider: command-line - - path: '' - schema: structured - provider: schema - output_dir: /workspace/optimum-benchmark/examples/fast-embedders/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(2048)_sequence_length(256)/11 - choices: - benchmark: inference - backend: onnxruntime - hydra/env: default - hydra/callbacks: null - hydra/job_logging: colorlog - hydra/hydra_logging: colorlog - hydra/hydra_help: default - hydra/help: default - hydra/sweeper: basic - hydra/launcher: basic - hydra/output: default - verbose: false diff --git a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(2048)_sequence_length(256)/11/.hydra/overrides.yaml b/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(2048)_sequence_length(256)/11/.hydra/overrides.yaml deleted file mode 100644 index c0074884..00000000 --- a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(2048)_sequence_length(256)/11/.hydra/overrides.yaml +++ /dev/null @@ -1,2 +0,0 @@ -- benchmark.input_shapes.batch_size=2048 -- benchmark.input_shapes.sequence_length=256 diff --git a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(2048)_sequence_length(256)/11/hydra_config.yaml b/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(2048)_sequence_length(256)/11/hydra_config.yaml deleted file mode 100644 index ec0027cf..00000000 --- a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(2048)_sequence_length(256)/11/hydra_config.yaml +++ /dev/null 
@@ -1,80 +0,0 @@ -backend: - name: onnxruntime - version: ort-gpu:1.15.1 - _target_: optimum_benchmark.backends.onnxruntime.backend.ORTBackend - seed: 42 - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: false - continous_isolation_check: false - delete_cache: false - no_weights: false - export: true - use_cache: true - use_merged: false - torch_dtype: null - provider: CUDAExecutionProvider - provider_options: - device_id: 0 - use_io_binding: false - session_options: - enable_profiling: false - optimization: false - optimization_config: {} - quantization: false - quantization_config: {} - calibration: false - calibration_config: {} - auto_optimization: O4 - auto_optimization_config: - disable_shape_inference: false - auto_quantization: null - auto_quantization_config: {} - use_inference_session: true - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 2048 - sequence_length: 256 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: null - can_diffuse: false - can_generate: false - forward_kwargs: {} - generate_kwargs: {} -experiment_name: bge_batch_size_sweep_ort_cuda_o4 -model: BAAI/bge-base-en-v1.5 -device: cuda -task: feature-extraction -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.0 - transformers_version: 4.34.0.dev0 - accelerate_version: 0.23.0.dev0 - diffusers_version: 0.21.0.dev0 - python_version: 3.8.10 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB diff --git a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(2048)_sequence_length(256)/11/inference_results.csv b/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(2048)_sequence_length(256)/11/inference_results.csv deleted file mode 100644 index 3eed0c11..00000000 --- a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(2048)_sequence_length(256)/11/inference_results.csv +++ /dev/null @@ -1,2 +0,0 @@ -,forward.latency(s),forward.throughput(samples/s),forward.peak_memory(MB) -0,1.2,1710.0,44135 diff --git a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(256)_sequence_length(256)/8/.hydra/config.yaml b/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(256)_sequence_length(256)/8/.hydra/config.yaml deleted file mode 100644 index 1be484cb..00000000 --- a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(256)_sequence_length(256)/8/.hydra/config.yaml +++ /dev/null @@ -1,79 +0,0 @@ -backend: - name: onnxruntime - version: ${onnxruntime_version:} - _target_: optimum_benchmark.backends.onnxruntime.backend.ORTBackend - seed: 42 - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: false - continous_isolation_check: false - delete_cache: false - no_weights: false - export: true - use_cache: true - use_merged: false - torch_dtype: null - provider: CUDAExecutionProvider - provider_options: - device_id: ${infer_device_id:${device}} - use_io_binding: ${io_bind:${device}} - session_options: - enable_profiling: 
${is_profiling:${benchmark.name}} - optimization: false - optimization_config: {} - quantization: false - quantization_config: {} - calibration: false - calibration_config: {} - auto_optimization: O4 - auto_optimization_config: {} - auto_quantization: null - auto_quantization_config: {} - use_inference_session: ${is_inference:${benchmark.name}} - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 256 - sequence_length: 256 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: null - can_diffuse: ${can_diffuse:${task}} - can_generate: ${can_generate:${task}} - forward_kwargs: {} - generate_kwargs: {} -experiment_name: bge_batch_size_sweep_ort_cuda_o4 -model: BAAI/bge-base-en-v1.5 -device: cuda -task: feature-extraction -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.0 - transformers_version: 4.34.0.dev0 - accelerate_version: 0.23.0.dev0 - diffusers_version: 0.21.0.dev0 - python_version: 3.8.10 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB diff --git a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(256)_sequence_length(256)/8/.hydra/hydra.yaml b/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(256)_sequence_length(256)/8/.hydra/hydra.yaml deleted file mode 100644 index 357b41c3..00000000 --- a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(256)_sequence_length(256)/8/.hydra/hydra.yaml +++ /dev/null @@ -1,175 +0,0 @@ -hydra: - run: - dir: experiments/${experiment_name}_batch_size(${benchmark.input_shapes.batch_size})_sequence_length(${benchmark.input_shapes.sequence_length}) - sweep: - dir: experiments/${experiment_name}_batch_size(${benchmark.input_shapes.batch_size})_sequence_length(${benchmark.input_shapes.sequence_length}) - subdir: ${hydra.job.num} - launcher: - _target_: hydra._internal.core_plugins.basic_launcher.BasicLauncher - sweeper: - _target_: hydra._internal.core_plugins.basic_sweeper.BasicSweeper - max_batch_size: null - params: - benchmark.input_shapes.batch_size: 1,2,4,8,16,32,64,128,256,512,1024,2048 - benchmark.input_shapes.sequence_length: '256' - help: - app_name: ${hydra.job.name} - header: '${hydra.help.app_name} is powered by Hydra. - - ' - footer: 'Powered by Hydra (https://hydra.cc) - - Use --hydra-help to view Hydra specific help - - ' - template: '${hydra.help.header} - - == Configuration groups == - - Compose your configuration from those groups (group=option) - - - $APP_CONFIG_GROUPS - - - == Config == - - Override anything in the config (foo.bar=value) - - - $CONFIG - - - ${hydra.help.footer} - - ' - hydra_help: - template: 'Hydra (${hydra.runtime.version}) - - See https://hydra.cc for more info. - - - == Flags == - - $FLAGS_HELP - - - == Configuration groups == - - Compose your configuration from those groups (For example, append hydra/job_logging=disabled - to command line) - - - $HYDRA_CONFIG_GROUPS - - - Use ''--cfg hydra'' to Show the Hydra config. - - ' - hydra_help: ??? 
- hydra_logging: - version: 1 - formatters: - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s' - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - root: - level: INFO - handlers: - - console - disable_existing_loggers: false - job_logging: - version: 1 - formatters: - simple: - format: '[%(asctime)s][%(name)s][%(levelname)s] - %(message)s' - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s] - - %(message)s' - log_colors: - DEBUG: purple - INFO: green - WARNING: yellow - ERROR: red - CRITICAL: red - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - file: - class: logging.FileHandler - formatter: simple - filename: ${hydra.job.name}.log - root: - level: INFO - handlers: - - console - - file - disable_existing_loggers: false - env: {} - mode: MULTIRUN - searchpath: [] - callbacks: {} - output_subdir: .hydra - overrides: - hydra: - - hydra.mode=MULTIRUN - task: - - benchmark.input_shapes.batch_size=256 - - benchmark.input_shapes.sequence_length=256 - job: - name: experiment - chdir: true - override_dirname: benchmark.input_shapes.batch_size=256,benchmark.input_shapes.sequence_length=256 - id: '8' - num: 8 - config_name: bge_batch_size_sweep_ort_cuda_o4.yaml - env_set: - CUDA_VISIBLE_DEVICES: '3' - env_copy: [] - config: - override_dirname: - kv_sep: '=' - item_sep: ',' - exclude_keys: [] - runtime: - version: 1.3.2 - version_base: '1.3' - cwd: /workspace/optimum-benchmark/examples/fast-embedders - config_sources: - - path: hydra.conf - schema: pkg - provider: hydra - - path: optimum_benchmark - schema: pkg - provider: main - - path: hydra_plugins.hydra_colorlog.conf - schema: pkg - provider: hydra-colorlog - - path: /workspace/optimum-benchmark/examples/fast-embedders/configs - schema: file - provider: command-line - - path: '' - schema: structured - provider: schema - output_dir: /workspace/optimum-benchmark/examples/fast-embedders/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(256)_sequence_length(256)/8 - choices: - benchmark: inference - backend: onnxruntime - hydra/env: default - hydra/callbacks: null - hydra/job_logging: colorlog - hydra/hydra_logging: colorlog - hydra/hydra_help: default - hydra/help: default - hydra/sweeper: basic - hydra/launcher: basic - hydra/output: default - verbose: false diff --git a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(256)_sequence_length(256)/8/.hydra/overrides.yaml b/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(256)_sequence_length(256)/8/.hydra/overrides.yaml deleted file mode 100644 index 827e72de..00000000 --- a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(256)_sequence_length(256)/8/.hydra/overrides.yaml +++ /dev/null @@ -1,2 +0,0 @@ -- benchmark.input_shapes.batch_size=256 -- benchmark.input_shapes.sequence_length=256 diff --git a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(256)_sequence_length(256)/8/hydra_config.yaml b/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(256)_sequence_length(256)/8/hydra_config.yaml deleted file mode 100644 index a0a13d98..00000000 --- a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(256)_sequence_length(256)/8/hydra_config.yaml +++ /dev/null @@ -1,80 +0,0 @@ 
-backend: - name: onnxruntime - version: ort-gpu:1.15.1 - _target_: optimum_benchmark.backends.onnxruntime.backend.ORTBackend - seed: 42 - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: false - continous_isolation_check: false - delete_cache: false - no_weights: false - export: true - use_cache: true - use_merged: false - torch_dtype: null - provider: CUDAExecutionProvider - provider_options: - device_id: 0 - use_io_binding: false - session_options: - enable_profiling: false - optimization: false - optimization_config: {} - quantization: false - quantization_config: {} - calibration: false - calibration_config: {} - auto_optimization: O4 - auto_optimization_config: - disable_shape_inference: false - auto_quantization: null - auto_quantization_config: {} - use_inference_session: true - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 256 - sequence_length: 256 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: null - can_diffuse: false - can_generate: false - forward_kwargs: {} - generate_kwargs: {} -experiment_name: bge_batch_size_sweep_ort_cuda_o4 -model: BAAI/bge-base-en-v1.5 -device: cuda -task: feature-extraction -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.0 - transformers_version: 4.34.0.dev0 - accelerate_version: 0.23.0.dev0 - diffusers_version: 0.21.0.dev0 - python_version: 3.8.10 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB diff --git a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(256)_sequence_length(256)/8/inference_results.csv b/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(256)_sequence_length(256)/8/inference_results.csv deleted file mode 100644 index b90bccca..00000000 --- a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(256)_sequence_length(256)/8/inference_results.csv +++ /dev/null @@ -1,2 +0,0 @@ -,forward.latency(s),forward.throughput(samples/s),forward.peak_memory(MB) -0,0.146,1750.0,8152 diff --git a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(32)_sequence_length(256)/5/.hydra/config.yaml b/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(32)_sequence_length(256)/5/.hydra/config.yaml deleted file mode 100644 index 5244ec01..00000000 --- a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(32)_sequence_length(256)/5/.hydra/config.yaml +++ /dev/null @@ -1,79 +0,0 @@ -backend: - name: onnxruntime - version: ${onnxruntime_version:} - _target_: optimum_benchmark.backends.onnxruntime.backend.ORTBackend - seed: 42 - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: false - continous_isolation_check: false - delete_cache: false - no_weights: false - export: true - use_cache: true - use_merged: false - torch_dtype: null - provider: CUDAExecutionProvider - provider_options: - device_id: ${infer_device_id:${device}} - use_io_binding: ${io_bind:${device}} - session_options: - enable_profiling: 
${is_profiling:${benchmark.name}} - optimization: false - optimization_config: {} - quantization: false - quantization_config: {} - calibration: false - calibration_config: {} - auto_optimization: O4 - auto_optimization_config: {} - auto_quantization: null - auto_quantization_config: {} - use_inference_session: ${is_inference:${benchmark.name}} - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 32 - sequence_length: 256 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: null - can_diffuse: ${can_diffuse:${task}} - can_generate: ${can_generate:${task}} - forward_kwargs: {} - generate_kwargs: {} -experiment_name: bge_batch_size_sweep_ort_cuda_o4 -model: BAAI/bge-base-en-v1.5 -device: cuda -task: feature-extraction -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.0 - transformers_version: 4.34.0.dev0 - accelerate_version: 0.23.0.dev0 - diffusers_version: 0.21.0.dev0 - python_version: 3.8.10 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB diff --git a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(32)_sequence_length(256)/5/.hydra/hydra.yaml b/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(32)_sequence_length(256)/5/.hydra/hydra.yaml deleted file mode 100644 index 69faa12f..00000000 --- a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(32)_sequence_length(256)/5/.hydra/hydra.yaml +++ /dev/null @@ -1,175 +0,0 @@ -hydra: - run: - dir: experiments/${experiment_name}_batch_size(${benchmark.input_shapes.batch_size})_sequence_length(${benchmark.input_shapes.sequence_length}) - sweep: - dir: experiments/${experiment_name}_batch_size(${benchmark.input_shapes.batch_size})_sequence_length(${benchmark.input_shapes.sequence_length}) - subdir: ${hydra.job.num} - launcher: - _target_: hydra._internal.core_plugins.basic_launcher.BasicLauncher - sweeper: - _target_: hydra._internal.core_plugins.basic_sweeper.BasicSweeper - max_batch_size: null - params: - benchmark.input_shapes.batch_size: 1,2,4,8,16,32,64,128,256,512,1024,2048 - benchmark.input_shapes.sequence_length: '256' - help: - app_name: ${hydra.job.name} - header: '${hydra.help.app_name} is powered by Hydra. - - ' - footer: 'Powered by Hydra (https://hydra.cc) - - Use --hydra-help to view Hydra specific help - - ' - template: '${hydra.help.header} - - == Configuration groups == - - Compose your configuration from those groups (group=option) - - - $APP_CONFIG_GROUPS - - - == Config == - - Override anything in the config (foo.bar=value) - - - $CONFIG - - - ${hydra.help.footer} - - ' - hydra_help: - template: 'Hydra (${hydra.runtime.version}) - - See https://hydra.cc for more info. - - - == Flags == - - $FLAGS_HELP - - - == Configuration groups == - - Compose your configuration from those groups (For example, append hydra/job_logging=disabled - to command line) - - - $HYDRA_CONFIG_GROUPS - - - Use ''--cfg hydra'' to Show the Hydra config. - - ' - hydra_help: ??? 
- hydra_logging: - version: 1 - formatters: - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s' - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - root: - level: INFO - handlers: - - console - disable_existing_loggers: false - job_logging: - version: 1 - formatters: - simple: - format: '[%(asctime)s][%(name)s][%(levelname)s] - %(message)s' - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s] - - %(message)s' - log_colors: - DEBUG: purple - INFO: green - WARNING: yellow - ERROR: red - CRITICAL: red - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - file: - class: logging.FileHandler - formatter: simple - filename: ${hydra.job.name}.log - root: - level: INFO - handlers: - - console - - file - disable_existing_loggers: false - env: {} - mode: MULTIRUN - searchpath: [] - callbacks: {} - output_subdir: .hydra - overrides: - hydra: - - hydra.mode=MULTIRUN - task: - - benchmark.input_shapes.batch_size=32 - - benchmark.input_shapes.sequence_length=256 - job: - name: experiment - chdir: true - override_dirname: benchmark.input_shapes.batch_size=32,benchmark.input_shapes.sequence_length=256 - id: '5' - num: 5 - config_name: bge_batch_size_sweep_ort_cuda_o4.yaml - env_set: - CUDA_VISIBLE_DEVICES: '3' - env_copy: [] - config: - override_dirname: - kv_sep: '=' - item_sep: ',' - exclude_keys: [] - runtime: - version: 1.3.2 - version_base: '1.3' - cwd: /workspace/optimum-benchmark/examples/fast-embedders - config_sources: - - path: hydra.conf - schema: pkg - provider: hydra - - path: optimum_benchmark - schema: pkg - provider: main - - path: hydra_plugins.hydra_colorlog.conf - schema: pkg - provider: hydra-colorlog - - path: /workspace/optimum-benchmark/examples/fast-embedders/configs - schema: file - provider: command-line - - path: '' - schema: structured - provider: schema - output_dir: /workspace/optimum-benchmark/examples/fast-embedders/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(32)_sequence_length(256)/5 - choices: - benchmark: inference - backend: onnxruntime - hydra/env: default - hydra/callbacks: null - hydra/job_logging: colorlog - hydra/hydra_logging: colorlog - hydra/hydra_help: default - hydra/help: default - hydra/sweeper: basic - hydra/launcher: basic - hydra/output: default - verbose: false diff --git a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(32)_sequence_length(256)/5/.hydra/overrides.yaml b/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(32)_sequence_length(256)/5/.hydra/overrides.yaml deleted file mode 100644 index 3e2c3635..00000000 --- a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(32)_sequence_length(256)/5/.hydra/overrides.yaml +++ /dev/null @@ -1,2 +0,0 @@ -- benchmark.input_shapes.batch_size=32 -- benchmark.input_shapes.sequence_length=256 diff --git a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(32)_sequence_length(256)/5/hydra_config.yaml b/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(32)_sequence_length(256)/5/hydra_config.yaml deleted file mode 100644 index c7c3b724..00000000 --- a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(32)_sequence_length(256)/5/hydra_config.yaml +++ /dev/null @@ -1,80 +0,0 @@ -backend: - 
name: onnxruntime - version: ort-gpu:1.15.1 - _target_: optimum_benchmark.backends.onnxruntime.backend.ORTBackend - seed: 42 - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: false - continous_isolation_check: false - delete_cache: false - no_weights: false - export: true - use_cache: true - use_merged: false - torch_dtype: null - provider: CUDAExecutionProvider - provider_options: - device_id: 0 - use_io_binding: false - session_options: - enable_profiling: false - optimization: false - optimization_config: {} - quantization: false - quantization_config: {} - calibration: false - calibration_config: {} - auto_optimization: O4 - auto_optimization_config: - disable_shape_inference: false - auto_quantization: null - auto_quantization_config: {} - use_inference_session: true - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 32 - sequence_length: 256 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: null - can_diffuse: false - can_generate: false - forward_kwargs: {} - generate_kwargs: {} -experiment_name: bge_batch_size_sweep_ort_cuda_o4 -model: BAAI/bge-base-en-v1.5 -device: cuda -task: feature-extraction -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.0 - transformers_version: 4.34.0.dev0 - accelerate_version: 0.23.0.dev0 - diffusers_version: 0.21.0.dev0 - python_version: 3.8.10 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB diff --git a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(32)_sequence_length(256)/5/inference_results.csv b/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(32)_sequence_length(256)/5/inference_results.csv deleted file mode 100644 index 516c144b..00000000 --- a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(32)_sequence_length(256)/5/inference_results.csv +++ /dev/null @@ -1,2 +0,0 @@ -,forward.latency(s),forward.throughput(samples/s),forward.peak_memory(MB) -0,0.0167,1920.0,4304 diff --git a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(4)_sequence_length(256)/2/.hydra/config.yaml b/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(4)_sequence_length(256)/2/.hydra/config.yaml deleted file mode 100644 index bc7a3904..00000000 --- a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(4)_sequence_length(256)/2/.hydra/config.yaml +++ /dev/null @@ -1,79 +0,0 @@ -backend: - name: onnxruntime - version: ${onnxruntime_version:} - _target_: optimum_benchmark.backends.onnxruntime.backend.ORTBackend - seed: 42 - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: false - continous_isolation_check: false - delete_cache: false - no_weights: false - export: true - use_cache: true - use_merged: false - torch_dtype: null - provider: CUDAExecutionProvider - provider_options: - device_id: ${infer_device_id:${device}} - use_io_binding: ${io_bind:${device}} - session_options: - enable_profiling: ${is_profiling:${benchmark.name}} - optimization: 
false - optimization_config: {} - quantization: false - quantization_config: {} - calibration: false - calibration_config: {} - auto_optimization: O4 - auto_optimization_config: {} - auto_quantization: null - auto_quantization_config: {} - use_inference_session: ${is_inference:${benchmark.name}} - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 4 - sequence_length: 256 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: null - can_diffuse: ${can_diffuse:${task}} - can_generate: ${can_generate:${task}} - forward_kwargs: {} - generate_kwargs: {} -experiment_name: bge_batch_size_sweep_ort_cuda_o4 -model: BAAI/bge-base-en-v1.5 -device: cuda -task: feature-extraction -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.0 - transformers_version: 4.34.0.dev0 - accelerate_version: 0.23.0.dev0 - diffusers_version: 0.21.0.dev0 - python_version: 3.8.10 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB diff --git a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(4)_sequence_length(256)/2/.hydra/hydra.yaml b/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(4)_sequence_length(256)/2/.hydra/hydra.yaml deleted file mode 100644 index 5d8bbff3..00000000 --- a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(4)_sequence_length(256)/2/.hydra/hydra.yaml +++ /dev/null @@ -1,175 +0,0 @@ -hydra: - run: - dir: experiments/${experiment_name}_batch_size(${benchmark.input_shapes.batch_size})_sequence_length(${benchmark.input_shapes.sequence_length}) - sweep: - dir: experiments/${experiment_name}_batch_size(${benchmark.input_shapes.batch_size})_sequence_length(${benchmark.input_shapes.sequence_length}) - subdir: ${hydra.job.num} - launcher: - _target_: hydra._internal.core_plugins.basic_launcher.BasicLauncher - sweeper: - _target_: hydra._internal.core_plugins.basic_sweeper.BasicSweeper - max_batch_size: null - params: - benchmark.input_shapes.batch_size: 1,2,4,8,16,32,64,128,256,512,1024,2048 - benchmark.input_shapes.sequence_length: '256' - help: - app_name: ${hydra.job.name} - header: '${hydra.help.app_name} is powered by Hydra. - - ' - footer: 'Powered by Hydra (https://hydra.cc) - - Use --hydra-help to view Hydra specific help - - ' - template: '${hydra.help.header} - - == Configuration groups == - - Compose your configuration from those groups (group=option) - - - $APP_CONFIG_GROUPS - - - == Config == - - Override anything in the config (foo.bar=value) - - - $CONFIG - - - ${hydra.help.footer} - - ' - hydra_help: - template: 'Hydra (${hydra.runtime.version}) - - See https://hydra.cc for more info. - - - == Flags == - - $FLAGS_HELP - - - == Configuration groups == - - Compose your configuration from those groups (For example, append hydra/job_logging=disabled - to command line) - - - $HYDRA_CONFIG_GROUPS - - - Use ''--cfg hydra'' to Show the Hydra config. - - ' - hydra_help: ??? 
- hydra_logging: - version: 1 - formatters: - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s' - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - root: - level: INFO - handlers: - - console - disable_existing_loggers: false - job_logging: - version: 1 - formatters: - simple: - format: '[%(asctime)s][%(name)s][%(levelname)s] - %(message)s' - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s] - - %(message)s' - log_colors: - DEBUG: purple - INFO: green - WARNING: yellow - ERROR: red - CRITICAL: red - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - file: - class: logging.FileHandler - formatter: simple - filename: ${hydra.job.name}.log - root: - level: INFO - handlers: - - console - - file - disable_existing_loggers: false - env: {} - mode: MULTIRUN - searchpath: [] - callbacks: {} - output_subdir: .hydra - overrides: - hydra: - - hydra.mode=MULTIRUN - task: - - benchmark.input_shapes.batch_size=4 - - benchmark.input_shapes.sequence_length=256 - job: - name: experiment - chdir: true - override_dirname: benchmark.input_shapes.batch_size=4,benchmark.input_shapes.sequence_length=256 - id: '2' - num: 2 - config_name: bge_batch_size_sweep_ort_cuda_o4.yaml - env_set: - CUDA_VISIBLE_DEVICES: '3' - env_copy: [] - config: - override_dirname: - kv_sep: '=' - item_sep: ',' - exclude_keys: [] - runtime: - version: 1.3.2 - version_base: '1.3' - cwd: /workspace/optimum-benchmark/examples/fast-embedders - config_sources: - - path: hydra.conf - schema: pkg - provider: hydra - - path: optimum_benchmark - schema: pkg - provider: main - - path: hydra_plugins.hydra_colorlog.conf - schema: pkg - provider: hydra-colorlog - - path: /workspace/optimum-benchmark/examples/fast-embedders/configs - schema: file - provider: command-line - - path: '' - schema: structured - provider: schema - output_dir: /workspace/optimum-benchmark/examples/fast-embedders/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(4)_sequence_length(256)/2 - choices: - benchmark: inference - backend: onnxruntime - hydra/env: default - hydra/callbacks: null - hydra/job_logging: colorlog - hydra/hydra_logging: colorlog - hydra/hydra_help: default - hydra/help: default - hydra/sweeper: basic - hydra/launcher: basic - hydra/output: default - verbose: false diff --git a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(4)_sequence_length(256)/2/.hydra/overrides.yaml b/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(4)_sequence_length(256)/2/.hydra/overrides.yaml deleted file mode 100644 index 3c675c88..00000000 --- a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(4)_sequence_length(256)/2/.hydra/overrides.yaml +++ /dev/null @@ -1,2 +0,0 @@ -- benchmark.input_shapes.batch_size=4 -- benchmark.input_shapes.sequence_length=256 diff --git a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(4)_sequence_length(256)/2/hydra_config.yaml b/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(4)_sequence_length(256)/2/hydra_config.yaml deleted file mode 100644 index 8865a904..00000000 --- a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(4)_sequence_length(256)/2/hydra_config.yaml +++ /dev/null @@ -1,80 +0,0 @@ -backend: - name: 
onnxruntime - version: ort-gpu:1.15.1 - _target_: optimum_benchmark.backends.onnxruntime.backend.ORTBackend - seed: 42 - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: false - continous_isolation_check: false - delete_cache: false - no_weights: false - export: true - use_cache: true - use_merged: false - torch_dtype: null - provider: CUDAExecutionProvider - provider_options: - device_id: 0 - use_io_binding: false - session_options: - enable_profiling: false - optimization: false - optimization_config: {} - quantization: false - quantization_config: {} - calibration: false - calibration_config: {} - auto_optimization: O4 - auto_optimization_config: - disable_shape_inference: false - auto_quantization: null - auto_quantization_config: {} - use_inference_session: true - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 4 - sequence_length: 256 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: null - can_diffuse: false - can_generate: false - forward_kwargs: {} - generate_kwargs: {} -experiment_name: bge_batch_size_sweep_ort_cuda_o4 -model: BAAI/bge-base-en-v1.5 -device: cuda -task: feature-extraction -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.0 - transformers_version: 4.34.0.dev0 - accelerate_version: 0.23.0.dev0 - diffusers_version: 0.21.0.dev0 - python_version: 3.8.10 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB diff --git a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(4)_sequence_length(256)/2/inference_results.csv b/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(4)_sequence_length(256)/2/inference_results.csv deleted file mode 100644 index 433d02f7..00000000 --- a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(4)_sequence_length(256)/2/inference_results.csv +++ /dev/null @@ -1,2 +0,0 @@ -,forward.latency(s),forward.throughput(samples/s),forward.peak_memory(MB) -0,0.00307,1300.0,3765 diff --git a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(512)_sequence_length(256)/9/.hydra/config.yaml b/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(512)_sequence_length(256)/9/.hydra/config.yaml deleted file mode 100644 index edbe07af..00000000 --- a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(512)_sequence_length(256)/9/.hydra/config.yaml +++ /dev/null @@ -1,79 +0,0 @@ -backend: - name: onnxruntime - version: ${onnxruntime_version:} - _target_: optimum_benchmark.backends.onnxruntime.backend.ORTBackend - seed: 42 - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: false - continous_isolation_check: false - delete_cache: false - no_weights: false - export: true - use_cache: true - use_merged: false - torch_dtype: null - provider: CUDAExecutionProvider - provider_options: - device_id: ${infer_device_id:${device}} - use_io_binding: ${io_bind:${device}} - session_options: - enable_profiling: ${is_profiling:${benchmark.name}} - optimization: 
false - optimization_config: {} - quantization: false - quantization_config: {} - calibration: false - calibration_config: {} - auto_optimization: O4 - auto_optimization_config: {} - auto_quantization: null - auto_quantization_config: {} - use_inference_session: ${is_inference:${benchmark.name}} - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 512 - sequence_length: 256 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: null - can_diffuse: ${can_diffuse:${task}} - can_generate: ${can_generate:${task}} - forward_kwargs: {} - generate_kwargs: {} -experiment_name: bge_batch_size_sweep_ort_cuda_o4 -model: BAAI/bge-base-en-v1.5 -device: cuda -task: feature-extraction -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.0 - transformers_version: 4.34.0.dev0 - accelerate_version: 0.23.0.dev0 - diffusers_version: 0.21.0.dev0 - python_version: 3.8.10 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB diff --git a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(512)_sequence_length(256)/9/.hydra/hydra.yaml b/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(512)_sequence_length(256)/9/.hydra/hydra.yaml deleted file mode 100644 index 0ed92d8f..00000000 --- a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(512)_sequence_length(256)/9/.hydra/hydra.yaml +++ /dev/null @@ -1,175 +0,0 @@ -hydra: - run: - dir: experiments/${experiment_name}_batch_size(${benchmark.input_shapes.batch_size})_sequence_length(${benchmark.input_shapes.sequence_length}) - sweep: - dir: experiments/${experiment_name}_batch_size(${benchmark.input_shapes.batch_size})_sequence_length(${benchmark.input_shapes.sequence_length}) - subdir: ${hydra.job.num} - launcher: - _target_: hydra._internal.core_plugins.basic_launcher.BasicLauncher - sweeper: - _target_: hydra._internal.core_plugins.basic_sweeper.BasicSweeper - max_batch_size: null - params: - benchmark.input_shapes.batch_size: 1,2,4,8,16,32,64,128,256,512,1024,2048 - benchmark.input_shapes.sequence_length: '256' - help: - app_name: ${hydra.job.name} - header: '${hydra.help.app_name} is powered by Hydra. - - ' - footer: 'Powered by Hydra (https://hydra.cc) - - Use --hydra-help to view Hydra specific help - - ' - template: '${hydra.help.header} - - == Configuration groups == - - Compose your configuration from those groups (group=option) - - - $APP_CONFIG_GROUPS - - - == Config == - - Override anything in the config (foo.bar=value) - - - $CONFIG - - - ${hydra.help.footer} - - ' - hydra_help: - template: 'Hydra (${hydra.runtime.version}) - - See https://hydra.cc for more info. - - - == Flags == - - $FLAGS_HELP - - - == Configuration groups == - - Compose your configuration from those groups (For example, append hydra/job_logging=disabled - to command line) - - - $HYDRA_CONFIG_GROUPS - - - Use ''--cfg hydra'' to Show the Hydra config. - - ' - hydra_help: ??? 
- hydra_logging: - version: 1 - formatters: - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s' - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - root: - level: INFO - handlers: - - console - disable_existing_loggers: false - job_logging: - version: 1 - formatters: - simple: - format: '[%(asctime)s][%(name)s][%(levelname)s] - %(message)s' - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s] - - %(message)s' - log_colors: - DEBUG: purple - INFO: green - WARNING: yellow - ERROR: red - CRITICAL: red - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - file: - class: logging.FileHandler - formatter: simple - filename: ${hydra.job.name}.log - root: - level: INFO - handlers: - - console - - file - disable_existing_loggers: false - env: {} - mode: MULTIRUN - searchpath: [] - callbacks: {} - output_subdir: .hydra - overrides: - hydra: - - hydra.mode=MULTIRUN - task: - - benchmark.input_shapes.batch_size=512 - - benchmark.input_shapes.sequence_length=256 - job: - name: experiment - chdir: true - override_dirname: benchmark.input_shapes.batch_size=512,benchmark.input_shapes.sequence_length=256 - id: '9' - num: 9 - config_name: bge_batch_size_sweep_ort_cuda_o4.yaml - env_set: - CUDA_VISIBLE_DEVICES: '3' - env_copy: [] - config: - override_dirname: - kv_sep: '=' - item_sep: ',' - exclude_keys: [] - runtime: - version: 1.3.2 - version_base: '1.3' - cwd: /workspace/optimum-benchmark/examples/fast-embedders - config_sources: - - path: hydra.conf - schema: pkg - provider: hydra - - path: optimum_benchmark - schema: pkg - provider: main - - path: hydra_plugins.hydra_colorlog.conf - schema: pkg - provider: hydra-colorlog - - path: /workspace/optimum-benchmark/examples/fast-embedders/configs - schema: file - provider: command-line - - path: '' - schema: structured - provider: schema - output_dir: /workspace/optimum-benchmark/examples/fast-embedders/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(512)_sequence_length(256)/9 - choices: - benchmark: inference - backend: onnxruntime - hydra/env: default - hydra/callbacks: null - hydra/job_logging: colorlog - hydra/hydra_logging: colorlog - hydra/hydra_help: default - hydra/help: default - hydra/sweeper: basic - hydra/launcher: basic - hydra/output: default - verbose: false diff --git a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(512)_sequence_length(256)/9/.hydra/overrides.yaml b/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(512)_sequence_length(256)/9/.hydra/overrides.yaml deleted file mode 100644 index 830422b4..00000000 --- a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(512)_sequence_length(256)/9/.hydra/overrides.yaml +++ /dev/null @@ -1,2 +0,0 @@ -- benchmark.input_shapes.batch_size=512 -- benchmark.input_shapes.sequence_length=256 diff --git a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(512)_sequence_length(256)/9/hydra_config.yaml b/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(512)_sequence_length(256)/9/hydra_config.yaml deleted file mode 100644 index 794322d8..00000000 --- a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(512)_sequence_length(256)/9/hydra_config.yaml +++ /dev/null @@ -1,80 +0,0 @@ 
-backend: - name: onnxruntime - version: ort-gpu:1.15.1 - _target_: optimum_benchmark.backends.onnxruntime.backend.ORTBackend - seed: 42 - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: false - continous_isolation_check: false - delete_cache: false - no_weights: false - export: true - use_cache: true - use_merged: false - torch_dtype: null - provider: CUDAExecutionProvider - provider_options: - device_id: 0 - use_io_binding: false - session_options: - enable_profiling: false - optimization: false - optimization_config: {} - quantization: false - quantization_config: {} - calibration: false - calibration_config: {} - auto_optimization: O4 - auto_optimization_config: - disable_shape_inference: false - auto_quantization: null - auto_quantization_config: {} - use_inference_session: true - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 512 - sequence_length: 256 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: null - can_diffuse: false - can_generate: false - forward_kwargs: {} - generate_kwargs: {} -experiment_name: bge_batch_size_sweep_ort_cuda_o4 -model: BAAI/bge-base-en-v1.5 -device: cuda -task: feature-extraction -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.0 - transformers_version: 4.34.0.dev0 - accelerate_version: 0.23.0.dev0 - diffusers_version: 0.21.0.dev0 - python_version: 3.8.10 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB diff --git a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(512)_sequence_length(256)/9/inference_results.csv b/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(512)_sequence_length(256)/9/inference_results.csv deleted file mode 100644 index ab8286c9..00000000 --- a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(512)_sequence_length(256)/9/inference_results.csv +++ /dev/null @@ -1,2 +0,0 @@ -,forward.latency(s),forward.throughput(samples/s),forward.peak_memory(MB) -0,0.294,1740.0,13263 diff --git a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(64)_sequence_length(256)/6/.hydra/config.yaml b/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(64)_sequence_length(256)/6/.hydra/config.yaml deleted file mode 100644 index 422a64d4..00000000 --- a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(64)_sequence_length(256)/6/.hydra/config.yaml +++ /dev/null @@ -1,79 +0,0 @@ -backend: - name: onnxruntime - version: ${onnxruntime_version:} - _target_: optimum_benchmark.backends.onnxruntime.backend.ORTBackend - seed: 42 - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: false - continous_isolation_check: false - delete_cache: false - no_weights: false - export: true - use_cache: true - use_merged: false - torch_dtype: null - provider: CUDAExecutionProvider - provider_options: - device_id: ${infer_device_id:${device}} - use_io_binding: ${io_bind:${device}} - session_options: - enable_profiling: 
${is_profiling:${benchmark.name}} - optimization: false - optimization_config: {} - quantization: false - quantization_config: {} - calibration: false - calibration_config: {} - auto_optimization: O4 - auto_optimization_config: {} - auto_quantization: null - auto_quantization_config: {} - use_inference_session: ${is_inference:${benchmark.name}} - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 64 - sequence_length: 256 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: null - can_diffuse: ${can_diffuse:${task}} - can_generate: ${can_generate:${task}} - forward_kwargs: {} - generate_kwargs: {} -experiment_name: bge_batch_size_sweep_ort_cuda_o4 -model: BAAI/bge-base-en-v1.5 -device: cuda -task: feature-extraction -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.0 - transformers_version: 4.34.0.dev0 - accelerate_version: 0.23.0.dev0 - diffusers_version: 0.21.0.dev0 - python_version: 3.8.10 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB diff --git a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(64)_sequence_length(256)/6/.hydra/hydra.yaml b/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(64)_sequence_length(256)/6/.hydra/hydra.yaml deleted file mode 100644 index b9bdeb5e..00000000 --- a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(64)_sequence_length(256)/6/.hydra/hydra.yaml +++ /dev/null @@ -1,175 +0,0 @@ -hydra: - run: - dir: experiments/${experiment_name}_batch_size(${benchmark.input_shapes.batch_size})_sequence_length(${benchmark.input_shapes.sequence_length}) - sweep: - dir: experiments/${experiment_name}_batch_size(${benchmark.input_shapes.batch_size})_sequence_length(${benchmark.input_shapes.sequence_length}) - subdir: ${hydra.job.num} - launcher: - _target_: hydra._internal.core_plugins.basic_launcher.BasicLauncher - sweeper: - _target_: hydra._internal.core_plugins.basic_sweeper.BasicSweeper - max_batch_size: null - params: - benchmark.input_shapes.batch_size: 1,2,4,8,16,32,64,128,256,512,1024,2048 - benchmark.input_shapes.sequence_length: '256' - help: - app_name: ${hydra.job.name} - header: '${hydra.help.app_name} is powered by Hydra. - - ' - footer: 'Powered by Hydra (https://hydra.cc) - - Use --hydra-help to view Hydra specific help - - ' - template: '${hydra.help.header} - - == Configuration groups == - - Compose your configuration from those groups (group=option) - - - $APP_CONFIG_GROUPS - - - == Config == - - Override anything in the config (foo.bar=value) - - - $CONFIG - - - ${hydra.help.footer} - - ' - hydra_help: - template: 'Hydra (${hydra.runtime.version}) - - See https://hydra.cc for more info. - - - == Flags == - - $FLAGS_HELP - - - == Configuration groups == - - Compose your configuration from those groups (For example, append hydra/job_logging=disabled - to command line) - - - $HYDRA_CONFIG_GROUPS - - - Use ''--cfg hydra'' to Show the Hydra config. - - ' - hydra_help: ??? 
- hydra_logging: - version: 1 - formatters: - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s' - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - root: - level: INFO - handlers: - - console - disable_existing_loggers: false - job_logging: - version: 1 - formatters: - simple: - format: '[%(asctime)s][%(name)s][%(levelname)s] - %(message)s' - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s] - - %(message)s' - log_colors: - DEBUG: purple - INFO: green - WARNING: yellow - ERROR: red - CRITICAL: red - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - file: - class: logging.FileHandler - formatter: simple - filename: ${hydra.job.name}.log - root: - level: INFO - handlers: - - console - - file - disable_existing_loggers: false - env: {} - mode: MULTIRUN - searchpath: [] - callbacks: {} - output_subdir: .hydra - overrides: - hydra: - - hydra.mode=MULTIRUN - task: - - benchmark.input_shapes.batch_size=64 - - benchmark.input_shapes.sequence_length=256 - job: - name: experiment - chdir: true - override_dirname: benchmark.input_shapes.batch_size=64,benchmark.input_shapes.sequence_length=256 - id: '6' - num: 6 - config_name: bge_batch_size_sweep_ort_cuda_o4.yaml - env_set: - CUDA_VISIBLE_DEVICES: '3' - env_copy: [] - config: - override_dirname: - kv_sep: '=' - item_sep: ',' - exclude_keys: [] - runtime: - version: 1.3.2 - version_base: '1.3' - cwd: /workspace/optimum-benchmark/examples/fast-embedders - config_sources: - - path: hydra.conf - schema: pkg - provider: hydra - - path: optimum_benchmark - schema: pkg - provider: main - - path: hydra_plugins.hydra_colorlog.conf - schema: pkg - provider: hydra-colorlog - - path: /workspace/optimum-benchmark/examples/fast-embedders/configs - schema: file - provider: command-line - - path: '' - schema: structured - provider: schema - output_dir: /workspace/optimum-benchmark/examples/fast-embedders/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(64)_sequence_length(256)/6 - choices: - benchmark: inference - backend: onnxruntime - hydra/env: default - hydra/callbacks: null - hydra/job_logging: colorlog - hydra/hydra_logging: colorlog - hydra/hydra_help: default - hydra/help: default - hydra/sweeper: basic - hydra/launcher: basic - hydra/output: default - verbose: false diff --git a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(64)_sequence_length(256)/6/.hydra/overrides.yaml b/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(64)_sequence_length(256)/6/.hydra/overrides.yaml deleted file mode 100644 index 31aa8a55..00000000 --- a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(64)_sequence_length(256)/6/.hydra/overrides.yaml +++ /dev/null @@ -1,2 +0,0 @@ -- benchmark.input_shapes.batch_size=64 -- benchmark.input_shapes.sequence_length=256 diff --git a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(64)_sequence_length(256)/6/hydra_config.yaml b/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(64)_sequence_length(256)/6/hydra_config.yaml deleted file mode 100644 index 79d08b36..00000000 --- a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(64)_sequence_length(256)/6/hydra_config.yaml +++ /dev/null @@ -1,80 +0,0 @@ -backend: - 
name: onnxruntime - version: ort-gpu:1.15.1 - _target_: optimum_benchmark.backends.onnxruntime.backend.ORTBackend - seed: 42 - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: false - continous_isolation_check: false - delete_cache: false - no_weights: false - export: true - use_cache: true - use_merged: false - torch_dtype: null - provider: CUDAExecutionProvider - provider_options: - device_id: 0 - use_io_binding: false - session_options: - enable_profiling: false - optimization: false - optimization_config: {} - quantization: false - quantization_config: {} - calibration: false - calibration_config: {} - auto_optimization: O4 - auto_optimization_config: - disable_shape_inference: false - auto_quantization: null - auto_quantization_config: {} - use_inference_session: true - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 64 - sequence_length: 256 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: null - can_diffuse: false - can_generate: false - forward_kwargs: {} - generate_kwargs: {} -experiment_name: bge_batch_size_sweep_ort_cuda_o4 -model: BAAI/bge-base-en-v1.5 -device: cuda -task: feature-extraction -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.0 - transformers_version: 4.34.0.dev0 - accelerate_version: 0.23.0.dev0 - diffusers_version: 0.21.0.dev0 - python_version: 3.8.10 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB diff --git a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(64)_sequence_length(256)/6/inference_results.csv b/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(64)_sequence_length(256)/6/inference_results.csv deleted file mode 100644 index 2d63eb62..00000000 --- a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(64)_sequence_length(256)/6/inference_results.csv +++ /dev/null @@ -1,2 +0,0 @@ -,forward.latency(s),forward.throughput(samples/s),forward.peak_memory(MB) -0,0.0324,1980.0,4304 diff --git a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(8)_sequence_length(256)/3/.hydra/config.yaml b/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(8)_sequence_length(256)/3/.hydra/config.yaml deleted file mode 100644 index a20b60b7..00000000 --- a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(8)_sequence_length(256)/3/.hydra/config.yaml +++ /dev/null @@ -1,79 +0,0 @@ -backend: - name: onnxruntime - version: ${onnxruntime_version:} - _target_: optimum_benchmark.backends.onnxruntime.backend.ORTBackend - seed: 42 - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: false - continous_isolation_check: false - delete_cache: false - no_weights: false - export: true - use_cache: true - use_merged: false - torch_dtype: null - provider: CUDAExecutionProvider - provider_options: - device_id: ${infer_device_id:${device}} - use_io_binding: ${io_bind:${device}} - session_options: - enable_profiling: ${is_profiling:${benchmark.name}} - optimization: 
false - optimization_config: {} - quantization: false - quantization_config: {} - calibration: false - calibration_config: {} - auto_optimization: O4 - auto_optimization_config: {} - auto_quantization: null - auto_quantization_config: {} - use_inference_session: ${is_inference:${benchmark.name}} - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 8 - sequence_length: 256 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: null - can_diffuse: ${can_diffuse:${task}} - can_generate: ${can_generate:${task}} - forward_kwargs: {} - generate_kwargs: {} -experiment_name: bge_batch_size_sweep_ort_cuda_o4 -model: BAAI/bge-base-en-v1.5 -device: cuda -task: feature-extraction -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.0 - transformers_version: 4.34.0.dev0 - accelerate_version: 0.23.0.dev0 - diffusers_version: 0.21.0.dev0 - python_version: 3.8.10 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB diff --git a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(8)_sequence_length(256)/3/.hydra/hydra.yaml b/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(8)_sequence_length(256)/3/.hydra/hydra.yaml deleted file mode 100644 index 5a81e9e5..00000000 --- a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(8)_sequence_length(256)/3/.hydra/hydra.yaml +++ /dev/null @@ -1,175 +0,0 @@ -hydra: - run: - dir: experiments/${experiment_name}_batch_size(${benchmark.input_shapes.batch_size})_sequence_length(${benchmark.input_shapes.sequence_length}) - sweep: - dir: experiments/${experiment_name}_batch_size(${benchmark.input_shapes.batch_size})_sequence_length(${benchmark.input_shapes.sequence_length}) - subdir: ${hydra.job.num} - launcher: - _target_: hydra._internal.core_plugins.basic_launcher.BasicLauncher - sweeper: - _target_: hydra._internal.core_plugins.basic_sweeper.BasicSweeper - max_batch_size: null - params: - benchmark.input_shapes.batch_size: 1,2,4,8,16,32,64,128,256,512,1024,2048 - benchmark.input_shapes.sequence_length: '256' - help: - app_name: ${hydra.job.name} - header: '${hydra.help.app_name} is powered by Hydra. - - ' - footer: 'Powered by Hydra (https://hydra.cc) - - Use --hydra-help to view Hydra specific help - - ' - template: '${hydra.help.header} - - == Configuration groups == - - Compose your configuration from those groups (group=option) - - - $APP_CONFIG_GROUPS - - - == Config == - - Override anything in the config (foo.bar=value) - - - $CONFIG - - - ${hydra.help.footer} - - ' - hydra_help: - template: 'Hydra (${hydra.runtime.version}) - - See https://hydra.cc for more info. - - - == Flags == - - $FLAGS_HELP - - - == Configuration groups == - - Compose your configuration from those groups (For example, append hydra/job_logging=disabled - to command line) - - - $HYDRA_CONFIG_GROUPS - - - Use ''--cfg hydra'' to Show the Hydra config. - - ' - hydra_help: ??? 
- hydra_logging: - version: 1 - formatters: - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s' - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - root: - level: INFO - handlers: - - console - disable_existing_loggers: false - job_logging: - version: 1 - formatters: - simple: - format: '[%(asctime)s][%(name)s][%(levelname)s] - %(message)s' - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s] - - %(message)s' - log_colors: - DEBUG: purple - INFO: green - WARNING: yellow - ERROR: red - CRITICAL: red - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - file: - class: logging.FileHandler - formatter: simple - filename: ${hydra.job.name}.log - root: - level: INFO - handlers: - - console - - file - disable_existing_loggers: false - env: {} - mode: MULTIRUN - searchpath: [] - callbacks: {} - output_subdir: .hydra - overrides: - hydra: - - hydra.mode=MULTIRUN - task: - - benchmark.input_shapes.batch_size=8 - - benchmark.input_shapes.sequence_length=256 - job: - name: experiment - chdir: true - override_dirname: benchmark.input_shapes.batch_size=8,benchmark.input_shapes.sequence_length=256 - id: '3' - num: 3 - config_name: bge_batch_size_sweep_ort_cuda_o4.yaml - env_set: - CUDA_VISIBLE_DEVICES: '3' - env_copy: [] - config: - override_dirname: - kv_sep: '=' - item_sep: ',' - exclude_keys: [] - runtime: - version: 1.3.2 - version_base: '1.3' - cwd: /workspace/optimum-benchmark/examples/fast-embedders - config_sources: - - path: hydra.conf - schema: pkg - provider: hydra - - path: optimum_benchmark - schema: pkg - provider: main - - path: hydra_plugins.hydra_colorlog.conf - schema: pkg - provider: hydra-colorlog - - path: /workspace/optimum-benchmark/examples/fast-embedders/configs - schema: file - provider: command-line - - path: '' - schema: structured - provider: schema - output_dir: /workspace/optimum-benchmark/examples/fast-embedders/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(8)_sequence_length(256)/3 - choices: - benchmark: inference - backend: onnxruntime - hydra/env: default - hydra/callbacks: null - hydra/job_logging: colorlog - hydra/hydra_logging: colorlog - hydra/hydra_help: default - hydra/help: default - hydra/sweeper: basic - hydra/launcher: basic - hydra/output: default - verbose: false diff --git a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(8)_sequence_length(256)/3/.hydra/overrides.yaml b/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(8)_sequence_length(256)/3/.hydra/overrides.yaml deleted file mode 100644 index 854c6355..00000000 --- a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(8)_sequence_length(256)/3/.hydra/overrides.yaml +++ /dev/null @@ -1,2 +0,0 @@ -- benchmark.input_shapes.batch_size=8 -- benchmark.input_shapes.sequence_length=256 diff --git a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(8)_sequence_length(256)/3/hydra_config.yaml b/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(8)_sequence_length(256)/3/hydra_config.yaml deleted file mode 100644 index 28e33900..00000000 --- a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(8)_sequence_length(256)/3/hydra_config.yaml +++ /dev/null @@ -1,80 +0,0 @@ -backend: - name: 
onnxruntime - version: ort-gpu:1.15.1 - _target_: optimum_benchmark.backends.onnxruntime.backend.ORTBackend - seed: 42 - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: false - continous_isolation_check: false - delete_cache: false - no_weights: false - export: true - use_cache: true - use_merged: false - torch_dtype: null - provider: CUDAExecutionProvider - provider_options: - device_id: 0 - use_io_binding: false - session_options: - enable_profiling: false - optimization: false - optimization_config: {} - quantization: false - quantization_config: {} - calibration: false - calibration_config: {} - auto_optimization: O4 - auto_optimization_config: - disable_shape_inference: false - auto_quantization: null - auto_quantization_config: {} - use_inference_session: true - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 8 - sequence_length: 256 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: null - can_diffuse: false - can_generate: false - forward_kwargs: {} - generate_kwargs: {} -experiment_name: bge_batch_size_sweep_ort_cuda_o4 -model: BAAI/bge-base-en-v1.5 -device: cuda -task: feature-extraction -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.0 - transformers_version: 4.34.0.dev0 - accelerate_version: 0.23.0.dev0 - diffusers_version: 0.21.0.dev0 - python_version: 3.8.10 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB diff --git a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(8)_sequence_length(256)/3/inference_results.csv b/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(8)_sequence_length(256)/3/inference_results.csv deleted file mode 100644 index 82681477..00000000 --- a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(8)_sequence_length(256)/3/inference_results.csv +++ /dev/null @@ -1,2 +0,0 @@ -,forward.latency(s),forward.throughput(samples/s),forward.peak_memory(MB) -0,0.00486,1650.0,3767 diff --git a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(None)_sequence_length(None)/multirun.yaml b/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(None)_sequence_length(None)/multirun.yaml deleted file mode 100644 index 62776c29..00000000 --- a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_cuda_o4_batch_size(None)_sequence_length(None)/multirun.yaml +++ /dev/null @@ -1,252 +0,0 @@ -hydra: - run: - dir: experiments/${experiment_name}_batch_size(${benchmark.input_shapes.batch_size})_sequence_length(${benchmark.input_shapes.sequence_length}) - sweep: - dir: experiments/${experiment_name}_batch_size(${benchmark.input_shapes.batch_size})_sequence_length(${benchmark.input_shapes.sequence_length}) - subdir: ${hydra.job.num} - launcher: - _target_: hydra._internal.core_plugins.basic_launcher.BasicLauncher - sweeper: - _target_: hydra._internal.core_plugins.basic_sweeper.BasicSweeper - max_batch_size: null - params: - benchmark.input_shapes.batch_size: 1,2,4,8,16,32,64,128,256,512,1024,2048 - 
benchmark.input_shapes.sequence_length: '256' - help: - app_name: ${hydra.job.name} - header: '${hydra.help.app_name} is powered by Hydra. - - ' - footer: 'Powered by Hydra (https://hydra.cc) - - Use --hydra-help to view Hydra specific help - - ' - template: '${hydra.help.header} - - == Configuration groups == - - Compose your configuration from those groups (group=option) - - - $APP_CONFIG_GROUPS - - - == Config == - - Override anything in the config (foo.bar=value) - - - $CONFIG - - - ${hydra.help.footer} - - ' - hydra_help: - template: 'Hydra (${hydra.runtime.version}) - - See https://hydra.cc for more info. - - - == Flags == - - $FLAGS_HELP - - - == Configuration groups == - - Compose your configuration from those groups (For example, append hydra/job_logging=disabled - to command line) - - - $HYDRA_CONFIG_GROUPS - - - Use ''--cfg hydra'' to Show the Hydra config. - - ' - hydra_help: ??? - hydra_logging: - version: 1 - formatters: - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s' - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - root: - level: INFO - handlers: - - console - disable_existing_loggers: false - job_logging: - version: 1 - formatters: - simple: - format: '[%(asctime)s][%(name)s][%(levelname)s] - %(message)s' - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s] - - %(message)s' - log_colors: - DEBUG: purple - INFO: green - WARNING: yellow - ERROR: red - CRITICAL: red - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - file: - class: logging.FileHandler - formatter: simple - filename: ${hydra.job.name}.log - root: - level: INFO - handlers: - - console - - file - disable_existing_loggers: false - env: {} - mode: MULTIRUN - searchpath: [] - callbacks: {} - output_subdir: .hydra - overrides: - hydra: - - hydra.mode=MULTIRUN - task: [] - job: - name: experiment - chdir: true - override_dirname: '' - id: ??? - num: ??? - config_name: bge_batch_size_sweep_ort_cuda_o4.yaml - env_set: - CUDA_VISIBLE_DEVICES: '3' - env_copy: [] - config: - override_dirname: - kv_sep: '=' - item_sep: ',' - exclude_keys: [] - runtime: - version: 1.3.2 - version_base: '1.3' - cwd: /workspace/optimum-benchmark/examples/fast-embedders - config_sources: - - path: hydra.conf - schema: pkg - provider: hydra - - path: optimum_benchmark - schema: pkg - provider: main - - path: hydra_plugins.hydra_colorlog.conf - schema: pkg - provider: hydra-colorlog - - path: /workspace/optimum-benchmark/examples/fast-embedders/configs - schema: file - provider: command-line - - path: '' - schema: structured - provider: schema - output_dir: ??? 
- choices: - benchmark: inference - backend: onnxruntime - hydra/env: default - hydra/callbacks: null - hydra/job_logging: colorlog - hydra/hydra_logging: colorlog - hydra/hydra_help: default - hydra/help: default - hydra/sweeper: basic - hydra/launcher: basic - hydra/output: default - verbose: false -backend: - name: onnxruntime - version: ${onnxruntime_version:} - _target_: optimum_benchmark.backends.onnxruntime.backend.ORTBackend - seed: 42 - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: false - continous_isolation_check: false - delete_cache: false - no_weights: false - export: true - use_cache: true - use_merged: false - torch_dtype: null - provider: CUDAExecutionProvider - provider_options: - device_id: ${infer_device_id:${device}} - use_io_binding: ${io_bind:${device}} - session_options: - enable_profiling: ${is_profiling:${benchmark.name}} - optimization: false - optimization_config: {} - quantization: false - quantization_config: {} - calibration: false - calibration_config: {} - auto_optimization: O4 - auto_optimization_config: {} - auto_quantization: null - auto_quantization_config: {} - use_inference_session: ${is_inference:${benchmark.name}} - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: null - sequence_length: null - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: null - can_diffuse: ${can_diffuse:${task}} - can_generate: ${can_generate:${task}} - forward_kwargs: {} - generate_kwargs: {} -experiment_name: bge_batch_size_sweep_ort_cuda_o4 -model: BAAI/bge-base-en-v1.5 -device: cuda -task: feature-extraction -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.0 - transformers_version: 4.34.0.dev0 - accelerate_version: 0.23.0.dev0 - diffusers_version: 0.21.0.dev0 - python_version: 3.8.10 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB diff --git a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_trt_fp16_batch_size(1)_sequence_length(256)/0/.hydra/config.yaml b/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_trt_fp16_batch_size(1)_sequence_length(256)/0/.hydra/config.yaml deleted file mode 100644 index 7bfc99bc..00000000 --- a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_trt_fp16_batch_size(1)_sequence_length(256)/0/.hydra/config.yaml +++ /dev/null @@ -1,80 +0,0 @@ -backend: - name: onnxruntime - version: ${onnxruntime_version:} - _target_: optimum_benchmark.backends.onnxruntime.backend.ORTBackend - seed: 42 - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: false - continous_isolation_check: false - delete_cache: false - no_weights: false - export: true - use_cache: true - use_merged: false - torch_dtype: null - provider: TensorrtExecutionProvider - provider_options: - device_id: ${infer_device_id:${device}} - trt_fp16_enable: true - use_io_binding: ${io_bind:${device}} - session_options: - enable_profiling: ${is_profiling:${benchmark.name}} - optimization: false - optimization_config: {} - quantization: false - quantization_config: {} - calibration: false - 
calibration_config: {} - auto_optimization: null - auto_optimization_config: {} - auto_quantization: null - auto_quantization_config: {} - use_inference_session: ${is_inference:${benchmark.name}} - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 1 - sequence_length: 256 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: null - can_diffuse: ${can_diffuse:${task}} - can_generate: ${can_generate:${task}} - forward_kwargs: {} - generate_kwargs: {} -experiment_name: bge_batch_size_sweep_ort_trt_fp16 -model: BAAI/bge-base-en-v1.5 -device: cuda -task: feature-extraction -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.0 - transformers_version: 4.34.0.dev0 - accelerate_version: 0.23.0.dev0 - diffusers_version: 0.21.0.dev0 - python_version: 3.8.10 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB diff --git a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_trt_fp16_batch_size(1)_sequence_length(256)/0/.hydra/hydra.yaml b/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_trt_fp16_batch_size(1)_sequence_length(256)/0/.hydra/hydra.yaml deleted file mode 100644 index 988960a4..00000000 --- a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_trt_fp16_batch_size(1)_sequence_length(256)/0/.hydra/hydra.yaml +++ /dev/null @@ -1,175 +0,0 @@ -hydra: - run: - dir: experiments/${experiment_name}_batch_size(${benchmark.input_shapes.batch_size})_sequence_length(${benchmark.input_shapes.sequence_length}) - sweep: - dir: experiments/${experiment_name}_batch_size(${benchmark.input_shapes.batch_size})_sequence_length(${benchmark.input_shapes.sequence_length}) - subdir: ${hydra.job.num} - launcher: - _target_: hydra._internal.core_plugins.basic_launcher.BasicLauncher - sweeper: - _target_: hydra._internal.core_plugins.basic_sweeper.BasicSweeper - max_batch_size: null - params: - benchmark.input_shapes.batch_size: 1,2,4,8,16,32,64,128,256,512,1024,2048 - benchmark.input_shapes.sequence_length: '256' - help: - app_name: ${hydra.job.name} - header: '${hydra.help.app_name} is powered by Hydra. - - ' - footer: 'Powered by Hydra (https://hydra.cc) - - Use --hydra-help to view Hydra specific help - - ' - template: '${hydra.help.header} - - == Configuration groups == - - Compose your configuration from those groups (group=option) - - - $APP_CONFIG_GROUPS - - - == Config == - - Override anything in the config (foo.bar=value) - - - $CONFIG - - - ${hydra.help.footer} - - ' - hydra_help: - template: 'Hydra (${hydra.runtime.version}) - - See https://hydra.cc for more info. - - - == Flags == - - $FLAGS_HELP - - - == Configuration groups == - - Compose your configuration from those groups (For example, append hydra/job_logging=disabled - to command line) - - - $HYDRA_CONFIG_GROUPS - - - Use ''--cfg hydra'' to Show the Hydra config. - - ' - hydra_help: ??? 
- hydra_logging: - version: 1 - formatters: - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s' - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - root: - level: INFO - handlers: - - console - disable_existing_loggers: false - job_logging: - version: 1 - formatters: - simple: - format: '[%(asctime)s][%(name)s][%(levelname)s] - %(message)s' - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s] - - %(message)s' - log_colors: - DEBUG: purple - INFO: green - WARNING: yellow - ERROR: red - CRITICAL: red - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - file: - class: logging.FileHandler - formatter: simple - filename: ${hydra.job.name}.log - root: - level: INFO - handlers: - - console - - file - disable_existing_loggers: false - env: {} - mode: MULTIRUN - searchpath: [] - callbacks: {} - output_subdir: .hydra - overrides: - hydra: - - hydra.mode=MULTIRUN - task: - - benchmark.input_shapes.batch_size=1 - - benchmark.input_shapes.sequence_length=256 - job: - name: experiment - chdir: true - override_dirname: benchmark.input_shapes.batch_size=1,benchmark.input_shapes.sequence_length=256 - id: '0' - num: 0 - config_name: bge_batch_size_sweep_ort_trt_fp16.yaml - env_set: - CUDA_VISIBLE_DEVICES: '3' - env_copy: [] - config: - override_dirname: - kv_sep: '=' - item_sep: ',' - exclude_keys: [] - runtime: - version: 1.3.2 - version_base: '1.3' - cwd: /workspace/optimum-benchmark/examples/fast-embedders - config_sources: - - path: hydra.conf - schema: pkg - provider: hydra - - path: optimum_benchmark - schema: pkg - provider: main - - path: hydra_plugins.hydra_colorlog.conf - schema: pkg - provider: hydra-colorlog - - path: /workspace/optimum-benchmark/examples/fast-embedders/configs - schema: file - provider: command-line - - path: '' - schema: structured - provider: schema - output_dir: /workspace/optimum-benchmark/examples/fast-embedders/experiments/bge_batch_size_sweep_ort_trt_fp16_batch_size(1)_sequence_length(256)/0 - choices: - benchmark: inference - backend: onnxruntime - hydra/env: default - hydra/callbacks: null - hydra/job_logging: colorlog - hydra/hydra_logging: colorlog - hydra/hydra_help: default - hydra/help: default - hydra/sweeper: basic - hydra/launcher: basic - hydra/output: default - verbose: false diff --git a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_trt_fp16_batch_size(1)_sequence_length(256)/0/.hydra/overrides.yaml b/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_trt_fp16_batch_size(1)_sequence_length(256)/0/.hydra/overrides.yaml deleted file mode 100644 index 68453b03..00000000 --- a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_trt_fp16_batch_size(1)_sequence_length(256)/0/.hydra/overrides.yaml +++ /dev/null @@ -1,2 +0,0 @@ -- benchmark.input_shapes.batch_size=1 -- benchmark.input_shapes.sequence_length=256 diff --git a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_trt_fp16_batch_size(1)_sequence_length(256)/0/hydra_config.yaml b/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_trt_fp16_batch_size(1)_sequence_length(256)/0/hydra_config.yaml deleted file mode 100644 index a61f1d86..00000000 --- a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_trt_fp16_batch_size(1)_sequence_length(256)/0/hydra_config.yaml +++ /dev/null @@ -1,82 +0,0 @@ -backend: - 
name: onnxruntime - version: ort-gpu:1.15.1 - _target_: optimum_benchmark.backends.onnxruntime.backend.ORTBackend - seed: 42 - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: false - continous_isolation_check: false - delete_cache: false - no_weights: false - export: true - use_cache: true - use_merged: false - torch_dtype: null - provider: TensorrtExecutionProvider - provider_options: - trt_engine_cache_enable: true - trt_engine_cache_path: tmp/trt_cache - device_id: 0 - trt_fp16_enable: true - use_io_binding: false - session_options: - enable_profiling: false - optimization: false - optimization_config: {} - quantization: false - quantization_config: {} - calibration: false - calibration_config: {} - auto_optimization: null - auto_optimization_config: {} - auto_quantization: null - auto_quantization_config: {} - use_inference_session: true - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 1 - sequence_length: 256 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: null - can_diffuse: false - can_generate: false - forward_kwargs: {} - generate_kwargs: {} -experiment_name: bge_batch_size_sweep_ort_trt_fp16 -model: BAAI/bge-base-en-v1.5 -device: cuda -task: feature-extraction -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.0 - transformers_version: 4.34.0.dev0 - accelerate_version: 0.23.0.dev0 - diffusers_version: 0.21.0.dev0 - python_version: 3.8.10 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB diff --git a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_trt_fp16_batch_size(1)_sequence_length(256)/0/inference_results.csv b/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_trt_fp16_batch_size(1)_sequence_length(256)/0/inference_results.csv deleted file mode 100644 index 1eb6aa3d..00000000 --- a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_trt_fp16_batch_size(1)_sequence_length(256)/0/inference_results.csv +++ /dev/null @@ -1,2 +0,0 @@ -,forward.latency(s),forward.throughput(samples/s),forward.peak_memory(MB) -0,0.00146,685.0,3838 diff --git a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_trt_fp16_batch_size(128)_sequence_length(256)/7/.hydra/config.yaml b/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_trt_fp16_batch_size(128)_sequence_length(256)/7/.hydra/config.yaml deleted file mode 100644 index e8b256a7..00000000 --- a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_trt_fp16_batch_size(128)_sequence_length(256)/7/.hydra/config.yaml +++ /dev/null @@ -1,80 +0,0 @@ -backend: - name: onnxruntime - version: ${onnxruntime_version:} - _target_: optimum_benchmark.backends.onnxruntime.backend.ORTBackend - seed: 42 - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: false - continous_isolation_check: false - delete_cache: false - no_weights: false - export: true - use_cache: true - use_merged: false - torch_dtype: null - provider: TensorrtExecutionProvider - provider_options: - device_id: ${infer_device_id:${device}} - trt_fp16_enable: true - use_io_binding: 
${io_bind:${device}} - session_options: - enable_profiling: ${is_profiling:${benchmark.name}} - optimization: false - optimization_config: {} - quantization: false - quantization_config: {} - calibration: false - calibration_config: {} - auto_optimization: null - auto_optimization_config: {} - auto_quantization: null - auto_quantization_config: {} - use_inference_session: ${is_inference:${benchmark.name}} - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 128 - sequence_length: 256 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: null - can_diffuse: ${can_diffuse:${task}} - can_generate: ${can_generate:${task}} - forward_kwargs: {} - generate_kwargs: {} -experiment_name: bge_batch_size_sweep_ort_trt_fp16 -model: BAAI/bge-base-en-v1.5 -device: cuda -task: feature-extraction -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.0 - transformers_version: 4.34.0.dev0 - accelerate_version: 0.23.0.dev0 - diffusers_version: 0.21.0.dev0 - python_version: 3.8.10 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB diff --git a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_trt_fp16_batch_size(128)_sequence_length(256)/7/.hydra/hydra.yaml b/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_trt_fp16_batch_size(128)_sequence_length(256)/7/.hydra/hydra.yaml deleted file mode 100644 index 103379e5..00000000 --- a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_trt_fp16_batch_size(128)_sequence_length(256)/7/.hydra/hydra.yaml +++ /dev/null @@ -1,175 +0,0 @@ -hydra: - run: - dir: experiments/${experiment_name}_batch_size(${benchmark.input_shapes.batch_size})_sequence_length(${benchmark.input_shapes.sequence_length}) - sweep: - dir: experiments/${experiment_name}_batch_size(${benchmark.input_shapes.batch_size})_sequence_length(${benchmark.input_shapes.sequence_length}) - subdir: ${hydra.job.num} - launcher: - _target_: hydra._internal.core_plugins.basic_launcher.BasicLauncher - sweeper: - _target_: hydra._internal.core_plugins.basic_sweeper.BasicSweeper - max_batch_size: null - params: - benchmark.input_shapes.batch_size: 1,2,4,8,16,32,64,128,256,512,1024,2048 - benchmark.input_shapes.sequence_length: '256' - help: - app_name: ${hydra.job.name} - header: '${hydra.help.app_name} is powered by Hydra. - - ' - footer: 'Powered by Hydra (https://hydra.cc) - - Use --hydra-help to view Hydra specific help - - ' - template: '${hydra.help.header} - - == Configuration groups == - - Compose your configuration from those groups (group=option) - - - $APP_CONFIG_GROUPS - - - == Config == - - Override anything in the config (foo.bar=value) - - - $CONFIG - - - ${hydra.help.footer} - - ' - hydra_help: - template: 'Hydra (${hydra.runtime.version}) - - See https://hydra.cc for more info. - - - == Flags == - - $FLAGS_HELP - - - == Configuration groups == - - Compose your configuration from those groups (For example, append hydra/job_logging=disabled - to command line) - - - $HYDRA_CONFIG_GROUPS - - - Use ''--cfg hydra'' to Show the Hydra config. - - ' - hydra_help: ??? 
- hydra_logging: - version: 1 - formatters: - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s' - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - root: - level: INFO - handlers: - - console - disable_existing_loggers: false - job_logging: - version: 1 - formatters: - simple: - format: '[%(asctime)s][%(name)s][%(levelname)s] - %(message)s' - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s] - - %(message)s' - log_colors: - DEBUG: purple - INFO: green - WARNING: yellow - ERROR: red - CRITICAL: red - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - file: - class: logging.FileHandler - formatter: simple - filename: ${hydra.job.name}.log - root: - level: INFO - handlers: - - console - - file - disable_existing_loggers: false - env: {} - mode: MULTIRUN - searchpath: [] - callbacks: {} - output_subdir: .hydra - overrides: - hydra: - - hydra.mode=MULTIRUN - task: - - benchmark.input_shapes.batch_size=128 - - benchmark.input_shapes.sequence_length=256 - job: - name: experiment - chdir: true - override_dirname: benchmark.input_shapes.batch_size=128,benchmark.input_shapes.sequence_length=256 - id: '7' - num: 7 - config_name: bge_batch_size_sweep_ort_trt_fp16.yaml - env_set: - CUDA_VISIBLE_DEVICES: '3' - env_copy: [] - config: - override_dirname: - kv_sep: '=' - item_sep: ',' - exclude_keys: [] - runtime: - version: 1.3.2 - version_base: '1.3' - cwd: /workspace/optimum-benchmark/examples/fast-embedders - config_sources: - - path: hydra.conf - schema: pkg - provider: hydra - - path: optimum_benchmark - schema: pkg - provider: main - - path: hydra_plugins.hydra_colorlog.conf - schema: pkg - provider: hydra-colorlog - - path: /workspace/optimum-benchmark/examples/fast-embedders/configs - schema: file - provider: command-line - - path: '' - schema: structured - provider: schema - output_dir: /workspace/optimum-benchmark/examples/fast-embedders/experiments/bge_batch_size_sweep_ort_trt_fp16_batch_size(128)_sequence_length(256)/7 - choices: - benchmark: inference - backend: onnxruntime - hydra/env: default - hydra/callbacks: null - hydra/job_logging: colorlog - hydra/hydra_logging: colorlog - hydra/hydra_help: default - hydra/help: default - hydra/sweeper: basic - hydra/launcher: basic - hydra/output: default - verbose: false diff --git a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_trt_fp16_batch_size(128)_sequence_length(256)/7/.hydra/overrides.yaml b/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_trt_fp16_batch_size(128)_sequence_length(256)/7/.hydra/overrides.yaml deleted file mode 100644 index 459be207..00000000 --- a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_trt_fp16_batch_size(128)_sequence_length(256)/7/.hydra/overrides.yaml +++ /dev/null @@ -1,2 +0,0 @@ -- benchmark.input_shapes.batch_size=128 -- benchmark.input_shapes.sequence_length=256 diff --git a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_trt_fp16_batch_size(128)_sequence_length(256)/7/hydra_config.yaml b/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_trt_fp16_batch_size(128)_sequence_length(256)/7/hydra_config.yaml deleted file mode 100644 index 45419696..00000000 --- a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_trt_fp16_batch_size(128)_sequence_length(256)/7/hydra_config.yaml +++ /dev/null @@ -1,82 
+0,0 @@ -backend: - name: onnxruntime - version: ort-gpu:1.15.1 - _target_: optimum_benchmark.backends.onnxruntime.backend.ORTBackend - seed: 42 - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: false - continous_isolation_check: false - delete_cache: false - no_weights: false - export: true - use_cache: true - use_merged: false - torch_dtype: null - provider: TensorrtExecutionProvider - provider_options: - trt_engine_cache_enable: true - trt_engine_cache_path: tmp/trt_cache - device_id: 0 - trt_fp16_enable: true - use_io_binding: false - session_options: - enable_profiling: false - optimization: false - optimization_config: {} - quantization: false - quantization_config: {} - calibration: false - calibration_config: {} - auto_optimization: null - auto_optimization_config: {} - auto_quantization: null - auto_quantization_config: {} - use_inference_session: true - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 128 - sequence_length: 256 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: null - can_diffuse: false - can_generate: false - forward_kwargs: {} - generate_kwargs: {} -experiment_name: bge_batch_size_sweep_ort_trt_fp16 -model: BAAI/bge-base-en-v1.5 -device: cuda -task: feature-extraction -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.0 - transformers_version: 4.34.0.dev0 - accelerate_version: 0.23.0.dev0 - diffusers_version: 0.21.0.dev0 - python_version: 3.8.10 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB diff --git a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_trt_fp16_batch_size(128)_sequence_length(256)/7/inference_results.csv b/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_trt_fp16_batch_size(128)_sequence_length(256)/7/inference_results.csv deleted file mode 100644 index 9bf5e5ed..00000000 --- a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_trt_fp16_batch_size(128)_sequence_length(256)/7/inference_results.csv +++ /dev/null @@ -1,2 +0,0 @@ -,forward.latency(s),forward.throughput(samples/s),forward.peak_memory(MB) -0,0.0584,2190.0,4612 diff --git a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_trt_fp16_batch_size(16)_sequence_length(256)/4/.hydra/config.yaml b/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_trt_fp16_batch_size(16)_sequence_length(256)/4/.hydra/config.yaml deleted file mode 100644 index 3d72d184..00000000 --- a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_trt_fp16_batch_size(16)_sequence_length(256)/4/.hydra/config.yaml +++ /dev/null @@ -1,80 +0,0 @@ -backend: - name: onnxruntime - version: ${onnxruntime_version:} - _target_: optimum_benchmark.backends.onnxruntime.backend.ORTBackend - seed: 42 - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: false - continous_isolation_check: false - delete_cache: false - no_weights: false - export: true - use_cache: true - use_merged: false - torch_dtype: null - provider: TensorrtExecutionProvider - provider_options: - device_id: ${infer_device_id:${device}} - trt_fp16_enable: 
true - use_io_binding: ${io_bind:${device}} - session_options: - enable_profiling: ${is_profiling:${benchmark.name}} - optimization: false - optimization_config: {} - quantization: false - quantization_config: {} - calibration: false - calibration_config: {} - auto_optimization: null - auto_optimization_config: {} - auto_quantization: null - auto_quantization_config: {} - use_inference_session: ${is_inference:${benchmark.name}} - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 16 - sequence_length: 256 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: null - can_diffuse: ${can_diffuse:${task}} - can_generate: ${can_generate:${task}} - forward_kwargs: {} - generate_kwargs: {} -experiment_name: bge_batch_size_sweep_ort_trt_fp16 -model: BAAI/bge-base-en-v1.5 -device: cuda -task: feature-extraction -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.0 - transformers_version: 4.34.0.dev0 - accelerate_version: 0.23.0.dev0 - diffusers_version: 0.21.0.dev0 - python_version: 3.8.10 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB diff --git a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_trt_fp16_batch_size(16)_sequence_length(256)/4/.hydra/hydra.yaml b/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_trt_fp16_batch_size(16)_sequence_length(256)/4/.hydra/hydra.yaml deleted file mode 100644 index a316954a..00000000 --- a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_trt_fp16_batch_size(16)_sequence_length(256)/4/.hydra/hydra.yaml +++ /dev/null @@ -1,175 +0,0 @@ -hydra: - run: - dir: experiments/${experiment_name}_batch_size(${benchmark.input_shapes.batch_size})_sequence_length(${benchmark.input_shapes.sequence_length}) - sweep: - dir: experiments/${experiment_name}_batch_size(${benchmark.input_shapes.batch_size})_sequence_length(${benchmark.input_shapes.sequence_length}) - subdir: ${hydra.job.num} - launcher: - _target_: hydra._internal.core_plugins.basic_launcher.BasicLauncher - sweeper: - _target_: hydra._internal.core_plugins.basic_sweeper.BasicSweeper - max_batch_size: null - params: - benchmark.input_shapes.batch_size: 1,2,4,8,16,32,64,128,256,512,1024,2048 - benchmark.input_shapes.sequence_length: '256' - help: - app_name: ${hydra.job.name} - header: '${hydra.help.app_name} is powered by Hydra. - - ' - footer: 'Powered by Hydra (https://hydra.cc) - - Use --hydra-help to view Hydra specific help - - ' - template: '${hydra.help.header} - - == Configuration groups == - - Compose your configuration from those groups (group=option) - - - $APP_CONFIG_GROUPS - - - == Config == - - Override anything in the config (foo.bar=value) - - - $CONFIG - - - ${hydra.help.footer} - - ' - hydra_help: - template: 'Hydra (${hydra.runtime.version}) - - See https://hydra.cc for more info. - - - == Flags == - - $FLAGS_HELP - - - == Configuration groups == - - Compose your configuration from those groups (For example, append hydra/job_logging=disabled - to command line) - - - $HYDRA_CONFIG_GROUPS - - - Use ''--cfg hydra'' to Show the Hydra config. - - ' - hydra_help: ??? 
- hydra_logging: - version: 1 - formatters: - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s' - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - root: - level: INFO - handlers: - - console - disable_existing_loggers: false - job_logging: - version: 1 - formatters: - simple: - format: '[%(asctime)s][%(name)s][%(levelname)s] - %(message)s' - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s] - - %(message)s' - log_colors: - DEBUG: purple - INFO: green - WARNING: yellow - ERROR: red - CRITICAL: red - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - file: - class: logging.FileHandler - formatter: simple - filename: ${hydra.job.name}.log - root: - level: INFO - handlers: - - console - - file - disable_existing_loggers: false - env: {} - mode: MULTIRUN - searchpath: [] - callbacks: {} - output_subdir: .hydra - overrides: - hydra: - - hydra.mode=MULTIRUN - task: - - benchmark.input_shapes.batch_size=16 - - benchmark.input_shapes.sequence_length=256 - job: - name: experiment - chdir: true - override_dirname: benchmark.input_shapes.batch_size=16,benchmark.input_shapes.sequence_length=256 - id: '4' - num: 4 - config_name: bge_batch_size_sweep_ort_trt_fp16.yaml - env_set: - CUDA_VISIBLE_DEVICES: '3' - env_copy: [] - config: - override_dirname: - kv_sep: '=' - item_sep: ',' - exclude_keys: [] - runtime: - version: 1.3.2 - version_base: '1.3' - cwd: /workspace/optimum-benchmark/examples/fast-embedders - config_sources: - - path: hydra.conf - schema: pkg - provider: hydra - - path: optimum_benchmark - schema: pkg - provider: main - - path: hydra_plugins.hydra_colorlog.conf - schema: pkg - provider: hydra-colorlog - - path: /workspace/optimum-benchmark/examples/fast-embedders/configs - schema: file - provider: command-line - - path: '' - schema: structured - provider: schema - output_dir: /workspace/optimum-benchmark/examples/fast-embedders/experiments/bge_batch_size_sweep_ort_trt_fp16_batch_size(16)_sequence_length(256)/4 - choices: - benchmark: inference - backend: onnxruntime - hydra/env: default - hydra/callbacks: null - hydra/job_logging: colorlog - hydra/hydra_logging: colorlog - hydra/hydra_help: default - hydra/help: default - hydra/sweeper: basic - hydra/launcher: basic - hydra/output: default - verbose: false diff --git a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_trt_fp16_batch_size(16)_sequence_length(256)/4/.hydra/overrides.yaml b/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_trt_fp16_batch_size(16)_sequence_length(256)/4/.hydra/overrides.yaml deleted file mode 100644 index 9e742a69..00000000 --- a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_trt_fp16_batch_size(16)_sequence_length(256)/4/.hydra/overrides.yaml +++ /dev/null @@ -1,2 +0,0 @@ -- benchmark.input_shapes.batch_size=16 -- benchmark.input_shapes.sequence_length=256 diff --git a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_trt_fp16_batch_size(16)_sequence_length(256)/4/hydra_config.yaml b/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_trt_fp16_batch_size(16)_sequence_length(256)/4/hydra_config.yaml deleted file mode 100644 index 7db739c3..00000000 --- a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_trt_fp16_batch_size(16)_sequence_length(256)/4/hydra_config.yaml +++ /dev/null @@ -1,82 +0,0 @@ 
-backend: - name: onnxruntime - version: ort-gpu:1.15.1 - _target_: optimum_benchmark.backends.onnxruntime.backend.ORTBackend - seed: 42 - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: false - continous_isolation_check: false - delete_cache: false - no_weights: false - export: true - use_cache: true - use_merged: false - torch_dtype: null - provider: TensorrtExecutionProvider - provider_options: - trt_engine_cache_enable: true - trt_engine_cache_path: tmp/trt_cache - device_id: 0 - trt_fp16_enable: true - use_io_binding: false - session_options: - enable_profiling: false - optimization: false - optimization_config: {} - quantization: false - quantization_config: {} - calibration: false - calibration_config: {} - auto_optimization: null - auto_optimization_config: {} - auto_quantization: null - auto_quantization_config: {} - use_inference_session: true - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 16 - sequence_length: 256 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: null - can_diffuse: false - can_generate: false - forward_kwargs: {} - generate_kwargs: {} -experiment_name: bge_batch_size_sweep_ort_trt_fp16 -model: BAAI/bge-base-en-v1.5 -device: cuda -task: feature-extraction -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.0 - transformers_version: 4.34.0.dev0 - accelerate_version: 0.23.0.dev0 - diffusers_version: 0.21.0.dev0 - python_version: 3.8.10 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB diff --git a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_trt_fp16_batch_size(16)_sequence_length(256)/4/inference_results.csv b/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_trt_fp16_batch_size(16)_sequence_length(256)/4/inference_results.csv deleted file mode 100644 index 25531fcb..00000000 --- a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_trt_fp16_batch_size(16)_sequence_length(256)/4/inference_results.csv +++ /dev/null @@ -1,2 +0,0 @@ -,forward.latency(s),forward.throughput(samples/s),forward.peak_memory(MB) -0,0.00811,1970.0,3943 diff --git a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_trt_fp16_batch_size(2)_sequence_length(256)/1/.hydra/config.yaml b/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_trt_fp16_batch_size(2)_sequence_length(256)/1/.hydra/config.yaml deleted file mode 100644 index 89eddd9d..00000000 --- a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_trt_fp16_batch_size(2)_sequence_length(256)/1/.hydra/config.yaml +++ /dev/null @@ -1,80 +0,0 @@ -backend: - name: onnxruntime - version: ${onnxruntime_version:} - _target_: optimum_benchmark.backends.onnxruntime.backend.ORTBackend - seed: 42 - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: false - continous_isolation_check: false - delete_cache: false - no_weights: false - export: true - use_cache: true - use_merged: false - torch_dtype: null - provider: TensorrtExecutionProvider - provider_options: - device_id: ${infer_device_id:${device}} - trt_fp16_enable: true - 
use_io_binding: ${io_bind:${device}} - session_options: - enable_profiling: ${is_profiling:${benchmark.name}} - optimization: false - optimization_config: {} - quantization: false - quantization_config: {} - calibration: false - calibration_config: {} - auto_optimization: null - auto_optimization_config: {} - auto_quantization: null - auto_quantization_config: {} - use_inference_session: ${is_inference:${benchmark.name}} - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 2 - sequence_length: 256 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: null - can_diffuse: ${can_diffuse:${task}} - can_generate: ${can_generate:${task}} - forward_kwargs: {} - generate_kwargs: {} -experiment_name: bge_batch_size_sweep_ort_trt_fp16 -model: BAAI/bge-base-en-v1.5 -device: cuda -task: feature-extraction -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.0 - transformers_version: 4.34.0.dev0 - accelerate_version: 0.23.0.dev0 - diffusers_version: 0.21.0.dev0 - python_version: 3.8.10 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB diff --git a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_trt_fp16_batch_size(2)_sequence_length(256)/1/.hydra/hydra.yaml b/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_trt_fp16_batch_size(2)_sequence_length(256)/1/.hydra/hydra.yaml deleted file mode 100644 index 7835eaef..00000000 --- a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_trt_fp16_batch_size(2)_sequence_length(256)/1/.hydra/hydra.yaml +++ /dev/null @@ -1,175 +0,0 @@ -hydra: - run: - dir: experiments/${experiment_name}_batch_size(${benchmark.input_shapes.batch_size})_sequence_length(${benchmark.input_shapes.sequence_length}) - sweep: - dir: experiments/${experiment_name}_batch_size(${benchmark.input_shapes.batch_size})_sequence_length(${benchmark.input_shapes.sequence_length}) - subdir: ${hydra.job.num} - launcher: - _target_: hydra._internal.core_plugins.basic_launcher.BasicLauncher - sweeper: - _target_: hydra._internal.core_plugins.basic_sweeper.BasicSweeper - max_batch_size: null - params: - benchmark.input_shapes.batch_size: 1,2,4,8,16,32,64,128,256,512,1024,2048 - benchmark.input_shapes.sequence_length: '256' - help: - app_name: ${hydra.job.name} - header: '${hydra.help.app_name} is powered by Hydra. - - ' - footer: 'Powered by Hydra (https://hydra.cc) - - Use --hydra-help to view Hydra specific help - - ' - template: '${hydra.help.header} - - == Configuration groups == - - Compose your configuration from those groups (group=option) - - - $APP_CONFIG_GROUPS - - - == Config == - - Override anything in the config (foo.bar=value) - - - $CONFIG - - - ${hydra.help.footer} - - ' - hydra_help: - template: 'Hydra (${hydra.runtime.version}) - - See https://hydra.cc for more info. - - - == Flags == - - $FLAGS_HELP - - - == Configuration groups == - - Compose your configuration from those groups (For example, append hydra/job_logging=disabled - to command line) - - - $HYDRA_CONFIG_GROUPS - - - Use ''--cfg hydra'' to Show the Hydra config. - - ' - hydra_help: ??? 
- hydra_logging: - version: 1 - formatters: - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s' - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - root: - level: INFO - handlers: - - console - disable_existing_loggers: false - job_logging: - version: 1 - formatters: - simple: - format: '[%(asctime)s][%(name)s][%(levelname)s] - %(message)s' - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s] - - %(message)s' - log_colors: - DEBUG: purple - INFO: green - WARNING: yellow - ERROR: red - CRITICAL: red - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - file: - class: logging.FileHandler - formatter: simple - filename: ${hydra.job.name}.log - root: - level: INFO - handlers: - - console - - file - disable_existing_loggers: false - env: {} - mode: MULTIRUN - searchpath: [] - callbacks: {} - output_subdir: .hydra - overrides: - hydra: - - hydra.mode=MULTIRUN - task: - - benchmark.input_shapes.batch_size=2 - - benchmark.input_shapes.sequence_length=256 - job: - name: experiment - chdir: true - override_dirname: benchmark.input_shapes.batch_size=2,benchmark.input_shapes.sequence_length=256 - id: '1' - num: 1 - config_name: bge_batch_size_sweep_ort_trt_fp16.yaml - env_set: - CUDA_VISIBLE_DEVICES: '3' - env_copy: [] - config: - override_dirname: - kv_sep: '=' - item_sep: ',' - exclude_keys: [] - runtime: - version: 1.3.2 - version_base: '1.3' - cwd: /workspace/optimum-benchmark/examples/fast-embedders - config_sources: - - path: hydra.conf - schema: pkg - provider: hydra - - path: optimum_benchmark - schema: pkg - provider: main - - path: hydra_plugins.hydra_colorlog.conf - schema: pkg - provider: hydra-colorlog - - path: /workspace/optimum-benchmark/examples/fast-embedders/configs - schema: file - provider: command-line - - path: '' - schema: structured - provider: schema - output_dir: /workspace/optimum-benchmark/examples/fast-embedders/experiments/bge_batch_size_sweep_ort_trt_fp16_batch_size(2)_sequence_length(256)/1 - choices: - benchmark: inference - backend: onnxruntime - hydra/env: default - hydra/callbacks: null - hydra/job_logging: colorlog - hydra/hydra_logging: colorlog - hydra/hydra_help: default - hydra/help: default - hydra/sweeper: basic - hydra/launcher: basic - hydra/output: default - verbose: false diff --git a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_trt_fp16_batch_size(2)_sequence_length(256)/1/.hydra/overrides.yaml b/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_trt_fp16_batch_size(2)_sequence_length(256)/1/.hydra/overrides.yaml deleted file mode 100644 index bf46a09f..00000000 --- a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_trt_fp16_batch_size(2)_sequence_length(256)/1/.hydra/overrides.yaml +++ /dev/null @@ -1,2 +0,0 @@ -- benchmark.input_shapes.batch_size=2 -- benchmark.input_shapes.sequence_length=256 diff --git a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_trt_fp16_batch_size(2)_sequence_length(256)/1/hydra_config.yaml b/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_trt_fp16_batch_size(2)_sequence_length(256)/1/hydra_config.yaml deleted file mode 100644 index ee4705e6..00000000 --- a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_trt_fp16_batch_size(2)_sequence_length(256)/1/hydra_config.yaml +++ /dev/null @@ -1,82 +0,0 @@ -backend: - 
name: onnxruntime - version: ort-gpu:1.15.1 - _target_: optimum_benchmark.backends.onnxruntime.backend.ORTBackend - seed: 42 - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: false - continous_isolation_check: false - delete_cache: false - no_weights: false - export: true - use_cache: true - use_merged: false - torch_dtype: null - provider: TensorrtExecutionProvider - provider_options: - trt_engine_cache_enable: true - trt_engine_cache_path: tmp/trt_cache - device_id: 0 - trt_fp16_enable: true - use_io_binding: false - session_options: - enable_profiling: false - optimization: false - optimization_config: {} - quantization: false - quantization_config: {} - calibration: false - calibration_config: {} - auto_optimization: null - auto_optimization_config: {} - auto_quantization: null - auto_quantization_config: {} - use_inference_session: true - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 2 - sequence_length: 256 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: null - can_diffuse: false - can_generate: false - forward_kwargs: {} - generate_kwargs: {} -experiment_name: bge_batch_size_sweep_ort_trt_fp16 -model: BAAI/bge-base-en-v1.5 -device: cuda -task: feature-extraction -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.0 - transformers_version: 4.34.0.dev0 - accelerate_version: 0.23.0.dev0 - diffusers_version: 0.21.0.dev0 - python_version: 3.8.10 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB diff --git a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_trt_fp16_batch_size(2)_sequence_length(256)/1/inference_results.csv b/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_trt_fp16_batch_size(2)_sequence_length(256)/1/inference_results.csv deleted file mode 100644 index 66368e46..00000000 --- a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_trt_fp16_batch_size(2)_sequence_length(256)/1/inference_results.csv +++ /dev/null @@ -1,2 +0,0 @@ -,forward.latency(s),forward.throughput(samples/s),forward.peak_memory(MB) -0,0.00176,1140.0,3851 diff --git a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_trt_fp16_batch_size(256)_sequence_length(256)/8/.hydra/config.yaml b/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_trt_fp16_batch_size(256)_sequence_length(256)/8/.hydra/config.yaml deleted file mode 100644 index 9ae4cf91..00000000 --- a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_trt_fp16_batch_size(256)_sequence_length(256)/8/.hydra/config.yaml +++ /dev/null @@ -1,80 +0,0 @@ -backend: - name: onnxruntime - version: ${onnxruntime_version:} - _target_: optimum_benchmark.backends.onnxruntime.backend.ORTBackend - seed: 42 - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: false - continous_isolation_check: false - delete_cache: false - no_weights: false - export: true - use_cache: true - use_merged: false - torch_dtype: null - provider: TensorrtExecutionProvider - provider_options: - device_id: ${infer_device_id:${device}} - trt_fp16_enable: true - use_io_binding: 
${io_bind:${device}} - session_options: - enable_profiling: ${is_profiling:${benchmark.name}} - optimization: false - optimization_config: {} - quantization: false - quantization_config: {} - calibration: false - calibration_config: {} - auto_optimization: null - auto_optimization_config: {} - auto_quantization: null - auto_quantization_config: {} - use_inference_session: ${is_inference:${benchmark.name}} - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 256 - sequence_length: 256 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: null - can_diffuse: ${can_diffuse:${task}} - can_generate: ${can_generate:${task}} - forward_kwargs: {} - generate_kwargs: {} -experiment_name: bge_batch_size_sweep_ort_trt_fp16 -model: BAAI/bge-base-en-v1.5 -device: cuda -task: feature-extraction -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.0 - transformers_version: 4.34.0.dev0 - accelerate_version: 0.23.0.dev0 - diffusers_version: 0.21.0.dev0 - python_version: 3.8.10 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB diff --git a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_trt_fp16_batch_size(256)_sequence_length(256)/8/.hydra/hydra.yaml b/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_trt_fp16_batch_size(256)_sequence_length(256)/8/.hydra/hydra.yaml deleted file mode 100644 index a8e2e54d..00000000 --- a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_trt_fp16_batch_size(256)_sequence_length(256)/8/.hydra/hydra.yaml +++ /dev/null @@ -1,175 +0,0 @@ -hydra: - run: - dir: experiments/${experiment_name}_batch_size(${benchmark.input_shapes.batch_size})_sequence_length(${benchmark.input_shapes.sequence_length}) - sweep: - dir: experiments/${experiment_name}_batch_size(${benchmark.input_shapes.batch_size})_sequence_length(${benchmark.input_shapes.sequence_length}) - subdir: ${hydra.job.num} - launcher: - _target_: hydra._internal.core_plugins.basic_launcher.BasicLauncher - sweeper: - _target_: hydra._internal.core_plugins.basic_sweeper.BasicSweeper - max_batch_size: null - params: - benchmark.input_shapes.batch_size: 1,2,4,8,16,32,64,128,256,512,1024,2048 - benchmark.input_shapes.sequence_length: '256' - help: - app_name: ${hydra.job.name} - header: '${hydra.help.app_name} is powered by Hydra. - - ' - footer: 'Powered by Hydra (https://hydra.cc) - - Use --hydra-help to view Hydra specific help - - ' - template: '${hydra.help.header} - - == Configuration groups == - - Compose your configuration from those groups (group=option) - - - $APP_CONFIG_GROUPS - - - == Config == - - Override anything in the config (foo.bar=value) - - - $CONFIG - - - ${hydra.help.footer} - - ' - hydra_help: - template: 'Hydra (${hydra.runtime.version}) - - See https://hydra.cc for more info. - - - == Flags == - - $FLAGS_HELP - - - == Configuration groups == - - Compose your configuration from those groups (For example, append hydra/job_logging=disabled - to command line) - - - $HYDRA_CONFIG_GROUPS - - - Use ''--cfg hydra'' to Show the Hydra config. - - ' - hydra_help: ??? 
- hydra_logging: - version: 1 - formatters: - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s' - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - root: - level: INFO - handlers: - - console - disable_existing_loggers: false - job_logging: - version: 1 - formatters: - simple: - format: '[%(asctime)s][%(name)s][%(levelname)s] - %(message)s' - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s] - - %(message)s' - log_colors: - DEBUG: purple - INFO: green - WARNING: yellow - ERROR: red - CRITICAL: red - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - file: - class: logging.FileHandler - formatter: simple - filename: ${hydra.job.name}.log - root: - level: INFO - handlers: - - console - - file - disable_existing_loggers: false - env: {} - mode: MULTIRUN - searchpath: [] - callbacks: {} - output_subdir: .hydra - overrides: - hydra: - - hydra.mode=MULTIRUN - task: - - benchmark.input_shapes.batch_size=256 - - benchmark.input_shapes.sequence_length=256 - job: - name: experiment - chdir: true - override_dirname: benchmark.input_shapes.batch_size=256,benchmark.input_shapes.sequence_length=256 - id: '8' - num: 8 - config_name: bge_batch_size_sweep_ort_trt_fp16.yaml - env_set: - CUDA_VISIBLE_DEVICES: '3' - env_copy: [] - config: - override_dirname: - kv_sep: '=' - item_sep: ',' - exclude_keys: [] - runtime: - version: 1.3.2 - version_base: '1.3' - cwd: /workspace/optimum-benchmark/examples/fast-embedders - config_sources: - - path: hydra.conf - schema: pkg - provider: hydra - - path: optimum_benchmark - schema: pkg - provider: main - - path: hydra_plugins.hydra_colorlog.conf - schema: pkg - provider: hydra-colorlog - - path: /workspace/optimum-benchmark/examples/fast-embedders/configs - schema: file - provider: command-line - - path: '' - schema: structured - provider: schema - output_dir: /workspace/optimum-benchmark/examples/fast-embedders/experiments/bge_batch_size_sweep_ort_trt_fp16_batch_size(256)_sequence_length(256)/8 - choices: - benchmark: inference - backend: onnxruntime - hydra/env: default - hydra/callbacks: null - hydra/job_logging: colorlog - hydra/hydra_logging: colorlog - hydra/hydra_help: default - hydra/help: default - hydra/sweeper: basic - hydra/launcher: basic - hydra/output: default - verbose: false diff --git a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_trt_fp16_batch_size(256)_sequence_length(256)/8/.hydra/overrides.yaml b/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_trt_fp16_batch_size(256)_sequence_length(256)/8/.hydra/overrides.yaml deleted file mode 100644 index 827e72de..00000000 --- a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_trt_fp16_batch_size(256)_sequence_length(256)/8/.hydra/overrides.yaml +++ /dev/null @@ -1,2 +0,0 @@ -- benchmark.input_shapes.batch_size=256 -- benchmark.input_shapes.sequence_length=256 diff --git a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_trt_fp16_batch_size(256)_sequence_length(256)/8/hydra_config.yaml b/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_trt_fp16_batch_size(256)_sequence_length(256)/8/hydra_config.yaml deleted file mode 100644 index 06530c91..00000000 --- a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_trt_fp16_batch_size(256)_sequence_length(256)/8/hydra_config.yaml +++ /dev/null @@ -1,82 
+0,0 @@ -backend: - name: onnxruntime - version: ort-gpu:1.15.1 - _target_: optimum_benchmark.backends.onnxruntime.backend.ORTBackend - seed: 42 - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: false - continous_isolation_check: false - delete_cache: false - no_weights: false - export: true - use_cache: true - use_merged: false - torch_dtype: null - provider: TensorrtExecutionProvider - provider_options: - trt_engine_cache_enable: true - trt_engine_cache_path: tmp/trt_cache - device_id: 0 - trt_fp16_enable: true - use_io_binding: false - session_options: - enable_profiling: false - optimization: false - optimization_config: {} - quantization: false - quantization_config: {} - calibration: false - calibration_config: {} - auto_optimization: null - auto_optimization_config: {} - auto_quantization: null - auto_quantization_config: {} - use_inference_session: true - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 256 - sequence_length: 256 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: null - can_diffuse: false - can_generate: false - forward_kwargs: {} - generate_kwargs: {} -experiment_name: bge_batch_size_sweep_ort_trt_fp16 -model: BAAI/bge-base-en-v1.5 -device: cuda -task: feature-extraction -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.0 - transformers_version: 4.34.0.dev0 - accelerate_version: 0.23.0.dev0 - diffusers_version: 0.21.0.dev0 - python_version: 3.8.10 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB diff --git a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_trt_fp16_batch_size(256)_sequence_length(256)/8/inference_results.csv b/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_trt_fp16_batch_size(256)_sequence_length(256)/8/inference_results.csv deleted file mode 100644 index e0c9802a..00000000 --- a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_trt_fp16_batch_size(256)_sequence_length(256)/8/inference_results.csv +++ /dev/null @@ -1,2 +0,0 @@ -,forward.latency(s),forward.throughput(samples/s),forward.peak_memory(MB) -0,0.137,1870.0,5514 diff --git a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_trt_fp16_batch_size(32)_sequence_length(256)/5/.hydra/config.yaml b/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_trt_fp16_batch_size(32)_sequence_length(256)/5/.hydra/config.yaml deleted file mode 100644 index 9bbdc30e..00000000 --- a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_trt_fp16_batch_size(32)_sequence_length(256)/5/.hydra/config.yaml +++ /dev/null @@ -1,80 +0,0 @@ -backend: - name: onnxruntime - version: ${onnxruntime_version:} - _target_: optimum_benchmark.backends.onnxruntime.backend.ORTBackend - seed: 42 - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: false - continous_isolation_check: false - delete_cache: false - no_weights: false - export: true - use_cache: true - use_merged: false - torch_dtype: null - provider: TensorrtExecutionProvider - provider_options: - device_id: ${infer_device_id:${device}} - trt_fp16_enable: 
true - use_io_binding: ${io_bind:${device}} - session_options: - enable_profiling: ${is_profiling:${benchmark.name}} - optimization: false - optimization_config: {} - quantization: false - quantization_config: {} - calibration: false - calibration_config: {} - auto_optimization: null - auto_optimization_config: {} - auto_quantization: null - auto_quantization_config: {} - use_inference_session: ${is_inference:${benchmark.name}} - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 32 - sequence_length: 256 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: null - can_diffuse: ${can_diffuse:${task}} - can_generate: ${can_generate:${task}} - forward_kwargs: {} - generate_kwargs: {} -experiment_name: bge_batch_size_sweep_ort_trt_fp16 -model: BAAI/bge-base-en-v1.5 -device: cuda -task: feature-extraction -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.0 - transformers_version: 4.34.0.dev0 - accelerate_version: 0.23.0.dev0 - diffusers_version: 0.21.0.dev0 - python_version: 3.8.10 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB diff --git a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_trt_fp16_batch_size(32)_sequence_length(256)/5/.hydra/hydra.yaml b/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_trt_fp16_batch_size(32)_sequence_length(256)/5/.hydra/hydra.yaml deleted file mode 100644 index a5555aae..00000000 --- a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_trt_fp16_batch_size(32)_sequence_length(256)/5/.hydra/hydra.yaml +++ /dev/null @@ -1,175 +0,0 @@ -hydra: - run: - dir: experiments/${experiment_name}_batch_size(${benchmark.input_shapes.batch_size})_sequence_length(${benchmark.input_shapes.sequence_length}) - sweep: - dir: experiments/${experiment_name}_batch_size(${benchmark.input_shapes.batch_size})_sequence_length(${benchmark.input_shapes.sequence_length}) - subdir: ${hydra.job.num} - launcher: - _target_: hydra._internal.core_plugins.basic_launcher.BasicLauncher - sweeper: - _target_: hydra._internal.core_plugins.basic_sweeper.BasicSweeper - max_batch_size: null - params: - benchmark.input_shapes.batch_size: 1,2,4,8,16,32,64,128,256,512,1024,2048 - benchmark.input_shapes.sequence_length: '256' - help: - app_name: ${hydra.job.name} - header: '${hydra.help.app_name} is powered by Hydra. - - ' - footer: 'Powered by Hydra (https://hydra.cc) - - Use --hydra-help to view Hydra specific help - - ' - template: '${hydra.help.header} - - == Configuration groups == - - Compose your configuration from those groups (group=option) - - - $APP_CONFIG_GROUPS - - - == Config == - - Override anything in the config (foo.bar=value) - - - $CONFIG - - - ${hydra.help.footer} - - ' - hydra_help: - template: 'Hydra (${hydra.runtime.version}) - - See https://hydra.cc for more info. - - - == Flags == - - $FLAGS_HELP - - - == Configuration groups == - - Compose your configuration from those groups (For example, append hydra/job_logging=disabled - to command line) - - - $HYDRA_CONFIG_GROUPS - - - Use ''--cfg hydra'' to Show the Hydra config. - - ' - hydra_help: ??? 
- hydra_logging: - version: 1 - formatters: - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s' - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - root: - level: INFO - handlers: - - console - disable_existing_loggers: false - job_logging: - version: 1 - formatters: - simple: - format: '[%(asctime)s][%(name)s][%(levelname)s] - %(message)s' - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s] - - %(message)s' - log_colors: - DEBUG: purple - INFO: green - WARNING: yellow - ERROR: red - CRITICAL: red - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - file: - class: logging.FileHandler - formatter: simple - filename: ${hydra.job.name}.log - root: - level: INFO - handlers: - - console - - file - disable_existing_loggers: false - env: {} - mode: MULTIRUN - searchpath: [] - callbacks: {} - output_subdir: .hydra - overrides: - hydra: - - hydra.mode=MULTIRUN - task: - - benchmark.input_shapes.batch_size=32 - - benchmark.input_shapes.sequence_length=256 - job: - name: experiment - chdir: true - override_dirname: benchmark.input_shapes.batch_size=32,benchmark.input_shapes.sequence_length=256 - id: '5' - num: 5 - config_name: bge_batch_size_sweep_ort_trt_fp16.yaml - env_set: - CUDA_VISIBLE_DEVICES: '3' - env_copy: [] - config: - override_dirname: - kv_sep: '=' - item_sep: ',' - exclude_keys: [] - runtime: - version: 1.3.2 - version_base: '1.3' - cwd: /workspace/optimum-benchmark/examples/fast-embedders - config_sources: - - path: hydra.conf - schema: pkg - provider: hydra - - path: optimum_benchmark - schema: pkg - provider: main - - path: hydra_plugins.hydra_colorlog.conf - schema: pkg - provider: hydra-colorlog - - path: /workspace/optimum-benchmark/examples/fast-embedders/configs - schema: file - provider: command-line - - path: '' - schema: structured - provider: schema - output_dir: /workspace/optimum-benchmark/examples/fast-embedders/experiments/bge_batch_size_sweep_ort_trt_fp16_batch_size(32)_sequence_length(256)/5 - choices: - benchmark: inference - backend: onnxruntime - hydra/env: default - hydra/callbacks: null - hydra/job_logging: colorlog - hydra/hydra_logging: colorlog - hydra/hydra_help: default - hydra/help: default - hydra/sweeper: basic - hydra/launcher: basic - hydra/output: default - verbose: false diff --git a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_trt_fp16_batch_size(32)_sequence_length(256)/5/.hydra/overrides.yaml b/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_trt_fp16_batch_size(32)_sequence_length(256)/5/.hydra/overrides.yaml deleted file mode 100644 index 3e2c3635..00000000 --- a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_trt_fp16_batch_size(32)_sequence_length(256)/5/.hydra/overrides.yaml +++ /dev/null @@ -1,2 +0,0 @@ -- benchmark.input_shapes.batch_size=32 -- benchmark.input_shapes.sequence_length=256 diff --git a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_trt_fp16_batch_size(32)_sequence_length(256)/5/hydra_config.yaml b/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_trt_fp16_batch_size(32)_sequence_length(256)/5/hydra_config.yaml deleted file mode 100644 index 7d8e18de..00000000 --- a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_trt_fp16_batch_size(32)_sequence_length(256)/5/hydra_config.yaml +++ /dev/null @@ -1,82 +0,0 @@ 
-backend: - name: onnxruntime - version: ort-gpu:1.15.1 - _target_: optimum_benchmark.backends.onnxruntime.backend.ORTBackend - seed: 42 - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: false - continous_isolation_check: false - delete_cache: false - no_weights: false - export: true - use_cache: true - use_merged: false - torch_dtype: null - provider: TensorrtExecutionProvider - provider_options: - trt_engine_cache_enable: true - trt_engine_cache_path: tmp/trt_cache - device_id: 0 - trt_fp16_enable: true - use_io_binding: false - session_options: - enable_profiling: false - optimization: false - optimization_config: {} - quantization: false - quantization_config: {} - calibration: false - calibration_config: {} - auto_optimization: null - auto_optimization_config: {} - auto_quantization: null - auto_quantization_config: {} - use_inference_session: true - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 32 - sequence_length: 256 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: null - can_diffuse: false - can_generate: false - forward_kwargs: {} - generate_kwargs: {} -experiment_name: bge_batch_size_sweep_ort_trt_fp16 -model: BAAI/bge-base-en-v1.5 -device: cuda -task: feature-extraction -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.0 - transformers_version: 4.34.0.dev0 - accelerate_version: 0.23.0.dev0 - diffusers_version: 0.21.0.dev0 - python_version: 3.8.10 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB diff --git a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_trt_fp16_batch_size(32)_sequence_length(256)/5/inference_results.csv b/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_trt_fp16_batch_size(32)_sequence_length(256)/5/inference_results.csv deleted file mode 100644 index e20af000..00000000 --- a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_trt_fp16_batch_size(32)_sequence_length(256)/5/inference_results.csv +++ /dev/null @@ -1,2 +0,0 @@ -,forward.latency(s),forward.throughput(samples/s),forward.peak_memory(MB) -0,0.0152,2110.0,4016 diff --git a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_trt_fp16_batch_size(4)_sequence_length(256)/2/.hydra/config.yaml b/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_trt_fp16_batch_size(4)_sequence_length(256)/2/.hydra/config.yaml deleted file mode 100644 index 7e63f163..00000000 --- a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_trt_fp16_batch_size(4)_sequence_length(256)/2/.hydra/config.yaml +++ /dev/null @@ -1,80 +0,0 @@ -backend: - name: onnxruntime - version: ${onnxruntime_version:} - _target_: optimum_benchmark.backends.onnxruntime.backend.ORTBackend - seed: 42 - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: false - continous_isolation_check: false - delete_cache: false - no_weights: false - export: true - use_cache: true - use_merged: false - torch_dtype: null - provider: TensorrtExecutionProvider - provider_options: - device_id: ${infer_device_id:${device}} - trt_fp16_enable: true - 
use_io_binding: ${io_bind:${device}} - session_options: - enable_profiling: ${is_profiling:${benchmark.name}} - optimization: false - optimization_config: {} - quantization: false - quantization_config: {} - calibration: false - calibration_config: {} - auto_optimization: null - auto_optimization_config: {} - auto_quantization: null - auto_quantization_config: {} - use_inference_session: ${is_inference:${benchmark.name}} - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 4 - sequence_length: 256 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: null - can_diffuse: ${can_diffuse:${task}} - can_generate: ${can_generate:${task}} - forward_kwargs: {} - generate_kwargs: {} -experiment_name: bge_batch_size_sweep_ort_trt_fp16 -model: BAAI/bge-base-en-v1.5 -device: cuda -task: feature-extraction -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.0 - transformers_version: 4.34.0.dev0 - accelerate_version: 0.23.0.dev0 - diffusers_version: 0.21.0.dev0 - python_version: 3.8.10 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB diff --git a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_trt_fp16_batch_size(4)_sequence_length(256)/2/.hydra/hydra.yaml b/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_trt_fp16_batch_size(4)_sequence_length(256)/2/.hydra/hydra.yaml deleted file mode 100644 index 3c839db0..00000000 --- a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_trt_fp16_batch_size(4)_sequence_length(256)/2/.hydra/hydra.yaml +++ /dev/null @@ -1,175 +0,0 @@ -hydra: - run: - dir: experiments/${experiment_name}_batch_size(${benchmark.input_shapes.batch_size})_sequence_length(${benchmark.input_shapes.sequence_length}) - sweep: - dir: experiments/${experiment_name}_batch_size(${benchmark.input_shapes.batch_size})_sequence_length(${benchmark.input_shapes.sequence_length}) - subdir: ${hydra.job.num} - launcher: - _target_: hydra._internal.core_plugins.basic_launcher.BasicLauncher - sweeper: - _target_: hydra._internal.core_plugins.basic_sweeper.BasicSweeper - max_batch_size: null - params: - benchmark.input_shapes.batch_size: 1,2,4,8,16,32,64,128,256,512,1024,2048 - benchmark.input_shapes.sequence_length: '256' - help: - app_name: ${hydra.job.name} - header: '${hydra.help.app_name} is powered by Hydra. - - ' - footer: 'Powered by Hydra (https://hydra.cc) - - Use --hydra-help to view Hydra specific help - - ' - template: '${hydra.help.header} - - == Configuration groups == - - Compose your configuration from those groups (group=option) - - - $APP_CONFIG_GROUPS - - - == Config == - - Override anything in the config (foo.bar=value) - - - $CONFIG - - - ${hydra.help.footer} - - ' - hydra_help: - template: 'Hydra (${hydra.runtime.version}) - - See https://hydra.cc for more info. - - - == Flags == - - $FLAGS_HELP - - - == Configuration groups == - - Compose your configuration from those groups (For example, append hydra/job_logging=disabled - to command line) - - - $HYDRA_CONFIG_GROUPS - - - Use ''--cfg hydra'' to Show the Hydra config. - - ' - hydra_help: ??? 
- hydra_logging: - version: 1 - formatters: - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s' - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - root: - level: INFO - handlers: - - console - disable_existing_loggers: false - job_logging: - version: 1 - formatters: - simple: - format: '[%(asctime)s][%(name)s][%(levelname)s] - %(message)s' - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s] - - %(message)s' - log_colors: - DEBUG: purple - INFO: green - WARNING: yellow - ERROR: red - CRITICAL: red - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - file: - class: logging.FileHandler - formatter: simple - filename: ${hydra.job.name}.log - root: - level: INFO - handlers: - - console - - file - disable_existing_loggers: false - env: {} - mode: MULTIRUN - searchpath: [] - callbacks: {} - output_subdir: .hydra - overrides: - hydra: - - hydra.mode=MULTIRUN - task: - - benchmark.input_shapes.batch_size=4 - - benchmark.input_shapes.sequence_length=256 - job: - name: experiment - chdir: true - override_dirname: benchmark.input_shapes.batch_size=4,benchmark.input_shapes.sequence_length=256 - id: '2' - num: 2 - config_name: bge_batch_size_sweep_ort_trt_fp16.yaml - env_set: - CUDA_VISIBLE_DEVICES: '3' - env_copy: [] - config: - override_dirname: - kv_sep: '=' - item_sep: ',' - exclude_keys: [] - runtime: - version: 1.3.2 - version_base: '1.3' - cwd: /workspace/optimum-benchmark/examples/fast-embedders - config_sources: - - path: hydra.conf - schema: pkg - provider: hydra - - path: optimum_benchmark - schema: pkg - provider: main - - path: hydra_plugins.hydra_colorlog.conf - schema: pkg - provider: hydra-colorlog - - path: /workspace/optimum-benchmark/examples/fast-embedders/configs - schema: file - provider: command-line - - path: '' - schema: structured - provider: schema - output_dir: /workspace/optimum-benchmark/examples/fast-embedders/experiments/bge_batch_size_sweep_ort_trt_fp16_batch_size(4)_sequence_length(256)/2 - choices: - benchmark: inference - backend: onnxruntime - hydra/env: default - hydra/callbacks: null - hydra/job_logging: colorlog - hydra/hydra_logging: colorlog - hydra/hydra_help: default - hydra/help: default - hydra/sweeper: basic - hydra/launcher: basic - hydra/output: default - verbose: false diff --git a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_trt_fp16_batch_size(4)_sequence_length(256)/2/.hydra/overrides.yaml b/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_trt_fp16_batch_size(4)_sequence_length(256)/2/.hydra/overrides.yaml deleted file mode 100644 index 3c675c88..00000000 --- a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_trt_fp16_batch_size(4)_sequence_length(256)/2/.hydra/overrides.yaml +++ /dev/null @@ -1,2 +0,0 @@ -- benchmark.input_shapes.batch_size=4 -- benchmark.input_shapes.sequence_length=256 diff --git a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_trt_fp16_batch_size(4)_sequence_length(256)/2/hydra_config.yaml b/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_trt_fp16_batch_size(4)_sequence_length(256)/2/hydra_config.yaml deleted file mode 100644 index a64e8e87..00000000 --- a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_trt_fp16_batch_size(4)_sequence_length(256)/2/hydra_config.yaml +++ /dev/null @@ -1,82 +0,0 @@ -backend: - 
name: onnxruntime - version: ort-gpu:1.15.1 - _target_: optimum_benchmark.backends.onnxruntime.backend.ORTBackend - seed: 42 - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: false - continous_isolation_check: false - delete_cache: false - no_weights: false - export: true - use_cache: true - use_merged: false - torch_dtype: null - provider: TensorrtExecutionProvider - provider_options: - trt_engine_cache_enable: true - trt_engine_cache_path: tmp/trt_cache - device_id: 0 - trt_fp16_enable: true - use_io_binding: false - session_options: - enable_profiling: false - optimization: false - optimization_config: {} - quantization: false - quantization_config: {} - calibration: false - calibration_config: {} - auto_optimization: null - auto_optimization_config: {} - auto_quantization: null - auto_quantization_config: {} - use_inference_session: true - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 4 - sequence_length: 256 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: null - can_diffuse: false - can_generate: false - forward_kwargs: {} - generate_kwargs: {} -experiment_name: bge_batch_size_sweep_ort_trt_fp16 -model: BAAI/bge-base-en-v1.5 -device: cuda -task: feature-extraction -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.0 - transformers_version: 4.34.0.dev0 - accelerate_version: 0.23.0.dev0 - diffusers_version: 0.21.0.dev0 - python_version: 3.8.10 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB diff --git a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_trt_fp16_batch_size(4)_sequence_length(256)/2/inference_results.csv b/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_trt_fp16_batch_size(4)_sequence_length(256)/2/inference_results.csv deleted file mode 100644 index 0cd91e11..00000000 --- a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_trt_fp16_batch_size(4)_sequence_length(256)/2/inference_results.csv +++ /dev/null @@ -1,2 +0,0 @@ -,forward.latency(s),forward.throughput(samples/s),forward.peak_memory(MB) -0,0.00259,1540.0,3871 diff --git a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_trt_fp16_batch_size(512)_sequence_length(256)/9/.hydra/config.yaml b/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_trt_fp16_batch_size(512)_sequence_length(256)/9/.hydra/config.yaml deleted file mode 100644 index ccee80c1..00000000 --- a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_trt_fp16_batch_size(512)_sequence_length(256)/9/.hydra/config.yaml +++ /dev/null @@ -1,80 +0,0 @@ -backend: - name: onnxruntime - version: ${onnxruntime_version:} - _target_: optimum_benchmark.backends.onnxruntime.backend.ORTBackend - seed: 42 - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: false - continous_isolation_check: false - delete_cache: false - no_weights: false - export: true - use_cache: true - use_merged: false - torch_dtype: null - provider: TensorrtExecutionProvider - provider_options: - device_id: ${infer_device_id:${device}} - trt_fp16_enable: true - use_io_binding: 
${io_bind:${device}} - session_options: - enable_profiling: ${is_profiling:${benchmark.name}} - optimization: false - optimization_config: {} - quantization: false - quantization_config: {} - calibration: false - calibration_config: {} - auto_optimization: null - auto_optimization_config: {} - auto_quantization: null - auto_quantization_config: {} - use_inference_session: ${is_inference:${benchmark.name}} - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 512 - sequence_length: 256 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: null - can_diffuse: ${can_diffuse:${task}} - can_generate: ${can_generate:${task}} - forward_kwargs: {} - generate_kwargs: {} -experiment_name: bge_batch_size_sweep_ort_trt_fp16 -model: BAAI/bge-base-en-v1.5 -device: cuda -task: feature-extraction -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.0 - transformers_version: 4.34.0.dev0 - accelerate_version: 0.23.0.dev0 - diffusers_version: 0.21.0.dev0 - python_version: 3.8.10 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB diff --git a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_trt_fp16_batch_size(512)_sequence_length(256)/9/.hydra/hydra.yaml b/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_trt_fp16_batch_size(512)_sequence_length(256)/9/.hydra/hydra.yaml deleted file mode 100644 index f2b834d4..00000000 --- a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_trt_fp16_batch_size(512)_sequence_length(256)/9/.hydra/hydra.yaml +++ /dev/null @@ -1,175 +0,0 @@ -hydra: - run: - dir: experiments/${experiment_name}_batch_size(${benchmark.input_shapes.batch_size})_sequence_length(${benchmark.input_shapes.sequence_length}) - sweep: - dir: experiments/${experiment_name}_batch_size(${benchmark.input_shapes.batch_size})_sequence_length(${benchmark.input_shapes.sequence_length}) - subdir: ${hydra.job.num} - launcher: - _target_: hydra._internal.core_plugins.basic_launcher.BasicLauncher - sweeper: - _target_: hydra._internal.core_plugins.basic_sweeper.BasicSweeper - max_batch_size: null - params: - benchmark.input_shapes.batch_size: 1,2,4,8,16,32,64,128,256,512,1024,2048 - benchmark.input_shapes.sequence_length: '256' - help: - app_name: ${hydra.job.name} - header: '${hydra.help.app_name} is powered by Hydra. - - ' - footer: 'Powered by Hydra (https://hydra.cc) - - Use --hydra-help to view Hydra specific help - - ' - template: '${hydra.help.header} - - == Configuration groups == - - Compose your configuration from those groups (group=option) - - - $APP_CONFIG_GROUPS - - - == Config == - - Override anything in the config (foo.bar=value) - - - $CONFIG - - - ${hydra.help.footer} - - ' - hydra_help: - template: 'Hydra (${hydra.runtime.version}) - - See https://hydra.cc for more info. - - - == Flags == - - $FLAGS_HELP - - - == Configuration groups == - - Compose your configuration from those groups (For example, append hydra/job_logging=disabled - to command line) - - - $HYDRA_CONFIG_GROUPS - - - Use ''--cfg hydra'' to Show the Hydra config. - - ' - hydra_help: ??? 
- hydra_logging: - version: 1 - formatters: - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s' - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - root: - level: INFO - handlers: - - console - disable_existing_loggers: false - job_logging: - version: 1 - formatters: - simple: - format: '[%(asctime)s][%(name)s][%(levelname)s] - %(message)s' - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s] - - %(message)s' - log_colors: - DEBUG: purple - INFO: green - WARNING: yellow - ERROR: red - CRITICAL: red - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - file: - class: logging.FileHandler - formatter: simple - filename: ${hydra.job.name}.log - root: - level: INFO - handlers: - - console - - file - disable_existing_loggers: false - env: {} - mode: MULTIRUN - searchpath: [] - callbacks: {} - output_subdir: .hydra - overrides: - hydra: - - hydra.mode=MULTIRUN - task: - - benchmark.input_shapes.batch_size=512 - - benchmark.input_shapes.sequence_length=256 - job: - name: experiment - chdir: true - override_dirname: benchmark.input_shapes.batch_size=512,benchmark.input_shapes.sequence_length=256 - id: '9' - num: 9 - config_name: bge_batch_size_sweep_ort_trt_fp16.yaml - env_set: - CUDA_VISIBLE_DEVICES: '3' - env_copy: [] - config: - override_dirname: - kv_sep: '=' - item_sep: ',' - exclude_keys: [] - runtime: - version: 1.3.2 - version_base: '1.3' - cwd: /workspace/optimum-benchmark/examples/fast-embedders - config_sources: - - path: hydra.conf - schema: pkg - provider: hydra - - path: optimum_benchmark - schema: pkg - provider: main - - path: hydra_plugins.hydra_colorlog.conf - schema: pkg - provider: hydra-colorlog - - path: /workspace/optimum-benchmark/examples/fast-embedders/configs - schema: file - provider: command-line - - path: '' - schema: structured - provider: schema - output_dir: /workspace/optimum-benchmark/examples/fast-embedders/experiments/bge_batch_size_sweep_ort_trt_fp16_batch_size(512)_sequence_length(256)/9 - choices: - benchmark: inference - backend: onnxruntime - hydra/env: default - hydra/callbacks: null - hydra/job_logging: colorlog - hydra/hydra_logging: colorlog - hydra/hydra_help: default - hydra/help: default - hydra/sweeper: basic - hydra/launcher: basic - hydra/output: default - verbose: false diff --git a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_trt_fp16_batch_size(512)_sequence_length(256)/9/.hydra/overrides.yaml b/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_trt_fp16_batch_size(512)_sequence_length(256)/9/.hydra/overrides.yaml deleted file mode 100644 index 830422b4..00000000 --- a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_trt_fp16_batch_size(512)_sequence_length(256)/9/.hydra/overrides.yaml +++ /dev/null @@ -1,2 +0,0 @@ -- benchmark.input_shapes.batch_size=512 -- benchmark.input_shapes.sequence_length=256 diff --git a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_trt_fp16_batch_size(512)_sequence_length(256)/9/hydra_config.yaml b/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_trt_fp16_batch_size(512)_sequence_length(256)/9/hydra_config.yaml deleted file mode 100644 index 909ded7f..00000000 --- a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_trt_fp16_batch_size(512)_sequence_length(256)/9/hydra_config.yaml +++ /dev/null @@ -1,82 
+0,0 @@ -backend: - name: onnxruntime - version: ort-gpu:1.15.1 - _target_: optimum_benchmark.backends.onnxruntime.backend.ORTBackend - seed: 42 - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: false - continous_isolation_check: false - delete_cache: false - no_weights: false - export: true - use_cache: true - use_merged: false - torch_dtype: null - provider: TensorrtExecutionProvider - provider_options: - trt_engine_cache_enable: true - trt_engine_cache_path: tmp/trt_cache - device_id: 0 - trt_fp16_enable: true - use_io_binding: false - session_options: - enable_profiling: false - optimization: false - optimization_config: {} - quantization: false - quantization_config: {} - calibration: false - calibration_config: {} - auto_optimization: null - auto_optimization_config: {} - auto_quantization: null - auto_quantization_config: {} - use_inference_session: true - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 512 - sequence_length: 256 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: null - can_diffuse: false - can_generate: false - forward_kwargs: {} - generate_kwargs: {} -experiment_name: bge_batch_size_sweep_ort_trt_fp16 -model: BAAI/bge-base-en-v1.5 -device: cuda -task: feature-extraction -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.0 - transformers_version: 4.34.0.dev0 - accelerate_version: 0.23.0.dev0 - diffusers_version: 0.21.0.dev0 - python_version: 3.8.10 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB diff --git a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_trt_fp16_batch_size(64)_sequence_length(256)/6/.hydra/config.yaml b/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_trt_fp16_batch_size(64)_sequence_length(256)/6/.hydra/config.yaml deleted file mode 100644 index 1d37b256..00000000 --- a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_trt_fp16_batch_size(64)_sequence_length(256)/6/.hydra/config.yaml +++ /dev/null @@ -1,80 +0,0 @@ -backend: - name: onnxruntime - version: ${onnxruntime_version:} - _target_: optimum_benchmark.backends.onnxruntime.backend.ORTBackend - seed: 42 - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: false - continous_isolation_check: false - delete_cache: false - no_weights: false - export: true - use_cache: true - use_merged: false - torch_dtype: null - provider: TensorrtExecutionProvider - provider_options: - device_id: ${infer_device_id:${device}} - trt_fp16_enable: true - use_io_binding: ${io_bind:${device}} - session_options: - enable_profiling: ${is_profiling:${benchmark.name}} - optimization: false - optimization_config: {} - quantization: false - quantization_config: {} - calibration: false - calibration_config: {} - auto_optimization: null - auto_optimization_config: {} - auto_quantization: null - auto_quantization_config: {} - use_inference_session: ${is_inference:${benchmark.name}} - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: 
optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 64 - sequence_length: 256 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: null - can_diffuse: ${can_diffuse:${task}} - can_generate: ${can_generate:${task}} - forward_kwargs: {} - generate_kwargs: {} -experiment_name: bge_batch_size_sweep_ort_trt_fp16 -model: BAAI/bge-base-en-v1.5 -device: cuda -task: feature-extraction -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.0 - transformers_version: 4.34.0.dev0 - accelerate_version: 0.23.0.dev0 - diffusers_version: 0.21.0.dev0 - python_version: 3.8.10 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB diff --git a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_trt_fp16_batch_size(64)_sequence_length(256)/6/.hydra/hydra.yaml b/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_trt_fp16_batch_size(64)_sequence_length(256)/6/.hydra/hydra.yaml deleted file mode 100644 index bd1eac69..00000000 --- a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_trt_fp16_batch_size(64)_sequence_length(256)/6/.hydra/hydra.yaml +++ /dev/null @@ -1,175 +0,0 @@ -hydra: - run: - dir: experiments/${experiment_name}_batch_size(${benchmark.input_shapes.batch_size})_sequence_length(${benchmark.input_shapes.sequence_length}) - sweep: - dir: experiments/${experiment_name}_batch_size(${benchmark.input_shapes.batch_size})_sequence_length(${benchmark.input_shapes.sequence_length}) - subdir: ${hydra.job.num} - launcher: - _target_: hydra._internal.core_plugins.basic_launcher.BasicLauncher - sweeper: - _target_: hydra._internal.core_plugins.basic_sweeper.BasicSweeper - max_batch_size: null - params: - benchmark.input_shapes.batch_size: 1,2,4,8,16,32,64,128,256,512,1024,2048 - benchmark.input_shapes.sequence_length: '256' - help: - app_name: ${hydra.job.name} - header: '${hydra.help.app_name} is powered by Hydra. - - ' - footer: 'Powered by Hydra (https://hydra.cc) - - Use --hydra-help to view Hydra specific help - - ' - template: '${hydra.help.header} - - == Configuration groups == - - Compose your configuration from those groups (group=option) - - - $APP_CONFIG_GROUPS - - - == Config == - - Override anything in the config (foo.bar=value) - - - $CONFIG - - - ${hydra.help.footer} - - ' - hydra_help: - template: 'Hydra (${hydra.runtime.version}) - - See https://hydra.cc for more info. - - - == Flags == - - $FLAGS_HELP - - - == Configuration groups == - - Compose your configuration from those groups (For example, append hydra/job_logging=disabled - to command line) - - - $HYDRA_CONFIG_GROUPS - - - Use ''--cfg hydra'' to Show the Hydra config. - - ' - hydra_help: ??? 
- hydra_logging: - version: 1 - formatters: - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s' - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - root: - level: INFO - handlers: - - console - disable_existing_loggers: false - job_logging: - version: 1 - formatters: - simple: - format: '[%(asctime)s][%(name)s][%(levelname)s] - %(message)s' - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s] - - %(message)s' - log_colors: - DEBUG: purple - INFO: green - WARNING: yellow - ERROR: red - CRITICAL: red - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - file: - class: logging.FileHandler - formatter: simple - filename: ${hydra.job.name}.log - root: - level: INFO - handlers: - - console - - file - disable_existing_loggers: false - env: {} - mode: MULTIRUN - searchpath: [] - callbacks: {} - output_subdir: .hydra - overrides: - hydra: - - hydra.mode=MULTIRUN - task: - - benchmark.input_shapes.batch_size=64 - - benchmark.input_shapes.sequence_length=256 - job: - name: experiment - chdir: true - override_dirname: benchmark.input_shapes.batch_size=64,benchmark.input_shapes.sequence_length=256 - id: '6' - num: 6 - config_name: bge_batch_size_sweep_ort_trt_fp16.yaml - env_set: - CUDA_VISIBLE_DEVICES: '3' - env_copy: [] - config: - override_dirname: - kv_sep: '=' - item_sep: ',' - exclude_keys: [] - runtime: - version: 1.3.2 - version_base: '1.3' - cwd: /workspace/optimum-benchmark/examples/fast-embedders - config_sources: - - path: hydra.conf - schema: pkg - provider: hydra - - path: optimum_benchmark - schema: pkg - provider: main - - path: hydra_plugins.hydra_colorlog.conf - schema: pkg - provider: hydra-colorlog - - path: /workspace/optimum-benchmark/examples/fast-embedders/configs - schema: file - provider: command-line - - path: '' - schema: structured - provider: schema - output_dir: /workspace/optimum-benchmark/examples/fast-embedders/experiments/bge_batch_size_sweep_ort_trt_fp16_batch_size(64)_sequence_length(256)/6 - choices: - benchmark: inference - backend: onnxruntime - hydra/env: default - hydra/callbacks: null - hydra/job_logging: colorlog - hydra/hydra_logging: colorlog - hydra/hydra_help: default - hydra/help: default - hydra/sweeper: basic - hydra/launcher: basic - hydra/output: default - verbose: false diff --git a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_trt_fp16_batch_size(64)_sequence_length(256)/6/.hydra/overrides.yaml b/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_trt_fp16_batch_size(64)_sequence_length(256)/6/.hydra/overrides.yaml deleted file mode 100644 index 31aa8a55..00000000 --- a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_trt_fp16_batch_size(64)_sequence_length(256)/6/.hydra/overrides.yaml +++ /dev/null @@ -1,2 +0,0 @@ -- benchmark.input_shapes.batch_size=64 -- benchmark.input_shapes.sequence_length=256 diff --git a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_trt_fp16_batch_size(64)_sequence_length(256)/6/hydra_config.yaml b/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_trt_fp16_batch_size(64)_sequence_length(256)/6/hydra_config.yaml deleted file mode 100644 index 2618f579..00000000 --- a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_trt_fp16_batch_size(64)_sequence_length(256)/6/hydra_config.yaml +++ /dev/null @@ -1,82 +0,0 @@ 
-backend: - name: onnxruntime - version: ort-gpu:1.15.1 - _target_: optimum_benchmark.backends.onnxruntime.backend.ORTBackend - seed: 42 - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: false - continous_isolation_check: false - delete_cache: false - no_weights: false - export: true - use_cache: true - use_merged: false - torch_dtype: null - provider: TensorrtExecutionProvider - provider_options: - trt_engine_cache_enable: true - trt_engine_cache_path: tmp/trt_cache - device_id: 0 - trt_fp16_enable: true - use_io_binding: false - session_options: - enable_profiling: false - optimization: false - optimization_config: {} - quantization: false - quantization_config: {} - calibration: false - calibration_config: {} - auto_optimization: null - auto_optimization_config: {} - auto_quantization: null - auto_quantization_config: {} - use_inference_session: true - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 64 - sequence_length: 256 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: null - can_diffuse: false - can_generate: false - forward_kwargs: {} - generate_kwargs: {} -experiment_name: bge_batch_size_sweep_ort_trt_fp16 -model: BAAI/bge-base-en-v1.5 -device: cuda -task: feature-extraction -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.0 - transformers_version: 4.34.0.dev0 - accelerate_version: 0.23.0.dev0 - diffusers_version: 0.21.0.dev0 - python_version: 3.8.10 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB diff --git a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_trt_fp16_batch_size(64)_sequence_length(256)/6/inference_results.csv b/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_trt_fp16_batch_size(64)_sequence_length(256)/6/inference_results.csv deleted file mode 100644 index 54978095..00000000 --- a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_trt_fp16_batch_size(64)_sequence_length(256)/6/inference_results.csv +++ /dev/null @@ -1,2 +0,0 @@ -,forward.latency(s),forward.throughput(samples/s),forward.peak_memory(MB) -0,0.0303,2110.0,4150 diff --git a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_trt_fp16_batch_size(8)_sequence_length(256)/3/.hydra/config.yaml b/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_trt_fp16_batch_size(8)_sequence_length(256)/3/.hydra/config.yaml deleted file mode 100644 index b85ddcb6..00000000 --- a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_trt_fp16_batch_size(8)_sequence_length(256)/3/.hydra/config.yaml +++ /dev/null @@ -1,80 +0,0 @@ -backend: - name: onnxruntime - version: ${onnxruntime_version:} - _target_: optimum_benchmark.backends.onnxruntime.backend.ORTBackend - seed: 42 - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: false - continous_isolation_check: false - delete_cache: false - no_weights: false - export: true - use_cache: true - use_merged: false - torch_dtype: null - provider: TensorrtExecutionProvider - provider_options: - device_id: ${infer_device_id:${device}} - trt_fp16_enable: true - 
use_io_binding: ${io_bind:${device}} - session_options: - enable_profiling: ${is_profiling:${benchmark.name}} - optimization: false - optimization_config: {} - quantization: false - quantization_config: {} - calibration: false - calibration_config: {} - auto_optimization: null - auto_optimization_config: {} - auto_quantization: null - auto_quantization_config: {} - use_inference_session: ${is_inference:${benchmark.name}} - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 8 - sequence_length: 256 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: null - can_diffuse: ${can_diffuse:${task}} - can_generate: ${can_generate:${task}} - forward_kwargs: {} - generate_kwargs: {} -experiment_name: bge_batch_size_sweep_ort_trt_fp16 -model: BAAI/bge-base-en-v1.5 -device: cuda -task: feature-extraction -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.0 - transformers_version: 4.34.0.dev0 - accelerate_version: 0.23.0.dev0 - diffusers_version: 0.21.0.dev0 - python_version: 3.8.10 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB diff --git a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_trt_fp16_batch_size(8)_sequence_length(256)/3/.hydra/hydra.yaml b/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_trt_fp16_batch_size(8)_sequence_length(256)/3/.hydra/hydra.yaml deleted file mode 100644 index d0b65270..00000000 --- a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_trt_fp16_batch_size(8)_sequence_length(256)/3/.hydra/hydra.yaml +++ /dev/null @@ -1,175 +0,0 @@ -hydra: - run: - dir: experiments/${experiment_name}_batch_size(${benchmark.input_shapes.batch_size})_sequence_length(${benchmark.input_shapes.sequence_length}) - sweep: - dir: experiments/${experiment_name}_batch_size(${benchmark.input_shapes.batch_size})_sequence_length(${benchmark.input_shapes.sequence_length}) - subdir: ${hydra.job.num} - launcher: - _target_: hydra._internal.core_plugins.basic_launcher.BasicLauncher - sweeper: - _target_: hydra._internal.core_plugins.basic_sweeper.BasicSweeper - max_batch_size: null - params: - benchmark.input_shapes.batch_size: 1,2,4,8,16,32,64,128,256,512,1024,2048 - benchmark.input_shapes.sequence_length: '256' - help: - app_name: ${hydra.job.name} - header: '${hydra.help.app_name} is powered by Hydra. - - ' - footer: 'Powered by Hydra (https://hydra.cc) - - Use --hydra-help to view Hydra specific help - - ' - template: '${hydra.help.header} - - == Configuration groups == - - Compose your configuration from those groups (group=option) - - - $APP_CONFIG_GROUPS - - - == Config == - - Override anything in the config (foo.bar=value) - - - $CONFIG - - - ${hydra.help.footer} - - ' - hydra_help: - template: 'Hydra (${hydra.runtime.version}) - - See https://hydra.cc for more info. - - - == Flags == - - $FLAGS_HELP - - - == Configuration groups == - - Compose your configuration from those groups (For example, append hydra/job_logging=disabled - to command line) - - - $HYDRA_CONFIG_GROUPS - - - Use ''--cfg hydra'' to Show the Hydra config. - - ' - hydra_help: ??? 
- hydra_logging: - version: 1 - formatters: - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s' - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - root: - level: INFO - handlers: - - console - disable_existing_loggers: false - job_logging: - version: 1 - formatters: - simple: - format: '[%(asctime)s][%(name)s][%(levelname)s] - %(message)s' - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s] - - %(message)s' - log_colors: - DEBUG: purple - INFO: green - WARNING: yellow - ERROR: red - CRITICAL: red - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - file: - class: logging.FileHandler - formatter: simple - filename: ${hydra.job.name}.log - root: - level: INFO - handlers: - - console - - file - disable_existing_loggers: false - env: {} - mode: MULTIRUN - searchpath: [] - callbacks: {} - output_subdir: .hydra - overrides: - hydra: - - hydra.mode=MULTIRUN - task: - - benchmark.input_shapes.batch_size=8 - - benchmark.input_shapes.sequence_length=256 - job: - name: experiment - chdir: true - override_dirname: benchmark.input_shapes.batch_size=8,benchmark.input_shapes.sequence_length=256 - id: '3' - num: 3 - config_name: bge_batch_size_sweep_ort_trt_fp16.yaml - env_set: - CUDA_VISIBLE_DEVICES: '3' - env_copy: [] - config: - override_dirname: - kv_sep: '=' - item_sep: ',' - exclude_keys: [] - runtime: - version: 1.3.2 - version_base: '1.3' - cwd: /workspace/optimum-benchmark/examples/fast-embedders - config_sources: - - path: hydra.conf - schema: pkg - provider: hydra - - path: optimum_benchmark - schema: pkg - provider: main - - path: hydra_plugins.hydra_colorlog.conf - schema: pkg - provider: hydra-colorlog - - path: /workspace/optimum-benchmark/examples/fast-embedders/configs - schema: file - provider: command-line - - path: '' - schema: structured - provider: schema - output_dir: /workspace/optimum-benchmark/examples/fast-embedders/experiments/bge_batch_size_sweep_ort_trt_fp16_batch_size(8)_sequence_length(256)/3 - choices: - benchmark: inference - backend: onnxruntime - hydra/env: default - hydra/callbacks: null - hydra/job_logging: colorlog - hydra/hydra_logging: colorlog - hydra/hydra_help: default - hydra/help: default - hydra/sweeper: basic - hydra/launcher: basic - hydra/output: default - verbose: false diff --git a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_trt_fp16_batch_size(8)_sequence_length(256)/3/.hydra/overrides.yaml b/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_trt_fp16_batch_size(8)_sequence_length(256)/3/.hydra/overrides.yaml deleted file mode 100644 index 854c6355..00000000 --- a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_trt_fp16_batch_size(8)_sequence_length(256)/3/.hydra/overrides.yaml +++ /dev/null @@ -1,2 +0,0 @@ -- benchmark.input_shapes.batch_size=8 -- benchmark.input_shapes.sequence_length=256 diff --git a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_trt_fp16_batch_size(8)_sequence_length(256)/3/hydra_config.yaml b/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_trt_fp16_batch_size(8)_sequence_length(256)/3/hydra_config.yaml deleted file mode 100644 index 9b73cf99..00000000 --- a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_trt_fp16_batch_size(8)_sequence_length(256)/3/hydra_config.yaml +++ /dev/null @@ -1,82 +0,0 @@ -backend: - 
name: onnxruntime - version: ort-gpu:1.15.1 - _target_: optimum_benchmark.backends.onnxruntime.backend.ORTBackend - seed: 42 - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: false - continous_isolation_check: false - delete_cache: false - no_weights: false - export: true - use_cache: true - use_merged: false - torch_dtype: null - provider: TensorrtExecutionProvider - provider_options: - trt_engine_cache_enable: true - trt_engine_cache_path: tmp/trt_cache - device_id: 0 - trt_fp16_enable: true - use_io_binding: false - session_options: - enable_profiling: false - optimization: false - optimization_config: {} - quantization: false - quantization_config: {} - calibration: false - calibration_config: {} - auto_optimization: null - auto_optimization_config: {} - auto_quantization: null - auto_quantization_config: {} - use_inference_session: true - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 8 - sequence_length: 256 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: null - can_diffuse: false - can_generate: false - forward_kwargs: {} - generate_kwargs: {} -experiment_name: bge_batch_size_sweep_ort_trt_fp16 -model: BAAI/bge-base-en-v1.5 -device: cuda -task: feature-extraction -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.0 - transformers_version: 4.34.0.dev0 - accelerate_version: 0.23.0.dev0 - diffusers_version: 0.21.0.dev0 - python_version: 3.8.10 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB diff --git a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_trt_fp16_batch_size(8)_sequence_length(256)/3/inference_results.csv b/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_trt_fp16_batch_size(8)_sequence_length(256)/3/inference_results.csv deleted file mode 100644 index 286e93b3..00000000 --- a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_trt_fp16_batch_size(8)_sequence_length(256)/3/inference_results.csv +++ /dev/null @@ -1,2 +0,0 @@ -,forward.latency(s),forward.throughput(samples/s),forward.peak_memory(MB) -0,0.00434,1840.0,3899 diff --git a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_trt_fp16_batch_size(None)_sequence_length(None)/multirun.yaml b/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_trt_fp16_batch_size(None)_sequence_length(None)/multirun.yaml deleted file mode 100644 index 7ead4d1b..00000000 --- a/examples/fast-mteb/experiments/bge_batch_size_sweep_ort_trt_fp16_batch_size(None)_sequence_length(None)/multirun.yaml +++ /dev/null @@ -1,253 +0,0 @@ -hydra: - run: - dir: experiments/${experiment_name}_batch_size(${benchmark.input_shapes.batch_size})_sequence_length(${benchmark.input_shapes.sequence_length}) - sweep: - dir: experiments/${experiment_name}_batch_size(${benchmark.input_shapes.batch_size})_sequence_length(${benchmark.input_shapes.sequence_length}) - subdir: ${hydra.job.num} - launcher: - _target_: hydra._internal.core_plugins.basic_launcher.BasicLauncher - sweeper: - _target_: hydra._internal.core_plugins.basic_sweeper.BasicSweeper - max_batch_size: null - params: - 
benchmark.input_shapes.batch_size: 1,2,4,8,16,32,64,128,256,512,1024,2048 - benchmark.input_shapes.sequence_length: '256' - help: - app_name: ${hydra.job.name} - header: '${hydra.help.app_name} is powered by Hydra. - - ' - footer: 'Powered by Hydra (https://hydra.cc) - - Use --hydra-help to view Hydra specific help - - ' - template: '${hydra.help.header} - - == Configuration groups == - - Compose your configuration from those groups (group=option) - - - $APP_CONFIG_GROUPS - - - == Config == - - Override anything in the config (foo.bar=value) - - - $CONFIG - - - ${hydra.help.footer} - - ' - hydra_help: - template: 'Hydra (${hydra.runtime.version}) - - See https://hydra.cc for more info. - - - == Flags == - - $FLAGS_HELP - - - == Configuration groups == - - Compose your configuration from those groups (For example, append hydra/job_logging=disabled - to command line) - - - $HYDRA_CONFIG_GROUPS - - - Use ''--cfg hydra'' to Show the Hydra config. - - ' - hydra_help: ??? - hydra_logging: - version: 1 - formatters: - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s' - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - root: - level: INFO - handlers: - - console - disable_existing_loggers: false - job_logging: - version: 1 - formatters: - simple: - format: '[%(asctime)s][%(name)s][%(levelname)s] - %(message)s' - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s] - - %(message)s' - log_colors: - DEBUG: purple - INFO: green - WARNING: yellow - ERROR: red - CRITICAL: red - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - file: - class: logging.FileHandler - formatter: simple - filename: ${hydra.job.name}.log - root: - level: INFO - handlers: - - console - - file - disable_existing_loggers: false - env: {} - mode: MULTIRUN - searchpath: [] - callbacks: {} - output_subdir: .hydra - overrides: - hydra: - - hydra.mode=MULTIRUN - task: [] - job: - name: experiment - chdir: true - override_dirname: '' - id: ??? - num: ??? - config_name: bge_batch_size_sweep_ort_trt_fp16.yaml - env_set: - CUDA_VISIBLE_DEVICES: '3' - env_copy: [] - config: - override_dirname: - kv_sep: '=' - item_sep: ',' - exclude_keys: [] - runtime: - version: 1.3.2 - version_base: '1.3' - cwd: /workspace/optimum-benchmark/examples/fast-embedders - config_sources: - - path: hydra.conf - schema: pkg - provider: hydra - - path: optimum_benchmark - schema: pkg - provider: main - - path: hydra_plugins.hydra_colorlog.conf - schema: pkg - provider: hydra-colorlog - - path: /workspace/optimum-benchmark/examples/fast-embedders/configs - schema: file - provider: command-line - - path: '' - schema: structured - provider: schema - output_dir: ??? 
- choices: - benchmark: inference - backend: onnxruntime - hydra/env: default - hydra/callbacks: null - hydra/job_logging: colorlog - hydra/hydra_logging: colorlog - hydra/hydra_help: default - hydra/help: default - hydra/sweeper: basic - hydra/launcher: basic - hydra/output: default - verbose: false -backend: - name: onnxruntime - version: ${onnxruntime_version:} - _target_: optimum_benchmark.backends.onnxruntime.backend.ORTBackend - seed: 42 - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: false - continous_isolation_check: false - delete_cache: false - no_weights: false - export: true - use_cache: true - use_merged: false - torch_dtype: null - provider: TensorrtExecutionProvider - provider_options: - device_id: ${infer_device_id:${device}} - trt_fp16_enable: true - use_io_binding: ${io_bind:${device}} - session_options: - enable_profiling: ${is_profiling:${benchmark.name}} - optimization: false - optimization_config: {} - quantization: false - quantization_config: {} - calibration: false - calibration_config: {} - auto_optimization: null - auto_optimization_config: {} - auto_quantization: null - auto_quantization_config: {} - use_inference_session: ${is_inference:${benchmark.name}} - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: null - sequence_length: null - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: null - can_diffuse: ${can_diffuse:${task}} - can_generate: ${can_generate:${task}} - forward_kwargs: {} - generate_kwargs: {} -experiment_name: bge_batch_size_sweep_ort_trt_fp16 -model: BAAI/bge-base-en-v1.5 -device: cuda -task: feature-extraction -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.0 - transformers_version: 4.34.0.dev0 - accelerate_version: 0.23.0.dev0 - diffusers_version: 0.21.0.dev0 - python_version: 3.8.10 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB diff --git a/examples/fast-mteb/experiments/bge_seq_len_sweep_baseline_batch_size(1)_sequence_length(128)/3/.hydra/config.yaml b/examples/fast-mteb/experiments/bge_seq_len_sweep_baseline_batch_size(1)_sequence_length(128)/3/.hydra/config.yaml deleted file mode 100644 index 0ccfe4de..00000000 --- a/examples/fast-mteb/experiments/bge_seq_len_sweep_baseline_batch_size(1)_sequence_length(128)/3/.hydra/config.yaml +++ /dev/null @@ -1,69 +0,0 @@ -backend: - name: pytorch - version: ${pytorch_version:} - _target_: optimum_benchmark.backends.pytorch.backend.PyTorchBackend - seed: 42 - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: false - continous_isolation_check: false - delete_cache: false - no_weights: false - device_map: null - torch_dtype: null - disable_grad: ${is_inference:${benchmark.name}} - eval_mode: ${is_inference:${benchmark.name}} - amp_autocast: false - amp_dtype: null - torch_compile: false - torch_compile_config: {} - bettertransformer: false - quantization_scheme: null - quantization_config: {} - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: 
optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 1 - sequence_length: 128 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: null - can_diffuse: ${can_diffuse:${task}} - can_generate: ${can_generate:${task}} - forward_kwargs: {} - generate_kwargs: {} -experiment_name: bge_seq_len_sweep_baseline -model: BAAI/bge-base-en-v1.5 -device: cuda -task: feature-extraction -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.0 - transformers_version: 4.34.0.dev0 - accelerate_version: 0.23.0.dev0 - diffusers_version: 0.21.0.dev0 - python_version: 3.8.10 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB diff --git a/examples/fast-mteb/experiments/bge_seq_len_sweep_baseline_batch_size(1)_sequence_length(128)/3/.hydra/hydra.yaml b/examples/fast-mteb/experiments/bge_seq_len_sweep_baseline_batch_size(1)_sequence_length(128)/3/.hydra/hydra.yaml deleted file mode 100644 index 161f2c19..00000000 --- a/examples/fast-mteb/experiments/bge_seq_len_sweep_baseline_batch_size(1)_sequence_length(128)/3/.hydra/hydra.yaml +++ /dev/null @@ -1,175 +0,0 @@ -hydra: - run: - dir: experiments/${experiment_name}_batch_size(${benchmark.input_shapes.batch_size})_sequence_length(${benchmark.input_shapes.sequence_length}) - sweep: - dir: experiments/${experiment_name}_batch_size(${benchmark.input_shapes.batch_size})_sequence_length(${benchmark.input_shapes.sequence_length}) - subdir: ${hydra.job.num} - launcher: - _target_: hydra._internal.core_plugins.basic_launcher.BasicLauncher - sweeper: - _target_: hydra._internal.core_plugins.basic_sweeper.BasicSweeper - max_batch_size: null - params: - benchmark.input_shapes.batch_size: '1' - benchmark.input_shapes.sequence_length: 16,32,64,128,256,512 - help: - app_name: ${hydra.job.name} - header: '${hydra.help.app_name} is powered by Hydra. - - ' - footer: 'Powered by Hydra (https://hydra.cc) - - Use --hydra-help to view Hydra specific help - - ' - template: '${hydra.help.header} - - == Configuration groups == - - Compose your configuration from those groups (group=option) - - - $APP_CONFIG_GROUPS - - - == Config == - - Override anything in the config (foo.bar=value) - - - $CONFIG - - - ${hydra.help.footer} - - ' - hydra_help: - template: 'Hydra (${hydra.runtime.version}) - - See https://hydra.cc for more info. - - - == Flags == - - $FLAGS_HELP - - - == Configuration groups == - - Compose your configuration from those groups (For example, append hydra/job_logging=disabled - to command line) - - - $HYDRA_CONFIG_GROUPS - - - Use ''--cfg hydra'' to Show the Hydra config. - - ' - hydra_help: ??? 
- hydra_logging: - version: 1 - formatters: - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s' - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - root: - level: INFO - handlers: - - console - disable_existing_loggers: false - job_logging: - version: 1 - formatters: - simple: - format: '[%(asctime)s][%(name)s][%(levelname)s] - %(message)s' - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s] - - %(message)s' - log_colors: - DEBUG: purple - INFO: green - WARNING: yellow - ERROR: red - CRITICAL: red - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - file: - class: logging.FileHandler - formatter: simple - filename: ${hydra.job.name}.log - root: - level: INFO - handlers: - - console - - file - disable_existing_loggers: false - env: {} - mode: MULTIRUN - searchpath: [] - callbacks: {} - output_subdir: .hydra - overrides: - hydra: - - hydra.mode=MULTIRUN - task: - - benchmark.input_shapes.batch_size=1 - - benchmark.input_shapes.sequence_length=128 - job: - name: experiment - chdir: true - override_dirname: benchmark.input_shapes.batch_size=1,benchmark.input_shapes.sequence_length=128 - id: '3' - num: 3 - config_name: bge_seq_len_sweep_baseline - env_set: - CUDA_VISIBLE_DEVICES: '3' - env_copy: [] - config: - override_dirname: - kv_sep: '=' - item_sep: ',' - exclude_keys: [] - runtime: - version: 1.3.2 - version_base: '1.3' - cwd: /workspace/optimum-benchmark/examples/fast-embedder - config_sources: - - path: hydra.conf - schema: pkg - provider: hydra - - path: optimum_benchmark - schema: pkg - provider: main - - path: hydra_plugins.hydra_colorlog.conf - schema: pkg - provider: hydra-colorlog - - path: /workspace/optimum-benchmark/examples/fast-embedder/configs - schema: file - provider: command-line - - path: '' - schema: structured - provider: schema - output_dir: /workspace/optimum-benchmark/examples/fast-embedder/experiments/bge_seq_len_sweep_baseline_batch_size(1)_sequence_length(128)/3 - choices: - benchmark: inference - backend: pytorch - hydra/env: default - hydra/callbacks: null - hydra/job_logging: colorlog - hydra/hydra_logging: colorlog - hydra/hydra_help: default - hydra/help: default - hydra/sweeper: basic - hydra/launcher: basic - hydra/output: default - verbose: false diff --git a/examples/fast-mteb/experiments/bge_seq_len_sweep_baseline_batch_size(1)_sequence_length(128)/3/.hydra/overrides.yaml b/examples/fast-mteb/experiments/bge_seq_len_sweep_baseline_batch_size(1)_sequence_length(128)/3/.hydra/overrides.yaml deleted file mode 100644 index 1c55c407..00000000 --- a/examples/fast-mteb/experiments/bge_seq_len_sweep_baseline_batch_size(1)_sequence_length(128)/3/.hydra/overrides.yaml +++ /dev/null @@ -1,2 +0,0 @@ -- benchmark.input_shapes.batch_size=1 -- benchmark.input_shapes.sequence_length=128 diff --git a/examples/fast-mteb/experiments/bge_seq_len_sweep_baseline_batch_size(1)_sequence_length(128)/3/hydra_config.yaml b/examples/fast-mteb/experiments/bge_seq_len_sweep_baseline_batch_size(1)_sequence_length(128)/3/hydra_config.yaml deleted file mode 100644 index 5f2a4904..00000000 --- a/examples/fast-mteb/experiments/bge_seq_len_sweep_baseline_batch_size(1)_sequence_length(128)/3/hydra_config.yaml +++ /dev/null @@ -1,69 +0,0 @@ -backend: - name: pytorch - version: 2.0.1 - _target_: 
optimum_benchmark.backends.pytorch.backend.PyTorchBackend - seed: 42 - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: false - continous_isolation_check: false - delete_cache: false - no_weights: false - device_map: null - torch_dtype: null - disable_grad: true - eval_mode: true - amp_autocast: false - amp_dtype: null - torch_compile: false - torch_compile_config: {} - bettertransformer: false - quantization_scheme: null - quantization_config: {} - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 1 - sequence_length: 128 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: null - can_diffuse: false - can_generate: false - forward_kwargs: {} - generate_kwargs: {} -experiment_name: bge_seq_len_sweep_baseline -model: BAAI/bge-base-en-v1.5 -device: cuda -task: feature-extraction -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.0 - transformers_version: 4.34.0.dev0 - accelerate_version: 0.23.0.dev0 - diffusers_version: 0.21.0.dev0 - python_version: 3.8.10 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB diff --git a/examples/fast-mteb/experiments/bge_seq_len_sweep_baseline_batch_size(1)_sequence_length(128)/3/inference_results.csv b/examples/fast-mteb/experiments/bge_seq_len_sweep_baseline_batch_size(1)_sequence_length(128)/3/inference_results.csv deleted file mode 100644 index 732c490f..00000000 --- a/examples/fast-mteb/experiments/bge_seq_len_sweep_baseline_batch_size(1)_sequence_length(128)/3/inference_results.csv +++ /dev/null @@ -1,2 +0,0 @@ -,forward.latency(s),forward.throughput(samples/s),forward.peak_memory(MB) -0,0.00689,145.0,1888 diff --git a/examples/fast-mteb/experiments/bge_seq_len_sweep_baseline_batch_size(1)_sequence_length(16)/0/.hydra/config.yaml b/examples/fast-mteb/experiments/bge_seq_len_sweep_baseline_batch_size(1)_sequence_length(16)/0/.hydra/config.yaml deleted file mode 100644 index 478a8d82..00000000 --- a/examples/fast-mteb/experiments/bge_seq_len_sweep_baseline_batch_size(1)_sequence_length(16)/0/.hydra/config.yaml +++ /dev/null @@ -1,69 +0,0 @@ -backend: - name: pytorch - version: ${pytorch_version:} - _target_: optimum_benchmark.backends.pytorch.backend.PyTorchBackend - seed: 42 - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: false - continous_isolation_check: false - delete_cache: false - no_weights: false - device_map: null - torch_dtype: null - disable_grad: ${is_inference:${benchmark.name}} - eval_mode: ${is_inference:${benchmark.name}} - amp_autocast: false - amp_dtype: null - torch_compile: false - torch_compile_config: {} - bettertransformer: false - quantization_scheme: null - quantization_config: {} - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 1 - sequence_length: 16 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - 
audio_sequence_length: 16000 - new_tokens: null - can_diffuse: ${can_diffuse:${task}} - can_generate: ${can_generate:${task}} - forward_kwargs: {} - generate_kwargs: {} -experiment_name: bge_seq_len_sweep_baseline -model: BAAI/bge-base-en-v1.5 -device: cuda -task: feature-extraction -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.0 - transformers_version: 4.34.0.dev0 - accelerate_version: 0.23.0.dev0 - diffusers_version: 0.21.0.dev0 - python_version: 3.8.10 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB diff --git a/examples/fast-mteb/experiments/bge_seq_len_sweep_baseline_batch_size(1)_sequence_length(16)/0/.hydra/hydra.yaml b/examples/fast-mteb/experiments/bge_seq_len_sweep_baseline_batch_size(1)_sequence_length(16)/0/.hydra/hydra.yaml deleted file mode 100644 index f6f55836..00000000 --- a/examples/fast-mteb/experiments/bge_seq_len_sweep_baseline_batch_size(1)_sequence_length(16)/0/.hydra/hydra.yaml +++ /dev/null @@ -1,175 +0,0 @@ -hydra: - run: - dir: experiments/${experiment_name}_batch_size(${benchmark.input_shapes.batch_size})_sequence_length(${benchmark.input_shapes.sequence_length}) - sweep: - dir: experiments/${experiment_name}_batch_size(${benchmark.input_shapes.batch_size})_sequence_length(${benchmark.input_shapes.sequence_length}) - subdir: ${hydra.job.num} - launcher: - _target_: hydra._internal.core_plugins.basic_launcher.BasicLauncher - sweeper: - _target_: hydra._internal.core_plugins.basic_sweeper.BasicSweeper - max_batch_size: null - params: - benchmark.input_shapes.batch_size: '1' - benchmark.input_shapes.sequence_length: 16,32,64,128,256,512 - help: - app_name: ${hydra.job.name} - header: '${hydra.help.app_name} is powered by Hydra. - - ' - footer: 'Powered by Hydra (https://hydra.cc) - - Use --hydra-help to view Hydra specific help - - ' - template: '${hydra.help.header} - - == Configuration groups == - - Compose your configuration from those groups (group=option) - - - $APP_CONFIG_GROUPS - - - == Config == - - Override anything in the config (foo.bar=value) - - - $CONFIG - - - ${hydra.help.footer} - - ' - hydra_help: - template: 'Hydra (${hydra.runtime.version}) - - See https://hydra.cc for more info. - - - == Flags == - - $FLAGS_HELP - - - == Configuration groups == - - Compose your configuration from those groups (For example, append hydra/job_logging=disabled - to command line) - - - $HYDRA_CONFIG_GROUPS - - - Use ''--cfg hydra'' to Show the Hydra config. - - ' - hydra_help: ??? 
- hydra_logging: - version: 1 - formatters: - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s' - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - root: - level: INFO - handlers: - - console - disable_existing_loggers: false - job_logging: - version: 1 - formatters: - simple: - format: '[%(asctime)s][%(name)s][%(levelname)s] - %(message)s' - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s] - - %(message)s' - log_colors: - DEBUG: purple - INFO: green - WARNING: yellow - ERROR: red - CRITICAL: red - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - file: - class: logging.FileHandler - formatter: simple - filename: ${hydra.job.name}.log - root: - level: INFO - handlers: - - console - - file - disable_existing_loggers: false - env: {} - mode: MULTIRUN - searchpath: [] - callbacks: {} - output_subdir: .hydra - overrides: - hydra: - - hydra.mode=MULTIRUN - task: - - benchmark.input_shapes.batch_size=1 - - benchmark.input_shapes.sequence_length=16 - job: - name: experiment - chdir: true - override_dirname: benchmark.input_shapes.batch_size=1,benchmark.input_shapes.sequence_length=16 - id: '0' - num: 0 - config_name: bge_seq_len_sweep_baseline - env_set: - CUDA_VISIBLE_DEVICES: '3' - env_copy: [] - config: - override_dirname: - kv_sep: '=' - item_sep: ',' - exclude_keys: [] - runtime: - version: 1.3.2 - version_base: '1.3' - cwd: /workspace/optimum-benchmark/examples/fast-embedder - config_sources: - - path: hydra.conf - schema: pkg - provider: hydra - - path: optimum_benchmark - schema: pkg - provider: main - - path: hydra_plugins.hydra_colorlog.conf - schema: pkg - provider: hydra-colorlog - - path: /workspace/optimum-benchmark/examples/fast-embedder/configs - schema: file - provider: command-line - - path: '' - schema: structured - provider: schema - output_dir: /workspace/optimum-benchmark/examples/fast-embedder/experiments/bge_seq_len_sweep_baseline_batch_size(1)_sequence_length(16)/0 - choices: - benchmark: inference - backend: pytorch - hydra/env: default - hydra/callbacks: null - hydra/job_logging: colorlog - hydra/hydra_logging: colorlog - hydra/hydra_help: default - hydra/help: default - hydra/sweeper: basic - hydra/launcher: basic - hydra/output: default - verbose: false diff --git a/examples/fast-mteb/experiments/bge_seq_len_sweep_baseline_batch_size(1)_sequence_length(16)/0/.hydra/overrides.yaml b/examples/fast-mteb/experiments/bge_seq_len_sweep_baseline_batch_size(1)_sequence_length(16)/0/.hydra/overrides.yaml deleted file mode 100644 index 7d89e6a3..00000000 --- a/examples/fast-mteb/experiments/bge_seq_len_sweep_baseline_batch_size(1)_sequence_length(16)/0/.hydra/overrides.yaml +++ /dev/null @@ -1,2 +0,0 @@ -- benchmark.input_shapes.batch_size=1 -- benchmark.input_shapes.sequence_length=16 diff --git a/examples/fast-mteb/experiments/bge_seq_len_sweep_baseline_batch_size(1)_sequence_length(16)/0/hydra_config.yaml b/examples/fast-mteb/experiments/bge_seq_len_sweep_baseline_batch_size(1)_sequence_length(16)/0/hydra_config.yaml deleted file mode 100644 index 4562d4f4..00000000 --- a/examples/fast-mteb/experiments/bge_seq_len_sweep_baseline_batch_size(1)_sequence_length(16)/0/hydra_config.yaml +++ /dev/null @@ -1,69 +0,0 @@ -backend: - name: pytorch - version: 2.0.1 - _target_: 
optimum_benchmark.backends.pytorch.backend.PyTorchBackend - seed: 42 - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: false - continous_isolation_check: false - delete_cache: false - no_weights: false - device_map: null - torch_dtype: null - disable_grad: true - eval_mode: true - amp_autocast: false - amp_dtype: null - torch_compile: false - torch_compile_config: {} - bettertransformer: false - quantization_scheme: null - quantization_config: {} - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 1 - sequence_length: 16 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: null - can_diffuse: false - can_generate: false - forward_kwargs: {} - generate_kwargs: {} -experiment_name: bge_seq_len_sweep_baseline -model: BAAI/bge-base-en-v1.5 -device: cuda -task: feature-extraction -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.0 - transformers_version: 4.34.0.dev0 - accelerate_version: 0.23.0.dev0 - diffusers_version: 0.21.0.dev0 - python_version: 3.8.10 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB diff --git a/examples/fast-mteb/experiments/bge_seq_len_sweep_baseline_batch_size(1)_sequence_length(16)/0/inference_results.csv b/examples/fast-mteb/experiments/bge_seq_len_sweep_baseline_batch_size(1)_sequence_length(16)/0/inference_results.csv deleted file mode 100644 index 9057696a..00000000 --- a/examples/fast-mteb/experiments/bge_seq_len_sweep_baseline_batch_size(1)_sequence_length(16)/0/inference_results.csv +++ /dev/null @@ -1,2 +0,0 @@ -,forward.latency(s),forward.throughput(samples/s),forward.peak_memory(MB) -0,0.00729,137.0,1881 diff --git a/examples/fast-mteb/experiments/bge_seq_len_sweep_baseline_batch_size(1)_sequence_length(256)/4/.hydra/config.yaml b/examples/fast-mteb/experiments/bge_seq_len_sweep_baseline_batch_size(1)_sequence_length(256)/4/.hydra/config.yaml deleted file mode 100644 index 6a6fc649..00000000 --- a/examples/fast-mteb/experiments/bge_seq_len_sweep_baseline_batch_size(1)_sequence_length(256)/4/.hydra/config.yaml +++ /dev/null @@ -1,69 +0,0 @@ -backend: - name: pytorch - version: ${pytorch_version:} - _target_: optimum_benchmark.backends.pytorch.backend.PyTorchBackend - seed: 42 - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: false - continous_isolation_check: false - delete_cache: false - no_weights: false - device_map: null - torch_dtype: null - disable_grad: ${is_inference:${benchmark.name}} - eval_mode: ${is_inference:${benchmark.name}} - amp_autocast: false - amp_dtype: null - torch_compile: false - torch_compile_config: {} - bettertransformer: false - quantization_scheme: null - quantization_config: {} - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 1 - sequence_length: 256 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - 
audio_sequence_length: 16000 - new_tokens: null - can_diffuse: ${can_diffuse:${task}} - can_generate: ${can_generate:${task}} - forward_kwargs: {} - generate_kwargs: {} -experiment_name: bge_seq_len_sweep_baseline -model: BAAI/bge-base-en-v1.5 -device: cuda -task: feature-extraction -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.0 - transformers_version: 4.34.0.dev0 - accelerate_version: 0.23.0.dev0 - diffusers_version: 0.21.0.dev0 - python_version: 3.8.10 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB diff --git a/examples/fast-mteb/experiments/bge_seq_len_sweep_baseline_batch_size(1)_sequence_length(256)/4/.hydra/hydra.yaml b/examples/fast-mteb/experiments/bge_seq_len_sweep_baseline_batch_size(1)_sequence_length(256)/4/.hydra/hydra.yaml deleted file mode 100644 index 8995d1aa..00000000 --- a/examples/fast-mteb/experiments/bge_seq_len_sweep_baseline_batch_size(1)_sequence_length(256)/4/.hydra/hydra.yaml +++ /dev/null @@ -1,175 +0,0 @@ -hydra: - run: - dir: experiments/${experiment_name}_batch_size(${benchmark.input_shapes.batch_size})_sequence_length(${benchmark.input_shapes.sequence_length}) - sweep: - dir: experiments/${experiment_name}_batch_size(${benchmark.input_shapes.batch_size})_sequence_length(${benchmark.input_shapes.sequence_length}) - subdir: ${hydra.job.num} - launcher: - _target_: hydra._internal.core_plugins.basic_launcher.BasicLauncher - sweeper: - _target_: hydra._internal.core_plugins.basic_sweeper.BasicSweeper - max_batch_size: null - params: - benchmark.input_shapes.batch_size: '1' - benchmark.input_shapes.sequence_length: 16,32,64,128,256,512 - help: - app_name: ${hydra.job.name} - header: '${hydra.help.app_name} is powered by Hydra. - - ' - footer: 'Powered by Hydra (https://hydra.cc) - - Use --hydra-help to view Hydra specific help - - ' - template: '${hydra.help.header} - - == Configuration groups == - - Compose your configuration from those groups (group=option) - - - $APP_CONFIG_GROUPS - - - == Config == - - Override anything in the config (foo.bar=value) - - - $CONFIG - - - ${hydra.help.footer} - - ' - hydra_help: - template: 'Hydra (${hydra.runtime.version}) - - See https://hydra.cc for more info. - - - == Flags == - - $FLAGS_HELP - - - == Configuration groups == - - Compose your configuration from those groups (For example, append hydra/job_logging=disabled - to command line) - - - $HYDRA_CONFIG_GROUPS - - - Use ''--cfg hydra'' to Show the Hydra config. - - ' - hydra_help: ??? 
- hydra_logging: - version: 1 - formatters: - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s' - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - root: - level: INFO - handlers: - - console - disable_existing_loggers: false - job_logging: - version: 1 - formatters: - simple: - format: '[%(asctime)s][%(name)s][%(levelname)s] - %(message)s' - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s] - - %(message)s' - log_colors: - DEBUG: purple - INFO: green - WARNING: yellow - ERROR: red - CRITICAL: red - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - file: - class: logging.FileHandler - formatter: simple - filename: ${hydra.job.name}.log - root: - level: INFO - handlers: - - console - - file - disable_existing_loggers: false - env: {} - mode: MULTIRUN - searchpath: [] - callbacks: {} - output_subdir: .hydra - overrides: - hydra: - - hydra.mode=MULTIRUN - task: - - benchmark.input_shapes.batch_size=1 - - benchmark.input_shapes.sequence_length=256 - job: - name: experiment - chdir: true - override_dirname: benchmark.input_shapes.batch_size=1,benchmark.input_shapes.sequence_length=256 - id: '4' - num: 4 - config_name: bge_seq_len_sweep_baseline - env_set: - CUDA_VISIBLE_DEVICES: '3' - env_copy: [] - config: - override_dirname: - kv_sep: '=' - item_sep: ',' - exclude_keys: [] - runtime: - version: 1.3.2 - version_base: '1.3' - cwd: /workspace/optimum-benchmark/examples/fast-embedder - config_sources: - - path: hydra.conf - schema: pkg - provider: hydra - - path: optimum_benchmark - schema: pkg - provider: main - - path: hydra_plugins.hydra_colorlog.conf - schema: pkg - provider: hydra-colorlog - - path: /workspace/optimum-benchmark/examples/fast-embedder/configs - schema: file - provider: command-line - - path: '' - schema: structured - provider: schema - output_dir: /workspace/optimum-benchmark/examples/fast-embedder/experiments/bge_seq_len_sweep_baseline_batch_size(1)_sequence_length(256)/4 - choices: - benchmark: inference - backend: pytorch - hydra/env: default - hydra/callbacks: null - hydra/job_logging: colorlog - hydra/hydra_logging: colorlog - hydra/hydra_help: default - hydra/help: default - hydra/sweeper: basic - hydra/launcher: basic - hydra/output: default - verbose: false diff --git a/examples/fast-mteb/experiments/bge_seq_len_sweep_baseline_batch_size(1)_sequence_length(256)/4/.hydra/overrides.yaml b/examples/fast-mteb/experiments/bge_seq_len_sweep_baseline_batch_size(1)_sequence_length(256)/4/.hydra/overrides.yaml deleted file mode 100644 index 68453b03..00000000 --- a/examples/fast-mteb/experiments/bge_seq_len_sweep_baseline_batch_size(1)_sequence_length(256)/4/.hydra/overrides.yaml +++ /dev/null @@ -1,2 +0,0 @@ -- benchmark.input_shapes.batch_size=1 -- benchmark.input_shapes.sequence_length=256 diff --git a/examples/fast-mteb/experiments/bge_seq_len_sweep_baseline_batch_size(1)_sequence_length(256)/4/hydra_config.yaml b/examples/fast-mteb/experiments/bge_seq_len_sweep_baseline_batch_size(1)_sequence_length(256)/4/hydra_config.yaml deleted file mode 100644 index c14f63a6..00000000 --- a/examples/fast-mteb/experiments/bge_seq_len_sweep_baseline_batch_size(1)_sequence_length(256)/4/hydra_config.yaml +++ /dev/null @@ -1,69 +0,0 @@ -backend: - name: pytorch - version: 2.0.1 - _target_: 
optimum_benchmark.backends.pytorch.backend.PyTorchBackend - seed: 42 - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: false - continous_isolation_check: false - delete_cache: false - no_weights: false - device_map: null - torch_dtype: null - disable_grad: true - eval_mode: true - amp_autocast: false - amp_dtype: null - torch_compile: false - torch_compile_config: {} - bettertransformer: false - quantization_scheme: null - quantization_config: {} - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 1 - sequence_length: 256 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: null - can_diffuse: false - can_generate: false - forward_kwargs: {} - generate_kwargs: {} -experiment_name: bge_seq_len_sweep_baseline -model: BAAI/bge-base-en-v1.5 -device: cuda -task: feature-extraction -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.0 - transformers_version: 4.34.0.dev0 - accelerate_version: 0.23.0.dev0 - diffusers_version: 0.21.0.dev0 - python_version: 3.8.10 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB diff --git a/examples/fast-mteb/experiments/bge_seq_len_sweep_baseline_batch_size(1)_sequence_length(256)/4/inference_results.csv b/examples/fast-mteb/experiments/bge_seq_len_sweep_baseline_batch_size(1)_sequence_length(256)/4/inference_results.csv deleted file mode 100644 index b2d73b86..00000000 --- a/examples/fast-mteb/experiments/bge_seq_len_sweep_baseline_batch_size(1)_sequence_length(256)/4/inference_results.csv +++ /dev/null @@ -1,2 +0,0 @@ -,forward.latency(s),forward.throughput(samples/s),forward.peak_memory(MB) -0,0.00687,146.0,1890 diff --git a/examples/fast-mteb/experiments/bge_seq_len_sweep_baseline_batch_size(1)_sequence_length(32)/1/.hydra/config.yaml b/examples/fast-mteb/experiments/bge_seq_len_sweep_baseline_batch_size(1)_sequence_length(32)/1/.hydra/config.yaml deleted file mode 100644 index 05f11981..00000000 --- a/examples/fast-mteb/experiments/bge_seq_len_sweep_baseline_batch_size(1)_sequence_length(32)/1/.hydra/config.yaml +++ /dev/null @@ -1,69 +0,0 @@ -backend: - name: pytorch - version: ${pytorch_version:} - _target_: optimum_benchmark.backends.pytorch.backend.PyTorchBackend - seed: 42 - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: false - continous_isolation_check: false - delete_cache: false - no_weights: false - device_map: null - torch_dtype: null - disable_grad: ${is_inference:${benchmark.name}} - eval_mode: ${is_inference:${benchmark.name}} - amp_autocast: false - amp_dtype: null - torch_compile: false - torch_compile_config: {} - bettertransformer: false - quantization_scheme: null - quantization_config: {} - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 1 - sequence_length: 32 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - 
audio_sequence_length: 16000 - new_tokens: null - can_diffuse: ${can_diffuse:${task}} - can_generate: ${can_generate:${task}} - forward_kwargs: {} - generate_kwargs: {} -experiment_name: bge_seq_len_sweep_baseline -model: BAAI/bge-base-en-v1.5 -device: cuda -task: feature-extraction -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.0 - transformers_version: 4.34.0.dev0 - accelerate_version: 0.23.0.dev0 - diffusers_version: 0.21.0.dev0 - python_version: 3.8.10 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB diff --git a/examples/fast-mteb/experiments/bge_seq_len_sweep_baseline_batch_size(1)_sequence_length(32)/1/.hydra/hydra.yaml b/examples/fast-mteb/experiments/bge_seq_len_sweep_baseline_batch_size(1)_sequence_length(32)/1/.hydra/hydra.yaml deleted file mode 100644 index 3b60c1e3..00000000 --- a/examples/fast-mteb/experiments/bge_seq_len_sweep_baseline_batch_size(1)_sequence_length(32)/1/.hydra/hydra.yaml +++ /dev/null @@ -1,175 +0,0 @@ -hydra: - run: - dir: experiments/${experiment_name}_batch_size(${benchmark.input_shapes.batch_size})_sequence_length(${benchmark.input_shapes.sequence_length}) - sweep: - dir: experiments/${experiment_name}_batch_size(${benchmark.input_shapes.batch_size})_sequence_length(${benchmark.input_shapes.sequence_length}) - subdir: ${hydra.job.num} - launcher: - _target_: hydra._internal.core_plugins.basic_launcher.BasicLauncher - sweeper: - _target_: hydra._internal.core_plugins.basic_sweeper.BasicSweeper - max_batch_size: null - params: - benchmark.input_shapes.batch_size: '1' - benchmark.input_shapes.sequence_length: 16,32,64,128,256,512 - help: - app_name: ${hydra.job.name} - header: '${hydra.help.app_name} is powered by Hydra. - - ' - footer: 'Powered by Hydra (https://hydra.cc) - - Use --hydra-help to view Hydra specific help - - ' - template: '${hydra.help.header} - - == Configuration groups == - - Compose your configuration from those groups (group=option) - - - $APP_CONFIG_GROUPS - - - == Config == - - Override anything in the config (foo.bar=value) - - - $CONFIG - - - ${hydra.help.footer} - - ' - hydra_help: - template: 'Hydra (${hydra.runtime.version}) - - See https://hydra.cc for more info. - - - == Flags == - - $FLAGS_HELP - - - == Configuration groups == - - Compose your configuration from those groups (For example, append hydra/job_logging=disabled - to command line) - - - $HYDRA_CONFIG_GROUPS - - - Use ''--cfg hydra'' to Show the Hydra config. - - ' - hydra_help: ??? 
- hydra_logging: - version: 1 - formatters: - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s' - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - root: - level: INFO - handlers: - - console - disable_existing_loggers: false - job_logging: - version: 1 - formatters: - simple: - format: '[%(asctime)s][%(name)s][%(levelname)s] - %(message)s' - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s] - - %(message)s' - log_colors: - DEBUG: purple - INFO: green - WARNING: yellow - ERROR: red - CRITICAL: red - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - file: - class: logging.FileHandler - formatter: simple - filename: ${hydra.job.name}.log - root: - level: INFO - handlers: - - console - - file - disable_existing_loggers: false - env: {} - mode: MULTIRUN - searchpath: [] - callbacks: {} - output_subdir: .hydra - overrides: - hydra: - - hydra.mode=MULTIRUN - task: - - benchmark.input_shapes.batch_size=1 - - benchmark.input_shapes.sequence_length=32 - job: - name: experiment - chdir: true - override_dirname: benchmark.input_shapes.batch_size=1,benchmark.input_shapes.sequence_length=32 - id: '1' - num: 1 - config_name: bge_seq_len_sweep_baseline - env_set: - CUDA_VISIBLE_DEVICES: '3' - env_copy: [] - config: - override_dirname: - kv_sep: '=' - item_sep: ',' - exclude_keys: [] - runtime: - version: 1.3.2 - version_base: '1.3' - cwd: /workspace/optimum-benchmark/examples/fast-embedder - config_sources: - - path: hydra.conf - schema: pkg - provider: hydra - - path: optimum_benchmark - schema: pkg - provider: main - - path: hydra_plugins.hydra_colorlog.conf - schema: pkg - provider: hydra-colorlog - - path: /workspace/optimum-benchmark/examples/fast-embedder/configs - schema: file - provider: command-line - - path: '' - schema: structured - provider: schema - output_dir: /workspace/optimum-benchmark/examples/fast-embedder/experiments/bge_seq_len_sweep_baseline_batch_size(1)_sequence_length(32)/1 - choices: - benchmark: inference - backend: pytorch - hydra/env: default - hydra/callbacks: null - hydra/job_logging: colorlog - hydra/hydra_logging: colorlog - hydra/hydra_help: default - hydra/help: default - hydra/sweeper: basic - hydra/launcher: basic - hydra/output: default - verbose: false diff --git a/examples/fast-mteb/experiments/bge_seq_len_sweep_baseline_batch_size(1)_sequence_length(32)/1/.hydra/overrides.yaml b/examples/fast-mteb/experiments/bge_seq_len_sweep_baseline_batch_size(1)_sequence_length(32)/1/.hydra/overrides.yaml deleted file mode 100644 index f47df8c7..00000000 --- a/examples/fast-mteb/experiments/bge_seq_len_sweep_baseline_batch_size(1)_sequence_length(32)/1/.hydra/overrides.yaml +++ /dev/null @@ -1,2 +0,0 @@ -- benchmark.input_shapes.batch_size=1 -- benchmark.input_shapes.sequence_length=32 diff --git a/examples/fast-mteb/experiments/bge_seq_len_sweep_baseline_batch_size(1)_sequence_length(32)/1/hydra_config.yaml b/examples/fast-mteb/experiments/bge_seq_len_sweep_baseline_batch_size(1)_sequence_length(32)/1/hydra_config.yaml deleted file mode 100644 index 7c4a5cfb..00000000 --- a/examples/fast-mteb/experiments/bge_seq_len_sweep_baseline_batch_size(1)_sequence_length(32)/1/hydra_config.yaml +++ /dev/null @@ -1,69 +0,0 @@ -backend: - name: pytorch - version: 2.0.1 - _target_: 
optimum_benchmark.backends.pytorch.backend.PyTorchBackend - seed: 42 - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: false - continous_isolation_check: false - delete_cache: false - no_weights: false - device_map: null - torch_dtype: null - disable_grad: true - eval_mode: true - amp_autocast: false - amp_dtype: null - torch_compile: false - torch_compile_config: {} - bettertransformer: false - quantization_scheme: null - quantization_config: {} - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 1 - sequence_length: 32 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: null - can_diffuse: false - can_generate: false - forward_kwargs: {} - generate_kwargs: {} -experiment_name: bge_seq_len_sweep_baseline -model: BAAI/bge-base-en-v1.5 -device: cuda -task: feature-extraction -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.0 - transformers_version: 4.34.0.dev0 - accelerate_version: 0.23.0.dev0 - diffusers_version: 0.21.0.dev0 - python_version: 3.8.10 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB diff --git a/examples/fast-mteb/experiments/bge_seq_len_sweep_baseline_batch_size(1)_sequence_length(32)/1/inference_results.csv b/examples/fast-mteb/experiments/bge_seq_len_sweep_baseline_batch_size(1)_sequence_length(32)/1/inference_results.csv deleted file mode 100644 index 209fabad..00000000 --- a/examples/fast-mteb/experiments/bge_seq_len_sweep_baseline_batch_size(1)_sequence_length(32)/1/inference_results.csv +++ /dev/null @@ -1,2 +0,0 @@ -,forward.latency(s),forward.throughput(samples/s),forward.peak_memory(MB) -0,0.00669,149.0,1883 diff --git a/examples/fast-mteb/experiments/bge_seq_len_sweep_baseline_batch_size(1)_sequence_length(512)/5/.hydra/config.yaml b/examples/fast-mteb/experiments/bge_seq_len_sweep_baseline_batch_size(1)_sequence_length(512)/5/.hydra/config.yaml deleted file mode 100644 index b0dbc122..00000000 --- a/examples/fast-mteb/experiments/bge_seq_len_sweep_baseline_batch_size(1)_sequence_length(512)/5/.hydra/config.yaml +++ /dev/null @@ -1,69 +0,0 @@ -backend: - name: pytorch - version: ${pytorch_version:} - _target_: optimum_benchmark.backends.pytorch.backend.PyTorchBackend - seed: 42 - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: false - continous_isolation_check: false - delete_cache: false - no_weights: false - device_map: null - torch_dtype: null - disable_grad: ${is_inference:${benchmark.name}} - eval_mode: ${is_inference:${benchmark.name}} - amp_autocast: false - amp_dtype: null - torch_compile: false - torch_compile_config: {} - bettertransformer: false - quantization_scheme: null - quantization_config: {} - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 1 - sequence_length: 512 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - 
audio_sequence_length: 16000 - new_tokens: null - can_diffuse: ${can_diffuse:${task}} - can_generate: ${can_generate:${task}} - forward_kwargs: {} - generate_kwargs: {} -experiment_name: bge_seq_len_sweep_baseline -model: BAAI/bge-base-en-v1.5 -device: cuda -task: feature-extraction -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.0 - transformers_version: 4.34.0.dev0 - accelerate_version: 0.23.0.dev0 - diffusers_version: 0.21.0.dev0 - python_version: 3.8.10 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB diff --git a/examples/fast-mteb/experiments/bge_seq_len_sweep_baseline_batch_size(1)_sequence_length(512)/5/.hydra/hydra.yaml b/examples/fast-mteb/experiments/bge_seq_len_sweep_baseline_batch_size(1)_sequence_length(512)/5/.hydra/hydra.yaml deleted file mode 100644 index 31e76c7e..00000000 --- a/examples/fast-mteb/experiments/bge_seq_len_sweep_baseline_batch_size(1)_sequence_length(512)/5/.hydra/hydra.yaml +++ /dev/null @@ -1,175 +0,0 @@ -hydra: - run: - dir: experiments/${experiment_name}_batch_size(${benchmark.input_shapes.batch_size})_sequence_length(${benchmark.input_shapes.sequence_length}) - sweep: - dir: experiments/${experiment_name}_batch_size(${benchmark.input_shapes.batch_size})_sequence_length(${benchmark.input_shapes.sequence_length}) - subdir: ${hydra.job.num} - launcher: - _target_: hydra._internal.core_plugins.basic_launcher.BasicLauncher - sweeper: - _target_: hydra._internal.core_plugins.basic_sweeper.BasicSweeper - max_batch_size: null - params: - benchmark.input_shapes.batch_size: '1' - benchmark.input_shapes.sequence_length: 16,32,64,128,256,512 - help: - app_name: ${hydra.job.name} - header: '${hydra.help.app_name} is powered by Hydra. - - ' - footer: 'Powered by Hydra (https://hydra.cc) - - Use --hydra-help to view Hydra specific help - - ' - template: '${hydra.help.header} - - == Configuration groups == - - Compose your configuration from those groups (group=option) - - - $APP_CONFIG_GROUPS - - - == Config == - - Override anything in the config (foo.bar=value) - - - $CONFIG - - - ${hydra.help.footer} - - ' - hydra_help: - template: 'Hydra (${hydra.runtime.version}) - - See https://hydra.cc for more info. - - - == Flags == - - $FLAGS_HELP - - - == Configuration groups == - - Compose your configuration from those groups (For example, append hydra/job_logging=disabled - to command line) - - - $HYDRA_CONFIG_GROUPS - - - Use ''--cfg hydra'' to Show the Hydra config. - - ' - hydra_help: ??? 
- hydra_logging: - version: 1 - formatters: - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s' - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - root: - level: INFO - handlers: - - console - disable_existing_loggers: false - job_logging: - version: 1 - formatters: - simple: - format: '[%(asctime)s][%(name)s][%(levelname)s] - %(message)s' - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s] - - %(message)s' - log_colors: - DEBUG: purple - INFO: green - WARNING: yellow - ERROR: red - CRITICAL: red - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - file: - class: logging.FileHandler - formatter: simple - filename: ${hydra.job.name}.log - root: - level: INFO - handlers: - - console - - file - disable_existing_loggers: false - env: {} - mode: MULTIRUN - searchpath: [] - callbacks: {} - output_subdir: .hydra - overrides: - hydra: - - hydra.mode=MULTIRUN - task: - - benchmark.input_shapes.batch_size=1 - - benchmark.input_shapes.sequence_length=512 - job: - name: experiment - chdir: true - override_dirname: benchmark.input_shapes.batch_size=1,benchmark.input_shapes.sequence_length=512 - id: '5' - num: 5 - config_name: bge_seq_len_sweep_baseline - env_set: - CUDA_VISIBLE_DEVICES: '3' - env_copy: [] - config: - override_dirname: - kv_sep: '=' - item_sep: ',' - exclude_keys: [] - runtime: - version: 1.3.2 - version_base: '1.3' - cwd: /workspace/optimum-benchmark/examples/fast-embedder - config_sources: - - path: hydra.conf - schema: pkg - provider: hydra - - path: optimum_benchmark - schema: pkg - provider: main - - path: hydra_plugins.hydra_colorlog.conf - schema: pkg - provider: hydra-colorlog - - path: /workspace/optimum-benchmark/examples/fast-embedder/configs - schema: file - provider: command-line - - path: '' - schema: structured - provider: schema - output_dir: /workspace/optimum-benchmark/examples/fast-embedder/experiments/bge_seq_len_sweep_baseline_batch_size(1)_sequence_length(512)/5 - choices: - benchmark: inference - backend: pytorch - hydra/env: default - hydra/callbacks: null - hydra/job_logging: colorlog - hydra/hydra_logging: colorlog - hydra/hydra_help: default - hydra/help: default - hydra/sweeper: basic - hydra/launcher: basic - hydra/output: default - verbose: false diff --git a/examples/fast-mteb/experiments/bge_seq_len_sweep_baseline_batch_size(1)_sequence_length(512)/5/.hydra/overrides.yaml b/examples/fast-mteb/experiments/bge_seq_len_sweep_baseline_batch_size(1)_sequence_length(512)/5/.hydra/overrides.yaml deleted file mode 100644 index a262b1b5..00000000 --- a/examples/fast-mteb/experiments/bge_seq_len_sweep_baseline_batch_size(1)_sequence_length(512)/5/.hydra/overrides.yaml +++ /dev/null @@ -1,2 +0,0 @@ -- benchmark.input_shapes.batch_size=1 -- benchmark.input_shapes.sequence_length=512 diff --git a/examples/fast-mteb/experiments/bge_seq_len_sweep_baseline_batch_size(1)_sequence_length(512)/5/hydra_config.yaml b/examples/fast-mteb/experiments/bge_seq_len_sweep_baseline_batch_size(1)_sequence_length(512)/5/hydra_config.yaml deleted file mode 100644 index fad0e265..00000000 --- a/examples/fast-mteb/experiments/bge_seq_len_sweep_baseline_batch_size(1)_sequence_length(512)/5/hydra_config.yaml +++ /dev/null @@ -1,69 +0,0 @@ -backend: - name: pytorch - version: 2.0.1 - _target_: 
optimum_benchmark.backends.pytorch.backend.PyTorchBackend - seed: 42 - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: false - continous_isolation_check: false - delete_cache: false - no_weights: false - device_map: null - torch_dtype: null - disable_grad: true - eval_mode: true - amp_autocast: false - amp_dtype: null - torch_compile: false - torch_compile_config: {} - bettertransformer: false - quantization_scheme: null - quantization_config: {} - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 1 - sequence_length: 512 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: null - can_diffuse: false - can_generate: false - forward_kwargs: {} - generate_kwargs: {} -experiment_name: bge_seq_len_sweep_baseline -model: BAAI/bge-base-en-v1.5 -device: cuda -task: feature-extraction -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.0 - transformers_version: 4.34.0.dev0 - accelerate_version: 0.23.0.dev0 - diffusers_version: 0.21.0.dev0 - python_version: 3.8.10 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB diff --git a/examples/fast-mteb/experiments/bge_seq_len_sweep_baseline_batch_size(1)_sequence_length(512)/5/inference_results.csv b/examples/fast-mteb/experiments/bge_seq_len_sweep_baseline_batch_size(1)_sequence_length(512)/5/inference_results.csv deleted file mode 100644 index c4cd6554..00000000 --- a/examples/fast-mteb/experiments/bge_seq_len_sweep_baseline_batch_size(1)_sequence_length(512)/5/inference_results.csv +++ /dev/null @@ -1,2 +0,0 @@ -,forward.latency(s),forward.throughput(samples/s),forward.peak_memory(MB) -0,0.0096,104.0,1906 diff --git a/examples/fast-mteb/experiments/bge_seq_len_sweep_baseline_batch_size(1)_sequence_length(64)/2/.hydra/config.yaml b/examples/fast-mteb/experiments/bge_seq_len_sweep_baseline_batch_size(1)_sequence_length(64)/2/.hydra/config.yaml deleted file mode 100644 index e700bc5f..00000000 --- a/examples/fast-mteb/experiments/bge_seq_len_sweep_baseline_batch_size(1)_sequence_length(64)/2/.hydra/config.yaml +++ /dev/null @@ -1,69 +0,0 @@ -backend: - name: pytorch - version: ${pytorch_version:} - _target_: optimum_benchmark.backends.pytorch.backend.PyTorchBackend - seed: 42 - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: false - continous_isolation_check: false - delete_cache: false - no_weights: false - device_map: null - torch_dtype: null - disable_grad: ${is_inference:${benchmark.name}} - eval_mode: ${is_inference:${benchmark.name}} - amp_autocast: false - amp_dtype: null - torch_compile: false - torch_compile_config: {} - bettertransformer: false - quantization_scheme: null - quantization_config: {} - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 1 - sequence_length: 64 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - 
audio_sequence_length: 16000 - new_tokens: null - can_diffuse: ${can_diffuse:${task}} - can_generate: ${can_generate:${task}} - forward_kwargs: {} - generate_kwargs: {} -experiment_name: bge_seq_len_sweep_baseline -model: BAAI/bge-base-en-v1.5 -device: cuda -task: feature-extraction -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.0 - transformers_version: 4.34.0.dev0 - accelerate_version: 0.23.0.dev0 - diffusers_version: 0.21.0.dev0 - python_version: 3.8.10 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB diff --git a/examples/fast-mteb/experiments/bge_seq_len_sweep_baseline_batch_size(1)_sequence_length(64)/2/.hydra/hydra.yaml b/examples/fast-mteb/experiments/bge_seq_len_sweep_baseline_batch_size(1)_sequence_length(64)/2/.hydra/hydra.yaml deleted file mode 100644 index 1d8e5207..00000000 --- a/examples/fast-mteb/experiments/bge_seq_len_sweep_baseline_batch_size(1)_sequence_length(64)/2/.hydra/hydra.yaml +++ /dev/null @@ -1,175 +0,0 @@ -hydra: - run: - dir: experiments/${experiment_name}_batch_size(${benchmark.input_shapes.batch_size})_sequence_length(${benchmark.input_shapes.sequence_length}) - sweep: - dir: experiments/${experiment_name}_batch_size(${benchmark.input_shapes.batch_size})_sequence_length(${benchmark.input_shapes.sequence_length}) - subdir: ${hydra.job.num} - launcher: - _target_: hydra._internal.core_plugins.basic_launcher.BasicLauncher - sweeper: - _target_: hydra._internal.core_plugins.basic_sweeper.BasicSweeper - max_batch_size: null - params: - benchmark.input_shapes.batch_size: '1' - benchmark.input_shapes.sequence_length: 16,32,64,128,256,512 - help: - app_name: ${hydra.job.name} - header: '${hydra.help.app_name} is powered by Hydra. - - ' - footer: 'Powered by Hydra (https://hydra.cc) - - Use --hydra-help to view Hydra specific help - - ' - template: '${hydra.help.header} - - == Configuration groups == - - Compose your configuration from those groups (group=option) - - - $APP_CONFIG_GROUPS - - - == Config == - - Override anything in the config (foo.bar=value) - - - $CONFIG - - - ${hydra.help.footer} - - ' - hydra_help: - template: 'Hydra (${hydra.runtime.version}) - - See https://hydra.cc for more info. - - - == Flags == - - $FLAGS_HELP - - - == Configuration groups == - - Compose your configuration from those groups (For example, append hydra/job_logging=disabled - to command line) - - - $HYDRA_CONFIG_GROUPS - - - Use ''--cfg hydra'' to Show the Hydra config. - - ' - hydra_help: ??? 
- hydra_logging: - version: 1 - formatters: - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s' - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - root: - level: INFO - handlers: - - console - disable_existing_loggers: false - job_logging: - version: 1 - formatters: - simple: - format: '[%(asctime)s][%(name)s][%(levelname)s] - %(message)s' - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s] - - %(message)s' - log_colors: - DEBUG: purple - INFO: green - WARNING: yellow - ERROR: red - CRITICAL: red - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - file: - class: logging.FileHandler - formatter: simple - filename: ${hydra.job.name}.log - root: - level: INFO - handlers: - - console - - file - disable_existing_loggers: false - env: {} - mode: MULTIRUN - searchpath: [] - callbacks: {} - output_subdir: .hydra - overrides: - hydra: - - hydra.mode=MULTIRUN - task: - - benchmark.input_shapes.batch_size=1 - - benchmark.input_shapes.sequence_length=64 - job: - name: experiment - chdir: true - override_dirname: benchmark.input_shapes.batch_size=1,benchmark.input_shapes.sequence_length=64 - id: '2' - num: 2 - config_name: bge_seq_len_sweep_baseline - env_set: - CUDA_VISIBLE_DEVICES: '3' - env_copy: [] - config: - override_dirname: - kv_sep: '=' - item_sep: ',' - exclude_keys: [] - runtime: - version: 1.3.2 - version_base: '1.3' - cwd: /workspace/optimum-benchmark/examples/fast-embedder - config_sources: - - path: hydra.conf - schema: pkg - provider: hydra - - path: optimum_benchmark - schema: pkg - provider: main - - path: hydra_plugins.hydra_colorlog.conf - schema: pkg - provider: hydra-colorlog - - path: /workspace/optimum-benchmark/examples/fast-embedder/configs - schema: file - provider: command-line - - path: '' - schema: structured - provider: schema - output_dir: /workspace/optimum-benchmark/examples/fast-embedder/experiments/bge_seq_len_sweep_baseline_batch_size(1)_sequence_length(64)/2 - choices: - benchmark: inference - backend: pytorch - hydra/env: default - hydra/callbacks: null - hydra/job_logging: colorlog - hydra/hydra_logging: colorlog - hydra/hydra_help: default - hydra/help: default - hydra/sweeper: basic - hydra/launcher: basic - hydra/output: default - verbose: false diff --git a/examples/fast-mteb/experiments/bge_seq_len_sweep_baseline_batch_size(1)_sequence_length(64)/2/.hydra/overrides.yaml b/examples/fast-mteb/experiments/bge_seq_len_sweep_baseline_batch_size(1)_sequence_length(64)/2/.hydra/overrides.yaml deleted file mode 100644 index cba39030..00000000 --- a/examples/fast-mteb/experiments/bge_seq_len_sweep_baseline_batch_size(1)_sequence_length(64)/2/.hydra/overrides.yaml +++ /dev/null @@ -1,2 +0,0 @@ -- benchmark.input_shapes.batch_size=1 -- benchmark.input_shapes.sequence_length=64 diff --git a/examples/fast-mteb/experiments/bge_seq_len_sweep_baseline_batch_size(1)_sequence_length(64)/2/hydra_config.yaml b/examples/fast-mteb/experiments/bge_seq_len_sweep_baseline_batch_size(1)_sequence_length(64)/2/hydra_config.yaml deleted file mode 100644 index 99404dfd..00000000 --- a/examples/fast-mteb/experiments/bge_seq_len_sweep_baseline_batch_size(1)_sequence_length(64)/2/hydra_config.yaml +++ /dev/null @@ -1,69 +0,0 @@ -backend: - name: pytorch - version: 2.0.1 - _target_: 
optimum_benchmark.backends.pytorch.backend.PyTorchBackend - seed: 42 - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: false - continous_isolation_check: false - delete_cache: false - no_weights: false - device_map: null - torch_dtype: null - disable_grad: true - eval_mode: true - amp_autocast: false - amp_dtype: null - torch_compile: false - torch_compile_config: {} - bettertransformer: false - quantization_scheme: null - quantization_config: {} - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 1 - sequence_length: 64 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: null - can_diffuse: false - can_generate: false - forward_kwargs: {} - generate_kwargs: {} -experiment_name: bge_seq_len_sweep_baseline -model: BAAI/bge-base-en-v1.5 -device: cuda -task: feature-extraction -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.0 - transformers_version: 4.34.0.dev0 - accelerate_version: 0.23.0.dev0 - diffusers_version: 0.21.0.dev0 - python_version: 3.8.10 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB diff --git a/examples/fast-mteb/experiments/bge_seq_len_sweep_baseline_batch_size(1)_sequence_length(64)/2/inference_results.csv b/examples/fast-mteb/experiments/bge_seq_len_sweep_baseline_batch_size(1)_sequence_length(64)/2/inference_results.csv deleted file mode 100644 index 3ed7fd6d..00000000 --- a/examples/fast-mteb/experiments/bge_seq_len_sweep_baseline_batch_size(1)_sequence_length(64)/2/inference_results.csv +++ /dev/null @@ -1,2 +0,0 @@ -,forward.latency(s),forward.throughput(samples/s),forward.peak_memory(MB) -0,0.00689,145.0,1885 diff --git a/examples/fast-mteb/experiments/bge_seq_len_sweep_baseline_batch_size(None)_sequence_length(None)/multirun.yaml b/examples/fast-mteb/experiments/bge_seq_len_sweep_baseline_batch_size(None)_sequence_length(None)/multirun.yaml deleted file mode 100644 index d4c2c139..00000000 --- a/examples/fast-mteb/experiments/bge_seq_len_sweep_baseline_batch_size(None)_sequence_length(None)/multirun.yaml +++ /dev/null @@ -1,242 +0,0 @@ -hydra: - run: - dir: experiments/${experiment_name}_batch_size(${benchmark.input_shapes.batch_size})_sequence_length(${benchmark.input_shapes.sequence_length}) - sweep: - dir: experiments/${experiment_name}_batch_size(${benchmark.input_shapes.batch_size})_sequence_length(${benchmark.input_shapes.sequence_length}) - subdir: ${hydra.job.num} - launcher: - _target_: hydra._internal.core_plugins.basic_launcher.BasicLauncher - sweeper: - _target_: hydra._internal.core_plugins.basic_sweeper.BasicSweeper - max_batch_size: null - params: - benchmark.input_shapes.batch_size: '1' - benchmark.input_shapes.sequence_length: 16,32,64,128,256,512 - help: - app_name: ${hydra.job.name} - header: '${hydra.help.app_name} is powered by Hydra. 
- - ' - footer: 'Powered by Hydra (https://hydra.cc) - - Use --hydra-help to view Hydra specific help - - ' - template: '${hydra.help.header} - - == Configuration groups == - - Compose your configuration from those groups (group=option) - - - $APP_CONFIG_GROUPS - - - == Config == - - Override anything in the config (foo.bar=value) - - - $CONFIG - - - ${hydra.help.footer} - - ' - hydra_help: - template: 'Hydra (${hydra.runtime.version}) - - See https://hydra.cc for more info. - - - == Flags == - - $FLAGS_HELP - - - == Configuration groups == - - Compose your configuration from those groups (For example, append hydra/job_logging=disabled - to command line) - - - $HYDRA_CONFIG_GROUPS - - - Use ''--cfg hydra'' to Show the Hydra config. - - ' - hydra_help: ??? - hydra_logging: - version: 1 - formatters: - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s' - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - root: - level: INFO - handlers: - - console - disable_existing_loggers: false - job_logging: - version: 1 - formatters: - simple: - format: '[%(asctime)s][%(name)s][%(levelname)s] - %(message)s' - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s] - - %(message)s' - log_colors: - DEBUG: purple - INFO: green - WARNING: yellow - ERROR: red - CRITICAL: red - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - file: - class: logging.FileHandler - formatter: simple - filename: ${hydra.job.name}.log - root: - level: INFO - handlers: - - console - - file - disable_existing_loggers: false - env: {} - mode: MULTIRUN - searchpath: [] - callbacks: {} - output_subdir: .hydra - overrides: - hydra: - - hydra.mode=MULTIRUN - task: [] - job: - name: experiment - chdir: true - override_dirname: '' - id: ??? - num: ??? - config_name: bge_seq_len_sweep_baseline - env_set: - CUDA_VISIBLE_DEVICES: '3' - env_copy: [] - config: - override_dirname: - kv_sep: '=' - item_sep: ',' - exclude_keys: [] - runtime: - version: 1.3.2 - version_base: '1.3' - cwd: /workspace/optimum-benchmark/examples/fast-embedder - config_sources: - - path: hydra.conf - schema: pkg - provider: hydra - - path: optimum_benchmark - schema: pkg - provider: main - - path: hydra_plugins.hydra_colorlog.conf - schema: pkg - provider: hydra-colorlog - - path: /workspace/optimum-benchmark/examples/fast-embedder/configs - schema: file - provider: command-line - - path: '' - schema: structured - provider: schema - output_dir: ??? 
- choices: - benchmark: inference - backend: pytorch - hydra/env: default - hydra/callbacks: null - hydra/job_logging: colorlog - hydra/hydra_logging: colorlog - hydra/hydra_help: default - hydra/help: default - hydra/sweeper: basic - hydra/launcher: basic - hydra/output: default - verbose: false -backend: - name: pytorch - version: ${pytorch_version:} - _target_: optimum_benchmark.backends.pytorch.backend.PyTorchBackend - seed: 42 - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: false - continous_isolation_check: false - delete_cache: false - no_weights: false - device_map: null - torch_dtype: null - disable_grad: ${is_inference:${benchmark.name}} - eval_mode: ${is_inference:${benchmark.name}} - amp_autocast: false - amp_dtype: null - torch_compile: false - torch_compile_config: {} - bettertransformer: false - quantization_scheme: null - quantization_config: {} - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: null - sequence_length: null - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: null - can_diffuse: ${can_diffuse:${task}} - can_generate: ${can_generate:${task}} - forward_kwargs: {} - generate_kwargs: {} -experiment_name: bge_seq_len_sweep_baseline -model: BAAI/bge-base-en-v1.5 -device: cuda -task: feature-extraction -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.0 - transformers_version: 4.34.0.dev0 - accelerate_version: 0.23.0.dev0 - diffusers_version: 0.21.0.dev0 - python_version: 3.8.10 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB diff --git a/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_cuda_o4_batch_size(1)_sequence_length(128)/3/.hydra/config.yaml b/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_cuda_o4_batch_size(1)_sequence_length(128)/3/.hydra/config.yaml deleted file mode 100644 index f8432893..00000000 --- a/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_cuda_o4_batch_size(1)_sequence_length(128)/3/.hydra/config.yaml +++ /dev/null @@ -1,79 +0,0 @@ -backend: - name: onnxruntime - version: ${onnxruntime_version:} - _target_: optimum_benchmark.backends.onnxruntime.backend.ORTBackend - seed: 42 - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: false - continous_isolation_check: false - delete_cache: false - no_weights: false - export: true - use_cache: true - use_merged: false - torch_dtype: null - provider: CUDAExecutionProvider - provider_options: - device_id: ${infer_device_id:${device}} - use_io_binding: ${io_bind:${device}} - session_options: - enable_profiling: ${is_profiling:${benchmark.name}} - optimization: false - optimization_config: {} - quantization: false - quantization_config: {} - calibration: false - calibration_config: {} - auto_optimization: O4 - auto_optimization_config: {} - auto_quantization: null - auto_quantization_config: {} - use_inference_session: ${is_inference:${benchmark.name}} - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: 
optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 1 - sequence_length: 128 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: null - can_diffuse: ${can_diffuse:${task}} - can_generate: ${can_generate:${task}} - forward_kwargs: {} - generate_kwargs: {} -experiment_name: bge_seq_len_sweep_ort_cuda_o4 -model: BAAI/bge-base-en-v1.5 -device: cuda -task: feature-extraction -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.0 - transformers_version: 4.34.0.dev0 - accelerate_version: 0.23.0.dev0 - diffusers_version: 0.21.0.dev0 - python_version: 3.8.10 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB diff --git a/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_cuda_o4_batch_size(1)_sequence_length(128)/3/.hydra/hydra.yaml b/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_cuda_o4_batch_size(1)_sequence_length(128)/3/.hydra/hydra.yaml deleted file mode 100644 index c7a9d347..00000000 --- a/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_cuda_o4_batch_size(1)_sequence_length(128)/3/.hydra/hydra.yaml +++ /dev/null @@ -1,175 +0,0 @@ -hydra: - run: - dir: experiments/${experiment_name}_batch_size(${benchmark.input_shapes.batch_size})_sequence_length(${benchmark.input_shapes.sequence_length}) - sweep: - dir: experiments/${experiment_name}_batch_size(${benchmark.input_shapes.batch_size})_sequence_length(${benchmark.input_shapes.sequence_length}) - subdir: ${hydra.job.num} - launcher: - _target_: hydra._internal.core_plugins.basic_launcher.BasicLauncher - sweeper: - _target_: hydra._internal.core_plugins.basic_sweeper.BasicSweeper - max_batch_size: null - params: - benchmark.input_shapes.batch_size: '1' - benchmark.input_shapes.sequence_length: 16,32,64,128,256,512 - help: - app_name: ${hydra.job.name} - header: '${hydra.help.app_name} is powered by Hydra. - - ' - footer: 'Powered by Hydra (https://hydra.cc) - - Use --hydra-help to view Hydra specific help - - ' - template: '${hydra.help.header} - - == Configuration groups == - - Compose your configuration from those groups (group=option) - - - $APP_CONFIG_GROUPS - - - == Config == - - Override anything in the config (foo.bar=value) - - - $CONFIG - - - ${hydra.help.footer} - - ' - hydra_help: - template: 'Hydra (${hydra.runtime.version}) - - See https://hydra.cc for more info. - - - == Flags == - - $FLAGS_HELP - - - == Configuration groups == - - Compose your configuration from those groups (For example, append hydra/job_logging=disabled - to command line) - - - $HYDRA_CONFIG_GROUPS - - - Use ''--cfg hydra'' to Show the Hydra config. - - ' - hydra_help: ??? 
- hydra_logging: - version: 1 - formatters: - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s' - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - root: - level: INFO - handlers: - - console - disable_existing_loggers: false - job_logging: - version: 1 - formatters: - simple: - format: '[%(asctime)s][%(name)s][%(levelname)s] - %(message)s' - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s] - - %(message)s' - log_colors: - DEBUG: purple - INFO: green - WARNING: yellow - ERROR: red - CRITICAL: red - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - file: - class: logging.FileHandler - formatter: simple - filename: ${hydra.job.name}.log - root: - level: INFO - handlers: - - console - - file - disable_existing_loggers: false - env: {} - mode: MULTIRUN - searchpath: [] - callbacks: {} - output_subdir: .hydra - overrides: - hydra: - - hydra.mode=MULTIRUN - task: - - benchmark.input_shapes.batch_size=1 - - benchmark.input_shapes.sequence_length=128 - job: - name: experiment - chdir: true - override_dirname: benchmark.input_shapes.batch_size=1,benchmark.input_shapes.sequence_length=128 - id: '3' - num: 3 - config_name: bge_seq_len_sweep_ort_cuda_o4.yaml - env_set: - CUDA_VISIBLE_DEVICES: '3' - env_copy: [] - config: - override_dirname: - kv_sep: '=' - item_sep: ',' - exclude_keys: [] - runtime: - version: 1.3.2 - version_base: '1.3' - cwd: /workspace/optimum-benchmark/examples/fast-embedders - config_sources: - - path: hydra.conf - schema: pkg - provider: hydra - - path: optimum_benchmark - schema: pkg - provider: main - - path: hydra_plugins.hydra_colorlog.conf - schema: pkg - provider: hydra-colorlog - - path: /workspace/optimum-benchmark/examples/fast-embedders/configs - schema: file - provider: command-line - - path: '' - schema: structured - provider: schema - output_dir: /workspace/optimum-benchmark/examples/fast-embedders/experiments/bge_seq_len_sweep_ort_cuda_o4_batch_size(1)_sequence_length(128)/3 - choices: - benchmark: inference - backend: onnxruntime - hydra/env: default - hydra/callbacks: null - hydra/job_logging: colorlog - hydra/hydra_logging: colorlog - hydra/hydra_help: default - hydra/help: default - hydra/sweeper: basic - hydra/launcher: basic - hydra/output: default - verbose: false diff --git a/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_cuda_o4_batch_size(1)_sequence_length(128)/3/.hydra/overrides.yaml b/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_cuda_o4_batch_size(1)_sequence_length(128)/3/.hydra/overrides.yaml deleted file mode 100644 index 1c55c407..00000000 --- a/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_cuda_o4_batch_size(1)_sequence_length(128)/3/.hydra/overrides.yaml +++ /dev/null @@ -1,2 +0,0 @@ -- benchmark.input_shapes.batch_size=1 -- benchmark.input_shapes.sequence_length=128 diff --git a/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_cuda_o4_batch_size(1)_sequence_length(128)/3/hydra_config.yaml b/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_cuda_o4_batch_size(1)_sequence_length(128)/3/hydra_config.yaml deleted file mode 100644 index 63bbae2b..00000000 --- a/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_cuda_o4_batch_size(1)_sequence_length(128)/3/hydra_config.yaml +++ /dev/null @@ -1,80 +0,0 @@ -backend: - name: onnxruntime - version: 
ort-gpu:1.15.1 - _target_: optimum_benchmark.backends.onnxruntime.backend.ORTBackend - seed: 42 - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: false - continous_isolation_check: false - delete_cache: false - no_weights: false - export: true - use_cache: true - use_merged: false - torch_dtype: null - provider: CUDAExecutionProvider - provider_options: - device_id: 0 - use_io_binding: false - session_options: - enable_profiling: false - optimization: false - optimization_config: {} - quantization: false - quantization_config: {} - calibration: false - calibration_config: {} - auto_optimization: O4 - auto_optimization_config: - disable_shape_inference: false - auto_quantization: null - auto_quantization_config: {} - use_inference_session: true - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 1 - sequence_length: 128 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: null - can_diffuse: false - can_generate: false - forward_kwargs: {} - generate_kwargs: {} -experiment_name: bge_seq_len_sweep_ort_cuda_o4 -model: BAAI/bge-base-en-v1.5 -device: cuda -task: feature-extraction -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.0 - transformers_version: 4.34.0.dev0 - accelerate_version: 0.23.0.dev0 - diffusers_version: 0.21.0.dev0 - python_version: 3.8.10 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB diff --git a/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_cuda_o4_batch_size(1)_sequence_length(128)/3/inference_results.csv b/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_cuda_o4_batch_size(1)_sequence_length(128)/3/inference_results.csv deleted file mode 100644 index dcc94eaa..00000000 --- a/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_cuda_o4_batch_size(1)_sequence_length(128)/3/inference_results.csv +++ /dev/null @@ -1,2 +0,0 @@ -,forward.latency(s),forward.throughput(samples/s),forward.peak_memory(MB) -0,0.00132,758.0,3498 diff --git a/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_cuda_o4_batch_size(1)_sequence_length(16)/0/.hydra/config.yaml b/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_cuda_o4_batch_size(1)_sequence_length(16)/0/.hydra/config.yaml deleted file mode 100644 index 64afa779..00000000 --- a/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_cuda_o4_batch_size(1)_sequence_length(16)/0/.hydra/config.yaml +++ /dev/null @@ -1,79 +0,0 @@ -backend: - name: onnxruntime - version: ${onnxruntime_version:} - _target_: optimum_benchmark.backends.onnxruntime.backend.ORTBackend - seed: 42 - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: false - continous_isolation_check: false - delete_cache: false - no_weights: false - export: true - use_cache: true - use_merged: false - torch_dtype: null - provider: CUDAExecutionProvider - provider_options: - device_id: ${infer_device_id:${device}} - use_io_binding: ${io_bind:${device}} - session_options: - enable_profiling: ${is_profiling:${benchmark.name}} - optimization: false - optimization_config: {} - quantization: false - 
quantization_config: {} - calibration: false - calibration_config: {} - auto_optimization: O4 - auto_optimization_config: {} - auto_quantization: null - auto_quantization_config: {} - use_inference_session: ${is_inference:${benchmark.name}} - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 1 - sequence_length: 16 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: null - can_diffuse: ${can_diffuse:${task}} - can_generate: ${can_generate:${task}} - forward_kwargs: {} - generate_kwargs: {} -experiment_name: bge_seq_len_sweep_ort_cuda_o4 -model: BAAI/bge-base-en-v1.5 -device: cuda -task: feature-extraction -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.0 - transformers_version: 4.34.0.dev0 - accelerate_version: 0.23.0.dev0 - diffusers_version: 0.21.0.dev0 - python_version: 3.8.10 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB diff --git a/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_cuda_o4_batch_size(1)_sequence_length(16)/0/.hydra/hydra.yaml b/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_cuda_o4_batch_size(1)_sequence_length(16)/0/.hydra/hydra.yaml deleted file mode 100644 index 7726101f..00000000 --- a/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_cuda_o4_batch_size(1)_sequence_length(16)/0/.hydra/hydra.yaml +++ /dev/null @@ -1,175 +0,0 @@ -hydra: - run: - dir: experiments/${experiment_name}_batch_size(${benchmark.input_shapes.batch_size})_sequence_length(${benchmark.input_shapes.sequence_length}) - sweep: - dir: experiments/${experiment_name}_batch_size(${benchmark.input_shapes.batch_size})_sequence_length(${benchmark.input_shapes.sequence_length}) - subdir: ${hydra.job.num} - launcher: - _target_: hydra._internal.core_plugins.basic_launcher.BasicLauncher - sweeper: - _target_: hydra._internal.core_plugins.basic_sweeper.BasicSweeper - max_batch_size: null - params: - benchmark.input_shapes.batch_size: '1' - benchmark.input_shapes.sequence_length: 16,32,64,128,256,512 - help: - app_name: ${hydra.job.name} - header: '${hydra.help.app_name} is powered by Hydra. - - ' - footer: 'Powered by Hydra (https://hydra.cc) - - Use --hydra-help to view Hydra specific help - - ' - template: '${hydra.help.header} - - == Configuration groups == - - Compose your configuration from those groups (group=option) - - - $APP_CONFIG_GROUPS - - - == Config == - - Override anything in the config (foo.bar=value) - - - $CONFIG - - - ${hydra.help.footer} - - ' - hydra_help: - template: 'Hydra (${hydra.runtime.version}) - - See https://hydra.cc for more info. - - - == Flags == - - $FLAGS_HELP - - - == Configuration groups == - - Compose your configuration from those groups (For example, append hydra/job_logging=disabled - to command line) - - - $HYDRA_CONFIG_GROUPS - - - Use ''--cfg hydra'' to Show the Hydra config. - - ' - hydra_help: ??? 
- hydra_logging: - version: 1 - formatters: - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s' - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - root: - level: INFO - handlers: - - console - disable_existing_loggers: false - job_logging: - version: 1 - formatters: - simple: - format: '[%(asctime)s][%(name)s][%(levelname)s] - %(message)s' - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s] - - %(message)s' - log_colors: - DEBUG: purple - INFO: green - WARNING: yellow - ERROR: red - CRITICAL: red - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - file: - class: logging.FileHandler - formatter: simple - filename: ${hydra.job.name}.log - root: - level: INFO - handlers: - - console - - file - disable_existing_loggers: false - env: {} - mode: MULTIRUN - searchpath: [] - callbacks: {} - output_subdir: .hydra - overrides: - hydra: - - hydra.mode=MULTIRUN - task: - - benchmark.input_shapes.batch_size=1 - - benchmark.input_shapes.sequence_length=16 - job: - name: experiment - chdir: true - override_dirname: benchmark.input_shapes.batch_size=1,benchmark.input_shapes.sequence_length=16 - id: '0' - num: 0 - config_name: bge_seq_len_sweep_ort_cuda_o4.yaml - env_set: - CUDA_VISIBLE_DEVICES: '3' - env_copy: [] - config: - override_dirname: - kv_sep: '=' - item_sep: ',' - exclude_keys: [] - runtime: - version: 1.3.2 - version_base: '1.3' - cwd: /workspace/optimum-benchmark/examples/fast-embedders - config_sources: - - path: hydra.conf - schema: pkg - provider: hydra - - path: optimum_benchmark - schema: pkg - provider: main - - path: hydra_plugins.hydra_colorlog.conf - schema: pkg - provider: hydra-colorlog - - path: /workspace/optimum-benchmark/examples/fast-embedders/configs - schema: file - provider: command-line - - path: '' - schema: structured - provider: schema - output_dir: /workspace/optimum-benchmark/examples/fast-embedders/experiments/bge_seq_len_sweep_ort_cuda_o4_batch_size(1)_sequence_length(16)/0 - choices: - benchmark: inference - backend: onnxruntime - hydra/env: default - hydra/callbacks: null - hydra/job_logging: colorlog - hydra/hydra_logging: colorlog - hydra/hydra_help: default - hydra/help: default - hydra/sweeper: basic - hydra/launcher: basic - hydra/output: default - verbose: false diff --git a/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_cuda_o4_batch_size(1)_sequence_length(16)/0/.hydra/overrides.yaml b/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_cuda_o4_batch_size(1)_sequence_length(16)/0/.hydra/overrides.yaml deleted file mode 100644 index 7d89e6a3..00000000 --- a/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_cuda_o4_batch_size(1)_sequence_length(16)/0/.hydra/overrides.yaml +++ /dev/null @@ -1,2 +0,0 @@ -- benchmark.input_shapes.batch_size=1 -- benchmark.input_shapes.sequence_length=16 diff --git a/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_cuda_o4_batch_size(1)_sequence_length(16)/0/hydra_config.yaml b/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_cuda_o4_batch_size(1)_sequence_length(16)/0/hydra_config.yaml deleted file mode 100644 index 407f267d..00000000 --- a/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_cuda_o4_batch_size(1)_sequence_length(16)/0/hydra_config.yaml +++ /dev/null @@ -1,80 +0,0 @@ -backend: - name: onnxruntime - version: ort-gpu:1.15.1 
- _target_: optimum_benchmark.backends.onnxruntime.backend.ORTBackend - seed: 42 - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: false - continous_isolation_check: false - delete_cache: false - no_weights: false - export: true - use_cache: true - use_merged: false - torch_dtype: null - provider: CUDAExecutionProvider - provider_options: - device_id: 0 - use_io_binding: false - session_options: - enable_profiling: false - optimization: false - optimization_config: {} - quantization: false - quantization_config: {} - calibration: false - calibration_config: {} - auto_optimization: O4 - auto_optimization_config: - disable_shape_inference: false - auto_quantization: null - auto_quantization_config: {} - use_inference_session: true - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 1 - sequence_length: 16 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: null - can_diffuse: false - can_generate: false - forward_kwargs: {} - generate_kwargs: {} -experiment_name: bge_seq_len_sweep_ort_cuda_o4 -model: BAAI/bge-base-en-v1.5 -device: cuda -task: feature-extraction -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.0 - transformers_version: 4.34.0.dev0 - accelerate_version: 0.23.0.dev0 - diffusers_version: 0.21.0.dev0 - python_version: 3.8.10 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB diff --git a/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_cuda_o4_batch_size(1)_sequence_length(16)/0/inference_results.csv b/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_cuda_o4_batch_size(1)_sequence_length(16)/0/inference_results.csv deleted file mode 100644 index 74272d3d..00000000 --- a/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_cuda_o4_batch_size(1)_sequence_length(16)/0/inference_results.csv +++ /dev/null @@ -1,2 +0,0 @@ -,forward.latency(s),forward.throughput(samples/s),forward.peak_memory(MB) -0,0.0011,909.0,3532 diff --git a/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_cuda_o4_batch_size(1)_sequence_length(256)/4/.hydra/config.yaml b/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_cuda_o4_batch_size(1)_sequence_length(256)/4/.hydra/config.yaml deleted file mode 100644 index acfac450..00000000 --- a/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_cuda_o4_batch_size(1)_sequence_length(256)/4/.hydra/config.yaml +++ /dev/null @@ -1,79 +0,0 @@ -backend: - name: onnxruntime - version: ${onnxruntime_version:} - _target_: optimum_benchmark.backends.onnxruntime.backend.ORTBackend - seed: 42 - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: false - continous_isolation_check: false - delete_cache: false - no_weights: false - export: true - use_cache: true - use_merged: false - torch_dtype: null - provider: CUDAExecutionProvider - provider_options: - device_id: ${infer_device_id:${device}} - use_io_binding: ${io_bind:${device}} - session_options: - enable_profiling: ${is_profiling:${benchmark.name}} - optimization: false - optimization_config: {} - quantization: false - 
quantization_config: {} - calibration: false - calibration_config: {} - auto_optimization: O4 - auto_optimization_config: {} - auto_quantization: null - auto_quantization_config: {} - use_inference_session: ${is_inference:${benchmark.name}} - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 1 - sequence_length: 256 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: null - can_diffuse: ${can_diffuse:${task}} - can_generate: ${can_generate:${task}} - forward_kwargs: {} - generate_kwargs: {} -experiment_name: bge_seq_len_sweep_ort_cuda_o4 -model: BAAI/bge-base-en-v1.5 -device: cuda -task: feature-extraction -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.0 - transformers_version: 4.34.0.dev0 - accelerate_version: 0.23.0.dev0 - diffusers_version: 0.21.0.dev0 - python_version: 3.8.10 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB diff --git a/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_cuda_o4_batch_size(1)_sequence_length(256)/4/.hydra/hydra.yaml b/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_cuda_o4_batch_size(1)_sequence_length(256)/4/.hydra/hydra.yaml deleted file mode 100644 index c8780806..00000000 --- a/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_cuda_o4_batch_size(1)_sequence_length(256)/4/.hydra/hydra.yaml +++ /dev/null @@ -1,175 +0,0 @@ -hydra: - run: - dir: experiments/${experiment_name}_batch_size(${benchmark.input_shapes.batch_size})_sequence_length(${benchmark.input_shapes.sequence_length}) - sweep: - dir: experiments/${experiment_name}_batch_size(${benchmark.input_shapes.batch_size})_sequence_length(${benchmark.input_shapes.sequence_length}) - subdir: ${hydra.job.num} - launcher: - _target_: hydra._internal.core_plugins.basic_launcher.BasicLauncher - sweeper: - _target_: hydra._internal.core_plugins.basic_sweeper.BasicSweeper - max_batch_size: null - params: - benchmark.input_shapes.batch_size: '1' - benchmark.input_shapes.sequence_length: 16,32,64,128,256,512 - help: - app_name: ${hydra.job.name} - header: '${hydra.help.app_name} is powered by Hydra. - - ' - footer: 'Powered by Hydra (https://hydra.cc) - - Use --hydra-help to view Hydra specific help - - ' - template: '${hydra.help.header} - - == Configuration groups == - - Compose your configuration from those groups (group=option) - - - $APP_CONFIG_GROUPS - - - == Config == - - Override anything in the config (foo.bar=value) - - - $CONFIG - - - ${hydra.help.footer} - - ' - hydra_help: - template: 'Hydra (${hydra.runtime.version}) - - See https://hydra.cc for more info. - - - == Flags == - - $FLAGS_HELP - - - == Configuration groups == - - Compose your configuration from those groups (For example, append hydra/job_logging=disabled - to command line) - - - $HYDRA_CONFIG_GROUPS - - - Use ''--cfg hydra'' to Show the Hydra config. - - ' - hydra_help: ??? 
- hydra_logging: - version: 1 - formatters: - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s' - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - root: - level: INFO - handlers: - - console - disable_existing_loggers: false - job_logging: - version: 1 - formatters: - simple: - format: '[%(asctime)s][%(name)s][%(levelname)s] - %(message)s' - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s] - - %(message)s' - log_colors: - DEBUG: purple - INFO: green - WARNING: yellow - ERROR: red - CRITICAL: red - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - file: - class: logging.FileHandler - formatter: simple - filename: ${hydra.job.name}.log - root: - level: INFO - handlers: - - console - - file - disable_existing_loggers: false - env: {} - mode: MULTIRUN - searchpath: [] - callbacks: {} - output_subdir: .hydra - overrides: - hydra: - - hydra.mode=MULTIRUN - task: - - benchmark.input_shapes.batch_size=1 - - benchmark.input_shapes.sequence_length=256 - job: - name: experiment - chdir: true - override_dirname: benchmark.input_shapes.batch_size=1,benchmark.input_shapes.sequence_length=256 - id: '4' - num: 4 - config_name: bge_seq_len_sweep_ort_cuda_o4.yaml - env_set: - CUDA_VISIBLE_DEVICES: '3' - env_copy: [] - config: - override_dirname: - kv_sep: '=' - item_sep: ',' - exclude_keys: [] - runtime: - version: 1.3.2 - version_base: '1.3' - cwd: /workspace/optimum-benchmark/examples/fast-embedders - config_sources: - - path: hydra.conf - schema: pkg - provider: hydra - - path: optimum_benchmark - schema: pkg - provider: main - - path: hydra_plugins.hydra_colorlog.conf - schema: pkg - provider: hydra-colorlog - - path: /workspace/optimum-benchmark/examples/fast-embedders/configs - schema: file - provider: command-line - - path: '' - schema: structured - provider: schema - output_dir: /workspace/optimum-benchmark/examples/fast-embedders/experiments/bge_seq_len_sweep_ort_cuda_o4_batch_size(1)_sequence_length(256)/4 - choices: - benchmark: inference - backend: onnxruntime - hydra/env: default - hydra/callbacks: null - hydra/job_logging: colorlog - hydra/hydra_logging: colorlog - hydra/hydra_help: default - hydra/help: default - hydra/sweeper: basic - hydra/launcher: basic - hydra/output: default - verbose: false diff --git a/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_cuda_o4_batch_size(1)_sequence_length(256)/4/.hydra/overrides.yaml b/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_cuda_o4_batch_size(1)_sequence_length(256)/4/.hydra/overrides.yaml deleted file mode 100644 index 68453b03..00000000 --- a/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_cuda_o4_batch_size(1)_sequence_length(256)/4/.hydra/overrides.yaml +++ /dev/null @@ -1,2 +0,0 @@ -- benchmark.input_shapes.batch_size=1 -- benchmark.input_shapes.sequence_length=256 diff --git a/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_cuda_o4_batch_size(1)_sequence_length(256)/4/hydra_config.yaml b/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_cuda_o4_batch_size(1)_sequence_length(256)/4/hydra_config.yaml deleted file mode 100644 index adaffc10..00000000 --- a/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_cuda_o4_batch_size(1)_sequence_length(256)/4/hydra_config.yaml +++ /dev/null @@ -1,80 +0,0 @@ -backend: - name: onnxruntime - version: 
ort-gpu:1.15.1 - _target_: optimum_benchmark.backends.onnxruntime.backend.ORTBackend - seed: 42 - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: false - continous_isolation_check: false - delete_cache: false - no_weights: false - export: true - use_cache: true - use_merged: false - torch_dtype: null - provider: CUDAExecutionProvider - provider_options: - device_id: 0 - use_io_binding: false - session_options: - enable_profiling: false - optimization: false - optimization_config: {} - quantization: false - quantization_config: {} - calibration: false - calibration_config: {} - auto_optimization: O4 - auto_optimization_config: - disable_shape_inference: false - auto_quantization: null - auto_quantization_config: {} - use_inference_session: true - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 1 - sequence_length: 256 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: null - can_diffuse: false - can_generate: false - forward_kwargs: {} - generate_kwargs: {} -experiment_name: bge_seq_len_sweep_ort_cuda_o4 -model: BAAI/bge-base-en-v1.5 -device: cuda -task: feature-extraction -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.0 - transformers_version: 4.34.0.dev0 - accelerate_version: 0.23.0.dev0 - diffusers_version: 0.21.0.dev0 - python_version: 3.8.10 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB diff --git a/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_cuda_o4_batch_size(1)_sequence_length(256)/4/inference_results.csv b/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_cuda_o4_batch_size(1)_sequence_length(256)/4/inference_results.csv deleted file mode 100644 index 3ef35c01..00000000 --- a/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_cuda_o4_batch_size(1)_sequence_length(256)/4/inference_results.csv +++ /dev/null @@ -1,2 +0,0 @@ -,forward.latency(s),forward.throughput(samples/s),forward.peak_memory(MB) -0,0.0015,667.0,3502 diff --git a/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_cuda_o4_batch_size(1)_sequence_length(32)/1/.hydra/config.yaml b/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_cuda_o4_batch_size(1)_sequence_length(32)/1/.hydra/config.yaml deleted file mode 100644 index 645481ab..00000000 --- a/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_cuda_o4_batch_size(1)_sequence_length(32)/1/.hydra/config.yaml +++ /dev/null @@ -1,79 +0,0 @@ -backend: - name: onnxruntime - version: ${onnxruntime_version:} - _target_: optimum_benchmark.backends.onnxruntime.backend.ORTBackend - seed: 42 - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: false - continous_isolation_check: false - delete_cache: false - no_weights: false - export: true - use_cache: true - use_merged: false - torch_dtype: null - provider: CUDAExecutionProvider - provider_options: - device_id: ${infer_device_id:${device}} - use_io_binding: ${io_bind:${device}} - session_options: - enable_profiling: ${is_profiling:${benchmark.name}} - optimization: false - optimization_config: {} - quantization: false - 
quantization_config: {} - calibration: false - calibration_config: {} - auto_optimization: O4 - auto_optimization_config: {} - auto_quantization: null - auto_quantization_config: {} - use_inference_session: ${is_inference:${benchmark.name}} - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 1 - sequence_length: 32 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: null - can_diffuse: ${can_diffuse:${task}} - can_generate: ${can_generate:${task}} - forward_kwargs: {} - generate_kwargs: {} -experiment_name: bge_seq_len_sweep_ort_cuda_o4 -model: BAAI/bge-base-en-v1.5 -device: cuda -task: feature-extraction -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.0 - transformers_version: 4.34.0.dev0 - accelerate_version: 0.23.0.dev0 - diffusers_version: 0.21.0.dev0 - python_version: 3.8.10 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB diff --git a/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_cuda_o4_batch_size(1)_sequence_length(32)/1/.hydra/hydra.yaml b/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_cuda_o4_batch_size(1)_sequence_length(32)/1/.hydra/hydra.yaml deleted file mode 100644 index 5f21f5a0..00000000 --- a/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_cuda_o4_batch_size(1)_sequence_length(32)/1/.hydra/hydra.yaml +++ /dev/null @@ -1,175 +0,0 @@ -hydra: - run: - dir: experiments/${experiment_name}_batch_size(${benchmark.input_shapes.batch_size})_sequence_length(${benchmark.input_shapes.sequence_length}) - sweep: - dir: experiments/${experiment_name}_batch_size(${benchmark.input_shapes.batch_size})_sequence_length(${benchmark.input_shapes.sequence_length}) - subdir: ${hydra.job.num} - launcher: - _target_: hydra._internal.core_plugins.basic_launcher.BasicLauncher - sweeper: - _target_: hydra._internal.core_plugins.basic_sweeper.BasicSweeper - max_batch_size: null - params: - benchmark.input_shapes.batch_size: '1' - benchmark.input_shapes.sequence_length: 16,32,64,128,256,512 - help: - app_name: ${hydra.job.name} - header: '${hydra.help.app_name} is powered by Hydra. - - ' - footer: 'Powered by Hydra (https://hydra.cc) - - Use --hydra-help to view Hydra specific help - - ' - template: '${hydra.help.header} - - == Configuration groups == - - Compose your configuration from those groups (group=option) - - - $APP_CONFIG_GROUPS - - - == Config == - - Override anything in the config (foo.bar=value) - - - $CONFIG - - - ${hydra.help.footer} - - ' - hydra_help: - template: 'Hydra (${hydra.runtime.version}) - - See https://hydra.cc for more info. - - - == Flags == - - $FLAGS_HELP - - - == Configuration groups == - - Compose your configuration from those groups (For example, append hydra/job_logging=disabled - to command line) - - - $HYDRA_CONFIG_GROUPS - - - Use ''--cfg hydra'' to Show the Hydra config. - - ' - hydra_help: ??? 
- hydra_logging: - version: 1 - formatters: - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s' - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - root: - level: INFO - handlers: - - console - disable_existing_loggers: false - job_logging: - version: 1 - formatters: - simple: - format: '[%(asctime)s][%(name)s][%(levelname)s] - %(message)s' - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s] - - %(message)s' - log_colors: - DEBUG: purple - INFO: green - WARNING: yellow - ERROR: red - CRITICAL: red - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - file: - class: logging.FileHandler - formatter: simple - filename: ${hydra.job.name}.log - root: - level: INFO - handlers: - - console - - file - disable_existing_loggers: false - env: {} - mode: MULTIRUN - searchpath: [] - callbacks: {} - output_subdir: .hydra - overrides: - hydra: - - hydra.mode=MULTIRUN - task: - - benchmark.input_shapes.batch_size=1 - - benchmark.input_shapes.sequence_length=32 - job: - name: experiment - chdir: true - override_dirname: benchmark.input_shapes.batch_size=1,benchmark.input_shapes.sequence_length=32 - id: '1' - num: 1 - config_name: bge_seq_len_sweep_ort_cuda_o4.yaml - env_set: - CUDA_VISIBLE_DEVICES: '3' - env_copy: [] - config: - override_dirname: - kv_sep: '=' - item_sep: ',' - exclude_keys: [] - runtime: - version: 1.3.2 - version_base: '1.3' - cwd: /workspace/optimum-benchmark/examples/fast-embedders - config_sources: - - path: hydra.conf - schema: pkg - provider: hydra - - path: optimum_benchmark - schema: pkg - provider: main - - path: hydra_plugins.hydra_colorlog.conf - schema: pkg - provider: hydra-colorlog - - path: /workspace/optimum-benchmark/examples/fast-embedders/configs - schema: file - provider: command-line - - path: '' - schema: structured - provider: schema - output_dir: /workspace/optimum-benchmark/examples/fast-embedders/experiments/bge_seq_len_sweep_ort_cuda_o4_batch_size(1)_sequence_length(32)/1 - choices: - benchmark: inference - backend: onnxruntime - hydra/env: default - hydra/callbacks: null - hydra/job_logging: colorlog - hydra/hydra_logging: colorlog - hydra/hydra_help: default - hydra/help: default - hydra/sweeper: basic - hydra/launcher: basic - hydra/output: default - verbose: false diff --git a/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_cuda_o4_batch_size(1)_sequence_length(32)/1/.hydra/overrides.yaml b/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_cuda_o4_batch_size(1)_sequence_length(32)/1/.hydra/overrides.yaml deleted file mode 100644 index f47df8c7..00000000 --- a/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_cuda_o4_batch_size(1)_sequence_length(32)/1/.hydra/overrides.yaml +++ /dev/null @@ -1,2 +0,0 @@ -- benchmark.input_shapes.batch_size=1 -- benchmark.input_shapes.sequence_length=32 diff --git a/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_cuda_o4_batch_size(1)_sequence_length(32)/1/hydra_config.yaml b/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_cuda_o4_batch_size(1)_sequence_length(32)/1/hydra_config.yaml deleted file mode 100644 index 2cbedd4a..00000000 --- a/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_cuda_o4_batch_size(1)_sequence_length(32)/1/hydra_config.yaml +++ /dev/null @@ -1,80 +0,0 @@ -backend: - name: onnxruntime - version: ort-gpu:1.15.1 
- _target_: optimum_benchmark.backends.onnxruntime.backend.ORTBackend - seed: 42 - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: false - continous_isolation_check: false - delete_cache: false - no_weights: false - export: true - use_cache: true - use_merged: false - torch_dtype: null - provider: CUDAExecutionProvider - provider_options: - device_id: 0 - use_io_binding: false - session_options: - enable_profiling: false - optimization: false - optimization_config: {} - quantization: false - quantization_config: {} - calibration: false - calibration_config: {} - auto_optimization: O4 - auto_optimization_config: - disable_shape_inference: false - auto_quantization: null - auto_quantization_config: {} - use_inference_session: true - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 1 - sequence_length: 32 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: null - can_diffuse: false - can_generate: false - forward_kwargs: {} - generate_kwargs: {} -experiment_name: bge_seq_len_sweep_ort_cuda_o4 -model: BAAI/bge-base-en-v1.5 -device: cuda -task: feature-extraction -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.0 - transformers_version: 4.34.0.dev0 - accelerate_version: 0.23.0.dev0 - diffusers_version: 0.21.0.dev0 - python_version: 3.8.10 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB diff --git a/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_cuda_o4_batch_size(1)_sequence_length(32)/1/inference_results.csv b/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_cuda_o4_batch_size(1)_sequence_length(32)/1/inference_results.csv deleted file mode 100644 index 15a2e38c..00000000 --- a/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_cuda_o4_batch_size(1)_sequence_length(32)/1/inference_results.csv +++ /dev/null @@ -1,2 +0,0 @@ -,forward.latency(s),forward.throughput(samples/s),forward.peak_memory(MB) -0,0.00114,877.0,3536 diff --git a/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_cuda_o4_batch_size(1)_sequence_length(512)/5/.hydra/config.yaml b/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_cuda_o4_batch_size(1)_sequence_length(512)/5/.hydra/config.yaml deleted file mode 100644 index a247a6a7..00000000 --- a/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_cuda_o4_batch_size(1)_sequence_length(512)/5/.hydra/config.yaml +++ /dev/null @@ -1,79 +0,0 @@ -backend: - name: onnxruntime - version: ${onnxruntime_version:} - _target_: optimum_benchmark.backends.onnxruntime.backend.ORTBackend - seed: 42 - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: false - continous_isolation_check: false - delete_cache: false - no_weights: false - export: true - use_cache: true - use_merged: false - torch_dtype: null - provider: CUDAExecutionProvider - provider_options: - device_id: ${infer_device_id:${device}} - use_io_binding: ${io_bind:${device}} - session_options: - enable_profiling: ${is_profiling:${benchmark.name}} - optimization: false - optimization_config: {} - quantization: false - 
quantization_config: {} - calibration: false - calibration_config: {} - auto_optimization: O4 - auto_optimization_config: {} - auto_quantization: null - auto_quantization_config: {} - use_inference_session: ${is_inference:${benchmark.name}} - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 1 - sequence_length: 512 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: null - can_diffuse: ${can_diffuse:${task}} - can_generate: ${can_generate:${task}} - forward_kwargs: {} - generate_kwargs: {} -experiment_name: bge_seq_len_sweep_ort_cuda_o4 -model: BAAI/bge-base-en-v1.5 -device: cuda -task: feature-extraction -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.0 - transformers_version: 4.34.0.dev0 - accelerate_version: 0.23.0.dev0 - diffusers_version: 0.21.0.dev0 - python_version: 3.8.10 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB diff --git a/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_cuda_o4_batch_size(1)_sequence_length(512)/5/.hydra/hydra.yaml b/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_cuda_o4_batch_size(1)_sequence_length(512)/5/.hydra/hydra.yaml deleted file mode 100644 index 5ff5e201..00000000 --- a/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_cuda_o4_batch_size(1)_sequence_length(512)/5/.hydra/hydra.yaml +++ /dev/null @@ -1,175 +0,0 @@ -hydra: - run: - dir: experiments/${experiment_name}_batch_size(${benchmark.input_shapes.batch_size})_sequence_length(${benchmark.input_shapes.sequence_length}) - sweep: - dir: experiments/${experiment_name}_batch_size(${benchmark.input_shapes.batch_size})_sequence_length(${benchmark.input_shapes.sequence_length}) - subdir: ${hydra.job.num} - launcher: - _target_: hydra._internal.core_plugins.basic_launcher.BasicLauncher - sweeper: - _target_: hydra._internal.core_plugins.basic_sweeper.BasicSweeper - max_batch_size: null - params: - benchmark.input_shapes.batch_size: '1' - benchmark.input_shapes.sequence_length: 16,32,64,128,256,512 - help: - app_name: ${hydra.job.name} - header: '${hydra.help.app_name} is powered by Hydra. - - ' - footer: 'Powered by Hydra (https://hydra.cc) - - Use --hydra-help to view Hydra specific help - - ' - template: '${hydra.help.header} - - == Configuration groups == - - Compose your configuration from those groups (group=option) - - - $APP_CONFIG_GROUPS - - - == Config == - - Override anything in the config (foo.bar=value) - - - $CONFIG - - - ${hydra.help.footer} - - ' - hydra_help: - template: 'Hydra (${hydra.runtime.version}) - - See https://hydra.cc for more info. - - - == Flags == - - $FLAGS_HELP - - - == Configuration groups == - - Compose your configuration from those groups (For example, append hydra/job_logging=disabled - to command line) - - - $HYDRA_CONFIG_GROUPS - - - Use ''--cfg hydra'' to Show the Hydra config. - - ' - hydra_help: ??? 
- hydra_logging: - version: 1 - formatters: - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s' - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - root: - level: INFO - handlers: - - console - disable_existing_loggers: false - job_logging: - version: 1 - formatters: - simple: - format: '[%(asctime)s][%(name)s][%(levelname)s] - %(message)s' - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s] - - %(message)s' - log_colors: - DEBUG: purple - INFO: green - WARNING: yellow - ERROR: red - CRITICAL: red - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - file: - class: logging.FileHandler - formatter: simple - filename: ${hydra.job.name}.log - root: - level: INFO - handlers: - - console - - file - disable_existing_loggers: false - env: {} - mode: MULTIRUN - searchpath: [] - callbacks: {} - output_subdir: .hydra - overrides: - hydra: - - hydra.mode=MULTIRUN - task: - - benchmark.input_shapes.batch_size=1 - - benchmark.input_shapes.sequence_length=512 - job: - name: experiment - chdir: true - override_dirname: benchmark.input_shapes.batch_size=1,benchmark.input_shapes.sequence_length=512 - id: '5' - num: 5 - config_name: bge_seq_len_sweep_ort_cuda_o4.yaml - env_set: - CUDA_VISIBLE_DEVICES: '3' - env_copy: [] - config: - override_dirname: - kv_sep: '=' - item_sep: ',' - exclude_keys: [] - runtime: - version: 1.3.2 - version_base: '1.3' - cwd: /workspace/optimum-benchmark/examples/fast-embedders - config_sources: - - path: hydra.conf - schema: pkg - provider: hydra - - path: optimum_benchmark - schema: pkg - provider: main - - path: hydra_plugins.hydra_colorlog.conf - schema: pkg - provider: hydra-colorlog - - path: /workspace/optimum-benchmark/examples/fast-embedders/configs - schema: file - provider: command-line - - path: '' - schema: structured - provider: schema - output_dir: /workspace/optimum-benchmark/examples/fast-embedders/experiments/bge_seq_len_sweep_ort_cuda_o4_batch_size(1)_sequence_length(512)/5 - choices: - benchmark: inference - backend: onnxruntime - hydra/env: default - hydra/callbacks: null - hydra/job_logging: colorlog - hydra/hydra_logging: colorlog - hydra/hydra_help: default - hydra/help: default - hydra/sweeper: basic - hydra/launcher: basic - hydra/output: default - verbose: false diff --git a/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_cuda_o4_batch_size(1)_sequence_length(512)/5/.hydra/overrides.yaml b/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_cuda_o4_batch_size(1)_sequence_length(512)/5/.hydra/overrides.yaml deleted file mode 100644 index a262b1b5..00000000 --- a/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_cuda_o4_batch_size(1)_sequence_length(512)/5/.hydra/overrides.yaml +++ /dev/null @@ -1,2 +0,0 @@ -- benchmark.input_shapes.batch_size=1 -- benchmark.input_shapes.sequence_length=512 diff --git a/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_cuda_o4_batch_size(1)_sequence_length(512)/5/hydra_config.yaml b/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_cuda_o4_batch_size(1)_sequence_length(512)/5/hydra_config.yaml deleted file mode 100644 index 05f00cb8..00000000 --- a/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_cuda_o4_batch_size(1)_sequence_length(512)/5/hydra_config.yaml +++ /dev/null @@ -1,80 +0,0 @@ -backend: - name: onnxruntime - version: 
ort-gpu:1.15.1 - _target_: optimum_benchmark.backends.onnxruntime.backend.ORTBackend - seed: 42 - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: false - continous_isolation_check: false - delete_cache: false - no_weights: false - export: true - use_cache: true - use_merged: false - torch_dtype: null - provider: CUDAExecutionProvider - provider_options: - device_id: 0 - use_io_binding: false - session_options: - enable_profiling: false - optimization: false - optimization_config: {} - quantization: false - quantization_config: {} - calibration: false - calibration_config: {} - auto_optimization: O4 - auto_optimization_config: - disable_shape_inference: false - auto_quantization: null - auto_quantization_config: {} - use_inference_session: true - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 1 - sequence_length: 512 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: null - can_diffuse: false - can_generate: false - forward_kwargs: {} - generate_kwargs: {} -experiment_name: bge_seq_len_sweep_ort_cuda_o4 -model: BAAI/bge-base-en-v1.5 -device: cuda -task: feature-extraction -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.0 - transformers_version: 4.34.0.dev0 - accelerate_version: 0.23.0.dev0 - diffusers_version: 0.21.0.dev0 - python_version: 3.8.10 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB diff --git a/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_cuda_o4_batch_size(1)_sequence_length(512)/5/inference_results.csv b/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_cuda_o4_batch_size(1)_sequence_length(512)/5/inference_results.csv deleted file mode 100644 index 120e5f03..00000000 --- a/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_cuda_o4_batch_size(1)_sequence_length(512)/5/inference_results.csv +++ /dev/null @@ -1,2 +0,0 @@ -,forward.latency(s),forward.throughput(samples/s),forward.peak_memory(MB) -0,0.00201,498.0,3500 diff --git a/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_cuda_o4_batch_size(1)_sequence_length(64)/2/.hydra/config.yaml b/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_cuda_o4_batch_size(1)_sequence_length(64)/2/.hydra/config.yaml deleted file mode 100644 index 3a245689..00000000 --- a/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_cuda_o4_batch_size(1)_sequence_length(64)/2/.hydra/config.yaml +++ /dev/null @@ -1,79 +0,0 @@ -backend: - name: onnxruntime - version: ${onnxruntime_version:} - _target_: optimum_benchmark.backends.onnxruntime.backend.ORTBackend - seed: 42 - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: false - continous_isolation_check: false - delete_cache: false - no_weights: false - export: true - use_cache: true - use_merged: false - torch_dtype: null - provider: CUDAExecutionProvider - provider_options: - device_id: ${infer_device_id:${device}} - use_io_binding: ${io_bind:${device}} - session_options: - enable_profiling: ${is_profiling:${benchmark.name}} - optimization: false - optimization_config: {} - quantization: false - 
quantization_config: {} - calibration: false - calibration_config: {} - auto_optimization: O4 - auto_optimization_config: {} - auto_quantization: null - auto_quantization_config: {} - use_inference_session: ${is_inference:${benchmark.name}} - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 1 - sequence_length: 64 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: null - can_diffuse: ${can_diffuse:${task}} - can_generate: ${can_generate:${task}} - forward_kwargs: {} - generate_kwargs: {} -experiment_name: bge_seq_len_sweep_ort_cuda_o4 -model: BAAI/bge-base-en-v1.5 -device: cuda -task: feature-extraction -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.0 - transformers_version: 4.34.0.dev0 - accelerate_version: 0.23.0.dev0 - diffusers_version: 0.21.0.dev0 - python_version: 3.8.10 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB diff --git a/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_cuda_o4_batch_size(1)_sequence_length(64)/2/.hydra/hydra.yaml b/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_cuda_o4_batch_size(1)_sequence_length(64)/2/.hydra/hydra.yaml deleted file mode 100644 index 3025fc1d..00000000 --- a/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_cuda_o4_batch_size(1)_sequence_length(64)/2/.hydra/hydra.yaml +++ /dev/null @@ -1,175 +0,0 @@ -hydra: - run: - dir: experiments/${experiment_name}_batch_size(${benchmark.input_shapes.batch_size})_sequence_length(${benchmark.input_shapes.sequence_length}) - sweep: - dir: experiments/${experiment_name}_batch_size(${benchmark.input_shapes.batch_size})_sequence_length(${benchmark.input_shapes.sequence_length}) - subdir: ${hydra.job.num} - launcher: - _target_: hydra._internal.core_plugins.basic_launcher.BasicLauncher - sweeper: - _target_: hydra._internal.core_plugins.basic_sweeper.BasicSweeper - max_batch_size: null - params: - benchmark.input_shapes.batch_size: '1' - benchmark.input_shapes.sequence_length: 16,32,64,128,256,512 - help: - app_name: ${hydra.job.name} - header: '${hydra.help.app_name} is powered by Hydra. - - ' - footer: 'Powered by Hydra (https://hydra.cc) - - Use --hydra-help to view Hydra specific help - - ' - template: '${hydra.help.header} - - == Configuration groups == - - Compose your configuration from those groups (group=option) - - - $APP_CONFIG_GROUPS - - - == Config == - - Override anything in the config (foo.bar=value) - - - $CONFIG - - - ${hydra.help.footer} - - ' - hydra_help: - template: 'Hydra (${hydra.runtime.version}) - - See https://hydra.cc for more info. - - - == Flags == - - $FLAGS_HELP - - - == Configuration groups == - - Compose your configuration from those groups (For example, append hydra/job_logging=disabled - to command line) - - - $HYDRA_CONFIG_GROUPS - - - Use ''--cfg hydra'' to Show the Hydra config. - - ' - hydra_help: ??? 
- hydra_logging: - version: 1 - formatters: - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s' - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - root: - level: INFO - handlers: - - console - disable_existing_loggers: false - job_logging: - version: 1 - formatters: - simple: - format: '[%(asctime)s][%(name)s][%(levelname)s] - %(message)s' - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s] - - %(message)s' - log_colors: - DEBUG: purple - INFO: green - WARNING: yellow - ERROR: red - CRITICAL: red - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - file: - class: logging.FileHandler - formatter: simple - filename: ${hydra.job.name}.log - root: - level: INFO - handlers: - - console - - file - disable_existing_loggers: false - env: {} - mode: MULTIRUN - searchpath: [] - callbacks: {} - output_subdir: .hydra - overrides: - hydra: - - hydra.mode=MULTIRUN - task: - - benchmark.input_shapes.batch_size=1 - - benchmark.input_shapes.sequence_length=64 - job: - name: experiment - chdir: true - override_dirname: benchmark.input_shapes.batch_size=1,benchmark.input_shapes.sequence_length=64 - id: '2' - num: 2 - config_name: bge_seq_len_sweep_ort_cuda_o4.yaml - env_set: - CUDA_VISIBLE_DEVICES: '3' - env_copy: [] - config: - override_dirname: - kv_sep: '=' - item_sep: ',' - exclude_keys: [] - runtime: - version: 1.3.2 - version_base: '1.3' - cwd: /workspace/optimum-benchmark/examples/fast-embedders - config_sources: - - path: hydra.conf - schema: pkg - provider: hydra - - path: optimum_benchmark - schema: pkg - provider: main - - path: hydra_plugins.hydra_colorlog.conf - schema: pkg - provider: hydra-colorlog - - path: /workspace/optimum-benchmark/examples/fast-embedders/configs - schema: file - provider: command-line - - path: '' - schema: structured - provider: schema - output_dir: /workspace/optimum-benchmark/examples/fast-embedders/experiments/bge_seq_len_sweep_ort_cuda_o4_batch_size(1)_sequence_length(64)/2 - choices: - benchmark: inference - backend: onnxruntime - hydra/env: default - hydra/callbacks: null - hydra/job_logging: colorlog - hydra/hydra_logging: colorlog - hydra/hydra_help: default - hydra/help: default - hydra/sweeper: basic - hydra/launcher: basic - hydra/output: default - verbose: false diff --git a/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_cuda_o4_batch_size(1)_sequence_length(64)/2/.hydra/overrides.yaml b/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_cuda_o4_batch_size(1)_sequence_length(64)/2/.hydra/overrides.yaml deleted file mode 100644 index cba39030..00000000 --- a/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_cuda_o4_batch_size(1)_sequence_length(64)/2/.hydra/overrides.yaml +++ /dev/null @@ -1,2 +0,0 @@ -- benchmark.input_shapes.batch_size=1 -- benchmark.input_shapes.sequence_length=64 diff --git a/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_cuda_o4_batch_size(1)_sequence_length(64)/2/hydra_config.yaml b/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_cuda_o4_batch_size(1)_sequence_length(64)/2/hydra_config.yaml deleted file mode 100644 index 606f06c6..00000000 --- a/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_cuda_o4_batch_size(1)_sequence_length(64)/2/hydra_config.yaml +++ /dev/null @@ -1,80 +0,0 @@ -backend: - name: onnxruntime - version: ort-gpu:1.15.1 
- _target_: optimum_benchmark.backends.onnxruntime.backend.ORTBackend - seed: 42 - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: false - continous_isolation_check: false - delete_cache: false - no_weights: false - export: true - use_cache: true - use_merged: false - torch_dtype: null - provider: CUDAExecutionProvider - provider_options: - device_id: 0 - use_io_binding: false - session_options: - enable_profiling: false - optimization: false - optimization_config: {} - quantization: false - quantization_config: {} - calibration: false - calibration_config: {} - auto_optimization: O4 - auto_optimization_config: - disable_shape_inference: false - auto_quantization: null - auto_quantization_config: {} - use_inference_session: true - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 1 - sequence_length: 64 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: null - can_diffuse: false - can_generate: false - forward_kwargs: {} - generate_kwargs: {} -experiment_name: bge_seq_len_sweep_ort_cuda_o4 -model: BAAI/bge-base-en-v1.5 -device: cuda -task: feature-extraction -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.0 - transformers_version: 4.34.0.dev0 - accelerate_version: 0.23.0.dev0 - diffusers_version: 0.21.0.dev0 - python_version: 3.8.10 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB diff --git a/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_cuda_o4_batch_size(1)_sequence_length(64)/2/inference_results.csv b/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_cuda_o4_batch_size(1)_sequence_length(64)/2/inference_results.csv deleted file mode 100644 index e8e30549..00000000 --- a/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_cuda_o4_batch_size(1)_sequence_length(64)/2/inference_results.csv +++ /dev/null @@ -1,2 +0,0 @@ -,forward.latency(s),forward.throughput(samples/s),forward.peak_memory(MB) -0,0.00113,885.0,3496 diff --git a/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_cuda_o4_batch_size(None)_sequence_length(None)/multirun.yaml b/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_cuda_o4_batch_size(None)_sequence_length(None)/multirun.yaml deleted file mode 100644 index a1ea59cd..00000000 --- a/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_cuda_o4_batch_size(None)_sequence_length(None)/multirun.yaml +++ /dev/null @@ -1,252 +0,0 @@ -hydra: - run: - dir: experiments/${experiment_name}_batch_size(${benchmark.input_shapes.batch_size})_sequence_length(${benchmark.input_shapes.sequence_length}) - sweep: - dir: experiments/${experiment_name}_batch_size(${benchmark.input_shapes.batch_size})_sequence_length(${benchmark.input_shapes.sequence_length}) - subdir: ${hydra.job.num} - launcher: - _target_: hydra._internal.core_plugins.basic_launcher.BasicLauncher - sweeper: - _target_: hydra._internal.core_plugins.basic_sweeper.BasicSweeper - max_batch_size: null - params: - benchmark.input_shapes.batch_size: '1' - benchmark.input_shapes.sequence_length: 16,32,64,128,256,512 - help: - app_name: ${hydra.job.name} - header: 
'${hydra.help.app_name} is powered by Hydra. - - ' - footer: 'Powered by Hydra (https://hydra.cc) - - Use --hydra-help to view Hydra specific help - - ' - template: '${hydra.help.header} - - == Configuration groups == - - Compose your configuration from those groups (group=option) - - - $APP_CONFIG_GROUPS - - - == Config == - - Override anything in the config (foo.bar=value) - - - $CONFIG - - - ${hydra.help.footer} - - ' - hydra_help: - template: 'Hydra (${hydra.runtime.version}) - - See https://hydra.cc for more info. - - - == Flags == - - $FLAGS_HELP - - - == Configuration groups == - - Compose your configuration from those groups (For example, append hydra/job_logging=disabled - to command line) - - - $HYDRA_CONFIG_GROUPS - - - Use ''--cfg hydra'' to Show the Hydra config. - - ' - hydra_help: ??? - hydra_logging: - version: 1 - formatters: - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s' - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - root: - level: INFO - handlers: - - console - disable_existing_loggers: false - job_logging: - version: 1 - formatters: - simple: - format: '[%(asctime)s][%(name)s][%(levelname)s] - %(message)s' - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s] - - %(message)s' - log_colors: - DEBUG: purple - INFO: green - WARNING: yellow - ERROR: red - CRITICAL: red - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - file: - class: logging.FileHandler - formatter: simple - filename: ${hydra.job.name}.log - root: - level: INFO - handlers: - - console - - file - disable_existing_loggers: false - env: {} - mode: MULTIRUN - searchpath: [] - callbacks: {} - output_subdir: .hydra - overrides: - hydra: - - hydra.mode=MULTIRUN - task: [] - job: - name: experiment - chdir: true - override_dirname: '' - id: ??? - num: ??? - config_name: bge_seq_len_sweep_ort_cuda_o4.yaml - env_set: - CUDA_VISIBLE_DEVICES: '3' - env_copy: [] - config: - override_dirname: - kv_sep: '=' - item_sep: ',' - exclude_keys: [] - runtime: - version: 1.3.2 - version_base: '1.3' - cwd: /workspace/optimum-benchmark/examples/fast-embedders - config_sources: - - path: hydra.conf - schema: pkg - provider: hydra - - path: optimum_benchmark - schema: pkg - provider: main - - path: hydra_plugins.hydra_colorlog.conf - schema: pkg - provider: hydra-colorlog - - path: /workspace/optimum-benchmark/examples/fast-embedders/configs - schema: file - provider: command-line - - path: '' - schema: structured - provider: schema - output_dir: ??? 
- choices: - benchmark: inference - backend: onnxruntime - hydra/env: default - hydra/callbacks: null - hydra/job_logging: colorlog - hydra/hydra_logging: colorlog - hydra/hydra_help: default - hydra/help: default - hydra/sweeper: basic - hydra/launcher: basic - hydra/output: default - verbose: false -backend: - name: onnxruntime - version: ${onnxruntime_version:} - _target_: optimum_benchmark.backends.onnxruntime.backend.ORTBackend - seed: 42 - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: false - continous_isolation_check: false - delete_cache: false - no_weights: false - export: true - use_cache: true - use_merged: false - torch_dtype: null - provider: CUDAExecutionProvider - provider_options: - device_id: ${infer_device_id:${device}} - use_io_binding: ${io_bind:${device}} - session_options: - enable_profiling: ${is_profiling:${benchmark.name}} - optimization: false - optimization_config: {} - quantization: false - quantization_config: {} - calibration: false - calibration_config: {} - auto_optimization: O4 - auto_optimization_config: {} - auto_quantization: null - auto_quantization_config: {} - use_inference_session: ${is_inference:${benchmark.name}} - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: null - sequence_length: null - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: null - can_diffuse: ${can_diffuse:${task}} - can_generate: ${can_generate:${task}} - forward_kwargs: {} - generate_kwargs: {} -experiment_name: bge_seq_len_sweep_ort_cuda_o4 -model: BAAI/bge-base-en-v1.5 -device: cuda -task: feature-extraction -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.0 - transformers_version: 4.34.0.dev0 - accelerate_version: 0.23.0.dev0 - diffusers_version: 0.21.0.dev0 - python_version: 3.8.10 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB diff --git a/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_trt_fp16_batch_size(1)_sequence_length(128)/3/.hydra/config.yaml b/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_trt_fp16_batch_size(1)_sequence_length(128)/3/.hydra/config.yaml deleted file mode 100644 index d4b6b5d4..00000000 --- a/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_trt_fp16_batch_size(1)_sequence_length(128)/3/.hydra/config.yaml +++ /dev/null @@ -1,80 +0,0 @@ -backend: - name: onnxruntime - version: ${onnxruntime_version:} - _target_: optimum_benchmark.backends.onnxruntime.backend.ORTBackend - seed: 42 - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: false - continous_isolation_check: false - delete_cache: false - no_weights: false - export: true - use_cache: true - use_merged: false - torch_dtype: null - provider: TensorrtExecutionProvider - provider_options: - device_id: ${infer_device_id:${device}} - trt_fp16_enable: true - use_io_binding: ${io_bind:${device}} - session_options: - enable_profiling: ${is_profiling:${benchmark.name}} - optimization: false - optimization_config: {} - quantization: false - quantization_config: {} - calibration: false - 
calibration_config: {} - auto_optimization: null - auto_optimization_config: {} - auto_quantization: null - auto_quantization_config: {} - use_inference_session: ${is_inference:${benchmark.name}} - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 1 - sequence_length: 128 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: null - can_diffuse: ${can_diffuse:${task}} - can_generate: ${can_generate:${task}} - forward_kwargs: {} - generate_kwargs: {} -experiment_name: bge_seq_len_sweep_ort_trt_fp16 -model: BAAI/bge-base-en-v1.5 -device: cuda -task: feature-extraction -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.0 - transformers_version: 4.34.0.dev0 - accelerate_version: 0.23.0.dev0 - diffusers_version: 0.21.0.dev0 - python_version: 3.8.10 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB diff --git a/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_trt_fp16_batch_size(1)_sequence_length(128)/3/.hydra/hydra.yaml b/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_trt_fp16_batch_size(1)_sequence_length(128)/3/.hydra/hydra.yaml deleted file mode 100644 index c5333f4d..00000000 --- a/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_trt_fp16_batch_size(1)_sequence_length(128)/3/.hydra/hydra.yaml +++ /dev/null @@ -1,175 +0,0 @@ -hydra: - run: - dir: experiments/${experiment_name}_batch_size(${benchmark.input_shapes.batch_size})_sequence_length(${benchmark.input_shapes.sequence_length}) - sweep: - dir: experiments/${experiment_name}_batch_size(${benchmark.input_shapes.batch_size})_sequence_length(${benchmark.input_shapes.sequence_length}) - subdir: ${hydra.job.num} - launcher: - _target_: hydra._internal.core_plugins.basic_launcher.BasicLauncher - sweeper: - _target_: hydra._internal.core_plugins.basic_sweeper.BasicSweeper - max_batch_size: null - params: - benchmark.input_shapes.batch_size: '1' - benchmark.input_shapes.sequence_length: 16,32,64,128,256,512 - help: - app_name: ${hydra.job.name} - header: '${hydra.help.app_name} is powered by Hydra. - - ' - footer: 'Powered by Hydra (https://hydra.cc) - - Use --hydra-help to view Hydra specific help - - ' - template: '${hydra.help.header} - - == Configuration groups == - - Compose your configuration from those groups (group=option) - - - $APP_CONFIG_GROUPS - - - == Config == - - Override anything in the config (foo.bar=value) - - - $CONFIG - - - ${hydra.help.footer} - - ' - hydra_help: - template: 'Hydra (${hydra.runtime.version}) - - See https://hydra.cc for more info. - - - == Flags == - - $FLAGS_HELP - - - == Configuration groups == - - Compose your configuration from those groups (For example, append hydra/job_logging=disabled - to command line) - - - $HYDRA_CONFIG_GROUPS - - - Use ''--cfg hydra'' to Show the Hydra config. - - ' - hydra_help: ??? 
- hydra_logging: - version: 1 - formatters: - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s' - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - root: - level: INFO - handlers: - - console - disable_existing_loggers: false - job_logging: - version: 1 - formatters: - simple: - format: '[%(asctime)s][%(name)s][%(levelname)s] - %(message)s' - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s] - - %(message)s' - log_colors: - DEBUG: purple - INFO: green - WARNING: yellow - ERROR: red - CRITICAL: red - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - file: - class: logging.FileHandler - formatter: simple - filename: ${hydra.job.name}.log - root: - level: INFO - handlers: - - console - - file - disable_existing_loggers: false - env: {} - mode: MULTIRUN - searchpath: [] - callbacks: {} - output_subdir: .hydra - overrides: - hydra: - - hydra.mode=MULTIRUN - task: - - benchmark.input_shapes.batch_size=1 - - benchmark.input_shapes.sequence_length=128 - job: - name: experiment - chdir: true - override_dirname: benchmark.input_shapes.batch_size=1,benchmark.input_shapes.sequence_length=128 - id: '3' - num: 3 - config_name: bge_seq_len_sweep_ort_trt_fp16.yaml - env_set: - CUDA_VISIBLE_DEVICES: '3' - env_copy: [] - config: - override_dirname: - kv_sep: '=' - item_sep: ',' - exclude_keys: [] - runtime: - version: 1.3.2 - version_base: '1.3' - cwd: /workspace/optimum-benchmark/examples/fast-embedders - config_sources: - - path: hydra.conf - schema: pkg - provider: hydra - - path: optimum_benchmark - schema: pkg - provider: main - - path: hydra_plugins.hydra_colorlog.conf - schema: pkg - provider: hydra-colorlog - - path: /workspace/optimum-benchmark/examples/fast-embedders/configs - schema: file - provider: command-line - - path: '' - schema: structured - provider: schema - output_dir: /workspace/optimum-benchmark/examples/fast-embedders/experiments/bge_seq_len_sweep_ort_trt_fp16_batch_size(1)_sequence_length(128)/3 - choices: - benchmark: inference - backend: onnxruntime - hydra/env: default - hydra/callbacks: null - hydra/job_logging: colorlog - hydra/hydra_logging: colorlog - hydra/hydra_help: default - hydra/help: default - hydra/sweeper: basic - hydra/launcher: basic - hydra/output: default - verbose: false diff --git a/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_trt_fp16_batch_size(1)_sequence_length(128)/3/.hydra/overrides.yaml b/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_trt_fp16_batch_size(1)_sequence_length(128)/3/.hydra/overrides.yaml deleted file mode 100644 index 1c55c407..00000000 --- a/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_trt_fp16_batch_size(1)_sequence_length(128)/3/.hydra/overrides.yaml +++ /dev/null @@ -1,2 +0,0 @@ -- benchmark.input_shapes.batch_size=1 -- benchmark.input_shapes.sequence_length=128 diff --git a/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_trt_fp16_batch_size(1)_sequence_length(128)/3/hydra_config.yaml b/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_trt_fp16_batch_size(1)_sequence_length(128)/3/hydra_config.yaml deleted file mode 100644 index e40d334f..00000000 --- a/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_trt_fp16_batch_size(1)_sequence_length(128)/3/hydra_config.yaml +++ /dev/null @@ -1,82 +0,0 @@ -backend: - name: onnxruntime - 
version: ort-gpu:1.15.1 - _target_: optimum_benchmark.backends.onnxruntime.backend.ORTBackend - seed: 42 - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: false - continous_isolation_check: false - delete_cache: false - no_weights: false - export: true - use_cache: true - use_merged: false - torch_dtype: null - provider: TensorrtExecutionProvider - provider_options: - trt_engine_cache_enable: true - trt_engine_cache_path: tmp/trt_cache - device_id: 0 - trt_fp16_enable: true - use_io_binding: false - session_options: - enable_profiling: false - optimization: false - optimization_config: {} - quantization: false - quantization_config: {} - calibration: false - calibration_config: {} - auto_optimization: null - auto_optimization_config: {} - auto_quantization: null - auto_quantization_config: {} - use_inference_session: true - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 1 - sequence_length: 128 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: null - can_diffuse: false - can_generate: false - forward_kwargs: {} - generate_kwargs: {} -experiment_name: bge_seq_len_sweep_ort_trt_fp16 -model: BAAI/bge-base-en-v1.5 -device: cuda -task: feature-extraction -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.0 - transformers_version: 4.34.0.dev0 - accelerate_version: 0.23.0.dev0 - diffusers_version: 0.21.0.dev0 - python_version: 3.8.10 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB diff --git a/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_trt_fp16_batch_size(1)_sequence_length(128)/3/inference_results.csv b/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_trt_fp16_batch_size(1)_sequence_length(128)/3/inference_results.csv deleted file mode 100644 index 6cff36d1..00000000 --- a/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_trt_fp16_batch_size(1)_sequence_length(128)/3/inference_results.csv +++ /dev/null @@ -1,2 +0,0 @@ -,forward.latency(s),forward.throughput(samples/s),forward.peak_memory(MB) -0,0.00124,806.0,3869 diff --git a/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_trt_fp16_batch_size(1)_sequence_length(16)/0/.hydra/config.yaml b/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_trt_fp16_batch_size(1)_sequence_length(16)/0/.hydra/config.yaml deleted file mode 100644 index 4134efb9..00000000 --- a/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_trt_fp16_batch_size(1)_sequence_length(16)/0/.hydra/config.yaml +++ /dev/null @@ -1,80 +0,0 @@ -backend: - name: onnxruntime - version: ${onnxruntime_version:} - _target_: optimum_benchmark.backends.onnxruntime.backend.ORTBackend - seed: 42 - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: false - continous_isolation_check: false - delete_cache: false - no_weights: false - export: true - use_cache: true - use_merged: false - torch_dtype: null - provider: TensorrtExecutionProvider - provider_options: - device_id: ${infer_device_id:${device}} - trt_fp16_enable: true - use_io_binding: ${io_bind:${device}} - session_options: - 
enable_profiling: ${is_profiling:${benchmark.name}} - optimization: false - optimization_config: {} - quantization: false - quantization_config: {} - calibration: false - calibration_config: {} - auto_optimization: null - auto_optimization_config: {} - auto_quantization: null - auto_quantization_config: {} - use_inference_session: ${is_inference:${benchmark.name}} - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 1 - sequence_length: 16 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: null - can_diffuse: ${can_diffuse:${task}} - can_generate: ${can_generate:${task}} - forward_kwargs: {} - generate_kwargs: {} -experiment_name: bge_seq_len_sweep_ort_trt_fp16 -model: BAAI/bge-base-en-v1.5 -device: cuda -task: feature-extraction -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.0 - transformers_version: 4.34.0.dev0 - accelerate_version: 0.23.0.dev0 - diffusers_version: 0.21.0.dev0 - python_version: 3.8.10 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB diff --git a/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_trt_fp16_batch_size(1)_sequence_length(16)/0/.hydra/hydra.yaml b/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_trt_fp16_batch_size(1)_sequence_length(16)/0/.hydra/hydra.yaml deleted file mode 100644 index e6886a78..00000000 --- a/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_trt_fp16_batch_size(1)_sequence_length(16)/0/.hydra/hydra.yaml +++ /dev/null @@ -1,175 +0,0 @@ -hydra: - run: - dir: experiments/${experiment_name}_batch_size(${benchmark.input_shapes.batch_size})_sequence_length(${benchmark.input_shapes.sequence_length}) - sweep: - dir: experiments/${experiment_name}_batch_size(${benchmark.input_shapes.batch_size})_sequence_length(${benchmark.input_shapes.sequence_length}) - subdir: ${hydra.job.num} - launcher: - _target_: hydra._internal.core_plugins.basic_launcher.BasicLauncher - sweeper: - _target_: hydra._internal.core_plugins.basic_sweeper.BasicSweeper - max_batch_size: null - params: - benchmark.input_shapes.batch_size: '1' - benchmark.input_shapes.sequence_length: 16,32,64,128,256,512 - help: - app_name: ${hydra.job.name} - header: '${hydra.help.app_name} is powered by Hydra. - - ' - footer: 'Powered by Hydra (https://hydra.cc) - - Use --hydra-help to view Hydra specific help - - ' - template: '${hydra.help.header} - - == Configuration groups == - - Compose your configuration from those groups (group=option) - - - $APP_CONFIG_GROUPS - - - == Config == - - Override anything in the config (foo.bar=value) - - - $CONFIG - - - ${hydra.help.footer} - - ' - hydra_help: - template: 'Hydra (${hydra.runtime.version}) - - See https://hydra.cc for more info. - - - == Flags == - - $FLAGS_HELP - - - == Configuration groups == - - Compose your configuration from those groups (For example, append hydra/job_logging=disabled - to command line) - - - $HYDRA_CONFIG_GROUPS - - - Use ''--cfg hydra'' to Show the Hydra config. - - ' - hydra_help: ??? 
- hydra_logging: - version: 1 - formatters: - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s' - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - root: - level: INFO - handlers: - - console - disable_existing_loggers: false - job_logging: - version: 1 - formatters: - simple: - format: '[%(asctime)s][%(name)s][%(levelname)s] - %(message)s' - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s] - - %(message)s' - log_colors: - DEBUG: purple - INFO: green - WARNING: yellow - ERROR: red - CRITICAL: red - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - file: - class: logging.FileHandler - formatter: simple - filename: ${hydra.job.name}.log - root: - level: INFO - handlers: - - console - - file - disable_existing_loggers: false - env: {} - mode: MULTIRUN - searchpath: [] - callbacks: {} - output_subdir: .hydra - overrides: - hydra: - - hydra.mode=MULTIRUN - task: - - benchmark.input_shapes.batch_size=1 - - benchmark.input_shapes.sequence_length=16 - job: - name: experiment - chdir: true - override_dirname: benchmark.input_shapes.batch_size=1,benchmark.input_shapes.sequence_length=16 - id: '0' - num: 0 - config_name: bge_seq_len_sweep_ort_trt_fp16.yaml - env_set: - CUDA_VISIBLE_DEVICES: '3' - env_copy: [] - config: - override_dirname: - kv_sep: '=' - item_sep: ',' - exclude_keys: [] - runtime: - version: 1.3.2 - version_base: '1.3' - cwd: /workspace/optimum-benchmark/examples/fast-embedders - config_sources: - - path: hydra.conf - schema: pkg - provider: hydra - - path: optimum_benchmark - schema: pkg - provider: main - - path: hydra_plugins.hydra_colorlog.conf - schema: pkg - provider: hydra-colorlog - - path: /workspace/optimum-benchmark/examples/fast-embedders/configs - schema: file - provider: command-line - - path: '' - schema: structured - provider: schema - output_dir: /workspace/optimum-benchmark/examples/fast-embedders/experiments/bge_seq_len_sweep_ort_trt_fp16_batch_size(1)_sequence_length(16)/0 - choices: - benchmark: inference - backend: onnxruntime - hydra/env: default - hydra/callbacks: null - hydra/job_logging: colorlog - hydra/hydra_logging: colorlog - hydra/hydra_help: default - hydra/help: default - hydra/sweeper: basic - hydra/launcher: basic - hydra/output: default - verbose: false diff --git a/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_trt_fp16_batch_size(1)_sequence_length(16)/0/.hydra/overrides.yaml b/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_trt_fp16_batch_size(1)_sequence_length(16)/0/.hydra/overrides.yaml deleted file mode 100644 index 7d89e6a3..00000000 --- a/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_trt_fp16_batch_size(1)_sequence_length(16)/0/.hydra/overrides.yaml +++ /dev/null @@ -1,2 +0,0 @@ -- benchmark.input_shapes.batch_size=1 -- benchmark.input_shapes.sequence_length=16 diff --git a/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_trt_fp16_batch_size(1)_sequence_length(16)/0/hydra_config.yaml b/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_trt_fp16_batch_size(1)_sequence_length(16)/0/hydra_config.yaml deleted file mode 100644 index 01b97ae8..00000000 --- a/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_trt_fp16_batch_size(1)_sequence_length(16)/0/hydra_config.yaml +++ /dev/null @@ -1,82 +0,0 @@ -backend: - name: onnxruntime - version: 
ort-gpu:1.15.1 - _target_: optimum_benchmark.backends.onnxruntime.backend.ORTBackend - seed: 42 - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: false - continous_isolation_check: false - delete_cache: false - no_weights: false - export: true - use_cache: true - use_merged: false - torch_dtype: null - provider: TensorrtExecutionProvider - provider_options: - trt_engine_cache_enable: true - trt_engine_cache_path: tmp/trt_cache - device_id: 0 - trt_fp16_enable: true - use_io_binding: false - session_options: - enable_profiling: false - optimization: false - optimization_config: {} - quantization: false - quantization_config: {} - calibration: false - calibration_config: {} - auto_optimization: null - auto_optimization_config: {} - auto_quantization: null - auto_quantization_config: {} - use_inference_session: true - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 1 - sequence_length: 16 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: null - can_diffuse: false - can_generate: false - forward_kwargs: {} - generate_kwargs: {} -experiment_name: bge_seq_len_sweep_ort_trt_fp16 -model: BAAI/bge-base-en-v1.5 -device: cuda -task: feature-extraction -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.0 - transformers_version: 4.34.0.dev0 - accelerate_version: 0.23.0.dev0 - diffusers_version: 0.21.0.dev0 - python_version: 3.8.10 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB diff --git a/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_trt_fp16_batch_size(1)_sequence_length(16)/0/inference_results.csv b/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_trt_fp16_batch_size(1)_sequence_length(16)/0/inference_results.csv deleted file mode 100644 index 003987a6..00000000 --- a/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_trt_fp16_batch_size(1)_sequence_length(16)/0/inference_results.csv +++ /dev/null @@ -1,2 +0,0 @@ -,forward.latency(s),forward.throughput(samples/s),forward.peak_memory(MB) -0,0.000995,1010.0,3832 diff --git a/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_trt_fp16_batch_size(1)_sequence_length(256)/4/.hydra/config.yaml b/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_trt_fp16_batch_size(1)_sequence_length(256)/4/.hydra/config.yaml deleted file mode 100644 index b35d8efc..00000000 --- a/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_trt_fp16_batch_size(1)_sequence_length(256)/4/.hydra/config.yaml +++ /dev/null @@ -1,80 +0,0 @@ -backend: - name: onnxruntime - version: ${onnxruntime_version:} - _target_: optimum_benchmark.backends.onnxruntime.backend.ORTBackend - seed: 42 - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: false - continous_isolation_check: false - delete_cache: false - no_weights: false - export: true - use_cache: true - use_merged: false - torch_dtype: null - provider: TensorrtExecutionProvider - provider_options: - device_id: ${infer_device_id:${device}} - trt_fp16_enable: true - use_io_binding: ${io_bind:${device}} - session_options: - enable_profiling: 
${is_profiling:${benchmark.name}} - optimization: false - optimization_config: {} - quantization: false - quantization_config: {} - calibration: false - calibration_config: {} - auto_optimization: null - auto_optimization_config: {} - auto_quantization: null - auto_quantization_config: {} - use_inference_session: ${is_inference:${benchmark.name}} - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 1 - sequence_length: 256 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: null - can_diffuse: ${can_diffuse:${task}} - can_generate: ${can_generate:${task}} - forward_kwargs: {} - generate_kwargs: {} -experiment_name: bge_seq_len_sweep_ort_trt_fp16 -model: BAAI/bge-base-en-v1.5 -device: cuda -task: feature-extraction -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.0 - transformers_version: 4.34.0.dev0 - accelerate_version: 0.23.0.dev0 - diffusers_version: 0.21.0.dev0 - python_version: 3.8.10 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB diff --git a/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_trt_fp16_batch_size(1)_sequence_length(256)/4/.hydra/hydra.yaml b/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_trt_fp16_batch_size(1)_sequence_length(256)/4/.hydra/hydra.yaml deleted file mode 100644 index 515c308b..00000000 --- a/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_trt_fp16_batch_size(1)_sequence_length(256)/4/.hydra/hydra.yaml +++ /dev/null @@ -1,175 +0,0 @@ -hydra: - run: - dir: experiments/${experiment_name}_batch_size(${benchmark.input_shapes.batch_size})_sequence_length(${benchmark.input_shapes.sequence_length}) - sweep: - dir: experiments/${experiment_name}_batch_size(${benchmark.input_shapes.batch_size})_sequence_length(${benchmark.input_shapes.sequence_length}) - subdir: ${hydra.job.num} - launcher: - _target_: hydra._internal.core_plugins.basic_launcher.BasicLauncher - sweeper: - _target_: hydra._internal.core_plugins.basic_sweeper.BasicSweeper - max_batch_size: null - params: - benchmark.input_shapes.batch_size: '1' - benchmark.input_shapes.sequence_length: 16,32,64,128,256,512 - help: - app_name: ${hydra.job.name} - header: '${hydra.help.app_name} is powered by Hydra. - - ' - footer: 'Powered by Hydra (https://hydra.cc) - - Use --hydra-help to view Hydra specific help - - ' - template: '${hydra.help.header} - - == Configuration groups == - - Compose your configuration from those groups (group=option) - - - $APP_CONFIG_GROUPS - - - == Config == - - Override anything in the config (foo.bar=value) - - - $CONFIG - - - ${hydra.help.footer} - - ' - hydra_help: - template: 'Hydra (${hydra.runtime.version}) - - See https://hydra.cc for more info. - - - == Flags == - - $FLAGS_HELP - - - == Configuration groups == - - Compose your configuration from those groups (For example, append hydra/job_logging=disabled - to command line) - - - $HYDRA_CONFIG_GROUPS - - - Use ''--cfg hydra'' to Show the Hydra config. - - ' - hydra_help: ??? 
- hydra_logging: - version: 1 - formatters: - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s' - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - root: - level: INFO - handlers: - - console - disable_existing_loggers: false - job_logging: - version: 1 - formatters: - simple: - format: '[%(asctime)s][%(name)s][%(levelname)s] - %(message)s' - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s] - - %(message)s' - log_colors: - DEBUG: purple - INFO: green - WARNING: yellow - ERROR: red - CRITICAL: red - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - file: - class: logging.FileHandler - formatter: simple - filename: ${hydra.job.name}.log - root: - level: INFO - handlers: - - console - - file - disable_existing_loggers: false - env: {} - mode: MULTIRUN - searchpath: [] - callbacks: {} - output_subdir: .hydra - overrides: - hydra: - - hydra.mode=MULTIRUN - task: - - benchmark.input_shapes.batch_size=1 - - benchmark.input_shapes.sequence_length=256 - job: - name: experiment - chdir: true - override_dirname: benchmark.input_shapes.batch_size=1,benchmark.input_shapes.sequence_length=256 - id: '4' - num: 4 - config_name: bge_seq_len_sweep_ort_trt_fp16.yaml - env_set: - CUDA_VISIBLE_DEVICES: '3' - env_copy: [] - config: - override_dirname: - kv_sep: '=' - item_sep: ',' - exclude_keys: [] - runtime: - version: 1.3.2 - version_base: '1.3' - cwd: /workspace/optimum-benchmark/examples/fast-embedders - config_sources: - - path: hydra.conf - schema: pkg - provider: hydra - - path: optimum_benchmark - schema: pkg - provider: main - - path: hydra_plugins.hydra_colorlog.conf - schema: pkg - provider: hydra-colorlog - - path: /workspace/optimum-benchmark/examples/fast-embedders/configs - schema: file - provider: command-line - - path: '' - schema: structured - provider: schema - output_dir: /workspace/optimum-benchmark/examples/fast-embedders/experiments/bge_seq_len_sweep_ort_trt_fp16_batch_size(1)_sequence_length(256)/4 - choices: - benchmark: inference - backend: onnxruntime - hydra/env: default - hydra/callbacks: null - hydra/job_logging: colorlog - hydra/hydra_logging: colorlog - hydra/hydra_help: default - hydra/help: default - hydra/sweeper: basic - hydra/launcher: basic - hydra/output: default - verbose: false diff --git a/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_trt_fp16_batch_size(1)_sequence_length(256)/4/.hydra/overrides.yaml b/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_trt_fp16_batch_size(1)_sequence_length(256)/4/.hydra/overrides.yaml deleted file mode 100644 index 68453b03..00000000 --- a/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_trt_fp16_batch_size(1)_sequence_length(256)/4/.hydra/overrides.yaml +++ /dev/null @@ -1,2 +0,0 @@ -- benchmark.input_shapes.batch_size=1 -- benchmark.input_shapes.sequence_length=256 diff --git a/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_trt_fp16_batch_size(1)_sequence_length(256)/4/hydra_config.yaml b/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_trt_fp16_batch_size(1)_sequence_length(256)/4/hydra_config.yaml deleted file mode 100644 index fa81af8c..00000000 --- a/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_trt_fp16_batch_size(1)_sequence_length(256)/4/hydra_config.yaml +++ /dev/null @@ -1,82 +0,0 @@ -backend: - name: onnxruntime - 
version: ort-gpu:1.15.1 - _target_: optimum_benchmark.backends.onnxruntime.backend.ORTBackend - seed: 42 - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: false - continous_isolation_check: false - delete_cache: false - no_weights: false - export: true - use_cache: true - use_merged: false - torch_dtype: null - provider: TensorrtExecutionProvider - provider_options: - trt_engine_cache_enable: true - trt_engine_cache_path: tmp/trt_cache - device_id: 0 - trt_fp16_enable: true - use_io_binding: false - session_options: - enable_profiling: false - optimization: false - optimization_config: {} - quantization: false - quantization_config: {} - calibration: false - calibration_config: {} - auto_optimization: null - auto_optimization_config: {} - auto_quantization: null - auto_quantization_config: {} - use_inference_session: true - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 1 - sequence_length: 256 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: null - can_diffuse: false - can_generate: false - forward_kwargs: {} - generate_kwargs: {} -experiment_name: bge_seq_len_sweep_ort_trt_fp16 -model: BAAI/bge-base-en-v1.5 -device: cuda -task: feature-extraction -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.0 - transformers_version: 4.34.0.dev0 - accelerate_version: 0.23.0.dev0 - diffusers_version: 0.21.0.dev0 - python_version: 3.8.10 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB diff --git a/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_trt_fp16_batch_size(1)_sequence_length(256)/4/inference_results.csv b/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_trt_fp16_batch_size(1)_sequence_length(256)/4/inference_results.csv deleted file mode 100644 index 06f60d6f..00000000 --- a/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_trt_fp16_batch_size(1)_sequence_length(256)/4/inference_results.csv +++ /dev/null @@ -1,2 +0,0 @@ -,forward.latency(s),forward.throughput(samples/s),forward.peak_memory(MB) -0,0.00148,676.0,3888 diff --git a/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_trt_fp16_batch_size(1)_sequence_length(32)/1/.hydra/config.yaml b/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_trt_fp16_batch_size(1)_sequence_length(32)/1/.hydra/config.yaml deleted file mode 100644 index 430baf34..00000000 --- a/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_trt_fp16_batch_size(1)_sequence_length(32)/1/.hydra/config.yaml +++ /dev/null @@ -1,80 +0,0 @@ -backend: - name: onnxruntime - version: ${onnxruntime_version:} - _target_: optimum_benchmark.backends.onnxruntime.backend.ORTBackend - seed: 42 - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: false - continous_isolation_check: false - delete_cache: false - no_weights: false - export: true - use_cache: true - use_merged: false - torch_dtype: null - provider: TensorrtExecutionProvider - provider_options: - device_id: ${infer_device_id:${device}} - trt_fp16_enable: true - use_io_binding: ${io_bind:${device}} - session_options: - 
enable_profiling: ${is_profiling:${benchmark.name}} - optimization: false - optimization_config: {} - quantization: false - quantization_config: {} - calibration: false - calibration_config: {} - auto_optimization: null - auto_optimization_config: {} - auto_quantization: null - auto_quantization_config: {} - use_inference_session: ${is_inference:${benchmark.name}} - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 1 - sequence_length: 32 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: null - can_diffuse: ${can_diffuse:${task}} - can_generate: ${can_generate:${task}} - forward_kwargs: {} - generate_kwargs: {} -experiment_name: bge_seq_len_sweep_ort_trt_fp16 -model: BAAI/bge-base-en-v1.5 -device: cuda -task: feature-extraction -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.0 - transformers_version: 4.34.0.dev0 - accelerate_version: 0.23.0.dev0 - diffusers_version: 0.21.0.dev0 - python_version: 3.8.10 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB diff --git a/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_trt_fp16_batch_size(1)_sequence_length(32)/1/.hydra/hydra.yaml b/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_trt_fp16_batch_size(1)_sequence_length(32)/1/.hydra/hydra.yaml deleted file mode 100644 index b2a9e143..00000000 --- a/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_trt_fp16_batch_size(1)_sequence_length(32)/1/.hydra/hydra.yaml +++ /dev/null @@ -1,175 +0,0 @@ -hydra: - run: - dir: experiments/${experiment_name}_batch_size(${benchmark.input_shapes.batch_size})_sequence_length(${benchmark.input_shapes.sequence_length}) - sweep: - dir: experiments/${experiment_name}_batch_size(${benchmark.input_shapes.batch_size})_sequence_length(${benchmark.input_shapes.sequence_length}) - subdir: ${hydra.job.num} - launcher: - _target_: hydra._internal.core_plugins.basic_launcher.BasicLauncher - sweeper: - _target_: hydra._internal.core_plugins.basic_sweeper.BasicSweeper - max_batch_size: null - params: - benchmark.input_shapes.batch_size: '1' - benchmark.input_shapes.sequence_length: 16,32,64,128,256,512 - help: - app_name: ${hydra.job.name} - header: '${hydra.help.app_name} is powered by Hydra. - - ' - footer: 'Powered by Hydra (https://hydra.cc) - - Use --hydra-help to view Hydra specific help - - ' - template: '${hydra.help.header} - - == Configuration groups == - - Compose your configuration from those groups (group=option) - - - $APP_CONFIG_GROUPS - - - == Config == - - Override anything in the config (foo.bar=value) - - - $CONFIG - - - ${hydra.help.footer} - - ' - hydra_help: - template: 'Hydra (${hydra.runtime.version}) - - See https://hydra.cc for more info. - - - == Flags == - - $FLAGS_HELP - - - == Configuration groups == - - Compose your configuration from those groups (For example, append hydra/job_logging=disabled - to command line) - - - $HYDRA_CONFIG_GROUPS - - - Use ''--cfg hydra'' to Show the Hydra config. - - ' - hydra_help: ??? 
- hydra_logging: - version: 1 - formatters: - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s' - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - root: - level: INFO - handlers: - - console - disable_existing_loggers: false - job_logging: - version: 1 - formatters: - simple: - format: '[%(asctime)s][%(name)s][%(levelname)s] - %(message)s' - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s] - - %(message)s' - log_colors: - DEBUG: purple - INFO: green - WARNING: yellow - ERROR: red - CRITICAL: red - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - file: - class: logging.FileHandler - formatter: simple - filename: ${hydra.job.name}.log - root: - level: INFO - handlers: - - console - - file - disable_existing_loggers: false - env: {} - mode: MULTIRUN - searchpath: [] - callbacks: {} - output_subdir: .hydra - overrides: - hydra: - - hydra.mode=MULTIRUN - task: - - benchmark.input_shapes.batch_size=1 - - benchmark.input_shapes.sequence_length=32 - job: - name: experiment - chdir: true - override_dirname: benchmark.input_shapes.batch_size=1,benchmark.input_shapes.sequence_length=32 - id: '1' - num: 1 - config_name: bge_seq_len_sweep_ort_trt_fp16.yaml - env_set: - CUDA_VISIBLE_DEVICES: '3' - env_copy: [] - config: - override_dirname: - kv_sep: '=' - item_sep: ',' - exclude_keys: [] - runtime: - version: 1.3.2 - version_base: '1.3' - cwd: /workspace/optimum-benchmark/examples/fast-embedders - config_sources: - - path: hydra.conf - schema: pkg - provider: hydra - - path: optimum_benchmark - schema: pkg - provider: main - - path: hydra_plugins.hydra_colorlog.conf - schema: pkg - provider: hydra-colorlog - - path: /workspace/optimum-benchmark/examples/fast-embedders/configs - schema: file - provider: command-line - - path: '' - schema: structured - provider: schema - output_dir: /workspace/optimum-benchmark/examples/fast-embedders/experiments/bge_seq_len_sweep_ort_trt_fp16_batch_size(1)_sequence_length(32)/1 - choices: - benchmark: inference - backend: onnxruntime - hydra/env: default - hydra/callbacks: null - hydra/job_logging: colorlog - hydra/hydra_logging: colorlog - hydra/hydra_help: default - hydra/help: default - hydra/sweeper: basic - hydra/launcher: basic - hydra/output: default - verbose: false diff --git a/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_trt_fp16_batch_size(1)_sequence_length(32)/1/.hydra/overrides.yaml b/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_trt_fp16_batch_size(1)_sequence_length(32)/1/.hydra/overrides.yaml deleted file mode 100644 index f47df8c7..00000000 --- a/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_trt_fp16_batch_size(1)_sequence_length(32)/1/.hydra/overrides.yaml +++ /dev/null @@ -1,2 +0,0 @@ -- benchmark.input_shapes.batch_size=1 -- benchmark.input_shapes.sequence_length=32 diff --git a/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_trt_fp16_batch_size(1)_sequence_length(32)/1/hydra_config.yaml b/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_trt_fp16_batch_size(1)_sequence_length(32)/1/hydra_config.yaml deleted file mode 100644 index 42dbfbb1..00000000 --- a/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_trt_fp16_batch_size(1)_sequence_length(32)/1/hydra_config.yaml +++ /dev/null @@ -1,82 +0,0 @@ -backend: - name: onnxruntime - version: 
ort-gpu:1.15.1 - _target_: optimum_benchmark.backends.onnxruntime.backend.ORTBackend - seed: 42 - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: false - continous_isolation_check: false - delete_cache: false - no_weights: false - export: true - use_cache: true - use_merged: false - torch_dtype: null - provider: TensorrtExecutionProvider - provider_options: - trt_engine_cache_enable: true - trt_engine_cache_path: tmp/trt_cache - device_id: 0 - trt_fp16_enable: true - use_io_binding: false - session_options: - enable_profiling: false - optimization: false - optimization_config: {} - quantization: false - quantization_config: {} - calibration: false - calibration_config: {} - auto_optimization: null - auto_optimization_config: {} - auto_quantization: null - auto_quantization_config: {} - use_inference_session: true - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 1 - sequence_length: 32 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: null - can_diffuse: false - can_generate: false - forward_kwargs: {} - generate_kwargs: {} -experiment_name: bge_seq_len_sweep_ort_trt_fp16 -model: BAAI/bge-base-en-v1.5 -device: cuda -task: feature-extraction -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.0 - transformers_version: 4.34.0.dev0 - accelerate_version: 0.23.0.dev0 - diffusers_version: 0.21.0.dev0 - python_version: 3.8.10 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB diff --git a/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_trt_fp16_batch_size(1)_sequence_length(32)/1/inference_results.csv b/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_trt_fp16_batch_size(1)_sequence_length(32)/1/inference_results.csv deleted file mode 100644 index 57b1a868..00000000 --- a/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_trt_fp16_batch_size(1)_sequence_length(32)/1/inference_results.csv +++ /dev/null @@ -1,2 +0,0 @@ -,forward.latency(s),forward.throughput(samples/s),forward.peak_memory(MB) -0,0.00105,952.0,3844 diff --git a/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_trt_fp16_batch_size(1)_sequence_length(512)/5/.hydra/config.yaml b/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_trt_fp16_batch_size(1)_sequence_length(512)/5/.hydra/config.yaml deleted file mode 100644 index adbde47a..00000000 --- a/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_trt_fp16_batch_size(1)_sequence_length(512)/5/.hydra/config.yaml +++ /dev/null @@ -1,80 +0,0 @@ -backend: - name: onnxruntime - version: ${onnxruntime_version:} - _target_: optimum_benchmark.backends.onnxruntime.backend.ORTBackend - seed: 42 - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: false - continous_isolation_check: false - delete_cache: false - no_weights: false - export: true - use_cache: true - use_merged: false - torch_dtype: null - provider: TensorrtExecutionProvider - provider_options: - device_id: ${infer_device_id:${device}} - trt_fp16_enable: true - use_io_binding: ${io_bind:${device}} - session_options: - enable_profiling: 
${is_profiling:${benchmark.name}} - optimization: false - optimization_config: {} - quantization: false - quantization_config: {} - calibration: false - calibration_config: {} - auto_optimization: null - auto_optimization_config: {} - auto_quantization: null - auto_quantization_config: {} - use_inference_session: ${is_inference:${benchmark.name}} - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 1 - sequence_length: 512 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: null - can_diffuse: ${can_diffuse:${task}} - can_generate: ${can_generate:${task}} - forward_kwargs: {} - generate_kwargs: {} -experiment_name: bge_seq_len_sweep_ort_trt_fp16 -model: BAAI/bge-base-en-v1.5 -device: cuda -task: feature-extraction -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.0 - transformers_version: 4.34.0.dev0 - accelerate_version: 0.23.0.dev0 - diffusers_version: 0.21.0.dev0 - python_version: 3.8.10 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB diff --git a/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_trt_fp16_batch_size(1)_sequence_length(512)/5/.hydra/hydra.yaml b/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_trt_fp16_batch_size(1)_sequence_length(512)/5/.hydra/hydra.yaml deleted file mode 100644 index cb6168be..00000000 --- a/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_trt_fp16_batch_size(1)_sequence_length(512)/5/.hydra/hydra.yaml +++ /dev/null @@ -1,175 +0,0 @@ -hydra: - run: - dir: experiments/${experiment_name}_batch_size(${benchmark.input_shapes.batch_size})_sequence_length(${benchmark.input_shapes.sequence_length}) - sweep: - dir: experiments/${experiment_name}_batch_size(${benchmark.input_shapes.batch_size})_sequence_length(${benchmark.input_shapes.sequence_length}) - subdir: ${hydra.job.num} - launcher: - _target_: hydra._internal.core_plugins.basic_launcher.BasicLauncher - sweeper: - _target_: hydra._internal.core_plugins.basic_sweeper.BasicSweeper - max_batch_size: null - params: - benchmark.input_shapes.batch_size: '1' - benchmark.input_shapes.sequence_length: 16,32,64,128,256,512 - help: - app_name: ${hydra.job.name} - header: '${hydra.help.app_name} is powered by Hydra. - - ' - footer: 'Powered by Hydra (https://hydra.cc) - - Use --hydra-help to view Hydra specific help - - ' - template: '${hydra.help.header} - - == Configuration groups == - - Compose your configuration from those groups (group=option) - - - $APP_CONFIG_GROUPS - - - == Config == - - Override anything in the config (foo.bar=value) - - - $CONFIG - - - ${hydra.help.footer} - - ' - hydra_help: - template: 'Hydra (${hydra.runtime.version}) - - See https://hydra.cc for more info. - - - == Flags == - - $FLAGS_HELP - - - == Configuration groups == - - Compose your configuration from those groups (For example, append hydra/job_logging=disabled - to command line) - - - $HYDRA_CONFIG_GROUPS - - - Use ''--cfg hydra'' to Show the Hydra config. - - ' - hydra_help: ??? 
- hydra_logging: - version: 1 - formatters: - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s' - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - root: - level: INFO - handlers: - - console - disable_existing_loggers: false - job_logging: - version: 1 - formatters: - simple: - format: '[%(asctime)s][%(name)s][%(levelname)s] - %(message)s' - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s] - - %(message)s' - log_colors: - DEBUG: purple - INFO: green - WARNING: yellow - ERROR: red - CRITICAL: red - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - file: - class: logging.FileHandler - formatter: simple - filename: ${hydra.job.name}.log - root: - level: INFO - handlers: - - console - - file - disable_existing_loggers: false - env: {} - mode: MULTIRUN - searchpath: [] - callbacks: {} - output_subdir: .hydra - overrides: - hydra: - - hydra.mode=MULTIRUN - task: - - benchmark.input_shapes.batch_size=1 - - benchmark.input_shapes.sequence_length=512 - job: - name: experiment - chdir: true - override_dirname: benchmark.input_shapes.batch_size=1,benchmark.input_shapes.sequence_length=512 - id: '5' - num: 5 - config_name: bge_seq_len_sweep_ort_trt_fp16.yaml - env_set: - CUDA_VISIBLE_DEVICES: '3' - env_copy: [] - config: - override_dirname: - kv_sep: '=' - item_sep: ',' - exclude_keys: [] - runtime: - version: 1.3.2 - version_base: '1.3' - cwd: /workspace/optimum-benchmark/examples/fast-embedders - config_sources: - - path: hydra.conf - schema: pkg - provider: hydra - - path: optimum_benchmark - schema: pkg - provider: main - - path: hydra_plugins.hydra_colorlog.conf - schema: pkg - provider: hydra-colorlog - - path: /workspace/optimum-benchmark/examples/fast-embedders/configs - schema: file - provider: command-line - - path: '' - schema: structured - provider: schema - output_dir: /workspace/optimum-benchmark/examples/fast-embedders/experiments/bge_seq_len_sweep_ort_trt_fp16_batch_size(1)_sequence_length(512)/5 - choices: - benchmark: inference - backend: onnxruntime - hydra/env: default - hydra/callbacks: null - hydra/job_logging: colorlog - hydra/hydra_logging: colorlog - hydra/hydra_help: default - hydra/help: default - hydra/sweeper: basic - hydra/launcher: basic - hydra/output: default - verbose: false diff --git a/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_trt_fp16_batch_size(1)_sequence_length(512)/5/.hydra/overrides.yaml b/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_trt_fp16_batch_size(1)_sequence_length(512)/5/.hydra/overrides.yaml deleted file mode 100644 index a262b1b5..00000000 --- a/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_trt_fp16_batch_size(1)_sequence_length(512)/5/.hydra/overrides.yaml +++ /dev/null @@ -1,2 +0,0 @@ -- benchmark.input_shapes.batch_size=1 -- benchmark.input_shapes.sequence_length=512 diff --git a/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_trt_fp16_batch_size(1)_sequence_length(512)/5/hydra_config.yaml b/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_trt_fp16_batch_size(1)_sequence_length(512)/5/hydra_config.yaml deleted file mode 100644 index 2a5170bd..00000000 --- a/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_trt_fp16_batch_size(1)_sequence_length(512)/5/hydra_config.yaml +++ /dev/null @@ -1,82 +0,0 @@ -backend: - name: onnxruntime - 
version: ort-gpu:1.15.1 - _target_: optimum_benchmark.backends.onnxruntime.backend.ORTBackend - seed: 42 - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: false - continous_isolation_check: false - delete_cache: false - no_weights: false - export: true - use_cache: true - use_merged: false - torch_dtype: null - provider: TensorrtExecutionProvider - provider_options: - trt_engine_cache_enable: true - trt_engine_cache_path: tmp/trt_cache - device_id: 0 - trt_fp16_enable: true - use_io_binding: false - session_options: - enable_profiling: false - optimization: false - optimization_config: {} - quantization: false - quantization_config: {} - calibration: false - calibration_config: {} - auto_optimization: null - auto_optimization_config: {} - auto_quantization: null - auto_quantization_config: {} - use_inference_session: true - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 1 - sequence_length: 512 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: null - can_diffuse: false - can_generate: false - forward_kwargs: {} - generate_kwargs: {} -experiment_name: bge_seq_len_sweep_ort_trt_fp16 -model: BAAI/bge-base-en-v1.5 -device: cuda -task: feature-extraction -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.0 - transformers_version: 4.34.0.dev0 - accelerate_version: 0.23.0.dev0 - diffusers_version: 0.21.0.dev0 - python_version: 3.8.10 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB diff --git a/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_trt_fp16_batch_size(1)_sequence_length(512)/5/inference_results.csv b/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_trt_fp16_batch_size(1)_sequence_length(512)/5/inference_results.csv deleted file mode 100644 index cc6a0421..00000000 --- a/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_trt_fp16_batch_size(1)_sequence_length(512)/5/inference_results.csv +++ /dev/null @@ -1,2 +0,0 @@ -,forward.latency(s),forward.throughput(samples/s),forward.peak_memory(MB) -0,0.00197,508.0,3920 diff --git a/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_trt_fp16_batch_size(1)_sequence_length(64)/2/.hydra/config.yaml b/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_trt_fp16_batch_size(1)_sequence_length(64)/2/.hydra/config.yaml deleted file mode 100644 index 1cfd0f78..00000000 --- a/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_trt_fp16_batch_size(1)_sequence_length(64)/2/.hydra/config.yaml +++ /dev/null @@ -1,80 +0,0 @@ -backend: - name: onnxruntime - version: ${onnxruntime_version:} - _target_: optimum_benchmark.backends.onnxruntime.backend.ORTBackend - seed: 42 - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: false - continous_isolation_check: false - delete_cache: false - no_weights: false - export: true - use_cache: true - use_merged: false - torch_dtype: null - provider: TensorrtExecutionProvider - provider_options: - device_id: ${infer_device_id:${device}} - trt_fp16_enable: true - use_io_binding: ${io_bind:${device}} - session_options: - 
enable_profiling: ${is_profiling:${benchmark.name}} - optimization: false - optimization_config: {} - quantization: false - quantization_config: {} - calibration: false - calibration_config: {} - auto_optimization: null - auto_optimization_config: {} - auto_quantization: null - auto_quantization_config: {} - use_inference_session: ${is_inference:${benchmark.name}} - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 1 - sequence_length: 64 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: null - can_diffuse: ${can_diffuse:${task}} - can_generate: ${can_generate:${task}} - forward_kwargs: {} - generate_kwargs: {} -experiment_name: bge_seq_len_sweep_ort_trt_fp16 -model: BAAI/bge-base-en-v1.5 -device: cuda -task: feature-extraction -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.0 - transformers_version: 4.34.0.dev0 - accelerate_version: 0.23.0.dev0 - diffusers_version: 0.21.0.dev0 - python_version: 3.8.10 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB diff --git a/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_trt_fp16_batch_size(1)_sequence_length(64)/2/.hydra/hydra.yaml b/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_trt_fp16_batch_size(1)_sequence_length(64)/2/.hydra/hydra.yaml deleted file mode 100644 index 7d5c355b..00000000 --- a/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_trt_fp16_batch_size(1)_sequence_length(64)/2/.hydra/hydra.yaml +++ /dev/null @@ -1,175 +0,0 @@ -hydra: - run: - dir: experiments/${experiment_name}_batch_size(${benchmark.input_shapes.batch_size})_sequence_length(${benchmark.input_shapes.sequence_length}) - sweep: - dir: experiments/${experiment_name}_batch_size(${benchmark.input_shapes.batch_size})_sequence_length(${benchmark.input_shapes.sequence_length}) - subdir: ${hydra.job.num} - launcher: - _target_: hydra._internal.core_plugins.basic_launcher.BasicLauncher - sweeper: - _target_: hydra._internal.core_plugins.basic_sweeper.BasicSweeper - max_batch_size: null - params: - benchmark.input_shapes.batch_size: '1' - benchmark.input_shapes.sequence_length: 16,32,64,128,256,512 - help: - app_name: ${hydra.job.name} - header: '${hydra.help.app_name} is powered by Hydra. - - ' - footer: 'Powered by Hydra (https://hydra.cc) - - Use --hydra-help to view Hydra specific help - - ' - template: '${hydra.help.header} - - == Configuration groups == - - Compose your configuration from those groups (group=option) - - - $APP_CONFIG_GROUPS - - - == Config == - - Override anything in the config (foo.bar=value) - - - $CONFIG - - - ${hydra.help.footer} - - ' - hydra_help: - template: 'Hydra (${hydra.runtime.version}) - - See https://hydra.cc for more info. - - - == Flags == - - $FLAGS_HELP - - - == Configuration groups == - - Compose your configuration from those groups (For example, append hydra/job_logging=disabled - to command line) - - - $HYDRA_CONFIG_GROUPS - - - Use ''--cfg hydra'' to Show the Hydra config. - - ' - hydra_help: ??? 
- hydra_logging: - version: 1 - formatters: - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s' - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - root: - level: INFO - handlers: - - console - disable_existing_loggers: false - job_logging: - version: 1 - formatters: - simple: - format: '[%(asctime)s][%(name)s][%(levelname)s] - %(message)s' - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s] - - %(message)s' - log_colors: - DEBUG: purple - INFO: green - WARNING: yellow - ERROR: red - CRITICAL: red - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - file: - class: logging.FileHandler - formatter: simple - filename: ${hydra.job.name}.log - root: - level: INFO - handlers: - - console - - file - disable_existing_loggers: false - env: {} - mode: MULTIRUN - searchpath: [] - callbacks: {} - output_subdir: .hydra - overrides: - hydra: - - hydra.mode=MULTIRUN - task: - - benchmark.input_shapes.batch_size=1 - - benchmark.input_shapes.sequence_length=64 - job: - name: experiment - chdir: true - override_dirname: benchmark.input_shapes.batch_size=1,benchmark.input_shapes.sequence_length=64 - id: '2' - num: 2 - config_name: bge_seq_len_sweep_ort_trt_fp16.yaml - env_set: - CUDA_VISIBLE_DEVICES: '3' - env_copy: [] - config: - override_dirname: - kv_sep: '=' - item_sep: ',' - exclude_keys: [] - runtime: - version: 1.3.2 - version_base: '1.3' - cwd: /workspace/optimum-benchmark/examples/fast-embedders - config_sources: - - path: hydra.conf - schema: pkg - provider: hydra - - path: optimum_benchmark - schema: pkg - provider: main - - path: hydra_plugins.hydra_colorlog.conf - schema: pkg - provider: hydra-colorlog - - path: /workspace/optimum-benchmark/examples/fast-embedders/configs - schema: file - provider: command-line - - path: '' - schema: structured - provider: schema - output_dir: /workspace/optimum-benchmark/examples/fast-embedders/experiments/bge_seq_len_sweep_ort_trt_fp16_batch_size(1)_sequence_length(64)/2 - choices: - benchmark: inference - backend: onnxruntime - hydra/env: default - hydra/callbacks: null - hydra/job_logging: colorlog - hydra/hydra_logging: colorlog - hydra/hydra_help: default - hydra/help: default - hydra/sweeper: basic - hydra/launcher: basic - hydra/output: default - verbose: false diff --git a/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_trt_fp16_batch_size(1)_sequence_length(64)/2/.hydra/overrides.yaml b/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_trt_fp16_batch_size(1)_sequence_length(64)/2/.hydra/overrides.yaml deleted file mode 100644 index cba39030..00000000 --- a/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_trt_fp16_batch_size(1)_sequence_length(64)/2/.hydra/overrides.yaml +++ /dev/null @@ -1,2 +0,0 @@ -- benchmark.input_shapes.batch_size=1 -- benchmark.input_shapes.sequence_length=64 diff --git a/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_trt_fp16_batch_size(1)_sequence_length(64)/2/hydra_config.yaml b/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_trt_fp16_batch_size(1)_sequence_length(64)/2/hydra_config.yaml deleted file mode 100644 index ad3cfaeb..00000000 --- a/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_trt_fp16_batch_size(1)_sequence_length(64)/2/hydra_config.yaml +++ /dev/null @@ -1,82 +0,0 @@ -backend: - name: onnxruntime - version: 
ort-gpu:1.15.1 - _target_: optimum_benchmark.backends.onnxruntime.backend.ORTBackend - seed: 42 - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: false - continous_isolation_check: false - delete_cache: false - no_weights: false - export: true - use_cache: true - use_merged: false - torch_dtype: null - provider: TensorrtExecutionProvider - provider_options: - trt_engine_cache_enable: true - trt_engine_cache_path: tmp/trt_cache - device_id: 0 - trt_fp16_enable: true - use_io_binding: false - session_options: - enable_profiling: false - optimization: false - optimization_config: {} - quantization: false - quantization_config: {} - calibration: false - calibration_config: {} - auto_optimization: null - auto_optimization_config: {} - auto_quantization: null - auto_quantization_config: {} - use_inference_session: true - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 1 - sequence_length: 64 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: null - can_diffuse: false - can_generate: false - forward_kwargs: {} - generate_kwargs: {} -experiment_name: bge_seq_len_sweep_ort_trt_fp16 -model: BAAI/bge-base-en-v1.5 -device: cuda -task: feature-extraction -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.0 - transformers_version: 4.34.0.dev0 - accelerate_version: 0.23.0.dev0 - diffusers_version: 0.21.0.dev0 - python_version: 3.8.10 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB diff --git a/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_trt_fp16_batch_size(1)_sequence_length(64)/2/inference_results.csv b/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_trt_fp16_batch_size(1)_sequence_length(64)/2/inference_results.csv deleted file mode 100644 index 26698cd4..00000000 --- a/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_trt_fp16_batch_size(1)_sequence_length(64)/2/inference_results.csv +++ /dev/null @@ -1,2 +0,0 @@ -,forward.latency(s),forward.throughput(samples/s),forward.peak_memory(MB) -0,0.0011,909.0,3857 diff --git a/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_trt_fp16_batch_size(None)_sequence_length(None)/multirun.yaml b/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_trt_fp16_batch_size(None)_sequence_length(None)/multirun.yaml deleted file mode 100644 index f58dc5f7..00000000 --- a/examples/fast-mteb/experiments/bge_seq_len_sweep_ort_trt_fp16_batch_size(None)_sequence_length(None)/multirun.yaml +++ /dev/null @@ -1,253 +0,0 @@ -hydra: - run: - dir: experiments/${experiment_name}_batch_size(${benchmark.input_shapes.batch_size})_sequence_length(${benchmark.input_shapes.sequence_length}) - sweep: - dir: experiments/${experiment_name}_batch_size(${benchmark.input_shapes.batch_size})_sequence_length(${benchmark.input_shapes.sequence_length}) - subdir: ${hydra.job.num} - launcher: - _target_: hydra._internal.core_plugins.basic_launcher.BasicLauncher - sweeper: - _target_: hydra._internal.core_plugins.basic_sweeper.BasicSweeper - max_batch_size: null - params: - benchmark.input_shapes.batch_size: '1' - 
benchmark.input_shapes.sequence_length: 16,32,64,128,256,512 - help: - app_name: ${hydra.job.name} - header: '${hydra.help.app_name} is powered by Hydra. - - ' - footer: 'Powered by Hydra (https://hydra.cc) - - Use --hydra-help to view Hydra specific help - - ' - template: '${hydra.help.header} - - == Configuration groups == - - Compose your configuration from those groups (group=option) - - - $APP_CONFIG_GROUPS - - - == Config == - - Override anything in the config (foo.bar=value) - - - $CONFIG - - - ${hydra.help.footer} - - ' - hydra_help: - template: 'Hydra (${hydra.runtime.version}) - - See https://hydra.cc for more info. - - - == Flags == - - $FLAGS_HELP - - - == Configuration groups == - - Compose your configuration from those groups (For example, append hydra/job_logging=disabled - to command line) - - - $HYDRA_CONFIG_GROUPS - - - Use ''--cfg hydra'' to Show the Hydra config. - - ' - hydra_help: ??? - hydra_logging: - version: 1 - formatters: - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s' - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - root: - level: INFO - handlers: - - console - disable_existing_loggers: false - job_logging: - version: 1 - formatters: - simple: - format: '[%(asctime)s][%(name)s][%(levelname)s] - %(message)s' - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s] - - %(message)s' - log_colors: - DEBUG: purple - INFO: green - WARNING: yellow - ERROR: red - CRITICAL: red - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - file: - class: logging.FileHandler - formatter: simple - filename: ${hydra.job.name}.log - root: - level: INFO - handlers: - - console - - file - disable_existing_loggers: false - env: {} - mode: MULTIRUN - searchpath: [] - callbacks: {} - output_subdir: .hydra - overrides: - hydra: - - hydra.mode=MULTIRUN - task: [] - job: - name: experiment - chdir: true - override_dirname: '' - id: ??? - num: ??? - config_name: bge_seq_len_sweep_ort_trt_fp16.yaml - env_set: - CUDA_VISIBLE_DEVICES: '3' - env_copy: [] - config: - override_dirname: - kv_sep: '=' - item_sep: ',' - exclude_keys: [] - runtime: - version: 1.3.2 - version_base: '1.3' - cwd: /workspace/optimum-benchmark/examples/fast-embedders - config_sources: - - path: hydra.conf - schema: pkg - provider: hydra - - path: optimum_benchmark - schema: pkg - provider: main - - path: hydra_plugins.hydra_colorlog.conf - schema: pkg - provider: hydra-colorlog - - path: /workspace/optimum-benchmark/examples/fast-embedders/configs - schema: file - provider: command-line - - path: '' - schema: structured - provider: schema - output_dir: ??? 
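Each deleted run folder pairs a resolved `hydra_config.yaml` with a one-row `inference_results.csv`. A minimal sketch of joining the two for a single run, mirroring what the removed `report.py` further down does across all runs (the run path is illustrative):

```python
# Illustrative sketch: join one run's resolved config with its measurements.
from pathlib import Path

import pandas as pd
from flatten_dict import flatten
from omegaconf import OmegaConf

# Illustrative path, shaped like the run folders deleted in this patch.
run_dir = Path("experiments/bge_seq_len_sweep_ort_trt_fp16_batch_size(1)_sequence_length(64)/2")

# Flatten the nested YAML into dotted columns such as "benchmark.input_shapes.sequence_length".
config = pd.DataFrame.from_dict(
    flatten(OmegaConf.load(run_dir / "hydra_config.yaml"), reducer="dot"), orient="index"
).T
results = pd.read_csv(run_dir / "inference_results.csv", index_col=0)

report = config.merge(results, left_index=True, right_index=True)
print(report[["benchmark.input_shapes.sequence_length", "forward.latency(s)", "forward.throughput(samples/s)"]])
```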
- choices: - benchmark: inference - backend: onnxruntime - hydra/env: default - hydra/callbacks: null - hydra/job_logging: colorlog - hydra/hydra_logging: colorlog - hydra/hydra_help: default - hydra/help: default - hydra/sweeper: basic - hydra/launcher: basic - hydra/output: default - verbose: false -backend: - name: onnxruntime - version: ${onnxruntime_version:} - _target_: optimum_benchmark.backends.onnxruntime.backend.ORTBackend - seed: 42 - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: false - continous_isolation_check: false - delete_cache: false - no_weights: false - export: true - use_cache: true - use_merged: false - torch_dtype: null - provider: TensorrtExecutionProvider - provider_options: - device_id: ${infer_device_id:${device}} - trt_fp16_enable: true - use_io_binding: ${io_bind:${device}} - session_options: - enable_profiling: ${is_profiling:${benchmark.name}} - optimization: false - optimization_config: {} - quantization: false - quantization_config: {} - calibration: false - calibration_config: {} - auto_optimization: null - auto_optimization_config: {} - auto_quantization: null - auto_quantization_config: {} - use_inference_session: ${is_inference:${benchmark.name}} - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: null - sequence_length: null - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: null - can_diffuse: ${can_diffuse:${task}} - can_generate: ${can_generate:${task}} - forward_kwargs: {} - generate_kwargs: {} -experiment_name: bge_seq_len_sweep_ort_trt_fp16 -model: BAAI/bge-base-en-v1.5 -device: cuda -task: feature-extraction -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.0 - transformers_version: 4.34.0.dev0 - accelerate_version: 0.23.0.dev0 - diffusers_version: 0.21.0.dev0 - python_version: 3.8.10 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB diff --git a/examples/fast-mteb/report.py b/examples/fast-mteb/report.py deleted file mode 100644 index 750e824f..00000000 --- a/examples/fast-mteb/report.py +++ /dev/null @@ -1,222 +0,0 @@ -from argparse import ArgumentParser -from pathlib import Path - -import matplotlib.pyplot as plt -import numpy as np -import pandas as pd -from flatten_dict import flatten -from omegaconf import OmegaConf -from pandas import DataFrame -from rich.console import Console -from rich.table import Table -from rich.terminal_theme import MONOKAI - - -def gather_inference_report(root_folder: Path) -> DataFrame: - # key is path to inference file as string, value is dataframe - inference_dfs = { - f.parent.absolute().as_posix(): pd.read_csv(f) for f in root_folder.glob("**/inference_results.csv") - } - - # key is path to config file as string, value is flattened dict - config_dfs = { - f.parent.absolute() - .as_posix(): pd.DataFrame.from_dict(flatten(OmegaConf.load(f), reducer="dot"), orient="index") - .T - for f in root_folder.glob("**/hydra_config.yaml") - if f.parent.absolute().as_posix() in inference_dfs.keys() - } - - if len(inference_dfs) == 0 or len(config_dfs) == 0: - raise 
ValueError(f"No results found in {root_folder}") - - # Merge inference and config dataframes - inference_reports = [ - config_dfs[name].merge(inference_dfs[name], left_index=True, right_index=True) for name in inference_dfs.keys() - ] - - # Concatenate all reports - inference_report = pd.concat(inference_reports, axis=0, ignore_index=True) - inference_report.set_index("experiment_name", inplace=True) - return inference_report - - -def style_element(element, style=""): - if style: - return f"[{style}]{element}[/{style}]" - else: - return element - - -def format_element(element, style=""): - if isinstance(element, float): - if element != element: # nan - formated_element = "" - elif abs(element) >= 1: - formated_element = f"{element:.2f}" - elif abs(element) > 1e-6: - formated_element = f"{element:.2e}" - else: - formated_element = f"{element}" - elif element is None: - formated_element = "" - elif isinstance(element, bool): - if element: - formated_element = style_element("✔", style="green") - else: - formated_element = style_element("✘", style="red") - else: - formated_element = str(element) - - return style_element(formated_element, style=style) - - -def format_row(row, style=""): - formated_row = [] - for element in row: - formated_row.append(format_element(element, style=style)) - return formated_row - - -def get_short_report(inference_report): - short_columns = { - "backend.name": "Backend", - "backend.provider": "Provider", - "benchmark.input_shapes.batch_size": "Batch Size", - "benchmark.input_shapes.sequence_length": "Sequence Length", - "forward.latency(s)": "Forward Latency (s)", - "forward.throughput(samples/s)": "Forward Throughput (samples/s)", - } - short_report = ( - inference_report[list(short_columns.keys())] - .rename(columns=short_columns) - .sort_values(by=["Batch Size", "Sequence Length"], ascending=True) - ) - - short_report["Backend"] = short_report["Backend"].str.replace("pytorch", "PyTorch") - short_report["Backend"] = short_report["Backend"].str.replace("onnxruntime", "OnnxRuntime") - - return short_report - - -def get_rich_table(short_report): - # create rich table - rich_table = Table(show_header=True, show_lines=True) - # we add a column for the index - rich_table.add_column("Experiment Name", justify="left", header_style="") - # we populate the table with values - for column in short_report.columns: - rich_table.add_column(column, justify="right", header_style="bold") - # we add rows - for index, row in short_report.iterrows(): - rich_table.add_row(index, *format_row(row.values, style="")) - - return rich_table - - -def get_throughput_plot(short_report): - fig1, ax1 = plt.subplots() - fig2, ax2 = plt.subplots() - - short_report["Forward Latency (ms)"] = short_report["Forward Latency (s)"] * 1000 - short_report["Backend"] = short_report[["Backend", "Provider"]].apply( - lambda x: f"{x.iloc[0]}+{x.iloc[1]}" if x.iloc[1] == x.iloc[1] else f"{x.iloc[0]}", axis=1 - ) - - width = 0.3 - n_backends = len(short_report["Backend"].unique()) - for i, backend in enumerate(short_report["Backend"].unique(), start=-n_backends // 2): - # for latency, we study the case of batch size 1 across all sequence lengths - backend_report = ( - short_report[(short_report["Backend"] == backend) & (short_report["Batch Size"] == 1)] - .drop_duplicates(subset=["Sequence Length"]) - .sort_values(by="Sequence Length", ascending=True) - ) - seq_lens_axis = np.arange(backend_report["Sequence Length"].nunique()) + width * i - ax1.bar( - seq_lens_axis, - backend_report["Forward Latency (ms)"], - 
width=width, - label=backend, - ) - - # for throughput, we study the case of sequence length 256 across all batch sizes - backend_report = ( - short_report[ - (short_report["Backend"] == backend) - & (short_report["Sequence Length"] == 256) - & (short_report["Batch Size"] <= 256) - ] - .drop_duplicates(subset=["Batch Size"]) - .sort_values(by="Batch Size", ascending=True) - ) - ax2.plot( - backend_report["Batch Size"], - backend_report["Forward Throughput (samples/s)"], - label=backend, - marker="o", - ) - - ax1.legend() - ax1.set_xlabel("Sequence Length") - ax1.set_ylabel("Forward Latency (ms)") - ax1.set_title("Forward Latency per Sequence Length") - ax1.set_xticks(np.arange(len(short_report["Sequence Length"].unique()))) - ax1.set_xticklabels(short_report["Sequence Length"].unique()) - ax1.axhline(y=1, color="black", linestyle="--") - ax1.axhline(y=2, color="red", linestyle="--") - - ax2.legend() - ax2.set_xlabel("Batch Size") - ax2.set_ylabel("Forward Throughput (samples/s)") - ax2.set_title("Forward Throughput per Batch Size") - - return fig1, fig2 - - -def generate_report(): - parser = ArgumentParser() - parser.add_argument( - "--experiments", - "-e", - type=Path, - required=True, - help="The folder containing the results of experiments.", - ) - parser.add_argument( - "--report-name", - "-r", - type=str, - required=False, - help="The name of the report.", - ) - - args = parser.parse_args() - experiments_folders = args.experiments - - if args.report_name: - report_folder = f"artifacts/{args.report_name}" - else: - report_folder = "artifacts" - Path(report_folder).mkdir(parents=True, exist_ok=True) - - # gather experiments results - inference_report = gather_inference_report(experiments_folders) - inference_report.sort_values(by="forward.throughput(samples/s)", ascending=False, inplace=True) - inference_report.to_csv(f"{report_folder}/full_report.csv") - - short_report = get_short_report(inference_report) - short_report.to_csv(f"{report_folder}/short_report.csv") - - rich_table = get_rich_table(short_report) - console = Console(record=True) - console.print(rich_table, justify="center") - console.save_svg(f"{report_folder}/rich_table.svg", theme=MONOKAI, title="Inference Report") - - forward_latency_plot, forward_throughput_plot = get_throughput_plot(short_report) - forward_latency_plot.savefig(f"{report_folder}/forward_latency_plot.png") - forward_throughput_plot.savefig(f"{report_folder}/forward_throughput_plot.png") - - -if __name__ == "__main__": - generate_report() diff --git a/examples/fast-mteb/script.sh b/examples/fast-mteb/script.sh deleted file mode 100644 index ef7bf418..00000000 --- a/examples/fast-mteb/script.sh +++ /dev/null @@ -1,7 +0,0 @@ -for f in configs/*.yaml; do - if [ "$f" = "configs/bge_base_config.yaml" ]; then - # skip - continue - fi - optimum-benchmark --config-dir configs --config-name $(basename $f .json) -m -done diff --git a/examples/openvino_diffusion.yaml b/examples/openvino_diffusion.yaml index f9f62e64..b7b325f0 100644 --- a/examples/openvino_diffusion.yaml +++ b/examples/openvino_diffusion.yaml @@ -7,10 +7,9 @@ defaults: - override hydra/job_logging: colorlog # colorful logging - override hydra/hydra_logging: colorlog # colorful logging -model: stabilityai/stable-diffusion-2-1 - backend: device: cpu + model: stabilityai/stable-diffusion-2-1 experiment_name: openvino_diffusion reshape: true export: true diff --git a/examples/running-llamas/README.md b/examples/running-llamas/README.md deleted file mode 100644 index f4c9036c..00000000 --- 
a/examples/running-llamas/README.md +++ /dev/null @@ -1,66 +0,0 @@ -# Optimum-Benchmark x LLaMA - -A set of benchmarks on Meta's LLaMA2's inference. - -## Setup - -You will need to install any necessary third-party libraries like `deepspeed` or `auto-gptq` depending on the hardware and benchmarks you want to run. - -For example running FlashAttentionV2 on two devices with Tensor Parallelism (i.e. `fp16+fa2+tp=2`) will require: `deepspeed` and `flash-attn` - -## Running - -Then run the benchmarks from this directory with: - -```bash -optimum-benchmark --config-dir configs/ --config-name fp16 --multirun -optimum-benchmark --config-dir configs/ --config-name fp16+fa2+tp=2 --multirun -[...] -``` - -This will create a folder called `experiments` with the results of the benchmarks with an inference `batch_size` ranging from 1 to 128 and an input `sequence_length` (prompt size) of 256. - -## Reporting - -To create a report for 7B models on A100-80GB, run: - -```bash -python report.py -e experiments/hf-dgx-01/NousResearch/Llama-2-7b-hf/ experiments/hf-dgx-01/TheBloke/LLaMa-7B-GPTQ/ -r artifacts/Llama-7b/ -python report.py -e experiments/hf-dgx-01/NousResearch/Llama-2-13b-hf/ experiments/hf-dgx-01/TheBloke/LLaMa-13B-GPTQ/ -r artifacts/Llama-13b/ -python report.py -e experiments/hf-dgx-01/NousResearch/Llama-2-65b-hf/ experiments/hf-dgx-01/TheBloke/LLaMa-65B-GPTQ/ -r artifacts/Llama-65b/ -``` - -Which will create some quick reporting artifacts like a `full_report.csv`, `short_report.csv`, and some interesting analysis plots. - - -## Results - -### LLaMA-7B on A100-80GB - -
-throughput_plot
-
-latency_plot
-
-### LLaMA-13B on A100-80GB
-
-throughput_plot
-
-latency_plot
-
-### LLaMA-65B on A100-80GB
-
-throughput_plot
-
-latency_plot
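For completeness, a minimal sketch (not part of the original README) of peeking at the aggregated reports written by the reporting commands above; the artifacts path echoes the `-r` argument and is only an example:

```python
# Illustrative sketch: inspect the aggregated reports produced by report.py.
import pandas as pd

full_report = pd.read_csv("artifacts/Llama-7b/full_report.csv")
short_report = pd.read_csv("artifacts/Llama-7b/short_report.csv")
print(full_report.shape)
print(short_report.columns.tolist())
```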
\ No newline at end of file
diff --git a/examples/running-llamas/artifacts/Llama-13b/decode_throughput_bar_plot.png b/examples/running-llamas/artifacts/Llama-13b/decode_throughput_bar_plot.png
deleted file mode 100644
index 9c8f6b344efd8196f596348fb3f7c631215b9005..0000000000000000000000000000000000000000
GIT binary patch
[binary PNG data (41316 bytes) omitted]
z^@Ph^4$UpiMVzdI8^s|+0H-KNjX;3V($B{F;rA^o+WfDOMGB{&&acuwL|vAy9G%m2 z2dF8;Ite;h3G5&c_Cbul_Up*I5IvAOePC7+VTgm?!of-XxbX9a`a?p@`RkG_{DkY| zyMkt3Axc~>$}Q)`XNNWeX0qC*Va*}D3bXW zw$CJ-c6VA8Yl%BEs)Ayt(W+xZ-E3x~AvY%4;o?Q*Mq674&k*Po(pG|6vX+L=tlxihJAUam}IBJQ^p1#B$;ep}psE;wTMmvyXMM7Wk`Xs8x#S zhg*6;f`dMf}TAAY=7X`fQ;fPaL^?7B4CKGDi&4+M4AQ?TD6 zS#N+lAfpjs=V@qaO2XxzA?EjYXdB!U0NCDNP(IGk=8XZyBq8O1N>iPbTZ8-)XLlDT zaTReY2^ND?1&b)gpx4}Or?0eb{E4S`Ldi7JVuR@S6`3Mpp52dUMbCc zr}&=ntQB@q8r9BOCAp#78$E;3B=9chtE3`9ahMk`&cQ`~0Btaov6r%b7n#8rM+irr zgJNQ8GVn3T^E;@e$wzM7=#+6cy*H! zii@G?(YJP1QH$=)#@pGqQ=wS-q5UHFYuuYGo zwc|3LBhSo#7gtG0$cExFN^N^##8Lmbf{fGY*fvY=iN}dxnh{^__RP0n?|N(*6=~@w zsaF?#x`0H7AV9)Ko&^GHygYLI3FRt)z4JjYxmo(q5^8 zgB7t>hV@^5rtwzbrAwf90-QJ&+%Aw!01#~&rWP54WIFa9L@|B;UhPv{ChC2T&C{As zQ%rx6v4cTBUMs9Q&7n1-`*l`NwrM1vCPdPV!sjU}1;1?)OFO-S`7YZ$dW8r_RzaC) zTC)9GYR@80ks1l}_<)DEC?Z^h$_r_4Vq+yQ`rIXB2RS=gK;Ju-%?td7Y`rv$ACnhJr*_YiydAJ z#|*2RhXxlS@ItxQ^%B*MY#w2#qE?Yxd}n7#CL6Zt2BoOj1gPPtflhMcX^lcK3e6v%KOha@~Ot=jMGr}+HXODEj0XuZ9`Y*vOkOU zEl)uVnV)M!eB8$O&)0LAnayC1@_44C#<#&(CTSVW^{M%3Rl5su84qsm>fDiOJzqb) z&s>r9sOMSi2Z}TUUb(v#?o3P-ERDGOC8H_2yaweA;6ebzVHP%)&4j*-S}p)DZY#q$ zpx-GfN9ezT3t(%VBz!!#f3X<@)rspe@Tc+iPvJk$d}?)Fp0EA5o1UVdbdsAJWN#Z` z1;6-g>itMVNzU7l^@kUJwp(R!>ABd_`IC}ghEM0gW4~Pe5R*Vp2QM5_+vZI$oAdT}Myfa_C5$k+F$ zmmN5M=<}_dlQ$|!#J#+xG}U~u-V*Qf$xO^Jp^`$~9+l&Lo{dfW3>h!W=d}3Wd8Lrg zH?WbsYxJ(r-Oz1V&b2ttQV$C+BCCgUbuX7apr-f>X$$Zz+~)*%c=0Ldaxh!~uvrMi zC|u?G`z+q*>qp6sQqIh-Mw$@^cTn>LrQhR9dwcDA^zx*rZP8K(9(|p9@J-Vz!2}jB zezFJ!7)M?v!M(bf6H`H#n@juT3a$l1GgDo;;& zDYUf9s(M&6*~4=xp>=|!A;L5G{gau40^11M=MS{)~lCnynF1t%r`RX?}H{7iU8GcA>h~sK6W&yra_d(b7j{SnPf}LbZSRi+*;ZzW&HY z=yLaElQm(L`lX=ubb4OMqM+NwD069?xnqyU>p8VvD&Gw8alZ7T%=J!mf*hm_nSI^) zX~z`bao5dnKjOtN-N|mxbJg)D1U3Y?Rm;Rx;0*7n$0E*?sh5EShJE`Ogz*6p0ym+l)(7)r9LGn|lJo{vi0kha-b6XWfR9<-Yh zW4bd?_H|tck`Vx2i}Sn(!5&a+BRSuI+y9fFC#J08jc;`?4NX#!z>tj6!c~cYgMTV& zdG40PG7qQJK?a0gt(z`RtNxa!7yT@;!ipBQblx@3%2bi{L+Z%~Pib~mG{-z|iG+9z zhpy*b-}f()jSSCwVrrWp4wtjjW367ugF}XB!Q7#LY)ePajn$i;o6=gn*3iZ;ZlyiQ zrI?XBZajE+FnvMv4m*FWOctfBN(e<>$T{v*CBQ@eLmZmue*u%uhAP z@ZS)ZOn8asu=s`Rdp%`@ZPAthX3_JJL!FipFTgVqku;K6_;k}?Nk;}b=Umr*39>~R zU!Wn*(ep?1SH#HJ5el4n><0;ljjYTRvy-lyrQf(;q|yrBqBfExerjc?!b;tYqgV9& z#Am-JuKC1wqwaAi(xYY_`7s9U93Qj3CpU&5a46w_T}SLmyO#tHc5dGJIgX8T>iOK( z?#2<9PGE6Bczynwz9>z|^xO{-D=O2TNIPuh^8ht53cP_kGsz6T3x)#^zdVy7pPcc6 zQ8v+wZZD9J@_075W}-3OeaD$+*S-jzWbKrHyr&vAVl#y?9N6;mUH^KRPl=^AewOy%UJzb-a948Bh<-fLF(cpIyOWOd}K^O?tJbSD?@BTRCg z@iN~p^3yBBYvp?+R5uwms2>Y2A#vMJ$QT(Zrq&<+_+9`9>(pStx+p+rwh_uyRc5Kan( zWW71(m=CeB3K_Ra-xOdMHTtwUEfO#}3DkI99@dNaf}wzDJ$55PWH&x6Jn2*aQ*y;~-RiQ=u2yhSYwCPztJ0c3B}E$1lNpfcy0*Kj zkg<%VWsJ{v?b_wv2bXYvJI<-y7*3Awy^JJjL&Y2wHDBq2c*Cy6-A#Xzd477uV`wdy zop$Wt3?m}+oz0>jO7&oBPoO5}goM7!l6$gFkeXp}ey)g>XYK(1b4^x}rf{jNpkCSZ zpOQ>?BU#^?#qr+|Y;&wLS*(}167e2I?hKGO7LG^g$a@f*1e{@k}RN$|Ycc~f

d`ftpE~Yhk}NtHG-TK z(e?uGmER-<0k=6#XpSyF&AxX$^o&2gU?@f7uddp~6J6+(_Kw1i;ZS9Rjf5D=BdLrm z>>^)+9m9uG*=d;Uw_Xz|s*5E}+WtZAcz7C*-t<0+^)hI+9;hQu5OnugB$4zK`1$1! zi-|q7?llJf9S6+FJ1x8lyLSi?1r=ocvAB0Qf}a4s+rFwAmh>VqXoZTdIbU(!z|VY2 z7)oufp%o#@yuLZ$B=+<@d%>NVB391z{JEpb#ZSE~_mc{3T?UT3bZrJTUmECY(YOCN zP_a^LDyjAj_ZARgxSYl(#S;k&+Y!^>Vp;HTC3w0OtKpR}ArbCJ50~Jxr@Lm%-F|lj zK4eWz?T(>;g30S}HP=8_w_rAuXN|0efVJGoC!>~}^GWkL?wYs#l|Q#jZ|`0fvW=Kn zTiXO33CC|gwi*%6VrsDeku>C7YC3&4ALbr_J3@vm(i zAxkf7b~fV0L_+@ht~t%O+y&aEA2$Sq1bW#SW5cH5q39%iaNf#ocirt|=@SrZyA$0q z(_Q|O@A$VtgVB3zqVOyLYIbzXbGoT@n)lJ0Aom*v2f@rLEEEz!55vs>%otcK;?N|TEX^RyigZZPumradRYO6)M;?JOInRD(xC3BRTtC-<=R-_e$;lRLx zKlwiG7a{kn%#oBBArh`TOiM?HLAf_GH{*v#bHR`aqwEEQmCu4-549kd8j5H`0A;jp z520+m-Jy#YDa4)&gqG=={Gz(@;#PqeKnHuL_=9!D4%g-B>Ptv(y?ATqWw6D+C%?pD zsunLEp$Xdyj2fjQHGq%;pN7OD!<2bM+W_5VAVjHiSwb|pGvX=ugYeTJ4Swa70degJ zJOiPVAT0*3bQ_pcvxX%uc0gS)!Y_sW?&pt)mRmunKt%)es6H*(!bjL_Mk(F=0;ocq zaZjhiXcbGHL(~52yxcL@z;6CE+Sniaq2`FvG0>iPMS`$6|T+P2JyV040n z+CCv7N)Ak7;)}tgRMQlc!T8KR)uC_ol$Dv?%H8r*U4DwA2jOBfsS2(2yX%0XUvOXX zM|yq#`%FPs)W%g|iu1)xpyA?jRtl0*zW~UK1bxD!h$kOgs`|xxK?EY`y#q1)97(4) z{B|_&dHo{qwOH(>cz5S&GyuMc@#*o}gd$+igklFbA!McD>}XZD@dn6uqwyIrTpxd# z84WQ-3+wCt^$tBCo$)j(Aj{q->?^7J?^Ds(dAf1_>gM9)5A-TJFREOk(yu3@7~x+ zzGiAjNR*cLqdUnzi^@-~{XDCrK1|%m7|ZkQb9kmwY0kq@)QQyO*wN?iM@?m2qS5xc z84E6groM_tp>p`^;iQ{BN6d<6?K%b!_60GC@SGbqTJb=u`SUCFr$|1`#zt0un-b#> zaqu-F;F-Yyu`hU>4Q3CqCLjd>a9>EWpL%-^0}fL}ytu)ulMTSBUb=kw=<89_t9Us> z%MRmw*fcWsvE}{N-%W?ck}C;IyW3x;j(ZLS$?-IIb&vl#x>$g@tVl`HES=2YZ*sEC z%ZpEFy6x9C-Zm2<6j*l8(0k%;B|%o{15Q)P&TEdS>FuT02XbE$=GBekM@U)#nXiJ$ zI>4`cD91eFPDR3CK%tZmXnfKi&^(EQh@KA6YREX?hSV&zbOG~J^K>x$(i~#2#SDG@ zTIO8_C)_5a-cNl!2qnt7)6Mi{dh=3@E6X~WgWUF42gZE=>hJ?9VV~9gh6m|`#HT*A zij<_{2sbh_BS{HZCVhl+YZ{?-uv zG6{)`Z0&j}$Ur{=+XAtGKB0fW8};_29$Y=}%)dfV8OWDy#wuo-g+}2r3Xh7a7Wua9 z#vN|}Y2O1^Ca*A-X0{*ZY!BZ~Nq#&u^5L0UOq1j8c;OXmH<@xp+GR5eCxcmjdVKYO z#YatQrMjB6g~>Io4KxP5V)U+?_fab#`dd%H{&9i62&RO^fxpvD_kB5s~{ zCNSSc=F*Kr9x6E2&fhx%Dvbh$G#{iz09iu{*-zYK$jQVocAd!5Ev_M}q>#dPo_lUZ zZMpTddw$6ko1Js9ts4Jp8!a~VafIOU+qk}Y^u4nTeVWZjUl{_`MiozriuxFoKY6eF zp?vezUw@hJzVlo`n=&yCUfYrH>f+*X@IE4k3moLS<@Z!U zTLesM^V%br;DrLr5i<)?10d8B8I*K#(r3YsoN+K|klnEQ{S07$QYTf7g+8fAquLRd zI(SEuE9u$ehu?92b#BKm?s$=EEU}X&Lqcerbl^^qH8il2dcYhZ_04BJcK4$e%9oC= zkY`!*nmj{Mypk%4($prdX#`|Yh%&qRWD5^ykfhr_kYEfRXi!SGfLRfVr~%0axDQ@{ z)A4Ju9w%r>Fk0X^dI*yR2}L$gcob|m+1aCke*&LB_QPMKQej39=2D|Y8h0P|sQwB` zPI$}3{wyjZ^5M|BWO2IqwjJ7Fv!2SyRgb;=p%+{Y=yp6a^UYdhp?$TK@u!3Gr~}bc zUNJ(|YG>`7I!)%p+g#;wCXdWkp@&U{9Sz~PmZ0GS&Nu~FZh(m|^VyO?T$~74Vvh}a1Qr6Eti*~v8y%+jG{5WK*!j^F~k0n>>hxo!LnoCnq z_lh4xDCJ5OkLdQ+`D|g(hY;z(k5DJ@Ypp+?!kXqVp&6>`WokTrgl@D~_oev!Wk~{h zPJsHXn{l4k6WpsFTep1mchyd&rMCFbFS&@FQ=!f&GrD|!>tB=C5$!(x(enuHd?;ZD zYyrT?wLm(7=1K{I-Q1Rbs)Hk|b1_er51th7U4VbpS$WP%M_1n{jTaaWMu==t5)GdW%{CrNI!LW0BXbSF$YG(q?7k=4`Ylo zlf%;0ZY5@VD^*&Y#L3uxwO-X$(wb@4o1%yiUeW1 zg0oK0v2mATf5J7!@&_1dA5PZsQTrdzfJB}a+#yhuKZDH?XTd8+`@sjUv&k~8P59dv$az<$<@3$W4=_Lc{OUTMcAq!ys5Nr_ ze227P1oN}c^%!D0yX#2>G$nN;pYk*Pdl27_-TBUtI(zX$m|Z>hOQ(jV1P?y&U3Cuc zjUOAA^S9OK9C$0~MKRQqzl4aLU1xb_G)j`}*YqNVPqBTyHxTbG<+aBfbiVqR#oQn1 z&R(R9Yqrj$J5M(4>z)e6@|?Kc_su-~YP&7YG>Ei59Iw?Q!~a0nfdc1`6Qr^t*(4}1 z^?ca>=~GG`M) zm<4c70*~S!&P#=TU(r~gVRTGa@i!1NxBTUsk`YAn+h1xufYL#d;aU2&Z2L*{p$~o zOnI32k_NQO7oSwCD1BGSUkejF_cEDEAEv#3rZB6m;vL#vK2&iuj8pa@oH`}G*;QRc z@#E31nWOr~Y_0R^Xg~a21()-8_bE3(Sa-g8c(0MAR-gcRNDd}ng{&N3MN8fusCg68 zv1~x*9uQi0ntz%qv zU91)JLl(nF*mXZ4%|*|yzJaTH(56}!;jzy`9iIoZt@_-PmozssCoZ>I$fj<{RuZKPe z5Ec6W5P~-x<7o2-_N|JyDDzOBX0;_NOKH+HgR;|~!AV$LhAp-@STdxXq3fUa*g=mRItT?Q(<&=eZ3!4r*_w$!n1E?=1JQ8{m%MuyDzhV 
zEOdPQ13s&B%S`N4MY)gb>d`_jCF!<2$*ui71Cr*7JMK%q@_&@!+cPkvpEjSr_8fK4 zR#OXMGkU6={N){i@-Qt_vf$O#pQ+4Y0+k0h3{`+a%es^caiIm~Y|&bs@I9G)SYLUU zY{AlMXkdOc4bjJ7{F*ikc|f_)5E$vjj(_t!3WcXME99c&n$oa&E|BJjc`Lqk zc1I=cVen?u!SxA8JNTHW`}fZkX4`+dk;NanikH+7&Ug%$*Mw6CR#t>n#fZx(s<7Eo zQc8)PU0x*kV-IU86ka|ELUcV#bcUNyzEcv z%GfvjAMgqInC|Z$rY@BIB=zB7@ZU8&9{7-S%OBO*D)mLpJ1WlaSfjp<1V-@+)HHr5 zbE9z}hxYuQx<+OO=>JhWK}BI_ZE+A8L zJy#0z!9$tP*9L2MlOM1Gm7kX-%|AT9fSedfK^99yB3&F8g`YiY;t;P28LLsGYG&KS zvw!HYxn0f)&*`>!=Dhrk+?@fM;TtzX{P&pE*p0r!`P2(d!O2Q-c?uYFp(nBW&g`JeP4~$yF`PU|3Eqcp1}iTIL~9`eEgfn*l@n z$>x50|H}>G6;h4!p0}noiMKgp=AYfjgXCG>jEs;n{pS`{R9wa4Ah z-2hGkPW2N*W1G$R_I2E+=~QvIRAd6S=+lJU3u^BRes7*U5Bc-y^hbnA3wY!0zE>|t zU?PFf(3I=5f-0t9mrQo1@pOH2Gdl+D@=MbeQo%zmU!3RHyL86%l1-$3f9Ii^O0`uy zkupp%IzhjtH0#0lw1h40mYPfe%MD>uQz>!Npm&lOa1zL1!A8yWoW+5LUe4HYN1+Tervk;(F{xk75Rl%)7BPZQ=9pJth%6M|my3P75jE5zjAw<=T^Y(Dij4YWo9*)PLSOJEy2b&Bgf52 zk14bT=b$P7V|H+HR_KENQ}fYsw0Fi&?#SjZ4pO--c;n+yOc#zX2FU0%Q=+g0bKA>Q zXU8nVO`Ui+Th|NJU(oxX7l??yn%^X4QdVl^;CkWBnlyKlVL*o5G7)pYjB5iDkG2P- z36apmN9kJFH0_qSy|}4wAF@7i5m%*+VU@Xz#vQsK7B>x-<6{%tFy z!(&75KI&ooDlAODA|A9g|KRtNVC>UbSE}GAa2ybgKB!T=36WAri41vtkJ7)eJRmA0 z$`Sg+(tZEyQI}w%2fB59tIzV4YTe`Moq=+)0_m~ZHGFwa<&%{=yE)s}%`y4*-a5~Z zNFRQ1U6|)&z&f5blhP|`p?P0c+D39Nu{Ndq8+~oGoEkJ`g_<~7L{i%B%vj7KCyVr7 zC|`99+03?8yR8U^-lsaHGrE)D?AFDh@-#9$^VX~N0?zO$ag8Xs2#fo-A6VUAT#Ymd z6t})5CUbbWJ@)LWmScwt{2IIKc|@a@@J>Fxpb^>bVohTn6eZTV7E1cr;{mljzo2(c?n0;MJ}i`~(RM^+wP%+@mJ zy;@4#$urhsqG7Cyn5-qlg#f%HCJ9rXJ@By-(|kk+vByE9yFBKP1V0< zw!Cu5*3MzG_=h%0^Xg!!Y5#|1p6RQ#eSvzW9jj~S2K-E${6cN^xDNQD_% zz~62-o^!cCFZ%q^c&<#@08R3BbMcw1=*7(;(ZkoCM2xZ+Oqh4JvbHxaU@FggNRz#e z-5Fgp;W@zWAV9)U6@0 zq?BSS7}vb?4NtW73>H+4O`KKiIvl2ox!&YzQjtc&RjC`H6n5TOgfk7>578+C;-d#c z>-Wa3zv0Vb(-bu}E?U@7Pf?@D$w})XWBA5qNJ?_f&%Z0p?sHz@=5bv1m(oUdFi3SR z5vU6070H;HW%**`(6k4THS*_5)vnhpr7aRo^!>mk9#Od(bL|?P)#9pAcPzH^s60Um_w;5LzOl=KlT|kQlmvc?kCy1|yL`2odhjTO5b8CT0rDALYoN zH^eK2nn<9&yb?0J#YE$P26rALVI=Vdo)^Ovg`gbZdKrhHp zc$&H6^=516DbmFQGC`Kj4DLRI)}qbh*W@C zXBO~r$Ru#^mBNTO1L)z;k2Adt&!Fpl2va&iT%7|m%@A#z@Y?$)AXkOaGlVHd%+?T{ z1@PSDLk2A782lDkI~Q$#D%Zz2VvM>u6%C*h57vLK&T3n0$!>xiXAP{ zfvQ*m8oG+w@`Iq7C2!T==QqWtCK_k{*x#5VfvHMfUV`8>tNjfkSRTmvVaJ&UZ>G&; zT@Aw_#${d`B5)}!e2S%maQ-vXF1iq+#N77{DU=>f3d4o=_@ZKmQd{|18qTyJ&ct2% zwN1BoNq3xO!nW(pX#8Fe>-_lvDQeAQLUOveJnOMbs~8h@t^X-Y3FhoX;p^&?aN9yH z(iZr5JX8o&1Q!DhJUfA~vDJO-+=$vxK}Uf!!iSyk3MgUjz%R;Nq8m~YsGwqy-f z8y(|6G{_)JZE(_DB+VSCW25?g{GSpf^-LGPxqmR{l zIEK!dq%%-CMb}UPT4R!JWRyQd3|L_?hdee-Sr!v+Ce=~M{$BGl>q_z0&s91U=09HCyBWMmKEXWBnrSM>R%!^A27+6RgQo1-puXrS9q{mK(Cd8dAk#h57Z*e@Ofq1G_L+-jCe$5~`zdhq?>-~>>({IH~ zMy?i24L$y-E3IU3WqErszJBEqYT^R^O`9{3VGfQv&sD;D^w^VwBg4XwA-Kp0QZU&W zbpH18^h73~fu{!SJ6y0&z;imcw)O=?vEb*r1_>}&X45S})i#5{7jJk5B4fgmk~$GU zMn)!t-b)JmrSNOvww>m@_!pTHPWR}ZQU_qlI=4=;XV#lig}+XCe5j5;KK@1}Qw{or z0__8}ml6FIG1*#1r9x>$cXXC5&4z9H${Y8tmQg-pGHHYKF^7Yc(KKE}@e4o`Dg{qc ze6yX@_jGr3Y1bPv)7vjn`)XwS>_hnq^7m8_UPG`HVg-Z~rR&RD=hppHIgXG5e51SX zw)(zVGcj>y=~&@;cdE;LMB_Z?KI5+!CwM-x<+#uCdVYggD^IQCMOwu7j?gUrsrFJw zYN+)}6ri9`LOpr%WbmYo*DFz}1Be?|o+u?6aR~quyWgZ7EKSS%678em;-2=`=5D5L zGTCf6F138RGFefT?Cf_gd*;|2dvt=w@jF~Jc5l9tww!CtsbkaFJ{da9PdyuTi*r*Pdui5LxJEWVfg!P-A-VIR zoSCJ@H|fmi>4fnY@l8xwVGp$F?E{MgwO1@y>9VD|?AB;_G%~82dRwL<^`{0AJ&_x% z{I~zKOK9Of#TYPG5Uu+;E^+qaj8I8Eot}XXuyK<*RE9tua1&OZvqWkGb)ZrZq3~cr1<-~uEl-cXfrS=!Szd#}fIN8lvhjI#Vz_AA+uAupn_P~CS6>l^9 zr%p{>(}-@Fh5Gfl#^2JY^m_N)wH(a)BE$J1r8Mr$wW1w zCvwoYCfbQq#V)=Rm#@rDk$4h|RTxU88jvxuC0FogWa9RROEl~a_Sdaya>ZXMD9K=2 zhKCna8={-~VZ{hrvOl0HlS68sf(LT(u?)`22L}UPC^rt~Ux~vm-z-1B9=;sb82@zU z?swv`fwsoTzEv@|q9aJ^GQ 
zrXLbu3Q?UZWW-X#3WrC&R`xGDp1TU8Ja3wIy-wXL>GQvq*o`62?<1$;sQ$|A5dVaq zOqS-jjj-Ey(L{qRUFb3W!K;q3kwS~rfslf)(70y^x;}+TS}=4y>R!1u#Nxp*u?ehD z*66CiJG;~xDG1Um7D)ApTMkgYmCJ=CFKe!5b?46(x%vr=Vfa0vTqsPOJBNUnEwad~S;4bVB< z%x{tY#+-aL?%VCPn94U~c|;GmHhvxQqPrMwn|!$@kW%e=LBMtN?sbO4>o;7Oo63?1W$3HZ^@@`X z7gSwR!iJUHookVD&bs^*r8%wq!_hN$f6UG^cK0-EzPGrzNSoP`kQviiOYm{MA=35P z9<2E=+YW%8x*xRK@ZPHwg`i-5gGfzmO^o@(h0UFB`QwSU_gw7mXFLxxzV1l*xxFw; zn4kdL_IG#1`J*RK;`=_<8${&1sPe_Sf=3zL9b)nF4rcQ{$KKi5I|9`(+b&y^7O?+3 z*qrT1>S9TZK)j57sci$<(xv2pKxs$J33gqnvcj*|@|by`<;g=A8UBGW17fI@B|Uy? zXqMLyw&lM|oL@KPN=$O`|Hj5m!G?;*iP3yru>XrP?gVF()4-kqvaZOG0Tek*NEkLu zHE~`UW`@*=Hh>y8w~PHl)pO(C6QP@ZUgm44-eypW&c#f=rk|b>-o%!`@7_O{+m1&* zATW{XPn!06Q2)>R+SS)5Ctg~4D!BkT4A^W0EL>^mj85c>>u6@&cVy+nvgW#cu<+6m z@$Z295ex}IFm+Hjg-q+!C&Cv*H7`FtnEo?XdpNs9i2~SRCc4k=jA{L|BUa4U#^ICk zZ*TQXyki~)E3xDBerAE(Zp$P5C_zCR5z)eW&)Xa(^$6bk8DS~hQGb5_hP2#tK6PfO z&1my^pXJwze9ovAMTx{E_s@BCG!z7+Y|COSmDOI%YMb_UyFMskSnLp&!!+-I%C}tJ zkMRV}etQAl$PI#z?)KNyi1?&(84*k8>ZNs`e6~@#kOD@8kn3Ey@`fUJ7G426Po$cq z!-UBe{aMH7B}1z-n?HAoXs%xvsdBC!i770J+CdnHV``KWwNOIKhw|*Qyhd3mZCT*t zz^?hW)dH@VDW7!CGacWWE3HtCkDMfJT;p8#zS5|v-nivLk#DV;WBV~MxZ91hP zX#@zg9QM*hV3mvKPXEnDf$Nm<;U7}R-9|9EirjE1@uf*%+JHQKb^`y&S>S3VrqrI!#4y?>b8(2rNVzwpYR) z?~vokxlK3I@bf{%t&);-oy8%W-`^7*4Ekf=5YEqVTZ<@t?Eah)8+)DpuwD^? zuBg4o;!Z|Vg+ix2rV{7Xu~{qNgtE@`>jY#B8;o7_^q7GllSM*;{m%}q{~}7GIu2}$ z(M$XXd+oKTiF?|8s|Fw1FR0$X@9QBFb@c{^M`zh^NO5wKaDHtYt3+JVOHI&ZSm&#J zbP^-4sp64zY(#3{t&BIFAyjo(oX(mO2!%ZYM&0F0sKW{Eq! za*TG+JR!ok%*S>qy}$LK?%=9U6sN0PF8|ETVtd$?KhE2atDT2Ol+V^5DfpdvC4AMQ z0{-8#``G3hA0W+A!z{WcIB)V|m1d=e*)DPu5`7Sii;BZHR%KkUelR%Mh%*!&TcYTE zv%O$JtdUeH6(`e7Z!P^Bfwjj)qh}`eOmcKrA}M0Gm#tCncUBZ9L-7VbE#_Bne+V#g z#=nWU4H0i6M2aJc-4kAmuk3!j`wa=Xr{)7C}Cl9v*4iWe^R^c)@;Uwzdo=OjP4 z4Ih^!xHn-s8>VGNTk(JK_Lfmqw_Et`LNGuH1w>j@8c~o=1qA7C=`KM+IusD41?esU zX^?J^?(UH8mX0%*d!PLu zl!*@Z&X{KQeGy5>8tNH!Qei^dci4CYn~m&i6^r{;Km~+-J-Khj(8*%5tWWMflvQ*w z$uutZbKFbYaX~Q0%EL9qLheV#5nC1Da2w;pNRDt z7#7Y4ifGQ)pCPKqTo7033Bm?O#p;1D#K=CJIR1?*jbtK%bfIjsWCDTotpdz^HqL&! z0AAWJY7&-%FYJ%rDz)U@&_>R+mLjzIU1$da*_KLCmln2x!Z^6=jj$!`^9b}@7u~b> zJox%GRY)Rfdoux|*GvLe zZHCNzzBMbT;i)>u(Sm#=21fq=5%qDk{d?1aW;k0?-wcavhigTdk1) z(yOy{=Em##Ou?Io#-^73*7ap!(6Tv{}VB2&>o6|?NSiR7kJQ7!!lEVf%$f(Jwj z(DQU*Yrbgz*ddX6GY0596SJM=!V62J1WZ3a^Y&pWn+I1f4PPuRp*>57y$_K>Yw8K0mv|t>NKJ!_iOMNP4P)Jr!N_maHrF_v!|FYtN zJ)^Qs5LsPczBxzr_;dYrm5Qm)e3@43(dO^e zp5(F6NY6O6CVE~X`6}A5xksqeuNWset4SHj)R13MIX+sfebD4P8Q&cu%*&$q=Jk&^ zQ~8o{hQ32>1brpzo!M8AX3+K#^(NA52yN*}`x)2K3FWQiHB2IwM5y^8re^?VjbI0F zZEelS531LGNKL&QJ%6KqG9YKDJpcv_PtqQGN5vRf@f@Ln(j_zY?bea;SJQKX)CZGM z`R3HSgx+PQn+`?FxZRi066Moh++FRrBKz-)M6_R=XQKP+>4`?Mk?MV5Bf`dBV{{{q zcsB*A1DrYj{Iv=m5RbA&nII#9z`_8YWrXeea6nZR54h4-9dst!wcIbf+w_wGx~mi< zeQS4ngR9mry@W^9EeZ=FRJ{pRy-EqOQJ1Ybs=dC_e+W@cq^_I#c72NI{XvV6{|(&w zuBr!n4rU}I1}25;Z5K=G{Y%~6d)B`sWA0TgxRQKI<0!!yic?f(y@F=gDyVnCE)24! z8y>KT$H1Uq@kgM%ipm=p_CfL{;t3TMrxtb|VYjHja3|iGAItan z7Bn85QpL>gmn}DH=-*#4xj);?Jw;@5d^Yd^lWMIrr!oHvTgbVFkDQY5nxD-Z=T%CY zXDt@3+6^_Dlx+z(tX^=#Av5eAAz>T59gu2QEYZ5Nx2FpSNOrqypv#PxT3Ql{1D!$? 
z^hJ))^Y1feK^B6@Js&2)J3Eco1CU+_$7Q9YvkwqI%VIX{5A_Zc*twk!R_fu`*mmS9 z-;(9@?eIgkbgS^s?o;6j-^bf?EC}~NcNdso${r8xe600b=B;6>ND5a~|D*4bmh-X_ z7?PDYo;%3VZ)~uZ)cDhN1xK|-?c%ncZE?$F?gaR!LeB<-3dmE|-2H-aIV+GA&O)H4 z5#$#^-2B%9VvF&~>QnHa@aUIoe z8ZuLV(S1V+ZjU5rw;AqB%v6q#EWR_c*ByC+()&aDIq%jxf3c^b_b)};k_1yFqI@ZM zNsD8M?Pk=znB{3A&-LnJ(E1}mjf{*mHYuo#Rmw`s$Y@p5feBM4>GFr2gYfj;FoIdV z=Y=$n{jfBCG;a1Jve#!@&L4Ab@gN4y*dFm{8Hips_;}xS6LF0ya3Xcga;LKBCIR^k z2@%^9m{Rl(=UJ}j!^sqcE84)m|LW(L0%=3|C3)(lfk6FMhrzWFYYH@_$u7U!A65H( z5{QxG_1hEMCBNWgAr){FE=ZIP zu3LsUCq0QUo2fy2>VO)MH7Jai`~xYGg=auW2n`1CG`Zxp*FS%S;nSL6Xo z;qmLm*H!uR*V$sn5Oo?A0`ZDxe|B2wc1+%gy<@_S5V2Vkh>AGcwh(D6xf8hEBb4@u z51S?Z#Ns1cjqlH{;Fykb!WXnP`51C>Uv8}los-iQD=y!{pvdk74cOx#9%OhAQXC0n zj!6h}g}k)gZtiUbnn?yP9E6drQDlt?9eRb(2A;Q3kxd!5EzaZ#nXv|^3uD4p`s6l- z-udm^#n3bEkYn+CMnQ|oCGt!=f5KVJx9u-*hji0gf zz#`UW8v2q^8HmU_I)Csx3KZFqRaAQcs{wtFxnuM-!Mim#Q2m%~Oh(onSMH=pH}Yvsowm0vG&9yeZOG9^ueivnLxDj#3}PnmML;~tIM%v88Pt!Y)ndh z6i<47CYA<0T{t^|NSW9#$?LmM(qo%}R z%Tf|M<5Z=*eV@89&jK%QGFySzH3z_10*AB{4%>9*+f$Y7uyUq>=56?v;4N8i{BZoe z$wQ-)^$kPB))4;ljsld!AQyBpJ6SyM(&2=Ki>_US|CUU|@;l3haeM6G1l3QWm8kYo zW+HLgQDs(7h2sKkr1?bqhAj}sJbTY;UKl0!ztIq)FpUwDMu5tD1wIfGXBpIfeSt8X zBpM9hGAx4G{0GcjVDw)$Zh!MO#bq_JQC(m{^!2(?zA|2XPrHuofU|Zhv37b|r?&2J z&`s59P@WE)vKqQ{Jakk4m5|KC`))dLk~4)g-c2s!SnXhU_g_WcNvIGQh*on*L}q`d z2rl|9;>-gk4-oR}f&c^wodb6oMo-xH??@03M6tDk%Z3iw!`Hjzb-c|+ZE1Kotcph{ zzVqUNN^F?)`p4Ue7aWK^@pi}a4wVma-NGqKOqL2}?~e9oOOLQ*)KmhMJzm&LSq5cA z9C8``tj1uom^+u3)}v!=W~2!YKALGMczqjLX?1}h1w*{v0!hUxGSmPxFsPSz2j!)A zVd$##0gfSb%`icDxP|(|B3}4gffrUa^ zIXP79?N6N1K4oZu&ZMmLhu{V5{n46aUgx$eSIpPWhsh5DXsB?bwx3Z@>SFwu(*?Ks zk4tR+H+$4x8Rv|%0J<0ezLy!_3>2GE349u3u$r0wHfAn-rbiyt6m`sX)?c58+Vol1 z(9-qu)8&$DR_<;n(oVnX-6pNAc2QhC%@Dk_ zi1){_OX?JtjrvA)nahwc8kH=6lC9x(CsdRIRB4`M7T$555}LjL6*pL>INl*7e89~7 zGKFF%OnIlu?3cwgJOKq!rIUg3#KhU38g<%RKBcc{1t^3lCmakPf0KRDRXY=rQy-%d za-9x8UifiG;cXL6bHZO`1AFJCeQi9I<8PN1OFr{4kdAyS>LWBiIYp!SUlX)d4Bxl! 
z&FT7n?wd=-^xcEsdJDJcg(tfaeVx?fLwE;mb-KR=+-d{mVd3&Uh`UqU|1i1Q%AWNW@*t1?=XI2mjGad>K!H`4a3A%7@TMRg_p7 zi({`!=&pPmvQGiJR-E#}nPCy%x5DTz9gSqoA+mn=Dgh^o&!> z)lTqi;^b7xS_nol!asV@Dc+Ue(P+oqTvkb4-b$hm_6bgV%B7 z9hv&gBla|do{BJ%_kR<=hb5_3hHu~}r0{>(IDICQ3FqRLa`~2aY^htaId4FZU1rel z*YEhk3B^5rMtj-{x{w}XvODZVC-2h3alNk}mz$iJ3vfufYMkh;QiKHQrwnJT@E2SU z;6715q5Daur%6^m<0i7Dtfg9EqZkjbjPiV^{?9L=vv56G*{h6>o5Wmn_`e zl!s9cj#@x*QHD)o96IWaqZsqV1C+l{;OJjosY#(3aqA`21w91c`>NXF%sDQ3Esq^S z-r29FH1`-UtY*J;>vZ>L#B@K0q_Lj z)&Z3Yj5I571cC^8xf{Y?OzE^aX7A$S6W4VM;}3j1gV=QRyAbUFFA4|N^; z2vRxmzh(B#nz7uInQJ#&4Em;S`CN;4kgz(}KVkao<7d?msM(!AOI!F@X2b;Kb)Q1j z1V@5z)wNL)4`&JBygD;(JPR*d?N5T$GH}5lRTecv4ow;XYgDOA(1g=VfwzQz<Ail#)3H0vW7!2x&ri> z5ZTr{Ca@N_Q6<^G&rf-N%_FAo_rWC3<;VgV%Y`PwXdC0q2=Dvl{O%gjpg*H+oySKm zirhO1IWVfJBLB@p-k3oQcqGoZ`jxi}aA92Z2olR%b^4)RKjfGKaedbG*Ke(fe+ z9dt+^Nw>I4Za3y`+sv-W>bSp_9d&u+d14l1!(5Hyfn!Pi+0k*1D8{*IS206IW~G<= zK?_mzc^D<-xBaGwg&a)_!kP5l9M7ZC_sliYw`3_*HU~<*H16TCzVjM!hn8VZbh-I>s`l2oy5?@QAni%jWeXD&JU8kZ z+u}1_iJ3~oX=ZB5d>-Xa=VS`J?we+icpROmG~%DzV-~n=HhgCref~W;7hbXdHm-oe zU8RftiN;1xaP4$Qr{jTU9hZ~Z!L`M2qk}0(I!jBb+lG>^;y@LVvsIzXSz5%|R8@S5 z#@`Zmv#wsM>-R*%aqR^0pFie3<+$Z|i>p_=vEJoQa?*>kr6zyhVam6+_gqgl9kFzJjl{ zPNHuZ@}@(A(Hi1EIZ?e$(42k$M-*Rsc(+H{tejeAQo1E=t(2X3(0aGy16)I)zsQNS z^tEzhAk}B975iG&Ufhq9Y5|e8t9;%JSTikaPN={+49(<1NDL4TBr$Q+l*L01;UQH* z0*0tye>DOgCw?>85;VZUISCFLcF-}|PuUS{SM6hg=(BW<**oJN7+vP0Fd0Tqm`xVB z5KaUUJO1k8pIV+?OZbd`7}P_feL0S6&SOY6vb2%-XLlOYG1bU5mb8k8#o4;;B?D&3&v(?M)mRS*7jORiPOx=x0UInP0_ATndhcbNbH+m+ij9dJ02 zf_4N_(TNl}!r=|p`>J8chSC$2lr#WY+V_Zv7Z4SRv<`p~^Ih9t3|aixzBrxflI78F zb_kD(#T)T`6Yrg`juKacG0qI}z85;o)7Av=`SqnH%4R*lC2R1%l-#j<5i}$(fkB-z zFt)#Lf>GGjQSEr0W57;d5L#U}ugz2%58VbW*TvT|d3s` zM4jMr&jX~X;QCfA(7g@}{y*^3kg_(&DFit%8nS1KbeGc7(n4Ees<%81T~$Fb?Xw); zSglu2pxY}>sttVauNAsvCKtD)cpueI_Z3>ckNOx?O11ZA7|pk%Q;_MD{b)IgqRV)} z=yF~{7O$JEOJ{ivtx&$RDPDtzWQC^+rK{q4;&4XZo#g0z|LwDL8w~cgV?GK~R2Xj- zT;KWq6<3M&Dg@qVpgtl+sOI}SJBe_N2ac>CL_g%4jWEK@`)M2bOBmeBW(~o##s}u0 z8h=Nfs>!M^<&?vv(zB{=-<{JMw`TKooY0zmt4f9_#gBMCkwU72U3FdaFT)w3KUGa_ zzODL_ciP1G4d}bw61BP`sYJwH7=Q;!EgcVkc9-0@?`F#w$U)3di- zaPoFIS&XoJ|65>^Xc?o2$$h;@$Y2Y#u18xtT(iRiMO>DZOnYowO}vzsH-oTI?WS6V z(SE-l^7)w4>oObtxvB80oHsJiv(d`7xO8u5imUnD1)P-evCnFMa@1J`lHpr1#zCd4|_~AK~rsh1NR`r=mI#QLcK>`=<1B5DgRnyW#HAk3SNQ00$#5rs>krGtE-_;G{@IHRl@7wQSvtE?l^W{ce zi}r%3CvM}7DxaT!Hp4I8>94Y-)c^iwu@u5cc^&@8O=oM8?Waz$M!GUA$)Es)ThzmJ{Mt>aR&;D}ytT2y5eAH_bF14DBX=>4d#6j) zsNRI{x@zV8w$kCrL)IW)=%Em(QYt0{xpkxDhk9_e_NGgdcg*|E?==UJ_aGJar>p5e zkXFJ~eQdEf==QEN%X7SPZr;cbu;lhIW-!sO* z?fu<)vyBUf07B2W(o*Cxr{&j?=#81QS*tV>^8nvXAqCR7NKAoWJTTkv<)fdhk6r18{bTV`gIJ~rTT#w;WG{@jGm;wX zoPKQ*e5x*S3bQTesCl-$-$OfEmN|D^vpe54f2QA=i!(U|2vZ0v_%=*68AzD%;|FzzzS6t=%ld5U zLu^1xCvHq`iLJVuA(OKkXIEzAZAuLmI<+ILAM?FV9OX)RqBtY)jAx~nyCwX)G3!4M z(10*^7kM7o8Ib1xLz#Bpt1P$d*8@#Irx08xoAPj&zVw{grVUN`)umxs>4}%(ueX0W zdHnUUdXK7D1`fc&%$GL!LL~r=CE$JA} z3=TW-+p${Z4~N;ztAuVKMrMXVs=fUnUS*Hv{P5)W=e6%3g<_dTrL6(r9$@%rb!O# zIi*N7gJSo$0}jn8p3ujq42bBp4u|Pd?#(UDTiw%3X%m6zFSdi`W7{*zY$r%9uE}O9 z&`^0FyAp0(F0xce>~CmcYf)DOn!jnp8cI*~lhX`*cj5H!WS1luzR6haG{@`4!&o|{ zWRZ*X@-V}9fEkGkgHbkepCfjOhh zqfcihfAJ{tU#K%YQ(Ko2|4$GfjeM5dT_RhMk$2o<_g-W06( z!B8aKl8u6^4bp&USfs-Vv|I`r4x#w_3{-#QDblq2?+GuSEP)?V~lckf7-GYvX3DzsMhmO}J5TaHwr6pH+YvYA#acPE^$B$tjpS z9{;>Ogn;+6<7!t#A|hzJ@``W&n2(%X{WGH%4r}<(*1cy{JJ-MlN(Jg(2qC(L9D0M2 z!Hn>V+B5){@TTKt9^voR7bwOmaHewOMJNA-L&D3*F!f=MJmTGeL0JCs$Uy953|6Fg z#jiERh(N&xs}Ff;$Ghn1bW%briyeHm4`roBr zhoeClZe6sno5%0Ja9oNO>~J)@a@N;DXeUnpmM2YknAj+USGLt}vFP;pCL3jur7QDd z+7e*{$gNk|3Mp!m6pK>*bXo4r5{w)|Uz{*Tiv8y{?2^=KmYS|9n&lsjE}Y*_)Ojb` 
zRl-LLr+n4p?k+ky5)!jDKpcOL7O(JP$&04ckT5X#PR8?QdTr<`@*IVRlJdd6!L>*n z@Ge1X0!TRhpdaJm;Q_k;+&2-jdidA`x6`lS_^3IayKUzENGOetkzRLfDgBt)j`#*X zN3;ab&qJLCQt|h=wXFMEUXjj!jk~C=`${Q3{q@#-L()%%Fmr znCK*mZ%PVnVBn91IuyL>%gwuG1Rn@>pL{3fu!h$DUrxh(l`+sw?WdLn* zjtZ|c=!#s9|M)1I!;q8FWMq}@Uh+@zcAoYsN%=9yq{`CV`a1KgiB(=JiY8-6Bl&ZF z#;Ncyxw_n;bWuLt+#{p(q23zzTbnn$#($;y732rQ`qha(Gi@vuNP^;-)W8Sh1`Z#e zm|8BMd6`1hiy1o#pKyoAf}XihJNcj3^q9d!03;Sv^s5!u9kG+BRl#869-ed54VUQMzjopO-mIfiA*l^X)a!$$`0)GtOjKNB zzS}ab+2XbQR;#cT4+OHNv*cR587*EKh8KRf^6-vNNvbw;WN3 z72=~#=UeiO3yQGO%Lf|>$N~j-0whPRp{+Zu>S93p(%t<$A(^}i;$QL=?5#|0dhazMdF$NS+-3Uisi+InU*Xg+ryb{D3%bO-&8-ku8v$u zVi|qEnE%hqddbys$=@UMAt_aS>WAheTLp#NU%D%!*5EJ$lPUVZz*p{TgZO70Uf$4f zk85l!z`k`Xy?mY?29B79`%pA^a(O_q1h|e+@$TR6hft@}qixHsff>0yRE5eF%ru6X z1GI$Hcdzo~bkb>fjF(&VJi|kB&`(;@CM1M$TR!|KgcsI#zl&01&8tjJtU^HvWnu5Y z*7rzQJ*1}a_iy^c_r`-b%R5$Y8yZ3Pj_%A~EA|z+^o)pb#hI|fjjS5n(#NC7Qp5^<{Hjset!GoR(Re(PUX9&!alW8!J64=Rtk5t1eyzdob1SDJ4x*~$GH3SA7*>6;pF2ltzNTAn0>Op@(S|QWl z^h&bwGU*pNj<4rH?NyjOo_FEz)Q;wz*D)R0Z>3V|3lzmW7&OwcNP7npmF$sDVPQBp zX}SKzM;@Ml{O#fR=nb4Q4dUg#5F4_5FWByl&|eGLwZl#mT)){n%9(%EtK1o&+G^LAsCp!2{k zAgNSc&eIsDA|Loisp8ugmbPTtt;2QMoJh+3jQbOoQ+R2qJ`Uz-C4ig;!kM$*b7cin zEv%W%A3ApOmxtJPr7=0w2zzS>JU(L@+1O``{&Htc#E;F!L9cNYmU1OE6{(GiCs?oJ z8jYU^p_-4@2SLYbcGZ2JQUfm({h}>2g399eZ|%;mocy5ndX)4Mi>DeDQWg`*HNsgH zAZ*VHJ+r4QMQHM&r$T%-2Zu^+eh!X%>H9ODNAt?b=N1n%e-JVEc_v%s2|m<2V_2G3 z!Tq+qUA_h{q=B&R2xS2b`P#)gv-Acn=0r^VNZ;;6{x zhQGWIRyd(I*pjXNAB%g9rEHV`fgHgHD5;V6625U-5<=Z zFrXFzVWRg!ZmY*L*eiggR7?p z48bu<;Oy1keC2kUuYN_gtKnoEU*t43|8NjlZr?^vaP0qF@?KAYJ=g>JJ9vz*uGuEJe zZO3{&`1~|Gbb~klQ^xlkAwuic{L0=i(>&oEnqTsj{nQU**)X>s^Rat%wY)ff&gMDX zJ+%_7Au%vnNn^ir?cpypuCyiV_I-ARXepAZ2!ycByo1K_TcY?g+)8%)5DONiFZGNM z>k3n&Z5%kjWf`)AjE3K%tG=z0(ia>Z?K|UreQ9cH3MsIHX&?fWuef-4Ub~j;!)q_2 zgL68KA08cOne$eJWMrdm$VA9ntJ?nzy*5RPxLABULR_7?7dostWX)BB4Ln0Z z&Qd_(sqZJId77J8ZlVh9_t}VTL<>vV`O{gi%Gl(&Z7SV4x&*hEB$RHs$F(kWtdO+( z`#kk||Dm@xMV9oGZ9fG*@qCID>Ox%d3yJdK`2<0ki9fC_4~h7z(45_C#VQqGWo}SH z${8{hlx@GMrgUt;L2+&|LMt~v2ZTn*&~^`qt&ImxO3ld<@qrUBXd>r4vH9v|W@`KO zaNXrK#dcREH+W7lV_wXU8X7%OPGORsm6^~>e_577UM+AsxA>m=X6UAhD~Bih6&+fj zDi-eCrckZn<$=YLNW_h4<4u+a`}?$$HBovoM>dBFe9Ic@pVs-QJrFk%v_UA2BQU6< zw0bQb$b93sJCwu1!}lQZV%}P(-sxgyEpaU+ng`gZDC|2^0F?>{sQ{de9-X*Nbl>J8dnsR z8W&FR-5Wu~9;D_L%gtpDNl<;?KjU;GzT_ zc7H@e15|Jk=%hD8(--M#Vq**6+p|X2Z)2s<&4AeVLg-01j06*O!-< zfLqYmk94Z85r`;;27C5SR$i>oUeHBDiPy|mItg8$&~SlbqS!8e)4+_Cwdr6iJD;Ry z>%XUut?yq%c~1u$aC!|(Ngj1jSq=WRZ3vDKSxcxKE$0B2Q}Fs*>=X?eW5H*(#GY9i zw06W5IKwGS^-o>i+ZNQCe#xQ2La^mKJ5xBFiO-`H-yqd+F4J^Cmh2hjrsmf;AY!JY zcH)$ZY0kMwD=G_PWG={LSi2D>3qCZCmO>r#f{JT1rAW zGG<#Jz*4|s(vM7|1WemYRaHW;A>yl{0K+$r%Wx+d2+ClzGjYrwSOs!;W@moZk2w;F zbT+G%;~*h}P3rbnl2x9l91Y%Ay{+{z6W5&Q&1YidjjE3=Ebo^$ExyD+nBxD!u4eg| zl37c9+WGe0$cE1P&CJwAGg?~=v`=+BLO8{`iGlJ=&^ySSDaCigtdhyRy70FSs3r{% zo<2;NhRqd-L_b;4A&`>-3QnG#I#ihJ-gS0%Mk>5D?5D2R0Rs*qgnmF}Ji_xC;H;g7 zxr%t#nFD35&+R`@XH)zAkMX)77<$m0AnfFPY(8=%sDcsh6Zn<_)~0}Vk+Cf=Lt zbyIzT(~=h}6P*#$lIJ_UrX014IA1$-zi_Kd1vX>9&bi*e5XwH;mh$ck>R)G8%2aXG z4HA|ThnvW~mnf~7%0rs(7W`$(p?|`F0s$J3=Wb&6rKI!Lr%rad=IkSz>^cj zWk&~Xj!^W61Yr>Z{T~VguNuOOWQAlq8L34Mh*-;_#E;8N2r6FtK{)~z(}((j(F_@EWV}xK&PVTn3WMTswpj{uDL-yLtUdQV`V%a!)oyT(g2 zk-%8`jGZbNJlAw};&N1cb_})W3HaRw%cP1>Zat}jX-5rR=-r+@mTRaAUUp_LS3ClZ zV@-8Rp^1+lZd3V77dGz^^a!G zu6=pjMyK!w-#cylBr=-Xo$D5YJgd$+JX#*2lKtU&PoB+m{Ji#5IE;6RPeWSfqm3xn zC8M|3uf4STUa2xK(I|Z6z8wApB^%RPs#>&{}+n79YkDK1q%x}gWqj~w`1@bsFIlj?n)YV9c zkTyDgY9v9gtiE2uyCtwVs~sv%0L``EVen~R{THo1VE@21;|$La>@tWF;^clWu#T9H zz##YGJQ)})B3OK#!x1^#w$?~V5Kqg(zdbK8V|Y0^-Rtv`H+6$cvOU7*MAE&WV;C!6 
z(1M{c>-jfCp0xn?bzGh@Vyssmm8$jOhY3t_h$75(218d5cW zkLE4Qwy@-v&9WTjfw7@xW6}2hG}Iy=xH#qJ=5@!qR5xFmZFh4JatGV8eJ5?fd>ehJ zSJ%??kx$Zfm2l;?cBVjc4PLjqbM({?zrTl_SN05T4e8H6CL-{YrilLx*xx$5l3`JY zH62GWqVw7%Hl5!6zUIySNOx<>)oAfFnezaQ9u`9JE5;WOO}Jy_4)PQu*uG55{^xQs6YLsLv5OZJ)ijwcB~%f3sIaI-xyQ9 zFvMr3plnw;3 z<#y7t4l^BHg1opR57^@LAc(sq+bDWX<@E;lL0Z~`GGsLmvi>fgjMH@{ z&D=t#A+T;(!ork|`E12P5v&t#kLGTLLfb zeo8972F=hN8mdV<(Iq85SLRn1pWc&CqDI6Y>1s@J2K8C4LIOtKN2y`3`GXix=w2P&qldg< z^6dFqez5^8>-e9|^8R7DVHK%;G$x3j^mG7ae0^(~Ul+#>_p^fAOXLS-6bz2cyjBsE zk+=MnR&JG3G3ooMgvBTE$Mi6!1oAIt4SeDO-(*>erDNRKZRhC8PQ$#RYK+z9WLx_; zI0*t<=NtS~855RYQEa|SBK)tida`z6l@i3WQWAO%$7vLZx`wbH!Zp72-c$dMHvSx+ z)x*t?btB86ifg{p^`sV2fn0&!XjMCRwoq@5a|yv;DKLnD+rb3?J~y04ZudWAAT*f! zL2eHlw7M|-+&5``v;ApBIY?e8g%BpC9JgTV7s9O&nOIV^=|5#z|&^?FrxHQI2~PnZ@YZN=o;IQ|`LEUtvrD%Z$_Q{8&-z z#eY#^^d&xCUJ7D>sDo#M2O0Y9;4sQ{Cnlp9eYTd#`OqAKDW3*OQsHuPazZjV2tnVi zmV^2Yc%ZSo&iC!@kJO#a-+!&@8K&4SvTHpvC*9*y7cr>mfo3R#tIEjqKIMI9CufFR z2O1!#quxkM=jHSA^2T{UTm4F67ZoPA2Vc*LZS+x04vf#4y53^XAPGiuE8~Cb~|@K zA_$ke<5a60m_S4eiO79G0YF|Ig=n=nq${{u-_USx&hJ@Bc(^dY^Hcy6hL0;~{wiyT z8C1pe6|qe@l>g^9`TYBdHj8q!#g16N{ylk570g-?$jV1xP$}fUe_;N9`-2h_)U*c2 zJAXj;N-z# zJ{qj7?hVKZn5{mVF0wNo z8vn}|gbQ+Z@PKwjvp%My^MwiCUc>EYv!D%Ie3MlQfVrif1Zi_~1|}w^gUyMEyN`vf zz-xu2KSv+D4fg}(b0WLDuc2fJvj}QwFbHhx72{?dw z!&nwt;$fJNvp91P9eKcJys^1?0_If<0u{_f7+SgPjt|S&w zvWb$Qf=D1iaET&Cfo^VYkZ1b(^X9pTaLPC2pd8X6%@{kP6B3XVsweJO->RIGm0mWx{L?YG`Sbhz1i zuW2^KOn`_vir<|t51??!8wE-gJ?JNzPgg~O8)bs)x9K9VkdR(sXf*+Z5=fk^$TSMw zT8LTHbp5Lc3PpWS8ngSNh}Svy|Ca?)nIo9_zB`_`<6w0FpqZZjUw`T&Ps{Ymx%xoF zgQ)Wm5EdDBM!o{p0R&u^+BvX>wCP{d277O3EGMo9gz^^HEK7lKg$jLE(iB<-;0p$c z?LMS?Jckz=?y4LG`rxW9GX4e>1UvoW&&C)>i3IMwlr@u#V)jPn070V5JrWZ0bt}`@ z(NPsLF9z`5-tKV=^GC9uz`ml zg>Bs2mGo+vz;I!7m$|$R^h_4h!CM|MuenD`+6CjqMtBTwy@ae(c$S;Hx&nr>l=a|P z*kNUchP*J~Fok!`037T>uU{keU+O*zj{y}FBsILZb>Pq;nJkOL3=8gB?{lFg#W zS{*iK30IUHn1`R*$cPGT~r~vgD?AwY?c<=5) zMI8wYtGW64r$M;5xX_PngmHLpuMDxUl8um%P(Dyqz->)1IXU@8TpYEou@RM;nwnTd z*#;cI8cezUO-{vb>{~@_gs<>8NBoV-r@wfW=JQ9 z)8Dn%Ac=-GleG5YkQf~JCATYxqIsR!1V7)QfG7vZm_>HgAnnBc@`RczE)Z<;FhA&o zrOR6?T`+wBWp;g?-u=xbfp3N!o8@#iLfIvNoF6DILl2al-BAoq_7+R9a+=qtDodGN zPi@tGWar_LHaI<46*D%@m|I!NU@`7}>9|$Cj90B`x!(J;m>9a`5VjB|96$sU`#>=W zw+n;BiLnnT%JML03`SnZruzC%Y4u1RZ= z5EBzflef-uM=nndX&m*23$n z2i9}^k}253N;i&2?!m<8&v;yHtnpYOCAsTCpCoBZw5T#9Gk|M>A6)kwTwL%{bb^hZ z!|j{{0ed(6jqUfKV7;w+TjiE)J$#aCjftg@oKgYO%ovD*=+_)sege#cubM_Kpt1=h_GWGAu&y#iQrXk@&O!0 z*V6-8V9n5xY>#NUE5e4JYFw<=)Y2kC$Z4ILOtZx5c`{Lv0I_Zz5zmxC4Fcg~-=UJC z@vsJiDn<(wSKs;*U!iu)!#hU{uc;D{Eb(A}KnBW)85%CJ=Izdnr<9kMq)iVm+_gQhbHBAC zt^+Z>q_FS}0RAFdz(5;ALhY@str03khL|bB0lKj6ZOqgrz%A)RCcU@;-2aoOr{iLn zG9X?lSy})3szkGx2tZR=1wu)^H({Ax00rF5htHk`fjp`o!kr+o82N&)uC5|KoZY3% zHyM~_Tu@&?z3XS-4CMk8xFE_zN~xeKg^z8}XGVdLB~1hp0u0$?{1*qf2+&tkK6>;? 
zG?eQ7N-u@m#b0G2R#SyPATB;L%ntZ@5J0MmdEPY__+XhDrz zZO8zNw>&bu17m{G5-g^#9ih1v_8F5{jqIvgaR(??ieP+J`SSoA*oufn5SJ_SZa3Yp z^BZ|*WR#vUWV%>9qFHH}4G(BkR1`)cP|e@Sey3NTVuZMPxp?QTax=1`aM121q z92|^z3G@v(f2_Fs+CfRdbFvVMEcbxNTnO@6K<#Lov~gL@B4XGrn}>$}`i@XNdD07% zA2Tv%xP0)6CXK9*RoXw3{~>{tV8H@~Y6YA_O_$x*@Kpqrl)}>%pK)@YM29|wt(XI{ zne1BbmY57Q?^!}(I$Zs>*4DD6OL&8LphAOW>*k%!^r^cJ^fHAqw7IS^Los`<4>qkIUCi<7!% zu2Z%pq1nt}%NNf^0h27hVOv_c)5ZJ3g91_ezoac)cbs&s*^v9|>go(t5#UB~$f2q| z|2YN-StHk{P~3zVPT0-d49dEJZb6m%4dslRRB9Jc{nwm&zbzS=#SGCb=7S@YruK@1 z5mro_?2yR~IfqF{bd^f;L~Ve#_m?;=D5b)a1oyo?;}gKXEqRMJ!k3CSCP(G;VQ->s zOLlX0txrp#ud~(TEbe0O2FB^y#8~(%svhnK>K3wY{L^|uKbG!|h>o5YaAK&Zvai6^ z!^8Rv$ZCXcHrDA&z)y`dx0f3`prDpvenp1&Z*8A5qB;(r!f+TyvAu&sqS}X&f!1%R zdN>GnZdhn&H=@f#Y<50v_gN{E0cE~%w$9|lk%{<$Wi7Gt_4S22V)=1Lo2%9KmDWtT z$YkYp1GM?XCZ-I0-P$^YBYmoO=~q~Dc9~kEa$7k$7r*VJe?qhgV{R?_Q1rZoa%?oNE7I8D*dJ~ zBlvjU9(i%olQ-w(DV-hVt(TM%VmL8;dc)&&(7+l@raiPoqJZc{Zp)VKGEXpKI^eJu z1FoQ?g$6+4ilVNTUu0v2k4E5J}6St z(_=uJ`B3p!kSV4@l6KoOE?hF9>35onwbmCGD}_srj?0s6dF&d|q#d99un5YxR>606 uhTRwE>kE&U_DhK8<6%xkS0>529nGXG82`G$`~1v z2${!-=XJIBIcKf2?{h!TIqO;Lx&L^6>+G|CJNkY-nMA zjyuR?H?v^CGSz6dh+%$;{2TLZ?q-^e=QZhj0&#kI+d85%%ZE? zG|X8ib3H1GnIqW0va)iR|E<=QxmdT^G4qEPgyrRV`8b%CFJB&7z^}r=CrfSGwCUEZ zTb|=KH`A;Cge*=r8Q~`m4h~WG?#cA;j9xKh>0J@TZloW4IV_Ch(c{O0Vq)xGUS98_ zr08U9+pS9_OG@(cWS^bPd{8#fv$jor&z_O*Z#au5U%h&D{l*R2;0nK_BujtWMThM> zc9eD58Jn00+Wq+uBaeSC?Xt6*>~FB;`x^fE>C+`PU!KzhSCo}3F3c5GB)@1#(d{0_ z|7bVy)NQNXFfFuxJ?-)1$3wWKXt^W|=O4(`ynUNt`s(U%G$o+uzS6?N;_TOg9k{}e z#aIefHvT{PRpd zf5mWn5obZ{@stxL^-qpypE$u7T#>v(c$eplq^OwK$3#uuef##s8@zPR$j+`v)qAn# zZ1{pkWqCO*ugrx#`}Q$Ed-iM@7gz9wuLWJB-PP>HnZujJ#W}pay+cow1O(78%#Ujo z_XKnBpMP$%x0r4tNJY@m5IEirahPf;gV-@>Q!Q@z`Vz zP>YtWxi-z&mP^;KU*Gxj=M}vd7fechY4#mFnBrn!Zf-s@K0bGWr89_~SMq%QW;3KisZ(=5y@@ao@VR2yURt(*oL)a zTbd0trbg|wS(9c|5@uNB-c=RK%bs~qG!u_>O!VZ73;ekkzAjlFA@IT_@A>nU+qP}% zkY{Tg>8cp*jq@=ryTqVS^YGzA6C0aQT%LE*Yu{q8xo`NI<`N%ei`v-t;y%A8wtR?F z4wo{k@@$$ZKIY`KW2=Y=uc+VGmo5?oF2fvLTwJ23vc3mjr4c!-06_Y zlBkwsYcKL(+Oj2iZ?ZC$$%zvuJk?AWu2sHz6>;lUfRfkmjGDHuUlULPjEs!#%Q-!5 zG|Y|8%F2pQPQDT;Ve|mYH0O?9vFEOitT#7>A7WWH=i2b*+IB3M%de-5?g$GB1)a*a zP|?&3wVcJBn^;;hp*CQlJ-{McAeDI>z?}6s$i~fU?3{r&yvsLuJ> zac+5M(fWo4K|w*99XocsyDQDq)6=66zLcGvp|rKF_PBvTsOG($2@bsDZp8X$FijK22vQADe+Hy9|XW`0^_xGCI zeN$=&yt>L6-bL?VB{%xu!FuxLZ{HqRxoTBeOAF8Ydomj1Ki+2=m(dm*9en)wag@A^ zI4KW;f_{f%a8l5fn339Uabx8sR^#E#e7lYipO*C+jHB z@!gJ$?8buSmbt(;`0LjZL&Gqih1uTh@RaxXWl873lbg3}Q8|5jO_9g+>gLB2Lto`l zZ)xS^F2(;ZmkRn4V^9cAj`*b;NZdKJUnlT zORT~UdQJ`U2x&&sV>^=igs0nF9$A5t6?q%0R;^O~CH-Pd~V|b;&xyq6P)G zB!cj)^9l-7j~rRPWXTdzF|g=+UEfpFWj8+J94tH#jzS?de?WH{-kLeXm|!f)=8xr+4p9 z@u>`Bnhwj2ShIhAPx|9p&zPHk#I+Ysr)D?>1_ztf#jB92hc_Odo*r(~nmhXH8TR1v z4SUxqDXmp>pCE6>d-0D8CD7sF@Z+6dVxmy%Syf%Qu6Y$R+s#ecOzE~iP``#Q=ugMQQCd~S z$h36rnK$Ub~iQ)g&GwpuBcyXvlSLO02xRoLo*#q(pY=N+l&F zS@(&}ZEbDWV`Di`*3jF`P`8B(3gJKe~~Li9I!U#1bAqX3)^kcxF_x7VlX5 z=uy)eEUlFQT?!?GRu11^F9)bIZp*XRK61nlyYj(<2lPV2aW~`Q!qFl9`5Z*FIg~+dx=^?@Kw^`nDRXPZb^?R>q4)&)p*i*P6Mmw!hlj_ZSOp#l!@^RsE@7W@0pD|k$?(a_M4f>~6gsJ9n|2p5)N)4KD}y`Ai4RUu^(CG)gY-1RK#Cre6R zUhj#?F5C9PvVo@bmfB?_qea^R>-6*NXh@So75nu0b0&8484HW73W*CH#i1xg@m5V~ zBh%Btz?pazzNo+MO$?rVAL{ZQEJN1pSYm2wO4fa%R$Le!9%_WY@Q}y!kO_)wl$_I6 zAPrLUu*Twj78hQ>eqD;1Sy$xYQs_CmLL-c?46D`c^jq91D|Y6)dooMO1>!Arb$0q5 zj1(WW7Dg=u5y8!*)*dX^XdYHc1FSuU?u{q6SyZ$VzieV_8?NHdD2Q8Ud)C zEiq{?bW>2ixMBVJbs{3Hwzjq?>)+7yP6FFtH!tVro>3ol2tMB6$-#RRO{BfVCmfxN z;7zPQZY(^bd;(Z)JBnY0@JNrKn+9NAqipZf(psD!y)o;9C1(McTGoy-l$R2P5-e>{ z;An#NTrHlGmS*vL;%9~9U~`1Q>a}Y>P{yL7qEUEC&a=Q7+-UfTPo7*h;SO>`F?smp 
z$?LqAFQsvL1@P(q`$8bTth-be3X3$8F(tL*(w{GOBqFyt-(T}n|==wlh?KuA0`Ha3hDIxx9nTLBrznwz0 zbi^NaSFz(s3CYXf1J5!)k$U=-j<}*?%yLf7?o$^mEUGADAZf$iT;#c=*z} zKV_1O9)>7&QGgOzH4+8|m6S26&3MbnW^rRk9?#D=TrIMHsmaR9YX5uU9MKR^lU--W zHW?OsUeQfAECYm@9`2|HqH(?P7Oi1q`)B-b0W8G9E1S?JQIN|aJGA`9Utqxoj*5yJ z*+D~BiXJy}r-83S`}FCngp2`e8*?)PqV$R^CPzhaZ{EDw`1N%`1$%B@UNtl@W4@fb zk|wmIG?TsOV`ZhXV%4fZY-8W>5=LC#XjcW(MBhiccQE3K>y=&cMbFISzP8!iD9 zm>J~0jb|f>g5F&nZW-C~<;$gtu1;VTC#4hDamf??4XnV*W|41zzu(^4^7_MvDu@Ua z1>{Ju3`6XuQ_?@8hez=|y1}c+Rd&qnSr%ksH`1vR!nbP$Tg6CWwV0UL=#Tfjxv}`j zCr_W2fA|pQRrvh5W2*n0)%dv-dY469JLRb$^%Ni)FhXJU$&)AXX4PSyal7zp4&9f{ zG?d~~a9xJ~A#4AQ9(yX`;X^tw?Ty01+2;M@JS)Egs>!+x@ez9Dc1v-Mja4czE@v#|KTUtwT_RSiervQ@|Lq&V$^LUYcv` zBKd-?0Ekvm@hIC)cg9CY%K&yWoPVA68%TNj)b3j)OLKE`JOr1)i&#U4Kx(y?(*`UlE~wt<0|okw{i!-)4nN1>ZPmRY zy%TmF6E(%YxCOMXech|dx-GIsbIm>)#F` zAtCW8DcAb?&SI;os;fV4{6pva`t?3~28J%YLItMHSZ2jpTm%yH8naZDJj>*g(7XsJrLUj(1n8|B9_;>a$Pt&drozH7d~Tk zv%ztPI1uTPkuEjI{`#NUSLU%8GA{NUfV?>xvUG*T$Rxl3Aa4M_yt8q27(Z=|T}L=IzmCr%?u(+(2@=_RJP2|`9zi=7T296Hj-+b<@DUoS4h ziDoxQA=+`ojT=}a6DT(dED`b3zt9)m495#Rhdg+xH2wTbkhHe%qh+oimOdFbd{?U* zt6`_7r>7nILa_j6n|{KhM*)`N2T|kw(m&$+ySlqcB`pq0*0OAcvNQ9I)BcdAX3JuA zC~viQu&3N`T)bH_B=lBa=sO=Gmu7Z9$Y;$nI=zGGBdxK|D`4dGMoz}(IXP}a*Pu6u zRx4`T4YYya^6t}&^WJg*4guBC7=v7rg0e?lJ=5a-nqt4| z(rO9bp)xr|#Ueu~`@2z5@xbO_YpQ5Rsi`;FXWdy(FXT&HE2y7RSp$BB#_12l4q=oY z#6TT&CE$QzUb>eJ-7m=Tm7lkcip41VtObZF#~RDP=10+ffWE@KbZM`Avc|F@D|8cP zXsdqadEG5*YLZyYQF7#)9>h;;ywCsc`^9MdHx&E8D_2+`h6zj!4*m9KENScwyHb8V zGP3%*pc+wFw_7!`0W{?~{#*gvaNg_Ci4&{&`1r_=2o|+8z$%P8|M3BXZ@AI(OWyHK z&mDgr1)pL9SMv`I)r>J8ING{2PD^iZ-Z&dD|8jIebx%(Pw19y4_;vb)t`WUnji@eD zd+u~q1V1#dxdRGbiSE{7AJ@>((D&&v>m`4Gh6{1`0`~3S-_@4yKv=w~nVCHxAoLFb zz|e9OR&>>^js}EBc9!`kpaykz8b3)+R?*kzgA&sW;MC#L*5rL6z%3k@S4Br>{Ms5p zrQ;p7#$Lsi=c^2k9ixXw0diQ;(ILPu=LpWW@u-eYrd^lnlv`Vd30SDi`;0szy)G45Xr_+_S=xMzJ>Rn1QalbUad(ZB$YCQBr zCnm6rU%$1$X+R=PT2Ybz?%lhWLqg7IZKafocgH>b6;Cvd`wDI=(6VfIf zef|FZD%WzBCakhRr~=(}32F+Nv%i0j-A+wQ3ra~5`QFph)zcG*#!!K_tti=04V09L zdWMG3AjiMgtgt=jV_jV-3c={qFTL0kyD!`<3@%V|VDCa%nrNwp zcqs@mB{nwJ)93+SgWbi8s*aA^u|xZFw6My%^%j0AIrY`iM2Z`f<4&y!yJcId-aXm9T8wryLHpP!#| zXN!bHH4zuo)z#-`d*eKn_mxb^K1)eq?do3kXEP-^I z<9z|lsrk|6)~&~n9jp92d7`y22MX|B*iJ-;*rTCQQC3ES;#z@bYZdC$U!P2t5uSmh z#XD{Y^^UlInG0>DxN+sy-{R}nUqUf7g96IT!BGWBpNXvi%E}1ftlH!>(Rbw7v16UD zBQ_|c8cp&<*8{0DdWBb2RT15u{iv>2F;mH2Q`7Zm+JUGI#8Vm^18a!A_~F9`1vM9? z_blQD1p%;8;)!Em*Xdg-urKb1z~s4{1wIuqx6;MaV^(k9y*m@04`M(&zJ7PxGpsPVi7n+p`jjLbi%^8#e}|i@GSi(9%lC zeepu$`0-URT4vUcV`rY$%1_gTTb6O|v(U3s*^Ka(p6U+b^{Ya{G-|llTdGjXc zP+Pt~yf3w5$5wV%h0YC{<)!+-`0_<_seRwxz7|Y)GIpgqo)@tXYpqTJ@1sMH%wNpS z$$14yadwJ-rejVOe;*7w$!P=Bo6dk*cvhanx>)1QU%q_9+oCO@KVN(YEvXtn#4Ryu zi-<@{&OqMIw#*9sl0Q?tR7QFaSyA{K0HdS)RuBQIZ|`^nP>!2aQ&$h<9})zClJ71} zsk6AHOXjE}qFFZBbfRw+yQcvJxO308ZDrPcWm@OT7QoJwC_<9p@qH-fP@9mQ<{g z*XiMwWZXq2L`2zLyAsd_iST$j=X~J(`+RVdA3$QFE#bEM)X<=gl__c4u0UkSv9U2( z*OB2#rp8a znVK9PDN}l&I5l1 z*5Us8Z@9Amac{Et{yKtN)^6P$af7RagVx{x-zu+g&VV>{LTr&{ujqw=_fY-X>dRNI z_}#vJJhfZS>gv_2s=#_PRoCr8p%KDH-vd$^qhnAHoqj#3Z5dfv<=A}j07n4TP}sY$ znyp1bva_WZX1bTUdU$k?_0$l82ff99?EC&haZ3Da zX$CJD2|aJg?#;d_?t_)XN+1cu45Sk-p`4nQ2e1s@Qb%bog(;lrI%;~x(y{`+2IOfd zi{&T>Qs!@05`1RZI$w_#$AAYe$>tB0ZiA>O8r3TtJ3HfjiLT{3en zi;)6CKGR!!PsvLT9B~9625s+>unC`><7RlST|a(QEi|{n_~Vt)g<1d${VFNksOzp! 
z*qn#jSir0jPG`Wq&GekTm|OGZ%^Mab4nE)DV5SfEcliSkKR}gq8E${&N5|AWJ3asB zkNr>^v3&{2!5eHYa1wzPRh=u&qSpjOASf;#7O_FY6ib{mbwofu0%yqSd0Ry*`zS5^ z9*_v(mtTETNb^!=rqmKhFfcK!JBkhZk}G|~zn0v%aRci$zH|~iyoc8UM8E90Ma`PX zjJUG&h9oV4?Y8ZF;^N{_JFHg2@I)$MgCp%cu*AvJr@uk=jKX(x+)u-6?&L|=y{G6g z)d@Z`c=7`&TeYONx60RJle4W=;0AKY_u~_(dj6^$3(DQik*!CVhdgCYXKzD!tTm2&U+hX0{zLleVGAjhm;+ch@ zs%;PvsUg_|dlwhW3vGF2GW_L`6{6t4ZG%^BireqlPD{j8)R(_k1<~;*aB4QpYUeMP zzSQ4mOcbB_zH%0#wFOvINKg>t20_6xZXO;fG)OajNYs#Ccm5z6^RF*C-Mc1ZOcq?+ z7qG4e+Ah3L=02~}YBjYZ-NmN%T71(O>M)q212P(4{@$mY^WbZGM#eGCjxUHq7MIq@kt28@Xg{S8b7$`SZ{olK9Na1oAY?dni zjf77WsOC28mqRF%-0)GVfFCWlhGb+&CZd^#z`uByl2Q{89LyTh4FdlTT!R}gf|1fX zgs$-p0zWsh1NDQaA*fGxVP|LStY?vDe+N;D8`3q=XwS75 z?&|*e^UbZu$no0%fa^Y|8!1Ea2_gcmqm$EJbPGw~OtTtPZ7KZ4G>4a$OrN;j#E(Jy z+!9}yaCa&B>%V#PCIriQgShwy87~q7io!0-NOyw6=Gv~4$rFm7jK<$&1 z6zLT!RvZE!x(j1y?_3c$6Cq@dVMMOmIIt?_E+p~b39R>a%4LBGAl8^1dOVncI z7vA-Ke0Z0+TMe%e8-D)T&uGp?AqHDQAUa3XeY$AfJH& z5~0S0Ky}3)<|Ts8J5*GLs#V!^%+*2^Ow}xF3Cf5le){sV%{SWVPawe z2A1Mdvuaijp*vqZ32?@bXE`6<5YPe|MGuB)C1JOjebyErkdDM9K&*^m7Z+FJb4+?J@b97>+^*OKq}`z z`~uuJL#uL~?7yvTCdnem-V3^=j;`0SQt5liL%gSxnWnF}V(NE$>z5lHzI0un4)3iK zejD*8@xF2n+o~aGd3`ZK%9ms-1vS~6XgKlT0o$c+Q7it_zc{XVt@|V`tqK5CP*`}n zWk8Mm#`o${ij;9|Hf!R>lSaW&#-=9w*{1CzI>&sPN%c!)l;G(Eow{J&8Mh2smc@1mhO;DSBAMFP=@kay?Tp8I& zXnPYyYU?%cV{JTr`V=<)D7l~QG}{?RcT&b`mQUDR069=;2DH7y;TJu)}nfBg6c z`v$Ne*+mf+gF3h&aELzu;ApB5+;gZB@Jbk=H~8&83D%pac`pbkYBw|!v`zv%D=QbK z8#NCd3V88C239dU-qy^_Old&c~so5*6@KV&XSqmHhd= z1L=|3x38+I${|xAAQ}L40%2xky2FPxhaIcnQRWfWbzM5APcH#Z2nY)DKCTFh81W(v zdwVJH$I2JQSy?Mk+h4&np;syf$Y^YAeDdts9t2=;3HW6{M1iu+0f3AV>O+-Vk4UPa zp&?QHu<-{%egU)9eg5q4=SPE99iXK45_tziODb+`I(S};NZx?!CcwKe0vS=3h`^Z8 zFN}p(%y6D&qbCeFV;J;yUlG#!2*@`0CFh#vc@maIZ9*|+|NZ+nrF7VS`P#K%DCrMi zyfCw}`U93~86dt%DG6P@gybJBEYvb68i*l;Amk~yZXHEz?5nFp)&UCPaj79kprWEe zQIJ9P;nx$%6V<=e{r&s*_EUqWC;84CYM-?-(O{H)vK= zSV(zVJFWZ<`WB_=u@j4dpgZwiB?;Wo>u}%G)c&Hr{(e(D0q_7h;99cAU0q#`0dy4h ze*kVE;Z?XwvT|~!aAHBrQk!xKmIWY!#F||LY;r_jKLjprwmIBx*R_^N$iV$SJ)r2= zw+v|jlk?}VwzcU4U0yceLaqhxR=>oX9|4ncK<|j_*Vj3|gI7i3ED$w}&zzwLC?~Xf zkoNC$=fZQJ+zGE0J*w>E$K_}SeJ$C-79Z}v(Gdd52HPfTBQ)_kM4o_XJ)Nx$H33l- z-Uv`r-gx^l<}m*ut#Y9I7rWoSbqg^~cC2)OuaRd1tuG`X2Gdh@A3v%P5$QtPW$38P ztHwb-C<>)~H~eFgqFuR?9!?;#5~}<6?|z)xC+;V5bN^)7Nqr!288RU?w5XZ3yT9KYESJ0(yf~*0jUHHnMcn(a|ZM zc3*!g)aA6aw37vnTY&yaVq^P(pWi9qFX; zPzG^B;1fxwO9cgT3JMC%vs1WPtfT;RbHVM~SK_DSH9%ScK6C5mSN2vwQzarJ9w>Sk z6M|B&-@H*ln+EiwDEJI0@UOs364KL`3JVKALzoIHY6LDFU?YN@G}Oq*NGa^63;^dt z(c4)_$`tix8Sk9AgP72!O(w84QEgSAxFRs)+W)quMhyw=x%o#)NouIC$a;|rhAEAI zU~;pfoFY`WPEv9ig;-~^#MLWg$l)y7uM&E}E21jTwYRrZq|mUf+0Kmho9F$9Ft~My z=K|$CFe>rp2K~qWK)SFta-OCXh?1nd(ThlM2eO6+_}t5OFox1<>B}f3f{<-cPffl& zKPS0kMKaX^LIMlP) z0wf$h95$k(=or~33Wz&fI1eaa72?FI{b{YyYb^s~T&!11YT)kc&3}{$bSlFDuz&yJ;ubOBQ*o5VcjVJo- zvmX77&-Ud#VbIZ`$q@Q4lObo>)(I|3$ji@p1d~Y;PfjdiGEwrWQv1L8FVT-6i(xOS z;w2)S1U}Ls|LAmnK|y&{)enzg6<`?UfFCsTYfo(UtgtCco>l{j2rez%O_~C}3c;=}(0*U;*F!pZ?xV zBZ*QEHfI7e zG8H{%mbtsTOG-+T1g$JznD!h3UO~_}Oy1nslyhenc-8&;_a*JW?IE@*3T~8S!j#oT zAt50IrFEYCBB+twZ^iEN>bGx~K+vefTAykBTI9in9eg<~%p#Is){a7>%5&)Dco|~C zja69zodU+!Zj>t{(v8XDE^z$0UNQ`6sZPAFW|Hc68ejywLDXp-DhF$#ymtNiGDyw%zk;X0 zh}Y$gpcJd?>T>b!v@S)wEdnJJ^#`d1I$~WF7c1!vA*7A=><>u*?m?z)htjP(cN(qq z&;f}p1(4Zd4yB#EP~2V1Ht75c;n~lh?>TVbKRulL_euC zFJH=Eba05ccMo+hC;iJ>vg&po02POPk4x|{&DkN#GiR8Kitq(gDG82PS+PQD#}e_c=o;Bry2A{hRuzq- zb{i&SV%^aTF>dvqQ+NMzl`_ne7CUB^I_F~0Hbpc>95!8fq@c@ z5FiHEQ4T{2Fb{Bpb|iBSXf@wcCzEKrKnhh50|UQ=B`0rz=SH$5&o6vkLtVdqox)Do zS+))95Kp|e8)8KB9R*+#G;5Y3TNv@*U#4R}PIN;F#>(=8&YplaNcloGfq}vc*VB3z 
zvU^jD*dJ&|A(kykkj4CfTBddtyN;5CeoLk|S|+g53c155Dw@2Yz64v4@Vs6!o;p@i zRjx61WXAX}21w1I9pKD#BbnwRJ9ex>Necp-sI@`<0fuNL2q@)wzdKyd7HHz-`x`e_ zP^JF?BuFtE9Uc7-AW>E|*7tFP=*bL<=%wJ}_MN3P*u1^hrWChqVS%}#VPPSJJU{Y; z{zQ)&7zbv<+D1Z(qLAMOYpV(y?v70ufK^567}SHo?K0@t5=NJxd9dHDc^Wtj?q>qtI&J0l0*>-OoXDMkvhCX#=}->!gE zDu|Rl)e{zvf_^+fVi% zho7Z_?psz?rr>1LQ5>TmUin_UsFKY>6p8ji zdc2HOV?5?fpz~FOYc)^e$Nn(0pv{oU8f?gDz3Tucsj>)$fXGNLR30b}k660aia=c;`w9`4gz7gp64e}zSQiQ06gxFX zKXJ+N}SWkbrkYz}CxH6K+wXVm|PF zVoso}BOz3)L#z!%8pxC)5DDINrZ9)y*FrZ=2-Cr!*O)rW@c!dod=eNG;U-q}QOBR3 z0*9@SetHfsLuXn7g;%KSo{%rR3NkF?;JM#z5W)@&*GhEDI^b?qMOb*V*jm! z%zWY=?dHenI7^S|MMcd5r$N zLIe?YkcmWnFkyhNR{==Hx`*<~Y+NB_@&?(LB2@+TT;ZSP_W@_hSU4#GO}SB@lKF zEWt@!e$)YcO@CW6%#J!(PKXAQpeZ5T#9BC9i&<33@Ck(y;UBU}=&j0GDbR=Uj)L3`Q=%L-P=N!#3^g?yfM5e6bVWhpf+n z0`~XEEc1haNX$7}|4(^{(fvI0GiOQ>>_hJ)5thJ$L^~8VMDgqqgN30HkBBQF)4O;3 z*3BJ2r6@^hq_1miTus85uw6Heu!f0X-vI*PQItc=>u)=WB)a{~hzXfsURYQlPaK*i zMDNl&WKIw4hIqG-Bne=U^`4i7PuH>y_>yF}U`<0)p+zCbn9*H>^!RvRv{oXjVS02g ztRZ52#^%$`P5ktS#t9wN7__Aip~{7YIa?sq`uwGuD(B8^VPj{P!l=dkSdD~{z7}*K z3~>@KRKLLSicLb#(`U~r(GQ742HUFm->4LJ1));F-bibalI+ZP?jKwVGLRp{F6@jY z1b6^%Lw~ah{im#%GCg4MPZrgjVREw2HM4XZa6$mk_K9@EPz=mK{#@$xK>`^*W`zCF z0-Dx{P-GmCk3-{$Y@D)B7`$sU$brPGMw=wxhd=4Y4A_dN9N5u|27EAgR7A+=3`Xi$ z432o!EOOO5Nu3*FXrG*zfVB2Ldq~5U=GhhC*5?$^2p3>7vg|WIkkahIiD^{Qp zv##<$tk|CY8K4EhL|Rk!*6*AG1taq4R54=dD>ylcypQN500a|8kNog!cSKKJfDB zwkk0zjw~oHxp(z+HPt9{*XsNmBb~=>1MIae{(hhLZsx z8#ceN6~MY3$VtVRDCy&t1#1Dq!FtFzwe{WKA9`K;>I6|nDo`zn=m6IBQXG;J@^VBh zBB57c$PI#mDt%F)(W8imUcP*}6oL?{J`x(gzlppi9!dZ z?=-71_+M+bc?9pTu8Pj&ShM|dQd=0mJkiUMJ2J(jk>?~CQA0{!p<*4ez6_jV89cF9 zF_FI-p#T^XjEC>b(g4Ql=U-$%SF>|={*ix!>71PNaC<0PAGvFgE(NpC004G)6Idcy zH2`xAVdGZTC@tia30rKZ^z8X0OFy(Im=wfLBydb}$Cq;)I}+ z>+}$R?S6rOSCUo3Z80}j29w3@4vTyo$hHE!RdQBDtSll7dL{g+!b9WQ+4B0(Js~Xg z?*9Ou+`Nc^nt(u7uJuw<5e8HFQd>{};8w1JvKXI`Km)BgDmwZ>R@RNAHtcc|^}$Z) zf?u`$m_J~IZ!PKwKrO1h21II-Yb9$QCZ~B*IHt=L)grd@L&7D=CQ#?D{{AIQOiW+_ z*#i~jw&zBYaS%qeF9CQf4J?_6dV{&=8jW{(@tc3w#aRD)62eQN4w4~H^*g#cujO^3r4h-3}=Dv7PcO8*4Y+HT{NOReVO$~L=$KPTs=py4_fAsYrHvPMRA z>^(jWO^9jm4@e2FUIjECk}*VQa9wj5$=a9y1?*%VlKSYCAv4L!*&;Gr1b ztj7BV9SXunu3Wu(J?;yxigij9q;~89DI~~nhy)SD2*U)7CDBl`DmLd2`^fQfrSZm z3oSe$F_95s1>|5_sw#|sPF;%S6N(7QS!3XRH+J}s+PK=AZ5MBr0G)wZM)rPbAm=JP zd2-hO6`=*aI+nDryX;)6oW0!R=4A|u+*e^P@60fUed%VJf61P$TZe~82nBI%QJJTP zzFs3Ei#wG@kdz2hjxpuYd3SxL{oDxOVrL*?*#_32xLC$+K%qgVe_Pi3~l0ff?8j= zZcI3X>b~w2X_`Z@g;5?M_MW3Xh2MmlcUg|J6irMJ#SE1U32m~PjE#*)QceGI6T#5< z4)`Ne)z*`*Ost+frIp0ATq9IZ;Y$ULXkZ!_8 zuyw2u;f=>=U*E<tzdT6c2|2Vn?@Qa10NBL8m1(k39 zD=XMIwg^QNz2SAcQKl&aG~ubQ-^g7(z+C}Dkx6SZSAZ8g3d7X8IfDkqZ^zc+#Yb!g zMYhg!D{v4%1yYves2}LI^M9s0jE>c!x1)a<4qQ<~@KzN`V}h2(1C4Z`>*3j<4uq0K z2IenFLPYB>L^a3Zc7DiS0K0ql?5VUgy9S8~w-a&yeh3`De+SyW`T4V!oYjaMV6Kpy z%z)fVFhnu!Nf{xii*KG~`q({;H+P@Qu)Msv#vp(i|(Q!0S5QaF@b+A(Mrm4`o>C zRz(qut0C`W0AvLULYJP)^pF9Rz706z#lRWT8yVC9VH(^PWC95Sha3r$)G5L}o=ymj znVN-PgOdYH zYH&%*5EdYl31!{N>kd%pG3ye;z z5sXiFVk;@w6Z(O+2XLfLI2&TR&VNARj7?3;xx(Q&z^bL80KoP@Ng^Hq8g@dQG}M_! 
zrF+y8hYgy+D1kezn|*Oi#0E4va#{^~4|YCDhg6ePx$EOfk2g4R+l5Dt$mN5w9+)AFZ z5Lp6&9Lzv7xp!UMh_ygb92^`(@6h@LE@0DE=$8FqwISZp@{c*b|L}ESS>sv)AcNuD zuPexwjNM{kWvxKe1`|+Apx=_j0h+~Rzudc;n(qAzRK_&a?tW1nD2kBm{e-t-36ax| z(8N8zI>H5dh0_u!3V|#fMpcf*A%)?t1XFJ~I|v3pf>ewXL-rt!gf&N$SzLWWfAY^4 z9o`k+UR`Th-GZG#5;y7hqodb=-H~(gaBG5!iu@#T1Eh^43%NMJvsf<<7M3!k!-?hZ z9EaQj#<)h^t64nxvUw2q0n+-1T!Sn>$S5CJ9BDdu_d!s7Ovk>zAtL}d%^?H=5KB?0 zmc)_#f1LgD4Pb~k_b)vWJhHACz%C|=0FqQ>XqzHutzbksLd^E;S(2Wa^2SL%90E!q z8#Ka70Z)Tl;%B>Ib>q3cc zS{LM1XVXy>3^PlOx2s5bC%m6kgL;QfR zDhLNBtp&ZDT7=_9Voi9_C?M5X;sIgR;QV-9su0O>-XBT4Kre9C->m!I?LhvO%y{#* zM_%A$MEi$`j>X1~t=ZhtGFmSWfnQWVZyC%!BtNPlQ;i_hzT}?|QIo;}1D$<+mx0sC z2qZ*868%7C->x5o1nXTlP2VrJJyKIf)%);&kUC-&gVdq3WPMFVJ0@qU8MzF@{m#VO z$2oZ?p8NudG=-2!UOW^f8tUJeohXS_4b>sJo4e{DKn|*v>TqjPYC9m{v!*$mf58oi zm4_opz#*6@qQAn)Nk~X2yOYvlJ_CRUvzcXZFSKu>!IK$a0`6yAogHz`Pe+?;Tp+ir|BSr(JQ6l+>0%-&8Q#H<3%M6!85SNNrQx#)T>v!1LYVR4g=28uyb+2 zuu@=MtpkD?WZVL6k&c3^dkK^IOD3CZ8wQa0cg~4KEy6J)NhAwxjPM`$3P`RQPKOxu zxCzl($SI8G78Gz8&8@em;aTCQWKPF%)%C>;$R`S}Bfdp(n45(KLXM3CRRf^V*3t2Y zx$-|wOtrp!gtw1%#KteT5@-wamgRVk2(dC@LX4abq*(C@(F9E0Kuu>QZRoy|*BT6r z>?Tt#zdUf$jp;^9d3kw>QV-6Wlm=Y^S=Y>3El_l#|E8sJU>}2LR^Wq*i;KxMgKxCT zDgBK~or7&iBB~gXCgZcj0)lPz<6eOf-~hyGaxM{x!{CS@!Ofd58>GYhB*Pt+PjV5O z!T5kG(UWjM9)TjTXN?pVmd?lq)xomH{321;LAR0SCg&?8VCx_dfD_O*!wRq)|DlOd zUBR6@SCjS!M;S{%`DG}0q$$7*5GNC|hf+QZ^Ne?I6{GNz>bf_Kj|Ghe^*uAyf5$Fa z+0v#aE+kDnE$<;%lVw?f|Njk$4;$Mb^gonyW5B$2zta|3NIu9cYL86|xWEs^Ohy@$ zJF4^7FLtah!f1WwN14zYGjOaLmaGcODID=vz_X(`W@|mdl6XI!h(!`310QC6?JG=L z6GSd?FeFxLMr$FwM&EF=ueg3jC<@q75my5g z1c&mmVW*PGDwF}JdI~*|zlkx6u~HLg?nH*>pNvQ$Qc?HxxCkXs&5l;UNrENY zw_{LE7@owqy8T69zzUqki1)0HkqTooGk7BfMWL9lwXA@M;!$Az)ct(=a(ijD0!0IO{lxUAIXbW5^x?GZi z-&;8$0YYFP5Dk-D>j8y?wPGLLOa~eFltM7<`=Xb7Rd2W|X3=0LJKag%75J{{qLb5O zt3NAXWAY)INfdzXQaQO4A~W}Gqpy5{1hwj}~H-lk{1MsLUPP6i7Qvb(~!nIk9ku)BuRbCtsTf_)s9RMTaQotf|iE9%Q{YG&N zA3hLv05cpu@_8KjBlbTC460;Oe*keb2_KYD#6&`(3_6FOK-Tj8Y;9 zogKE-O(V8bU&ox>Ct;&FS)bmkQ^ zBreEs9*P*j&o~1H6RE^{ka#5%2@(<#m@7 zj-kM+m*Ejc;hZHn`xJ#^r^)GfU(c3B3|IW0l{fUX7G(V&#tmohlPDrq5C%&2V3TA( zSBKL(^89V|y?YfP7SJu=_U|TkCx`{|x@p)NQs}jGm@~UM0zrzDQwaRr(iUt;ixfYV zFGRWqp&B*F^d#MbRXKHF6EqSv=wZZf&7FqSbKjDtb#5< z4*MZTP2z)*w^W0eg~P5W=q5nt@gVoEn4Q$(Bcs6pV_+sEBp7?M4PM>)5sh>#zQ2M2 z>0TIVAaPCu0|SJYS#eN^BLGFH`1zGy`1SeSN*wUEP~fG{Xu&^~nI7lyq0 z(Ea8ZDvN&h_h$`s6nO;0H6(L9WaD8Jhh!r9p?CYicZULxc~vW-W#ZI_&Mh}#Euw>{ z!2>~(9P7IC+9qU4u?)!>lYo-qM#cK+UgxfYW)svwQg9%ct~hKOVekx0!9knP?jZ{l zcZm6+QuJUjeKq9Ri*j!x|AS$e-KS37HL$>}1v<-AFc~}l-%VdQpQNLp8JDAP<0!3a z6hXo&(aD+l9k6*Z&B+Mo3W$Pa;Bl-A?aR`@99#(EGep5fp+{77b#n|K4-X3%x_+@I z4>8+uSOkpSX0Ho4Kow6NApIItNwnm#g8ck4EFoMmMswGlNYkHvl}YOh{T-kw194R7 z1Dt?EuRu2JK>tvF)`MoF43If++DH^81}*hCl_!xj#4$>Jh%0m=b#NAgtk6YDX0GlcCr99LrVwcB zyYUnO)t5J`BL5Nu_WA&zm&o^MIxWp)c;+8+2!QBrKnRLtQAlhX*q+dFjQHb-+*x&d zS^=N?fM6T&BtUpH&<+XJo)~Ca3Soru1)wp3ltl)!5TGLpw0q2IC8)V63nO&6_8Kd(h{r@R!8WVJS(VLd!UiX;Y?!y zPY|`XZQHkhKlkScN8m@KM!1Q>LX`Ppfh|Zq6|vl=5|ffx2RW>PvT!`#so~J8SFU6l zsiMi8ZOPj7SHHj*ml{lEM)o!I|H$QT*tP4f=61YTNr-nyOGn{=MFa^mz2=;O>R7+B zH`M=vO#zDrMK61eFNO44*jj-SO@j3AN zR;qpVHiU%Krhj$c1Rqr?D%uLH^4x5dU_Quiz3vlsOt0q)Pl{I z1_pT(DFV&oZTZuI`Ywwh<1KIXh+vaTk0(nCPOka3$f!*lA@&eWrSIC|A&A?LiWMqg zz9Mds4^Dee$hQ7)1PJvE6fX{)218XrUotW_epg+*4N2&;_pC`qQ|g&c4F|pF5(ur@ zuxliNy1BWTe}X32kRT;*BczH#)VL*I^Y?oh&tiH-C3*op;-VV1AhxzTAxe;eX%+dy z%y>!3LKr7?A*aIN{XXkeG zN9nL2KQnOm>)kG6=BK94t!d1|NXoRN-d`HLXW*bgt6O!Ygn*e)aZMI|IcH0dD58FG z=KSA~h~X2w6f9pPk+9o7ehA}^Lgo_-CB=sx%gc6{Z-fWM%H2mnpf$v}4;!bFSkcJJ z^Z)>ygy_3?_BJ1l!Ymg#V?S0jo zvDfDxykKXk+abok-WqLj2=*t$y8=(Yn=e2H@qg0%yac8hr5D6fF}f5T%7-_&ObJOy 
zDm!owp>#}LHf_fPO|Ka}3;!M0pzVhL1J{7JS5c(kaZb|{0z9eyEF&Btz|-|CxtY}J z|Amb1e~1lDgb)KGSc+d`D7YWr+*hk@0CcK1Y81?{&f7LjiNMK);MT22k7h6)@qlnI ze4-_z5xL7qqJR*IN@qCgupm|fod{!^!GpQen(4Rx6A9vR}&;qhGyq|VAb&ij7=RT1VK9lG&p22==k^?lGx5_(<(*q`a?}#1umUK@)R`_ z=PiKI**n({8Jl6hkv`=abNC`)+swAgX5M?NwrbI=v;iCwUE8U zarN~Z!oq}v>lP;nJD82guT&JQwW$>+;_oJOY*|&qmk1vO(u91iTm`LU9bR-u2G0kV zDvkhw90$j-uetatz*R+Gzed4bdYA_epz^Ap50Gapr4Gys1`o`kChvZrC}qTprkruk zC_vmLP#I-?I|g3DFi<=aT8ZDQJ0Q>fo`t3=>muVhv8v0~nCPQ)M10To^^nZzwWW z0sraRd;cfE>B;mJ-)b%9Z&ISsl(eq-^ zdjo7FKHmQ1?a`)d`mukHb=_+fQ71ZJlSkswb#K>~7F$_F2)(jGq$)oOHG(Xy!D#0U8Ac?Bz@CR{7G%EGJB7X)-vBMQ zicC{A9_hL6UDFXoGAReD{Z2rhheyDeVR6+~Ub3B21rmI~a8RQuOo z6pArExPbvzUhcKye!Fhi4RN7XfV|M@G)`(g|3^C$w14b7*l~P72N7ZdrANa07n1Yi zRrJ2IDMkft;ifR04s@g+Br{~^5=FU~UJ2?0VzuJ65j2bUo8@>S;@9LE+cYe@qr<&$288Ne0?UfOW&T~}( zo@=U2veE`m?MUAa<~j-rH?9jloQA^q{rO{d{uaXA8~*@l&rz(wEVRmAW#YT`PrS!b zEo2kL#IuB|`1Ha7XkM&i%FTgiBeY}wxTuC8FbU``oP`xhdq<97i+~2oCV6Grw$1nS z>!VZLn{7PJOiee>)U8#$`ZTxp)Jle25ApT_FqtqznbTIMrtl;6F*YM@BN+2b3Hq>G zw!)ydtIsA5@#3WzI-*g-Y7}^Zqo)lcf+iHV_3CXcs9(Fu*|Zz+&llL`{Wd>yZKHJn zn4GYCM`u^f&UHUcv7}g*c}*px1iZEIIhPYbqscu_W}M1d|7~F7CcfoOtG}j{Sw1ck zV7FKQm~sciobK{v(Z_SZCBusIHrKr(Dr!&U zGy>kAKHb)BVqKS?N*i{bH&UigA_K)Kj-*jh$l$cnDM5u6OFeDf<=*gzFh&vXn2F|{{_xH;WHu8gCnh#^o*Jidv<3v68!Y^V3A^7 zb{RZ0W@K^uF|R%uR#jfMfB9(%n0RM`#gQXN#FI7YnFPKb7TiLvrr1p9w#EyFD-dZ; zxfg2S)~cZh^Ij$&2~$TnrTqLITlgK2+e8jZI2AJwMOJGOpQTy`MwU-&QhD%;sg7c+ zA_y)=Q*>N=_s(!i`3Y4IOw3bxQ9p3Q%PUYh`%-!sv}j%S!`*$m>qfJ}Ogzwdcc@7j ztY}+uIGG9d^iB{>} z*7Cyeq=dbG@Q_?KSBBRu=nPJb90~olj4$M#C;LuQ86hPGihBH>BsPKwz$aWB)-R?U z?21~9MpIDD? zSC;`D%YIrRBZhl-?&#RFG!Q_`zRv}*H{y*O9k1)?>iX&E8le^Q*PB`Wrdjr{N_#Nh z7t+d&5Egsw>Ag(;JT@EqGt13EW@crdoHOUA@lf%qj8J8=v-9G#Zt73HX!_pXNukXK z;5fB%_1{&W2q=e8|z{*&<%R1P>p@hNM9 z)z@co0J&w^+Kc^rAwr6Q%-nb!#qXa*JRsH>M zBrH|`42zT9J9npH6e}5!*glgd=N(lWOfUoyN+WcLN%YP%U_!ydwEoJqs!(o|2A0`D zMNeG(*Dw&WBnV%o@RCHJ!2F&jVW7Pfox^F_CQ(R(qujM=GJV)|)!;l48s*PF25ciAH$%rHXaL>jRP2|3IF zvoRd%4{P4;j0Ut?fFOegw%cDOF%$?#o?o6FY=Dr_cV%tE=2hTE&`xlvT56_ zYV2HE%0cl29QgJK!!Ir)V;iXxFK62LBo8N1qSDjL&PE;r!|@zY!#k%IbruaJ^>o?# zK1^vqb%6a81&ku;@Ur7yEJpT&MMNADB#`i=wJ9gF$witt**NhTPqPgjJQ}KhbyBOy z8p&kE-uxk=G77HJCxMkQjimeo5p@aLxbXnZVfU#%P`G&@iQ>+2)~p}~ ziB8bTps>dtzVLbka0q4_*`r&6$RPtdkR~)7a$6a#!U@1x+#ZMlDg|!y3K*ueO`;tZ z&#~0h@~jc^fk2}q|2rm!(aG_2h0;=RqNIPKfcH&LA5KMN`{KJgnikj7y{*w2f(I~j zHcr83;09&CeH%n6&im%X`Y<*4KYskf#;TbQjv@V8j0Z>#Juj@_y!u?O{SoRB zt_6K}4T^y#)apo3A)EOzTa_K7P&ToI$l8+x6??}wq73GW$e^K z{c2aXq$H)Fg4@!Ef*+Vjcw1tg@!I+_eYG&2w%1m3;@{V-7FQ9tEwdX#ob4_b|Dt1H zYFb&ZuKGSttsg#9s+z$Pv00B!6}OwD{yR+` z`%r#Up^~sN@BFJ6GxniGO(rD^FIQI3V07uSnCP{$+%HueQCbXa6-&Gdu6M8Eck7+4 z`V6k^*UV1CotG;m=%0)`tlZU4P^!!qe@!s{*LO=^?!to?xuF(-o*%VWmuYLueqjy}CP;;BdywV4C-(wy zVsO=ttz|=X)%C>1VYC1T@UtSZdZp2)+m0G_C%ZRJeAg}iDHgwUJuxLVYme87!E~6t zb*6gw>T7ao{;&|QXlZps(Xa&he(^_Dxdm%~D3LXjiH$js`Y8Fc zw|(26clV6?7e7sH$(^Q#1&qoi4SMAZY#SG+Z{AUT_tLv@>2y0oM8=M=k7g+Exb1uO zGkib4x}oX{$}*h&;5!)fXjt}FE>f4hdHtGw(RVIJmYtT_y+)tNwgX=UgClFfvD4|&hlUk30McPSMMi@2JCEwa`e@bSNYCV7MClS-yI>y0C z{iW}h*D%}$NN^sd+8`V-`EeJ$eQfG&=Rkfgc>MU6SF?RKm9UgXa06sH-{XOe$$Eh` z>X+R;ciI&qFKiqQ<-rNp0GIJs)s~igVDPY*a_^XD1vV(J{nPcId4fW9{nuj+QLa%>a5p5R(>>jNcDnRy zTTu5$Q_hr~hLx={>LznnWZS2h=Z<~^2na59%vR&4CZ=yYT|BF z_C^=-Msow`GYiXmhST=p?FIuo6`F)WyKZFKAqEz$aju#HRsvo+TsvM>F_$x^K>tok(I!HVzxllK$^M$ z=_YNTH%*6NbYwGesRSUASy-$n0C0uGBBdXdG|W3MoVS+l9>R74N&XOdn2FHaFYB-g zwFVHXxM6|$9|I^!{H!kCpSsw&XbA07;2h@e%SBuN`mQE;e_a+LJ$NN6l4XSvSGAJ+ zJPCu8x-6iPN;f;#zJE}|o~tR3!+Lt^u})1EJe>U>m{s2G1Xv+QQdZK0hKeU9hSgCK z5fKjG)IH1l!yqYk$T5k5b7`@qk#omplPU3ylM815f>)tP0D 
z!tE2hb1pLx03dB@KN;iW&fv(dDv>C(YL!cQqx5nJc}m{;?|EYN>l#EAdkiB3Y7fzn zQ-+AF3D_*lGn|(}XsR?_nhFS4AHYv+OsYZ>V9bEp>cyU}4ge8=6_ND1B>100zN#2d z{rJcqhOCOKTayK2bLp(zw^a-mJEVHi0k#G%(HuigZF4wobM)HCjbQ8sw0 zShZ8$r*h83Kauc=U4Yl^em$6_QJTmj@&5j)j%YM^=k~{a7%OATQ;CUpzfnGHEGNaq z)7?lSiQqCH6>&ChJ66<`c+dvYO1$5gPbr_+)es*Z#H@-BR*J$|##cuMe&Ux(Y8}?r zk>Z5=qIlKcT=eqpi3f*$r*A6$cqIpaNSL-VHWVERmDeaL(8dfDL4RYilFF)^DX(5@ z?2C&N%^2P(q8S5p7nIT5l(FmnDp7cgq>PrSE_H|>9}ip$rNhZk7a}+dz(qxr&tPxPISJG=*LAw+d98^b-O^7cVw7-zl+x+99&e;sX3Yz$T$p2 zRzyet;dG&|&5t&}P3f*K2z2_yw9l59!=zsLZq%@NB|=oJB1WccubTENxVB*a{Bv+9 zM9VEey$IhRsUwyP5RUFVe@SS}U;x+96s=|$SZFkzQI3Gs%*adz{au2CEf!6EKN1Q&t(H<3*8^^U7?72NQ2bJv+^^fQ zBZx{Ol7jV@3v1y+k;d^!&1`{n7bqu}Mp#^}Te~Oe_TCk2@v`a6UgBuCoNxBMB zi=kmnKCb#yn^-M2puYhYt_czH1{(pS&X!5Qj)oRyE%`)p!_0ZKq33wX;Zl^t_)Jhg z$`*-1n9|wW)?39?CZ>L@t6{)Ls6ALNU$AcS#xV*cM?=H% z@_+5u4zC!odGpA=&*WA{RXE$KyWE?Xb@eRen;M+c7$Eo1X_O*AbHR|U6s^ah0` z#q)oAMtE%6wDO2ON-y40G4^7QjOc`*T`0+fTcPv?YO9~k`(3}QJ~G{;&EX4F6h>1Y zV2;ES&KlKCKE}{{e@S~sMf_4w;s$XDd$-&$FqgS9piJi9e`eZt71Rp4$-e6f8SjQz z$@N)>gpbiWKz)Z$W9`Qi2R_b(9fRd@=s@VM*4FxwzpF|<-=A$vC*{t{8c~*0PsyN` z^os21zz~CRSbfU82J)+LH(^17TN=)X-CY4e4Dd~PSQqD+@^e7;b!vro1Lk0s{9Fb- zOBoK-KHkHO*%P7*5F1Mnf0j$;HC@DBn~I1Is6fR*3d_-PTR;2B3lnF%M{%PM!>-r8 zw8U)8(OH2|5Md8$Fw$Ha5HNUaS9khRVf}%52>=1bP(Wz{$Jo%$uyRXEzZD3IJ9LNn zDuIGg6s5nX45(DMqm(+`7JoHaQd;aHw01i3bcidp-s@W>qSm@R z*0MDCX{`!+=?@5$%v7G(;e1cj3sG^_@!Kqw2;5UgF~%~O-O8k zd+*R_R6IVew4aKlZRHeEYb)S|*&uCm=Fi`d2c3huSzCsPiU^H`kn}2Re}}8AU-7ew zOorO9%k0Pa{yAZ1UBS_y7WH!u0+N34;6b!mhcxI;R*^Qv^5(})%P{n@@}HE-3=zQg z$324%jP$5aZ?bh=@{B@5qs>pth_fH;Jd2+VH@@Z7;@QP*$2lC^n_yO9g$5MjW8Ob{ z!AuU=KH^x=W-0jtR)0tKQk^=MzmQ-QGy3qK2}Yz`h__%m z?r#FW@s=Rufr;Vq#?`Jl(hJyK1NYaipf$L$9C9;4AS*x=_qp2e9!EXPb>O1V{Wa;< zt>{SC*ZKJYw9!ow9^es@evEN7wX^Ih4LK~$_yG))@=kdjos~2G?2qjq-furA9Xfun z=&PR2GkZF3(ZTr@vI;w)1tzhb$}FuR->J82Nx3k9XmLHt&#X+Py~$yBbhOCQB~zkk zNK5G5q#S+H!G#45qP;~qW0=M|b(=&FS1eQqJ8=ThDTM4WL!q&{sK`AezeX?;CrVLl zlZS_Xc&zsvrrWd&Er8=anqq|6yYK*WBMmI z?k}avGTf$kVaqf91}zlxC7OI+%zGiol~ZJa6Z(r)!4{=;DKW%kZSwUUm5Bu}D?O|; z!RzleAQGL$%8+0kV}9cverU*nWNs!rR+#clIj$$#Zs8SuF*EwJlcTJ@6dnS7GWGh1 z(R#INMSs@LiXU5EVM=oHvOC(%Uf(~CP&)MT$b=><5eF3|vaFJK=c=ZUJ3cOmXLT0p z1M!U*zY^CiyjVJ1rN~O-I1$~||Bwmwg89IWv!%`CsyiSws*RRL@S$Wff{)OK4^z|nJe$YS9Ts2?6>kNO3 z(xROipRk;0Kuc%n8U#U_M>%1uq=^?ZBQu>GDMyT5U)h~cO+5#Jod_kS#W&-}nxc6h za_gV8sofFe94hnu08_g?;(U{_ceI!Wi4zSCR2TjA{{AK|Z|bC`nLKWFZ1>9b)olCP za@Gl)3m&42tc`u$E@#-LSwlMUgPNx69J!{ry~+>*rfhWj%Btu6Nna~a+uk|s61>2r zJ#S}q`^wX5C$kT0TQhQa|h!JlNvLPb*Gy3dJ+l)UH9bHTiq!b$Y0)2RIE zw@1CJ{r&GQKbwb;Rg4V`CtM}ZgbZ8OC*aWOSkFtr4|xAJj4@p*F@Q=WV}YJrS)9Um zVlFNMeGJ~t(^*B>w6ZRz)Knu?H>w+?9;Sg}dt4VTu2hwtCIK$4i< zTY?SP+>HfQ)F0NE52MG)vz`Tse&~P{MZ{tt^?@q@Pnt%( z-~GD^W|OI~8PWH_G!f#i@(zl**S9xE^((8B>9fIaPBQXXevppCZok8LeHfjc$qO+D z=cq5Flnare9tFv|xfn;7dlelx)miT9I_+3{VSiC7+5KS-<(&Lz_{B#4R3~@a>GQVFhbLzZQ>bOwytd%ud!wO_CRL& zV%bHG1CzBGy#zH?pzl97spR$H>+ov^deQhZyIVG-<*fzm+vM%4cBDc!#o%0w!sum2 z-&!PJG~ir@N2MX0kBPHp?R#k?-(#oZE`j4&GWO#)M7=bZe;Pe;+TwB;md%r;D`0pG zhes6ma#TusIm+`fw2s$+@*0~{SN##Td<1-@n>_I-)4x6k~FLsRwh z2I`P0iI`7-rtyk?e{_6)f2SPvuP=#p)$T}J&R7fSpe(EE$S|8&=ft>}z$(aL)XOmS zR}IbRuK5C7cbLWZS~Bd11Ze_6#M`=GziCGG<=KW<@XM*33Yl%xsptKC`$Cf2?{6d3 zzkhzlSDiaNq6#abE*8FkWA=@x*Rx+G{Z+EOTJ^*ge2Ex82``OH&{Qt| z(Ftw%6J|Xr#JMi|Yvsc(ZyuwC!^Sfk!w`^uOIj)!05Nly<$yFR84LQ!lQ8Vht(*ZQ zPhG`EgbLt%{0*T7Zrir)pMrajqW22xbPVI8Vt+mt%43qQpgH9mc_~VnamT)daqTxd z0VNm)uavkLbvZsmLpXM_`*yf$96Du`9wLxc1m0p=Ai}7tty;*RooD;%x5l&>7uYFo zm%@-&95A66YCr}#M`kej`NKxiooraY{syw6mq(C1ef(JJ?ed!4a8P|sHU|ypAOHP{ z`_{Z~<-TOt)F3t+gm`G<^WkThRpQLol_?{VT!p%)KKQEDn0Z0~gzj 
zWy_aWZ_q%91AvLS(1O#$sQYrMU8I1m0tbjKk|Duy6ed!9)B1?!M(z@wXW^}376a!C zj8oO3q#Dh{IXrmTgrPfZ*!AIGHuGk@m}xUjMH;sNyt2DIGLmG0^Hx;~@z%GrzkSM= z%?<4PJ9e>ClX8fcR#EYZ^qg%LMb9=*5JJ<-ArqN3Qe_LxUZ7Rz-DhAH1Vj5#Q&=3v zo!o*88k`5&T7wSdJluj+B#NnHg}ja8+!zL*l&73+AI6x3pAmn4FRKQ5v%s#bIf*ib zB<2Wx;m&Dq3t7rZwVaDL9I~Ft$5$ZD!Vx9(vgV&~=VVhm4z$|J9E>9=6)O@*Hi!eb z*bsq}rH2WS4wpDIm$@r5O)LQ{LqEJ11>24FQ*&aj6TN!CZTo2oLeS}5!c}zI#jP9Rv zi16Nw#X%WwYPsNQgt^e8>96a(JWlU-CxqO8lG-6WKG!U!tYXi z?P_duG|p*qeU7a{H`ezgM5L=r{()z?)4H+Z#DkaSADdF=-Gju~=WpFQqPn zpQ25p{jw*)X$2LU$ER0X5KhGNfeFZ_P209^&4C<`u{|AU#aEf}0dRRkT(pwDJo02_ zeu2(YdSj7 z-wP5}?7m_6GFq2NthdQECYE@X#uan->UTto9tr)U#>XIcxmu$_`^yzeZM!}^^Y3)CB zee~3Q)2A(}*{O3qZ=GHqT6$7@A*^fYtl84%MC6AAcj34QL=*Kp% zu@`-V-hQV_EWFmEAe(pwY8;XR={g7s3h3I2i=MK;3Y^9Mk0HE7>1#PfZ2IkY-#Ed_ z<+_qoRyNw@bU~B z=}OCyICH`0N{tn<4aZi3EC3MOKoqRdnZ@hvmq*thokJba{zn46!`)lTP6M$y+p;C*%_#fJjCiO=I^yLpvwv&` zm`34M)5pnxI@Ban9vm}9F%)s@vGbVvu1kUue94XqD%0hRx`5t-G1FXKb?hokkE)Qe z(>24Pw22wGB9r2lL&N+@q2~~@RMt01maNQsH`T05qOP9hdTx*`lcqh5|7)jRJ#<;V zxZ?;tkm@jpIzZa_LxTMbS(aViG^oOpbYphos z?%y;F8Tq{-?nC_urXo9LGzlvtXWQ(grIC>nwOYlG!zuXyAg2IKOmttc;|_7u9E@H> zK_aFl3m-4`gTLqqs*f5~eA5ydYI)E8!h%FP1x=E^Hy-JeTUq&*T&SZy`=dhT9;?dL&CXl z!rj7vn`c<#k!L|quo&S=DI&UVMx_mob!*Y`!JD8kVZW|5JdDOFM zT?=H(XI%UKj5}ph4Q1r?9%DjC)~a2@S4s1RvEtxOM!k9MS5~UG4Cp3;BzG?;#-2uB zJcXgs+a8|$n`}KHxEXaSpt%+SLdzjrB35j)a{$r4yow(yLsmLud0~l}fe>^>hSlBs zq5Wu;+cD;l%)+oIZ+s zfF}3gBbYEk&{iNO{PRGr^x0Vlv}3M9OpG+{-j@uVk1@GYtxQQxw-5| zuC^trLd1+JSqziS>o4f{l9IyyiiEhEXwLDA6jRI?hr|v$lq(N+YJ{>uQjy+3*>uHuRU*7Bcrz|7y79iESA3xf4xW6gwS^DU99$kPJu?0-J zxHD9CPf_PjYh(Y+9w#p9LZ!pClH<6Y(=K8sm3{ZyQKe$zfg1<4poNdl!hZ`!;ELnN z&Gb#CL|phIWHZl*mg=DQk0s)2NwiZCGH4@4Yqxt(7_^DiwmZ{VETX{ydRgp17?JgmP zTO(jLN4~zHD}!WZT|QiFsBJBL$|P67dm0)WQW!3M}}0L=BlJlAPG#_Wo^R&bEMZ8a6N zcm|5vUABO|(c0G>7v{F$ZZZt-o9OY$ng;tkhhjyw7L7(Im`e6NdFJJjLbIRc)Aj#p zMHDg+pi?IA^l-Eyd(IpGMrnDtU)F?rWeH&axU5BhgZv-lo}2PLR>i#HZE1RGcleSD zTZjDqbT^MZJq)l}wAtM7+@T<>+&7e4_%)5y#r8Pb{oLDeWsm8l%ICm(zXr(a}^5(woh0jao^->@P$=IE7m|@b_S8XYa+EFVu z0pxDnu36MkO$&{dO^~)PL@nYtI7LoMM_t) z$y{|=@+1(g98~Z)U~@&G?I;g^O-Ip-9>b3iRQpmoT6tM8MLf2z_ZT&1zd(I0MOTRs~*tJiiIlqOpLmK(fdIzSF*t#6dz$e z%8DOe1x0~_ChmHq3LnM}0@o8gZSUp*6nEsaMJLBCgAkFh;DG~6I(h=yO$0iJ%QkX) zIBBg#gDkFkA}1XP3oKPr5w|!IE0XL)dCM?5k{6h2VCB8kKz~ECnWi>PvMW^UNtcrZN{Tk+c3 zrIdPVvw|9%t0uhdMu|&`_h#eZXAJ?vE^{s>uPV#tEnDiHe0a9A&x&sj@ngC~V(_A& zsX6$Hp>H+bG7TTL7}c?=)ieM1i}&u05n;{wPf9ghx;Htr4H=}etl!J)*!eHiOZ_;i zeY)T34qXpxYom4@+OQG|m+!HRJ6DraA(DtS6;<;S-cZ92w4W_#BL-iww6?FfpQurr zVPrDd$}WYzF)bm80;5l?T@<5V!(1>r=Dykrhe6M0CWeVeZR zF&YnZ+R01GD=^Zb|BI(jn}0t_SxP-m$zl7%i(}*U{!%Ntt7NLlgY%yc6~J8jeuCxr zC*L?LzA`F;P}>`=j!K~l;ce(JZwB8VP=ZCnZK<*i5d;KyH6Q&u+y?wD__)@G_P#T2 z|7>n&8j2S3Dw7o!FueeP9p1a`6lF5x1dPd_$1mRAthno3XS9lz|kx^23(U6+GO4cp`M%WQ_p3fet- zoSgI=TR=Io)N5_rjw2`=la&yN6(faLs$Zie{*c-x@yHC#ATLyA25z_K^W@WS85p8! 
diff --git a/examples/running-llamas/artifacts/Llama-13b/generate_max_memory_allocated_line_plot.png b/examples/running-llamas/artifacts/Llama-13b/generate_max_memory_allocated_line_plot.png
deleted file mode 100644
index 8b1327ad94ba0f0e341bd43d1e213ff87754169e..0000000000000000000000000000000000000000
Binary files a/examples/running-llamas/artifacts/Llama-13b/generate_max_memory_allocated_line_plot.png and /dev/null differ
ziUMa^urrUthX5Tx<;VZ}<7##4BSx;mjhlj7$1?Z?M`1tO55L<$Oq&1konhZ_5ZMmp zFgJ4zyC1=%gSZgE0pTA%hL#%4R_OC^rpR^>6cogOPh2D{|5yoJLa@}M$J`uZ0EClA z$F_R4CrLtjc9iM{rByA;Eq zbuW<=ldhYG9<7oSpdB(?;QZ7tD|LTSQb@pHq4~Yhe2l~^ouu%fwXkGBe|n-f!g~=5 z3j;)|4)##&{NuOb;=YK{BTLrE$6WZJia-oL^XI-TZ+7VpZwj>9OT8&^a~A0LkFY=gBaW(zc^{E;8*+J|&g z9%IXRq|W4l%}p#{W~YA}=GX_~lDvd5CVyLCeaZqu0b!SfpNb&%QcWofF?52s0>Q-h z-;a`Zf!gOU3S0MmFDu&MOp3u|)p~!#hEA3@LmYXjsZ!`*!fEhT$uu0B4q)0YLc(|y zM15ozZnBG@#03PLaf^{byzd`w+>I3V-o1OUSd&9t;=dHN-{K**zlsQX_7{85p zE@RB#G{On_KkP>ru!xc|G8F{FG+nLvPYzoT8hi+dQ%L8vF;xhh(_%8dH%2D{X5+Wh z`!@V>tiFS36%if|;vsz=rZ51>z`{rQx&6^{%^-T0xI@BlOO%k5+@-^goKa0p4cIFf zsYyvT5$a59XJ5N{f1I7%BbH@am#%cm@5+_RaXGH-$3(T^1Uh8=ONHiXxKQTn*A^Q3 zu&p&Uh&XHY*;?7XX~w z^%Zu`MjP_zV2?$p}cq@Yn#{vRyqrs+hBi_N*B`2(XJU<_5-nATyNVfUY<*dayEdM-{#w z^cCCT5Uv*u$s!%tO(tM~B80-pMi%a4t{xuK*RI|oD^PTF*l2+hU<6#EF~;zfrvB6^ zpupda_zVaS{y`i>4uXLsv`J=FYl!bZ8S07Q8-y{3c6TSy@vL7zFG0Xe8#1e8BNR~J zF%nPk^5E_I$LsVjAR`dQ3pxqS1{wYbk1rB2vH6hCLGVMt2-@=s z(o_8FLO!se*G-fegrt4fHHTXRWMupx1;F}Y)rXT`eIDR3A%Tk?T1F0!O&jpkRT3uj zxL$}=8%}(#juD7qd2MZ>T8h8vA&j~D{o_szjl~$O*BsM#^uL-lk3!jBTi%oGG&!>U+(Ot`H3}%|S zn>}4z7;t@p-T<`cv1w3({6$HOjR8-gJ`r6+jx&${aIq7_?Sx$boBoFg956}20D<0o zyacp?qi`R9R7a-f(0-j6{dN%NUzx~$hKF~d@+V}*0OQwJJploL+(ZKNR=y6tWoyunLW*Jj?A_b9^G-xkvhY(Y0+p?6J&6!QIW?1-e3)R19kxOzUSwkq0L^GJTcIVno3~CXS;9gM%))6A;9TAoL^Nc_`TO z!OBE5qS1z<9v*}^L}9uyTtz;^e(Mfu5HO+_wE=k8#44Px|AHLUqiHL2jr0sZed z(w@tGb*?y)Ig+7M;{&8hTC>B!10y$y`#&^5a2=x+zxLDc&%6vddm*+prU=G0HbM*v zWAf3ydS)z%hNk8>XyH&}9Ev82Ah_3q>L~6wYYz*F|ZZJA&iu=Cx! zWY8OmeZL&y8g#%3LeEPx0a?SEgy#}iU6I6XXNJ>`KBo+k4T;>?L-xR{YK3=xb1 zG;_=}P(8Bt-xu^UYR!ClKmT0!9B(#+(qEjLy@6pFi9am=lt$tc%CS}P{;<1n?r*uf zGh+lACJ@BtRZ;Ur3!C*_y&xy3uqt3z4$aR1F1-a5{{0ke8&OGbIrW|$NW}ajr7DTg zwByPng91_C=v7TYECwH-a7-?QD>ivK*{aAKrZLzLc9SnF#xeOC;L|^n8i)jwkLTaA z9N`i@tOTQ&zonQ`G*yoz=x`U1B_VYXB&uX~DYn6G(#0VUv8m=gM;@w-^B712-*8030-<)Z1yMJjvhJR&I> zJr)xaV_Al~8yHrz(7Y@8t}{3Bx_n_y}o&wh#8J$NV zDU>`OSK((m3~NZT`oX*Xd9V!5}Se`;rh*@wto!p?UTzzM%5+xF7D zzaBFSOL0{dE810Nlx7XZm?b1k z%YSp@c1>j+`U88nXdc*Ack!m``n0`;*{2M`IR(D5?E7u|Zl9p*MQtfo8#=Y&TLI!4 z45KmM`RG5F4|sEqSTR3JQk8i!kl%XNGTT_z-ApHW&$rkjq1?!v)IVZ$@ zi%<#E1;pgkvMpyd)*QKsj5xdolZ_(}tlhSa8*`7|zJFhU{|vWh4GBH*SSKpF7^T8a%nvF0_41`;!qJQ+Fg?fH;R7du z)G;Sdo$5gGPR+hredA|~EC$Ij8W|Za!gx3ghwypy=w4sn+4i1aUS?6(|0i&sDVp6iLHy!S{a35W~jS*3r4S zbe&2&5IktK4@Q)_ySb5hGO(GbZ@zT#;vfp=w5$61`p7W(wr^ia-471-L}pNkF%TXY zvFmgowg_^I;zz2bXeF?f#=Ll;vVXsqfd{HSd`9u3$wRujJ7W*USMQR5~ zUu$fhpnT)Qf@08L!Uugr!^Nnb;caH{{Budkg5u(0lkztA+~(QloYG<;KlMX;dY$;q zur?YiG0Z$4mfsS92|%d3b~N2whHq!?s={zwN2g|aF*FhKIxT=pAQqMay}l6EC=cpV z`L}N6M6B$zjy&KBS-=^IbvOaxucU;j;Nalm9gPlS5cth<(9=i;E*AFdb1f-*_im@T z`9`Eg#asQ&}W=e=&tcAgkTvI zODEWne7Pw5<)ZByi8Rx?Qsh_>x>aN)s&1(;Jk=1q4YhEqUH>LBMeMnO`Bhb03_yBY zRi$qM-%BS9%xJwnpn$QNG?1)dfLR_;HyVn8fk8-E_|~p4iV|XH5;e-^hK?^OJ$(U1 zzH31NEo3kzFT-@ z`b9ff907?vfypSigp{yM54=24;KpQCpW#SCwnbJ0ubzeovoY(y9d{%0EG}}WG&MaB z*}7q#$W06^W5BE01Na8Bh~1H?xXoP5T7`Vs#c=c8U153TyAfSB>S7bPN^QYZW>lG& zer|UhRS}uFhv}R}k8s6wA#TIFeH9F2?gs?iO-)UeW8RGix@W-iPLBqo%D7M^6{CLg zkZHA{8^|76qT;oRkyIWf73JlJrmRk$bjIjhQ1-TFdPhW<1|QA#<^d>$i3c+u9|6{H z9bS(uI4A3yy^xK1`SPXsh0Z-h4*vD)*S#7V2_vsKu|@f76;WBYX4@_&GutuE&k2_R zKZ42J?<&VW`BK&SqjeV>3dKZ-LE7vG$>;m*!OvqlF)<(k(EIS=Lq0JvRv8zM)cy$Q|Jk?UQJ-)Os3txuPUjmx_+dU!I zmteoDI5;Sh^+gS&_OBm|u%>BzUs0iqw~|(m_9|*4ug_X2zs>-9dbsDPs>3dkBwSO9-0L)`}E0^ zusZ1Ig&=gZVJ3w}lHnUWj$3?rfWO@~k~QATkBbouLgE|`b8Xg3m%toNrfXz7p-Peq&3f8aC`09!XYz@mf^2m#;@DN})4wsSQ9gl;8_&07;b2~vcZ12YVUsfoQg*YoK^@?_4 zT*T3P1aIoBB+LAp+8aST2uE&UQbcQW!pdrsE+&y-tFoIPqEPB`pa@d6x0k1-qa%t0 
zXuWo7d0+D<9~~s($&`nd#Jtlf#-;f_cmVJ8#o05zK9K;1X-zc{6~qn;*qwgTs%15* zsP9>_)%?ZJ$|ZQEJLGfKF5wC4eZL&|3~LT=KqT#`-+*1@V?E(g>xNUh)h+?{DTNt%j->25CGGF$C-h>tyE}5 zHpIrx?hG@m2y`*wP$~fzne!Of_0Z4n1duHV!AgOYb#!&zMt6cwM1&bzhmKmc?K}l8 z1bn<7L$2wPm#0K1SAg|9d-|aL0RH3w)QT|U5Qa)D65o)D|DG-Q)21-&z_U4FOe22q zxQPkry;|74pMPNW2-@n;pI0Yl4>JKay)ZF&;!AfoznmP8RdcG3fGlzYWHL{~!)cjZ zXk}$(F_@f&BtP8$%e;v_k^i)rS-hvt%7fcg5V4T>)|mj3Y9AO7D6!cF(C(>v3~3`>E?%L-obQQ~o+GjtOCzWJOIeU4l4hcAU5) zWGp}O>8?2o=WI8;UaUHp^*Ot7NiX45oIcd;TMJZ$z!p}~B)*3&!$IxR`hQGnY(bz-bfQ>xm?eKu%(ecXJ$yrfR3-I8BLqnu9 zj*O-#=hiJ+b`Fl6VcHah%VBtAq+YK->TfwwTWO4E?CL^1Ax4Lug^*7gNfn!EAEq@4 z<>W+NB?;l#b0o}20Z8RXDh~{h2#@aj`rbmP2O1n+bcyeIcEbUacpdCq(nN#wVO9DK>|)NLIVSXV3)Ndq;Aqo5uqAklKjyDcPf$^ehG=>d~O&G z5cY=mYWO(Nz+8XS)D*K0mabks^O2jpEPs_+Gb%nlUPoJ7yC=nhd{Uh~Zenz?L$Q+1 zP`I~z{;YH85IqGVrZDke-WbJ4zP2lV3h-wqT1_elfj}^et^$DonyLHOts+l{D)=1! z69#oqR|6a^ z0O{x%j~9tFAJ`H@R+4VQqMqp4Phg>b#U2s{zC!=v#fzX*^I4s5Z46}y%Oa=T7nI=o^|7CSN{fp_ z2DBWpPO9ua`PBlEIyA`U7f~pHVSc&21|X-JFZn*<(W<_>d3Tqr_BA0dl;3WFz;%{# zQZN9cc?z3z)VpVI;Br5pxtlyGkMA=AyXC>9kxW1(2|KWFg3QwIbjrxc_}7yQ*s^F? zS`wkD>-4le_lgxK5W_RC{@@{59qz2Zax7okL8xoF(bb`z0#A=gNN`5bLm$-%B!IuA zY}F*2`JCgn9%XJPZ3vC zT!ZX{azbetk)6E?#{nis!CAB<@r?LVNj8lrk+Yu=$L}K<1B&7BtEG~j9w$_^3#e`9+bI%v&^aLv{s;)5 zZLb9f6<>gbLJ5Bhl`Y)FyvTM(rjq#P7Z0lAwcX3`c_6r;wjekG`9TC=%#0T+HYe9# zEfe+=su5mv35wx$scmHBg-M$ykYUur>7@3#PJjm7l=OGBpi7ownkm`K#B9^%a@p9Si$ADwHbSmLg%cj zfGsN^kwOSY#i2h(vwa1QA#kkQ*bXXP7?pk;Yv21JniaqrMd5ma$&eBNih2_(OG|A- zL(!7yH~)R6(^ruZ7pHDW!{QX0k1ra2TFsX>$sf=NCM*lA6hkr&G3-8f+kp1}lGz~U zzle#+!S;cMCp{ zwiVsUCXQ7nG$1U*`Kwp;`p5sudBo8Uq#WPei9#4gK1e;)7Q$dA6WkvKIDHf>0hgeM zH^dZ)!ebs0y80gwA;-_Ym2clFp?UQt3qvUis;iUUmCiqtC^qoVPZ_MSv9VZq?-EmG zW#zT#6@z%BfZ4UhX1_pzK7o`CgOAZuAXz{9D=2jG049cM3xO%eOp%j^MHa_odeNil zi}B(fZNIh|r?ZImNAR`Qox66?LRaUBIUVhosQM9u-hjvy8=9DyupwbVRUD4C-Qlc@ zu(Akip8Ss*}=Yx(#n zm^Vgpiv*MrP9%(q8L(bxI$=a@DJFjvVIrj&CQO)M^cT6)(K$}O`ojR{`JMz0W(ksN zHh>gBv{3!8-L&a>WdR#@ZMZ+60zm36h!ll*YbQ)h!fOuS;+r=0zyLBFN1(PJk9+(F z?5B5u^>6Y$()OToFAESW1s@0l?CCHsJ8gi5u6^8TzDC8ZpVzERM#1l?n+6n6`B+BF z?F5PZy({j2QE;NpquZNs>@_Efe|Xz2amqb613KxkxQ*mz_R{(q(#T`PHp&rXjjov3 zYl1=tqvHw?Jdkra!w*u$&HS&Q^>B5~$MU(L=t8XBYst#QQ~(;zvwtHwU;?U<68TCq zpcvaB<;wX-lgCEk;o;d8d#J%Y7d_vQ>px}xx_b{`e30TQW=_#Oq^lGKcoo~oX5h_u z(ZCp40-X_Z9RbgN7!W`%4X_1!FC;LylnyM`B2Tgm$wxvxXmaRj>c3x0)^M*0iW7V$0Z>H}vuUY&IDERgejmlK7XX5# z=@-spQCA6KfVhv%ZbI|u{(TB9dU!L@Q3P`VO9UnI?h%doPh5t|tO?XJ3@A@%@JDfg z+N1+0%LJ_2l0mQZTqc0c!EcUFG(sFhDCBZnxnjjmt%12a-|OSKu=vsM{z&U2JkQaz zy}qXPBQO`4M(kK)U0uxBa;%EaFZ0U`)vc{-NL;!-V^;a(Ci-iFf8b_CDA8`;x<$4m ziv7lK@d*h<$dPbQCH(Dk9S8FllJi8WIG~LLS)`DKJAnxW#tIv+{oqXjSMmmYr0{u& zqW4kbgZ$Bs?B*`tB5HP$6Vd(l(ANOt0s#KLd*;79504{b5jbM#6pq9FyM_n9$O5>{z8lpM93mn$UR32ZtEbNnzIPU;mCY>(5`gLM98sI@Jr{pw4g?@LPVnhM;3Khf%QsLgtf5 zp1lU_JAE;Zhb+=-7?NE;$DAJl2-P~5X+c2&A+{nbA;sCYi(eLD6N&y3@lz+@IffR! zefKUOz^9wH;^ZPULesb(;H40J8H_!YS5jauE#={HLp1yPZM8gJUd@*3??&{iQIo(w z+-{0~XC4YjQg}K$J5vTzP`s+zHOK){SY7dgvVQ_P z2s;;7A?|g{l+@Hc_>w;8839=%Nddwrr~3|SaD1Eypa?jnZ+wHJsi~>AF_9O)vFH(? 
zB6Q5y(Nb4NbSLemM~}GEe$&y?_OJPous-cDo0{gQFXUO+RPZ5r|{@-nNREMT2f=oNSg_E4eoH?#~ng5(J%WN1wAH$ zY@{fcT{nS_tlh9-A?iblG)KY#Zy?p>GzFIn|GEY;9bR&ud!Z{Fj;zl_1Ef`SDK}SQ zR`e2=T|*vTzZ^?#8_#*RnJdl2=i#gDy;Sj&>A0<}?C|g~`WFk^6J2<1ZEZ>O2zeG- ze#tPdBXq^zzkieJ3LRiV0;hUmss&1%8`4A!$6G#8d6=;OUHx|G0-=pUHfP{+#N6R7|Gf(pUtG7v&L z2b#l0<>gl)%O))$-0kAF!zWu_!vJ9Fmol>X zOnEeP3zXQL7fhX!oUbB*iAZsqo;u}%{zGYH*SBx3p$?PZk|khWQvf>1nox24O&#VB z-_OyIz+Cg8_&*1p|A)Br|K~r>+W^+xBIG70)5CB!w_WITPCtlK!GBhg(`gR&&d)(o z&WB&x{l|~pNgPDHLtC}ls7JKi0(->UI=PIEFknIX)4&SCe{1;>z4QG966$EH;UN$&%PTt9dMbO1&QW66>B zK^++K4k&DzVEwji+6iv_({01es&Jh#xDH$sVWPjN6o{grpvv^Ig9mRz$MY2EAMh$I z1!F56VLz~~=y0$XAX&GH{?IvOukPJ*$Ur`{w)6Kx{})fRcCrnk%?L}?7iv=}ls)N` z!{vq#&I72hsafH^KRUX{Hnc(pf%$%Ctq^G)76fi(MdM-a!`w>+3w&6YtRH7pbgGSF zWF&JBR2?thO}(V17ovnk7OV=^8nHM7F?AUS2M3O8_0_ZFy4Eb99pv>`qrd}&(=K#F z5nVd~uK%qY%EalU#V05Cs`Y5jC>PlC(EcP={;)pF6QcEP=!v>}&%r;wioK-d&NF?J zC+7E!>jeK9@(=&v8jpSF^00C$fOoIuM-vzW`}52kO}&Q9CHo0}MTiAzftyNl)A zLos3&YRODS=GxfVxeZJn6k`9FVa-dmBU2-&Br+%_d=yv^?Z$JH0TZ@*lg!LYXJ5Lx zF=6r`9Xbrn`@cFC0KFh=Pt?w96%N<-~&9#2Fry)yqzg&cTL|z*I}%-HVnixs#iln|{!|#>np5dMG>zPla+K9bX_L z%EymC_1|Lg`|R1X_k)7mz!xSV2HT+b^J^L2c-vjVeoRNxEq5v_Gq5u$zmYg~>J*Z? z<3r;z^2_&BXrmEMC_SKz=7Gxa>09by^22CPK@=7HmjT4MU-lIw6Vh`+ zu9g~X;I40A;D)S_C^ZhJo>}U@`52`4QXbqAv;akSfgzL@Y?AUYG?W#$II{7=^76${ zQrvp{_-Jq@c*&+H57Fy-$@$VU=PFnxSA?(0X2H{{6Gyk^adAEO4TK|wr}65cfIO=&2C&;t@eo_Uzs`V_g! z+p@CVM~nfkdMGyw8e&niDf-v=lgYVg4tRbF$krqlNr_;j``hYAE4Mkh!m*h`I9 zV|F$h`{UjC-^KQUx5HUJIYGThu~a%WQ+II@G7A4xPtloKx2E==MqB>Y>zboiB)k*Ua1?2_Y{T9ME6*h{XOf8`q3k#!p16!mDS{q=*~E&=k5{7< zsBq?6M2;IarwjTm+qQ4F*dEDg^&dzvoQ|HO=nL%9(wa%W0Fd11O?cE6m`>v~AMQ!h zf~d$jA|k^7%BVHtPRI>{ZivB^w7XyMbhJpk5P zE4QB6o;J4v+-@#(LZg0283;rk*a+t{O#pOkNUWTJAu@r3wG{oL`3DqmG=6b3V?WBc zrR0xsfiFVQ)L>Ll(Sq+lj~)*?i%K>>vwECi>&A%Oi;X_4;}VOKkf>t#FEr~CG#B=A@@ zxy?(}?t+A3UO`}=Kn*>!m2#J3xKqG~YXM~j=LZ~gliOB7fGSC6t-cvOu1!r7EbqY3 z*?tv4oxCqF3eMGSt9schEGjxrBrr2uh$a_WW9!f%2j`rk2n|cdy7h}pfNM~)Uh{sZ zdzU|9@sm#!E=b#oOK+$O4D*{H`h(Lj{V_c{AHX`gS6WxR48YhT1txm0gpo+FC(`x- zx*GCsTC0zM)8K5AA|3f^IMQFzT}Xx>DanQ@zJM}R4e_7%xb_aE&PbT>*B(PYi?Q*0 zZ_l7yJj;6#e2~NpO*VQm6KvPXlg~&#oDW(IIyuX+sq0EU!DYP~mSOqef1UZ1{v27p z@S+T07LH54fc4LOL}7%Ip)56CTEwT+Fim`M49la9Kg9o$gI zPE$^L8eGMwzzA)l_!rU#NI%*EmI9n|LPczJV@$WMu8tx@ z9|o2Z*n)S!E~JpO@u2u7ybI98ZWA&L7Aq4#(eh9k{<8vB`cL;fdkyns2839g$2cpH zqB%`L94(5sXGxm@+;s~z?s*XJ(=zd88I5m@`SC5f1sK{HJAT2X3zsjmqwNsZ@M~Pe z3A+oX>1<%H3(Pu<#*x(B1$yKd&kH}ArD>diOlc2fr$JBW{RRc)EzD6%0@ZoJB@X(X zMAb(rff>m~ANEr>!vJ+?0U8B7s}?#D-Nn!~TiYy32`QWL?J0EG7hxlG`Zb`iTZBWo zbZX^$Wj1=!Uc7~(mE5~zd*VL3DzsaM4XUL686^>?zWHhf1sz;`Cw>M_fcK zo706hZ7N2cF*_uSD^F{v3dl7HDcDT6hj4^Q`7XI-%LVki2;uo(#5IH#nanmA9Y_Ft z$GSX)NSJa$w>9QaoLo;7qJQNMEq>o60Mu%Q4<7s=mm#n+9z9R|W>u)=f^(W1GxsJx zf&nKd$uCh^Lx2Vg2!75pX|{y4il=>^Vhv!7cYAtDO3I^nbl(Z81JKsJixoeB2z5B+ zGz;<?6UcVlZnaTa!Ann$J2M6CsVDI2W zDWQ6KGXvF<)~LqO!-u&^n}Hx{m@Oy*oIk=JJ7RnLmoLtG&!wjNbvFk@AYf60ZF93+ z2!rIgjU0ZTqKMj{{|l?q4{luh;H5hQ!GX}${9#oWE+MG5`9(zHTjx1lxGa!C;_BD$ zzuw`(g`=l!U?99^t0T77t1|($2$7JU6r+VaBr% zs%2V=3}KoZn48%_{sE_4)xjaB*-s8dIBD*~IK%P#G&+}gc-c1r+U9}Ak7HqC3{d1} zOV(}jMZv5tLSKOh?6DNnrWcMNmBsHTg5!@lH@4@@{5m?15)$Sjwst4(!4fc}E8>PgnFeWW@HKn&~_0cWB#?a8+Qpc z0zXCUL`Y+?dnyJ;Z6z3zg+w$Al-qUg2?!xIkYNEKI{~51$E-^m&}IdXjZyT-OM1j$ z&B_P=*E;?+Zj*OXQM!Be!fQuh?WlFk=A@DUFz15WGIOm$H*DT43_MM(6*zRq0UW}0 z3Jjbmu^Bk24V(}422KqM0O#A(fsJ&hH>b~?69euZV+HPssETL)Tn^mZx{JX9IL28! 
zcOS4k;{rBHSAh<*0M^(MzyV%S(5M=)5WZ5)mnGhKB@dXWgn$d~TY>qT3pgX06a$<> z1MT1d)uzC$1|qhpbhJ0C4Q$N*purLLU|2#@tZgLR@j++DUzo zqu*4Q@BqC4oG$|oPxF3#Rt@ZaJp^?rA3S^rDkV6dJ$vQ^Y~!?^xBKk_?1nTcT#04* z^kQ}OhZETez*Tsyz^w`gftOXE`3O9wMif|R2j}y-EU}!az;S4^o)vg}z8X~XVHp=E lqV0vfJ5J1?JG$Y2>W^!-Zr&LAXAT1pc)I$ztaD0e0sva)5+48n diff --git a/examples/running-llamas/artifacts/Llama-13b/generate_max_memory_reserved_line_plot.png b/examples/running-llamas/artifacts/Llama-13b/generate_max_memory_reserved_line_plot.png deleted file mode 100644 index 12a575252fb0c818cbf0b7104ec25d07352b1517..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 61567 zcmbTeby$^a_%#S(0SZb8C?V3_T?$HeC?ykA_`F+%6%!xmr5uoH+)b~P-pJmhF7@y z{w=^4K8IHt4$3wr4$g1vj8Wv?IJ~#Caj-Oh`_#$U&feU{nuD2_nU(RWnS;Z7dwv!c ztN;B5W*a+G7Ovv*T=)?5_mZ0SC@9!(kpH1(i)5LjplokQy?mkK62CF&tbx~h{d@Pg z&z+64+1KZW)9d0-EnjbMjJosq*BuvpyTvtZwy|jOovN5%;})hyk%bE0t(Ol%p1rSZ z5IM26yJ1JGF;|Bo3r~k_|O0Q zS6lwmg0YJo)w^=_xbO&yVFFkQStu!fRpa87|F=@-TmV%u7Yyw ze?F8W^#(2SwU0Xq5C8vuq<+L)^Y?*rixC!*7M|7J`PMcg_NDHGxNT!s7nhmY*&o}B zwcA~BJQ|vs#pVN<(kwu~`fI@;hZ5=NuYz3)MzkQg9KEhZ-RDN7;AM2qan zlkfHQ!t62k^!29&Swv`>zY|DMhrMp_x?~(F)TzRWk@c0Op5I&SNaCw56gv}^_9lG% zSgHJ->Tsd{b%q$O9bH{X#O|B7Z$-ag;A_;nu-R2zr0q@Cx^iGXVr-i*u%CsSb66j~ z@9yq?aBxsmT%01Gz%Luix%$01yzkE!Og3)rOsk0sgz@UKxP^rU4h{~x)jl6R@!HCG zc`jl%H8)qLJ(Mbj*Y=r|l$1VyZDDL~F2m*7AzJ@Y+K(T71VOadI@J#7`uh6PQOtoU zDdexdKgw`iAO2V4tgtDFMJ6m8Oe#dHoOv%cHWrUXPf+=BzF+{Kw)^o`hFl!CnYnpi zir5`Q6icwL?``L;e{Yt%6BxBC0wjV-U*~BQ_dJxZpMb@aiDYb#l78w_P{4eBK7CEd zXP1uHOAv5gl5wp{$;xVln?%RNY-(u{H=fLti|ZX2INwrmz^7L+7%h5(QtPt&QRMm} zj9IsO=V(*g?QrezOB$|Su5OLf#l?OP%HO|#i@dMBZr>yP4Z9T0VJcs+KLJbF*w}cF zRFF5AqHFBa9qeqS42-k0Gl%V|1PGq}xn|58XlSRKmDADe#{TS^vrT>??jCe>bc{N` zLdeB@lXG%n%spSeyx9@Sr0i8+FJ4_;eQ~;+^n84`#I%PLuCk@c5BpWF3AbLY%L_%t z2g+IUm;EUoj~XtIo|yI|W~k<>g)^uJ6&0~QV$qY$kdFBEe)fhjBD|d~7$UROab1mG z_2E~u&uU|d56o&%BJ7K ze3YH%es++Po|2Lhu-%^_lO$BHKpxeR0Sl+louyIyHZ&?K3Q_X)>)rOYHc2}tr>ljF z{aDeI5C|t>i9a|nn~6!Y$QKKc%Pj4vt~()c-wDWYVmneZVh zu<60$+%?gILxuVxQZh0!ZWkxEnU1rwvt$vbdgE30SvMuINd*-(6TPhS6jMGLSy?5l zMt7~*A5%H3_P=81%_5y@I` zC)n8-EBzK6Y}yseg~zDDI^f2{#B>{rSn|iSS1OUyRrYtt$jC78XnhhBNh284m$n69 zl1$j6tw-cKptzWggx>*W=3sT8?A?%m>Z|X_jD+7t$ZmA^+qZA@6D*n~H=k$Zh$M)n z7&Lyo70#^tSV%}HFF${EAPc*%ukYRVz|rP}{o$JG*1xJWokx!z`8G6&Y)(}6gbVZW z#eBo1nOj@)&&i>K3xjKkn{N%yO^@R=$9eqtF|mM?tg-9%R2_WI?0C5aUrT0Imc;i* zOry1~rHH-j%Wd!IxVZV%)tE)6D%Xadc)ljMqvEo%znz@{!k%X#@$sGOBZaH~syLRG zmf)=%b_C}4=DJUo~GBWm#jI?%l z2W?H(c&q5G>KglDk)+x!B39Pd_aU4WGi8a^hV$d-oW6*D$j@gI78b5zmI5J_Ar{&kwP*xH8nhD z9gTb!cIj>Z!G5Jp0|+2&Jv}`9y@f(|)G2B+M((1*2es|7~Hs|5z5cU!+1##)= zl++4|vQSm~1_sh#&&tgDo(&YE6}@f7fS>9c8iHJ>hoVZ?ItpPR2^Utn5m;POqU3sY zMfm&JkH^R(p4V11hMAKy9B$Om$|?}*VunnV$>G`%U(4+x_hV~hv#P7}j9jOYkC$|D z;e{g9!N%g)eQULK3^tO(iY2UF#m#kYZjMmUg%#O#)YS=|EuS zh8Q9oR&}>3ttV4G&yQg1m2=gY7S95o@F2U_y?ggqd3ds*6=_xwCh^(({#4CPh0ywj zLsde8QED?S4BG^q@#@a8vHalaJ<;)EUD&$<{f3~L8Ug6Byu7?chOJmA@`-}q;^Lk_ zT@I&L^-q%s{%Lu=(2jd{xGs^SlmXGO>__I645jp4uteVLwA=jr{PZ2fBqVgY)tr#+ zY7D{9I8*ZTFB>^er*73cZ|e~g6Ti2&S9Xt!5APQ z(f97Fb+wN;K)ZeK_rHJfQeo7)C%cA^7&Uwf3t6-)tyMF}ir%2m`^2D$EX zv_UjcX>%tT!d}q(DxN|js0h+VwlE)F$kVNfglJ`ZKl7Ya$n7umBy0ud z*9KGG&;GHTs?{8$tcP9^5D>7lw}%K9=H`w>_Clx4@xEMj76y{`8-B{uw2hVkyx?fWh#J1KiF>s?CiWfl>y-jHPuhA&h7P26*?#}O$+T|9FCjFU|jA=$_;2$s?3Ap$nJUO z;B|Fsiu?}}!@M^cwY|Ol-FD64`UoxofdrsDDQW4qkQ9Tt>cLNVtPQNJ?k`-OtU-;4 zjEt0x=lyYVazbnkI{MC1 z*OLh2{pFtJwV~XLohEFQ&whT&`C43y*o@G1k+l~(7 ziTMEaZERB6>3UD|>3SjirOw9IRzJY(Z!InFr3cOmIByXne*$t0;wsv>BN7lwW4?A} zxTF8S5Xx^mJMSYr_uGXlzA2Hgv9aMi<+2!N9H?GdF>9lETBvjEo^NS=u|+*LJxs1T!2#<92^W6uTmX>A_cIh 
z+I2rudK@5`v`?Ak7<Q3j`kOheOtCqkgO3+H!>rB^38ETw15ydh> zJgvILZ<{Hf^SkUAL>9XDrM{x?I6qvcAtEB86M3>0iU05+`RdAwBs6ULbIXmzPfqGC?jP1eOiB50I&c$^( z^ZrwKIGz-Bm{gPR{f;u``Kn#0TQNri=ej{_iJ1Mz%duP*>=EdaVbpR;!aQ7DUA8N7 zZX?EL%h!#c?({<Q<3W+{)WB?Su zHGCKghj({&e)sn)ol0(V2m1T}nVwD>&euj(S|cOMA3uK7x*cKw)Z3bFP%tygme#9t zKUO<6R;qEbTCn3&NEAfv_Zq78x{QVXMz{YxAmF94GA<8KrL1lWO2fsTOr|lBw_HnW zD=ra{RN!MS$kgQIWa8sifXHlIT#H#rUP|AZhKBGdDAE|`+vEYbx$exMLVhV-b^Sai zc&@ChJQkm)Nc+{_{}~ro1R{dLbS)7%-YRFx&O$%>3mqRuAh_hH9D#K)PA)F<$!ad- z%KDUj7xBlV6Wtsy=g7E>KCXeTN55SO;3bq2DiU@CA-#G*UVjF%O~Q{6@xD)%_JSo6 z3#Ei&oLmAJlmx14gfW#&XB@>CMyR00o5RfY2xldMPoV-Djxf zJ4jsA;O%{NGLH?j&d*Th`BMs^NI;umAtA}^@e~5ymxlx}f7~NveL7Hm99#n>DZ7k1 z!~1Az`>lumYJWzR+aW!PfYSnu^T;V0z}6cpt1eh%`vA5{D6T70bppT|7+;+1KG(u* z(f7W5HQy5WgHmeoWar?3lA3zy(`3f$*RLg^jAdMSB0&(W19U5GV;6bxDj|aGCKya0 zvlB(UuNob8cXtsz4WB-t!5onVzYMUG#D8g#QCJup6oduXxy_02HDm`nX&TTUFU7@` zphNmFPRG@Ip8ph_u5;(X`0^1@LTXBiR-MZ=p`)$s>r?rnva)oQ9Hp3`J{YIx=jK8e zRr9JM;(C%Hex-ZF0hj_ZY8o5U%#h%?ybiE6Wu&2^`U1Uqc4o#WCPp(z>_&;*ayQA@ z@wNg~dRQ2lg32EGdMDH@9>4l&CX^kRWgb9ExVZe1u0(PZ5Xb1a)U0p*d}iDa{Q6aq zZl=;sHUnKwiSarvNvindzMb@i&mAAi*4kOHAAe z@a5hw0KjLS4SF2ErzQL?E)aR}rF_ z&vx!6++rCt`4QW!Z3_triHJi$YkXdfrUzF?A!?eF6;@AnY+Kcx3a`c3jN;F-fz|?! z5HSAp)Nd`XQwtUVUWxqbm zbhI_u0pYbeS;OPxrfe+ky$v?FO`{aWu zm&rWYL_s2FXXi*3{cb=5?D;*LRnJLEO?xtBqL}gMlx_mHL84QB=Pg;7pH%u+S66qT z6rliD!C}&qDEIQ^XX%U=liQ3imU^9U^7!6=;#*$MHS|-pX<{O-J3)ZK@7==EISW0# zKb>-B5nvhF?N@58E9XfOe%NFX+9|`%E$6MY=_Fa(WNAk!4{_%q{H0fkT3e-#x3^1OH3f?Iyx|+iaKqK zrtHmgQd5gBN&x5yLiDD`FIe2_9UKh*cni~nk5-^=_~I&A@{1_yZzyb~bq2p18(%EC zIyDRrIJ>ze!qT>Ja-oN;e*T%88@D?0U59xYs`oRviPvf3vkMEKfs2Go2k@t=+2Tiw z=Z`IK@Xp*k#*mb#+-5ov?(&VLrEDCx6?=2MHB=FJV*~^ZastWA%Y*AMf}7C9BE5zj ziJ$@eW4%D1>0oVbJvuSL0jL7nbGX&IkzW0@CX@Pd$1{PkK(coe6-;8j_uc@9febd} zuv=wk4AXpuT0Im#1j}n^UGeepJ4Z(*Ffkz1C>(?FV`N*P03*pg00`Jf!UD+A&N!Z6 zSX%=lqww{4=9&>;HJ`Ku5H&$Q5W&DcQDM~tD+N1AKd^Afc9kQ%baFQMVF^X#>V)Du z0n5^0j&eAYcH~H*{`}Tf(6?{skU^^5#RPdDV0yE5R zKYxx%EDlvWT0n@letk%h4Rqh$MITJSjKO^h zr+L}-0m5zvUjd3|JHA$sw>u~5>^kl|x5Xx8^#Qoe3LpGay)Xz$4top|)6;jj0us)| zIDqz|%ADu_?OO_j3KA?pQ8A6}HimgIPrFk76k4tB;>XM9K}+! z@a0HoMTJuk&11(5?4XL0lCu>-n}5F=0f%T*S|{wK-jKA=^VWBwK{D<^q(aib6z(s` zWWqFroCOJ3^o(2^rK+LJDW<+ehjq!p(N@E)p~u6hcG`qXLWgQ@0%HygjlIAC=(LU$ z=n4Qb4gz9#Pc-a!vGqJ5;Q)2Z*4CE3;bI`}YAC_4hhFTC*h=kj^)L>Pjm4;4 zzvP9gTBuj|7ASlOKZ%$v4-$Q+Vc7hUil@SPE4Nx z9jC)L2kRrj07roh{~JKWp+fJYX~kjG_F(aq6_mJ+L+8l4Iw1g3h9Cv}Yw%72l7rP| zN|4+7U$&DT5Su`920-0owVivpxR)nvNain=qWFG&IN!48w5k}WC|Nl<)dJQljjGdo zUO!>udNX7QfTzvUiG<0`j(BWz)aa*(QEe|f9o;vWMeMhygka-ijdR!9eo&~oDcTRX z-$1L2=M9qid3oVv<9YJjnsVErxKJFRM@K^sjhs&(Z0D#O zPGZxO3f1`a{!L~X5KzyT!o=aaesnBC-C`Qx&H;&`fBEm!R3fYdtHp30wpu}{zYwf8 zG(l78_2uQ(D*I)i=Wjtlg7kZQvDd+Z$R)NgJ3QE9rJz!!V-0A(-3LU{BD;Ww zxYb>A(x80k{CfWZup!{ZSG!+AF&skij(9yJY3DX{pm6EJd^j7<)g7|X!)!$H!py?jn5pCp{H$j9y}yXdD? 
z1>6MUWOts3$C?-ttfNd%Svd?Swmrb0#$ycW-R`gJ+#JidpTT$;1mOdkigsrE@{kZg zbab@An>UR)%2_J?b=URni+i%$F%c2?AfG9DVTl<1_CW>xZfCu);l`S`DHSE9k(<@g z(GlU3Ck!2`JcW5duiGX~OEgPO%*@PK0bt59y(6;^fN>=0_Wj2T@9w!d+3gc`elMBw zu0!6lIo~jlydVS82OnU>>o8}*62*&nCj$SjT<3Pk*a3O`dTeTFh)dme$sIE}EsX+r z$4p?SfX|e6rFkF~>TJPz~KlB0Z4AgR#NP z6u7v!NUjmKE?XrB2as2De}8C5$OGsc60lUIf;lD&&>fLE;fGDb$%zL)aCNp`$f(WG zKmGXw+8tmfbMl`4s;PmhNCq9T8MYHidyS8eBhv@2y!8F7XvFE@Ae&ilGVy?myE`(8 zV|i?XqM`_3W8R+bE&lrTD?6a47UBX5dNQzF2sQ%ve+ICvkgf`R1=6~p3MsE!)4E=I znPvf?K(-(Ff@D%b4(tJH0!gStHSBh>F3yAUw%dd)QsjB=1T;zbW;Bl@&_7?681If)s)GRC< z@8Uxzg=GIw_F)nD$N9pBt5irZggMK*!R!xJReVV9Y3BC_z>M6Mqalk?AlooNX*FKy zO@Yzi76kA@ z?@PS}iYdq--ymk#08^Il!)7kPy)o+73w9jB&&nnV3k~eSYS7vcMZLBb{*;6OKzsiQ zx52x2{@=f20*|{4JRo8<#E#u^l=bTB+y=SmDk>_OJz@LZ^tBa+(gzU)xU?RiK%?*H{ z0?CNEy1UB&52~2i{!aR#RA@R#X)-W8v($o*BE9bos&p(2H5WMlv-#x93m3L;Va(2#0Bv}lkF zGfIp*q@k~L?#a^;8ON{9h`HEEKl~y}-WAWMOs|#~jTACf^E6%qg;qESeQPme7P^K~ z1iflHi2n>(JyMmB%;+H%){|$Ac0)0fLN&)5%2V4LM>1;L#=tgmLMKJav&ef~SXrU% zis7Ic2q)RzZpwp1W*JaHJnF3$Eoj>`!7;x0|uTwvB0$itKW|pVAMC!Ll(HEp3U^c#_12s;c z71HKX;I_XMI0&O)duONY4V6qAVB$j{b7{uL#-yO8g$4!&F3itU2zi{a4tTkeZOdtg zMnEx@hS@y|5l%-D6B{cH?^6N}F$1=RzC-f(ij#GF`+GoeKp~5upw8NKLxTz^nTr-D zyRBpy_zY?uE`Bnh2Vga5|MN$buUVxUI{^mQ1DESxYR5VuAt6r>SNg<&&Wk{9A(Sl{ z7)I&O_o1}*1I|qcyN66mOG_w>FDmsOuI4}h(7SrBoG(J{3Wd=bQJgg3tY&1CJ_vLR zs1Z`2dWQo_q2l4;F^AD0W>0at6mrh@j<(~ zwk8eIY-mkQjl)9Q!^N|WZVsa096+@kcT0=?-9BauT*Q&W1O>3LZnULSL;Y;7?| zFzfOwSB*7aULC&zHW*4b5Asgg+1Wz{g@iJYh}<(O?ZA+`#*e>W-&-yCfY9igtoG^M zDr7hTm-v-TY6R1)T4yZyf{OD+FTKBg9wC5kVijR3*GTho1XB>z9QrYb5a z9BB4PdH8%c|358L$hE_ifned| zJC*SxzxzjBTteaw6Vomi(*m}w5&9p3XHqy|8UeD)YgO^)ixX+6Ns5rpY*Be3%gCjv z4>>?26&Ew{^NY6`y?yEX^sYIj)C;@4C(kIvP-)fjMhSu>PYRLVtdko`%E}QDA`}#H zEF?is@9LA2kWiq&+ltG}@h2Na6LI9QKJMH~4?dL#_w?Q^wHFv7)q)BD6F{#dT#DVP=!HO`v^A5PVNn;*_Mla=eLchJz!PiW{SC;#&W&q{fbeKVZ!UY>Q| z+k>wxgkzN~R8$|@+9r4f1PnvOzPJti`u4YhR>420$}Yyqj??#6Vc(mU!3!+a6Yr#T z&yzuZUo-7TpKaoG$RD0;;QMX+;=mWXS65e=HScHfK$Ol@$`BnLNASA5KbBbBzc}Ua zCm2c5t7F$J(Es)Rwp{Gs@2M#iQ`1t$fzQ6ay*Xh7M@P5A8BCkY%qb*cPLZa<#FC81g!?x2XYu|BpFVBLc1NZy zt}Dr^ynjUuVh_)XLrx^Q@#hJ6UrBj%wm|PtzZI)OrwVKd^?%eeSGwbfXd&x>vJQ2|2+g;o%{|uG$ zgHDDhs{{OOZ_2{RWHnm4O#JX63n6DVhnChjOntaMAt4eDTN|&kay5XnqQiJ6*u`eBd9|6W8sWk`RN zB+gT@8%9dLDxn9=UG5W^%Q3U<*Vl1F#BL?mENE@fBJ7lObvMUq6xo-a8Pif+22-A2 zdxp#&UV7JWUsHBOJF69v8@h?vQy?yNVlF$bW~^l6*$L^I>Xp;!zXoStG#-;P=drI# zD7;XXnRFsst!qf4&9W?sUt*05w8ISzaJDgEW)EGAerE6X>gPwEAF3iJ!VrfNZ#^#5 z`$!n8efEE>+;~(HMxgtfhP2jTdPiuZM0Ah}re`A{S3jT;5+@NG*bIWOA%{{-(8cGc>Y0z(~*S%r<~BWBA*r7#KPcWm165;OD50 zsSbinwzX&I-kDW48!2e@bKc@H*zRwx^-!9X@!L%@c}tfjen+TaJAh&~Vot`7VgYp> zhhUe8?<{Qp6q$#pc%KVDQ%X^0AcmwF)FnGbT=zejFC|sKT734%?48F6)(tnUSXZYg z)H`=*sAZ!&MvE_jA9=(cbK9`xz<6VHIy^$er_?kUqUrhO_+fEzafbQ8V<2nE(9jOO z@%5|hNNKW45C}3KKR+77mWw~2768xiOuZmJOD+G5egfbQyLtbN<-Tr}9SwNVz<_FS zbzx64KF*JD?1<`l1NVII3HO&U8f-S(xu??Mv{DKDB02c`Fm`<&9~z%5Gj?lqPUnRsYoO?udFrA0hJm zRPfm|+0)}o&PR`iVU7!pj{aI-@0ID%)rFayW$FXcjGEd{ntgiJcXR7;M;((>^^{S} z3C4?v(|779Vxm{K45n(^Wn&j4emrYFoey?}LihgcK&Z0|GdrSZM6}R{+}4&atF)-* zgR{u=1@?*v?d7wOCFM9{_lIpkV+}Z0Hr@Eo?V~~j&0TRyB9DX@bWjH?PxzO-@d-ZM z{TQ-*aiL~^e&qP^6B;;G@})5iWjigfh)1P5B1s}Tv??~D7kL{lD(si3iAe&(gMEAk z$Yi(Iw1fus&yQ_#brVH@41eKyKf_yFtEE&B&*0@{5z=n7Fq z%#cdTEUR{@wO~@j#$cML@CvP0KIm%aJcRg*`ZcO%A;I3?$|K>gkD$#;ub7+LZA5Lb z$hNJ#GsA1Tst-4;c9r|7*m%^GFAZV*E+XbhrL`^vIXUV5`}dzc(=)KTv}T-xc@xYl zeahCzCCL^VX;)Zg!$ocx`>L$Chc9mR@&nwn#@Q5oL@?zN7N-ULV7 zV@@;CX8-Lu5QVxXF}tW3a?375iUoAk)uY}vOFwx0IJD4s`Hm+V+FSruO4K0zKH~Zs zhqI_Uu>QlbY0$=bs*Ty#H;opxK@JKN+w4Q%O;GM(>PbxZE@Qqr@jMxxy3+?HZ+&#I 
zKi5}8Y6bs%#Z=7Re{SHVChn%f^e)Cw&z0+nMc2UrPt4cBc;>e)m?QD^r?Dta2y4n9 zzRnJaT4tNnBZ4U;3TBto;_|iCb9IAqEgZoE`2vhKc4I(Lg0-o!ukTgN(#Dt}{RE7p zFULYnjf?^k5p&o{^0`wyO6VkgPOpS-J%Yd-T;8tAdF%16!bic{`2K~PD0Pf z+}{kV4Tt(5CuxUF$MGDt=tsYvE}>q?@^?o2$23coDeL7jj>vy&>6}ZCzO%z|8u>r> zHvLrYKGCQ3UmiR3GmVZqcC{!AhJRZdX2G_CSSBYAzKd1wU*(EwY4x4m%|RdI5#yia z7xPl`UNVsKG2B+X9Hz|VyeITL);CLBcBzSp<5l*o#>+@BAITaow5@`#`r+fpqOd+a zNq$eGI?%D|F?HLWm1d#)$H^<9+ob)i-fj5=uy^obLF1!S962CAJMnP+vN2o(HCSo(V~T$ z+t0Pi*F?Kw4+#Y-$o#Nw!7NCD^5O*n^sPs^>f4Re-Xf+wXZ9B^OAOHz&;Go3EH@td zsmp2B`?>K`O^N4uwd`tzjfi~UV^S1B_oL5jhF-J(s&Ik&>;I`r&=Pcs&y?@nVAYc# z(74bp2BXL$pjb_MlhOI~EZJ@63_w;!1d-5$cl2j|SOprOkyxMt+t3i#!?U$Ft@XKa ztm{7Y{4x)fvKM4`xN&BBI`FfES7#PzQc~Yk@>W{H=`d+ke;(X1JR6rC9rQ09FI@4e zK%;ZZpv@!@ucOl+A^7M0!g}lGGY@^?@*r}0pHoQ{lrI|#i}^DP7~d><(!K^L{Q098 zy!QoTL8fE+xVtN#SI8YJQ@yZJpFf<%dSthm(XA?M?%|_BFa8IVw*mzo{Yi~)io)6y zkwAXXS64vT0$DlaX-czX^$vu8oaatE{4DQHpvFEP^wx;0W3~6@i?&<; zGgTx%l1ed{N_p|p;@!-NRuW!*2}=0x$q}=`!_V6Z&`@u_P!xOqd!B{l4?*SDZa#FrXN#A^Rb*%=6JUj$mVj_=qgrvC#7w)KUprfp1j^g4*ahtx)De*Nk z`&x+e;$--%LO$}0#|t}G49xBTay{iwbM5Sp)5|Dcb)eLR6UL8!E%yGn!}LF=N@hPr zZjiQ+B&ayx;dn9xEH`#lqKpzqi+0R3g_&Z0+;KcxqxZ3^iU*P0z|_pN@1p?(rl>R7tE))w zGN`KwbVx3D9vD@yd;=?M3|!>t{vSB2QZE|yF}O%%@ivOcy2_ki?+r?`*yNblmoZM| z_3s^E8S)AXZk$ZmG!%oe2`v7ApDO=5P5{Ihk~+?efj@xxRsft@G*6$R0RIcawUo3Q z50N!o*)NDNu+6;zcQz1p{xG{TjI_42^n>RSDV&4O0tXqeKw>umq7}gjuqh4cd0grH zl$`t&#%u9XlP>TMUI_|_J-9iGDJ|>K)NfHLIUF*3p%!-~XM*c7hmP{r!m0yq z3+CQgU}jYcbcm4F$32i6_CcIR{tOUUgh1lIdGiK*d?>7L2WC#&Q(d4wp_mP1-Uo*U z^hI?d8KiKfm~sP_qGz}K>$hZNCcNzZ7@}`ty^44p_#j{r!z*`JV9C~GWBS2I*_31{ z0T~PxrEB4~KdldOUto>_c|z@Ihv8eW3!sB%AM1`P?pNz@^w5T*w+ z?zJ3are(;{KE60>h#ySmkRbAw|(h4>FC!wkBzzdtXyoTGvP>r zV$rOO*E zGfMPiWWECjnw{QG9~@w86h-ZMc|KL|DPw7whY)+PmmnI{y+5!bGWr3&B!?_p*n(uA zDihTLj?TN<+C+D-x3021zz`$8JI4x(O5_y*X694_T&E8ygNeFcjo#k1P=a_q2ZZE^k+&(DGdjw-4XGHgt@3FXGba8jp zdwR2(Dbi?FZUIIa8Lc~K?20Y#;l8Z0r+IJBP@s0|YP>b&_1U8RYV* z5Y*s>&dyT+&WD~}no3U1)rgUzhI4g&ng=|{K!$_|gRyn#>=5%~^#SQ+;OUQUv%ezh zngtK9Z|qYf?C0ZX6zWlk2gb^$iFa}2yYI|k0qFn=zh>3Oc0(ob1wD1{mEj$fQg0_J zxWK0QZK-PlQ2tL@*ZX9`U#9B(W)@VqJ9xpX-wAKjC^MrlMyMp~)rkiXvIRP>N5V0Y zd%wZXNT)RzBo($ayRsr%Xms&KP{n&1JF}`Kk8N{qS)>;kFqF!I8#O5ThWtf8Cv5?ya)d-`JE0d}kMLx5d9M!X-u_b+v$#A0e#!jmKE)F z13L@ble+idX1NqjNw~`)sf{6}B;?3sF>JjOZx3ZgIKY`R8sN9a%hv}|UrB&NTvSxlqX1EDwSaF#L zk8D#@Q`oOx8X!>T$SUVXuJP(&V=ENsw0jY6BO(N#1Q8Sz454GVHNtlh>ya!;pT5hu zVv%5^dSCY;*IGF%D4XQ}KAq-h6h9>q;GXMFP>9o<>5MT12{DdDK)%SR{hIf^0gacJ zGDHS=NPRO zhr)Df&||UmC+mEm19bui1fwJ^=u*_Qw0A{BM5>*)v3$S_zcbSaIuQw2yujcP&HCmB zD7~qWCMZDJ4HUf*1LsRKkjvmQYXnUNY$8fU;~>u(nSu!`7>VLXgEv9?tN7g z`2mLR*{!W^pqk-8#anQl!RaGS|4?G-7o?g4J#T=@^9O_ClfO>A-rsBzY9a^~zJd^>>Q2uilysWSDp0<2V;K zpvI6N4WpY8El(CdyZa|$b{~LSaxo?gZ9l;O=>u>@ z84mtU)w(u;{00Z1lrzW5%yH%AtmyR~qZ{E2Bd!`k^=D@~Q$OqxZ zr0t}_Y%x}17&-a-)M~j~j+g&29T_TWmPqu}P`O*?GB8eRHMIwc0Uyv>XBUS;VeZ@A z*G8VH*q+)%I1m}afrE$OETiub4_wO)ErbL(IGmPyIJc&iaB{*U=v{Ed8qRf2W3S5d zP);gUqWwcOQHH8G8EKtVYUhK^K^KI>gGx_aVDbwKZuO3iM#ET91V^bX2cXJl$|oSe zIa4YJ0yQe!UHqmSbX_#n1bF66VWv zA9{bM{g%aohP7UeF* z%4Y{M^)5zrcFD$KM;TkqDwD378aJFkct+6#>ctCveXupeDi+Vx@LF10+8>MBwB#w{(b0<{anO)?w)a4O^RU=(q- z_TuD8_e_`qs-ged=M?=x4uuY6{XB*G`Eh45w0&^P-&6Y+sa=z(+201mpwkXXQQv<> z4{R_;&;ok|3`F?({eg;P2ioLMlm5*5#D`CJ&?3MTT#=Wmko2a^dQ#Q621*~0k?&x9 zQ_Z*2(aqgI-{$xl5b*oYGpq3|s7?l!mLJp7zOx55G^}ktf$T`NZ0BHSZ)t1O*c^F8 z=t4eVFClTb+{B>XjSpr?QsG*ep`YfJuVtj9W}zd%xhC0|?(Xgvr`O*JdO+7!(mbly zt}MuLFu!wK4~Ab!zF`Gz?K`iy2hiX&QqjrkDQC<)&}H&-`J@Bp)5!3e|~>RfDz&`jXW{1xVzxGA%i<}>ZMqSOm!BUJdqjYehw^^8`#xMM z4oQNOYJPDsI*GuMF7XR@2fj&t%%O_5CxgoK86?#v2-3*k{~(5C5Bt(3$1 
zQf_T>f#i=l@L=p5tQw}KK8w)pL;3E6JzF48Uybq2s{prez>|l0uVt&u9fd&F^|Nb) z9T`ay=t^Wrx@bC^vWEKQbs6b^eOhlPD}DfkPcfGlReOO{eYpGPxUp9G&s+~-V}v5g z|KbjIj;DK4#9jbfl9G|pocihsl07(Fcc0&GND8IWegFPFBHRW*50S_e0UV}6vIk(N z1NNZiR76OK;Re`IY~Ky;fH^A6as3ERbg^)nE0Lg~iNWXp`TB6%*B6iJUF+H5IUHBI z3+Erx4I2M+#_ZFOl9G)?jAiw(f}R|ZG>sP!C>PDyu0U0I|4Af6Vf`&Q5Ng#KpEUh= z@fI^Kb>|B}_@GPu2oINR_P4g2SPG7bIhAO>shB$)JUYhLpZbaMq+AX;*StJuc(YX{;@SXo{zfw`oU)pYAxhrA_gBk*g28$ z3!!c1LRJT#>h8G>38{hxZ-4(sUVM0E9V)|i& z=jFHF&^P4jtYv+nvg>5T4H7tRE0nEs{Zp5iFKO@oS$_K8V=AV|$hbo$8jyLrf{wLpwV!c7cfuam=cCjy zTcPHny)RscDfNHXac#?REWc1~jiVL%l4ZFun$RTCfq%`Fc-HppDIKkDR#Ehc=7X#G z|DLijE3$ERiEe3TXA1=^l#2K$p810jVYw0QK-j6 zr;4}<6C(7^1_wzpIJItsh^lNER%?M_HliaeY`TQfK=9k+$D=eB>_uaL7%LS5WR}_o z%Hl*L==1%26Q`E%YCrhQ0>ss=*(Wqn3p(Gc|Lzi;S3Pn>ucGW3*<;q>eFYDV$Wh5r z%*2qeh7+yV>kA>oBCg2&3jIs^s}SHpbY&2|d?gZQ)B@9qJ-Nlg(=UE-1^Y z{qX|MHxzoLN5@u`N;A=*6nC)!f*XGL;C(C%Y!C4cZs6i7m+K9*1TyUG?zR=rftgOY zwr;Yha-_AnIeu$0E+@iU4cvQykcHs2!v6SCPqEUPVzK&Fjo*8HiJ(N$4>m*XKuz3u zKtj@dvWu3jTZ4CWbfy9iXJ}wy5oI_dZJxL|=qm=QYV!e7^7c(Y7m;2bNDQd$PtgA) z!L}#u%%60$AbZm1!N39hJ3-W5XY>zOtg3&Y%Q6K%sibCRjs#mM@`(+hyGPcqmb)8_ zs{WVMsiYJ-22v;Hqddcjit^4Fj@L2NlnW0N1l)nN(8VIFmxg2{VoRlgGJa-Y3Fn{w z_Vp3bWU*^#j%5pxGPALvVh8&I;NqU(JU_Z@PJWSKHd+0|4_n{xc&n7tVpw?M0GOfQ zUHL8*>trHccYU%JKn5vqsSfnd|huOx5zy^+k=kn-TfKep3h_F zF%y08%+)^1RejY>5*e9ty6mx0bW(tyeri=;1v|-VzoZ1@cV{G1ZnjFh z997|LQx=NR#9QaRg*Py#G)FQ?z~csP)3d()%W2Y0++6e~3=k`vkzPbr-gK#b6gW1z zG}p|Lr^82vjorhzf4I9lb8#Am0wR01-edxaKtKH(tBIqTt4nQS;R~M3OcPkH$O7GS zhRV$$3)T=d^A72E@91%9u-;yr@ReDNh*b%CHY^O+x`yrU%SQD|i@9*XPgMFEW=8THQpPHX9cDkp!GUe$b9m}Mx)|W1& z4W|o$brI`Jr8-$o+Jxghs+)EoU@7NZpdywZK74~{*v=|0Dne%5-@h*p4i7mNEv)rW zgoMx>92|;DN?vQKXGqI=cn)kOSlH-?%0zXS*e!Yh*=O@l2ZpHUS{;+=QeJT8CSK$! z+7D=nEQRT&f3-eZr|BG=oPP!f?>#44pHjAPQIrnK>GZWW5mByGGyG@aGP_~fa=K4* zx=LfSc%)$Sd6!x>D*fdBm6t;R7Kz*PPY)PnbyLAx^I33txiS_`$HEDxwu@5>)t27o zzqLyM)__-eU6p6t5d;VP)=H7a?*BJ+F$Ki{6fqaq@^l?O^^1$)r(x}*HO@8cx-Bqi zzk3M;JEG)u+L=j=_et)urAI|jI1gPL&vnNfo%hUwk#vp{H3(7UoSZMIF3Y{G!@`Ms zmgbviRV!{Z`O2xCmqVqq>Ii%E8%_$sWMU4^3Sbg$!09P?cFPZ-I?`w|ae=T{dACxU zrML8*yu^;;3}gP&*K-gjH*CzZ7SL4T$ELC zC}Mbc80qW+WfV^QB2R+3xw#>aS3sFYKF6rmAKGx35qr$IZgGTH_J=P8Tt93m+T4hf z@5$&|&|fN|!DyV=5)%dvOW%8|h~FRP?_zs@D73kQMS4%gsnU9^#9!Fcx3Ah!Jz^7* zzjZi2mq_49y&PfdnBTqK&;!pFxPghufC5j{_y~&<6vT|+1i$P5!P|R)_5An$qn{9o zh-is~sH90+il{V{b{a}k(O%M|LYsE9Xpj~y?LtE(sVFV&g?8HNJl^j6ckcUt{`dcP z&bhAtIrnvaZ{M%0@%g+zukn07o{yoHFXHgyK{jjShY$3J4;$v`Y3uj;$Tvdu`}KPn zRtIK&&WlrAYH6(-4nl$m!naDnRIyzWF#=tuzAy3}R-%-!TVuMH%@oO1%#wGLy5 zThsmI8kstMxWu}O9Q;9K0|w$)# z)&0f+gV#1RJ+OrYowXMlFK}MS%uYD{=PoEi40nGM^2Rog*Vz0jiwC=pkX zaoroG#NhzuN&oV7cTfS|0TOR7bBnbteQqjA`sZBry199_W%t$UfF&R7CqsSxi)re* z^t=WyZJ#`o9M>#&X9lt5+SivpFj9i%W$mLQWmL@E7=OTxHmJ-`lVh5C^wNwc(}Ev zN9(V?u{3Q`uHGp|^+5GZFz<*SY5j#J*p7}Ll1gGWN?hIrXErFuFv^OGGCE@-y(k7P zi1o_61t(`rQ|nspaqFRwnBLK|&%T!>d1kU2e?0CsZ`m_>=ZUt>?Rlp3n4gn3UfVCQ zIUdoI&<=?jJQ=_^R4N`NCd3|Y5!(3hkeQOq@dq>fn_YQIr;L-6j!-i=?=v+u-M)Qh z!rZN|b0O6sKs0`{RLGGaW^n}rgF~1zRCKKlxPPU*n{eP#^wFVhssw9P?`&@@|1UX8 z5)~EWb-@wMrHutnKX}w2l{59BGG9Jd@LmbMb7^FxAW(wEX4a^qvQaYm{%B;w zve?JF2Ui!|l*z5Z-6!-csJd+js;L+SZa#nz6Egozn>O|S`t`BEW?+T`K&?0T(SyCy z8ejjctW2B}{j8Lbb(!Z?$H`m=d%LdT>95wimGhbu0&8lEOF9+=Y0-&{gcf=3WD=|Z z3*5%Z={d9`gwz!Wc2`&P@YiM7uE9Mz!$Kq z$CRnN34*HgjkUNn(U#3Bg?F!sbDd3j`rT}GS=_9pOjEMEB4w#@iGk>}Jr&C+7a`HD z{P2Oo6W`sMsq63KLr%h+H2!fZG4@_}j9(1p*Sh8z;23d|lG2>RG_~ zkFP&|JbWVtGvq&FsT(j*QBu#;Np-a}G=$D=A7An=yc4m8*~rGlp4q^QT8yf4yonBf zo|;?jbLFupNpPM!<2P6D49J>#ZAC0~8#EO)7=W_XNL#|9k11L{VvG>-lR`p5MtjP| z#aE{`N=Zqf01270{`#c!zPESp(2&oaJ7h4y!U#vg*(KE;TKNR!WZBUlKl*!nD+r6J 
zs%)d$-GmPn2IG*;g1ybF!ibfydQ@ijwjJ{$$q}2?Q+L1jxcYKFUuG-2Xu+H13X~N7HXN`dq(JFVT&{Z=L^CW2`Xx~H<9&2 z&e3i@22#_9d<~#@GYH$W!7GE1 z$Y2TpD>Io37b;CVXz*;=|EEE&e|64UMV3x$;Q6C6_BlTl$n5szy(x8B-fAne@%L8%$`6W%Viwx}>t>3p7&>P6<DFy>1d zos3w1F#dQnIOBR^@|}+gaKI?pa7tnX*88@bIlK{LB(|$M_iz8sjtjWe_l^Mo*G3nY zjp5v@0WfMB{Zb2|=El149D|PBj~Rc6Ua}=;x1(dxm-MlU){Gl=0T^XFY}yn9s)QZ7 zf{MxurM!0gPG?rs)Euty8)!;gnVNTe|NSa69i3f~lU0O8oKyU!9R8!@HD8gjEq-Ck|%|oOgWoqnBaeiE4xeMbGl@p6;yjw{0ny0dVLc4nF4J zTANtJlVXJB`$Mi=7zdDxx}2 z+pg~&J7(HMlWXiIE6ex&dxkYiJM_JL7Tl5bV_ogrEJ)$fTmOxJzDPXmC^h)^ne3UP{T*%qd@*K#RL7LqXD5R6BA*N{-4mEbL5XJhVz?U za>|@2IM>blm7!JQPo|!y^trwx$>z^OndEBYkGn=hGvHkQ*z!F1$}4?~vdG^VGV<~X z*PC{Ot!*$f$f76A)F-ll0~cU9uoDbmO)>nHL6emtt*ZJ(dZZjh6Q-e;pbI0T*tQKd z_ZG-h!tL?E!E9x3F-l)e&QjD{czP1FS~Ru085n%wBfxj*_JJ$TEAt=@zvkF^?gKab z^Bti)&aY&LcAmY>pzv<+EuMP}_g|1fDKVwxg%XS~fn$3CHo^!*ooi)f(gOSgNOUut zcx88Y=4U)Lu2%rEC00-Q9Yo^MZWjtnLvsQK25aBL!qi{VUqCrUp8Utx8r5>nhd}=pR_N_BA3GfF9va!a?$sWP|uN5fyjx2{W`;?X0L^vx% z@lczZ^2u`l64!iFI}=s+<;^&`fehth(e-{W)mVXdQUVPnlx+1VKgQ76gF zumtWE4IdtrhK~Usx>y@heY5@9)yev=X_41E=a1Eey{@RO4U0I@jS0kM*1(L60IGd! z33jGdzs9Moot%behGucKkT7p#V%l!joU|FXxvIX2a0v+GwhBy4+$FxQ%5mu*wG45V5nzyrRuzalb()Fo!#hV>OU*NCjFJ? zKn1uk9%a=XCm;LvS}Ed$XQX9Z|JfO1(wwod>WNSX|Dh1p^49#T)d7!De*Q@joKfA3)t(aHM71=oU*FyCd8`$Jc6vBg z_z&jKFIz%RX!1}o$A4lHbKMJ8y{lU%zqu+iIFnEq7=u27Uue9HG3uAJ1%B-}2L{lE zK%~)}p>2pygoHwEBLM?&1mese#4iVaxfVJfeQ@x;V%(O`4GmOaZNZ_X0#auT_^F7w zGya4v7o%=>@JjeBJY@*&>ZjOAg)*>WjvPBS3&WLg^$JL+G2RR_`(Cbf`VU860RF{R zP*ju&*D^q&CJ;MRRQT&pjyW3{e~jYYzJJ-${2m%urLB9uM21~VkGv6Qd`$8lOMFsq-N+y1$!>200t@7b$=fhy^Z`u~noEy-j= z5%l-wBAH{CW)d+=Tb-^E1J%_Ei4)X#*E_$C&!x4@k%t~t5>cziIEP@vZqC!58o9!wMEaK=Gl)CiJ01E0|b zl8BS#GXu%fzkV^<7Czs^vOd(VRsrzHexfTHvW&}cC54F*psaV&BX3ZB!2|~U4rMMo z2M0-Q?I-Y_WRL7Fb%}(O9Njr6}x&y(|BxPbQ#cz_rRE+*Lbw zv|^&$>^(D1hsL;dXmGdW8Pa(-j)iKI$Z#kS96%Ufu)t3kw9afWX+fY8=MaYg6CBsk z=HzW!T9XpMg_8kSZr?8)2BxIGfx$aZvW+jUmI{A;nH*S1m>Y0$aoPX);tS0z4J~ao z8lo|Xdb3YuTlJK2UM-1&m>BF$C-An~VGIQeB$bF$C?a8MEiqIVo(kI(*sUMLivTa2 zgdj_jc%$ABFbacQx(rd9{_o}`YB^%-NWG`HKwBf`T2ZLbu4_+T9n*rK(v z@4ppcupQqm%{B4t+bh`*#(a+%3Y6dFZvHQVhDAW2`04k68hY@m;&=tNw7>lf_dg$^cSX!oo$wehmvV3b*fInDB|Y%G=spSHfA^EQU%0 zh92)QOJcv;MRw+l+wFyUrJ&Q~*CArStL(21Jd5$Q^V%vqID1MWj^`F*(CQXgbPB*7 z=snDor|0I3Pzs>wGhWsoX@#s8G`y!S3m*-|h8yDvsxE}UgCHN@x_vv_eysTT09HXe zIxg4%ZiA&o&F9Y#VF(Wo4^?AE5fKqw8a1%jg4qfSnyz7@J8zloriVdP$JES_+)?-_ zYiiQNQoa{-+-R=tySz&dg%YfluP=3IqLZnb>#eg7Y+*ZZVip@QK_ebq2yexzhb+1b zg4RrZecV&O$1C1?-Km>sC1w>E{e%AbGP@h1vTsEWr|_6xstbVxxAbR$0m@rgl#o~a zn?$9$$D_Q?@wvi&rF;WeSQ;9IExq&mwQ2L!4|82jO;^Up>%;AE#6*JD1AFBYBxPmg zTg!92cq4~7xcnRzJfvId9?I4(Z@*FgV3tk+ux_~V>1V#a>=6;II;FRL&A?5EYoEZoiawCuDqz+};xtIMdoxP0xUnw=@=Sw~n|iMpcBP=&fo01Y&nr4w*s zGltX~9Ozkig`f=%Vef|PxLvpvJc+aSM`a=p04^!noW}?YP(&|y6(!Jq9XjpSh zc+p-ZS#(as2n}E{-ZcEP?B*IkjHK+xZi=3Hg4xS1xXNXQXaEof^epNglkWj})19YJ zdGTs_aYO%$fP>k@ChblGPCcuJt>~}Hd6i=a zfw6CU==0Dhru*vVyQ{QX)l#-mQC*uEd}{ye+x_0&v+7w_p_rHr`{}=jUEyp6I~+y) zF=`>F)cjn!a`y45v|3CKaj3GR`CJ~I_IN08@0Fzgj_r(J;5I*^lp-$)jH4EvsyOaQ4ApB8b2~5-6C$@{bU8W( z5qD<1yE*bS3_rzet!sT7fVpP{x`T@f3MMl~U(*^wLqo$g-ak(|fa8M_VW{RKT$?^pf4pjIU|b80q?(DEGWd@K{&EhwWUwjhmafI(R|y{Q1y(R8LaVx9#-E6b&K+Ke-+YG4*PYJ6dVSIh7c6Ps8C9W`p^rwfuN@8)YKGA%=lxb(JN~!G_z0Ra>;Gq>YWkS zBOLzx>MrZq2);Q%@xNNSdBpj<;`=zBxm$Es&CD+!&Lg#SPAHb2gV2eqiRO=<Jt8Y<{#wDIW^ilZ*Zns}LvZOK2zBO`4uT~^ z8>GD)ckf-ln}5D~SfvuEJe}*@NgnO|n@S?%FmMubng3No%L+s}KJRwV_6-g_2`n;m zj-!FmHWOSYLu*g|C802?U0a)Qzi>fZ-)!Y=7>5RX{2>B^?@cWB?C$RNeefUw)Eo|AAyguz+5R-WI{OOFR-KKD>Ik#iay<1Y5}v1-fk$l^V!Pi^y< zarMYvWBDg}%&~22Dv|o=VBN2aBAf~ 
z;5Dq)NBs(yO2G7(qGA8u13(S|6RV)0rhNP%2q4axzl>oumEQe4?cD1<^v!%uy5$U| zly2u2{f&%5HmvkNr~@-lXgP`36Q0peVbYI#)pqzJ12M41LC$t`>?tWz3(ZsA`YEx8*k28PtkW}kbvM;>ZXZ(<4N+5iec0uE^BN8uQk zcziG1S0fb0NI`Jx>Xi%UY82aP)`fkg-(20u=l9d+nMRR(Tos0O9+B7MXfR<4=%=C4 zbg&S%I%O0l+xko|)LF=Cj_i$s_<4zYX1~h!SSTqDxtHVhwEvAag0acl2{O^61Wy!eHz-S-_kwML$hm%3#~m998@ z`AXzF8JX~>@$|l>rsPH)%zDuniL4y@%ORU2usa~AVI0PH0pFk*iH|WI{Hbws)rlb$ zf*h6Yqk}@zp))FMM=TF`Y(M)nl8)D6)lFxW(EZ26HiR{f01H$US6eigrg-;8_8)97 z_9Iutf`-kk>A9Ju53|oKOa%S6vZKh~lH2|zJ9^>TXSxUa)#nOVAws&G8MC(~CGa4? z9M%SiWTxiQy<=m!PTdfGcXEl(>N+Kb-aIoHCJW8=EY5);bWgp{rCsA?5cd= z-F+x(Lxvk}K&})7M+}ROFUQzM`y+SAEh@Opsa3$WOHCvrKE9`h_Ate}E#BXQdY7NB z9H}Z(<7(k=N{)GcIRBIS{}u9G9Cd|;NN%7y1Juktn4ze?98Agnr|77lNZk~2farWo zGhk-n1Kxwne9U1mB)m2|NXf}P;WaebK)rkB!Kc_#LaOUeXVni)MF4#h4QSqA_n7SK z1kh(=Z(m)@?xgVrZoa*JP1K-(K^!GjxauJtVS9Cil{6G5#N#|p0ZESx6toZPRq#{= zJ4Fe#D=OBl6t~i|McsbTJ)xspx^_8Irz$%BUvTeuP{hR$*?x0 z=Xy^W+1lD^cFoW8a`%t9nu-;X^RTg99C~g3m4k4E1}N|r)P2H?5Khwl;E6cXXC9;& zldzJuws9d4+=J}%C+Oa6p}8$`NUttKBWqWp)TQSbGe@_cxn;ctQw9)3(skxR69ap1 z1jgxg#vw;uTXZr)aDQQTcswV)_x~fg(Zw4qPsBYK|19SLBc#^Q&~D9Ci!CXH)OK+> z2!<`@b;iN*;s&OJ0;HGar$*vWY+(?0-w`XaPz7TGR=J|jNisig4%Et4hH4&q-*ZSM zNjdkkc1l)^e@+fVvfO$_07D|~VSm}QXP^`fR4?)U4;b#n@?By0dz8kg!B{|BdzI(r z#5(=|0>piY9=p3JnL#M~4c{L?Fzi>FqtJzQN#KifD1+ z&!Xb8zE$Gt(kNvw#*g8_u;hYO?I$>I!T#k4#&OV$D8sX$q@-kq@9Me5#*ibvUyKU1 zg2R$jGnq>k-mc>R6=jvpgPh$4yAS3J_V!kyn`D3-99&U?OUi4w%;)&a3S{?Z@=o)= zYP zxG}>Q3oNGuv1# zZoqJUd|ipo&f*H1e8-Su!k6K%T0T2*R1 z`AbD8<%Vd-{uBEf{`Wzy|4>-D{gGdK{BH`Yo2sp9sme5ZJxwGGX$G5_dik`db60{zfat{=m*3$+Q0s=my(M~5ph%$^owSzh?l+Tz0nlAf$J11pYQY^MoxxFiLaQVuG zl&9Mqji!xP@8C3oW8&cR`0u)GFpNEITz{k7JrOhuVXrNUmv>N&w%bvkzwfA$!mV2* z3Zb2b7_*QqBj{0tN6%PmLnP+^1O3WzK(pY#yNLvI;|tI$K%rp7@Dpd;t9-5tFv`i% z;igz(SLE8@8yGl*FqG(sa@VC07>07@Lx^7fvC-<(8!NqIf`Sj1X4_uMD=^W_K1MSG zpEv2DY+$4&?Nl*6_}SixiH~N$^j`KF)74Egz3slfHhvRME2100Z@2kTkbe$pOrHN> zv!Yx0FDB%Sb5$MfuN_hMmwv8938ce-rVmpmwZaU5T|;%oaaW1sonz7q)*%PZ6a`}j zDX<2TmMPHL~t>qQ?GfXL!xCs4GE3!+C)6 zA3dZda5Wfj;iG(^g9(QTUY%;SoJ@$f#M533k+3mT)3&2fk=KC0kzq%|%_=xR{x8Uy zp{F-atEjYas7*iSlTG?SKbSMPJ|NS%lcD&%147}{)4gRSv9!|a*1hg-h~hQeBjOm- zYoScJB|=A1uI0hIT_D%fM;@DY_p=@1NUJQq{d*F_W8%n%fNgI&;AT+JY~fvX@|2o( zEEQE?*t)co)YN480{ZVkFb}?DbY336wRz`+g#Fy{6a`&GtyKYYNR|oxfsnSeq?S)S zrUtjGH06%ZX3xp4d`W#T1mhD3a*}V^Y(bjlSa+wi(DywFU>Ra8MePSrj=uLD(<_YbqBe^oah+4amzSRHjH6s`Wp#sZU%H z=IY&hZ&>J&&d_wr(|H;tSv~X5b0afij9+NgZ)#H7&d?lwXJgH-18W=Lc*7)0|KgWr zcee3fx8?1L4nH$FIs^B2!iHUg-|<(LdH4W?i9H_iFumDCOKYYfiUVsCm~~A1busRm z+;N7WhwiQapXj0d$(_z7>?C1I-*e$LPt$IY&KBBcg4{DR)u;PXJ%jwLtSluoohjCR z26f~7jtphDrndtV!EmL(rk}y&o+M9z$GPd{y*xR&@0yz*!-`fZLu>!R@gJjjN(f)O zD=UBQ99gu%n;bh;^-9=7@#PWOY&7&y&cy+kxvBM^;y->?N9SflcO1mx7q}v4f2hG0 zERR1!@$^kiCrD+RGj(@L1&m4(M@m=Yl|uV5PMG2h{Tnsa+yWXo?s&vHW!7cR6Y@-# zc}>VHH5Qq>K28; zYneTZGRV)*00dD)untS^f?eUkmWN88TBg0s>TU}U%MEb@!34>aFxyGGl((D7RjvK* zseifmM2oiXMA{0X#>n9AQ%9HwTlLUDtichSa;fvR+lugzs5@>;fP_O!v($sOh1nnR zqDlYe*aNT$XFWAs$DYeYuHp08@EOF9IMJm@;9@gCR=^~y16Y`-k;TINyMtR-xE}Wu zD82_ks)SX@_}nP(#X>Q-_WbrZF`#pyLvnv+7Uk1$WzW&k+}9q})klHxMWP$9fxZI7 ziTWtsa#HVj_YW`QdX>FhqK_P>?^`UAe|&tJ9FPDR=T%Yv^Y`Z=vDQ7Rz;r=g-af@; z!~J{rc${Z06W=}2y>18w631&gQ06^7RZ4E{dYR7SFe&yBa*Ax%B^}HU#Naoy_CKP8 ze^#7!qsc+Rz^Qf8KT&#t3(ST7AmcbruhH1l0?&Js(P2f4Y0ow8HR8+HbKOoXlZ5P&7%2Q*Qlebrk`1xd1 zIZrLx^~sj0j=Xv}=T)}k93mkNs0x4D4a*y?%>O3Xvg<()6yEmr9E3zEkE3petopS$>M~#m_NQ3S~dx`<f3Rd}8>s$P`}&Xf*`VtX3O zBt1<@S2pjBo?-(pBycFx?lrLVmS8{Z$L3=L1{xzHVe0#B5`4*TPjs9Es^845b>mcX9EJ^jJ%tk2t%7+MWv3Y|a z%y)D1@-Qb;5<1(7bLg!XbC2#p3W-lfjC`E_PgIfus3NKwB!?!qEv+;SXK&T}whPiFlIP>(ZGK(|uj|P%Rs5D~#)?tp07k+A2mWlsb@0r2HhqKuxjaRS 
zC3A9r^25BOTT1^Ey@#ltMFA_u(1^fkUHeEyOU)U`XS!)^BDdS!vp0UenqABsNwJS)Gi zp|NGvCo7(?!DUr+myKXoxrcQKqQnV6FtUJ|PiW;Hgy{tmGfLPbsMl1$vm;$ppq=$_ zOWQ-~%UZLUCL{IErI(dcN9$)w0z^ji5-xk%bg59b9S^=&Ksi?&>K=4a+>}D%YHcX( zSrYhY)qyKBZ&-JGTiwAR0sp%omEtftHcHPD>YG_=RhvIh|6u8D%V^z5l)fA5Gj}ZrM;}rQRx-P|ZB914u7%G#D;`lq*OzFrf2*mIT98!{47;J} z^7TK#pbsbkjSzPHbrB@j*_RKj{E%Y*&ZDvNRP488o4vX|m38Lss`i!-M~p?wcGZ@# zwl&FLO1j#$&Q|DP7#9hndL`#5JkgmDWs1oFFM9Tm z4L#j#Zjkh>uB}XR>XqTTC*jh>l>P`iyG(QnUM#_&#Xi90dlgWL#gYwh_5&-D7Z*7{}6lr*=T`gM0Zq$ zHWkBA&kvr0ugpuzpX#66cz+K*ceIv0&UM?s<|UO{;M^j|hVBpvl89CfL-a2z>eN&~ z3Wz88nS{1eX9VI3wp6&1w`Xdw?IHZ&PXeAA%7)jQ93lvs*Vh)r5orl5Lwol@)Z`A08In5Z|S`BSUoQhOOT#cwT)O^%#} zo43+&F#Pqho*pkr=DSgU@vBDR#hvFf2epOnYmm$NuP}w?pI%Y(1Ff;AZr7&1&Ur!# zVn$|fYugXv!Q5m|JlykbMnAJ4?$u(4D(Lb3#%n z*i1uUGjWf&fd#{gP}(745nIjn?PY3l-*lF{{%9DRGP_=#GfiyeA2~A`aXL3=+8mwv z1V}Po{0u!#815mD$_tKfeAho8L?k&T5wKB+gkKdro<1P-c5x_Dk6zIFARgDogPfe( z52^?eoNEU-ooQaVlA7Mu-2Ck9oedmj*>#5v8QPO&^w!HTdAWuS?h5tB5qv~AxaBTh zVEZZIF3lCHbhygfGSGBL`^sqr(eoh{zka*<3B0(Ql4@2K&PwraVH~dtC<@^~gKr%< zCzx=VjfhD>PE``Z?&cr}$7~vvz>H|ExOgA~|GlUv`XRVMJ=YUH0!_u8BQhU5+V6~X z@Np{dmA5Km@2QhjkY!!03QvXj2x8fx#w7?J_DLLcL8`sLeA#6IQ zN5=E5j4ncaPve_x%XpV_Guq^vxAyJfUfEdW==jgyt4acF3QOeo1k(l5iNz!vYh*4A z;pDv?(NLdSjOc*|v9Y z*oA~7f0U%ZR5h_+NNuWXR_=PDv846g>v&&tzgSI@c+upeiBo3m?0fuNPbgA*TlG9| z<((Nkzz?LY(f=mtg>o__GH49|2*}3@?**}l$P9#EW0;v#5R(jUYgi8s-tFxAv2Cqpy6xkUP`6#}-rnDwWc}?`B4tmUNIsDq{?gKqR`+tLmigGrIc0Np z_Y)cBO_KXq_sU78QM|6A-EEY8kfGfqBzkt`uzYgFp-}ll@goC|YEJb`Mo%uu+t!PW ztehG7))3_q={_Pi3Hewc@X~sCcz{B1*&w4rWU?d_ zdv$vFiPcQ?J}V+l8{CgeNDdpHclM#)+bG==VDeo1fPyS<{_lOaZZC$Lc@($q4PN^y zniuV9y5&Pzmc#=-)3%J>3o_YzW2nO$CtU<)f9P>POl+BXk-3W|ZZTheg`cT@xOl-S z;We$TVt>uamESJ8TwKN- lyr15CKC6cX=#of9~ycm3_WK&giddMiGb#Y+vrZ(ni zH0lp6i2TrGW_G=mFSCcv_`B(9(Kec{=6ssfi;7!4Bp%paizF>}JN4{>h5!MQ2&bqt z;v;()8{2?BW4y_SS|Y44Rc=d8PacIvT7QPi^MPN#)KmT#n-1D>if#1dd4KffneXND zD>M=h{C^2-y9JyN-gC@Q>vc{SPZasXtFj|;8;duain!%cklslb(yiuM1ucOeI>T-g zODD*aD<*%rO9=Pe2`%@CUSRj8)_+dIz5Std;k!$_4Q#TCPf(x0TVdrF@h4yDsZRfD z{iNMzms`Di-_qSKiFqq_cb%i0teeYUoHPFZoLWN1D3U~v{7dXcE|D@ki60mR1#@{* zKl>Ep88o#yYc=Cl+y-zT4P9dcXoE}a+xf})jIVfEsGBcEiU%>tQ?(0 zPx){wy7_&*^8SpEm}vW-Slh9M4?*^}fj*;`+P#aC1`y0qICtEeny#IdWPXyd8e8f{ zR%tcf>4(}{bg3!e@^uW%NKjmJQcpAJSWtZUBZVd5L)~CmeOCmFfxEG54+m%5h81)n^w_x0vaNcWEW zelbsOUXV`W!3-8bRBUWuyxz3Q7#uI=#yjbd5qf)W^euMy-&OssYDKTSlIx9YOjv(I zzE$5upN6H|WEs!$NPOY_-XII>Or^xG<-bpfm4tLTNEf6nEeSVM813)%#2MN54kn3KV!o$i2b` zwHriQw}tL4dwLixo1Xa)&*_13Ejye3r})HLhp)}w(v3fQvJQ2uf4}tn)gJbefSRqD z1HRg6*?Lx&DOfVAx&ChTByc{Gl9H?u!ID_|D(M_*YnC-W^Jv<8Mx&OX7Q_}3r3_bLR z6B84_KmrB_`y~~m1L)rtPnP}Nfc+^+#<7M<{YD?Ps4N;Xw~wDm(^=6_H`ZQGZmp$S z6z3%chIYFz;E_Kuh$IOj>3G!XVL`AC;hBv5ZpoW7pf+Td-|WGhKe7 z@I{wXx8Uo^?N{!J+Dk5d`RG@(&?H@-=fUD#u)BDbvFP@1bA-bx;F9@#{bc?C%H#n+ z%K!LL-=3Yy`8N7^!*zPv8MZw0liUl8{cUu?Yd%S9K^|EXqa~&FrJGs27mpRMa>P7( zbWUHt*SN;c&Q5NV1dcTB`w9m}j=wk6JjEzXyEL=7(JIn~ih+GZDE!urz-;Q<=7-%C z#gryaQLt2oe`6(mhlLJgH`VjY2BX@q^vMuw^!HU{KB;=S*jsaqSMUY}r*cWln^nh^ z0@c;!{*k8}Tb%ltn<(94g?FT$r(jvD<$B|Z>^fPf7D3$LADAr#dnl zuM-Qhzo=n#{!)IS=~E@YwtZBIkFBm3wmr1_RQYJ!jN~1+N>$DqT1ULC0>8w3xE zBxP>YR^c6@>x+n#qE5ZF5GFZ|%ULdULoZww35hph9SHst0+a9O=AJ|g`=Lcq{1Tq9 z5d%HTh3HqBdy#N+(cpsspF?DD=J?i&{b_kLuQk;QTaEKL=5;g|h2{n~uO65TR?p&Y zW5nC<3Hw1ig^A1y{BYPv_oro*{rv}rD`7&5-T{T?Es_>CL#BUV4k;CCiG=?TuFGsDIx>E}O{mUPi9}X;xicroUYI7nC06XAarC zyjrTWf*t--=_M&DI$Gx9MG6v>vEayjetLdO@t&jJ)NRkw%^WREciuB>rw?Li(%hEn ze4EDI>seif6;{r!chA4X zCdsUS*^m|;E*NKDN^9l6H&NY7iv0^K%jHBr)3qQALF!nQrH>We`$O+4_c}R>yOVL< z`tD-&Uq7aigy`kzqB6@$SdRxvC9+R{EwC}{T)KOA1GM8(Xavx+C!ym@N)C6eJ_djk 
z=I6c0*VflRm{mH@h#kq=0r_$>Fj=Jg5laAp!)56aKpRk>?bp%OHG`dp$J){el^ZRK zx7&#;#Sg~r*Mv@_C!0L&ujle#ELc0cA%nG({K~c4f9}{9y66;sQG2NUVXeoX&V7fR z@6(QiV@=n`vztQjn6?&=Z!%ECkp}+r@BB6e1&>G}wCjWj0F5q8y{=)FMyzbKTl-1= zRT98LU~6uIjuO-YqVGiG6^$fA5*%I$_j9_dx9*@}2N~o&f1~L6K*m#Wh}@))vllM! zk0BFoMpm53$0$t9j`;feZrZ##V!-J<1=&=2IP>e1D|WUHT0i!hN44D`ze5(db>|J~ z=R2}q@%L@x3#i?6>wZ&8Rp^kw%$8Rk^=5t_)kSR;l)7yhd%l;}|6NoJjLE7?nIX&E zI4LcNTH4ih)p#Tu!X3l2!sZa0y6Tf@eUl3$52gR@BKBe-S5=(7DqAZl~x8kI*B3-Dbp^x@m1GP z_74o4cXZ5`_ukcils);@WZLPa*10oBGwuzW92*v!KP)De$E3?hia2y!uY^I`GQ#)h zmhuGWMw3n0eTntECA#`$pYU=Fp7Ddgj%N3630%n#IUKC5t>XWQwRP^=H5S}vuk*mc z<3%G6i2xZ9R*$pyX5UU2)$C4e`hdjPn-*N9)l~v?S=R)fpzxFS70}9vfFk2t2L(%A z+}D@4ZcOu(3DDgy7oqH4bk&#l+3G=(c1#L=Vk}`*ca+VJ?yJ#?9LojP%YBw)0Nvxz zO+xT{7`||j(;iyrvfBxiv`U{Rq3!4N@82h;3rA`(5Lp%@&LY#2Xb{3A(lDiVsT<%d+md8k=G88J$S`)sirT1J>++TNypLI({ zm3%{s_SB00(?q%5IMNIvwJXrPg9;nPa{eAjkm@cv&DC%(#s;=UiXUc5yiXd8jE%#u zhekw5SzGgv5UUv3Kv}l9+|z-))`8*Sd*$Wg!m$&67F<^cWYKU-V?~S~XJs8q5zC=g zYcK3_l{!(>L%X(F@btAGq2;OtQ(HW^S9RV~-*=kbT##oz)a!?LyNpXe4i45x5`7pQ zU5krW8ZHISlcftc-{{C4K6dOraz~Zn)r%kiu-*89XynggS3!D7vuhXG*$Wpo0TfAk z{=BmT$=f@utgHZOo8Pb-JH?e>c#8_9JII}Kxv#Y{kkI+9LkI51+mO`CT87h``}a-S z#I-CM?e4||B|kdR!}OJye53>2a-X@5xPM!by zOmf@MuV3j0T&K?Ep>i^Qt(p1rPgco{pI>008V-pc2z=MT0#*V9!g1z|E_?EqZQLaZ z@7}%pVpaYp1zEY=dw1=6dg#!ho0Hre9Lfz(PHUh@RYO(*hp?~~`KC=TkYn54!6f1+ zUszn6)^^q9(tbhfvv9@N9K-Iw8$v|a<<#TW=DJQy zWS|fgvK!uxTm#~(A+QMqa!V3hTQvL!Mo?wMht9e7Ig}PIx*8l&YJ9gv$v;@LS$*5d*hiq)v5j6&rmg| zz3vqD1b*W>T}4N*#IXmOz$d%6ok0Q`&<_m_#;}BO{qwsmM$qQnfo?U*>>UZz>y~ow zm`msuw%6!Zv=|th!(rshnwfN@!zGWku*&)PkSV?cBSBs{X7t5mv8$)KyxF9a8|WRE z!hgTr!fXVSoRi4!B|2q5#(g6rrXrJleQ%FvmAu=+?5ZC;;Z6PKLHzn+IK$RN8Hc7` z)7N}|g4lB{nmU_|byXgyCDLN~@EBGZMGU)Mx}zX>I48n6*fm%$ zPdJF{XHm>Ir+apPTM+Lg%POln(;zMxG%J3(v~9QheUE7tTvo53lhBD(_N&zkFp-&P z+jIR?uE5VLGmBZ_M_0)zv$!!Ol=-tic(a4GmD7E%<2cKn7)F|s{QdkE;rLA?ArrwD z@82g@E&+__Ox%a#-S{=fTJd!X@v*fFW+%I@j8?60&XAn!9~y1W)RwAqR$z@gpNP&l zplWiwt?By7ATGJTujy~`3&enmgtmyoMq%ylo@eI#(f;&rMtNHc4{SbZpSO8xO{Hlx zW-f?p{N6A0#dHtLXm)b%|9+-Oq~H)LKxfCTp8EuDzN#_ChaAB#(`naVsg`D`h7F>!49##quG`nZwplAoh1u~k_S9YxZM>M9PMQQ5t-vL z$whEpdwZ8$bnf%{d_(9ilaL!;ve@sxo8+i^YIxIQ?L)LvC?tW_OdHn_zW(?R0?l}KxAWDF!|UBA9~yE| zGW&;*WqeQnf@*?YY@?wG*(aYuU@h;tIqHZ<=dFufmT&&m50e62M^}`Jx#nuqZ=7~Z zgne4jR_AxF@sIg(gMuxaZ{$;OVOqmL5Leol{$(IivKR?}f+R`XlyG-gYtl{y*0b@QK$Rh?o7@-$(?R4a^B*LE_2-9UhO}9no6wnwh*yEUojVFY^2}4`%vf^ z?-z%{lwB8p-zwu;B+F5kv&*5eTgiA^BU0Iv-0s86A8%O9N1`~xa6XSL_?d%~leHQ! 
zG;zmmp5)}@zuI%|gUnXnx>juO_=@_Ot%@wKO|5^r2|sPdJ=9)NXLuM-JBdq2nw%~+ zRK85P`e~x3CG1?@osIi-QK5ee*WI9RZ+{X~yhHr_v)Oixft&rS41z7asduOwz2;k+ zy|bF$v+a68qjF$yqI*?s`&YcCK=!PMf3#KZY}+O-E!EG&^sD!!H}$)+Uo+c@Op{x4 zP0})#E>)T5MJ$F)9jnY>=~4Y~e3rR{dM)ADsc44ZohSCLk>)MLESDRR3OIj(D>7277k4 z>G^5*;cM)?1wD@YEKg_)7jWRgiWjer|M_q@n0nv78^Lm0SvaC{)9p%f7sc|6;?E`S zX>|EsyKiGz;m|wgibwm$22G+r)5RTQ8|S{Vc!k2IJ{$WCr8n^=+Dn}IF`b8(L1E=m zrvcZ3F+HomG1tabiyPi%s{omYx@u0h9=|it^YtU+4ote?s}jR`DsU{EWY}l z?uK3sJ+~@E%=;nD8`d}1Pcn821xg(zyZk`*`sC}4@|tb&?=97J(*4$38RWARvIL9L z`RsmNrj2G7x}M?lMYPn%Jn{Zho9s@Btyr>8zYSTv!My>_`UNxv{`0gYCyrcDqj@vA zGi7yc`!Suj^#;bW|8-BQrEmqUeZAQ2=YO?tW%Tfl%!_zULAo0f+Ha=dZr{D;T|mCN zJfbD`ZI*H~9hJMJx|sQCpHA&LNBb+rKW>_H9ozlMQ`tMN%TuvOY_z0=NjCy*9YgVv z{1-3S09#oS518qC)xE5!$2^Q)Irn-GRhcd{6b`*-u9)aiCQXUwY+1M~zq9fJ>DS#wI!W2TNoZF8){IBbRL2GXI<}ia8YmJOtti)pOin7FcEg?LPNRRLG z{Q?^aymG{2%}b2)4+$XJ^Nsq5(qjFjE6j3%LW$RHW?aP8#$AY9_(WiX zme|jgjqdJw`*qa|FMhj4Dfe{mWQ>t(c5wgc>v#WN$9$%dq=IYjH@oOaQ7C;1Q9)cC%uA1ZzfbJgqQMIIzcDjM z05)guV4YUm*to@S-Da1Bs=Rtyf4P6$@Xu$z_u%$Z8?Cx~?zI{&Q>JD9z|~owg2y=& z9|CHy_Pg?M9c;x>Oe9(XyCtd?%=&ZwY-8F3-60voc_M9q4!*5)?Z#IAuS~|CMEzm;p4Z; z-TyoY3d$!21R}Mk_wt!vJKb8uM3#`1e}Gvk=JkN9A_jO(8c^>C+947&& zkL8rj{_>mxi6x7+8a@W)D1~g7-w|<%Ipi(?|MpONI=Z9QKaRp|BQ!OY3*9R83bQ_} zJPZ#X0bdpvEK2j)_G>xnT3T93KvDUi)_Px61wXH`T$qj4Ehk`p8YXqU$NYGJNpnL9 zKEHlj>`pp(BE~U(H&k1P3wrf)ZX0(@FN9}Yueot!;~Wr4RCKZJ!MP1edFyMeL=cGB zU$mN)5EEl~sedX7e4yUmUI)!q%#D^zxUs;5SjT7Yh5JK{ErHSNE!c}Wz!i5H0}Ldu zztSV9y;pLfh$tZ3@Q5jIceeX9jw>rEC80e}SWdtt3XQ=UU=f(MaLvXBTcQEK6lV0Q;GRvia)%(9gHiS`%uPA2QKva3C|r&H+!ms~D7m2rj&)UgX`G&rJv!TdKB zT6s!gKRf5>q>tV19l`1}_WfMR(qN1dPYOnswV+cIQFTc@OlCQ4w4-2Zr|A@E0qIC@ ztx(CQBdH!mS6)YnCo(cJ1>i{wa29JDcrhlD=ouMPBQ!>FCv^<6beMK*HH<;+z>Tua zTqrt1L#YJCQ97b7=kj=KPC{UC@S74_bYWy({fit(uAxk#pFe z0s=8EM81ea0AxHVFgfnFSr4M9N`()eb^DtD^wy^C=7?eape1q=)qxUA7pA^rW*TRx zm~H1{%zDFvBoQbJ`~)y@qOBeB&`ToBZ3Wj8vOde^ure`SEy9al!5}BjcFQ~iczs;D z=E!+Ce0ws+lPPag>~OXe%USD;*_6%DnWghwWUoS5_H~JOeEv1!7QNF8z10d88sGcu zhvEuFt%r&5mgEI&fY6*H_S^sY>G~C{Xa`3}99`7t7$<>vAm)^XQ~n7$ib7b-bq%<~ zzdfW&OG`OJDYk66cyuo?lbt?{jiyqrS)vNb4^~&bgoK4(>GA1WJI*lc;+I4~jk7id zm8kX4Q$KEweSKobZ2ROw-*S8L={zrZQohNR+Sy5eS0qi$t(NrZ<7|~HUP9h#XT(uB z-r;t2(ci`lV?KWbhp16zq&daFpNPU7W;T-AI^MtuscZn zr>(SlW&?eavd_)Kv#}bV)^9m8-r>c}ZL+AppDncdr>i6pSGj#sD1B>cYFb+a#wGr= zbmD=GjEumh`=d+!rB|R{A4b|(gX_W%*3K>iT_aF8(}9`rIkg(?8oM5O*qBgO#fy zbng@`@vU3XPVYPRRH?xib*07@!U3uyp~cTwRsbG3Jbntv?=QVFYMlqiMg^8Y0hiIK z2lWzwoA6z*7qBY;Vmt*%N&=hoA^dFsWRTG9P>B}u`~BMqMt)?32PL2i5vWGL4`Rm% zG&N$GTDN`~n@5-Z5qKpYeSEvuJO|w%q@*Px5O)1r{j5X<^ zW%==`$y_|PCH)7hX7O+3tiH0OxtWitx`Ka?=TN}gX-$@CPz0$-&(l%MEFMr*O({wq z9~d};KD{!_-1KxJJRkG>PTk#c5qa$5(o)aF#AhT~_aN)WN+hA%7y0~L%xr(S+AQ|d z<)eGu=01xtVb9t)P5LpKTtEU{jEya|_WSGVLdSJ4{FG?&e4oNn028+t-%AAm0>;Db z&Q2p3Y#OCEa-Y_JQo6eH(cqe|en!gMwrKINwR<^g=MiVXyC#b1Eg2Lu%#Oy-RT!QxQJXlaH%NagUJ z2M->2Esvz;&q>6EqD2sZ<_Y|fZ~FUFfElXtM5N|BNH(!xzZ?Z%e5E1wEY4Crbfxfn z`JzCRIXF0O69Yky?!`=mob&ZT~uo@fq zVT|R_{r;W9Zvw5A#Hg4ucImkp0i+iO1@Bw*K20>P?9z$F@m?YygAAx)NO zKKYN2TNjsccdiqcpWS+KW zRg-J=e6iF+@ozzEa8>y3&*nDJ7(RnfD-K6c1i<3y3M!y{ZDX_Y_l_JzZ7W^o&%Zja zmG^k*+jZLYb$0I?Gk5Y5-%J!f4p}_2)8X$2)W>Fx}EaT`cvn~ig-;jRBFBF>rk8^(wh=2`aqP1#>5%?qBv ze1wfZJ{#3=&^+Nf$%4jJKR~oH>x2Ht>4kE0-R3j?aVZ=?w{A#k>f$1YBa5+)-J zx`lH%*)lKg1;_vv_@1)W6o7|(J?@SLdJ=XWhtU==1 zb{?(z!s`O(M!WikU-OFmts8WR7y&j~yijrPV^Mt6oG2OSr1?p4?Ei z6|XQKXZP`54_nCtpwJ;#!o*6KUv!H3I*XGJkfMlL&mnY%5$eJ>r|^ z=Aq^4bTzU<{1j0RkOIH#8MC|EzWI;jq}|^n<>?P|uKPG+P3Ajs{1XMg_AQOT0tV_U z(eA8gn0LC;d*og?pLwTjAI*NvVqqoUW4r&X?nqZW^R&tG-wWhJ>Wi3p5e!t$&}fyz zcd7TQ=9ma+s-pv_r}truYWby7(WaE9FD0%Y 
z{q^&mI1#AQ+{S51jDM50J;T3BB!M|%4?A|)6uk0Y^FRou^PwYQJipDY8`l0&DPf- zIb{av3_&Hpxf$;<$|JKp3}2w1#Pt_sW_GfetQ~wP+Zjxf8fbo`yDaEptseQQ$#}%t z#QSk4U-*O`8=Kkp!nnI@%i=GE@Tm)t61hESv;KJL721qx>d9Ntxg^*w1+m|9nSvL4 zW!CM4lQaW&%NQS8{pp)r)EuyGeLY~B;(UMqN%`c|6ZT1;PDbLnS6xAhj%*5(BB_v2 zoNyYZfyvrM2nT^vO9R-hXER2jEAGJ?5T(0P$+6$A3TY&u&BIwJUE}C-a?+1f8tuufQ&?Z;8Hx?~R;fRrc^|j)po{xfj2(*; zvHF(w0d527GoQt}2$I+j5Nd3h3>T~~&F&X;s6BH#r>geN_v`kqG1WMJQo^qlyDSzp zxgbR*s0-`DtHz@>TnTSb)pXJ(6|9x7to$djF<&+ozS>e4ge!|Ml8(*Ei1RN{X3{-~ zwnEs??~hNo&mJNyvXR(4{ya!K^js~$Hpli(+MCj)2}c?6_1$f%2J%$&37ut+m-Ed1 z17j{W2=?YAga?{(JNHDF`JAVWI>_1a8r0+@i~uE)geRnqh$e1vT3vR&Cm+t_*8iZ;tpQ&V?1`heNpjVuLQY=UUJ*5X_pUcvRul1B8CFQ~45PLV*iP7>=JkQ={?i90x zXF{lx(CuQEl-!5XPce#P-1^s_z9nAAzD*Hl#@=apuL!p^SF&)JgzTs`GmkaCi?NdDVhxUGdFyaxiot1ef zlPa^$_0{0&NuL+1ftmdg+OI~&PL2O`qV(NWGNzGZx1Y;D%rEJnl%&hE!Qil_cQqI&47lD3+5hZiiRM;qqTt> z7ty5wMII(#3FzUQ>eK;|27h}OjwM1QEepH<=Q81Gdaq|^+xC_{aPKOsGYq1b>9vR( zmi;3-Y1(;9(R=LuvwrHuvWmUmN5dEE_@nv+&L_5XKcjo<7x?k{*h(YzLuRmSP1e&d z);&Yw+lf1nrBk`4CdTR3Mg8oY2Sr3uyD!G zU{GLt4t@CjUejdY`tM*Cxkt1Vah2qRISVQ^_Z~k+x4~$OdjSC#VXH;F(5FxTB)T~{ zf_7aWCm96@OZf7~9XgqMCof&PR5c$otiPsEau#4@20!L5<5pJ8%b6X6NYTqb_(=?O zbkk@Z&0<;R!j_ZGW0)OgjJJZ6M>fcQ&&~DSrk3e`pvcCK0oVkI9B?|xZo2y2fgv-PoF-?<+KW8G&w=?0fo)x2BCV%%;ZZ-P9}rT zkhE*3Jq2iI6^)T{@-ylE^CQ0-L-O9qw5=80TUP0PD#7 z5UtopEA2x}s;^hh^PRmLlS74kRb+^);vY;Ws_#&Ns$xZ(tH$WO!3Ebt?XUv4oF({}p zt1uI4GuapNao8D>9bPqZC8rd zj7MfvI2y{Y7mv z2KVfh_INXq*JOD}MCA8Y*)^gG_s?pN>=#w-c32F(VTWuNLQy+QVmipn8y5TTMHfpPO#$ea?8VSd{?@cRG( z#sIi#Zzr0uoXZI8aTCnA2cY$W-FFJz9Gl~y4ITAboX+?@0)zahvx1R+8i5hPS}vTn z-Fl+}8X^ATnh~mZMTeFZ<^p54H!q*B%sbq(e$G;z>(-O%F? z{PNt}$-6w}8~bf;sT<6xi3ciVOz$a6Z(qczXu{iRQV#MHMjP0|hQuCoLLTuODxfcc zNiq}jDSY-|m< zn7zlX=m-%WWD$INFU#|{9*{$q38CNuETZ@B*h<7tC#T=$drq;`Sc|sVkOLkFXWH7k zN6b#dnd`++*nR!S3eQFTzxjj?w!K^Q6cPNnUGYa&Z?|0(0PTE{dqb=eEVjQgiwEQ0PLYqaF5>@fL&6kveD6 z&<)x33fe~Ew$2sjB7WKG! 
zE0X(4zCAg%DJv%C_7T*C>2=v0zBnoXYC+uRA0BR2_j)f`ap6w==c=1@+u4+P-uTWl z8(J7kJYgK}SL4sYj{mcpg*(~3cTk@)Yjt@^CUl}!F8QXan&{!h4+=XN7v|?D+rsZ( z)igIXMHZVvd5rvVR8%#hwVuPpjRq7*grHg>c<)IhPdT1rA~RD+jx#^lbk$jk=n1Yi zbzDkOc(CW@liuk3s$};61y2r(fHK4`HS;E!d~lo0T(~2 zkkWVVmHQIY*JPP9vw?S|8K^+}va`^B{8vc~CN{9Sb(3|>fMhxi&{~=FCfVuotaDzF zLqK=S`dn=xGkYL?KizC!=FeyQ#d^n+k~`);H~GjW1XGpKEtj33+t|(V#ptq`!8Z2O zORtnkaLzssH=k5o3g`N*>rg18rw83s?=88hqo0=kxLj;8KWVHkgX(p$C)}+eWw3X& zMRTyRRVk7i&Uw*{8rk-Y{FM zh=Ylrl%_%(qa4kXcx#pFE)P&ho``x94fxZ)oXBP$M|kP6-}ps}@+%L@Sy}m0IxQ+oLw~7fS3?4SIoFqv zUxK3SLs?DznPDgU##OSSg{N;677*cPoBbA-w_Bv(nNCz8>uW1Kyu6oSpfcW7mO%74 z?fVo^V++g}YWj6`{nI+fbu)s+R?`YjBrr=&+@(so5BndVrcT}H*mPgH5{+%_NY$H~>6 z*QbzUuQlx5zt!8&fs~=Go_fYv!g;>;_go_k(?wKIp1gCTlqSk!p2he%jeO@U(Po_8 zk*J!=AuYZ3x(~helc`_Z2&LBTi8i~3^Fa%j^LUx_sgpDnEmUQW->dkV+SHnTF3wz; z6UFOk&Wm8T<%r(16pBXMhu{nK98ls}UtWT$;PGa;Ak{oe!^VGVyBS}_%Z3pJ>*oaj z@Uk*uLvFJ#L0FVs>fe*-_I>Ks7OLsrtDdgggQ_YJ@+`5UM|kPvPXxc_&CfhhAHS1} ztT5T)#5pOo`?K}jS|`bD5d$S$cS#7MO~ebLt^&$b*ilr=&1`!Q4!LsYWXsq*U^sse z-XCQ{>T^nuPse+uY!k~e$4+0*<>QGilWeNH_EViX__E`?AhX}uu7 zprJ9t{}jUDS5TUR-*zP`31Ta4v;3DX?`1-#YECD&d=XFIezRKC&ZAi}k?xnYC#{eT zFSCV(VifUl4*wWnDu*!QN1u@VJ<9S3(+`aalRdwC_?#LH4kp;#ZT&uVwEppSInb5% z{2q*flrj1g6dM&}Skq)gi-=lwRSvcqS90HecuR0UGC&n%sug|$_-4lJd4Hm7OVe7_ zqZ3)NiXVHcNLu@s$;kvu4jD>O;-P@5u@~(BQ`FLM#dH>Tn?3*v?+}=A+$omunI(vI z5y$$Xy}Vb=#PqMa7rKjoovUJ?`@G?sdqkIehr6aX9j(2V!fSaIxA^wM!bx;MG~;75)5`%Tn@-WVKpHsQ({s^tG6$pLSfo9^xd;dge+6R8URz#s+hS?G&zP>C zfmW|0h$7)db(^~2{Q{eP2ClC1LyeUg%8ZIp2!0g}Soaq|o&53I93JH&7_2Z^JX?F@ z(4j*w@yI2dhA$98EEgAmNqJNYm-sclSs8^0P*YrT^FI2&ay&L$euEQ4tw3X5j&m5K z2(+6>N5MOCaB$Eo^%6%NeV?gZUQ3Q5KmJN3@FB|4Ukv3p;-tlo7rL365*ZeQKzWMy zjlUl%DL6ucXyXvG#6YOO+1v5{^@=|Yz3%F=nfJ!RHqyDt%#;54fe~q3uVTe-m%7*x z`I+PAC%dvWaAfQA&I|jZSBTkg4q*Iye&e{q&7s7TpCto8HEW7G`Nmfxjd%}Aa!`RYXVV;fh z1ZR-_>>t;^Tc!3kxLfeOai4F|T^dPE_y?ReOOZ#5wDKoHQ$E$cYy9)-`rOqo?U}TYA|_j!NutX_FHX$0#AyP=k5tk&s!$%&O~TXhA7;wy2N9{ z82RwOdy516b$dwYRW&7W-Y?$ZGl;t@a66{gXHjfN_%E`Q>B*+s_gzz;+PnPwnznN2 zCmLTNyZ+dSMkd#}`!65D;Nw|mT;6EP_2 z+yyVlNd&?p(^jLTY7~b&v$H(MKJ-t?vh zlWjgXFIUqn&enlvMw3LDt4vy*ESiF1=fmgEA>c_|5{f2$!oZl(Z!d`B)tw)P?- zBcr!=1$??BXb>@4-RxxT13>%e`nFYUiL`#+@I zAIkobe)2gz%;lklh-S%it`dLI!=9`EG~^hDeQb+1NT~l%&{!ENpoulL;l2I-!Zj2h zECUl0Sy29HD7}Ckf-#5#nWs&`6VN0G1Fa}s8<2`ifG(e4scUFxJ4LZKH z4GpF5)%ZWmpZ(#c@!5l2;j@Q~LRMTt;%q33roZHx;>7iDdaUmXl6g;f{b=H(L^iyM zfVG*(&`RUmv~@=!s17(8?Vy{Ptru>b#^(~g^x+<^0uVb1b3BP1_g7aX@HUQ+>p_eH zku!wkfJB zz2aCtfdlN;t-(KV29b1rc6w*h^zW;&*ZkEj+BA$tTV4MiJiG^rGB<%-vRBqyictPx z&J{ZpJ;*QI)nzfS z&!FSXq1t{0MiO9Ob4fN9mf$w)!$*!}eBdUy#ZXh_i*CebdD=gkPO(`RcRaz*zZ0zx zs9wK}I1M|!I|B7mhz~Q81JIE3OGKGN0GeRpC1*&^X4%i~TSNypfS zTpkKJ@X^ut*iT7+dm_FEC0VjEKBM}3beLr3>V>U43mVo_51#+6i#YavmDmx(VE4wrSTpm6Soxd#@d##9~ON#5JX28%F z@v9@tD#Gck!9Bk}oF&pb4;mu9EBV!wg6EXBWBk}2Kpu)<#Z<9HX`nvRhW3^h7D6e{ zpSPFJ&dyrf5Xc5S1H*Gj#>LQT_IUw_2fC_@lCTqahH#q=SR@_jnzobE8#vXT$5zhG z6L%SZ?BMFEi{N!(uhmR%mDR@j+UVP%cuj&kcZJb3 zsGgi*;lAsyXQk20sZZv;f|tir^dcR)gKHlP>k_+@_%{xkO!GXwe@{vWEvTrXNP1AT z5&|txBPBB3Rf9K{53U4v?l|1>AWXW3wXnP;OC#A-c(k9 zu3W=1MOE2{ZCLmAH;aU%3yo|}GBS7gO%6eiMMy;;IEk0`BqqgSiQ);yqA|mOBNr33 z&!WO0cuf`%GZdOntn>Tvcd=|uRQJCyIXT==b5ia6e`-d4qXT~rsF(X-H-Hh7Pg3JO z2z6{93^XKhM7hGC&WxapaPp)q&Q)K|pKhI%%`40(x!cc`@a#WFujhyc>}s~LkJk!t z5I)jFxs5VK<8A;)fCv)3WW&(cyrMpikMH~lC%y4J8vFdc#aT8HIwJ2Bu~qqTYw5(& zie#Y%9zp>#I^d8O1}pTLS5vb}oQ6#j%9`FsmtN^D+0>BjT8rMwTv%Xwsv@uYAEsHM z^h8pDsLi%GNs2fXHfSi+w~jkdEk5;Uy97ZUdKKh}O_sz^B~bkG*F5}as$jI?9UW8t zRQBr^tK*#;PMQ@CuHm!2M^SmQ{rPq3N5vK*Un6M#o6Es7zK0SXol)@GQ$a6Aqi;XF z(VnYBU;2Ab9?w!P0Um*7^W;!wO2-$5%`71UzcX@9s_*{wvuKaRH3kDQv487EhAWW1 
z;yHnn8Vb}AMviQ#Bv7^OBoxQh1Ac{^{Y;-%7t7p)G#$Pai8u9CF$`ZWeSh@wR#Iu$ zAFjpEVok$Nq5N7ftl(E;xFvAvHO-sCPmiM;sfB^#f2#g$*m#@6;G(gpe{K*1+4&p8 zRGW)`(=vyAd>{GuU}Jg5Xop4xbgWQi@9_mC^rBF-^^mg!>s0ZaYCy`xJ84!8@eKq1 zYP~g|g?hAqM*80^IB@-Y(Z9P5mI*S8Jk<=Su#%K`v`I#<&9)=Wc(tV*tm&pqec?_4 zsWK;Y$v`6jB&s!Xgg%K@Nm&Sxl$*-iGg=uvCmWxhJ60+jKR5RMrjBxm>#z3(*Z2RM z7X=b8NErB(t$PGqMsk-7>q?RrdbstFo{}e~J=EA0J23QiJ6$y2Wr9)mpfilJb+vOBFybsFS5Vann* z_Vkx%gTsLE73>2M2%W0tKwEqYyf|e95W?7Swv(u!tcvaTuCo5&pH7eG_(>vfGq$?^ zn)rx~s^K%aOQFTk~DU6d0Xy4|p0e zs2mENx+=aGO_qfaqAmUYs+M8BxToMSj70p`MT~@3$@&L*8%A$!yQ-OhIop2^a~gW3 z7X`PdsHwRmB%VOBKxo2Au#OB%NHBd7w5MWsg!Y4Q^WIqYsLVG~4NSAv8!Ok}PiZ`P zG_fsaDey12kyP=j&ZL~=?d|>MT5E$z)Sjez`X_v5DaJco-^LbA5AmI1JLC5~qjah{ zfUT8wdDijoe%Ja3`t4+;{EqaTK@s#4*&_qB!?PVy6t5trpcnx!9*T ztM`+OTj_no$By%P^eC#R{<2Fc@rYQ0{J(E#1d&vE=%8s41QMmKj{_bA~fjC z&5RgkRjR|erQ9j`TD=AQ|=baQB{VZlP5AWHcs7gms+{Y;Y z%dgx5mK^EFt$zM&^7l7gOuwz{c4GORvwgO9&}HfU&$#(_hG=NjNqFf$z&8Kg(ErviozcCK@GHwrB*)_Z4;7!^CU=+oAxSQD@EcxBe*iKP$ zY*&b?>(4GO;_al)lIT*NM>URB`pI@$4l8JF$J@LJ`IOBWT{H(mG7)z8y52wOBxJAj%_HTO{$A9J>sgFI` z{_QFq(~kT;T;S%CW7&&v0fT-MZ`k_z(yL4Lg}>X1%eXE-cF4ig8c#2>K9mEXP3ETU z32{>yrRSMCfvO4lKcAPljeU=BicR|VErT)b>(dBtKlUTwvzb^%#6QeMV|sivDoe=;oQOcfYp?iHpI z&{rCmdZ~L~)2-!?_}gKn`j=~-B8{vTYgm$e43ui|t1E5A&kofy#y1uX3h^{F44*oM zHPc6HLUF^**s$9%U1wr+3Kzuwp)B2w%nr4e*YtY|5UOr}`=*5h8P2XHHbapb#q-|{ z;gm-(HzwbW37}1I3L;T!8@Mpmp$oJ)Q@tN$DV)oqU>Sgt^6E95Rcu662d)cYh=Lv| zAw1x;g0cCQjkPHsaqra=fDF4~otF$i(tKTdQB2V_WzSmMue#8oE7AAXtp%y2x;h!i zsT`;T+a|}SqVwCBmh|{_nLcll%6MzE&5e!oR=j^sU8Le%u)-{LM6B0EnfN* zORZ$zjkeenToq((a?IAI3eogiDT9R(H~?vO8x@4XYr6K4c<*IG+~wG|1crM8;oS!B z3yivH@Yz@~1$Cnhrd1#=k>F~6AGZS~mm>U7Ky4mGIdT~mFQmO83Pi^jOSTG|T2+yKl_N;wx7QE)Yg`Ns(Rj@{*C zgtP(A=Iz=-2N@KA&+!Z}iSQvDoAGcffuJK(9Tt=l76nNR?luICgKQWS1QbN8h+O#| zz>Ve5xsQekqjSsk|7H&W9vZH;5EeY+n(oQii?K}pgo++BVR=o65KFW}a69D}uDd%<-z zkc|j$907q{5YHPfZu&bEI_nYPOzXej`*C66iOG^bCinqR#eS@;KJ-VlGIT=l0~jkC zCt=R`fx&-KkBQ+nZtUB4ryv6hSt$X+oQy{sys z<1kzG>T-ToL$*TSXke*o92I-cWLL6>^JwDS@4}Hr2tMKAr}=_Bv8Tf08gBAmfma(L z_#)q=ta{7N!J(qDG3Rv5^z?LHw*8C|YE9rPt>Z&VSQ#Hb1njxq8*VI1JvC;So9!JV zM_y6LsI2HATDDM-I+-U1sE(m%f}v)b7TL=r$m)#()K7tO zV`-UYys|BO%uo5E=wYtw9oc(0x&4B~gKmkLc7K_P`7z2TRqSSO=IUDX9sYiZZpp~O zy1Orsnwy(JFTXqC1~3G9N`Bs1H8sJRQn;95b6@lH0o9El{k0o;tzEYP1O#tEMkCux zx&iytUpjLACrnov-Zy3@7nEvf>z<~pkM;}LAH{!pdeH{ zcvB##*44m~K*6Jc)QSZ9#Qn&~D!^OTHUx-}sdEI=o(uBY-dx?|7o}oJAenG?N^18z zJ_zcF(r-sFTR@}c=D1y6|5Kz(Y(Jx>PJpApb4UC+0pHTvCvtCke`%`vs=#QDsp5}S z_swN9TU*$CSi*Kq*zcl@AX+u@TI%$+n~wqq1BUv;go~4NH|7i`Bo$TC`jKGo?m2k^%d>0Njc&O~c4b!WkPn zr5cPdu^#&a>Lr3(s23VZg#Q2`3@lq;4gwoIa_moE0!Xr?ZS|e!{?C}uCUKH|?Ju8l z5GWEbYVDp!H!*A}1@>G~tYMg80)!;rFA-xK6;VFJ3Py}@SlcPMT@q|O3~cD(WVp2s zIau~Pnm4csBG4vOew@ojIEJs6EgObH^0h0dSOpV2?qdT@%d=?#FNprR^x|YXC0o(VMY4XO-JLq zGGQ~257sK&10y$=E2$)Z?E%w0xsLeGX-@i{-JKSf4`nT7BSy=lBa5EZU|BO zE%6bGxBhEMMW{brr+&%Vrv-tpiyae{=W1^c^?AyNW2UBE4r#N_c;bO7&y>{h^ALtvsuA-Js>6)g?sToZ>Q<+vDUEQ;2p4J z_I-aL5%40$;hKsbJNgS&@X1n)(i$K<%OocYHvzo?oHO#XNDMau zLOCT*$VYHbaR4?qVU!s|kRMF*%kCg+92``DPo^7sezqP#y!JP;hq};#*){#;6m?Hu z-))FB2{tdZGXtjM{Mhv3Z>-yz^S_%yP+PV-OGP41B;kR(C=GKM&OHG095X>h|C4tS1B1oHTANa|&;AEdO9mG?R3N36@F>H~ z>Rp6=C2T8;As8=4fxSBAv*GA;wcw4W0gPbsdGz+C1aNQlrmsO9mss+r>s9NX;NA+(-8I8YW+$E;9+4-Ak_9YQ`{)Q?ltL z=Aw4ka#kDzV%>Zu`rpciKsY3{+@aT!fEss6bY&k{Y3R z@kgABLUf%2BVsoz>$wD(8!#;21U|HrQ$RuRojaQm{K}PC z$n2((T8k&v)}hv`*!7>b5mNCWys-xUaRWrA5B8{3YrkBV*W6Cr26OzF5a#{s&Oo?_ zLQ@Sv?U0AQL-;ktP$-`B#fM-c6=uUs5_UWWDICmk3eul!&L9!X66tD z!h-;1k~@ruS#`%b5mVHa01polUTXM0tl;aK2DEr^Y#uO$yjiW+NAHUjs+FLzO80QpPmt>Y5 z`TSV<7Rb-NaC8&HAL1$4+(_8L*f@UZigMKDA?kfNr=fHVCXqz>2FhwrPfw`8i(10X 
z8z%r~=uXT40l%9dHI3y@Yot59uF(47wAWF$un_!YNgl>1C_0@EO)mP+VL!qsG zTrW!p3G;cWSw?gwaWp|AcgpXbCOg^7hqW~|Js2IH-wL}u2rK)Bh7|Sm9&b95AwfKx z*XDJEvBS*lEK+$=H%^HdkQ6gAm@x`gz;mz}>``>k>vA6t3=7<97auu60(oiM&>#0>SncMEpAlKy1Iy>H}=&YWf>N1bYV*APlGvo*Qb+wQduF z97Vs#g&*DuV2L~gT@e}uv{gS?ijdRL&~z`Fo{vn6Alf;;b;x#;Sd0xK?Ng>?6VZk3 zA8t;6gKRV%ztYxZuEnE}tX=~D1YSNqmACKSrGqh-4r7HQ;WhZ1wpUx{rph7Z*2Ytc z%*e<C_WB%|mWIZ|%T&5bHIOd< z4UoZ^oBm%*TGGD2qz}2BEa>hiNloVU^wHS2k2Wbg2>;tmlaluARw)$W@Myu#C^FH7 zixiOJDPCeXH%LCwd3 z6`FzhaMtr#!c|@16swC)Tf@Q1nNR3Hg3!(C;X5hV3D5`i}fe(!* zJaw%clag$YQaqt38wa11%St@X;1eCkbghA@sT{0QEjUtjAj>2YM?(-+t{c!KiqVR zRovl&%X-2*}BZVTC6|U;IHKwLbVX$@S=+B}8Xj3%tXmpX+uW66{%)1sv z{tc@&r~NR-_mE+>hOyF%-h?^Xu}$BwM?s!|Jx`jhiTfG9J1`D$b#2Kumh2E`KiziU zupD^&HH1N_>GDld3qLLsKD-!jd4z4#UtcdZwIS?~i{QxN?7E8(;J7fZPa?EFmDI8- zP_?x|PlW~xfahJl#4~}JB&g!2VKM;eR8&hdFm{6S4zE~)-bKi0n2uf`k@WTTSBGRb zdLWtUL#ae;ba-8}d5*stTd!B*LHs?ds`?zcZ+;)p2!c6J6jN~FfXgM~G$hLOzEGO& z@x@IDfAXXjLr}>-aN~Ku%8n z#VXGWhrAx&qcQZyEgIh3FvU16cf=Q1juXL+v}3;svNxo zuyGIHDLfgfng}-b5nf&;n1v4X^r-hHeIv92i_D|r-s zLQN$e!jp9CE*7^I13auOY`{L+Y5`V2Ixiqh(!(^n#)ajCP>xQJh7yt|*V>jAZSVljz(y5<)dV3nZrMIaW2J1H@w9%YFQhY#{E-8kz9@$n6yI&I90S3t+n zzCPa5bF-j84C)hI8(H`YZQaRy8j%u>`^!~<^qBP$Nvsd_;WyH!&?#dCGqo>LsJgf(0 zHFKEwQahA#r&XOPel##zP-zt99Tnrla-&NqlTe(fW|+WQC|P3u-4r0v8I zFhn%2Lx(nJv?$@y;i#iqGC1#j#lm8UHs?5V2>%6>kfB#gmdgWFW%46H+U}Sl(0&p}090h4^w_Rg%inv=6)uNnBn+wmpmCPs5nmr>Mz@$992m6b=MY#ORu}Dw zJPrWn68f48r!Tzi4G->iBE`QSRKSjXF*Xn?7WQ$cv@1O48L-&JBv7&e}BCBM1G=rCmtNJ@y+0N-8MAueTogrSf>f=XV zIh0_v->!f8R8|+N|9`|KB{Q%?(spww{AgI4V^-oJ!YtA>rz#I& zA)#-|f+c@hMvy2*Mny$gL0PbjSRDzMG9_b~?fnVLf2PCCFVtVjeRA~Tt#r&r%lZho zSZ|^&cPJ3^E|k;|vjs{;H>)AcK=;yw#>%7kMIF9o;uV|y4`U}~XvQZSZaNAZr z6N;h_p(wfz7H0g(n&yu^_&;^u%@)3<9?rf;?Yy0+9d^E+t{%Rw2kk}uoV zxAH!`EzBNu$~RYiP^2tPgv*zSKC36-Ql!4m0af+VZIV*=Plrdb#H!Q?I2bzRL<>_6 zYNG@b4!C% z#nXHWd!vN6#Bm5HQ>T}$6h8OIWWNrk%}?&oo%;_MQ+4HAJL&h_+DluhAJ_5o)5E5>Z+SHyx!&4)k8SfQo=pY@ zY@Z(;JK0?upLO8nC5uP?Dq&$^wg(T&_(e^)Pmd3OeVdo+IraT*p2Y)Snc4B~MM+9F z`=#@(N{a+Kd1dijwRqZQEMk(P^DJ`K zb!S$L|G3|NCM6}}!+j5j-{0S=ofwn<`T22-ef6nm;m^Ci4Ruzzyt>M|dd-?@PKkEw zNB&%RF8yC$o~zhZGP=09*baVv{Pj(4;!xUzQvC9bmFem}wjLhw2FbFWUDYw|AOB4L z?$6nKPj>y0?~H+gfgAjXC`3+mXH_H@A0P9^jT@abAB&8PelvX(FtG$**{vqRpxyuO zmP+t!z}-V{Z&Y+Q#Byto?Rb8EjV}JBqaGjs9hRFb!>4E!RX#Vn4130>{$SvqNBi>4 zF?_u4yUwp>-FM%e79YQeY|Hjk#b^QRC*uze8}Hk<48PlqI3sJ5t+~hE{))6zi_LEA zV6pqBQ-}qphmY|iS=iaDW@o0Fi`_oZ$Nj8*n`gf0$A|m;?x}L-oCwF-FV8Qi93OSz zTG{w`bkw)bQFUv!Nw$cFj0|hvi;Gn^cIPDp4O}*+vGER#4?9@w+sE$2z|0Zy@S(<+ zXK5#!v-Icsj%95*y>xthc%`ACVZniy%YDB#bzYcqZaMU`rzd>u$A|8wOkFIh>CelC zikmlYrlVqGV~bq-TMJi`cS2BP8m6f-3AN36@+(i-onE?zekhxLyL)4bLXL43v#+nO zt-XEN&Wr0i@$77WG=gV0;O|j#a&j&Q4?cPIYDrbZ!m7TPm*;+|R8>~mIy*-&FI+0R zV#Nw`ucO~yF=2%(dmB@|x}LJq+)|Znw!FT!!_LKp#oF4s+C52DcEGNXFHcrMh^{DRW@?a5cutx0Z)16&i z^c{_=R>E5(;=E4IGPuV|%>6P9u7)@fCz23tfh+lOA=Wnm{zHsANKId}eiWkm(0 zsi`@RSd8obc=ur9Q1+VwJKfBEqbIu6l&nfZ7Kvz9eEPKU!wCoxjZQx)}TZc+czc`H^w!^-U+4+azc4R4-p0Ds`yke3N6$fOMj)tgM}FkX+zc ze|9c53#pSTyL;Sm?920IqXTW+e0+SBQ-`oSmOsl~+f>bRjOZRbc%Zy??G}W7q1%UI zArZMXYj_hScj#c#ZLudfKyMJA2VSHbx9+(|YG|la*LR?mi)60m z=1ryEy~;t;{xXSmAx-P*Nl+3H1xdRO)K_R^A)cs86E8_ zBCNmiu@T3d+*~_k6qh$STrVzb#qYg$h>rSP78ry7SRpCN95^`|dh#R#N#`3+jEN#e znB?*xW1hToXVuxWXO$i679;Y+l$94FG%GJ{JkfM*$1v$9{IB~y1n>xT~x zNH&$JTk`imUMwIGg^b6YurDoef{>$?$d)K zO|M=x_{~Y}x|9((H8%9?*U9{RrM0fDAy^)gvvQ0w!z#;8GA|56$pjz>(~n-;$CsK0ZEuK(6E3jtfm$ z`irK=K6sp4bC~b*;}ek*+fziv#q~OW{mI&yt`_>@qDIy8=L>zlJfovX)j)b0`txV* zXJ9XDRAFJE(b&&V8c31)0;@fcz_Rq?D__1`|DxgXU)b2now6DsrO8=ptb^13)~jt#z20{5>=@RFN!aZglU^Zd%DY_m5FM 
zJ&vQ_UN1m~Q&v%Vf+Ap?zkj34p+i@8UDlMgez5wIX5cB^NKU(&SV6WwgGkS=uCB_5 z-^qV0@nXQopDZmcH466Dj}zKNj;ozrLSm|TOrCkZ*y`0hB#GpjWDfzw+(hADxO8;| zipw32=Ykvjl~3`?MdX@Z?|PNJ9ibP3#n#c)B}gf4)q$?Mgq7#kc`TRRbCX3xM5ME9 zdy4#~ygj#?vbXbJy>`vMHct3!Qzip2L&vvoX?f!-SFWVf&P?568O0)^6{L6k`1<8q zL9vAnwK~?;8$XC>9ox8Ix@DrsqrFVpvPj(dMY;?}YTfkvWlNV*)8la_Vt} z&Z69gcRlrqcb?k1x$P(`E35qRpa+;u+rk(+}!lqFE4AEmnczeoSb|O zSFT(E3XRzkE3hJ|zP)A5p*I|;b8`=-j35ti zQ|GbQ+1IZp0(b7tHQDU{qdX}1`n79kOH0*v-?T_T%9j_Cl9rC5nyzk33|BQbH}^hr zlFo5IJjKYyec7#Iw8?V59*Mxj_i-LmQ*?WyT3&WSdZqz%_S4S*_*Hb#bF5-Hd69KOG`t8LFbHk zel03v@SkJBGn&tSS{)zm+DxyFb>nO38T_69Qe@}Oop_)eoAMwXOT=rk>~7h{6FvR4 z20M0S<7k*56_qXfbw6NwJmJ~%=ea1O>ScrR0WSdESwe=)ZbWq6iFVYZKH9#{)Yb`?%`Bi;Ai$26uP&*83jq>-GhD z>y$k*3<%f|qGa6|FQQd5SUwkr|7RPPl9a4OMtkz)$(edb{7=oZgW6l{Dia&|%y9Pb zFphQ3?!1&H3EALhnt_uY)iL}!Ew2(*>KkD{Xv3yJ0$*QWkFWF!Uajfmb@XWDty{M| zAA0vT08`Wz=Y8oPK8#F5M{$T~T4e?x+f!%U){O_N`wa-Br>FDmbp_Z&j!1R=bJW+D z=fMiPif7L_dK;1(*UsX{Npn2Ksnf!F4ev|141Sc;WRN0!Os?bBzWbgX6``(wO8tH; zMFcC`R!}a753~1Px^&6Y)3a{(#bttmD^Riez@-z_$T zVo(7GPlKx|;e}bnOw{!K{ryGPuH{3PUA+7c0;c5n??c7$$%ziRd3o#(<2dkFva%8` zU5Y!u<}llo`qXYf;)FW^NfpM;-6hBeuAH#jOys?lS;zR%R)`G)RapT>Bq5- zI(>RG4th~w`kYu|JxQuhpFXukGEgs@izj6unp?^fILPqA+J#^oGhYfaA6;Vb_AZ;| zb8v8IKJeQI(RB@gHp8{8sHv$bghpe+$y%`Ho>I9WGB1Z%*yG26#ebepcd{*7REsm~ zIn?n)zFXej!GYIZN=mBg^=odVh6-eXjLgjR`#IBk8K}Zf7g0FT?T>c3y2_iFnsV{; zN8%_1{TWxXDSPJUCUxv6AhMEiq5Wop+i;{U%M9^zuX|4EcTeRzcIgMsI4uZJ-{iDT zv8JYGOXnEAJM-E#W(^Gu+6dU*le#(<@=;zJ@N0NtqG`%VzKE`()Nt_3B?XhKET{OD zE@fp~jf4f+y4nD74*`zbzrTBMXk?_J$a?>En{p4T;>i;_;OsgtE8s@*0!=q9SCKQd zHSjqkgE5)xsj=ouIaZJ*F@2pC^{@Zdq6iI%|+0}VYk z9do~rrA|!-O3iHunjB?iW@e_!rm3QWg1WHjgKd-Dgy=)kmoA&2bOW&)3 zu2rj7Glt0Ltq@qc)Oq42BNG#$4xoXi0*x0YKJp3+-%{q>tFN!`AN!1X(ehYO6g%*T z9Fy!*MwiZ?KLzej3w-|US;p<#mwwmJEyy*KjlFfa`3lp=j~|WhyIY^TaN&TZ)M)t& zIsg|ohd0?Kxt0D;)6+f5Xi<8XzrAD?ow^3V82Ia|L}I;Y!dB1Gug$JnH%IvH$HsE3 zcK;~Q+}td%%9%q*NC@;P5>S?)cgwwnckkY9dUVWZ8q1>QH*oG1-}vJbe-uOJBo>d< zUR>v~KgDZP+U2Sisq0}QS;I1o_ji`GNyxP?jY&&QwA`{~%k4q-9{Jfvb%)chUAv|= zDzU|)Tr3I(9i=TDv@9gihcXy9N=@Zfl5(3hz2Cgsk>fQnjC#tM$WY-Rs&uUY= zfB*ii1_nBSwS=f`-@d(b=EJ?iEgrK`2}-(VWdX-F93N%bv}qF_Q2?kd1`x~s)zxk1 z)*m?(7kAI8kW(WxleX{O=BfU+~>7@b!EUZ2Ep&oQ&(JdQ9Gy|kvQ|GYlpXAGD%TVoV{;Vn} zE{-sitU`9C<9cbx48Xb(X=#EW zDG3P)!@7eu;H&;8PE>vPunf4(gNO* zD7$TObCZwea?YI-2#<)6{b8ViZwo~_*WDgEb?`maS703nAqlnaAfNR za%QAxg}aK2i&;21_p~aBii&POoCJuhxc^ZU4sE9V`t?h}U7!0rqNAf@vpxxqpmY5A zgUThsT1&47DBKa5q4)FlrswK}L^oHO|A<#vUS7U&Hv`x&DOa)6 zDP?`|_)uo$i7)SN?XyhbJ{#=Z`{vD?@RWOV56;Hjx}|`E8j16?LREDM2!Kn=ZDDN6 z1vk}X^^|-ObJ^s|=4LLva3m~{uZO>$LtiR?80879;7rabp_pL1G`G@&J7!T>C<+y$ zemL+>7E-E291c`(snhz0wFz6r*=hp;7gJPnQ6nL$JsgtKdP!FSwx|L!4Ypx~J_QuC zwY3!uP%cN6Sb$td1Oc3h2%o+eGfgGwApaR#6U4Ue*ui_7amkV;VY0h#NXx5dUAq>6 zypoA@2WCxAgqX7u+q*!@)qK8i;2#1sC>zC9qc64QDxeZWPuMgu(#yrg6$${|T}fky zhEv&E=uj9q<+u9qI}VWV(CBDZ6vts`<#=av0g<<0xbik-LJ+q~f@apEkjItJPRcco zEL4x)<+}TZ897h%^z@6lxf#G7tV+F^LPJAQ*GDpbkf)4Q}z+#2oQA8IPgLU1(D(Q?b}~Jmfk7g zFvQ{DO-cFMc1H>-Fw3Gvi^eA=R>;b-A=_+$?8+l42WrZWqDgiEN1X-{(;f&8I`sFW zsl^G)P-y&qyr%(SJB$sqo!fX~?Td8vlL+Cgql2G4eSP(Oeeqr;p@~wv*sA2tuRa(C zfRu^qP*6}%fwwIPnDF*Lew@w0FL=~KVej3FvcM^#+~Ci;CVurgisRrS~J-_OTxY$hKEwTDSp zS66NQ`Yu#2HE`mT?DHcH0ERgAdXQs>rv}Sc;*I6@+zh#RQP{n`Z07Qxa_?TYfPeto zLxCD{0!-q|whU5TIgPlfrC`$PJ`m*B?3ItIXAPrmY+h+;{`3;f}+tlNLiL-dq zx@XT~5kff-!&iHDXwxL5AjlX%rjdf?@#@_>0;-ISjEquh113f!?j3r23jL;Bm*y)y zgM+*9VU2!iCq+&Vo(OUjJQvEaz*%PQczxoa!o9?llr5&$cS3hfHJ!>WO2|r(pdci869@ysDE9 znq+A|d3=IgMuY<9>k>l;;yUE6e=8j@B(x&Y+K#-|;y zWCoPk_~z<1Zq;?`r0@msq&JYbPeny79-GL|R#>{)MH{%7hnrhhPYsHYf8Wf|UU&jmdKQ+)_Jvii7*_hz8%fq!qkVH7ynh?;r#Z%Q&U{ 
zT929f)Wk~j#)^Q+(P(79P1ycytjrlyv5f)a%8gSa`}&4FsxN*jJqmr!(ZgdG7P+UR zLmOz0=jWV_YWaaXQ;h+i=PnHj>;8l$MV8mw(^C{UMN&!fkW5xl*T%aydYMI~)8IImJOQjeBPq4V>l=x+4*U&{#S>_Ik>b=urerSlC)nncv4Pwnx-#8|_&{l`x%A88f|sZW*>H zWTdyT8%f@xJZK|Plo61mU8%15`Q|8#2BU|cSC>67BT3`D5S?8P-`3v)dB3{ zh}bMDN&y-p^VFig`y@}3{_)i!g^=Ol;npH2cE5qvP&xD4qHEUdwzzQ1EZ2mz`9OPC zTD^}V-RGMHM)i!Iot3n6Dh|v*F9h(txv`raYIF$HKCpAIKR>P0wSt!~;1trvfH84b%j)|)SpXQee_ zvy5(-gBFL;kkmP5?JlB80uIo4kWTDzo^(iYas(3lw0GeP@;|IH!KaLlf~*w zN&8)9pOux6yn+I4a5~*&f>BvbO#p{!0X5KGE}|C>_{qr3Y>(wBcchmd99yHT+%eyz zmwW21@n(B(KI4*Q%j9lO9UW*@AyPcFnxWCrr}%`;adG4c4JXVr>5!|Q;z1H1B7s5@ zLQ91}N!|po(estObxqQ)-iINP=q@W-KOles2!9LIb}Hc8(%WXaXG@-t&x-A3NZv&) z;1CEwej_bDmyacd%E=Fq2G3~*KB#M+|0bEs4YwIApjY2@|FjhNUeVWlA}S{4BpNn();`2ey3UrSZx^!lOAZfF$f#lD9jLkjhX|_sq6_M!3gMlN;smSYm%S8 zW$xuBCvI&>&jeB-omYf0u>(7t1f0!)SVrnHr;u6*AasndB%XE~fTyjK6BDuskJ5f2 zbQ!9!ceEeqV{V2@ZVY(~-G@wcUO|uYqPUK4)ZVHcsv|Mrwe7zrM>KN!A3~XK;<_ZY`Q;r01F?`Fa18gCv_DH*r2=F%xO@uj>w==929xE& z!l$wKmC!5CpFiLJDA?6?K#+sG1L&?hpgS?uRrHeAK+PkMte#iUvDus~PnU9Fv`sQZiOjjg7XfYG=Ql+uR3&PrWW_t_$?0oL*D+qY(vp5$MQ!QOwji1QNTS$%@oFRXP_ z_V~PEd~)&%P&E|-Bzy(Ey;9AeNJ6A|n%%at%nWEPuv0_vMq6beKrCEFEa+VGrkt;@ zs|&e)U5fO!nwpHDO;ps@Er@tiuQI9 zB!68;M{!D9HhFfad-_z#Y<2bTIc_6woyqO!k6b50UJ+LLH6Bag<%{fqA3zYYv@ zCND3qiCzGR3D3*c*noy)2-SpWh>rS-ZARPeT9a(ap|`Tg4jD;FNg~Gs{y8;}M>6pA z2{qsY_D^-HkoB zPT>K&kxx8*e5z17ZSm>@9&T=@i4KBII=A}Z63*oth*n@X1-Z)@2$ZZm`ngjtYx@q< ztsT5eM?zN34u%XcALW5KY%0vao7J;7|MEZeoqE4OC zK>?!`K{aJuu)qOCSuJQW6@i{3!mh$wJY<-y@7Hb&f@av7P^Mk_K!g7IbA+O@HWmg~n z!=(}KnmfHd1b@x)L7-0~ljsNceY1d% zB*&ugicZ?4OUq$npn0Q2pz9+9%MAccMaPqqo$wJM@lq7HD?N&eZ~wc6kRmoC0wfL3 zg#hrgvF{<<8{136QhxsY847L~WE>ErJ658I$i2Do_mQvQZ^*QM=m%$BC@?wXR2f%D z1o}osizojKE<(2gmHSCc3qLri=^>$ckD|5gWeO6MnoWH0*nlPN!~6H|feM)R+_V^e zK1Bpg9GHov`~>cprQ(O9t6?8mLEz7x46~pmEELgVxFrp>kv3`d{6u#~q0x(*2bS|V zYQxc=LJ&~Gk+>XSgV?%tD@75v9q2lr`g-zKD!rx?jGjn&07UEICZK~PpS14K8x~Nm zB`Why%);r+5!Mda$3WaK$oGEv@&z`flPp|+5tb4V*6G;T#avto+0!t5p*4Q>-(S-> z@;w|>P!w_(KsH2B(kI9904sgUIlp&?-w@q!!%2c1!@L(A9?l3pPjIQHpI;3)_=Yz(B8!*!^l?HVG6LI!jiVdq64o6q4ZZo-$~Kwct5~5MK~aFJ&A>&(=`uDp zw$;#34~6dEU=xiI&y?S@OSrkwbuqnq{dx=!FIjVoS>p-Cj^Npe1vo1w3ky|L9{IqOb%99GEQ$cP*4@6H9rZi{ytcLTFVeh-U8IbV!q7R% zgqWLS{ctVp2mT`q$PZ*wNH(PMcIPoi>z&QE!rumt+YAs$DvVs=qQ9>(@_j88`i8@vm&uxFKV7O$p_~K z(6>U)P$d(3=L=UZJBm|GpF3d%3m6!t})IO=c&5A4Uu8)MBo`fnw68 z*a55wh^j4?P=U<0bm?3kAO{B#LLv6@s_2kF!AUA&&48XwiOS1!EMLA{+T?0f!4BEv zE69FOf-i1u2GJud0_vYVvYCBkB^VK->;($1B}f@P@I8p8fG(jyQ37)5Kwvb~B)!d> zpR~3LK*c?`>Hw!fk~AGf%&92l;e@whtq}O6XY|1Lo0xYG(uaR;kASw=lG(xj{>FL{ zhl!m3dq!{wUhR+Iq&FeQlF|l^d+_HU_&7%g+eJX#6*KAt7;KadgQ zaj(1Hy#_e2c#CDQ@v*V)+qrPrT8<;%--JRYffAI0KHL6#krS zKP*aUZ}Gr+VA}8tPVbat13x+?IMYZAG=z?!sl?0S%}q;g!o7=&*P>k?4wa3_za>Wp zm&3dn4!|EHtm$+Uf z@ji4N`T6+^t85K<@Qub646p0!S=ssILoQrc1~WFnHDq}av6}&5Ghi^F(8z~QzAbL~ zVdz@$xmBT-sDSU-#kRK`_PnI-!@TkMXay8uW4K-(X>CKCx0%$dgv1tl$e|fd#9^H7 zEV$AMih}YGhLl6uJP3*5ks0|^PI?&*cx@hZE{Nk8ZYxe9b#F$LACuNT$bfJck!5+B zYU>zV7;xMZ$(u-;x+;gXV)3Y)X!cM@Bph5i~U>2HNR~Rmd7NfQ(9DZeoFie~t9WU?9vyG!c2c*rWYY zp2{&WGPq_$NS_>#9x{F=9z+f09kibcNIRXLloXEUNG8^~(&j_4Tk=d2a_Ec}tJ8ko zG+s8?{ghX))&2&0Q8v2Ds;G*=|h82M#mW; zhVL*t+pv?~65e|XY0(yyY(C#9tX#bkf{LqF)~?q=ArQ2(2}hH9)$o50S|jF zfbRi`MIw@_;9Np9=ptd#QIHKto%RA_Msr{R01TEyVF-=W%-zTqZ;5jPZs@rx>sRHK z;%D&i8R+IvA?Sgzqkt1HFc2WnNf-XqrI6OUQS!N{Fnn}9TJyUlorbfiiq<3!W=_1kT*v3M(0+8fpFWIuSVu2`2o8Ex0@k`+;GL z4s5hVks~V^en8)_6Zwg_T=Do3cbt=f*Fe)N?0{?+L19vobSTRNkEhkyG%+hVlsvJx!|RSg|=aM z7%LD7Z~%GE9??Ja`*(GzPhS$RdoeOxB&JH(?{{}~0dJ?}$LG#hUp+A{cMWxQyC-2x z;K3(Rj2n_?Wi^|ghdJajz+20Y$y6^32DEV92WAR6A=N{AC^~b_%TqOhueR 
zaiHy~z@}z`3Q-}5KXPW-YSH?;Sny*XvR!zdEDt&OPcVMv3EnjQ>!lVcEyTA#v<Cdgk6DGD(VY^tyogxx6WGgJRh!KvrNFM{WuUK??s?vR!< zS*BIAUpkVB2x%tLNB<8&bv-fql?#c8CjJx^6g<}0;@l((09a*6R+hshhlw_;vH;7@ zF|;)?<+JF>kt2vo3`GcoTvp>~pT{^#g zi$?Cs*mZeYu%Hp)I3o{=JG2jtyHjW;3VkA6oTSQ|H*YQ$5s62=>%<`>JRV1~v#*cr zan;kOP~({eV3dYbX>@(3Fr|%|2oUHf^l0tSU5bj0eF}=D4CkWG+e#_^<;&yn_p*>k z2$;sWSp*Y1pQyAn3t~tYoh34~fIhyqWMU?S0m}Ow6Y7+c6(-wAR~ot#LYhbv*k`ur zKj?imUD?bIh@S;+T(+%v2b2BpK5)>FFFNPMm;FxzXHV1YwQDsc8s=4PjIogq%1sS-B-ywjF_0flMDj zS$uYW&GwS~ygWG!2j<$c7V^61<>o#?qQ$w>!7>Hc%Xv<-^;1P3yN^&l zaV>N_{P_&hiu)HBS!wmJ4=La8m*og|F_#7-a(8SYef9`zh7}4HXa{3QqlqF2wwV4? zd`Trm3(Pn+CZ-BfF7Ykc`Q;VwZsd-(AN|-0Ry^E3u>G$6| z#EX6bJ@R+w`$EiJU~FQ3*aRA=T{~!P6fYVd80CDG6F~p9w^zdN??fn*0lHIY8OI3I zlP_C@qSIugh>0F!W8=!bA>JeIcJqr@se|uB4}l+UY?g%!cS}o&Z53bQ=;#>5^Ir;C zfRqn^sW!A2SUNbP^~C(P;zI=-Cdf}gj zEfzEz6{fNsutOO$ZG=F>x^C9W$Imb2moxuy2G`G8SAbQsIm|?{z~;<0H6*^6JSOiR z@Nz)qEdz_^KTQukKY;7Jzxx#%y+wk%;u)+77u`zr#6rPaaHb)%@2)WZyOJUx?r057 ze!4e@@xsm@KTfYX{7!q{zK7I580*F*m zS*de#@%+vt8K8k@e$yfpx_BlU6ggICwLCK~ukr7EF`IDmWv1zGucc_^(4>er6FIK6 zEb!sK<(#&{`}b>+c<EKsowD)>Cd0TR#J~qY>5U3d9T8N zYF5!Qd1h<$c)ou9inv`svL=i%5`Gqcqg)3o4H_B+Kel~S#-_@v@o3*=U}gY19s`i4 zq<8rIH#Q&>brd-@7C@e-$pvAkl#N%`tO5!1?^Lx#X4Z&x1uRw20VFpry52~uMksVG;lU6A(1$=)v zMqN+w$b`YJ)CmQiZ7Mu6k{HB6oG|8Gi*e605aRm!8px6}=lHYRV#g=Pe#DSfftGYk z6H_>SsDs*M)CyjmTJ#@6C3l?ffMS4=Z3YUHm>j56WB?9Bo>7=v%tX;;VP&PGkZ>yg zH{gE9vvl=UFaJtmxxVah4@shboOibyK$im#sSn0LI%KZz@3JB{63|N#uRl4gkSv;^ zj*)3dq)9z60k9QDY8;+F&$-z^SO}QFbv9#B=Kba174XdM-g}d5hN-peMT*|$Hd6pr zF+kl9w2!I)6ren3CXZar&VKUglXYt?Ea))2l=u&eQ5|Ue=a}U#fvqt-5N@5t@XP~o zIZdfy$`ORgbex?9JBbs*0t#hn9VtG zm8^otIFu-aPl0#Ci%?HiY}g<1MMj&FvAgrSYP=Vu8nmn(k8$hnlC@KFeid>l92}uIvYF_o9@FIGn(5R0cmE;~a*|h1UBZUiDzdIA!pR~G(jZC)4oLkc zDKc$A@#R{%#kT&egw~tm^DdV?0y&QSo*=Be?jSWw2}8vZ$-oX4xB<8@2`(jZXi;Wb z&xnF<)aeJ6O7B!Sd8V5kP`H3ux8U4op`I9-nojKZBpEVSAo+O^`r~MsF8HfU2*`Ot zQXRRvX#UHxa8J?Mwog2@j^}7R6;&Jn17?)*+s0PFX^30l;1FEV?=-awB>CU z$hv6jv6=aCU}7cze{g_~)4u=QdBOX?yYqsG_2g0qe1tCKc&YB?CB+jN(AqS`26pgGG$+KQ)rQgLIKGGQ1&ZS^%4;gROkdB_t$o zPYxyEkuG)!l_`S1Q9=EHq=%u9_inB9yvmTNfRb`uwRbCs=wd29#z;C zuY>M#2N81df~4*9`1p~MhYw?f{}U1@Mu2?Lfp_4x}#Q0mSmYQu=9IQ<;%d}P{c^zawMB!bR2 z8M1{eR14#T%zvRDtF1Gh@G0si!CaY;tGtPTi!P$*zt!C_EX0Endg{O9)!81zmE zwqjKk3;;t&EL|`HVYHa|G+^wl<~&V4PTmG^TiU$X)e=HH!?^cG+xgh9B$$kjO#G8# z0E;rF*-wIgcVqNoiH7|A_XK2)D1-n%JO$e&mz2DL>y?80jft3_iN~qHu1YWN@3kzY zKrU7gQvvY^smmnl9IOCJv0}mOa|JL2@PP(XE^Z~Dsf3^usR3UUN3S5?(3-!qcOQkxnn>uxSs?#p zevlZgu56C!!s|fG*bx`cFC-KTetQMZIjj%9C8W|%tde+Eh|TKjS3=gw97wAa^dXpi z;q<0Ms9+Q>GCVx|(S{vlIaER+jnZp_`4zz8oh>8>`3gSqO`w6~Rt)(M5Ex>h0g%CU zw3I-P$iN*b;ON&gPzcU21Vp-;7d>z?jN%JmoI8nPLky9WDtl&z z9+>)k73S$~u1o5&0hax&0%- z2?z?Z){9U$f4Vz%EQD!%&!G+TXL^ThJ3Yn=9nipamO1L_S%FS-(xe4Zz$tcicA7VS zvD=5{I!+pI@nJ|Jm@#hcI}DgX?zC~Qt*(|?XpgB8L|gs+~~g+;O)soJMaZ$7{Oh%L7MQ>fsGg_a+yx%G(cBn zF+?Sp;0^3fU}6^TOj=;sTFmuBC}Ru8EdVBmK9X0FvqJD<07?-*wgU!A(Q%G-%1U;U7~EoLmjp<{@VN5|*iy38XF^2!u;A{XRU=kJdSPrx-e7@5Q-$f?0vhv1I!kNHbjg*HFEgqhd?gOZ(}jA8@nh0>Azn%RZl$#J)`1}sbTvsN$zW>eGGW?2mV00&=wvSEkLuFksqJG zwr`q$0j*WZJE@oH>g4tqbPi1{9$BKdjG;M{fx6$l9rF(nA8+U7#lY7@AN$uaZJRS7piPm7geUyYr!h#5^l|fr?xt;yFFgvOr2O33 zG(9mouHn7&w|~hcnXRP@n4K>DFIM8#K79D_|JWdCz&>_l-As4?pKtymh!fgtu+Ik& z7dGW3^T7}XxJ>T&@Iuhb_o9nZg{tuHvRs1Jt?$o|f}qfg8^}~E&P*p(;oq@{B=HoL zrTNBgIU**JK|fS-&o9peG5ZuHyqql8xmURZI!$RaxV`|Fz-&S^+kxyu4}S9E#m<3( z`TZQBL1#h`zbyU;L6LwAd@8Mo#?H@Y_Lt2w=|C(Z1NHmd4;+Yu9mDRSo|sJC-@ncl zj=#R4^dva|Hw=5FLg%HA+@;B`bko52Ed{vN0r+?&tn79l@PK-!PW{hWVedepAST)W zTzP`yQUy1W=j7-?qBQ>V3IX((xH2To3%IYt%w(%EhxI?V=YUSyN6zQAYrc>JAiDB^ 
zb5j&NJ4o}0aYCUXPl@UO{iXnX)6!+jR%&W4r;Lgmw|#tkf}bJ=>8n@8RdmUk2{iJ{ zYO!907l>p?Vr7DNniv$(!L0#v??9ydgM6S~AOj~r+@%CCdIhPB3r`L~zz#y?zklZW zbLUv*Fj`oJKG5da`M8sxBgC(xQSS_?q}UV~&gdzjjmLSwlepFA&%?Y|Y6uyLpd(_X z#9;&Sprbr7>;?JQ^1t46?g})ri3^@Q_EnQyqtD>-z@pop&S%P+kw~OUVt+%9D@wWm z_=`;!)BCIXmZS2lR#nvj7D^exR$sv#NMd@UT9{!Xqwe&nIW3X8^D|)(54wkHqV1ilX&QJSV;Q#eaOrE@kEb zF8M(gbe6e)_wKX%0+ugEmuOJD2THaMu$X-kFJv$}7%{W3nsd{=iAB#_s%mR%tD2Pi z@(b~kNR>m*MvIaYRa@S&h!Gkc!r)1w$m)c(gk}+zn3(ui(!Z%})W%nVo7{-k zGzmjcVhRd7u14`J1ayFWkpr^{u`{5RzY;JQwvfddt4VmwbRl<(i(z=(I+bshx+HSodbQ8)UkeE39!oqs zue8pJNUdNR1luI{H5pjHE)(^luM65svJL{54A2s956nD#8sf<#@R45bdKGm8{s@B- z#bchH1$E6imyks4Fcm~@UnBeo>2D`JdlU55ra}k(0ZfDd^x8+7N|R4EAlwd%?fLzU z4Dk=Ry}$be;tn;WG#mzGyXVbq>t#rsL_^aMO7{AY}CN&}gJ zf}J%SzWy!{G-7WC=6IrD%;X4cNcw=JBaU+W>+Qx7Rwc&gLF>t-OUQA=m5xg%n5m`* zM-J=h>vsdY*7#tTSX*8tkgD%2W{0trprbY5r$|K%lEmPrYqcwz^q69#yh9NR%;N-PQhJt%hCENE~_WV9~ z=@KVJ4%43e{ih+0Kt$$7nV~8^otePUi3swtK7>G?1~{@{yE7A)lw=z`k)nm0N1<|^ zIddk27_NZ&2yP{INP<`JhOM|%hSL1gBM8C-RNJmle(aZIvb^P4!hzh7O5ioqu?n_-KWQB^&Q3u9cM=kY9GzWLA}Opm2fY*YDrS1%wothhEHuD>~)& zKBGlHn#IjccDT$D;4LglTMzPI75dt^BV_U7#UVf+7{(U*PcN!2VeUP-PX(Wk`38Cl z1BPVCSQoSRNY7|}>0#I^GdDLHM9Tp~5ujM)#yMizfDbQhALFm6D4y>|sFW_2XH%ub z#nBCaLge)(j3Yb>oZ>^J>6tV?`|Jo_DVP*Pq1k0E3zbU_F_CMd!kV5?*}4l`iG*o~oc(h>F>L6^tSff7039G%x0x12&KR{6=iinYbXfL?)5{4Bcw2Fx2M-Bzn=nR}U zX!4Vexj$&O(vGwGs%eH>I{@Frd+;P+Cjrw^6U#TSac~fZNJ(?L z!FU}C#TmC&k||Bs)&e>#0htX4KeKgoe7fpna>xO?JD0-({JIHh1I$iW#e1@9~0on|PLIxwy#9z8}sny)EAyB(#7c~GGY__0O z60*4(wsmBid9Ke1Ns^39kVc`7Sb``JyDwVat8%ESwSN zcild$r4>Q)Itydd#=uG3+J;=7gh6pV@#)5qFZ$oLemO3lm@A$AwR3+ea-T$SaPX_< z=1hc}^g!a7GuoJ{$pR!h#buvqiVFsps(Fe!IXW)pYQ`Vt0O%<^xQiqg z(1Iq+i^$-%wWz2RC7Tz0A`;Q3PiJH1g+1{+Xw84Le5*#zVj*U__z2nW3bMNo(Y>@BkIWjRjb7JJr zA962BGtgi74>+97aG)pQLaJsfY_cc|P*+z|lYVp^TIg2jZjxeGf@dcpfdXZemCsYy5bRlY+yq8oDC6 z$oHmA`Em${RZ(0kJ76II$HMSkafO4TYgc!7LQ)bVrl<(2fQlUsgCq_!d@`{lxUm{v z-`!ev_uf5YNXAe<30WWyzw2j{dDTw*ISn`I5rxtgy&H0S5ji2KCPaI%c<6Uapz{WE z;VPqROM9#p^=yKUL`FoI=;(wE9p12ZZ3?*x5w856FJHEx`Lq)DjOq0Uj#d3uDnr_S zk3m<@eV@-_E=K`#`J!T1(9vjy#$5ODV>%|c48V{0#=70}5shCKlJQc^=6vh#e~pAr za|3EHgIc~9o5B@%%nj&aGltSTkDG@*>${B7N^?u=N87R)9;5mt4Kw!yFX5 z{LbA~cS0=7vJ6+6rTLlR_!+@;3MFEsKJv(Rd#*H2x9^~d#Li|D3 z(Uur#K;Fgs67LPU05*$hDIZzdiS{K43Nt*K#0TkVXoaRTezuxMtWmryaNK zU4d9fuHt|Z%fHexXvvxxN7-Id*@0qTqeHM&GSL`4Kyv%zYPhy=nPJts!CD9RB!ad9 zZ?8d%CMya&8jVV*3r~ek9(VIx4I|70n_Sk^e>`64t&bJNHo#FCbQc#U78Mp|Vx?u~ zFF3}nL09k_W29(WiWHu=G2rJpsH%bj0!_%jIMO?Ny1I053*1!*4~v^qkMA@zyb3OB z45mw)?EAHA7MHy9Mxh&R48`Od;I7`NQ^mcqGELkWngB&~2Z;Apo$>Iz0INBzNJRA{ zTI6+=m0{RT7fg5PUA5>-llotsy$Mv$d;9+T&8!SN8A3vu5JhaU8%V|`#WtqQZ5s`S zEhLc=A_*Cy%rcZ=+hwYJ3#km-l%c3(j5J7t)Op?6XRUMA`Tf^A|Npbrv!3pQwBN9I8|9| z^?!*UF=WjNAm1~a*-*nS!qn4!T8Cc_G*YVC0NY6)Ak#CvqEVQwncyib!4WC@4S-D( zT#uzbUGSjao4jgcpB?j;w){~yw;WzYh$OspV0Lp!c#3GR8#^<@7gk` z?TY$z7nidbL4y3X!SVE$Z#%~x1!2^PZJ^BAIyeiQNxaOI-+iD(QEN)GPi$NNtuge?PCfLl!}xxb%%}ppApy{BwL_(Fg9fT4Twm+FIJZ*}N-7^6bnV zHdME`20#60QD;T7&GQ<%#TvFn&}VONFV4bb2T=?|j|YBFFIiX-c?D{pdPKZ?AhZNT z6{9Pef1%^ZgI>GNlU4DJc?D!4AQ8F#?qZ`_^8zb}xINCdje9mxh;R_N18&B}#bv$Q z8VHj0uni`~gZT5%RFv$9z zgD@W*kn$g^JjX7tzV-eAhi*JtC?s?!^s%`8oSi_}E=WW1M4l;03V0l<)Z`(-l*uObMd}zjMH1 zAC7DG2;t11IO=9MNZYZlHtBJ2a53(+E(u;LV#hi5s2P*rf+T@e2(%(hAo)d}3=v9v z@s;IWt2+1;F}H22A71|wMM%Sq&LheS6Q^zN%tRWR#+YoyBaF8 zlG@;KBjO%1*9b0^z=70?NQZj=y}jY4*G89WmpJ;h!Vz*5J{0`ttly87;{T2PsucZD zUlN_A&qNRt7#^+x^QTZ;eYCk*!aq|E%(Xmt^Tv(tP)-$YJn-y*V-Pa)7j;n) z(|GsN3b3lVbc-BBB`U2xf;$_kGC(t7cZFafzRMsP{d^G0iq&PBk-i1mb36Srw9cyP zZIXVr*XrI8S8K7m5f-usbt_Fp3cb-eERneQLpe7T_ITlR5?>VYpN(=0U3bgfcb9u< 
z!ECzs`jK2`XH-K$q|)n3r5MNyM@XZG@y$ApPUgNrjtN#J3$K`SOQX04++J9tv*K~( z`BICZ8-N1|T*{X2!-Hf2fk=+CTZq>>38eq zPl+@FkSrRF$2OEtTQvJw=k>Q_slx!`VvDEH&a^XjjT9$4mj~P4XAIc{l3=jW#(Ke%)7=jlfBC`L52xJZfR8`D zO>KYNy&k16Kvxfv4aVBq9ws1%aS*DitDWuaLe6@Q9NC1?@a?!c0LiucBm0w@gJQ}7 zkdB8^(v4WOF_?+AVe}aTPH?RWjqz%8NQ*Ilo{Y7HzMh^F_jn90&7gg4_1kKh z`)tENpk4c0b1&V}@JQW>mT>O%#Vf8AJ-e`h`iKB&YxDI@R7eUD$BM96OyI$qfc|9SBj?H>nhY^@Qz)(Pp$a8sGIiFaHv>I!OFlul(Ds*KhGMR&3+ejj^S^sarbpfamAHCmPiQ}{?1#O$^Br^wC#Om3}_3I0UcK)Q|O;%2vQvN#s5UC zjTi4Q8kKs->RFslSzH1Sl#%7KKbXJxN}D@&%$V*vhp0Zok!qEHdF7UnuzcRtr|v~y z$Ho-U(D{yV$arC3sw0+JfIVv8(Iom_2n?NOxnqMtcH65kMI5{g=T~G%-TzuHTAqz< zyJWQF^l!z2$0?5{`<<~i~7?l$C-!f6>Ub806YX!O^I2Z7)LF(i}$ z;B^f`FPb1s(>nU=7>syqQqOPtUtvJ8nqJdg<~ zahh{}`VWvrFB+-%tF_iNh0jPxng}%Q61sHmY)k8Wi1(@SZ$wNju=?L@5LoQ(-JJndqvo;=xwg5&i4Hkw*mcGCP3+%~KC&1lSPX-||n%po67wt7#~MGVQH63h4s z+_4gI2U6dy{IH#k4F`Z%bo=GY0|36mY<%>Fdp?d+S0(r<^k*0oj~nDMQx(}K311tdppt$j3)wA^$)l-e!_rg?`-}zKckdo8_n+{p*2j?y+|n^Y{hl-<$gd=d*tp%CMrGhkwH|}OLaq}|L1fiSYVU3s#auIebpWaB8bG4F7u z9iNZ$s`4+(s<*AOk`jo#?etp-GB#v+ksUwZCS8n6SXOa}gQ8}7=Jo5>^LcpLrN`%( z=Z!=$uRaGBCG-7GgJ#^eG}$iewqRp#zc>F0e_ z*Fqz920F=5;#xqk2^UB)bm&k8j(e>YLXp8sw*1K|9wtJm-pcK*RUq$#N%*xqnwcyW zVeqsW%<+05w}B``-Yqh27<8b0LCl!^xu2D_f&Y1Wu!E|Nu1nrL3@>H<6!jDw-&WMu zh%ZJ~hQQ;9q!tSisb2Zy4O_NsnW!AAop!Iwo|5DhePGjDMXIwkD982&Cv( ztM2sHIOn!Lt-0a%jkJpV!|jIp`fvC3apJk@^QtcU{kR+)*4}}`{|wGnG9(ph;QIB= z-cS)e!di}(Dfe_QP5(uYoV`@Q2{(Rk?xWn2fR||u$y2%4Z_l*x0RaIq(l3CZsWxjS z>k|Y%Vb0^@^NXiOtDDY+7E}K>qQDviVschbwm>|IbBoezUPbqe!QB}1Cu43C7QQa%v7Ap(2_IxllqB`oNKW5AwCbqd z#^6}DBnL(Yb$&W5P5?pzxakykVe)9N1}6?B*5)R%5CgsN2Y4y#DV(!2Zb_CIsf`9Z zST7DOtb4bZlzSPi$eHO^b_vq}4se+6dFB<|6^TCEF~LF|JpEwgn-<)DH`HazND|@= z34U2nP+&upy!&f&J1RlaZG_Zzw2g23-3lZq^>VOGu;|evfRP)~1**Qln_ey|xVIe3 zS?edCkThF1^cw>Hd5wIzh7MVY&)rPB;`iiHk#jF`zY_+n+*otmQZfv zHg#IzqnN|n8u@wJTR9}EHEpgQ_Vo2Hd(33L6+?87w{0w`EOa88QZ;(}(h zkvCR$N;-mMRr~`mfl$@(ATbL6n0;k=Yrdt1el{K-*sC^%-@l1CJhfHQxXS7ut$p`2 zYlC=#HHfAC=a}D!BoTcet?pI)Kk(e;4 zBNJsNw%db?vD}zEb?T;?5I=hg1SLq7G@UX9jvcWg@v63~tE)gBGU$lAf@l>%>Xh6o zpIvcn(!%-c^mUh()@ju59}-I%o-?7&hQgH1BwL7K8?^>!w2oAq49@IY8F%#9G3#DC zXTf<1oIw%og}1thXV4IEt|FHfs1swv9hZ@8w(d2+e*;2&u$V$CP|vTUJ)%ktVthm` zPFAPkAk_@x7VvVFdk7R|Z<~x!AR-j9k0*KJzOwqi))mUhnqwrGp(6LFYiTaIkLk*h zl=Ok;GvDb}Ef{XYyxR?)g18VMx6G%WQMNHT?UmPPT;n;l(M=mQ%7wpzzt)pU0g{ja zmi1O-vq)UjNpF{jUSAwUxhq@XevTV#1#c0zTpo#%pQ7*=Ha~@tT9SH)qOxVHm*gKe z2Zjj(Z~;RQ>+T(WrnRTT-M~NbH@-a@c0#>l$BodqYK7D1Wi7j1`-1b0u&4zW3-qli zJ}sxM9|cheC#td`n*%kulXK8wHG^KFCMkx6Md_!3yirt_H0t6_ zh{95~=KUNRGnlUM&HudtSV#nbHLPATGr1IQ43ROjMK>a%?Ba@7Yorm>85J|6zrhnm z3IL}2;D*2O+=MrsG0K^1h{BLY3wV7Ic+=~tcU)~S|zziC4vqmt; zEI-mh+gy_m$~w@217%GdL%$*g5G33Do01r>AqrbNJC>dF@M8K!7&o!);fx7t1*ET# z1zDnBBQk`BTm3%HBz()BrzkWWz)B5t4pF9U19Uy(a%p>TqRXl>p8gKV8p2pX`TkOs ztTorGRxYvxLR`FLiAMWpyVP2@K8^uFczk@k@<9)yFLZY2U1m>AT$%=zI(pi)eI!&H z8@FAZr~I^ql#Lg`@7FBIq(%YUG0uIj7R6|0e^XP9*w6b89B|;CNWn^#8BMhpe*E}x z>*;0A#(`arf9s_KO^4B+xZbR+DB8mKG?Bhm;m@a&(I&Ri2wSBn_Fpjfb+xgXTl{WR ziTS)hafZB)bGK&4jw<35#a@PWkflQVAsW)LnO0eowZ1dyR(dsBXSN6gfMxKPv?N($ zb)ce0{aCR{t9702*!buWbSlUV+CSCi9`&}|;yQ*1f zH$0&u(ifl-vE}9>jv&;k_;&2^_~#lIS(%odv)hrlc@t60FIrkHc*AH4qp_h@_>(YS z11TwwG~m-H#N!PC!Fo8agqJ%7n0|!KR^X|>%zZ1j zNs^}PBQzS$T6*#gAgWka0(bpx=$UhQpsZ~YLqF#^pWKzrXLR2l8EGfBNGZ*hgPL^=A-iBgP_YsA)ldpYO{()N%l6KWK8WE z{|dOEugho}5Qryh(0(>09RK24y6FebF48d{7DQ^t8Wt!~;*S|MY6GFc0GYyzEUOue z?Aqfa`4x`2*Fra%iXV&)Dt#vV-8=QGgF-Eg1GAlENF(H6h}`nGXV0IDsb}DMATx}m z)M+GC4VoPA*|wZ=ZzJUyZDsfC+p-qWF7GsSTE`rEuTT57Z?{JO{gqpur?#!oibCto zJ-xEfG$?A-to9KRY)f($M{u6nq10P`ea+La{xTob1QKxrsR>wCoX8-kQ@CPsIu&j= zmxH$-_&(`v!$A+E@J>PX4)}0`CnG=z 
z(SEq*(|tL)uG+=QmfQ!?SOkq^Wwlkqc^_GC;aTny0;VD*wGlm9@?dH+Bhi9VDW5P)pS-h9hj zLUS^!W}i|PN8PnUgD%iYfQ z@8AEt%b}D1Lk|!{!uhWUxZOOejm!h0cCz7mOBExvj!YX0iQtuIE$}jTk16f|@-p48 z$?EE*I93`O4OA|s1hjT9kWt+W*;%ZpqvdpJbDkF>G7wxw?{<614c z3~7s`$^1n`iBYawm~ZxW-x~Y3Yp;?*^r>X_Vi*f+tk)OPbttMBq*y}q;kek@GIYI z=Crqu8ER~O3;S3;jQwcK$_!&qdJiAEyy2XuV^(!F z;V8d`5>`M3k8W#}9=Kt{cq-%%++5Y>gOcj@Xc_n?ZOiPMPuU`S=s=Iv<^ne!AvVEn zS{4wx4^eNF7<=gDR(F{6|FfZJKX2IL(y-ZiYjlI8tQ%>CMsM$CWqsAtkvB`cQO(+=SM8P)s@ZQH4wu$Y zx^F9d#d+$2^0T1ke0^x(2lPY1!G=SI>}7l6eCVT^!y8s>gercFNHfr=L8Z3ad_{k- zonWYuL+C9v(PZ$9yL}o}w`N&Vocr<;|5w?st2;zoby$T$k0w+_A!sK@Qsi3fPuNDXV!+)bw!S*R?YOS^hOq6UB6(tfRuL7)EHQ4uU9F{2R2Wji2 z2p3Z>K0!Vu^?P6aGd5PYep1zoF(1ej@FG5RL<*`rYuIw{|*g z&el)*FXkKwUQ(1j_~iKGtpxl5?y+Hv?hxoeaGM1)Q@}$!kUv!#eS$3`x2N5|zwOhf zPik9|HImY zVRm*ch-Cp(R0xw6$4}ChEl5)Mvlvo}U~E5^Ua&pt41V39@_a%WSfjYsi^2+hfRY0X z89ySZ43cRWGn3-nOcJB}($DC$CuHshjpV5N+-lL8h=2hbFeSx42JX!ie>^?gU8eZ? ze5zwcL!$$F;K0^Z#Gk+dZ4ynY8bvN41NH__kuB^@nH=IvPiCJtSZpxSG>@Zdtz(Fo z52qWk1XOtO3TsQpOWiZzkNXP#{RXBgcb(e>MS{>vk*NEIez*ABHxY+P8*TLHj%v`) z(oOVE#owcTm^Am+xZyZoIy@?)?cw3xPy?j>W-nyL6|uw`3k-gX=)s8WqPf5lM1{Uw zSVoZp!sB_h*{;TCANzB!*G%#y)n^a#sMv5cZ!RuHCSvVRYNS|dTyK-l`lAA4E4}E2 zeOvEBd`F2DqHWO$;#>^fhWa$6t5)bb*7kSPx2s*Fqofd$ev}olvnf(V5Cmp(4mv5U zhkMAIwg&!I8{^_K#5)+3rtAa~w;cd(8PH;PZ?9})c(l>xR2A!wYEHf@pnhRBgIVNM z_SJ4+`!7zpp;$sC({EBUq>_-ldFCXwe5TtYzrGO#*bEGaNRw)F_qb`Sm^J!;=z?n! z-9L3mUG&XrZKlC<@?1WJ5P+X}_z5b9(n`{;bpA3#9nB>`m9TT0uOc_xt1noOqpPg- zZR^2wVl-M|34IVT4VkGWBxq2A$P#b9n>Vrrp04|pvd|x92fR{t|C!%@XxLnBPt!ji z8H&D}Qpg6gFNmd0blWNlnH>U1S7KvAhEY-pf&yQkZ2{+0_s;wC^MTUUg19g+7c#Dh z-kTCPA0-hL$B|(5_74BMooHY|s8!RDA1f0YkFzOtdy*V6?2{lTQe>b-5JOz})3sC> z3fbByJB+y>ve}_vc;Z+qsq?*8|9CN;;t~Cv_5SquE0-@@1OANepK#If66I4EVhLF2 z5Y2uKWY-7yu@}m8;Sc%6!$8Uv!WPTIX3!&^j|h0@|7Gd=<@q%_ z8ad&37j6ICM$gFo{64So|5Ia^VWi>Fe|gg-rMI5OZMovFHur2 zV4+1p6%ZGDsF8VIP`@T#9lX}2NBZ*58p;nL`cx=jX#_8Dq6ZQYpUio3yt(62!V}9j zbzFHc83_ypV=f9$ab<b z;9057X3ktxHF>O#46bvAkX=L@-Rh>}P`mg;VTH$E8Xj~WFyJM!&#n0&mvZXC%QPts zkjONI^SG+6V>TkB7NU&xmr}v+zp;epg{t(qHf=m9C{9B zvYLe-3wNoEbmEJzg?Bu?h*$vu&&3;u~x;iIDjQ|-rAEH<7rrW0lZB4x~*%iMw7rwdnYUlAvw2R%ZdM89f?YJXg7e5W)9Q@7mSa zF5wdNi2PydY|+1CNI6sn6Jl$xYxedvQ!!GMhVybp472e8R-=gO7Jgeq%IJVzGn``P zQ``-EHmO$xo<>M3?&oxfdf&!Z9+g0RAULhVt0JbD6}g~y8+f9=EK4q|1v+X=d7m{U zGeGom5rc(L^{=x}n2#SGjgO(-<&MypjtMjE8W$VoHu#EmZ9<%D7x<7HY)W7RTcHp! 
zEor_b0|tVHQ#o$H@lYYi4@lCipy_{j{ixFl9=-d%q&Qgr#{6Rwd=-$tRqah*u{1YX z1+6oc8jBe{*%u0}49mP8wWZOm++9{TRjnKpJKq=XEV=N#@oL6;xYLKYA#7T)AaBaI z-a#z~LbaY=6r8PCF zIrsY2qi1LRasJBIA)7WG?76sLabl8{OJZ-=>W%(Q@2;?@9rA8fUtgb4jX2AwFW<|q zd&KvydE91Tc5qr3)1wm?Jv)B`aB|?Q74NzfPGw&38APyY@7@_p&9siq-}~0l>c^b7 z@80c$YJYMO^lmqYu)kxZS>BFwR~kG{$}{ZKXCU>sC&=?&{`}mza~-t978v&I8B^*@ zn|*AiCWMUX9lXgN4v72l{S_Or_w3$%;_~K%o{f{IPIPvjrEg%c4_xFesq+qW$3s>l z*W?DYK3y@P$=0!KD0n;G(PA#M@z2e1EV9r(x4`K~XZ^r!&nG!K&HOPfuEko0lYVX8 zWylH}!DAjX?Dc6yToLm~?>Nv=CQs_tH#qN*1dg#O#MDBbS z)Td|9+Y50zxWt&iLy!1rSzTIfs6wZ@FKFg&n(aaM4+nhy>&hiqgNKY#GAVuSLeMAY z32VYWI;Ct0&K_N%nD`JuozL4n?u=-icQ8$G0z##kIDg>9=fC)59Vzw~xkne=JyEMO z@bDfhb(KYpruS*JA%JG@;lbfe(W7NhLCIz0t6_{TGY=@_*OdI`k$rk{QRGKs9UbT2 z9yM}q`R2`=PcPy_$%RWO`$hK_|H?0e*tDYLMhBTuVt;DYjW6RW6IPu}3JxCqd7R>O zlN1x~%v%H)`0#8+mMKeqDQNMo$ODI!OmO1){$3Y{mv>NCkLjZ7|9HlQXPD4+jZ3WV zh5BMJc@;FjAqD;og0;ju$o`ML&NTeL|B9;(->#{MXedP|dnEykNRF*mva*oFfBcud)rPkXlCPIfT~gRUt$vP%hj@0{B?9$QG7=c zo&jK%l$I_oec##gF^TL*k)h(HHEbMACi5Ay;$Ku1ggctFNcq@p_4|Vxe7=8<=OE-C zjc~=CItqH}^_w?0mcLr<-nf0A;V-+?{|b-!op*N@A{&AE10ZWE_eVHK{Ljo4G7v;)Dqzt>s5fW!hjL>dnMD;|Cuz z4_?;&`59x>cJHeQS@7V<#%0&|l`T5PJ}-UC?D>*nuYr%>KtOwOYMS`)Iz%=*JfcsZ4q=#aV&37P32S(0@4T-l zD)ToR^9Il4ikKS3t2nanL1&NX>GhpKub3uVkEDVfkN#{Y;l6Yq9^}RR&%$5#DQ``8 zuKx`8&NL9|h5V+|dydm2mKM6z?LB?kh;wtM0<4+>t1W$Q+U?sa$yr$u{5}T4V$O^# z+FpOFtP<=MsDLZ0ewv-@2wc@kyiuZ zgnM${r-N4-{r4+QEt&z>#9_Jdl?D59+=Pol6c5rJ%AvwIVcKYm!vU#%hYs{FLyhnb`wulJ#W&6@| z#>>Z?VAPyPJX ze%|}^atj?JNmIi5`F?xmZeE<+u%_zXnwun=m2P}+8qzt0?U2rcT0tSZx60A0KXm0A zGVUr~?q1N_{UEek%!v~$d)P_{3y+Gr2mInWb?ZVZB3dTy_scTLhH~#~7hh5cl|Fy( z#9S{ktqBt*%+M59k=(FR+IdlRH3rOP!%T`-C}I5?^X(_w=S-Q>an1J^ukgM& zS2XaH`I?C*N69alujw+ci5Enqd0D1uHgzZc{&}_l5JJ)kIx1=R?zw|_iP$`Oh->D& z3;ht~Qm+%-8%G}txI#`04j!P@w_n3Q;$L(`+0TtY7u{OOzq82oUaqNIomiQ#hG@O0 z!0B?|`bRL+dt=As^XL1QmX)#P@mwrXxTLx;&J^3Rn_IisjvN`JG3xzuxm{sFN3SQ!>j#%6J@1M}Ap!+FM2t6oWXf`~d7*Ws=t0)?$ z+?3WFZb1?}ca59oj(I(T+A&`QpVs*kLl=Nv$#Zj}Oa3NC;}uPNAl% z>htN`t;+X@Lwx{|&kzKVCNf9yfyIYBM->*HghfPbz`{Fb zwygWn$7Z1!l|BQ!rcvxbogF)OGP9YQ<+cXqO&?^nQ8aio^5e}kX$v0C3=G}tB=^x} z__bF4S&j}vv|1Hi`C>_>e+Qk;X$}2_zjmeoM60?Veh*ad?2FH4T3dU)Iy(2($NHP> zGX7faDVG%kCa+j=sa-ct4f6Tb;!qBQlYH@)2Fw#D6Nk?}Td1maaj3jOZ~O9ZabFFh ze3geoEz^ZW`ug?A>C-J_Q8&A|#u9_~prJyDz&{OHw@&t@3`60wbl1W@_2J)T(zS&R zfMC=}`3kf&tnm$Eoc8F^qo_qS{fQ}M$g;}#BC=s!wk(6%f^6NKcVS9PRp-@`w1O9$ zTCP&e$>hb!N7QgMzYccLvv1#d^xlI4w~_g4%cAPeLekJWh#Nl1*g1aw&x9Hq_pz?9+~icwUQH&naxz~Xbp#~wOlZM$t?eeIF(&&qZR&x;EuJ2_p4R$&6| zubsavW-t?XY+H)e$6n)X>tA8TIL`(%=2j;7O^hi$H-B%<>#lXVJ14nP;a{K;#}-QD zr35JN(zNLTl1B0^BQ2MeRV3Bz=W90_9S$E0dS?a4OIze|2zX9#8CwRo6wb*LygWmysJKb7v26~ zj|^!rmsSci_OG)sE_ZEHZQNwTnR=1gRkS#=kc5)M|DR4V_Vw@j`=#UK%?1^9e&=N- zD3zl{d0XyQ^wq18V4WIm+u9&EB4reyCc@xk;P1P7w6iSQDvwtu_ejU&zyBTp7P-)( z4sQxRuO#!PU46IRv-~cI^2@K6rY$lzdR!mP65>~BD=I#}5c3B>=I*+>sh71iG*Xgk z%Z;g^#?F|LdrfQmwE8=DZrZe|S6pjN69pydsdLebjfce4=Q|sZ z3GY@eQZlR+btKZ<#L5}&I#Aw2KIbm`4?A| z@?*%ZgV)rWbAKAtW{ta#mOxICpI>dCdq;L&ssGSvX#qjP&ex8Xcy^Aj!m1ZT?A`aGKmTx5J zkw?`5#8A|$oq*mmA)a~~8b(J)`}3O%ji&Y)_Om3mskOQ|q#^REdF1PcLszUA%!nZ} z&<87vn4X`%m$JXfzB@PhACKx*4+rpG>Z>Q6<%gcsK8PB!Y!kiwg&EfyK$28GjH*qg z`sit7)adMG+m2cczeb(6RpE{`UI|EV-ulQ|;PB{~lh?8NQ(HEM(G}8~!kXEs8?CKs{O`;= zwYh8Wx}%ak(mp@>FeN!_P&;DbgT&RV9z1+l^7ZSZ%!d~BjYHa*M<4vUa-|V6F$!?* z#b?#76}#(Ccf9kv&WX;AB6n{6#4v`k->fTO6+FRSCj;a0eWsL@sI3l{|)sp&kNf{ZN z5wM@<%ERk+>^CdbJ_;N-W!{CsGgEc)dktu4sFSZN!V1Cw9lX#kG_f3{yyZ#XySS|` zZ^tx9(`1tt^4C$bv%yTPW)!HDLdb)#b9R#_YexB1$CT#?Fl^F-df?0g8CEuFAy`u~ zRd&A?0kox?kYgeD)xc-S*2AO7;?&w;vkmffHCUBA?Y~RD9FO8&mMfMV!n@OvTxG17ag$Rkj|Fa*CNa^Y6Mb*nS!m1rr 
z#@VDcr}pY+9}ZsDD~nseJg63>vY0`mT^;0mV7bPDUaY=jJ7z>cqCt5gdq^97h@|BMW%l* zEiJ`M=k($}IGfO^{8^U?RaWRtFhF#1<W;>v<0?L zO-TuWM_G(-75cN)FYevlJ8&~PR%vHUsl0)^pTT3oekNb+m`clxOOy@?Z7{01xNF~@ z_eom7G%24@-2MPcY?)!;(Z+0cn_}~^xg30h<}F*kfoi*2a;LRZ|9^jYPhG=Hi5L7Xh3tK-;6GzVIM~Em&sg_=0p>p4 At^fc4 diff --git a/examples/running-llamas/artifacts/Llama-13b/prefill_latency_bar_plot.png b/examples/running-llamas/artifacts/Llama-13b/prefill_latency_bar_plot.png deleted file mode 100644 index 903dc1b67feb14582d45444adf03286a856e84c9..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 36349 zcmdqJ2RPSn-#7j-N=EjotSDtxE#JLM zOP``>eJP6O^RlJ*o6Sv~efY<=^V-Je4eU>yceQXjN$s^b?_gtp-sX&@$c2+m&S&iH zHp(c<$VrP>pFi*5tRyRI`)}VMWAAiYc5~j-4Ez#Chh0aUDT>X4{Ff$8HT4WdT{*j3 zOWn{de(2Lhlf5&G45MvXPwXEVTIZ!aT6094MLw#^T-sms$SI8rFJ(?d%86Np-&k$M zUoXA6_S)Jb*AwK{>U}7(4OJ>tol`Pv%jyh!cHZo5%@>uz#PMy3<0=In_9H5ZZ)XHT zM3&J8og@w6n`X-=}{s0n`Ra+HS+J9E9tM2Z+I<7vj+dFzw!V7UoMM0;NT#4 zw&mfHy?gg=vg_oh%+H+*Q}It^G0RtOT9%)me{!W%Izxz-pRcd4 zzG}q7uF54#mRL6?(R_QGX!fn<0bAwk*URy(>sze2CyMusoNi1~m)o?-^ZPurk9g_g z!gGy@t#+NuJUl!M(+GwzQ|{BJ z)%{KPW{Nz+?(Fcm>-_xAeji>InHwj_4 zHlORA8UL>6{(HUm+?bA*j?S0f>afPPwndQ|MTgJSrcImJd3l%G+S=yLeSCVZv2l9( z*MwC~gvFcqZTy?8uTHmd7niaMDw%&SyF~fokqB&aqThJ>O;wTe_gveKEh!yg5fMAB ztZZqPge|PCgEKRwhq_CJRp&|p`t9iZ{i;2W->Ovp{Peu% z=g)=KEqCwT%d`8WwCQZ~vZrTTH1zfL|2}1hAMe(l|Jk_$3%Rkeu`JK3M$WENF<#M; z_V!l$g~rs>tL^Q^+}zyeU+bdN3{oSEyRwdLMS=k)@GmBR5bIep5 z?(8^rs?cThbGd_qLmBo8k1iF&CLDpE#9gq_(9l$i`<^;=O3{0+P%o~uR8>z;FZXPV zSbd$p5B+Yc_)Ven#YL_JW!J!{D2^A!#WXK&Sv|3=W@^qc6MpvWncUW`HRSr&?Y|Q|yn7Aj z<)9!jDXBdIZ&0i$5mkoVCka?){!5o@WF38)gj#_=EJEA)BZYQw4&qi;T^HX=OEqU6 zF$mwbcCB{i<1_EY3kwU?a3{Ej{*D4!yY7+=&Y2a-PwH>V-aG#M0(OWFUUS}wSDZzK zcT{~MK0H2M`Q{BPE++TMlPV)KuK+9(4R-qWk00+IE3ni4dD_y_&%Sg%ziM12evo&? 
ziV*jS0d*rIK5T78&*@x~PtRRPEb%;XFF_FztOIR1krRV$_01CTiH}a!^ESlF(_v!= zm6WL5*FO{D$X#7gLCYCJfAr|$?DOH;D6#%e&$nh~WhLLcM<*pE6~1aK>$Yv%EE711u)mVF#0`I1`+KHvLX=T`f!%MTui ztIQ5Xr_Rp4z^d*!8>;Mm>VEkPL&nBOj;QPCFt`jfm!n|t4D9WE zdXDeO)2B@j_TCmP*nfM=@|l?#<0sZl$2#0+ey#QM^E>eTf^C0)e_3lQFSdiGon5xx zju0*V6ZS*56UR}z%u!O(jupt9cX2tGuD|w)-KUlE^78gqzK*|DWf&cp@c9(NwXQMU zU>$0LnwgofZAZSgiAlU(LU=e!xS%pG*``fR>MaisH6?2@tXZ>W{U>bx0y)d7Wz88z zZ0_#vX@=>X?o&TEjPIbCv#*_ROxEO=x1@h~I4gAJ=Ci>ai!WgkZ+7{y-vQWq`7^wh%6 zj&;Y59RUNpDWgwi&YrD6_hV*bV?$|3I{Nf1s)-D}`hmh>KR?=rL{;I0v-6DPTT=GibeNlVL^t-FXffKQt6Py%x)6#_3@c_k z^w5xoA{UO%pp~fNC3n96z;?G$ij?k;PtN$GD%Z{NO!mFvIzQ0NXy!G+^XK;u|8Ac} z!Rzbx@2m`5-t*%JBUStMt)zcEL*hPovHg09ME1J-?SBbOpSgx zofyJs6)kya^sVZmdF|EJq)jPb$ke;7uCCtL+WN98ge&UyZK<}(*$3a>CWbjjJ$m$r z9VK`9sx3)Aiwg>jpYv@yt|la0>8T95r=Oy$c=6X7@97?f&LU?W0m+5Q4*N0O&}wn< zrK`5u-B515x>}Q+gM$VopG8oKTh^?k3RRXo!MvJPRD{ruP(kGj=2gKQataC+c-oNw z4x*bkANsa&R2(a;@B4Qv;DynNi48N=K9v9q@_43kkM`c)V)6FQjdLV!r;M; zc4kHVMw|9rnqv2fipz{#4!u>3(k6xMM%gCiXerT3uAf_kk1+GeRAD2LcCjl(KVHRa zB_ktaKmI0UmRH;uAHrSFDxgT=Hc?PTz!?@e4mwy2 zl}o4!H1o?0Y(mQl?7MmMZ9a$^J~Z6j`2IcD&(0#TWqu;lfBrC0m+_`+hkWoNXnfgM ztdKA$@tCqJaxybBD=#ZklUa(DR^HZT2lVw(FN{Ij=)K(upa6kmtX!88RUP@$c`b(rjN9o+JJ>{*f zhck@?C|ZiZW6KXH@AT+q$yhX&AF!bacJ9IcQ<$9^?l6**m*+4mtw=OL(+~CByC<~g zwGN(i0Jd*^^M;471DN@kI774wT?S)}w>b6fN=}QMy+p@APgP(6e$6jlI7-8gjuwFZ z8hrBmn*=Uu848wW$oK1a?#Q5nYEvTZ9UaWTWEJ0D-wsIKpIF!8)#0HiBeM@+-A~SK z)I3jm{rX*USoPPgT{|(bFHS+bvALNWU~Nx$&dw_<6-FLw1SHNHr0!!x-{mGx$24`T zs!m8q2rXrSU8FFq7q55_4_47ZO`1-Ge(BOW!SUB`i>_b4{`u?ILzZ(W{CF5Z<`w=7 zY?p7`;I;s)^t*HC&VdwN9_)ub?3ayH-#&90WD*h*Qc&%{%St}=`j!UjVU+?G7nem} zZB(_8L0o>Qd*sWSntHEq%w{8Ko6IOk-1R8clKd-HFm5>dgdMb_uXp|Z{;6P*Wu!ex zP0b&fR#I^rO==&P8SpS_dTZV@610SwH~8fKJKH&a|Nc!Mq9tnc;gPmH-aC zWmJ^v(xo(ID9)drxm2RcUrkJ8Q)kchn!Rv-_#=;>NNs##P!I#YAU?Hn=FcC=&F9~N zXH->HiJ+JG%#AUi&Ph&QSeCOq>_yk9!-sis55XV;0G}v*lUX$%O1zZUaE2&u`t@w9 znG_xddgU?CE>sVYmHN5xP)j?z2yoJJRARika&W@NQXe0~tfM^WeN1fQ_fu2v8ReK- zcNWSY&N1B(L!KAzV4LLO%=P1s9b8>?yuH1nDyMQhe)mV|$sRcfmK;Dsx72+2|qAmaqz|NVCS>zICe?eR+DMbTM>$jDl_P zf{+*JpU@8X%StW-WzEe`YDcbFR0gV}4R{K1arpx(>R}Vw{T#eKa!|WO$Ja=e8M^>j z^G{aJV8{B|6`sX99oV9{Apc93XaGaRV%mTnakUDLgXmA=f#c)jgt+prTzS{D*qzC1 zUFna7`Pu&NQq{BQp=b9bJvrCL3XIgAZ@X%4e*Pd}RG!h)-W;<_W{Qul-Mks(H9IMB zURqUEH7+W;LW13X*4zDfk8AL=&+NIJ;OG{oPcv-Tup##L?R)tBPFyojQ5L>sQDvY5 zV?!S{I@S*C4d60R6^|5FeHp*aIT!4pXd3HdvUNVktdv!q{Xl`;C((k^ZJmej?Oa|Y zJ08j-)jKyc-a8ITLatdsGf8^|x*1xo!(i)%_V#vj)MPh2xuq=pqpgKB+$$b~aXxzd zc~=O6D=jh|o0Dhw~X-R1e3Daeug5Po&s?hj_(Gi!l6F9VwQ;w|h1o1mf6 z)6-)&pW|Ja8{Z!{bKloeHI;lYaI8Y**Xq3d{1~YWaDv^K);n zAK;G_1dp^s88Po{zxMT+_f!M`)YJfJoirW1+jY<0`!G}L$Mui;B1F5z$};G4fwS!A zWe#3eNGz||mY=gxUS2}M4l6HilIiaS~9-q6r+tQeh-BTMPYRFZ z`t@qy04Lp3OsBGiv)?tR?iUtNzQ6*8tU&}S{AQ34!1Ue8Arp}EuI^exw(5z6Te82u zo#?weK!wg*dK+|?c*#R70hqD`z;AzYhU?ZA+fYzmiBE?QGQI7tsYz`&^R-{_^G*zT zA3%r4Go!Nr^w36 zHfJ5+U~=C|yI8OJQN*%;V2_ z4Ib!85(t4MscKflZIC?X(JSdMfRFJonW32F@N%IfCsF4@wW zcJLm%kWd8vqN1gR2Mq_$4h_jZYNTmuCUgBSq1SKT?6k3wh7ecdseOAG z5HHIb;x@_yPtY$}Qr^(O0t`*KNrR*Gh7Bh_V&&Z|%3Uuj%SM>fn?%(})PmZzt$S|D zRHHsrec!TWixc0_iwtai3sf|~Db+4_^{(3}^y1?>hUVr;4cY?2y|X{C%~Q4r|FWv0 z&t)Nm3T;diI+4mQ(E}gvY`^G-GHeBwj?EooY}zbm=vX>*wk>-(Dh{@6!2<1JLsWc6 z@Prp`V|S~*YGAnnrUca1?sD_+5R;MlnZ-x@B0FtdPgmEkq@*M^bJdC!t5sFal2f=; zZrqeLqxGF`n*l7ldgI1r@T%Z*OKI2tVrshPz0YHEDC)WO1L@|6hnPVsq%Z7x+K3*P z3k@jKIFA-+G`hnS0>g<@r%J}(R&;flzPPq%dkUq2Fru;EYNrkx8ynKR2itNg%gfcJ zOI=5VAObT1;^t*qR);znX?*+om1pK?Y+PLV+qZ0JX|)XvgcZo)!#S^?LP-Jss=$|s zo`VHm-Q!_pWffp51=LAITB0xF2{ql{%|*B_K$z~+3DlEHAm~$Z3BWQ^FLt@~p+9KDFIP0*rQ+e%N>v-{G@3Z}iPoyN7dQfDxn34ev 
zIid3WtKfGk=2As+MpBYUku3n;T>s%g*||LfuS?@Yb?-8@^}CM?l>8ZxvOas(60g~5 z7!dq%eE40U^AL!8mZ%F(9bMaB`qI!j%=|j3cWV>lty{OSgmWrtLLwt?#u+%zPTFGi z3ir3?iEY}vIXOK&1dYiO>!=VbqH_VT>OgNM_Y_2P8Y(_Mp5mT&=A=)q?RnMfy$`GQ zvdA(8pRv-e)*!IW5Yg}R@!KzQLZy?;J3WcL#IJOIJ#sGH`O^H12GQkr4`b|9h`18Zr z45P2#h9oDi9%xR#cOVnn8e23lHuG6wVbX=+kH*i=pNVoclkn{R_WgVPyk9m*^}|CM z;zCAE`MyRY@3(gE0Uz-1EaFW?c_%!*B7k`ex@?^7$S{u(=Q1 z*>(q;cYPe}9`qA2#|t3LLh)DA!$VSWg(~N1-M%%fpdKE(sL9h#XT0=dzHJy(4qlLQ zElW#Lr3*vBkby~4AYg=e1SkQ~XY}wI%3fR?@2fl4n#pKyZ|@lA2LUb#<+=U2t9?}Q zngG5%dWkB6Kq}e+OgxRKd!!tUe|!B`J-Bww59PSBwwAEG_9EwXB{Sbv5u%bF%fKoS z3{0@(OHE`AM58e%Tu@?}u>dNAImAx1E{^~Dr2*-Scmt@4yvmD@*24kv4GIcsP>`ce zT)VFw#@q9`+~2zC?h=S+OLcT~wtLUo0Crp3Uu&(<-wghi#_-tyS0~ic|=vl1*oP%`l+m})YJ>@czP}f=#;ej8f>uY z)eB-e5lPs1!hH*+AbVOTPONeN-A@B%ZGG+>5$KFdy>~#Cl(+r(#Byq4e4K9dtlQ(! ziS^BoPVONU3C*zrfH(xIf;l`a2S-PB;BXm}LSLd`;cC~i4xS%uT@E-6QH34vWx91d z4)VelyUt*UXRm%|D{k8crp-r`G)M(a`L@T%(uZ8D2Flu$eOx*3Cq$K=kr5&U-9>*? zbp0tXIrKq{FZJF%vChs;;1XJ>UQl3;n%wznD`}YKD{uAE{N4TC?ZqBj0f#RG29<;V zBX~#+W8U$J^<*LNG#BM{9q{&o@C#x60hWF?vGG9K2CDyE z3Nx;G8IX&jl2WMV%-Ritzm-r0rOiqNh`)h8@RvIPGJ!E`UbwJji%omd0p$RAEY(wo z4r*v<^u;`AYHVEp%j+Swg_p~ZcT2!M0&qn~^KC+~azzvr^n0|dt3!E=i`_S4M<)Y# zOs|I?^6lbdf^Sj9;M`fQXQ1^suA$SEkvK`? z)sP5=x{6n!Bjo9q}Y_51i7fQ)As-oYM@ThgJ_h4vp|e5 z7{$BiDe}}laNz2JVt1FnkaTw}tU_kU7QC<=>FMa`%F(I#w>jN+82-39&Mow4%!9P? zS8v|%KG>fqL;MJ`-OVj5R{tJo*;9CN{6Xgex(qwD41n5u@SOfKUpWXD7N|2Cz$=7Q zmw)(h1_jBF;9at@a#$y|n^> z=fnW8_7RXdKD_tu3R@yGqSPq4HkRO=<{zrdxNM7>Y`D#(%qxKzV2DAsA!y9 zP(Wn=0FZ!X=y(-z8&CI{-rQO|QzBgl!XL2vx;W$BTbnol1?a(Q%HO@yClBZ9)vF=~ zQ?C~#4cJBz(+f`?J>n!(xWIEiR0g?=zfL4OeXS!n<8x2XYNC_> zoQU6Qf2R>rWqEV6A<@EMLI&U}>*Eb}?>PpwO^;|sctTP!2|GLvPY-0|T-J@2WK>d8 z^1E{7N+Yna1-h`G$cv*&C$Y1zO|F}xS^%If+qZ9D#Xw8O*N;BOj`@wAP3Ho^`+EV& z$;spbOp4tP{WR&G+%XG`$h6^DJ~xp_uyvAQI}@#3($1*QqYqsUf==J7>rN9)W;J(( z&|v*UxaW&A`cLN;j!gFU=0ia|Lfen4CZa4+ywoACTDLrmW)~Kgg@1AQ+xKJf6;n2j zky~&6lGN>);hS*V5lA#9yk%ZkM1PkFgfl zWCBg{#<9r<_hY-}BKGdv7c;wpd+Nw5uoS?~fE620y_j(O8luIn(x}MRq;m>S7|2D_ z-Thg`)Y+I_Yp|?OL{&BzdfUQ8K!w#hdscGf%<+f~M;_6m7eg2@$jI1{e&YFsVGTS~ z$MJ7_i^8nyHol>`@ALz=M@R#ZN4mo=oq_s}j#b#Dy|zVZo{wOr|N2@VHy0IN9mXdn zBBGXcP1V<#N{=t{SyTa)3nE3AfQHEmn!9ahtgX2(sP6$M$e@t zX(VG-!h+^UbPoK4SKhfB92^|!`22rlIpXPdX=!QQKNU-HlD}Ue?u&obIvdD$=|vEz z#T-O=^!(mOwD{Y^W$NsNrJ?>{pYNEhh^0z`78hpOpgBEc^+!ges~kK3AV~9}pkz)x zs@oq%rmvB`2o?a3jwXIZC=7>OEMcF>3^8%vkz|MVw)XZ|MiCB9NNOFw8oXGrM~@yQn)dng=ckQSI6~;v)$HtKpw#(; zPL#vpgxd9xH7vN9o~um%7r+h-@FXaTbNcJ&tiPhJs6RH4BHtgHh#P2qwXyp}jXaL?yWz_5}y4(0=>&ZP)(&MC36PWa3Dt5%;|x z0|BDx@#+0gsWe5Shz9YHHKaKn?&PoWad07|=d?NWm~1=d81o>1g|@Zdq>;(?v9p&b#M5$j&RpC4~N<;o7d__Lq-YQT*uR=&7ISdqOtgf=mBaf?; zfW}4vukeen#>NJNXIc{aizgV8_X+knD=RAr-$+YKbC+m9kz7SA2+-cptj7==p@mT3 zJOoz2{3A^~;Nv<(o|uPd)@(y%heaao)B(4X@SDtI1^e{Q=G%Q@!gB)0TpAG(L78rF zs9NoqkD?}if}Z*4ry>T48Yd2sUQr!ZI?R-jHWxpkl>aBy>921>L7jv*Es z_$Y|vUg=}`EY#=EpM9|yEN6awg=M+^(ebTYw+bkoUsjf}>QVuoUS3`vN!38~Csbq( zI8$!(=4#vxX^4*9xOrN)mO*2}DlOri6EcqPt@>5b77!fVZ&fCwrXN~dydy^DcqoV1 zP7%C(*Por25LDomHI=^JC$Qa({lkY3Xoo?Ok!-L}{1DVgiQo|yjwF^Ywv+Vu=%}xu zB=J3~lz#sFsW5xd%}qo_rKJ6W>rne8yqI#Z{4;0HV08o)Zorbn>m}Ce1HCvuATy}9 zJ9}ce5L}u1c^raG*@q7+Vq#+cKIUcd2$dabr`x5w3hox5Q_q(#bjaXrI@ijAXL~K3 ziimK?uRV^V{_6c=Q1iqqGqn^gax4&z8AUQ39GslAXvO4x*xA{^()Ejr zi_;ip=P3Sp{98n6y?}$h@DR;?*GJy-?j*+pkdC!@HBNKt_YZ@i_B=WmvX*gpeiaoJ zb=%(nML{$Tjl`C@Vcgx)a_Hwww3HFIimAFf4P_2tqW{C=b%0zDNmvmY!wpIJkm8!+ zyTkYQK5XD1yaPNbkqsM`<4N`AzrMZo3mS!j^8%_AehEw#NJ6QN@>t@;+KqdJ7+6RL z)8YnckK%~DJb857(XThn>Z%R`kbT`}xH}-HaCC{u5MXt|(eV;OA0VaTF~x3U 
zw4e-d3J(?vii+NSS0$u&?Rs2X2=Srb0&zse#x4WWhs#mf-EC%KFS~Us-6*_z^2_wRC5M03%Q(<%^vGS{g7?GRq35=_M41T$jPaTH3`ON5dQ)9f|M$ z%ylFpHkRL}BY*FMtlz)S!!`)Zc?&rQ5Q+ZWxpPEVX>aF;Uj|@tIXapL@SR;ifQ14e zoKmjFga4f@k75HZzY{gA>N`jz0|lS*E_i4#B6-NzB&Vb-0gSZy_=FKCdMCtDc0s}A zAc;kNY!pb_C0}2PgxKn3ORG>5b1f>DIzE5=c%!e85&QF^A{YT&5K6WpZN_S3WCYw+ z2GRxo!84VPriYvdiFjq26mcSAQ3l&A^f>J|TXDTuU*g|F6CuHnUAuzyj)6II+{~fz z-D3NZqF}gIGn@GN`{VM6@gcHq9RobGGCZXNd6w0#p~xZiYhH((lBN{AKiW_-AS8ql zJZ>qvf1EAq46(Zf1O$?Da&qo2+f5Q@MBgiH6+U=m{ap60-MiKG^*IR`8r^2zC!iynAMXluM!cKL54HJ9OP|6mDmz^(b)rX!NF+)Jxab0a59pwr8GAN8sw$3(D)kg)784YR~d_2+Xxj^D9!YTsL-Y-Lm@$~Y_ zLvqjh%=}0el({f$!Y}Vq_5c`Fgs<3GHT#SAG&-K5Q{OUpae-hdl~r1KiLYRQ=3N{= zh0u?Pm>3;QtV{49Op4Lp8xoZH(CUK_mm%s1SiZ#ha90u^czI=&6-Z{v1r~HURTNt% zBsxC#^<75TZ|W>sA1wskOQ`VhHv00vfBj1Aeqt*|&3^y>y|TWZ`QpWkiWi3X2@F6E zwc*z0J(<_z<5?g{6AK6x0xwZIkX0}g%GM434wS1NbhV|+S0Vz3?3Pxta~%|41RUgu zurO%TT_Q-gl*JcRm=wFhLdD-B-KFh+{?Y0V^-p7m2-A8{yCYIl*KOaiBdqhn#K56V zXWr2x(8G287B)l$Rx^;#B}f948aq95a3qWfRck3qaN}KV=D7TC4RLgz58XAs0XEPJ z98F}EhS#r~3LUMWE?fNSdqv{0pmcesUatfN<*JX_Ku0MmDt?A5P89L&+qZ|>v>~|* z51E!i-iL4wy^27V6@KXMUyvWUoSrTLRvw5zOXw`#6K`3@{jBvQ;)0Tb9Us10GcfpT zKu-NEm>Vl8E9j@VS7E^L9r$FDUM0Prgm&b8RpR{`~m^U=>~Er6^55B)Xd7a@dEz9_C@Yu3=C^cd&ctbm{d5uP4xCg9`6 z>-qUxT64_UfF4NT66Lb`{_gAS+}!lgfq*A80Rn$|2JgBeq1^rAfy<97b>X#>)H~%P zkj;o4LE4d)R&7JF$DiL47lzttV8G-x;1(?`EKUlY27!j;P+f!NqMJD~Fd#WKhqulo zoRgG9QF2~?&Xe#iVv_JVg0w`-5wogt&#CGA@goGaBTm7`$0zPWLVP@mr!1GzsZ-*} zYGygu;1dJt_OgARH7Qz&m==EOF4{Oco^*?1eHiY6q=jIl;5GQeY%H_|Xm?wfD{6>W zg`WXI{r>$mAPW-bSrBrA7N`FJ8j?qEeuWQny=jh>jSUNwA6X;o9GtVd!$p9qYwXm0ZV8)Y- z!K-U)32{VsAjls9Q_5{({Mgg8JSDR~l}X4Kl5`moEgU>N6Z^gkVJRSSl>}fI91=p- z(ut3{`}gYrHBbK`&l2I!-2D7NTrlt(ky{~@AnRKG^5xR<@^Z=-YoOFXuPkG&Gbwlw zK}6QBU4j(OvSrJB17Bv8_So3k5(f-yo1{Y#AMV|P{<}5$HvQ;v)Z;Ixan&e}a`s)@ z4+V`kDSG|cgzmLYMOf8)J4Mh4bOIgZCEncG!3?BhedY|2_pCoX+x+bO55N9?YY4A( zYkjE3dpc~GcMwrhT1iZJQ1Ad^nRO&2B+$Hg=GTq_4ee+edt2Uf+5t7EqO6P}rW=y! 
zh5MnG;(-Jp5N<)z>u3;oL`FlLa`qc7D(E31uXO&_a0!qZg6)ZqM<1WM;x@72uSZ&z zF{R(LL6n;2ensHvQay30g5*ogNLTGV9K0U-Rmb`n-wSqS^FTCfKjuzNm0Y@l<*nnW@OMqhCcsf$A z(Iv);0G<^*|di*2$uaTb}H7N;8`23Yhh`r-#23Kk{n~D1%ymu z9q5?%n)!xd?%<1}Dut~p0lK_Gn-yppYHQ0NOK`BWi?CXu`S4+T`a^qR6p4um2tgBt4r_cWwAitz5P0OU058c7xbtU+~1Hp(Z0y_*!)8eNz*MtV+_abqJuh zti}&Shd5oQN`~uN$nSxMF%9ODl$?AC_@Q>l?CIG9P$nk51 z*FN|;@GFsC{bM?e$Cd;*4CtWf*n7Ei^2LkozkhyO1@%bQX%Te_<2$S2#-m69tFFSU zyo7LShznwGJ#fR}EFzrE0r939jt(*}cQ~z{UmO<){)cx&XKZY&CK|Q={65fv{^oQp z)QZD42`CCyWxh0{R#ClHtc%77I{3vGw;&C1$D6|uK zXcV54A5LrM&%<&+nJ0A^C@%=I1qw+MR;T3`iC3sEBs@a-g389G-BEI35>z_R?jehp z9)@M%Sc+|94jU4lkQ6ctSD+JHA(lD~WCkmv-Zd1mP#qgoM{Wd8q>n%2hR>n~#TwL^ z4hVz9lAv)}7XMeHN=x|S0748DHu`En)3S`&BeQ@0^x{Qg)<%(3T@)fQF|m8?`Txo= zdz>+wxPNZ1+JBv#l;i#rorL83Fo{%--h{d=2Q@k^K?5(WSD>=Z%C@_tSg#oJX$71q zK!1{a>ic+cdUQ3G#pvs;-RoUE!PF9g`$tZor62{i9dlpUUaL0-{X`kYVqqXtQVPPt ziK0Q5_8j}a29}GQkjE^1-?I2|d{PP}cZOb^3WteSK1C>X3iCYPVL^^^vj z%xRO~vL*1&9ibAWo#PbY(PnpsNT!dvxOpj>U~U29Y1ekY;pF0~UQEOs8kvr;vB@+( zWn*K~>j>^lf{8@EK;j3pFqbkiG8DHyIshFS{x3Zi1%l&TJd%cPk&neh@BZ>*{Esph zu9@U?R3)YPOf zTU}KZyyGI7p@EM(-5DXQDl^rIz)jEmaTAlId-wcdtR~}WQoe{B5b%U+wJgT<1uvF5 ze`h5b=(;X(85veGqk(=(Lm|Na61DZ@0Q7J-H#apr2cLzRbui?4>=LnSw~9kP-?)(r zv!d*Y>(;FkLv;o2A~3GBw6w6x6b43oLIMc|Tn-Er#aD??86JMQxVTvFIRPio@#KNW zfVEab7$fZfJ0B)PwP_y7kpjnI7~?WXHS&?ZfR7?Ob_i}f)li19tLl^k2zlmVw9Rws zlPei)B(WY)aZyRhfrK|Za3jbLs-seJAb$!?^VO5+J9i?$+Que|9C~QO!^6#OZI^>+ z!RCOTCk!kJwF7qJa_C7k6j-`emz8cJmQj*}q-JaviF(N{X zX*X%(TngBdyI|d#HNod(OFle~jgL3!edZ6Ov#^_9o!wBdDpgl9x`M=sO~1JO&lET5 zml1++m3Cd>_BJAR8Db`oZVcV-NT;)PNYq81z2DDUN_ibz+4|)0AO5;7Tgm1{U;PQ4 z>@$m#x+6*VySoScjA#fX`**w0nF?7j)71@qs40T##RWIze{WJ;e-8lyP{#43FkU?h zY5h=;)}k!*DP&uF#{uf@p&?6#1qG*V$4E4-(U4%>(3lKS;{csW>=ux%9d%XtDOj%{ z`1(B%VQo92#36FQ2v#3wHl4F)@7{o$H~D@~OpuLDvRx2tZlrTTgcIL;i)=c~`T;vu z9kc2{-X=m|n;#$yX+cObG9|nRiGiv%*;*wpDai!!(-&0?0r6nszBPNFA+9ZQ$cBPD zi!NZUmQZ>~4Y)GT;*kD()j%XCPM$1>z5-4-(y|XjX)pxnU|-<9*4n@P zbn5Nfw?Vj4gxVa2+U3cF5+3;9OyW8Wiy|w4nMOg>NHV5KW~&h2laS8`xTz>Fry*e@ z)LrD^HNF#p6NHi^L8@CnASI9OO~JO0S9WEErVZ>r?coI0UV*WbeG#`e=Q9*iIsdd@sD z7GFjS#h57VMD&6P;$-_eZX*K>F--u*uy8BfD!x$x^yxBG%1Q(kkQ{NAm1wC^9$cBO%El z{92Cjqr)dU{#}=20nmZuQ4S^o=~H6Moq~%P-P{=|v>X;D#)lZuZvr7v>*d_NeR~zs z_d33mxQk^kOaV|D=^yQEs~McoE*6E?&iRO)swZ7qF{u0sag;w zF)rx~>L{{h3ok_mnu)rEZGFXf5lI9Rs6%p%=j1QB#D0x>;3x7&3V(g02p2#E(lA~Sjdh{pIA6>Wpa)RcK-BS{B$s@Q(qgoz9d z86h+bq7hS#_!2z>!#(@%612{P)0@RaCC}+myCo(kCy@k{*oig>c7RsF00+bPg_}Ik zXSF+YYb~q=c-`>m{1NLAk&@bzZdF}Va}O3Ed``Z42&NbUbHu98i+KM0d4o4($Kc3F z+5Asr#1BfKgNutc@v4W~SKDEFDiGl-u>Ua3W|_mfgidi#@f$EKNwh(*a=36IFePOz zz&43G26VW+hu+D7f#T6o2k3h0lF_`|q)TCZpq-H*2}1E}fxU7uzd06vt~u=z2m}!| zq0v*mcoAgS5$c5+xGnZ@5Yi~c&Y2WmtuLa~nBQ3f_@aXq*@VD6nZ>|}Gn!`=n9XR7 zun$S*!{#IJXDORd=7HH2D^_4Cu^MSRM`%VQn+>J~I1s2;jNB=UfFe7DaioTHAdix< zL`$esL5p>tcpgCA9x8D&2e(*2$=_kX0MHYOqIcc!-V&Z-795}Kf4SRa%+-?7Sj>M^-~zKo+)2hbNRfe&=-}i;M%~Dd zP-;NeN1s?<0zg_nql=Em>p}fU$TTYzBI|`ndZMBy0=Y3l7K&wFR#T%RJw7%j;ly9~ zL>t~DNTMgsinw3(2YRK`Tio{D(J>|_94G^)O$Xvb59Tazckma8p_n=4t_NKmdSJA~ z_|y~wMMfh5sx+b6R(oyUycypv>Nf(d)zWr1{ASUunKR$cIbg~$ zt{sCCB2&+>9%(6(&jee?t9@modcOav-rEmAyu_(GlyR8M(P3Wv-7&Kc4SY@|oJ5S( z9LMZ=8T>H#;FkN3CqtVLS(Kd<>zVkm6w(Of<86RozD7I9zPjx1Pp60BM@&pe8D-Z# z85|zou+N8`o(5ydQkW~{iAX>Y0lu93IuezHz<_~*t1&T45$SU5z+?U!C9c947a}$U zieQ|BNf6#WE!hMT$Zp;#%o^>kc+@sX5;8ce06Jibo&c03qn}#QhSN$fR0OgCxPbs( zq2Ix+bA^rVn{N|Cwu1?W=9;6LjcV+mX^}XRoJ+ZWVcb&)lm8$ z2qODFwg{P72iyg@ZpZmQE@T9x$L)X2Ib@$4Ky0{bkx%@kb~KMT(fzbQ6iCD-s%izzNWbA=UJddN#;Z*43uPp+LeO@&X#bsJJL zPHc!8Q&LvOiWJ6Etbu|rQ&8N9=q<6LAk5IC@B5=SROC2gl$Yee@tJfKoQx|F)g+&c 
zbbJ)VX)B7&caQ_XlxLh0{Ao348>H?d;6Z{Lp7xv^Ub?Z=wfKexJ+xp=&pYw);Xu5^ zcgJ`ky4Jtgs2G|2KULswrvI-L_%WtEfDxHTpEBW`9zpbka9AAXa3~S@a!+-R3=6Bg zwsucrv4<<{GctFkV4ISfDu#h+PcQ693NMEw8(>}mc|qsVfiee=3@^hU<}#&*PSh>& z07*Iv3|-;a6;^VP3{Hk2s-M%IHrRk9t2K>z`>3l|eTBWJX<%S-6?A<3*b7wD%kvPK zlb29SNQ9o~JpVFv*>!P2c=Rl5GDfYJBEwEp3IuXUP#&F#Oj;9BZ__4jOdJs50awJ% z!?OhOK><~70a$hRJ9gqXUKHV`r!Fzof%spTgnj+paG9C(VKNAM6e6wP$FEh2n=tK$z^gR%vov z3duwOACgQ2(wig`LC*Uj`R2<30YGsFWoGDnfxP}kQX)Sg5Ak_uC>(|cres2Ewxj9# z8aaPjSeUg(e)SzB zud>JGL*%1_n?u?d)%alVDtMs80_Gm%mZ4Js!X=R<(vJQu3ULMwbAMZoFrM-<62Vxx z5?GiHph97SJa{bTP4-;^(Eo{9d?Myp98z=GjO6qXqRf_MT=XY*iD3ptK)0b!&-uW_ z-BJ~q17LQ;gou^1phXTh2)yG9fFo3Zm#B#HXWp&J#F!oOn3ONk-jSv24dAgSK`NBf zB*c`7%n7VJukPLhxP;t>_7j^AoB$vA#S9>tqDDEu*>1o4Igm^I$~(OPnYC)l5x@;~ zjfp%1L8aftU&KWJTwSBlqhWD>Q@SA&@G6NF+r2pqkeF>!L<_{;KRB4NUPC_+f+zSU z84!D@bc2(Jo4XAB1i)MUzyWq&qrth&cAeAyofpwZX~DF<4v*FC>T%W9)n!7?sS&BZ zfbXxa{mD(ZJD0XIG~213YjftAtl#o7)20U=dZ#VYqzpuwyk8iO?o!`fTH2yr^n=E5 za`GgsY&9Sl667X=Lrcp3$Q+HPLGXoexp-eT{NU8{|T zqUSW5hv$FF?-$-zw43TSLfA_|K|%M00gjQ8adLL9{vNd!#) z<|ILh*%{#bJ((F$fMnG4ApieP*B=xr!^{K^B9qe-ZDtVmQwALHlfiXbc+#F7!}Z9b zhmrw{n`YiLqi5UOVC?}1Xdx4M_0ApE&n;g2_v+~xIT3ZGZkyINE- z63%QANSf}mjN@8bmm9{}itKJZx>3yYX4dUyN56oICqHC#GR64r-Md_j!xwqnktF#E z0+FFkk|R}$hJmql5ab1yPR`0Yeu6UwJ}g$o7cw(~bH`xiVaR3KhOEV67bKbFz31G> zX)xGh*t#!|wdMVjP(Y&aOjCcLNbrJ*qmR?#1&-pxm$(xt2k;r>Ffx3cCQd|xNai*8 z>5Ohis#C`E{U6AIBtSOWo~6$V3(d>@mRNVaC<+x56TNoG&Yk*Ed2lit86B)VKuyB4 zV7?X&BbtL$8OeE>_AQVDK9GwAc_yJDxTH{GG=^H1m@c32xOg!cTO2NQ74aI-M+>@I zM*zMEox}pf>;+AT*4@T!Cr_WghYWq}7TcqFF8TJ|5%8YNFbhba8#0R_1-4;Vt^{Od zWogtb^5aOU+sxX0V&JtzUPPprLs*y%_@BsiND&=!vDVU3K&uu&be~w=JKTR??I39$ z)CMa+TFVcxW0O-;gFraKknR@28Fq*Usev4FndhQe%X>)Z8Bs9`5v?U6`-CygM(7tHJ zFhln6*Ak7|?ItEFN~BQeQNR0}=+L#L9vK-NEGjA@WCzLxE1DeP>_d(|0NUh$Ec=p~ zotZuu9KazJ1h6wb^%-cEAYf>y2&-rVfaQq+Fo?O|@As3@C5T%jfdyU*d_)Hi3P*FO z;e*KRL7XCS(43u}(UI=LuR|^>vCs=J4>LmnCfgC-CHZX%(@C6!Ym!-Ld;3T<#bm@( z06C4OC!iQG|7R-(Z*JF5fu8+m1&d7%B89gT)D2CSgO86P-SB9i!!PWKBqSFh?^q%Y zu#Azh9M7W4IUBJ6vM=Eaz-^+Ta0bmKM7Kn6JPuF_QQ5FjrrzYy5^oQ=syv)TKytv) ztpA>_1eO4gS`<$Vb8KZ~uo`r0dU`qulVVWHb2WyM{UMPDoZnK7Xv#qlEOOu;{wPdf z70Nk$Rzp4?IaVR~4S!E~BVIU^W3VXvv786eB|m`2UjrSKC=@s|<50XZFyR&4P2HL+ zFp(;d5e^&f!F2UY+`R6KN_LJY$s4z~`5Imzq7K>_u8GVoKv=6p6yn4H;8QZufts7O zPy@)=0O6qN$(8>awt6x8aJZNF5zwVUXZB|whk%d0tBK#j%gd|K0nQZw{f44YONm1g&HWF2B;%}5>^jwo6g zGlafvcwbBLfdsW7RQ)X;8HfJk3AcQUB%~s^SLFSI7Rw)lk^9rKiTnxjamP zl~{n<8sfKcLB*oTh#{)r{>+iSK6=2hv5UYiWaa`8jSFvx9K!@HAs5+9@*`y#LZ#6A zl5nlsFO`sr!YPEqS?9tzIiffJ4+3K>5+or7#Nrzn8EO3O%NK(_IfpZ0aBd(HPa_1n zD`W!06kqR{6*+15d;x-@C=%Bup;b^icnCn;=-#)DMhN2_+-KT`Cr*`- zA$6S7f+$_wFyPd+AKPvNcj9G}Lk@`uf(qz9c!Ch0?2 zH>X8SFIK2>nkL$r;{X<@CSg;*-yk@J6HQR1C=hRyqFzMd6o%bBJi^`!)2o+ZPK?Rk zenj+(2~LS3V+qj8H1I-t#!2xZVSJ~bo!cO#V+@H7oEe7^au?Wky;y;>ByaAP{?EwC z$LNV7W4?Z25!VU!{-`1P2bfyQ%2Wj$@gT`HLJlj#Dzn7Uz2y-Lzv#F)Mw|gf5F_$P z3d6Xk(8W?rY5~P2L1O@YCykY<=F8w<7zzQ&BO&`v;_!GE)fbZkFv%pcP%0-c^DtBH z3~WzPa=a(buftAZfTZ5vl5z7NHXb?-;e4PFoh4qYfX<$pgp)%Mpbq3>k~L221`%B7 zuiluP9y_MJEzKGl=w1;&w18Oq5%;k*^e6X1x&Da$1SUW z3fehUCXw{OO1gcNOa000XmqT z4KzdKPqbeSg3+O>cH%HiOduC^PHG^bK#t6l#~HDNl_CGiRQiTQ^f65fgPMgZ^qSp* zGlJ3@OYG0NB68AuzyqmCaI6ypWc(M6QyUEe=RF~w!ca6DB#UtXK(iWbt40jiOhtqR zr)`o$P~ZM}M(F>w!}{QV>9Bez%+Ah|g9)d9J|7{cYT-02ik!hhkJ^ZHjLPs&|Ehe~O;Hf;@+6;~WcT@w+lF5AF8`M^VAwkas{2*>F z>gLULu1{y+3J{^d2AdyFTdzq=MY01C*1kg_y7u!S^n+3o9>i41Wkl96kDQBBPssti z#FXi4msw;Cp%yZt455*dlYo1#t)j>z z7sjl~frr3OBUG!3M!DK&V%3DA!T8Nmqxoi83iW)Q{j1)}yfFy{$riu-z) zU>JyI@KsFTqBw zZ9(vlkDO0MPR(2L4##)tAS;y&VL`!_AFTC&!WFOjoAySl6(QmX3@y!c%Z1B{dwW5;W5$gdF{^xPql*}}js$iU8;jQDKzEo6l 
zPduHmE>a(8{+IgtP9SwH2-Vh!d5SaY4<|hhhytl>@DN6OtC``EYn{E!XfA=X27JNW zQQmOHnc(RCV$l$rfob#B9nkcr+o)yu2jj1zm~w&>(1QkB0a?KO%$ZlFYNDKE@co7A zV}wpIK!QDDD8R#!PABf0f+m;*mQaXB22M)`>%=cM^lF!jWit@ z=>zwKxrVbV8eVm2twCB|7#p3e7f9)c1X=%1yPmv1`e2ku0S{k(cuD?wHt@d-X8y-H zf+N9jD(B^^@{N z^bf6_m(&(Adxkqa7@dsY!V!H;axaC^N^-z19p$|^?@IbB>JQ1#jv{g^)zS(bg9HYF zK$pXj!@QaMpb92!$ZJ6^o*YAfm++$U2(n!qUPldYQKwFyCP$u7xvnForM?SD8f;GY2fQZ7R$@Ar4LF>EA`1x# zkLa`YaQo5WncB=ApIVL+sZ>)8|9>Nsk{prwb{os0fkCR5Is64Cj_Fe^<~gT@T6wNlZKsG5El!&XzM6LAZpn zbYmdz{rmSgjabIu6pkeR_vz8$>;Im=-q6BN7K^Tb-N+If=p#s-#ytWP%vgWx+6K%_ zBRPK1g$y=pjifXL`OeSJJ0f=yFhI`0g>x(&tw`oGk$L+&pNWyc!v9v?nTO?^xBdSc zMP)5p))J*I!erkwA5_pU?R@&-eL$zt6&(gB8plR4g=0JDMJo zRO#BsxYPEr&Th8^L#g$VuWScHpLz91Yw3WJ_YYj~Oc!}?J`q6WgdRUj0q>?;RCyREn?Do0cGw`8GfxaLuaRG&SlMPn_Xbj=GrsK z>mepiCpyo6cGm!bsaU}h#N#0WQ=2$tEU13rk63K%VmrVyB?Lr`YorOd9I8R!qJPQK zFR*|&&%Snl?O?|4P(!OJIn)*~ML*Q#*Q&URXCiLn$T;#cU3(!Gs!4TaN=0N*yMHsa zg3SW75{MO`$MPXOwjJ|W?(eX1N}P14w>X@TLwMh_J0y=()1NdW`L?a#rdYglhSK z(56GpXt2Um0% zSAk3k@?r{*Ba82ubAo{n$u=UIvZBtKL?ua+94gNyvS@JZ6upV(TP-6-Bj1ZRl-HZ9 z-fNZ`6@GM+7a6P)0upiu`>Z^844RgrL~zYaIIFvtl@G_Oh1B(Q{@YR)NqQ-67`!mq zwx@i0(5M6hGlkG}^yJA=7p6*NOKK3?DMS%70aCn^Hm@L)r^bD_pl-32AsGA?$nu6~ zwK-^LW3w1Xs@}A?LhTjH3xG#L$sVPL1l`r%JSNe$ja>NtZD(}_br&%rG)q;ke1Esj zEJesQMvYAEG=BPWvu2G*FeoKceTubCd*D)tLZKQ}>;k#A;)q>m+xxI5l^ij!aX-Ck zhUb<4&Sh!>SViC3`WXuIzT1&kwV)hGbtrODvG`&exg9(Vi{kRts{jBk zL~cwLI%;>ej79k1cQ5VU1A%v7Ry_|Mgp}Mo&N_Em8wWwI@adt>!9G866IH1=5*u*s<28^a9Xbves7J%A(sH0i0FRq&JOXVm%xJ0J60RBoFv6!Oy??(hH3qpv9r#;#Y%PhYgqpTo zBK@-W@8m>7LP46Jol(_F+q(IMTOI)44~b(?8NbMJ-ZY>SL4$Q>^w_4pBK#=)XrTec z)W`ympEo%|2xxssD$?DAXffB(R(T6n4D7$~31m|Gk)N8ld zzl~`dz~9yXu8kQT5|rtArS65vS>7wnqTdxL&E@AU-A4@ zzn!&C3l*Bt*vy>4&`gxV)U&sHgA3r^DkB06Gsf}uAcD6+oNUXWtb7$7?hsQWup6C& z9BO$Th1Q3fL`7@Y2B)G`t7n-{iR03^5n+{|v83Qz+r)MZAOZ=)f4A=vXhU7TR{3SOU@aFWQJhU_F9kW!E>HrVb z$w~1QhZXuVw%1zsnzdv01lCyu=N*OZN|FbWViy(_g(7}dq=X~}_m}39B7zNeos@;5 zGUHIDhi-Xr-XoP|C?B9Mi(x;r-lQ4x9;#BzH4J*21(d@i$tOy5rvw)NVkoi#iq47_a5yVQH?E0bLMr?vy zD*5#R-OS9kU2b2(6qwET5i8RtbSf z9`VMF8&$d5P_j{Tu<;yE&kYA|PT@9)>8_Htr*Mp;ql+JnBEI7WE=KTQup~txIhTgU z0gN4``_5VTd&rohKOe>*J;d4y^ny6@YT>yf-v#dX_~W!V0~_m2HVRNF>!@jEGyv7G z=Tu+qr)_kK2r1bCYg357s@OQX&&drVSlEdZ-TWK*1k$;aAq*Z7Y$eSO5zsyE5j3X8 zEi&xEjm4TlA@U|tRB5x#&Cx;noA|3;CZz|v1k}wqbJid%F)Fd6&K!7*YIEy(i{Lf2 z94;0}J0@$^DX`imHH)y|8JH(p=}5_yH&yZ&{g1{5k3gRIGq)aDhP-Xj`bX=n5`Nj$ z<(VqMG4ec8EqU$l^SRvb4a?y;JbY{}2f(D2F^!E}7o49xY*@m}E8`6V8!tIkG*Abn zQ>J|-`&649#kskV9|C@Pn@S@%a&m4z?f03r^VA{*=Zv)3H}EcVi^62V&fteJ>raInQ8>@L{76?-f5)u2z&WD-^n6HvhmsEHL@0PZS`0&7tL|R(Z^?YQMf=Cy`$EBX`%7w}T8dD!0G9SO z{E{noOpRSo%ZqwY7%B?0%pyYM-V}LlZlnXC7Ej$m3d+A2@LT;9KqI^5a5;H)wBw_` z#REW^0vr&f07YtA1~das_QxxM5B0c59)b6LE}a?l3est%wdP?|hYO1t@AlCC3_le| z9}oZ;x6~d}21O)k?E$}kFVPiegI z%B}BkRz_TOII$yj_wLzrA>L5$iJcTg3<0B&7M+~f)U6c&jod&At>!cY7as&tI@N3o8Q@H0@4AV0n_`oULq z2iUQPi+2s7PMJUdfU1!*m$qYr#Q&aW zSPcBoe4A~#_V9_B#(_d^Srq&F)vH7^V+mfB@#}QgmcQp)8YQi9daNgO9<^Wee-CW? zLnN&|+kW>1{9o|5t;ZSo{3Q??KFRF6CclKcO{^^DtG#q=1JGUgJEU}0>A)%C^1!wdHp zE|FnK00_V^s>RDyl=gg%mM)Anw`{=&`IZ{>Om=NlTq^56#3hr7-g0= zXy{Qlx7ZkDZDl1yQE>toSHh_X{Jqxt8w|80SAzO9?d$#@0?^_oue7JITc)F;BO@^o zOE^H>kpW6lZh3oqYflU5Y_MMn==jUQrWO5vK5Let5>zG&wrmVMIEdtblQCLgs`f;> zR(%4Z2&Jo9EQbVZKWv!Iu7Nbxt$vfJO>v58?CNdZhoc)RPSbvO>nZGfi;C7pMo(|m zmS=nzR$L{BkjtV-do0{vfq#1xN-P$fBH1!dd;?u zN;n%_j}$)AFD#v8bKESy-0}U4Z>$cF%H49&=|XntqM;9`Ljsys#)rS=*q)?!Ww2 zy2Ln0r1CpFj7P8nw! 
znd%$4_S1B|_u*K){m-7Z|6k6pVnVpOW4AxY8_F%*3njMvWmGcux@9Y+czZdL)nph` zc#r^ayc2J`KfL7^%HU~5H!t;e$jEl9TrjE!7hU=9F8;p= zM`bD<#D=gzZ7A0OoW(zC-|=d=318tkDnCQ^fz8)O*B$cN$?gHhIUk`nkvl+J#D`87 zGcsr$*@e}hV8g^0i6ol3tT{1W=9r=W%lmB)<1W-4C7P&608kJnN(CcaQfdr`i_oJi zU|EDg9)R?#gRV=uT)=D|`7v7mnAD)a&Ijo52}uXcpxH^liqeXTHMH~1UozRDfSSm! z2pyxq%+sQO>jk~0%b+N3NiPP)b(=gi;3v1r1mmGe)vx=#cDhhEcJz ze!fsuvqh)u&fG;MFMa&8FDYeH`uI)h5t7zA=Td`LeFw)Y+1En$ElWF`I%%lOyB^*F z`eiwMOFcy<|M?a%)}dTOs+(T(~qBDI1PbZ;_HQ5i;Z7x!6822@$$55 zhi;Yc%z9S6g_Z;uCT*oL7q%yk=Da4Qtq+w649I!s8(=mmoP$j1ekA--JLE%?$+w6y zxy0syb4Yr6U}kmBM!h35KE7vFz^jASX#c;0^x0)%N zzta7`5qaV`I^tPhiJqygz))7abl5qLBrXyyCkFibblV(c)ni!Z5m7nPo6`c!=7V70 zZWdUrz=HHk4KS|}rU$G1mnGfKnOi*@G_S0vnK-r4kN}s^0og%gLC z=yvGO6hK0JQDmXybAq^}O&)}Au26^YAPP$}g!t() zccy7<#*%1pYNo}$oIn9Y9Qf|d0)cSR$N05*SdhT`$T=w%OSBu}=?FcznA)1dFHGFV zaqcx0_7Qa&cfA%YJTX1iwC#PkZr9qw9M-{xM9`_V?{u-V22YkzPr~qo)-{|>aGp|K zYwrvwO@%KL$+0grB_}Yr_sT^P5!;x`@VBX0!trX(st> z6?bh&rsb1EMXurDO~(*XeAhEFu&xtZaS-6epbIJe ze{{|VT!QDcg0}iph!Ax8q~Ml8Erybd5zIZ>#yL$w=fctv=_esy1`CQm+l57c*w!SP zIaRxGKrJy7Q4>H$pmtLEGuK0~RoGcyv1;~j4Ni#SsS8(HN_tg4i^~kr4+?M@xhCP6_Z&LSew*UXXgw z=iy;2cB>V--%mhw?jVU{tuIf`eVzCn!Cxev2-lA9gHk9O5z#Kw9TT34$*lr^MjlA} z2Vk(C!a$7n0w2TRu+`DHz}iHIW^pN{th6*|@O4FOCP={)1r{VU5kq3{#l718hWH^0+5# z@qI6kwDGCs822w_lH_p5ewrEt%SZJcFtVbY`A?RVWHR_RxCpUZ)A6NJi)3HrL zd0`;=IEoKr=m`l2Lk1huKxvLkGb1-Y+m5fyG3mo1xf|Cw?DiCQ#Xcx#->llZg7fEo zMBOj_Pt8r_aL{Ik>(^n5BfSwQMA(|-ZHQgYIpy}*`{^9ml65q?Is@e!5g#$q;F*Wi zFCI?Auueq%GLK{s=}pIr-caL_Gl}^S9{3h{eh3jH?~YjRY&Ql{^Yr`es|s zzNhaQ7#Btx1$Os@Y2N;N16gW0ueMrkm333JUFYFH7U(KEcHW0 zJ2e&S277j%9xGDlM#tXo55C{tHTCm0IoYQV?6@=2e`LcI(^rna;$j@@7-E1I8$r0Qt&lK$Y(wVG?0IZv z&o!&^FnXd|wRd%mNz6)2)JGm&Tfy8$2cD?cf!XZ=#$7x-!dYHJQazm)o>qq?C#&WJ z2T#ZC)c!Syvyjk;%0qXZWBHrmOu-6JLWp`w4y(zhXDe4v&pJxs$&+7srU+GJ+of{X z$&(x6?@;_HJg=s+_P0W-775vSc)Zb`YhGw*LuliV9BF`Hq*Je5Q1EUYY_)p06v3Rf zv^I4W_SCjLdm74gkH7t`Gers$qgOy37&)a+{^idr#(laiD!rtD2JJ{y?7GpGP1-_P zu>XeBZEEb#ZdV8;g?V}5L{0T0D(8m)TWWPvV`KYPtMjLIxq0&@Zag-snt6I_BdrDu z_=cB+KTl5yWsAg<`mM+(7i+Jazj@Q{*8Memlr^W}UvD5Bpy&6Bij7R9TdAy_oYpDs zX-YtRQ$s!6{|aySRUUA_9m~A(cS}X%?d_lL%wYy5YwIH*7eI zrE}uWodg{Mq@ybKEL~CcBOdDm=r7Hu`$RG9xZRNaO^#k(CUg|keu&2g_smCQuRqv}SJJ+vMTR}*rM{DhEk{9S>I&ISPmoCZVZ!49Gm9ls5x=J>#w2>h%-KlO> zzYZLN32kbKI2$tkA&uxqoH)OdQmF0ZKg>A;m^zP7h8 zWqUv0bh}f6@w*Ktd}*Y7dBS(!<;f`*%&|ebaNz>?WJF-#Cz6=5QL%+PJ1v{FFNR4( ze(Z$wln0dyaTSR^)EfuYJ4c-JFK=Ddgl`!++OqY_ukS|Ne)AtP8GY{D$^!?B3l6+# z{Y}3ar@s}WnTeS}dLpf$2NS0ISBYGE(<$7f^erU-__<<|bta7te%#5hFoBQhcM!hY^G_9DS+}JQ!#TFS{OVp?y z%<0eR>3<9jP0h#~=XMs)oK0K4qlLvSG#v;(o94NWA8(l8W-#H})|%i>u})7b%f~4> zoY7})-dsaK(as}rad+?0b*U29k4vdCoeZSQVEJ67C75vuo7cY+W8r;3Rm`;eY z9~U4^1`rUpH`mwJ;0prE%O{qN^EVZimYQ)X4h7Vn%VtKw;4`Cg=fD>RuSh7bUL7yy z=bQsd2rOkVaG4@PKK0e)v=*u7tz(@;?2$J4aCOD~f`WoObUlPUg44}57pt)Tsh+Fe zy;~|@kAtVXNqsDkEHRRL5q%=zgs(jw%Per0i4&s;h@zKJq^<*uyhCO3;MZS|Q+B5l zTU?icK9I>8wswY@sIQ3Qz7!plL39CYON|7Gzw|-%(iGM^8W<~UYau`tU%9e^G_`5- z=7vw#>}RYDBR5L%<^+pqa^yZFAXfXZ_+m3wsGpyo7;WH)E0SG}M)T(9K->JKJ+iAU z)%lkn=bpVYlq@x@_t#f+0T&;h>vWac#k#2>h-3#W_Rr4jdH%)?n;)%hY!a9{zLv~z z%vQVakRgWi4KYI3+$}BbervkVkp#~a1EpE>=4(=L0&AIt z*{xnCH*{?mLZ({Bypl&H*;`M|_~A?i{JEZ((gn?)jk4H*J*6AM!0N6)G6W0OQ8x;z zxPlxSU*B`3g{M!iA&c`z5o~|s@+XmzoPO;4QBCr%UAi=avN;Y=F=*aA%gl*md1kw~ zTj!oU>29>TMAx>ec#z3+lTF+or!iwJu$DY-(xSx(B6O4d9Fv;omm3?s&Hj39zRC2c zPpZo0T>6Cn_gv0il^=D^H8A71{7td9bl1wCoGO}Kt5E)AhM(*Te)NOww~DR2*t_Le lrKP-hcl}Nw|K;1f(YtumysP4peN*9|kq%?kd+dEe{}Ypo&0YWi diff --git a/examples/running-llamas/artifacts/Llama-13b/prefill_latency_line_plot.png 
b/examples/running-llamas/artifacts/Llama-13b/prefill_latency_line_plot.png deleted file mode 100644 index 4d47c480129be7fb736e96f039b529aef4a78b56..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 59534 zcmd?R1yGjl|1bF1V4?^rB8U=_Qi`;qfP^3+9SS0-C{iLJpkmO{ASKcQ(jtvY2}lZv zq;x3Vxu5%efB&;{X3w5IvpYMpGduIXqY}^a-1l`|-|wfsx9e2}={-AXc9KY>Ju;Us zDU(Q>ok^rkJ!ISQl`{<;{rE%BN>am0#q5@q%?%49(v=%l4@}LhOm7<;e`sW3dE4wh zAJ;i9ZjR$7R#p!zg-)No_uqbi%gn;~^qKVRB)kaugUgzhB+|Yc#Q&QT#N%(1NJA?! zmoBJ23j5P(<8WnRqhzYJu&rI;>E-0(?;euNW~sS-e7{OTdSK{>hV_%5*p(WUj3i8;27?*iTRX*Vudt%{cWeEXg$ZN)Se98g zow&HT*iyfrK?=Rw!prI5hUl7b;h@FEMT0~|iJ{Cm<;1kKw34#2=icJ$!9RZdh=0#_ z{J3+j}`nYT1FHE)=iDb_`Z3O-)W>aq?Ku{}UfPp|dS)YZplKU)A> zx5e$-w+j}!>_6q@85kNqi;ZQv5_;CC`TezdKn=v+C&$%^j2ayiV&5cUnJOv9l9$TAMqh?=ZD()22;21&{Vy zTFw}>RnR-FKG90K)mQF$_~=pg>Quc#H?6hRfiY=sI-{-vTP&06(p_1*hr0O>2Rd!K zw;dN03^^(7HQ69dZ$923_;9>qsdR4J)~#s||Ni2Y*=%TNc)R&M6+Xglda*Fv%<0MB z7i%NM>+sH>6O^L;shv00CY(&Ne<^#Lk2J>aKFm!zdNfpIjF*>JBg zL{YnY9u+vSfB%K6S1JDf%5KsO3Jy+~(G=k2W&1mM&VHU_s5abiX?kGkgIP=Rk&Tt^ zjbO3ILd{9)rw4{-7ixtkuExqzk;E2zwrt;hxWuOWam6=37F%rX>T`MVGcm=HioKOS zCmETT8ZwRR4kT{a@;&G|PweQXP9cZI2Q+8yly>A=&dHLbJ!#5*B&zW6@qKM=%_-TM zrW`9vEAFUc9FmhGaQ5ukWX;SMRP1Wkjg1#fcg!9f@66xvIzAoubp{_|a;{aM&#bFp zv-Vki{yKfXMyJZ8)S{i|H8c)SPH4P1DsX=iYinp^^gKSE9CUst&|TC?zU05x3-Tdye&dKzqQ$rD-D} zn{kiaT){JUeq8_N%VgUttEgx^-jR!XgT-(wEEGi%I4f|!<2>0DKR-&dqNQ6!?QK7Q zPJZ4&f6%TY*Rb>B{Z83u+xKvvITMR_p%rsDr`VQ|aO%ZAdaslePAokC{f=OqpzE#X=cHGcKnxpUk% zzHDu5Z1m^7_;RliE~;v3K+IxbU9uxah7#AFsFlqoYByV%b9Wf$y0p3Znjm)F0`-vv z>>+M$?zbqZwk?|4+GC8F-rk$6Y;5Y9nAR6MgU& zW?o4GP|&F=(b(dS5bZB<>en(SQwOh9JzV(=CHp? zqUzGea{%`{8DCzGB5EN%r%9 zK9`jZgj9r$&Kz@bs@u1J|Nhz0=3R1fa(8|tL^%kY*7;O|-RbuH`8F>vFIz9pvu6Wk z{FobXlQ}p!(?5PJ|N8ZrX4yg` zJ6eT`qsRr!x?=1X#x*oFt`^!^57tF`Q%FqYWovQnI>Z%&^~WBc{&hFid~Mk}{Bd0f z-wtP7!{E|h6x0CiocsF^9GK>RIUmWWO^UoQzVGfpRX~7ITU(peL{~WKQ<`ZTZ|vR0 zXZyrTy1OH9e0{<5@Q=oqFJCk=Z|yOzi(oo+itOaclSNB|M_W?$>u~nl(r#=yA3^Qc zc;N8i%I~29N6wlae)#aAPM($P+*qsSNF$RMy||ZbZ13NkZf1Udf1>XNu&cAB?7Nnv zw(a`wFV7uj8>D9^x+C|WlzOnS{#Y$l?@EQ&F+uzJX-J4)HXEi z$0-u%c6b&Pr2hN#y5X(buy3#SpR@|R*Ou}8@yfhXiq;vDvWm*ck3>o(B_#(`mmlx1 zxkpQT^FCg-dKVqN^e3}!XJpLznO%$zg-M@eT#|Pb`l`^j#SEr6jas8A((rw6JPJB5x>Qbw;nJd{&YBab^GBPZ84JJQ6V{c*Rnv~>8m zdZn!{%GCwzG~x|$rj2UD1aBG}k99KTn>3vGapw%N#~2u%Fv$h>jEsB(k{~T&u@=$W z+6wJ+cy)SXgY=X?n&EbmE?l_4kB%6u_hcySwOjwt5DUq?r(}~kPL%n|{JnHE2c=vK zqZ_wwF=N|4r;vDLYfFA!KJ@Gr;}z^Rvh${$dDo1WaD!$z7N(~U0A<)3Jg~AlsHdk_ zHS=u8zMbc3MIK!>)|Qs8IN=?y60h(M4fT1T%#|zOGyk-k6;nB%b68BvFa-+ROmXAJ z4Iv|R$miJM8lN6S)lQ>2kzKraF?;~G=YTVD2?0h&jvVE(eUMR1L83uibvUR2PV70v}-R#a?d4b=|^o=Uu$sM_0- zqFt4Njqko%H14!I^^#PIaxjx@KbyVTB|Sa;ljB-m#vkC7%@hQXoPA_qU~mcOZQF60 zv!*&+%ZnGI{^4iuy+OOA zKx24buAX`8N_d^-hmMK0M3wkbbW)e7sHobp*~NuZjEt8^2Boj|3(d*D;g=!xf>>M) z++=`VVLQLHxKMDrDefJL@bhwU<67EHuke}NKv&!+Cm*Ea?h#A-UM++(QJ=uTKo&2Z<#jJIhEk#ctU3-Bc`ZzIpPu|Qv(nkTARKJ(xY+Nf z=x>A*`vvLbiwT`$ME%rI{h?s{SG_!*UO$z}wkc0FsI~Oigu$R~Sl*CuE|fJTFN} zy~uFDI&7vm#HDjJkZO+Z=+UF@O3_kOdBulZ|7>>Ltea;Qh1yME95e>BS)3OE;IMjD zfjd7=g%1hajF*;|zxbe=Ki%_r%YaWiqtj&1=SGp;g(AOdBHL*K+Mh2x61eJ!Qutgg zDb{IYU8us;*jO6(bcQxFHa1paEDSy1bnz-L`mt)@+T_=Ls)@?ac?`?@+w|`T#bsvl z-TRewW2_~mD(~k{4ab$g0wSWKb2RP02M6O;&yZbBQseA$n7K0lxBaD5^aZld`@~nz zPjnUTml&qi$EwL(zFav&V{2f zuCA`qva%`kL|&8n!wrp6vh?n_66geKX*NFnP{?wgeq00fFwR#rtusH>z!+z{(P#jtbxD@QCGl&RQM zdm3Zqbn+jjRA%g<;eSgkj-Vh9D8XoRQUz*jTvpn17ncWrf1N@1EdBa&UqCA>6Vvln zJ$q05gI#x!c!^S1N}NWP?#H4sy55dbnHd?67AF}1Fry`&?_63B^q>_yuw~1Z0bATH zz&=zr&bpti72?s$t6t}8?Qa+w0_h%ebI^64j_Lj^t)0cLns6Qf8$|vs*0^dK?|l`m 
diff --git a/examples/running-llamas/artifacts/Llama-13b/short_report.csv b/examples/running-llamas/artifacts/Llama-13b/short_report.csv
deleted file mode 100644
index 27e5a935..00000000
--- a/examples/running-llamas/artifacts/Llama-13b/short_report.csv
+++ /dev/null
@@ -1,31 +0,0 @@
-,Model,GPUs,Experiment Name,Per Process Batch Size,Sequence Length,Decode Latency (s),Prefill Latency (s),Decode Throughput (tokens/s),Prefill Throughput (samples/s),Generate Max Memory Allocated (MB),Generate Max Memory Reserved (MB),GPU Name,Num GPUs,Effective Batch Size,Group
-0,NousResearch/Llama-2-13b-hf,['NVIDIA A100-SXM4-80GB'],fp16,1,256,17.0,0.0543,30.1,18.4,27506,27946,1xA100,1,1,1xA100-fp16
-1,NousResearch/Llama-2-13b-hf,['NVIDIA A100-SXM4-80GB'],fp16,4,256,17.6,0.206,116.0,19.4,31284,52164,1xA100,1,4,1xA100-fp16
-2,NousResearch/Llama-2-13b-hf,['NVIDIA A100-SXM4-80GB'],fp16,32,256,41.1,1.52,398.0,21.1,66527,84414,1xA100,1,32,1xA100-fp16
-3,NousResearch/Llama-2-13b-hf,['NVIDIA A100-SXM4-80GB'],fp16,8,256,19.9,0.363,205.0,22.0,36318,84401,1xA100,1,8,1xA100-fp16
-4,NousResearch/Llama-2-13b-hf,['NVIDIA A100-SXM4-80GB'],fp16,16,256,27.5,0.707,297.0,22.6,46388,84422,1xA100,1,16,1xA100-fp16
-5,NousResearch/Llama-2-13b-hf,['NVIDIA A100-SXM4-80GB'],fp16,2,256,17.0,0.106,60.1,18.9,28762,31409,1xA100,1,2,1xA100-fp16
-6,NousResearch/Llama-2-13b-hf,['NVIDIA A100-SXM4-80GB'],fp16+bt,1,256,15.3,0.0508,33.4,19.7,27506,27936,1xA100,1,1,1xA100-fp16+bt
-7,NousResearch/Llama-2-13b-hf,['NVIDIA A100-SXM4-80GB'],fp16+bt,4,256,16.5,0.194,124.0,20.6,31284,52110,1xA100,1,4,1xA100-fp16+bt
-8,NousResearch/Llama-2-13b-hf,['NVIDIA A100-SXM4-80GB'],fp16+bt,32,256,41.2,1.43,397.0,22.4,66524,84422,1xA100,1,32,1xA100-fp16+bt
-9,NousResearch/Llama-2-13b-hf,['NVIDIA A100-SXM4-80GB'],fp16+bt,8,256,20.0,0.348,204.0,23.0,36318,84418,1xA100,1,8,1xA100-fp16+bt
-10,NousResearch/Llama-2-13b-hf,['NVIDIA A100-SXM4-80GB'],fp16+bt,16,256,27.4,0.678,298.0,23.6,46387,84408,1xA100,1,16,1xA100-fp16+bt
-11,NousResearch/Llama-2-13b-hf,['NVIDIA A100-SXM4-80GB'],fp16+bt,2,256,16.1,0.101,63.5,19.8,28762,31383,1xA100,1,2,1xA100-fp16+bt
-12,NousResearch/Llama-2-13b-hf,['NVIDIA A100-SXM4-80GB'],fp16+fa2,1,256,15.7,0.0518,32.5,19.3,27506,27936,1xA100,1,1,1xA100-fp16+fa2
-13,NousResearch/Llama-2-13b-hf,['NVIDIA A100-SXM4-80GB'],fp16+fa2,4,256,16.4,0.194,125.0,20.6,31284,52110,1xA100,1,4,1xA100-fp16+fa2
-14,NousResearch/Llama-2-13b-hf,['NVIDIA A100-SXM4-80GB'],fp16+fa2,32,256,41.2,1.44,397.0,22.2,66524,84418,1xA100,1,32,1xA100-fp16+fa2
-15,NousResearch/Llama-2-13b-hf,['NVIDIA A100-SXM4-80GB'],fp16+fa2,8,256,19.8,0.353,206.0,22.7,36318,84416,1xA100,1,8,1xA100-fp16+fa2
-16,NousResearch/Llama-2-13b-hf,['NVIDIA A100-SXM4-80GB'],fp16+fa2,16,256,27.3,0.697,299.0,23.0,46387,84412,1xA100,1,16,1xA100-fp16+fa2
-17,NousResearch/Llama-2-13b-hf,['NVIDIA A100-SXM4-80GB'],fp16+fa2,2,256,16.1,0.101,63.5,19.8,28762,31383,1xA100,1,2,1xA100-fp16+fa2
-18,TheBloke/LLaMa-13B-GPTQ,['NVIDIA A100-SXM4-80GB'],fp16+gptq+exllamav2,1,256,18.3,0.0772,27.9,13.0,9708,10208,1xA100,1,1,1xA100-fp16+gptq+exllamav2
-19,TheBloke/LLaMa-13B-GPTQ,['NVIDIA A100-SXM4-80GB'],fp16+gptq+exllamav2,4,256,18.6,0.208,110.0,19.2,13487,34418,1xA100,1,4,1xA100-fp16+gptq+exllamav2
-20,TheBloke/LLaMa-13B-GPTQ,['NVIDIA A100-SXM4-80GB'],fp16+gptq+exllamav2,32,256,57.6,1.49,284.0,21.5,48729,84418,1xA100,1,32,1xA100-fp16+gptq+exllamav2
-21,TheBloke/LLaMa-13B-GPTQ,['NVIDIA A100-SXM4-80GB'],fp16+gptq+exllamav2,8,256,19.9,0.382,205.0,20.9,18521,84370,1xA100,1,8,1xA100-fp16+gptq+exllamav2
-22,TheBloke/LLaMa-13B-GPTQ,['NVIDIA A100-SXM4-80GB'],fp16+gptq+exllamav2,16,256,31.9,0.749,256.0,21.4,28590,84420,1xA100,1,16,1xA100-fp16+gptq+exllamav2
-23,TheBloke/LLaMa-13B-GPTQ,['NVIDIA A100-SXM4-80GB'],fp16+gptq+exllamav2,2,256,18.5,0.122,55.2,16.4,10965,13669,1xA100,1,2,1xA100-fp16+gptq+exllamav2
-24,TheBloke/LLaMa-13B-GPTQ,['NVIDIA A100-SXM4-80GB'],fp16+gptq+exllamav1,1,256,19.1,0.0683,26.8,14.6,8796,9300,1xA100,1,1,1xA100-fp16+gptq+exllamav1
-25,TheBloke/LLaMa-13B-GPTQ,['NVIDIA A100-SXM4-80GB'],fp16+gptq+exllamav1,4,256,25.3,0.177,80.8,22.6,12574,33516,1xA100,1,4,1xA100-fp16+gptq+exllamav1
-26,TheBloke/LLaMa-13B-GPTQ,['NVIDIA A100-SXM4-80GB'],fp16+gptq+exllamav1,32,256,53.1,1.21,308.0,26.4,47817,84418,1xA100,1,32,1xA100-fp16+gptq+exllamav1
-27,TheBloke/LLaMa-13B-GPTQ,['NVIDIA A100-SXM4-80GB'],fp16+gptq+exllamav1,8,256,32.5,0.328,126.0,24.4,17608,84389,1xA100,1,8,1xA100-fp16+gptq+exllamav1
-28,TheBloke/LLaMa-13B-GPTQ,['NVIDIA A100-SXM4-80GB'],fp16+gptq+exllamav1,16,256,39.3,0.733,208.0,21.8,27678,84418,1xA100,1,16,1xA100-fp16+gptq+exllamav1
-29,TheBloke/LLaMa-13B-GPTQ,['NVIDIA A100-SXM4-80GB'],fp16+gptq+exllamav1,2,256,18.3,0.106,55.8,18.9,10052,12721,1xA100,1,2,1xA100-fp16+gptq+exllamav1
diff --git a/examples/running-llamas/artifacts/Llama-65b/decode_throughput_bar_plot.png b/examples/running-llamas/artifacts/Llama-65b/decode_throughput_bar_plot.png
deleted file mode 100644
index 2f0547b3305d49d1657086440b8aac4457675703..0000000000000000000000000000000000000000
Binary files a/examples/running-llamas/artifacts/Llama-65b/decode_throughput_bar_plot.png and /dev/null differ
z+>3GQaPw=*@G{Y}*2_Vj`}^UmLYd>XfpVe|=j-G8Is@c91%?QsZe diff --git a/examples/running-llamas/artifacts/Llama-65b/decode_throughput_line_plot.png b/examples/running-llamas/artifacts/Llama-65b/decode_throughput_line_plot.png deleted file mode 100644 index 912bb8f16a89ebb8dc13144551709f04899b4084..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 43750 zcmeFZcQn`k|38dMNg7l}Ny<#wg@}e^rVynvLiWnuiV~7+GD~EHBFQEzdnJ^Wk-f?0 zx;;PN?{!`0cdqmMo$EU1I_LW5dY|*I4zKYXkH`IfTlc5;jqBH_b~EfIAt9lXk(N{< zAt7@oAtCMCMUJ1GZSESxUn165Z(A#yKeV>fexygDplxkoVs33>_~5v$-XkkRb29;M zVQ${j#|^BlEv(M-@RXK5gQE(jFvLy3r3AfPGNIgK0lN02k%XlJe2VHU!rYA z&&hJ1Xi4gbrIiiK`Zk%%FTVN~#QIlV_}aK+)$7zdRJkUu93QN-i`jGk<>!0K>N-gO z>z_+<)EEEjXHx(FKc4UTUtWX~>#^0vaRWhy!#o=6J=4E`|Bm#R*jz5`oz7QEQ$Oo- z_>9p6Z~5KjvDAzv13@w8WnP8ps_^No5H7X;u`yXm$#Ub~pB)*q<;9JW($ld#{blWW zHpc4NMtT#xJ?hUCNN9O9UP#3^Ynqste2=*xYBToZPyTe#z-a6H%DuCNqg}aKrB(qg z30A`mm3z;2=UWZ$)pS`r#C|=B*OntWbXhOPo=-sF68#8!E4FO!{}-NeMi_aOFtY3kVz=Kt19`EuU8e3?wA^2Hm?Vpm~d zVV0F6@2rzm)6`#A3{Fl)4+;plZLT|St_+L0t_#;sE!}T2%Kn<0J3E=vOO`D6s&~l$ zML82SgWt1^;?47&=O=$+75OxwLd5O|`bbVV>jW+wEVze9>))iWqB4wVE5O z*41&I&3sU?+hhJ#s%m}T*N~7yxmLr8*Zhxl;q}=%YeQk0Nw-oD-^tW1ZE2y#Q5&_V z*W;o~%gevGv_UK3o2lP;9pAMW%WA%=u`-@foOCybJ3>Ieva<3S*^b@smVX@+cNQ+* z+RQJV4?A{;PLxFFh>+P)dl_sdtW_S;VP)R#Qo79s9(e!M;7z5Q%|yt`6^LHPB!s+bj@(A+soo&iIY+< zN*Wqoa;c?%bXl`i{c!tIcb*OD%-mdmXO{kLb@ih5<<0eZMQ)7(Y5!vvNDO{{pkJ9A zYGv|&{ya`CLqpK?SMbC7*Q2#r_%qQmgyZHD<}1%g_A@fpbqw+nI1&(F&#@#r^1$wvtvaPR22-4HEWwZ1annW;xP z+!$Nc&@dHZdBN%5i$sOUOugE}r&K?DZ+dg3`t?~Zej6IC!gAH2~|BR2<&ipB}w6S63<$Z-~GymJX z7^k>*Zn!ZcLo-uXrY4xP?6YC(GS-X6im*axP2Ax#>hDNw7Dk#`s%$4Z-k6x0hJ=NM z`S|$oU$)LTeDL5ql5B(Kw|1^m@z?x|7P>SvG*T{kkegdiJ%9c@=lMkU%+iwDRiA@= zH7>XMgdcd*u*&)mhW zaShw=VBF6ScY;#r-pfbK)mSAitk*{fTq89a|CyHTGnv&WuI&C=#Oh&Y6%W4YZ$q^4 z4e}&$msPsp=<_zRq$#$OzhoL_Q$mWwV<<)$j^S)vWwxZn@{40_^k_2|J znvbp0`2=otDf58K{ak{vA1a?PYmc*)}I9Cx){| z!S5nZp0%9nDJ0f5SyO@>t&3WDZDkm%t^U2Q7LQRe{|pXF{;i9k=hY%60BR zy<3}MEYHf9p85JxHZ(L?kG4EbN=gc=+?e@ut50H`N1|P4nR8*V0p{ zVWt)_3X$h-CdeL|n4GkZE0HZ-@ha0Or(Q;7FYQ*30RNZ@Hxcw%)?{b_3PJvq^j{$ zq+gPdka#rQP>(fARkMwZj41Ny>FF|NS!Uhm83YY3{+sCh^yLeic~8L$CW#nNPfwK! zVV70wnh>sQt%(ZFM%j{5Qblv;0s;ccD=QPn)*Iy}jY<3d)<>@6pnjy?4I|w~&T^L> zEe@q=Eo2xaiw;ePnS*0wkxXidKA2OP&2@eDS{<(aOZ4-Xf6Bbjz+|xVbSysLp5yZ~ zO@DuBFH%%yrqJ4r-b+$~qPnQ3-x!l{`RVQ`3AY%YqU9&3X4>ZFia~4&xASe&iZmo8 zw-fgk$NwY!ek686bC$j$wowU|3CaCdyKJMj+j-WUXmL8Iaky%&xmNlp2?-KmN z{EN?V!JuflpYmc@$HHi<7dDK5=`X?8Lgo$oB-StD`@DU9ohI#1oIIJVQF#7$j`@4H zt!CsnmXtc*>hdzLS(h-02Mp4@vxGF5(i>Ta5Y8l5^WrMY_-JBPiuVuXWGdoa4 zv^lIQtMz5eJiF*|Wzh1D!VT}wY^+6Li4e9tK>(-O{)*M~q2RCi z`PkgGv3S8Ropxtu=Y?4|4fMG*rkr4d(Ut^3gC>%lJ9n}v#?&RI*KLq7g&;~ z&>xNVz>y;5Gcx>0eh=1=0YT!!fRfg$~~yfK`g^Be0>M7 z_=t17n`1T$B*Bk`o}!v2JJpq|$Ov9z_VEG>O3<}mm1(NK-9xq$7& z?W0GJqQbv1>C7bWecUJ~E!|Xn7}d5GAY)O2MQPP?u>iFy;MJ>(YyT!S?=0Pl5p(z! 
z{q@U_IBnovm7;!1j5LG?UjzF)!b$Wq{P`B{Rs(yc6}Le$ISkHV<(PjYqHtlz>NIML7qL;?8))(_|9 zmt)7AR8sG(Xxg)4|o#STC{UjoWr2_8dQc+<38FdZcx8k~#`C1u#g6J|Tu; z)ItRCgCg(I6(mB)Gup-qVK1VpdFIfeYn2^1WItnUT|N9oevHCUYY%>V0roaBruq{_ z_hGh}m7D7^b35+OT`eswey?rp8-i*)Vk~kn8b}k&q(TT@i9+29bHgJjO!GTO2tVm0uu@r3!Rx=@28qqxpec3DpG{krFegO;I>+c8+G|TrNNeiq9~`fqIwZw`=u9HP zUU3(iVMH2)n8{4STw$Ti-xO-W%m|;5C}O+*12KBi_zRXO-LUW9-9SJt9az~f1{EVK zaik-3YV>u2_iZK3iPzLqtt#xRbDLUMpXsfu5~{%ZSNI#=W1EpF)dAHaLLXsrLtu*F zs>O004Q7Gy3LOc0kS#bvUc!+7B=S1Ey2kWOSquXs&~HpC@U0)chtYK~vX@62w4t$4 zSW~m_{86k4%+;K2+8RFuF>%xD*R{A}7;@#l?MEQ32HG%!an^O12o_0TD4-ybLQ%(T zm6sb7R>SbrfxwA8 zo8QYFHxd%UU{^*!u5i%$dzGp3X-60mE(A{eZa%se4v%(+uWznPNJ%|i*RZs(xQ}ML zfss)ICao5r-vJ;*7^?#6!@2Bx8Z|CAXIH>r79s)N0V znAD^~)L)q^nBw&kaaSLZ6=vSCpsz{{g{?bL)Ul$Jb(7j`4)dUdfFU)&8Dd-P?gxKL zkzig19dZNa31XRTZEbDEwU9xsC?Gc`W>o8DAKD>J1*#}7(iUboyi0;S&~VU1%cVMh zN*#u>I2vL3)Mn#-;6OqEc&7j0%#6Z#9t?^eeo{Wgy8gSfI`X9>i>{gpWp=qA^SCtC z!NZ3o(I*f`9XhFOjKQ723Oj75Sci;+sd%I5crO9iAM53I7=WW32x~*FOCJ1A3+D{Qv*} diff --git a/examples/running-llamas/artifacts/Llama-65b/full_report.csv b/examples/running-llamas/artifacts/Llama-65b/full_report.csv deleted file mode 100644 index 7ebf49b1..00000000 --- a/examples/running-llamas/artifacts/Llama-65b/full_report.csv +++ /dev/null @@ -1,9 +0,0 @@ -,launcher.name,launcher._target_,launcher.start_method,backend.name,backend.version,backend._target_,backend.seed,backend.inter_op_num_threads,backend.intra_op_num_threads,backend.continuous_isolation,backend.isolation_check_interval,backend.delete_cache,backend.no_weights,backend.device_map,backend.torch_dtype,backend.eval_mode,backend.disable_grad,backend.amp_autocast,backend.amp_dtype,backend.torch_compile,backend.to_bettertransformer,backend.use_flash_attention_2,backend.quantization_scheme,backend.quantization_config.exllama_config.version,backend.data_parallel,backend.deepspeed_inference,backend.peft_strategy,benchmark.name,benchmark._target_,benchmark.duration,benchmark.warmup_runs,benchmark.memory,benchmark.energy,benchmark.input_shapes.batch_size,benchmark.input_shapes.sequence_length,benchmark.input_shapes.num_choices,benchmark.input_shapes.feature_size,benchmark.input_shapes.nb_max_frames,benchmark.input_shapes.audio_sequence_length,benchmark.new_tokens,benchmark.can_diffuse,benchmark.can_generate,benchmark.generate_kwargs.num_return_sequences,benchmark.generate_kwargs.max_new_tokens,benchmark.generate_kwargs.min_new_tokens,benchmark.generate_kwargs.do_sample,benchmark.generate_kwargs.use_cache,benchmark.generate_kwargs.pad_token_id,benchmark.generate_kwargs.num_beams,experiment_name,model,task,device,hub_kwargs.revision,hub_kwargs.cache_dir,hub_kwargs.force_download,hub_kwargs.local_files_only,environment.optimum_version,environment.optimum_commit,environment.transformers_version,environment.transformers_commit,environment.accelerate_version,environment.accelerate_commit,environment.diffusers_version,environment.diffusers_commit,environment.python_version,environment.system,environment.cpu,environment.cpu_count,environment.cpu_ram_mb,environment.gpus,forward.latency(s),forward.throughput(samples/s),forward.peak_memory(MB),forward.max_memory_used(MB),forward.max_memory_allocated(MB),forward.max_memory_reserved(MB),generate.latency(s),generate.throughput(tokens/s),decode.latency(s),decode.throughput(tokens/s),generate.peak_memory(MB),generate.max_memory_used(MB),generate.max_memory_allocated(MB),generate.max_memory_reserved(MB) 
-0,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,spawn,pytorch,2.1.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,42,,,True,1.0,False,False,,float16,True,True,False,,False,False,False,gptq,2,False,False,,inference,optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark,10,10,True,False,1,256,1,80,3000,16000,512,False,True,1,512,512,False,True,0,1,fp16+gptq+exllamav2,TheBloke/LLaMa-65B-GPTQ,text-generation,cuda,main,,False,False,1.14.1,,4.35.2,,0.24.1,,,,3.10.12,Linux, AMD EPYC 7742 64-Core Processor,128,540671,['NVIDIA A100-SXM4-80GB'],0.348,2.87,37657,37657,36106,36182,36.3,14.1,36.0,14.2,42020,42020,39404,40544 -1,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,spawn,pytorch,2.1.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,42,,,True,1.0,False,False,,float16,True,True,False,,False,False,False,gptq,2,False,False,,inference,optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark,10,10,True,False,4,256,1,80,3000,16000,512,False,True,1,512,512,False,True,0,1,fp16+gptq+exllamav2,TheBloke/LLaMa-65B-GPTQ,text-generation,cuda,main,,False,False,1.14.1,,4.35.2,,0.24.1,,,,3.10.12,Linux, AMD EPYC 7742 64-Core Processor,128,540671,['NVIDIA A100-SXM4-80GB'],0.957,4.18,40956,40956,38279,39480,41.4,49.5,40.4,50.6,60776,60776,51488,84401 -2,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,spawn,pytorch,2.1.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,42,,,True,1.0,False,False,,float16,True,True,False,,False,False,False,gptq,2,False,False,,inference,optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark,10,10,True,False,8,256,1,80,3000,16000,512,False,True,1,512,512,False,True,0,1,fp16+gptq+exllamav2,TheBloke/LLaMa-65B-GPTQ,text-generation,cuda,main,,False,False,1.14.1,,4.35.2,,0.24.1,,,,3.10.12,Linux, AMD EPYC 7742 64-Core Processor,128,540671,['NVIDIA A100-SXM4-80GB'],1.77,4.52,44979,44979,41176,43503,67.0,61.1,65.2,62.7,84835,84835,67597,84408 -3,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,spawn,pytorch,2.1.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,42,,,True,1.0,False,False,,float16,True,True,False,,False,False,False,gptq,2,False,False,,inference,optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark,10,10,True,False,2,256,1,80,3000,16000,512,False,True,1,512,512,False,True,0,1,fp16+gptq+exllamav2,TheBloke/LLaMa-65B-GPTQ,text-generation,cuda,main,,False,False,1.14.1,,4.35.2,,0.24.1,,,,3.10.12,Linux, AMD EPYC 7742 64-Core Processor,128,540671,['NVIDIA A100-SXM4-80GB'],0.56,3.57,38916,38916,36830,37440,39.3,26.1,38.7,26.4,67076,67076,43434,65601 -4,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,spawn,pytorch,2.1.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,42,,,True,1.0,False,False,,float16,True,True,False,,False,False,False,gptq,1,False,False,,inference,optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark,10,10,True,False,1,256,1,80,3000,16000,512,False,True,1,512,512,False,True,0,1,fp16+gptq+exllamav1,TheBloke/LLaMa-65B-GPTQ,text-generation,cuda,main,,False,False,1.14.1,,4.35.2,,0.24.1,,,,3.10.12,Linux, AMD EPYC 7742 64-Core Processor,128,540671,['NVIDIA A100-SXM4-80GB'],0.355,2.82,36315,36315,34753,34819,49.1,10.4,48.7,10.5,40694,40694,38052,39197 
-5,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,spawn,pytorch,2.1.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,42,,,True,1.0,False,False,,float16,True,True,False,,False,False,False,gptq,1,False,False,,inference,optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark,10,10,True,False,4,256,1,80,3000,16000,512,False,True,1,512,512,False,True,0,1,fp16+gptq+exllamav1,TheBloke/LLaMa-65B-GPTQ,text-generation,cuda,main,,False,False,1.14.1,,4.35.2,,0.24.1,,,,3.10.12,Linux, AMD EPYC 7742 64-Core Processor,128,540671,['NVIDIA A100-SXM4-80GB'],0.967,4.14,39623,39623,36926,38126,136.0,15.1,135.0,15.1,51725,51725,50136,84397
-6,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,spawn,pytorch,2.1.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,42,,,True,1.0,False,False,,float16,True,True,False,,False,False,False,gptq,1,False,False,,inference,optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark,10,10,True,False,8,256,1,80,3000,16000,512,False,True,1,512,512,False,True,0,1,fp16+gptq+exllamav1,TheBloke/LLaMa-65B-GPTQ,text-generation,cuda,main,,False,False,1.14.1,,4.35.2,,0.24.1,,,,3.10.12,Linux, AMD EPYC 7742 64-Core Processor,128,540671,['NVIDIA A100-SXM4-80GB'],1.78,4.49,43645,43645,39823,42148,139.0,29.5,137.0,29.8,83501,83501,66244,84391
-7,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,spawn,pytorch,2.1.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,42,,,True,1.0,False,False,,float16,True,True,False,,False,False,False,gptq,1,False,False,,inference,optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark,10,10,True,False,2,256,1,80,3000,16000,512,False,True,1,512,512,False,True,0,1,fp16+gptq+exllamav1,TheBloke/LLaMa-65B-GPTQ,text-generation,cuda,main,,False,False,1.14.1,,4.35.2,,0.24.1,,,,3.10.12,Linux, AMD EPYC 7742 64-Core Processor,128,540671,['NVIDIA A100-SXM4-80GB'],0.574,3.48,37582,37582,35477,36085,77.0,13.3,76.4,13.4,65743,65743,42082,64246
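The eight deleted rows above are the raw results of the Llama-65b sweep (fp16+gptq with exllamav1 and exllamav2 kernels at batch sizes 1, 2, 4 and 8 on an A100-SXM4-80GB). As a minimal sketch, assuming the CSV has been saved locally as full_report.csv and that pandas is available, the columns the report revolves around can be pulled out like this; the column names are copied verbatim from the header row above, everything else is an assumption:

```python
# Minimal sketch (not part of the original example): load the deleted
# full_report.csv locally and extract the columns the Llama-65b report
# is built from. Column names are copied from the CSV header above;
# the local file path is an assumption.
import pandas as pd

report = pd.read_csv("full_report.csv", index_col=0)

summary = report[
    [
        "experiment_name",                    # fp16+gptq+exllamav1 / exllamav2
        "benchmark.input_shapes.batch_size",  # 1, 2, 4, 8
        "forward.latency(s)",
        "generate.throughput(tokens/s)",
        "decode.throughput(tokens/s)",
        "generate.max_memory_allocated(MB)",
    ]
].sort_values(["experiment_name", "benchmark.input_shapes.batch_size"])

print(summary.to_string(index=False))
```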
diff --git a/examples/running-llamas/artifacts/Llama-65b/generate_max_memory_allocated_bar_plot.png b/examples/running-llamas/artifacts/Llama-65b/generate_max_memory_allocated_bar_plot.png
deleted file mode 100644
index 8258f208dfab6058450e438c07bffea233eaadd5..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 39213
[base85-encoded PNG data omitted: bar plot of max memory allocated during generate for Llama-65b]
z>ch*4db>z+hfxTQjkSWnIM}n!v-MFI4SqWr-7Ed9TS5!#a|?&|+;uZ0aG9@jOr%%H6JiRp_$IR37A|9CM zUmw(zi3Wj1MUK2(!KY7~;?tkZWZ0(G!x!3dCAwZ8KQy=5b+p*ddBMaJ#m_cHjhxXY z;r3f2(n$W=>KvgauSU0j`)Wk)gT(7%XgIUyuCa~Q4aYq7($(0Zj@s`ZlY7M=y0@+^ z9qg%mE}0ZF^Xb!f*RNmy)t%vu-Gg-^gJNRzI(F_1EEo%6CePcJ2H9lHm^3@-Xp^#lqSe@eKztbI_q{hYr7A^*?lxd0$5YJ!IX>b-7dC{P*n(gE%qX zj?M5GVCBV-!c+2tZUEF|n0H+SI1bbOD<%JsmIrZAddo@=(L|LLn%?m{==IS(Ej?XF zPtS5f&=k64QhK+MHECeoO0QmxcHYIma*HliK3jmJeV2S-mtMUx=syu1`}3;1$Fzw& zI^@H{#w5cYpj7H|q_(D}<~%l&#pkWaM0w%r{Valoky+JES2xY^ zCc9LLSJetFgXO8K{STkX{idC_Q{My!%D{cX#$LXBS=TFi8y587bLajFcgAbH(a$eE z!mHBhip_U#(x%zmxOua(uUAk?2!d8)^EJ2ED(ioqGHqHAE;gt4zi;?f_D3rJ^O)kE zji;{Nw~aeMbLY$f^MKP(4yWj#UZ>7Y;MLW%hjXD3&z}3WU%Pfj91CHUckkM@>(`yg z(2glSd4ZO*oo4gqj&Lv)_mWmVDNMd&T&mgo{`i?Q4>Gp5rK?fb9 z%QLs{+Ewu89)gXmkc$-f0Y4^%e`5{)yn~MbDDk+UAZTz&>=fp#^p?L0?$DK~8-C%3 z*w#veOX}amkGHbgk1HxK{Yi1LQ{{ydC&s}6r0|A%-a6}-TT)xr*)-wC`ki+*`8p%~8N*V!n<+3Rxz$OJ`_#6)R&D2`RDZJ zvsE&j;ROUA63SpUdGcYl=ji*LjDbjI=H~u5t3y|!SyOdMiv zYP$X4vPboJH*`>B0Mc7Ka2)y2rP=O5Oi~AL+*se-%q%i^4jOkWLyvQ`mj&r@E)Vp^ zAJ8-3E|KyE0L>W47B{eC{^Q3_QK31aC)v$wuAMId<~YZ?62YA82>&__;K8}zhqAJ) z+}+)ow=-tz+kWiB2M^=qfT|Dne3Ao8X}e6=cEz2Wq+jOC7m8vW=XFU=&NGAWx=)vJ zi9)*b<&QRW-noDOe68p(&w{}@+nCHYEPwnh`BX+ z+9RRGnU$5x^9l;=VcO^T_IbjO^_~!v;=!o~E_u9wcJ%8Ra~mo#%}=V;$|YlWD#YMo zrv~-voj*KC_ARzW4x)~UvWZT4MEY5?TZ!DfYxG=4;eyKOB zA&7Y?OZ(IEbMR5ceuJMo{I-BgA=l{6lPqqU87(wQ3EOA{)Su6qHJlo0vHciiS2srJ zkSS7N4^Q;-LdSC~o4YAARHyQNPSwYR;<|RTXP@w@Q>#{&E?ve@Wu5(y3)2{u+qr93 z5Ky6r+5Ggdh`3PnUy@1|IX|B^eR^=f$5%m?=NAX!(&}NX)3fJP2$M~ADcG848>qQj z1N&VtG+BCdbU#{DRFujYrTO{!QBR##`i@un&F#Up8HE3!fKq=BcJHu z%!xt^Y5x9AVM@W?C8I}c=s1^|0_K$|?J4JbvyLtr}d4G1xkt7cg^w+43Pv zaL{F))oQd(vv+3G7A=A?>asI+>(_4zH(?sO)IK%R@=S+R+}-l3ETGB?j`7re7$%A! z__|$CO5xdwPD5K6WsFd2-Fd#Ykya>evpZ+E_U(H*jdrMyphsT28PDFncya!u(|th0 z>#QY7^I&T$3WijLK>1uq`;ApGa@45A%=b?|W(_(p!J4PL{S~j>Xgc`B#Kh#rUhjq* zC&Ol0ruMq=tRK6n4kR(BYi``U`C`cOlh+EOZ}iZq8r~91o^;Ki#)`ksB)JmC@+|w}j9#&&tEdU> zjQ0-liZgtdTa&Cw8>V zG$iCMoN6SFOdR`#6TK>S0Fys%et0@G)P(<1@n}=8S75>7rI+3xXxg-C2V=?rVRgaT zD@qIq$0;H^SxQspN6P)nw{N3e^i?>okR0-KPNO=^skKij69U$5Mz>C7 z@O*irbH()`9}fdqE>Z+7NslrwNn811lZ~_y9IH6V%`GbGHVH53%o$R!q@ABfk<;Te z-I;&XJtRErn@eyWF{)@HoC5e&-f>#l@LLcD8XY9OE%+y9FZzo8rKx zdk||=aAxO~lq}$&&Rd|;@t8@&t{*DzwDrj@QsiK3`X(8^KM@iZb|wLJ;TCJjK(nX%R(+{=nhgu(2QZ&f7Pp5$c|3V!; zp0=D>u3Npl%E$IYUR!cqYw2!e(Xxvro=G3xL|@Vc@h0s4b-*uHl~1*;U$@we>2)8& zZ{N0TIWKMZ&YeGz7?R~k+0tIuCn2loPCXjYq=)Q#|Iga^GB2wW6}?<`4O`0ZRE$SX L7;$o#?W+F;OfX2* diff --git a/examples/running-llamas/artifacts/Llama-65b/generate_max_memory_allocated_line_plot.png b/examples/running-llamas/artifacts/Llama-65b/generate_max_memory_allocated_line_plot.png deleted file mode 100644 index dccd818875b37479cd942bc9ce9820ad69454a55..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 47608 zcmd?Rc{G-7`#*Xol~9yS4Wv-!Ia48}B10i#h%$@HJP%PyrOafAj7bubSy7UCE_23E zA!O$Fxq9E<{%@~6tnVLtt-aR!Jnv(=@9VzK^E{5vbX@l?sGi+HxtEeeBJEH-cUql9 zBJ&}UHVsg0#dpM-dxr6Uk`4+w4jOi54$j86Oi3!n4mWJ<9BeF24mz3MvbVIe72%iQ z7vw!?;oxw?UP?gV`v36@{C2m@1;ldlGw~wZZ=BP$Cy{83iGMbw%cfb9NQI_~r%!6S z#QyAczIJhJf@(90nKzKyCu=zVi z%X{uLwA(2BY;+f7%4J_&F1+zRh%)B*RA*3RpMAW2gZ=Ll3$at4sqz$TkJ%MN@IR#; zw;AI${rgY$jB7Qd3&c0sGs2#1diL)Z)VU6CmizbPWc4#!eg6GaG35a@iQ_+i&df|w z`uELnVPR6(zdzwNy+%6v@28qyKL7vy1?fZ8WY_it$S3C4wYG+C+Pt+SiaOk9ZDry4 zg`;1~y*GjyzIdxQh}mzfubG@sJ$;&-GQiJqvV(r|ro*>)HxKXJxidLAd5>$iS$)jK zQnz`N&*5GUo@w(VBeM7md9jhNQj`g_}Rn@f+kggV_vIAX6EKj zCthfJafv5xvV85Y{vOILxE}9~wE}*P24n{;JvOWCr9Gv=hxBExsYR`;5PT; zeM6kIkxu-rqv1-aY6%UWpB&D4^XAI}^PaiRm%6zowHghc0%MJJb=0h^tTL<9rv`_H 
zf_}Ygyseb-;oG-pf%4VeO#*UW@v_3Vyv`lnLuu9+pCzJ_Ame%A=pG$i-4nAjf7CNH zd5y|qyYj8C+S)#P{`@eXUf#gmc&pTG)WXzQf_F!kQLb5iUdPj@sDY7@k1JE<*|X7W z$t9i}YYTa{y?c)wIr8A)!<#p6CaOF=_EInZ{(}e9S#z#a3bwYkd6unb>;+uAroMD! z>2If`{LtLIOGHEji}14W?ZY|ACt3OhfBtscUA=bgG%hOrQYORJty?b_Iozv%DxT}H zbn8Q9<;?W-(UyI4Z@S7nrO!w5^Fdet9{P??4xycbzckY zSeu)h-xU>EWl`XY0 z*5P;0l7jh2fPR^Wq?VRex+b@6Z_&qsey{VbsVb6Qs}lLvo%}6jEiElkLRYuS~+g+}xy& zsK#DtPEyz}al>b*I+$@XC*kAFP<5gAhRm<-0`J5nK@pLmv;?ogP&Jumz4(}zFRb3H zzR}UTIs2WPo}DD$wyibWNcPMFrju4y+(*>n**Q5mjk*j=-I`w+NN1U@udgq)gbNtO zH_C48_o=H>=D+-|;yyjk*L-V6MghaB4<82aEx9dDvl$dQ7@vw41Ek73JBF~+UmBL`Sr=o4iMR8o^}l;J*XEnBUY^DKx7WMlCyIrQKaex<>d0m3 zxrJy-s2m;2dWFjY9UVws$p%J7bVZ1hwXY^ zzk4J%1qRD3r+=;vdiapuCrZZC?MVS2uiEnGFJE>aJ9aDy)k>j@M5D<>TL#X- zsad^*g)!Y%IVJc)tWi`hR?1Py+VjSZ*Mi56F&vO~+Jj4N^eE}^S{h;86o>;f8D8x= zJ9^?~!j@9+^AWted=@9O%jdpyZ)5d1=;P7YykGm(}u_*2iV#SAl|))7y@stJROTW0TKRaSx!TgtWD3cDyn;|Lu+GY?ipNut{6` z#dpQU)_mm-q!c!)KYqZTB}cH0bE*wWOS9 zG`V_p_JDQuqXVLrE#%k%FSD{N`^!#@H70Z_iCn25y?pudo!6Swude*cuC5pv&t=k$ zA5CmU&Qq!Zl=OmPVjp{Z<4|b-On*O>^(&I!fYO4oX7mE;nOCMMp;~KRx!a$Z^ud+BzsDg+nDq z)VROQQ*Rt)_mQvfCilNToAuV$SEjC-m?T}|HKfG{>~ovsJ7)9sS^g?c;fKDy_=*a- zxNiE9S?ol?=VuWMg|u?v`NQHUBH6UKksgQm!Ig&R>KLv=0|msx%m_)6kBe- z%iDI@kcxx~I9NPYhARogYD_8*4GmSkHm;0+myNqJFg$!3HM3dx!7|G=?93OJG6h9N z+w`={y*xQpV&;Yugwe`Bba$^P*5t)XJF9-}>Y|jDl@)USeL3e^&Fm=HQqHU`^i#3nHW~&Vt(UZ+oGQCf8wck*jTmvx z1Ih#>%S%j5OinNT8Tp9I6`N$>(_^=}eqF)M&8^IcEGH)?$>!S|6C5EE{K_D@)SgQ( zGP#Y!Unh+^X@%xn$NC zuNvFf@D2_R=C#=Myt_ym9C&|Q%_G5UeaRfFB%0C(d`F9tS{tdFu)E^N4~vUv=*HKt z^AUfVgROKiQ;W~|%9UsHeWvE-K}08&!x`1!)qQi?V|{5XNg;?~aG~G3GnvPztc2;9 zO_a`?Yxi{C-AGI~Dla2dVI{;Yy%xW3!=d1g5w%PzEDL5i@uEfzw=$r(xHwAEK2@j8 zqwq&tMkuLD+sHHJ+gUxmnDonqZ-8g$=#HrF31Z;eO+b7x>yEZea>23EjEm3j{P>#l zoOmafu{dY)gUf$^WrjQb{rOEk@j@V=nm(`oTjjbap;Ho)lH1kP)Mf_WZ&oSt@gaSm znBb0$i~ERk_Yrk3zwfEvpBqbmOn2?u$AkKKXBR4@$(JxLlaVjsCWBS?lFmHbyYxphP4&PGP~1J! z+K77#eePD!B=Pwo7%*<9A< zJH@)k-B~+E0CLD^_21fX2|kqJ@F9*}Z?C}vmJ@o!$0POv$pk1tZ%X0mvFiX)0Zbxh z4~PT7BxK@GYw02BCy(>69Q0|7}0{Darrg_ldRY)#d(bo5MHS(p+aHD~FI z=Qxb3Hv_kA3Pk-LlUDt`y8_^YGC-b<*bnt@PPqT2prfPvJ~br(40o_>d4lI6O1`m) z$sz2;)co@F^rOV-FY9j1Bqu1$E|jL-p-0phc(h;b5ixJrgBwNn_U+q2ync*B;rULP zEj ze+>$6joc?y`E1%udEC&&<=s(HlCrXLn=2a?TNSE{sGT})@wxNoTVGm#dy|rmU3yMU zt&>+HLo>;^^3I3q>a)4LM-E!FzX%^q+9e+dtU`I})Tt+rA0PCmJtQfqOO(F%x3^i3 zut`bjp{!c*H7*I`7)u((+m8uVgMIPup$vHEI-6>yd-75R`0Q>Uik&+<^y2BRjTpph z{}$-;-%giDHrZ+9Z9u(W;sV8!4f)RiSkB*1xJ=81acP`Ew?1dR^exvs0Qb%XrIEPs z)A(GcL6%R0?j*X>4bJS~mX_8hc$42Jv_A?tP!2hBl1^Krw4mUH$=SaP3yjzj)Xc)C zCu3_P1q1+GMQgn_)|@v5;-#%eib12WFcw>0`UnaN(*8qm?l@(e0!L~it@);K)~Dy@ z+P-L?B+47nid{>s+PRIdU1LWf&TAQ`BRxV58E-G2A7kr9pd0P5|K6OXz;&&$h=LI&{b%ePr=xZew0c+zEFrCT3JIBD2+8n3sl@mk+`-rGR!r#Pz0O< zayxTxO!Rt}=q3t)*=F?knQ!I^KsBnv=Oe{Qb>~ym@9jJwMeN#o=k%is#s+08zoj(KfB7Gs*`7Z4gDP$m!P$-M^JIvJ{&)Z_b(urt*xlNZlXgc`J6PC`oRJ^*1LQTxEIF9J89B2=L~h+5k9m2h?CSso^i z@>`p4hlozgt(kt1nwmP#&zz0L$jEi$g;o|f7|}(G_*)MSNNbrpx|}(4N2@Mup^K~v zsD%eKK|xvhfrQh~sL`_0w>(&4oXT!quN0~@oX{o8V@M?RH*#y%15tZ(* z7U{KRD<=?yWp7cwmJ9)I!^6WHJoY3zPxW0cc6#W&zI1jr`_+UI3FjXrgycx?;Nfu` z2(m3xJNWf$!K}RYOl@<3TMs}R@~vBmH&79~T}HiQ$9VJeZL(`~bkF!dIXgS2DTZ<` zwaCZvhwWPu~$@%;yrFtX82vV`H7oDpLW#0fjn^4!s0UuQr=&2uT>X++9kK4 zt>okbV`FLgFK>-}5w&RY!BS<}gTCKQEzmQ@ts!_nYC;R|WLG{9M6HMW#2JdR_#up? 
zd95vjv+Cs5efe@9tv?|!kOE=>slu*&?M`SY-Qly3>I#B!GCd#4x=j02Rh>rP51Eo` zR-kx~UpWI1yi4A~V93|k7aD}s-(1fXr!NtF=h2u%yY+Q+EcPx;S}#t2*UPqVutR_I zDY_b_l#+Bdm_={trlTWZ@{xY$28ceso3}A=(Li_G2duveJonO|I09Y0HQ!qM5kQ8b zM%dIdezes6$6X$uyuFoZGg*#BuL{{t-LWXOZAsn^ohrut)=5C7_jh*eOV>!-S?u&{ za41+d6>YlYZi zdUm#}s>&Y{f^{zsi1X?87nf${=i9$nqb`yDjO612{Qdp0p*{c;Euq9nc&{IilX7ga zIuxee;3#F=Bat;Cc|gfQY-I6ItpM{0H&OeshV}{3o1mz#3`>=!`bt5kT8AaY#KM8Y zZcPuI)Z`BD$ayMm_i8@Q+fZ3WMM&_%-EB%KA@ANvfGwx1rzrV$x`URb>*l787UmdN zZtVlsCX-efZg6hMeU8nOoRVTG7Tm&NPLYT|Li3}!v-1G)Rw(2&05^IkLU=UOD}Y{^ zgs)LRS0Gqh>~YuK=H}-0V_{E0Ku7hkZMNZ49ug9I0*z#QdQ%=0F^ub7U$%=gS~2LI9AFw+H>$$SGz%}8@=4@NPX?6PiIz=A3Qi}ROV6U z@Z`x8g1htSza6&Djf{#)x9=~+Z-!$ZX1x_H9G`I2d@SWSF)%q<4+UhLL>yInJ;LAX==9XUvX$$C}Vx z;%Hus&A4`Vb&(BQpS41(nV0NE`4AT`@L+VrHV~V%Z=LTW#jsozx9h8d6bNbbNq9H` z_#IOle?=O3vGMQ(;hJvz>JqE}6Z=fqPHvVRznhFbvlKBVlZF~)zqai7LNnt}m7Eu} z=(_k5Pc_n1E%Q&dnE!n9^{Wo*luo`CeWq4cOEl-$-g4JjPRNQ&fAv$8!jG^V5r-h0 z)d?*2p`yYE=T&Ykj!qtZ=&+qY2XRx$#5~2WckO|CTR(}d{~e4FokjBIpud^x#j@M3 zBjiVx9~o?p8ZG_+BTROLhv zoR|@XQX0n7!1fk)lM8 zgZ9a}-!pCYE@OK6Hpq2)r6MPL&FUH&`1?3;hV+t)IGrM5;y4sL)W3sqYq zm%F<=R*z!)_U1V)&pu6VdNe&VC>u~At@CT`9UORX6hr07z4D&y)1w2mpubetZ@aIr zy2>mK)BXM3`*iBAO2e!K2ZsXBIzfB$61L1c3oNX);*6t%{XuC6iHkEBSZjIz-WT=f z&qqe18!LaWXSuv@X;GilqoAZrLeVA4xIoz=niIogPzk%hgE%fI-c%2s0_~hWcaDNI zJ6hlR?G;#Y|Fx>XT_AOkyE3%0_JW#jDY?C+w&s-TL7m6)37i1SVSM@#xV0ntk+Z2| zJ?ZwttMbu;#`3-gw*K+b{d4R>-4pxgpTCXK$gV!Nv)KK)Ur=NrJ90nj04fa~D{Eu2 zV(9ez{KuA-5a4M+NNHI{T3aq=Xuf;*&LW&;M60Pj^a9_;R8dQm(u(bVXJI_#Tz(ns%PfwGbKawyu zmSw$^dz_bY*|Y~o1ULKqjT?fHM0NpTAD*9RZ{=~RS^7z3W#Nq;#jW*nvCY+s0gyHc z+Zw1T4DjJ1tPz+Ax**MEpS~3hdqhl?%&Pj*B*utY%XA(e3txEuN_);N$_sj z?Ni#BS`YpG$qANAhR&58V<|g&=n$E>xVYTHv>a;eHB-|kF)=T=E;n(E?F|eL_8y#%YF(NH?h=j9x7E-+EWL#XFnD0oi?0WjF1dgdUe_Zk{N>9pt&zJBg zYAoAcL-i)$$bMO`SadnrnF>i7*{fHF#%>*Y#!pY0-?oysa@_qdFJxrnwdMJ_$xgN0 z<3|oPW<3+I$EANQbr*lQSImu{CI$yk1h_aUDG8z&E2Z)R1QK=N>tl9(FT)G46l3Qz z)iam`uaF>u5$c9WpZ@QgesjP?0Ia}RnYs`I6Ruh$7^VP~#9*WMKMrK-jj z)N`CoHvgus$XE7Ub-2iU7C#syr&81{-Qu-gXl`dChkmCCclq3k7Xv_^&$4z&N=iy! 
z|9ijR2PFIH3}jQ!%L8laU{hYacyUXrD{L0R7y{EExbTwKW0^@$iLv)gEG(+Hhxbc7 zFKXgKp+vO4F|AFDFFd>;c7eDTD0tXL?O|P;dy1Wd0XIAr*iq97@fM$!S;^grn&FNBJOV z5-cY_G{BnxBOa5p>xUd+Dgx8Lyj-Y0R(~czYOaKA^JbtsW0RF1&t!B{K8TVeE{IUWnPO)Q0 zeWcU(4*|p>xs<6zKt5pl&-ma8R&5JVae}d*&^Y|aTK8bC)m*+Cm}nCwuQ%m$1lCnw*dBX#+!8ho4GgO-5K z2Q1w#y3n*B9o5>$HRV3}^QST#PTb)gfKYmq@%c)kgcR4{PWH@_RbPi|;W68(ceh3; zH&*&L9NxCNcdXoiyCWnd#PvkipD?H6q@5RWRkK;{i_?jQr|zuc{*v@kBwRGg6!DzF^BD<8cniL9CN_kqpg@-Lqqjoy-oSUfuFK#OufQe!`8DU%*qYRo zD`O;9dls_c9ctzfJBv8%M?vS`Jx3x*%ZrPU3$}t?DPZS{iueYeJ$%XT>uaKvXB}!w zIUm{b?&ejf`0r72Gjwte!Nfc)DjEjnG%3bnV2heYIP7JfD}TDIvzyTzAxPeP@ZcI8 z8BzRMg}Bt+>}&zNw#f(o?VzUHQ2)JFeu>8oEwpFpGx}V7er6XFQ~GvV$S#DNwYWOl zDCsz{=XzJ(`>$W4GPSecfPOLv7`|*jYRJ0fAzhe0`YLf|VcjO8iW8PB)b!QW)jZoh z@OH1kQlw@Q^Z~#jo5)$P>U>p?Pk6sa@jWE%>~9z17qH#&!xs1;gbaw?NO<$!Yx9gS zZ?<7;HLv=0oW*^;30DVu=$~=n6j!)q+cpjCPO{8kfZHknO1OqY`JINXJggwfqp-=6 zU~xRyD@M6(+qRBuBQ+Nn3Dn%Dqn!9cwrpeF9Vnw3CbuQHT1H03&%V;9P|{|gl(wM6 zGw@$Nci{ppkPEErJGk+KzkbDKb3X%sC4LjfaBy@q)$;r-K-z1&ewn?Z78O{Nnc3OH zGBWX?*=QYEmJ7lz(^n#Rb>+*w)+BBYlB2ng+-3D@vJPAAA&@~^2HnY%C;ji-A;4ct zv9n$M!?(aX1OkWXNV{v-Nr*u?Z{KpF!ECHed6#UguP8?eCZ4OVBZN4xLrOMskOotL zp5x%d^gLP?O2Kn$J7b}hJz{(>G9@`8_%$(G_wukvdh`|&7zlcaEI!`WW1DUkp2(c63j+>)`#aPuNn2-yz0`C)>9V2+Rpg^41 zB@pZm3kp8QLP5N#1g<}e4?}=mxN^yt3vXeQ-^IQ(#t(_q3w8mqD}hF(77py&muate z461I{?hLHx=t|jMCZ_4;v#iaDr?vqW)qo5TRwXJehlGSq_9!1@-hg8wBny1=@87?3 zQ~e3}IFO(SuyKD6lL|l<#Ece*1o&`-HwahgAP5+4g++@>ti+Sa&TRD6^F2L1fq{Wn zur@H!CU{9o(0VUZp?_a4^N0pQg4@^t18DwBH-tqB8XEKA&reoy7D0q|F)}j#?k{hI z$Gr%^9Mof2l#a#ySiZ4d`yqg;@WyvC&*i!I@QASsVcb3EU*gRRzQGUJKzHiyPI^$c z4~SsEd!+(K6T0^Ku}5!Q0PP@fR}%W|hxYbMO#Ai)VF?HcF%X^mQs|k^$A{$i9DiDY z%5oZp&FlPpdVrTC>{XKrpG|}C+uDb>r5pf5uoE(nR9><>BrV z<{GS3D20dmFRA~4Qr5r19aZH)3a9o;9+VzevXoLwGFyVeh#l+ml zK@UV3z7O}#`iyU5LjwbLa}k(%iP>W8enNtjE1P-pr^P&Z*-s|de5r)W&B(zs0#bkp zYVU{odKG*yDzH@{XoVh?1;d^+jy24;u7O|%q|(U;q7GBYS^xZ0f+9qI5VRuG(Xenl zX8SO4Yaly+XlPK4ijKaAFUR)9%Tdg~-VPp$06S-CwkU_G5%EC7R%fzL57S z!>^1=vOn&_r>p~Rk~JI48jxixLU%SG65Tbj&m`Te@ajgy!*q)j|y`W z?dPp6xY!)L(}*k%5V>god5DifreDH5PDTGHDrb>)QU-GjN>zDkbw@$c^-r=3q6GfW zpFbyF8uB!u-kCJUpOB}Rf4K*YR_g9wZVM(lxjdV1>XZ8&)|mJ1rDSWXta!jGYY4Wh zg`FP+J^LQq;$5_~&%{I{%W`dg6h*R1-a1X#F&}z*E+bnc)z<^x@l~O9(a-u@jPAj#1ZnB%D|=VzLIIWpRq zrJZ=udpV_~iav8_x8yDXxf3b~5Ng0%t`I{$sP6E|0hAJRxgO44BXTTXzD#7?i3>?4 z&?Sh&EcS^^;f`nQrT1G}TbE|4S;xEbGpX0vGyDk*XFsY8V^_~&j56a2O6pY{7y+q$ zE#7U=Jja_7H=}YWM39tHP^hSpFB(Pn8G$}~gqQba1xtha%o7eGk17zc-g@d>3(vtc zp_fQ!pmyZr9TY>7)fMr_|IMbp!9@^})c=z>>re7O)ZiNEY9G70mNUkl{!6lET-(le zPF3|&eLZ4$ohbp-q=ff~(@@1SC=)N69NA3(wA43zD%VJh)X+ON&2PDe(2t+};eYX0 z83;-@oSi$5SD!!3Op>`BYh?uJ^Pj+Mi?_0+Z5hXZv$cl&+#Ef}DY+IZUQoM;l5ehL zjLX!%|KE_Uo9pGTWYR}Yw)9rl+VmW>;9qofySe1{F|6U0z`u`I>cMa0`*1DqwyaO` z(fL1A`i9D7h9}>UzmD3@hLqMJY3UUOa|K-QgC|c2n?I%KKnS~Kde}sWtV8_;9@3Vd zvD9V1OdY4~4{ok7348M7B-%6bdje8CA?&5eP+s9~RkgLv5wBwS^6A;x$W_Z-j~&j& zHDoH@Go8G|fj>)9&p=Xx4ABr5S69Mj@g6xJlFa6R@bKZ=fQt6eq=FN?20XF4JQr(Hvxgwa2{44-|f!q^>gd{-h<=E zj#9dtaaW|qx}9ShkFNP~I)r_kg9!R1J%0`b+Pzu#N%CMM1C2E2dtvW)3}jnBHQiu$ zFnlEE%dK{cKDg)(xhI>n&+#J&I4h;(<%t4<+<*9R2TTMoQpPxr@h7nJjm^!|T-V0a z64ZeZ86y%xOe`z{)ic-+A08rZ+x=kSeUH4U`b)M}Yx!J|4|ft=Wy*`LSFI%7F?+7B|^3qC#LP;1t$ZF;H!bb!4JTJQFiCz zfNuyeM&x>cXuw+BP5OXP8xv36MV&qh&JMSB!ci67G70=ije38<_3ksbx9&g}sYa8B z#xj||F*rOvUIVCL=y1%?5}s%pSR6RWDlZ;HH8x@*e zejB=Zu82SPmQl#I+en@1DgAkz@zTp`6%~~ftt>rWgQB_Uzqz)(Qm8{G)zx?7*3=fr z0Men;i8%bY5ZBEUuGCdvr)FlxiJEN+B?o!EM+gd7wtzRZ0(QrU+cjipq#j7;MI0J6CG#Dgs!_m>R`P-A^$jUYA*Wa%%&hQ16 zl8^3evX!Bhqz>1!2suC#e1u!*0EvFHs++LXAX%PxtN3p)=?qL^n5! 
zruBpF(^6n(giKC7l=#_S)@YLOCL^E<{-~=)1^q}+ew6DBgW_X2Po{u$xYJ>Pa3rml zI=+#Sj8qJ~&v1VpFbVmg%g(fG8_~OX5k>sH-xf%Tv;p!bp$L*3w||3B0mqRMcp!{N zo8O4`xLF+&r&@d+;C`ZgKj0?N5c+ z*WO=AKS8Uge5>g9+Hj(sRK^Wq67NZQiWcGAgvH3%_>XgZV^sU84h*#NF`iRxY_ay1 zJXtYISV9OsG9R;H!sjMJj4%qp`Y(eii;lyYfE=xje%9VrAdShk14MS5QEa-J_*}8T2-o1JQUa)iO2=ckGaDt#jw5ozj`vGHI!_Y_VRHsHF(gn5fJ6m25Yfo< zXpGTc0Zb(bE>;gN7c&v51+j#Z_z{_T;@wc|h%mW&#xOp5BC0o$BgtNawAhL*wv$z+ z4(#G#a4-!(&0!>2czTIl+?maqrW%LHO(YcGS;XE1O~33}Ut2Kpr^Pvt50&BI2!M)~ zW8TPul#3$X68<7Pa!Wo4_ug1tyqWcE2I~=vh*;BZLq%>!O{GQi{GvFcAGeImg`lB>BUKC44x37)R_bw+zr$nK(siRvd3zoD(Q* zF^Xai45*o1Nx!>e-zO}oEAnH&*x%tE6Z}ojQY+g~1wfS$?+7~nQs)VjPWp@qL1c1x z7B`D#1JqKfgocGxAu}!{F5XyR1n~kyfI#ZnDxJ_vv6$3y*zlM-qA>EDrw8ik;As@M z?IE?bCnL8~g}DPbLrpJrMIZ`8sIH{@`?w#QzY-B0QI0oAXB&fsFSQgF7tf+nfV;xV zr3Awr$;>V@L3kP9^a|Q#etv$3q<~>b)U#(qa4=nR3<~!fsL_YU=-RFy!x1}=NDXvZ z5(Eif$Y5{4ad3oAVVSV>$B6Qc9WV^l*QU4V($rM~_)g9P)uM(Bfmq|np3L4Z+kufCcr7i-wQ*_iu6HvtXXM*_ ze#}NfG~w1%-w7mXv%hs9_<>&^mHtZ<;d#l$l@)uKmtkN0>|Q{rsMF4hUN|A)@Z+&o zAMS4Gl;@6}JGTxXVFnSU{w0H?_qLt;o46(S$$A|}n$D~ygaaQW_1tY%i~$4_hR@K@ zZB%9`gBWhGP@1^a_EJ}4b|aQ`d*Rc4*b{f=<{W^H(H#a*jMMeVIIqFMXP?$^ef5gpR=4WK0pSNe#LO2jw~ ztI@+p7H5X^YEjVBbR%c&8s79+A_ER|lm_CZw}kd`xmh(Y@K)d5p7!slsi`=27kM5_ zI8KC(ZUCWNZAn&)ak@p=K_6oplhU=bNNPxd=ptRLa&C^a2ftT&PeGjQLbQM=#;n!ft7;!4^B;GTQbH}!a~Oluh8jy6Z^~) zx+{>XAPhEy#Bj#bAS~#u{6beoIOq@zI3(+DO?L40d97S8o#j`|j-6wdkWf3393)J) z=g#zUagzyby*G2VoM5^ZZAVGJb`CACHw_%gj<|!-dPjipF z$S(c?1#(p%x}x6oQr* zeL1D06O~%z2qgo|S;ok53c@X+bsKri1Q=GM*^B|+9_HlS3PFk_FE5YCYBRYGG3)c< z)G2v+(G(r@ZZP*rPg~5NIhS>!X|=nrV6&T8S$%=XW!dvih@`8ld;0Y0f$!fF6(ftk zOrvZf7&rd%OkRF|3Oq|_Ua}zAdoCap|(rV8!;{dFkHMCxj^D=b-EA;mvb z_CufUONzf^!pcbZ&w@t{X`17j#j`7JXJzlKGuZe4@jI?6rt|yv?-;!y z1Z)J^DqxX-!H!`Biw!&u5|JPh{UGgM0GKJLs18d=FhOd4C@)r;O8sYBx75l$Gb%~_Gg+wyw9$&wH6$FeV z#urEwyFt7v?1Ol2>{pLw*u1|SD>4&ux2DGV!%cZ_K82Dky-SCdRewK zx(`v{XS!2hH&{5a6Pj_cu}orCd*D=>tgkMWER8;egF2bfz2RH5!x#mM00sme1_-J8 z^yv-Szg5ReMRYvfA_p$yFVXO~LS8~SK7-$01ZRWE}ntGN(7)RhsjFSKh}y+L)sxwA>m z*PBC9z$s^Ui^h3nP*ViHXf;ER+xZG?@7m+xy{ai&lOAK6|I2 z1LjjQ=ieq+z;PyX?8%t#B*qzl+2H4K0RSW^djI??M8x#p+V%4TrLzHVI{X&r=2pVw zX&|x;rzqG?|BJYo<0WFQL;5_8r6%&4gnmm51LA}({n1BIf=FT`O>qV1_OxvG$C4MW@e%W=4i#h<(jOs|BV44v-?oXs8A<}^gm!J2Ux526+;~M zBN#`cyqpjo&Ouxp3b#LkZlP z#?BrDbZSZ7RJF0bAX`6)XzQjxcu-amBbUdBs7JD8Ur8Y$`oc~zL4!VY@Sq%#w4YpM zIqq^07A@lS0T8!3^djwJOT1mP^LuXnrbiFTB&JZ_VLrgul4CsJJ`LkwWSP8j!NraG zn~Z}$cC)<^cHq>8Mm|<}w0W+!G{#(~+HU7P@56&i$NHuA{9Wbe+arIxT1Z|g8dFIi zO^Vo2on}0Id=K%7)iKk7<%(ZsQVM_kae#wBjzU*I31TT38ir zEMwV#(~YL1_khraKpQ1b7;djA)JE;TG4h3;h!8tV>nd{ZVy?Mi!RF7u#dPaY>Sw>@ zEY^i|Ciqi|nZm2;WCdv5PMs*o56b7FyY1ncsVYfr=x45D+&I-DueGUs_?pl=huKjT zP%t9({rU6fE4Wz@25NK0*cG`Ax#?rqxBsp42|FA2tAwoW>QZ<{s(t9YFlq+>%iqU_ zo}`mUF)<6iyT@1=<9gMS`_!Yzd#j>Qm&~tRsX)_-5;P{0r+A^AO>n=UHc17H)}OPz z^P!=J)=_D#tB;CI_BQt~hqtq}by_o?7UV$-mpLR2dKGV+w(ZDDI2SbfeMqI~_?_}!*~&4OQK zOA~4RPB30wv{_O3FqLWU{$kXuGH<7sX!=p>Y(umEjO!=2@Qcvz6!3d)bjnHUnNR2P z$sn}~rG*P;wSRMnSWj-*X>f_Aa`>9*m#PYGhb(==x0g0(9{=ar0O4A=JG~UevH{oT z_kVX0)hd|f(bpK`q)``HvRa_`3sy+d+^$T$gQ@Ln(D!;CdjY0;-3C2-e37Mi}4*xrtR3+q*Z<1BQMdH!x`!63yr`irVyX?9qOTOnqF-=}9WydQi@j`~s4?nLne)ZNp(fjmy%J!jd&F{ZD zdrhUwHzfY6*JdhHNG$C0SH7FvJz+|}+Wu>`XW~Z>I~!l9LUHz=p?>iit?dtX&{JFd zsQDZr^YycnS2p>-M=QAP!f5pmgpPssP|zhOKc2sahyaEoEDgOO61AbS1&o{xS#SLv z>m%kn-CYcb+i{>ME5KLUVXOByrFjS|qf_R+%@l7g`DtCMo$mZflChmSa=TnR5Hgt) zH-|r~UvBw}jZ0iXYmZS%l2MKRGpJAGP!gZo8OVgXNPCNQxCh#^;do;F@@UQ{S?RVgpeI<#4)5th&&0v8jkk7R2q;_ zGqC@Wt&dQ^pMwv<=)nA^8rVIi^KSl`oAcn;_^7yL1vv`B^oA>glmY>U9Rb8NBN}Icu=t 
z<|0|6%96d8nN(H>;aj2I*mK)_cES9Cwrl|#u8vCJZFLYepp<3h6KtQ=VZm*D>m_Vq5E>UN{WB8E@w$Lv`6jv$@d_7-c0Omu$4R=>)39YO&L`b;~5;B zoRo5aqNDnem<%B#3Pk9Z9i(M9?$Wki4~Vi29c$EDb6yWNuPN#y&6IDfsm(59CY`XM zFj#LLv4+Gp;n)(&Pp&d1J1{wa9{}t=STixx!|3vW7>Cr?@Ajz0?&8P5!)XVHJhN}G z00SOAe279yD^Ef6vx`}JVp+ckW`U50&cF<+Ecvq~D$JnBQvhqS!Sd#E`}>m%JJA2> z)2GA?T!SZO(bY(Ucptr5ICR83PZe;q_P~teuiDDW=N0q72PTj=&~LS%9Rw1#8B+ZB zpgCwv#u5f6@ek42p>8Y=hpPE2rL?#P)P4^Eni>3443J$bYx58s>=B1*h zX2*#{M9R7)&#GO?x)`(N_yww-DQGv$$6W+L=}s#sw5gWV{>n36zFVtpKm3GDo242g3oY4rVo< z8T`n5`Q78GvK3YP27owo=p1;zUGym|)ANY`m#q9QCPW|5`DUbqkkVXhbKQbDL~`JH zA`}5ke+NCC4we9H7a``7>-iBEE`>Wc^kW;}_;QOt1aL0k&Sgs+C<%uPsUJXZ%zo zT_&Yn76gtOi5uNYQKI$u$yq-(HC9~t{TkPn`b>#jdu5GO$GOT5`M0w?JFW=~goK4} zweN|qd_>OP8z9g#e}28$K+x6empE4ce?o6;H0s3wI5EVJ%+tFfD;iHCq51zzar~p{ zTk&7D@kdJv73qImZ=A3uB$S^$g`p^V!sZS8iAWii!xe%f_`(rr;n(1zxv@TnORtx$ zouwBL7k2<6*)v$!F(X7O2;LJ(IYxB5x3@Paof33)5_*mqAw|x?2uOnJ7}E*IWYBwK zg%#th#*nZv&iN3;g`f!d8%j9@&Jj$n=y2kcbUEacB#nsYk~(W zKJVWf?0e^VMV0&*=h^ID^U)5f%lEZ5uDlqwsc8fB%S>`F5+85pI8RDRdt+9=7fBaL zF2u+~tJSH<$B!#;{D{3PTHEJ3b^sO5U~q{V&zb?joOS-ulqlc2va&Mf?g^o;G&oH? zgG4-=U>x8E5Wy5GEC3E>g_0E`rPXj4IfRAPt-nD-Ac0p7gVYn{G&d_TkqzSG#-)u< z`|K<{cjBm{7SgA@8F+LG>SHocL_pa#MEqL5bmN%)Db2=%0!XnZ+!<2T6(K|Ccd%l9 zGjrGmcetRME(}|k; z=d?Y)YQ~pWDOq;ROaHcaTAs7S(FEFRnGKLY~4`DP<732z+4O z?5jjBdGe$y^xXQQSa17?5!I_#uinNJCr}7mzMLiIdYzIK!e(WLU+okZ6H7tlL(jJg zMMiW(785Q|{Y(HpVod*U3y^p8R`6t|mQ`8ETbO)?SpOMi503(Bk(Pync7&h*(Z?Xh zq3_>)@$?2P=ruDls}zxAUY8$+(=kozMbPD`L9xgD;b>z*FccW{K_a(gT@GSv0yUI) zWDcHl0xR(&g0FD`!DtO*K>}r0fYtPT)7ekZu?-9+&Qt|9YB&#de%pB= z(a1adf^A=>h-rgG??TPp@MxEE%+8Tn?yi5VYank*hFdrsLJ&9nk znAix=5he|pd{CVsdSJAqDY@3b)CB`KP|X~)4tI5RsUw$w=Mb>Q9J@}L?tb=eK)}ep zeFAoBld*qqO8Q3KyQf&zQbD|E55G9{gRBxs35myHVaX!pc=pc849(qibf>Vsf3g~d zESe4yv0Y40*21YHlEr;fvvApT-r6wZtZp6v-0dm%Hp;%9kk=0?Y4M^e*i(CC2!OK|f-Y5(=&RsE6WiQnR`7*q%Ta%^>3u$}E|H7`xji z*S{Q3QF=YzuV1$@DZHT3CTkM2C?S-8n>D=lhqqdop#icD%*;&kO_#rZrMd9z0J+SI zL8|4XuYTpwya^oSdVqz6u;gzbLnL_-+=nsXRo7c7XC(l+}^OranzORy%42`E|j0&ztjyS^6@-U{ekty*Gf z+?PY{^Kvl!t-ten>STJwXX?F={Lfsi&|DC6{CQ@-NP*$Ns)6I|o%^a09xaL7^_lN} zoEQ$*q%0XQ{JdfLQHuHj?L?v>!=e?<=GMXD70%E3v4fUu}AeSQ=2kIFI# z-FDQy9L(Did4TU2|Ftj2ImS$&-0I9-sSQ-7PS@C2wo`Z>!J$L)h;b7^JSy0{LKqvy zP)xW$Ic^^Vphpr)qzMj%+VN_P5?J5yuK7n!{$ELqbIe=K(7r!lj1y!xh@eb_`~!dA zss)HY5H;l;s6@k+VV>|{amc8tUx`z zkIV}3><>a%#dt9fGJl8AzMnInu|SwKW)7+!k)0-<+94l!gl01x0|O_)As`w12L}g# z&9e+e!J&nL8_v1I^^O1~4Gp7@1fq>*x_yZrAiRWu{!fE-c4j0VsD<3I30zUVu?36` z5Q#2Ow9}YO>X5=@PF!a-$|aRY`LLj1h^j8L{Qf1qK*$U$t~nKp-DU}03YErT-bcS9c=!AK|yo~UK8wxV1F>~kM=)Y#48YUiXSi}7bak2 z2r?pWWQa#29L0#~A*h`Ss8=5$z~6V^|Hwk3Wb+r+58SscKq6OhEn9fy+utMfN9|~5 z-l%hjCn1FV?~!x7t@P=~v;01phTXr5Bpx4kI4xtr_MCTrhbrt8G}sP3VtA)`%Pk_R zcqnOl$)$KYD({kuN*}k(dO!27$dHSE%o}!ih$bzE-j-F|KVnQ0_xBqni}3&p0`Fk7 zQzeFY+!zs$$G%1^K85)$!VXk1@eW8+vjGI4VPgD~2}%Mn%%AFPh{FkyAQ2=Jk4x}F zp%X@t|M~N$3m#XviO6oDpAG`2qycy$C+Z6|0sBB}pv_dk-TSJJ$G5W@8RX>wy`!}Tk@r*@ zNY9=W9M+It{64w-$ALZ6P?cjZF?7d>3JsAyQ+E`fW2v0hj~Ga*Ig&7q;Rdp4=unuR zW%hz62eMxStnjVS{>aAaY;daYJg_@Ki12JM8PIO%524Y~HJFJ+0wEFjm#Ff{BUzDE z#LhGllbdoUMzt|=O2B-isyOB&&b83t$&;^<0^?MU@}c#^w#P`$??aBNPoTv{?A^8^ zbDPc@k{z{5g|`F*!_*>an^n49jgW_L{8c?fVtK^i(esKsYwZhkyAVSqQBuzvQz{Jf zJ52S(0lr^&by<=iJZPhv9L;aTdCEGeaUBmtXnM8@gb)u93PrLP>yYQx2{ccMs0-#x z&LH88F{~8gsWdyj;Cs>1&ipv06@7j2_;sZ9)56P%hpLJ24nr_OQu5TSl=-UQc>Cjm z>akip948Kb;hq@I1zO=oD`ehpy)shd-u{JGVx4dQB3Vnj@BT~DCnuu^22$)prMJ^8 zd8{bBjlbA9`oGwF?|3fXK79B?N=YguDQ%Jw6%A#@x2%lJY-P`kC}g!uB|9UOO~}lq zfg+T>DJxqxCC_oz{k`w!_q?8ep8uaezOVZhpX+m7=XIX%alDWB@iwqBfBzw{bNcf4 z`w8bideiM$x*A|gxrWA62vt<)*!IkSkMf>JF^rV8VEmAr>eU&@H+^ErHF5iJYUBp8 
zWK`2M&Fq4B`a4aoToC|nx3YS-PsVy3k>y|Zz#x8XHsK{C*Iey|wT`Ydw(1S3mGvHx zHES&={Q`rl9xOF!I8Cf#Y2n&T_kAJnRF9_mo0is08h+kFF08Iff(mIVhBu-N9e#)1 zQx1ITJEY8{jr$Ui{hmpxKg7wo=IT}9I0HXIwQa5(Dj9FJp0QZVVJgTpG~FPW86?9# z-!r|Ve=O(R6{VAubp@o!og| zCc5<9Z@%t`aj^q$O15@xrIX|E2p$vT<(Z`7UoI#~&{{*uIrVhPHb0R@%|2lCSG0-R z3wx^L=JoeLVzoG!kC5E>4h{M~v~_gQW$bP2`j@R(_i3bid|W|XoEmzVpQBqK>j7ab z{=Oi{qyfui^&Fz!0iKj2r8HF6-%nqA-Q(uenvT(=&s z+a2$$!SYMG=FI&-3ifOuwmHdgN&g$_XBvNd@Tu$V?V(_E2R9D%B8p`dIMI*r+ExrP zz5@?o)%u;E@B+a#CJh2O4V%!uOCj_^Ag4slEEvL=fP@VIyJ$wuqP`OX+e6|MLJI;y zw17b5sFRZ#*u#i$YzL}$hzW1<@O*6j%a<)c9{tJ)48e^+h5v?eh{%GC`)%#EsFbls zh0$)K-{PzX)YF2id)M;^ycD|jjEOeIFFeM2plLK~^7_^bE19=v_*TAF;W}_@L*D+- zgU7&q00zZLOfcRCIfR2N>4%={69HPp)l$$ggeO5m-b18Y7Par8Vu#@3_pd1hfSd%! zCk*pO6lkCV^R7@*gJ_Q|s+q@* zEtG3Bif>?HSa5|@zj)V$Z>8uKT=PNm zQ7{~VEec#fsL=%j7~v%3SI{fewD+}_i|Th5Dk3ET%_Lv+n4tdp>o%V>qxfow>z(Hc zFf=jo3ky4kBBW_u>^cy~`A)jj)?Ir_p)FETpGi!AQ?h!lZ(eANfv?AoK0~~EnViq6ps$Cer4g2kz~xSDi(g8EqT<2pE)F-`6pSlP z2wI>K?>6b~jk5Q1kskci;dH<1;*ha;f7u;+3$^=iP8JpKJpp_b6p!9^#$naD#OS?1H&B}UxmB6#uTz%yK(2~;maku#ImcOT;R%_Yn=VSLGs;#am`WR zU0GAZiWxlED@vW6FWBsPr4?d6QxsR9M#O#ih!O|Vttez#AwEMH85v%pcgaOnv>EsA zG29lm9I=>{@W^>6dvTY?`QI8Ew>FFvZGC6b#=Bu-Tnbgv`RYKk9ZT0bl;@BnOdsJA zpg?6NuW=XwAHQA*GJTOQR`RGywv|$z`@FipAAj;J#d=^)Q`GeLnhS!%?Y(waj$~x( zc28veq@2(SoxG^elvk*xn?2?C+n#QeW-pWcvAqFo(u>~b6S_yvHZO6qNhSn29rKK= zd6RXgTJD=Nzs6R(;C1DA4d))xUrS9xo!-TOy=`HM;}VzCugx*});7q6x2j$3i*i)8z4`>ceK}ivMG3^dzGwNt zaEk4;R5dL-#^q^yGfB#Ai5Y!j_mE z74Al#Liu`IV%tJ)Lu0PAxaNK@N4>tg#g|d5C*^DN-qMLJoy|`_Qg0Y+h*IZx0^0~8 z>^u0$1aNjy&v*f|8I%3hkfG(C3TdRVAD|g|O8?qBNv}=K;~4*&)QoCJw-yW-kgAw(qj}AWncw%A~Ls|dH1+Cuk?^f3+3pGP56%Kd&5}Dm+ih$1Z^->P)4W-v&C#bY z=n}=|mB2;lCQM)N*s)LIrf)?qTcM%?Jq1b)m0)pPKn)qkwP33H6zeHRdQs7islBAJ zOet-XxUh@lhbvMKRru^LMpNpS!L}@PSTkx(G zh_1!idPRykZLhuY^oK5Y3ZXkCHF%3WF6gU!FF#HnaGFq}o0alT%^Hs(owb$&b9JJx zFU&U??G!Q}XsT_x?cJsx^CClajj^_`?#|oUR04+%MMBXP>Nk;}f5cR5M8d}t?9%e_ z7!9t;eqbRy?^#!ZzLm+a7AM*_M*3SQv{VeDnp@{IRog3|ztgFb2bMcd* z#oPI&jUph9`M31iuj58Grc{QBrz{ntXfqc#TUHLXvwd^pim)n{`f%HY(yXzSKgGew zxhWOKO`6e;x7%c|`)RyHrhNpmooM2CVxwI6Qd?(r$l9^g;dVAoVJ~)<)U;Wt6B;s} z&pmGxUA}fGHekKumi_Ecod%8eM|9co>XUO|wAo=5Qj3ju>r<0E>e=0n<^ft{B+%lI zE-jcqK#NDHR>zNgs%KFB;h4tz=Amq@yM!l|K8W|w&*y|H^x5a9P-Y{7NnjUa9>T;x z_-R}^VYBr8+l3jsM_gbSK1F^#U@Elg+?Dm8Se=C*2V;L7=33+NpgNE&$`ltbd(dTA z7|uf!G~u7Zt1E463`C@?l&UL?mTIC>k3zL}j*Tbkdqn#U@3RJKIWkAOu!glsY^OFo zPWHGtwOeAxhx4Nc9maL@=}r=K#GrcCYeZ<*I*&)e8x1s7V^fQ1%hI1kurVcmKIXx~ z!eZH)c^~29ZCut5vR|E2&r51*ypb<5pO;)CwK}H&ydl2dcbqj^ zT0cATs(jX25#Z}nRqNVx!Sz7cxfr??E2qu6-!wM{*uL_I3FGjq8Q<$j4SH5D5MGEJ zR%nPlVa8ODTK?Rty29al^HehyNk=~0ova^<$#@olZ0qRAu_;HL{l^4Xc$9U@lht8T zZ>d(OjkxZ{oVMv67~klGEe&8MD=Z-O=%#DLoBLV2RrF2i)=9>fPxCkI_<-&v)TE}u z@}p9A&f2B-_Bg7VX=F{9dW-k`mv{;oU)ywBJk&QG7W|6#D$6TBwOU(hl5jV zzyCiM`d_o>%Z`%#7-zcI(=;N!jjr)eH0x|P%M!`1U86*(6|nJ)ikPw?kHMA6PP_32-Rv>=g)Akga4 z5*1(#BSb`Kyvl0k=fsy3{-Yw}@KLX`o9{UPsjUU;A5O0S#B)o(axnjTS`Xv-`-7DF zxhwjblA?)0eiDzQm83sg?Ea1jfM{@lh50<|I(Tpam!mY_4&IYm$CJf_hiU})UTF^?L|87kiFV>uscJQNL z4Kr&u$r6T~n)#EXb{(y33wX~0YrVR#{|hN+K_lu-S=k+smomVg01M^>(k?K}57bgY zX;T@RyNwa4sj0@U7{USLQjW1Dpl1_djY5~!Vj(0ug!_cvdw7%y5R}n}mmFY>f+10m zBs~VBqg8N=lYK_0!Jy;w64dtbNa)wP=2MOMeo*wF@J8NWRPOY5Z>wH3+SJUyX|H$U zP0_fDZc*yR-wb|lpEoI>tDw7ks_|i}OT)#w z4emfDYGuX8Z7JIYOi}-yuopB1zSfMOtLNNS;p#F_HCnQ*wu2K3FgS4IzO!W5h;8qO zjcC;++^+J5h5*#r8JU>QpsNg{BtSP3r2MDx6H++hsHtQ2!B>dOM!|(5H>M_II}Sm- z5(q0Q(d#Z`nr;LT2wn{fGjq^@T{zwpAJd-h&IQSs>wAWjL{w9D)s$`NZrFTX^X65H zS91q!)K96VPPz1(#K;4|B~at^kuOQi zhe`ndLKW^HTB^uI6fFBIgK`G&@5S)2(;}n{T>$`?l*A^J!P9=<(h^Lx8X<5^0iT#i 
zbFNvpuJ6(DPJJ*#(B$Nac6m}rB;A^Cpq7VPP$>w2s6?cpof_$0T+3C|hA^{1A5o*n z$w*fyKtlRy4TUR!4nQur@(EYVx3qXkA??<6W^8X>&M`l}kTUBmxyzwspl%zN08J0a z>q_NBnnwd;?C4irp;CW5=hoUa@Ab=V1NgY;4;??oW~i-P(WbSz#tA;&lvL>Yho{0ivZ{+CD8+bJm@66T6ZiQTvNYmx;Z_VDL}XUT??UY&E5>M_q9haKIr ze9g_wZ;RAv-Ou)SZ(2Rt|1Mtn(4^k#MeU9h1@G?l-Pf)a82=LaKl*QlonH!lQnKUz zebM$=44ZU~R4P~ZM?VgJv^x;Z6_UI;L7zc@-E`3$kTB>JQJ9+mLJ`Q#^iYd^3p&!h z$B#k6q`}0DWGmoL5RameN*HpghUF}HECm^~*bP0_ zZF|p#RXqW;fWDBr1f_y>)Sa$Z>S$}D>%`56p*h=1t7;jPGHL8`REG2+=2A~>&w)%K}xoVLsn0-QrTdpgB zJ~E*4!tWdhVFOiII5)UW*wQ|Juhlv{T38bI=;Ze)%U>!n*O8K3^jU-3d{%Y&p$Xl!929@OcY=Xe&pOhf(os5fFc3O zlO`Cjw&u>UNTF>8|8IJwfA?sLzuRyki?HRl=X>QA2x0cnzfHlTc>2mr}V z5Nb$!J^i|w@lH`ZHwu#X?%t(Ex@H8@B&p4y1GIZ&~R%E}v()7-3ykcP6ug1?ej4kB1 z#$2D>QbwcV7^}g=jCcIiRCU6;^Vz@)1Q09b?r!i%&o(|c{#{aWCjCrxC|kg0 z9r`m(1l%rkE-GT|0eW;ETra^(CGy1Rv?CxY;Nk1DEhZ|7(Z6Rx!o#aU`&@wV11aG^ z6$-6`W~xTVOeE z+_6Iri?skO>nQ)T?-B$p1oLJ0SV*4CoaX9(SgHDY9vg2Bi9co4OXK>ubLdj;-Uo8k z-6a7RW_2Ea{Ty(?chY~dfyYEv43DgC8*A*8zc-*AG7>#sf505{TuM%v4PbY#t&|rj zo}s4z!v2T`%vY~I|IS(f{RSW%p+7%3c$1Ic2}1!@41>)!*v{K>cHLY(I+M+`bFO*D zoNj;M8IfB`>?a(!PuF?&j6C}%dB$-|nZS%K7TP34&9N-uiIRx4GZPS&aH>d0 zF#dqT?Yq*_l`%>hC4s5O$mOGha)yVLijv$DZWg!4Twhhz{bjx5S?J+_?h!6f3JCA0ML8 zgBt611xyOlE$+N!k^0&}eSOWhy0bbJCg!TW5(iXo)V@FC%BFNx`J%>-b=zvw# zBeQQk|739SV!xENXa-Iq#=G#GDzdOpnC><;#0&`U?1mT!Xh1h5?$C42o*wSY47{;(%zj&6Q@lT^P$x0#?{QKmziaaW+bDBSR1|hFf zcuqyA=2Q?@A#)qhh!i9hi_7ZDtlzscX!7>1TO-=`9ox2^Ub!rJTF=7fhfAVT5p6;+ zYr(e4-<4^s1&p%V=CO^7iAGkYx@UyXRa!6^az?O8D@dju^8iTzlJRkIe!&T@5?sma z4zNo*ml~OrRPDnpeoHY=ea-vRG%u@BF^%OZ(~@qy(wgcHIm51)3$uFnzakA%$(3C= z_sDdl=O1L+G%B{S4|)3h*aihZb5k%o&;Vox%k~@}L`2tzWi9xC3+SF+Z!HpHZFT(C zE{~U)<-?orD1LoZu9|O@D6Gn<2Rw^5wbNudD*O7Ja;Ce=&pge#k@H$b0C=}nVpsZx z^TQbuVPW8HWC{uqiX#XV8tDLy_kwbEU`z`m8gz`h?j`j(<&i z%7?p(2{g7!lpLLo{=X-bs!+7D9NQOPX8m zrX;jYj>5Yx`O_!Qs8(F~_koc5k|&aRtV^YjG75C~XzE9oKzX7vMUyM)J_n21aTbKv zw*=#qg(Y@0d$kTHSHved6l$H0>DRZ`>y=pj9FBta`r%t=_D6nF{bXWxNowC^4!MSu z&}V<%L+KZ83YZ3aK@d=hSENF(CrA}iV4W|Z?3I?0!QIb;#S*^#{#35a!mNaGMUMmB ziUZ5eFv%hMiqnia(bGH(dWAJbvWHo77-i3;J90*Ndofido~SE$uy#gh$Df_Vw+=4_ z>eF^O!eE^tZ#zPi*^v=K{1-!e-hQi&g8TF1%-B6SUVd8d)5_7Az-oM>rpK8tCWjft zf!G8usn^?&gy;NuAMGdN{1t?c={dQ0iO&N6oc{LxD6@?&c)Go)LZN40nNV#%G`Zgh zlU4N9eHEQ=Gx}x2!Q%{KXO&U{9%#p0PmZ)4jpOY*Wj9mgT#?0{_76vlMRIDd<$~$# zI%eIu^z!--6D`#w9^;7Uat@^Q_ksI(_ip)AwHJWFul^b~!}C2#MJ@xGOpb6eoc?2Qx}uOKg53qC8p~00|Her$TS>D4 ze4q-e6Kute=mQR%NI7$gb{%1~PgW7j%|ikb(WsbRtsF2JV`ykUKjg|Fl-T-6rk=Mi z>APXVU;%;|Dt-fv3J#DvLUmP3V91D~L4C58x`h*{zhH`e13#=Z@J&!Kk9c78rN~cr z+Lp0NQg4e*YmCnHGkC{oCDl{&JbQ3`^&T2^1Eu<&Nu9@iU7y#MNgJ z^}G1=7ujw=G=g@cB0p2#r!OnfU!C%wbULYNN?mlE>tqGl<+^6hD?M;}2nic$p6S6Z zjCL@-E=HgSKrcVL%44pdkIQNbZIg%zguH_0jR1R}^?Po0J6k?5e%#UX72!ba*;NJS zzxecfx8mY|&(3;mJh*d0AA^*m*_Rh9DM$w|Lc}J02_|k)`o5=yOa1AtnZhUGk%dK; zqxhP>ArYKRP2*#dsikSlGW0)rOYJzmK5ty4W7rbnG5+YmW8`*!P9_JgjvSJR&VV*r zhDIq0l(dKtB-Er+FYq70JMouUgTjN$ufG;G z{=+5h6y51Az^k`fyKBYzU8bp5lM{HqPMNLi8}hvPMe>F7(*(BZGFQ2VvE*x`N=1{i z{Q{}q#O?JQv_#hW#cdmv!G)g!+OuUXftmosSLi*U<0>zIX51BG-C7c^oqs%s+dQYCCJVJ7peW0*_7X? 
zSd%(jx2>Yd9cHcg^82)9J9_LJJAPg{3s;n$t61}Y2e76I1jJm2c|G&|D#sI0cqTx1 zsYPo6g{ZrL1rAh4h|KzFgd?A(PEgt4z?dQuVmDg7CR297GV)ReY;tl*?1=J?{oji6 z4t61F_~p9q@k7{#vO9D`E`pz5{(X*szYns?Ig9Fwo!vKx)WtvtNV9Sd`Z{1bAoV_& z@~g1ka69#&C_4B6hi@C|ykC#off8G_BmlOe_fMe$!l3z)9FKN#i;wXHcf3nn2*VI5(~U z7X;LSclKe?!(P7CgQ8#GZUMbF*<6dVdB|v|T=#EwsVZgP@ozy%{;5=3gEudH9@LO| ze7D>}X2H5$p+R%p+~BOeo(VNq{^t*E8!a+k1hEl69B|!jSc5tT6HK9w%?Sk{KqAy+ zf-k;14GH9>DinEY#IPBE^{xGVtze{o&rI=twV~|mf>R@{f$oVCoevb0uXZ9c`Z8>b zEZ)44h2=o}@I?iCJ?u{S(?4-RG_zPLki?GRG#DD*#@G3Unup85{v}-Ug(FSv@j_^PD?r0_C)fE z2sJHV?j83D4GQ2sp+-&1r?^$Td68pR4VODdYWtB4)nD5;F30=x1tuNl)Es`6EN3=r zy1TzjM7}xX??MD=po9WcXr|Ru{JB_yiiqHIHtE(QzV-pVeTmtRihYVD&g!Zel|0{j zLWR0qa`~i+%5rdjNbLC-m6~^W!;btoXB~ruEM2Q=ano+{Ihmy-!d<)hfkZ*?_LAvG&Z4&w~AQV|KqU znD2@ITivT(YQT;QYnf3MKdHeENYCI)RcLlg$;pNq0Ny3+#$}d;& zEG_HF9}BRoYG3vg3PP#K{Ij)`iy4Kn_$0{(_noe-I9FA-W&C_wLC)|KMxFWWEswpa3nV?mZ#MP#3h}pHs>)Vk zCu!&lm6-+ZsGT$Gc)yBMA8xkQ?(?Esabin?^th>+xEP>`g}l%2+t#>LzL=vUwGmyn zl?*haAXs)WY^K9dsUO<>GkSYyeXo}q#=f~Zp-t8KsC;zNHww$wzDM07D6X}!J>{SD z2#FZRf7oL-*@^}f292dH+PvTNdT>_LX>rj-F)A$iPox)sIOeuL86+NLC<5riQ20|< z(y=%l-N{X5mHe-3RDIRY=i_okhLg+pWZ3V?%W&-F4Y>Ey?4S2BjBcUII+8Q;`?j8Vy;rI0dhvSR285KGVB0+2_vwH&?vdZ zuPg-ovX`9NJy8f z3*kP_C$VDX*lfS|iUP?u;X!`veIuU^9}zfU(-t-gStEn*-WLyib}P5*^c>u=bjW{{ z2%gFWfRym`=~mLjjFbZ{SJ8k}N4ko4q3H~&Dr`4J2V0CxL>MMBotry7-nZ?hGpjVd zv~+DlZ$SgLDEqG77{mA^3;RTK`;{)aaWa*ausu#L_Q@2!IJVTVLcubH)sx|DAPfc; zPle=`qe|fj|EM08V{x2QfHDn219)~kkn+Oa80b$}uHT#BUr~MQ zR_IBYN$YsoNxF5DFR!RKoK&HfjbmItITmMV?vQgYCn9#i-F(mKa8GI=$K78|J)5Fx zh4?sR5nLX)eA)B<`5$htZzKlAWJPe7zw5a#Eni-$sWZwg`{()NI97Q;$tV^rBS^A7 zFp4JVSMW&P`?RaORPXLBt=)9Nyq}wuTux@RH)ID^v+wLJjWHJCDwVCvtn|93q7&*8 z*;_!rSy5iucP@T&%}46>c=xiqKe#aBT_B>+Gu??*jt-JQ!VYj&z-n37eaQimg`XhQ zwIaTJ*+mT)&r0x|FM>9YDb1@r(7T0c=QZH7p97GKG6JC$9x>Xfptg&JnjkV?I!SqJrZfx&k1(3jS3TMZ~)v%^K*l#Zn!z zM1ct~${sY$La=QDcpw>$0}oF%-xs7u=4hj18o1ypGVB^PM?{|wy}V?7Y6b$Hf4cMj zL-~r`0}Qi_drzy{*hDi-awXn9_hp-S%-#$6f!ChQ?LNq@u4={g)zyi*IN{=}oXEL#*Y)K0+`Ql9S26lsJm=@Hf$sMp(-ialneKfQ|2ncdrZ&7;z z&b}S1SH}tnJ{-#Suj~ipDMY%TNJ9Fe6BAR7bMo`~34v50{AH8V<~FHsL=HGHT(a?x z9PrJ7tyd0YG+2#KR%>m)s?(vC`f7L0hmjxj%ua268bJrY>|Wn}S{~MvfqA1hbA&JMnp6X@#s16^0A#uMGUoc& zfV!Z7#%;WlY~uwq)q~MX8rnmQz=cEpe|i%I;$uotXV)cjeZ^Y={jLJnKv{|bEO^bI zAEf!A(n0~APh`t*5Wc)T4sHsrfTh~bL*uL{Nr^iOxkcXwqGS^#(;B+jr=kJ<6NZQ)tGE;71u}Nq*W4QT6tp zK9{fO6P@LqT^6*STrtk^{smKwWjgBXla($!F)UHq330LK zg5QFy|7X3R!sRYy^tvZ64 zGT^b0A|QG(_M*EBBcPI%N7e zuh0C>k>VXHnU&;<+2bC?L1ujX0#q(F+Ezd80(x8gU5cz|XP$(G$L!qB#~TouOI>g; zN8p?C+BIra%ocU!g%-Qk-5UAvJ&S(wk9At${1Ru5^C;*t`?4_|IMFue;6~lt+(HDPMBQ{!UP|ho@h{WYcnj$e^+iiltl$_n37RKF(c=^aW;HRHNU2vbu+ElL-e(& zw>`VZjoW){C+%d2&)uAt>=z)NlBnr$)T-Ko`|tUwT^rX_cWtuwBN66Bn$C+`B=Cv1B3g=4`D%*><#Gh+fr?EXhk2u3M~D7upv0ELbz z%&;{pqtIOYIr{oOIuoIa^^c?^rm0vtjgs4-0v+rVK{We z5-oj@^n3z>C3CCsnCe`6`9|mZwHGK4v`Hy zUGMg})tG;K+tg3V@n;Hghb5mG^tY!3Rpx4k4GcT!rj}bglO1q#D$=bPNSfEqUBhf7 zn`o%R*1K{MLk*re{F9m;v90k(fasp}6vZ=X4BfjftUk%luS}XSJKdLE!D3KAR~LB$ z_2^p7)#=xW38-Sxi+q;UrGBCIpOwQV7Fu`AxrOa+qj} zmX#5HSqR1{Sm*X_;qV{arZy_ybT7-n@XPQ}Z??keit~Zy<`!nRWSa5@cezQbq{ezX zye&-)vQpsQH-7H+&*Z^j+i=;e;>Z0f=NTs{6q}=tv?b7!afP}Cxhz1Kmv}G3cw6c! 
zD*kkIbOL6zZ1~DJ>qQ z76$H54Kxj}#=l&fon1kpc7>VJW8ix>M@sESr~TrjFPFxYy3lj z*m??u^0PV1y3%#7BMz!JO)XhD0#}cF?Vnz9_*OnHq2KazjjPs&j#2TA8!40$5yy@d z*T&if!C-M&97;;9#~7J~g@qQ&h{FbVlpok1E!4a{l>D>R;+y;20pYpju{HE-CK_)Q z9js4MHDy<$rBG6~IIbkYOA<;VzR`ec+blCKZ1u5=)=d_7u<+VoCFq;{QrTp)N>5W| z(ZgSOV97LY3bRkcBrum^e} z)G?8n41=QIcb5o#n zXpRB2bPNoRj>7nBnv=2*fBM*V(c6b;xBUONAhU2m6Tq9 z>|;SrDduYBO7BL%Xd`1wZd)jyT3<%yK}zv&XX#qF9jojmNAcC<+Sjo-aH%Kr8Ss<# z{vd&*9yDTV7CL9qtzFA5?Z~qSf9ApcP=|_uylMbl(M!!ggQjAr_xeWTAP%A->x#K2 z`5GQP!6pTvJvfJ$Uh}c3sj1biN#`om5me>5$rn5}!D^GXEu!yak&;^$yM$Q@4X7D* z`@hBSyICC{QMPQ}-2CuYz{l$9z@VU@cV1+fr<$24m-7bqFAaz;rxm@vz{7R$U;}{4 zhC-LCaZEU9l=6&W#tHH-QIhMi<>X3BOP!$rH-qk2Zpf~Yxi=XWG`Q4?mZQ|o5J?;D zD%LyRc`XVQleilw=tu@y^4veC2Biryky2ZAVe{_AX-g z$_A!QoA#m9w{qpm!GyT*S`)WIjMvU?MKZGm#(&meZ3s9Wbx3a^wN20>H8hD1^powIN^MQSk!>SXGvc( zc{UVaPKP_r%1-S>UWpLP3L+ zY^6`y_1;6iHI4QybLeo&NJ)L3`1+1%+qOgmQPQ=juL~eEsR|d?!WT7Kq!z~F-(zB9 zxegroFgAsij5p^hpwWg_nT~zcQRi+?THlZZJ+ zL?RYxzp`&82C?Z}?QoEhkx@dwZ>C+J%+ly(cWAsVMZ>d7*Mdtj^NGLaI|`4G!>i}7 z5sE^o))c}fjRx(o@vt0eyxJXzK|5^Tt%FU9WlPA+Q~@Or+Urq}3ptTLWW1UMt9lo& zRGV7F?N<(A%%UZlYybYUyRJ`oLRhW=Y_8CzU8*=>_(fu5q!_ABtwpZ+)#1WPkn6J` zQ_YWJTs4SfzTmd7!3eZ&`_0CQ49LmJX%TbTl3m^Mz0*aEejecEZA3}XiuCAla&c9( zVn218t6rqgL(ZSvKlZ^OB!R!cOuPV@FbVS}LZBKdBP+Z7`7|U%R6y*(MF%m}tN}VU z^_W~}WN7#S>JaGL${gIq%K8$iptRE2vv=g?x!ZKJlV6D7?;$Wy4^|zX!LAdxaU$Zy zi@kk)eJuqzI*Hrz13i7Bp;vw$qFpHrtS5G3hYBx#kHY#bNYGkA?$p=MFAId%N6_Mj z@=q)B?Om-+eFAXED`{vh=wU@uM|^QD-rx=@8&Jo#iCoVCP!{{^*L9#7N$pb43du@l znkhR#CmzyYs)%7{K*r%_WBUhZweNf&T`iPW)!DFU~YIA^i=on#A9+P-=;bwEUbg0qx{c5w^q?_-Ho&8imlz6ZsfJo zb^3g6Zp*V>+z3(<@zmt|VWF{&Us=n_$*H2O13o0XG~TILO3CfO3xl7AS8t zyR8=**l;I^NGfCXz@i-y0hOQtfKM_2!|#wj)O@Ay2}zkOV7FLA%%HotjoWIsEqr19eI}g!ERvPS!1Ky;zFhmn@9Xc_8+0Le%cv009zItlM8byP<$*TVT&t-Vg3z4)G4eOH&XO- z?9?-17d6z3R16KHv0$;tk-p#AZfyg1h8u~L4WUue)_&>h%NXGAe*l6c2mq=R*W!2b zm((fXt3B6zZJmt1ZQ_)Ga(xDKJvr1mPa2S`u{Nh<3?50|5wL$ z8PPfWAW`6l*dZF5RkXD&Gpx`!{BO9VPB{k$VD-8q7C@>6v^en%0SL~y3?2gE8vl>$ zbd;mM@9o`$GaBIQyZ7?t%Lg^miLxG|{SE<4_AzN9(FbihHwDp%I?zfVHiT$II5|i z@xFwy^>C8EfO^z`8E^mls_LhQB@n**MkB)F5*5`${8Wnxqm}gfSU0Xa4wPk@#*NgJ z$RX^zY8Mid+04XGFcNb%+WHav;T=jOWDHAvOSuO6Mk1t+fSl!|{n+=j>21Cu7(gzB`#`lfk9CIZ_%~ei9%T2z_>C3L z=|VkU9x+s*wgdU4G}m>;dyo%>(zptoxCT$)+A%2EwsDr~3|^3C{m)r^NQ6q}>>BV1 zkILw`6j0$@a))pSB}Q)_RO5en?@;j0yDzQw_`m(Y%SqY7%*+i@bObr}YX6O~Vdb#A zhwvCAa)I;nV*|HchOcniS9vYj<_0ZH6v!pa0Q%s>2tu5v##y*xSji7&rR1)E@GkP` zOE02RI?MFSUWdWj(*FCq@r=mf?uMzc;vQIQQZ|-AL-fFb1M%7}nA>{W`yMjMl7R9Q z3O24}@`bdwi$y%sh3HLu@bF;> z7MT;a<7)eJy;G(Lc@H~ZiWD$va(VZY6cBxW~*r6hf?Kj z=wi3E{RyEkHjsDlI_?mTnYtS>SUeamYDBIQ(c(0GW)jeDlIa7)w3DXQer^2>ktWEc z`;#$Q3BOYX+NP?gs6RUOev`s?$m*qvU=%NTt0+GBSr;!k4$s0Cp=}{PC+EWU)2)`r~%5XGz_ok3+Yz0ZEHQOo)!Jd(k zkxs23R$Ya&Odi4-0{HvmWTA3#B*`SBDrB@HOY@U}KkUHL$a@dwBQm-35po zCcDm#by%U|mxQLI+>23jr){M1upT?htsm`wt)ff;dM@52=3ktP`}wERA`f9? 
z(4XcZt)TF_g-mRhLxVM@Y|f+aoiaT&mC-rJ zjNB9)U5(ezgq&&x1Bg4|9Lb}7g;PuXpVaEM4m0+}5d(Zs*lumI2CKt&QyIz(_eY(!@{gk|2wbxX7l)wG6^tLV z8%1GP5h2tL-OaeyFgVxw&l2kgF#)j&afwmd1-NKYGXM+QV2_Y|fBiRvJh*}%33@<1j2MFajgol#zmQosU=F16Cp(~6g2{aWrI&0S zb?{*uM!01Ja9IFT97bCkT4wN%B_BsLSCO=cj-LL?zo;CY2VcR&4bT))fqS0H--*9F z5v&vBq36`cwBzIzDTwbvD)6<_&R1;Vw1_PgZ)0Dei)Fm`2>G5|?OO5AuS&&{Sd+8Z z=CnSNt*^KDI}ph8NF~!VGT4`#2j}p1uW0@U=i{XsZ_ScLR|r9PaN1_D)|{9lg{@CA z-QQCY%O~=Qp*wL;XP<{b*~2d@)(b@6SO(@nAi9L+(vQC~K%(ic#p5T5$PpV#yF zRG8*&e4e!uyP$$Q^#G!91YQ6qRIq>RfetiaYB-A9w*MwQ##FROjbY5m&L!9`0EMi} zUAlO}>J-4V6oxYK&+gP~rT@afu09(vl~SCYjqUqo_n)C`(6rHo7j7JddZY8#+su}M zcZf9zNQT=1EMozUyGF_h3`@?!aA`mdzGR>)^zzS;vovgzbGQg$}XCloejhZc8x}GLa-9{9X9Z!Lv629 zQ7t#bmPe-EYI(fO6!8|>S+LVDUc9){T|yNqf~lR`gs#0p-I5^rNP5p9XF29do}FZb zh;$S6ii@DArJE9n;a(RuZrn&fLWCwWIJnxixwsjXGfb;W!opZXU4F}lop_E3Hp4&v z?C)6wyNWY0viVq#*47#rZRmev6P8iDB5$x^_m1}IGv z>yOyYZO1zp*kN)wQ`DHMsol`qM)nLaUTrixs-RVZO&U1_fhI}oFarI?i=~E!hEl#z z-nDAIdGls+8+aX?H*Yp{VQfgmQAW`X^@@toC`&DPR7{jU|JAlSEf%P0N*DdDL?giadBnnBwkX2KrvP}R*k2i`Tl^KK&K2`oh_x}X)DMtUum1$c z(zavd(@&o0TOl(dSu7I^3#T;FE)_Tx%(}83ox<47B!IU7nX_j#iror4V}4`c7Onc=TO$w)kVMKWIZ}VD`IO< zzcjIi4hzIhjS#S#=sFL^*kyqe&@5ol_=!Y#h?_%zhr=cudh|C>zihx$_Up1SEw9FZ z(VxmbePo)e-HN%tGLm{37c&*=a3!T12mqo@|NsA>{jid@1Z@YLLPFZGzQhz{!&|hl zV|`8p$kLX;l9{1O7;@<14abq@=ST>+fc&GmK0s$1PF7vc6-pQi#yg$A?^(CwqzXZD z;Tijehm#0=lrD>K%qGM(+|QND-Oss0%Z+?t7rNuz6^zhA}| zUN=mO2OoNx-eqNQj$K{JN&6TjT*yfYWlhv7v(45sFf^iKj}IdJH%AoP*9w4s+#hu< zk16aP8JM&HowG4EEtGjls4!aK_8aGrMpdAu3)hCIjMe!i@=c_bdI$~ukjF{Pt!pZs zqpL2#f8C$PQC~u~poD6%#GF<@jwR*gUz9$8Sb*^0;Djeno=mxw0p>t|`Z3~guBhGj zWdkz*`!3zx9+#CS-T2R@ajrsD@^?kf2-}ALWoC`pa8~f)2aX&`XXGZ!!yB{%F)fgX z1E_lO(O0f16yrW#N;3s#H9&_hF$bW_ z5+X1a81PjaP zalZHtu6zM)c6h@?Otk1C)n22Z8%ZTVfTkhQVFrYw75OC+xme(6!(g+%9H~v=;w@z8 z4u}Uv4_xx9QIKcS2S{>ScGiR*S3nF#>RAY|`(+a8)V&a7t=lPd0b&0apbgyltlBHl zn-~uQ6er4JzMGMsdy6beVU|WhZ$%34LkczT{^Q5heK%ta(Z_&p^9{_*aUj1T@=|r= z%#r!k$LD??H4{)^_@ozsL}p^_#wtnHha1I8ZexJEIo5~J62QGxZRv}ApoL_ZoFv3j zYWVyEjF6N^a#6uYvm1jxi4C_}IG>v{&<#CoK@&6&hBAeM7R2(#LK{+)b;$id^s z+c@d)Jt%f3M?XlBrzzn+hrEXWWSGF}NiDeTAYbk{~m{#ETdsfE~6D z=#i8!O4rzF${!n^Cql*aZ0=8~CBV2SN{{iCMvy`EV1Jy!1<0CYfjwIVj2zQC^^JlY zRlhP$Fah{DkCo+X+PQsu5)eY2-!~Nd00B>B*I?x1dZCGuzJsgOv$U*p&%kcfq57-V z5)(;x3gdWxFVwi3LmsT6qJo%-vD|^3k9LXb0^s_y+<(#F3$*_qoz4GM*x3d}T}5Gh z)ueG!r=}^lEWvSo&5+d=CmJD3v{g_N4Fv>5AP{|_{$}}fC@wzh|dY<>f`n8fodeLLoNCxoCG)_nEr&gEf2Mn~W*W-rhG^n0^e0kqw!ivG+s^ zm%I-EVDf61Fkzyw6BM)t;qWn~PrBw-yC-(UD9g2?kU5NCZ#rHNr1_(VFm-~zGxYg+ zAu+|x-95i<;Ljgmui#CJES4bG!ydhcyONsthDA(OtC)SMyb0Kw+G#E?WZ^^4Y&`+^ z68DceymBSB7=wy#GP^$bU{&%FMe%Gm#n5zbZ#!1-o}JB<+;OV4k?d?o(mZP}j*E+9 zCwkjNnByMJ1v+oH&}_5$r_CryG7Ob4U56XH8Lxbsazg;MSmMCifto_-Ea@f;DGNqj zY!KF}-Z$2kyZ+hv$%z^H217KcWD~+bKv2-$O2lIu(u!x=C&S3bhGH1q*jk`{a$d$C zTRR8oIw5^2F7cguR28Yp$N+rx?yCg%GkOaY$dMG^pW|hGDdat$aG*(3&k_$vB&1& zu|5vmwI?Zx$A^z}iz+9yKp;gxeC=f$h!CA$9%#VVX<~keEHG}?5U`urNia7pGtlkl z2y;A4lnY`Lfiz$>xvPoFlv=N8io=(_bj1pY99?Co2;X$E6SstS7t>anP-b4xij9eC zC1jocVL!yVQ4k_B0f+CK?i6op9K04nM=*HW+$9PvS}@7rI7BD$C7ycg{ zEry~N#mEF@UYw%if|Xg{#MWIf^jo`RFjRDvfK?)G13mMpcRk_ag2X9_L-?nmsu30E zS8p3R`klMnz|;_+tmtzc8iH~{J~?9TlHWbA50G}0WP5VRGseuoyqDDCh{{%57cE|# z$9s>oNIUF2T(38dca=}sVO9PL51dCzT+`;tT=g?~(F_0ZSGO*~k!7g>!@Ab?gV~CM zpS=gx&oUZ~hiE}IIzeO2CEY8v)22?ns0_}$ZVn*Uq!VmBE8Mjm?ylcH>dzo%?%T-4 ziu7|=XmH!e)uGTOp-g$&IIJdS9tq7-%b;~D6bOy|&nY+9pkJe~9MnsZ*;oSqG|ks{ zXs1SD3axnb@g3QSmK@90(s+igRTpGzhiHQ*3}K~{ZZmTZf|=$%6d6FiBYvwp5vk1l z3Q5!t8jgL;MXeHYLfDd0QI5^)s@_61n&Z4B5dxe!;f|9gu3tcLnM@|hvDa|wJf&2c zxSRck0e8k>T!0D0Jlt#rsXtORckSH&A{Q)#NL$zEdmg;7#?|F;OlSE;nz86PN>bVo 
z)0l9e#mx6*#P(i)*(v3%y3WylH=ZVG=+eP{}DfS$_oRMfeVkg5KpS&Q3Or9Sk4D5B;+Fqjw@ z7q@FLnF8~aofP{v&!0DU)1Fk904nmfhQ{bbRk7z#)~?*LrFXP5zUSrl($a2Lk`P|cbiny7TlWw9D(^nX*_bk39yTqlLgEB|a=+Cp z{J|Eew>Ye@BynIIBI90&X{NX>0Zj0^)?OJ`d*dUmhawRb2}S5aM=E~tWw&Yd z^eH;SjT*cyFi$?ZK9gWF2p1+`Chb=R=6Wr3&*<9+#68S6&f+}86nIjzfc%?;DEhva z>6zQ;Nw5@ABM6TaVp2>C@~c$u@?&qGNiE_;6tEi;@SRT>mqQH$h`!2cI4qwaekh)L zx)$Jg5C*^u?-PdJcO4zX3^>2J-FL=}H=xn%yQdSgaE|BeBTA}&*&`&=^9&ZlmT(ol zAziC)Yg@W}`CDAFk~kz!F;CPUXn(EleVUoyxqaz#t%p^)KPM*^0JjPHO+?2N zFlbCdw9q!YY=CPSyW^oVdi(nDGhJ8nqpDKFR<1O#Z?n;4e0t`3)kq+n@@+K_clR_V zjW#j9;Dl^sxIr#2aUMKEaA>HmwPxWh)!xz&LR0`C--eDHNc1_?MoO?DS;oRl1Rxp& zL`NUpw~=!Y)5XB*=Yr|R)ZWn%Q#RHH$PeUfWb>5tozw6krOAU2eY4clJUnWdz2dCy z=W5CMKS2xgIVf{^S@YRzUL-VcSC8wVA0DyjteJjeesnH#-g~#M7G2Uvz%a6T6SC=4 z=tW87vw57Na1ut4@b0T)C{Zt8m>TsT6_34XXask&iKT{&hCRP z9}A1+|N9@9tv?#Ga2A#Qf{Wbw@K)6p1qDYJ`3E&iIMWmbCGMv9o0spL;y0!o-K3AN z8+J#!r|%S7y!Cnj`w#faJG22&rjWYDrt`EwwK*{mQC z(0ao^cR&5ODTjP<9*y$OPvmROw?rSjfM2}&h7(BT^*`T8D30QTd`;+iC<-R>H8&k9 zRD9%XCk=1^k1w=c_JCTVFI6n0#;VnIb!)2HgoC*5e5<-#OZf}NU6a0_ztsztMT&}w zMkgjpjJjh<>m5$_mf$~KUF`Rk_Z5$HI(bI2XeTa=$V5L!4HA9rjm|DRn9r%W#$BU&!psPCj)U%1O)}RX6lnn z-R~6QkYAtAT-(jF9nI|HxVpL$b6Na>#kZYA)L)$iBnrCD zFD`of`Q3T+=n?W+?o*0>I6tyX7$p;Q(fPjL?RoC7G5YfBy~od=KmREi#r!T`g9sKd zU#qqwl1W{q)a1V7)?~8N?!5EVyZ0O8WdS}|1a%QqxDBJD(b}Gu5pcZ~#+hQnPW5aS z8Yw9$9=jD>3JMAyKEAbF^%A4I&z?P_diLy}{QC6W_c`z8eihW+nB84yV^l4^X>DyC z8XCGXkSR-6F0=S9QzpS=IF|@TUS1ygIiJvOecYZFgg}W{*c;B*>}&}jAQEsY>|k=Q zjpMdfNEN*+B`v)=^oxL6A{=?io9O7Q`mNZ{Wa6`erm7vc>!<6&!}0(1CiQ-&c-=QL z($>=x{Qj5HYI9ejVB9SnvgeLl8VO4d4i463mI05L8XFrw9~{_08OQ&aMhQ-GR`uU0HchYNA zJlCvt7!umGfii%zv4R{$R+>uQbm@DZOLTi|u)G(ew%T+HA z{`T#gRObxBVRKy9-29jObk2JQ`gd6ktwAI+V>fq>HYYTy9qtMX*9%yRWh|TXzT0@fZVC&x>bZg^c=Acm6|H%C>P|rZ!Ii*`SJw~ z(!(6_pux)vY;(M9tVsV>WMm}Yx^Zt}_h7and+k<2XNplzytsx2X=fx;XZQsw zAtB*&lfEbZkC<{wF4l&Am70$PU!3mi>*>8ZTpK!tl(g4lt5Odk7w+jxeqB^j(lj|4 zKb)t&zt|ooWxqLH8&_q&u4iQxoSsgtrKP1&WhYLPAr?Y5u8rCq%h?1)gITi*Wv7kY zv#-wG)&C)bZ&4AeNWde*Ki?^+sHj$IJmwmGQ23@Ai2e=?#Bf`aK@7DuH{ZN<>(<%P zrsSJ9ZxF|(%RLFaj$5S41zJPQSa8PD==Sj=+d^wlQ+M}Wb93{q?(S^$ za%M>VzkVdb9s({dE)YpCj8B)7JcEDyxWC++U1*VNp@C^-LWV4GkUbUfyx2(VsFZE#?Zf$C#cS-o46y2B?KN6vNq$5--A+%F*e0>*ihKG47h5+lo(4f zEM-fJk9xV)Dr4_f(H`97{OYP-ULF&acphHf{$I)k-1NWT-5l1_q@JFhu+_oY*>u8} z2Y3(Z-~H5W@{Ji!OiYx4^)nsG*9=})!1sZEBA52|epgqQ$!MW&Mtxh9^A*S7WTh>g zRt*nacd$S^DS&{*$iKIJ*M9NO{j=lkklI>75r6!4CD+8vev=O$J~TAEI6gi>SWfKh z?WM>izak_dQOtQq_e?f1*EXl(N$;vR-<1JHRe&|2Rnn}^of4uMD(LyVO+KA7n z@}0|a+ z_sE5EgIrH`D4^>!{{D@sudfel*vnFXu{VxDwV0}kfMzV0^Df(>rCv%wA=GS!h$YMh zW3gr72?Nx=!C`pAYl!Je=y&GYj!G!y}QTgy?Wm?@L1U9XjyFi~zz zBP661w<8SY(e+?^CJA<3&&*8X(+zZ!nL2lb>|p`))4o2r44DLf=!;OWRJT%>(=#$2 zkdVaX?IjuO>wn44Hbn%Zd->tfr1YDplo&~8EaD`(_sS#+sAeAHO#dcpfGQ|gWoJ4$ zHC1vM3mr_p+QAZ&fVnG6KJ_^_w{es2-BPn5|L!>Mfhv3RgcIm1$QW)7A@7AEflQ&g zx;o??la!bZ(bv@0?j9T%v<4D2x3!g+^gmr2%x)ShF}4>nP*hZem(pYs_}q@C907x% z7TPR`&Mz#ijueoiW8%+3$+*~Q!Vz%V@d_g0zR_21tqo1Kxz7EZEg|-gO1<0ZN0+0G z_Mb1mRM;-x!+OLxyD?TmD(pe}`gPODR+7u%nmhy}vel)DhoPuIWrm*ZIb}+MkzPwQ zmnCtv!=~PFt}1k`PXH&-dLINoVp99C(nsOu=9Z<9j(K%;g-tFL{}qSah|kb&XZ9tR z#i&<70Sf|wK&E4vxiVDFc4+SHNy7D94uECeh>6w97rZtzqsJi=c%ohB#@AV5o9pf4 z)1M}e2Q`F{(+r=Bi)*shm3!cWS3IdB+-E9;)4}FM6g;*Z!a|kLZlwW|hb>`T*kOY( zgiJ66N)T_p`DBHSR1BLBfQ5c&s7R>vH|q?%c~GlQq0Lo( zn7eWB-n|z+Aq7{L=laIRZ51{P-vR<$ElFTQ%h2{yQ&Y#_K3tDBbPrbg`D{9)SlR#} zB)|Tur>pxLl7oobinPDKe>|}fKnL!_hv~MM1C5P0yh=+sV!5q*Aj(!oizuPBQnRwM zZcLPiKpk155`j8O4R-@E0pT3YYx~S{ymStlTERsivgVv0Z!bf&cDvYZE0{W3hM;8` zh+2SxgskTfV<=AmHb6T!^VAs|8!K`>wu0UG798xcX}S%KEa&~N$W#YIkKcsx}#H&A5Z2x3;K}uiJadW(dCdn;yvzdwp 
z0^|+Ud{cm_7#J89Gj$2DxM7KjJ%H5NS^!7J+`)VH;bcd5v#1=v2r;Kw_-fm&27n=V zZr}b1JBbfq33@;=G@0S3Fqk8tR z2$*RBV@W{Wjer-@Kg=~s06K{X4-Z#HNXz4=18DS*ii!%2jm-g|P9H`is|0kCS-UP5 zrQ-*sSn2i{++T34BhpUqlE!^_yd;p6bqVnyJrxKTEhh?1ztEn86j&M3f zyYZ=fO@5Koep+tp=|N^Rd|(huP@?#XAcYlC4Es|=_&U`~O_IZDoaY>i>vv+k~{AOup>s>+|W{Zzmh{xLBzG82H0!vXyp`(G|aY2&|$VKv$FMa%ZUt>4oF$;N@)TXt#?2|9=-r1>c0nXp$B~Oetv$Qt!21C zTYGtXU4(?oLJr07Z<_eHp^%V}#P84O5fGUPqreU6hlhuu;bGI!(fNy}%GaH({Zh7S z>O4wHsY%!l1hz;(4&^ZBDwu6L$*?ohH0ph}f=E$$x$@Q3x%Fl-kW2=^Y!U#r*oOdL z;w^PVFi0f7@(G8;TB+$p?kZrsi1m?z^+A5KL0S>u4tMU|8)gm&2v7uEXEz=?w7eg{ zhR}sN&k`gbo*UvQD=V9B&>qUy2{~U7&Z4a)k1vv~Ra*sBF_pHXWSf?flF|^0KnGMp z{1;E@ZpmZ1~xo6--6-gwMX*yf60cvl9aJ^2_$j_H(&^t;)0tYf=lZ2L#)$?e~xVgC*myArt$%)7F z`jQ)PA9RQ>(5u$M4*}n<#~VSRV>UH4ove1madLPlZ(u+LxaMyp0U23vU7gU@)>e#+ z&y9oi5tf1C<+hhCEiL!qa!`Kc@O?^*d+QuLEha1OLwXCHF5)FPR8&^#>gxV5rq12K)2M1IF z0`U(SlonSAQsDEG+>eNWpD^rL;95iS08Y2c80B`ZcXxBMo3;jag)j2EtxYWR?$9sg zkxB(1bDAsb>+FEV6ZRmE81O~f?IM(rNvhmth=iK(KzxdF0`6zw0D1<}B?%H7YP$fI zm9@FUZmO1mH%bRUaH!r4%SR?O_AF$M zkNNESR^@w6CR}$reiKB1Tm+Q>TxPGalklrIf=kx(mTtxt(={>f=SM}3+gc!k zh{muPnoJ;=P*G7?EZY{^LXgc^(B+_yH&rfMff^YBn>H6U(BKv7J=_80|Aw|_f7Aw) z6-lt5BMz84LSur3TUhB&9Up~|4h;`~1B+;j*e_l^T@Xwa%k{WCo2+w>%gV}P)E=m` zHHJ>}3iw(am*p#MZDBWPnZQ#-lZ4!X?Yxduc45(|kUNhlbU8Ew)#xp> zixT_ff4}xJASBlorCILjJ7>zE8pl`nti_+o3SYjFlEO|-PUf3RfinL_9>4DN9|`=4 zW9+<(g*9;6$D))Q-E$Tw{4J1GjL3SiQ^r(9RdrQ>`}y-f#p|3rJkg-M?VfMf8x!{) z6r-Ivb6ZV5p{J+s+_Ly1@aOD(j#8d~KmZm%BxDGRiM1&w7ONKNp+mm?gQVf}`y3Xg zm@zun^tHrliVtdeOR4Dq8}-YvepoK#bN3_(_X56M9WBz2Z2v*LU0znk`69z64eBi4 z_CT%cF)g6k4AzHGss{2swUo{=egZK&IXgoNP5uNdJ+Ncq;^N7WZK|#Hf2O7qfFOeM z*9y7}!~o{~`}g_2td+RtB6%UKB%96r8^9ovNO=?D4q8v^=qQhvVXaj)KLAzGCAtAN z(SdpbmFF2ik$aE%7AvNR&d*x1$)RX{JU~wx8oY1V5f1G1ZbRH-ZI+2E<+8cc?2z$N z(=Vc_y3;kz2WvwCpab0Kl@SSf%pVD{VsJMiSS2mZ^;$J;##{K)p&Ed}vG=l5sTdmTN5cqj8- z-GMKoVVhi6(b3W+qlTTYinZ5?ZoLtW`K6TC!NGaQfGo}2!Xkdv|0i$&WKl>Ia_{)_ zgA!=%ZxDeX1pb0JK5~z&+obK+v*+Eb9Ow`+Mo`{W)YSe$KaJ9ADTP#YIaqlKmn0N& z<2gDyLY8&+vjfIkAwZec&rzK~zaTB6`%+!a4~@F7VUQK{0Dpl-{feJSL?*a8%F+>(B0@nI4%lD=_*je4LR2KYtt=cAYPDUL z4rbxZcd{DHx{oBbQ0Cs{s{Hu#=PlrfkcfzfVb7%h_7oA|<2S>KlpLP^z$-YH>arq& zmY1g2;_u|-)CVOAaLE4n%|g(_1Y8cj0rgv@k|3HGh4K!(yu{5Co>)uG>gwXO--Ls= zN@vS(@T%vJzmE?(bQy~Yg!E7h)i}bk6Vl~BNo?$)f4xCt23ZL91=Fovv0FHhUp?r0Zl;=C( zFjl7>9RUG>LIgrIwZE@OU(E9KczdSz;L8>$ejFx!Uax#GZJ_E(-TBZOGBWbmnCS66 zB7I?P5n0&~-@7Dio>y*GlNB$aEo955`pRTBK~ls#*S!Hj)!NYTq_ZX)T0ui+NIk^h zO%RpzfcdNTY==caRlcEPVsbK$6T8ttNNQhRYU{JM&Go@-JjD#DWR1#?ED0EMmNhdhVXO^;&z^}cD;u`1mT&aDjuH0s z0Ne~Kepj=~ZVsNNGPYs_#8<%A?3Uvk(2W8rE1iQwB7nFmAQc>$gbrkx28nxpI4>2% zr<})*GBPr52mN9o#E=0@MK(Xkfl|Ss@0a=l`r0!UHm_Z=8%nfN;@y4=d&WL%I$R$q zLtDP?URZa|&dvrXIFCav)HUOI-3>J(OSvGCYYR$4jMjNhZwMe^05hR^0WE{!{c*c+ zhsv$)=bPn7nT$ytF*`q>Qqt4Yb5BxI^6%fj(<`;eSqQA4>K1!h_HF`dRohFYJ36g1 zzst-=*z&4f4(UPUh|%IUA7;eH#$K7J7e>m)!ugUB^dc4(8|D*kpxJ-^@};E4MD8w9 z2BV|%gFDmAEa&8Jisy6Ol7dzXns|RtK}-Zl)N`Ag-5>>(bw0qu^9u|d6mqn)>pFOx zV8fak7e@j-d;xkMpYSxfpv%pgt2VRN5B?ihS?;^+$OPwO|7PH(6a$PHkZ>?5Z;IeV zsOI({;R`Pq&FB~zi56(pmPTa@Z?|ZtK|MhRC`>ZwI&tE1nTI?d1K@{!2f;@zav`l) z(WsTBb$Q|!Ehd!NUi`S-Tjv*_Ztsj4C;fmn2$B+e?K4hJPQ{D$aQgTDFvMbo+NAt! 
zM78V^Uc8BcVdS5{ZhQ}r5>il-N)(7uD>Lga(5~mMx|D;5CKC3j=^ zhXoBa^-@;u#!6o@8jVc+Y?QVKg@Ay@@`1I?n%QIptFN!`1(G^$_L@GewO?0CV0D9m z&OF@a`0?J1KwVV5s0F`{}B!f3JPQ=5SC*wRf()68FOZV_gzI)--fu^Jy=z| zapT4ZJ3F`4@84iTqmVA~P$^d>dEXHTxX97gRGi_Bw0@HlU?iZ)JqDeCZ=pfxe1rW4 z8rm+j)`Kc2pq3Ac%}1C)-msHV%%S_WAPCw9RH53VG8kFtU!3e-fL>D&Z>|6+alq!f z2}gJ(UWt>H6*=bQ16FiNq-@NhS@kY65GFS|IyyfS=4tu()Xr67U!Gpm<$6UWsyb;& zNMHgIYJrdAU(5nEh;P&7>gx^w$hVS`sQ}1S6EQu;X3qIX&{j4!x{%2Wbph^TX!;J| zKo|!#!SDJ91_p?1%RkJ$0@3s}&XMywjCLV?)E16p?_H;XD$6 zE2$v+jE;@nCnfDdVg)EXk>okOwl+4&kbuyO1EFu~!8`tS$CYg4eXzAf)9L&95|FzY zo;MVj0T5Y%_w_){MoNCL=}4jn@+-2;CY%lyFPyeVYG**CT8tI@0AgBLT55r|yR*OF zG&J;ndA91~66TEC{#_)V@4-fDv>=+ly1u$BTgWN?1^YBsMdClcecXv;4ICDu-$CyC zaJ;2SubhwMPh$v65+p8!-0T@i$VEIuz6R2qyosruJ^cnD>`r)&b95UL{DjK?_>V2MV+*C6Z8!XzIa?hd>UR=q$lVYt{(5>l%L zD!jdnan7c69XpU0kcrX#Q+8JRDI=LRCEmQb>EYo~P{U&Zkb%Q;93KY<2b9p#fd&-~sG@?0S)-x>IwEqQ4`m%W zt%0BFN;P4Wf(80-d+EjLb{wprfwN;U+!s=;4h#(Z&(eS(3S}I*g|4pWBS%l`&8m|| zpc24;^8xOWpPvsUu^9OAX87v@MOb7Yqb)F_c?B9cJl-IarrW*iA~YL#jSCg4swgIV&b!Q}4pfMiq)bx`3R zo5#jfU1`399-6OR-vj;jIU8GtfB=R12+{Hqzpv$`0BEc?LwMl1X3Ca2OyLlpu`o0d%GV!ycDJB-MutfjEa03-pYQGfg;R zr4$>Hy}~~(E)Ir6Ie>-f0TjLy4J1qk5V#U?zH@4dqUA%7fiEsAs|;I{fzP6)0V5|R zK=e6KieJNL4$DM+2Qd>Cg*bKfYWXVyqzyQj|!9fXWG%)Q5COFnrf-zqK3KF7d6CKwmZ>B;!GUkiaa?Ayh zoDO6n1pt^CNSzA$sRRfL_Q9(gk%v`BhFO}`h#Ry(G(AkE9M&WxB^8lw1eo#3Ar(o` zoquZ7ILU+1?}W(w6a!OvHrFGIbg*^!Ll`NPTWKx<0g0KnwRNk_han$;i*Nva^uU0W z_4M@Y=3%UyKbWhkh`bs&K+*yUSzrC9QK&v7A^8O}URAh`JW2wcH0X5^P^oAER`~;E zpI=$Yf>E_hB{af}%e|*Bpb=j%l&=C5LfUA;;bOGN{e=IjkgLLq+|O}7wi=$hT>Ci=IDb#DuoC zwk=Ifug3~?zsNYNN~5LH`Xdt|9a7OB02`F_bYw;CfVH*PD?W=42nkU}YK|cC(n33z z1M$yz5@DE7@)Uk4F)dYZ*C~SB6al@e1>mycMBk{9X_)FHLsOv# zb+UVSSPVK%fr{NU#Hux{_OywWv^#7Q;GdZBSQQC8PN1mW>Uae1G7-?^zLXF zk^koBmkcBGmDQyhVc7=_<*?sk<0LPzkY2Q`O}HUGN3l+3ag>>-0L^E|;=&RpDv_?X zR|aC>@ql^r&_7~c>N~UQ;WpzV9UT(?)8Ur2CH*zJUF6CC*5qb@{S()j5v16~#l;#c zU)=e}`UVExpFX)s2b8}KW~A8rmO=}T3Oq8h#1ZNHuWC|3fusX%tBac99Szv&oT(^J zOG+67fBZlv4hsB(`$`~{c9n{z?E%>pt{2D;%QgYco>v-(5(TB;*AoU5T!wUp%d^a*-?;l8n`_b$4O$a>(LmAF&15n&9oL_lxsDx)bDO8D z+3$6w$K?a?5E%CTEV5haTZpinDAYx#k-hqjutZ(#wGN1guty+F57&~$s_dBvm~Y-E zA!%Cbysx4@Vcwo`@F8fY%JUNJ-&B4PY(uqA$4{xLV=O0bL~~oCgYIKIoNF^4Jv+;9x4k8jr*?O& z`k1uV702jgXPQ)Ku-hRteDGCLJrhIgw zbLjf1HPXHL=SPUJ73fBaCgY{{8TQ=PTIuiT*geiI^Bl&u>#cvjqf><5o~xNbiwSp} zYB^z5YBd!DLo_3L^U*>-x2scJ4Gnsuv0zHEqmP$oh8`!noM3>NTL_6`b2~8r1e#T= z_i>0`*#Due-o?on4}Vmuv8;AY&3KX1?qrmeHrT69Rx^>(Pk5sbCv*)Ir{ULJj#~!M z7T>J?{Z~=>(CGBz6jg<6((jYA;1sp8Sk=gc*w~H=8*Mg+4aO?R>(2`59da3g3`}Zn zM}LoMWB~5WxUqlxxMV@AU5BEW8NArZQVX-nh#syWqJ_?A*1C!c6~*MwOzg%UUs7nt z4|`qGeB~+%eH(qQ#hpe9v}J;bTWH^RV3Q{epPY6N^P0|J%Yn^Z-Wbko3I4gQ zn>R@@p0Kk1fL&+TYhE5dsdluM&j_SmAHEFD+v`o>mu?GAl7kOnW*#oJR8PVp_-v<# zNibY!HY97eJsq!*-j$_YPwII^^e2Iz8weR{x@6oyiU<>o7SacT69v^3DvQy2dLD6@ zD}6~7J2oGYO}gUjj;l5{lU-qd%szn^ z?^}SR$i@a~X(utq4j36?VI*a$zcy%^yI+NIR{$j0o%AJUtr{O@&9m8m|9+2^gs9dy z1vn%m%`j?cGAwL+MxvsLvRX|>kqTC^Ojy=^p!oLv``4A#RRFRexih^T$(OEl;oPU& z*-)$afT51a)$NHlw3u*+kB&CExGXpFpQx}wKRd(N7`5>i5Bs;j-)jp1)XyOfgucc? 
z-7p^4BUa;HA&e)qD^yLsr+p=vEUw3isY-diZr7LL+*S+?qeXS`ym)^?g!A>Uuj);v zYXmi`uR=f=(b3au#1YQbU1)uHa&Cw{dA@eg`vU+xXBIXC>`TRP6>{RtN5YpD*!=cu;mX^KUwr-XB zE0n7%W7ED_&x4IqI#_(Pz6@!Co8Rh3?9(tqFG-5t_PE>}eyl+u&&BIjP$J*qb}r)Z_UO^vTg zuj>f)&DUQ@Npak{a|f)EM)cg;br*oPbqx%{S2-*|{_FvZ3KqYMEfMVkYzK>~GDO zc|dI)&3?8ZhK4h1M*dP}DRkNeN8#%0I=54BC4gZ?!8k68IYIUd#&xRa_5E?ntNmV- zd*st$3l5DXtZ@)Z3xnA@q>r&scUlOVcQ}F=^W z18GbgKiOMiP^@z^+}yuU`3$(L^~TgWzo6@x0>Z)>B6NSk+MI}Okpu0?uG%H2f< z?&6=i@#SC2+pSZzTapV?lLBml+QkN1h;>O>*;}9+usS)Hq3b|+zbnx4e$3DHH*cZ@ z!NadEa2(440-;@Gndv|T5&H!=YUbO+a@8>4hm@n+_osNXL6g<1Ww#|_0p zn0JnYJzE9nSF^nuGiHX(qsBN0165;0sipVERq(?0sv6PjVy zD%PvsSv6L_C6P@_ZfdD>*)JsIHSq4j!Zscr{K0$>Sj(Vbtp!Gb(-QC>SfXVh?2F%j zO-P^wRQy-zHON{{7=!{?@szjWl;PZv+A7{q z!J7T)u}Cg9>)(++y(xEvzYi05Ue|hhgd1G+&Q?A%yfA8a8cR!+Jz3Bam>Dcd_YS)M z41-u`W;c4V)U(5KRj++exUmv9_N`t{BTRgN-n~XlFgMqWBjIVz)2E4S25)gN?h?N? zSbeIsyQr|R>j;64p77^ja;#DVp{<=AO48ceI$32O6e5&^ z_5xHsm{g>{{V_jYu`Ej@G| z=<9+tA)Lz3pX_xxT+VtQtq}dBHRWT)IQ2L5&|G<#uuP1mkDYrh9)B3fBpr@W-WnR0&!+b^DVaHWV!OGbw71}r?{OisXUX4ehv8*MuUSk z3omL^;J#VzskO1m{<&{5R*Xl&o&SWB^Pv-$)&{&P;DRrurWT9n0YHMXHe&q>x-&NM zV31@~kL^jJFuQSZ%Y-XRYin<)kJVJ&>~!q}jFmpeaTgT3orZwp?URtZZ%sb}M(pwA zp7HH@>LaRkZWR0u7R_zTlAC9Rg}88^ZBy0Us1+`UHwah~DWA^M?c^@!n~$)ZY*p`) z@)k$I)mU8)N=&W5NO&!3S)4T*I{MR|YVyt#>UkmgHV-O6I$zJTvPAKKyOetssq{PEGSJ>=*dx|(u3{mQ_;9k7aQDK<5Wm@a;HDV=a!Mtzf$Y%?wz@?(gHfAF24u{ zM>gF?+(_t^zdwt>1^gU-ddeJbo18aQdC+BV40WCFi&l%4Sp``ohZ$|B|x$q z#ag!uh)-8Xr{U+zskc&6Kh54da#`?v3zDwmHVYFe3r?ecgKMLMVRFFgXz457zqZpT zXwcok`> zSa|xr$ZlP#E6IF1D-Y#Vh)lFAhVu>jg_N2GkIOS9Lf-JEv^UZUPd~@F9o#929R>~RblkFT_Dmxp zX=%-PWF9_XSl2a*JT&&vxVd-FHGtsLB3IjYGN`OhA?dB&p?25UqCWI5)i8?)5I=|? zLyR|#)T`Xy{T+%t5I1d3JYR5z_izV~RwugtCW6Tb{yJILruRP4`One0u@8HTQ#Dqv zBC#1ai_F=tBUl~sjD9I4RR+-fW47kKI4iX^8?Jzts^uLdk3Y?N^Z12p1FpoUIg&fF(HLxq1 z^$Qpp_h3R~qL1`u!$>V8DTy4c5Y3a5q%a7C@nab)a)t|<8`822){GAaEAm3u7q**c zD=Q|k9Hy^85aR{09}e*8fj)*50mq}kdCX_O<_#8fXzS^4#tLb;2RqaiKmY67KWH92 zn1@LalVPWtJW#r~W^3D?^0qhQ>;*#|R# znwrF^)M7Z$adEa@0HX&?r{Qz(9SO7sKb8bT7ph9-M{2NoA&0nNri8F~2&4KY_#`l# z@SzA_?4ZGbaRKDQRFIf*RZEbXp2z7D0T44_p#q?I>-WU-j=!-1Kf%Se=e7RH&KwGI zWQlZs>HNNNadC0AsYxk>>*ImA8d-BWaPF~Iw3~01f5a5q;)7*;`>uP}-}HLJs%2Be zn#X~cTA74Gu>sLNvaX9%v#X12Ql7#vXd3|hnqmqoSgi8v{_3e z=)#AAaqU)$ev{N_zByT=-ox*ek3XA9pUS4Yv$j!_@56Y3(_0e=^zXv;OiD^)uChu6 zS20ikKhC|vg<~Qbb)=ul%A_+YF3%m=tf#5+HBPbg_3_MxbKxL}MYqypYso)3`6}F|luw_K&T5k|VK1;3q-JKefz!N< z6|BZhpw5yktgg1h#2gNE^|cCU%Zf^g3chmt_J32MxgR7NmS>67Szdm*X8kZb!V)xA zY0IwdF&5M-wB9~lyF6BP$djj5dvTSCiu%>Yk>y9;*$jGrrqTnOC;K0WQ9ph9YKp`N zT%_~IGiY@0vS>jXnA=_W>O945XZ3Uuc4#j0I0Y3(_Ai zC8tv$IX~;MC0x$QTg&%OF1fSpc zv?Mn!rjZnVwcx72q%n(|*bxpeYZA!4icLqnH3B&uTxx25o_7$@3y zR}_;NB{ZF2-Tj02nT+N^QfRQP2c_sxQM#6mElLJC*0R$F0e(J$0aK(#w7X~4`JG}x zrBElRrwTT+F<&!|*I@%?bHdRx^^&eR4jED07NZR?xkC>5UcP(@pyl&+-FYz58*D$I zWqx+B3JGXV4&rzB`m;N`1@xNZKGt=o!3FhOa7g1f%*^0)M*Zx1f$<6zZ(9iYQwoa2 zDti(bMJppL)y3T0G=kUoY@gv0MEPGQT?h{R`~-}!AA&=i1cW&&7LP9&iSfMqds-JQ z0p)$^lH|ZXkP=S-fpvfW{(hX#FLT3-%U(EtK-(I09t!Nku(}(m1itN4CUJ z4*KeN8J%)L-D$-`7Zqg>tjwB zs!?fo>UUS-CUjx%g9V2dM)d_z+LM*P32@xn_h%|P@4|u2^y4i#3Yx&gb+$uoPmXH8 zZYSK}zwkEAcJbaOlr~#LKT=o|bI6qUMLPb8^U)z~maG00$*n74@Ygo`)fC|}89 zQzdXS!GAuy_q$cSAQcy{CXLm!UuLOonVxm|e0Y!Xx9$A4z)5odZL!m_sdPfj=SMox zTtdtubNV#4!{rR0JxAs_Xmw?J^Fo{m6K=d9A&~6xrTWOtrg|O=P7Rx-4sDjrIqA3G zXgE31N#ZVeCj(wBPV+~B@FEm1pDNl9mN9yz+_&XcQ-o{=x1rhJ0yq18FPK9njQIHa znvL-C@xeeQqXZ_U5paxWt=Zsx>%M&r<1_us0MQUKX{2+~28QLN*7dmC)ntNaim9TH zG45@2e5Vl1(x}9!b-6p}r&?}B$E0TWMq8VlAPN(vPobL=-2bu?X=Gp7g$lno7hq(3 zbG{wGia^|JZhjN?{rh*rN-zb$VNz`H_-|}JZjahdVp)b!-@-b-KM1;gzeZmCFO}G$ 
zrp(Mtl1b0qBNzU3wRTDykj>_aFq%Dh@nQt$1yZIvaUlQdn*zUFu8&jH5vnNTJs6s` zq0VpSUgN=181LBhIIN$Qqyh$2v)I`(sdmZ~Ne6NjmW~PlT{&K`YY-0SD1D5lS*mYb zB-7nKc}+|6-}v0>wC+vAsmuJ^xhs21%(7O-wp2}jHG})$zPE@d#ls~XcPs5Cf}iLt zr7BR-)Km-%43r8{BJ3#(g)^Q7Og7V}>y6n>vAv(DVX(tC^G_5n;=D-hyg->=hA>^0 zLZ}@V)tbAzSO}}!8oNn_0&)TXR5S;rns5i8*Rs#1BYBgj5X@vFvy3WPiE3D061E10 z@(8h9Dx}&IjIHnebqeVA)dl{Ow8k%AYVa5x_&QTxghg6>=0x+22W#)PdQ~2l!Oqsw z^0wkygOBpNSibnMR+pPMZWyr1fKtEk^5rg)s87Kk-*Qi|$WTdGf1&vwGGIWtb;G~| z)7+nRw6%P!H)xP0z=1}IC5>@Ls}DKdOmiN!EasK@3KGN~s85|KQ)&~BQSUvZJ*KBc zCxqDkTq{k5xs{wuoJ6&WFYc?G4G5)atAGO^?BmyVq>iU-#i3C8NH=Jl)E`>9tnVow zx#LZl&Z!Po!kY^fKY7(Li%!A;BV&eztJ8q8W2a-j=RM=bS-*ISbRGkse%~j*;ZOXQ zP*?V=;IpGlrDxqRUU6rv;C35YHa^w~Re?iSBaU^L@dHbXsy6@?6vBP8`9e+<=%=@-q?N3!8)^yXs z+!4O5h=U-+lyvvKrvB`DvPTjb_4Yo(M-rr*^6#IZd01L3iw3%iow<>HjMUgoqrn_T zsg4gXvZ$zf$ZcQg1!D71u2~+9fD0G=c2UN_rg!gR0#0UT;!d;n^n5J)y!hNX115E$ zv(-~$R5Qp(=mNqbLM0tg_euERPTgPM1?k+H%U6;(Je&xRfC+MPavq;e{>+#T+#4PR zKlwQs85xP5(E~|Im*e%$Y>Vi#$4IQ$;6N|t?hz^}{G*;+){XW>%*^N-`Sw2F__JFr z{;wMgkegRAptd~T$nn`(?jO-Y!bG`lR90xVLT&*8EC2r1bxJ0Fhawz{Q5_y{oH%h} zBLB0PIpMhmM40ZH8p>$^mT0pjkRgZ4TVyo+@lAzCndABk&1N9!-xd{nczO;I9zp^~2rkVJwrF1? zoTZ>0JGvvUZ1yhrAWu}XAxuG~U%!=&0VI1Sg4pz)iR2`Sj-3l!|GzA~uEVBlRV0@4 zxLQ8goUPq(p|n&0$7b@LVA>X(_EDrwt1m9!0axA!yejxKQb>qwL4H6ky>WIm#w+2g zCT+Mj))k4_EnK-GB+BE)brIF;@9WgApEY=A6=vyP(9dT>6x#RnamtyBmYoJ<%})nq z?Gzaw`^fStDTNl*fWk0e+Tos?*aNlm0?vpoB1U6iwD*@wQaYj(Pz3xA=cYoD2{|@!{zxu=F bZL$sbZ2OGAI^({?KVMnRusmk5IOIP7_l=sY diff --git a/examples/running-llamas/artifacts/Llama-65b/peak_decode_throughput_bar_plot.png b/examples/running-llamas/artifacts/Llama-65b/peak_decode_throughput_bar_plot.png deleted file mode 100644 index af19789f6be74117698ce5034b64ef260eff3341..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 30405 zcmb?@d03BY+wRk(c@~vsNh*yNp| z{CassIkCf@o~|D13JS;n`2l$s_d^P6v!12lLzrE+T6<6wr#<~OEl1iN7yZ0D>Fe`B0sStYtwi3i?vM`UoPY0lU=e#&}cEA zM4-fkUF1s#n#J5)m-Bzlc2&MeziqbY$nW9&iHNfYClXXLHBKLVAFraU!$r@<&CP9^ zl@!$>EG8i#kvx2!S&aONQYroA@bK`|ID6Wy`uh6jZxv}I@%J(Z<0u~dJ)ep(bq;?Y zAt2?Czt%5jfT$O7uV_U?MER)W!-B><M`}tBMLH+t#?W+YB5UO=)~&{bZ~lEu{TNOODd9PjVVQ zQ)}KNo*wIAUU&R+aEI?SyY}3CVIA+29kyrAoJqAvTY?V_diwOK#t};yRhRZ)W2sU+ z8|AoK$GQE5+Cz&pDTC?6iZ`8|oop=(I{qOc435gyw-Qoc_|1z%OKl5$_H1pByIOV3 zsvzgKd~y7*UekAug_G0r;qcEN>gwvLW+^OR-``9#KeVf?qQas5#p%>NIr8Jbeu>Av zu|IGiWhEP~|*#?p(9_r|zw*i#BMVKJ-38E%W*F z%?=I{fk8o8&TZ;K8&0z2IJX5pe}2;Ul)}z$pB@LLr_1B>N5*=MCjb1gueq&bmFK>~ z+uPgD-u_C*-nSk*?{6>vpfxX7Smgajd2aFx=gdTlOR7y?w$;8nn=j7v@4MnYJ#qZ) zt&PjDxm)twd$Y&mEz)+CeEuxBdiPVx#MRgK<=NkxTb>`Mp_G)A)}Q>&zd2Im;-L?> z?5mpK(o>fXk(L~n zEk(YwF`B9e-|~}H-fCb_a#hx{b70`yoh^~wU*Fwmp4)RSGIC47$sev$<44FZk*J7> zVDa|xQNnKtse3YQxg=ydrN%C_o-2Cgj?kPO)kW-rF)903Kc|;*f9~8SLvhL57Ctf` z8XA(PxL&+``SWhu-0U?SZdpKrrGJ= zOFItr_SNIhl8!b!jCz_VqEO&DAi=+E87l(LuqBL5K;q+rohx?~-Y*T~T*}7A7F-!A zdgwu9V&(W-?}5I?$9q(DHf`E8Inq_C`VGs-O(Lnkzn`h3q$GamOJCng97kjCL=if? 
z%KfJuqJA^t6xY`eiHS*898aO%BdQSbsptg|_>k6Z5`WgFPj?Db=NcnJ6&0|qp!KpB0 z41}qioSdTXjF&ea>&EeY1t;01q@)fb%)7h0-9HW&`b>BCCHhsApI_?MUB;Yp9LG%` zVUm=Q!Fl}n@ioUjvEk&tGpT$NtDxjHVizO7SI|9A2nmT_(|dhXbac6oh?eh#p|*lM zj#W`LpFV94TEwo*=0O#oU#igA-AyYjEF5?Jy2H%msI5@)`=j%BUHfaq&C_5hwuG?oiN(G#O}9}aK`3zPn~`38Z}rtu ze;T@&YUkG6fP?R@>)`bdyt!81n&**q;@hs*j@iaxHbFr_rWyM-9zJ?B+568}Q=v}* z`I5}rx5e0m)faExyt$^nJ_te6{_4`1h&=j>Q8M(XL|T3sREsm~hkJ3H<4H*bDb zTRVDw+OZ+W*S5HF_{VLXAiIMHrG|3weR2&7(N*}bMQfkAaE`z5ofQmX0CU9mrn zv~8gVe$(FB`I1i1qAq0e_@R!1Q!X!jrgyEXkX*U)OGeS0Zlb1-^6cBX6rVDgI6#C?z z__pNUj>JowFA5&W{}8D)7mhmN{UrVETv0Ti!oC;9i22&=183%~TkOzNduIzpEnmKzDt>YLXXoG`%d%z5+fyjUbgIfyviYmxtTxVmoAZ?E&roWjSM!(eoV9EJUj7S z=fkayqm`n5w{44j*ZWL8UB6OFLV}ik*}CAwMA6L`_{HCg4>V;h=Md45{`9M>l=11& z1{PmmU;ed67vI}{GeYIid%L>31_DzF4RZ$K>(}!y-+VzE;gsn$>L4j4m3*u*J$~*( z`q|$@Teog4p84}r$?-K8Rg5ZeK|tl;ZR8)TEC&XXtQlCB+_ucvr#AQj@lu<-iFF3Y zI8ONhOGTt;N!#^Uw>*N6gzqDpJY~I`%*@Q;7cS^KI!cn*eRizz zRb%5)9F+N^wwm6P{@DKgm6wG3Q9!QAS=0LX_^|OScAhU%ptc$rk=V=hnLa6`;l;H# z-;?I@FPv$t<>e~JNo>KBxzabuyeqeNqLlNmcNN=}vG3x(muKv;L5EQi?{yHkgcC$l zaB6tTn@gfvm*V2$6y}5}|GM*Y)A zCBOanG5hM(tJfU;$kdp=_szHpIi0s|i9CJwtTt=^77ecvvHDpaUf%NIS5FRo==|}6 zT|q$s6>+5zbLW-iIH_8*16;1Iu9>$HO;3+CZr^HbJaaIyXdS&A zQ{c!@+Xfo{nmms~I$W9@F3n9%Oo)&`-6WI2v}ezrb@uc~UdzkRXOWSSiRVbX>vys> z*Nv9yLgnm!{(HEC6$x1vSS0xA(=`Ch_DGVnl>Kn~%WmJ%(NVJdS3{Mv>x*2fsxK#p zhK7#d1ZI7o`t?m9bbFm&1pv#gsTmvuluDfHN9LMt-TE~CTwGkYKYqe@sEn6XR~sJT zcNy=m4K$YGP2cxY=Gsf7CTZ*JyOg%qSkJk-Lo`e}Iyw%`-x@O*@@(Xd;}mt|jOO<> zKG?Zs>-OzS-k`9^QCxF#)5A``RSNWqu@TXBj*jCEK6j^l*hMs~q%9uCa0v)J82^I< z!#p)L)!lJms>bPHMptQ=tj|2X_!lnMsZ(eA-rsbvc6ZMPIu+|jy2;AQGW;QDovlwQ z?wN`2s}3JI!i)pi-IvX4Nw33&myESvFUZ4Ab7)D<$>H{!T?G>ZBZG+6V4f9*;fX)m zUl!CLQgyhL6CaM8URG6ArTgq?L-4iLdwid{1d1^sl9y6Us2RF1&-@BT5Hkb02gb+8 z+Z{eE{o>3o!`9zev&DBI~edgJR>fS-)+wn*9g9n`~eeE56>l$jWRK^mfI0m@f95zvybmq z_UzdIx-Z=(PlDoVZEL$)ZTs@f3W{sV>fJFZUsk7kF=R4%czOn(JI9QYUz293fYs^l zED4g{d5?`lXgw988z8drO!%RQD_5>4q8?nl$WBW~SAGVKK$%} z{5l$Xl8%n9>-+Z$yB_cEDh{BFGQPFF=*;BY>{&`Q?3$>Cm%paZR43|`!~B;oUuqv( z$kH<~=p>nFCz*8CBec_AJ9+82&o=%+Jit(^DU{zwGzgqaP97x4eG0-4A77c5i&HDQiDP z0sE(nG4o1M+Sk$#dcQ~V;^5#wUvtfBSGJ?U_>UIV`q?z|2Z6wK;ltaEjqQ#dyK!W` zt+f@6gM6VVfD1ys?8|E-myW`aq$KK5V>0x+gYXMKcaA{e zm_bhukJ59zD_wz*$j<-fXZC5G0*sEkapTUZ;dY1k{CxF_3j#s8x$6@&eRdz2 z@9A+w78BOg)WibB)io4+FXIc*rLnc`&=8noi21T>iZ2T|nbHnT+?$WR=>)DE867Q| z{yp6F<;&0Vu8t2aUcZNg9UL5L?ixfZe42kSzDfx_=*k_p^nnFPEhXR^ab5~Madqk$ z01*19SLNjlMe}nTW@ctaCVuQepQNDIc2oTXPZ6u%{H*Fd<9J$ITib=KXj#2~|9%zW zta$YkGRDTnWtIoEZUBIkl$Fr~CT~Xh;Sv?)R8Yt;{xaNnw{i+0etcm1p3(fW zix)1uB{*Q1R{qte^u6cNyXtJ)#(wbN!OUmRLL#aw6Xr{T7`fkXHUxM7@ zfHhi=ClAfrh5pEJn3tEgHhu2~Kx;-+*mrfVdb{p2z8Tk=98ZeOV0S4mD-%;w6F``E z*eG{5KRJ|&A|E<7-c<}>nzZ}bQKVJ@nO%>hr7jIP9Y21x{SXe2Brj!m$^^Vf>FD`mE56t&V!3LGbeR$H$tSs*RdigtwrW1E(+R*@Uy8;KZePcejGTo-oTfn9LMb#5}MHv|u z)T#<}#mX~}G$Z#6U?V!wHn_OBNJ>hItzNxtz;W0Erzmd2Df#rh1^3qY4cAX{@$sds zX|w_YM5=TcYf92%OmR%Z-k}4Y`#N#}_{+4&*JpBi`cTuZ=T6os!@4Ny_KuFhILWUn zD;aUpwQXz?>RvdZcA*MWll+HNZfGhylrg!jN>E_u{1B%^x9$3xfQJudE?m55it-{f z>(^;i)poxwC;s|%nXC8bFZznyHxS@0mH?zXW2^DaOnNaA)_UKW_2w29Do#5%w;nli z#MRwh*W7%u@9ZzjrsigyEn65BYzxbPBb05*qu%ra7pz8EaG9K%GJNj*2?PfT!fW`S8y;4?RPd?83KCY$e-m+u)@;k=Sb>F8x zjo$O~^PL*FuW(D>@ql$aa+m$P>uZ@&MiP= z%6ktoOnQ0PU|k8!_44w%ZT`T_#L}`7Z_)MR$LgU*oQ#yTRA8&h=f_)2@;#4G{_*kF zq50dkZF@4l@kwT8F=}sNp6JME0Y#yHlDv6Mes2U$&-s?ej~{FR=PTl zTC$T*#vFMKXJg6yZ55B7U$?au`HAk^x6d%zO{-{`>P>J!I9fI6Zx?`98ogT9{z?u? 
zbiOaT-H$@MG<`G^;TnMRAE)hm?&?*GdjaEU`?@E_=iBp7psq=;Fi!Mamo_WQgr@wq ziS$Tk30=xE!q{;M3$GU0FfuCB(8v2HIy#zr$&zbH@54An4<43%lKuV6NM}IqA}qn6 ztCdw;?xKAsr{df0Hokw)f%Kyb8U)PIla4=f%1TN)=Xh3JzIN?e+VGAk-8|a@x{m5< z7FFj~e*mR?mdQV-wDuwb6`h(C2~B~Xt`hmDTvo&SqHJ;H1BS%8_jmEo8xk~5^N^4O zd@mbQXj?P-;Qh@F`?2=9u3fZqGe18l4CVOE`{IPW`tV`Lk?U`;pI0aLJa>vWQ;xkg z*??3@|-Cm<_nWW2rBERZK-@rh8SM~KLPgLF~YGrt;?niW! zxBNB-eox`@Fdv2EMO4;nPl`s0-9g?`@=V$ohT;l%G593 zjMM{b)4}X^R$rApy>8})bt|p1Z>7qew6yalzJ0O>%ZV*5wQNC3GVFM8e@9|Bkcr_m z*}hAcE_KUpyw>#jb27eja(cMv>eq*}KR;-B&CTT1-C3kE?IQ~A5ZvbBw)|hJk<70r z_b!B0k>=i^lr51WgyZy`pFL%h>ndJ9b+=G`J`XLk0}lPj@86aAC%!2=p-ye2rKK%f zk)+AT!$TWyd~IOO;wjO6G;;T3%v1f*wK;%8B~Zx5#>OOBpIPJd7(L))@QoYGq_!t8 zA@I#pO=!?fl>vh$-M=4(JmTl!rHh=8YH%1l(C4Q|g3x^&8mQgoH~VX;=ZOKUh8*7xdMWAb|)S1`;X> z9U2oWYbmH9E+HW{z(C_$8UpCoHsb&*g71rwv*tnO0P1I+817KmCm^8g$b`kFv$V7f z4h|+c5HM{^DBtvx`l6X#AVbE7J4BHU`mn@P>m0iNNoHs9J!=!NAt9d)=QpH7l1UBzq!BhF-bCJu^F7{_EXX^{Wt8 zZhro7v!;P7HI$$j`@*bY^ElDoJZ&2Fxp*?UPcGWeguo-czSid%j%nS-Wp1~3%%8L=Qv*^ z$Q$cqRd8}O0X`hteBVUy$)ZOLuDH02q%i_q$9mtGJ}_IsCaP)Ent`Xt%*k0QT6Gey zaw0k)BI$Q`6GEpm%URd6F>GwJ2lY zAoUaGJSIo%gF{2VTGJ*vBQ4~f{VflE1(4QOsrlE>pQRms^B2*^9Gr@|fpdG?^v>qy zeP`}9{qa>5s{b8z)8Y%7RfRRqtyVR)wcMVbp7c26_C2q!yh0f)^C2z4dUbV{{QUet z06s;~EN;yasJ#)>2PPyhzkacr4b{1OSJKFt704g}L4( zD2pNTd-F-x7HA{Cl(59W*G$+%^UuSmtsu

>v~1Xe5CkeD(Uu@{qfCCEXGc+01SE zCwRf`(xXNv0oDXYM(zk*2ly!E`Mt^1Ce$9(OJGKZ%e^-Oj?esTqrokB7xq*&Ui)l- z4)oxMTdS@1>|q@o91IKz2~j*y9mA&e8*OFRWzf^Sa2F2GLUF z8LO|YmX)*0)b^Iq_VUU#(^5J6>Q&#Ht9Sh}K#Q&>=JB}?DFdlv#k#EeMn){I|G zcjw+csTc*@eUrVhw!zpJ*@pP$Xd;mX2hcIHpmz<#H=4XWb0Vea$BvdiGX`R4d_gO8 zmz{HH&gnJ;9TN;x<2%~-Go^93XkK2VG{HMPRm|g39zQ2u;=m9{0HC*&RBx(y1-qVrIjehd47#?1F2!K!t{Yvu7b${?)5j zF%#%d9-5@>TCIbVM&3nk_39A93(N}p4D{VctjDb0vkS~8FCIU;RY8X|iCyV7tI&XK0fD_FG7TlEd(0;B zw>1hha_7!#)2236^x}~jUwF7@i`>3ckf07fe}20ABA0D8v^zHF9$+!wPek#Uotc?b z`u_bpkiMInA?<;^M~}*YE6TJhp}oMbBvDkL7h>$>BuB_XoHcQ2Y3WjS30ceZGN95? zh++!uBb!CtIK;Sn&wUW(n?(CYm_FpK_b10JFJrqzz%y4Mas4ci4KYLECCnnwOW!HW zRIa-E`ce>Y_D)V0urV^Lcg8?T>wOcuAKBk{F5BOdIw!s3<3kHpB3*$af=U{IfBk}R z>E2(Gop##>3aSi?WC1fYCWfxAF7TcHAQTL*$*rn7kP8TDF@Jfxt;?&L?8H7i<5mGS_e1Ud!l*W{jP%Vhekf^n#Is3qqSv3QmV&swA zN9ykp?wF9JsPx?f_4m=^5*`|WCgt#E<$=g`XXa+q#>dCU@>GfT+{_CViIw%df@b>Y z9!Uc#C@@g*#GpA<40tNeM4No)PFHVl2oS-NTzW;HCd(Z=A{RtzzTIj!tEI^EIXIM^ zn1|q2`|Ow{sHHpLp}c^b7jGlt_!WJ8=4v@P*SVR~Wq@?RN6=4~Q=LG-H6K65Br_u{ zB%;D4^@@?r^`HSHrKPdT%F04Q7Amex>H;}qR7=0GWcpF`u4Uoesg@Zcx3v5?R<2w* zda?&eWOc!e$8cZwS4?D6lU4t>Z_A~mLRACsNTx~AEjCh? z9u!owK*)`EfDa*}Dw8HKVdL2)<*vt%;E~Api@~fM?VRC9HEzuG@b4MT@?k%1OO^xkuhE&*l;$ zzyWdU^=puiLsavES&E_k)cAm+va;^<#1A*L{;RH&RiThdw;)J?)VO$f0uh+WC?vq2 zABNLS#l;M0C-VqCz$wi={Zk5g>l|{ZI4Cv+^6N4tz^F<`KT6}}OPiYb0mhcz-+bE3 ztGJ?K5u{y36%{?C9QTi#3EK|V43Y?BWI^P<5(M!TRNg%~&IYH~o9i36FCD>3A+E{} z)q+}QZf|cVkOX7}5%5-B7suC)ym;|K5kx*g@B~QUFF|xLpFMjPz04vA0@`?GZazK| zIN^ymwEQ$dr_VR!gXv;7GBSc@RDu*oMBg8upFxh#vM*f(eJ&Zj!)c#zbNZe^`A$#L z^aF$9wZle6T+JVvGefp0McyGm3`IsSqR=q(6b&!8={}+)?kn=Wh-ca1t>4&F743$) z_V(t6rF4uOWzYbXeSD80mVv7Zcl!Yw-M%+5`uW{o;$R^1p7y3q3@G61&x}id|M4S5 zOHszv{l=TE?hGU7xyT`;>_Bg&?t7{J{?^9xP=y%rg0ik8I!yN+skt3c_tRQfNGKBk zkElAkcI_fk8bL|;{X3{YNa>3xKvpyFM}s*`f&NedUVZ$y7)p7-wQEa=LxMwCy%Pr< z{ImHjp=Er+QEzFC4GaQo)VDAd){qoHVMXb6=DQl}=>bos~hy~|KMk=O!bV=XmBR`4x}vY~w3k|)YPPHVZlccVra{jEpHRlc)?&X zK_H8j-%A=^0O2obC-5JF6!08Hbw}saJxEOjAG`I0w0W>W6eYj=DHD=&1qy3Tk)Pkk zN7lTQD|XB6a2#AT>zUZuLkbEugmMT+kQW4tMPd-nh>^+B{cDbYUJNuqh}^|24B**l z99@{2pyS{V`#XxXunshkL!4T3xp&;s&_UxqHa3zPep!-+6bOms%c0@1qMT;6zI(SV zp+y^Z9UiBG{dsy&N>CDM;v)mJVVTO0~NpNyEm$Q31V?;Eh!emrEoINvrlA_B|C4PHF5l z8F4AQ;>gql15dfDj5xl*G)dBcHrKt^khDp_YFCrhNnWYei(;es{k@qNY8$C%YtDwIdH^#6Qj_=^5{u4Ek3Q!0+YDjq99SrJeoIV7@9X?dtEB zzBcgf+a2H8sr7zyFBXUCk>|aO=iQDKM~Ih$(Y&3MBz`T&shMY~yv^%fdm}})mh}Dn z1BVZM5Bw?z*^oCW3W$cHR8&+_K0v-X*PAe7 zYM)rd>LzI;RVw+-c|*7T?8d9@QS>#%#gx6h zy+Ac0IYRkBwPRX3Y7c2#3=cp}*bLQ(Dh2^vT-nFzCnF)D+d7fAYUe#MDXGog8~x@q z_W0nFBxDRx($D@H41u=Khvx=|VIsYF{n-gQLJe42HaaylAhEO-oZ9i=I~vo>>}=YR z(NHcA4-c?gX00O7%#meZm6V9rtM|GSEE2Pi6z}q@lFXgZu~K*O@a8XwxVPJ~je?0S3U~E211Sqh=FVh`hYK znGgqA$SYF3lHTFN#AJl!#21vbw~Iglz)sLXY|6HXjEv-j2oVUbqZr2#rXYT0$MD>Z zKH`UW?AQUfz#V+W$jFFxis994b3edkFo4{=yrpRY`sR)?H{9106=>^Ix8x7U_8sq8 z7mJV;!WQF%N3!)o?>yX5xDnEpM@#TsSOHB<%jhqs+DWr9-v<5x=zf>kZtLy6P$NEm zG+%X{P?QqqwvI_&Cgp_`Kb$4BR$=IgN}HPnjvhS(2Gb6mp8!~-(rllfj3g*}Uka`Chu)1{6{yDUpv?>cDmZt* zGTo-?hzi*!$t?VcjYlL&O0Lyzw3C1K7Lh+UU%z!7BNqi0&9(2nFr|&s4CjQxfnN*1 zL<58hW#SGH?D`Xfi_ykX#Bp-CHko1KdovxhmIWLkA;G*gn+ql5&L|W;;yi$*<3E@i z5dkoP8R9OCyq9QFB%XGPG77xrAz}1w)HzK`>ci~I94bmx@#Vp_q4oTxQ zgy|+-{s(a~+rh3uaywB<<<26}k%k2j%HUsk6C8c`#fzH&4@|%@64DELHvXg>xHRQ( zAbAc+i7Fv&oL8GXccR4;;(@virpndRGbZ^A*} zoE%taf{dl~kQjktiU#ZI>PC91c%LF4H{K*EU1Fd|hQ`y)jfNtGJ=6wx?f7zz96o#- zPu&lNE?u1%RrTwtEoj$!d^M)D@y0cHToBEMb*soHRBuq*PHElz-G~BRrA~fW|FmqS zYJmCU-F&3MaCaZG{C#$$lO~i?^il}x(yvb!el5xb%@n%0{R4F=&~~?=|H^W#;`#ph 
z87suZO0Y_uLqnHHdxez;nO_Qz+7t6(7)Q&N8P(aIJbAKv0@aq-eTX@#p@C4dIHyn1EQ1()iKd;?i@OhE>I~XszGbu402-cA-*mJBxrtE01q9Q(0SXH zT6qOtki7`P!|pJ`PC`s60JtUiZI3h3Zm(-Z0IfZ+)>6j2xseb&y|`BsvKl++@) zE{LIU_j4y^sww}3EKcDCuvIW)EsB~5!qG^Tj&c+~{9vQk2t_nY&{DOaNWfCjk|?If8REbbL(H-zH#36;sla!Q1 z1Nt-NLtK2kxQ52e`4dtrR*0`!W#BEkY}pOVwv}XQMp$fUo9=B}HrfHPS{sA~akzrb z0!u5q<%GOdCIzM8+QakYHA8j>2+_{P1O(6!5eEXuE7SoP4p7;5eI}n)uNE{wG9y^a z(O-C0jiI$9qn227@)ko@CS@5pQ5V$~QuZZy8zWkV+X}*JYD}PDdS`Lu*>G&!xbeUB z43H-R>EMvG-}J<4fUGALjR2Z%sKzh}FNVyehkugrLomYW+1iQ%E`$ODB6)J77eb{* z{RlWk@!}y4T#@vk7<67g)mXKP8IHE*hu`3R>2E7gZ7=j;MPk9$@uT<>zJRg=4F&Ci z^ut$TV(1SYIs|d7XNsRha>RL?75;uRql}ccRJ1)f-GwMgNl5`2vUOmUh;waYbhuw@VZX`C^sM5=_%=4Lh%53 zD%>DZlLIOrhlkL(jZcGkN$I`~X zz3q7J`j>DU0u<9xkT1~7m{$)H=(?2_K#K=uAIm{ll*6AM2@skI5Tx^dqA!HB3;!x% zq%Q|PAU&;sKm{wHTfN%~`RgK(S>O6A$;BxlCnD8y;{ISqZzwxvYYfN|r~zg7cXvx{T1)d4NQf}Z}f^Ae4=@T#vJ%CCDpJBKnXp&qI2u| zEIB15Zjg(?SP@7ugsQ=(Y;9kM-!e75`h^}1%r0*@)|lX;e+A-%xHD3YgXva#IFAmr zDbt?r=*QF~b4~R7LTc{xASy~hE)ii48@l3t7@=RxzxQ}CH9@>NlAb8WbQpYVou=AL zW=Yt;L0psrxG2HAmi2#0ooX9}$dPndUde1{XLmbh)GlZkj4y|?Y3@ZGBYzn zw<&$Cxw*Ndu8s$S;XG7yKxQ!Iqdwa+3>T&>7*McotUV@mKuaG5iuh$*k%8G-{*^!7 z3FPkM@^dZ;WQf~%_W8%XEAZViF_L+#lX%?tYnY}WS}VT9J#Xg5FAcvrA#~jfupT&k zQzJ#~yk|^;Yf=;LZu~E*VFy`>I2?jc4=ui>9h-lUmUahu14~$H$qPk;i<|^F+LpXQ zndVx&IC$-`PsJZTaQ>Zb^I7=0xoJE+eky#DjeYYZ#(&{m7^CFqJVrWo&|VeS*YhHK zmEt2{g=gNgXOGv}v-^L3eTSC2`00`Q!reQ716%=cvn~M+C4sDjVKQUV<-~~-1c1-H zMGfE*6l8%myNV+OXEgP7bPr1>M4!x~4KnLDY&d@a1)K1SNOg;gF@z5}SgsUd`!a>YiO5R8ZtxOz3KwG?^sNyaq;;{fwmu-8lb!tv zq%MuOkQ4_i>zfmMWOqFxZvFg|KO(Sm_~z2m(&df67T$O%q7>EN3MwWzT~hEUXitQi zDz7Yk>HUWX{akbUG`MlYVI(qSTFBG)GfJeLY z3%d}0KKkKb?euayDR`^-!IOX)|A5#i3g5kJz{k*#1-dnwKpMy^f6ezOYAJ)S z6mpc+f6_LRD&chgmA3P9bERL#Rym^M) zC#HI{HKZBV=K;}TgYg&?t^4C;kz_w`c;eO`Tf(bdtxDMb$Ge}QSFfHKVnHqn5U=8b z*Mc7%UtH1)s2Qk6(aFrow8Q8m`#(P51)w8(GlFxuqM~9^!6qcb0PG*idKPGRwqA5f zE1jI2Q2$bkkD=WH3kv?+A8|)$B|@p9;QOAsh_ZAJ85pDvoXc#0%7R5OrV-(s{5$br zB&d1dQ=}{ZcOG@v{P)O+KWvJ`7=Q3!C_=0^^&(;cE?)}R2MyEOvMu6Ll9HrYn~9+L zG)*^9X3Ru967}Jlg6$=}Fb*1uAV#BDd9sBE}>pwkko_OWB<0c#zwoW#=w9kiy< z=PVQoTJV>KE9G$P^&&+!7VbU;JWu28J#`V?*WV_A)YQ~s@Rx;!g_xCiTmJ>5WOcgD zN`DBBO+|iMF!sxD@(Y)2ysbk=M#^3W1<`>l3kB|nhnEF{%NFoAwfQHGk$IrcpFdOn z-~*E}ykUJ*k88p3Pv}^WuWUGsnMdlSJo{^Ch(rH@S;|;X6Y`k5Zj_ooy8Z4N1yg(I z2>(CmQ0&}h0JNuI#c>{7LCzC9AQGZsOXUTD<>=ki+()}NrTiuU6DI;tmWD#1Libk& zf3m%uoxi_7MQY?8y<#wE6vfBa>mdWbpQh}M@NgC(w8e;mQcyFnbU?B<{VWw-m3@^; z-;=|E^(aMr(NKg6K(SLQxj7T)T7$>&rzC+AaxWX|UzWK>XO1HuYyyqx2JLCh z;TkccU*&K;qnChN44vkiH|f0GKB~R|_ z8Ma0575i3+iHTMI8fwYm1dD_6ru|%i0Lr+j1SM#j*?13L$S=9nsB6sbnYiabETpIep_>geT=xc%i#6A)MKxPD4gSjJIt|>1QRV#D_8O zI^FBEvt-pmIEQH$V9i1Is}zp-+EQK}@`b5(ZyJP(Q5lUI2<0v@wVkC=rIq}RbmRXz zYFx!r4C8^*=cmj-tU3_BC}K|Y`ZJbd{+Ciq@BWf&4Tb3qI;5|f#>Pry9x_fuOjY2e zw6QwRT{`6dVXM-q{DOl0&??Y+(x4EQk3rXzK&Sk#HVf&w0wTFeOzf(ii@+8C)0|1t z#JuSRkM_>Q7e*gYUR(c5x}nfrBI7nd`INW!W>mXDf zl%=tOl^XL5{^-<+6Am`4vgA$Yk!NLxqT}MiF&AJ5`U^1<2#rHSLjyy4*Xm0N{*qbe z?#=re$f2weFr3Ic7@cB7r3d->3d$JqGt6K>rTO7+s6zZ|?vnC0x&H8^h(T&1f_nFt zFXcXjzeJN~2NfMjU=5*eVKwR*HzAK7!50tXjkD|nC_Zyj-xQ!3$X*SDDGn+xvc18j zZ@xdjvST?32SY?PWJ#dL01#YIKo(Ji*CcR-Nai58Iv%N@;Xz=*(FN=6zDuBS3Ni{` zKgmBQefCUCEOKg3p8L*AUr#L!XDY5|_%Zh4V(V9Bv@yJ3v&cYGFAUgtuGbcA_@;$g zPdgcBe(&yG={9?(g_LTP0GhvE9R>=nWy{jW$qAQ92e9As>}W`mURXEIuY)xbF&M{N zX(@J){|Gk@@I}n`lt211z0=Q&Y5 zz#W9x$cNd;Z^1MTwkCAjjdi`b?NDXA0pj47-H|?}dZ2o7Sy@?l23pR(hnjdGk_Ast z80B!qyajPCpBicn02_PrrZAYam@n#EE($V;ii*-RFeEuO&%bL^0txEwt_%mk)pXj; z`ug~4jr#n$XfyJ9QC{NxX!J`U(L;ox>BGni|Hd0<$R9ZO4oFoZ>jJ%_bq|2K*RkLh-o^pM8ky@xsBnphT*4}0 z0+$AAz4l5jr%Cj4vF3);)lsgPOSq?@$~ug3SjT9!NG^-LvWbDMcaE+ 
zxcEi!3F=&gC=#xIhf&sUMl{W#0@c8=;hC9=|3q~Zwd;`;H;UjplR4N!BUD33GBzy{ zs}d?EL@5vlob^0?6d3j;u-3g@qqHDw&MdFuD~6f88`Q!;1t9|tNF(x4nz(`>aZ2r$DISTNjbb?{xSh+o+80O5Rl#~$2 zKg23zW=_ZoeV!;MVQt7~#0rH&PoltBD*EBkg%f!h(iCV#(j37POoviU%)pc)PzD6_ zf4?WuJ5d72Y$}PnUSiDvzyBX5QUY2uS`+AX$?$xbz%uaW*r!M5kSq=VoBK!520&zX z2zAF$SWL{~f4%K4$UE>xRGG{H==|L=A%r0VwK8Y`xe_Zc8q#3hB(STDt{BoJsxR!w z|18CK12NnN5P*&aeT^c1g(O+>Cvfckd*i{guXpPX{%7MQX%^Gd0G$&b*w@p z5{#P?InBwm(v5_t5?jP6aslTKGmeyKB8$WE;}?-vM%-M<0y7w=f2H6M;v(%Qcwx|b zG~~N;TskUXW+om{T(q$S((wCTH~*fsAl3lv3EIR}$CuL6s2qH&jYj8oN(uu^76dA& z{>8aHg0cgszXD^m3!TQ`M`mC`oC!TRF1Y^o5Tk50KUC4w6am*nxNDWRLhRBEN}Ls} z;5bK2&KlvD$`EqUJG8E97d?Yj%n{_h(#{t3{1%HJpvZ{h}#;(VL2+C_Wq+-Z10wh4AwD=9W z1u_zZd_Q+S(Fh>Y$>~wudF*a#E)*$#i1Npe|4?vl#1ub-6X)wDJR%|-6f8_TWmHvE zcykUM1WpEKS!zotxL*r?iOi~BdyBEca}HyC0s`?ZgDaGfy>kl+&JTlIiKj=UH_f{G zjXo3bwR=n*CQtn!IL_1_#D5KDR3G|=W6X+-w~o%D&=agr zkQGt}zpAqkY}c$9PlrEuKdfNVd-KON=iyg=}>`4U4bhb@yQ{ksK^C=@1Njw5&pE*H*(&b^jgNkOjyFz_Ye~48J1*HoH|m zy2}0Yf^)Dp0D|?4yah$hwOhA_gk<>zN7Vo#8a5T2Qa}a8bi$*jKae@h)-Lc?d6Fm< zXt8gj0Ofl0agY*>d_)9k@T>$z{F5;ZD4>BzpwCX2wQFi>iqX=z{vO&(1^}?V|Gc3j zjrVvzH=>V`!cS>K=qFJXDar|J!N2kx6fDyEQ2s<70rH^yf!i@|_g{p3L;E@g?um;X zCBDD?<%Wt&!so#I%1)CQ2ud0Mb`Epzw6BUOkN?d2-1_uc$F(2d)uxe!Q$rjpS zo~_(3Ej86{tf$J%+=#5tY3=Ng+oBuK1fxtOp^^5Qomxu~@6II7;oYNuDc|rQA}9Wz z{4=Zm5#f1?p?ndWCekYX8ZZ!JAbz?*<^F4Ja7?AtPYop`wSq%}&Fo))wMJ1<8}^>i zG0+*w#_`H#Yp#@F8OiM%Am@{zDuGlpyY+Ozee)45^2A^RYth`4ESP=>AmBO(Z)2RW zo*OnNvo%RQ7+wg5`U8*8IpnX~aCBjuV2O+70u`zd{oT44Clg&WrVxc5Vq6zhEE(tE zbT$XE-`c)}guChTDs_KsVN;)nI-}K1C9z9E3YJ< zSpk|KEKdpg+%je0A`BG}={Me23S18OgYIc4tdutLU(f8cxo+#U#^c2BZ264&c2?R= zJfbfR%(=WHy`tMg#hBElykSg5ox~k0osi8j%)S}SzasQ|usFTe4X z8&m!6?OPnmHe;zfZAwq4z%1(S+`03C@!q|{xKk%@@M8`t z#U0Qo|N^Zq)X`GtJ!O&Ioh_nEuu)?MBm+Pm)0_1GnT0nqq zUr=Qw&h^SHXsgMuR=PMh5L!N&_W$*UvAo?(*Hbg$+vY%R^_UrPl1Tne1R|)U#5D8}3DK`dFlyFA}1q82+zJrEB_d?t}5J;mx zyjDR1Xm1K>w3D7!q`52ctzPmyES10&4$S~Ml(sP~ zB!)w9y6?wJ5Cb0^*JF=6V1M!v^Z?0yE6D~o27tdtlIMypLLOoTp#rhub)@`Gr3(7X z5s${TzoX?L>^5Ek27L%GFdGw18l5BV;6OKzQL`}A8#$X?#l*AQ^+Qb0ylsXX1oIB( zprJ(EVWIw(lpwj$P{e|w!&L*_!26pDpY4k|<4xDZm~f2;As}GF^reW-2F?p3s*4ti z0cMd`Dh;qQGm|0e#Vnf&f87XYxj!EF1h|mgphA`Pa8kIggc}M~csLW`E^!cEY0b~j zA#1hs8FvWtYRhC}K{2aC5(9Clp~K+HN%`>eEXV*XPLQbYBn|vZ!MJ_^B8r-7Sa|l2 z`Z(M-b$_u3rHy%f+!z2zT#0f=40%dlUEeGDxXcs@@d+d*>fB%Hh87|Src1;w1J&KI z5jQqyX=%YfBY~@MV1C=WZCl87!9$z5YI6>({dzZYz4T(1;E)h984cjKBwDLNsG8H5 zdhmLyRuH@(8*s_S>T9tw7AzRtsz8+{$1bJo8kt%{fnqBI(`|iGZJ@LmV|uS(LBcg7 z#c_InF0tzDyfTJieBz@e)GJsVGFHvDY#m#1Bsv{CA5?T78(+i42$e;DL^O>~at=4n!QEu>2_;R$`G z#(QWZH{ZGoYUVLG#kvFa;w`A;79gmzUX_j!}S}6i7OHZ3a%vL4}xW# z^2ech1@hf{WD4H2Ebl+=Xo>WsqTOy{Pn5AB99WQmSdnm03T&L5oG@Q?fyjk6w+K`_ zjkm`11RfV#LTnMBiHdP26>ivB6$hPxbw39mBaULRp;pimdKxmgpyh-N zB!)_7bWM(1OM!`&PKo<~0Qv2NyV?xZ_fNGShm{`ULlDNdF#Nt|9eUn0FOLmdp`2r4qPFb)GcHGx7x<~ z&E@2)!u%g-U#W5==63uKD;xm!j9iG+J%Q%a0mcBUr-!-MxwI#AJ;4an!sCVcud?+y z{!$JLLwa$Hgpqv(8MnfD!|5?ed~=0CBNWsNYWku|GBb37W{cwT`Z)luWCN6s1utESnVWLP3qpc=4 z=7CoV!_s2fJ`ffUqJRErJ0Kxb#_tinp7HWn$^AYd@Fs+pMYA`Geq=vPs=2rll3Y&( zrjgtWhVHzHJlqQ}am}#kaOpJsva$p1){cu<%SJ zil7)m0PjdM6OPLj!6J~W9U<;YKW^EiKfVGBU|VsHZG3Ml;v5r1x)5@3c&@9uaDW^l zg`yW1;bFc@33Fs`udiiAQ@9ztAi3GX_@+7^!j<&=u{NE0pU(B~!IKqU>Nq+jPk=UQ zHsA#yRh4X9a&jQD85xQJ?A!vv5*2PKr49T*$``pvfn01^ykcp%^tXl2VHWFs0CGAB zID+yAoOFN(luRlS?gD{5`WC^|NH}$zP~ge{Q1s-&Q)pgc2r&SWv7d_Jsh92EidPG^ zGZ>$iQc~hZ4ts^>gfJYBY>PxFSa#?U{N(L{wsp@hSjv88@2g43Kr>2*`@7W(IXL-I zS5Cqh0r3!)O_TMRaY@87fKE_x6otN#5cO1NS62WM1_@>$jsAwTSTkW#%=oZX&82Go zF)N8G4HsRM7!%CCvPdjv8#z+ddIqQ~QcIy_gdTbX%hgY*l32u$;B9q2Lx% 
zGSXFC`A3b|#lR(9l{#|YNVGy+VhZIYIpGVRc9iiMvAMt8_a>Ni!4wn>wBaSW5OVrI zJzfUHLLem>E`>3T=bN3FRPrMSJer!6zvKh3Azjc`iL?ihYY)Q=8r3eGJfux}3c!fa zP{jEH9sOrsC0tobU%7hO!u$=)@Q7rz4pLzl3Sa_gVxWHFOT*H=>3s=LWu_w{~+iWQ9_#D{1}HTgUP07HzRj#BaVx~c|YcuY93$7eM6K9`d*?m;C2x%Em=xbPzqnTFgjh2lz#<*>69d$37VvtaLr!z`(R zG$=_#rXl_k>hksLEbxtK_|EVnVnIifIRf0v6^!@9K_`|JK-!fcP4YTrxDTLy*ZwY{ zzg&0}FdeeMN@S8S;3YZ=(+fe!TV3EqHUqEHc(Z*5J`95F2r-S!Ocrm`kFh`~b}-JH z^{0^mKmcm^(2CI?pr?hUrWDqaGVMnu(&SE2T#Yd5!4eV}fHMS~(?8wGr#*EK@QhrU zM!q@Swul1>2o%KSv_W3eTO?bZoiR#Q;Gw61rbaH&fJ4Uh*Pw-Yx{dXV((T9#`13!l zMiSUNq+8K_3M;AkvzzaUF=5Q3`z$VXnfcXd8@Jv~g7}|shPyx~0HgA+In1?k53lJZ zoXcJXDS!G-zYoo@FRG(;Tv1pA-@%#j{s2@dl4s%6!k`!CxZuCQ4Nk9fG1vUIR3k|_|%AMpY#pA7A_JAMk5U(#lde`}2V=@u?c>H^CmDgSb- zGp-CI(-FR$c(bt~Xi+iS)5tdMGvz`h!~kF*|CqauWTf&NaXu`XomfVg9Xz8wF03F| zcc7GI9eA_Et!Vsb@CVVXG5`f~2SiDVv3mbGO%Hz^EBwK(RBIgC&b1CuwtC*o%t?{UMe*Z7=$pai5c8W`B4PP(w-+PiHiWRmxj6^vQdy+!HGQ?Md543w}6W< z+4*JcvVP17#BvBHZ?VMJ`X?i3H5}k8AaoyC2{QOZ`V`clN~oYT6eQ2ZA4a6?HVwd%Ag1JRy#kCv^WL}jby9CcS8L)=Vb^)Y&aV2xhz@g5> zxf_3>IhLjGY2+m8etPL}xxzs~9vIC1uKaJ?ckbi`nWl$(UC5-w)2C}FuAmGtCIScu zb_-dCC-1EPql(@es^lw(==N}e0rnDeaH}N{C~gKRLCcn8A{{f>Wybm!kO&dwE!G{X z*M4#6gUAKp4NIsR5LM)&b97GRzL&W_eWDA{4BZK6sv;oFXhY}l{G`pN`t&mh^;A3s#US#3{DNt#hbQF)c{Ns_XfaATu>@xD5TFQb zQqJ*zG_ri~!9-a(aXyeCWkUFA828`5r+MWVFfJ;A>yVRn3rgR8?>#S1Pg6}zjS*Or zo=Y_~jwRRk2Xe1cmYFnU zAaR4SNFh9b9%~FNrU^~4MRK!@9o)8*xd-e`MKObR5xXGQU}z^W%lTAdS2?T!1k zeA%`X&pF4BCpIy@wI(g=#(>NpO_|9+)N=cz2%{gbGeVhZ&eemnn z&eSnpg_HRlFY7Ji#tA$@BHb`khXd7<|1WJeeZacW(*`fSQCUWdQ1aXWozqkZvnkSO zBmYn82DsBhZ~gRVjpm5atn_MXDkW7$xN6*8w(J3ObQF8lTxvh%iYyn@e}@|M2* z^2-n+y(noGxld&nC?J?lp5D+4B-spaYv&9X`_`s6IZ)IB&|qx<3i2Y~^817EO~ucC zfk_y*`_t}&LV=CU9)7xtrcask{n zEuQKQ#E~KB9h?EB)0$noM*U{tu;}fqo#?0}#|5DryLDTfXakTG3?$3h3Nl%S-Us@+ zg1Uaj=@K$-*I8bC7yBPSVBu6dL5XVL-{2njUEPa~tBlf0`^-nV3j>-`>p(gjZ{SBiv zflYR#1xwCjGRP#;L`G5SMfzxP%n{t(GD%&Fj6!I>m@8_*Vd)w3q4Y=+3{bh2?#p0G zQt1OZu1C1w{?D%0qUU=RR)$jasea4Z!(mbtZ4r+ji`S#$g`5Khi!smo}qOsTQk$=P!N*vZ5Q zK*EY-+JpuSq2s)EaiICSWAFC}D-FomJ1rB)AGi}%7CFNsh@B0CIaF~DBsUVb6tBsY zI>Dc|LBL`3d|v0~9L}jZUG_~(byp?IKuL#&GDy7==$8@L{~GU8Ni48=)@|Fib-Z!m z6bp|w-)lm$qyNajxD`PR++2iE0Z<@(L4X|4MBPeU1}gF#$bXSRby9A_l6!UD>)jUT zgvV3&`WEGFR(Hy#QE5|z9=6&y@FCt%70pa$9RX>$5CUKZs-k`jX;Q!DZ_;oeJdk?YBD5vnF#->KeUK;55yM!Oy`<4%*{L;j@~r38S0rta zce1^49yD_y;gupY4H^IW`!&}yFiaZ!1=s$FKTb+cHaGl`+7I@i+4vU|}x11gN${_S_)CElpqseVGV zYJPrxYLlngRSj+XlYXY|ilUKWvoNMX<<}xo;Dx$;&KAx>bDPgVzz4bHortjH#PI_; z3ps+l?rb>m8o+57=|`_OU0RLC;_&xCft*1z=FD+nd^xprGHf7=p|HC=Gwf~XVbx#i)05>#;=wo6Ld$E%Z?+lCnnsXvLWKPp;&%Ylz zLYnS+c9W^Yf5LAcdPqr*Ep2UK&>W0HeVP{@cr-Z*$)@@J6TQw4rV#l!u8p(nXmprO z{5Z629>cdj_ydRwsUJM)0&eSb0oQdxY-vo=do)dT3?q0M)Vdkj+s>b-&i4 zC6^j|psI9pljsTdZ#Vs;Vv9(vRL0$}%R6Sl@XzL5K-DHmcHOhfyX@X;?1>ukC5a_v zaO^T~!uX(6Tf||~`E81q&lCnuhXM9kG7l7Dw{iZC{Nw*`Eq-CUH|mm5h+Cf`OU$Numkc38dkF9 z^D!kz`s8NI8b4Ei{1hMlc+*wT1@?0q0|$aVvFoV#IHwC}8-?}A6-854#>ryj#(5O( zR%=tf|6<;oD@!uAhZZO#MM-ip7SnhX&np^C^C!-f1>^DHgZ2u-4twMmG(Tqe@SVO^ zQbxp}T#>|@Bf!et0ZMIvTPC$(`ss$Lzgx5I-YT2oJJjhaqwYZX2^ClRkCC6p#m2VW ztQyzR)g?7m=cO<*eR|)AZ%$D`Uc>uTj;&=j+|FFF7x^ z+fj1qr8iqXfP7>zBq1v^dIlbsBm?tiFN87XbYriz5KzXp_PE_DhZZ_?K)?Y8h(zymrfKMi2g0GJuB;q{Y6PY=CgX{F z_`AMqU4HOPQp4h(haZ*N5{hvhEQN4LXm@*~HR`tztcnQZN(;ENxyRc!giYy#hht)g z-}JR2z50Fi{`=&#pt6QSVAGQKH#ip~ruz;Ej4Ii%Ze0bkra2!x_t@dx!ib?2Nv^Ql zV~}Q@zlwlROA|apsEa*LkpWOeX!_Vc<)A|XiiFrOp;b2An^`Sq!dFKbCfn)-+>qhD zRg8(D3p6B10VX*Izd{~UrY^l>MA=;w)>lJg#tdJ3!dmT=<`H$EhBkxtW*{&k@Qs3Q z=w`oYo-{Pttn(!N>4K*W({-n>U43GLq4W`lIs&}!O&_-TnpWL-pNq>(OJ;a)MPY3s zkO=JS1zO?h4AhqJ=z>{>DHu_fvaL9Z-PwL$s8x@icQeBpbdJq@1zghX>X07Trb8 
zGJ1P3hRVF4vGPgBPC7?}D*=^`Ar1`pz}zic>{3$xoqE&H9DM=mf*%|W-Ns2KgKr@` zrLrfbC>P$rYEv2OjUNHuy2;H@QmSKUGD;yr6+t9_Afy_3iY~WS_UX}SxxBY?#TH71 zT1E}S%I=_Z)Nd0thBd1S)SYSI)`>fVY{knHxRlaf7o;d!TQR&OSd;V2?KwS9vQ5>b z(SuhIVvPD_@l~V$Br{cKOr~$!ep$qYJO00htHk6WdXSzaeuv|i>h#_OxQ*b=5Gex&QmKES>3u)Y=R;f-3oYUmZEHROOy`RcXR3?+6<0t?Q+^lBB_5>O{h1->wE$g!LQj(J;1~`PURx5ziDliJ{!QBM3epNJ*a&<_{^Y#|6li|_6j!L`{Eb=`(yssb*r60 Z)6LaeF1MV2jw@x2i0~-OfzX7s{{s1Azf=GK diff --git a/examples/running-llamas/artifacts/Llama-65b/prefill_latency_bar_plot.png b/examples/running-llamas/artifacts/Llama-65b/prefill_latency_bar_plot.png deleted file mode 100644 index 81d1ac8504ad65ea58783b0fe9a5a105ff0bac49..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 30959 zcmeFaXH-^Owk^6yN|KBsf(QtbBq%5#NumT50mT54R78}FpkxI>Bp3iQL85^qN)RNf zC}s?RWDo@n6r``76{c-QL)o!(sFRV4!m}B(OdmnS3HQi>ggqfe2 zqNpW?TXfARiZ+;{XgU}d;ct}Qw|vF_sd(w__A+-p! zJv^OUUDnGg%gW2FI_%};=BX+tckHhh$hvws$|>bPyo(QEa@(@UlcLz|$Uih$nwd@% zkAl zjO|!!{GZ8g+H>jPQH2ZXugz{t{Fs{UY?$@Faa{1l33Z{^KOI)%-Zl->8sEK6`dBvv zw67Bmr8x2DHtWNSY{3!uqp?;zSQme!&M>XQFD^vd(Qd(yY4%qrF7iUY|NVd6vCO`$ z>_pa%)I}dZeX?_OTukLXc%VB!H=C|n8zXqyM7$>7p*HRQ-0XBsef{rC&y4PD7Qa+g zTWioa8~4#sSz4N9^k+kD)bjPrk6p*!b|nSP-41=utS^4at}ae8tW8crf^pqd)@5@0 zW#*D2BCDcTY)no`3HjNOd)Z4ONl84|=;Ex{)!BESKc9K{P|4rlpJUlF%XIPmVn z2c6bO-U9K(@7gv1j?sESVXIF>3P0}>zeinV< z#*HW%xfuJ$j5-1yy;brlzRiht3Tm($YUK)hfHTsg3;j#DZm}g127Z z@Zwl|vAZanplX`w{>*KHEoCRno!k!weP9X&K0iEA)Gn3X9{bSo z?aBwP?JEx={POt}6&24XB%GO_9SZ7gD#&ap?C7vP?Cji)n0?yjs;f)Cx5y>BE?&C( zvQ6OT$=^Mkf`ZZE>_So>Kh@V)4^_;|S|-dcpLrL-Df%wWkeeFf$~x9un6f=dV~dH& zli|)97M^C!`I*yry1sGTG8rfPr!!>D_Vr)ytR07I$~8Z4cP@@QBzCJc_JhkM&r08ZBKtJzitK zJ018tk@f4>Hw;kZCVA^heTHOlS0u=pTfgJ4p4R&7eO+q4`PI!$mJ~_U+s8AomxSmP#MZ-FY{m_jC4jYSp*b z*DYlNn1yu@9m>eEsVH~z^5SuFa$<;7&@6cz*w@#0%cH06)`>sg7$OG-TT98Oe7tRL zpmyT7uHE$Yjjm5FiW%#Ac|EZBHZ&whMTCaZmprmAJ_6VMWdHKzi@btDRjlS*1XALbffonX;l4$b zly>rOU}t8gO)g$!+LW{ZaU}9>s(v&-$&0sdOD~oYM+YS+D9E>BMXF`)e&!%1$L)sLH+09oJP66jk(IK{i7N5@F7;$|YSh`} z>NQ2>XleRjT2)n5KN5QS zG~Kov$|SxEUkyik-_z~9yDKaqVX2Ia%g?IGd`d@n@I|;#25D*OS6A0(YNejVN1e*Z5HFvb`sx4k zn;5w` zym=NQcy46GD{L0;x{#R2wsB(-t)RJGf7{~~Bv+S|s62S^z_j4dR(g8+RoH*EQ>Y)$ z$^)jqCS+L^yU|>@aG|%gv`BmQ``aW3Ea%~O$uuLQqjrZ6Gj80t(W=BFUF#RAoDI1) z;T08{US3{EehJP;o7Syge{Lio`S$Izcs#tl1r+7^{f*v{BT}?G zC)B($sS#I8QCOJEb#3JVEX$TH^B?_Wc||j*Tv2HT5hVgF(AL(*r(jQi>(;G2T$WY2 z|8gV`{j2K^<~e;3zqhAg5pL~r2E(ecvVkkqQSd-L4udnrf$YlTa=H@wEE^7GhBlAJ?(?S<7U3!n(@sFLI_|@H5cx;7! 
z6d%pnwQKL~&L>}`95h$<;P^UOSvHjXOI*(o;PyZke9CUC@J@vzA0(`@Ib4bZsveMC#*H^^&N&EEoId`&);; z`gGrs-V2I5$$EFaxU@!P+e@mlGK7wkhld_9!pg76h_^k(Z`{ZF_b9rw?7rjk(XSOTI7AS^K@$h-8CS)G> zj!GJfI~Gh7DYLt>_jV#fOdw6}cz9&z4HeHt+~Ou$CMK*)<@Voj6mooXV>9-#=E+AM zWBpC0=H@lM?^7?t#f2g{Re$)v{r%0&OFsluJm_6qT!4sz5yPlD1OU30PcB-sJyG{V z#@3!MCPqfpSP|h+Eq{Nt1X=6IxVSif&D#07X}P_HN13p>q69ZB1puf*aeEKUD4-c= z)p^;q5YOfoa=q`|j7p4QPqp#<|p^DCKU%_EZS@B-ZQV&w0b;$r1{`yca@^a5CH>*A8==nwF*XxQaQ zk$d;EVy`|fMJ1*8>Bh?tK-za zn2f6`E8C6_wr<(B?MzCF9jEC0srV6&ua~9#u6;$V7ZDcTtmZ#qdbIV?&Nw-n{k^@t z5eBg7M`cL$t5+s!0aF$(^VrNxqN1XGqb~!Kj(p6zFckMLTme|c$7dP# zS;F1x=GO}jz2;hX)kMvASPNIW_uKOeqO`%tS)tX1<2KTiwz;`^%VR%FsZx)gNNN?r z@7niYPo6!C#BOG$s)6iTm#@>sV~=?9L^tfwWWX7q#kByVqA234;u+}a8P(O*-M_yP zEO~^rTZTgBVZK-8wpn^)%XgVI3|DD$%rj%QY~Oyl?y}4t5h}vDAqPwSIPu|;Ph7IH zvL3m|_U^rwwg2&>!woq%EfQvL{^`^~?M7HiJ_%;e2ySg{4MVOh_8CgF82kEy+s@W@ z70R7+SfH#`@hW?Jd)>z;M|Ba9PiCjb(^ao;IL3A4$dN8QL63l(y$_{oZ_fg*);%@5 zVP2^c&=xfF%K+_&^mxIqx$(n@620caV__Ua8HcxS-TD;JO3%U~u9{~Ud67545de}2 z2~PpQL(PG^t96`}+JQcxv#(EbZsyN+pr!*UDJjB8@3Q0Nu|k^rqgHI(cC@*0E6R%A z)a?6PdMv-byT9)X=|T~ zh>YX{EM2;H@7`*xcv?n=)Pu_nZzFHsyeYqKUETW6VJyp-sV*S&1Zj3ACQj-Jk`1%l zlRM$&l}6wC`=bcVsII2PqMRx(*8nb%v&ga9lhX#A-IGXCELssPwP2z~fRws`fIu=J ze5yfgU|o#_2QXLPd{gRn}Xv2$^Fem?1_nq9_C3U?j9be zPM}t-SwT<(O^+4F1BHi8d^TNKrx)_k?#cm2O=ipf7qjutKbhIAg z0^C73uk@}hJ{sq5)f=rtB`90-md2gkr zz6Z5YUGC5Dd+xispYA=hwp)yiYqIwhd9-Bu47DjA@8`33aB%c*c4|BS5uwea7s}OxoLOAkU7@&XBX*YwTZK6q|e&KO6X6 z@qQy}m&Lu^J5C;uTXw<*9ruoWdp(QX{eqfv6QcL--OIx#YA4N1{O&R46-cuVd-f4*h4n3$pY zE}_WSSj)Yy^hObVCO1`iaThxy+8>-KTjzsDXkV4w+95; z4Wt`$%zxtA&fNWvwE`UPnx!+}xOp@EJE_P7(EtT?gHYO>x$P!J0gnv-`BQ@$y0$2h zN1}1sYp6~A%Ju8gCR=;busfI!_SD6j_ikKs=+L3vB_3kC?(Gi2{Rt?$QJ&vkg(M{j zy?XVEC*y?X$k@QXk^Uxbgk@ui=Ndrnv|~Wl#V1GaJQ(TFVVInhI?Q$LGP_Muzs>HZ z-0cSrtfs!IEJ=)uTYWhSY>FG5X`vr8-a|EK8Ow0e8WT`1Y9A)z|MrP&1qi81HYQ#4Ms&r$2Y%y z{+v-&Rn_d&kIqivqQ{8bEAn=5yFVJ}LIjnkA)R5<@%~_>Z&b6> zpvHJ6O|0nXPWy8g_dgk`X}{d0naVIJx}R5p#^OGEMU?07Ih*rM_wU~~MRBvrZHjAN z=4|+*sbCGR>@=F#$~SM=K>j?ht2=e~u5-xJ&yEKVf?Ko$BsN9!)i~E_b4%qDP2||Q z5;V0Fli!m#1q7mOf@XQq#uJ9bb0%yzPg89>V4YRa)s06RO$Sfd>Cbz7C_J{0s>EJ; zciVi;fkTIkem;B_y<+i-6)SrC`UpVn1xU%a*_N){CVJIm5mN8&efv^DH)id;yA;{> z&sGYA=IOjVc>xXo-JR^%X)+I-8`$uQ*X}mX&Ogx-UP$Bf^+v`iR z!*eM&^bfaXW@MZplp(fpL{wCqahHZZazws+x4z7t`}6=87n>8}eeponu{U52!wYiuSvV zrydZHR8>|!d5VUHMt;MFGuN*1gEOM;fa4EFSdfn7rfT=nXeOzk;I$6TgYlEQDs zMjLF5oy#=!!L))Q4*?hym`|o(HT!;ku2*R|DA(Hk$uBW`+KH=o_7sS-328(vUAvdZ zSS0zV-2O7H!-pj@w%wHS`SrS=GitnNOwR^)$S7=z;_Dwov&;fX!5th#*@-{mZyBba zJ$u&Mm@kSa0{n0mi2u8vNaeJL>Kl{+EkuaX=OwJH=dq2y+x&StfHY6d4dqS%FLk1S zZ_Kkh-E!-PPB@!n|MiAxFZ3y4mkw!!@hcrkMyDKc_Uwy-HV_8+U}j|Ym+>LNnKkam z<)o#h@k~BOwU8WXiqgBFe#HLp;fWnd*V#1}3*A9y392-Gy>Gh@7``k}GdKDsKWx{m zs;grT3=9O~AUmq3tK6f98B2TJ(f%@My!DQ6BnDy>u)Z}Ql=1))O4b*${B9c^Y+V7| z6n^d6wd&_zrU^pAQ`hf&5(y+z@}#>aDZgL0rK*Ys_w*ECv8Pb;#BX}7xuAZ>?as@* zOgn1cR-a;E`Fioy`1ZYhSgOGJS)Uu4LHj!oq;5|)&)mky#8idQS8%Fd9X9ad#S3)k z8?gXs=P7+Xz0F&K!oJ&h_2)4GCwYBfx;VJFPUV(QN!1oep!bYIH6lm~wI$u6t-x`+THp*H z9?Z_h@8Cu1{rrYIHd9*2^?%f3Zawe&lX&5RaM4P|jex?NwX~>&wfi#UpU&C|2?Y*< z5zRUOPG!@km+JQo=n>7Vz|7dHbY(Ax+qEcC@KE&Ia~S@C?gpJqk?T^u)vV*PAko!6MdhvSD@z&2ilOfIwXbW7V(dkNRDUl z8ZC8M)><1o&Ev6Y;hk>A4oATJU8Gl60Fw;WYm4Rc*r=!{cuM&PUT~qh&R%*Jcm8}Z z9Z>BQ8}Tm_#&E9}rkkPn_!onGuYW4vklz*(f*TX>O#c?PSh_m5EY*7jhw zNg*0+uVDA_@c}|vB_+iiG&{O1aH4Z@)JpY7rV8Uf8`xKDbP3JPm3xz<8H+^%7s`t! 
z_*tG^&DWgyhI|JD^qeb!DZ{T{-gbL&Nmf~_tpf^`#6Kgkml6T_$G zE02XClnlw3_z>A)o^39JEhVS%py_;lZbxAEk=&Vg|Nh0#>%KHLCWFc0Q$MM=QZtYh z4QM~MP_0u&|G3kW2u?S5_X`)hkjj(M1WZ6(;8?y~a_g@owG&);q>>5mZ-D0$R#xV- ze&iJbf)r8Y4q@n?O~p#Eyc5Wio~;ewttv|;v-%rE_n}DaNY!5grEF2q{Orc^*|B{@ zPhn$Y+k!?4q{A?}^)B?qC$_Ed9&A~TY_1$QQ(S*>Sx4*00J1&k!i|JO28EVoRk9A( z9ECol`rSKIqu&Jx-0i9*!WtSY!E~YUJV6@zQ^xabr0;`!?>k{a#PMx7O7xJtlcPt{ zGBdyX2E0CsYtk_@<2u7G#70L)hu1C+m>zp76f_eetaGLVJjLPnw-~_wxFgxkAX1dw zIw<~)E>YNN9d~of>qeK2#0YNkP}@-hO@aSH8FFb%yDo0HBQQu$Ius8Xq{B zhj1-ZQhF$0a6@AR>uLYxffLa>@KX1IFG`4P=_vi4XJC!F*KC7uC1ho7-8ncIL;4P2 z-{Ilm^MhYGSFB+1^Yiod_1#hwxXG;}STQW|!$XtpiE5IqC0{51e4hX}=sq*y4iU+? zbIe`kHfp-Iwl+c6pc#yPXgRe>7-E&cKbh~g-*JPW8SJHw>#pThmNo%mK5GC|IYPt3 zyFnJ}p?LR<`ThB}gfK>Eh$Pn*nhlxXbh&qg`Q*cynVkM|5R%~ESD~(khJ=KGlMUh9 zbo^e+&L+)I2W`*4OxM3JcX_z?y(pn0b`&1nV+TNaa->%{QSHRVZo$Rq6m)@H!$=Vj zvqKvuTt4+fIwdo6%?RYA=;&xZ4gX?7cvz{aSSN^Qb{iTSb7O^gkad=UxD7%KIy1Bo&K`mK>OLv z5k8hp^L?2~x=$l{!%W04m73IGhdxDCAQ$`&j}V<84_Lge`gCLF+?U}RGgjZeA~V^8 z@VSL;Pz}!XFlcL#+(@hJoGVw#T0(s-L%&=SxT{^0Fj|pJ+~SPDy(y@&=#^Iy(%{?M zB)N6#*8Ps!&M3^k;i$wSHi0WY$8NPSt)5;dI#eqe%7Hg7C3+5M=w;vpZs8Me0qcq= zDq7sR^x>fGgNF~pzzvWrUfmPrF;&9K&b|!jR@>%piVrrtL`#Wq=1?KpOf>bv*fLLm zxk5RH2A{|~z7g1wyI(yeJzWeG+#1jUPY4Bq<{L1U3%wB+TJP7Rqb~+71e~~9_5(yd zqi~z`qO3gyhqM8#(XX6C(-}#(nCXJ}`nz}UjzFXEB7n^Xga~ph=>$VTFTbm==R&{14YXk~ zQTAS6g4HdvE~KhwkGy|!tXQ$H<0bF& z1M66YG}LOH4o!I=H!?x9Ht_ON9CEUzM=}nFVlp>7Zd2l9aO+g@@d189(z39y+)(v4 zvg2uLZ54z{gtB1Cq&G3y&98N9eh7qLy>~IQU$&H)I;u_<9%4*`R_dv%DxTt(G^H*Q z^bE=Yc`mfHw6rDts7qVfS-{vewMru+LCUmSq~wqZNf|-+a~9zstfr>cVC?cWR1|&> zi!Z~gIm}LMbZH63E+ZO$d*F1o=?fuF&K*uqIJLk>i15}Z;g$c1GsKtcxdEPaHCifL zTifKg6<4|OwqM-Z9f&yjjT-^xj>Lp1(9AF*C+#TStO}GI&&DpKVeuooU&GOa(mVx{ zJ=jMmxoj}s5Z{m#3-TBbbxTFEgJ@g|HR8(rHvkp0C3nK=UvAy`cz%*5{x*VPp}&6LcQo|)N=nzF^vP_oZ7qIZ3I zZZ136LZpOvFA3c=?JJa&;jUdvf!k=%q7rTan`TBjDPpxYBdSCq<7$L?j12mRYbNx5^w-B;&%MFK6nRXRT&X88Z{=}7Z#&S z*XvryH??#A`t_@^z>)oO6vz8DRsEs3B3n>P$OiB-+)L9D7U8X=KW||MiW}iY)HTEw z=z;{VEhF;`3=ApY3G4qznrEPS%z4r^X#q@n;p)}1`2LG*VG$No1C>C%{N<;y$7bBo zM`lE0&9)nrNK0;g^@>*rS8B(WIk;;D}xEgHXkvBydaecEPf%0Xl- z#L|sTKHKd4^z|2^$b>*Z`&Jxu=#aST@qTO8wa|+CctE(&eDF!0r zK`8Oe4(e}I*u8h}(yaNnRmfMrx908KiN}e>4goBMDEuI;Mp&edms`uMZuzF&~1cxWu+FFq-8`5MO5&7xu)tZ6*xEPsCf9@s#;~X zm1+^dkhZ`Pc?AWhuU}t*lA7l~>2ct|0g3`u8HU2+P<4ud+zcE>?;iX4$n1R}(a`PO z`bcT1%gI8zody%aL47Ib1ldxP9&g9PG{dLNMJ46Hr)cH097LJ=Ic zzrViV(B9Gv#w?gHG*BF5JC`ssYlA%l-F{Bi`jIUzStaJkQVU4P`mrmCO+{z>c6Ov~ zLQDqxIMy}*4U%eYF^@+jFE3vS7LMiP92oC+Io6w;z7`^`7}?n`-xa;|g1|oFC4o4v zcT?4yW9iZ}kZy0`o{4XYU@*{WCCU(mSiwjUO&di9Hjcapvw%Tisi~>jYK4V`gxjQ& z@hh?b(Ep$QoZ@`%7|pbbH!?bU3CeRUFp@C#4tUsVxK&8S!~$8~@qzUO3|STU;DD)- zDsE_pNEl`B`a97Gw02K5Y7nDi%iP|(e9pFU-AjA z+qP}!+^f*vrys*LpN888R7?~&9S1xVaNedN4s7h~wP==8GBOqc^<24nwd(cjwY8GN zr>?DcWCNK1yG#Vg1W^?gmD;|qP{6bxexpv+!Wok1)~P)Mt4FZa0i4 zPePfv-F|iHF$&`=ML%sfG~A3ga6qC4>09&Z6E7aCHp(tlNu*TB=*Il>D7#jVPb$6T z+%IzLEoTE|mV*0b1zOZg(pc~Vh;O~tk(m^D@os@jt12;Gp*x4IMuTiuRF!2%et=Ti zB|AyCNk{g(^&?VrO61pV$bhvpf6cIzL_Q(F2LRv(fd#YYH+)}Klbe2Z3{IB zjROeg4ZvS#;03fB?x-RR*P}RO|No@kMjn! 
zGw^aP@$~ehD$&Q}2h9hf!M6YQ>MC+X4LThK?_qOq5Ye8Xp~QWTT$}e`SZQa z*uJENAlt>P1X6|KZ&jBTWn~o=JM3EYg)|fh2QKtsW&xlrkb{lhLbo5rKIueFAR8SS zXEA<-WJWeR6$~R6cwyK%iN2Pl#Rm18BBDH4l@&mw z(90RGnw-JYGahY=eewGBf|P$y=L{VACUPe%2FZ{fLBFbee8Usv(WsQ$?>Aizoy{Dy`wy>#yZO>{)E8$HHe%2iZ; z`NB^g9mzl5zq%o8lg)~dJ_Vhbb;*(o7cYuLe>~{in3pm#->=ElX5&K2J{TPA!sV5v zoRuiEM5K|}S4i3o(nyyahfbQ_!>-MUn$G|f$PjZgqSteWls?7cx9bQhZoXa3qOjkn z2u9}N<{2hcK;k`x=+{<2G?x0@SYZLNX){bN#6!T%O^05?X&?0Q{S|?0FYjg)J`Wle z+zYP#kygN!YuCC!EqNStcXzKZ><=n<*_P{o1_g2MKi=x&;SN!-#BK53Els&K(;qqk zl8E+XB?5FOJUb7s06cm(9&BTyaJTXNj&{UZA=?IAxdbZbGgRHQ`~CFNVqy%~Uqsjk zZt3}QVm?M0fQak_{LHz0Ig_Pj18r?LxL33+c8KmQxGx}3bbS4~I-BJ+@!UXfJnO#g zV3XlA!kV=9uaY!{cJ12r?*04CfW$ptMu&$hi`~09R<4XeKI#PRPrPd^0-JW6y<(Pk zSHM3ZV|Bb2z0Ezd3)G>ccvag<$_wpx`xLBKRsBr`>EFS3gdwq#9u=SP_@u|Vvi6Dx zj6H*BuR)z_if_9Sapel1@5z%QYHC*3T&#;+1Oq2~mWB1TA^9&j!iKi)M?4Xk>q%{G zs#&@b6$~&{MYxJQ{DLP#2L`#-(e}&v_$n(alPhbFgVPK94*P}V@Ej-+^fJ{5;9s9C zZD=U?lTS~N52d0DFnc_n$tt9Nl77jOXn{=5=%^?XN3cG2{LHObG(6bfJ3QR z1(Z2XFJ?beP~aJ2^Brm40c~#YyA(RA1FPvfHR2R9_Byu}u-e#s02*0w(0sYoP4p4T zq~5ZzJ%b)i3UFV6p{~WXxX7ql>_Ifc3^}f7{e7&9Py;%_%6a?@2=-wTlKYe_fxv)l z6Q5|Hrzb2TLJQ^_J&zOshKRMRoVvOoE|u_9334_us6OB(NM+fuVS~rONnc-Eptmbf z69N9ANJkU?3k4Xwc)i4>_;_CIr}yOnegvivb_cNv5`}0@b`!&$^~)2G7^?0cd?|mt ze>YJP3}OVT2m_?<7g@OfJ2K^!O+K1j<%Ou=VSw|Lk~5|eGAeGJy0-sMD+7~ z&=~pF9bhDGH#@r!C~|gqa8&Z};w_tJM?P%7qU5|3ohm74>Bg74aR1DgbI!)ZoB{V2 z2$KPBirANS=h=olXhkav zQV-kfDn&&eR5)Ew?fF1}psP=MMLaXZu;g;$aTDXX=yT`}->sym67B z3QL^xk*nB|2?$mJS|nb1q7I&-6ZBSsFGal+$DcdjV?p4mjE2E?zoA-F_06 zu<0PNjNpRcP+x=Ag(@q3lXUZ@AUtz3?*|q$FxY*}+EJ$7w&vojwL2WY*uaaSzDWqD>rViii)NR1O?rVG*e)hQ6N@9>};vkQA67$p^tK)cOVsI zZlaqVNlw1DP~7AS1>&pkgonp{#mLQRG6W9cslUIoQyT5R1CS65HeCc4Vp1FM+#s@ZU-b`{;Ck|jP%RuOo`UcFY>bSUfs4Zt+JUE@_j-B7VEyM!wsQOlQ$VU5{1Khz$<5OZH zQ`Pa2m$Wdh!GUsnQ|wy6t|McHD~Puc!NoHMw}PO6px_QD17XL&i9bU>Kvs%0yM&zw zUIhOWcao?ax?bF!{3d zZMUz`iUY^r>Z&1(PQ;d{@-(aQvzdYkWhj;o%K$DOa&M@Kc(=8jhA2J=80g-JKUirX zRq1@+h=_7s6Uw-nCw2R{#3{6@$q@Dk(S*?);Nr-SZ*a#TU#@l^^j&bh9&RgBfiBb& z*jXWmIzXJo!~+hR3%i9-MIPR`!!yu`HyazX06M57@{EwapaUK-WuR!RgzW)~yoMtH zhKCVp*x{zkx{<>V?z@Z#kY zlaq$tunWI~5=6Q#04`$e1_KyMNYv%cU;V6J2E4zq@99&gOkwp!HoFBD2gQ4Hj_7>` znA>9rqDMS{T{sQ$z$^12vSBLF>#+*g+n4DE{Fq|_f+F{I4(20Eu z=vCIz0A>&n+Qf)WC=tktt7K$Yfs^#G>mZPXL%{7ux9H)H!G-1G;^Hvm8i0*+n;5op zng-R!48}do7o`q*w_J9(*T_ljHfI!Q=FUu%qG-rQ_P5w zw9kqr8Ga*JsX2kcPN{f)OGdRZdyJhXvUcr~H*ek$|2G+VKyRTv$_p|D0a*#vUW$0} z&?6JV`*uu0K|xC(SPd}xFCVs|qq9c`>*nF14OZd!V2k82I9XQW`rIdfA0!kL%IOm@ zcFY2vzP`#tl!RFdC8*}rD^}n~3~HSWX`Xn8w8jA^3|KLwN`CAmn7myZ^Jp-E1dF*^ zg3O*%pn$q?gO_7UFz!Y;8|-P)15wy^0uW6Y;nhNVN(KS~ay)s#fq}x`04~{KX-O*z z@J|D(4^}12=B&UJ2>cXuM8wA?B|0HCeGT9;^T#=m51nXOkd!*nJrE0xs#k9?WSF{7 zER2jKK_h9?K!+HC+kqI@P;-JogK9I<;45zjq70BXpfw=V0K}31@%AOJ3nwtRL#zz4 zPBXX>cf^5}f$xXU7ot=@f5?-q?kZ~(CG&Hg- zb4j3b!V-bE(D`oc`i^=aEqzzOK@ihvxGLVcEy5yFP69M(B75MtAVW;(#b8T*u~ia# z(;fsGHchO>XE4PAnIZ9O(ZRs}Ro{)ObKKr%-6=iuS#se|z?t0s_(KuYO$R#pPA)k+QA&B;jgYk;j0Ubl`5 zT~Nt0$hTmqUbEYr26s~Tbc5$NlduLQd_U!e&KC|bqOG}1V}*M3-;hogelP-kcH=(a zLrfZSzP=W$33@_(XmVh1=^;$OCnV9rb{`#Y{d~dv7T=(fkh5o*fOez7!Ba!}6g=|L z$XUstFUX_-a37lUk&(CrO}KaAhDZPWjqf}5OuqNeT{G|tU?fX+_!mh1GMGM67h4n;4TznClK|*$C z>}$y07j6?csb}}d2U&|eL<~<_CkOstMxw9a>M{6X(CnfHC5EM zT0tcv>hvOETL`Siq!|Wd7*LgDFw+fdqG_?$R%BNq>yb${sC8so4CcDD0VH?$(di*} zFktUkWSPvBwj0e9Y*cs8&(LVWF~%VzBp2N{ zIXPL?*0vJg2_^Z$6pTJ1!9LEdbCchNvhLokdGUe?qRwX2`ZQw~N(;q0A6hDTOlamw zWhBN#r<5SO+21ea>VKh27X)V)=j;r%3)fT&X9*33YNLb;{e>NG>_TrGjB@=>Qg}Kg&D02djld_AqlG;G5`fI^$=Z+P)XoIlHqz`9z~Xm0ktcsGGF^UOGqfeSiuHJE54 z)i^bbM*T+?!8*UCPZV>PcwN`PFO3WRLy=gFZctoM3-*OHyr|D~P$@rLz?^h?`b&vT 
z#9YBfF+<7M-$tkR?Q~H2FqiJ_xry@uv8tldKQ<=s{h`l|K^nB<{RW8IOkZpMEZXkLARf$NV=X_VEla9&s^Ph)Wl^cHP!96c9jfP{hqCn$gP z?K&7j_3iW(nOY4`#03*^9c)O9$%UZfUL`6@2ZQ|#OH#zDP%S($C?LPa2KJTtPjVr^ zpL*=uBywv;o@Ry~`GiXvlg^{Ldc#JD11U+hwekN(9Gu0^7OdQVH>&&xUg3ZE>vRzs zHDogY6MBea;5hr??0q{0!W3tHnEQg*t^2eOnF^_doGF10ilFeb2Afe6Movh8@_?d3 z*hBxx-`WE{m~n=$VoNOA3`2}7G%o_1G%-C*447CMXl0wBdAo;iC-2_#UZRg;uKpVt z9@dr%SpZze(U@j%ju8Y1!!1eW9r+{$OtcuA8H1$Vn&MW3J$bwLHXn_Q6k0a`A!3k4 zAU&{v#WnofFzslkJlM(m>oY7&XqN3FQ#arp`*5Pbf?Rlz#71CXLS?649FVCRhp2rx8`brJ| ztdCd7bBf!1kgF0TGH#28f`t@LLedI3`Team?fm*R&UppYriD2yG*X=L8`ABWmBqyL z_#e{+|G!H$K7DxNmjhvusXSOQiJJ$hxCTr43$Dm+Fs~q1lRpy19m+GQG=!8zFQ$Lw z$Q`d;^7#1om+x4t4o2C~#Z9JtN6nxpKsvy9VL&YO#Px);nL!%&otaPo&GbIcPKUs< zoM`aFE6MQ{C`Ct)%Jld5=RtH0RLzS+sjXcac zMyjDArMa!8X*v>*Bn^$65-KWo9$rFn0_WrZ$eR5RquYe~Kl9iB#jZv(stXO?6k}t2 z{QT7@)4(d{@2)>s$Ag_;5)J{MEA|wgPwyX7DXz+Y8j2Y11a&=OB111kkAPnD4hf8m z8=|jIfV{O|ePP$5>6aAsU)Z3L`i6^5ImqC)r>C^rEaaV+>PfKc5zc^&9iz-&uEXFN z(#KvaJ94^GJaf7|{jdYdRwuE}ZP<_?4gWbg1%Vi|fTVN{48AvQL(xGU>>Ts}4>#rsTUlA<`3&uc->eHKbUc5U4O7A@NlC+#808u^ z7oc7E3@Kh`>f8~~B$$EcD0FRSb9DT!_7h*b6ulVszDtfb6k9YtJ6jfRGr-KIpRohK z?-&?}Uc0X_8BlQYGu#_hfMB+V4vjJ062%HisTc{;M8(8BLzhWr=`ogi3t&bMBrOov zimNyr2o;we;w2faH@PBDLxg1@c$%{7W23)+SHrymiZ%k`92Jay3a5Bb6!6K7cg+m`~H8Xrg z=B?Sbbm#wzoS;9*+6EDa@XBx&E5k}I8}Hb#I2cP>1s4@it_}nmBIGX6L%Z|4Y6Zx{ zc<;DfyzT8fje2M^@cJ(z5DDRQ7}k1_2{>irG+5+r7z&7^4on*}x=%0xx6p_@{WQDg z@`BbO3O@rx-xpCxI>fErB@6nwB6(wi4@c<=4Qf# zyW%_na%c;LGhz+F^UbqZvtam4-;pmKjMOgWW(_gEtN$8FU(8#a9BC@hkphI({{d#S z{nhmpX87Ryj|iE6i`H~6ZDWb2W8b|NaDZ@n*u%Spp>h(G@t@Y>b~$@=oS1CE>JjrR zH35Cj4#GA0BusXZ>4La_c^T|-V0$PEzt#m-B@_VAYbQ!%-H!xvJ-<^P(7(gT5%zX9 zG&OPv13B}E0$9(zpJ@e0DrW#&Tmm&UIcW!e9VdE)!i0fOmi>8t z@(iOngtI-B_ERekyMVRoT!A0nBO(=SFGvHlECRtX94yzugKa5RAP+V>I~tMHfxuPv z{=q{YEMN%Qu?*$XH495A@tIzJ)hY^{)5Pp}d%pcsW=M-VE~Q|LF&s*c|7c@|2p*9Y z$}`Qch4O83LzxpGjaVy*{Eg&9h~zAcpB-4Y$K8i$HM(KRA1`VBU7?UHU|G1x)#=uY zi6j%DvHia&8xm`2M#jf$u_NO?sgZ}sd!e4I68aTsCJr|jt_2};J~1)pUJm(~EDo%= zzOHTu(I-Kd>Bk5#0hN$5u9nGKneMtwF2sT|vZgx>X_Z(*;1I4NN+pH^9vps83kimt zP6ULI_G*ySF^R=VUsrPo(-Q*(sxAiQFj64{LJ>$B$BQ&)GKP>Zp3_Vh4@Rp-4iAHU zhn%cagJuoJ;3t5@&{NJXUw_CJV;FU;3$NEmen6jxGf{AW&T?`P3xs7ds|soSOniL2 zMJ={1>UbUV!iznUANbK=PL&*Ip@lTznQfk;%>czxQktGDV_gWA1eBokT_^J66r<5~ z+CdnR;Iv9L)xf9_;!kH!D@ph}G_XL4%I>r{8_86{$J4V0%+0^XJ+e9=5O3W}Ok@Bj z9P~COOEzixV^5`ub+;>WZq~k{oPP%9>hlo5x}X3Mrjw#@vOweggDl9&Irm%^)=$XD znA(JCBmhP`ToBoIB(tC)@4~?rhd{fgR@6Y2o2Jg$jsPA0%=7kOpwM4th7j zqhS;RFii5}o_|N@<)-fBgX8saw^*ZdAXt|g@B63j#$us~i5KXA1k(XShM*O-gQ`su zn1v%y`|#*!7|vRO{S4GEIkyLvc28Nimtc6pOwPS%eI=# zRAyQ&BV+PI^ZL1SENH(OI$E#3z%Iau9(URRjZFKf3LZa7v$6neA zv9wwbxA&;P&xH;l2kjypWdA@r*jjjOl_2QReiCx?^BXZSF%rskKbBI2 zzelf7a-UeMfbA%#z)YaPOH?s3!U!MQ4?JwnfZN!0+rYR$R;$JcRL>jA1)>HXHp{17 z*m1aSyo{!!_27Q;uP{w^w0=BFsGKR{41#$x91|;DU<-EV+cOaBz|72_w`PK9^zK=g zE+zI>P}^j%1ZQtlH#bj<#*muX`zP~(QZU@o&!EC)o4SC<0A$pD=nabs8FD?D_?YeO z%a<$=yy57*aB(w*Gfh-M^_?bmXhI)jnr0Yn3*-sCJAY6LL~(=@W^5y(qcOlE2&-qV zn>{0L@cm_dCnnsw-Qt4;v=*W8&;U8!3NsM{*&R}67)`RVuvuXd5eGY=NgI$g&?;uw}o^6Jpj)8*Qd$MOEva&Mx zV8s(MFf3vfoidXgkm0tfoT^-QbhVS{i|w=1(Rh54(F+}LCE6t zAgR0iNCegy;>PQ3d;f+~7ir%?gfGr#brR z4sWnek#j*_UhuVR^$E)tvOOosb0GNWDEPjKB^<)OM>8%w>t4=6c4o)Yb8~SKTnv*= zPqF&?_2zr533%DDGH!$BCiw0N6ojcp^DY~JWX z1XG-CxCCAMX^_Au(_t}!C)y3MG`B7$Kj0)SPT$EOLd|Jw(^8Pk7GYcc6Ap=ylPI47 z%hFJUxkQF0CNE@uOu?J{RmC371Fy%R)xy-;h`G-~ z6`U6u`T%h8AJ3-EXnads2wvAj{H3=an1X*G9@BoNWQ@AFZ(IOCPqP}iwSt1?yId}k z+7<~f_ID_<5L;C4JeqwSgd?tzIxiEgon$?YYK>syOT9lziP7U#@_-?n8%p}-21Mh z1l&c$1v~$(l{dj_QNHzHP#T#pTd7BEeHbqxsx+B|00p1pjkR5ZvulPi$&zZ388kn~ 
z+>A+*b37|ZC^_x8XI=?wPW_aIj!uoe_0OVX%^N1BrtA@#IN9zAoQR!WU8kTY5T7dZ zsLx=_S%9s+N#G${XXkV1iJpJ*^!C=nwAYB0vGLtTds5@41bCfk8!^%en?D_eGhHGJ zoCxa+uz04ZXd`8dk0+B(a5%&Qg-L(hi2dVN4yg}b38~e=gws({TKjo7*$NttPgNOO z@L`B|T8emqg@kNm1`ZuK(18I;6taXmkgPBkM!}Xs`!N6SUVV(Hc=Qm2fFov!IgUV0 zG+fdN3)GT^B_0z_LE!}93{p}!631?VwT9(26ojZ%3DioQZu6&g9^x7~0~QX3)s}+@ zW~37Y9uH1Uslt`xl*2PXl>*I%+qWCwDYRhzAtLL6Vkepamj;U@MZb6 i_uU)ded3WzuO*?KW`gU)>9!1*O|J&H z{?+Q!E-Ww40nvK4_qg;zee{0v^y!QE+NV!@N(1GP4e`XEqKf<;$V8sUo}xwWzwSV* zY*$TRQhF2m2`S3K{~#?t({oU8cEEZ{2T6z+jvhUFB-DzoMx6NN`T3ba_ETDar`)=` zFMTgZI2eF=fP3MEy)qX`hwJe+FRb5@(FEcr;GDagq{*fI4 zpxhYM0h{qlmj{d09oS&fs0|kj*Ds=@Z ztWvb;_h@8rxB&$I?5GT~X?sN_2UeelBb;dDzBnWH{-af-&WMEeh0)zQU}69yjIX;7jt$b))wAelB8@ibp~*2w zge{n#8`s37y(pX;#iiuNZiz^Y(aWoO8uA$1$+=dTII_#5*mB494g2QPKq>f-X!ZUjXj@i6w zB0df)N5Wmv(BQwFI)&rE=^Py$q4h~k%adC-vbb$Xp$`40YDvyCxLOz*^iPsA2wkRu ze||N)R@kAS5cd{FT{}>(u-wwqZVMk~f)4}3@DB+EK=&yGu4Gu|;MM*9DAq*$G@8K? z(&!X22@_?&Cs;KPo)))RyiaoT*zrCDdhY} zATi4?OF_GWb}ZR|aSSwiSDdf^!|1+{0XBe~_D2LjH0#kHrr?sJM@~{1G)KiEr)6La z@fqaa3()I`p%1=6YSn<{l?^yHje(ri2s7rd{BVMOMqb26(&tHl$9k7*quJQhlmcW1 z-ffN4u6q&WIE0ed|H9bj@rf=JYDL`6Zg3V5szqgGx8e)v#->Qczg%IWM$V zRfox)$eqP;gGQ(a+^3N$E!HKyi-#ny|3&}J2&0{}qG}Sm>V^TC4`Gfax=p&zitxYHDh3X!qa%t-?q< z>OUEw?fFsi=+QzZLapjC7hofJ`mCf96QMX2e1~lj$f*zrFg3#qDKH;%CIUY}Ksdm5 zMMkkP5loC9APaGdhDA;y$PJw7nr;AVo5s`5h2(IBOriZ3F!usv5RjN~BJIwZfhQ=- z;x-qY8PRaab^#-HwHz5`0bpga7?qRA7X3Qp$Pm%Bt zx0vwIY%LHXlji{s^<>W~eq3H&ZrY1iZSQf;(pbMV|BF#8hl

diff --git a/examples/running-llamas/artifacts/Llama-65b/prefill_latency_line_plot.png b/examples/running-llamas/artifacts/Llama-65b/prefill_latency_line_plot.png
deleted file mode 100644
index de56dbba32d38eb224187aa40142f282da935ea9..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 41751
[GIT binary patch data omitted]
z9gCV+5=m)kn{hH9EEN?IQFC<_0mps5oMMY%hD6pGh{%a=MtoYVILwYFF&{F7L zOw4t-(C41T_OHh0D}J{->ykuFYX1^pa9lY4bn=V!YjwY=%xYpk0?aZhV$QNc;3L*F zY>vp@>eD~a2tm5Y3wQ~nG2$Wr&x1aV0#)tNz2bOv_-9)M>3{RDo(2fxn6O8V;ofj4 zcvX>?mVtmH!ZpBNV=FP$SD1aOx&$7zwi0X}7cMAVH08}Q!#!+&^j&B^!hbLK(lIr) z$A3ydKobxaK89yVM^CSCuGjzr?OdJ;i2E%~X>+{WgZ$9GdIozEiishNOe{+|UnJ>? znIR*PPk>=doVfsK!!Q(ebbOqGgsfS|*tkwTXKNJgGfK^)_Hd^nCc0%yC+eO3^9RnK zgyjK{T+iOTG`y3~F3qAM0s?_B76pwcG+=Gg0`}(05cBY#eA*{Gu%QimODo8LSuy(^ zgoDi>2@>N(x}dyz^X3hSgk^z&Fb{?Aho?sF<}KnmeApa)4DW){fQ@zX!0$V_xd&Wh znN&{e>qlUY6+Hld%rWByd1)`6N@LXuZ1X>2#!iS{GBXwgFB}WdcYCG;!LUL*n+fwlt*Y zSy`V_4~dECq#9*c{k#JDk~L=8eQ0ddDkv_FMDP!XZtij3fbj)fw-GBISxb)W-=X^%e zQ{M?e_$?FDG$CR7h#t@l(52vWOin`FRkDP2TlC?}@*W-@z=OrTrVL5t;Don=B6+|h zk0CKJ5v$1M6yabg?)%dbvt~c0UPg@Z`Sbl1841Ob57-v-Evj3FYNGZ4T2(hPV#6?= zi`CT!f|Zf#L;Qo`$y}YDJg8>^W+^F9&FRyr! z$`QKmAk?u~_;1CF_>C7S4!hFq+2sxsH!Jwu%fdt&gTQa0S<}`$tq+@X?okgp(Dpc3 z8A280lQ15w`R4^;Vd0M_L`yf}1%fU{M#6y9z|_c8?HJeXTyj2_LaHd+#|sOU+WRn zvpCXjup--s4nr%)LQGw4%n>ok)5I*Drkv6j#%KUT2DbxAG;vITlmTyI#PJZ!#K|&x zhZi3S4+Kv`imlx{%@LpMv0s8AQH#zkEG`~sXGM~sZf3>>x?w{P2Hk`rT_z)8Kh1xj z7wtRs76!BLF&>r}CTfk@v|U+t^t-P7wrbPt0N)Otbyq?D$nX9|1Or7M_b{6cG#}m1 zpFe^WG%Gth6V8CxB71mv$VfV;PX{A#Bi*9GP>Q6OcgSToyX8Sx@*85%D~s|U;J@A0 z31uUuQ-fA&w!uwDP`y6J^T05A-!K?;Gh zR_(=BSTU=Es~TqW<-2%qZ*Oerqgvafz;0-&SU@7;!S#@8An${Hwo@3H&WqN;>-(OT zqtw5RP!`)WIWZBBQuF6GUA(&N%RTg;j@rhulCHmQI|OhAth$Ws?4Wn=0*Wsq8A7&! z+PK6kADM1n>p>Sn?}^5RIL4hlQ3}{acmk)4U`>R)d#5xtH8b5Ar;<<(h98OO;bIaL z8HUcxeS2}6oP=CuAn6LpzIt$Ym>eTwP{4!c`~B%eAylEV2MT?#+nS*IJwjW99}rZH3mf*@jA zKx9~hgklfjfJ8!d{To3o>h9fZkV)UD3)Pzb9315iGgivjH2CI4IYg3O+mK;wcfLzOwZ{&Z{Hb8_ECyNb#`8 zo|~I1$2-13%r}Qj9x+LxcL2#NJdct%PNFs=BM~M6UoMSv5h6{2{rd?QN+PZ7Yj6H# z4+${7d=)6BLFkf>fQI-R&gyf7wtqQx4`Bmc5*MTkj?i=t$PEnoc^5qw7O+^U0d89@%!5@U|9!V4N2se2mLVH!N^ZINSTy z23^CtM4^v4Xa4_ O=lC)0qiHIZ*Zv198s|0u diff --git a/examples/running-llamas/artifacts/Llama-65b/short_report.csv b/examples/running-llamas/artifacts/Llama-65b/short_report.csv deleted file mode 100644 index 943f7f5b..00000000 --- a/examples/running-llamas/artifacts/Llama-65b/short_report.csv +++ /dev/null @@ -1,9 +0,0 @@ -,Model,GPUs,Experiment Name,Per Process Batch Size,Sequence Length,Decode Latency (s),Prefill Latency (s),Decode Throughput (tokens/s),Prefill Throughput (samples/s),Generate Max Memory Allocated (MB),Generate Max Memory Reserved (MB),GPU Name,Num GPUs,Effective Batch Size,Group -0,TheBloke/LLaMa-65B-GPTQ,['NVIDIA A100-SXM4-80GB'],fp16+gptq+exllamav2,1,256,36.0,0.348,14.2,2.87,39404,40544,1xA100,1,1,1xA100-fp16+gptq+exllamav2 -1,TheBloke/LLaMa-65B-GPTQ,['NVIDIA A100-SXM4-80GB'],fp16+gptq+exllamav2,4,256,40.4,0.957,50.6,4.18,51488,84401,1xA100,1,4,1xA100-fp16+gptq+exllamav2 -2,TheBloke/LLaMa-65B-GPTQ,['NVIDIA A100-SXM4-80GB'],fp16+gptq+exllamav2,8,256,65.2,1.77,62.7,4.52,67597,84408,1xA100,1,8,1xA100-fp16+gptq+exllamav2 -3,TheBloke/LLaMa-65B-GPTQ,['NVIDIA A100-SXM4-80GB'],fp16+gptq+exllamav2,2,256,38.7,0.56,26.4,3.57,43434,65601,1xA100,1,2,1xA100-fp16+gptq+exllamav2 -4,TheBloke/LLaMa-65B-GPTQ,['NVIDIA A100-SXM4-80GB'],fp16+gptq+exllamav1,1,256,48.7,0.355,10.5,2.82,38052,39197,1xA100,1,1,1xA100-fp16+gptq+exllamav1 -5,TheBloke/LLaMa-65B-GPTQ,['NVIDIA A100-SXM4-80GB'],fp16+gptq+exllamav1,4,256,135.0,0.967,15.1,4.14,50136,84397,1xA100,1,4,1xA100-fp16+gptq+exllamav1 -6,TheBloke/LLaMa-65B-GPTQ,['NVIDIA A100-SXM4-80GB'],fp16+gptq+exllamav1,8,256,137.0,1.78,29.8,4.49,66244,84391,1xA100,1,8,1xA100-fp16+gptq+exllamav1 -7,TheBloke/LLaMa-65B-GPTQ,['NVIDIA 
A100-SXM4-80GB'],fp16+gptq+exllamav1,2,256,76.4,0.574,13.4,3.48,42082,64246,1xA100,1,2,1xA100-fp16+gptq+exllamav1 diff --git a/examples/running-llamas/artifacts/Llama-7b/decode_throughput_bar_plot.png b/examples/running-llamas/artifacts/Llama-7b/decode_throughput_bar_plot.png deleted file mode 100644 index ccff3652161b29000f1699518ac296a204ac39eb..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 41663 zcmeFZby$^ayDvJh0}~Wbz!nh{5D_FT45U#IP(YCek&+G*MFo|R22o0+TWO^vC8d$> z?mWL|&Q*J_wZ65#YoF`)}Tn-HodQJXsWMsXYT_IV-p?2d)&wQ zj&ri_)iyP~Z^F;PVeoG+IBsaH#laJs7=brgfB&+g35BxdHu;AtOe9o?LJ4BOa^dVX z3*UhzOB?ap`K(c?oprotw_iP&bWK)_{ZR0O{dczLUineKCRQ&pkt0z-`SYRe$37<{ z9CiB5bLVy7gQIt@d`c93a^c>Pv_&#H{b}#<4ZXfm4?Pvb}*~S0=56@Tp!<&%Bd&9;?@Z!ab7cN|&w`my~ z8cNWwt7x6Ky}s+D_L*QYcNKH`wnO5+eA^BjIIuL^9bnT`)^8IKsTjZJ(xpqXa&nt8 z3`fTv4Awt=`*xei;)GnrQ0UpmYffsH?^T^1RUL_IQMQ~dE-WtYaFr6yuCCspuC896 zWKdM%!yB8Fq*)mv!NK@=-(o0D*7N7O*2~S+%wAIWlG*Sx*n84{hBP za><2RsZ8`5msQFJ8X1Zu-z8F`gZo%CLL4T$+WEQj&h*(&D^o zSV=;{$-|Pq1yQ%YsTjmqjx-o%%4FKv`uh6D#Ki^a*TtFkRlUG7&+12OUOAMF5#%mOLa9L03^PBc$jdd0=@S7eHb)x+s6(DRg*BcsrC?xHKZ4(uJPDi0Dt6T`R z3$xIER@N7{zCGZ&Td=jO#3%pRrrj3Ze&#CX&u51|x(EkdaXs9(c=i2pF1rN_;f0a& z&GyT-iD@@ejiV2wExYyCMsw>_9BgcA3Rg<(>)*^Gm9aFZr?0P%EAIN~&c?17_rz+l z&uFAITX4@+C8b@ihb2~6980G&=_>xy?jjO`Z~lr|XcrqB+wq$zSxE*BH`1+maP!t? zm6W_*yMj_UlzEn7k* z{rKJ36=k&=aK}pO;%{*UUE<~`(y|T3ML4PtXQ#PWNhqC_M_Fy^{b?1Xqkm= zJPljc`c_`_XONwvFboI4t+dWtCNg$s7>I20pHu zQS7vXEg{M8w8^v0O#H1qJb86~oREQuvRHgQ{ z?_6Dbo4)aumX_YQaYH&-Y=;Mjs=vCjkkv$1b2IbWwQCT$Q` zi=t8kPiU3h!sw;ER}7C|zT8+-QZj{BGh>0bTa)$aifdK( z?Y18;beT#FGj3)>8+M9UJsAS{^SG`Sj_N zG!_wKvcT!nI=OQlEtki=3wpbThN7xVhclN;=ZDEc=w_zfV(K81kQJ?nhzP!GOl<52qxL-h&>^1a_)wkZ@1vum zx^;iPJC06H25bKcx}l^b8zRntVlmmsCnzMus+4eqjzgvDsFakHR?d_4ZXBx2CMG5~ zZ{3`i%UvI$HvD01J^FY-866(+M1w;^M;YxkMVG7Afv&nEQ=I&Md=Z`inT<1XHN#ZNcVnNFE? 
zmolLQIA&%pn*RIBN{*5ZV%Qc zPW1eGEMPvwX)&VKI?7Rc%AlTXwyoQ@KMDV(*{;`p-`fuXM@0e^|o%0y;xaZS)?kGXG9Iu&(kC z=Pz8k)IICgfS2o3g>8HB;srIcAU!sKe56wF_=ixV`6&Z$UPH&Ha6R2GIWk)D@RhDiC+JkpOjJsdcekCJ_=EM|Rm73`NhRHiQON3AKV2^x z)hLR-Nq6b0HEY@i1{NCWPwrW&tE;>D?ZIpGS2a{2?4~MyR#sN_TVLId-}+jPXEW6F z?b3}K^yp80;ca(vokk~XRV$-aMNr@`;oDGEsgC8l?CHja4we3J&SvJ1W6HO0CZlbl zcJqV0Ard|z<7sZJa?cpJHMj2GT~rZ#)yUZRPFJy4Dzl@jD-CK0SsK)gd|NMF%I{|W z_6*Ob#eO+mzRpj;oc@%)r+nKC-kXMz@l|v*2g(t)|Lf??C2y=io2H^(}9@vZ(vd|vA_S`UI_Su?EyJD=T@8Q+K z)3M*%_e7R>^L!+aco)wGROq($_Q|>J>DJT7lMP#Hc8R_7^OHb%odp1x8Tr0tbYh|d zn5#HIBmj4E6t_4m|5u#G)n?0Xro&SHudspWj^8Zb)gEl9JYvE39j%T0Iu{R5XL5_( zNBiZayINY@DB+PQW&T3IDz|2OKDzWYBy$+1PkN{XjP44sonpw#%X>x7_2`@V@Cm&d zc6`-5z_5g=kF&A$*d1vLS4?}xnQbZnS_%sa)LOG1Vk5s)jMw~y)v=35_Xpq}{b}Qa zxRO>};oTq4w_#z6+v;DquXI+H8(f+E#|%6 z&F0RgAdkKt=Aogb#WtKi?JxV`-0DRAg_+R}fnSu9B|}RF2J}T`Xgg646et?GPCNRN zUcI8u&CkCS8yhPlD=SMYD&wYVH!DizhFcImd4ikUeY(Y7mC9{oWJEB3S`gd2LTCQU zy?ggI1P;{2UpjVGQc@oE)*Tf+Lc_Z|9hB2~{7hWn6B{eK}71y^w zUF_|CQGaW3@9D6_#KgAl?oe@`Qxd0*JDy9XV{r2=l6y_cG%l_2o4FJk zne^P$V8nG^R@N)A$;q{>_Vw7@JIGryGijI@Lul(J_vo2>l#vNFHDKEwa2IN986ZJbOXQr^|4b zbU6+4;pXd3bR1EWLIFN$HeFTGs;ZXrgUKfPT7a}00vBf5UFH}o^yJi}sPumS{v8(o z(6H%iE>=np%Kh|EQ+t5@(jW2h;b&(3HE&UDNe{|g#9bozbjQ(a z<-O~9czHw7Yb~ArbO+cUK6=y%xM4dpv$Lb)TI{fahGeNobzGM7w?Z&EeEMBYP1%nZ zH{DTJ5BTPSXYT{pSRSFkA@=@wWa6<mPB-VFSY{;0ipOk7+%?0STJRC>iOUc=^9 zf%pI9lzrj7*T{z|MEHzxqrba*F0M(ZA?XW_U+p!r__Lhx>aG>XzT<(k}jyE1yFoj>(_GiM*5c(wj1b~ z1AbQf)}-G5v%)E8q&GC6i~ltMyw|%!SB(K1K>7UZrofKFr9V89U$JVr|7eEIY}}mT zXj^Wg$0LeGe`IpPQ1_t>g+IL~6PEJ%2bv`sb zr=XsyN{cx+EAWGH!82b`LzO}`AXS`sx?u-B-~GKn9Wq7lIaEtQ22Y!Gy^?=7GSR=6 z#hXLaR_e9jB7c=lN^^$oZUKRBTEa;v4cK>2-P~w5GxD858;+2V48poufSOe1w-9SN zu8At~1Gg-(dAPeQU~Pffo9Xek3Tc1ti(&XSt5&Zb1qD2zTXn>5(0+N5S4T&Of0)~1 z;q*tZlEsNPueHKeGlj7k#Jx|xm>ut~1R{pSbQKGYiPfmZgn%mzm5>=$MK3RGy?U7M^Kc>2ds)Sb(rCJ+S!e-Mk^T^HZih2k{aQ@wR7TYbvL(4yJED=Ye)d2*i@PCR&5=dVz%KIE{Xm2^=sFS zrMrzRQvP=vQ;e{y&a+Te9IOZr4_DGt^!D`ZnBkop&ELbodyf&2tEO*+V)~BT+S=Oh z53f^DjBfQ&XbEopeN0#nwX==WBO{b0-cz$E_cirHJw0KQinAxcH~98-?@2Ixx5AHt zr{s=;_Idr2E`{#r+YT7}WqT;c-Cp<1dAN`{ouzXk81SIW*xD8JrgPV0;j!(R!eeZb zw$tzZ{9Z%7L$R=Sp0}P``y;|LS6f~Ee56w13=nU1T@#!9mp~|R2?h-wE+R|x*rV9{ zo?PnBN?Vr|{#;*R8XOqVu1{bk5)Y6tyK06nq%?qr#hZIfdMgeA#a)Ed=f!XKx#m=f z^(fe>0?SXq-a-|3Us_h#RTc|R$2vZR`@Uil@W2=J_u|&701eWrvGveOw+o1xu9Omq zwO_U)xH?I{?i{+hr-1o!uz*@CKNNW}zk-S&(Tz?!kBNZ?ZQ6b6TE4BOmX=(qu?D)r z`}QTCE6;<24}sp_20Md<7l%bhqyPdKQK0Zxe|?KYb*e})QU|0dNV6QjZ)TQTT>J`O ziOYV;#*aDgy=`>Q_^u{&5~!rQ9AiZV{nnhxGyZhbfa_scg6IU|{(@d0HmGH1_4V07 zbFKyCV}pcU70zxCcK2TzWds*S9~hmU*21RdpJ!J|OQ~5L8M*Iw#T;7B%?ukwCiCh2 zQ8!bATn`R6rpoE%{3){Q(%#-f$JDNUWHmok74<~TK{;=^BiM^pLsb*_xKXqQ;do~s zpWUD%ECK?`{dBP*4UHuw5{u^GDW5_@bZt}og{NpPq3Mz$$oH%hJ#kBXG z`~ui=sZriIR$3x+e^*ytxLhdUT7MCHXx#(L;ND-qq-pVzaB_topyI*ln@r~{hK zX_lmF#H#1VLB9e!4Ki2GT%6k7+uN)EJLYTzZ?|P=SHPz-EgKaqEiA;B3Kbq1!`bhl zT^>i5EyVIqIDGTwSJNvIDtP$e%E?hy%e_zLvL3EX(5rREB8<^2@n*E&D=I3g(^M8- zYlYGcf%6yM;HG+wFRS&K|dZh-H1oJ6$2p#@W?S-ZJVTYlS zCoE5&Jh?yAc)BK9HK3hONL5u8LYJqot(9|ohyS$77YKTqnwpHlHZxw6C-E)vKVEV% z_24?e#q|M{WdTyPlF)Gg8@1}s*Q+9x6@)Z`uP99q)P1b^X1|{- zR(sy-)Tt!l*%>s@o{4qBRuj*$WMq@?H8DD}=n&e`Z|L#vU57ln3|phd{5=?4Fo8t3YT+~`FL^Lo;}e~$`cc3I?3YO z0h$f1p5Rm>BoVFNdWsz@H3-{A$(4IWi*K4dycIyLQoqSpsgFK|@k*O^pX$9Ba2| zMa-Csujof}%BTb-Z4C7FQA?r1(d^ur2U4V%W)VBLfS&da6yR6XEde~)RMXz=xMg`o zMOXD0-=J@)sHhMcWMN?eG5#_n_poc>+f{8wSL1hrl@fKyjU_@81m*n)4yYC$nt*v? 
z0X-q?*0)mty7^Evpa_nDwa1tbs{kCmOing;c6t5kRmZpCj01c3Isg~yy=F*+_=~Im zT~|k>E*e_e3;5zajj7zYod@>qdst9#8E)2h1yx&jJsYr`lX^9;@Om!Gv5szLmk>J6 zP@W6mFrT9WYQg8R4`#c342cFTM(@L;cd@r3m{=(E|?};q)T3LN|tdm_S;W;cs znW&qm0o(I|hGDi>FIpfQIHXL9#i_uKWIK=+n@^p7~4BRx7x`?cN3O1w+}3uaYNusD zB7Fjj1Gu5sXdc3d*)xW$_DR8j0maJlX`z|8X6s?04RpuF@89Qqyk=7lY(GM}hgZ!s zr&|-s34v31X-3_z(4IivBFYphNrBwh za$HbQkaqRw>fIu1V$QIy{Fn-+$O@7w2BhS^dgD&*WAme*unvxbP$)nTbvyTT11sw3 zHTYHG3(z~2Ced8NpFDeZl>8J9kDF|;7%Pb6M1C#`iuc zmg6DYX=y*gp2n<2Q4ICec4|6+#uLmn@d>4ruqmXxu+xpt)mf zDN?yX9@fkCs-#_PZB@$!G)a$E%tNQQlg6%zT?}d#Enr13m?nT;*jQ`!O20XL*?5$^Te%2K?%cim z>EOYG+V0&`%>wpIX+&y>r+x*^MqmF+TYrB9+QJced3iB-aPA-`y$MDg1>1r_+y8pB zUrlv108{wOwD59hQFOteTu!N}scjt{pNNFj?P-xWc8G>WvP#LD^}vDiUZ+h$#-Kj4 zktc}K?v5(XhR!QH1a1TENWUfH7F5mGvY@r(cCg9>Jt6Pgi6^gQ&|e8P=_f2`H}hu8 zOR+IA($MAIf!dG4x~a4r?3l7)`#CPsZldIINyb!Fj)~t?S}8&MZM(3sbtDxW7CBnc zUyF^^(?Ve{+6y%-{dbs426O}eih=6?wRSGN*#46xz=qiLqFTL?zS>g(4aY(?5{)b2 zxe<=>6Lrlb^5d-XJ-osaPB<-Gr(lTm{P|u)N9vejutuX?Ee#$K((*9<;zg&iXB^j$ z{V=gXjDebo-wn=am*tuXh(+4JWA>qt2ZP1U(*B9p*;x$Wkl8YG_Iq1(;m@BrMMW>+ z*K)yMsbz95Ac_u1%M-}@<(c-&Rn~~p6tAlb4esw)Ff8&1q>Z7iNXh=DKefW<|@w3bs*EywZ z$UB{*ReML-@Yf$W|LqSa)^G-}I^_VQ0jb(FeOR}znKL9L1SUt=8JF|h&!sWHqjY_Q zEDS&6iiCviU$2k`yoEcXe(&DW*Yn%X(eA}-T%oSx!9EQSXT`J4Zfv}1*ST@uHK(9A z{{E%7QK#XeFf%h_>!l6F;PyN@=rG%X{s0>T&OXDeG~Kz5g8KSh0LG;-G>~5aM*DF2 z?fz&1_I($q_PU-KQ#_mgpjS{CUGh#4r)x9{Hl0WPCremTg2n~Q570QtnkL@krk)*#9oE{Euc^I?}C z53((BFAIujV)dZ~`T$qG@$@VJ&WGq`;NOsHHF*@$TP~UqDQ8;q;6l$|y*haO_-1bI zct+QlCr?)4#re>5p>przCH$SD&(|ZKzkc0`Ue^fi2NJ!X)!Vmk2_pu$PzNJM@0jg;uR2lhNS0sA zr?t{ODfY|4*FRsojS9q3EeUN1&1CHeR0N`L+eL`D=Bhe#tUH~c{ z1OaZ%V$YlX3k&0A(3XCoMFO&31Pp~IP#s~5)t0lBT-?a``1i6hj(h`bwhw?=a0ahQ z08J>K2M{3Wxu|HPMcNNh3kyDiMX!a+0!7F~$RCSL0v=x_C`oG)XVx!|6_$%4bpOGF z5D1InUMHTuJ#guSQQHM{>>Ftoq^#T?YD}d-;v9of%L%o?#@5zEKN&fyf#&qx7B>4v z@QC6;CQ)SxfC9w2gFe8=$H%EzOuvSjK2Z2U!JB5)BOp`k#hv)h7Yps3hsQ;s2BVadov|LU-_GB;vmf|eF1ZAY5dzkI z_>{=n?_>vLeS*IKHS=0DE&lp2KdF2)}lthsTYy-yi@Rw#zPPe(y(yo(Ns62WagbQ9sX zEI3{FO--vJnD#v>h)8?IVgGjYPg|y_6(x5gL2*QCId#edv9(ApQ}dz5+mILFE`Qd5 zJb{a45G;ke2O0ijGj_{-@tm-*F#k02U6IpEaq;m~^a2mqvCWCn4)q6_l^YN%nzx)m z9WGc~vGRyY0rUp4J>Z8PJbYLd$u_eFY%zg^l8(G#e%+rMW|$O=+L3JZf>81MMx+R%nz5`j?@&q0KAd7CnuGHfz`=>6-vz3dv+5E!*R+ z&lO58gA|yp9W``+_wErwOb|SBOG_p7l+ooNkmOZX(y*~b)Y5(^*oe^OGirvQs7aP5 z`?Q7GP)vyCg*{^B;<}^l$}G>unatlSXTA&%KRRbcd5U*`q#0!QK66!|c<;i?^J%9} ztiSLtc!v4Rz9m%~n}z<(XYmuxby!dY;2cpygZ&@>u;jiz5cw!5g8a!;E7wp+YzhnG z4BTRbqxOL;@8-Ryo%r_68;8tH5nytVmr)4$u3lcV{F^V4OUMbto3tX2c3E8fnxTx0 zOxAK#BK#9+B4F1<-6{gjO|@I(hXA0kC&1nPDc%SY8r(b=Xb?2pwvljxYSHMr@8D6O z=M?NMl!z3>F8VjKD!?xb1nrI%pvGq{?_2(9MtQk|EyQd&zD)QdxCK1a05}2LAu*t0 zAo;j$WQIS|-`{_rB{KlpT>SsgmBywiV{t3yuMYR$^WPJvRmO~NQi7(1Z*vz-oWsrG zCcFlC$ZdcAWP{y>$J-*D1?_V?9i5|>m!k8P8|Tigz$?{3*0I;NVznU~!O64Oa@8ER z3_=*MTv?>r`ucuCY^~}a>(a!69ccRY9?V82L0e{EaB|+q1}^Xyr9>xm45A9)Ucq=T z52s^bD8$k|ZMVP&1%iB4l7EP{O?!@oE{#YQB1Gdh4Zs-Bm!0i3wzRrJ<^~mA4Uv#g zcOaw@Sk^?wtqH#oyC3iYV#FE;Pfz-z3Q@1(X*={-y=oS8=ejkXK88ar2g+WDJgtd z*yrFZ{l-1xR{v%(x*reY+UINQQMd72C-_5PNkLUAg{NX1ISyZ(XfJ?tr%`Z0i%bXW zd1~jrJs8YsYPuD@_*b*cf#%Ix4B~zf)4rM(uCNSCc)BF)6mGpDyfPCIabrrDTMZ4bwX5T-IgsZM~*y4 zuf{S8sjNXrkscvsceZYu4@MGFgOFXrloF5H+1U|9gB}0w?OUoW*xDqnjCGoeglT!X zG|4wF%#ND~W#H*>qO&5i@EB$j)M*N{%Q3$b8XInXHO;QC_lBI%cQBg}Dfj|aP{Q() z(Ek|(EjUqQNCbGxmMy4vaQ)??Mtk&7mWW-9jdAYWxvFkrPXR3XKsO0%zuo!yFt)jW zr)mJ@+0&wBg*!JwY!q%f;gI*+Fn8z zkxaF$e=jVQ2*XGzJ>8q%ySdpHL7o2@{8V#=%n@{q-ABKBcl+R=1neJV(uziDEmT0Z z{VGGH2*R_Q`Lh;oNj`2j@W+e|W~Qb;e*WBmrvViTkmy=+_1`Kj5}erp 
zX8{d9GYOeXa~mCA(6F398$ETe@iGv-PI({|r09xoexX>Ju}BkFYbag}KDJp%4#ZEyTDJ3~Mk5JTvO&Dlkf%yG6E9ilMAAV9bkKE#Z&}Wbf zWNIWXUw;0zS;fkVA9RM~7V#2v!Ao#|5heTtJxXct0n!hJt}NVe2!P_SZd-9xg#)*L}I#??)h`)h*>d%+)Gb&1RcP(8uFQB;G{ejpT>Y1H!`oh{kQ* zwaY1Xv|WY~W;P?=eFu04*b0B`?)A7CQuBEF`IpwW*TEkR@`rphhFWf8zd#>Js8@nh!au(=5Nk23eKgFQxeP@>T~`+Z z_Es>yKuDp;SUiTTmMJYxK`jh{UXECu3xuGrroA~xu2v5oxcF)>A0Iug5^yyOt^cmU zS$x~n6SBtr7ir4=9i&z>#JhWV{Ve^EwIhz-0&&YWG!2JI$C4r%m0E;SN#X! zZ9yc}#+!iIw!{faNN8xLbJJgf*E(u9$`W))>?b)`=+F_4DOrC`Tr+V2a*;|rm9j{1h$lNz0qhXg^Y%u)ayUIBn=YK477UL7yp$7>ik1XD9?fgW&yuj4^|o z!vGcyeDrKXXJQ2CRQA;5{`Xy27h8ZCx(ePN0CSFm2ZwFr)BPYyC0z`&CZi)Gtq2EJ z)d?HapM)ZZ8M5!i#q9Y9*-`0`agBw^kFBt7^JYyw+K2~*`W62QO_bd)=Ia=<&v(Rv$Nz*Mm}V}Q@0%p5`f5HKJ55UbUm2!dyi zKc1v40Ue@IQev>V_#X^}m2MR_YHKlB%zD&{E)NlkfU%5g0!ZwU0k^+nSAp?Du#$;0 z8?Bnj_}@~FYJLAGYd`zsH9Dw6#4#KUB$ggJJ~J z6_TNNl$qVqtQI;M8QMZl`T#$_choJD%%Lto$ouBz_L^TP2K5Qqxw)%NOvbzOEU|pa z5TEa9lNo#?G-)-w(*&fFRF@s3{cFWApV89N0>%~vR3fo!%xdtA%xtXw@#BZ|?R7#} zuRwP1+X;x3#^OSH>+@lB)GtKEB>r+|=W2(bBTpP1ja+EZ%EpNk0SCq_%LaNP=kIT`~&@v2)$jUej6PS&bMC}U5nHh=mBs3gWZsb zfF-H0e1ouRbhp52L5Zbs{o@LLRMZ~o#L_a$w^2ZPeI*2o1KgfX6YByTQi4*28r;a zb(76@uZiER0bK`y!wvW*8o=_H2JuAuPFHvSukmk+FzoTO5K$=z;Nm@2+#hZh!mt=g ziIQvyaLsxX>`{-XYgh6?MkOP6xWb;_u^+8=75(_}1o;65eAx{X z!z}?&Pqxw1KgXopKL)+s&L!3`CEm%8tqq%YefQ2^5pv>F1U5j4!YiXbemnr8@9E_g z{(bvNSl}Nsn1uudpF2BiqHvxxsJ|BNxjfg;(K_01``?I&o6zIUUj1>6J*8W9IieYY z`OC+TSAwO7pmXvMLW>q5L~4u30;kkG9oYbG!$J-uOGT@SyoV00#sn3pnJ1r7P;)*ZDVc07))WLp%fL|9Q&WFJ!zZJ5 zfIV_?cMBJq6ALb(M6{z`fea!4bs0On+5oxGHn=NF>Pxhu`w-Cuiv+{?P%8}D!DlfN zC{Gwpm4UZCkA_rDY<&FpiVBjq*2{g{5-m_NKGM;9A08l@T!g!CZf3WfD!!!*OQ%T_2$1wj_8)hz^!nY-BOvwzZoQ%XF5rOa;72N1_ zm-FlP{jm4RKKu}tof94khLd*!dKg}GW-LTAB~x8cX94Iwubj{AclY-GeE8moMYubJ zjl{KV2M=DjWoU?He^*0;>&}mLtwv??fWMf;`T*laDH-iL2AvYQqKEl<3p(}HHyI8e zKkkei1_kMjXYb$BVImF*qF2Y1lQ{ntVFR7_ZCbzn56N+3Z2S)#3K9{)4o(~43I2#E zF4z*JC3TR@FSovVK&Fmpz@~+~+YbCkA*wX44SZv@;85yDq8YaeSWkWVS}cKOr+OsB z_ga|rLlQIv>o#f4qCyP|ac|Upmx)9vf;ksqB7lrR6n^XOKKZi^h-P$nLMdcreSLie z#*9dTm6zWMwnS_gw4)I?>6y-Ne$Z^*BP@;|RnO!9Rx$@X(8sNVnb8IoZ8Ne_-zi`9o!D*SdR<*B_3v7+%La!f}Q;N3I)4bc))YY%Dc0{qIW%bi+NvdR> z%4fqF^P4RMuTC_Xw`jmigUNaE7OfIN+vhq&9v^amx0$xQG@sSpzv5d;%6$w2Wy}u= zkTm1@r+-0cnN|jUYBh+`AW~WFS8WmvTbRMRNqQ8D?4}*hwx&9{xY#%CBk#H9cFZa) z=`m6YNPrU$Gc5k3tE=nAfmw6>eb8Pd`ndDaRx5Y%TG<^MJxBdug9T=~Ko3_y-HEr%;_l&h0Kr$HOIT}x zA-_!w{{pnNVpfe{&)Q7YX%QU>+!^*#V$w1KT+cR${`I?OxNofZjUn&{!_5+4OIQRr z-Jn4F+fMS~TctQ}tRUe(8U}_du*j$rukJGj_qm8yLb*oUb-{2|du!{+*YDd&d>_y2 z5AZrg+?VeykzUhJC2o}}s@%aTJYE~2(%I=gd2jr}$olmD4Wl7=>l-QeZ!QOaC_~RNEI3 z5o}<(a?wRTK6rZZ$h;vzap_l(D4Evk_sm$FQU+ZV)z=^3ropYaO!64$p$M`B0?Luc zYBhKuvmkhAtObytLBviFcJCjY(?CY5{D*lhxKC zu*KQw;g(SBME090Bf<4Rwt;e?lEgVhrSTht&Tl&K`z=1Fb7lr9Ht1nwkP*X9BsLE_ zG0$aBbPdS>{vXN5>5d0K1qc61vorx&an z`eh$A;Lb9Mn2k7WSwb79lUuf4G^E$h?01 zJo%lL0PK&!L(!{ci(s5RhMEJ$C|iV? z#zemSmm6mxU_tmKbJn;)Z!peCfvp7Wqd_A5s0-*b6(7#81pp4{ALY?5cYwY{Ob7_& z+E5fnQXKX1yT~`K*3;7?MAUWF7VvVQJ{eSG#)i`3;=2$oNg4v+R!~q71KL4&0;zf9 zg3uwUcb!~^aDo#pYYqtF`puhb`rHlhz8^k(z$f1^G?cNAutyjyXjCP1He#v05|-vlc<1%NJ3R1 z5*3hSIo>K#Sp{F?HCQ$7#UngNfaor$a(xd(WyoqKWdaBbQHJ{@?tcpHR1|V9qM0Nc z3&iA#12-!2=Tzkp(8pBsd8lec+yXFyv&?DIB@RqjWrg4=nOnx&Y^A3!z+a|fki&m! 
z2Mx`6ShhwF9>^CV?tpgt2d<8;A6};|R0;zIE$?Q-t>k-5@Psh%>iFZw1%SQKdl+at zg~-Ssv{Z~OJE8Z}?AQ@(UX0s5RCsI3Kh!0f;wKuNg%Knd^K zdpl4SARYb0FDpW12k}(LgYDyb5B!4-PY@|`3=|~I17TQVSrADNEgb_aeH-&PKsyry zEh3Hpup!ya2$h)NCgC)I8~7+7-);mL4JnV4-E++Q!Yt{Ev1O!=rpID zhTpn>-##h_JQ^I`ayIhQDDEMnh~0vhP#Q4H;S;~33?_wN8TT;XXS8GJqKbgHr%13_Z=uwnM52Cl@FUzFmZT`a40#RZ<9!I|_&k zwicP-gfFiKq6R004nit2Br%DJ)%;yBi^@?rg=afxCOxMI8}jk?}10PV)j>LTXeJ)eEcOC_%Tl3IcV|sQc1gF1}9S*ffbbHKav}E}&Bv7p&9_ zHlEmR)=}^lOd~8{C5430(ff24(c-m_$%<(Fqy^z9gJggw+=<^NIeB7wBF;|hfDI`N zM20E7fP5`6cnciRe^x+obcAww!fHaBAl!xM{DT~z@TBQsji=2IeIuqmDg?<>w;Bz% zfIya#F_+4aO~Yy9n=y>RR2!|TU@eJ*B}(v>6~CIYLTU)YyV>AYVjRL70rkv^_9YHP zjsIiNMb7De3%Z>4UsG!MztT&d_eX4qjWQ?o>DL!S7??LO)=17w!Noz?i^V8JVzVpP z?CdNY^uX&ow{KsO1)59J$~BLAi4_&Ca!acPa||_e0=R9Y_#nDr{CXN>l0TFuz-}`c zb>7i{|1|Tj#XVYazwO6*yo8t_^%*$dAkse`z_%Ul6q4$>u(fc2OMyv%GM?0bCb6G| z%y!|nIOA*{mnSTayZHnJ#^LybCOPB_IY2PZLPh}%1Ck(sYl8eY1Y_jv_a1f1!A>}X zf;i!P3%!#T5Ka)DG!lTAs_=)Lo~S^M1%b{#0#w@DRD*{+f-_WLh7h9?c4q}L@|Yvv zDCkfB2D`&dw;aC+UuPRiBib<35JW&8Vmc_y3?AeoM5a)X-V{{|<^T85Ao)8Vj%g{79+z z;(qBK+Aeim#~6486foS`btQ^^Q7zwi5Taa4qjsaYbmZU+Xt19(yjQXD@SvE}I5VDp zgiDqeK+v*p8DN;CH}E)pMeO~t$oiS20M;-kd>1DgA$ooL-aTg=vBZD;+DD{$`<8iL z7)fe)f3n4HyC5pcrB%v^W$%KngzRv+ZZc9SuW&*LvH(Q5$3ve%==ERk{8b9<3B<^S zh>HdQuWO0AIwok>MF=G3go*V=j$A>?mJ0`y!SsQ+D=)RB%T#xyP|gvDPAN4=NJtpL z+UyrRCM4ts%V07iq&qN>WOI?WP=m7oWvU`lnHQ+Dx-t?2P}F;bZ+4p+qUn(|7*4M6 zQke&Rxs4>mK^B$>`v_=WP`9gA72&joZd2`R*WBt~YGBlV(#qN&3B3Ns>4_@)I)8LT zgi=evvoRNTobn$ieY0yQ_22H5j&ta?!NEZ^OLE={VQ&_K0{lpy!MNA0wzUdL)U@$E zybTU*W$z|rM2km_q~a11s+>xG{X#Oyjj?@lYU(iB5!eAaQO0j@7t#{OmNbb=s&18D-AGXZ@usRXZ77MW+_?oUo3gfTibrAZF+0A>WY$73K(9is_Ve=QCY zGevR%3|kC!8@m#H@gdN4;`{J_oc9VWjiH9r5D}e0q=pX)(JI760*czo5WbufBjp5* z4>*^MzT$xrZLC@zhlQYl^^KdL?vRArt*A-C#Ulj~O4qz!`VeBdza%8-z1TGf1q8-hCKOQIM?cM?Her{-LYQk^3@SjhFegnk% z9KeGUdjgA#L|auX7WN1-pZ|^Z_|<%vOkv~hbV1P2gawl}UMhe?pW+dX1CZoHO^S+& zB7=!|H6nN}US4%3(=s?{Bm)D#n6*C(wF4tLM$YO&!u&_|60jm|;OFpXK{57+XHxWnR} zXLNNxf0ZiHzlSn~YyyVX)a--HJPfy#?8SQiw=;>M`3Y=(Xjcmu(Q7c ze<$u6%o%zd%*fVDMw6J~5_P3dH-Chri5aG@lnd8kLSQ)PFqxg)_(>JRgSJd8hYz2? zvIC-q6IymPp9Lx1cW;9fkFW?+I|dzbJ0kN4*-cKHnftAyy-JuS-@ku9$)S)yAZ9Ia(<4^)VGoM|n5SAz ze&&BXs)&9ZlbqZQWW}$Z^zR}Pjv48-)o+`eO}@!`KtNzO*b}zO4#q{NME@r~ENfR=$mk&ls~=M~Gk=^iCqnnP$mG0c?v}P%R@=q0 z=!q5nlZ&^c=}q_bpiCj0U@_841p*gJic5X{2{>DVi9C?!vB8MwO-^_qayDc^@|3gM zvzth=2@45J<{{iA0`_s}*oUP%(+H?rj&}i@z6UzQ(Jc}fVTZg?!!}EkBTY`@Lx<)= zF9nDV#8V+-#5MjjfcH7nr`8b2fVza^2}o27>Ms~O;aYEMB@3`3iTR1fSOQu=a#;`u zQfrK<18m0DL!;=1yw?FGhXhJ7O{3>`N*N^;Jox0jMtbll;5Kq3p-LUs#?hk?>PapU z=L%qc5z8_bgi=sKavLNhf8zCmbJ{G7l4O}99za;9HyhOg&Wy6ujfTIW^fE)Z1lc<* zbG_Ge*4~hC+2`1=NtMmm$RVkKFTGgIFLiw1cpL)A`)cQQ!0Y6D?6(3leMzO~vJ@2+ z!DkiqUUNaN!stLIj4nXQD^M4)Cdm1VaKwl(OC|#Gg@{~->$>m|+8CLvCP{y+%$=w> zNIVg04dPIx7^6t!uX7!H9;<9jfqX#D&VrYJ9|smdDLYme@RvCEuv2e~8p^5(I|uyO z&6_tXy{tms{kRCGNXYLmbEwjsv9_LPEK;2~y6LYUwRLvtt)ORS_75o8IGTgSij?tw z9NmJ063Jm`m~mGzzpfVC|2? 
ztnW#5Sr{`+t!m1w!+M>;i8ocX*2v~~3ET2RoyGYi;_LP$Fz9bD0QTucyan5dTq&ffFWd$`r&sK)G>dI06JTIZaLNv1 zzE(_f5Pq=(w9undYi3cr)^FGl1fE7*iJqPwEtMn#)H2Kl^d~O!W+O?W@uBDDKz-s% z;%y);hZldjia4mOE{;tI@mQmyrbo{tqD4Dfb3FZN%O zy#H^cF{j^1z<|UzQZq1+z{xLW?^=;{;l!~kP{IAw8;EuWD;Kzg24@N4{2`1bBVoW= zYvIqr&Fu<4j)N*mN}3qxz;{aNaZs@Ko;>M>94ICq$w`rjafDlW?0YH65f__oKJ^@-pIrR@cT0y zSV7K9{1O>yiRtJRi_su?MiNS>tHa^JyUDBx26d_i)$?9`00QD41<(~uZ!1Syl;qUV zDan8#3Dm$ig$xrlJ&uO>ObrS=F=^S^ssH5+O&k@B7EG`SzHp)R?dR{W{aKyx*ER=CX_0-4!*!6M`FQ*LGibz{QeOlW2|5z6%mW5joT0t2a3stn}Gw6 zRvc4GXoX^?G)d0w#IYIVGjRKng?1-%+>oP8FfRe!`Q*uKkpbS`Vn9E?Gi(JAI|#-V zlX-7UeNf=-E)Tf{W7se^tt~1s9zW5u4WQl9cGuIb`&Iuof0Uf{mQJ5OUC7#M z?}dwq2iinpmJ5d}p^4y}M(GGiL`cw^^N=z?5B%$Blc-xhp>mG6Z~V~=CI3j>LTE?Z zAVMy5GY%hKk%fUguq~4EfDJh5G>gKpf;r%@ACMV>W)PR(lTCt>K_|9~@C7m@Jyl?b zsTAC^E1~{Sdq@-oO%(^Kz67Emb7N2wULvXhAO=vP%QTLW4sFPU==U$IiqXT=@f<9j zX_#H=sF6+uLqg@Ha6-Puv9RJuV89LyB{OP%{O&mMv>fwR1b<+1WU~3v$BXstN;vjJ z8SU$Lx^;5RWHvc2cPb6UAGB}}IXl>F$0OY8N}_t%L68o^@o-l55bM#Z{4re*-Sj7F zp#=VloX3K_!8($XAC0YsQ)7kmZ@^@^f}pmk-6t5*{h9~>;eCPMvw#Sz<6tmy;sglH zaidFEh=KfjyGWEqG!n%Vs+VSE{3YU1Qy3(u1dp; zIB{_$=RYM*9b5tWfKx4^;)jvq)>~x4m5|f2fKF2L$gHtnrMHyGT6|QslL2NB`OEt8 z@l=SYtinr>&C@n$!F`i2tAof+LTKQecaTTJEOAF_StRZWYUR(AESBbIzli3Uz$N*_f+RQrsjx17+h~<(nW8{Ul!b1``4lD~6 zOtq0Q1qSu(E-e+Lh0e1zlXKsZaOHx7kSOk|2H63XE(h}`oH(XQHy=rsMSEl86OhQh zV;^D{R6@i}Z-n4|0P;_~N8rZZ_gbVh8 z@n@jxa4YYv^}&aSZ{}T(!5*jJtkj{*_7#(f@cqxuZH03=Ozcg# znrj#$(8ECbA5y5&rfbZ8H6BH)K8hM05EHfWHe!MStZt=Q&;x57a(%aU%VCq1pti#BOrc7<+ z_-X&AZ+r44@JPLM?#^Nm#FRTx$gSn{Pe^?7i{y^y+tqPN?(X*FX; z;n|3>Yu}<#r)De#1>(&Gd_iUR+$|Fb<GCEHLNdB@$syFjMAlvF}@ zAoTHY06;rncY6Ecz%%pafbR+~hN1>hupKiIu7N4uY*=6>-@RKWfLY?1&(E96uz-sx z29!gNc~xUDXfxv_6x6IOZI)3y)G#tSPjra8?c(BsV}FnN3lJv+9y-(<#6B%2$Dl&5 zB0lzjGQgm+13)1|esr{}R_Di6Fe&@;S%jvyanyngG*5mT4l`8q0KuGBlG6>oPu@-63}}bN~j5c?gt|@^C5KHr`dN=!m(0 zz|H;8aX|7Ei{?=urv?2z>QlBV`a9I86*ta)*6UudBmfHwD470YpTjxZ17HnSV@?9& zV1fHzH*?FS=NI%E%bkk>YL!eo%s!GkttnMWBPjWl^U*p-sPVUA=5hoo4<28sL=$m~ z(K#P&z(9%`NJN=3(4zS+1PO-nzXkT|8`OuwafM#3HsrSn27=q#$3C1R&F=g!Z5dhp z06;~7{;VW5F_hY46g{QZ2$2)%VB&!S!v* zn!9D9Sw``>7M3i$CBwY8_6s>cVs5t!aZOIN}bV3i`&A}ny{-d$yJzwuG zDSEpS3Q)h&#zFxqYTKuBiB~N!ooC-?!+#Fu7^X0iki5nk`)_NP`M+0&kj0yI2k3M~#2ms9O z&rUcMO56Xo*{2A_`*7^9R@%%H+&%r68DOS#{PIZ$%X8)b7Z$RvsHz8*6bhHyU?oS6 zAHQ5QLi?>_M8w-q`{)rH+S`BL>=!ZI@$tC#Pjk8)e%}-X!B=yuMh8x8msbq=KqNYJ zIdfT^dl7$m%b51@vx@3+kH_Xae$xz5DFZ@HHpX-eKuFvGWhl}+i3Y^_=G3jRw4R>qrAl_f@E31LnCZlq$V>9LTkW{B!}OSs}e? z@}r?CL_sk&g1%9HH((|tLG18^lET?MhfT)#{E?PUDSNH7iMi-aLuiM@#Pt;B8-^C2 zI%!^EjKt)d;yEx~r*r4oZvyH&hfNr<@m*5d&6}dWASdv0S6?8PeY@BnEDb!vyU&i; z*p%ESCYXL`Zmqm)w2x0ri)!yijb~Csw<(T$=-$M`>#B}baNluWUccTL@7kk>SegxX z(RVZrPN|u^D2s;3@YfR6Wh3st2W8JYGt! 
zpC~-7h8;ghKQHKy)f-BLwDj}?$L>X}!$*WcCiSZPC-f6Vo=Q9H)XEkTo85aRlU`#9 z#MnB#Q)zj5kMiPfB`1yoI{}XMPpo0zW8B_{hdtq}x!>F{faU?T}M+_&KKA|h#!&wxUdSK?HbNy7u zJmC`;l4CV+e=`yff4G8@pu>srq=9A2mbrc+idWUq@g#W38VR8@vM%5jgmJF87s+Xx zyru52|=qFiOTc@I<)d5;2SeKVb*)!qqLA273HbIP(h3`-1i6LuPyWju6z9J zRtytSU>w@2x*tiGCMFr)<#Wd~q>iXNfZ0CC_su_~4P3>Cq`4=QS1?aUql(1qG1rs3 zflafz1;s?uPrpyPG3nwZW|`ogZa$O{bHav6$avV>YPYC_Q`V--*G+}6CfX8yAZGmQdFKou$UB!9u=hRq97-CbP9@R3EP;r~(fgh!4)(!+v+Q z8xdww(H2G@g7rh5Fp&i<0NqaDT_RFo6b0XR3T9LdaV(GtEz)k`D|Nb0k3wt58Ck@K zpgW~3_!hrxkjmcZtBL8sD)0gWW$4Cdi{3v5H*bja^_i3xfRW^$K14a+dHQMm=|Hc> z!g!9)^_@Q5lM33uXn^Y8C#CY3GYKA&=pT1kcO*p|dbc(EMP%kw7KJS8z8@*`0tylR zPZ=Kj11?>@Cq$t0M6Gy)(sP)YqvmG#SmGl}jWGOE_D2vyfx4pD!Kf7;=T z`z_saIAbPHI$8IR@qcUlsZOZ=!%^u3(%G|5?wr|BuD@kO3K+QRPZhz73kP<-3pX&K zPveQ^$KZogwRkkgos45aArDn%9lCQ*P~fN$s;e5>*tD72SCNmP8KJ^xb4{XSXuHhR zRp$QJYacn;uT!`B+V%_>X8QTDUZKk?S9wv#0=baFmVtr9iD{?$H7;WH;`fG#{zKx@ zK8ptckNN0vBodUhCOjPSU@l_I>dHOj$U=LW!!*{4GE$S!ng_a|cqQ;c= z58&Id*2&p#7R-BR5wk#fedYEKPXNL=rZ<$n<*mXlp+-`V6~~ZAd1uWUoY59jR9vH z9G3DitIcPUt)S}?^&j@2)n#m-<#iWhSq%3b?$eOl;lU%!4_RNsn^*P823zB!)T9Mg3G{{hFmdQ$^>N7+{V z_YUx@Zr{1{VqKfbw+)#VLD@+I7WnfXmEN{F_2T7AsVqkQk&HDKr}!%?>vcZt$HoT` zSTVlG7~$9Ns{yXN5knI^@2q2R0bT<&mq?T45$I*QyD%0c#59(1*F)ICt zbtww~=S2@uUF7>SV2O-C2zqAzY1dC;jOFJyKY!*Qypgj%6(#H*)amc7QcnCE6R?ja z;VPp0!w1QHG5W~Me033nfFV>88X>G|L_cpFCu0*Qi5vsaRFHt{#sBP*qA*~`h3hEf zOdf*Hw61Rd+XL_0xUEEL(#-A00I=A`zZ(leTu}C8|D&Iy4DZAVmn07gF{Ow zZX={a6tJQhM!>fgWP3HZ_0y2sb!w$wJ`IoyXl@PS18~}K#51I z8Uy2uhCmH=a&?DW4OzED0(|~zTw05Yx~zrfPa336Pllx5TyEP_5RtK&J6Eevgx^Y> z+fLjaxYls#*8Wi|)SZPj6+$iri;A4hMhD|#qTl@{7(V^t`Y*lA8MZN-_`Ucis?`D_sE# z;dm)D`l!j5Y~1J;{)M#~sHlb){irIN?>liOfjsn*k($pqPUDO4XYZ_G^nMs)^f-_O z4Vw-os_^f89CqTFq2Pr3w@wb-*8sON%@ztaw7kW)jTC|HK~~YY2&?J+`*HBipUOJ82Cn*s zhUC(UX0whd@@s~!?`tKi?$5*bq!m$<5QoM^-4n9_$I#&gFPh{r0E#Zp#+FQ24?HzO zL=y1zz!cXPl`3A$jbV8*E-c>nVlapv%iYI`3|0R;sOQQ})NjY^gIul}Q6K zK2)Acg_*rwQN5(4ZsP~(it8p@%OuhJ@L7r-a{s-xl{5Qj)n01Vm_cEG}wJ=4?O{!L+=#wP8i{<|yP*Zb$dqt%;m%H-3}e za~L}_Jcvj2&6c3S@Ll7Xj!>k&q$ z;BjyXIP`ysj=0qrrD3Tw{rwrp)DTSYGYfkxj1B~FGvMeHu7Ap$8?4dEfm zxk7lR@tVG1amD@P@Y@9lMTZA~qQ_0N&gU~T!}tQLn2M>c{gr-m=8Q{xzFn)+>g?50 zTerGtA0T6B)ke(<((hk#vSw;z*G9swASHT>bX5}J9FcDQ0}dEZ5lb;3D+USuwJ6*b z7C%72Gx`X~E2zOPi_H^a7T#;Q_nl33s-3fR3nDxNtKIfs!Iu@(I6`QJ({_QNXf^80 zyolSMv8xw9T+wMl4JVIgPJK8~^ck_*^c&lbYZ2#Nh#wX&A?1^O< zyn|n2ie^h!LaiZ9m$>IcZv)WGVRpvI1VPl+Rn^$>8LRYpHglZWS8qYC$_S_s6w#9*sv4kzQMGBIZ$Qe zTuD7Ld5{`e>`nn%X~@v=k}a;$W2LQq6Dy8R%NcLZ0qeHhV%M?Y*$Qf0{ zR~L>f*F`}KAordXXjnq2%W?vSDUd!#+8gsM`qndwo^@~bvU$41hO+Zyy3!XnqzM;X zA3#un0#X3VDXTx{cu%B@qR;3Dctbc&CqCSS3dI~sxcdMwtq>A2TtZ~-NpsFS_r;y_ zJGrt%EeH_kn7`J3&;IO_o3pP&Gkp#89nI0hQXgOZDJxSlQaJlM#jda zS+9V9g&!@{PIqy_=es^j>?08_v|O9sr>?DsL-;W-nvgU;_Mfn%NJuG%MeMi#?wvb7 zJ=o~;2e-}hNM*3x;Ul~Yd+_9L+EQzV)hXY@#R<_6}oQ@8(}*pBhuV(cgb^9n2mvwJ7EC*fWoeVn5r8;S9KQx18XKzUM*pof^ZT8 zmUab)RkiN9@>UlbtYYfa9~V!87H7$rDO(FFWU(8?SD;q?iUt%cIq&(!{R?W z#BRjBeug0uSI1`55Gg^~*LQu|n>UjaI@*Gv$>qq-_2^om5{WJRMHs@a)U+abr|un( zM)ec^)p8S)mHh#`ckO~Kwr6nhe(rq+PiGd7Nb&bj_*ZTii*MZz-|Um z9lP$IUX8KSySWw6O?^FZcxWpN{;D*8K3ut?_`6@{Pb>2O{RhoHiVYjm`5rvP%vMko<0g+C^+ za$Nx=T-CCPZiF_WSK%8beq{8Y4QRmX+uPs9ddKoy&-Np729#%3Gk!eEg3we0pkPRJ z|AsQR<22$0m?k}p?ZN!cJ61TT)q1aG5@YuY2?89uj_R*pCC z_OJ5?Zy(PJ-Y5x6c%z^;&tT;fbH+g1u`%B*XXxD4Ogb(&BH~}dgb2ro{JppR{cmjo zOaRr8IRNz5N2CwN(6Ov*L#S2RfdYhQ{=f8D9ckTFb(Gt!=c4PG+6Ev+7hGA0h)}nH zr~MlWXUIfBwWz?e63_bySNAM+-~XS`g0bOWq|)1LvRK_ULARTca`*70u3{hN7vys^%-9 zg=UiR5r%Xe?}uP`V&zJB-U@&F%+$FsD8z6}OvPhtGq?Sd+T%+nRZtq)CdgGi2a^{I zVa*c=n1F;pu{hKBC-~@-?w(u?$TwlcX!1x1y(m0}-Pd(yS0D=4>|uso^X}8C=a2zU 
ze!=JD=!7`AgJthIm9vqe2$afa^Lu_RA8LNqw)Sh$0T#r6aP4OZygf8&K}W3i1X*;y z1B&7bt_zLuwEN7`mqVHO(2lENG`R#UVimfQT%@q6)YUdNF&lsEUvc}k0s%rx=+3#5 z5}O#EYqoHs3Ex{bM#=st=y<<+mNJ=uadhtgtHjt?QDXc;93e-3xq2~a!n|+qZzu>l z91b9I-^PGU5e^`e+NxSw79BeZbVQ@JC%28A-DqlBY!me81^`oFN_nvoKBmE+$FD}R z+xo95j|L=XW7MAlvM}_d?T4Or#114k``nBn=I}@){d}fT0HjI8h2abx`@S9CpRNu%G#@26G#I(H^opXWg9BCRjyBUH2Yv-eT9n31{Nt zQ|2E_J+^#PaA;_+jIpgGhomz$aQk&WHNC`;=rQsoG9og(6ovy~BumlHV8*MaAkoeP z!#NL#)Y~YC5751|*I;FFa4_Q*5zC+D$@K8_tOQj@_7FY28-I*f%Q&lB?LD{XkL0?A3wjHBizrO#A&66~3 z?d@yP#HT>E@|gVXL%A!?LJ4{ny!Yy=n-3}-q{Axv_^AIYW`SKtj*R&88fZK0?LYK! zwMd(h3Bn%OVy!8^^OE0riGL|UG6c0lCB;x%18`$8SzP1N*+`uc9C@mKa2eZuz+4P-`sruWib$ zA_r7aP`v3d0`ROGF5gYp#+;@F0E_v%$@b#mLbIs?qZ$MwtXE4lu6pCi1(8K2FZkjEiKNpCBWSd1)%& zpu}XiNdSG9v#)RLlsVW^Sy@?KXnv`y;$CmELwIV86$OW0z{W-VT1KN}joH3}FBP?`)vfjc zKK&tg$ z)IjUCi6(M62ekUEyTfs3fa6Z9>t4-Zv|qS*F^JV#hcEF!p{Gs;cUMz5H_pHuGtSpj zZybVCaMts3KMpPUlBENo}m-tHh4MRi2r^jtB!mu!4 z)A_K$2d>j!ysq7=4{tVhUf0eHGsuW}xKzTu7_rs;P*;EjNS0c88Lh&K?wwvj2{9s) z8oA4X>$`Z!3SSS49+-On{>UO{`MAgDG>uWuozNOQ_UM%MjT={i5P&gmFug1*+eC(o ztGhle#3qa&23i|2im0kgU|Fb$1rRkt(tLNv7*cy>Y|s z#MhFEx8SRYVJ>BbQ@?%&SRe*+Xm`?CQT8ojA^nuC7{YjfQ=kzF!oQHxGzhu#rmueL3w5vgt9TWQ1vG+V8K_3ye%Ha;CTA)yXGY*a_bVa?vhe9OD-ns&Gt+Jn>CYo&(8pxLh@U)e=H zFnVF~!IIyNs3gxogP)qIUU=-{QED|4ljp>wX}-TxUgjP2!OZMV;h;x+$i%jn|M@*f ztEqmo)jxhyLlv2`jwlUy*)?Xy0lcnL%d_gMc<1h3|MG$V<(JbOpDQJrSyOFKPMogS zF6i^PknxG$wRoLh^M0)OfUUNv9ZbI4`HpAbq^7xDtIJ7TudUUFjGbii zLnHY2Oe#q zBwdH0mxPC3YmzCQ_ zMY>z+-XEV4R3qr~FD|+-swjGOjm++T7?kFc>8t40TNc)`dO*JC=;i}w<6I%R3Gvll&jQ#n1~r_Qp* z_e_-OYQ{0DS4jjEK^p3e^EV zJ~iRdud?4z0Gg|WI0mALh~B+vc#j@c*_)$HVy7?=FEn=6%-p-sE6OsmDn&ghqa%D4 z1)oeX&g!t0Tg@NxgF8I2ND(K5ujx5Y3|&g9?yyK0+r`t zh~7A@m;y9(>9;yxFD7P^aO_cyr~@_j@3z_rZzi!OgYl40j-jBIjwTlALG*63>&rUT z1op^5QVJ2w67LcSXy-u-Mo*qR`Qe_lLHk|JMMA<1SU?Y$REwBph=TYery5f3Qm`B0 zso*nIlgYX*NO81hk}qX>2>dR{b*ndR3dI*s1JACmw7fmvuUNTK99zbI8xnV7jm51> z7O-FUhWE4N$<|Tix!^OO<8U}rWH__OKd_%swiz&6V z=DX0`j-j|q)9dC_dAzc;EdQBXdwRMWjaPT&`hq#LX3fUWFe3qk9_TEE$)lPAKPQDM zkP2ni(zQLh9k@P*DpD59pb^h|`Eofv1Oq=0=!WRw2vbf&Bct_SLa)Hfg+q5Xb=S0x zZlgIX@^FaX4F?c$o;F$P_ZPdbd&>k%mLvgIueWNFXiXYenByH68WtvmIjZqSes$K; zwA6++9)cm1oCfSx%*d?@RAM}&c5$cqaYt`sh4qr z8hQg)1WllJR(Kr@?(uBUpu?SqhK}sQ)#D|W5S@FPpD)1MIvm>u%l7T#uWtYS{ZX;4 zK!2Om)Owqe(fjd zycph6jgMjG3q>y-q22p1hdB2Y5Cx!;RK^E!Rf~hGC&amn)SWJ^k|I}`(QFsoSlQOZ zs6P3f70NuY^>&>`|J@rxp-3@03IU08H(Fn6c45V>DbGfvr4Itjt5gNJ*cA(MT z*V%KC4DY>cR9#Ce8Cnr_&FG`sN}dy;)^**T!UgraC>3^ARxrv-{F55=@^a@=3q&i+ zT;LO1QXVT3Hfb-G{=OOar4iBZ>Jo193kn*XerL(v2ic4RJ$ENyi@2l8f?}?Ni}|If zktLgLkp|#pU9Y_CLsI^W7czJP^Q;ZteKn-*BFG%OIiQR`L^v;kaGaUPFjnt-;QCXh z1N6Ik^Vqu0U-?G~;DH+aYv*;iOsix>)HNGrrYONV+K*89jVz11XR) zF~AxScA@0xo-?|e_xNJtp|;~%(+(YmKcCQOP_C(>Pu0H8eoH|CtzVpba{_#0E<-RS z>}~-h%d`g96K9gvSqNkkGJHiIC`pn#Pd(IR-;a z$I#+*t$+&|8LQi#G~g=_gJsjpYP}1hpN2T`e7ReeP%Z0@v!zqRq{NQoT7zf#&K8oB z|H~TkyYg+<#x)x^E}^-X9*3r;>4{6G47bN#`{q_zJ478eyw8kCu!um%oeo}Jk89tv zF-T0LLS20sd-olEty~f*{{80TK_AAv3_FU4P*MwzC$_-wRTf>x99mdj<}<0h5Z1@_ zxkYePM6dqZV94T@<8s!oT^j_^K;L@3HVagukO+fIz6B)EQ~I*Cr{MA9%-69CoCbF4 zq_Zis1mBu5zPy}#G^QsAiKX1xbJF@nsmu8p9?c&mn*hn12J;4&9MkRIn z`t@tCY4f^tBzH2T48hPI`ySffFsKC881LakSXU9P8^heInhke_IX@qtW68acYj+-S4V{SAA^oEb87Ya2f@5go zY|FG(+3whSfb(brf`FUy8TFNOM1}XI@ywFmUjC`*>4sxRk2*p7z@E>DhH?zE*N1>m zg=f5rxSuxMK*z4ucrk zvSnaq7JY(>;f25-AuCp_AS?y4HoacvC6Vr`IUr-%d$$u$vU-5nyy-v8$o_O|o^p;M zD{v)4`M$CZ(2=BpaYQH|>P4&7NIDp8LPlnbp*)3fo}yR2l%HC1)JBqnjb?apYpg zV#u9XN5Z7kl%k2`&T3{NF3K_<&V{%D& zOnfm|2})lPYW-t$)4uh8{gK=xQ!4RfoO@~ePi{9Fqv&sU(QV`%Vqh@EzU;glDLX`# zQRD#~JMpe8a~Kjol;Wqc?0vXVnr-G_89e0FjA%~!nJJ%c4NZXuxQxpG*(-AS3-tid 
zAIHt<^X)5rEJ++TWQZ1hk$kmj$JnDlRj#2x`D++#)$v8?I%;vaB28v>R?Y$293BQ1 zpdNszqS>siR2p&so_e{DzqF+xg{0q$ix(LSVkl$Cv-=P+4IhwABi1M&&U4yJKI3#K zjJu6n14Hk(kiN16FnA2J0{_X>T{dwmp?Ep$&PgWX#(X2It6(=G$QbNeW7hj&A0(EX zzAtauiMJywqz~EWu>-oGyq}5 z=dnWotZ>E)4=gWu;eSCv#$dbC|^8G>AVmZQ9dk-(@~A^92k; z)26R{K-nw{$Uv7DLGT-By?)9F>RhnVHz`%>y%rdasZ;2bX|le~1PiSC6o#+X0}V07yN{Z4i@GFAx&3L6bC z*rv^y@^w_pVj^=hwo=m9VjTkgataAg`lV9|*`J}+$ugvaLt@pTzUF@0Rl^#%7 z*1I&Mm2>8F5ok8C4{6KcZ1RdTbiI)E>(|o_EM}h;BXOYA5EK&>jpOcXK8_^RG*&{% z#_QZ?f#T;7ZTn$DMi=}8R3#@&AUUez!c%@<<`8j)VwviHi9FsRU3cfa@9FAlOvo@M zv7=#LMt!U1Kz;n;GwL8$VI>L`?4Bk(@4?X2wW-dinXTbe>>l>vWhd5-LPTU%8k?2C zbui|6H6qH?BxK3js9n(H#!z$Z+Ox+=tKaAPtH%4>3mQMA(mR%63M7&L`t{+9H~GB2 z*i5x-rJr-B8(Df^3wwbNyW4-&X7K$VQMvCbDnTp$OGvN0irVV<<~u?Uq$>QchqJ5G JVMmWe{{uu=0;~W4 diff --git a/examples/running-llamas/artifacts/Llama-7b/decode_throughput_line_plot.png b/examples/running-llamas/artifacts/Llama-7b/decode_throughput_line_plot.png deleted file mode 100644 index 378f14947cf991da0a139aef3e42169a6c824d57..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 70079 zcmd>mby!vF_vfZlx};kKqyz;-8fm0M8Uz(lS^-Jvkdg)|X_OEo6zK*5r4?yJK#`P| zS^Iv!^PPESelySfKljn+-m9E*_IdaFuJx(4Beb=Y&k@oSA_#I$RYg%3K`;Xl1Y-^# z7yd--)ASPjOTt~r*j>-b-uTLy1J;H+Zeh9IQY=x>Z-nWB3L27;(6%ISM%{`~gXj8c^W zZ_^T!lT%ifDnFbx64Pd4r_x$ZuBxs&$4X9TYqZ)*`R9i^J#z8R>b#=1l42{lj;F@t z{8T#Wy7Gd(jsBbVLv(7|k$pbDpU0+ro0__#@WxBz^0j{UU@~ngB^~(n7WaK2Rt`^O z9{eh$)kH4-^Is0<(-GZ&eqc;Q6eItizb>`Ai>ZRXgvy|Q7&{Jq0Z(aiE&=O5@1uW7 zfl~IL*B0Lj5QInCRdh2uCod1v-~X&4CRg%wlprT1 zh1Qs#W=Gr5Ff=QR+0oIl(rrcSw!Qthd-zz_r%yU-Z+U*r|M+1%lKXd4+UFpEn$M6` zN~*C?AwFZNns_+}9_}tKCG*9lp~n27@D*@L81b>Nu!@U|TiV(% z%Zw^+SXgA4dH8reJlZB_WHjqk)_Kz&Mru1!s7WX7Bi{JogYca}#?jZNw6LOGZ|wRV zI13|nU#%jg?R`mnpNe%jsAy=|g@wuG&L~PQ)D_JitIgLqmH$ehf)S(0$wWc)krs@x~3CzQl`Mu3fWT8GKRicPi=q=cfrrRd1OgXMpeVAEo0zztUEsXfCzw ztxtSv_V*`d6elAmC!gE$6r-~qtFEaDE-2vIIcW{TL)Vl4(j{vrC(DxKzQMuJ$Vjz< zaaG^S(%AiB6riEt4l_W`jVumZk-@kvK ztg`JU%|IyR<5E(L>}Co6{{8#oYn@X=#L!Sc`#h_-INb|L51V9It=68Na8*nhr&J!* z{^Uz`^!MOih8-MuYGg^BXCx_7vk|=M>M9~CDtbFPBQ32JwxxxI#jG#pf!$=zz=l^XHQ$_sl-mr@%Hs?;0;J@9_9|SEhuU^WnA~YzvC_^p=gDfy~#v z1BO-t1tNz@`zjAZB7>f`e%_*AfU@-#lcIn2^xMFJ3EEC~&oq=e|!` ze$?V}e+r$1DTEU7MnXMAx6hZi_whVDJm5of?oX8C6BFO=>*(l^RaYlM z<~BA48%g*KUs?63#2CS!z`K7;7i&-8Qon5z=W_+_S5A3(3;`uK>xBzN1NAa~C-}MA z;e#)(rPN!Pnws*C51rje*jH2~<>25b@&0?dZ|VN)3pFf?ww_+lix*5;GX56!jtFd@ zF!dCHq@(3Kw{Hi-O*VX6o5+BSs^BPcH9k?lLAO=5T>J?cKX~Wep6d;j2COn(Z@9DJr@ECz1f}v~z=5YDR|bY?H5u zuzJ@s^CqA3zrNPt#df!6`~M}7ZE};+o*rwk{|K9H80qWl`!QBVYtig?sk&l!*L}Ld z4e!yTN6SsgcJV8h@lW<=W*WUN|M@wEWov7@dFTV32=DjI2P`)1GQ-#MLAd8y7Po}% z4zuPL7OIVH4auCf3=WdO-kNi5xI0}hZ20;XPP~f#!%RjJW!}T*bRxI$5mt8gg|e8l z-_2*ZY}mHlF_&CUllY8=-+A1Jo!&k@&Dhk`L|xka$}MI|>>M$12UN>6xAv7;nGa9) zyF!R)W$_P77s~|R=HXO|E_OX@U+9c-S?;42myp0f;N}dT@LJY8v+3*WJ8v(k5<>~O zGni>TQx7fm$CtN!?Ck79E(?k^55Cf{$oO>}?yQiru*AYmV!L{k_S^f%-Rlz-^9u`4 zZ#<))A1%>yc6V3w_Lj8&n2wWkK)aG77vy~W=K&PZ_JILnIO8EfK|vY=+g(QIVF{nj zw};t&&R2DIc5Z?HgsvL?=+~EAs7ShY(3nXsUAn|?*~Bc9-u})=Oo$~3?UjH*W*KJzpdS`#HqqiGM5WG6!A_olR#7Rqid{r z8{8e`7{>Tp$yr%3GJg*gcwMAim+%=G8Lez>gWz>=`0-%L6;cGuH-CKMNV~n|hSnW9 zIgBs04@@1>xYU#9po;R=QE$Q)K{pPRE*gGggo}&o`TmsM ziXE5sJwgi*0HWZ~P+Y)%OGgwRdwOJV-AW(Trl>NG4#dJ+`19*)s)!xoz`(%f?y8>S z*V=PBIy&NyHyGU*;pQttpQ&BN zXrhE|yNFBbVU>ivckgPvkferHzB~RZ&1}Up0(x-JM|)d1x;;HTEnUwTgdhDhhRag> zrMzwX_GY#;_2KiLxRuKX5-cn%R8&+}-v`oEG&MWXpLAVPLAM^@B4-bePB;x!_a_7f zGOm(_k+Rs=?rm>Bg`)QNV3D-Av~&TwELx9)<=L)0__AxTb%C23Ghp<-F}bv~w2+XH zEY$Ja;~%{D2%As;Fl4@XF+Vql+`M@+nobxKR$d+|$?`uZJvo^shEd|F*!^)v6x)m4-EgFd&jzrJbjWbWL(tAfO3Ty+xZPk8>E 
zAf=xR;DueXs>Py!`MbEhJob_|j3i-jZ^FVA+3gPj#S$Q{2bubs@lj=|@p9AeeGpJT+{(@D;8b|xTZ|WNu^g;nAU<(GyL2>TfxrL=APJ=7L!tnrO z`uWd3!%@aZw&5_Oc>wV37#xftlkW*8pbUTWM&xjF9*b?Md4Ih`uPAjJ=nXflFcgJQ zlRxwAczK0|H=urX@jq|OvP|#^efjbwd`#x`C$-Z%+-$)J>vL{RcK9-!fD@%WHv=ZW ze4#*YL($P_;82bsyON;^WsU3Y>EF|10)&E*Q3Idt_Sf16*odmC>P3(2?Qtc1Hh_U5 zU%yUHl0o+?u}g6Kmh#)pYj7)}8Jcz4=pCEx7$&a2zTM9lE_jr)<{x-u3ftZ&>DCDU zDREw*HM&m$vg%dkP&5f$i}D%RF))jqpFJ_WbWtN1o2?n=UbO@hu^;z|we zKoiQ0s=9jFX6;bcbq=KGDGt%?an--cXb(%ayM3F?+}yn6_u*&#rA%AcQBWByY_31v zn1(F{AYw@(pI-0&`v5Lj_O!AgyB z0>`7pCUu^4z`p;P_ZUSsCx8a;*J0tlj+ntiQ6MAq|we)H-`_IXZ?l* zy|b1SL4fc(Y=m#bF!rAOLXjIF&(ze^o|<6S1~ZLRpruz39*5R-b(E%wI7MpzP( zocb~m0QRto$w#K}u{i*93ToBV;g}L2+C^F~MEWn8zoSx8QZfdBAF-~)P7ajjDUhS~ z$;nic^0G3tywAY}E;ngeCe*M4{w5Cz39(Dw+uRI*)vkH__EVGZ6SMb^&Joe@x3Ku1 zv?fY;tWiN@v2t+W-MgjQrL1bf*Xwt7xWW?Nxj?ah!O0xSzqN5$L}YVoQQ6qUWFFuF zbceXoI-Qp{L~Lo6CA!sN3FRF4)0Y|#4;yaq(=5Wxnw>r+(m%N@ZqtDeC-(9X*Xy=F zfBxvUUz2?FGaT-7$?y0fOXc2uEhw4spbex5S{jP~0ZzF9_y3=7XZxdXad8m~DR^|Y zwz{f%`VqExS1e1caxAm(rP$@eFK#ixOK#?4dd9wGx4z*JQ$1c*d*=ag>YO}&@+DKl zHM{BF-d=RmK=BoFoU9aCk9qbiBqAb09~1jzLZ6X=q5q@|V2~I!HX?d|Tw~2kX`zhR z(oX`b)9GhTjcD5PGkUQEZvlx~!Pvm|`4)7EEy=5@ssN!@4fNLK^K~Eq>~KZw)<;W% zF5JxONfd(umE&@T;GW{^~>GD;Xah34@lA&V|K9HPI zxtZloF1fz*9Jd`Eqg@ue=HWCM*E{D993>`FEiEne_zVUt>_jg)YzT%qCU$~8(;j** z2=*`9OrbH^ZqBvRUA8B#H!Ihlf-m0M*$Ko7r{4cedc52`3V3*l+YEdZ5ct+;g42Yh zrU$LGRGnA)HPRXNYg@2-szDTI-90;5ekT6#@{JPit(|NcERE{-fs_$5$ET>>4| z-=X+u0ecT>*KC?%E@MRLU7EGg5#3+=nKXQKHR^XQyGv)Id$Q?x8u- zX~~mqDQW-&HhB)zwaH3rEchrh`>8jHdw);gOhbov0Ss?e?~DP5uM2v3m0ds0^oJ)# zYrf+-a#-Kxll@MQaxFf1JG;2Z#WG({b;-7=qPujfre*xKsrqy?C&4`6XRjS@KpXQb zD=4j}7hIQ|q@8^Id;Xq4q{PvC-?%tfU_~!mTDhu7dMc2Gn%|7&G zbas~I#*G^qeA{)!Kkii49P~w9gEDAnXb3`|af2%tdWL`>1c!vQj*TUGF#**b&^W}V zy7_u{Id&x=0EuRiVFvm++3epuAPoqwAnS>@cb}O%EhA$z$P@O{-zGp}c%hNNNJq!x z(K=t3<;F>{I}zjZO5m*|Mk4WUGN)dG02LQkfvcdV6A;j>>rV{r&x2kf1?}D_6IFUt#Lu($`wNg zQ+Sg`&n?VgvJ%1Sg!0-l9@VHAY&Pr)%jV>xq>E0Omd9nk;U?-{C>%p>syN~Y$pn)}OT@J1u8#c_0QdHp8D{^# zheZQ8Ot)^`0yS1-g)rwps@=XiEnnqxWPDbZz}TMQt@|fHYa%WEPpR*ZziI_BObDm} zql_OT&^-aMaX?x&0D_F)xnB$=K95Ng6#{R)2=WG~1V#AHRADjs&VvuF z7O*W@g@nk)$H!eKJV34l*>UmYa7XvG#Inanf2}MxAEkrC!(7W|za-t1#_U!nYAUK= z6oX7m7_B{x`tjp#iR)r_tTOO{)D=ta(cIizR3`XR=M)QI8YRN&-hI;k=6QK}2$kj5>)c$n>IxvANw;Pv;u@v1E$Ozem%g<{;NHZ5be0Pp zZon856+?f|Fu@|-iLfNezRExf$RL(QrfaJDj$NOsAJ^{s1Szx?agepD%ZrQg-#_|= zlAV#G;R;I`Dr#y~O&wN>h)Uo?CEk1#mYUVIwYXvJt-u`${KULGJy|(86iONLp(2A; zE&!V-)s9xsA~Eet2jH#Wv&|{#+uVYJq*7b)wfR}QM7`|U%nh2{(;>na_K1`CeV(l z-np;pE^Kl}0Kr!|{Rmw3FD!k{!*BGGl9FJHBu^Q6*}CwpfnuX=WK=Oc9F?xkcMCa# zm1^`kq=0KzQBy-@ZE`X)1d2c=2!IZZ!*GvK9j@l?Crn`8V13-s)uoV^mnQ=`sXLYh z3;8~j9XnEV)3zg=97UEWSR5VIhgUNL>{bTEEo9e6g=z_)F?E1uaog>sUUAtDe?I5;}80is!1U9I5mF8b&9EUBBDTZxM-$hn~6 zqenCyTE3>Hrm?XxY6)F?yn$Z-FsL^gX(Bj)_~bP-h)+&Vc$aCsfyVt9Euloc4`6|9 zV3)0~uA;IFFu)K%-7mo+;Ci?Yy|4N25fOl;b|^jhUD4p@*Z@|7qcK_M!~&HNh5M-X zHT?OryqOszy!~*4+nt{?AKs51Bqk+&D!R#H_Q9K)�V#-;?uo0M|#hcd!!ne;c9V zD}a6jC+EWU&Q1YE7wAPf!0}T>?@_=`A%#QEDD9IPVcPcfD+4%foj|gOVc~%`FMzTP zm*D!=NwS{s}}^d z5%h{GRQU%B;GTIxT{(O-cpX+P0T`*^bG>=<#(8s2?(Y5jDO20W5`fvZxA*tMfPZ^~ zK)-MVRiKCXG7`D6bJA$1DL*qbSGNX>3BR%M@bFIl9{+}_Ebh7#K26|kZ_f=9UtDr> zC?KJJvy%z<1|6ohpMyjPF=;>8GHH-V#NMzPM~jGyzAq9}?o;F2iMUwz0( zj*qQqZvG;g;cASO_ik~=>cypJ75v*8SR84Q3vQNL%Gxvf*WN$=&0SbnsHmzMFW^8&&+1$Bv;j)sd(!vQ8%kYK<2_UQd zkJadUN3jGg8d;%KTySu3$Oqk^qq8$)>HGJ-L>ag5#DJoSKFE_sOwxc_Mf021YH$|{7U3Maeo06G3pD{$nFKgNmtD3Geb9G$b8m#Z zlVydzj>P zy}hli2dI=P9A5NZfGC^~p0!AKut=exNtI2==%@x9vD@(1`Jl%K_b(?%daT8PxJ?c= zZbE##9O!HC)o%~y$SK~uiSCiSq9WxRvCNtPGV<4wiv;BHCH((N-J5#DrDJHw4WJQy 
zvn60aBEMgJws4Z~lj%+oy+q3P=64CSlIMWAlj>tX&)i3PN)5{J+1{*}d&vYPh)ZWD z7rm-im&}I4^Bizk65Mh0qd;7i6|ijLzr1It%6TmKOXqXVt%CDn4pch2y6oA~zPxXv zmz=!7k^B6{PQ7i}HKL@XWWXi4kbNcd^I#RUR&1ofd#}_7&oIb1*k?u0Ku=$y)@e3I z!tFb&v~*LOkt`k#PA)V8v?Yd;vREjQCu@pWKT=ccdn|J|wN#2d9v>e2y{G8mcH zHpmAI1nzbxsBA)Tw$e$$N^PKocYVybx&`0+cN0$Kcnn6qp9ZvX-JzZB?M>LQZ5j5jsYHI!p zt6$1M(=NpeU%%Sf+slJ?{oP{rEsiGA+zRqkD#L3%H14uz>jj=a7;4%h)}#38h#yw( z2cOvwPr9d_U;Oj(-x=CG!GVDoJJ&53K4EV%sJ!#V!NmAiLS_w5FC)xpY%s>@-x-?d z|0+WxiCY;-F0XBD91i<2og^f$_;)mj&cLc2_QW|O^BizBkuq&RKWT6t;*ydKMsl^O z_Fm{37=-*|EkD!@lh@I|Bv0AG%T5yo<~|V*FK-^;Vle7K6=vTyT5@4{^Z5i4xGKViHBM9VYeA;B@f0J*2 zr&P4jT|Ne2#0~8RxqkgRWNS?tZPk1jaq;(+Ub!xZ3Xg`P22YfXlAoy0rdPO8a=lP9JVI#erD!4vX=G0`^R#-9LgJf3j=e<2^%e-?@Wk1Sa8hUL9Lp zUJi-pP?qPRViy(FxAz58>qazPAl%cW`VT6cPr<~7wL>q_^U0HTP@Dia-04$PR&Iwc z2D66P}Z~F=NA^CZ`*bK`Tmh1^KuG)87&XX7u${s13W4nm$iB<*s>3Kxeo+xBfpTqB03d^}h`TclS{mGS z)H;Q9#wHvi6wgD0G;Z_+@SDn}r>w0_2Ca&1#KTNq9~>I64S+a2 zGP14Gx_ub5bo9uSn)hVfgj>3CmK~AX5{s)YOqs88E-&*&T#d=;uU$TV2LG>P+jhSG z6ErnAmGtEDaeCVN8yuG_$R4g_ryTO#?L4II8Q@DSYw8==0zh2lwPUZ5EzJZyH9R&p zth)O0701aIPJe_9)&y9fv8gGl6#z$32B{_F4>bkFQ-~D_^|ByPKV^BS767|xoWFnn ze#tKYb7T}0x(*+}lXnJTA2K|kOU$jW_rLmDbC2@YTZa(nDAx0BA*fFc(S)TK34VTJ z;JSDjA8&vtmjhBZYE5$$!}d;mwKTBs&{FB`9qoH>FnrpB=2ltoW;a>&(;WlyzFaPG z9j(iy4b*=*El)m>lJZULA}J=TPHGqpUwMzg-%d4#O^KxaqAY1&$&=Glh!2sHgattd zLCs5DJ1HqCA-HAm56T9*y1LLd2RLJS=BABJp8hsofLB&(s`WmVv z0NMiDlvu_L4dTlUQ_b^{4nsMw{Ppdb)P=G7j+Eo~XPCb@G>^-=%dmXGRPjv$_&syI#14@AfMjoTf?akBfM}>^ba3MI z8IO*RsILtZCNz>{KMr$iFoRGMhk!u2+P)?<%a`vq53tNm2j%5tUXF_J)5ubdY#l6e zxDT&tKl^l=3GL>YF@Lg|d6l_lO-@;PbVqa}MK{J9FI=!gyJ`O zgALjVs-U*DwWV$t!UBTMU{v`2F`x@^p97)s3QNHWKX7`hAn(F?;Q}hoB4ohh17%w{ zjgym;LBdS=BEZTTaIY7n%^(ofrn?^jMo(L@x3_n7nk_fK1`ru=bRMXpNI+YgJOqte zhKIE`ya5(C1U+RJ9zRlUS=S<_!dZ=Ds^Q?{x=_%!SbFX*?dD^Zz>iPA1}tE&^zUkI zPd^CoO5*Z|7z#cqsWOz42r{}rJ;kOg`aE=Y z4SuKDrcUq>H@BD7QGo!=jSg_gn${*Ew^HS@ctcT1$r|t(dRQRi()sG9hK3?s8|uCw zo{-0c3yg=wMu2}hKYA1jjx%ahf!tu9ovxIf9`}vQ(;ol)+BXndu6M3F7h;SEbQ!Cxv#o9x1OXU~<_#c+=ehfX z98|X4;^J^{&d>$_G21MIo^`O)^z^1kw}BJSEi6E~^`Celv@KL9t!@JLjzm_lZa^jj z!VJ#9Wq_QWgS)G(qZ0@W(6HRB*Kq`}3kZ?cgP9Uw9^j!-JLn;;U4sF3SGE=kI2kP+U2eXaaMTlsp0EdH>(B~w4{ecNuz$5Mv} z`So#}mk#;KZ=d>4jW5MkGP@Ao9o@@V$k{W#BDnInU(e7{Mm=$;5Fl?YO_?&;CLa&rxQmH+vX zoB{Ih5O=9AL~G|{$_u~hCQ^YRQd{CtYTTq$AHc2EjCymk1zyJ|5~=Mr0|^h zOXd9X4lqA-`#OH(-x2pxT_tXNn=hZjJdcGL)Cl?OcgH*0GtI?!f083s0XmlagZ#m3 zGH2Ou%CtDFda`BPV_KqiTDXj459XT*alAWwvkzP7#nZDX|NDZ8Hd${O`~11MP1reX zVALhPF3RZjme3@`L|aFj-p-+qc$kHcWPiR`Ad=Q! 
zr+s|EFrvCKHQXd_KXZma5@}3|w~+oe+yf|Ek|gE-4Soyf0N*RAJB`yvI-^ zE?&h5dW5Cv-D}uoc<{Z%qWH~e++$2(5fRL&&IB*e1hD=|E}p+^p99(#VEj%%BDXha z8E9$oAlwT%1kizO|DGHUF!BCFlM-g%kAnBY^3*oh#i}aclR3d>(eXvsS1gnKS27t$ zgC^e7iQ-RLW}dt~IhX2Jw6qjkN+WZdXbylC8K~|2@tBlr#aUt z9yOI_fg1`E2WY2>^?r~cMb*&*A8823s_}E& zH1Y)y3m}_FASDXy)t3JLvgt{P)Vr*YUn;~g{+~&W#lwXkAe%xk;%WC{F0dtotQL>o zGcRV^Vu0^GPe_Y(VcxI2`r{nZ^3IoCcl}lS$j#Umqb1vC7A%hSk4c_B-bZ5Sh3=9H z9XZJwybt9l;`BSf-h_Ps;pQ-aWe6*T^dUXUom*_#EKNa4nI@8b!9QUy!K1*W?jaNG zq_nWWK)Gcm6;+E&yjoLa<}~o~^T@Mr4|N(F3GM!$c~O2@Q%(*7IgvnUDG=kR^f`1b z)Bx!jtcBs1%A~n;jPcx4yTZr)OQaLnA-q_mr>>|GFd| zo9L5kB19tAIz<F91#i&n@gKY}Cu5)^~UH+Doo)1VPQ z1)a+l+`iaHf`Jwu4)miCC`_N$RO?$)uP?_4n$rTrA0|uyt^V1VzjiAZK_~l!A|s z6a*cstlMz`mR(*3PaR@0Siq(xzkXdj*jnrv94sGhT3Rh7B_N1j+ek@_ci&Ixn3$ji z7pbzQMp0fK)5q60%>&4jJeWjiI+fq7o(%jNd0E+jY2mPtka@ThfYR@d6rOYU@c2~c zWGVIJkH&_in(MDGVv352Fr0%AcJWnsA3!i92#R+A1J5rl1qTOXgD#=(yDkc9gW8RU zY=z8T;cx#6vQb10cP)^AUUyWIAG|iYa<4ULgsyq^I&MKDv24X9p*@<-beSvX?(Aq~^p26Vp z<6~o%a29^1{*K(|ds&b`RYzQ&vZL;|6?Vow`3f5xu(0N4nV-LY0WSkB4{sn-La#I$ zDmLVQp#i(NXN|A<%H*RS!VXLCrHv z1gxUSxj8E|Zwn#vWWMi^RKr9JUl|8NjcoYZo4Zxc+a%*Ms zR6m5m0h<`zF(5ozLo6F^jmSEr1O7_@Z}L`-Z!5}lv5)cz-sX7=zlN%!D5qGCAwRw; zNDsunc!5R}V0Xi?8sx+N3))Hrn}Byh`ajIo^3hnz_zOYk;xNJBJSSTs4mJ+oqa12Z zFD7Xx)$gtisHY28Tg@*em3epuoUi`fJ9ZwPa1+D%c^m-ONjw$W!FDTI^P}ya0S1We z$hwL(4u(=dS}Derj%J0IHD!o#z%p^piioUgLt%R%?_8KBa=g2WrQ0`o1!6D&DvAcm zj3E>||NFP4!F*D5XF>&Et;^5?gC!BA_O`gJ;nq>t_4`XR;h^=SE-_?);^N{0Ahp5H z&mX<|5Ym7x&?@cTJ%N;B4D1_QNb$PG* zp^pOtOEXP?dw?KTZyyODIO$3~6eUwba;))*XYlAe z%0GFQRP3~K2gr!kVp_e=J#etBWPO4Cx4CTWk?+4avFGWL*p8H>Bs#5tTG0S^=70Vq zMh9OY00wd?TI_%phh2a22gVZ_!5adVKM>YM{{w)gE*Nb;htp&VraV`!NAA1|@sei0O&5v(S}?)8XK0 zae(FwWhSAN4xX9b(!;swx0$HE<+_@_7y%4Jn{T(sTR)yd+ zqt^{$A57Q^lRF?a3`hdB^#b8K{IH0b<8m`XFPUp~?}+0= zG1)D~bFWl~LhanGTv}^M0zT3Q@STx5u3b|ou1E&63WBP&Pu;9^k|?l$<``9M(=4GfZCv?m@^s6vg~w>b~@CM*eJ zyEzaQILQqa7<;~PnS5H=(wfkP)FHifUy`$whL?vTUd&H0Ja=8mrslwDJE~j7X5LPY zzmdOZfU=g;!|vO{?;K4{f`~@5k$J!BH%-m=iD=l8#FbSp!PrVJ%!i@ND{Kn6f$;F~ z05t18cS=YZ71@n#&xKLC7u9_-CR^w90L`30cxdFBgZ7x3YdM@CIemSq&~psj`{E{% zrNpXjQuT~P?Wd`tJjG3lFG8DoF39s#OQdF3gp+RxMjdCRlj8iWMNTR*KF&SS9p8Wa zK<<4&^J7Px9G!{un5QW3A^7ICL*9ILiqBGAom=>rK%4Z2G)B71nvH@6C;iF_(nsGJ zVM!z*pq1}O_b)D6eCq~3_vGWnbG2p>*y0hzwyqRPgq_v;(Hy0zOo#Z4$lcEjboE4J zKax#Y_7>40?o*ZzzVv_xo{)!*cO;~cvm1yA~NS@{4hZL9fbgy-Ay7xw7CF&zz z-*(bI8Gdr`H_?F;V_;~gh*AJ`!1TqQP-MrfN?UshY6Pmak64v82Ghx2P3ARh@``w8 zBRpJ5DdZ`bGgVyJUQnw2mNNhq;=!PcfsHf|Xc4AnL93Ai%rgvn78vyy-rh+rQKJ7i zVoi$pfj93m8d`W_#JggT(r4r9^k9b-gg2Zwp*y`u!jD5T{KRCdV{q4|J=-FQI{+Qa zf;h0q<;$NSw*^HTY&%lGxlk26|NJ~(Zt)=!ypmR+Y=EzxCMO#jgev|s53i;rN1mMN z8(KYIHRo_ji!rqE8KEZC&9__uU_pffS_@cM z8UnKf)+YoCBuoU9@)1X3YWwVmK=Fj+b3msJ8w@fRD)5d(QuCp6uK)7j$_v|bdDgD& zv0bNaX2Nfc-z=NE`h|q+Ev+^t!;VaWwWSZ^TFDyL;t-ba1eRK3I*Ab>c|=^zBPQ+7 z=lcFO^WXa=Nfuw6&T%YZV2TL}=Diw+F+#){ougJ(M#oT7FW(^n#G`9&ed4TWL%wlg zChW}AfBI%4X?*?K>;dFUIv{|i%MhSpCf#1WG903gm zoHQtM5XKkw|LcPW1d|8|3FQIUv@W}Tg6KE^vLuEKAe3m%2+$}i8ym*j+L|lV2#j+L z1Kx3^R0eYtZb<+<}_Z+h25W^sbABN;Z z2aHfcp^C1E2?96x>&IfM-LL1vX}!8zoPJ=`>T*YS@-sd;V_pnNv#P7bE5yO-G8AKS zh;U_=O1yXYTpmA?f--(*&Uz1LkFu;5dlO*P@TX502#kQBnKhJ;0Zu;Yb^m@;XB2J9EpUG>(?N!ZTO&095HcE{v+at$_|^wB z3|O4LXlexa96dIuKn>rAPhg@5eaHduwM8i0o7>xMpq4|s&UdXV;92Wtr=WCfrpM`i z>BloXL!k971BtS_ZGR=EyR%ha6^m}~s*opX4*zWIw{-2q1l^M5V%Io_&z05+u8_;n z)z?=fCMNEItAaTsU}Vt{n$tI;OXB6=2trsP6fd&=rn(vpzNkEU^v7t7iHuVAmgiem z8VefTSQ?8=4a3mSANQ<&JGx3 zFni}70rianCYg2j=V7=4n1o&O@$oSwOgEugi5=wlI4}bPAe2ifk8e_Gg>i4Dv0?NP z8l`{%h>ITn0O+xi9e4@@JQu_UM%+LvR|Te}gf{Py5e|S!0#z|BEwY#a5YJP+B*@Rt 
z3FgicFtdV<=KObneALZWzJKSYE?eSw0+tZk(P6|}9c1)X5YD||0xk_gW!s$d&f@u% zmD(GV#ibV{j54Kt(_fjtPX=dY++AgqbV<)p`ur1~U{RC0Oi`KXP-9P%xWTMTJ!9wg z2wW)1Tz&Z2OfHHs6`H--2u+~mrbKffvj{;226hOQr4`0r z5i)vuLOs2aHNuxSP|q|~4?I&D%Jq%cSb;yq@wd;;#%E1q2Tn8^);PGH6&`$4aSToIV)HsSS+j;p5}S zUHc9MuF?OD86M>E;7d&)WHXQc{EC8J)@$Cqf*L}=xDi=^X%rNZMb79vEjSwwXPchE zErQGqHe7-NcvdAIFeL_#x~xw83`RnpAFj2_y&rY!6wFIM1KyMi8vm4cAK+_Ad%X37 zYXMOpK{#R&-aQ9N{f5n3wMf}3W)d2rNJ+`Ly0az$vqK$&uUA{4JX={?2g3A%HIxO7 ztH~gjfQVykZarE&ooD7QHD0B=rq-t_U$ppQiI+i=o63jFuiz&wfd?MZL?SBKrH)9f-|g+8lHnu(yZ!tLy6Slw($C zLGT@vdOK#w*zi_278Dc=?1YEoH|$@aiNZkCllf6OdKg3)&=j7+s0yTfH5!I&*S^*X zE-WnMCrezrmeP<7@)qFeR@f=ffxym#*$0TQW@?x~4g-{b)I$V^+m(e#NoO|u)d~B6 z_#>A4zaIwa1_fF1*|k)vU}uvOq{eYzzMr+Xnx10eJM16qdS5cw^Uei}-_CCI#g%iB zmM7vyuPj-PuGNlsA$&Tp1Xu2T#)d~UgutPJ2n;&KYy*#p7@jzDnaNBFDe6)9K{pk} z*0d|M%2sHZVezox!-qe4=BL{kZC;T{{v~Js7jA!GdH(_%%yiHJO@(GvVs;Gep%t2H zeliSc3$JsYsU`bb+yJQ2y=V;|s_UY^cdWH7=`+JLxaF zi}N+ancfFeWnHM>n}G+o*xREn+QAW10NZikyJz>s`y$Eh>=(Wzmh|}U;yX&S;QDXi zH7nrVZ5YUTj9Fr1kcEJz%m{ua3OEL{Wnv+UfEtx|XBs7-c?H9=S}+}f z$bwz~h5Nac$*o&$v$K{C??JCC)6 zy6#U>D;p4sM;fqywz^i&Nm`a8Eu@?O3qXJ#I(v4R8C4w-Qfq=1$Z z4YwJcc!Q^};9d?Lg&y<2d-Nx3Pt>4D*n)q6hJ^rEIj@ag97q?7GRaO&4d2{!hUWv& zBj_^(z?a|<6Qh=O>3;!9pXupoXLuF{bXAd&X_&HKD6`=O<$?emu7{EVb0RiBC#!T9 znL^~*&_ND(&V;9@CuDxmMYDbiu{bU*%H_`B?I_!n6e-*1~6 zYiACB*l)|j^ivOHJ$nJPYQfWyj7R46RSdMS6jZkKg+6HgNl3@-I?DG*x{1M&wy@M6 z2F-2j&mV5c8FKRSMqY$7ss@f=I7q`FEwjOLa*b!i#mP=OUV#Y;e|)xJadGj)`fNZd z_8Bk${~1K_w#6+K2o$5?Yr8^W0#HLh?L|k(A&o+L^X8CitP3>Xfn7lgbn^V$w->Nr zPr(6&`>p^+1Dio%{yh^!<=6jFv#`@dW6IE?xh(zkA%Ls`Xe$D8^PrlrfpEkQZ;MWp zYrNp z_@czc;f4FTuOEG-U;Xp?-2a+M=xspHH`H2CY*VKfZ)A(w|vu7h8DYwa7W!!?{*OvxYa@bolb z*2NC26qK({_Gz>_9|S+W0gvavwYRt4DiSTO$tK=?geT4`#`N8&*Z-Ktg_1ju=OTex z=sD{js{~xvuiJkx)^J|$C=*$)_uZzZfBP9T5ES=U^+S4nv`ZF6~PY;CudU-cS=Mt%^qsyhtR{JYK0 zL4NI_RAfRz%e~KZp~^8TT>ePhCNI;}YQX^3fxpSULlw^?i;(zn=CN_i)aTFJ&7OI| z*(GL@qyh{GAIJv}k17M!%xeH+#hPKVkl*##*}HbaTuIFSSD_NA>FsZSmw0pdDhtW( zH)L~d)IHqt(*CMmhvuWF;v86JZkI)+-0>s1G$OMv@n1VEyDL%ZIF|fRONOUqp^sic zAMo^@_alr9-}-u{1@wQ68AnM;{Es-kOPiu7)ekF;vJGur?UvF2b#e0dB#wUclff+# z*Riy4)U+Aba8gNMtyVUx4z`DRUbW-xn3|dcn;<9%FGsuYaZ_YY^w$<&rGI@U+TMVb zwSp-+TxKL_pH&MZGC7dj8KcHUpCM@QUb9YXUS*VotdrP&YTVhQn2hrFMQQ8|g&a%c z;Ju41pT6|a(^X<=pZq?tiI6E{Mdp;b6jBm}WQYjKJeDZRJS38N z%=GNPYu(Sg)_R}yzR$D%dAIkEu4~)wYr8wo^EZ6I$M@Lx{d3S!F4@_iPq}wj^eY8P zq3o5NC^LtGS@BMrV|R)c|3nBM5Tcp%O`_7)B*h?VWcKas>(hnOD55PrJ)I$Z6_Q>istP)ccOzZcAF@Rs8kSEd{Ez4g1JV-xu1Qt@ahW znc-6SOS1Ku_H(}{A!u~OKYknzQ}rMuI6`XE7kxZj=DkwzPU*=%Z;7Mms_YD#Gg)-2 zE!r-}70Fbad;w^t=?5-@@c%CoG5vx z@hWwxY}!(WnYo-UWmR`+-f*@PK2pYG#tNo03hYt0aahrpcb`2(+{zfiKi)xJ!*X3z zW;*#FbaV0s1@9GFP>~e8m&AX)E54noxe!i2^M>BhM^&bk#W~q9FjO)3heH0@X%+6j zO+H;BNxqbLN)an6o0^?Hjwkiyl{hxN3^-Sj&)EXD1I|7K2MJ~cTUk*$KhOaQw=$Ls zQaR2l*Cx6S|LkdFo9C0zjn2+F-s9B!Qp3dLq)8bWNwcPhd!BPYdFX`&-H*#fDuu5P za3%Fzy1Bp4?9=!mrZy4JN4xYqGOqIeLO4Vr*WbhU#T(}PG@VZt?>T;E#J?1jQ-pg* z&^?eaD<}OL9Yrd)lW^)}+v`k3sQ6KevY4QJNrW>uzstZl8!d7ybttD?L!^NH5RZ-)dmJ!ACrh;tDN zDG1p*7m8Q=X|s(#{5JU-&|{Wqq|fi$`&YY5oKn>m6YwA+K0~s=$v!yKi6pCog9BmFCX#IrkSRoNipZ(J|5yWwOajlD zaj92PV+U|X6bB9*z!6^o$Hj@=w8VitKfek9u+w7bwr(Sy57s^E1C)x$}$LsG76nnI9*WnT9s zpVgT2vxmr*K7>rfj02~Ixw+!^yvuy^OUwklg~X4Du=NF3*FzptePngb&Fk;p?Kj=C zU%ja|)cScVqt1buincKP-qpN4TLHr8F%}HN@mx&+eJIRK%wKdv=ragpg1@6%T^GyW>$gF5g0VL-K2B5KR zF_;eFaC}FX)M|vBQqMeZ2gMArkrKP0@`ua4-{zCYXbek|HB2wr{ zEf2?;p2Rl+<_Ios3)DLEKJL&Cwe<7^xVgF2k0wp_HZ_H#=^NtV^Ix{3p6G5S)xz5u zjOH!TTZ4fcZ*X7 zC*BBOUG`);<|6hI1C&-CL%or{NK*PvC<0N|1_}qcw0XR4`+aPgJ@|+=M)sges4xle)sNF z#*&5k`GBY>R@{pF_wJEk9$vn7w2J5UQNJeNB5J!B1W&yB?#oZ#bW(Y 
z2kr}EzSPLm!G1yDN(sNi`8BAB1Q-y#d;-@2D{L~D2-wS_h2O$qNRTgZU=%-iunH$b zahEY&B0ddIGHEyEzJ2@DmtVaSCHMru?0m#glW2dFQEb|U%T6K)4qe4=jBuox7kHrk zh_9t>O}}5Sn1ni9()e&5P|e}3b7vnOsF z^1g+|-c0vtg6-{39&w^1B@Y&Gxd_U{7C+YKuV#-J3G!Z`4NBzIDE$@+l;E?8`V{NOoU`0kpazfS!zm zqErRX3W3T&)1rYr*Gl#&fX8Nl(h;#A5Q!k%xWPB?-7eDs7X^{TZ}cBUU*k?$S(*A_ zSQZGM5nONtb`*A|oU1<$q4TB)tTTF^NYF>g{zRIYcxb?GGP}Es>jTP`%IBBr5`o@=T(o---ymq}6yee#RR zle$)oT=zG{oyR2C&hvFX4kxvkR4p2@Kr_uOv%YT`T$#Tt#{QJ@3?l7F|b( z=v=~AmK%HKgnJ+AUKR5r!A^S@jbh@Of=5U-Wg2=)1;i&Z@MZ{?E)cf_nDYtnAp~92 zZy>e+uM^Qt*(znjWLRJqOq|nr>oml#fXjsdcMzl#{P!1agX`ON zB6+TncvKM~3m871ReK@t^$|uqytR#zo}QlQjOcnk`p?bRA&vmF=YoRndHO~!i^ct$ zbLz>znTRRjt|dryPDwRwXg|PPaZ1eD)pZjI8Mj?~_nxw?xQ+@a?~35c@qurqzA=A~ zz~VK4o(NNnW4|V(9s;ZI^U5{2Y|}enXAqfv_jhTDL_*?;?*2)Ctbb~mAo||XZRo^~ zMWD?99@v5G+X({3=Y3TmGBUDnXowQeJF3UyyO_XI485ElWrN7_>_#LHEtHCThOu#R z)M)u87gAlrt^q740+pM9V1wkiTW=CK{{c^sKNAs2N3P~oSB{Q+wkPeP#9Y_OXB?%f zYbm_$d!8k$o|hK-esy(!ouvpV{2jU29nOuF+XB7=x5!J94y4H}>*)Hg$2K_$_$sSk z`#2(C(yc+B&~)ohG>5o&8fU=QJliW|UNb|q*c%A51^PA5^(t`X(b`%1on1DR^&5pU zu}}mt^IaRPX885{H({?t0u2hXxdIXB`j-6eoh-p119FOx6R;wG`t(Tw9he7_pnt$w zh6P+~df_^}L&%6;N<3XEGgLJ&{YiA(yl5C2XnFBood&T>*ZMT3M2-nP^&Ssu~mw)Mmy!s zJ^WPa?V0Wl-*GT@ZA;jm8YNHNf|CYM3e{1sB_=x4#*NgBzrDLAhlZ@M>L7hcM(j12 znVDVF=y~;Fg(3W%5aJ*@n_s>he2aW^B~aM=(DEyOiG7zJ@Lx`ncHl-d?=^CPQfS9R zJcd&&F2fBAHWmjJJpM3GPET|nSzY+y;8b&V?a|rNqv7pOUMlQ55nAG9w;oU1ZEj-r zVBzKHC*g3J{fyHGd{q}Rjm-A$RrIad@?w5mYr@r^sY`aAZvQRTatpHef(KQf#?EK# zw#Yb29Od9^B;0H8Fg|(yyjpY;e58Py79bKhaJxPNUUw4<*&u4NB|LSoefIVBm0xF! zZ9BN+@$E@L!JODdA{LaMUKhRGiYz)1&n8dwnlM!U8_u)-!G?7_7wl&b3Q z{FBxq=b|XPbIAPbqTaVaWDeq6Ij#~j2S@eww>R9~&2a3H?0%1HR|5ei$7k(E(Sjwe z-9Z3I39Psg6(uD^G;!Kks4ePlHiVBZB=!08J@Y;yE&pnQKd5`}4MI@)uoLsPZNaF* zNhBEdfgYtmJ@~w9atJA{r*>%y{JGe+XQE&pEah?ppXoIR!=~mq4~l*r-V9=>AYW1X<(pMq-41?-<4|I0irM)*fDINcyeeT5VW5rK| zYaW&QGxjR4+s}2_(U#hHC zg=UB@H`LcxpfLZ3%8%1wAnpfvjWqurpTmo;3J(#{o+lC;sHcdiWkdv{`k6DSV>^%C zkb=A&j1`wav0$;m9z*z1k+}O(-v+-142~(-Tg0WMYp{{NMm~;z&mN0^HR}tz1|AGN zhP4|69fHQqDtNXMHqd0!)89dazYjoVi5&jg0H9Qf?+b@9 z%552l3K)-opDZpVRf)x&MB+OUwv-VMig7SX_b0Gn64fBa4uGe!=Ri9FoFTZo1nUn@wlKMGC4r}UWRhxmUr1H7E>m5;lH4|JpKD}6-40y=PLN{$+uRC-n@OEYj`EB{d zhW1%X(&C!M)Gsy17S%GxS_8*uMg36&XzboQ&CDktH1vHYdFEN0S6c{;vgJn+y27lNI-P) z(}qKy3a8!N{mflBUg+h7_Lp*H1DEWXy=MG6kGz&q(eJ{60t<5h{zwet)ei>1dU|nD z5LJ6+S64~p4OSKw8XUzpR&|aJBaV1Hc;&wOHhhEsj=3^^)RZ4fJkFa(wnqy8J&U<;=`8;4KB^=O6Sy_dh+S_5ec|>O16f|6#^}yhjKf z*fc}-5)4_$^k{tGga`uyhf)YLA9?NYmraz||5D&Cinb`ub; zT{uY!qOC%}aNxL(+YPzYw)n*xsbr*b{^m1_oQ2-NPCbtQ;IC0CdxU%txqE zAeBZpMvE+2{7pf-=YexYoDT7GqF{+&0x*a8e}YmOj1d_R4ql0Eo5mWH zK$F3q1|&1uJUTzoJM*uyxQ`vafn8Nk(9fTVcb_n?f!py@e*;4Kf7=T&bFzEuc;WP5lM0UpDcLHk zm9&g@FuXtd;ozazD?d0t@Lc=ev#_+X*@Q159D@ zheyxmn&YBrk0Xna@KqcN3TiP6~}KR(%I2%*VxEc z9R-tZtPGmJ%EzusJ1|L`^M*u5eUOk+#lGz~-rjtb{Ctuq!2UtVzkCD z(G2SR(G5Sn^QHSf^QiPR#04eD(5AHMUOS`yY3emKTYlcq_d`ENRgDkpK3aXSozh=aeN3U<{+c0*^sr8>4b;8Z6=XJdi$yf_XX<)%&b>K*FD2Ob>m{pFi8w;;(D zG;5CMy!+H%KNI;jo=Gwxrz^50YB95YWIbnG?#LY3QSvp@9On$7+D?IoQ?fhPJ(Co4 z`Aqd6RM^8!8cuBmA+u;f92}IJu4oipuL zFa6=6f!aX+dp{2(OZV4m#I0VWqULUE zC`v3jSu=Al)qHqH9@RB8Z3XsyYi+%SV;|-$REW;b&u7+a{^L{D^-%H9-(JENXSt7p zwC5SU1fLk|0&Rauw<5pIEdrFu0DBR&os_>t`h7=s8{QJQdfV zJjTi}=VDJrC4e7glOaW>d;a`>$D;7?GkO!T(7;lLVjo3?8GO6c-%Kdm^4ed z{fW~;ju{(?b8T;5wo)S3nk)P=4Z(>22UHLlVe>>i2n6aDl75Z_6&fxzT_O)tgujGp z>=&Q?tIj{dfBXV}b-@tZRt{S~b=lB&o0^Dw80WLV#S$w@R%t=AX|rF?yL_{~^>K7o z&xZPvu14HTFrdl2W4l#6mtCg5-E}XSUR}ofrb7Y(RKzqM^d;_Ngj09%Mt@Kh7>A@Cl?^u{BoSbU z2zmwd9aSx@`??R3lXnqAOb}88^5!On85QMtczP1hkV5B?Ed)LGYUX+?oI^yr3iT$c zvpuJlBT?1i)J(%25eSJ(+$}l%6t?jW974|oPU{@OgsFF~e?st|lHtn3!J--k6I8Yg 
zw)2+R71NP?kG-5w(7H#)^wRi}Qg$k=)rIBTEl@6_Iwq$Z|1JRau#+ z!3ZwsMZvR=c7f491RhYMBnCQ!g@@N+XAFXw%F)S5*o6n#A8->dSQo*TgY*zTsObjS zQ{{M>Xifq}hEfvK>F&Y2div_(4~H^lOeWJ_yo8#QB+dxU2{kZbk-=tcg}uYSWrP)_ z`5V+6EWdHR_(>;;*(ozbC7zwFf%RB-Z2fdb!#^n@ z7n0)QRDU)Asijmo1 z^kzNQ{^sj)QBZH;2V0CdN?<%#A|)lI_-{@zLuH@kVa^AGS8+yjbaUH3olj)k;KoCu ztGMmkX{Rg#^=g26EjTPpG+%W7&rHZn(C;(n|7YOo-OfADKAlnE-uY0mk|4)rw?a3J zEb)~7rzWjC6Pv1IaZEMed!pOV+?|PP4Mkdu1_CmzM ziPSmNXymd9CNkGicP3aRv$Di(*>+O~ET=f}Kvi#S$~*1+yHaFmgriQ6C%v+G4ZI!; zRLDf%9*$HHav5=EC;GbZaj6%H}%S40W-(%Ca+Sq^ncS6&>4)1<6L9FV-Z~|zM2K0mel_Q9*Q#V5AdsjDE)NfTIv}2wr43&>tlELDD7~FgWI+7XiKc*lR@OvhT8! zP%;9VbqnWg0`>&On-WlfX-Fc(d@D57ab6@a$ni#s3lrTtSOx3dQo$D7g9nh9Ed_8p z=bk-#etpa4f!{Q>9>EGa{8hxU8C?v5uuC)mAnH}%0al>7iIbg<912}X*~BP6A}>xX zR{%aB0!cv%!-*dbf{%F8LO zj$CvlK4dwmbvmy~GA}W2hv-6!iv$LK?ZqsE!K`i)DYG*-`Pij`u4CLDodGgs2e>ZN zc4~g^vw(#{Gn213rlM^z`}+xppF=~@G9yU^1+JN+y(Vn(F!V1NHj&<2QpCz2n?;#Q-#F5Kn<;Y?^Y z@%~YeibQk6G=-FLfN?*XsQ&){QgdJfzHj&qiWHHXCjqmjL%_ z7VkFRK}Im~pnD(wiFdpY@4x`-m&adL+g`Rdfa;u-pe0{B#|CMnd`C$QuGMa-yUDh18{-dh% z1i`k(S^T3P^$uaIC5|sp)QJ!jkPja*2$MvDqCt2_5RBEfc$Jlvg}paw`M#1~S#8_N z#u7PJ+D=+cW@bB$vnN80IZWnc{10^pe!K1-b2q4p(0?z>$&vj2hH@x<`Jg!Qhu$gl zn)x?NFRwVNM8De~oWCn*YAkrEtrcR;0415l-rl~pp}{hC_~*|G03~8$<-alHU%8DA z^nWCw?XQP@=YK;&+n9N8gBI<7)A>$8f;;9OFyf{@%K@F|@H9b6i|Sk?hI`Gsfl*Ki zSr-5rwQy-(UUGCVL}w4eh7+r6hx-%balw0iLx70=VyF|q#k#g@xEs$5o3TlRqGxA; zR}1DcaU@=hCa#FwkB+7k6cofG3i9Jl@|z^kzSK`YR)$mH!A9KbNt@3p0c~&KXuxs? z|F9p{#M@wq5HkrgGD7gn(r@4{g@P+ZCS-qM8HYV+^BpXM#$`VLWKMAYRx)T_Zhw)% zJhzW#aARkL|2U>zWmA(aL-^lS;js7&T38PYF%}CeMhX#4$R5YB`M7~WQ5V^YJiW@69XhW1 zF-8*~UtTxvS}W{wTDxGV*rI$q>wi^C)*L>~A(xJ^h=MWtOiF--0;UjvOH`lFi0st^ z5uvEa$at4XUx5a>{e1s*>V9&|$Umw3ST%mRq*i_@DKkFosa83(ji`Fy6SzA$lt=b>F2|P zPQ6wCrS`G9^B6FRaML`P=poGUfcWZ{`Mft?`G3g))L#$Wgi<6^A2kOdUaxe3vnDdz z)!3*$@#hCHgn$Hr2S?Z|Nd%r2Vje8^lEZS7l3~D4rleQ|cL%`~MBsafIWQ>r$%IN} zWr2x=9XnQdWjN+*>k02u4{)C9f2x=>K|xT{1m!*ulO5{wuX@a*JtukL58gz?v+oOk zNmitQ&*ua7VNi<+cgAc_bVs2hAFSZF(baqkqfajv=e!FYfY~l??9HWe^rWr(8Y>1g z(%oORafd&DtbF0jve9E-wtv*lS?V`g7bw`+pOkqVpe&f_byw8ouTG_CVcL*n2r;JG z9N$T2$S^GLt1TaEdzyr%b1)88AL{B(*?K^Ous7xt+S0n8i|}>U zN0!0w0h9kj4{mVtVL(BZOMsCH2p{savHLEBtgd>2Ai4=p>PG|wn7Fy`;^|ykS}Mn_ z&*%Y--x7tj{`(sTA)YaFauUM_AW19ovq)sB6Gui&wt%Ke0jxgZn}bn9*}{UC_=^Dt z^J@?*6-+~*z;&`G9L2R6pi~6v?rh>wT6y`hY}o|@r_xnOK(9_f9K&xC&^wR+KM^|o zHiBmhkIHeVnZ&$XaBorW6&4kJK!^ebe+MXfBr{~mOxeGns?4$PmL+saB7X->18x$l zO-?mcRs9k2>*dHYLA1L=_$;u9Hr@bG74IA?HcN7NC*k9r!Xo(;2WTR?M?__C3w`I8 zCxl;K-&e~1~MSjKMyynNuC zUMu@qy@Nhrs^kCY9C3-uvDwhxEC& z%(+eY87@xDIP7QP>zi_}lv|^u5;*i%!-25g{8UZ#(QJ1ul1Mlc8EKWakD%M*=+(}C zA~mD_vKh_ZM7ruP^95~l`%UN_&M&ixNA5ja>II>g2`)=C1eNe7eg?^0qIHj`FDu%$ zu+f=Vyhl;%-((#H>rx<-62STYf7U5tA5QG%=l9RE>x_ivm44X_{W*g7Pn_#827Bq} zWiyCRwdlJC<1P@ei+AtdSz(?8=2rjtt&?*l1a? 
zqd+QE!ZWP`$)ygpUcBVLv<^Oryrw7|g@5KRFWFAV4a zVV)Yk*;6Im*h$EWMw4I`T=C?g5!(&6MJu1(V zr$!Qf1~4gwNamhw)c2f8d9XLwru&XncahGMKUKUEMNUiGI*-k7?>sUn8|=N16T-XZ z^9lFeoLM{A*+2SblQkd4vLMf|-S8B5+H^*KJKP@s23oES>#Y9|m}TF!K7*Y2UHy8p zZvvJznXf&$lA^`eU7Ak}qb3~H&^W$b7rMw2T15QG590dCBR< z#>8c=)0A)s^xnut7obaSIYVaI5(bBd-$I5WgwGArCJ%AH^k;(G_f&X0BF;;|lU%H( z(q^0IIh4SFC_FJa)t?ET#syhE{xZSm7yFt*O5ZY-3+lK7I^*(xtv0YpFytk?0vJ2+7@#d$7^0)xK~u~{PkOz*o6Y*MPw5Je&c&AnFa5f^x-zbuM_as2W8WUWO$V}Dl*7a~)oYel zKU1Eotqb5Orha&K!7sR{MY@Te1T}~Et0Q>`3rliaGTHVU9=EV92aLJ1F$I2_{Y>&} z{+Li+kzw&eM?O&aJ!=rWE{%^Y1R(xZ;=B%-Ix%B&;6HGbt}}(966=;sd|`^-DgIBV zr+?j#qIbCHnYXewnRIBm!FO$Y&cdK(^gYtu`^=joxYMuAEJ_J+yEU_EShhQw@#PiR z>$5-6miH6BO2&nQI{x}icvpxu9u3^U#6)gLmiIBv1aq8-ag4wP2f{ae!1Q~|vu7Ta z=xFovj|AC1yw@M-COO0Z^-fj*7>^|<$%=bbkDsZm&XT1U_ZcI9F&kZ3&B||L;)}x zAi_!kcqK{GOYb!z=|FQ|!{n1Bdxc{7PT_vVBmN{in{i*E!h!V~MYOs&=cs3VFZjr9`*m(ZZ17-piw^(L1>B z%B1aOx@4`?SwX%DZHAn)Rn~#-F7mWDRYNAP^t`!5u_{{|?<3gmq$juO9wM@^8>4wc z3URPc!pJh1bkw@zW-np*7}&jpzYFs2x9c)&D&A@E8o|@9sqZ)2MrirhP$>v^+Jt$T|D-A67njSTsHD-hef-o+R|*VV&9 z>baA)t9+JZ#>wiO&UaNlJgGIC+@L)E)(4NsG% zcP=}o_w7T(yU%Ey?)EXy@~pmaDhtfI7(U*jP)XS&J63LcJvV7#K1KNXch>Uncd9JF zHr;|K&Iurkpj)p^X|V~YMAEndAc7v>!JW-+gNg$bL`^Wadfg`1qho$TtdX9sU+^!x z%I$@UC*Fe(x8&4Y0adf;yiVFH>YowAj=BJX%Z|OZd~%Wu8xy{Cj?9XK1>b)i?qPeC zl$RQvA@h)Qjxaz&E!Yi47I-(;UrVcKY2ynJ?PrWe5)xW{G}s|LHvTeK-@{EHq*6uP zGTqEgoh?s=muuH;^F#K0?+&eb2#Pfq{t9Hw%JMr&d2q)Lq(*2s0(G9qvh-M$J4~_$ z(N&4t8ZcrNyq&2K$f>#Ugx2if=l1r1tbLDdqQJWm<;|h_@v-3J?;=i8a@@&nzkA;M zN5%)}(R~rOYUS1cwNdghsI}#hVnypk(r!&2DW-Q&5-H6{X={2mi$~|VO71G@@_G_>1J`1l}~A6v|A5Zxn((tyvKcK zI|B#RMgF1bGO8L04g;Eu5ykABrcYy5x(3Eg)@3YhSr-zJpL|(>FQJ2>YDNLO{h0<1 zMaRe0CZD<8!CXdhBtLdhlKw%w(|!J`g!3goB`sGw)E=*1e5J1yx$42NO{qM=q3haA zT+E}Eh}pa5wbFy84dFe551-^l4QUvItxvMFuu#kBUavP3hhM|&r8vwhUDkKgxG%Nx zMAeGcR~r4UC@Boh6RH<%G>nSbo$KLSyC&1A*GzpbB<0?Eol-03{!Qhc<6FH1&zAIC ze0Y?n$VjgeEZ|_D(jUyJbTNn4t_3g4h`U}pn0v|VSBmi?(^KG*S<&cVxGAx=I9H_cu_hV zZZ|YFofLP84tl1 zpzY&}I7-g;_V_kQtCQ)A*JXFYre_5Q-|)^ms8oLh>E*nc>QIUe6r=7ldCZvmF{1yO z(5pj>{3{dE9%-si)xI1$U3QUv?_^WOqesSX&J6zw($=T(+i2K7c+mY51CO+PO*t;| z{RA!l;%q)|!MEj~=c|S9sI=?iqlYIJ@@?A(kV#$Y>iZsmA1oY4u0xhR?h|k|=Lc!) zUFxWch!i1VYr{=Ci%&S?TT4$+sT#P>85P_Uzbv_$N!2kt)MD9Jo}h+^RoJS-x}%de zz2m%7@?>FDz_9aNQy64Gbt@ zvHxc>H8BVizP)h7BOvb(r1RWP<2}=e_7oNjty0X#Jn?W}nMcdkQ zfi0Wwa*jT6xJFzL-1#`bCotqB>}=ZL`^BE(Co*_gjgxdn)8dy_lw0Y(OtlrwXxVPM z`X=g>-qH9MH-b4&YzCh#J7i0)Iy;w~_E^qjJ`%Ef@d%A9{K@(qU(I_!-rMQcT;Wl! z3odhyTUp$C>y37c)dx1Qlp|nX+IA(XP8slC!^~Kehh5jmU$ooO($EyzdLog6%8(e{ zga$B!AH0AB7#{QGG*Da;Xj=d~%<&6xH2MOIWslFwRT!k26C6r|yBS=KIhAOpCw)Sf z!_T{wg?z9`&Uz|8%h#gWeXr&46`K7=N~<3+GirZp$xrzx>ijb=PqMqkDsQSGCF+^- z?;N+*nW<;7?we}=ejcdq9A}rX}q; zbcn+b^UBR6AN)dBqplR-r)Llv#(%tJ^h2+Luz|0xWs3^5fo(8%;~3}z>;<-Q|rhP%chpHg)PGD zhQ7X!zUQrf8`Kv)=VRm(Qh4Bj&7U@6#VXrwKg^W#LRh%uv=weM(t*-*4=}AXX4MV! zT$|p~V+U>9`rt3GhC=)5xTpl+!}wta_BEVJLG`?uyTXIZG=_Am_vQ&RWM{06`dSN; zCKEQX{BUP`xhb#Q#Hi?eMy>HmXW1TK()#`uh1BJd$q%-A9R*{XB?P~GwrQX9$=Y_) zz&Q8MG#W{Ti=yPRWgS)8xN%np;{CH0bp@KKAG^ zsc?SuIOw*ibV_;a>q})vDc*7)Tc6$;#KiKUcBc8Ta5OXfM{C-? 
z8XjNcv3$eK>TloTVD=M7nA=F!)x?AwC~HLgg3$kd`ehV8f+J|nQuo2WkD7}GU=;;8 z-X}{}_MsR-lZEWH%kOi5R3N=EEuUC}s?pPPGzTXu(vN-imJ1&`pRpNV&K)+NRJ@r^ zdfI+cY-E>Qa75Y(*P{?EAULS;C)5ucjdMNBlcWVY+Xn0c~z{7J5V5xfP+u$9 zSoK%y+FIm${u5U@q$IxtAFu5^JS|Zga)i>4;XDKq30Hp5Gd*MsZt4@6=e|HP+PsMZ zt|F`H)PMpqsqGB%8>+qH-C7T6mj}nmafqfZAL=j+nwtN?@(-nk|#Hm|CM^5W@r#- zI9#{(;|`x6vc7%AlckopYXZ*GIG)vDnIm`FK#LPT+*X!v3NpxAnKsGg;VRwML7oKh3Mmz0zdK6un;fa z)KHX$8o7`^lN*g`;P018-o+Y$!uMCV4YCI9lp{ZVj$_N2hniRGEZVk6m+qTPzd(!I zN9O0s*q(ARV(jA13?>1)0+t{g+5#wy|}tGT|fsiNu}7|z9CMw7yG;A)(?dnyNu4yg^60EJiWT} z;TeNBxxqW8MW1)*UvJOQ@*Y;pOH7pZdKC0=@9KJ|0qJ3nQTUdKw)DBHPRdoeoqT4!pR1 z?DBSoQV#mekj*dHPFC(X({z@d%W?Vj_RWZjIfWsUI^s`k8d|KbF7DkKTD9ygVi}uwb4`d2N2Iy+AewMW$rI0eivhKOPKz zX?g2mEx{^U3RHPw?TusrqL~xi)0DWJvGqx&D)*AE7LL7_6$8Wb>jV=?rL)4nMQIAKj&6p?lE+6HnQsP-rc&^G- zaqEuC&r*J!cf2y&91;(l`I)GC=V`hXZMa2#kp?^Yxl4-4j4b2{XTfcBl(n>N}$PeHkIAO zkKI0P%(2?!QzAI%yyCjCeE6kae@iy+x=YggUX$drBJ$hF*bZI9^@G_E`@qciE7=&x zCKP2HfKw1>6zj+MtCqC~hChFoxX0Mctw3F3vAz22N|u+jZ1lsPX09;%(Wf{%jJw@{u!Hn_0^9P}X9P=Y^!xA+_r@_sbe)-fZSqv10A&s}>0gC^R^9 zngmc?6Tq3LV3DwMQ6rwny@%x=Xc?*AH;6vPM=dXR*prD`#r*DNlBu)&?@js7?>ab^ z6@QZLVduKKT(6&VD-0qz4OXqLmAUnnmiTavl)Gt;LvKUBx!miavAiLyj?Z!fNV-UM=)tAKb8{8MS7c*_7 z8nY^ZjHKuxTcePBH19a^O}p1!$x;>>`B{-&>| z*Bqo|54srsZGB<6>%;spt#rL5|H7FcZ*(@?vzj&9qR5%dorfQiniHh6k23D}XG#f+ z&$Kq_TIQvWVjOtpXm(j!GN0c0aI$^f(yb92y^jfpBM!b~q4HX$-V!P9K^Natcx;Y| zEc^XAr|xaR9*=$`R4!B0vHg6gCP$p4?>xELcI9`(d&^OyE01G+j*7pl)AJ@>IQnOF zRr%Oh-dt`|0PQrVu*#mF-BRyOB9OX3HHlI{@45_bxX!DPm~V?E%@OzIEz-W`@be|P zcGf*=Klt2qf_~|S$Ml->&S}^5)!8<>j}<{XCE2bhlI`n$r*(DxXqYuS?zK#@ba2~N z@4C{v_O4PkJPJ=gDU{rLv@zS^Yf8)iIx#S8RgbC2_s6v@$tKxlKV4(g2z(wOA_a6R zSStiVQp6XhjD5mZnZwnZs1NV?eo>Is!8DNUu+D4e>lf!=7wGXWAApT-as1>fDk_-* zhX~1^E(MwNf;0NtIilD}`GzXt&wHfCww|2d(_>?wd^6WDGxFU2fdca{`F-3_UuXDfV3&;N4OIdlI>5XBI zj+I+@6cZJX8?Ko1Q@ijxOpit`zQ6V8a+}1xzkkTDIYp7tJC5XHYXwNx#ABEpqwUp- zU8}mkAmm<)6q3h$`}tF+PY-@%l&{1}Ua^{(Mq{(PENjl!yJ;kn@D zh?KF{_+%u|VBBb6)i|p4<_}T*VbUMouQ2p_b>81_>G58?Ks1F?9$gLTsVt`Mp)!je zXznW%S6?tWnvryZyj1NO2V|!7eP%D6F6C56%uXj?3MoZw&*^Z_L-_o#K6{LK@ zK|T$$_M)k{Tv~Tb!CU;Z0E$ex7#6_%cTz>Cycwno| z&iH4E%{NoV^0NBtsAFDrVeNR@4-Bg4gocY~1yvs`1elz97p@p`toFO1y^UN%7*wgNWcI zKbEH^mY=@R-QMGz*sQVF<;BEsW!i1S!Q_`*5yuX&%n&cc)NS9%&Ji_*HlD)Cb@C(B z4DImW3~A^dKJ&~4`ur9%8M@2vsOrl$J3sj;sqeox_Ho8^h3vu*RmxgtIcRL@0@t?c_5Yv<;a`&2QIv^{U}sm+A? 
zT-ka^eNW<;NbKPC%bH5MJDoOSPkef-yiSh4oHCs%eRGdgynhMh_72Zn;Z+&?a{*WL zO@=yi#Q>x5I+U_==kF!m>o25La)c|B8V1Ho`BUzunoF3DcAn3Dpn)4#T%wt-$6t0h zYCOA?JTvxQRVZTVskfeTvMWyX9`d3rfu+IMyDW5-j}2u;oMo-~+}F?iLjp_|BOh9W zSPKoF)9HE&?%t}GRPs(-6oqr?_DlJg0UeEH{ z?!v(n8slHh=RW5yL>ipTY#6+%BrMbV`;w%75NemA_ow*%FU8rtb$i#E+w%5r`>~bQ zIB&J;8Fmt7$wAh)ChzUG6)?TsiG2}9i&T>Y^*wN@RA0${UyTC&3elZRQ4==|Q(w1~ z6#SwwkmX`wU?S{$F1KEosl+)@fJ)<2clTPx)x}<4d90H#NXDRL z@=uxbqD}N1{rY1JkqnuaCVcYpXKPCjw z30z*Qd(@qv?5d_%=PJ!ZjZ6DXF;KzyJ=gmD*25bsJbNA9Gx#lbNVqfdoXe7+cTB^= zvZMjeHKv1msRW|lhyl32(>E3_|7mNxI;{Hf?oG<}c?Fl9!B-|G%c5kSx?kLj1p5cK zfl8 zAtvIx;lr{=gDjM4vDACo*W*O0t-V-f<{Ja@4z$H|h1q>4WxrrN54_J8|GB-bhAy`3 zFVC`X9Q8idTfg?;RG%AX=&hc?9~aVX+J$4~zWgm1oj)J#n|jIAaNP99LeY=UuaECy zI8HIO`C4iJAV;E%VwdxvX4nbicfG1O)+KAdY>>RZd%_)y=+mc9PrpC+vKJEhxZfu*MrnwLeq3ZjUIdF|y3 zt*3V~&kEo;UmqJUR+7IVM!I9fwJp}2+nD0;jTXbi4Oucj$HlMx!A>@d7Wu*`MWeE% z`^K#`%#KA|;fS?LY^F=$%OWf~wTv9b_s{4?m zLZ2aK9Q1wS>)pM|7cYJ>4RxL9ste@tsf7$rM$j&xWerXgPZnH6*In!K_?fPTIr%h8 zy;pNJ0d+Z-%0GvOzLyrCYp_!12#{+c_gG)wbkS#mI2J7HQ7(6$XOX?c=sCfy86hjS zCeHC7_~^sxBJWfDC#>SeEhnF)`V^D!s8Rx8!q;f>GPj zNn%^p-1y6$j!>dbvt4gFwqk$h@PVk%mdzDg*#ojpxY%ZyMtmJg5yJ3iuNjg9F1hIyHg<0;z2y$d-eXAg_XNhf|b zQhh2tcKP|dnTgd_S7C;)HRonm&E4)AH&(POHjFvhE=IE5_r6ePIwCkWJ!iDj=F3a# zB7Wbm6-ooucT;~gZj&-G(KXFiiccgj{`W3_?# zE&Y$qbM^kTB%c8^QOr@Kpe@#lutVgc7zaI4K-D%wLz$Iyb_Tx^-0kRys{e>Jx z14hHX%ZB7tNR5qk8E|>f398&wjh%8dzThK!Kj>VhMdyLc^xq?f`y_^bZyZ^sxOTlg z#%S_bl?}g7D?c40kK&&(wR!f2oa_rPjhh9OFIwgD-h(|pF_=}*2`xDyt;E(sW}Iuy zfb3IQy5-N|;pw?A8o3zI*W)qrIPwola}4N;L}UC@rjaxWjGQy@hjk!zu~YWC8H`$> zPTs*!hRIw5heG$2KW`r(iQrJWyvJ4h@E-rdreEPVPB`p5V{Fg$K~_3eCGkVIZGZ{S z>(5=m8b0!5<(?6-4#k|8-u~X9SJJa)AC0k(;@+~$#p}w$eG0X2Qe~vnSTVJK;Y#E3Zzlf>Jk-c6~U1!Q}o>k4)ko%CX_fk?m zf+^WY>jRVT7rEiczJy}+qEjO%JCVr66L`0mJ*Ur}t%h9=?HBDIV9K3BJHK-z zyRLV5aetYc*rwZLxBKg#GQKzQ6^Yx0o8Y3eyc;qgSZ^R4Id%b??X?TkD(tv~g8J=9$_x;5X6c1qtV zvVCrD`@T5SXz6uWQEI+yCuYq{io8a!ff%X&%CxNKxkxau$nPAs|BQs4 zfyefj22Wh+O(%FhZ@m=J8sUMnpoiIYR-6SzX17Yl?h|mHwT8T zv~H$KS0*g+sLEhI_72rI z9j!q#0z1t!onfi_6%00$%URtz2%tMTwpXUNM8K7G%maI>%fa6kY1UdXh<^bcZtLie zebi~>%4K(N4X%osiRtU$cMzQRLAwPqApo?J{K=s(C?RoW1Z0%Ufms1$gb2u#-i2)` zA|W9#RcFWQ3z+#@Ac3#-iY2{SAKtsFQLx|!1M9nMz3=&sj7CMypLAAl zq0Vr)3u=Evk6k!BAy66fvkSH`}m8Vjg?|boluVSjf%C4J!7|LzheF34P;i1@GMp=q#hfp8j>oN%2vIvcXWE z*hP*aN91?$YkhzIl-57dpVOS}p^8u;R~3tKzuQ^Y8kO7&fdDb!m(dm3{pFavv%so(AsNBtCq zF_d=YBVWY%P5D-f!y;#}2O@78Xrz6?VMfvlBwsymvU|eKdO4(2;TGMLzmQ*NAKs0t zCplU;O{(jUhV5BZf}$s3}moNs8d?L49|~zAs8b3 zCCSXlaZ`TDA_f7;KT&Y@VS^_~Gg$$HY7;|#R6cl=+ZWXaWkZ?eSAg(Gm1S%>59x3d zH;Dq?hL&K|FHSLX)kFs2@db=>cz7`=3@al=EX-AYBj-eRiVAm${O+_xgnPMVN_aDA zeHiQL$YDn=flR>RgwU8Hu;9mviHdE`Ey!%=HXIO}|9ReCHj=+F({YQwu^{epXy?7< zQADcc%(6J$^RTXGnTj@7#jS&Hln-AfT?O$`e;m)p3lm-Agr7UAV;82MvD;Toq z|J=gkO>`n;4WUkL6~oajX}GraEQoIOUUZ}Mbn>-O%rq58@g zZy{haGhuG#>)F@9{W8EQt8C7*vZ*=)!QeA5Pp!M`P-;s2p)P~1| zmo`5D$a(9Bix@0sE-|eV@&gvWF2+y%*u}CJ*_|hGVYft2ys@p{TWWrkp+<0cfzwgK zT!P?S$7vAi-x6Ees)aQ;o{>iy}wElE)<@vv}2>sa~Zc%@8#-j%~Arj9N6cjp~ zt6^eLRwrOYa_tm50>!Ft)Z=jEs55Vlg?SYYbo}CZ?AlHB9`ofljGdWGI*lmxYNE2f zpWnGm`~?$kJgN3G>sx9}AlSVwQKIp}`Qq053X7-N1GuH8RI<#PV}iG-zLdU~Ll~vw z!h?CKetg+@bi()7gflljF+yT3L)7#Ve?Q5mQQv64s;N2ZS#>OGZMaTs(nA`JFpd0- z@iYm*Kwr_e)yV@@{@8_^Z`uu=^w!gR-7V_;ViP})JE4|RuY`IZ^Bs0S z*ZCIrwf7GS&he%|<)nv)Ps;-_W#h=}@1XSrsLJy-VbEB969?8%&t|Q+&}1i|1@Y@Q zQ6C*0o|&E3CF9dfqp3gJ@829n;H0$VWa|-Q{;E9T{cYlEBX}*TYhi?k+GazepNQpp z!&S8Ew&OxLW{|l5mmb=iNF!k1c^7{uL$uW0CsbnD`!<8Uzl3?oJePNw1*5|;8w>Pi zZv5ukz9?~A(lI;Q?V&ar-dsGteCV#@a64a|D(yQNT!(L_c?)x1%*BR7)-Q%jN>{p< z&E~&32nd64eoEY&ffTcB3^5G(#r5Fvj{IsJw3#;_1KIO#&WvNEOko@rlKXegw1lXR 
[base85-encoded GIT binary patch data for a deleted binary artifact omitted]
diff --git a/examples/running-llamas/artifacts/Llama-7b/full_report.csv b/examples/running-llamas/artifacts/Llama-7b/full_report.csv
deleted file mode 100644
index abbe8b67..00000000
--- a/examples/running-llamas/artifacts/Llama-7b/full_report.csv
+++ /dev/null
@@ -1,36 +0,0 @@
-,launcher.name,launcher._target_,launcher.start_method,backend.name,backend.version,backend._target_,backend.seed,backend.inter_op_num_threads,backend.intra_op_num_threads,backend.continuous_isolation,backend.isolation_check_interval,backend.delete_cache,backend.no_weights,backend.device_map,backend.torch_dtype,backend.eval_mode,backend.disable_grad,backend.amp_autocast,backend.amp_dtype,backend.torch_compile,backend.to_bettertransformer,backend.use_flash_attention_2,backend.quantization_scheme,backend.data_parallel,backend.deepspeed_inference,backend.peft_strategy,benchmark.name,benchmark._target_,benchmark.duration,benchmark.warmup_runs,benchmark.memory,benchmark.energy,benchmark.input_shapes.batch_size,benchmark.input_shapes.sequence_length,benchmark.input_shapes.num_choices,benchmark.input_shapes.feature_size,benchmark.input_shapes.nb_max_frames,benchmark.input_shapes.audio_sequence_length,benchmark.new_tokens,benchmark.can_diffuse,benchmark.can_generate,benchmark.generate_kwargs.num_return_sequences,benchmark.generate_kwargs.max_new_tokens,benchmark.generate_kwargs.min_new_tokens,benchmark.generate_kwargs.do_sample,benchmark.generate_kwargs.use_cache,benchmark.generate_kwargs.pad_token_id,benchmark.generate_kwargs.num_beams,experiment_name,model,task,device,hub_kwargs.revision,hub_kwargs.cache_dir,hub_kwargs.force_download,hub_kwargs.local_files_only,environment.optimum_version,environment.optimum_commit,environment.transformers_version,environment.transformers_commit,environment.accelerate_version,environment.accelerate_commit,environment.diffusers_version,environment.diffusers_commit,environment.python_version,environment.system,environment.cpu,environment.cpu_count,environment.cpu_ram_mb,environment.gpus,forward.latency(s),forward.throughput(samples/s),forward.peak_memory(MB),forward.max_memory_used(MB),forward.max_memory_allocated(MB),forward.max_memory_reserved(MB),generate.latency(s),generate.throughput(tokens/s),decode.latency(s),decode.throughput(tokens/s),generate.peak_memory(MB),generate.max_memory_used(MB),generate.max_memory_allocated(MB),generate.max_memory_reserved(MB),backend.quantization_config.exllama_config.version -0,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,spawn,pytorch,2.1.1+cu118,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,42,,,True,1.0,False,True,,float16,True,True,False,,False,False,False,,False,False,,inference,optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark,10,10,True,False,1,256,1,80,3000,16000,512,False,True,1,512,512,False,True,0,1,fp16,NousResearch/Llama-2-7b-hf,text-generation,cuda,main,,False,False,1.14.1,,4.35.2,,0.24.1,,,,3.10.12,Linux, AMD EPYC 7742 64-Core Processor,128,540671,['NVIDIA A100-SXM4-80GB'],0.0336,29.8,15239,15239,13738,13767,13.2,38.8,13.2,38.7,15954,15954,14356,14480, -1,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,spawn,pytorch,2.1.1+cu118,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,42,,,True,1.0,False,True,,float16,True,True,False,,False,False,False,,False,False,,inference,optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark,10,10,True,False,4,256,1,80,3000,16000,512,False,True,1,512,512,False,True,0,1,fp16,NousResearch/Llama-2-7b-hf,text-generation,cuda,main,,False,False,1.14.1,,4.35.2,,0.24.1,,,,3.10.12,Linux, AMD EPYC 7742 64-Core Processor,128,540671,['NVIDIA A100-SXM4-80GB'],0.109,36.7,16107,16107,14295,14636,13.8,148.0,13.7,149.0,26434,26434,16774,24960, 
-2,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,spawn,pytorch,2.1.1+cu118,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,42,,,True,1.0,False,True,,float16,True,True,False,,False,False,False,,False,False,,inference,optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark,10,10,True,False,32,256,1,80,3000,16000,512,False,True,1,512,512,False,True,0,1,fp16,NousResearch/Llama-2-7b-hf,text-generation,cuda,main,,False,False,1.14.1,,4.35.2,,0.24.1,,,,3.10.12,Linux, AMD EPYC 7742 64-Core Processor,128,540671,['NVIDIA A100-SXM4-80GB'],0.826,38.7,23464,23464,19487,21992,27.2,602.0,26.4,619.0,53264,53264,39331,84422, -3,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,spawn,pytorch,2.1.1+cu118,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,42,,,True,1.0,False,True,,float16,True,True,False,,False,False,False,,False,False,,inference,optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark,10,10,True,False,64,256,1,80,3000,16000,512,False,True,1,512,512,False,True,0,1,fp16,NousResearch/Llama-2-7b-hf,text-generation,cuda,main,,False,False,1.14.1,,4.35.2,,0.24.1,,,,3.10.12,Linux, AMD EPYC 7742 64-Core Processor,128,540671,['NVIDIA A100-SXM4-80GB'],1.65,38.8,31911,31911,25422,30440,45.9,714.0,44.2,740.0,67584,67584,65112,84420, -4,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,spawn,pytorch,2.1.1+cu118,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,42,,,True,1.0,False,True,,float16,True,True,False,,False,False,False,,False,False,,inference,optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark,10,10,True,False,8,256,1,80,3000,16000,512,False,True,1,512,512,False,True,0,1,fp16,NousResearch/Llama-2-7b-hf,text-generation,cuda,main,,False,False,1.14.1,,4.35.2,,0.24.1,,,,3.10.12,Linux, AMD EPYC 7742 64-Core Processor,128,540671,['NVIDIA A100-SXM4-80GB'],0.21,38.1,17172,17172,15037,15701,14.1,290.0,13.9,294.0,64977,64977,19997,63503, -5,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,spawn,pytorch,2.1.1+cu118,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,42,,,True,1.0,False,True,,float16,True,True,False,,False,False,False,,False,False,,inference,optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark,10,10,True,False,16,256,1,80,3000,16000,512,False,True,1,512,512,False,True,0,1,fp16,NousResearch/Llama-2-7b-hf,text-generation,cuda,main,,False,False,1.14.1,,4.35.2,,0.24.1,,,,3.10.12,Linux, AMD EPYC 7742 64-Core Processor,128,540671,['NVIDIA A100-SXM4-80GB'],0.412,38.8,19251,19251,16520,17779,17.5,468.0,17.1,478.0,28076,28076,26442,84420, -6,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,spawn,pytorch,2.1.1+cu118,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,42,,,True,1.0,False,True,,float16,True,True,False,,False,False,False,,False,False,,inference,optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark,10,10,True,False,2,256,1,80,3000,16000,512,False,True,1,512,512,False,True,0,1,fp16,NousResearch/Llama-2-7b-hf,text-generation,cuda,main,,False,False,1.14.1,,4.35.2,,0.24.1,,,,3.10.12,Linux, AMD EPYC 7742 64-Core Processor,128,540671,['NVIDIA A100-SXM4-80GB'],0.059,33.9,15478,15478,13924,14006,13.5,75.9,13.4,76.3,17091,17091,15162,15617, 
-7,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,spawn,pytorch,2.1.1+cu118,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,42,,,True,1.0,False,True,,float16,True,True,False,,False,True,False,,False,False,,inference,optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark,10,10,True,False,1,256,1,80,3000,16000,512,False,True,1,512,512,False,True,0,1,fp16+bt,NousResearch/Llama-2-7b-hf,text-generation,cuda,main,,False,False,1.14.1,,4.35.2,,0.24.1,,,,3.10.12,Linux, AMD EPYC 7742 64-Core Processor,128,540671,['NVIDIA A100-SXM4-80GB'],0.0318,31.4,15239,15239,13738,13767,12.2,42.0,12.2,41.9,15954,15954,14356,14480, -8,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,spawn,pytorch,2.1.1+cu118,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,42,,,True,1.0,False,True,,float16,True,True,False,,False,True,False,,False,False,,inference,optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark,10,10,True,False,4,256,1,80,3000,16000,512,False,True,1,512,512,False,True,0,1,fp16+bt,NousResearch/Llama-2-7b-hf,text-generation,cuda,main,,False,False,1.14.1,,4.35.2,,0.24.1,,,,3.10.12,Linux, AMD EPYC 7742 64-Core Processor,128,540671,['NVIDIA A100-SXM4-80GB'],0.104,38.5,16093,16093,14295,14621,13.1,156.0,13.0,157.0,26457,26457,16774,24983, -9,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,spawn,pytorch,2.1.1+cu118,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,42,,,True,1.0,False,True,,float16,True,True,False,,False,True,False,,False,False,,inference,optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark,10,10,True,False,32,256,1,80,3000,16000,512,False,True,1,512,512,False,True,0,1,fp16+bt,NousResearch/Llama-2-7b-hf,text-generation,cuda,main,,False,False,1.14.1,,4.35.2,,0.24.1,,,,3.10.12,Linux, AMD EPYC 7742 64-Core Processor,128,540671,['NVIDIA A100-SXM4-80GB'],0.804,39.8,23334,23334,19487,21862,27.3,600.0,26.5,617.0,53531,53531,39331,84408, -10,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,spawn,pytorch,2.1.1+cu118,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,42,,,True,1.0,False,True,,float16,True,True,False,,False,True,False,,False,False,,inference,optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark,10,10,True,False,64,256,1,80,3000,16000,512,False,True,1,512,512,False,True,0,1,fp16+bt,NousResearch/Llama-2-7b-hf,text-generation,cuda,main,,False,False,1.14.1,,4.35.2,,0.24.1,,,,3.10.12,Linux, AMD EPYC 7742 64-Core Processor,128,540671,['NVIDIA A100-SXM4-80GB'],1.65,38.8,31651,31651,25422,30180,46.7,702.0,45.1,725.0,66915,66915,65111,84420, -11,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,spawn,pytorch,2.1.1+cu118,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,42,,,True,1.0,False,True,,float16,True,True,False,,False,True,False,,False,False,,inference,optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark,10,10,True,False,8,256,1,80,3000,16000,512,False,True,1,512,512,False,True,0,1,fp16+bt,NousResearch/Llama-2-7b-hf,text-generation,cuda,main,,False,False,1.14.1,,4.35.2,,0.24.1,,,,3.10.12,Linux, AMD EPYC 7742 64-Core Processor,128,540671,['NVIDIA A100-SXM4-80GB'],0.2,40.0,17143,17143,15037,15672,13.3,308.0,13.1,312.0,65229,65229,19997,63755, 
-12,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,spawn,pytorch,2.1.1+cu118,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,42,,,True,1.0,False,True,,float16,True,True,False,,False,True,False,,False,False,,inference,optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark,10,10,True,False,16,256,1,80,3000,16000,512,False,True,1,512,512,False,True,0,1,fp16+bt,NousResearch/Llama-2-7b-hf,text-generation,cuda,main,,False,False,1.14.1,,4.35.2,,0.24.1,,,,3.10.12,Linux, AMD EPYC 7742 64-Core Processor,128,540671,['NVIDIA A100-SXM4-80GB'],0.4,40.0,19186,19186,16520,17714,17.4,471.0,17.0,481.0,28109,28109,26441,84410, -13,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,spawn,pytorch,2.1.1+cu118,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,42,,,True,1.0,False,True,,float16,True,True,False,,False,True,False,,False,False,,inference,optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark,10,10,True,False,2,256,1,80,3000,16000,512,False,True,1,512,512,False,True,0,1,fp16+bt,NousResearch/Llama-2-7b-hf,text-generation,cuda,main,,False,False,1.14.1,,4.35.2,,0.24.1,,,,3.10.12,Linux, AMD EPYC 7742 64-Core Processor,128,540671,['NVIDIA A100-SXM4-80GB'],0.0558,35.8,15470,15470,13924,13998,12.7,80.6,12.6,81.1,17087,17087,15162,15613, -14,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,spawn,pytorch,2.1.1+cu118,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,42,,,True,1.0,False,True,,float16,True,True,False,,False,False,True,,False,False,,inference,optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark,10,10,True,False,1,256,1,80,3000,16000,512,False,True,1,512,512,False,True,0,1,fp16+fa2,NousResearch/Llama-2-7b-hf,text-generation,cuda,main,,False,False,1.14.1,,4.35.2,,0.24.1,,,,3.10.12,Linux, AMD EPYC 7742 64-Core Processor,128,540671,['NVIDIA A100-SXM4-80GB'],0.0321,31.2,15239,15239,13738,13767,12.2,42.0,12.2,41.9,15954,15954,14356,14480, -15,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,spawn,pytorch,2.1.1+cu118,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,42,,,True,1.0,False,True,,float16,True,True,False,,False,False,True,,False,False,,inference,optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark,10,10,True,False,4,256,1,80,3000,16000,512,False,True,1,512,512,False,True,0,1,fp16+fa2,NousResearch/Llama-2-7b-hf,text-generation,cuda,main,,False,False,1.14.1,,4.35.2,,0.24.1,,,,3.10.12,Linux, AMD EPYC 7742 64-Core Processor,128,540671,['NVIDIA A100-SXM4-80GB'],0.104,38.5,16097,16097,14295,14625,12.7,161.0,12.6,162.0,26268,26268,16774,24794, -16,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,spawn,pytorch,2.1.1+cu118,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,42,,,True,1.0,False,True,,float16,True,True,False,,False,False,True,,False,False,,inference,optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark,10,10,True,False,32,256,1,80,3000,16000,512,False,True,1,512,512,False,True,0,1,fp16+fa2,NousResearch/Llama-2-7b-hf,text-generation,cuda,main,,False,False,1.14.1,,4.35.2,,0.24.1,,,,3.10.12,Linux, AMD EPYC 7742 64-Core Processor,128,540671,['NVIDIA A100-SXM4-80GB'],0.786,40.7,23347,23347,19487,21875,27.2,602.0,26.4,619.0,54047,54047,39331,84412, 
-17,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,spawn,pytorch,2.1.1+cu118,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,42,,,True,1.0,False,True,,float16,True,True,False,,False,False,True,,False,False,,inference,optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark,10,10,True,False,64,256,1,80,3000,16000,512,False,True,1,512,512,False,True,0,1,fp16+fa2,NousResearch/Llama-2-7b-hf,text-generation,cuda,main,,False,False,1.14.1,,4.35.2,,0.24.1,,,,3.10.12,Linux, AMD EPYC 7742 64-Core Processor,128,540671,['NVIDIA A100-SXM4-80GB'],1.56,41.0,31676,31676,25422,30205,46.3,708.0,44.7,732.0,67563,67563,65110,84422, -18,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,spawn,pytorch,2.1.1+cu118,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,42,,,True,1.0,False,True,,float16,True,True,False,,False,False,True,,False,False,,inference,optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark,10,10,True,False,8,256,1,80,3000,16000,512,False,True,1,512,512,False,True,0,1,fp16+fa2,NousResearch/Llama-2-7b-hf,text-generation,cuda,main,,False,False,1.14.1,,4.35.2,,0.24.1,,,,3.10.12,Linux, AMD EPYC 7742 64-Core Processor,128,540671,['NVIDIA A100-SXM4-80GB'],0.197,40.6,17139,17139,15037,15667,13.1,313.0,12.9,317.0,65229,65229,19997,63755, -19,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,spawn,pytorch,2.1.1+cu118,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,42,,,True,1.0,False,True,,float16,True,True,False,,False,False,True,,False,False,,inference,optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark,10,10,True,False,16,256,1,80,3000,16000,512,False,True,1,512,512,False,True,0,1,fp16+fa2,NousResearch/Llama-2-7b-hf,text-generation,cuda,main,,False,False,1.14.1,,4.35.2,,0.24.1,,,,3.10.12,Linux, AMD EPYC 7742 64-Core Processor,128,540671,['NVIDIA A100-SXM4-80GB'],0.392,40.8,19182,19182,16520,17710,17.2,476.0,16.8,487.0,28256,28256,26442,84404, -20,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,spawn,pytorch,2.1.1+cu118,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,42,,,True,1.0,False,True,,float16,True,True,False,,False,False,True,,False,False,,inference,optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark,10,10,True,False,2,256,1,80,3000,16000,512,False,True,1,512,512,False,True,0,1,fp16+fa2,NousResearch/Llama-2-7b-hf,text-generation,cuda,main,,False,False,1.14.1,,4.35.2,,0.24.1,,,,3.10.12,Linux, AMD EPYC 7742 64-Core Processor,128,540671,['NVIDIA A100-SXM4-80GB'],0.0548,36.5,15470,15470,13924,13998,13.0,78.8,12.9,79.2,17087,17087,15162,15613, -21,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,spawn,pytorch,2.1.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,42,,,True,1.0,False,False,,float16,True,True,False,,False,False,False,gptq,False,False,,inference,optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark,10,10,True,False,1,256,1,80,3000,16000,512,False,True,1,512,512,False,True,0,1,fp16+gptq+exllamav2,TheBloke/LLaMa-7B-GPTQ,text-generation,cuda,main,,False,False,1.14.1,,4.35.2,,0.24.1,,,,3.10.12,Linux, AMD EPYC 7742 64-Core Processor,128,540671,['NVIDIA A100-SXM4-80GB'],0.0457,21.9,6620,6620,4991,5144,14.5,35.3,14.5,35.2,7479,7479,5609,6004,2 
-22,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,spawn,pytorch,2.1.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,42,,,True,1.0,False,False,,float16,True,True,False,,False,False,False,gptq,False,False,,inference,optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark,10,10,True,False,4,256,1,80,3000,16000,512,False,True,1,512,512,False,True,0,1,fp16+gptq+exllamav2,TheBloke/LLaMa-7B-GPTQ,text-generation,cuda,main,,False,False,1.14.1,,4.35.2,,0.24.1,,,,3.10.12,Linux, AMD EPYC 7742 64-Core Processor,128,540671,['NVIDIA A100-SXM4-80GB'],0.122,32.8,7635,7635,5548,6159,15.1,136.0,15.0,136.0,17959,17959,8027,16483,2 -23,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,spawn,pytorch,2.1.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,42,,,True,1.0,False,False,,float16,True,True,False,,False,False,False,gptq,False,False,,inference,optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark,10,10,True,False,32,256,1,80,3000,16000,512,False,True,1,512,512,False,True,0,1,fp16+gptq+exllamav2,TheBloke/LLaMa-7B-GPTQ,text-generation,cuda,main,,False,False,1.14.1,,4.35.2,,0.24.1,,,,3.10.12,Linux, AMD EPYC 7742 64-Core Processor,128,540671,['NVIDIA A100-SXM4-80GB'],0.833,38.4,15184,15184,10740,13709,35.8,458.0,35.0,467.0,45304,45304,30585,84418,2 -24,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,spawn,pytorch,2.1.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,42,,,True,1.0,False,False,,float16,True,True,False,,False,False,False,gptq,False,False,,inference,optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark,10,10,True,False,64,256,1,80,3000,16000,512,False,True,1,512,512,False,True,0,1,fp16+gptq+exllamav2,TheBloke/LLaMa-7B-GPTQ,text-generation,cuda,main,,False,False,1.14.1,,4.35.2,,0.24.1,,,,3.10.12,Linux, AMD EPYC 7742 64-Core Processor,128,540671,['NVIDIA A100-SXM4-80GB'],1.61,39.8,23439,23439,16675,21963,53.4,614.0,51.8,631.0,59111,59111,56364,84418,2 -25,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,spawn,pytorch,2.1.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,42,,,True,1.0,False,False,,float16,True,True,False,,False,False,False,gptq,False,False,,inference,optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark,10,10,True,False,8,256,1,80,3000,16000,512,False,True,1,512,512,False,True,0,1,fp16+gptq+exllamav2,TheBloke/LLaMa-7B-GPTQ,text-generation,cuda,main,,False,False,1.14.1,,4.35.2,,0.24.1,,,,3.10.12,Linux, AMD EPYC 7742 64-Core Processor,128,540671,['NVIDIA A100-SXM4-80GB'],0.223,35.9,8700,8700,6289,7224,15.3,268.0,15.1,271.0,56502,56502,11250,55027,2 -26,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,spawn,pytorch,2.1.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,42,,,True,1.0,False,False,,float16,True,True,False,,False,False,False,gptq,False,False,,inference,optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark,10,10,True,False,16,256,1,80,3000,16000,512,False,True,1,512,512,False,True,0,1,fp16+gptq+exllamav2,TheBloke/LLaMa-7B-GPTQ,text-generation,cuda,main,,False,False,1.14.1,,4.35.2,,0.24.1,,,,3.10.12,Linux, AMD EPYC 7742 64-Core Processor,128,540671,['NVIDIA A100-SXM4-80GB'],0.425,37.6,10866,10866,7772,9391,20.5,400.0,20.1,407.0,65550,65550,17694,84385,2 
-27,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,spawn,pytorch,2.1.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,42,,,True,1.0,False,False,,float16,True,True,False,,False,False,False,gptq,False,False,,inference,optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark,10,10,True,False,2,256,1,80,3000,16000,512,False,True,1,512,512,False,True,0,1,fp16+gptq+exllamav2,TheBloke/LLaMa-7B-GPTQ,text-generation,cuda,main,,False,False,1.14.1,,4.35.2,,0.24.1,,,,3.10.12,Linux, AMD EPYC 7742 64-Core Processor,128,540671,['NVIDIA A100-SXM4-80GB'],0.0708,28.2,6745,6745,5176,5270,14.7,69.7,14.6,70.0,8696,8696,6414,7220,2 -28,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,spawn,pytorch,2.1.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,42,,,True,1.0,False,True,,float16,True,True,False,,False,False,False,gptq,False,False,,inference,optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark,10,10,True,False,1,256,1,80,3000,16000,512,False,True,1,512,512,False,True,0,1,fp16+gptq+exllamav1,TheBloke/LLaMa-7B-GPTQ,text-generation,cuda,main,,False,False,1.14.1,,4.35.2,,0.24.1,,,,3.10.12,Linux, AMD EPYC 7742 64-Core Processor,128,540671,['NVIDIA A100-SXM4-80GB'],0.0416,24.0,5892,5892,4265,4416,14.6,35.1,14.6,35.0,6752,6752,4883,5276,1 -29,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,spawn,pytorch,2.1.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,42,,,True,1.0,False,True,,float16,True,True,False,,False,False,False,gptq,False,False,,inference,optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark,10,10,True,False,4,256,1,80,3000,16000,512,False,True,1,512,512,False,True,0,1,fp16+gptq+exllamav1,TheBloke/LLaMa-7B-GPTQ,text-generation,cuda,main,,False,False,1.14.1,,4.35.2,,0.24.1,,,,3.10.12,Linux, AMD EPYC 7742 64-Core Processor,128,540671,['NVIDIA A100-SXM4-80GB'],0.106,37.7,6907,6907,4822,5431,15.5,132.0,15.4,133.0,17231,17231,7301,15755,1 -30,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,spawn,pytorch,2.1.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,42,,,True,1.0,False,True,,float16,True,True,False,,False,False,False,gptq,False,False,,inference,optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark,10,10,True,False,32,256,1,80,3000,16000,512,False,True,1,512,512,False,True,0,1,fp16+gptq+exllamav1,TheBloke/LLaMa-7B-GPTQ,text-generation,cuda,main,,False,False,1.14.1,,4.35.2,,0.24.1,,,,3.10.12,Linux, AMD EPYC 7742 64-Core Processor,128,540671,['NVIDIA A100-SXM4-80GB'],0.688,46.5,14457,14457,10014,12981,33.0,496.0,32.3,506.0,32224,32224,29858,84420,1 -31,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,spawn,pytorch,2.1.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,42,,,True,1.0,False,True,,float16,True,True,False,,False,False,False,gptq,False,False,,inference,optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark,10,10,True,False,64,256,1,80,3000,16000,512,False,True,1,512,512,False,True,0,1,fp16+gptq+exllamav1,TheBloke/LLaMa-7B-GPTQ,text-generation,cuda,main,,False,False,1.14.1,,4.35.2,,0.24.1,,,,3.10.12,Linux, AMD EPYC 7742 64-Core Processor,128,540671,['NVIDIA A100-SXM4-80GB'],1.32,48.5,22711,22711,15949,21235,51.1,641.0,49.8,657.0,83617,83617,55638,84418,1 
-32,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,spawn,pytorch,2.1.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,42,,,True,1.0,False,True,,float16,True,True,False,,False,False,False,gptq,False,False,,inference,optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark,10,10,True,False,8,256,1,80,3000,16000,512,False,True,1,512,512,False,True,0,1,fp16+gptq+exllamav1,TheBloke/LLaMa-7B-GPTQ,text-generation,cuda,main,,False,False,1.14.1,,4.35.2,,0.24.1,,,,3.10.12,Linux, AMD EPYC 7742 64-Core Processor,128,540671,['NVIDIA A100-SXM4-80GB'],0.191,41.9,7972,7972,5563,6496,19.6,209.0,19.4,211.0,55775,55775,10524,54299,1
-33,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,spawn,pytorch,2.1.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,42,,,True,1.0,False,True,,float16,True,True,False,,False,False,False,gptq,False,False,,inference,optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark,10,10,True,False,16,256,1,80,3000,16000,512,False,True,1,512,512,False,True,0,1,fp16+gptq+exllamav1,TheBloke/LLaMa-7B-GPTQ,text-generation,cuda,main,,False,False,1.14.1,,4.35.2,,0.24.1,,,,3.10.12,Linux, AMD EPYC 7742 64-Core Processor,128,540671,['NVIDIA A100-SXM4-80GB'],0.356,44.9,10137,10137,7046,8661,24.6,333.0,24.2,338.0,59378,59378,16969,84406,1
-34,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,spawn,pytorch,2.1.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,42,,,True,1.0,False,True,,float16,True,True,False,,False,False,False,gptq,False,False,,inference,optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark,10,10,True,False,2,256,1,80,3000,16000,512,False,True,1,512,512,False,True,0,1,fp16+gptq+exllamav1,TheBloke/LLaMa-7B-GPTQ,text-generation,cuda,main,,False,False,1.14.1,,4.35.2,,0.24.1,,,,3.10.12,Linux, AMD EPYC 7742 64-Core Processor,128,540671,['NVIDIA A100-SXM4-80GB'],0.0621,32.2,6018,6018,4450,4542,14.8,69.2,14.7,69.5,7968,7968,5688,6492,1
diff --git a/examples/running-llamas/artifacts/Llama-7b/generate_max_memory_allocated_bar_plot.png b/examples/running-llamas/artifacts/Llama-7b/generate_max_memory_allocated_bar_plot.png
deleted file mode 100644
index 1df43ed87b8aadd95413b4d39deb26322872f5b2..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 44722
[44722 bytes of base85-encoded GIT binary patch data for the deleted PNG omitted]
zR$7RtBiQ0_dagXOcY6w7Nj2Qc#3bYV&*$9SAXAE|FX5hPU6SMQ95twk@CE9=uusH2 zw>ZU@@nfRKRLP3PyO`tt&BK_GF3;Zvv#HR*&cg~{<_qyAI0z?(H;fz;Ef}}N1`y<) zPUAFqK5xL*#f)B&JI(T32U?8QM+OFw`dk4N!1XI&@AiGwrz`#n<|EG`kR@||VCgjH zeXqFh`9nAf!kUD1;EbUF9TKTa_|XEKEKHfuZRsDgW;Q-fs}Ff^A53Ourgw{AbkYi# zVQa=8hiLKOe6+8c8^ZDXAddu-Sb?OXa<0|z@UJvE^t zKHN&5U1c9iO4bcNAZs@7_khJ{O1LP@a+5B_Ut;o){9I_~Bi(zyEsIq%vj_G9$4xJn z;-hch<{;~G<@JBbJO=J5ZT|BGNvlx3knt-@6^hzp{<+6UK_})U*g}Yd0=@f5*(UDf zl`XA@Fy^-#wQn|{q#vQ5{U6x;cIduOJw;tAGfJh&z@-65wh&80%pU{v9xGrHqOTOE zQ|u{hDGGU^@cG7mR#l~;usAD9d@A*T*!izgm_DRaX0%Vc+pIQ8LRl^AMfJ<~mEufV z1o0E8NAu>y}G*HDD{of`Dh zf`dy8nh?zqw0%!cp$(_24f5m^vYi3sBC!$rTlZe5iQ2EeOnCiU<;)YxVTSugi&Uf)iVq24P2w$}b<>@)jHH zJU76|rtgKszGSWS-)0XWQ6<;?XO#Sxsqnve**=h9odb1$9g}gJc6W1lxzmWA($mm9 zpZedRf|$Gn03JdpTk_L6C*!E|#OBQ}=t{1&3X3e%u2Ex?n-KH>BBd7tfygoF>v(!$ zkh=r#JmPfwc)B7A&G6e<=fN7#)9<%+zV5hZ*hA@AVfsLAbCH};6q*=h!X;5tXs8dE zGw0a#z7IC~MWs$xYq&WGn-B4bpFZ@!s6my%xImL^i2q4ukUqmuJdgH`=uM4^&Iosn zRC>l6ImN;uY%$Wx`Kb4WNxc4Ny5g*J>TEw;?uU zfUQlZAf9)9U0sKl#WXR-dJUg7&DE-g6wm0cOw!=exVXEwmuiU{&^B(==oM-fn*&Tec@*qYnNM#9q$B?4$68oWCA_AX*i=xN=n+j{#xPAK}6Dk%| z0ND<@d3x5b#?l@kK0?Wk#mvP+}87C?*BRG5PlXX=hqfvMsL zv9X5HzAt-2?o_t~V}fuZJY6}FI-!TNp*tx&o&D2n9QqaNbgqAWp^WRK7KPa&(lfrb zeRB<8sLe740CI~=9h8|%AgnSKK|w&iPUNXbrbG<_{tvB8O_9bvQ;B2(GgEIF@qNZ> zI}uZ~qQsDOcS`Ayibakh!;NIFgm|w*(vex=uxh(tD*5Yg9aakrL-{P0F*6h3y|XXG zCYy>~#vJp#*3p^j%J99t)Ra=t1zE1L4wT~zK0yHM!nmYs*RCZ?^kzO%aKR*eH4w!I z0tp#&#mKPeMk|3zu?kI~lGf!4B1Z1gK zMD|H|M$djTYpxC1$inJ_tCkmBmG4P47dQ>s-eI#F0Mq{QNl~fdTSFZnci&SCr@yNx z+9=FO7&eW#4#%HnW+lXrOI#9n=aJ!bWsT0%Pwz5isi&78nqEz>;{+4B)%#Bdf!EYh zjJtI^nO@K_M6JWKh3IOz7j2**O`0~wXf<7*?Mh2hgw;BYra!#_QN^FUX-s?zC6~i= zdzQOdQ#DJ>5JGR{{XXuCZkG=4_RXbPnDH^k&w{jOL;qGHY6ggV!?^#hJ?J7q18C!-MV6_qGJ{eawz<{mLm%M95kUM1qXpH-Vh`s4YsDcxA8bH6`gAr07vTYj zH5o^Mx*`U}6eU7XmWdfZRj^8;5*`U7A5xR|tHrDN@U0cU!8elAqg}hm6BZmY$@HfG4aFWf15RP=HU-cVI%G8{N#wkco(cF zM8hD~j?~(!$F8xH9v`+qUlMHV!>w;d3PBkrENf zNr}|z)!R7OT_70pWWd<4^p^y!2fb&r8cVZR)hkRjn43*|gFN-f@F2FSaP1m5_Q}5+ zjHU_DT}`R{THqtvSx)wxPCjL|6bzsaSS?q~f5Viy78#F8n>I4^%g1ztC$WZX7h}k@ z!8`ilx(am#>D54R(>o`~f5k10q$&M$efzHhzS!B)Y2rc`IPrXsrI~|f3oZxpD@<=v z(aS{pE;2jI+*4Z-1mX*@Sqd!=kDN=@paQ?Sl_AV?Fgs*n1LL0cL-8dB%^+k0p!wlH z-=-sb#~K3%ftSU40IpyuKo(HN>ghdpa5d5ZX1}kWn^dOt%{56;SnZ+|AZY&#BPi!k z$Z{(NOWk?R{8bFqJ5KhRubTD*IbKK*l61g`f zE|FS_bIErF_bdK5b3rJ$z;4Y6hONKuC3g|!B@ll0(KA_>P;rR~96AwoND9`ICO!H2 zwMn>MKlQZwCFC%IaZULcz*d~#jim>_n+Ljq%(~Wr zQQzT{WGp3GN5;oAV&CYRZi`C)E%{3tFm5#kcGLsyn>->D9ErXsoiL#Q#`^CJNx)i? z{%!fIiZ@u^m_lleWB+Jm{X^)W9{!$U5D()qh84XlO_D;x*WM|Keh)UT+DoILq8KJVW8e|n3wxqIUmXVTC$ci!| zWxJn8*Y|tf_x=0t{{OxnU5`&0=Xsp(#izE`gE%AS{Y?a3@B+^QpuErs=EBAkX z@wRGR;-a4IZtXIV{^zciHZ`#AlH*W;(_b`Q74 zm>s6KV>7HKDx8%oReG1!?>gm%Yk93G{*{c9Nu6<4PW}5=L3&{6_maPIs(2)WnyLw- zCjNZf{j--YFkL7Df0oNnQ$+v!Ywqo|ZK}kV(zE45Wl4Mg{dJP1mz4SM2XpQJ{~!GS zusqA;Es5gPk@c~9(lRn3j~_cvQT}GKs_@+{%Dg2}-1_;H@p?ReYBD{SvQSCN$cRI} zr6?sOrBDf{_iaw4%_mQu92pt$T`12H-?Ma^OTo(^XF*P0zNVwYBQqeqV>$zKwl9;n>ti4TaZsj12LU7BMRxA1t_ z<0gBm!gp_xl8DF_Uu7;_jk2<`%&7o#14F}VyMn=y5&p|7K@`+gb_Mw3KK}ke9{ZOU zn*2Me8yjix(*bJ>7H7kboITV1yuK-J&l!cA#|4&@^*G&R*+N4@StM*o=f1vPI_5Yw zI%>N#H(Jt5S|5&585|rWDJdz@P?9GlC)?KD*e>Vu$L(1?KhKWv!SV4hmk*B)sjFYt zO}y{($nc)v{N22dgT@*?ZCfJiuNeykU%yWA{{F!ueQAClzJ2>fT-J|I`$*X(17&%K zM49=xxk=Zlwp}YIknH;S(e~`wTOXfTWITMRnkLnpdaqf9mya+0^{&8(5w3vsr9BlJ z>pt1p*;Vh}ZIhLiwJGYkvbrd#rmkM#+{Vc-AW&Ubr!jHJZ=0>H?dOs+iKeVUq^jXx zvnw-|_FrFnn@*&qrRnA5$#->iF>K!+R9w8Dg@r}u?3T@&uid?Sw}$K6MxlP97>|q$ z)1ya^Xf|(FtMp%URmgNb_R4Et=r*=c9`(>JFQJliZ7E9a?d^e%h%LLs24{zA zzxTZmCsozeQ7$bl8JnB0<^Edx)xf2rqf?cs8#i)J#eI;(EM`T+$Cr`EYH{iJ^cOb! 
zhTEHXD8)~`u1Znzk#HYWU0vw5cK7mnJ~dmR>-_$HP)Z7?yyti0zBHkexQe&8*uoA! zkgx9RW4n6wYT1Qhs@2t1_g~*Mt;#)h$C{132mJnW+?6>a7*}z+gO}K{@x$5VBz(Nu z#uz?ZJG*OuW15XGv7l)V1~X zn;N3H^0FEO#ZHzF{+a5N^BAFVYE7!nth~Cgx%>Ia+V(W{Gcpxz0?d&nW@eN!G9|2g zXI>2WtH1J^8XOu5(u`#GyO=7OYlaJ_BtMkpO{_THR=O>0>u&NhXU+r%2b;Yjv%IIL zrgQJ7ed@wU{&p7rT33S$GJ%un7{Z234eGZTdicxYQ2D%E$q)Ga(Xm~wS-IO=nO zeS<}mp~=|TnEU0+&MB|T%WImOH}}0bztJsAqU8}15+bFxS?Dcc>wNFdvyR`ExX_=us#ZY54cV2UBly za`L3<)?-CZX@{;;8CzI1V5xqsulXdGuP^@)_x$l`+vMbAdskO*WaJi3WnZEAh7TWh zva+&X`SWx4^4}SHLqo%|moLdk&riMz9jXn>xG?;7@YfXyS=qO($zf7N57R7U0e4DBc_1B0!cD*mmRSy@>_omu)U zN>_^w*3;8@MNgD&EiEl=@9ex*UM^pL=||{|8#Fn-{{H?rgk@KLrJF5d(^idoY-eLT z;ppb->bf}nYKDQGT{M0j$4^~b`*F=qt4m*JDmT{IK0G$k#+iTk?Aa|Y|3xxQP0hx} z#wy&Rw2TZ24i1jidk5%)8FHjNE?$&GrD0)XyWTLIh7Iz$*d@+Dxk(`O;Y03Xx1Jq} zob;$@yHNZjuguycFXALW%*qPEi>5W(-$_l4+PUYj%}8Smi=-Xp-2D87$*vq>Yl|3c z8~erCq4tj-LsYp{A0K`&a>`U#NT_OVw8i$^IXWy13oGllS2Gp0Mb2$Wia6D3sH7%& zr<7)wd_VX0He1Qt7g#pFd#7n*!#%$wrXcof;YT-%iofFTg@x+jU+smC`t0(aI)W({ zd8cd}qj_*o!*Dd6-rote1_n8W%6!I$#2n)ArGK5rIZ)$2f-Tsh_ zWK$o()LO95N8nIl;$l=crFD54s!$Hz1M`Psv_v&fY>4{`cVy}3GsgDG{%(ZOL5b;kX| zg`7FdqgovUhnk#yK0Go+2ieuy+RDv9)Bf0q>$UIFj$nqT$4fTfVwE9JZEF~;z8;g3 zB4++J=g8y1Z{NuI`1oe8DDFSo%lPEzvuis1xl)J45=A~P#<9qI3X!;#mDh{@Z2TPv zpymm7X}fcIebu|z66JRKU7#`t#?9(ScMWA|iy9#84h6N#1{_Ib6E3w;wC_+*^bzKC7XxAIZqb$Ut(y zd2B8HJ2S9TU0q%3%qP)8wDCgQTB=Um^7PzXz3DHn8b(Bfg?aI70)m2q{30U2!Hqpd z&a|XL*N?fv0|$2Q+zC{n_C8g`h!l-FCe_zl?4s_!I7B_B^vZ8Vw&+>;%w?Qloq-C! zCKSlVBuRT3lGORHik&5AETpdfK3#~uCS_9v)<8)`C6%`;vYuGe;Cj25@SS^NA1GXo z1)$nNs_X8yaJ42e=L_JEPRCM@`=A zrw%DvWn(q2Y5qrEx9YwX=U?b5S-T5f00>Y~Tdl6Gw0ZZaWxhyC8sXUAi(48&#ZxP19C zdEm|6(pthZC|$IX^}GWC8%jNId=4Bdu-lYlU8yW7BO{Qi>}NN3P)?44f`WorFsZY> z`wPzxDCgK8G1*E>OFM$47@6vO!7nd=cPoedo$Q>P7;Jh8Z>A->v5}EztQL02_x7~b zf?Kz42?R6jU}Muqwal@|<2(i|^>23`IM3#|j{YP_YD8Ud zEY*Fl1SG^KEG+DRE!A3zhDRO<5F+71&#B0bj$GH?9*u$!LEsBCKSAO|-8*nzYH7re zns#a9rOu9d;EV9vx9KDg9N5El;2b|S9XmH5m0&_bf@z*ez&XL-=$kjU)(pAdc<_KD zPH;!K4p!}kDmQmUMTG-4Y+XY`7!Fu;^>u3fDeA-)0j$>^yyhOCztaL2$69&5ytA5d@{>u}4I#IF61Aq3vJTi9W04nSE@86?0ZDEdnRWj;A1*`%r(zc52I@xx0>@WKHHaoWsAl3h#hT7+!>^*irpo2@hO^x zfti_^j+T~IRzrazH8u6O!NDp;i{F=x=X%b4c(kpcu#opesT+@ef+&xbSfb}-SGef$ z;u{2n`uVxwx(-`757P zS68PlEPr}-VyC^mJuh=)R;QtX!Ht(M4|aU|B)ElHl$&^?XU9u;-DL0GR|%+S_7G7V zcW`prL2^LLl@ee6J+ae!V_j(;jn2MwVe(^SvFpbO6dnl|yMiR*lJ=kfdS5rrm-fN^ z`$5N~T!$~>6XU_?}MC>qn+a+;rU0vF} zqM~<%e*gq&SX-ywN-1{R{iT)Eg>M>j6Cd_;=G&@wqMw;sSy{Qb1+Q-KTcH+g z-MTeNYZxGnhU78Y%=B_w)nvuM`}glnXI{K{qlBxe>+3t=n#v~Q%72Mm&U=oZO!a8q zseMd;^KENsKxJL?$+@vqioT1+!spf0$jMZR{u!@)x}Dq3)m3Q`z2B4zwm+I;u5;so3v*PJF z$vXk3&zy-v^V1tO|E;iy6(VD&MIURWA(sLe-<+ZpvvvnUOP$Sd4Q*VU- zy5M>#NhTH+dw@dtfC_kkWa67FD#$ETEJXz=sJ3zg&$M37mLIqE_FrFeCJ(HuuWxdT zyYEB8;{foaVQ6^kwfB4|18*gnoI@i+&8SBc0Tzo7od3!IJf8D5E`3`S;Ai9g?h!Io zL4zdKuayBtU*Gs>Pxrr6U;ZLMq_lLLp0uPS56MA!JhsiKM@3nHKlMn{imi*w-h~IBEBs#d z|GnIrBxyQ+K}|W2Ec^SvW7+;N$SmXHP)!IWsi2@BW?6|{#+7l%u215FQ(&jq9BSZAR+)&sGqqv#gYI(qMMZHFKd z-HXJ|d3bUBJ$LXRmQZS`WeY8BP(lJT(8xZfuxHQq0Vj^UHHqoS&>lwdE<@v=5AaS5 zAPdBa?q038lkEKP!Nk<`8aNl9y!>(Dypj?bko2I`fc4!tLtq0G~xQe zQpL=_{_H6_L=%&I>jz{L5fPD`{phZrdwRezbzHd!MhCKHeo$+4xFJgJ@-(~FE!O(X zm*wTx>g|Sr9pWpFrsU^Kc>esn9T?cL6Q4}NrIVk3&z@m?I#M8hE5m+jjgR3#5F4KnPX6MUtPSgm0gYj1gfnJyGBY{I@tpnlFjMUH^7P|^LEitGSN-v zHKJ@~rRe_=7Zu&AI#DN7V0m*ppO#L&y^T#Ea6pUi_!UJ$L9lqna+^!#_Ux=XiY({) z`ntq{17S~|h&%_Z7|D5m**FCNZWus-;6JPf&WV;7eszA2QZ(s}YnuHvtgZ9DiHSf4 z*REghz!ti%aG4HGFcf#K7ZB{3z7YU(4Q@nuU?6Es;rGGup+X=j8d7^lN6!9%SOI-w z+&Hd)Rg-D27@gqx@iu$ZcL`}}`_66Kwn+lPt>Rvpb3UFcE-Aq+yCNzk_I&>gdS(Y; 
z$M=beG{2QUps2j~RAC_5lHO`+YBnw|v|F}p$+7=fN&=FSqmf`S4m@YSnVT{CHD^FYR#{*RkEwVP&+V}YU*6Zgn? z>YEUXT*#JPvyj`QX1p)y_~!|iuby#m;9p3g4N<$vp}_oi`iuJU<9F3A-BB6OKJDsC z9@xKd7yCI3$L+P>%28L?mGvqRQN?9hflLckkZ4mZfgH52q^clXl8mO;k_8Vda*VKH@5Bc+8Rdm_-(_RepZH zs=K=c7TDIt#&~F*Ocl&m_WakIIN$mY9^T#W!W(xPg-yok9EBlsW6S%Z?JFiAx^G|dlw*^EEQw%j z&_oB(UnL|Z59#R8L_|hPI<>OATxyYru!MgRTPf$4Sge-g5|yF(sn=Jk?gWnAC>4Wg72{9ok&Se-u?FG?(4Wp`|4|(r1}Ob1EOBPR(zQ% z0}A3Kdl_7TDS}#P&A#K~M<$V@PpbfxhQ5El0hQ9`_xBDcK&IXR)LXf@xEPt3hOkpC zLF(q_<}wA3Aps`|?Xm z6OvhYd1kEKAjb@D*vGZBJ%L&iFK>|mv;;gRv44Nvb!vJC;XA9R<2f|81TKj{Tsg>e zm_*r}~+GdDJmMkc@jiqjC6`gOfm{1}~u-qE{uU1SpG&iavb){5Isi z+7BO!YAP$5XYQq>q?pZ^9Aol3Btnow)N*Q&vqgwlBKLLI`p#EYLwi#X-CEuM@?zTK z$JBIT*FnpgwklHT1-S9=`B7F@=338h>+Bo`a$VEiy-QbDm+8;pPa`Y)#NXs{MK+u5 z&o~Q7D(T=w!E4v9RdUcFLKR8$-p%lS@ecBDDAuFg)f=u`UZHtnS+J7pvzP1MyX zyu7^RE=@8@zj{?ZP%Uo$@u10eXf-GdS~FkYq~+#DR8^@0RbkQ7YPtyp9m+bs`xa*= ztJM6jq7`J41=5}M_U09eH&1aKhr6u<>LHg@p3X^+SfAail4M=@o)Ci|T?Y(eQHVA` zR0c31J-_#0sdWYpgrYFnokx$Yh@DA65VgQgdr%G$x1K`Hb8E3$G*DC^q;7{SFYWw( z5X(;J$5;RS3`f1cUOOaY^$GB*{fULRrKP0}wm$kYRbq=K`Yt$+@I2e!tN|uX2NEIj~r3=^xQwzmfFVNV0cefQeT-Y zOJ5}ZJbg%5m^}&>w4iF>497`Pm4Z(L1J+|&WHB)@1YE%x+jHQYCe$F)M=|W%xAT;k znJ*2elas{6#1@yvQ!l{VKKYZz^8N?*AJMZmQ`lW`yeTz;?x`9PU zMNiO$1d!NtqC+}(;%UArvGy!y}v2^AZtOn2e<;>t?PNc`X3=OIJxmyv-plqJl=b5B)ltp0{9FnbZaYC<=!=hYl4P%B?3D9w+RRckn}d1E%3iZ z1T^mle;a;x6T%*Z8D1C$HHvUbh{{AL$Iu*3j?s6DE76~@v?K|wah5<3>U@pc z8G>_xYPaWu`d0t|8p>;%D-WQOyD;ir&4LP=DfTA5@;G-#&XGOfr?iA5h}PEM-|xOS zYYR!MpvE!)@{!w@awgE&YS0@vYwg7@N1)84qcocTsVM4k`#t$_3ywxj!oCyvUQ@?K zPrYWr<^_f}Eqki6zDN-g5+difTx@p!@TE{enX|pcS}GZEXbx?xubs(!I=M>c9}e?q z7-)`W+cr(~_T=C20dss|@x-&1AE_{8U}6eMQx8iAc#*sMJG);cy7`65$ii{zO`C%1 z?aV*ZQmk(1S@jIZCMD54^nCjQ>b${8m|{!`)T>2XTR~2sTtaQVLSO^IBTJP!3%`F4 zqCemTo0NJQPkhZ12J&Ut9D=s+tpC^Uh&y{-z9igAWVi;E0XOsMctT7JPrR~S9nB35 zUI-m}V=}l&QcF)du^4WBrQ0A8Jt@AhPy7!y1z?c6;0#v7eQo*D&w)xVfI31om&9Ff zD=+D)3*R{euMma8B1wDq%wm6SEdy}Kc89_sEt{v)7ZX^|Cssvx2IL5KL-^RRjWI_X65x*8R{0UKFDbg z!ARvizcZMcnr4>z>55-I=I!n6(MC&3`c)9W;Nj!r!zU@Zeb3?h(F%uW^xJB2zXm;C zZXVD)-4rVzBqsJ2pD(lXAUhp&i|fFmx(j8vbIhpB-+N0$E*?DIc^dmoIL@e$()1W) zqF8S({(#>{ONwJ6A~rjDngO4adnLVS}3Za%P7pd?BNkRu^ot83mC23YfAWHc^s+A7KFpMx0v(fF?2AWKR>IX|}L=CF#=}C6smXnhkBpfhEr3^cF zURU;8Ry8+gGD=Y-FlXuc0Wle&Q`;SPop|0DNJasVBLYXV23{vF`17mF;!mGG&3wxZ zna+NqGi&zu@4BiW3bH%TFMq^`Hb|Dr>WyPGFfd>&XVWvR9AMfl=gxig_xB*QQNDu* zcjK_i1*|K=q1}WkKIV`|vuoF`t|u1sJUl#v2N^Of-?AAlG}LQ{x6wLVS@+YRW)htX zyy>wqJ2bE=K)2gqgSOZ-07!%%1%02OfRA=Lp$!o?Kmx*gfBB^?U&=43!Z;cP}k0 zKQ-(#xRcum$OCV2|9m$(pJ4&Sa#5=?2H03EQVuleA5hGL3Gc2mQ#VQ4`Ebdk=nU*Z znC>JVN}Shfkh@R9`FAzATRvnt;lJ9QTP!OIs7OyN@|-@k z-IH?M48=4b!Z#x`vz`pg3Qjk0w0%u5Z7Y`#WI?-c2{c=`*1_tKggiy~M?F0~IKHG{ zwKO#)OW;w0o?UFZ3SkG0Kee6eW#YjeEFg@s9T;7=k_K-^2b zWCtkTqszJ-h1eeyu(KiIf5UcHJ$#ssj$IC+jyHNf{w;SEE{omfP$0Zx`_;u?`6VTq zSFg&05#9n6Jlb={>?mLsLKE#7+Vn5`X8K?9Nw<=O9@rc&bv9J%m>Eo*?0+fk>FG(> z_&8bDA!Gn0nwOE?fAC+QYQxIK;^9sCKx$hk#d&u%C6@+@7|2(4g@VEdKfxQqI~%Q8~%1@ z&gd`tDtB~s@e?{UXrq0?zHFTyM;qbm9!Ew1qX-*K5{jK7XA~i-uL2>~)z)55N=lND zy!^dAXuoThj_afX&qiuW07!w}kA1Hg)is5$wpEO*J^xyKv+N?+>XjZ+sF-(Z^pEyhaoW)gEaZ*UDz$%B@>@vKr38y3;su z;vV6z;3n%m>)R8Pn5fMxe#!u}T@Cg}cu)}8ZdtdaE>jlpMSkKfaJ|GT2LuGXyrAg6 zngr+h0o+w`RpCc3&JV=G^4tMY{!t3pK@140emu;pEmDwfc^PPQAgMON9!e63VLVAS z2GNRe4>gV)i9>^700oFeA|Y;L@z-!BL=PO_O z+c!H81aqVy%?cide}QwX52Hb-Bv@<7(T0Dt#plj|ist)TCb+5b$JLLdr(bNtv8M^E zjX6dY%Al5>kr58@tfRf1n>;8Gq~-dtw!)|Ah&!O>K%{zuCSUcvNki*AUs@zE_AOba&~ld@Sq7ffcEo4(S@Ih ze$WIvnYhQ$W$PLnZxC`Qzo4LA{4*W6J08owT{@9BAvP{t`6ke>WP!|Pe=lK$?tsmJ5S%b6D{W+7i~5b9&aPN{k8bUF_J#<~*wKb&af%Gc!aKBk9KD4j 
zPkfy*g~T+`m9ukbeu;QvL@%Ns&>c>@crSB3f^#lmAh(f_ZJwVvkudT`5pt!=mvWhl z)8dt>OgZ20*YakDJ~uS(I&d29iQJW0?#|B6+%Fe3{gUAu+2a#l*;s$mwbg2}(hwOm z1aA1WZbmEI30*!fw0G|z;FT>HZ3qIJZw+NMpL77>I1P}5@zU|fhN zAehz59momgcC>hPy%11R;s8m!^D2dVVMdiLuq)s_ZVa*}HwpO{MP>7?>P(^BG2e}~ zzdA-ni9XlKCMypa8PNl9Cnk5PuK5W0|DcIjPQ3E_`*z^|hmRkJaVU71mA&5fFJEcy zxs{vvi-{SBx;WX9|Aa0ji^+6|uQ8f8v{U`}+k1k1$sn8p@TeX+Qq=5o$dvezg|pnj z+zJY8khP@#);$+FO)Gb#iO_#9v2fOBX)del?71I1dP%F~&=*L0Dyp$;asS?H z;cQEDGm(dzS2(q+gKBVBwACM`%_0Bu{u@%4ycdstc|B|RpQ{P$_`k1)E%4v9nq1n- zL6Pkc%mAX6k(rrUs-7k^$(E6xPHt}gQC^8IJUzSj5L=+0nWg0o6b$6PkZNPp^vi(M z3~;xZGs>bb&6v?Vtexs%?2URm#jC$*;DQp(6We!9BsE=#iiZ*e=gPt)6Y@wjXt}3P zpYHOqL^ySOFbNDnB} z9WE^}OEYTj)$+J~1z7=rIQ6ib#*7Q*?VN~gW}gqNs8B}N{sk%d>c_y`dEng(bIr=H zTl9ib@+Cv%R!L%sx8t7aVue2H0(7%TJ8iA0t&Mr`;FeLU@}UzaUX|9SPfE~*QGjni z3Gbx~Nq7CYekAhON?6?D!?`x5Vs@LhycLPubp`|0*N?oeuh6G;@2R}ND_MQZnd^e^ zn9MoF6`GhNp*Qc*)$InWf+VD*$Rq1(;83VMxer(x)G`XhaeVrZ%$ZMwPr(3BGJS_u@kkmkoi5xFgYQjgz!ka~ECLi=jxh&J681(gRi z_;6?T7H)82dxbvmi;j5yL)^A2=w8wVIjRi0evR&hME5_3a!r4{d-pEn)~#)TM+=&x zNU##lM`Lrdnwi<1)<1}SZRJvCM_Ei*xf-KG33N&biKspREE-TUQUBhK=<4ZRLoSWL zwMj`Vi_6P)-|E8QYglC!z{1}Os~d7&Xi!j4jdU?mbOhLhC7fNO28vuVJ!%N+6H6m~8Qu{0cWyTtK9jt5cBK z)#FZNf*Z`wy0mPN*31WzM3}(LsSjmtc-(OqjwTkh)sbdZXP9xNcuwMVnKUAxU~beZn9Q>UYrhJ0tUraoSpnA za>d8T9_<;CBfB|G4GnH_ar(oD4?blP07T1CEAHF(tWyz$w6U!XIkd#R0P?$#q_Yy+1tM?S2i@%&gl`h5 z_1(MeyLX2p_>c|)v$=kCb?G9B$ORTSwlEWS6BrM7R>RJY7a>k2;jna}+~*xTWvLh~ zH|ZLEwp^<2A}gkjUKRzguK_UH&cSgDu2znu=kDFRA@mT^RNUC~wAf#%{6bVoA~)@A z1^uOS_0OL_Qfo?)>UP6-#=$gatc%kYo{&d z^2y3FzkK-;_e00fkl<)V`{d+Ua53;*?QuX*xpfQ-g7G72mq6SHA$gj*oFAw_B4%A& zSLB|y5aYGBar^d%hfFW*cl(@wRpGc{?6C144>_drpi-Kees|o0>x^)JsdzJ2g;K>& zUZCjKf>DOM!y1pLQWdM352+G)j!k41S>Hw*yG20h;=|Lkq|3YR8o8VX9o|prYNxiRJRfz1f;eWl$`N-!`H8zNC*%OB#xPr zlP=A;aew~Lsi_NyJLhym*OgZCMx!N)?tJuR-Lj{Ul$sS^6c?MxIh{Ch0?|Ewetrp; z`ucjE!-qq|!^3MvjnU3D4S%0#G0YR^o$c6d&`tLAP%-r^Tb*su;DPqdC_z+PqVI0y zb!lGOR!kmtvS^z^`2G9rihj#?yi>qnQ4c{y(?NJ=|NIF>M^6wvb`1egPyuG7cd9|Q z3DM#E_tTV{HdW&rx&0;?TDGIj@s@9{%78XT$HrcR;DjCyuE!%NNJS#ZIeP6wG&UUK z8q`lXQdEfR+CoJlrUtIO?yMxedGn?KJe=_k7X{%!&(WUT0q5(+IM%2m-k z>I|Hmw{cc-3W3_96B6FCr;EOgvD%6KTph2SQs_!)~)morJrymKdzAfPqI93i^+7ubh zYQ>j%y9sa(Sp^vJJdB&ON=HXWd6bno(HMaeQcs5I7`Th<<(Mhl^6+s^=xsyK_)qf| z-N~3ofO&)*SP92w^Dv$FOb|ziWBKIClbT_Ny_1z{qC0FqSC?L?;rniq8OCvH*AxGa zUD-eXzIy&JF|hIiyX=-fhCe=fu@I0jYd`h6WyBsu@(RnFF7NwX74FLi9VSmBCWCT6 zjqDm5stfx-BJJ6)J^yxYhEDUNXfK71$p#L>^CpKW+hVQv*UVVEacvO>|wERn6wKBz{+cj%F>VF21a+&%LFArvHW}~X7lnz}t>l^CY z`#(6g%OY`AaQk=@AE}CaO4)eV=}#USm$sZG{RcpQoT9lGY9-5N7!lZ{{;3-t7Ty1T z%e-E2^;U~;9vO*`^`*L|W=R5HuFwArH)ZF!w#9D}aw@Pf&%V~-H`aw;+<{-bWQw5z ztCJ_O{?PKHV`5DDc+4CBW8~#bNe~~SZd4(y?EO0u>5An$%6qjrUjiqEfSx{uYpQn2 za8FxKY}E5+S$IQXA;J|v_UQ2lS&cZy&!ev*yk&XfM9Y^7B$bdX+l}4={mf$?C$|b} zA^wR0j3me`7+%PnBL<14dP|UERxfgT$H2f~?C1u7ROU6sid4nzt}Zj=qxKdXt^Az{ z%gC^g-_`)SQ-#j61yx#e$gYxp7Cq9E|yS?0~?(0 z$@owudxqPcsgsGl14!Td^asZt$j6xvdWCv2sG*;=mBn3*#lnYWZ zIq-9_{!7{8qcn6pvy-n2OKWrbXcl^KZfHrd$;o?*_8mBoh<6KvrtN=fh~8-Xx5t_{ zQuF)OpZxwc+m#snT!J(LpP_XgQh3eyysoh^L!8%0<932K6Bs5lyxz{v$w>=gc72$C zguEmCw02-170ca}85R<79Ar`Rg-W`Lo*SwDpk`sxWkm~is#l!@+vB~m7H@_IN z&?3@OSC?5c1>MO3GOPn8tuj%*2#I8MwwAtCa&~q$x|fT@zjv?h&~i?%z(EUk1+SY= zPrgb(lj`!)Va#GokKHl%rQ%baeAQipFEf@;3e5hjoU~}?tjY^#cqsH@Qu`3|Y`5Z_ z^@J8i)8)#ddHwmlrkpVr@5c-B^Q(ZHBpjO=u{#Ih7@|471*o@mbgUnVym3Pn_wD}u z`-9Wdw_&F69XRmf_@ysp7bqZcT>}$~8$+J!=~GeU2&0>ZF>tmZ4~Y-DHD0 zS7rx;Y8)FcqGdx;9|r6qQdj_&2uK_yks!y@;ia#5dlMsCIXOA-fY;NPiPPJgCM^=v zxKrKGK;qfo1mYBA{3Tz%8!;bPq9tR@Kvwvbq$0e?SNVNgm3dy!Rei%`sZFqo0eq3F z)dX0RKsq@`5%O0Z)DBojHfPUnL(lMHEjtpJJuE@kRPtv%Hi%a|%D!Q7#zw)AQ 
zg#m(F<*4HTq!JPm(Y>Ft^mh^Hmt=G9+-<}r2p5qMQH91}>Qj&s#I0{b*{bU5Vg&vO z#b^gq%rw+= zQZi|eCzU>%ik^I(_myH1K1bQBR~aDJgY!$^GfYV2)Vl2nwn5qvi3v)Gkw3jy?KawVo%r7x5nNtkaRjQAFBQZ1n9Uq$5_`D>Mla*tYHJiTmR-hs6=cqep6g74% zR!+i&K*yC3d?YZDfo7AeyK{4h-mN&4*fUFa7RUm|PLY{?c0YP{OD&aP3C9jB9tozV z66_Dm&$VUBAm6)mfQJ&%fLg@wBrsb=sv`Uk#DMQBdhbA?(|{LZ3D_VG&UBB%{S)p z16~j@C6?Us=Ex(1YLEdULnmw&1#0@~>9OnJafJ3q0@(d8DR8`cuC}%|IYkkG^0ce4 ze?ZfJ*}&ogVa)!zf-y{0h%#iP|78SC8%9@=60=`lL%&BTUneVm7aN<%CEXR|3gJxI zyj{i#zM;`<&3gl|s=PFHwwjtr_P1VLlH}auUb?X528%b8utHpl&kt~-vJw+vkfn)X zDv7?h*jONNL78l8ef=#6?X*5zhs>`E-QH!DED)#mvzu&DS0fU`L>dTiLofFydb`tCdGdtuYjPMYD2iGBGDP8{h_3P~Xd@X{tF|Y$fFG~NT9JE3wrJP}# z5vnazJWL)_!%WEU$+EV^XcIzAJn&SJIkpkggpT_0QKCzm`zc0h5)1#&O=;zanerZ1 zB;@r{EP~P#sV*R|nsm)b!gaz}0tci3wByz)fA8zdEp*F?zo~R_lT|^%g^hYr>I)qo zWn~>ArWl(jVx>k9;n%RW<(XF$5qTp2%3A5@^}y|wci#VbCw#HT^0Zm+Vi9X*aW0&) z>zqngCEA?d-Q7e&Q&&Y~ZaXt`C`!p8P0dXZRSBWH9Vu8?1|&2jaJp*{Rt;Xl(+G)* zqQ~cyz(FUbF98r+|G>qd-lVOeQH9_&I~{cZMxq6cQnsQi!X?3$rVz1M517hEre+YP zQ4%s{XqvZ?3>q3w4G1kNns=UB!H2&v{+vM2wGh()c7)do{=)9oUDkH0`ieEGv8u7WqN9#YYUp2EnK$ z3|x(FW~lI=rsSlz^$5EqF_Oh;pM5&zo-I}3N?|O@Oq2~GkQms>wTiw=C~Nrj#k*;~ z2?*5bt}Ti@_{d#VljUh(^ZtaYdZlM;YOMGQ;O{lFLeJLE1)U-AK^RFef09egU2LUt6kg3n|ZbmJ;*YAl87$Gt)mcyRVmw7z`ORXLiD<5Op zA$Aua?09&1g!Hv=W<)3lAXhS=H6x>G(xvh(MT6(s1MP*Sh%*0dgzav?|>0TxV;7`_+ez2rHhoNpnQO9=^_u#cdSfKv_ARd$NmR07UZ;G|;Ju;NCzpB8 z3x0ZjG7`s>7Yz}VqyYz>j08mr{f7!!5V@IesC@QM0 zTamx(06)(6KSE4}ds6we`h*A%nBIx1hKJKTT@~2%Zs*BU<>TNW)Iv$DHIep1A1CIW z0dL2glvqp5qh^M83hUGUJH>iLd^lWAtJzFf|+xl{yl3WTYj)>l%niO0=+Cx(&=%F zz9C~Ko0w*;l_T-Z{}lop#i+;sD+8Ebc7G@oA*}E59E@8F(jrh1(<#vh=@l||F*E~Q z#r5_nG+!b-3;KTOoeksvr!CZ5R5Gz+*Jzq-7*?*9Fh_aw⪙L@j3S#`$q479V+Xm z(9s)A^%FZLdCuKTFzmDVwK-Sze;p%hx4+>puT*c^uW4t%m0tu4E*EYZu@^P{Vp{Ke zuErcGFTPu(C&KdM-bW7%%H_I|-ZnJUwzbiNA89~Yw#7^VbYHj@@aW_^vsbVa1%ax$>B{*y@C(dDlo~i-nk)ZFEq8bav(dO zjD>?^8~k@dAPd-7J3z!tz%tFB9IgKHg(H}ONZDbJSput^r4bI-5v`dj?xXSH4DEAy z!LuVW7Yax@GE!7@?8H+O#9iHA~3YtTQ(cx^IovAUf zjk$Fvj+VCVTwGCvqky0bK6&K= znabDY?dRv$16t($f|Qv(tw?)z?#cRi@7}#FY;0^8F5^Vl_KS1k&i@epL6{SWq1#P& z@!}y2!4Umxp_`)o-tm5;!%z4)#FSZRFENXso7>!PB>x}E4~nP-HZ6AQ^$k#dfK~=X z2bunt^&j>5(tx{8pgI)wdZdO%#}4|h$`NrOWFvq7?AeL@1lZl75JzD6-9T|nU+IbD z1}PBe4SA8hjN?FHvt#er+?q4g12TO?h6;VYoN25E#RLB9laR1*?&%}9MmEH;p!_5qC#OfPrGU;J=y0+3?yVei4BHbHca-eejZ1+gT^Z)8 zBAvnynP`pZHacgS$OG@oUAX4W$;D*^SB-Y_X1h)#h}4jDX#_q?-d|v8fB|q~+_-6V zX+AtL(IVs$bVMRUlKdMpIvCs1#Q<+yDaMw088_uIpkVY`VVLAcKnTg?o5j5@R`TpLH z$w^6#U&hAA_b@#f5%B>b_%<@4fm^AKdDsc`?3Wl}&RSqsx_SqN6;p#x-QDiY>Aq(| zZkP`l3jvJV0_JP(6dBW%!@ms}$o6 z=FXGLYu5I%mHqVPI5l~XOnH;a#@dx1+77#%M9esIs!J;NiHZ_U6W0AE43H2;Fz!}? 
z|JqgfSCUH*Wii)6jgt&MI*cJ`JS*YZ(US+x^{awIKFrH&%RH!{zz6jS9l&8u?x619 zP6RUnzwJMkdm#1Pa|Rfc5850mfL_d+V zVoDG=HfyFi`s6F=Mtb%}{)3?+<3^`6O&+#ktAtOMFYP3&e*_!pynYbcO`FQi{K4mF zSFvyaZDCPS41(MUov0#Zk4Oj6Wq^Am;mhXqm6^o1h&P3u%s=^YH`A=_%szRp>8HkD zvpY-}vOo2I(ZSsIHS8ZO&h?hjbhIqghr4Dem}s!Wt{_I>V3`tuZ#6YMvmwV_395tX zEfrGhJnE0$yn~}7%n&YLW$Q<16({jr9^CeSkFxpEnT1rKC5ePgl{We=vhfj+UPKt@ z5u?X!R4)RMzWiqK-Wy6l}jVizTOV@o{@zA-q8{^E`XXe|d zR>n^E9?jF5e3+ZG$rn%rRuKb-yk}r|I4w+8dbonb%m}1?76mU+gwbY8?6N9&{tw@* zz~%q&%`V?h{r~Qp8B`QX!hxj6PBnD_xq!m{XX*rWB%*Z_k2EnB(tx}GkJMq#$c_6*VBUUDN$XVX|i|75S;&&qcG`*rAN1vPO`8s7=k_4v`x{`~ri zH76`Lo!T8v42!a_97gTKachTA1;PO3rAYypK_(t5fwEYkm%riV%cPvHrEQ)(Gu`(p z$>V}v{F2w--fbuRPJ~Q8b3r&4hi22gcUtH=Fin%sqQl}THPLr}AEF7e6a`L5ulajW zMna+rZe?cY*9yNDx42d7*rT#h2n}GI0+S<|-%i7xUhpcu%m4q72y)&Y0~eg1L2A1- zRfP-BuyN|lEkBS20|s+N1EOY!5iK zrHZ3OF`u?N*h@y5%uH0U%xgq9%X?jUnbOdWHSpb0H#K(i0zDrR=7OT|gpPDnG-B$e zu&{6zvYvFG_HFj2R^!avw{`{G#!O~Q-`%=)FO&P_b8k?*C*MLvPT?s#`PA4u;yt^$ z8RRO&^$PHy1vtAkkZUjr(E5isFi@UvgooR5V!^(Ddd5b_A)o24ge$)h8Cm=4^2SD_ zIG!e=mKN<|)6>^C{H;FHG;a$%z4O&M#7sNjrj7B;#JV3tg6EF+ITx24_tyXNf16I2 zNHO>C6V{jDGdBYWxWSm-GrJ2^pLh@trjqnq_9V&(^g7C@2DA*W%sHIRG?;TxJrMuj zk5orKMtj5TwQs^#f@HW|8$65mU@eP!fkbt0h&9CsRI|3<$m7Xect30w(bXa z3cbYRa4l`G=A#hXy1CsH&`%)dWH4rn1T9@)XUa3kB^Ynmj##W|6nsV==#|GB#03OG z_FC}oki3M4E|5q>P86wL#P_I(csTN21jEA2_e@ z-eOWd5ai+zB*3I`XaVHLS9|aju@ssuTWTTL;;~KS7=R~o%0&DgRWJ|*8_kWdX&?1N zWvXdtpax@S*?*b=-98jB(a8rS6N5h7UdVog)m9TDt;y4}g_%Kja>?@u467W}W^=sa!uaiP(4?1rd$MlauTry&CYb3q82S&SWm%{}2PPrg?_Q z?BwLQ+^SbIG5dnC=$jdN*GXc7LaxD|#v%;0%!13KEi#s}n+tRV&ICiKgtsN#qJ`)) z7nERZZBk%t><$ya_47NUz?e%Hb6*76F7#3wNG%}^O{QJlrF;&8bR#DNLv?QQ ztsQ$!EM$t;y? zdb*r?W*@Uke{EJtTl!$%zQPb0`*-Hd>*?|vUv~tiubXtdxq?SAPL>zbi~q9t#SvH= z8u?JfvzWE(3WqS{X-7WoJK4F{6F111X)805p3t2#VJ?urNE4}%NG}cg!xqZivdM@hU zaqoHqth~)g8 z`oz`7BfLE$hkRovhSU={@yZiho(u3ZEtE0Y(mEC<9!q9=gSa z@9jMB=Q_0hB{7BJ2x6CyG_u>0^tr~iw#H-W~o?fXVAR7#nrR7hlw&?q4)C6#%~)PPc% zN>K?(L?}bX%*v2drpPR0N{SGXc}nIa)BYXZ_xJ91?e~4x`u4Zie%5-{dhUBYuIs$c z^Ei*=|Nl*Y9&DvojsE#4l_v!}cZE$D^zWJ(wXIZ59q3OU*u3I=(l4sg*){4HzikM2 zz9E(-pQ0U1UyQ@X46h~8y+geblbY&73{?3}TEiXjY%?|iD4p>#z_J2T1ker$zeWWp z1>t(DlQRX@1|hoE#R+w7tC6bmK>@3yiTBVnZmL5JX@gaBx$zclrnZEzI`u)_ha-GVgu!R!*{8 zzrFrH65Z`|LJVmJ8LI$-+*92A@K1@P*w(EpK?{SRLNr$y2wLnYTY?9$;?6g};D%%r z5rg+!U0p@bqPu(tYdE1zz2evFo3W5t>BG40;Y1=Aq|R2Dq3ieZ^*w}A z{;)rf@#!T16lKr{#QrH-iT??Kl;vg5iR0BZHAH!JdtpKaZ2(jcY%>O`s@`_f<&qTF zpIh&pw`@2rGJ}=jnmG{mJ&8h}{*y*ue6TJ&F(^;iAb1yLabbakOswOQUV#nt#rT1y z{O3~x6F2Tjs)G3ZbgZMWe1xk|_40~eD5If)tpKrOp1uF}!g|n^C`{G9LV68WYGSZ~ zU(lC{8iXDxA&8L`4ZeA`A>toM`W=LYRlYrFrU{6Ddwz5`$v{KXR+gfZ3T1)>I3p?w zdG`U*nW54^PTXD8&L}PJrJWKpNooY{4WOU`tTluja5Jus$@IMRe=4!m3dS);z<=~Q zNr;t#CIF6H#$=O2=uV*REXENJ^7SwDCt*xCS-&U)l!sUyV6k8o=Qx&9vJtonx>mqS zW)LK{ps)1U|JqwvT>Kr>Rv#hwB)B{Q&m~P)QII_pV_x$(bjJmjz-6pR6nO7YNwA47 z!>&5`?d@*jkp{Q`SpnD>Ls3RZj8S)QM>rXJJfO_o_$o&jV(`Ai!nh7Rs|O#WX}69? 
zZv7wZtRFurqJgsQDqU$Y4{igw^u4+-#Emz^qZw%ds(;vT(;6QCXX8@wNj6o5 zpM54CsBC8Pk%}K^|i6AlmOCiZa3D;vB3HmjwFg1j-cQN;o4v!M}IL%uJEw^njZZ zeINua2V6VaZ=BJ@9n{kcxqI(k0AwcJqocvWt-{e3N524Rc3%Fx9Jd345M9|1VaYHwu)g@cq z8D|d@!N)IODnNvgJw(tMr-@qq2)5tTHa7nMY9J^CPJ6I%w_lYFq!s{;`{%G3+jhTO z6DjY!0rAp=y@zm&h#T-H)D)yvU}Nzlaa~hKG#Yb;o_!oTqGNvG*}(^4FHCo>OWLya z(3bWkh_*fo>}L^;qaJ+YOO>O+pV+8UrPsCfJAPs}Kx`e3hkz9T%eYK9N)31R;Y4}k zvMdkd6*CjlW{0Cv{QNXPrAS)uC#XvLOjhY;LsftmY`Z@&O`Y)0jt-N4r(3u3OJ|_# zq7-*tiu;KiY7+QhnVsWXz{JMMZGu}Z6?TnG|1!L15rK;N{a}@agC@r1a*EUX~Yu=4bB>V z;H{FTh3^pHnr{08BCNJ+pJ=1CTegb|hAtJ@S1hsja`6B8v6kZ6>a*SU-PVs!wsVO_ z+d16*Bh9{xBBT~dfhYj~DM}R-wI}WQ^Y4(;f!9ITJej(3!3JO$TOM0# zN8(!Tb?ek8e{ECry0nU8E&3+)u}_J*x+)OE|Er6CfI^Vy;$>ySbK7tHBaA=QZ$B0! zqGgLSY~gqOnPYV5f6hX-LUi%d)6*0R)Hb-MYBV+E?Xf@a=(y*zPhxWN8l1NtvuU@| zX916=&dgilyWeAjjp++&BRJsV0OqJEKd9-eo?eCbpxW8k0I0R-E}+)H4XS)|P}-9qI9y$x zL#ku{cgRE=5DP=dVI)(MIE~+Xh<*egK8O$*%6Aa=SkvxG>1ioOxE1}HHvxa4Qb?R6 zKnk?oB)1*fJS4-_-P$DrP3c^X=Ns1*luPRx9NZL-8;i`e?^~w!aN7Ejg)P1$zNk3a zly^o>E=ZG8Cs(zSGU+G>SY9S%5>E)x+5UK+8{WDKGy9aep%Hf6U&d+nDq*AB^0eWg zq+wEKg(k6*UHz@SorHW{ z>d5E98YqXZfH*ZWt-ozRD@jmt;zPo}C3lfH-y$M5Vl=@TiI7v~+*qQO=|xN4*v<%t zgbFF`(V-zMH$Mn6h&&s|EN}$#8?-g>Ah<_I^8&U#XA!~4&MuYF1+6pLMNrm&x^eeo zO(?vu@57%h{xyd`Z{f&am8Pr8y*HYU`Q{KiCFbaY#!E$Ym+Esbn}R>LB&4XfWvDOJ zrR-b#=4)T{J{1=3O^Fs1A=y0vwO0lT1%8MgwRrQ`1-08lz=4QD{!lw8kZ`p>H6=w9 zX>uCiIR1*%-h!b`jghR<|E<=;zVeY zAf)F+S`6PM3E0(Me@rLC)yK!@J{l`#IuC4ao}dAu$wwecBcQ4WWF-iy(psi3a4I7| zJRqFvHZmKC1G%(k;z(=y5_-C$h|xx9lMvKv0zeY}#2FMrigON1(|3)Hz_e0WAO@C3 zq5+B!XO@2%Rt5%(NtZv<_kl|WVrO7hN{L#DUgkKq_mhCik6;18Q{h$n;8ajV0VsMz z6hkT{b)nDFUTEvqM>w#UVDW%Vu?nrveRp^FTJb5=p3KNPM%`BR;ltyvg2~So;ukE_ z#AVlprd+HTI61gB?2$t1<&)1}mgQxa82Z{~Of2BN^&;?BJH(Z@C2Ii~STswPN)0(D7Hn_{D4x zri{ZYJV!#0cIqySWZ-&N{qS`!n5$B3;yVDkoy1@T!`j?~W{azyw3*@kbR6%P*L1yg+|mA;tJLM5{jJM? 
zA=pBl zE7Vu~u?k*;YaSbHvw0JPWIsh&c01MHe5vnh*DFtP?f79}_UZy_!Y<+s`J*z&72QdP zx))l1-SB5WSfov>KYtGF7zv3Rc8%|9V@}mw!LIcEo4M~_+~&slp@iHDQOVe-8MMe>`qWXh zD#KDFQ6lAr=npF31dj|X;<+0OJ#8{^HB+`s6k|`?$UsyK7Z}o+Sxx=_>WG())_x1_1A*{c+h5t-G%Dt1Te< zZS|TpC8&pqg5%cgx9hb?>imD{7v2;&fZ8Z@=S2<}&X}MBLMwv6jviEWO&z=h_x`i{ zNc}C(m-(NLnmgxm$C#enJFDVi+qcvH(^@`t{_StBtQU8q%sf-mRW)uL&`6@v$G`?y z#Z(0sf&2pBG076s(a?a7l4^=TInb5iEf)xXUi&I&uu(U6ZtfUnGXxeN0t~_aAXic!o?U7^~cP08_q9Z~bydJSZ zNOC5Fan61(b|XFt5JVcIs?l;zWb6Qr1rpl_b{jFH*T>@Cyjc%ZubK%WHFj9HuZQY_ zqAvUEC9eNtbF<|nZt8z1FM_ry2dHQshT9DRmSVx4brxl?x>YGJ=D?*^f{Z9AM8!FL zI%c{zvM@1GpnLB}<6F~!^W4zKGj2)DB>(6;C%d776xs{3Zh4AY|5R5ibIY0NRMnTe zxNNUF#O}KtsW}`cAFV_Wt<_1q0H`)#4q-!ghf?%6`0>;NlTcXOfpOdZ`e8uSxyV-{ zY%Y;mJbt_igA^PlX-Nt-oQ_nKf6N#GqXCzL`~J0Jkf_#2M+A2JC#-X#Tf6pAaPZa{ z^=@q9bHKmq^mnOnXqqA+}CVF~of6nqAoOiM_GL+TTKXU4|pfu?31lxZP_X?#r z@pO*QEl5h3hM}pPHP{~&JJN$Wtz3LlOc;(auG zBvZw36d{VE7)y$ooqZ(;#N<&N-$b@RJhSK}!kUj&+}yB!{dW2+jLuO)C>EOTmE+43 zQK&dkW0xYdL<)K3u8@(rbZe2MFX+=8@*Atjsjm?hW7%W za0qW2$zdjih<-ozI>Z>x)kz#Q?!dCbk@#F)`R$8bn8HY!A$D73#4CYwQh|$V9C4yM zE+s|2(q&8ZYPnm)96Pyzvi2VnpbplZeBgtCEmFhD1Ihz)JCk-tll#El~6Dm+Yh0_j9jA;tqY;y+l&%ORTJxJ7~U@Do7^54Pb;RzE<{{R*+!p~WG-6VS>Kc<@~t4T$0!fKU=1_81hE_B9eBA|(Le z3DrY75sVgtpat1^2R$aV6OlOpm6<1gJPsC5>~zp?DyQf{#KepWb$nuiBx?hTCJ50T zrqW9;(Bc8Suc|E!S-pC-B6c?Tm1D8L%~28S)$Dmno1 zK+xlH2qDxrOnVd9iF-gz$XJ+IW!mJU)}%h2<6lJ~)jS;G!D}vS^rX^eGV$MxFn)!2 zO+w;?Rkk%6Y@i;XGDGG1PQF3+t;Pkb0f1gjCjp@gM|qsUk0I%+F!y-wW>gf72+Uc|fi&roXKq)+_m2>tWt+N=X&0 zxLJ)kQU71nbfV-1#BIeyD-w5MU| z&7z`otb%H;=raK8knk|HxqmXwuB5=XLFB9u%v3;1D+T8%(T*Z@&kb%Yw8l*9*Lz_d z6Qu3iH*SE>6~G19MHP^#1Cs_hG3=)Lj}veWf38B}O$*8Cj2JMq*KX)jL6-d;Erm&I z??6;4u7-%5F;GEBh!%fPiLJ{HDHY^9(Y~Ck5PqAR+l4Ju)3*Yb)QzEL$fnC|*GF85 zx{=X3W8-hbJcqqgq?mL3dt`UhP_}=u5^UMzr#YPKG`!aM`!U1A3%xeq?%kX{6(TkBJ{1~5fM=oEN$<=XpBQ~D{7=tAN48c zIU%MIcy5PH0Q-{bQQqCwn?6s;0uLmZ8uA`%Ke~eIIpW-T`O+mb1j5UiW?8YHh}`3b z-L(wVA*#WTS{}0r>%GUu!9TJ4FZ+XNOK2>fQQEb#7RFCmnGOiR&^vkI- zaO5!lD)sqy#ck45wVJ&e;K*2p%`blaMlI`7r zuk_$XJl??i3vCKPMTz~JFbznwz`%0(xK^2>o6`cGQ8-wPUJKTLRvQj@fqJ26apC}!&vxe6l5A|qy zHYDd^J)&H^n>aaX(S{&YCwWxwh0&woy%C6Xz=2P)F<`Y#GORgTfQVH9jpsY^_d;3= zJM5#tz_RJaqnXx=%WpRxDQq_P+z8GrJ|@OBtTs3>&>b*dF$7Lvj0xKc_CY(>+SCZY{2sk_+x%Uc5EJ3PkEL3ceiC~su zsCF)ZC~_G@mV>-oGN-N6MRH%UGnVadbf)N9S0>*RKID8HW}1?dvs7|17wu?Ta$Y!| zX%BIn6Xs55l`ih0_`di_MH#FM+crZhGBb+9ANk`)h&&d#JBWHfC=MY3aS7Y6J-5#b zT=vlyf2};>`!eN4TuM*`f zaq3YLGve_d^Dh;EKLhR;V4M8k#^px@GmOvRTp^Bf46>ns*8-L(aw`%$Dv^N_2`q&O z%&biBCnfzw<*igsobaoy{edbT>JX{}2M!>4Nd*?au==M&DEbmP8N@$|t6T1VBRqVC zTQ-B`zMNF!yA_`!vxO5Gwg|qld!2c`^I*{GtUFiF%kFbGE$NjXMlN0RM?|yYSfB#5 zgy_0rgbD&8P(jr6D-`(VmHGoOTg9)=7dj<6<@ud|@ee{zXeEp0Z`5GGexwp){7$oXNH5+q(%32$RKzK z-xs*AV+z%CD6*+BGxGtiq6g6!FkSXrj>3{BuawW@@~nK(5^y! z9VNf5pq-hM(-Rvi5_D;VM{}nT5d@@~1dySOREsjYF&~dT#6?#P3%LwUr0sB{2;v{i zfc=8K3#>J5M#oR^65;|zNz{WWfC1j$fJFl8C`Zi}w(2v+i`t>1CZ-xxS0!ku6mBl? 
zs>|N{2*3alsfS?MCxOARlmO-T<8prs5e_NMfRfl=xl#rZ7Ic1spWXEyfngxo|A%1w zhKLORR+sr(5_*c6- z74|eKJuzlGW2}uo4dq6F;Q5AxG|gcr`+;rp`L%8-q94b{qNx~Y>2FzTFDj+{dSnG^ zgbzFbA4KDP*gTq?hFmoNJ3UH!y~Of`=~LYb%Qgh*t>rf>>Fm8Q-S)qthFojW?YP53 z$$}*Otv zw8$ukMhsO1{FOED+8`D|Bu^4Ve@t(CaG7%3dG79NSA}iJysOUFc*4&0@43hZ36p^2l_jiIrvZxqRj~$0EwFbkqc3Yyf$4#w-kU^kmMqP{yl=8=parx z(Fc|POJP!wl;j0-@dHGgvFPaNkSS017`RoD>~IpdV-4_qBJNru7&~4!GBpvx6DSWf zXXfYTFzR(mZmSX3Ttjg_%E$tixL;`$#v8DNJ ziAl#jg;+-@7eDpIusbbE7>GCovH850^x?1iz$+18>(5&i4~)M~S|PDf=dyN+28*;1cZXX9nSh9lg&5-TP+LHR7PQy8iI0Od zLB2}ST9>cULvH=9<`=;=g$`~w-XUe~8W}m&7OSl)&Oy}A1X+QGf)eECgaj%;|3hTT z_5TR{_g1}Ma*b8t9zWpW%Y130IQLWpS_|F@78q)%OnqZ|<_y%k>^@Gn8zc>jj2&LN zGm)rbl)7$G)x5onEZd0H2IRarLw+5qR$DB|k^?L<%!dwkx)* z4Ilohnk*zJw{otv{3DxJ%=P6RdmyIUZFdJ~U{L%ggJKBQYcI#6AWPifw&l_4y?gi8 z-Y|y%b~dfUry0B-vbcl$Mc&@64Vj#te*WdoH+YZsU5lJJkWIHy@KAA(#>QYx6%$?N zpT@!k>{fw7LdAk-8Y6ztpV+Bu6k}xyNuvMoAVe0EaW#$Y{V~*S#^a;~y!t zAMMF_rjip>#ih3KWAJtdY1Th^c`k^}J0{h{^~T99F=x)LexeZ%dN@%ILdpGda+IJB z0|H2B`Xv1mRyEq4mE(p%n{s9ImSniCw34;%uNM zj|az{Yqr0azllNiaqi1VN~+9xHa;6}DO}5%s=Z2XE4hSIl$w7o-ale3OW|nW_SCH| zId{G&gxx`Q%^4{-x6Nz6rk^do2vG?Qt!SczYA%}jmAN5)An8zh*1PEtNBL7_ND>VQu!xTkD2jL{_aI z(|SG8du=Ff_r}f^M#}l^H#ziGtuAR+C#uufq9=o1?(g_CAn9Q%Ow%(i zv&cY-kOyc?jFM?0Hecs>5(W?+UOm0yb?Z)-G3()?(Z=9tJH2hZmz1_Qd^mYw=7{y2 zMeQ;7if01jZEZ^Nwd_j6orhvqeaFNvH_WkJ2|)oc>`gcU@1Zj;fi(%`gmjK_oazKr zBM}iu`&cSQD}e!LM{ZJyc%Ok9Zo8g3R9zVJyUM{~#?TMc0r;ce+fEA=)8w%=DL3zu zP)!~%za?uTuSy$1zauZ3^RHNX@%TKaRuWZcR1^cER49o(ivqLzvEhhaiPln;hkF@) z@OL6HIda5RadV6*<(M7M+(|CjCH7Al6PGz#&vvq; z{w5>+GZUsvoezs1$?3|mt733o;OzYf1|C`4-%∾rr6>9!5+w6x;jGMF9*do)Ho} zh->ER%iSp2W4Wp)_ei+1sYv8~UQn4Puk`A?4rYx2&_2sct94W1^B z_4eL{)}{v?2@%p@z*$=6cS3!52Ly)&`YF7Yq!zujCpGiVE4MJs;rq+;^wOOJ%E#%q zzu{QH;qSRhUS2iX=T9D1^<&F3XQ*)ohWmLCwoQ2DB?T$^G3}P~>RT!vFjo&PiirxB zq{&oKC>fcnlf3A_GHlVNUk9n`jDtvKM=>fNG`9?4(|9;Y(#v^z_V34}P}_7A73X%Z zrxa7@Y0qE$qNlolR(faA5%k=$OkLsd5-uQG<{<2UFnp8jq(hc)@rR8rk&^Bun%%)?tv?s&UVT^$TB zS{9+(Nr`}n9@s^7!ZOTJwSWF`rie6uSSzFPAAVJ!2hSPjyxvaIL3{U0@1^2Lvv9^1 zD=S<^3OC(x>M&G`h+-JWluTmErr1JqK@=G9EooWY`g=`OZ|s)Emvk2?t}Ao;150#2 zg+J18k@M6)7$fxl-mld2wwSN;<$DS4Ps)B=6&|)f98u3<2!OMf`fuVb&|Y{tpKH=2 zkOxP%$M!Sh7M``2*EVg>^vuy5{yi?b@qSJRHAOA%_A0bo;cGBuWZMQM=Q6h2DiGj3 zfM==)Fdgv$08g+m`g>iAJ++-wZ|Z7jBA6c7tpi7)* z@BVe375wF~|J=8vP_%SeR^-~mA}4RfXz&%6(<#41@P6Rtzc9+ZL#6}4)k;R*!&heu zcnOu{sa7d+m}o!I{@0B~@r%26{&I(b$c&u7lmGQ919c6m4JO}cU+SOV{#j4NL8Eh( zhXfTbmM0kw1rQ+cOOcBUGWsz9oEpCP(Bf?HkO({mB-?jRk?=5n}8^oR#1hGnOpiKNs2Y7RU7TuhQWsQc>r9Y|4%jt8KsBs|$Yoq1J=JE8td_v#xh`9SqoKTnH=*tFs3On+xlvF&r@m{lNX16bL+c*>Ew-mwkoKm;9%dC@)foKSg)CqDZJ(^ z56GW?J8mH3P5mVN#$on)x!<>Q+xdkG9=@4*M&;#o%9UWNmYaLEq9rewD)}uDLRyI zQc`Z!m-yn>$)Y?SU7z5=L=`Hp?LQCLRF}qAy071`OFRTWcr1^#7@H4@pmL-}od^^> zuAgP4ZG+k zp5&R;dP^d*Y|1z*HTD%4wT_xz-Y8iRl!tTd2E< zt5HfFpaU{(;zmIfia%+%b&vfdho4re?zVNSXn-AAB6x524CzVl7HG z3*1OXQo}ree26K{O60)T*%U2SV1{TdFe?I(JO7DHKVkh!p9EzI2Hr=*D}$c6|HG4- zePj7&FJAf4B9(W2>_fq9$1&7d0Fb)FETc|CLX^D=RMa^H9Ocjde2WJ8BLbNKqX5+q z2^@fth3`dwY9jypk4bApG#y`^oU2_Fs}A=jJp%lo1jWa z^xJ)@oy=q zM+>{){5S}B5B1dBUd(J~k$u8|xpJD70Ms!j;dNH-_PKr6m)dhvvmjP{3Q_?p z=53oW@yYuYFz_g3Z@13IkiGN%P)$d7WoBZ+g=2q=*Of0{NJ#V59nqGUZT~f|A$~}A z!FXyyKAY>=`1X9>ZPDC^4>p~h6*YrTfIKF^b*&gs2IXiMHVZ)kfp*;Vshg? zK+Ch+*INslscb;@2Gow(=}^nd$_n5P1-DO|+emFa%XJ~#=kNzM5B6bxHPc_ZVVyGk zBL0gJtW>KzcWq>68vk@GDYBZ}o_c!rJJyX!%y7H!1-4@Wk=V5&YZN5_8$LoZnrXPr zFzqp9$lIMJ&*2|{EQYEHLS22VKmz04V`93SG;kV+Jg1YNUCjDQQd*#h2NJr$xBvC! 
z55_(A(>L~m!AvZ~Hs>WK74~2uG@EVZ_wHn^t)0t3*=&JT`cG%Sy`IJ)!S`XvG2ut$ z$AmRMk(E2N|6tQKt$l9iv%YGIT}@BKk|oZvjTPU}0Ykc6>A8-JB)UU^Q38w-G%E`i*HIYyD1(S~!9`dUyHXJo7jW#3+w+(2 zG4lGEq|<)KP$^4Tpz?oxy+}6pqNP71bNSVoa=JF>$_HF!z<@5`_vU=Nw$apih8-my zM#&}J>7d8s&dCeEU z7u%rd>*9TLdBK!NOR$+Eha(uJe7t*73SQ}n>1hEmv19!-*nZx;8{0TAX|XUMthb(8 zSA_01hldW$nU6!$yB#D`bxt>^oI>H@FI#=Pbhlbb{HfhmY75;@U)&T6sJGcSKmjA~ z2~2jS)ghocz;y^CO(MP7w zk=gU`4UloY4m(esk+`xN>)h!4!oo8Hvkt1_(ZeSN>5be`g}#{G!4W#wHF}?bW`v9_z)c(jY7Qc^WXcIF~B~@z9=W@>yL1n76?`#yF89+hD-LU@X7r% zY>-D~Pa&5re_^ub)~{B}K8_^vlv~+0OgUXJVT+gvqfCF!>`HUHBKBjcO?OkJ(Yf;G zEXr7Xge@I?d8%nA-P@w1ydSC*8ft31Gi5qCN2g|I2LNlEbA*yl3N!C}I@P`K;@x_> zkHU2B#`~ta<4tv17G(mbT62chQL@hT$5Jnt-eAr;p?6x!D`g~njTyw7=)wW%kEM)i zi?VmUZa2dH-1+dUHwx45e7(RJHj>P_W7_6=o^%~dp>)0KP(a}=zIwP|7U z|M^X9$0`j6V;QQe_V&bR!_MqOu4U-So-1A&MGB?N1$TU!s%v8FCt4QSPucFBbKEr{ zB|^D-h|^|{gKTO|&o_9!vAedIAUD?=2*F8e{#p1M_(Zb`Cfc~N;}}(@>7qEq#+qE> zO9K@SX?U5~uW;S|jmt=e)%HRsPqQDl*bWAbtk#{hA202a0UxroFm;ls4N_AtCQrdN zEFSWm?ETeeF1APK&vhOX7-{?+k(S^2M@u&Q#>>*4J-L!kKGVuHoZo!4N1H-z;>6X& z{%a#gw>|Fk?2lGuREbm=&8nV!Ws*h+6|U~=fQ!P^Qq=5wTTNEYgz=##pU=0(bl+@e zOI4cw zuNx>Xj`uU%?;HPh?$e3>?%oxAF7+#XS1oHu?iJv`4-2N4z+$LH!7ljNX**f%=9Ay5 zPliTL(;W50orQz^TQ;%yV?i3OIFn^a z_Exg(p?pP*IwUZ^zq>QOG)(K`jwg?-tI%X&gQ?)Mt43Pfp;*5m=Vn>)IhvNY6FXWj zcyzr+Kt73+vxjMd}i&mytikE;NN@r^x3n|cxR6Ntp?#`lV(&UFZX#}ACKmK z`S(_rCyZuk#k+1vJ91h1R>a!%Sw^(X4IHId@tQm^eAPX_Fm=A2yP01kMgDC+B^#4x zF;2+H{uWM7sEXoh-C4*F4X8SIuJ_0MrK(Q;l&5Oz7v7cyD!7iiY^uDJ`Mt-`tG;XI zyz-ri+ue35o?Fqg-XPXK(z?t|)hpa5wFAs}sHG;M013M2N*=XSbxr;0-r3oVx`&>I z8jjVrLXxxdUm8wRJq!-7?{GOmqb~OArnc+ydrplJ?tKE=aCtWb{Jm`?dU?i#KPr}g(wh~DvL6TB9x zITWB!p2Achz~gGTsbsGLd(MX^wilYJOCH@nb>DcuD;iNKW|8AmJn5%`dnC*$wyUnM zM>+aYnhvY=oqE~2c#Y9reheclDrYp09DiR@wMOYnJtZsLwIQAxH@Li|iRL$2rI(Pl z+B1+sLYP+}Z7yJz(dBUV;p6+`y={1nj6?zze9Eg`YW%2~9=)Y8){uyD=20?z=I74O znzI29{uzu{@&5AF;Bm>sQ|R~;KNS7$_UU7Z#0zehVIkq-Y4eFwD?#^j({M9)_|!MONwe@ zzzTJ&pu`<^T}S%a7kqUR<1WXp4U9JOeVZB*UzR7X6&sc&=0~HxZ^$N-gJ0hL-gh>( z-j?Eb+p6$+X*nHdpb^NQsMwg)bj^1ISqKa>U9&X$=Y3ZOd;1-hEWCAN=c)o9d{W#u zmnR9E+wQ*g@o=U0(YWdOec#oj_%fCp;kPV`fmH_~6W=iqAWV0uGcz5nq zV{fJ3m)`RRpE2ze6L~MioFPc%>cwB9E~dK>b(6#Q*)wvxMBhs1xzW(RM%=}}g`0@Q z73O%0>#w%D6a=rjwJXV9$c56T$MY=d>o!Hw3sW=M5&SLCYA zXPR$m3;*`t&r=&G;GXL#te z*C{rBp1IP7)2)3)oqlJsgjeMCes{2<&69N)|IviczkezcuL$Oh7Yq_HmzK#L@)B@S!GXq z3cK~}w}HWz2X~t3QA2NSAnP!E5aka^*+8RCK|IXe5YyYaMl|G5kg%NX+P2R0TS&a@ z_-lQqqOLZ%Tb*nD7;3DaFhIGNle$ax{^E?G+fZP7#V$N zRv*U5Wv~XKIe(A$n%4ZATjQ6wN=1a&sVu};zurC&^1c$U7HHqoQvo=Zll{5^LLRj3 zpZ)$RI}J;q$-_8s9fEz!@B}f(b(=#z6p)B?jWePqkcxz*!?gdcH_OV>SyoCuZ1M#+ zIqF6nY_IA!pUV;z;Q1gR*>=Q2RbHerqrob+;@*WA%`OFt%y?zCCNJYPW>5ttM-^T9 z^!anc$c#A;xrcUpi=NMi*RvM#os1{A!8_X9ao6euu_uIgUw``2uIu!qv~Qx{JSUHe zUXZ)xSjYO|=IiQL<78i7T>bmQzOml7V}%*c>6*znu}L3ybF3hz{SLX*4`FY+mF18A zek_+2UU8USYyNqXbX8}{g_{z1Ml3tK5>{oEESEoG@KV#Z#KJv+KStgMv9zlRvZfdO z+5np*#-ct!XmU>eSv(z$y5n;*>=u2`OP}jDq^Q^T3h8$B{lv3fD01U*O4bU%K`g75$DaE72EC$)Zkd4gbMPc(iI5&T;gz4!3u3wY+!nE0NF}==~ zIU4VQ1>>g!^@kJVisOVPY%M&>1y;1Q(yT^GYSk&50*BAL!C>|Uo|GpOMuojg(aa1s zrE-GhgbzWGxIf^j8pm1PgFgVcHTe0Nf7-K#Gq1?iLU>LWbPr*)W46K9K)N_ut>9s=@hR#f^mJ8w`h`RLNJe29>VevGCD8=4XhpT2w6z)gPxtEj4};RnJ3lujLk zkpc8wzPw@Ar6Y(**+%u?A9OFfml5UxZv=+ELi(X$menDQJWakBc~QoyQJ8`yk5MjW z0N}HyfJtD9vT}1fmpl3%=0DssSM&+cfX!eoAblf)SgB}@skq;I(Q6b<9gcPI{4O}_ zo_Bz%%Q`7d@VV8uzKz0kH?HhDko8S$*LICj!zcX((JE zM1jYJtZ+yM3Hk{!4DK)LZ`CYD%&bF5wkzN+&8p7aseup1X~%IR;oU@b8>FKWZT)y? 
zdJk?wLW!J}pZB5+5J;5O*K0o6&8Tr{cW=A~Thz7O5DovuXq=dQ{aepcV-*1!2gtI=t~F!$OSNZ_XizBPRBw(;-mS*JYES$R!a9!2Ep zXnCDob29AV1`{*ZEG*(lX@~A^eSXNMR=nx3O>N9j2iJ;((bCB9;ZVgxSq@DS1xREe ztBm{?7%tJr5~l&dssJ94II*m+FFf8Q;${thNn5Lk?t%m*gSv==6X;(YaE4k?NQjk< z?VQh0cT?Vg?i$mU{_bEH5v%?M0}sa^h;J~;uFJh>1i&k#v>E#w!wEs2{=`n6{+EP1 zF<0~Dn;w-$<@xPlY5SJj5wB~qKdWSnn)W<#3YD#V95|%I8UKN%KxAWjEq{{m&y|gT z=J)Nf()iZ$q`=`v%OfDlcE88*sEr(#<_4;2YsCXMKi3sI9RCQv3ci|mARsOc!2ECo zi5OT76d)NaIPIZSPpiZ01P^}88Zz#K}l zWq?94z>EgG)#UJoedK#jk2@euGOG+PAp^JftkaMG30<@vd2$oCBMep$a>k(j=*Q-A z2FnbY1Em~fj4tt}f;#ETZnN%5O)o@Ltv^4Paa71 z&&_aw7MpfN~@Em(7CjiYhMg}{K?F;1_gsrk2+z+@R%%K>?3z|QfrX!$m!5aeE?)lR}N z;UOYkv2d<`W828i4fL=r?*mtbErt}P}+n@ybg`ZCJ zO}6#wPi7o<{IhTB`$O4tg;D``TJ*IgBz`m82(LBeJS$=Fe02={I)U-JSB_VQx{Kc* za&Typbw$k&QraE+{3lq3A|oYCZ)M=%?c*>r7j(D__@Y9*ZF}I!?jixb_@|H86uw!$ zIoxbDH2Bi`$eCLlVuc+ZQk@TMk~T8RF;};0lx^pcz5DfZ;LD-Uw$=^Q`XOFDEzV7& zE@Xd*x=d0w*`wCntbc89xXB_g_`7UWduLlnFK1NZVDHh)rKQHmovg}>fBYDs zYu1}Q?KV(TM&CK~K04BK zu!`*x-*f)h(iYwnW&RO~4-a^J@d<#XVzr8bdrxX}eWDQt997%bH!%5$ads+No-;UZ zyP^wiy1aZHwn8SK*6Qdhvv%ih(j|S5657DTR`mGYN%@tp#$uGvOzSa1js zmZ5tW63z)ReP=%Wwn8I6FHVwnhGSaSBZNBooMO)IP2BzTQvIB-x<|eVR9MC~B@g|% zq&4kR)b-;IFV#4&%&HHPL<^L zu!I->*mhg(lxlePx$PpHna`e`{gMi_!f9Lgp&Om^k!RiW56ra+K0fK!_aoV8TEZ}I zfAb3S3ZZigOZ!0G7tBj^EdC+u&pOmtx!8aIul0xyje~F+>@gD;SJk)coyWoX5t-oF zdQ=_S(w{>L%7Y%5CtT1@^o!6DRcGV*nEm{9mFcfmY7Uod_l#%ucE3J0Ty#7u&f2+U zezx;KYGMUv^sz~yl>8CHm6xQ$;B-U6DU4%M>(}+MhhU+qR-coeJo{CXjS@Wk`tp*5 zclkMK-SbA~e2Z#4J_ECVI(~uSsU&hw`6d2?Z zikcl4zmRFl1X{vo7b@>8d%eR+2t@a`e=dE?vE6E7JiiK*COuUTrRdguqSv|Dr7}5j z>G3UQWs|4OV`h3c=#!84*VJpgbf&ese!bM9?%n8%x0|@1GMSgYSYeA({}%Ef$mkvv ztz;`kd?G@QR>2G>KEOn74+j7{ZlUGO)XS>Zl=iADYt|aFPXvT_%wyX9e>@-l zD&72G{#X3O%+`gsPH!?V^bBtI5SF>NjT*IiEY6oiKgn95THel1G_e!3gU*u^CcyHRtt@ujxVCHzwhqfjG`DO9P7&Mf1k#*Ni zjuQtCU{4H^np-IJd!mT$tsIUSq}>#slMa^M4XclftL)gu6Mon=s^3I&7Lyu zo@eW7%06a_?6g!<85te@F6<%9i{=|Bcs$fVjW3KN@WOvYhA;`4jT1)ZQr! zFZ2Frn^#92B8_}EWLhoM>i)c9RgzI4$TYZ-PVw8z{+AB|WL*bX6%~Ax6pA|J+D7u7{&c>j)J++% z&AR;fIB;egSM_Bx?aw=tcAva{bL0P2+*?OgxwieHGzcoFw1A>C2!eDdAtE8t9nuKW z9SVY=f{2pRB?3~?IYGfmhtl2M-EpqR@BO~9$2oidbH+I1tf6b6OD9j<_jO5E zUss}Nv(YVv<2NQIq8KpNR&3l!j?UY8!OEUL9?-M@c-5@P67cwc3z)MuhcjVMoX&*f)CiGB9#!tikfgR0vSY6NNx_CR zI~6c<&dYruRsJdlkS<`Ot^n12i^wC0!Dg`Lc$W9wv|L;nWn2gsB+^)l1w6X)dnwm! z_J5>FaZr4dyyxRx6^9H}1hd{#cTq-17Re?E5MuyJY~%VF)^~QYQt~NsQ|R&oECfWh zFcFA0u+Bm6;S)T_X2NICnxElu>fl)|6VQsj@gy~z(D~U9or-#zwr)3+Oe-RZVO+u5 z#J72TzIto+;bld)l;M28Fe3P{Y6KzqJOm9;UZ zbW5FIYwvRkE8Wmeu~tQ}rzpiP>m(nw-ZY`QuI(fGR%UeA=ZMf>?eFoE>^>}JyEUIW z(OUB2`0UUdpu9wc6(%vZ4qwu`AgC|3OJ37u_m$iIlG6>#QT44cL(E-@G9#ij`p`((S`30HAzMVa9 zGu}LV{(E6td52A0`gU3%zO`sexc+$gtj3N(JAmJE4m%SO|UF^GwLO6|`03nVKD zd`H4A$ng;Arynd1UUBVkV#k256&QFRv}*86RRYpt(7S@JFxjt??}DQ2+DzuQ%Cy4| znVFr@jm1Hd_~8Pl!zhVHjZxa`$^JBBiET+m`$i>fZv=K{6TO_{JyPO*Aw0BZ2*33)A_Sno%5PMT)ui z=jc!fzRi31HZD0>oE#i8u+fabzTpJ$5TZv52)QD{;sq3{tH>DjdA8yFjad?HYCgfN z%~Guj-O+azH@=9s-#jK?9{ui`Bis>{jwZSiH>U32m%}vQ#;NE)Gj8_@8r-aeg{S8Jy6tpb-LScHM$LPWcJS91k>Evy^lU6?@7N0YwqAt zusS;~{&aeI|5q!@bKjCRNOC94-8D}ZBe@-WlDCU?u4kh^Ig_BjYK_6Ui=hWpsz0i| zYgvRO8!8|`rcJ(QPDIoOknvdR)K$rMR*U(o!H2AKM5RlGKNf;Ccl!7xG{^BNw3xaxZXtdpdbWb>4otP| zUJ$fO?X}9UlraX0x(Xc4!DbpP`iI&-bT~K$?}jkGwJ2(RR!nPW)zfwFME1y)PCZwy zLN6q}sbxs$_+6;1M7p0>;)}-!YTWRcmmoHY-}*H z&2$9>z}(p;J$rbR<6}Eq>`bx%{zn2w{In81v;chiEtlS8X*63bsJJFCldJAK^6>U?fjuLMyG_29Tr<0NfGkG7UArwkq4q%+B9kEh{*QK_!L@lPySCsK4Xb zn$Bt}E6*J~g^ke#8I3Wzzv{7YY=}nwpUnxUORNVoPqz4NahRE{y!WW+^wW@CtNxJzrkKi#rR(~g z)$P;o`o0z54pNGy(6DoSGtOT*})-BnB8H<>Wje8ptpFB-byH|0! 
zxijMvE}Yh$*XK^tLM(g7_OEPWF|khd0S6D$(q+dVQ4x+=7HWU_eo**V)%t5J&%nK+ zzM5#C+&80W5Cu$Xp0igWlZDKK;LV?yJ#=(E@%&`b8oB?juFr+h7Z88Y_fXDIq%GEI zrVSvOqRB?rrMiS`yPkOMEd+}bCnK})F_*+|ohYuytP~;%qMtf$j5iqZ>kxd{NS05+ zPzk)0y6_N-+UG%?dB^WR$*&=0PL(TS4{q)3v;adcFn}jUWW1ECTx~#6S{4~(PlpJ! z@I8hXm!rUR+exFkv89;o_cG}p|3}J}7F)@7!GE1?xq`3dHU3S=%1JE2&(=;cziqcx zH6Hn!9{X;_*9lT;xq%h!g}SXq9$Y?Q*e?)2-}pjYfRzJopl~cT|*e{W7y0rN^k;5_4 zb}D8vI&yYm9qXdZZAqp}`Z9p67QTD*{ds?7_{r|*D^gtQQMxwT_ANm%cao?aj*IPa z*Y-x;1aa};1faL2a=nC^@$K7H68pvTO_0aQtC~6j7%!r6h1};0{_VLqJJ3s|0QmD; zkfFdpuxdaPJONk{m`w2)|0X~tAP{qje9^`!Fk!*L#YK$Mz_Q%~2G}4@I|bY`ya{Nk z!m~&K#f(HiRui;+)*eL8|e^b8u#Bh)N!M8hi^1AUq5sklk z5VFSU78c>=7ZQuJ)r(HoJ89Qg>n@qBNKBeor=1%3nav`tJa9D3U3%_7;~SM=-POka zy1O$}f;-0k{fc-ytEy78_k9BTmIn2Gto!qMRcli)95;hi48j~mz}5#~9wHPDLO13h zPbon2vtq^(g8Ya}KO)JC_QELHjWpJyMHWlZ3|72=^~6T!)D93b!cNjJON|wHf&H z9=^RQCfpn*G(WL_0*eb=%~D}?mhuBEm4Sn!Eehk2j7&F}f$47HlaP_20cJ7);MrWA z61e%w9=?teDv()Gs!sI)Tq_ve_PHA5YIIjPSpXv0ZYc(8h0tcx8G!1tP^&M_5Wg4~ z(Izw4@OYC^Rm+&JXB&n#p3V0-7AoeWQnPofJ$e&=5d1Z?5qskBj}-Jy?z3eH0-WQA z_2p;q@$W-IU|5D+44m{*;^G)E>49GIT%esTjBVjE5SR`iP;qa<)FDCfEx1x(?1 z7?*=XdS6ZwM10mpdWXPE2j;&}Wl+QH>eT-#%&qJ|pocZx@;d}MO7zd%A7slrktJV%@CEqQZXkuml zwFDM(wW-l>z?)wnAoyuqptm0)^MMjkM(+TPFDXmQ%vE=QU&3JkmYpgn1kM556`n}7 z&i)A7B+;;5wIhu5sDRENpOEm^lGlf)9l5OQgiI`S3}A-q+)IMHz$|nGo%!Y^T^RKuJOW7mPF5fu28meir%+<*?)}=&+${e zfcu@fw}3b_8*bdJ(9t+J`hKmGuWkn|GIMq+U|I6!!;MgjP<&cQ9eNueTLLa0cCtzq zh+;5-Oirm2aA*oj1{!0*pIw05};zEFhERvNtb5%xKk*SUg%UaZp}2iJ_zNwO_6F$ zM!l_FYjQ#4V;=qe^_wy!Lh-!@!Y_2w9@Ez?JrV0Pf5^Wid(5;yj3QoVbiu&%DT;*l zlSgU4jM*MXue6oq<=Rt24qPE=cl?S!t64gx)xv&tC@O$5Ypcw+heS$j+55S)N8**Vpx=-6%U~Lvk4E>S-MG$ zTRfi_6{;+dv-o7R-;}4&xt>?0@EXAeLE8Rkr)=l=B|#@ae=@`M1~toH584v)LUBJI zO>KYqNGCl#E!9rd?O@vlIH@43$8ZS>80gt%;!h-I)^;bZ{3|-Ye-&}lFvmG z(Gm%xIm`Q zz%z&d5zaIgLXZ#TM0&ktz5q<>JKMij_0(qK%O;CV0%0?&`bX&y9py~*uc_p(d3S8; z_3zN7%O;e5yr`=uebPb7&nc>4@a?aw!h8-|?HOM3t7Gw7aWhVy8$?=8PJab#9}aw= zvV8fHmaLE@^$lDDBt{;yk(x+r2=+H(MT3Kb^9N|E;3=Rp#!X27JsANd^>+lUJUtAZ z?qm44#PJ3C*eM+*RTmS1bk=O5a1*6_#v1pnH(I)|X zo9R*y*)jq@N^(=fG%M>^Yq+=7{F|DVW}yelbL64S|-ovlVg zV}I?ota`keo-xxGI}~YLOvEQRJP=Y_vbHN=ev_#M-v$ua44@h43t{(jTwE(?pJr+1 zC+1*~rxKQbEqCDbK0f88t5t9?$wo`AOt!xWI*yczDWI9>V9xtd^(f&Mg_THuikZ8` zn)NZkTAl&fzEWzPb}nI~YkHv#n!9QUpLmJAIf*2)hwqv=I9@?#7Lj995~UG!S?Y0w zuDCM)H!iPWR;pGd-t^_ggx<_AVFedkjfqP9>sz^W6raSz>lF+`0o1&nxrscfr(4k)XaJ+v6XZ>mh|NC%DA$l++GtWRYxxy;T0ynlAd3@ zJGCKRI``!O0Zv9&YtNzjI!5d~-W(F+=1#Um{5-e6UCDWb-F4bDmV(KGSE@HGvKhE@ z-0bNCxcTgDeBy3PQb_9gLth+Hx>?YGC`G-7SEmWm;%^mK7|wQVOX+mAt)33cXR5LI z{<2E1X%>G|Lw-kcJ>99Q#@*{kQ>!i2vALC}@HjtwWze2lH3b(#lS}N5NdH%dMiRol zycvZpAIm!dGyOG&lEPGg#c1ej&j_)u&L{#H?>Jm-EQw)iJNg{t?wgUjW68 z)4~%h3Ux_y$NTWa?Vrit(r|Kkn&|e4D7W&B>UpWSn#R4s*61+dS!)ceF4xJV(l~|u zN=g^Hg(rjVW^^eBorHgmT5nvDRMT*MDGL4TOTIu%O|4AMHvA8cDBU~tT54=_%ai$u z0?Lm}4Gv7V6N?TrnpxzS!)bVWn7VkCGHWb5)U%eT(WsY-L4HY8J3&Fy!g;Js@}B$I zMG@Bqf9KqnIO$N6a}@Bi^6C<1`=HZ)@?&oPvyk28#qFB<_%M}`WK+V|gt+T%!B$w` z)hw~GA-H@qzVVR2NC5IUs0gd7r-3MI!B63^vtw{M>BFqrl7x!m(=b~F`tC64z)D}| zkbsNr()A@)a!X6UDuz5U#>m<4lV2Lsu&CxuJXpXcu+Nsw^ub>0eIdI_{p62T0uU1) zz8sG&qE_FGuMN&5#f1qA3^&-z=E);LZ$5q`NwIASNAX764_acoIr-&Gd&wnFBgH3- zyWli_u*jSvB!mGMy6JKBoVTWi0bq3%!3nkY&7IoNNmp*ZMNiLm2d*6uh{(04w#c^& z*1t9x7W!PB^t&xB{?22D`glbq57W%x$K8^D(XwBzQS3~RG5g?U`z7I4=L%QbHx4)B z8%9&VaqmxYwZBLGu(+YSy)o~Lv#?m`@vgaC1CLse3fl#>NN-DEOWC-Ii7W4F-_e*d zQF*WJBO%}>mqYw z&%Wdt_JA%_h_%`ErOmUOukAjf!5TEkN09!_s9KQYWRk6BDTo_G+0g*bgNFbs%Yq>z zAfCj4yo$6os;6@z{uhpzgm6w=%oZ7BiD$V6)uoWIK3uJf(BObsZI* zyO^QHF=IFWT8NJjk+6i(Frf+`&0ngA~ji5 zYp+PvyDD^0sB}n~dT8_ztzE){v%Hiy2R-SEA#>1U3<&(Ab|DD)% 
z#e(B?Rni$!%xCs4sfRC9c&kK6QQ1+|?QudPPE3b?bzkH=RUO-UzrFA81f0mZS40l% zpO7KjPcVAq_S~^d^ju;E^R7YgbAe369Ke3ac!5FxTT6Y;0%fuQSBS=6TUfqXLV z`-P0Jg*;+t1Z(`$ET$f}3Cih=J?CkAZmQ$dtaM!{TfM2wqzucZxsK2-o19B;v+roa zn&4IwyWhr7CSC%&C!Y-4x7_vU^NayBg8M~B3FLk#kfBok|0E;0s66R@sA~cJBeK&o_5tsx+HCgyk_FZ;i zMmZ2~%ucvp3QS^B~S4Ct0_ zdhT5DhKxVs6NyY97c_?91O|RAg6W1fTBLBz^KK?!0qWB^s9X0MWp-E<*z=3TMC{!A zYLP%M#vHIx&Fe_m-OOjMq7?C@MkrKL9y36WJ|O6`WMmBdP}nsKt0c-&qf{r!f9piV zzN3NbL*%aO?EYPE({-!96T}48g?k8XERsyaa`lhq#(FTZ5 zd!`y2(mevjI1oS}JU<>kZxLy*nho=?sn}TE@6Piw(DmSn3x3=zD5cZz940hjF>!Jl zuh@iesb*pdSy)=qf5ywFl=xz$f09J2g;i^_C}}rqN+llo8KX5*Sr04Uyo_rdw}ZTL zQGDrb36p4M;)*W2tx%Bq)5)rS^@}{#%ubjmC z)#m*gMSA*!i_~xW>t7-OosA1`NIn{CVjTcF79b^yJz3XaYK3&l7Ir-iJZ8zDo25O* zt#Q57!zQ-t{a@{nAV^Z4Q3)jAkW+8VK6&uOs)4G*ocS}{hT<=6fYO^ldhs z0w0c9=NjW-V_)-ZU0}bRSwtXYC*G9DNwtadG4uEr z*+ka3lT;eQJ07H3otI)_S$-?L9+G=4M{jru*OkNdM|5{UbDVm}>+R+^3A3Nt(|$#L zmnahO^E~ZD<58RToAtxD34A)AVR=h#I&W93B?@^3U(cr|acaGk5WLi`W@&z23(ZjA z=wk>{@$#aI@a_or{_d53D)EBj`8|uU<9XrXWj$!?x-Hx*AM?n_Ss=sG#=&x3C)H5^ zU@XYVZw006=vEr(WH~ZXsH(B4l(5mATKtUUN}VH2OPi+Bl( zWID$IozCFlbkBWqaz!_+8h2uuN;26la}@oqHI`I>y!VMj-uqFXq+7P9tRFngG2+FX zi_5tpzPa^^&*w0%C;13+n=l;?mr>L{POpPH-#f_zZ=U=P4JC>sl?UeZZLO`-08}>` zt#pCT4lFPEga;5YVGNc4*rh>32?ClJ)B$}>(%F9$NjqQ zW^to>1Z!iH;CE*>9_ARIy^8}^V+N~F=iU)PlroRumN(Xdh4R<(f^sv{3^J1AdD*>l zFFYsZ&2@cZ9}At)8;|OnZt`k18&rgK-?K0hbt$5-8&i z00p$QH5f4Z_Kmb|Y7Z6EA9=K4MtImrsyz)#wVzXB; z#ZHs`+J%L?Q1Y;FwKlEQDi+Kqb$=lYlCLQDTzvjFi|NVQ#b2@+O#At@$ePBjawV&M zoZBGwov21C@mIz3Yxs}2-jw(1Y0_IN$DZH<@HeV0!rL|P$;30~tK?1&tQ6}vDRf*e zq@CGFWagR|zB*1QDl#`>W6etMTHNqnp!dcnmZYFtYuX4XX9T{u3#ng6TN^S^rw8X+ zL=6TcP(i_$07&GZW;#m+Tvs(AR_4O@+AsYM60(%GEL@drh~Yahr&iqPJMreJjgvX5 z_V^MWj;T~tHGO(8eQ?qJYHn>Ub{d7`$P%n-5IA}Q$q1b2GvF+6kye-#z6FAj0eJWL z%YA*1Mpl3-CPLmllqAs>do5o0laUDBSWh&pWSsup-;R46S>a5dH>X3jih?yyt+25+ zs02y!YeG}9=5${W(z69Z`vc(7?njFmF(B!`wl?qf2{E*JD!Tcw0vE>h>aTl^8DCkj zac4jMr#ZEqwgcYl9X1|C5cb6QoGb$GRT~tCri{$f*4y zyZ60UsW9YQinP;7;AQk`&9PEeel>nMVBIKym4`BT>e{S?Mzjv!)V8U@tj-t+rO=C* zF}v=t`{$ZoCsE0f_hMvS^{u@Zg6(UyPgOpt;`sjReQ&-W*maVqw3DZ^RrZ43s6kwwZv709nEMHM<&77_WL_sr6ESM5K9rHPV397X?zH;eMo4kgUHvx83IYKRa7ercn zFCc`7I6V4!F|E^Xmy^Y~Cr;niP1jM`JsVM}&jsaoi>g9jNA352{kuwUS@NmK_0Icy z<|^fGcALR`)S{Ix_4_gOl+?WTzi$J_fpLWmJ3KNH4fL5hnDxPA0I`Ps`jr8C%7@TU z1%uRF1azkTi1T+Yyc5FgZ>tqk9+A`$Q_6bY*5y#J2)Yd8CG${xE^(%^u;_$gv802p zKW6^h8x0nJZaSx^uu8mZ0kre#3MLSf5s6v^SB9`<%PGDC6qlj*Q#e8BO{)x@wOv%j-~MP(OWI4K5YO&1aTo#4hv?8O_{i#eHFH)dh-% zTS35%qG1B=Q^=8PHA|E<}5g! 
z-u2vB*a}rc=0Fuqk}dCn#cmo#nj{Sv^#<>47sJLvhdXl0S2}ElCyv|hm5;tGgEJfU zwdNghKJZuoZK^?)MufruMO{1c6Z+QXx>PUnk@*4Q1bFqTpIo{z_%Nh*um{zF{*n~* z+d4WroYzKT>fg$Dw#q(zn&ESPhLA{RXVEZGXlif2(`ycF?Gcap1fauO_2o3bxp0m1 z{VdFGzX96_elM2Kx&bOI2mQ`lZ!)V19iPw)N4CtNWe!~GE}MTid&`*A_`_^b`D0@O z`itG8<*(l!z2q8l&6eJuk4xoh31rU=RwdmDSnRQ+k~{DNn#m;o$a;kgaFQQJI``83 z1m49T80`)3{{30mui^K(+;eUfq9rr04jm?#%^-M9~<4{zFOo z>-=p00m!Eel-VWOWLCg!0o1t=+NdD*PRVOY0!%RuB61FlAkBT}#V*7-=^mJa0QMFc zgaDPX^o=WUMnqv^^Z_)6@Nam30+^y0EvBn-_~Y8TZB<2 zNk39KV_b#A2B&|M<8Z5vtL;+Aw*xnwHJ@|k^@*n|&v}XkFI3+xV3#Farai>1vwgCGz9>U0jGEKR(T$C;#o&2ok(``!^treDR`gXC+ z<4x;dla-;yyzaw!X0dDh9J()CoV0Agq%*>Pz{2D;=(SqY{xOcJOUwP3m*{y;X zZPI-zHa$9Im_~GPRB^~2i8{E7oQwPni^ z-!%0<2ovvodr{33nw)xIXO_G2is4N;E+}=az^R33^*}VMHBGmZkidl5W181~;j#R| zNKG9~p3^_#gBn){BO@aqd6kx`HL}r-jTvfjAQUX0=dczJP9AXQny5~Y;%ZRIPUb2T zgXD5<_AAAZ#ynGlg~OgRryj3Hire3ztEh=e1DrQiX)awD=(G#c8BfDYZ1E4dMOfbr zzTVvUEWefp;z$)P?0Pz7zbed#6hrC!g&1)oQzsHUl%I$LPwLrC=%qyTn-I}I45+Ze zIslAEAppg}6XhrHJrUnO4DcQ^1pv&KcJnds?RLTo#XOXNeSy1ja;B6VQGvs5=Uzpr zDc9y>rV6U)zUl3adn9`Ot^7WnB2BpJ&1cm?g9?+cu}vEz7`#PXpD8{;%}`hBl-S#L zR|-yAj}q$V>ZFr9;MD9gWf)Sci=MC74RSNuV2#rR%cZdn3y4Gfo&b;?fGEJS)*N(T z5GH+m9A~CVRyp9=58;wI7p1GHpz6n90A?Hw%?pvl_;>}-D7UczZ(uo8#|!@O#6(-K zmoV{$<1BqB{`gGCrX!&?9!`iTYJP}K8kt$6dpUz%YI}y9)_S;;Z_i(vInnMRAe&vm_98%mlvUjYja4?AHJ#O@bk&0dmF_EJm@%H@OFB!=>bU}uAUR~r+4oNp!FjSCdM)!Nr_W> za&;+^90#UYZr$<&svuy4EaVTMEr+afgujT)woT2=3qh$D=*M(HhkzpiR%)@`_*mZ@ z=nMeOf*>QTE(egc>45tOt^462WJC_&=E1dE2!C*d=TjybIw4uVqddFosRmHHRIBQgm`cmGi-PwKuWY`*-cd%9IKr*5e z^kBT2Hv#wv8jPUEI|+;zGx+B4;vgfD`Nym~k%9@e7(k|89j^-fhj<4%C=KoH?S06J zi?|B|PY|SG8=<@40jO2^(QP<&=OB#FK?)8}Dir4Ve{yvNs*8{4;?uI0!SOsgP!|@#%@L^_t%j+At^>u@w#OV z=}#Q$=G6kbpMpGAu=gvGo0!#+&~D7M1&u1XK&N2Mg>Ugqzl=G>NV6SOu4%Q__tjIV z4P&EP55*kY7Z=eb`*QH9d#nDbU9xE{Xx!+iJNCBf@f%dJW#ZFwI`o|^0Ng?hq-pFDWcyS&_4BB_M9^8rVY`ssR~>? zaNsd9ZVG*$@=WI9x9LW%iom|{TGN6@*0Apy{v>0v<(_x~@^Bbvc&zUINcq5S9FA&SaCU!65~ukTIg zcCClK6$J%aTx0rD+2}uC3vAf=0E!A=@X?)9giXK!;9GkV^!HCmHf)h?vx)LMay49S zcPjS+Fer#=ZSUiayxc*b@A6N~rDfb~{2hq5pJ`7^L_;tS~juQag`@t2Zgt<(mWa< zn}{jEB}Sdmy6$VlPgYmo*YpXEsOqnh`LV1XHa(PcscFZkOlVvgrd!e!ov45v zwkO3Z*?f<7$fGt3MU^jK9xEGPm40t=mzhb$ndx)t?}rK*PgOfn^j@(S?-=AbOuERC z+x3WS5uI^~hW49;PLh(UF|CIJsjcP7RdHprLDk{4M=U{p+?{%CF(Tt$nj=op(a)>h zH9pH3xc3vqa5u3Eu9<6G`zQPUjN*$6HW;^}b{9O_i&Jt(Vuyhcdou$;oD* zB|0x2HTCR8XKstz zwKTAA6R-spY30_Q^R0t!dLu5SVXPFYZRWt+or&HVhzai?ZcRKcG&j>1A%L7mT--s0;_3&e{4$cKd_-_5 zeDXK9DyK*aj0-+(%|=5XM46QknB-ijSO3zZf!#!4nw9gx+h=1$e^)Xse#XXY!g1Gx z?Z-1V_&21JqE$UV)@Nb#Q{Xu_#-7-})B~%4pLhA?tCWDaGP>OGxvc_L!oHOkAKre9 zT*@tCbeDbqC0vzLTiHv<%56XFA?Jd3XF@Io7p09j1{-sRwZXg`94;`&o86I53z|RI z7qrz6nh=yviSSrgibofz_=YRU-a}3RCI3o18M)Xx6(Mekp^W!hUmR zmNRn+QifC=w~KwH{a?}-S=sq|qJQjb^gOb6*~2h2cxFp}{4yX9-u2$n2ur5olI`N4 zU~rG7W9Kobnr}6*3L#7=B8ML+__u^V+L)2`{&8$Rs?0~qig$z4MM0UAV{ws`4t2Al z>e-mS@)Q+ER=@fA=nx7BQSvzREE$woAe;Ah- z10gZimE1&;v#(kYCe>Pwjs|Um;^t7px3Vu9Ty}Tu$lm)R=pO?c+q7W8z z!?5mN5g8&D7l!jA?6GLPM|;2}NN4fSLr_7t1e_0*7$*M%h`SS_n&y1rRb38osvYQMr^omLoYK*Ys%L*|jeL{+b}Y~J$y$!XV8ErIzwh{%u~ny}Q8peGnrj}d&37Vy zvOYQvQ}W8(Iof_7S0zI2CeV{daWdRaTT*qM8em|y^B*e`&daJq*idJM^*@5pb)OXz z(n}$YBXIC%z|3Lrv#LttYNz^Rlncu z?WbL)YkvI^^Ca0lrHC|E! 
z$*fNK$-OiQ7WjTE%Lan8<3>EJ!3>H?BOBQ zslU3ryZ;&qK7_w#_Zz+^xTk(t^~&y*gK`nvu`<=Z^Pn_AI)~4?Zw8e8p>Y?nSYpSw zWia@3jQzOT%j|(i{tEMY{G>E(@P{dUEo|k{9abXknyS2FG+W zLm;Anco(q6Jv<^c-(kQ9HgR1L`CmS02343;t+~gZzyG~*?<7IhhT=6y)gX%Lh-ynC zNWeh%7NkPDUVlawv}8oS)i5@}U9dhv{M4W;Rk2z+e6W-cSB5u@SXp zoFAZY^Yz=eEfD?!nDRrY-nC(t!$>4WBaz>~4U#4XJTn}ejBq>uxr*@pmRJAoHG3;v z<@+|Z<-eC?l{wUbC}u)>n)gpS6TUG2-+hr|3R*2UpzA6DR;p}~LC_5o7XB*t93c51 zv3?n7&p_2#7_Ab3{)e-(mUDZb$NTFU(9we46{tMRfdB$nAY~prqh0xrELiES1}Noo^eV3U?(8@tBq@Kq zOVABw0x22_L+`((wcakyM?23Gy|-jR>l!+Ph>H7*@tV^@8&I+F0F5Zn&QeF?c?5rk zSPvReNHf$M>O|z)hJnTjj)E5O40Ax(8*K9ctS4z;keDU~QR=_uAb=UHNR!GczJI!- zT64Omi@;?7&UXl^2gn5GeBbAMe&G+znoeyJSlcav)e?|DOric>8AHXLpRW5*UJNAs z*t01qB?T(BPmwJFMfQBn@md-HMps6wqQOtu9u!3GpPx7(^*QwP;f0cc;Io7Cz-74m zU$kx`uff9)gwad^c8y5rL&L(b%N3fPF9r+Kb?hcJ%t@7>W3~YDpN95P zqo}CfAv70SAC&8U*}(|Xt*b~uj0W*#R6H(0sSf^42)#w-txDD?RP;#81#ldg2u&W5 zc>H2s19pyfq3H$Ct*ZH|51bifFaj<09PCu* zYCzF%L6<@$-#`rHpxfauyThA}5wJ_lZHLPV(#W1gQy4lw`&t&Yw4V`JK8y4^#Ju(p zyLNbhtpBaL4*)2Q)(WkDYtT;uW7#C60smF@8fXz9tRC=91V64ne<#&-G_uKjLB0;b zPhmjMycu+jVnEl;ZTw}~?RuwoAoxUb{kl9D71o|?5rLG0DbTml!+wHcBI29?eM}Dk z*wawtU~A`U43*ln0x{~MIEWU(*o_!iTL?)20CN5yhO;^W zJphFCAy5!NPtnC$FtQ8APCx}tf$5QDi4Ms3U*~`E3}^%J3l8$&G^rdl1K75@T+fQk z+bQopLpXF%W8Kyp+uKsvy#`(Nz3@J)DhMgXGrE<_Z1s`I3f-RPe^YT`T2z?O+1#80 z?*^f!fq82@M0CpBX&iif!`*pvxDfzGkdTv`AJe)5&B5T+p&9TaW_{n;UFyPS`OYA% zj1bx6qd1IU7lUP{AVjYI81Cj zJsvk9IBMamyOw{F34^yaZbR7ubG}xP(R(p|0|dzbrw!=z%rvqUs43b%o?$}hLa3E6 zb;J-A6NC8YYVRBwTvc_<=yV&*-Zyu5BZb1F0n12y|7hVhIImlRvN`A$ngS&scK^WN z6rqulR}WA}OWBAbRn=N$Fvv$ngLM8Tgx~_85jEnj-uyobfPp?bG8Y23%U1`J_|Sm` z{|UUGkl9PXf$y&PW z>QD=aG^WD?mU%m8zjyKqhHX(Quq@Ee?y^7*qZGxdzf|r^YqyGGk^_N1v~aUL!Wm1O6(1E4|H*!UJ`2Z9;yLQtbEbjnMRD?cdwzvU#bSc7Z+}ccS~$1;};oQeFfyPgk}k*JT<(3~>iN1q) z4Dy60d&71>g!lj|uYUk^Pgqflg03_&f_M&{M{rbR0=W`+_65NF1iubUNUwlWhY_%}J=D`7=0a;m4etv!z3?QYIgZA0xQpHrrq?PB|V$vYRU;{B8NK(`=%s>_Vo zSY+{(u1`-*A-Bi|=yW-8ao;UCr`bU2Qq#M^>A2Y3)HFia0BGQ5FgYz<2gmoU5r;7 zD=RrLz0lMKYecf?(F&&wEEbU2uQiT?J;v%=2o~~i<>ziSg7?LdcnMb@0Y-SUv(bhc z@TR~Rm;w>SD@wQ~IYMnpphaK=c#YD(CDxkA76S@#47j_Sg<>LF5fJ7OcCG8mfCK^- z0GJ{8Jz{{yo*!~x0ks1$tcTf2qfOZ;-cR6nKyaRgIL!w}_HmK>duAQ0jO({?+N#OP>5%x|FP zr3hSrRIbMMA1Pv^5*^HeN*8tv7`QWo@DhhXHKi~3Aw_|K)AmYHC+HtGLU;tGw{4c! 
[... binary patch data omitted ...]

diff --git a/examples/running-llamas/artifacts/Llama-7b/generate_max_memory_reserved_bar_plot.png b/examples/running-llamas/artifacts/Llama-7b/generate_max_memory_reserved_bar_plot.png
deleted file mode 100644
index 7681b998c278f0665c696d2d736af7d872348762..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 47072
[... binary patch data omitted (deleted 47072-byte PNG) ...]
zoU>vvIwaJc9axI?q4!LlbT?6~@HKt!VB>r}Qo-HD)5THXiyKPVcPz*!jEQjFJ8+mq|izmSfy~(yfJrg$cWtM>MjMUYw zneqa`v3KjB;V^pi%%Ed~4Iql%av|=0+zqLYPx-=EHogB;K6hvepQJ@v`Tw~H{(A|X z6LkCbWmKC5pj-e|O-M^1eg9|5CH3=hexacfpGQ6Ih6o8%GfCUWzSNk~Zr z&$!~e126Ou$Rj$>Yw2Nil_FGr%7BJXMl*C?3Bzim2MMsmeY)`fd!| zl28L2j1>G}GRz}~8(7SA6c~AES4VwPy8gr_(~V1z1hzI9$)TwJl@_sf?q zi5y?5Kp19hOP25h++77;v=aIR01&PgJ7S2L4JyK{U%!5Zm(v7c)q$i7TR z=u|Dh{s1i=cnYltX27zGLx9acLiDZfL&;W^aq6(yi4z;qT7=7FWFf9nWcmh@Om<4i z%`I2i1+9!CNK!=AgDA2TM+h5`*w=ti$a#v$w|BboYYPHu>?u^!a4FP6O~(l41se~~ z)4^PvN?sUQVZ`ksrZ0oowmSFvu?^`dxEQAIu}Cz@5uv=$)`g_Wgp1o&IjRjgEMiVP=`lmAu5wr>*D~GE&gB;6PV^l&lzLA)}FSZ@pXm0E%y~ z@Z!Yqnp`v(c6Ta1rJ6W(a*!8S>G}O#J+0EDrI2qA}ro+yqAmw8Qe4_}_ z;0%5rr6)UqN*R9>O>7Zd}T>N(gL~x39 zWt@jq1rl)(wS~`4#enf1t+@_nF9z14iHV8)m|lzg-oqik`ihsIA198XKohwvk!`1% zWk?syR~*x+qLddN9aGmvBbp79-*D!=Myu;xxLjWKA$U`vD@yBu1BPAU&S8i37!BS4 zO#y?RZB!x5;G^ns2GDvmV0A|KV>%-`IVI(mt|7=>>oGsWKcW}p!>aJD;h7>fvKMX+ zSk|CE$Xx-63uK?i%7^geNp&2(oi*UxIIlc^bu3ntDbf#~6QiS}wLI*yr=&7-3%l>2$*CMAlt}*%9kCeT4IeDog{*$gV#JP&$^2 zMo$CUp;s}715uGPQ{!h4o&huRq4`Ej9m=$a6Jo$Nu#yOiEn&*i%1STuBfpp!3x}F!x{1;P*4HVjrmKg~i8hZHW&-RkSZ16Fu#mV0fC*Fm*EbqH ze~2_2N1KoK!@y2)a$cHbzi4tnHgB*9dR(fpnwiNT8>dU=ivo_E`hZY;8FQ7l4BHl zcuYbUw(>u9YNA(AlGZtN=mw!ewd1_4^N% zvDZuK$P1AySyf~|(IH(Q=`qoQQCZNp4SL{rXeB%n78rv>w2CF1*ZzE1oy@sSzQo;h1K^%z!uw8W4~T zzib!&M6Y=cv9|2vS}sIn~5%>*Cpv9dH67 z;>oyp-Pe}`dxjm!8#&BL`%$wc)dJxsG&B@fpy>Il8SN6+@h{h4qHTnIs)E(E08Hu+xq02^A>d$H0=g r2Hn50oBtiaK*0Np{QO`2_AmG9l|Q%_IPB)2@E^6^2b2;OOfUUEdi$o( diff --git a/examples/running-llamas/artifacts/Llama-7b/generate_max_memory_reserved_line_plot.png b/examples/running-llamas/artifacts/Llama-7b/generate_max_memory_reserved_line_plot.png deleted file mode 100644 index cbb732e0989694c731fd85433490ce073f409601..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 63481 zcmcG01yI#%*e4)@gkl2FF*0 z>j`vLHb(R;MP<2g5e(~hDz->SI68=bk+XO+O^}d|4Mhas$bX36NU&E{7#s1N5p$$S zSH}MC8J#tnlFE2zbkvV9b#y9UPdk8v)8Qho==F~qH|%~7%g8*u_vXn*m!4yEBBer4 z^@nDY+xzk8-P7xNb=~#k!(&C|;-MrMANh#y-ap216zpwp7ggu9ZiTES&-3#qo=+ggO@Z&7H0K4bEuS4a< z`-S}QpVtweG~WGxy0q*5Lu$nU$plU}3#NLpXWY)8#mV%JHpY_bXmD_FxVX4@TsXEL z5D^7ML_B)RYJT$90Bg@%_mAJ)=H{hJhjN`W3u8=xaL_ZiVVio~`kWglJ97dT7Z((o zwd`nUXn1&dvw#2cC-JzQEGK!?XP7v3UtONZywc%MTu*qVgTfetef#$9L@uXLN?umh z$8K(J`i%JQ?(R;S?vij{*fHWlQ`Pn*om%y-9Pr#2@#e*jsD>Z*Gidns;YIjQp2%j) zk`5PWoR>*FSO~>7TA%9r{rhvVK}WIm9RGsNsqpGl<@=ZWOI;mmdbW+J!a*yORZK@m zM=PtVI(m8nU9l|qyzUoDC94`sJxM`DMGX0B6^a=KTu<}!^F_qPJMB87tl0;XGWQ8ckNdCm|>Iu-LCRnVg(lnW|w6)(jF764Dw=>1jz9i`v=WPt|V=#eu7UArC1*W zhlnT5xGzPk<2RSV_0=VfYUxS|`$wee>gtPak88crA39X46BSn1G>eO9+}zxOM7)w8 zKCrX0vQjz01o25pZTooiM$oE7HhQ7o*ts~hy*gW`=XBU;jbpP~nXWrmDwhma8!t6! zDr~sIcNJiMMHUUCYGulP)cuipO^Oy8Jm@@c4a%g)a3OyW)Ycmq8pJ3EGw_r;4(q+*d2 z+l5y5G&D32H$i;qJXU6g8%8d*1oxJr!1Oec*F)0U`c)X2#MyX;>pjEH=*F=UBa^hf4vU{JH#cWudO0C>H~-`GBE7=N2-(>%Vg`d1ZtIbZk+ViUBjNh2sZ4B zO%E-QrbvUq6upV{_#%t3uC_KU<*mQ_VA0rE8eN^sab#&}X&US%gLaqQzkh^RJH^Va zX6TUnFVBw^`k4~H8FWNO!@!i?kS^Er2uCr^BLaw5S=KI+GP;O%Cw~xSD)oO5mNT;q$dI`()P=ad>MacDFNTTGi6;??PUztgXpbTI-|Ckm< z+Qhw!FIoDP)ijrAB#m!k;%a^^O@^cY-}w9156$BHv29~9Jb!>muEUACcZH-_^|FsNJ+np6l%?{ zulrY3aV#w@VLxTX773NNBOxJq^M!g#qhekT4tzN|Ip?E|7(K$ep!9T7Nl8hr4?BFM8dGMAdy5?(oSk)bbUxPC z^Efy-*sb<2rrH_UMG$h@eZ_o4^}VpLC&}Z=H#e8M%xt7vzlJ~g=kV}jBBCVqC{mI8 zav9>u(#iZ#kfmX)#gFwIYF`gqY)w|dVtxp9*K{c7?YnpH$WABKwaBFs(;T;_l~q(^ zvZSAvZ8${*6uhIQ9g>WKf;G3e__p!WO}P0#)6{5~OHvyO9MmoY{>J&K%;E)e+!nlwBcE>TomoT6Fh zjC%8?Tkjp{FNbSGAtb&Y*OyA9QVEWVP)gy-88B3cHTazm`3ak4yvar!XZ6+TG7+c! 
z+HWb|%e0NF?f!JELd`nakd76b2B&>8SZjYKC*#+Ka;f)WIY_08M!b6U>L>gumc`6) zOxJa(6Q4>U=QFG=-pd2IaseIKN5Y%qWql(f*V-COpE-|-)3hcE3JNavdw2(P6tHgG zxIsWbFj{W$6bUBe_tX?2Op|ChMPpy8utK26#R&rxj0{2F`z!0~VZ0vhs-?!sM<0IY zDCDN3rly82t*pF*@u;$29~MqHhfO_Q<4Aw!&YjAdnq0TeZ%^5>6!V{ymzM|NF`&S9 zEjJ(IhcbL}zTIFlUYhOaI#ch)z{X|-b(72a;H%UA(%kZL%TiZd1mpT}ekv3JPk=wt#+#&E# zuEbCRR{L+*F%c4fp`|sqw%($rFBIP^Dk{1-9MLK@8Svhks(FBm+X97l=ip%B?;rng zTD8ZX40_E!jC!6`J8ZTeu8%OVvM%)|3(#oQ$HO9q?GZy@;bn(j2AdnP=T5&*19ITu z;ZaGPX}Oq&_9`CF9)g8S_rrXQv9q(2-EoWHt=~f(Xrvo64N1^A=hoKz3Jd9jx2%~> z2fxm@1Pzp#DYZw^b|ee<;Bnefz(Vd!c&(Hk@9BvIqx~rax5i3*Mn{#$E366?3G2jERi8kQg2kmys*zwu3T^f5a9z*Dr0wgym#>(Z z=3zO+c;8r9s;;qJzGE?7nrzgQXtzC`aGzAPH%Eb*OguU`+w8hChKbANNZf!cvf2P9 zKaf%`15tbT;NSZX9}2=~i-?HG)NBEm5>rkIPn zAweyD$;zrYrT~k{SXWo~!`VUho^vPOI!RQ=?$OZ_6yZ|KsaR;~h>3%>a0>%N=Di=Q z<>WiK8z{4aWMK@pOX9GZnj5aK&Kr%y`%>O=I3G|TBO?!H%aOO(dPKn@y+=e;7|QCn zrCy*}Cwg^piu_^s@68BmB}I2_6;)LmXhksnqUPr2fm74V-3h6#r+Z^dBUv(Du(Mym z_1(`mE6fSKCwZ_2la&fJ%_qul^YZeltE+!FTzh2H9X~gmuSQggFU^Uz&2wYW&4|Tn zI{qn}Wp9O*7OXZQK|xgEK!WGY%<`0}1qCmK19!F?uKTNOjo=+C-_N6UMAE`YJXNo> zev{5{GNCwrC_f3hH?~Qm`(0;a&n@gq20fGmk*{UqGh_lg9!dX z+C|i?VWZh@lwBkmxwH+T&T9^uiYu7M|bSlz_d8KvQCo zG|5m1%twEGgi6?(t3*FuZqbpaQnItVI|s`j@9EPUt9mEh39p6W6_oN>cY~gB>q7bX z@Zp2?{*pwxRLD~j5!gt$RQ6$NH?_i}qwz3;W0_4tDd{;mRh>`w(rsKS+O7cPM~X&J zk&=>=Qx81E{32gws?gTgCleFd_gk&PQbQzJnqmO`4grOUsi|n8RzoC-Na)|ce@U2_ zn6h1tx2RY@`}l}uWMojS625i_M>6^S^?L~xzh1Z7j;~o+c(vFRfi$0=~4~DViHI5^xvlx~V6AOcROM~$d3ndX@vziV^ zT3u)hYoFPeqMMz(nodkiq-t=Cpi&@-5hsH_BDNTh^@xfDW>74q+;&+KBN(bmif%$Qt)Y5XA0<5L62E{33 zMtsW504AKnA$xhLnp(9q3-ujrvd+C)TpH!?p-|GT+F<)RS!Jmy3JQKsOHF<0hd=hYEGh_LQ`=n_i0zW&(;6Rt=Yt ztE#PQr+rY4+u)gh$~US#l?v1;k^AF0Y#A2~!XtF#U_+&6Wl9u@Qs%k^`Q4q zl8^}YkBlfeWFBqLXpL9dW`+j`2aA*|=BrX2kDQy26~9GE6A2@u9Dv13^@u{6&o3ZA zKEz_Gx=i2H^sI3X4Tu9`Zqaa_#Zzui8O_6ZGYt)#YXq+GU@@-QA~r-jYTE963-z&t zgoG`Db6B5!%=_z^1`n-RZkKWxS`sBCrN!;6t=H7BZCdEACu^^ZbFQy1w>|C?!#)v> ziIkav$|w_3=eV7i20)a>Vq8A7W0A^fdm0D7C&tkpcJ4dCBr)zY(Af;u+Al}xwC%SW zRm)68JWJot3mJ@WPEaaPEGoC;Q8z&6OWdj_(pLbn%eID&|H(fS)qH5=R|;#e)h&tANI**7={NPt^TiQ*?>$|yId z3fn$^MukVRRm-SCN$wF4#Ot}4PgOqxBnh1~Q`aXh?kTWSmGKZlz$lAQVb!bcL{8aq zx3rL8I1Oxz`ICv^&Pq#}spPYQvt51-4MhO*+8fdGm|I;H(bp$28_sJROgMbb%)(+a zR!oY3#GVIjL@*c`Lf@Xm=<_ZeZ;hAbwr_84Z3U8u*lmt0{PBh+on@B|y^#ps1v*w{ ziq+0+#T~bogF{q$b`{6w_+T8vex~PsZ08F=^ zvdT|CU5m6oSdo#Hm0g*rC=^XtboeU(2?jL5y?Y?Tr+0QQDUoiS8R zw$r2)Jt+3j{$IX&6$}mEO`7ic^EXb{ui4pO>9^hm5ZDZRG`u0TdYc9cfSsKkJM@?G zrmr^pS1!j-BqSxt7#Ty`BdBlapH3V==`!$ub_fI(x|5H$cTUId70cS+zbSi?KY)<} z$c!oI0}?t9f9e-IE?QW6#>~hlyE;Z8lY$C!!v6Yo?t!u4#dX2dc4Sy=Y&!q|YisMD zot-o-&yiK$pC4I31s8x)xdNTOAgBR~ebhEexA7gf1~sJF9^Pvb0O^1Xv+Uxgpw)?l z;=OjDfvwmjYXx;BZX1{0Wd1_$;x}sZ=kaCj2e^}c0IYcysUCN zFZWB;HZqD5aaie1HXkqb^~a@$##|~c8O#aXGLrY|#0%JA5!3?J3d@9a+i={5itKDk zzKr*2h=$cL0EmxL`N#LY>dv2IV=6J%bSgu)JpmAcoQ059faezj5Qg@$3Y9%_Z+m`g zs~gmx;Eu)5pFh7cXnzQ>U7^`E%OxMszds;tCYZ6M?gW3RAh39nVfO+hSrS!}T%H14 z2RrYrvN9eZ_;~jBpT2&*1+^-|tcGcA9QrNkOn*OP3+_cttp``y>s*jLka@Pw+P5xv z^_yyvg8)T&gUkmq2T-dovgxA8s~UZJ%>mDeiTO_7$A9GP34$&oog#=)#?x%$Oy>R- zT7QY<6ekoVSh@P!({-Kk9ATgqB8JQ&Iul!0PtRmH4v7)T({ees=Bj`V$zkJD7FTr1IUECA5eFRKA%49&2_x?gp~o@m$>827{2EIxz~$Fm^YCGys_IH zetCL%d9}McXszQ%BqrN}q!q&-h7|KG3(0X7Vc|3MECdl=8x&D# z_K~J|NQA9zYGM+T8t7;;QBDi+u5olU8d|PFF^3nBn*l(?fX;rwN&~*jFDDlU2*8Nr z!^MopwE_<_;Q2)AI7Vwn#}?r5fCzVX=94foU6x2x^*qxkrJ56vC*p}& zvgw$JO4;Onn}gF6`u|_h6xC}#$o#2lWp4r6Z8YI00~pZ!aH%5-Cq_IY@_`z#O3=Ea zIcyCx6muqnHT>Nb8J@wm5dpObDi*pA4w2z-o(f{c0T3AY{^mw{JWQ@Ap&{1|5NxvJ zqr>AIhdq&R7?1vV{@QVC0X}*F;p<2k_6LCBt85l+(sMz*i+}AH2z*es++1aGe?Cgf 
zy>0KwWw^`R1hWH}>B)LGwR{n7j*tnLS8Urc$D3Od74)DL>w*+3pDlY6dOGOi2#q>& zAAqR%odZBc3S^3Ze*f-;TLPfa1#gI`LVm6w9Rn%-X48NGVh;)!M}dEg#Gw9NT>PWu zarpFLN;F1(WQ?2XXKM(f>PgK{k2&h2{gYM_Y7*&Uc!Xl!iGuzpc(8vhqeK4-3SoLx{( zN&xhOU>g_~7ABeq`W@(oW$BsvTfThV634D#3j=XJ?0I z5%VQx&uOH;BOx>$7<=eg(x7{1NG9a`MKZMS1f4Jhy_RV9_b-5F%4HbY2f4N0`NH1_T6c-t(z3VP`H>FD=M5zhwm zoTX51+^F)nz8X=%&<8sLG@jmcFiLAWU2hX3vDrhB4T14BuNjzqv?!e3xd_` z(OcEsegur98_Wd}hwgX|#2$jJ#Nf22pQD)H4mda?elhwf%z86Ox(b$`j!y|1933vs z)(b(he@v&I70LsOfIj0GI#K`jK(YY3<#cTuY)}Bx^7zzy4A?|G@j&(|TU4@L@_{%T zRr@eLtF?kBMbOzB59K_vXTVK~;tUh6UwxTlS1)c#7$e;s7pwz24R3H?C*49?>k?nJ5-Y6tvSWqX^UmRa;lSOqfT^fRv`jAFh+KI^hYrWgKuD141fV6kAuDeAM zzLq5nFpz%g4h1D8#Sw~ytwi(wG>j)to`}aXQ_A^|x2UHgY#|`YRZqMU39G8m_fztY zdmKTyqoQqro;;YZrex4zJqw)*kni7%(|sTvu-$U>RRRInx9@`B9;465U+fm~7?gEk zgpva!2GKrjNbib-emL2A4#J5bbYz3hDF8=VAlQOvngSRO(fDgW?0y1zwSTZGSN6BB z&L7qgLc|d8xf4#Wl?W;fC}1MS#uSJa4#-D^M}Fo{TN{Rpi;FilK0g2>GBPp*L{}+S z)W5g|8VX=)xm98Wv;wt}(e2C*p%B5}`-X>?04gHx9@b>;Z#g+1;7H39pq?NcIS@lB zq0Mu-o&1zJT4BhNm(J)*e7fK#@p=V{i(!XE4gF1}bvWlsqtT{Sm(;cz>F5DHit z)TDrC+_AtdWHQ9@?-22H0x*Z7$X8zUr}!6CIT*SU0ESPPje|F%+3ZbvzaxVR40@dp zkK0+6TRx~X-cYRvN=@XT1pE5?>p^)0@JpeT+1jyQ06PI2pYb#39$?ATi_>DRMH+xO z82nryn#YZEdU_hM4^Yw3Z9)5@p`jTpU_(bo&rvGuhJDiv`e`(q)ie0uN-CtwwpvPB_Y!?|2ayq-ZR!56o!k7t=g%w#&Rnuts!7Q6k*T#U* zCsU;R7Q8S-{{!p>C@>QK_!J2Yf~ z@HVUcq?nIsT0z4a8jc7L{|!xCrP|IU@)VR1@fb!@Ip=!U({QooIx)+wMyWp&6LA2= zpltj3`(rKlBrU@g+pXVmFo3({ustmSILFt|uicI}`>CZkT~rCkpa;HOL@bW%L6siY z^#>sCh=3#(3UXB%@DUCU4iOmFP+0%5B3FQ)LkkNF_4V{#nwXi10e}y!sHgzO!0)afMR*=4Atzk4Ev*XhHDch8?cH_b=g+w#M`y1J5``}Kts9Pn&lZ2f>s8QVPC z4!%*Znx`=EfN+bp(=0AmV;#U!7rNW0n^ z^0NJ6BavxaQx5t_gg&E6Bz3R;XR{8|l8TBb63X>guhyo$pnryfg&dPTxv}AX?RF2; zU;C{|75Je4xcNY9R-y8n7TScr?=q~&785$j2l_7%dfAYS@{t$^kKLM>S}~89i|Zm4 z#RZcm5lWkmkx>g2YCJ~$))7|Wotuaq55OIvRa9Y)ygaE#H!PlV8&s|`kY6Zhey-!5c1H`qyobpr8yRl+}7?5s8Sm$}0l<*-gbP@IS z>(^N*0SM>@s3n^PO^*2P0wHL8Remv4Cp@ctsVyU{bG%<5XY@&G0ro_x*~lFLWmhNj zL0lO2|K^jH1q1%O2$A0ZI+E0kT;G@SjnsPh87-KGD7+#Q&$z6CR9;t3iK%|2q+p({aOF zsGqE`frgis{x(zHak{G%$+r^`Oyb+yD-AMfLpdeN$JwWUe*OCN;;r9T<>F6V!jP+K zRC>{L%EVg!R=DL>jCepRq_S4ug?p`#r8EY@E%e~h|cKvxZjrco5UBf1gM`1`I@5}0?=zA#ijqYnTbE(!Q%xML~N`6 zMaqN{@m<$jy8(Ay>3&C${#ODF4Uy?;bb&-cf1wsSpbLIiR{}OGAw5&mqq4613w*M= zFIp4}NjRvG>s-ArCbO~5(9pCd^0xibKBFGds`AO|bDcdF`f#}FU!OcT!n$5F?~*0l zhXTX1-1_ML8!4^#Cz5#ZbJL+MoX+CULYn|!b-54@$1T*v#KoeLfpk?C zbXF_Pgj5T)^1SD-c9?4%5?->OaFCH@m?jLI9d>}(^j#3+fw_gj@87lEvRf;CZXaA~ z^sEYl{QWsJs+ncP8i-xWuX+3%S_qaG=Zk;;QJ9Ax7u-wi6CO#2WfJF>qpzF^*3X_T z{Aq~z`Ypf7--EiSVEYYgz_Vj`wBST@O$ISTj-9^eJtp;>NDZ5~(bg+e?i2LAuJ%ae zTfEoi1^^6a|H^u2%i+3(Ceo@Y4*bv&`}PCDp9kN*?e-NOQOF%$+L_C>TW9r57N~ju_jfYR_|eJk4FbZ1 z_ck_v+S`RT@yta;d_R1+;Gd~aEshwg*tR-5-sbCwOyF=mdGgvZFGaMfjBC{0L z(eZZWB3<-`i+)tk^JA;|Rh7iClIs+c0lQ4Y<1Jcjg3*v0=mpZXl~}>gP-|dfGbGeH zt881GE;;>Tn4%0+nwPqAA;*VQVOggrf^&yfBzHDGP5X+A3Qyez5Vo#p-N&<5IVpa`7NbvS$~TO$=NNedC2`Sm$arFFZ zx0YgHHAyn1l%VkXL&k3htT?`Q95L+^^yK8cxvpc^>m8#VIj;CQ^eXtPlsUy?bzi^H zNLZUGeRG)z3O-n~fBb1X>a@E)%^UlBFo|q>PAkhHkF<}7aR=S^=PuW-jAotj*W8u( zL{@suh1tO*{b{B|dBU56#+)?9!Q@koNo#sv8DR0TeIgMd$!r@6ji4r3O03;|6H}FG zDVzM`X6zFp_o7u3lch1L8+YzV0)vQ?Np+UXmbKSsye$;C@h3}$DZHEL=U7RLh^T0q z(6{Q*(b4|O_X=PO`+xj+hVzG7se67wUUc;W86*Ton*}Q%=Qor_Y}Bi464!>-a|o8_ z=GtHd77x|9o;qYm5jAd3Jcm$(Lur|PxRY2^cZ$`F4E8gwkT7x_21|7h_p@wvfUR;J zOY0+Ew{UQPjCsL6-Q6=_hv=R`XS51Chr!e%x|3IkvZGLAeuTn^Z$m2(YK-JZZ>X4< ze0g1jQyg=gT8rsTpVkt2@#}Oy^}5f|PWral1KG?*2&&je;vSI2Nm)!E3d6K`dOoG1 z4Pam>3D&MoTRAAh^J{;jH z>yV@9kNbXh`1D(pk}c9wr^?$em|sR~9H|L84=#G=KGF1c7(vLQ+s5!u)9%*h@#*b$ zJz2R2z5%sw2QcdKTr=>yCJe8-{XJ|WnQXr`8Yv&mt9jTB<_o*|Tj?Pq{WGn%oK<59 
z&b#Dj$8vRKu&3Q9g*mQ;9#NW|&nc2}(@K=Jciv{R(tOFlfDBpJ7cUa^EG`{r{st00 z6_J!|o|w>xArrZe`Q;Nj&X%rKlC2o3XL|bW#ztI)e$9GUCOQYck4F#K)Ka!vLNW}( zFDr9P@!Yot7dxIFC(I6I(nAvwPpaJcSY)Q@c4pZ9fZ}v|O{Np&@vZuMAhDi}tD0Iw z_$lieq#eI;QxtNRr_zl{EvS8R9reb}?%@q|2gpPAl^XY9@HRL=zoy)0vrv80ljsF? zJ;&y?&BBwdEtb(*r}Xi~$|{Aqqs?{DjwlA8rGF|a!Uznc6OAb`3d$yMTpPS+AeI;s z!r^@eAH6XjFZHZ}@lK)IWS^UcEFU;O&TkAYFuUSgCCa+^U62FB{V5yT0lcK^~H ztdD$r%2oiq@ws6)fj~>3Tkp`&&F=y}nqZ01%Vl8CUtLasfD#QpesA6R&mhiYs8Yor zcEz7@Rd%cm`6Je;M$M7ZlCa=s#C-|}ZQZawKeFfNFSt)gc+(R`b93TY_h@4`p35m3 zB&%08`p(f$t+pQ%84Lu8DxbJz+`N;yLC7Y1fJq#dnH-V9SuXmT7Hz;4*O*mzMX7l2 zJb7wRBw8h2@bzJtXtjeWv*BZM6E^$)5r&P|^Da5U_t2J@Ui|ubtN11Tub+I*6wywZ zmiO-^<%yRt_UrLUMD;vIA$$3|?Hm?Ei0RPUD^q!^JA{}^nMnl5x9-#fg&l&l4lXkC zIfo0YCy?kK?ZB_MTDrO-A5l9(I|$O!dJ~zw&ai? zOnd7e81UrqwW_L>a0Q|bVmv@YbV$sH6oS`RdbR{+?3R;AAZWIiTabsFtmHvlB%NhOGJ<3zdOBIcF`rI(#5!hz0&83ts_mch9dF$LV#CYa zW_2IGJe~4+b+%n)FF|RCcK8#Kf$ZD8{Uev-ON$&@X6xbs~DjpU|+JMBS|-4#6Oi=YAs{7)qE_n4NjD(iudyWit< zEmpLjl57F|_SCKh^QfP;aS)LB7OLr8ypt8PxE+BKV;8MJ(~QFRQ*%Ocp{3$|Vou-A z?;i4#W1yla2x9$na--gal_2`(%w8N4vztr^iPPEV7);CY6*a#hIn+-l++wxvxi46CC|NWE}O+oY5=ESN~!A&i_ zwZA*|w}DUC)^ngs(?u~kY$$JRZ1j$dAWS_7(d=7R@`I5MF3nBMhkROENg##6(&2u@ zEh8g?;Diu$ha}_5cp3HXZjQ{~YY4a^g4z&Kg=hyNXb`h8R&7rWo;jSCkO7ef@)Flh zlIjS4hs1Terh9>U-gjv5yRlz@Ok(aWRY$@v-egN6QWQfqeIu+g#3x&)O9Vebrv9Pd zb{};3R&cH$?tsQ*)Qw1hipD^H+iM~6_+hsy3zh-{GxKikzjPlLB75~z3LY*?r1ng0;xK=cGw$%U@i4R=<<`7mkFrZV46QC zAzA2<_x|T3jo@1!h9g0b7tAa_M8oh@-H?kuhSZ8O2ni&LMvxsDFa0nq;UjL1v9ct+ zYh`5xDSMS7-7k#!pr%2pt4hXsqRNI?@914LJ zEI1*N!vE#qL>hFC1xU~=f`b7vd@#VNSCD)J*}ku@4_O;>Z5xwSVH4LgJ&24ZL@A-r z74}DhrXdH`U?iPJ?0nVN2tL%YkSOYpxAqd#(gfji|IC&X{u&fCztm+cF40wAuA;)I z%}1`Qr&VIJ*j;GRP97G25c|fXVzxYxK(6Z3^MWu+rAm~E%*2-p0#Wx>kFz+-_D5Lv z{a+}e+|zH#Jj0t&$I=EvkHcXD4+PGev!KI5+RGd5_7fzqgazPsv(+jJ_VB=XoSd5C z@VMea0>s|Z*(nYf6f6K-Xy-6<5sV0bjMD=Ngu%%wTOUZ|r+3yY>bL5Lzr5@EfMQ{V z>-D#*OAGjM8}rnNcI(#{IlGO~;JrmGT{G(W*~SE-^yRUV%!kw&0tuW2Dxl`^MhJb& z(Y3PjEGbcmWYp0~wp*F~0!%brObv`5X<+;E`Vd$Am}%s+v0<03*!8h1PHEYAB9hkZ z7B*K`fyVN!JMMwLzS*kFrS6xEfaoY>exa(V;RA#!34iTuM>IXXH9E0A|5sNsicVs% z|GMbpL9QS?cE;)@7M?Vz6g{F9C?mq~XRj$qtLd7MEBUnMFwiFR=futv`cQ*Dh~m zDWZ$>3Ezy17>nJ{?^2uBNT&=!A-eZC-A7zjx=#a#klMe{B8P7$X)ewdslt5hm-p?b z;7Y$lcn;ukfJqN&oY+DO)j()Ygs|im6(LAaXc0Qg8!+886-~7J_T^lRXtmHU+s>d9 zcUJNrk_l+w-F-p39c$QL<8jRj(!|DSkxWdv%kkrl&BH42QW+&eNnl64-CrU!9xtVh zW6f_(Giwd@Jy>D5OP(pKGX8#VK?khgwofos_r>CXGi}(6q8J04TX^=LM0C?kp6A1P#D?2;h-z7r; zFgcRe(D8QmpT0TWizpdPh>UEme18cg@o{~9-H!@-NdQR@7iUy+88qmQ_YJ2q5b?cF zhD|grrhUXl=!XglOeL+(j`@c#FoT_g z!#5W`@z7%ZFe3lqtt&NG+dH#haz^BPB5XA<&JbY^C`UK6_4IE1gm{yDARIFK!r0T? z><#guhzm#nrlh3E>lgR$TH=qj=`S?&fSwBOt&Am{g=#xI#d6ms!B8>%LSrCgtDr(f=QV=g+H2FrjOIYi5%Br6I$(r4-+Z_{nEldp!u8Vm?%bS? 
zv9U2=EDk8EeCJ2EE8Wfq$Uu9s4RATOEV7uucuadngnq{ya^1d2V0;q6h2i=Mv$M-^ z{Y>pftL1sSu6mt~&}%F{jkow`%+GG~&BpSD_6g8)__cF$2b;Ls7E*tltyzjw!(s>N z^4qbc9mE|$Gev?UIzZp8!9{~;B32sCXKZZjg^HQ}8b5Gxf31%cLM8+g>@y@(RMhX% z6p+PHYDXA_;7Nm`kBrD6!kHmBK%!1*3l~Ad#`|8n2=f7<4N|cQdwY90fzuV|clW}& zwf{clj~wmk6@GrEIsBW$3l}LK)2H~M)?dvom#+Z_;cgiO8|q@m7K zI?4VAO6|U(kf(f`x8WYr=mhR^e|iax7Ee?x%X+qft+8=+Dgt%x&M!+>S~FZ-3_yGx zM>472zv{Gw)+{4GzbT%Bx^IN-yP$6<7?{44a^8&Qqxw+dphLcq>LDIDb+vFv+A-3& zAPsKI+RJ#=G~V@=EbN{;edsyPL*g)*j8-%31b+_;xtpJ^31%kA6~G9K;k2g$OBit| z0xE$QhwV~tsfn{=7^K}GLj&g87dVV*3(;>^H#f-15CcX)hPYmgmiy@!aI9>>uS6U* zfwLKaRWh_24{$+dw}3lJhVUPp{Pcn#1rwYxgbtZjnhqHV0V{JzX{2Ss&(wd@h6ImT3fo-G9*cuDA*%1bye}u$=7ylliH&9kZ~I$yNP(E`HZ$j}q1OqzU-W&76u z%A&blRxk-pP38m#-|ZhlBGmzw<&WK_4ickt zUl%@z%JFe9McLx%HvZa6j?!p^=yKn_eTxBq9dQ6zQ3y`3nDmp7NyLWdi-W-ntxvD~ z6bVuVO9!hs9=jFBeV}owx=ITXw?5EkEUVF-d6?rh`GfcgMo)y^)%06_Z}`Oua|ro% zS8e%$7X(!pthLEO*y%!8-uq@Pjb3lsw}wFz>MdE;RxRI1vzg42sT-?xB25*#EKuYC zO$aQYY>*vero=UF7PGO1S#V4XZ4%?0{QSmpz^#q!S~JQ&y}j&xxbcM)XXg`04`l!z78OW;c*z7IG^Wb2FQcQm_`jqd||Zc^5i@y)7FT0@x$7$ zFaD70Zw(_4g4~0=!&?s%&ARHI)`6z4!-GFrAEC)>@ym4BEo;cWi@#dGj~PR@RD?q@ z)o`j*1RPA}WJwj_?m3f8u)L_ARu%8LExex~{mQ24({IJg1pS(x63sPG?BSSDxsAu) z-v{qYXjSdfRLeL)ie2ta)`sjFoK@Ft{B+1{p_))_Ux#sP6rt^~v$GrYPketP*Br3> zl31JfaGgzeIPbmr)EVnV7=%rc$}PDihjN}7o0=9Ic4-O)UVH;cudS{9&i^qXKIef& z4`0X&2MCG;fuvEa`^Oa|jo*1H9&>YZr!vdau!O;3hM4gKIH{*|al%1W4W>p;ycJM6{)H*eaTKa zv)Mh6mWNS8{2mMK>Qr7XQO5IroW&w;{^#o>X7`CjI>-7#Y-8UWi{4M<)U-R=NPDq& zl7!3UL?ui{NuhMWf>uD<5SR}di}y+AeJ~$cZehC~lu4XyDHl}`oj>Y9&LVZp)bekn zI;9jbiJx%A>P?Y8FxY55Y~}X{xr5;Ao0#>@jm<&S zc7-0Fz|CciPf}pk{-eFa>-9BjE4#CP?*2A0l|J|uV964xXBEa9Ho5*)@QNdXhv7HF z6$8T$~wsVBX|fYN38f=N@TwJ$tVIzyQT&7j>?M(rj_BN*!c*uJD3 zBPn$6FHYSH-fHmEwegxc;nKyY=(Tu5jBY24An^aU;oI8xp6h=m+ewd(x2ALfc6Uba zk~cQ?WNXyC32Z@hG(&MM2%CSbtK)e@{a9yX)E)fIg`ADjL6YW8^B#+QX6!}GwYZ}c zgKtSC;SvFEO^a?gNEAnV%wJp_+Z9y6HL*ej2zCfUAa=2eS2NL5vlza&GqvB*>7)$E!p1 zUdF}S6!^}wXAQj-+gauYN1NwyQ1p9?4U7O`G9-XHE{c{9h-&dxzPvHx76^>W>{?ClF4X!lN+zKA+6#N8shW?CuX z_-MbztbMXW$!y%qEH{05UQtxRrri?wt-W-rrb64Qum|ER5hkG|MFt&Iv25FX0~r=T zA>NW{AdFbyppWbv5$*kfU@O`lZ%I6+Q4Y#AQ&}4{`PrKs2BEimkbyzGn@pc1` zcbW)>htu&0BHlh=!5O-^^YA>oIL+jRmHBH}(Hm@GB+wM1Upq1a5N>R2yvxR>rdX(G z|Eb22$TMl?R(n)oeEJs9)#h*OJ<$v$JBOQJ>fGM@;W{SEq(+X7??1n|BnRRnfQ0#| z|K~94lnmtE?nCQu8X7_&Czp$5EBte~{-%8%=18Uf!fI{A>RplUk$j=1&s_uc1IeG4&qmwrWCUgNR!@$B?1jQ?W zV9Z}8)eww!f7z{#v3c;4;P9UY9MI`0(1-)X+p3@fq3@Qq?1zz&!|lvCI8u;R0Aa&h zHpBU_nyLljW)OlpH9w@&s!wp(te#&|`~v(Hjdf_UYo5nNs$Q4#TS^4uC&WBHa#dG8$fqDkt%yPCdesaPN z=@&riMx!OmC=L#Rb}Kue7r!4{gUDKAYbz>ke!SAH%b8>ybFougPf_1=ykQ#})mW7V zN!ieT_J+mBNab@zrzYA;_x7cTPtfj~0kE67W%W>m`HpoDlUscvGHe!C3yPf59N-?gv4>!;zKefp4TJP z`LLF06;hb`5E?Hvi`rMM_37?hDjbR8BWt3qzuTnRBcvtFq;}fjvK4k9cY3Pq)*nc+OPC_f*=!7I@jdl_9Gs$Xs_&y9di_Hf48-T-zOD+_wFk>4GnXc zxw<+{eZA&G`mJBHjSqOTj&*mQEPdV-2yLTyVAbqNCiaS{l_#LXU$|(pYVST$W@U-; znIZ463?!OV_R?;`K_Z`c9=F(*AR-7UeuYC25Zp|KlQ}>b7vJ7PY4mueaof#dsErH% z1$BY2mX5gfHo;;>xSBYnZ3RI`ml;@DLb|#rfFJ|~xmw1nJsGW_4#77r&2L_om&t{a zfVhJ5Z1T@^eWj{_vg-I<0#_N*&Bcy`-C+ED9Ysaf_EPK@FSWGI%|F2xr2r^RQLm~1 zDaLjB5#|4A6N~)ecnx;#-u3J(Uxp-M_df<&b2AwcjnPlfKtbUK-Tt)Ghg9-#I$Q&C zQ2Y?U^;3sL*5>%-r{rV?kIS+f78aZ4_S034o+6HlAvwt(Z9ma*dz@lTxO$JZcgQ`8 zpZahkScmZam-}^UDap;Kj}Tu4c0z(~4N3*#yOQAa3F7b?cp>mDAmnh`3tWTmnVG`y z4J~*)uIz994ewx5-MuyaBCKt2%a1mxf$DMfuwcC}@%npDMjHEkO884`@kY%u4?1uQ z2<`Z~UkdI2{bPG^G5&r>JGi*Uejww~!3IY(=(gDeY~W2oK&vfFrXg1$Fi!2!)rlTe zNXR{4=*ZUKLjM5Sky!NiaB1lP3PY;cJ@ZV^A?|3fz zzkk?hs3am(Mk0Gek*#E8XOuE4p%gMATSX)zo2-yjLXj;a$zIu6*(0(^8Ta$kd0p4{ zx~|7{|L(_q|9CDDHd*SEqMY# 
zBiB+2@RjNER3G#DrF|}ks;lcxoNH??45w5?ra$JiAD9V{d>9F*vF z-oDD+0CR6~vBj_quhejm%qI9E>I7*ezRzy7_5h?I6@X_R2Lzaccnm-Qdge0p40acB z58Gt5U)ES3Q&`{X|3f;}y6O^6zj-;f)XVE0VU)bz_Q1J)Ir!s3_XFW;asp^YR54w} zaEZdU0RA98M3m?kL6W)AlY1B>6mI?DFP2?}cLQu`EiPT+M5F1n!b?+NkimO`g@wse zFK0>{)G5GB-@gRhLRsJ&eWI%Qy$Q1^XBY*e^Zc**Qb1AuAYNrVFJpM!EenuXW~x`f zP`qxa@&wv!e)O2v0QO*dYaGo?MWueO`T~w=JmyC?^nbO!Q+TGh8 z2-GR&5fOAL_nggS!i{xx>HaZ`ivs)xoalE~(l(Mav^seibzNWWF4>MdhMzwkQP{D> zEf_o1aaZQfQkC7zH&ZAj(t8s`Z77NUH#^Qkh3MJI=#oMV(yLzUwUYvZ?ReW=R~=m1 z@=EgnfLAoPtY>}HwV1R4=HKf4Y4XYIK5+E}fChrMIw0kAHJ}fPm)c3VD$G57K`R)p zYSL`UvFg*p&#!u|JKnMrVMEnYzjIK+ffmed0<~degsnE*4izL3=BP5YFW=6s8kfJj zD)nnLasj%}C@R*MTyb~KB?Lf~FWviGPq;mzat|F7IIn_4`-dtHI_Gl?J`>_?Wt%$i z%vX2qZNkFbJXE&nVXx6FJhF^SQlvoo;qzgX)f>fI^-X=|sO< zS*OV8lcymJRqq(3*OR0D9vhrttk_(-6(+Va{HM2ME_P9Z={c81qVkYC18t++OO~<2 z?!2#GKbBMI|J2=mkn~}3OuE3Y%{3in)~1RwDEgrdzzdZG&PPRmjQFWwS+99<#IuZ>8M()odmC2bT}?&ng*5znExEriovOxq|_|b(lus z37B7~!SaF&(tlvJ9GLim{B<7YA%{e6`(|gK*jiuIFzUvV#=zhI#rflojvS5qeInR2 zR~iJ!mnZ!nDKwVH2u$azSRA6Sk}6LzySl_MUFunhduT$~d#j(a;KU|KhzK~a``RcS zG)&k+z_y@-)&uIRINezBB>Xs)dwjY4B8||&W5WRxIv#?R3+AP@RYhJh{)6gQet)YAyt?z<&)HQmpvc>t zK65mYY{E|HxR4MD7}5a%icPLwB|dFn(3|>(j=*d-9~rC|_Ae=kqS?FGkHez1xORUn zji!KAg5-tr0zOCLcY*m{ZZW@a=3V+fTg_l~}cNL0MbjzRqLi?Hnz|A-{hdQuHY`{Mqnx&}jE&|Gwy#WT*Kl zT?Jh2sKlQf;DK7c?!)qA>rPJ~&IBie&Gj`wA)zG9Z6Q>Xl~;s%0V**lA+MO4RuZOe zKc1M~8Tz~n`?E3qb6sWQ=Fkbf z^$5(rho^79>|F(H@X^_{^jtTJt;hJiaTum`L>%TfEc^^bU3d@Nm56S%9Cb@vg4iGn zj##-?eYN0=IIT<{gydJ7NdTTq3k#Tkdc$|)Jsc7)W*Rj>J{%?lZ_3Y*tbrzA2X_n1 zq)tNxBWgeKRLcw)H;giTu@_LoPbyl-6xLg8M~@C^zfnC>ZO2v+`yq`^Nh~)Ae*}*# zH0kww?@5Ij1j}e0-RF*rU&f`Wo?FTH;N6D}=SYY}0S@W!qNiRp7RatzY0 zQ4r;!2h{`vg-bt|7jBtmFjZBJI!+8x1DGLx&^1F=LA4lF`zRVkEcJpX|BW1vNeh7- zkCDvM?CcZ##Hy%HdgSC1B!EWt6U|S&_nf*jRxkL7V&+lKU1CW|$?Tl}pc#ba*ga$X zhaUgHr%C`be<77bkCB+AR~#{r!T6qj~-?lbm0wxw;!i!OOuOVT+9oj=7N<*0Bct zjhdCs&U2tvp9@aFFH`Qdi?lRX>;ZGEh6S;`gM)&i;!X&!)Z-UCbop!=*GbZKXsq58p&P(*Zq1vx`XGEPAj$@Vps?~CSVMGey z^BBdWg>TUYhlG%n72Z|h()t`YKfGasUlxOg;$l~9v{t#})<6#B{avQsBRPOTJRSvV#L>T@8NfcA*yLt1b|KZLAK@+pVA3b}(utoox{Nawhp~SE;D-NFy80{Ow?LSJru()%3~s|5VVr4v{N^NVxVS2Fg18q)kG() zf8xiDL0kkhX^nLz9K`brapxbyRC zisOv|T-sj_DJrtMZ{!@sQob+-E!%Eu>-Hde&%(QPfa=(YY9rFCyWVHt=*<@v7caEv zj0V96MaUd%cBXssf(>q_xr(gf=W=)q7n?D7O(2=5_0ezb$Lb93Q_ zYrwUbu^1Vg5WLC~jkhNxB*fR>UyeW0oC_}*5A2EpLF}}3ImMZK`YCk?8_Tg{Z*sVX zhR^AV9;kjZYDmHsLaTn3ia9d7@%+H&NwO&$VXf@KPmlo$5PYX;!Z)6^?ch= z$tQHyLlZ3?*bdJ8St*5G-)eJft4aBTy%2&6s7x_GY>^T2VDbWT-#rB7f|lcE#9gFBY%GJaLs+vNH~lNn)H7; zsd~;A0%YUv;x&37itWRL^LyeXPO_ zn3#A|bDmEaVY~LRcR@9dX6-?~Lbz?%iEhX3OM7j@v?JByoT(|_C6u`JB&VmJjTSmE z`86feul}VP&yBWM!q94piWO+RsMdfBPx!@j@z;ipb&-SEt)BM=`I6~wx*31@^yyiC z{!)1B3{~!dZAX~UY}4+>t|p}~ z@Poi7K!x4h)>f~@epc}!Wan_*Ov>7|P3Gu-0~yoh{{I0Pi75{st^mTtslSq*oca2- z(&Sfb88&`H!^+T8niB_v6Lk!vzpYOPX%kcT_Pzn#RBD7d>Li>?`9{0y>vl4Q(jX}R&N|*`+VZ&S4xX^+b`YSM`A(IpAwM+W+mUNwPcT zNKkn#_eZAw8H&#K_LLhf2|5gqD||A!xAsv`T%T$iyxrp<8*O&`ZGYA1Opyz{hYR0+ zq57{U*HXT^_A=U*YAOCI%`Et{Rrm37abcjUE?so421nS!(P zX4|XX&~I|ve%rE^DK^a2QQ@)Ed(P&@6}GF z7)KodP5Ql+>0RK+Y5nt%l_A;`u zX>>4{BzWo&eA)i-3H}84mN3)3Xsuv4p_M4R9WTmf90V}yKHs>TmTch=9IPC4-(K{# zsOGhNYE<$U9KIgWF==ZoDr)^NwGtXt2B?)7B!-p|= z4(epYN*_GSA87}&rp$(p7nSb1Qb*fqV-{91*&t&TI<5D8jg6XX#b8g@v>lq5n5Ylu zq2qkE!b2n?z;FAF^T6fao<>&ebO(Cs>ldi#=webnW@L;)uvF}P;C_)y!0jJ50i|L# znFOsUIpepuL&8&U44eJ~m9{HvxO!ECK&tuCwa;d4;W8#&FQzRt&iRf{wkmQ9!7?Jr zc2w{7KnVfrL#38lEVBK9bpV=bxY?=!njm-q;PND4kqt9n(~uvX0*-a0j8e{Fkee6U z*GNt{Vv(cpZ^n9u-`+!5V}p8oaSYx*S&p+t@EbmbR|E3|7C7Es|N25n`JjY^1ndW@ zVEF_Iwlr$PAtW_uJ@O*_Z3}`*98&{ 
z-jt+sn2`+DOjUC;y?)P8fQryEd9BR22)eA?12xe<-BL4xKW@8cp5v3Ctk-tqCA_jvTIxTaNYEm`k4aTCxenZlCPoVf-avv8p|X z0nYCu_0cAx5|J;h3d=BRs7qcBd404R1ZMCr2k z?$Kk&0ENu{;ceTEV>OKST*7D%o|m^xVRDhdSO#^7xx@)r@&ysz!^s3!UMP`1FkuNfHBWJI#J4*3<**t*s-oJo&LAN1oaI;=tOs62|)$BRx zfnDISUwU^%3ar9ZD?BTh|oZ5HUkz1Q>w*< zeejFL@LvF8Hicw`3;!l&BWL`*Vv>1|N}?m9uL>PFR5sGkCxJRvDosy1ca|`%y4A@K zjgTDXQS>tIfqofa;S1sCQTJ@K$#mQQB@8cZRShJe0wm_x%?&p=e7*42(D!#2I&y~k zt=2KW2?`GWf!TF~%1A8e;*UIus7pZCyE(9FnIFz_mR->Gp2nLC+2fcad3h^!bw?yA zjb3H=m>c4mR`=>+A<2eBpy}p%4YTPj1w$ZI-b$f6sF|1)N?d8t1xJG%hdGQ0Xb_bH z9@}$sC#ff(n^)~COhMbto2=MWeMaKfpUf*=@0ivXRGctns`sU8#nP)*?1?G=HE&0M z^3|Sz!@9?erT6?^I;igII_67fWF(FI3{zHisP*NJ%~n1!ul+%PW%6q`uc3Gh_XUgB z_VatT8IweZMG79-250<#^8@gYqC9$@;Lnb&>gv@&|J;?-hO+Px+mX)9=l7QP2dqcj zh_I!;mF2zzI95w~4^g2b^IVQvO~k31_vC#)D}F|szvDGgY7j3yk#4B+Eobu}U1Szm z@*0+gA+py7zIxnS6Ag6L%)4`<#5=uI26B|EpS*~B3zvUq$N7Bc`=18}8tnH0Eob_u zYE!3YiTSutt)Fs&-Vt2Lms*wF^%BjI!>O|&wBi0Lj{{Y2j6TfhsB$AG%p@X%hMQGY zzHHXGzunY(`XkDNfQOGJSCql4Rmrj&XYw7)(SrDt3AlRqk{=>3dZx_iIbG5_P()I4vUMRBy9BsMKLh&8w zYxhT&Qv?z={PW>$c+Ju>06IJn&#}`3Mn8kL5v%~JY;#aKzn^&bXXH}Hm%bfkqv5Q5 zhXOw}e&X~=&Khr7)|D(c;H}D1HCRsZHZ@50Y(RG&{{Mf5Lcb7+Ae8_XL{tY4)*k0qN zb4bao*5sUK6A`YvdcMSfLU9+E?!l7L+mK9>Xdw94`z0YEjb+PZs)CX8x{&ghgWAdK zsumJ%dd-8t=7Jd#{M?6B{3?yglbW8z?gQ&^;8>EAIv!ieiJvl}pSqsPD23PDQEo2p zw%WnRO}oYx*Th^r_A_P4bdh1R@BXN@RI|{DI?`Jpx#ju(&C>cV0f$c>|9sJaZF~2+ zD|bA9zINR`&2T8x{$)z7SgDlT^8OgQ`1qbZeF{`I1XApG2^R6+EEO6Q z$CR}nS^9YutlW8Y%Kp#DFV{|zcuMMMe|2Tv%1?n;b8ZLwu5nPVS5fg1gr0Luz%9|K z#DX{RE+?Fddhz$I7&$Fjwq%NxBdXNAUumL>Co5iz<+OCHf2g|Gwzy4J-HUjxwbbR^ z?G_qCiK61;YuWFd7A<1q8p8f;Zc@L}&79WFRy^{{=r4IKj+KkeNZM~q>71wlvpg%8 z^z2mNzi4}uK&ZerUgwkoML*?|AmdbA>^AgQAaB zY$ZTQtw}EOYkXjrxW_mD=4sHO2=cddeaA%oJTvC>f2Z!D`6JA@*A{+q!$OVgo=aX? zL;CI*5mO2_!YE^T8IeSffFV&)&Hp_VzJ4_uslG|%^+@`2XXg_PJCuA(l>)dQmqULrHq=C5LSda{OA*365^)r^0tNeJ&s)?>OcF*)k`edpY2 zLK%qNaN%3#=vg%evOtZ31n%i?l2TKvlH^Z-aUe}he2VjY*~+aLj$h@!CTPnLh!SoI zr+I=qIoJgcs5#ULu_PDYUMk1bZL!(l{^_S@y3)z`t@di#Y!Zc-4BWA~MRqmssI*GK z|9hfew*A~A^5}CxOr$op+~FyV1mZemBD|O0F##)Sd)*5A8xDXZM`5*2bSck@OWvhbj#LOqoV3gvgfv+>?DSgyx3Q zesAN8Cg)q3y2e^{Tf`i0%V1$~UKWx;GneYI708|L2lUpTX{KtT#T^j9(3C}am=bG& z?RP< z0T}685nL!SWs*eT&hzI-9z1w(Z((E?jOf19)$s_-Kmde$D5vLVkj^J87T`hAzwx{T ze`ggI<5M&Y33D|3@-MR*YFb+OJZKyg%c4tGXn20lhAyS|Uv@R{bBs4E5($oqdYYDn zQza-=n~RrkAVX+ma1fCPyckBDuLa9ecU1H*QY@B`-alHG*P3DI2n2|dVLcW<@hqaW z_kN;s5H1RUw5bR~1?|+pSC)&QY~nCXOiVn3eg$UkL^D07s&lvBS$v-utGc$;%p06x zzj^ma<90Qj>$H5&$|0c5>w{tJMdV$uF>eZDG8wa~62O34}FNuBGrUUCvTfRTUy@6d|3hh?k;!mu>gr ztdH^IpdjsF4E%(orAsjTI{-``y@L>{(Dii%#MPLxDP60lmq~Gzm6g0ZwO%{cP$i1G z9D5Ox3}FeLNQ9dn`2G76ZuMb6$@zDGFasHiNlC{61sCIMW)T8;?I3~+ISK9Q8W>2Z z@Y!c#wJAdK5m{w~NH7?@LeMTM;73Ka*;=iAV0vxidiqi(;Sul+d+mU ze?{0hf`P=~|NN?BtwaM1FKh`zLzU-m-`-8qoYqN6pkTuQi(bt3P)3I7L6O_L&pxtG zwi)gK$qgFjj~0=j95aS*z$$BS`u3N#MUL0HP5ROOZ!Vad*4@Gv5RfU-9sU<6_G0EL z^Ki%Ai?xxwmmh^3tK(et*&9<26h+ka-l;f=KblE$SUa`vOsW)+-Lj_)Z#AcGBf4E5 zUQ2kJi*|Huf32Bp6|sg9FPjEK83scz&57pPfJ;4Yxv7m8${hi;Q_A4g2R?eV1BZ!@ ztREcM#>(!`On2ERM0XTBxe%thJUfpI!0eh7c!AE(Ok)os$JImKwM7LNYCddr8ZWl- z|0jYe>2WxT)P^-^v?}E^zloQonEpAZ?YXV`De_ib@ezUF~z%wOX{kA=KKtdN%~*d6jK{j%^+1(qqn62>8BwKYAKrEf-&A zxI>i7O>yPQ_nQdQc;2g_rUv{K_{dAu7{Or;d_xt8-H>s$z0x$0H&?DbT;DJto(F6T zkb4}a2v3)P?1M9|S=A{C+?R4f*AC5krjh)S$U3l=baLBFzfT+Jt$BP={8Uu2mzVb~ zJc+&P$qh0nxjPq@%Ye8JjN4*$w5cmMuHy=AWKpO?;hx zrh+P`EhNBo{n_GHQADyqxQOXHPK${DrRp(!UMqwRVM_OaZ(+2OtrfjXMI}b{1Sc$j zXZag$RAyme{m+g`V~Z4@^RkpPeH@D_NHh6Z+f^S{u5HN^M7(u`df+w2_6?QmCUntHCdhjG zA^JiE5WzV$HO=7Gj*g3)3G?@pQo_afDuUxr-Xu%aV1^W=n}%o1Gd^Bg%J~;(RSXHC 
zOu{Ic03PD8Al%GLAprRYHiX7lZ%HCXgt4+R#r~x$5c;Pa6yLw5s2mi!PKFeXW5UA1 zhVLV25oTx#BUmR*FL)9tvWLFzJ8;V~VV@LrpDOj~e)$18x?6u-t525PK0Poo!MTHC zTCUVR*=c@A%-wxP^xpAv-F<~{CUah4P=%qNPT*nTH>a>oTyJ|d(9T_j0@Pz&FdQ&7 zFYi}#$prDc+wej$FdAY?59kI??gc@Ct8({6iT5>0p2PIX4V+Y%gKGstUQHR!(}_rcyg}#OzCxzlu33g8ve+lS8jwsusy)h3WB4}}pLV7emeVG$vUua8-y1KO0NG~Xp{Nw&;Re(rmDfvU8~!rJ3VbK*VS*Yy#}XE}4d8dfB6B$EC<|MDrF(!SMfnFWzy_Tsp` zhM<^&2Su1*LbqEJ+4O+l@OR4#C1Dg-YLGvRIW06R25miD67>2%ifm4rGX03w$3+rs z>XxU339)^*I=WQ*rl0ojo&HbIX332fa!{A6K6xMJGRTjzeftJu9pyc*PR|{yZY@aw zA9E4W#jpVf=2_p`?TxPcfWA2ocg`L8COKiI*pkQm;d=S*xuTw+w|xQV2oF*sz&4cl zGfMat18i%KL6qvT{+D+m+fnTZgq4G{x#mHT3$^cD~JnML@)k9f(S~~&$v)j`xk3r{`llefPO2~#=ln? z)hz6Vyj>#Cww{*Ib~?yOvv7Nn0RMpkFflL?`6VE2z0_{{aYn{ee%7<_XHV%9$`W=K zEtbZ_Om@BEV=OF=8x_A^%|;lvQF=!_0m{ z2v;vSdhaU+yD{+@)T5a4Z`56BxW9@yM z_;NB>9Ztg3?Bw4&(mR>&79VkZ@sfT2K_`MMJUO}a%w?8e*7L8W72-b-Q80*7f|DzI z_Ur}zX7pP&bKk2R!FzoW8+^0+9j}@CRo%n9PonpX&q~Pl6f>=j?5-Y@Fkn|2iR5V( zX_Bt52i>Xr-BmGc{iC2I`P0uIhV9CWo2^S-cDudh>g(9pb}f|dE=f=Z0`WQKAD%Uu=d*5706=2GQglI&{X^5x;ahwF7 zbSK;2_oz{^%O(;grEM985(0h%cyfzj*L#h8OM2oP=Z_a3-R;ftTThcMWpvo;nyGBd z$m&>S%et?Xn^H*ryC241wML>oOMhY?61o^l%i3BST2+E_xDj|OD!~(Lz)uQb*#UVX zC?Edd>`@|xt~>j(M>GiFPhZquW!ol0(bK5Ts&tmLhvr^LhiBJFUbb7hn*BkYOwpV; z>5QV4!JKpP{ecSXH8#H-cbcgkN}e4GS-k(idiP&{XWTVxz@uOgAjy^zY@DItVM0AqE&8gHyJp`=ol$=2pU(7@ zsZ@hSV~BKcOS)QygDE*V;nCwSxBMo0IgAH3IqikdzhEEh+eIeL6Y}E7v19T1`Ns)F zll_LP4VZX zUYvUwZn*7%#1_2jkB)ERjhS9CBc6&H; zrx_bpNU9}8Ka`12@f&&E|3OF0Ct~1;wD`fa$TKu9?hj-a#}hXm$~-C6MWZQKmvBVC zWY^B)4`KM9b+uy0=4$EIJUq@UIy2Nj*8ob`kao0$=dVLwKgY6~WNC&Yhk?q&-7Uv1 zcQ~?sd*;--;o)B8_JG1WDQ5pem+x%5m;UTUn8<=$9$u;3U+fo2YMvtz0)1vMzQgwS z(oHlRN>KLNr9)ZgkDi4P(+u}gg0unpyNhEgA)i63=0xldsxa88MS{QkdIIz4B-ExL z4qP!aQ@}w4kU%#Wuc`->13Qqtg281JAbhhdQ)DBGiisscGKSo%u2K(=j3z~_OQ9~i zYYc;WhoVxs=3X6VZ;CCA)EFX@UfAY2WW86QNJsUip+=%GxYC?1sS`|Pv zDHtFYh)au$Kf_!%5ivJliRWVm49;;>OzB=Paip-pN`4=x$jVo)96_qhJ!j{T*x0vW zH#;*JPg(U6xGNavMnathEBYRRLA0b(otYXyKM0;jKne*VNWA*Fko^7Z6+?pZS^Sc_ zC@5Yc*NZ@ELw=~nD;B`)$wxKR%qm+eh`Oo{q=pyS#ZO$|WK(}z8t~%jJAblI3+*yA zQKD4QQDRVDWSy-g#i*SXylQn@d;2iVEni?Ls)F#!V8piArr!dKS_`Ra*$uj!G$C+i z0dm4?`VD*xLOR2w$JU<&q+Qq}?RM8waOGcwiQW0`iEz;u@7^6(3Sqm6I1ob4z|vU5 z;qWY#FwVqY>+YMBB|@@+Pspt?dvDc90I@gUmbI ziDnj1!32eB-CVPwhtUQD1WDib!^00i1xJX2sa;1n*?uM_b%e-;-$XnIVp8lj*PVzE zm=uJJdW71Zul?^{eUH;eCMRpqn8LgMVr{X$`Xf~ZG1OSk`9mZFN|}C=lnG|g&!mOd zP=9+&@626CrExg#~-jJ#@7EE6#d|r+ z3)DmOgPgdH=$ST}VV?xb)4H+CDkIZ{94Xgz38{~vUgu?ZPO2SwaFmWMEbWNzD_=6- z2c9G(ew<{qZL3B-`?mAkOZPRrf12FuXyUQ3W(j21vPCZmWe>=ZunKw}7kldUwrR26 z?!@@?WtX^uL%J(sF0)W)5Ixw;jdntU$FKpgS+v_JjfVpiF#J}|forgPG(BD88e&Ic zV_hrVH#43;_e2|1<&xI!oE|CQm9HdUwJt+(u)IBE;$9tjZ~bUO-|MhzGpBZP@_!gM zk&->9MSefC^pWPnfl68582ti8qP;sv0l%}cvYvftUuKopifzltWkwkLUllZpjqtm? 
zlz%6YeLgc4ViBkhX707a%`!=QzpM^Pt<`u1=>So4+o4a%j`kgFKNHir*rvsNjJj_W z&Z_iEIDT>|BT~Dz2hjMlUJ@&0uL5FjKS~31?8Vx^N?P$~gg$scM0eiuMTLZDy-`zE z_8T_1bLTjCz$uM%baYr>1zMK;2Zo2F`8&sKe;mg3Jgl!*t>O_BoS)*?BeT)&DZVT1 z;=eoFuqAiUUeZu?G%z~B@bjaTV`FFnPmGYQ^U45D|Qx4lO!+5SU^b`vtIv7Xtvxod6uEpoD)7R^3=`a}f!2;>nD zu^lMJ5Y$0=JqUWwPHuoNDJM6#?FUriD0f~4>UV=pm-4%yc}!Nu!~FfFxrGJpW!x`2 z<@a}1j}lqcyeXgX=u1@Uk|~vuGrCB{w8rJdE;l4&Mjo+ad28*VEE(~)dXkPw`@l$} zOvhsW;!;{f`DrR7YkvGal|~3wo=VFs(SkSCRGmPn{RgLw<<4*rdw-8c8l%i8YQtuz zgJ0o-kx_iIT7tBj4g|Sv2-0YWW1$WltlA&$lb#Sw2o6pJn%)My_J|mDjL0&5?r!}~7HAswe=*3h`Veo$)&y;^~EyvcBA-57oAapI^@9&{*Q% zn(qDJ#p@s!H}KPPHZrP@Z=00F$6gz6%Jv}k?cevwte?JjPZXIxfJVhSKGoINkK}Y3 zwI=Oh@p`2d$gEd>$gO5`IAEiSa%6O2UqZf6QA6Xi#h#Db&}b{%@6yNwZbQIXRB>1wUntg?$$V1mlRaJj%IkcmCSnQ<~OHfYlSnv=mq{`FDl4%}`6=STM%?*k3ohqV8boX>A7-H;& zB(2|W;;zH9F9Kf}_RE%qYFGrnp-ei|dY!RsJl=((s81cU>Y9dz(LjlxW!K&lwh-jA zzWV1AU5lOBy`34C7xqdDK6mxjGjd2YDav!MiWrQDRA}zm(_9oj8<9mRdqaz8%3j>8 zF}4ONVWO_Tj(2x=!*q@T+GD;?dsyVcpGM~fB|O=GCNuT^Ak(uQk=7ecGLcN|GDT!f zL)mRo?{_!#f1EE~RJRPl^Skv-`pj*Sn4A5@Vrc#edh#m+C2o3;=wyABEnAqIg3~&+ zi%vJ0xV;SHxrmj?&=pJ0`D}WfJUNEv%!!0fc1;$!oK!r#t<JB_Pql!b1LXhkn7< z=AWB=g~BrHa}Q|^$ynr4B0}9`Q=E>44jyWLaxup(Ht>SZ6_c6HokXufv#4g@4!7O= zeN*+!8NC;VrRgAJyFYzAqnT!t6VrjI$lGzEjGf=0pgOnqOQb zJ#zHu1H^s}kBuoHBs5yY^4UL^8J-aSIj`HL?0M?_83(I$imDu2;GWaY&f@(1qdlAF$XK|G ztXUIA-=5JpO+`K{_T!nf3qFvF`6s`NESI|1Bkk<7ZU*|f&79gSUYv2zQ$rfu5%If6ffW?}EZnoPHuD%Edq!5>h2!&}_!HmA2x}v%Qi;|DftM)eChgcX%UhLbCAm4|>Ec6nXkE@{*fTv+8Z^{0$l$L-$ga&E@n;P9f~2di#fO>njTUc8Ssm^-oC%d2y2Q zC#?grzCtIth&(pe%;Bu80m-KF#`fMM`8|OQV*mC^t-t?yfFgQ1|Jwma$M^sv56ff^ zqBR?vHi^RNuN+Nu-U0h~S!DP#Bwz1lkvsTIk~AtJLIGB8(ZfiS>vUC-!=skCVDLFr z(PKw;;LC#L{Je*DX^b?fDZ#@`B;t(XE>z0awz1hXFIs=it!|6&RwywND+14)ez#Raf|WQn zvdcvd>3?7O(JOh2^Y=5$$s26X-g{SPxkw+JVC15{sq%_})@>Eb4-z>HHMf$KL^C!{t zdmA?nHPrE6Z~EOz)Fwe?Q!}*h$i&*gti7?6vUbnXNx}6#D~)zasH${p)8%K;taN%I zm8`E{G7WR3hn~>yNbQF{MR)DQ+ZtcV`R$pxHx~bCw%2G$y1(m$5aW2jnKRp@WJojn zKju^$kg+IpITBw2%LW0Kf-wyzZ!UBSks()k525A4Qr9B9`MX{61k94V9~p5K3dcXH zoan7KIAcXMe@mU@{Op7f@7_b_x5g8ydrBS-eK_eaZ1KJ0{X_rtT`UGdqPq zWU#44dTs0Y2zp;oRSO+w8_}6gAmWdgl+`O=jI;7n*Ba5un&r{=OoQq^-*U#-yw^k| z7qrMkv!8f!FezG#4EGmM_iz0Sav4|GB7Az9Z2*zMkgu%RxfveEZojW9ZSmLP4it;|GTn6nsl+vU-Ey7&%Rut>PcY;4VdG*D8Lk&|BsOn@a9X+h1%NWg2}U*aXaD^So! 
z){G+O6+~)h1ameQ*qP_v?zT0|xJ;%g=C)9^I5M^DMMLjYIIt~snBm(q*^X}4`dkLt zkj>AtEmRYIegE77wGzUDy`+o?o1Mnyc22#j|K=ky$S&!eocHN{};UDLOH z;A+LmI&CtR*d*>~d_)?84h$9y6cio_MXOpsuNP>7TcYg=nyRKUSB~qNZA#~l3%0gd z$%KsJu*k?rZcfhg%3Q+e4BC;ktM%T8g7FR9{TXK^?oPcxSnBLO>+EhFctAZnW!-DEt)l!eNh$s4QJlxv!j>Xp5iZ_<0*AQBjsS3fmwu~`Lomz?6 z8&N5gNrl|j0h(NjMEe-U<<6ZWgKhNAz4FC_68k1{tHU2{A95r~ zPb0bv5yDWKSGPdq%8=Gx3)gT4Oa4Hc#0GDApq9Y@BP^0Te@tS|ncR>S#FoCmoo8ZQnt3KrZRQG_3$Zjrcr`7-8|CL%GI@^CFjgDM3L5b-&6b^Z(MhN2#W zz2V<3rC&smCu%oFE)p{nV#)tGKkPp>^;NHDPm^iBFY%3Pyqrv2dX!OP)um-pw8|lF zuy>Nd|IO2HsY&y0bHs4KIAn%Wud(F?k?YR_1Qk%20jEWP`Fn%+zgK4JAY27?Oc+L} zuyKp^_a`s;^HYLP>tlvlebgB|nc?uLs7oze*fupaFA5EeM)dOCcwPE%hj6TZwGo&6 zwBImMoARZH-B+r%S`>wAv8&W7;yi47t?i0THbTII#B6aWcq;Q>4qF@8e3-Ua`}M2& zg%UN=iB-`?;Fe#%em%@0G$ce8KN7)$&(v(Y@x3h#ODLZ1+`W73$PuE4<#%dC=&l*~ zRbDHvt4nKBA=AupExN{Cqd?kUId?rFGBz;u-73Rtj;!Oaj!wlI1T;Nmp}*DE%&&=B zZ>QSs)5$H>A6NU)fUnD5yLQy%i~k^+@+9PF^x8E^tuNFGbj>)VUc;Ql4j~`}l{DVj ze^73Mh%%XQZVXOV`paRHKZMXZXtIy%`ZuA!yBsWwF@_#6xthgRa@s)x znJ0lFz-)r}#>oftOo(rs;n4HiCVD`Tt1{QLikM8Sfa1jVU$9@i@C;p!)iVteBm%;#LgeUV9k-*0jJ2P5F&Z3uj6i_j^G2s&K!t9FDi6;@*PaBb7zV_emp4Ud=l)c ztEge|?joq|9IkZYYs#L#@$PtK2#MnW-?D5+l?lBy7GO}NR+jG%h(g7jmpBQj;Mho9 z=gN1wuJ$=1eyKaSDvV3}7wntYkk8tkXU&W+W#!=TftiQJM))ok?#Brir8OSZ_!gN15U>sj6Cl3iSOVSmYVpkYEap%a(XTpE?y4q9&iTeh;FeMNu`Sk!`FIa4vpcYjV zQj#HTsjjKH3fRb>UXZ{mH4r2WOPp{eVlFGZgb-ug5|COR9~!rgE8bY?nOJt-v5yJu z3o+hT!W9QL9VXvFz=u$2DI$m)MLMgNr4)Q*XYc1`M+UkGluAfze- z2y&Z~&et&tNIDWzGT|ez4SgeaPY;w^_ zWe|WER!<;{@Gf5QYpQC;yb2ZJ&6YT{sLBV%=HV5S^x2Aet+)+^JO_=IR=$r^47?fEt6!#viiA=7>|j8eAu{Nc_`nIVHE z`mmwk~tkU}O+cE4_hrW&+EJGEI8tnuD{1>;M@GL5F zdA(@;aXEsvkBr4DcUnYRW=S}I;zoq=lTFKSVvO7l@-@463V(I@9v`Dx*0uV9uoIm` z$BdY6E0DSNm7xn&iEEv-*RyBAuio>1-c6)%qM4`iIfa1e#vg9hW04g-$_^B79FLN$ zTJ_zrY*((+!GqSuW)4`t<)}Y?15pQwgqgK%2G5n;_dc?p93 zh=;Ft)3L~nzFbp30*%PXNKL4o&jDF7wV8oj-y145g)g?OmSbC!JvvS}G4$$W%{ooY zAIQHu<)!aK@r+Pzc0Od^{=Co194q3Uiwia4j3L;C7eq03sCJ7%a~8P+i8Kkf6N0`@ z-Uu77n0wc+?&GH)?%9_yS10NT5DZr^mPwhk&^kgpM;JK*v?Jh+1cfNkVUgS4QzDIy z%E%<>8oOP1V&z3COI_vQrWlq&)=brZTgdsNU9v~__dnxs}m17TM73=^~Ykx&g~VotADI?~-sTbvFpngRn4tE`wO>|+^& zcXd5EAl;Gt%Z%xZM=weAFLtk2`%e~Zcf}kARJw~}_s}YU9~Vn#kG$B`Xa2;I*+~;g z{RpbKapz~}m5iLHQa?0^36*xlSl;)Rk40uqyIDRj1HjCe-NV1ol6@ezw$lz(7quHho7Cs zSB(UVIt=JH^dE_R;5Tynb0g|Cs(p`V2Ru>PBIM*rTU(pZtt~@IiA$*PKbZ$NnFt?x zrFnfMTADg8kh*k*ZF@mwKHUpVR?D5lnI#VzUv+0{0mWcBxt4waRSIj6EBl==Biwpk2EaM;Mras==T#N;DzSH5(bIO@qb$YRRKNKSym&kjR~(M!Km0zh7_4AC z2nwt>2ItG<7sxbsy^76tO`zUsMm^&n_xi{9tPVp_gSx?I@0QZ=C`p5Pw}~@++JvW| z9%_+}fN_)~%7mML(%oQ%*QcJ{VETAL6A!+XAx7CukBQxdiACw~++k^khdw@!qH*m3 zsY;b1*FKX)@Lo=m=S1yvDm=1$b@DLLjpSQup=DRbhq?~*TnzKyY!_pC+28!%8qPIc-~P|od!PS| zbH*8Gjj_g9->2}-cg|-%&wXEY=WF5NH%8oj%QmF_nNDt{`KI&b9TmL;F_>kxRsI|v zKI{nuxl01C&296mf7>A(I9)Wu(;8Aov-aPOebx9*C0_A%pn}OQS4Q{tZSpT=dKX5w z?%0bj?iqs(AO2RZVswewA2fwS)Krg9#@f$nrHC+%l>SN!zgk=V+B4qjzRQH--@0d7J%7AJtZY2mC$hP|6vC_1_=6p>45v^ z)6vNib@xcI0Z|k7ZI@-LJ^Lgg=b4{LD|aDtXfhVJs^9m7ke& zg&S)PGn#Qtw=+8&02-n7t~gy$gP$Xn2 z9v7nY^(i1;r}F0vB~UyNJOci1XcHslF5TvhUtHAKe0-P7uEO7@>AqJD5$Abg8#n@!j~Weyal_nhV8LF z;AL6|0G!l30}j>G(^HBGxugt0$RRtRBo0g*|B_!NcbQ;hmTvk@AO*fLbv?THU!pny zEdDug;8voyW=foDiYCzuNuB2?=w2c#6Jh#I?Lz;pAQMYil@ZPt2hQXr!)^2{k1kyW z-aGWBO2nP(R>mOCQ+S?Pu3x|YDJ>0P$Zq12&_=fIn*dM(lZ_7mw_xFr@bKqtCgmgd zIA^v{Q22$1Pu|%(pV{U}*S(>5AgM)4Q({S~W#gE@?vH5L-M-tYmL>*V{v_Vd&C`GW z5Th!GJYhSkL|uDLPY>}r-23%y2WrCveG#dxWWE2q5{uN#(Dx06(c~D}XweeU9o8bv z*rJP?g*xXHWnYhhhkD!OUxW1ER8O$vrMlvY%PhzOw5L>X6})^G-c>}>w+ zmig?bwFzWWF_T^siiK4STjxiC; zYc(=81HHlijq8pTs=c(|DKc&Ac@7gq?78gY83C>kt_g(<-J&=^nbKBDqtwqi-?Fi+%7GH 
z{4*$WQYK58Rt?y1G|kl)oy(%HlP#;NI-x!Fm>^Md7(;@&ET+C+`*#!IY~^>y3BNk! zwxJ7&+1t1{eo%<(fJsj76-GnFsdZWGvVYstxN8lcyX(7_DSpgcKAdq_w7ZRMx=!e9 zQPQ0LJTAu+XgWqzaHku!=-@#SAk<44RscshwU4(Tg5vvMe4b>fxO3X;mymY9ZPXe2 zo1NjF0U;Wdzii@2Tg7v>hp=9oHrMH~#B)fjRwlKZ2w=3UJ#^C!LqV$Uh2J;j_1^qW zhAM^gM$Y7oe`HuYO++GpPOR2n$W@7Yn^pAK@E6Bw!_Py#9vg3x%J+VMw7cax-g*LE zAi#Nb`M$Qjt2gslwzHbeP5MM7S@BO$TYCLS7umzku$I=G;yWTKeRdl}1#-#cyr)hn zC5fTh@pN5WyRq}8OxBOSwlnop*-R`hp2f}wel2k{3#CzSB&0<1)1M-Ufa4O`Be?Q; zPyT=+<)^9|~`v=|Oz92|!VjfS9;j^}H+x%k-A@w1AG zbyO@xi2Qjkdd*dKj~RvGHYx8Cw_18@)&Qc8z1VV);`Qz~3TY$Z1DWCUB$1cxyT@-e zB?y&y2xM&dkkK^t()*{qPMp=(r@+S+EKj{h4$>hK}6BON})}KzuN7KO_%I`>U<}8f8O!oCsPvASU`fFlo zG#zaPF)=#6XJk4q zj3Hk^X6(0Tlf|Ekqka*Z?y*8%ud4VKefK~64;%8v2#1k+o;@ELXv~IpeT&>sM3oUT zFbG7xK#BPGqUz#{)m^1ED9=)BxU@eyWp&)A%Ye;nXsNMqfRoQDXMlg6>~GtYcm-_} zF31q_(S0;DSl849gxH-gYGGkPh<^ykB;G*aVqX3JO#fIZAVi?AF5FW57+F@FTpw>tj3 zEBUE6)>4v-4h^dyzw0iE_xfOMMgv)!#9jg4KU$clEJqv{pFw6MA zcgv5<@7~@q%}=5gd%HBPMe6;slyIE!waq2C7>O6`+Evm!)JbcJ=7Aq~ixDVkO8%ey zh3m_l02X_bbAKH^b0!isOe5+rg78%ZCFm9u6cE6v^Gq4t-y6khd&#YmYQewo+GRV}dSLPc z4d#E8cEWM?Q?YSzryD6ICFS26^{#4bZ;w}@*AA95>O`;5a=)vdXN6WFzE!?;xUMj4 z!S?ubg?}jq&f7Z-T=-m%u(WVPauDBtih*o1GFOih-v_1gwwC=Cm1yO z(QU)^W`;=>@IrYhLKaQjn$F3<{t>3B7Zb5@@9H8}I=UzHUV2^HkWDU=AzP2ReD`HM zZl4QOgOM1x4aw9o7~mlYbY1M*0#s!T3+ck+N?YmkPu*Ro8KFdr=O>c>p#j;H#oR%ZT%x5jQ&|`;fikZEmh)j`ro|CNf{lhV#TOmkV+7W^uFhOWVlSa>cpCQO`T@=1_iAL3`N+krP^|RdNlY+N9be2tC+%%WtBQP$HIG)6JVgRe zsVJ)u(@1eL%ejOOa{`-v|9pMhq0*r{v)f-XVA+FR0BLA~AFm+0d9!-2^9 z{X|*?v(_`HKeJ95zq=8MHz%O>w zQps@A>+(=0`$5({yuX;d0RcY^Q^a|Kx*bSn^@UyH7hk{s1lcQP=(gM&w}r@Fj@^ja z#cZ%Eu3zu%pyeIoDt4Xx0Z#L&cAD1jAO5ljW@mz8b+ZbI>ze?m;j%kU^Ee(?ao1cfp-3AD5VRc+JRzRnA>`g!Zi6CjU1nRW=Q z|4wn^(=H;xTfeCY2=)@h3~D*n9FW}(p-qGgqjJU}td8KGIZm9=12aVMd?z&XUV>d0 zZf6v=eGWxPQ_SUEwv#`;yuBJ=KbcyA7_``!IwI=`s9u_311!%zpp{XC${HCN2^SnY zbg{c_vr3ySnR%F#A$gav5Dgn!jJt~)E1BHX4@>p-&*f_w9e+LSz-qmd2?!?_ssGuLd7bPy)bbp$&6r#eq?w|M zde=t)5lC`)N{O#upT(cx+bN@pmHG{JjT{Z?WBZ;2MYhwftM(O6VRi4$pFiJ>D8yuu zDNs2S@fgBkoU47vOh>}`q)fVcr$YLTW`$#VkvNE!r*q?VtHIvQ{|A;_XzTX^P`sPuTGZ?)QkRw z3CtXaUQy&h64Tk$wbZk*UO}hjY|&8zBgXgC4Jrv4%Jm%=?H4?mAjE?c}_|DH;n+S9q>zM2*-1y|s@Ne6! z-jq%yAF(=9<7HxQbuLcZF(m{A*pcIY{f9+0h1q7LdoCAE+I=0f{@a}jW}?r-qxyE- zR{>e%)zCszLR^bCglKyRrYvVZHWuSAc=fBbyxMMlP#FhDh*>;81A`XMNf~K?RJlO^x4nOC`Ogl`Z3_Q2B8E8a~kO6|C4?G($KJrhldBq{}r&XI4H?p!`(wPgV=B9 zhQsc`15r!OlUD@0AXAJLdJH7nEg?KF0>hb89e*1UssmUvhbwif133r^7tiz?UOz%; z4t+VoFZ4W|dCn{8Pq;-?9QXBaEWB!00Kw8<5!J2dx6bXqf45_sy*{^yZtTiwRv_q_ zu`6|%kXxT_j~!B{ZW?3c_j<)6=G*h}_I^W7pa>TnjJIElB7^H~5W$}M z85(=fD_u{_deOpG_(m?}$Ue7U@9mx_T6lVym}uFMyXRB{>rzdZj1;W=dm%LFQ7{S~ zS~%~pumfB-KOa04Jt$CD`fM*jCjMoU?oEL64QpS1L3+_MAD{BFvNDT2;DLZ-FB0{x^7BtLY0H&tglvkPXZf^^ zSw?c50UP%lz4?@Suqxfmi#zkR{?>!-je7r7!VXZ2Lj>Xm>4d`RTc7WIfg*v>ZpxQU zE)FQV{ywp^l)5v3s4KICJ>FX_fmeBS)U?e^KaXk$a3uxsp;D{sJu>CiLtR= z&M^GvjmW2tXWbiTR36?xn=AV8X6J++y`)L01?jIKs)VNJiGDvC;=mAA3Ibi0{t#^! 
zJ-PzsQ?E}CP1|C+){NKXD!q@FZ~B(J78IfxF*l(KO>lTQxzsF@&s_L~rM$NpWfZ>FR~&5Z@g^!LU{GCo}j?Gc+L*?(+fmvIbGwD9>U zR+8N$Os`Cqe*Ht)Li85C<>i0h_FJ!hZvOVKL{#@f@wP*W@h!JT{04PPo6K|uKg5K4 zMhEJ2{HyozZi}CGnwS4ckRV2~U@RYgIg67#bO7jJnYQ__HWoDDR(c&$!P&RSvQqjU zX3=Y`_w5a_)mgYfI1~P=bRMEB=B3byEz0X!0X?;BMe{@9_T(nd(EJK}F-oTq7&8bSZ*zGh$b}Y?R0>WM~ z?pG0pX>CXeV}dw_Q|mEMXycXYhHajq7m}p~5-2xGJLi$@6_?>M7t9u=nQOjpt>AN` zk@J9!?4>^OJGJQ6lmbsTI9C$T^h|>i) z775ti7EH1~QgHIZ^`}N(RMczzB2st-+1(2zNN?P4oL+UX7~%Z*fCaQ&YKX;cgp#g?{;AD(Z@z z)G=Ct<4%9ZmiV#|;bm0-)kGOm5YIbpD zo-=T?2D_m}@L|yl znuQbklH)~H8pc;NPKTI2E?I482#~o@HCNzh{Ua!UlHWbBcJrUp_7=uGEr#e24@~MgpQ_><`BY~A zl4nh<>f|CxuIbpzZA@%Bd}=RqY>%@m=WJD?ifx+dXR_iO-Ed%gJwM2}vm$u%sM6G; zW#?2DQ)@6+atX~ge3`Ro|Bzl_^5f(ETV~nF4q2Y58n$F_nNyD&8Kr5T zNL=>v5+Z}PrF;Nmz1>qc8dqNd;S40CAOITtA|iGaLEcvh{tJ3j1mvh0kf`M)!#io} zs+6tT?uZm;t}_ot@T`vKeD6z;y>6vCsxfL4%W~isO}iJfjpOc+{g-c=?LXaV^!3cg zHD&KrGPQ<53vKDEGipD(zF-iOp2A&X~Fx}LfMChf@4UpH%JDuqpWnTDJsc2PCZ zxq5dVIGHe~_bc9IOOCY6^j9fc$1^KB{_g&U8|oeDB_FItFo*#UzwnMFbr!MW5z4*% z=}r>L5S>*KpJc~pHp4NigHz}H>i4?8WA+a19AYq|&iq=*$737|;)~74};QH_$ z@^TmZ3(F7HjgiB_d z(_`5$5oT&+!Ny#A4Syl*jO`)Abrywde-3=h+o$0arkqDpRwobJ zrifF7v#c3+(ltezB@GmD#Q=Z^ImM z=}l3TG!Z%+k`w~A>wa6$jzW`T?9uD8P{=9739vBol*2Ug*EV%pR50oLI5|1NP16 z#Y*70UPceYy0w*L%InKTKUW1t?YcJ>o&+T?Zu*h`Yu}QvWEM^2F$*-@!q^~`>^Rqo2)LbMiB1js(DlfkW+&U{8TSX(XMyghE z3yCFw7MsXN0Ju3Q1mQWM69B0S1+*XP$#7`1`QgpK{C7?U8~~2K$ulOH& zNJ6YX6X+Zvu>~dqD`z9bXOOc);+g->_@DFCZ2f}Mb)w?-7A03S|A?0usM5_-Iksd4 z^laf7$zNyLJx+78@vf$VMYK00ainJP9C9mfQr?y>7(c+CoPKv|y4^S>0Z$_d--82$ zcRDPozo8IF#4O5qcxF@7Q!x>Z<~5uFNr4K-;rfo))RjTDP zbu*>Fenj@P$!*;hOAG10|JurPp0;Ug8dv(B;3+$mIC(Q5epq5p>=}mNG3{ONJw#TP zMrb1Z@x<;9ec(IgKslW~5?KPLIsywUt*o*Kd_+I0osfwT34$yonA6t4fR*Ix>PiCm zq@bj95sF|&Q{c3h;CsSRAC1GUw6ydqtWWShGn(SAGYFb7I{lf9h6e?I66gupJ}@Q{ z`-v08AT+f6LiH2B9UU<;40uR(^3|tF7Khibe}C_AKZrZ-$4_0d6E6dOy3g71jrrW& zoV!KU+EcYm#RRO#Hqtort@~B22ciyLIR5vw>Bd2BBfAjqsG=XKo=gE`H z+5PQ5Z9ybnM3@AW6M8>CAq}9`fFEMb*mlbQ2obC-X6*V_J*-rXppU4ksW~Dp&J zTP1+9s8Fwff`Y<_;VVfc>G%E=wXH(Th4)(pLQ!M## zNixp<+D;Q$R?&Qtw1vRw0Nv@Z)qRett)8;dXdF-1eJ9(@3lG-4ZuK-mx?zZDBNPSa z(q6VU0kT3csjOri%6Z)xw|^MK1Ga`7-RTfU#!nj?qgCfB=2CHLRb!1)q|DeLv+Wtd zcZgIR;dRIvSn0h$69(b#R#QT+!>H-DW&`8_xp`%f^@+Fv3O4o;hQpiCUqi8Q8E+#~ z1;TZK3nHQ*rkPz^N{R|^tq-t}4^Fp(r^`&&&sYf^J4OMhKwU$FkA6G-_Q%p0*}Dg( zfH8c5=$vutg9@k3cyo3+CtXWjW7mNj*~<=x)dPgb^~r0q+JCR+elK(w_qn^6wWwt6 zDJ%M!i)JF!Stdi>C}MqY;f$~Dw0X+tXZWPr!5=si$RlL)$RQ<^BSY=Y8XRTlVwa%L zfh_MGj=|V^yV0*?0aX{3l=dM{fKnUJ@c;!K7;*5A;Z$?=rNLiBd(*A~TB&C6JiJ=#tFHJ+rN zszZU@GloE3eFzwmaK{rPWW}(K8YUf_oCe0n38WzAvabP96WC%g_lNj`+o-8y@amA} z5fO=fh2MG6yxV#18Z0IbeI9febC4Hv_JFAE2}1CQX)w?CpY|l8oPlbk!Wwy*o7n3B zq#=FI4;cAOz39}7fB-^wRO1ryW?6)>@P1Egy?v|qv7eN`K8U_#n|k>mcWgNObRxQ3aHNE?uvTQ1nMpqNDvE9MuN<*T4iBBduM>U| z0`vxGf7h?|+83kr{+~auVJkyzMib!^e{f5G>)c#>=zGg+M_SZo1w%6Hih3Ov=1PAC znDcizZ`3n(*gPI$nJj)ymHo3JbhEg##JL9XTa9-?&0LH5SMk5GHsyvHA6tR(HYcg> zYU;_b^Yro}shV9?W4`@q z1@saQCWO0{Ky|1Cjy~G`QP7o3BH79BN-KZ(950D-)%4QiQ}$2!G9|p*w3A2or6dKc ze7h=cCYj<18qA~N#g~^4$T>y0P@&L)Nevfw%0Q_X|LxDDddY%Y{o!vzCqhT43-=2g zxo}{op;G3vm)qaDJ-6V`nx53EOt5*Wd%6(o6SR`gvSW>Q}}_4!FNp0%^?la z2M#DC0L7v{QxcgGZJq9lN_%Q4d^tFmnRmC6N&o$F-|0J<+vtD4O3u_wh&U5iKX!ou ztl`&364T5=q=3~G9zcFRtQ-0u!RbJg6vJ|XosgJ!1d-3;Oa%+x>s}&v2MwNl!enRm zt7C5UI_btD!oM8PUcZw1Y|E?XgMP`^b-o@rE>d=&*i7{(Q}w}$n?*jAn@L8I-A}>7 zyYuND7ZSyN2thiD5cEULF7xuln?<~l6)V3dDlY7*%DCP?{K>^|UyoUM-Tn~k*?KzH zbgw$GX6t<=IX)Lsn()dC*82TSc&}yoH2nDkFQ4O`Y1jK(3MG(0OjUCYP4p1>Z&;HE zLFC8U8C0;sw3f7Q61}?iR0;C&q<%m4&^gn;nCEk4?S;(eefK+vjL~ zg>{!)+-2_rQP{fw=BV6BRYP`)z7Gr1F87}vlpud&)RCi0qg7FRJ3^Y(!%XDt^@oS4 
zOI}HQdGWELI{o);W&KRf?>xfRV*w&euYcs7@nUt^=jif##g#CiVq?uidrwfYJuVAO zBB2@dqga4f+qgf({EZ zy^fLh^LP>_QT6(xU(2WI+=F9sk&Sa^W?C`%OxdD?Wn=oR@%~Na%gnrSFSIr zPT1iHQ39x&|H#UWfC0Q(84U?_wi-69r*G)Vse4SJ=RxqeDS$gkWQXMtX+Gj&9C))Jcos%67@+|afzmbv*56#m9(gEaOu~=Z}5-|BughUo}wC_+?fpwAg znm&K$SB#dyNr^SOz-VheyYklq2X37R=UKhEFNgPii(+S&zJ1xzn^o+wLa8e!ZiO)A zCjK#FZqfY-o;Tj;l*VhQe9SPU?^?}^^LLV8V@N^$s=C7Jn#-xFRi`wK8HIqmon1B; zDfHBq@GA-?w&Pc@aUGf{{^Gxp-~2RQL5=F$Ouf+d#h#B4A)Z%{0P*4A-~eCR3?xVg z+)?nz$xXL%u{``z1R?2jubR5-k%V}9$uh~ec($j-@27p&`}ToD&8p!=keg5V0~t%N z_k(SgRhc9Haa5IYFPt^_92`ZnMI?hYFZ<0@^V?GY7MZo=zxS0!SyPIGzzV>VGcDgq%0jlylz-x1X?xdFYs=MiR`r3PF0nuj3v0vu zxNjqqRdXIuWoIWujT`e4d}=r9kFe?UU%GYX$^ISDMMEjin1df!?`K($EaDgC`~Ffv zKI9;}S1RJFf48WDi^y+ICAeHP3Z!Y2-?Na!NR!z!shg3PJS5xX;9T@O%wADl{i4zG z-EZ{~gEE)?VQ5Rx8;(3Ay202+!7xax?`K$Zs3%g@V4mx8TxH7y`(m8_x$Prb`hm&g z&!zfqt7diBWU-4US3l)sRsYzVXjjrQ`Z%|W+Ad8nGTGHWdud=~Pi=SUwG*|POTMl9 zN0k>63q!&@K5_b(T-WWG)eRR9+T>pK_ZwPCACau5W3h4^p3=_TwABDq<){?+FE`f_jB}C*1R5(GMF~3~6exwT&3k-o;k=&)n zi0^shq0iD-VG)md#@5>Es)I`=K z?>!;7JZzRZV6a6nmswT6NSVQ+G1Q_ZyI;q=tZwvKSliy~UObV>>x=fyMGgE>#k%ns=Yx*=6HHS@M@+oj?*#V!tZHk3NkUnMIWobap?-{Qp`QpSo2KI&PkEZNk{d?j?u6$JM7Zv%nW$MG*)M9=PFwHw@RdFldnz4mE^L% zqsq}R6}V;3t9@{;$?aU6Qo><5yY;2?E##Cc&pO72u$S(2JeY z>33BMU%qeX9+^+$WNi5D(CNTNHV{?%dzHm?*fIKTo`7$^m+dMaCB>fk)$MtX3k5TD zIJo;03E3ccCpaVC!Sxg(R){!N_!>3v_{Dvv=I!d z5!A_)f}PKqvqJNxKQ8u<4L|X9I_Ro-Zu8=!-J_}p?%7GiUFtdRRk08dYIvldnC2BF0W?*J30gNeGLdiiKknpaf0e1@xExyoOzwrmbfxYFYqd9$d|Tn9!&C=UC1N zNT>6nBC*^jhHZXAe{rWHgUcW-blYB5!jVh}mx~~~=HTWwfW>_?1;yQ9b7h_Mn=^m@ zu=4ZoMg4zG$CYl0nI~#}k)@69&Lm0Y+*5Xt}#l_^sg?Bc5_tGDJT+!8hQAw`3!I4ujHIliMNl< z1$gMCXli^@iz`XH4liJNpXIu*Fv^u6MF->OmQ$OF!Zb{Oe{<43E%HdlC$aJ| zp3=}lmhOmYQ!z&iZ#{mrfvjt5iwpU~EU{aD5iWdJOD{39t*BRRcUI-$|LN2k8+}h? zj3vK1A$>w>|HN$Q&7np%^szA@@x!rDElm+rUPt+KCauu$^9wSuHXZQebioKSd89p~ zRlXxl$Y+!URBTX>D$4<|f&XxC=J;k<#z^*yH}9}8OT`bJ8e^&NZ+>^}n78+(ye*v< ziuPXDPQJC4kqM)keDFT^^Gb(6@Ism@*_pU`_Pra8E7S7#nBK7qDO>~E)F1IyO$7yie>?cps^t@HHOO)LB{(r7$O*6)2m~!4V zIoZd5XE&Lz?-!*c6$%6B#ov7LCx@r_J#XQnb6?0=S6e(a8UUIt87xQEN(_Z*4p6(- z>lLpQd?6fm97o?CZlc9iR$V1yzj#dPrIw8I{8ihFto~6}1#lTkJvkHI%zdUfdr46^ zUv~>hB#i-J2pB8+cfGw-wRqYWl@H<81YP;-K@8WstVB*BTt>2QHIKc}BWFazI5_DP zAe?SRM-uU(h+3Jd*hp=BnK)+aMLd@y#7~|@{7dfla6Ln%!xY(gq}N{@U|N2xz;DPh z(j8fvfbs9v+}w*hnLOdmdr7{D^Q0-3uW}%9S6@S>0oSD7$auiM;h=r41z*gpM{oJ| zz84R7vLcUL_(g$Ad)Jw#8}WXy{>N8-#+_BNSoW0dog(ZM=ntrbuf<83z*QOk;8Ot1hLP_o9$yK}Yfg+)`v46&wa316%H;DE*;>1z8=(c zM64l{INK29AtBMzNLY?ABkl>R*8iX{L;&P3p?m2!$GUO_F}DVY+U&-9&&B{ezx^@3 zXDmOrWmz17S>PZ67hroM0AIispAZ8(VK>wNE(A%8@1lIC3vuNctB_#}te)>2TT`=I zwyQ5)92fGBbo6}Svhp=MyH8>`)xY>&`>E&a>ogGsxb(1#vE9^aaZNMu#PFH-o`sHt z=jhQ#ghCSh9r=sU(5rHCyP;znzQ2W;NIoN!A@J{fOVzzh!j423=fLhDgyj*&pz#a^a+VVDo`ZS*gd^bbO zBd?^rsi-h`3)6~+!jWS>Y1FiG=B`I?P5ttjD%mvvw;*CMXhJh^DZTZ2&WVMfX!|BlDte|cCXnt&sA2=6PaqDZG#{QhP7-4aJ zDvBpNqZg)&+&b0vORk;k9lS~;6g|o^OKd*+;y(eSgOdLhFzOb@aQD=HXvVRCllIlkHv(iCre>))a-M3EqO+Y zly*ao;yyaOeD|XxttS=fdiI71iR#i`t?=FSdSSNsUg?p=9V|ZQnD+oQwAiGaK+e0k z%Dn{vJI(KJG+Bs2C6!^?A<_=_UGTCxGz(CZSvj8(5_%aJ7>MZ5yp1@4FRDN8e$h$Q zPE`xfIw{{Wq_=N3_J1dV@N(ONY3jr&$%->JGiN^eSvBNe`Sxrti9RoX=D-qr!7hpC zUrJ*B-QTV$Cm0e$&~*fg5K$buDoH_5s9~TKy>|vqN28EB@4-vaxlXue0 zxCT{KL6TdO> z=_U5CUA|tI)Pc{Hfm&+IvEd{mBO|h_A?(diio&}*zOvd>SyDckKI9X6H%s`><(`wpx`H5z_dE2M(wCiHtr@3~Jbj{7Ho&ukV4jFPIKK~H$ z`4wiWK%GWeI$H1p6`H5{?%n*-#<#ah%!-nvTlZ?S*o;v3L(1sFJEG_CQEw7okjr)A z#l*(NUdi?DVCLxRM(}F5W9QBBjH#hhC%P|u*WLRuLYx|^{mmO}kwqij_ebAO6LpXW zDvaY?FsGl>v`Fax*&jtc=ULUj_3mSBwhq;-CsV^OVa}mAn$1z-UJ16Qw{A!%96sS;0KIqh$weqT`GaB(quc5Vdi{Pow3=-Xvf9^?H-P5u< 
z>vcEpAD)xpA}?orqip7EqlcSP`du2jMrGbvjvOYwI;{dpMPBvQm-deuuMO7NbKme5 z&R4egT<%}j%+sI`<*ekgob;DxX7&0apC?Qu+&U+bdw?%@zl8I81(Jf^*QKt$n!0emY>RaW`^%tWg%|Z)4L-$59i~I|T6kaLO%PT2VErzo zaTMFl;N#?`0-`9BMyS*LAN(vfZ917LHz!4Bi_##_F2OuvXYGiWAYCT@?EH+1LS+zh zbXr#A#*(u|Tp=Kn{PVt{Tyx@C`umfA^vr(6@IL8G*F!t+n0>=- zc&2Z%zH0t*-NEV->L*yy;M15rH>KfC=Up4hn@xX~W3sJn#4A%~^2UuHCqEpM>`d@~ zQy;0UP)eirVNip8zMjq{F+RSt^|hVd<*6Z$`a4H?E!emgoNJ>mU0^q#vH6_(UkQxQ zQp*D^7m#oQd=O8A4}q8K7L-Z(YJQPQw#_w@M%Mcb>FDkmWcuHVrxk2kY7{=g{mX65 z1drepgPe!Q5AJBaJS?IBf6%cNVLt6-FHin=#2B|3g%c@e>=Vp(Qey%+F_EE#T&{ItHhzL`BXXl*9O_i&A z?D|GN%T#bF<(>|BBg45g;#Od;J~R|0>K5Tuqa_(0VmB6*FDC7ot{|bs-k;Z)n;V_m z%M2Hr6+%#C*)AX0R@Xr(P~?l0ee+I-?i-@9zsC3p8*VtMOK8a8}goc`t({$*Ue z*Q~RoSgJ=>T;iWNE1Q-hP=Jq*@~bY@z6Wvh=-0}vQ=!ZfnUvq@|s`qCA=uR zLyi&`^gVS`d9QZp7exOcYGjO4gv`Q}yH=ZmkzehhqV7zc&|ubq6Z~qZ=eJrM!*4nV zm_c!(%Z`_lB1pt+JJp--xl@(fRftbg_v<82expa3guiiVK;_ox|Uz|6E0mRSexAuwYO0vm? zMhII`!mJ@DCzmh)DhXwDq09;jR4OVe#wkSfl7Sd#6+}uJ-b_w9BkI^d#i$T+xeF+A zbs<^9(L&VkpAZ3q0B&whPU?I2ri|Ca_qnU?-o4xXi#CRG#5T_$my=Mwe~u#TU+-ir z@-jPq63Gs*T3d^B*;F+er7OhAGC`L*Gdr8Z?b10pD0)I-X=nPO9^IuXJ2lr?CZhpI zH(x0ZOp?RoX^ZhW1(N3dHj{VIu>$M<+fuxZ_ww{4hB@fB-#S5if>t^~7i_!WK}9&Y z{oUkit`?Nc229Bvm=zC>a(*(z))Os|JhCuaTiHVa-Oj`R)}Z}A-TD7k$c;NhoQKoL zQHS;UXF79k@+vDv2UgSRNUM7NfB)#?D+}?8_4O=hxACLO{wFr0!2a)6pPD3kAs-@3 z4AmV#Ac>7tE6F&eAtF-%wkH~BLdZ95!VJKJTXr(WkA6=4+^Xfq6d_vy>BDi6pyMS5 zX*2y^JOsQ0TK(>ikB1O+_o=BvRGCh7PShOz7U3{Pq|^Y&Q%HWKkUZdhJ;VRzM{P2E zvh90Cx_@0tBmx>j5F3${MN|m9O=Wc`g zICa!o1UmhQ17U;5u8^ro4h;@|g+>N%!kVXSZ1qi0@PsI#>D{%9mo5=9kE!l_kT?mW z&53XE@w51?L}Cp7u9O4vC)99N1T&4x&O?+*-qLW`YR|@+mSmht!dlZ*5h1CB?3s{> zSX-ZhM5-SGFX(cHC=G8qv?ObU;pRjjvjUe_Ubz07hjKf85kek?5F>&a7sC=8OS=G| z8eqf*(pAvok9x}HDqOvOJq|=W%oT)541&NV@GD*^DfdD%2!9JQG%X!RcGhvo50Qa2rSV;ifv+ZDtvkg zkc}zJT~cm{b=^YX4dScQB{TtYG{X}UNk}3Nkd6e?H5J%qqJn~7P$;Q-qRlW$8CsZ? z3R|?tN-&)%;L=sl-jAcYE?BQx+r&V&uRo7EQvfXIaiM@jLvWo$ND6iZWn5OhX=c|Y zXdAZdV&1HcZi|qLBJ)GV+(0=5L1V}TAZDOUPn%Who_X_eW3J57ko0&=VE? 
z`!vtEEPq6t(y#mlO|Ubi;CMMj@}&ryDa5-cXsOY=58j-X+LDL` z2bTyLfi5F~M-M7-h?esYHzg_kU%Hhtl=j>KZUbE!n$!{CHHlezoQDu&53_;8&-7N( z9o%PP$>J9g))=g9BG3>9WEi4o!U-P$dtyF7Y$5^zw{ky02{eu=aipfEqSgKe7w_{+ zNANYVR%fA)zkO$%dK0t7Tq}&%utns$O%2# z=^nVcphUX}O93W_GZXL>z%-lihU)4!7`}*sJhOm(Fy!kvZ$~vDv{79hF@Z7zA5Cl^ z6;qYiQKlhGmIJmzRk_GRTrOAaJ|dMI5A86#j&Q82T1zPtx&6q#e?Y z_(kqqs4^2$%}0O7ZqN$SZ}*WuVeu2%o6K%U<52ww_`|Qk;d7NzS{(gm6 z(pVR8M<+d>)jE&+k+Rs(hAie9u>(xZ%=P#$2FNRR)XXWMn0W(~PuO|!mO`H02b z0z4aDyG(NaBoVM<4cJiXu%kYJQWE8WmTq%(Q0|*1VQb1T(4c7KEWJ(&Plgq_i+Oah7D-WTWRvo69 zGtd!QAe|{qzlQeA^5_xbU()K@8l;>&zZoEM0vrN~;fJSz8w!n0FFdRee;MO`G6fyI zlC_a?d2E5iS-5@T-zX-1(_?sP4Aoi-p6rM5?;nzoWTu1=re;t!$|xj=y;!4estDVh z672L>va_<{;mV9Bu+{#5k}(~1f9&}S7lN>r;g99So&Er_{-7YGvA#mljGKI1U`A>f zkg6hLS(IqKB#{U$<1*g}nTXbn;QNgn$7gI5*)mg&g$#-C7(~NXo)0c=FD|V|P38vGTVGp@+4I zMyM(Vv@n`>yRMjGAPuonu(9S%rp_j+rZ7)o*h7LY9&O8s^&vRqLUL@0gYxs8z8})_ zMqQO}h{RQ~`L7oqJ$i%*Dd!=VW8l>$K@7O%HDp#tj~%OW{d!(Wr`%H((Z2b=F_Q@^ zC`iO0RgF=CMcDEYj*PDu;l;_E%Zy{*!p0^)*+tzP<2;Z=TUk>xfcvS7-HnKi#d+8L zVm(C#l30RK!c!l@cK8-ACo;zTX*g97QbRA{f77k z5HHbs7fcgL#2HX+B9H_-CNy)43Q;S_v26iJ8#w->nNDkKhWj1-P+iN|t;ZnnCiLA60_?m5-AZSt^6Cjtxgu|j6p1v4`0L&ra0 zgZPGF!M_SOT(KDJaWwEaY}ZDBAU3{FI%QNky1I9J3dFJY2RpN^NRUN5M?ho0HVR`@~Fp&FUj`v;b_i4^pB=;OVbm%^ucA`EbgM)Cs?n41Yoc&PL;dAM7yLJ}A zzevLEkU%tJKi0!$ZRmsv0X6l$dUMaBwNmj&fP6NM;y}1J8Zy0+zdRpS; zHz9inVm#kEgWN=8M|@|d9%HyGu@9Vrh#Y(Nc6S^^yZ7!b!D_65nuZ=ebIc#?g9lZ0 zEuV;ymQ;O>WLskh46C(y+3oBW6jBgK+R|uHP8wg=h+Wxsr{d#LM7Y^AYZn6{u%dIl}XsL)MDK-n?1G}3-&d(#%8S1Kn1A?A&c`2 z+bN-6O;GrWOuz)FS}ocbAMGAGLJR^quH?%sj_USYf-Vl2)_uAt$dCbn$)j51_6RDw z?88V#7z~l!^-ewe%yiKvLMV-Qmq@0y8CLKB4}5^g97@c?MiGFJxyCzhQB#MQH09VB6G>&L z9*#AH)eR>(zB2_GM_8GGfWH8_He0l5S77x3Mo7KJPtbc0t^79PrN;)nsTU0+W|z6c zo%IrDK9f6~41rZF>^4Ll0hy;Sa?j9@$R|4|UyN&NM4$v>1MRDbkpo20q4nk^M@Xk@C@~e24(~-#B%}pK-VzanSOIO= zJ4nc2fGN9xa5PMIWcq^aR5)ToI2zGxgp1gWd#>%mybV+!M6NbgEzy*RAHb@>k{$T6 zg+&j6T#Pu9V5dgnc%|D3Za5S$%YGQ@W5RPmqKC2`DG&`R#jWOnTRFsN4O%-RQ>`ps?5XCgKn%8Zsfk3TF@be8(?E_#%@Fm zSHXTiN5m_lF%q6#%VmqSe}EDlMXfl3%osN$(8v;D+;|_$Vf-$}ovtKmx=;Gl1f+p} zM|Onc(c%`GwcT%azgf=doIQy5%jR?TtE&g4IP_8Q;0q{YGy!HEB;rsu!lONY=+KwO zVLVPb&?w3VX*Vb~Z;nSdCtMHXMLgaePOmb{Dt&EkTU;aOSr;%e`t?j=_Htd=cB?BY zE@$WDB;wg6pts@3?Q*S}>McI{f|>^d+c2pz1u-Ovs6Uy0qQ_|A%Ith|g4WBZD<|C( z!xG(bphYp7VrGl3G5{0|T9D@2lxE&{zUA{>Mg5bZN_eS=D`py5XTHD@3)+&HxWFBH z@8-={j^hb&O6a?SkjOYYKd%ITu)@YtgIi#ofudAU#OB$BSf|B~lSBYN0`G4mVy*fx z;t6QtG)n$I=KV~eMZ+GrA^q-)MZhCFK`k*Ex(uL^Gc@<7{&cd+l_lAkUl@SE)78&q Iol`;+0HlTAIsgCw diff --git a/examples/running-llamas/artifacts/Llama-7b/peak_decode_throughput_bar_plot.png b/examples/running-llamas/artifacts/Llama-7b/peak_decode_throughput_bar_plot.png deleted file mode 100644 index 6afda2bf242e869e9171f7bd45792de5b655677d..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 40604 zcmbTe1yq(>*EM{BASp^?0MaTTV4|d?0tzBZ7?cPWEuo~+BB7wDNU9jLN{9l|Vt|Dp zC@rXnbjLq8p67Yb_m6M<@x9}DhvyvOzOQTVwbz~h?}e&c%i z^|Df{?Oa_QUAD`}oc`|*taos>mD!y0upB>zuyPbKX*MI|up(%50- z5#RUK-GsC0GsA#G@AGwmo#le+472nQqbAQfT zm(+OpoGDA#>UHlmPQ0BCcKEJMcX)?IOnlvN*SnVMdVAKt?CQ#kUdHY(SH))?zLr%i zXOTrX`Dd|O(hA%An%LZ0R*`Qd%C@zjQe-{;#y zEhj(B#VX;q@G;Cm|NlNF_s@|;W$&caR1r76l!@s-B|_0V2gX}{uT8k87x1wyeN=r+dn3^V^`PIP4#-^pE z71Stnm2c_NrG-x?S}v8C6o0*?;4XXm%O*wdsgZ|*!(A^q(+}pemCTOaG!UQWxUy5U zvB=#ap*qH>Y|#9uiqX-nHa0eKadAo3)e!~f#xw8bc>lR7W6tL7?S0h7hH13GwoV|Y zd>xNNYjNdydY1ZodqrZT5ArNty!f7RmVjG-%jBo(iT9p*`}aTLOP$Z%o2u2`m}4C$ zYe~Ds;afPCCDn8Id-kJ8L1pjX8}==`__ZRG zQ_iB4YM;BKdoBA^(}tO`W`}!*cft&?T4&_SB=`5X6kho8A+5Z;d=d9LqlpLAQiq>% 
zZP~JAVzMW3_^YyY73*V%RsoLC+l(U8W;8T36F<@tqhezNedi__9vWv=wffFqI+$<& z;`M92zJX%h;H83EAMWnf+l1xTB(1rz-96#T6@7nB<*Weq+;d+NDb)g}pP!|Fo(fJXo|d()0#)+45II z2A`sbV{!AzhV;;1pR=sz=Vt9%i@lk&$46IeITeD)WFD6^&I}h)53<0k@@=(`4~gbJ z;^wyHw&B{3A3ui8G#)>GT;An7D>i6YOt*zaO6J<3ujK#&p9&5$`J#6CFh2sb)S+bdYR?((%wL6S zfy})h(jpHvVc!?IySwB4Q@kdJ;AyO zsIPpRpmY&&^#YmY?Af!upC0mldw$NiO&%K&f+eG&eC8%?lh6Is?UcuX82tWbCAO-& z=4jrv&`iY7eR~6j) z{2o2ph_FpgOA9{n>4D{s_jl%(YnKNtp`qsIrn3%t>glm!PZ{>@+t=>>#Iig{G19!) zYukeZ8O%ssd@EL@;BgxBP9OA-jJ3Ur8yd9mF-}t>#|vkjBUc{G zJsLVt{M@C7jcUWY^tKdkti3ADvU~S#qs&9gIVJYI=5)8R5}q3Qu?YXy`|DRKa@0;; z-B*uVUtW=jj*j-{kv5yLJ?!)Hk!_tUVw3#ZjT<*)w`|eEaUAjzIQnOFV8D@%j*jtb z_Jzeu)Q%k!=iuag_t01dk6Bh49-p1or=tK~{#m6U*x9W4Tv~a&qJ- zi-vU5I7QD5+ui%aX%;PR8~yd!1Pi-j^NE0ypEH?CF1!8tcIolsO{~i|&{b7c6?hB^ zBauh&${ex4@k{hB*1fhq+4Rw|d;8MZ=4OWV@h&{u#}bsz39nhh=sP=lZQ#|=$NT&2 zkcQ%K-I6==(`e1wwLkttxcsG03++OqIU6CbJj+ASy0iR#&%_jg2iNH1q{N zBI@p%+AO{f*U8!c z_Dul$$y28 zeD6CE@*A_$ucATH{PB(kPT$iB4 zS!Ls$p>=76(CXEz^~cZl+mGpMYH6tnM<*c-PTCtEI8b$a*OhwIjQLaaEd@>@Q)9m` zCi+Y)$im3XeBR%mMp#6|(#9sR>5*+`O|*zfwgpXmd^{FYracD{t9f(F z>H6pA+>ahRW(6G3`|&=fn!0-X@85^-O>}m2P^y{NzMp*VGrI!WS{vU#bXuw9@o`Q= zLqpk9jk0)4Ya}SUmI68ZZ_g(8kIwafRoLY4ZPoAQd~Li`4$w`wpo+k!2m948@<qH!-lSS~GA`(ygM=n3;`DoO11`U^+WHwOPq`F5a!LaR(0D z?fdt4$sBo>gd}-6H+RFE8=H0ebGABs6XoXSE*yIN__1Q0obB6Hmv@Se^fhfvPIv$F z?RG}SZViq0_I5m@_P1}}ViFSO66N%7ZI7kuv0=ks$)nP${{HerL|FK~kfbpngBZN3FqTF|-MFzr>FmhPHAPOHi)oU& zUtMFd$=%-n<;laA&MfniJ(OLkAB}T2kfIpn`t#%3!k!!3&zwl$OV7x-k2fltMJ2wX zA)HLokqwfun23l7t*U;4V)QvFYWumdbORniL4AbWrem-9lJ&-l-Q3*nqSPjP&CN{G z`5`IQ(N<5IpP9Zdpma7WC&wvaNJ2kh8D)l;u(7qh^8C5dr+a(VCz^ey=H9-3Ey~Hs z>Fn&B0{p>P&9r2R7`4f!mMi)7*RQvbZ-*6aall`G`C?X}VpYNT{=dwpL0i9DlP@IJ&;_B5$BKkemMZ@83sIA`7i2rlu|femEYnYtEAf z*coMcGB#RW`}XbK@Nm{bZ`392yLaz?%raL=);rLO%F87vD3kA-snv9WP~i-v}C zqIy%S$hcf+$d@N4ul;HLTwANtKP&pM`fHK9@8B8VDdpFH4xOEw@uXEnsB_e!DCyS; zW@cuJM(?y6XnUb%3iuT(W3F6xuIocjEnZtfLgGYA!62{oTztMm>t&RT&8Hf(4Hn8Q zO_GRqL{03gxvRN<#IFPU(5m8Drs}MVii&#fm*b%~Y}}X%_*L_p#SbU`>K~_LbqTvE z00v`v&*PO5MSiCKo}Qk`fKtF$0a;lxXiu_z=Y32}Oe}yEqjc_&N{rkk((?NCYYPVl zna|yqi@N)fqCAGbFCm3exa`Tv&qIoTaL(T$WJ#{WZzi4|UCY=sv|;VqwUn}(w2Ifn zGT@l<=6r|1iPjR|rTY5%@k-}-4DWN{e>iihD42nRQH3wf!Xs8~JOA{|FXqE3xjiGp z!{r~+^aC^R;7v0k~owN8)8uQhM%HbV8ZG#I37_ zoht2pm)c!h9lm@KHR*O?{#tgnY{Ao~wND!PmM`~q>x)cHO|4(TMA^K$s@Yu|JAA3N zu1?>6^<0#YdQwV?@Uw`Hk+I(g3lHSZ)R@sYU55v;yIT$msY?M$K43nV{qS4?B2b5r5`_X#Y!3nU4CEP%7G(I$Bt63+*mLEEFXej2@(?y9&=N97>DV`f+MGEpP)>~(sDdj$YvU57?XvazK zDH{ea#p~WvTsw)?RbiVJ?@Kb#A7fCk`Yhohscm6cW3 zH{HYp-QkP+=H`ObPyIJ0;P{18dv}c-mrCFzB zv!(o_l>UI*7z$^pon4`r&ReuJNsOL;44lxAF`h+;bL1G|)%NAbX;uGQaBh zsZ;R|hEr=zB`v@l6?+;Ko@$#)Q}&h?7Z)$^nca>?Iw(~;vIF^skTmGjmvL|i=Q}(( z@hKowN?BPMZAy8m|DyKq-xurZ>bB1|^)VAuA^jThy^e#DLp+Lz^v3Ef;=>8foNS%NzV4w6ug&Zh-2K)DGIyz>z zbj6STZti@2ePh`yp2Hg5kaP7dkQN6r4~3#3NI}YRZU%fCmI@x}dU-|h%9W#alhTiT z(eZ{Bdr#NKcKf{1bYhG4&STkFl_T#&TN4(Oks%$z2omO=(f#+f<3*ms;sS~u1jiTd zc$E0-7!Xq9(=(D^@=jkwj%&vPYUaAW1dT*P0aS5zFi=S8b#H6vl;>7_`tV`8uIXq) zLqh@TQ^F7;oT=Gn{@c?tEE=NYfdfCR(NO&HfAVBA*b4z=uQgj~I*?j->gmPYTw;f? 
zs;JN?thPml!1L6}2`Zh9=_wqNl)ML~r`G$&?YnpJ7d#|j9pmX|4c#3bOB*r{M#sd& zIOy)O2T&@-(#Qcrp$lem$4q|2N|ua zL@$$b?9B?j1jWfm8CjI9d;cMFwgIxsfUEZc?f3rtDJ&t;^YB~AmoHy>-``<*>iUDw z5rnEDKNi5m$?TdAfSP1g8P=HZkl50C46M;SYMN2H2|LBCcy7c6xtV?Wa?_*6Uy!?& zo*nPMq0=eLKgCy*WM>+4H%tgE|%+UOVaq{HFv!g$~w;VIg+GW2~KylgSm&e}R@cuLq z zzsvPiW&^OaogI}ITe*JWOS0|1Zbxb@`|yFYw7SVGFE7uez|pe!)vKEIKT>bsrV|tt zB+bb+8S}S~68p|P0S|{hw*s_W;A!lFwt>MiAYV#bTidb(KU6E7hW_+w+R0x?@d7m* zX=1upYhoD|FQ)ZtX_Pp4@F0N40sE4qq$I_Ry@<1Ne2~-Z)R|-{At4%YZL{A!E}TE# z^>nduhFM@=->E{e?ZsR6G@Yu*nx%C+bB3P1y*5^IzA!7Dn-ID39s@}&dE3wYEUl=Z z0}8qND?#Dx$Qlq*H`Ct!`C23~o>1u2sg=+ja5T|$xfm*|&F_-T+tyfE!i(XUgyAMok934B*r*L{r;ve=_C;JIb| z{L=XuvKcSY(DPDUO=XpU4xSw#3MRMa>-CH)a)fI17w9J{zu74M3BApcBS*@1yCp1A z&MTm&qf5dODl2T;2PoqlIX*OWdF2OY>T~Ada$wcKIP4Eut*J;eW1c#A#FQO(+h2A++!Zz~X}10z57q3o#|8}p(GcmbNNy{9J_VORFz z1r7F;Pr*$JgHh%Jf(>72C{t*#u0Iq?O{^+BV zP*9w8--4a6L=_->1<0i^B%HFcGFmw~IeG7?O(P>Cj=l9lR4FR;;AeAR!uOHR*62FIvo6+Vm)4-aI7P{J9F~a5p(jz}`TK=TXa6Y_(_Qn+*WoWB%}?uGpP ztqF>r=aHn#-@O}r_?i7M= z0t1?F7Vk&)A-pN5_x2w@uJ;5VoShmCY-pGlKG)}aX8uc4(+jM!dJyXlEv>~@rA%q9 ztvfjm#wh~+&(91m#o;?3TxYymY{TccSms3_3UxgzZvrKm%S*(SUx3#OccjvKV$3}pq>Z*heUftpL zp}sz8sQcAE&?AV@@9tjt{{8!u4K}s)A?zY+ zq@<);yETfp6w6HBu(23A*wFStOuzW?$35sFb|)&|I(6#Q&i(sirKF{gqdIy1RFaSg zu8I`Y$4d!;@jcOyz8tl3O`XfjBj@nUyAB_|ReMcFAMi36%?mpfcJbnsiyxYs74-EI z*ZT^eGf&*+5`=cHroKK3z2`kFA^NUnwdz8*-)0X?a)j4PO8S1fEDOa3t&oKA)pN_^ z^j~(o&FTm4*oD0p#yV`+uptFh_ABW?p+`_4$gkbMf1l*`ACRX!ulke1xcTyzi9 zyLVqoPfxe2Pujt?a;0S7u=%TN9v&W12?+vX;^G|aAcV8*8nZ2r9zVY8!Gi~ADYOz2 z61cdyL={o^HbVdqX;-p8c~X}EGNkjD{L?2R#A0=hkM%VjJ$-sNS^9?uq?wm2c{6NQ zV$`ak0XZPW?e`a^L=;@f4}eaAzZ0jWYUU0gO$veQ0gT+_J2#_PeeF|ck9}_U?SYpY zWKp$Kag+_g&F|FK4#r{B0z+l-4-k9+^&5+9|e=H@HN4BO=84FKT=Cgv|s zE8T3B9`~Hr1y@p2R(Afx=giqAZ|8YwUTHH8RK?qwnR}M3+;V&6R(r2c!v)|8in!jT z{aKk(mnegJr)g$p784&ov%k*tvZ#ip;))w`RXUAe>IjelM)Gm1AM2^%*;5RXGx~Ln zR62;ba^!dINCEC3(d_p7R zcu(ZTi|U}OiM9Z~Do5Qa+*|9_RcS&E($UlJ0LzQ5GQrY#k=kpS_&cxQ3S;|Zru33zJ4{hnX?0J{w+=b zaJneEB8Lwje*gJ%n$PUiULBoURj2XQ~ettivF!}NC=k? zk0Y9lcQ_{y(>o!e5eb3_lz@qYvy>a2c@Afv__Qt{ARq~{Qi0dx2C(QXQ0t)2 zP}KLj#7JZdA=FlYbs~cx{(}_U`x!rP?*kiwqV5C*29k9$h{pkNL_m?n>)4uXE+?q? 
z5*~=Qfsrg$u{-#2Fk4*3!z?=OMSgEyzjo|vl*1uj1bwy~0*F&*)e=0#YN#TBKGQ#~ zNjU+@hY)yf(12PF;lv67vRasIBBX~G$S5?_u3fuiH*c;)$!!1i>msUL*}Hdp`ybGu z&$MgG6+80G!yjVDtnDg%fBomr+J=T43w{NYwIdm=qs+cyQJ^a_nFE7-*xR+VBmG&;UVKQxx*Wjzfo5J$v>npc)ETd1WPiPhBDd6|dqe zfS_}}c6(@O=sXzWl$4Yq?{A?0z#hK(N}Lo`%G!8+Bxq6}L4XNKNwI=spr~i(#`pAI zo;4%L=<<~-?QLxo#N)26E+Rk?=7(Cnemxt7TE1>v9x9*ZK|BG{Xjk4tusAg}I~#lU z5#a+ZHw%~Kf-~bu?i*^$BPOUvP1Bl05yG^}Y$7QbbM)L=T9=+aP)i>)pvjo1XjDOHlaoMH1C1=Ah*E`Kx2zK!dLV*{c0yiUPFeX# ze*w60oXceb^J`H^?mc|SyXL@x(PVw#X&Ml%LOkmag`hi!D=mN(+j+;3@`IiXmeg^mYnSCY2sqe=wx2(l+d2dLL0&s`*K%7XLSo*rF1D<) z@=j^i6_PXh_A%e2*jC9e!&Yho7VQNkKtu}Y)e~2Dy+gjJN;iGvI5A*J6c*6c+7=e8 z0RtE)a3Oq<0I&j~7Z{dy07DQ!gX7PDhKW*z&|Ck^(}fiujcy4@nr>!x77H}Jf1@8P zG&z5+L#lzswAPJ}S>KYg6f0wiszYd9!XTkt2J6oRMAG{?OArs}2bzwnHx4|FHl!Ij zBC*~#q=!6EnY81=0E{#&(*)@vhJzCm1%0M|NRXadN=nbZ>?JmvL>3YYF;Upt+k?+q zm;3oGHg|0peuIQE5oN8Huo6ux!WaY9gcD91b_K3LqsyZufc0bnpdo6Z(~7=zi+DVe zp_zD&jSdd(0Q`;+PzpnoyeKVAYV4fmdB$eJ?t%hF)_nN;?v`hsoGAYxmo9NDNMW~B z*4X>kN-8LDJ{cyXQ{HVoNy z`ybG0Ygc<+DS@p>NKS5V!Ojc91{0>Gb%`p$8p1npP-rO9)gqI%_4YEVsHlXm*sRmV zxZL{Al6=**5p_>gyVHE<=k6VOz8!6pj(qK9UJj1)NbjVPCK(iU2z+p$*^?8HE-64_ zEkN360DB@4Mc!08w*}{mPRPxhD`9a74GBR8-eqij?a<-F_rX?e0xc4qDjAQQMyra{ zXcb(>_bRxkNZ}HnTuMcP%%)8-1azY69JR1eJ>8u5#=Qi3SrkZ68gfKqO}xb3K*#~A zCZ+%pB_$;w+`GH3h({Z(6BAD1mOK!3>lXk0hYznn{{Ru5Y*2S4Hdd2|hX-`a)A<&6 zK&!6){yh+oj6$Zeh%X4h3ruMm*1aAL!!CXONTj!6Z!{kcKhR2;Kf|GAXMYE*$wZ;W zeus?F3$XnWaxM98FzFoZd(WQTzCY{8Ets41G&Iol%vt`NKUngMyO?DHA${*){(8Si zmTkrl4=jNLflygn>|55@J0fFUf<@>eJ~IUcssI8TWZ!z6Wv}trC#q}Dg8>5x-iD1c zftW|TXi*L*kVC~@%K>xaP?Y;yOCnJkHl6sm)1w7VZE|YraOFW)*G*K{asLflw~~4= z^q8tOOWFcdYr8h*{R13d;v@40I#Pt#f}n*PL8~&1``sg3{Zr72;pHjwLB|Vw)^x{i z&2HaKlsgKM`18Yy4|SL_|qadD^_&$Ybfk2<$uCmc0BdMIx+*bGl|~gyhVCX3UuTVaz}!p=1z5~|9&&ef zets}W9atz{v7qmZ+kR#p+Al}wsD#@?zJ5E-pl6>;*8*AwEdw}BWwgGO*QpWDsgbqN zSZTaY3DrICYKGsf9?g5af;$^X6wR@-CAyl;BD0;_z5*45B?efpmv7(hxfcmq zAUZE^jMoAF2NB^EtQGWP_x|`6mrof7ulY>>-YP8oU~lSTz~E@S6?(EO_+FaTI72r! z*ys!z2Qfl~-GxR4`=W_PB^8GppSUIC$nzVZxip~d-vP9J2U`0$&g$*kw@F*H?xPDb z#Ci0by+Bp2f4(XG1UYd%F3td0pm=(I=bw3p*-YaJ9#v#^K*q&DQ=~@J&H{3qdxgs# z_*g)%bEt(@EED06s9Q>oBm@UJXuzNlgy6Lxj3X#p^mlCM&Ye5eTFj3GEd6m<$|RdY zeDpRwl2G=Vpm=$C7PDg$?c?8TfC@+wx)6X(!r#PN@;`iO>I1-4kIvXg!3!# zAjxQWd3hzAr0MDE3gKFAoxZ7)XN{8+3)enZX5Y01gk0LbeFZU>Y~6b8*JT<%DJ3j;j|hSrLGC)2gl1-{i3!JJw-fV$d3=o)Mg!Jlqzj*n@Q{ z^IYqgmZQ?6!@Pjrt@O*66*wFO4>_V>dtHetnV(w;m1B6R(^{!2uBLpS&yrp&79Jk* z0RE(P4C80E*7J8{;hJW*$y&mp5Fi*)tE!TtUPGYT_U+rP(5Kwn|L|&KGS_mjdRW(0 z@-$Kf9XeDY{lH3T(_A_N4*>`-z!w|B&j$1$bC1(-ci%D}`D^D|xfJNe_zfrMva!q1 zIxc|qhY2F-!Gm@Ahx5!72q-i;o;<+R1_}wyncUc-Z6mJ!jY2byX2uhmearZ)6Akd& zCg2IcMHymSL(3pcJifCEEp< zmv`b5Ah=ZGY_aq5GK6Z-Lck3PVI&dr{Ol-)f?}0U;Qs=30wN>zi>IG^PxHWgK`;QS zacC8c&tV$voTcc1Nn;Iw3C1E2Ofa#2p*66^f>;GtH!gL+-jnDh_9Jxjs}bwy)n2)` z5`!Z+d$CnQKvx2R`qZb7AG5)xDEiFsf%hSOEXa~zP);J~WBUg9B)0$r4~&logF>r~ zS$hF=lDgaR0r0$K@L!>7qt(#_=3|*%1?N&JAc8E+u0~MsA|fK-4{%BFCR8~PBVKmw z<`G^j+Wg%CCJR+niYk5oo&&0i26{r^qez%kQ_gfH^vlSo; zcw~-*L+n&LcyMKg*^~2`nd^~|_abeyJCV$tPcM^?pQNFnYO+#sTTZV)1>c*jMlWUd zm=W%8(z%6)heyGbn0dI!3J5SeHTEQ0kMwN*k?Jr`qki1Xdt-mhy>Q1;D(WYdR5?`lQ|$LU`Y3YKUL~r9xKG#Kv;r+ zhnYcM=9Ei5@mPNO&oyxIWU+DV%3E4i?YpbD1L8w~X(9a&+AUkRGEo0tif>R>6<)ql zqx~R7lZ4P@T(X2%>f1Ux&ezu)pu-O1=aE!HmsJJSIe>(x3O9Uzf4{tAn`*)=Scr=V ze&P*B$X|e^IJDZ#5Y1Irzj=Wp4XjcI2M#Qz;Bb9`VvE`ph{H-y0N?`eJr;=M|E{nE z?0NjB#Kw!ww3joq=MXC!>=cBVft!S<>6Z;4oCjA4p4hF$4c{d! 
zzh-%C9V`i~Nfw7oq0EzR;qIPW%#eyr4j!~d^iY0y8yeLFQGGZ11OVEYpLFqZRTa0R znA&=Hl5NCv^H_;!n*88F81Zsr5rG6v3Z2DB9HXsNhmw!FQ2{v0Ji02^W+(A~#PsxE zj)4RX;+HFsZ@vi@JTLeFFli8o3a=H;M}rmwT_dppkr0Qaikx4>K`NT~z_SVTEIg`+ zS%{Mr*g3M7So#_`AvccPe9M@mj96OIRc3vsBfH!o(zr;>>VoLWXk#!@`? z-Vz^gA|W7;ly#y*xrEH;JJ<+%*B|gN!DeSiM~4B#5K^%B?AhZyT^A>}2nUg0NP&(?A}=HT~x!qPM~RnC~V(b=caziEQtBQxyW!978V9c>)Wyp2X*1S%lOny z$E`^N2_2_Uv#f-HTYMD{YV+F<+x`~9R&FHk^0G2VSP+FK7b%OgNFGR6MPAbkPao;D z*-0vw6B5drSRL&BjkH7i_wVmjK3aLEX2yz-v4+w-R0pu7iNaImj>>pyuG|A3LZdWjU~fBr@&>Y2QqU9 z=msx5RIE5?7#uThimZa`kN5Pbd&1fkPe+WO(!FU{e3KReP(^VcDs zg7ya8cW#Rd8Px#CR1bJ@?HVt!Ah?Ia!39{$k7Es^u_~;}(Kl|G9hD=`rI)kzD6Pqp z6Z=V!UB7Jg`R}Y0vC8l7UhZiZD7rQd$<$UM7NWw-Q z(t;2A@slUScL!rB%QX2ebgotK9kau*1KI<=?JP%`tK=Q~IGLO1pLtC>6ME^NsV0N8 zJ^X~&#LIf%`$+ACNxr4nn-Cm$mKtMzcPNmcP&FVh7#y><7MV#5A+tGpH#RW=rFOgkhWrw80#vSZ*?= z9?K5ixS*h*4XiZfM_fjwrK&sU7FsVI9i5P{u-b9YjT<&Z#m7szlgJ_E?-WgplZ(rR zn>P8qEh{JiD}1Nm&7t@voE^6%!-c zf>U-Ry{?)pi{MHLKg}A{R{$z!8&axCuV4E5H7hlMuA|T!?Ty$e(DUkEUe8*_U`jy7 zWSsWWL0v*@Id5H9+`4wUY{t!Y^%OwXHOX^8Aq#zE{4y)pu(4Xq%~!5mVdv(iN3Trz z!5{<~`~S{L(jB2+cnyU>fkh@C(ai-C%!$r}_&^C#OG;V|p@fD4RRHoZ6flW^#D8e| z1?}PbZ6rl<{_4-f$HvlQZ{e#QVznU8$j!8ex>!#fAW(a(p&$~r1DYaFMq4=Q zJz!Y|nH3cmM`maO0s~)4Yf0aX?fdbA34$Bp%R$I$AH>NFM4j<=E7A~t0c0Ty`o6B;`smRB1TJ^4=fWUi@zt_On%CZ@1ualp zlJ4F;Q1*`G#c*yF5Wq2K?(2whka+??&$2xD&EOs3!5oBDAlWy$1E;vQ&>VW^AqQT- z59cum-E}r*R+eDWXU*8?H2ImTill6&HY`6yp-0;YP{IJRBU;3YM55$!q@CJ$Wp6Xm z29fxQL6!w?H{CIYk{x3}>Jw%qNoGZV^zv&RyrJNcur?uRVJ>oUlV36!+&e*s5r$dM zT3`u`G||H4!Y=&x8;&rSuvz%P$0Q5moaJ5FgvS}y#qU^E{48Qa1f zC)XwX^M=8}K|*R!1XJVKvTysRnMxkGq6#LCuu>%P0OdKly9a{_L`wAsftXWH9*|Gc z8DB?eaa4MFuL)U%YBb&%aFL`A;4soq{r9QS*2uf{@qs}J$OV?R1dqN@0HVp_g`)sC z&}`bW1r`CH1ZN}*4E%&G14tn&hwyPkOGSVhfJQ)WL4o`X)C7o5xgr1k^*^*nkRL#Y z(ZRX__(DU$U7MVl8G(vo4OO_T81xjkq~6~rN=3gI85!9qv=B(KYf$dsu3hKG_;)XL z+BsFP!{k*_U9G+@wl?c2E)ww5HtZ%SFVT{{;R$s!qot~ z@W$wRf;EKM6+{=l)e2oInGzxgT3%j01T8;#zfgmTGZc)fWCkp!d|~odQVzgYTtb2+ zAI*d%&KsG69VBMw)8JREf@?`29_ZrAWobkrwNDphfAQhNK6}9gx-zoh?{$PYOTpt@ z?f zI9d5*-e~{+cXxDo!L=_&0TSa|SO$|pgBsJ)!rySp0op?-C?jtLe-_eCGJ*-yWGjj2 zi*jJO&

4UQw=MdPh(cgirM z=|`3vj=_oP1Rw#J_(Y&?<0Y zQu)Abu3{Ds@M(4*?k*veAPg~!CK}hpUAb}rF-w(#izbqZT=~LQN^U@O<0S-%QwmJ- zVB3pDlq!M*9Pw6|0%<82nAaW)S$GCb5-=2*kAnKKKsF%lX#n>Xz-uF8{|5#dCQY(6 z1gI#}tdg?|Xqin~h2DS`>Ppd_yLUSfWsWW`<;X{sY?0($ z(`&$|#0i2W!`p7v|Fv+KO^%l$QxLKmYiSky;-A>?Va2mBboXG5~vUghr zB{py72A5ln?ix(NnivT&F-zoPMpjl0;OO?gzHrP^X=rJ6fH|W4z=^RCRCZPRAn8wt zhYduW!hv)XA@}E2_-MeL4ww#jFJwZI+_ZUf^8Ndvhzo1rnn2}m*_P#P7$Cp^18k_q znX6&f&TIkW18GLycTSNaaE3^kfF6O~i0B}=901IU>gC8|hre9o#p?(uP+UKO6(!d4 zy*Bv1#Y+XkA*_%wRr>AUAh$b?{@j0v%nRiquid+UA2OF}W(&;sPEJmkDY&B2d^?A`E!(_r?jpJVJBqTrcIl| zL|VMwT;Iq*EXz4PBquaR?uOb7e2m_1R4r5`Qz^U4h z(#3b*2mlwf0~7(Sx`O8kQFhkebve{ z0x^5AzZiKMf$I;Y&1*~!CTp!C6Ce5Cj4SWHjw>Y6$;x>BF4} zyf)Gy``vjXLJFPXQ-!w%1W_peV)9KUp#(>fDuE*t zva`>CJa?5&fSMZ(x~e1nbNBb9Iga%1g>RA@-Z1B8d_U|jilts{VH7G3{nssQ0J$V*zAXjYJ!t;4%GAJuYZk z3rl?W-v90@4jNEhR$!gU4G{7nF6w81n>KP9h>KzO0V6;q&Ke(r06YncN`(!{NQd zMO$FbsbIUb5ML&Qn(4&Q2b!4w92_@5uk{D>`W}uiVPsrW^{*(wnZ8;yKQ($1WgA(V zDy^2hK&~#Rs0jFb!Ab0P=50&}0*SKIfI0U}ABls@x(I@SkY^xz$+r_OH%EQpNCH4# zr@n82IYG+2con#Oo?4KM;Ycb(LHM`s0Sl}qOtA?=Xm8+?(fJ>KMZyoP3g0HXWnh5U z#T+hlrbljRzhglb;lMheg#t`eqqrs`$&&jNkTjw5vw#jj%e3dei>3CK_H72pB%)}M z`48BoFiu^HS_h8`hr*_XP__cGmZWz_Cl*77OWQg}iARN~v4MYR7yA)X3Edo_R$v+@ zGxG@gi~Ky~M@(`IU=BiEg9UIoSPM9FXsFe=XarP!70Q6@iH~cdMgGDrGv-!WkTuR^ z)EiZJVG;%gSPbS9@3mCDSSMwHl0@zXNqYE@1+E=9+KCL|JWYm+VGG550zsg}5j0V@ zh1Yv~?I0qv7>i7HUo#&%{2*vy<7Ek*h(!f-7>+@QAc(3!zr6viGMQ4w6&^HXd>KfR zOn{+RhjC`bW@Z4OGHjz01TBy>93WbG^B^#m0t*3ohJ=S}V73eXf)L2xTyvTWN2WUk ziw0)|3TkW`ncFVA9hJq6Nh}Nvk$MfXZ!!55JSWmEJq6w&0M%FoQVZt%gK)P4ZkoXG zNGJ>MdWs@+C0JMnVpc~}^Dae`WoB;rk4cUdJ?!`;v-BIWQ+n-dXZT7XqNL&&%45t| zcCjp=7DUn4Tjiue(7`Q;67WUn+1l8=l$QMs;}0qmF{#0xOolD?W|(b;NrrivR4zmm zaf^Z`$#4h@CF`e$LkP`y^}2P;I22W|N>dalkrxQvlC@130wb;`!i*uyQ>v(;AZjdt zgz~fQ-(O35IB#bO-`wB1rzc<5)TrXl97mOH+Y0NQ$kV5f?3S8j4~{cIgN=6tl*ss;06>CwLnr=(`KMili{=Eh#!EMK=o+bR{$0B*1}d8 zh1)HVZwqH}=?{S}mWled^so9~jZJ?1H{WxwU?HRPAwp*0SBnNAlNKK`0Cke5*F=L3 zK#oiZp%FNL>C&YZH{6PH@48E~0xHvd~p5>Tgq{`%F4tms)Qh-Ufp>EE>cC*igr6AW;hVV)we z8JcvUH&G^lWgua**?qjn4%b|^L#wi75u^vC{p9p?R=_iik}iT0My@Mio)=!o2hMwO z)*+``b=5>SmiVfiI(?ePOK%QY{Q|N(7uIkjaT?QJ7HGu~x0SZzIeB?ixNeAfq-7z+ zo9hDZ13@kYIzgLUiXj7VS7cHICpO2iUG0DPhgRBqSy_>=IO0Z`RnV0E{z*L`#X|X4 z{3rDwnm~n-lZ^ik7{`y9o4%;OW#UqqJB{ugLvo0+v$Oq>H!$!(OA$jWnm?I;R~;k= zE8v?r6<=kdPdg*FNe-=8D87I34H$Yg%$~qt!v^0)@-uv2f%~6?|6wWgDQhb$3tQW3 zX@}Ou3ZtL+cP0m{mB;u-!VsAz!_q^+UP$B5VAvgiL1uyKtf)|37fZ&s33Y=74M3Ft zdqK!eE~_w#C9x;o5AKe;de_e*nRmd{3%)>aPDoW<8)-+yaQtRf6$FELS9su zWFZW7paF`(a=$*2Fz&PShk6!o-BWZxi;6zGAAxiw( z-_;d>iO5)szt}@e-I5bC%|@u$pc`Qn!c2vZ>|e_3a&opcu5m-$W76xDdoya1j{N`N zHw;9jWzXo^L&MOhI7vhDdb*KeBi#T>{Xc#Pab`fajEs!`L1hX|M3S-A+lw(vvW@?{ zo6^u50kzIGoyMesE|b>37y@Vvka=&Ud7)2)9pEA(``?X|A-`h(H4Lh?u%hCjH7k|9 z@4&(v}9PF&r~cLNMZ>dM+S!2}7fWCxc9N zkaCBUpBSxCaY-HlZ|gWcb{ZBSX24YVD;Ca17h)EK?-r7+kf{ipf}&T6`J;MFY=H+1 zO594M~AJ&{u zT=+14JlFq|*+x1O_u^Theh}Xsu`j?skOY%knJ71TG00Hp^B`(CIy(B@xg!SGP>w-Mw5kvprrlYjdsFhNYy>_bn9+>oW;cyAxM@(Z<(Uc-_ZN z?kzMiI(Nu5Iz%PIx035Ourol)Tnd|rWe%h4uENuOD0cw#fePVNSQS`4DziS* z*U4%2qAvTd#1PmQ&x6zD{yB)`YX9Xi6Y@$0{NxlxN(@dD$WaC{F{faytMG7Qx)mco zv4NES!*Bwjr&X;T#U!M&lJ(Fjram_nX$5&GO#u*4MI>@W;JU z#Cufc4bBbp08qAJ#XV(g{sjPGusLgEFbo5Zhy%8pzYS8(dsWMW)T2j_K(`AS--W>f zylS-Y<*k5$Bar;h{QS5B?Qae$%roD40f^Y#f7_YaHZ=31!kCKC&iu6rClt*Hys&u& zm_AjOD^mxNgyG?n#2f%7jx=*nO^5VYE$36ADu8DA{)5d{?7}beLx<)&I;MDeKl{Ge zbuW83*w@j#`7+O{Rfk?j|5X>HP#PAX5qkMOw1ns3+h_FudRwM;N(Gt>;lHd_2=U+D>9~d_ zH!i?AbZrJYr?7Ua2VwKjh>^P)F|>jN*e;q)ce*i~2B)zKE^L^jh1}%ta7g&|6+K4l z2wjk1TcYoPat#LhrrOLq#K_f!7$VyNumd<>!KU4giB4M5&XL(N@lTsLy4BeYz1?`AcJ8+LfHU`=r 
zpZv^5)dSP~<3ADBHlDf#fe)j+eo&@CZNG!{7PD7Nq3c@(-<2g-(_nLi!4Du^5}KCy z`tKX9d(52MA*3)*=1Y-(RdHxA6d4J_f(53EkiXhWcF2*=1(g?a2+7vOq>X!TN`YU9 zdx!v(h018UZlEa6+ZYm=6Goa@`D7^w-fCzTDGP8OsHI%}k2=sdTtE+*3iCTzJotGc z!LeJzJHIfgpcwcr*G8AzIYV$9(!mAX6i4}iGR*hmO^u1+>T4qip~72=qp5gnYHJ=gAC7l!!q$HWcJZ;to0 z8yPGiCz{Gd@p^9@XWG<~~2j!94+(%}NH_0Rh{e*vAQ$fD(e_xribIFB$=;&KG%Hjtf7W#}EPe zX*jl43om0Q0xv2#@)~rqb~KAb5XGaAneAwi`5dCBq4r*Y#gDQ;=_7GN8fTavK@O!r z5ywR!mE?{Khi{5tJuF}aCtgUP-Ad{6tsx;@KMlZw*_mFBO}l$n4Kqx*tCJc)8`%lm zjYdS^voHJ0!#$**2RXxz*%TO|YrJ`Jl@xL7L*rrJ*kb5kqLCvP>-NkbfOH!&v>U|$>HUoi4y!m3LtP9xtRs&ku)jr zVk;q!kV!&puHwc5#sMjdvBObs>|wl>C)aY|w{bIoM&ld^(In7}#8!(;0n^PQoE;oH zbSObuu@P=*=K$oAoplO z6x;#X52M42+pOy%`<^+#bPJRNS1l)bT^=bDa=~o!+_uSSEit9usGJteDd4WS)6Uoq zX%SEgQ@BaGyB2-y9@tDZG|Ec)n!R?<_9UM&%g2sXGP{Xv@>#tAg-tYuXy*a zAu_xjNN=kElb@JhyMkcS0&LUJaK$7cx$_RG>1Id>;$Y{#eOFLF?%_PSLWjW3bmGD( zDsvMT>QppE*RBnP^>K~f31$`+5wsaUag|@T{+>N=Z6z3L=fwM(ajUk*FmCA>8Xd)w ziQ(@h@y$Ao4LB$(w!5ymla|)At!e0GzMa?US>K;75;g}MbDw(Q@ zQfePOz`b}?Udal>PPhkGLn1F6A3;8~1IMSKF#?NoG*Dn*KtMf47Vm@n?fUU!7r26} z3T%nKVOnNpHwNZmW7-ZNl^`lBB;;@3S}d*Q!HoQ18anBk>xYYc$n8hyw=gehg_#jD z_%`;Zb*YPs3yBtNpZ-=B_ad%6mcQorwfr`8Ow8oJR&2wMf^L>-Zk1Am%a*?v|l+ax`?nx}_C zl;?g%goob-LD2r$1GB(N-cvES>PHRqx>2FC4emiI%W@ez4b(c0;`Zg~nTL+%4VPUp zn;drO5(eNPtKh(<)<6uv4GX)_cyo+(+Gja{pjHQJIEw2CphChxrDS;!gQuVF?+Yd; zM0jcv`#KK#rh`qy*DF6DVdTkYvrhf56a$AHt+g7!`>1i(kcKxwsdTDKyB{&g6C z_l^6TQA-G?f;kA-aJk8l0n$q{4p!M5uCXj|{lS6FASR4KnSO_g(Rpv;)CZoBMGr`B zDy6b@X%J91I$kFn1JcSEXC7j|s@5?j6W`L4h%2eUQLIAI7#JE-!^jb?EEO>>A(LES z8X=mKtA!IB55#NiR=KV=iF?^zwzR|pz^UQgQPebH(Mkfv@&R;=^0d5w%f3xm{!x?! zTt~IC>C2b*&=P+Dz~3fX>UQ>72fa4@XuyFhEg2H&j>tik6&3Y}o#Q}VNB^(R&OEH= zwEg>^_Oew9*(wpIW#eZJrCbL!NALf3lq672K&>(@j1AA1Ql z4LZAf)`iD?%@@w5m{J1sZG>fuA{f``x+`POvl=6l= z^!m4ha`&e%V38Q3agERJ8X zv2k7J=q)zeaN9K=H(|mrs3@MF*`Jg&0v6-VC3S;nm+oerbt@J;KIbfUF*l%l(?Zw- z?zbM6V^>YwWW(tXEIYGUuh7N(unM!g_9#n*#U{cc=;Tw#XYhT|Bnu9sp)ds;Rx;H4 zd#5@_yh(pP;^^(4KGntjefQoy6}K*`ZgKyiBcc(U-)xMp`^my|=geZe!a+Zm47$JW z)r%La`H*L%f7s<2d=UpEN+zFX2Bk}El6k79B87UAO_mpmr!d!{p#E?3b`Q`H2nws= z41Qe^K@+~(`sm%De_Z@L>3ZectZ2``IPD%id#bp>5{?{^P%Jt;(j*;)Hf(=a*M?|d zQ1sfC__(&EK>fE6@{QW@_RbXEPc}vAao}Oe@Fn15@u&o6Z*67>6idfWAa%vu=J&<{ z7ZN>)yuxfE!oq!jN}2Wh0f4>FB5+n;xMNN$&o+6tY~)nvZ-l?dH!QVZ z+sdwOcTbaA`zRZ6ve4TwUm~nndLG*p>GSR%oCt5J311PVHnd09DO62ETll6_Sj~G^ z-zlP^lg6JMS<)Pc{?1!{P7xw{IL*WsNar8Qpn!;sjb~O0n(5;b=Zyq*oO%s+he^ye;8fxj7}A1z7*0(6A2H?5dOyTgKb$>VRef*Y~LmY)r3p> zB1ZO=W$i>dPvA@BS43q?%Tto(YyI5NxS$MGGCofVgGGbMd?;Fs-GIs*A?fuIAT08R zHpz)O^}O2F{$X!xaytlmU{)EVv?E}SV*)Xdmm&74z_wEMOq($yi^b*tAtik6tm*ma z?e=1a1lWGN02JR2XpTZI z7c+Gd8fm;i>JOnHOCTET>ZfgG7Cty-b>~}k?rBxW^z1wfoLxuPu{l%_(>b8x?AFN4 zp-s@YavkV2qBUo`w}*p5g@r!3x#`WIn2p&p1HSwNLaJS zHcx=q4ituL$UZ5gx_vAly-L&~B>H(8pW_}}BB zDDh0{BHW9IxVsOgt`9Fic~~>*?#JsxL-VK33<(K2fFlgpgiWg+Ga4QI zij9wVqrbYB^LO>g;wudz-qd|Pi3J~p>mI7>JCMnW&ya{klTN>4Q&~8VsXja@J)Uma zY(wGL##wdedVhJY~J0Xw$r8@}DrW{#`s&#Bo>MvfSf zz&_3G&<@Z%%OxasePZTkTT=jzdsA`E`CM{0f;c`LJD&MYnGgVTA3vTrf1&4SfBV8g zp5U6xn%l(v{YH)_K2u#hll!a8ZuUmBVh(yQW-~<5RWmW0fqK!QO~2T}K(o=aRAJ&F zgk_l`>G0fW{PsCRG>(-#+(UP1vTu3Jqe4=O16djrJQ?H?kzktRY+dH3Hf!`Wbl( zyfE+FjUA4kI5B`|>hLMcLF`^>Fk>5$~ne64wP8^*vxI0gLpiN3l~1lDfwpzof}jZ z2^7)M#uH1jKTq-niXFl~p^iMjX?9vxIy#ApU?kg&Hx|ovL~_3Y(BoFqYn_i3ShjB; zO71fpmshb1KY8Lr0zA}MB3#SXtz$_RxO=18&N%qz^lQyWjv2FG&&YB;|L;%p^S9En zoJ|Fd-?xAHlm<CD_o|6)E6L^GrV=FeZ(uJKz zGb{{)2KSzogFqOUV?TTafhW0j&tD%HfPeKDlH!kU0ss8r2Z+!5u8O?1*Tsl(`{S=~ zEM#yBO`=3dpK$gr0d0RAmRyd54QX0((s^Afi9M`~&*H^`jw>Qa$RUX_Jy-xvpeLteY=^;d43p6ro3_wMZlVT?3x`(q&$+(3?|k>V8e5PnTw0p*NX^ 
zqd1F^(053T$t94tqnu)U#`$Cc$Vp;YKK*jEl9!(BaQ3=$Y0z{Rndi`v0*N>?7zECy ze5~sp@|-)=w&7n>>P)$agiyJG?zEPc)@?qegl|mQ{o@>5%wta5Xj3jE(g3-IaQ78I z_I-q(ofC*={)cJ}*muQqX#*NJZ*H0IK6Gdz?2=pjsi;>)qk@>Ufq`yI$yU*H14pdk z&Y}o%8B*v|v&MGmwL|I%WL)Zb|i4Bydf z111_an_^o16iv^5Hiy->^D0h95~=uYYVb?Can01Nf^A|D)%}7tw7$e8gXgZBZqlro z%)dz_ijJfZlwVeWCeSJD7r0*WRu3f(o^C zbYf}eo}Tw;3ufo;U~zKtqhn&oKVfS~bq?2tcHqlb)C>3ioc+rOXrqg6>ciUFa*X{Z zXB8^?I{FJPs=B#`4^K?hp*|ZtHiL9@6M=j%xz4jyrOre%`xK)Ho{E5B`qz%$u}oHw z3wy_o9j|Cpsy>x3_&{|?^`3+bU$?XSj(~vH@jmV25kmig^9FC<@OF0j7EtzS^XI!Y zZq;))KjDxxYP1(9ZatS2;b!BZLx;+0U&cq6X;=jhyG3ZFnuztO1NoEHzew>x9~96Zd38*uOSZR1JD8pYE~*Oqsj#H>UG+dAohXh5?iV`+@WHT+Gtz zg4)ozu&DOmwilcv7W-|!#1QM7wt6nZhV7-uSf7&8TQFLR;M}S4X@)$?__OneQ_2|} z?a21dO@_C0OKTWol^SO!1a$c6PRz)s?&39ZV*T9*ebA3n)KB-|XQBR=8!iFF-T zp-D<01m7X8?^XCUvwY*HVaXw@keoBp1IskXLyAIZ5SENGncdH@r8eI?idtwPQ>KJ_0A`U&p39IXi4MP8#x6o+0DwE>?JZX=i>60? zdVmsBH-oA)BPSK#FCIPYW+a6Mbt+0X4>EA!J7Jj-5WabVdEceak6z^c7jajN1>gqe z7i0UvKF`1uNX|q5_~Z4=W5X^58dzBEe%sd<39FVBnrawv=@W;gSXO8ci zRC@m&LdTI(Dxm$rsZmApg14h99JW1Zb|Q_t{W!FKUv2f2OyW(Sq`iA@aq>R?&`ykV zhi~g1p53QeH>$pzbLoJ5{lQX3q1EHSo8bxB;47L^-N%{Se|tFR4IcHXvTnm8q;VUs zM`|uEEJXA5npeAYc2W6}r&YrS{MTm^X3ADH-anei zL%{2Q&FjL?*8}WQay}5akHghZ;qyX^ZJStt4&^SYV}l((eL5Ub_vm9@>*!G=u$)59 zruna?fCv)^2f4X50oGBcIdQ`4iaw!Ijbu6`H8nLqtTkFZzta4cqKV}sh3VO`l%jee zc%l=rT~e8pwf}w4(<0S+4uja2{X6kYdRS&=F|={;H~0`1GcwYszgBY!KMoTD5T*>Y z#Gnl(aR^BiC5ksigV-~0Pf*rqm!-En^!Cxhnz+NXVa~wc1!TgeU~US&4?Heu&a#ybQ#Iq?}`~vT0Iw5jg=3&nsY%B9~Xp>P#^m7GEhG zxI|$p2)86zbY7NiSzx%nLy$ByUQMgBvJmGWqz!|YFYm)09Yu1Ug@FtlsHVMr#ph*| zL5gG9E=G-n(efY(T*ETAqlqDXQtoOZ9R5sKU9#M$dCZo^!s2@;# zgKlBiE!-`^j>Hp%(GMQ`hP6~OH~})-%9F3VCVR4$AR^TNi9vO>BKU)i7s4w>0;j=c z6B-kvFHXN?J<_-LzrO$TUs028M@2`65Rm_^X2td7-)dF~7&fTSlDCnZKx$mivG2R5 zA9ejwARC;^(57Wd>64%Zw`` zVIk=tr?EF+zAnWNb8;LdktQ)1!KI{_xt~)iFHO({2=U^b@{e`kN=nB=sh?tnt3NPm z_O77XD2~sXHc8KNc8(PlKHE&ZUr2%=MmF#smUOspB^)?x82a-LP&0O!@yhWq#1vs8 zQ&iu&b0>?0VKUtq!rmSTP;`2C9A7}oiptsm)-dF*0hj^DrjJ~^-PrgN*sj#ua@bMX zI|-8yDjwr<%(&Lg5qfMufo<<*vBBrV2p<3P26hSGcL35m>q8_kxL!mD2P=ZqlNmPa zmUZXd-7i`XS5)ymq-6&LC&Qs=+35oJh^M9^KmlL}Jatpi1i~Y28k72`j5MHeCao-n zafr>D_=eL;Qscyl+K{uWC|npatBkwh#dCMVP1mPQwLC-SEI?^wLzh6JK5~9)+dWga;P@Yn_2rWTgGMRv4|Mc7euAv?7 z6NWuzDqYow=`!(3_zeWEdT$Cyw{5ZuLs(J{aGAIlc zQW$6qzE_IaW@RP2#?PDBb=Pl`HV;k0PNS3X)S%V2#~P6_Ewbxr)0REE+)!vxG&6l4 zXWG9w{iaD!)H9MX^#6LJtm;htq=cN;0Z2^xBaZQ5LayzX+?oAhZYzYD=3WgH8E zUo00CIklA6*26l{Jk?Uh8%^fdZ?N#$?qX*MmH)4I<10il#DW zvPX|o_Ay0zGaIPhS`HcA(_!L~e(8Tds(AZwAc7BxbkX!_V7-cwHEH|squ0WlwaH7b zq!z`aYNq2lT{=-B;BKg9gTa0>QI_p@TU z%VnB?uGGVue*b+aJ6`OMHvlF4eRSe>t^g~~4-wgv15`kySRN#U9j&b+BOSb^*mm!} zk&6EyrMfZQB9#h=_#VuDu%uDYZ+#apu6fqs*n>P*H#bXAn(^pkJ*z0f{;NreEx%#M zbDTu8uHrDZx$}fAruP2kXZv;Sx?W5^z3pw%UpB+PASs9E01(>AX|>ukuOMwFgvm~$B#xG9KNh99PuJ)2rFEUQ9>94W4U($U&KXc#`UeM=vw0Snl^Ca#I_83PVknrY1w{QoI~C_=6qpV1Hxb?hQMDU5-mo3L47s}Kysa)9d$caA zNYAby28^N*@u~GM`x;T1l>rZkTqC2-(hu@)0vDJ6r&oryhyTi$nL2VWfS~juNLQo- zB!NbVvw(Nut5T~Pmlms~JBQLJd0zVgb`b4{Y+^c$QHC+PNJuIUS#9!SOju0y0QSqp?2*o2p`v+w|NF1&bT z@7_zN-b5Sb1?64^j$Rb?2k-cQEdyHZ`|o8yC#wKwk?xuN{Huw8$0C`>98y@flGKq1 zNDc&xPWmD+ne^fCUEk5$i2Kvmb=~nSxjyITC#z9yx63RgC9k)+Qi6$x?JaxLqTVo z>Cs5~8R!#oGBScmI&&UA95+NAvy!ltM?F)V#ABY`1%a{|3YaK5A{~A(`&L{r&p&IO zKWvZ#+!>!x@81Pmmd8LXZ4sw=mHxQEjE^rY`=v|aiweisLd`Orjb8`00Z3^Y6-M8r z%O()kwSdQiRdrw2J2Kl4nNUu8@)|*&IH%&nZ){-DpUorfOt#leJH6{yuO@PH-J)~+ z3hqpn%@8+-w2APS0V?l8?GEcc&UYmk^qt$c6KHh4_&T<>O~7`V-s4l3pI#sE{=tc& zQ!dg(9Rf_mhgc3e@Ky(TxxPiku``&@-XeGj;zvH+5hhJRjYCaRwpBq^QFWce;fd$B zuzOv^m@wk#myVOygRqLsUewf3>QxL+-ps1j+HX@Z6Ls261Q*JiZm0P ztp=3V@ZC8sh?(Jwn?B95e#32%sIW3SG{lP={D#crU)$ 
zPxX3*Blpoj+jemJ`$7)Q9$HsYJ4-WAu`k>gmNTPvXwpanLx3Og#uhs{J~EpomhjD` zod!R}Di8bT-MW(?fnVi``E%vcs4gMfTCAdFj1AZ2vWOg~bhC zh7@vg-=BjJD4w3g>mP<5PHIfJOoR1Wc%klu^QfYiGD=$PdzUWMs6qB6GNd zs+y@GuJmX60fi=x@i%(h9|=Q{7@;^=79+ow8B##DdK8-83G+vH7q0_M^tkXh71X^w z^+lSNH6d=!vWUBvm<)8#jw@mq7e7e7xL_Q6n2@04ra+x+MQhAm@ za#m8m46soHxRd1DK6fevHP(a5MZB6|eXYiBLxFhMH714z+BXyy|Aue@JzC9r?~ zjJmctD-%=M#BizBQ9xjGGld4COctO7lJCH+s))5n6sdZ{Tph(huSHXm+i9PJz~>6n zFl3P0To#&nW7f<<12NF!06~$E9oNh6l>tEC$HP_GX{PSU-ov^rY`8neH|DI!U#0ZB zfc+9*U^So!IGUnxW_yXjI`wc7@mdZT74Wjd0w&Z2ijxmid)aNBMjy7`^cAfwu!xva zqK;9mJ?@8nR~FgV{JExubn2@uLnEM`4207;QXjQkf42wS-04`wI-Q#~#d@axUpf_U;r z!KPl1Iz!RamTt1s#>^uton1-CXmW}@l?V>$TEJCkF8mkp2H}&_msAwG#kV2U34JHl z@k*dizItU_p{CQL(ZbWQzEBtXD@BI*f_-?NYx2_^W2LI4SHOqPtvFd%`z$#N+7^!@B3$xk*s}r+1WTUjd$k0O=NqPIlRcy~mjXy07fb*Sqs= zYn$4%AzacHy!Mz>)CpxYCbsOF1ag`Ge3u7YaT4s(J(^1{NaCJ9r zpB6q*S9b=8RUTV6o7XL8Q^cEVaF*}+2~0tDH~#igVBn`S?P?Orq~ayZ;{)O$Cg~~9Nb|R-AH8c8ln}4t7^n~5N?sU#zHBT?Ikf;XynvSU|d2l7Ku!8^ENYt zkko|yLY#8MS;vz|+#K*N&IWmfA#Kqp*vdkTnjj@!=Jh;%_RL~QrK7wM%%AoEI3vX1d;%}2 z!ayEZ)$6W>jR;HPB5-9_9vPDT*yjg=0-%99?bI-{$_KrD1_(hMHnhyAMN&iuRVIwn z6A>AzOl~iIZHDVcgi%IQ^S)7{Q&NR-7&C6%XhQkK=D19PB=c(8*!vs685Oe(avB*e zC~Pv$UyH9DzTG0Nv2s;STeE=qyy}XOkXcYfMiY`t-@`Uz6Zr4EykLDnSnG$~2UERi zma%pIt$xB>>}W!EpoC zkek3e;PU4m$k4`=fZ*b3k$@QY%`&ycwKVmsFs>Ab5LtWc;JUF6x&V&Yf ziMge|moyHTtXFTpsH(Pha#a`q605vb+bZ7JzUYh9<>2CX9|Ih38nte{223~us=p#r zP<}9$}ocy2xc~l4~`P%5`CbeZCCqk zVp7M02V<2C!gJ|EA5N#K%rKxAximm~W^LqkGmXi%=T^@AcD_cxngS^M)J=yqREkCJ zLDfe5q2t*5!xu7#68GfE6C!XH!Tjrr%23lcj{bg)Oknnqxdji}v&yr}%j=W$M~xZN zo)iY4ih%)_h?e-f`evtYQ8=(#n}>!od0JCT%Oz-xNo}N!M&0Ij&hN5E!;^A#o$xOG zdLUM@q)~+j_s53}+LKvO`^E10%a<2t^~{2U%rOje+dt69r>tMakUa(2)%vC(=DSl; zTHfqe(dN(boi9$Z^}g8H@Uh*7<$r@?set^-A1^J~eDvVoa~6`&Px1CnnANkD^9W;M zmi_1OfddAxGn3{*-)jy=-*NPR{gLBgFi-|866H|?oLUZhrM4Qh#}Ekp{k^1r3>RD|ZFPUwtJ>p*t0POV z_Wbc?=!E6V&-H>&vXCBm;^;%0kG5VnsGNA+RBM0%s^WIhiPm#YkMD;3epIK;&#>zo z_S3XeS=C018@HYx`D)1po#ShSIo-m`UyV!V&TQ?ocW3p4mlOSI?4iFCc``hN`Fqdcv_DBY}ae^Vw3z(84`m#|BwgBtFSh9%zM_ zXXNFzAULQvv@o!KBNsKdwbh?oF#tlWyuhcE+NoKW?q2cEy+8JFutc@jxdj-`j z4ydhqS@!vJI4?{QOM1xN%#>?IM)lx7|LFYJAMrQ4R99-NqY$vs#0!U!O6|$)Nxg|> z!Wkz;-`DDoaZ_p-HM6|Mgv`=*=w`#i!=k+XiLIUX0R=8+ z)4-oxzyH%IExMNG_qcKY{^)b_nF1s##U4G3pzeq`7*`G%U5xDSuJOg6`quwO-O0+9 z{rB9~-FCRM4&vYev>|RTzECjC*m<_)Ylscd>a(cqc6(S4>pp+c_m}OsCT-kMW9NGh`&15ktZ<|91q-}BGp5Lwf{I&GNU-|Xvdgypc3CT9 zjMp-)@kd5C5cdnIhGoJIsFA?7a>qhthUF6W&p+dHrgb-2H%K5H!f=>V!sN`mkIe;o6)+eb+xF$J zW~`Fk*s7P!oYp2p%{;FajkyX#2{~jKYJbji2G0(KHhExFC;RGGdnz$z6bZhx8n3NO z4XgiU6Pqb|$^DAF&iUzy7*jltF=FImRad5UZ^@;SF!JE;(T5~o;bS(%#koG-cW=1m$s1r-jvsGg-p`>jc&pCeo)-y-D zW^nfre1GcnRi=F=)%V z)%C01=K2h68P?)o7~Z1nX{f%;x~BChX)2Sg8Y?8T8RWr?;Z>A7a5=-(>7a%H`!>?n zP=0Rfr&GUo{I_0i#JdrdCT~exu$2sA>T$5bfU-=T%GZaE;c?u^DCSPU30d~gHRG-; zul@QUh3L=m&ZBKx_G0^jDb&6M;4XAfhWB#%ctuWF<1V2;D@%4H6Z6GALdGfc8lBIT zVtUDWwEz0{6y`76{qEa*Zu9+F8vmm_cryK;`KSvRn|F~27>$Ww$vM2wTgo*vjg%pPeQ}CVIqy3uO;tVF;BJ|{B`+#X_B)ieV*qSN_|IDi>3p}(kUca8h5}Cu~05$I}8H%UpT09~$Z{Czp$Ls(Z=+(r` zVJ*p@1SJSm*gshtH*ntHKKJ`4S$ynzKz;JOd7b4O2Yf5f_WzJ;m1Jav6jEpsq7neS zQvPw#wm=#_>N9^3G+T<^C^{FGpNIPywpC5t>$FeA{ z6YkH*gO|`$C3slx)z*65r~Vqh&^TEUIy`)k%pymCL-pyQ$ycP zT9X(VRQqw4n97sk(|3sAn8Jq9nREJ|OYQCS>sPb5Yt_BK5K+b;$93)D{%*VW*`UX! 
z!>Ab~73DIMGeW)e;QZ2K9V6y9m#u4YZR(wt8DzEIh*C|^Y}QuirAz(-;(6y|+nKvH z2Z&@`Oktrqa0Ul)w5vpWfI655WwW+2VxIP}I3I1xFOEFiNIAodeDi&2@4r_o zBS-c)I4aqvP#p*Qy>A#WUzwL)EA=1FDqmi%w`m}INVU>o=b{y9e_~I?ZuahK(x;yz zm^V(MF6*@3t++kn`{DFE@1P(2u63icI=_kTk@wG{Dzzrz8x zrzJl6>}r%l;V-4LI3q*Mlb+}whYqQIJC-1T74FsTU|*Cjx9q&wto}jTes)pr%{k3U z*=)*}8<^l>?&xTSTUr0<(*-Vs7^QwMFHigwsKhIa%<9BLT{k4d;7)yxX;>5QwLrZJ z$MX@;ED$?0Jig}Wv4pzdMUp$COT$H2hWI=kW>^nSc4^ z7D+Ec$!wjBo#_}4Fm}EGm?eHAD_8cD{tQ!J)p*X-&+j^$)~|8Jp)v2(ekJ?fV;Exo zl-xl_kzcfoExg=fl8QP+k|++cS%~?NihUz)et%ZS>bKUt1mIBrUjdF#r-WB)7P!`3 zmVmR zrjdp@-*Q8^xAFr&KH0TVFh5l_U#+&DpXOwBRhshSaVFaQw$UF)jyTKrT-Qnq&PMY2 z^X-`*3`u10qD7|;MhsTXh|YF0Fx-}tURSdK)=p+_uHJ&w&&@?r(z{ zlMfo(;6zhs#VWy2wJw6`(vp!uLZly^RxV-Ba{(?~c*5@pI0G+>jE(cU)};?78i?%mz+Z26|sPxyk3wi4K;)__+3ANvBbxmm) z931SH7ty%2Hb)h``BWNDcRs@(yzuFcD`Tufku8Xw3F0v13eO57%&yEmmb#h(12M{y zWi|`asw|m%)pg0+v54ekiXg*qC&vU0vS^hoZ&iCexZv*OWD}WFO<}wD4S?28uQ9*` z_z$|};W6al<@Kca{lSq3_a581b0>q=ZXLPVE-^E7xVcJYDORbvA;HLN{LYoWe5s=_ zEA)+Djn}nH6&A6lYwEm=ol9E9Esi?Sb|!yqSO&qDO<75ycAI*%xI#x;JCQ$h_jFd4 z6L6WD0*^*b0pTGuwRY+QS#s;A#s8i%O>ONZ%Z90X`uk|FkIYd?e+II?ByOO^@}5Y= z>3&6V{m7hL>sZA%h9j{W`D$9_hNxM=ljW@#!YL6;I@znjMFI4=h#@3&d*EKi@jdHT(3*;> z;>ilJJrfg4#2gM)&y8f-{PMrZg6jQrdAxsTS(RhCQ&2vR){7IzMaHEJHeIF(ISf*8 z=^C?bk=nWg-uj2?a^+&kj nMP-(H>N%&V9widw#V zmzEJl(fU&qO*`Wf{LSX3mTvsdwsYEM=Zqcg&beEkJx1wUpL06xc<%HG8)3I&XI)M> zI&73tl#!Dbwm)~y$wf(4_RQaZLB{dyaoNq;&mQ4Jn4ESUbfG9#Yw{nOG_}+d6cw4c zTWg1jN5a5&Pjmh0#ib*kALlyeaE0dF&k;K)aoI*BG@C#7wF0L|d26HJt`D`W3r4>o&1@*g8nM`g(^83G5pa;{{Q_Smo8p?@bcz<7@PP%o}BzZ!{+%_fqrIY zhVr-coo2|-&wsGm*367=X8b3esHkYZ>%oHu<2D{&aYN?FpM_+T~I<^&yl`o{-+UnT(_O|j=PpqY! zeWP%^>yek|%I2oWeh3lMFL8+*Ff36E@5ENvrK@`(I9Mk%ck#i`Z!ct1^lwQlWHV57OP2XRe!KyHOxZ^} zwcu`)SXkG|Fuy|j1lA|5zqhjR`N^Rx7DeYfK0Q91cJQg)I-QB0snM4awf?vZTFRlV zczOHxDRCbkpO!*TPC;e2iva;N+f`I(3i)kwJ$~DKcx1}@_3Kx;&6}%O)#m(r8Ju; z7$~!Y2Sep0#l=Hix14UKbaZs^#7iqH>3wF#%f1)5ADf7AUpF|~Q)mA^WjU6(wSz;1 z=h%r=F$gq=$EhBh?NeUlSpiW?53~QHTXSW8bpX zvGwB1mrBJmzt-owUb=Ki;q^eUEp^1)oVl<&dh!e9&ns(T)%_~!$&)Ak_`Z{$Gp}H` zN`1JMXIrxZYx1sZ|EKB9Bt>VDvuDrB9e4%jt?bjB{ zDcP8;xs-3^${lzXZ{NOEJU^bMcRM$C(`A8e95-&SeM)nl^Su`5^`k1ZR8vzE>i2B7d}^5Yl7Pi|-(7q6?nL9blamN&^5?T< z7b;Sf(gdE!>F+POabF5*kG99h$4_+x3z2Ka)s)xN==^X`6k(oy`x8Ac;r4ABs_6Wm zKrNAg@bK_FRoV3Uu?Dqp=X1xFdQrOSt-Po3*_Amj9NWs@yjlME(6e$BpDNu7wYd>G z5mC{O-rit#(VguRgWt@apI%970Vxuw45aEG$g;R{UDCh?8-^18Y-@A~@4>nM(}t<( z>9YFzWwA0x%F$4qf4&nJQ1cZ)bHfg)`P2XT_czB9(m8B~H6QGWp{S>JZ_1MNRfV(mjiE2vkm zU+=&pe&X1sinXy6JrcD>6V+M9qA=vpvy)|Ybu42)-c&q2-e6^8Bl3I^D`kgN%#T^# zf#4OZw}-U(EeeY6yiC^Vfef<%Eco{B*VjAavVvKKIv-^fb8uFp6Luu|EtHi8(5;Et zBY(DgSNs-D4UIeL>7nwr)xzu7>t+T|YGzrM@YIMLJ9@Oeuj!uI3kSiOKmC%|DTgmP z40ug^3P%~GOw#W{Z;Za(=zeed2C zpRz1jEiK#14HlM4=-(7`KX|<2((>jaudM(K%i6!ot~`GHcnj(XdBYD)vm#K|tjjMl z?b^M&tg|Yd<$F!GZOy(C-+5Xup~o2+?Ok0LNVDv!iImOdb!xn`gWT5niGfh`x~|Gl zPR8ZSt76uhKJl5I5D`=!8H&tkY}`lQA{NC36R9fnyQr&IN1EhKJ(&(nkM>mMpu|?7 zqeVnSFs@i}>E_K4wfmz#gQ zxfP5mOixd*ZeYNAT|z%FHC6JZ`)~wWUs+Af(!G240^HD2EiYYJl-&l@vn-0NjvWgk zs|F?F($l9KWl&ZJT7Q1{VDh`5AkD_cW~8%LyJn+51)MIWc+i7y>i1qsveH2D+dO<*e?ljaVCK9@{EH|kJieX=Ly@}+rNrdR#s-A zE?WA{@0YHtt>vh%ufMoj<$TDoQ=c=rDMl7vndtBhTeq6B(eKD}8@x%Wn*nvy&p4LM zA9#|R%hk_K?@y^K`po2-x%Ivk+yNZZ_v!I1vn@{T>dDxY<=9L|pAWxJjkG#?R4lQi zFv%DTZ%idMsA{C+5x#>!&tt!Se73WAS6Owm(;SFT)XA0C!H-I8bM@w+p~{Ci|k zFhGh_^m^01d~#O)W;`4mB7Hx9G67n}rYrl-dF(iH!+}PN7b(?{ZZv+P?Cj(V}XUB!3UsrClyG*H*>oQxSj_q;dTW@fZi0U!R>~z-vy%lGoU|^O7**uy3O4uMg}kEiFfVpQGpm z#>B)lqR(a5zk0QcZ_8=3fLZB1ho_5ly1Kf0RL4Af3$U2WzrAp*8b!xuT(V?|21;d+ zX_m$7swkbNVfGzt7Z1O1c>Ffbu>p*N~@^JNU 
zZ3*qczZ&BeS?D`|2z5E?O9R#h!-fqTtU(1Bs?Fn2$C)@nwc-?x?a6*V+;lG(U{!p( z--3C{_Tl5C;T=7GJiG_jfEDJqFx{*3?ZCLQq>un>AGI(Kt@PMMR47q>~PlZQ>RXCa%>gAj_P~~PDJ`dKx%5LhM%j_wryoi zO@`)~7DY-g&;1NU{}V-d2^6;K|Ma-8C4VFRj;q^Lw{G29=sj&TBrrWQQ=9NU61{;1 zos^xQKU{5bj+0YDkIhS&eg|8BQJdY)ol9^Zfjk=ysk70eJ5Jo2{etTe<6sKBaG~du zk4L8Ykt2pc=Fa)n6_?~TZHk^=*;Di316S|c+ZReo)DqSFNcBp#Eb%3HMngm6XhfM@M`2S5{V1RA6A>(OR9z)c|4QHFp(I-mrn3(5NGU++WB5OVaD=>Jm(i6E+Ns0!ru2eODIv*M=CKDY|DG>%~r*8eOEnRw=_5NN$yP`{c-fWso~f2iz+ie z-tcetnc>1mT*}UNE^ZUY4==rZ`7%!c&x=4jAvFt!f&SoOL+xT6e|I$&fa=_nAAjBJ z{bL(8v(mdn=Nmp1=rbrWaW&Ayh8v%Ova74B6`W6r?Xk@26#ef|CS9KOF~ z_L1~MGuI(kn30h&(iN%3ig;szheKbw_Sc%zzKM*C zWQFSBgfjUd&5(|~L$J&C!NK{aJeqF)rw}C+Hg5bq_1iWrJ-wqUoY$=A{ARqt+m8G~ ziBAjb6vG}Kczm<|&H!pJ522ZH^0p(WhWAhmDgW;7qk6ZMxKTc%`48WF)$kL|G40T^ zhxI+9fhJiNM=~?Kr$?Eu{7g4`z8--bJ}S8|$ey4wyea z^qqg(wr$PN&)lllH{9ybQi>{4mAn)kcW@dI&!E>bE93oI*_K-nKM^_wg^ldnV8s#3g7b!ua6rK8hugT zcjl*LOR>*kiEXcSMOX@!Espl+M&lu(&RodKk_8=V@9PVR5L6}rT40xD^m8!IAV3+= z#P)^xS%qGulF7c{q?p3wEn#~3_g+mhj>yoQZY!SGDbeLJjBc=M<@+|da^=b%{ufbE zQF>mUwyvhYlBh=5@jB*{3v8Ex0`lEd=ejnEiqheIY8*PW3TlGL#w~IxDo5;2O1QfI z@m4#GVq1lt#ZoxO^2#sv7giyjTj$6{yz<)1F&{pBh@O_2i5^ba9w@waJlOAGHT}p& z`%fA1^>Jx)g^Q+#2!k`pG(VVP`pbu5zSAfL2%rmyiBH*08XKG*Jh}VFkGptI%*!(x z-nJ;eIk9?QygJh( zIXqQVSjarbFx4U3cXxb&yZe@7F%2c4bdK@1vinY;Q1qo>q7OlxB6=0J+E80bBp#&S z#7`5|nuc8>QKetMt^yr~z(iIz8#ni#CVD6AsF29W8eDVFLTT-rH`;(G%mY=qRBU^1gFZwEaG{vbQGv(&^MuRGr6u=+8~}CN)1j&iX0i;04GT z)*m14Kiivu9p7n|39aQGnAXVncxC*?<9AY0B#MOqVJew^s6|ia14V5uUYI$B_bTW$ zF|Y&T5LvwF3}xTH7wtN;Tu4;_8Xn)avtkJe2|yh5&;ikRx@T9m9t3^&!+>p^z3 z2Dz+iHAuYcyM!goASw?};>tSX6suBy8b&6jfY4ATyhk}gBIZrfjkHs)goH50^M~g= zecA+7AOSi=f&1_c-3)B;NFdA+gLc;j z8-Cv&19P*9be3KpM@boi$vtr&w#YsGW%Q{St@t3eSMv);p%jCp2yDf$p6Mf(r21ly z;MX83qpqpWu$UP>JNdC3+<63BTO*YPxHupCQ!I_zv zaT)x4e8O1V$?2S`v0#w2{!^c)gIIW-=4V_p4!_u&u#ru@*gb@2!sTm`*NYbYjSrIX z>egBodk4gtjGg|P%M{KlTMg1trM%E;ik@{pHI;YvxV5dVXzS}mc_6!LEIT>(A@h7! 
zLT(%!WE@)ZXaE7T=g@os!5E-BuDKB-YiZ$d?Z?2tfOkU>p6%S+TswFxbi!PR@2dcY z*Ka!U>GNkDCnq`l=!xf;9ay4HVKjk+jrk8zur93-3iah%R zDpLCSv#H(9+qmiV+jqOR7K2l+{akQJV0Lm?=fHs*E&6M!u>IRpXV4iMQ}p?vW!6QP zc>LCoD4TX_`}6A~=VH>WTP#9CLPSMGMSeRu*4@=5>T+UxN!!77>((hcwNubJE&I5R zp%ALy+H$6{2iJc8{{4{aj*C-WkqWT`YFoJODH5g2%WJlwb;ysY2Jg7_!K15J+45Z> z1I~Ba^*78u>aQrt8P&Zz`lD}Vgbzezg{!NpUXtpOOu-#&M^mmFngFeibid}5KKxwI z;d}5bDrGhLkG$J}S=ysV_aJpK4@;`5TG;8m0aW=*V+60;XKES@sptH7UjVj$Ctldu zZ)(1Cd5k&_@=6wFSg1ImZ0dYQFv?2N75j% z60MD8Fun-t?~>)K=w)SP>6a||aX8g|xRvbrq3;DfpYTHsy#$ud?(U0N9JTr%uO$^K zzX1+Bp09hxv*d&7T!OTD9vgM^$E`>EAKhqau7sdf_lNSG+SY_uJoxn)ci6}G?>VvO zIj`@BKKJ3_ewMM`hK9+X>ph3RB_s&s>&Akw?!+%t%}agd85#1K>F_!M%yqWr&{gOh z6{V#VdbP*=%(MEJorO9O;7R3+FVGHuv^J@%Av=pT(j8jzc<8KAPAt@LnHSJ{=tdAcj z9bwIyH8ft6LsigX!p@x*dg(i_M3huO#z)W?Y`?#BB~4!OMiZ2KQjY*aoE8^+AZ^*8 zyiZ%at)Ja?g-@Q=!NI}Cs0I7Y!^^8HHOaU93jfwh-8qoko!CLDbz^?mv5{c!=lVY_ z2R&*0@S&3E6mEluARO9uR)#V-^!N5IZU5f$rf@7O6^#PovP5-+U^rUXUC0c5(9Z+} z1<}m|z*5^I)fUu^jEs_F{RxuC#uM1?u`fAwrQXd=Oz77&0HjCHn+^;PcJ%j$V^MYi zVL7?ERlmM2NecD3!LR#MB#%Ns8G!_kEka0Hg0lMxQt89rzkRy`8U}>kizr}wlTJlq`(aYY6bBbTE2fbgz`&7ww4wFcsw-_jjZhLL&#n{apFYUfyb*L z&@h2D0>j$kg((4!VO5A zHuRr;#onIe`oJL-V%fOaIXOj(vqqmfb!g^-1FC**KN1KkAPO*$n0OaiH8iiOAGaKd@Mm7& zwtW484E09`9`6}^Er2BwB+NMdcKIWtJzWR7a(>PK6lOMV&YurY)+Io8;D7J&bZJp593$1?<)c>eTO}9C~VAVh8z$==;5Z6Rafr>S&k#(OuWbFts4?0U9aowxrm|$3xEl4 zhLb26*n`Ya!lS?WLLjE4tU-2!R3tqqMxroaT?-Qu3ALF+KtSN=TbD#EPiFEX;LS?2 z;^`m!)@TqmwqfPFZ7u`z{tYFdHVEedfXNf1p*5S3Y>n_ugo7CO=fM2 zo!;iRP$-9TpX}UK9T*xKdM7z~4S^tiA0NeDy?RyHc(NkN^UA8t%-&hWe%@G3s1lqP zf#z6l-n^+%6~-L|>dArs*c>-xjcTI zzeS1fhRdr}4y74A2)lFdo+yBq5teE-n+R+l5Iz zWfztP4VNrh01_2K`j_=Mf1VjVc)z_`m~SuFgy8IV_ckW1N+Wx}WAR#Ss_*{@++7SI zsC@f&0dRI^wMPd=G+t4YbU9*%Y@UTQ6g=z9Kmmc%U=tn%NQ6K5H9B}%^RZ+o5U>Fp8dKS z_Y;_Co9r9ad#; z2qJ^MN$miRaXflNM7F zBsgLH&%p_Ke{nrF`a;jKCD?ZR-PUT22Q@V|s^_2WF*z^5!M<)OXDLT?OiU2;XrS2j z%3LBd8bAB{E3ows9XeFIA}GX!5x;Wn)~)cmSXsyfba)jg44szKT4EgZG_amgt5(Y# zc}WjB33YqDvb2a1yB?+c3e6=70CN5M^}gwxhQ+8*#C3qQxkgE86>vD3a9~aa!XKr# z)c3?aRJFMj!gYO8kl4XmIyx?|7};Z(?Ig9dEGftj_n{+$QT%m56sNk;Zj^Vt{-t(wn5asvT3t?r5Us3TFlLYUc zn3#YbGcEaybLGlVuuM&<7(fBev;9xOy&jp$A|W8wHV6r*y`v*wVBoZXvfDD?4o!&5 zDSK~AeaiQl-LifAb^`RpBqVHLa1x)!ZMgM_{b^!}!pTN}gcb5(&~W41`g$#h$BwxF`L%zOLkr)jPZ{jj zZvs74pkm=s2i(3b=L&KT4s25jUTrz@&(!gh)%^ty_nS2VdspwPQn3ttyNyczSpx zkl-s}c{xvy9vf)M=Y&cY2geqyOu|7K^pzSuFE};^JqJ5Y1PcQ1m8)*-Eekt)Q>?#= zzlqqEEnDK${8S0Qf`HUg;Lf_Run?#0o~d`urM;BqsGVK;$B*0=78a;VobYSHa1_LsNtD1ZO{&G5Y7%(pGeR&VEn14E1}1OX~tt*xz5 z|7+myKo;;ISF~lzfsQy%4h{k!p?Q^+mC@{fXcACR;8h%3isv33gljyC#jLHTx2&+R zknAf+Z#GjS9SW}G*`^u@5ncpXORyo3?r2lC=diB6e$4I#Dx?l2IOtQ*7K~!N}48040JKbcbDH z9H+nJJm_1hjsi;COt-7*>`==#Kz((6{pFyiq@(3I0NDJ+h~m_Cuw6wFLs6>u`t?w< zPh5O_2h381W2{1|T%UX9Z<$W`3ZlLt7ts!zHzu^ArKQk*=^-u8-j4aY4irQ3DAA&H z&$VGChuN?SbK{jYG$MpRpfudUV7P2h-^4RnSy^jPoTB35))qs!3C=26ScM0$-$M=W zJ(^l-m4~&F2vI8)!1J=*m z7qKxh3`>_TMV(oKLaqa9#=*e>#LK|X&mZpw1{Dr4Z1tl)9`+8LSp^`(OIZ5d-QB|S z^6b>U0=F|nSq7~J`n~`SotTVJ2K5rRuS6@p01uifZEF((cdP*A2tkO+YH?w{xzKat z-Fx9%KNF2Y-m!H%_yl)$b~eahxZYtb#9jLO+zkyt-&P=Vkj5$yNKK8Qa}OGHRTy@5pUZVd=%dOZ4atfIS?FWs zcwtYhE0&=D5gnS?!%LR&-Dy;PV6^V?<;x?GgF!KCGwSN=2~C8>#B$`w_s-QkF(U9R z5F0T{=#Na>)a_#`CNAEAZTc5x>N*NKjrXkG z$Yf8AtsbtE)KvmbK1mhbtwQB zs)3fab}}${Uvsu7kBoT;bko=MCnk%r#So|om1SWGH;TB4&*^)QZ!&wZhZ|x;1&Wb0 z90qVUz!;0-ZzLdw6dac&5ZQ*uP9&tN${(Xo!IcPI%DP$@innwDxIbXyJ|JB07fnd~ zKz6AQC!buBQZMHJ^XJb_+Zz8xl_HVh=|^zE{NZqzl@R9&vICSnCv+tuIU~VP8ot~0f>wY|{X+{|$~tk?u_iP#Ismiz;GtXIzP{xjSr zL`pFnglIu#Tn0NXW6mJnZv3}W(iux_zl#RWs~^QC{ksTB!lKY)jj80eZ7Ve|Eh_`7 
zCwU-COUsLwE@^0X`8j^i%TvS+bs!uA%v*}GTh%+ta}?VS3X~ka`V#ndZObx~IOq?y zDA~Jq?OMCZ`SL&GqI@k*MP6RsO(#Dvg5Stw`>?SaelfI@p?TkXebTH5ilCp7GIWnV z$l>U}dlMAbyiiqE=0}jI{OTIb(eJ7-F(LoFdi%B-X^|(uXj(gWhQI^(q5}&r5HC=Q4gRKu@EC?OJHY_qy2Vp6zlP9HaK#N57sS|6+M$pvMG=30dm`zB? z(yD+&i}3Xw&|agXxlVt3u5ZrE!?OfB5{zaZsB=2#Em75>5@KS+5(5WA9)a$mNjlO0 zyk!G;tBQSQoqB4y;ed3YCsYtvMt}bGHF2l2XNhtVb?sUpK6$UvUM;O48`@pkTh8_E zgEz|z07%rfn_JF=5m)^K6DzAulB&<0%*+Vl>7g~C&*?@@T)%l!3VArTSTGru$D%QN zSiJ9sLoh?qGC4h+38*_RE-s}15Q@z2t{MQ8RY*_iKnNw;R*>!AQCHk{PX*GMhyw`O z&0}I9zI@Pom$5M?;Dt7pIYIyE*3(*ycWZRJz%x z%p>8DL_{HNV%I3x_F*3f5Yq^WTj{aUQ4J3dkMx?A011$Be!Bu*YV<2_+xB{cW&T<1 zj`^M+KWHf^OC%Rl3K0VT5Iu)!WL7@Epg?rnwu344ZEcn*M~I^L>sM`MOk(2O)Fy1& zY^PMR}lh6jv5t?WVSnBLJaLh&`!a20p2lgAk(sB1$br91aRpkqGm*^cu(y! zkDYY1_%iE?KAU{+-UX7mkyzp4GIm$cp1tRG@T6=o9C1SE6vp=b&^KE_5idT^x$9^( z_vpHffQ#4SD5EIssAcHAHTQR4zk2N&EyzAHl_3WE@Dd4n>q)U$sdR2Fc}Ck@eyq(E zLcj`ndln#2lEI{*fMSqS;Nq0V50E2xb()u-fBi)FNed2 zNl8Ojmj#lwCUioIA|Y%eBM#yMeR>4hcxn;#6|g`vWk8b@5mbsw7w@?#XVm;~+j-z} z1R3r*B|vfb>cJG(?LClv9)zUR+Y1V>7HmuKN4oYS4F|w1VNqPf;;Sp10j5I}>_Co% zYJKxN_Orv@y%Bj0g}oc$d94N-A}$Kv!4t1Zhku~B8+>&@Di}&CWb^$BgFvAR(RbyCdB&LWwklk*?WS)fB#!h z5AJQ$sTVM%85tSXL6eH-Cl2c&2mn&d`MFo6dKmQ3>Hlx6*XqOa53Hw>2xbQ~?GLa_ zOnDS^TOeZMBWBys`a|hmg2dAfTu>BBDoT(Icva^NVwte+c=3J+Kuir-BXU<`_r>zW zq@6+6_B?>DSPuN&Q+@iFon0q#wNIWsi&*P zADTvcQn9ncrfIED_MhQ)c)FH(cK)#xmKTuBKPGV?bd4`wyLK&FkQN9q-HKHh+dL}F zn)ct;znK2{$Jh4MLQTNRfzr(?Z+xELh&BWI3o8h$(PNd8#{ z(jDQ#{r+fbxo%vqgE&TtihRV32bm&qbSO1M9lf2`?oR`+*z?TbH;q)7QAqX)yJ z@r9!$SO|P4a~Y=906Eo>LXjA9k!pJmuKYKoQwH{mTnFf_CcGz>%c*j(;7K{Ap%CG* zxqJTChjkd{**j2l&dqJ7hDKoC5eV1WfPd$PT2=zJX#j}GV=c}h^6oJ^aVFaat2*Zb zlQ4Jal`G4CAQJ&HBLb8>IrN=m+7QuTfsvZs3dDRhKAs7v#>3N-q+nmYdbRdlac5Uo z6I`OWn>WLb0moh4^C0is&ueBiA3yH5Q%3l%vZjW&c?e)d&SS(%aoe^v;^GXr1kTSv zK|y7$t*fyHtB}BE=i>{3O%Y_fch{~`G`!@LlqDd$a8DSJ7oN7DHLZkM$;Qr3LqU@w z!YD;nJOQUjFh>ZfMZhY%1G0i}`uZo2;p`CE;>YMV=-v_w5_d%S(IycQhLpnq_lBf? 
diff --git a/examples/running-llamas/artifacts/Llama-7b/prefill_latency_line_plot.png b/examples/running-llamas/artifacts/Llama-7b/prefill_latency_line_plot.png
deleted file mode 100644
index 5399864ee304f5dba7bbfcd759742de6bb89910a..0000000000000000000000000000000000000000
Binary files a/examples/running-llamas/artifacts/Llama-7b/prefill_latency_line_plot.png and /dev/null differ
zGu{jd^iRujz{d zB;tVSFZe1j@Vl(df?%X6qUl3NH+%1sLqO3g49n1E_{V-vEXI9H*Qs=%C|p4`8X>Y z$$L~lRsDfp>Nw-!T4`?{fJs#IXlQB0(2!*})Ueq>*%W0nvbraoKa{tW;`zB%BSz@~ z2Zu76CCrg@ZeG3ZeKB$IJZMnYwR)iTnhE7q>X;uqPqRLju^BSq<4mWD&ogdWp!(;iZILp#0s0WjZWa~{Sa_6|4B)g0CE-|1GO7oQVfR1jZxJ6F zL65()kYgZ{r9^_76P0#Luu8wRwPkmf!(aLc85Gy>87xAustD;ZeGVL&NnhaNVq(6S z@jgI{h>vea`tNXk0x?ZBv0i;h=)5t0M0SSnDm zfGd<47%?Mu(J(_vplI<5@u&mYUecloaIrv`IxsMx#&Pu}ET6d-)p?U-i9Koy7s|ki zLYk$mm4&A=p2BubECfU61GVocKf6c`+ z%z3z?2h%UT-wKZB_Xfkwkb)rYxk&i++~7t0U>$n})t{rs$Hul^=b3B*oy`_-Er%UL zM3g?TJPMf+U-IXEt|8ifsCY*q4ba5LDA#5iu9Uwg4`# zr4|EpC`2L*Y+Jwmm!d7d>qfz#@@*@)@;cDi1cS6gfK(pM`r#odFA5C}4b+UbVHY`q z;2Gk`^{@JI45vNQEt!_)D3HZOrT83MAbRJB`zr`bK#Aq5UBl6$=G06h7&MU~p%HPN zfW!y#v6qlvAXL0yH+g4}GKqk6oXoCH1mw(HE=obW2KE9%(1VPk)NR`cj`wbmRE>w4 z2sln$OgLPjCMA}~6TKPDvlYYvn`xWFwX z28<^a5i4y4CrN7*w4J+H++N>^s?6U21g& z_Xd?82njXZZG~}ETnAIPabhSwP=+Z<4*`heU%^5^f)WjSwf`RyD@eNjA4(bjKQgr4 Z3xazg$BZ-mc_{ctN=)wQXHkRK{}(6K_F4b{ diff --git a/examples/running-llamas/artifacts/Llama-7b/short_report.csv b/examples/running-llamas/artifacts/Llama-7b/short_report.csv deleted file mode 100644 index bda8c65e..00000000 --- a/examples/running-llamas/artifacts/Llama-7b/short_report.csv +++ /dev/null @@ -1,36 +0,0 @@ -,Model,GPUs,Experiment Name,Per Process Batch Size,Sequence Length,Decode Latency (s),Prefill Latency (s),Decode Throughput (tokens/s),Prefill Throughput (samples/s),Generate Max Memory Allocated (MB),Generate Max Memory Reserved (MB),GPU Name,Num GPUs,Effective Batch Size,Group -0,NousResearch/Llama-2-7b-hf,['NVIDIA A100-SXM4-80GB'],fp16,1,256,13.2,0.0336,38.7,29.8,14356,14480,1xA100,1,1,1xA100-fp16 -1,NousResearch/Llama-2-7b-hf,['NVIDIA A100-SXM4-80GB'],fp16,4,256,13.7,0.109,149.0,36.7,16774,24960,1xA100,1,4,1xA100-fp16 -2,NousResearch/Llama-2-7b-hf,['NVIDIA A100-SXM4-80GB'],fp16,32,256,26.4,0.826,619.0,38.7,39331,84422,1xA100,1,32,1xA100-fp16 -3,NousResearch/Llama-2-7b-hf,['NVIDIA A100-SXM4-80GB'],fp16,64,256,44.2,1.65,740.0,38.8,65112,84420,1xA100,1,64,1xA100-fp16 -4,NousResearch/Llama-2-7b-hf,['NVIDIA A100-SXM4-80GB'],fp16,8,256,13.9,0.21,294.0,38.1,19997,63503,1xA100,1,8,1xA100-fp16 -5,NousResearch/Llama-2-7b-hf,['NVIDIA A100-SXM4-80GB'],fp16,16,256,17.1,0.412,478.0,38.8,26442,84420,1xA100,1,16,1xA100-fp16 -6,NousResearch/Llama-2-7b-hf,['NVIDIA A100-SXM4-80GB'],fp16,2,256,13.4,0.059,76.3,33.9,15162,15617,1xA100,1,2,1xA100-fp16 -7,NousResearch/Llama-2-7b-hf,['NVIDIA A100-SXM4-80GB'],fp16+bt,1,256,12.2,0.0318,41.9,31.4,14356,14480,1xA100,1,1,1xA100-fp16+bt -8,NousResearch/Llama-2-7b-hf,['NVIDIA A100-SXM4-80GB'],fp16+bt,4,256,13.0,0.104,157.0,38.5,16774,24983,1xA100,1,4,1xA100-fp16+bt -9,NousResearch/Llama-2-7b-hf,['NVIDIA A100-SXM4-80GB'],fp16+bt,32,256,26.5,0.804,617.0,39.8,39331,84408,1xA100,1,32,1xA100-fp16+bt -10,NousResearch/Llama-2-7b-hf,['NVIDIA A100-SXM4-80GB'],fp16+bt,64,256,45.1,1.65,725.0,38.8,65111,84420,1xA100,1,64,1xA100-fp16+bt -11,NousResearch/Llama-2-7b-hf,['NVIDIA A100-SXM4-80GB'],fp16+bt,8,256,13.1,0.2,312.0,40.0,19997,63755,1xA100,1,8,1xA100-fp16+bt -12,NousResearch/Llama-2-7b-hf,['NVIDIA A100-SXM4-80GB'],fp16+bt,16,256,17.0,0.4,481.0,40.0,26441,84410,1xA100,1,16,1xA100-fp16+bt -13,NousResearch/Llama-2-7b-hf,['NVIDIA A100-SXM4-80GB'],fp16+bt,2,256,12.6,0.0558,81.1,35.8,15162,15613,1xA100,1,2,1xA100-fp16+bt -14,NousResearch/Llama-2-7b-hf,['NVIDIA A100-SXM4-80GB'],fp16+fa2,1,256,12.2,0.0321,41.9,31.2,14356,14480,1xA100,1,1,1xA100-fp16+fa2 -15,NousResearch/Llama-2-7b-hf,['NVIDIA 
A100-SXM4-80GB'],fp16+fa2,4,256,12.6,0.104,162.0,38.5,16774,24794,1xA100,1,4,1xA100-fp16+fa2 -16,NousResearch/Llama-2-7b-hf,['NVIDIA A100-SXM4-80GB'],fp16+fa2,32,256,26.4,0.786,619.0,40.7,39331,84412,1xA100,1,32,1xA100-fp16+fa2 -17,NousResearch/Llama-2-7b-hf,['NVIDIA A100-SXM4-80GB'],fp16+fa2,64,256,44.7,1.56,732.0,41.0,65110,84422,1xA100,1,64,1xA100-fp16+fa2 -18,NousResearch/Llama-2-7b-hf,['NVIDIA A100-SXM4-80GB'],fp16+fa2,8,256,12.9,0.197,317.0,40.6,19997,63755,1xA100,1,8,1xA100-fp16+fa2 -19,NousResearch/Llama-2-7b-hf,['NVIDIA A100-SXM4-80GB'],fp16+fa2,16,256,16.8,0.392,487.0,40.8,26442,84404,1xA100,1,16,1xA100-fp16+fa2 -20,NousResearch/Llama-2-7b-hf,['NVIDIA A100-SXM4-80GB'],fp16+fa2,2,256,12.9,0.0548,79.2,36.5,15162,15613,1xA100,1,2,1xA100-fp16+fa2 -21,TheBloke/LLaMa-7B-GPTQ,['NVIDIA A100-SXM4-80GB'],fp16+gptq+exllamav2,1,256,14.5,0.0457,35.2,21.9,5609,6004,1xA100,1,1,1xA100-fp16+gptq+exllamav2 -22,TheBloke/LLaMa-7B-GPTQ,['NVIDIA A100-SXM4-80GB'],fp16+gptq+exllamav2,4,256,15.0,0.122,136.0,32.8,8027,16483,1xA100,1,4,1xA100-fp16+gptq+exllamav2 -23,TheBloke/LLaMa-7B-GPTQ,['NVIDIA A100-SXM4-80GB'],fp16+gptq+exllamav2,32,256,35.0,0.833,467.0,38.4,30585,84418,1xA100,1,32,1xA100-fp16+gptq+exllamav2 -24,TheBloke/LLaMa-7B-GPTQ,['NVIDIA A100-SXM4-80GB'],fp16+gptq+exllamav2,64,256,51.8,1.61,631.0,39.8,56364,84418,1xA100,1,64,1xA100-fp16+gptq+exllamav2 -25,TheBloke/LLaMa-7B-GPTQ,['NVIDIA A100-SXM4-80GB'],fp16+gptq+exllamav2,8,256,15.1,0.223,271.0,35.9,11250,55027,1xA100,1,8,1xA100-fp16+gptq+exllamav2 -26,TheBloke/LLaMa-7B-GPTQ,['NVIDIA A100-SXM4-80GB'],fp16+gptq+exllamav2,16,256,20.1,0.425,407.0,37.6,17694,84385,1xA100,1,16,1xA100-fp16+gptq+exllamav2 -27,TheBloke/LLaMa-7B-GPTQ,['NVIDIA A100-SXM4-80GB'],fp16+gptq+exllamav2,2,256,14.6,0.0708,70.0,28.2,6414,7220,1xA100,1,2,1xA100-fp16+gptq+exllamav2 -28,TheBloke/LLaMa-7B-GPTQ,['NVIDIA A100-SXM4-80GB'],fp16+gptq+exllamav1,1,256,14.6,0.0416,35.0,24.0,4883,5276,1xA100,1,1,1xA100-fp16+gptq+exllamav1 -29,TheBloke/LLaMa-7B-GPTQ,['NVIDIA A100-SXM4-80GB'],fp16+gptq+exllamav1,4,256,15.4,0.106,133.0,37.7,7301,15755,1xA100,1,4,1xA100-fp16+gptq+exllamav1 -30,TheBloke/LLaMa-7B-GPTQ,['NVIDIA A100-SXM4-80GB'],fp16+gptq+exllamav1,32,256,32.3,0.688,506.0,46.5,29858,84420,1xA100,1,32,1xA100-fp16+gptq+exllamav1 -31,TheBloke/LLaMa-7B-GPTQ,['NVIDIA A100-SXM4-80GB'],fp16+gptq+exllamav1,64,256,49.8,1.32,657.0,48.5,55638,84418,1xA100,1,64,1xA100-fp16+gptq+exllamav1 -32,TheBloke/LLaMa-7B-GPTQ,['NVIDIA A100-SXM4-80GB'],fp16+gptq+exllamav1,8,256,19.4,0.191,211.0,41.9,10524,54299,1xA100,1,8,1xA100-fp16+gptq+exllamav1 -33,TheBloke/LLaMa-7B-GPTQ,['NVIDIA A100-SXM4-80GB'],fp16+gptq+exllamav1,16,256,24.2,0.356,338.0,44.9,16969,84406,1xA100,1,16,1xA100-fp16+gptq+exllamav1 -34,TheBloke/LLaMa-7B-GPTQ,['NVIDIA A100-SXM4-80GB'],fp16+gptq+exllamav1,2,256,14.7,0.0621,69.5,32.2,5688,6492,1xA100,1,2,1xA100-fp16+gptq+exllamav1 diff --git a/examples/running-llamas/configs/_base_.yaml b/examples/running-llamas/configs/_base_.yaml deleted file mode 100644 index 5df2bd58..00000000 --- a/examples/running-llamas/configs/_base_.yaml +++ /dev/null @@ -1,40 +0,0 @@ -defaults: - - backend: pytorch # default backend - - launcher: inline # default launcher - - benchmark: inference # default benchmark - - experiment # inheriting from experiment config - - _self_ # for hydra 1.1 compatibility - - override hydra/job_logging: colorlog # colorful logging - - override hydra/hydra_logging: colorlog # colorful logging - -experiment_name: llama-experiment -model: llama-2-model -device: cuda - -backend: - 
no_weights: true - torch_dtype: float16 - -benchmark: - memory: true - warmup_runs: 10 - new_tokens: 512 - input_shapes: - batch_size: 1 - sequence_length: 256 - -hydra: - run: - dir: experiments/${oc.env:HOSTNAME}/${model}/${experiment_name} - sweep: - dir: experiments/${oc.env:HOSTNAME}/${model}/${experiment_name} - subdir: ${benchmark.input_shapes.batch_size} - job: - chdir: true - env_set: - CUDA_VISIBLE_DEVICES: 0 - CUDA_DEVICE_ORDER: PCI_BUS_ID - sweeper: - params: - benchmark.input_shapes.batch_size: 1,2,4,8,16,32,64,128 - model: NousResearch/Llama-2-7b-hf,NousResearch/Llama-2-13b-hf diff --git a/examples/running-llamas/configs/fp16+bt+tp=2.yaml b/examples/running-llamas/configs/fp16+bt+tp=2.yaml deleted file mode 100644 index efaddd22..00000000 --- a/examples/running-llamas/configs/fp16+bt+tp=2.yaml +++ /dev/null @@ -1,22 +0,0 @@ -defaults: - - _base_ - - _self_ - - override launcher: torchrun - -experiment_name: fp16+bt+tp=2 - -launcher: - nproc_per_node: 2 - rdzv_endpoint: localhost:29500 - -backend: - to_bettertransformer: true - deepspeed_inference: true - deepspeed_inference_config: - tensor_parallel: - tp_size: 2 - -hydra: - job: - env_set: - CUDA_VISIBLE_DEVICES: 0,1 diff --git a/examples/running-llamas/configs/fp16+bt.yaml b/examples/running-llamas/configs/fp16+bt.yaml deleted file mode 100644 index bee86deb..00000000 --- a/examples/running-llamas/configs/fp16+bt.yaml +++ /dev/null @@ -1,9 +0,0 @@ -defaults: - - _base_ - - _self_ - - override launcher: process - -experiment_name: fp16+bt - -backend: - to_bettertransformer: true diff --git a/examples/running-llamas/configs/fp16+dp=2.yaml b/examples/running-llamas/configs/fp16+dp=2.yaml deleted file mode 100644 index 80adde9d..00000000 --- a/examples/running-llamas/configs/fp16+dp=2.yaml +++ /dev/null @@ -1,15 +0,0 @@ -defaults: - - _base_ - - _self_ - - override launcher: torchrun - -experiment_name: fp16+dp=2 - -launcher: - nproc_per_node: 2 - rdzv_endpoint: localhost:29511 - -hydra: - job: - env_set: - CUDA_VISIBLE_DEVICES: 0,1 diff --git a/examples/running-llamas/configs/fp16+fa2+tp=2.yaml b/examples/running-llamas/configs/fp16+fa2+tp=2.yaml deleted file mode 100644 index a239e767..00000000 --- a/examples/running-llamas/configs/fp16+fa2+tp=2.yaml +++ /dev/null @@ -1,22 +0,0 @@ -defaults: - - _base_ - - _self_ - - override launcher: torchrun - -experiment_name: fp16+fa2+tp=2 - -launcher: - nproc_per_node: 2 - rdzv_endpoint: localhost:29544 - -backend: - use_flash_attention_2: true - deepspeed_inference: true - deepspeed_inference_config: - tensor_parallel: - tp_size: 2 - -hydra: - job: - env_set: - CUDA_VISIBLE_DEVICES: 0,1 diff --git a/examples/running-llamas/configs/fp16+fa2.yaml b/examples/running-llamas/configs/fp16+fa2.yaml deleted file mode 100644 index b045ebd1..00000000 --- a/examples/running-llamas/configs/fp16+fa2.yaml +++ /dev/null @@ -1,9 +0,0 @@ -defaults: - - _base_ - - _self_ - - override launcher: process - -experiment_name: fp16+fa2 - -backend: - use_flash_attention_2: true diff --git a/examples/running-llamas/configs/fp16+gptq+exllamav1+dp=2.yaml b/examples/running-llamas/configs/fp16+gptq+exllamav1+dp=2.yaml deleted file mode 100644 index f9a58bd4..00000000 --- a/examples/running-llamas/configs/fp16+gptq+exllamav1+dp=2.yaml +++ /dev/null @@ -1,27 +0,0 @@ -defaults: - - _base_ - - _self_ - - override launcher: torchrun - -experiment_name: fp16+gptq+exllamav1+dp=2 - -launcher: - nproc_per_node: 2 - rdzv_endpoint: localhost:29522 - -backend: - # for some reason core gets dumped - # with dummy weights + 
exllamav1 for 65B - no_weights: false - quantization_scheme: gptq - quantization_config: - exllama_config: - version: 1 - -hydra: - job: - env_set: - CUDA_VISIBLE_DEVICES: 0,1 - sweeper: - params: - model: TheBloke/LLaMa-7B-GPTQ,TheBloke/LLaMa-13B-GPTQ,TheBloke/LLaMa-65B-GPTQ diff --git a/examples/running-llamas/configs/fp16+gptq+exllamav1.yaml b/examples/running-llamas/configs/fp16+gptq+exllamav1.yaml deleted file mode 100644 index c7555a8b..00000000 --- a/examples/running-llamas/configs/fp16+gptq+exllamav1.yaml +++ /dev/null @@ -1,20 +0,0 @@ -defaults: - - _base_ - - _self_ - - override launcher: process - -experiment_name: fp16+gptq+exllamav1 - -backend: - # for some reason core gets dumped - # with 65B + exllamav1 - no_weights: false - quantization_scheme: gptq - quantization_config: - exllama_config: - version: 1 - -hydra: - sweeper: - params: - model: TheBloke/LLaMa-7B-GPTQ,TheBloke/LLaMa-13B-GPTQ,TheBloke/LLaMa-65B-GPTQ diff --git a/examples/running-llamas/configs/fp16+gptq+exllamav2+dp=2.yaml b/examples/running-llamas/configs/fp16+gptq+exllamav2+dp=2.yaml deleted file mode 100644 index fdb85109..00000000 --- a/examples/running-llamas/configs/fp16+gptq+exllamav2+dp=2.yaml +++ /dev/null @@ -1,27 +0,0 @@ -defaults: - - _base_ - - _self_ - - override launcher: torchrun - -experiment_name: fp16+gptq+exllamav2+dp=2 - -launcher: - nproc_per_node: 2 - rdzv_endpoint: localhost:29533 - -backend: - # for some reason core gets dumped - # with dummy weights + exllamav2 - no_weights: false - quantization_scheme: gptq - quantization_config: - exllama_config: - version: 2 - -hydra: - job: - env_set: - CUDA_VISIBLE_DEVICES: 0,1 - sweeper: - params: - model: TheBloke/LLaMa-7B-GPTQ,TheBloke/LLaMa-13B-GPTQ,TheBloke/LLaMa-65B-GPTQ diff --git a/examples/running-llamas/configs/fp16+gptq+exllamav2.yaml b/examples/running-llamas/configs/fp16+gptq+exllamav2.yaml deleted file mode 100644 index dd314d5d..00000000 --- a/examples/running-llamas/configs/fp16+gptq+exllamav2.yaml +++ /dev/null @@ -1,20 +0,0 @@ -defaults: - - _base_ - - _self_ - - override launcher: process - -experiment_name: fp16+gptq+exllamav2 - -backend: - # for some reason core gets dumped - # with dummy weights + exllamav2 - no_weights: false - quantization_scheme: gptq - quantization_config: - exllama_config: - version: 2 - -hydra: - sweeper: - params: - model: TheBloke/LLaMa-7B-GPTQ,TheBloke/LLaMa-13B-GPTQ,TheBloke/LLaMa-65B-GPTQ diff --git a/examples/running-llamas/configs/fp16+tp=2.yaml b/examples/running-llamas/configs/fp16+tp=2.yaml deleted file mode 100644 index 712149df..00000000 --- a/examples/running-llamas/configs/fp16+tp=2.yaml +++ /dev/null @@ -1,21 +0,0 @@ -defaults: - - _base_ - - _self_ - - override launcher: torchrun - -experiment_name: fp16+tp=2 - -launcher: - nproc_per_node: 2 - rdzv_endpoint: localhost:29544 - -backend: - deepspeed_inference: true - deepspeed_inference_config: - tensor_parallel: - tp_size: 2 - -hydra: - job: - env_set: - CUDA_VISIBLE_DEVICES: 0,1 diff --git a/examples/running-llamas/configs/fp16.yaml b/examples/running-llamas/configs/fp16.yaml deleted file mode 100644 index 432d986f..00000000 --- a/examples/running-llamas/configs/fp16.yaml +++ /dev/null @@ -1,6 +0,0 @@ -defaults: - - _base_ - - _self_ - - override launcher: process - -experiment_name: fp16 diff --git a/examples/running-llamas/report.py b/examples/running-llamas/report.py deleted file mode 100644 index 316f5bc1..00000000 --- a/examples/running-llamas/report.py +++ /dev/null @@ -1,289 +0,0 @@ -from argparse import ArgumentParser -from 
pathlib import Path -from typing import List - -import matplotlib.pyplot as plt -import numpy as np -import pandas as pd -from flatten_dict import flatten -from omegaconf import OmegaConf -from pandas import DataFrame - - -def gather_full_report(root_folders: List[Path], report_folder: str = "artifacts") -> DataFrame: - # key is path to inference file as string, value is dataframe - - config_dfs = {} - inference_dfs = {} - - for root_folder in root_folders: - inference_dfs.update( - {f.parent.absolute().as_posix(): pd.read_csv(f) for f in root_folder.glob("**/inference_results.csv")} - ) - config_dfs.update( - { - f.parent.absolute() - .as_posix(): pd.DataFrame.from_dict(flatten(OmegaConf.load(f), reducer="dot"), orient="index") - .T - for f in root_folder.glob("**/hydra_config.yaml") - if f.parent.absolute().as_posix() in inference_dfs.keys() - } - ) - - if len(inference_dfs) == 0 or len(config_dfs) == 0: - raise ValueError(f"No results found in {root_folder}") - - # Merge inference and config dataframes - inference_reports = [ - config_dfs[name].merge(inference_dfs[name], left_index=True, right_index=True) for name in inference_dfs.keys() - ] - - # Concatenate all reports - inference_report = pd.concat(inference_reports, axis=0, ignore_index=True) - inference_report.to_csv(f"{report_folder}/full_report.csv") - - return inference_report - - -def get_short_report(full_report, report_folder: str = "artifacts"): - short_columns = { - "model": "Model", - "environment.gpus": "GPUs", - "experiment_name": "Experiment Name", - "benchmark.input_shapes.batch_size": "Per Process Batch Size", - "benchmark.input_shapes.sequence_length": "Sequence Length", - # - "decode.latency(s)": "Decode Latency (s)", - "forward.latency(s)": "Prefill Latency (s)", - # - "decode.throughput(tokens/s)": "Decode Throughput (tokens/s)", - "forward.throughput(samples/s)": "Prefill Throughput (samples/s)", - # - "generate.max_memory_allocated(MB)": "Generate Max Memory Allocated (MB)", - "generate.max_memory_reserved(MB)": "Generate Max Memory Reserved (MB)", - } - short_report = full_report[list(short_columns.keys())].rename(columns=short_columns) - - short_report["GPU Name"] = short_report["GPUs"].str[0] - short_report["Num GPUs"] = short_report["GPUs"].str.len() - short_report["GPU Name"].replace("NVIDIA A100-SXM4-80GB", "1xA100", inplace=True) - short_report["GPU Name"].replace("AMD INSTINCT MI250 (MCM) OAM AC MBA", "1xMI250", inplace=True) - short_report["Effective Batch Size"] = short_report["Per Process Batch Size"] * short_report["Num GPUs"] - short_report["Group"] = short_report["GPU Name"] + "-" + short_report["Experiment Name"] - short_report.to_csv(f"{report_folder}/short_report.csv") - - return short_report - - -def get_batch_plots(short_report, report_folder, plot="bar", memory=True): - fig1, ax1 = plt.subplots() - fig2, ax2 = plt.subplots() - fig3, ax3 = plt.subplots() - fig4, ax4 = plt.subplots() - - batch_column = "Effective Batch Size" - short_report = short_report.sort_values(by="Group", ascending=True) - groups = short_report["Group"].unique().tolist() - x = np.arange(len(short_report[batch_column].unique())) - width = 0.8 / len(short_report["Group"].unique().tolist()) - offset = -(width * (len(groups) - 1) / 2) - - for group in groups: - mask = short_report["Group"] == group - group_report = short_report[mask].sort_values(by=batch_column) - x_ = np.arange( - group_report[batch_column].min() - 1, - len(group_report[batch_column].unique()) + (group_report[batch_column].min() - 1), - ) - if plot == "bar": 
- ax1.bar( - x_ + offset, - group_report["Prefill Latency (s)"], - label=group, - width=width, - ) - ax2.bar( - x_ + offset, - group_report["Decode Throughput (tokens/s)"], - label=group, - width=width, - ) - ax3.bar( - x_ + offset, - group_report["Generate Max Memory Allocated (MB)"], - label=group, - width=width, - ) - ax4.bar( - x_ + offset, - group_report["Generate Max Memory Reserved (MB)"], - label=group, - width=width, - ) - offset += width - elif plot == "line": - ax1.plot( - x_, - group_report["Prefill Latency (s)"], - label=group, - marker="o", - ) - ax2.plot( - x_, - group_report["Decode Throughput (tokens/s)"], - label=group, - marker="o", - ) - ax3.plot( - x_, - group_report["Generate Max Memory Allocated (MB)"], - label=group, - marker="o", - ) - ax4.plot( - x_, - group_report["Generate Max Memory Reserved (MB)"], - label=group, - marker="o", - ) - - ax1.set_xticks(x) - ax1.set_ylim(bottom=0) - ax1.set_xticklabels(short_report[batch_column].sort_values().unique().tolist()) - ax1.set_xlabel(batch_column) - ax1.set_ylabel("Prefill Latency (s)") - ax1.set_title(f"Prefill Latency per Batch Size ({short_report['Model'].unique()[0]})") - ax1.legend(fancybox=True, shadow=True) - - ax2.set_xticks(x) - ax2.set_ylim(bottom=0) - ax2.set_xticklabels(short_report[batch_column].sort_values().unique().tolist()) - ax2.set_xlabel(batch_column) - ax2.set_ylabel("Effective Decode Throughput (tokens/s)") - ax2.set_title(f"Decode Throughput per Batch Size ({short_report['Model'].unique()[0]})") - ax2.legend(fancybox=True, shadow=True) - - ax3.set_xticks(x) - ax3.set_ylim(bottom=0) - ax3.set_xticklabels(short_report[batch_column].sort_values().unique().tolist()) - ax3.set_xlabel(batch_column) - ax3.set_ylabel("Generate Max Memory Allocated (MB)") - ax3.set_title(f"Generate Max Memory Allocated per Batch Size ({short_report['Model'].unique()[0]})") - ax3.legend(fancybox=True, shadow=True) - - ax4.set_xticks(x) - ax4.set_ylim(bottom=0) - ax4.set_xticklabels(short_report[batch_column].sort_values().unique().tolist()) - ax4.set_xlabel(batch_column) - ax4.set_ylabel("Generate Max Memory Reserved (MB)") - ax4.set_title(f"Generate Max Memory Reserved per Batch Size ({short_report['Model'].unique()[0]})") - ax4.legend(fancybox=True, shadow=True) - - legend = plt.legend(loc="upper center") - legend.get_frame().set_facecolor((0, 0, 1, 0.1)) - legend.get_frame().set_alpha(None) - plt.tight_layout() - - fig1.savefig(f"{report_folder}/prefill_latency_{plot}_plot.png") - fig2.savefig(f"{report_folder}/decode_throughput_{plot}_plot.png") - - if memory: - fig3.savefig(f"{report_folder}/generate_max_memory_allocated_{plot}_plot.png") - fig4.savefig(f"{report_folder}/generate_max_memory_reserved_{plot}_plot.png") - return fig1, fig2, fig3, fig4 - - return fig1, fig2 - - -def get_peak_decode_throughput_plot(short_report, report_folder): - # a bar plot with one bar per group, representing the max attainable throughput in tokens/s - fig, ax = plt.subplots() - - # - max_decode_throughput = short_report.groupby("Group")["Decode Throughput (tokens/s)"].max().reset_index() - max_decode_throughput = ( - short_report.merge(max_decode_throughput, on=["Group", "Decode Throughput (tokens/s)"]) - .sort_values(by="Decode Throughput (tokens/s)", ascending=True) - .reset_index() - ) - - ax.bar( - max_decode_throughput["Group"], - max_decode_throughput["Decode Throughput (tokens/s)"], - color=plt.cm.Paired(np.arange(len(max_decode_throughput))), - ) - - # add batch size on top of each bar - for i, v in 
enumerate(max_decode_throughput["Effective Batch Size"]): - ax.text( - i, - max_decode_throughput["Decode Throughput (tokens/s)"].iloc[i], - f"bs={v}", - ha="center", - va="bottom", - ) - - ax.set_xlabel("Group") - ax.set_ylabel("Peak Decode Throughput (tokens/s)") - ax.set_title(f"Peak Decode Throughput ({short_report['Model'].unique()[0]})") - ax.set_ylim(top=max_decode_throughput["Decode Throughput (tokens/s)"].max() * 1.1) - - plt.xticks(rotation=45, ha="right") - plt.tight_layout() - - fig.savefig(f"{report_folder}/peak_decode_throughput_bar_plot.png") - - return fig - - -def generate_report(): - parser = ArgumentParser() - parser.add_argument( - "--experiments-folders", - "-e", - type=Path, - nargs="+", - required=True, - help="The folder containing the results of experiments.", - ) - parser.add_argument( - "--report-name", - "-r", - type=str, - required=False, - default="artifacts", - help="The name of the report.", - ) - - args = parser.parse_args() - report_folder = args.report_name - experiments_folders = args.experiments_folders - - Path(report_folder).mkdir(parents=True, exist_ok=True) - - # gather experiments results - full_report = gather_full_report( - root_folders=experiments_folders, - report_folder=report_folder, - ) - short_report = get_short_report( - full_report, - report_folder=report_folder, - ) - for plot in ["bar", "line"]: - _ = get_batch_plots( - short_report, - report_folder, - plot=plot, - memory=True, - ) - - _ = get_peak_decode_throughput_plot( - short_report, - report_folder, - ) - print("Report generated successfully!") - - -if __name__ == "__main__": - generate_report() diff --git a/examples/running-mistrals/README.md b/examples/running-mistrals/README.md deleted file mode 100644 index 8d122cf8..00000000 --- a/examples/running-mistrals/README.md +++ /dev/null @@ -1,60 +0,0 @@ -# Optimum-Benchmark x Mistral x BnB & GPTQ & AWQ - -A set of benchmarks on quantizing Misral AI's model. - -## Setup - -You will need to install these quantization packages: - -```bash -pip install bitsandbytes -pip install auto-gptq -pip install autoawq -``` - -## Running - -Then run these commands from this directory: - -```bash -optimum-benchmark --config-dir configs/ --config-name _base_ --multirun -optimum-benchmark --config-dir configs/ --config-name bnb --multirun -optimum-benchmark --config-dir configs/ --config-name gptq --multirun -optimum-benchmark --config-dir configs/ --config-name awq --multirun -``` - -This will create a folder called `experiments` with the results of the benchmarks with an inference `batch_size` ranging from 1 to 16 and an input `sequence_length` (prompt size) of 512. - -## Reporting - -To create a report run: - -```bash -python report.py -e experiments -``` - -Which will create some quick reporting artifacts like a `full_report.csv`, `short_report.csv`, some plots and a `rich_table.svg`. - -## Results - -### On A100-80GB - -

-[figure: latency_plot]
-
-[figure: throughput_plot]
-
-[figure: memory_plot]
-
-[figure: memory_plot]
-
-[figure: rich_table]
diff --git a/examples/running-mistrals/artifacts/A100-80GB/forward_latency_plot.png b/examples/running-mistrals/artifacts/A100-80GB/forward_latency_plot.png
deleted file mode 100644
index 8a5c64b1ec83e047ffff609df04a0361397af7f0..0000000000000000000000000000000000000000
Binary files a/examples/running-mistrals/artifacts/A100-80GB/forward_latency_plot.png and /dev/null differ
diff --git a/examples/running-mistrals/artifacts/A100-80GB/forward_memory_plot.png b/examples/running-mistrals/artifacts/A100-80GB/forward_memory_plot.png
deleted file mode 100644
index 0143c4de91c2a02cad08661d74aebc3a343a59cd..0000000000000000000000000000000000000000
Binary files a/examples/running-mistrals/artifacts/A100-80GB/forward_memory_plot.png and /dev/null differ
zv0=Nqi9!R7moVJw$!8L>h#Jcp2$api3|NMDB`rqF8X#uGv|g}4Xfjb{cWM}7hay!S z+sk%eJ49?>Wnl)@%0q3z!uprLh8mn{;$A8-cV1mw;bg!L(R!G^59Txu7km`T!v`kX z5O%td;2?`ek8lF{a}6tTu0D%Jg3{6 zx^BHr@SmHhYz?2kl$o1@UawErwWjntz3rX2F--GCk%0$&5ySV{sRp9f2dy3$;iGA& zs%jo%QsBGRo_?CI;8=bPLcD7EmovWXmeLMf)jNy*vsd%;D}%^iNx!&F;Bh?GTXnRH zE$GimWuy^q`@4BE=1e?!jdr|e!Jz0tZ|3)z=; zR(q!W9CIE&tnXMu<08L}mWrh7O75rbYWZ~$clu{3aB$+RYw+Qv#l80F8T(#!#6qg6 zfKf;FgTMP~EJ1&{b+2}O`(Q)wwbc?0y-C(Ewkv`5GEWzV^T!IU*oOE9XW|2@q254% znqOF2>J$&m2g4ylo&F}WVjEn>9g>`k99mp2x0qvdo*H-CvaKhvFTt7hJ7|`EzLTdD zZ}-NNLNB&&8}Yy8PDY2?E9RwJ<+ubt*7BE9X!7K)j^yrJiT>g@!a*p5h$9%Zn?M~R z3$OWT*~}VJfPiMn#Q5O-g8FW8v50zevdGTo#f4vOA9)$PUeIeObjb5MoX-|9)6O1g zui!MiQ&p`QvE4}>4;Ds;`x(-DXv)Ss4e6mYE<%x%=*{0LJ`plpI&iOPa7=f2+uW3k zvVqnMSc(Gk`zZ_!0&cx#wZp}KtjY5{&sGu7#bwT7%$JAn)0pL`jNRVwpC#nIfu^Zw zyLzu2gK>cDO3c-k@=Fkk71c6i+G2-z^mG*sSSKH8=m><*Obv+#c;Yy?q~F=f+n^8n*R|O^)XaNA=x^j69Oa+Y>Z(fPykRNi@;inUgpDB zE$4Bib2QEN(z9JxkeP9$TdGx1`;=Q;_Ho=<_)pGh8OsBy%8H7rLhHmkHzWpGN{;g8 z2%0Nwm~KiI*4vg@g?+;vY#cA#aeLx|7NMslJ!;obUGeHE*|h=Evj8TH7+YI_( zc=IqkAOIwhi|x$yWN;kjr!(MU;Si9kY~^dcJME<_``I>186%%+bB*e)Rt!sp%z6CF z5NtU=k|&xce$|pJzx!p!7)&}|TXO|+g#Z!7k&JlZ2qKKwdQ7dlFip?BcC z?#Qc}68pshF9GtUV&g7q#C`yVAV5}d5Aj`tR3RB5*q*-x{2+)G@w~5`9Ff#rAza;P zc_q;PWhML}aRRQR&Ry$ajiNK9Pu*wB+w=4vBae9xh#-FwP8eT~N%Y?trYf|enY$gE z(AraDL9LVXZE8)!?LG|!ehIPgCX9RlUo=Fir>;%`dT3+lw;=XNNS^~*ri0=dt5lM= z!SOLY@X8f{l#mkfcUB*4Kp%`w5r0@1_}5=b&#eIF)TfrwuU%L9&UOjU@SRF7bMEqd z-#g(tj0*$2V53Ij0Yb5iKb*?#nA*B6J6` zzV7_D;s9wtKJ;&!>ml15zG!%KP>(T3Le7?;M!_ko)}3sJ^;6`1M>8|cFVjP>J%?j# zo($nGIz6fqqedM#FyGIm#2mmsD>Hq9^dxAekUK)ebL8YN`_A!0kZIK;T5G`R5k55{ zwN2_vosakvz=#X8cm!93 z76Fi4d_W}w?Vv?qINmpH2lEz}`6w0eXk`NTt9gKaXm8z`N#6u~-^!8hq(kFZY zMx*z`V$ED{-O=gO)4H=cQhS*uA31gaxI+K}KxX=g&E0RsdMKAR11yT4cCYWF!f=HTVO}02UuI2?8GhYeY4- zdV;nFaTkQnr5%vRp*`ap8%r5+=P9->ter8hvlH-DNrX0a2{f@m{U)NR3De)ecUg)Y zqKIe>U*OpKX!2(!qT)?(8&^rpFs3TsUN9sxyb^moXQR$GYbspQMLH(h_W=LqP+RcB z7*<_pJ4TD+iQU`E`_1e9QvQy${B4E#nJPc2(dDG1Zb7FXOb-y#ez^pJw=ir4Fu?@? z>Q9Wx22EsKQOLX;ggVgz4Lbm*9>7(mLU$683W7`-g>*R)+`wiU9Z}VSjuWv71Id%V zyTJ#sKWA`dH%}_;a!{eEU*SaFG%gaC5-U?kd8Of zOM^7A|_?zeI57%05<|yj0y-2hUql2_@^kqt`N5mB*ldc?>3-Lpcnl|*j_|d zBut@#w@3t>av&JU5S^$&$P^%yikqc2b8c%tU|NcEh zVg1o+N95&cvQ+*InWM6JI?);Ond-$=!%vYFw&o%MDo-Ep4(!VC2g{BnhQ%1U5*wct z>y3Y9v#H*5x_h|-iGRq%1mKuMi+QNtO9;AUw?Xs=!$jUfx0=FS6L=wrt{DJt71~|E zO25>~F3JH5bsDI+0F6SE{V)&=l+4uN4xRvd8X8rg=o-=ngDT{oKaXya)L?m)1&!7h zfho@l&rneE0aVf+)6cnKE({n=xjEfg(=l-+Hpel!W(T%VL|bO84b|Xi#fSnx{5CjB2*5NmQ3;8hT3c?( zj6PeC1SB)yCVK3v1?zNO|F4z?qGiA}O*wdJ&`m}f%u>a4BR(ybvXi#K7V_#kXKOBe zn~Jxkf4LkNgCV=MgLJ5kzuNicP#H6g7%tqi%1n&|(*Nw3DxG<;t7F z0iR&!A3!S+nYe~{xIiiKOYA1>gLG(V^iO2=PJzbS-eRn>=Y{*k;9yKCSD)1ZXlOw( z2W)SpRn0FZ)m-(vzk^pIqg9N+41vC(Sv?P??syo z27f>}-x@q4qA*{DRG@zukB{Ip)w4fC!CgW3?>ZOR>LtP3=aR$w66ZTuL=|=xvn;9k zDJsqQ;&r?b>aqb(8nXyMZb4>-ux3z5BN(XFh-)`=_JPIS0fO*+uh;h&@K0e^{{YZ! zYm4n<8qD8rF*3dbmx~^lk%Iy@LBKcNBrh1G7!PW*i$ECPi_+1BmP#u83QRN<%`4Gs zz}~BqS_Q3(R*5Cd8X6jD)A0i0yoB@bTO&){2>vHvCknt_(YL!ycrgH^Vf*Xp4HjNy z`8=0i$UIEGwJtL(nZWXtxi2aCorJ{e^;qJ#n5Tm~wyMou2CnFo8L*}YDqcxYLRJy{ zdidQV5-~kU>_CK%7zBg-(C?e>*Y+H<2yJ_Ndl)SgdBh8}Aq*S>;FE_yxB@?8Lg-V0 z(43-HyP)%jy|Xi7H~YUudpk%E{(sQk)&CRP`%C5g+&PSa!)Bru@IxkMTRdEjg%wI3 z5^3YbYD*JRlGS&^&bEmo?aqA1rIJ-OmRGQ=oo|a;i*+|~R&GvSIGVBJ=ISn087{Oc zsnhM>a;CeSlKoQ(!yZW|p}q&Q4sy~!UmOwFf;@Nr&>Of`pql^u{Wr)Z5NmEN&f#5n zt4M2BR|ZoM%vvKvyAk&U?kmD>uC5$#(83vPVAlzbGTYGF&v~0f#-#czO|-i>Nb`dH zv-j>HRx58Z?ZvZT63S}{pCtue}nlR8$X^d^RAp)cSwsApINEkUkb(=k9D{? 
zG!h@S*6tusgY)h+Uf-2U9#%*aun9KG+;LLs-gjAZVthr+bzqJcHa>-KPL8={X^wE% z{V<}GhioLI?`vE*VZi`&=BvP>@yaqI1VVeNxSp1kMA}hq(r545EVq8qpNpl7GHtbM z0vDYKZhy}7MU#}Ut##<##pLNE;?1;B9juMQTgANsm1r|jpyqEWHI);hrzV zTJ1GCz(D45bwX+Gt1y?C4)^{;OY_wG^%y%+8E`L-U^+H!An7J$o1bMKm-4QIxdAd7fnf7Dhi`x3BFxba**DM*3_A$Cl5&-%P9G zs-d9!E1$1*lFa4W0e)S%VmAo|cZm>HIil{Atw2Eb2M~81);>TX|A{;Dc1EUQwCtzI z`IXiF9PLFUWsFd}gXHZ9a2Zn`N-Q$`SJeEH`4Ml~ZB=XG)+WwUsUd&K3+AinuaXV< ztt$u55@f-H0*(!cU3Et*j4*Kw8K+|l1D@`lf%}5lXImgAs*=*(P6`ebr=*ho{gHFU z_L77|oIiepp~LME4D4U4njRzic~G!1n^Kw{ra#b(9d^5{Ki`>@E4?cz@Y<(tEKoZ* z&6c6ULAAovx{!+>NS-s;KaWiLKwAu58}unKJfk<<^Y{Q*1qH5_loZsvV1YDCK}i{h zO~|vba_LfD#ra`Bd#$Cv${nYVFhyn8tSUYY-veTYrs~h6xmm6*X)eK6ZaMIl;33K_ z#9xwE1nLOPbbHw&>qBTq#o8Rl4{MRb=z+j>#y>|f1#;Hi_Pg8ivxQdMJRzEV zuZ7t&=Lw?AcPoD1x~(E@({}OGO55JHn(DPbea!JKi<4KFm{zBNZEl3+4Q8eY!wmv? zxEo!+H~20C@rsGHc9;0--piRHDC%+izCVVpo-ie;*v3K!ebvz#pCgodZ^_@3CF%gW zZU+GQ<=E@(1E2F6>082;A`KNL1_pe>h%Y^6q3`CrbJ@IYetyS9ktej^9C?W|fBfq{ zd^3DUcr`9iTFJd3>;4VVMjnKUkP>R80t6i3_oQdJb+qcr`U>O#+im+xO9+H2yMfcU4>=W?OSX3! z==4b-iyCrTQ!TjKqxG|<70IAvN80`;;;w$5=5yhPuu{zGt5;L<1N2NQ^Sqnz0N}V@dm~j_m#ucVo9kgO^<&B0K;9AHMiZ&`&CB{ zu2jc;`#ElF39VcmTT^DHEsvU_!=!)U{ru6`_bflAt3<0xZ(O&~F@JslQd@*a0(F8K z%z%OzUG(EOQ2M%|g-2luD9JBpF=OJO8aq)jdH4NX*Rk(EE-p?t*uD*F1f+yZd5_jh z%g(C!Q-rlj8_Txzc0UI`MYTHLmAD+b+niCIVojMRFZVaP&1{AX^K+QcNm7zIS1Wp7 zV_kwtJTE|i4<|PolAHp~1`4x$1f3Q2NRgHkuQ9?0<_`&#U^>9XBQsn87r(#E`^HaS z&s@VlkLzA9S<&XqLvFG5k87Ot^BY&oeV1+?*l}Q7!nNVlRO>L;!)L~c!g8-0Pfpm? z#HGphhns|&2`mn$fD4B>1PywXMcGU zUL@~09G`bu%o9IH-!{7fg|J|j8^bTMY{O`irTik=+p8Sks)ijH29o_8wW->BOG?Qv z@Hl$00&mZLT^JdtM?&{-D2e_holSIK8X(@=bpL# z5Iu)pcBM2Piw+Ax#P_%97Cvm`Pv;fUd`=3OT9d|OU}|i#nOCXv$zY!{=WaY07d!nx z7VLk{hbx2)0Xp?5L?;K5fkT*=0iJ4EQOy@=L@hZ;u7*>VR6(=Nwn*J*Fzp=#?#%b9 z$43KdutjNT_z|?n+4e@0U$K*Va&pYK@TpvB-#M~wQVC>XVN%1cZb1yHXB@0Wk+~b(D^zBW$-TeZ5r^Io zNp2liOadJLZf(-5C12VYH}tQi%xJL@J{=ir_8ya<3oqKUb^Vy@x@3+=Su>crpsLso0n%G*#+vZolpyx+PfF`uvN;{-2 zY5WWV7%a-Ssj15=g>cBGEvN%7s+1%xm2#W&5?4}fc>2B7jKo&8ijeSM&&v|tloV_t!HK2^cGK6QAOnl18~|oZ3HfUGZ39}HaMl37Y`A3xD1_;&bam*rlgGFqH_^3J+pL$%yQ*@zK13oU~` z0pR|KKfwiCMF4(U5AB%;I$~3$WpclSr_r5mcDA^vHr*_uU&40{(R`Q!S6}6T`KGwZ zkpLI3wd+o0Qdrn)^uN-(x`u?zNI&Bo7a&R*`EiSgM$|{?9}h1%IE3D-i<&{}B)cc_ zq$Vy}=+xU|v6FiwT^cK9==~QTOS1AJT0@*+v0?i8^*~48xkY&YPeQ-n%0+Xp5w2id zRzt`3F4GDw2haww_~l|Sd%dt^vhB3SAl;2O^k?M_FxYsQHEd3cTH4XSnPd9)CpSL( z{%zL>3QBnHfJI}!Nn%9Pu@R=3a24b|7V+vL}FXXZ}IjEzON# zR8+DP3(O3B@vsB4B9I*@uc|ZrNg?J_;Qxx{XJ+~o@Mkc9Go^&J6`tD8es>eZ)=%ne zRV&Qp*_8(fphq!KaScoWM%rOxrpReIGxFMc6h{RSako6vl$jXFgBD0EPjaM2qx`3& zVZ&bx78n=>hwcEe0|h`#{+UCy*Ow)cjKRx*H1B_2M-Hj1Tt)hMD_0DNr5Ap%>go#9 zP-A2Lb&Kqny|jd*TzgFaJg7n6*HEi)+$4GRpaq9_L}Jt$w6zSq-&l|zGgln)?1*<=xu)YP7Q zHEBup;e4`xG`jj?Mv13fSFfHO@clRPf|~8#=7aSI;8GS_9rM^jPmQ1C%?5adwXxo= zO!D4uqEqEs$D%60Tjg23q+Av&0i@&Xci<}>pbn?CSW74XxJ%qi;o-kHE|HPN`4cwK zrMNoJADnc@tjUdc-0-jaykmJItf1Za=?e9%llhO)2Z{C*pX;0@FZ$r(UhHJmeI!iN z$pX!pUr~DFY&JFp(M}N5wUDmAd}?549Hd#n&`1F88Za5Yp)Dqh*g~eIXk!z#xTGa* zXT{qII7Wv!vs%x4Uf{QW=mO!&F0Q!45ADmfu{)(y$SNHqtLoE-wrVR=6=TJXY zcCXhts0s=w$BI1hq&W!;wB6do5-j*^Y&w9$l$TR03y{O)`KGv9{myhQSIdXT6(0+S zFq%|VZJJzwXb_5Ys~UXiI~iv6P6}4U8Y~ui?QfV>ui-^Tc2a)H{?n--|B~^9Bb7g> zHr&^RXy|^{KFFg)J&+*-GdCE2wBB6&Pfy3$df1Bc>Q&y|Re$M526*Z%*^+zMYJ5H+ z(9j%m)nnA?Y_U<-;3*AygOyNC%%Z09cdz^wcf8!?cgdq3{Ug-SC^b1f+^+UA0b%AC zOw<7BJuqa?r@yyBv#MW3rwP3H-Q3&&hC_%$vMXRohIk)C>-jYxr-+jX@W1FC!6Cj8 zMjIEFl+c1FJh(1CH$m{@Wsu=GIXNNXT9`>~R
2#eEcaz1UMtY{nqe-tHMgRzYq4 zMuYie4%tK2dYRD<_EOclAo@S!Dwf|S=U=z~by~Tf5bP3NxGC~scky=gNo`Qy2qqKh zn+dEpKYzBqP*S)jEfrK7^obG8blJJtodr<@B6hF;Un32jLR~b-{@B40gd1!w5w8lM zPTKD-$%DN%A&k`qgFzVo_+oqR2Y9Wbvl$&w!&ngz?i;}PhwBgBKOiE- zg@tCYenLYW^#H{b0+h@c6!M5SDk^=C3_72};4 zow)cXWu=Yd>9$6H$sPyf^e-#hfWOIqbXZ@9ju$sKyTI_f9gtx(;N~~CwD<$#iqmS0 z7qA#{kjo&7NDw5V>4NPZ^zCe5mL+K0kfE0^m(yYZe{5Q4Yx{l%bb#5E>>Z(ydLwf3 z4V76A+}F4%FZ7IF8pzQ84$q%I?d7m9%RAXqe8u=8UVS(M9;;G%myI7sM& zZW@$q7rbEdPGk zra4T7y2;NUYdhQg9%eh$*Vl9ZMY~N$m#mxvgYGF2Hw1vLVc4+w0mc(KIU}ISL`Fvw z6A?WE&t5RMNa;8RWuT6|l==nZ-a?Ym0lz(G@Ly~vjZq%kP_U@j^$dJm$DWw0sPt@IBpvir}np6x;VqDpRH z7W|o{)s;|OY414m;fToZ+PL_%bN!!rGy&1pk*O`6I^$`jiz1Y`lCnI zLUTFat7-*n+h(X7JkoTd$;%C~Ur01)Mbcj7eh+@JkY_adMl+kV#4#}^rMm|1R{hG6H}9=#LQC@b zOK&42!LjhW9NZZmK0a0w^jQJjTHOb)hnd;gJj=j>gF^Jof`SZ~k9>3lZ2=IqSd4Hz zJS*Y}J}y|Ss6WZ;Ff~Z+0V3tWx`X@$|HGcNAwPkD$*t*^6V5sjG5VYuKiBx{Lw_3N z{1Eu~?fu9O&#}+ys$_1q@FvBR!~mSFAZm1~<5#i95fi|bZOk)OLSqA;X!xq$zP`Ly zT_#+JDPsa)*rfb8-bnCu`%Ye4g#0%EKYki%hu(r2;y8;^%X#~uuw|BS59^2_mh_K$34MEJ*27bs2)eh zC1&+qnm=|bgFPlG8#vB~^>C1CL8tcH4(fF5htSPwllxkM%+n9u;Yhx7H%l_dmL&Q* z9p|6P7YP%MAtUx5^2z6S3RRzE=H2PJ6WEyf;=8_ti0&`^P&!|tyjSj?6~*Q-Q&gza zg3Ipru|tsgpu)#^{DpSsU#?^JPaCqY{}AaOvyJUb*VNB0F!=L6>IL;l_|IaIxT!$- zhoq(V`%_hplL)IqC&sD8MvGW0D|yN^05??+!+b>JiOr;>r1`V;Yvn%WJ}v*g z=x6Jlz3OWHsUd50c$pxRHS|`B@@13qu*}~QHwI$rY(rRpvY15NM&aricT|&TSYquRQno%Q4gWb4|02tb zci@2Uk6)JR;k_@dD;-uRkpRXxmIMYfn!k4D})L*^)qf5-c)Z8)O?wB zf(Vx&_8_|RTdH}hK)pXx@OMJ>`y^$!UntO1>)WDEU)e6DdI_Jk;{5JA8ZI)1H8Mg0 zH_Ihl!qjAx?f07bIrT|M-kre+Q@*Fs{``wv6kB!f>b%OMA&mjTOEp0$5;fG7nObXO z$VfEHC|F)riV#*6Nf;hpa9NJyEf4t_e)6Karnp^8y0!*)j8go9;B$&2%;w}xj_}5< zP2X-H3kw}yaV3_lc3|E%`t`1)jiEnlOk+>3D!p)Zwdw6AMR(8bxK;XUsR8Fb zIk&Hy$VOAg{8cMFM7VDif%$0qSGHGc+Ci5W{XtsH){<>TM;$$Lld=v@M!Cp@YhXy) zdnMaA8A9iB*Gk3p3^aGN?xb{;XV;H1lb86H-tN|P79liFBYL!Fyk*<0^`Ff~dD`la za6_q`Pj}OXNU)?r8j2i#w)7rOuPJkAkroRG@Xl>uZ@d~vcdk4PCx3Xs7fTN4doGx>CI7x|%gtO)k8q_j#4QW)a)%+?O_#-F{Q(%JzVwr$Bs% zk1YNuDm5U+r9GJy${q&V`K=K)e<)54or(ON7QsSQ-6QC!X!)e@-5M@A>2Mw<$*yq(7AsTjO+<9et#^I0uCw^F(7i*#|XhfO}O?XWUv6`?EY zP2-P><*WMnFJh@4bIj!}K0mbO5EN*I+mt9|bzEuB@GdjEPwe3F#3C`E&WAew5B>%U zvG`HHcF{XUHV6Ef!U=^w|7|bGb^e|0@#VK%K+C}iJqgM4%v_z-Gw#>xdbl4Gs9(6g z4BH%?Sa``-Q0PZD$hTC*y8Cd9dn05i8t$>@HZe*pHuXct^A|6k75^2R%AM89aeF%pRzPqZmB*%?PMcUL4v52JIh|FXm~sx?~K60#yQp3 z)toVt^$MKAb>5#Ed5Gm8!S$)`)oZP91%daWxIt#i$X@)fq0pNZ(o z&^dnFd+|oJXksw2V}T=^IoN!EV0L!Fr_Lwg+qBO`4vmBU^lO+B!2X=-hhYS48vxrpYdMKe_xLPdqXlS$NneP(*IP%F2g8Z1U1 zmy$%KXj=1V+A;=-)4~Xj$e5VfRmS_cTx!9Wee0}Vja$7Ow2H4HP$^@)7X^d#*wkbyt17iB z@*qf*i#)71#C*o9AjVE&@hV~%pRj>2Nj0-TS@F}NA8+g+MFnP^Q?E3p6OhycCwIuuHTyX<=4At51aiyO*vqXzNE1nhLZjr;l|JFq`-$Ld*7)Gl2 zc6gov<*6=&?L^)>74U5rfAw8Pj&b`w;w5213k!ZhFWaxLh9z{1)XKm5UAd*^Sds^8 zz>?1o#*kv>!${A+qzjka&@!Fni>hh@tzENtsg*B2a@!=p!@bdnd?4P&TL=`0?@K^> z_)|r_)m^J6Zlu(IF-q$fo7c0Yr{~Js2RGw~F&6kndDL|-JoOHE*j;o(O7&^BJ)y$( z<~(_QtWTQ-_S9=a_d!Kj*DtuKF@f-o4iW=|@d$3n$UF*SszoG)T%2(00~4XsqF2|B z8+lOCiFMcaaf_1=P{A4Id0#}54RivQ28oUy&pcXxomL**+plBmG<;n%Bmt9RV3L=J zVAVBicw9;3sh0KW9epC7?>Naj17)htNWe6{jrQ!(uw-U6DpU8241aXr1Kmp7#_0yh zCeG|fa~(1gOR90&6jn^p@1&`Y&EIFQ|7}4ZtLtp6O(7sllEYB5y~i`QD10F3DlUe% z&_%U-V0xmSk^y_p_Yry$>OeZwyh17F@xW+IVvgp`wS2#%!Go>3*-tU?Ieaahj_sLe zyNoIO2@QXGZ%#kZX_eY~{azApi6=+jW;-Gg`@#F?ozO7WWmh^OqWYS)htwt2ZQO4I?^p{2$c=4CUC5Cv})@K?yFwg#q4P3Y?$JUS0Jz8GcI zm%oBou*jt67U;UgH8rpEJ8!%ImfykQ;kge!8&CofJ9@y{!64=qH+K{`SA*Z9ca57! 
zA!3IHQ)K9vneiijG=7$hcnRaDRg3hoy4fxrz`z4&Wa|H(X+m_hKmoi4{)GuH3)lSj zCar>uZwl>fr+(>ns&A%RG0*oH@4J@tu44FwJky#`ed6Am)!~R8=5d|`-7RgbO=;Ew zdnJ24-Rsl!NBFdE_|dVv@W2C<)BBmR{&b5FUM42~4x~ffaUM|dz@RKuu-FMq0X`-o zGevsFsQLMMD9j6itqdlKE#RquOwvZw&=YQ(Sh^r^Lmb$E*O%P4tO~SDApOCNv8jcD zLi9$UwW-1cUhpifPE*&|Q{adqqp(^q!E-0Gka!|r^Z3AnD9!q1rq;C2?fn{4mpk2| zWYTSKb8_Ao^SoxEju>uqYH;GfCt0@^`8%&D^j-tR54jZIJU zm%%ithp%jR2!~*Nf*6uRiM0kiTVyT)@b6GS_YVE0d-LwyA4e2ILP9XX z7QiN>u}WK*Niw^+X^roNg0+>|ZwAz%<_1C`89+c>YbPj9QQ;90A7Lgp$h9#sF*Ox6 zEG?OV!91`CL$;_VUnizF583!zdD72WTc|3pOWou4ZMyGUD~Cbzg|JeK{k`3x=@!94 z4$7nkX9^|&a&j|imhSS7 z+5>!BlRo-;_f|d`aBb9RqvHO78m4~Q6R&T?hITz4XB)ONnBuRcBq_0so!TJcE~MFY zT*IM09rVcIBsFHpV-?w(^BOwaoD4hC5=_M7VZZtF(oScUZzMy|u%%u%;gnh*n{Mv* zZll+(33>u~Xt`pB(FP32fdf4Yt{7(bMFPAO2JBS?RfMS?K=_3@dh~K}OWh-$$D@j9@{1bP&_2+Fj~C$7L7<>}5fbKg;j< zo5Wv=fZb!(VRc`!rel}E`@4tZ%ja3-<%%>9H@8i-D?+F)k_%DSOHemAjgAsfA)IpG z+OldCFANt1>&zyweL^kuq~d_B5h8sC(?l3#fcprhzyfJ^V33rQ^zw}xgpWX@4v(6r zCGWq1G!}#DmhKZn3ufwhS3ym>D>u;^wex%4bE43)tA1LBAa4B9UVr(z$9Fh%#OPjo zH98V%DC&r4t5L&M+h)!Y`AZYpSI)61?J1#38CABj%wCRq;!ubmggZ0hO*$TKKP!0U zik|oqz2NjjK1*Y~V-~smO8h()l(Ldia@{>$FgMjLQbi#V1>AyQOqTf+Cq=NuXn8RB ziC*;v10w}xWwTuG8)}Au+!PSIQ%gckY zp#}NGxE0!-6DMRb=#wWzzlM!w486lYh5Lo6*bN=+`ToXS$azYL@ygL^H?No7wugPS zyt#GJu0K!sG8jtr9o;^UC>ik1VT|}TxMUImbOZ4aagG4zCOv?oQJK&0fhW*QFpK~$ zLAe}8YTq(q28Dw7fq1J3z>q~I?JwjI3&E4;+I_3*d|C}01|ZMkMR=%L843D;>ws6! zHfWlQL9QU-NJZ^(Ls{@rwCjFN)o-~ZA=c=~%PJ~^Y?pVI>Kn%KX;ZH@J)Ulg81${3 zS(z~>$jMS!(%VgQo~LSztiEEF*|am~!t0F2Yt*;Da!~_>)g$%XgK|_zD(_2MT5YNe zeo{~1Gy>J$=gLZQFx)f<6>J8nnpv%Z%KsM8nFVVe#8Vaqe*~-N!Ng5=Fg+pKWX#uq zQ42S~pI;B*`GeO8ur1l)&NTA-St{Nq{&+=)q4xa7lWqcMDc3=^p0Y@TP|vS=WK9&i z1Toa4)XPb@o#>YYdmpC2`W@BFqaXj zCU^IZz4GoO>(OMu`T6zks83~e%KER1mNIx7S_Zj#Pp=T$^^rc{5*Ds3HTO3+VK}Y- zOJ@k9Pw9QoWS@nNA0esYg_f|(U+GOYCT|inJC&TOB~QD4X)YL?&F<5p?pUSpag)IV zXg!l<{Fhw~lcQs%rj~rxbB4gz`W(r1`Y9IhSW=k7Eg2GdL*vSl10T&FiPr;)OWs~Wg+%gJY>!x441yE(rHoXOti(5 zu*_W^Sv4m1FT^L6n+Nms7)I<&mkNHH&pN?3A-Ut_ugdhFMevl?U)vDk*UT6NmH0XY z9R4OlY(pN8p;uk}d~wb3a@JT7^5>z@G0a=E-g)8w{&>MH96{9J8{&chr9kff4lMvJ z@YDAnka;6tZNmW-_W%8_NPpuNp56S94)h^Rv$9XneB-Ugs{uB&W8e+8VB}YK zD=Z^U%Psd;3Q9u$a@%zdoxkzvM!=+yyXS>M`@5fV&V1RRwn$DtUv=bFn*V$W{7d`t z({iq=i!fmuc|5Hbz{dc7i~xWMd2rJ(0{nk|2=YIO)z z7VYGq#=HOqs*lqY-iL?JEGeulKx0SXcjFb{MS)R?Fj(3RRoQ2xwii?WukzONkB&l| zoSfHys1A4D$!x0GS#74ctO3$U(~8 zi!1{$AAN&t?IXC4!Bm$2B$<#^X6EKPIG8$BfJFynZ-N`ut1J>R`u8ERfvGJwz#Fpe zaDfuBBnQr(?4w8CsnR8}|H*Mrj|S`Q;QPQmxdrSr4*#Fd&OIFKG>zl$rqN-i)V1j# zlFdp{J1Hq0j0#IzjYLdDh~$`>QYjKv5k)$Xx~h?xL?YB1$;g;UlbQ%wr;uK9XwW!h zKhMlwyZ`5}xZ-->-}^lGbKlQ>e?MPdjlId)a_zZVT6a<8{8nB2ac|9#xR9>@A%swV zj)AB8rY0j_Umaat-Fdcd@9)?IBwxS&GWVb!j)uaKhW1jBpzNU79JQ%x`Qf&X>@pg! 
[GIT binary patch data omitted]
diff --git a/examples/running-mistrals/artifacts/A100-80GB/full_report.csv b/examples/running-mistrals/artifacts/A100-80GB/full_report.csv
deleted file mode 100644
index 01e5b19f..00000000
--- a/examples/running-mistrals/artifacts/A100-80GB/full_report.csv
+++ /dev/null
@@ -1,21 +0,0 @@
-experiment_name,backend.name,backend.version,backend._target_,backend.seed,backend.inter_op_num_threads,backend.intra_op_num_threads,backend.initial_isolation_check,backend.continous_isolation_check,backend.delete_cache,backend.no_weights,backend.device_map,backend.torch_dtype,backend.disable_grad,backend.eval_mode,backend.amp_autocast,backend.amp_dtype,backend.torch_compile,backend.bettertransformer,backend.quantization_scheme,backend.use_ddp,backend.peft_strategy,benchmark.name,benchmark._target_,benchmark.duration,benchmark.warmup_runs,benchmark.memory,benchmark.energy,benchmark.input_shapes.batch_size,benchmark.input_shapes.sequence_length,benchmark.input_shapes.num_choices,benchmark.input_shapes.feature_size,benchmark.input_shapes.nb_max_frames,benchmark.input_shapes.audio_sequence_length,benchmark.new_tokens,benchmark.can_diffuse,benchmark.can_generate,benchmark.generate_kwargs.max_new_tokens,benchmark.generate_kwargs.min_new_tokens,benchmark.generate_kwargs.do_sample,benchmark.generate_kwargs.use_cache,benchmark.generate_kwargs.pad_token_id,benchmark.generate_kwargs.num_beams,model,device,task,hub_kwargs.revision,hub_kwargs.cache_dir,hub_kwargs.force_download,hub_kwargs.local_files_only,environment.optimum_version,environment.optimum_commit,environment.transformers_version,environment.transformers_commit,environment.accelerate_version,environment.accelerate_commit,environment.diffusers_version,environment.diffusers_commit,environment.python_version,environment.system,environment.cpu,environment.cpu_count,environment.cpu_ram_mb,environment.gpus,forward.latency(s),forward.throughput(samples/s),forward.peak_memory(MB),generate.latency(s),generate.throughput(tokens/s),generate.peak_memory(MB),backend.quantization_config.llm_int8_threshold,backend.quantization_config.load_in_4bit,backend.quantization_config.bnb_4bit_compute_dtype
-fp16-batch_size(4)-sequence_length(512)-new_tokens(1000),pytorch,2.1.0+cu118,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,42,,,True,True,False,False,,float16,True,True,False,,False,False,,False,,inference,optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark,10,10,True,False,4,512,1,80,3000,16000,1000,False,True,1000,1000,False,True,0,1,mistralai/Mistral-7B-v0.1,cuda,text-generation,main,,False,False,1.13.2,,4.35.0.dev0,,0.24.0,,,,3.10.12,Linux, AMD EPYC 7742 64-Core Processor,128,540684,"['NVIDIA A100-SXM4-80GB', 'NVIDIA A100-SXM4-80GB']",0.233,17.2,17865,29.2,137.0,18509,,,
-fp16-batch_size(8)-sequence_length(512)-new_tokens(1000),pytorch,2.1.0+cu118,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,42,,,True,True,False,False,,float16,True,True,False,,False,False,,False,,inference,optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark,10,10,True,False,8,512,1,80,3000,16000,1000,False,True,1000,1000,False,True,0,1,mistralai/Mistral-7B-v0.1,cuda,text-generation,main,,False,False,1.13.2,,4.35.0.dev0,,0.24.0,,,,3.10.12,Linux, AMD EPYC 7742 64-Core Processor,128,540684,"['NVIDIA A100-SXM4-80GB', 'NVIDIA A100-SXM4-80GB']",0.467,17.1,19463,33.9,236.0,20524,,,
-fp16-batch_size(16)-sequence_length(512)-new_tokens(1000),pytorch,2.1.0+cu118,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,42,,,True,True,False,False,,float16,True,True,False,,False,False,,False,,inference,optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark,10,10,True,False,16,512,1,80,3000,16000,1000,False,True,1000,1000,False,True,0,1,mistralai/Mistral-7B-v0.1,cuda,text-generation,main,,False,False,1.13.2,,4.35.0.dev0,,0.24.0,,,,3.10.12,Linux, AMD EPYC 7742 64-Core Processor,128,540684,"['NVIDIA A100-SXM4-80GB', 'NVIDIA A100-SXM4-80GB']",0.937,17.1,22458,50.1,319.0,24393,,,
-bnb-batch_size(16)-sequence_length(512)-new_tokens(1000),pytorch,2.1.0+cu118,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,42,,,True,True,False,False,,float16,True,True,False,,False,False,bnb,False,,inference,optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark,10,10,True,False,16,512,1,80,3000,16000,1000,False,True,1000,1000,False,True,0,1,mistralai/Mistral-7B-v0.1,cuda,text-generation,main,,False,False,1.13.2,,4.35.0.dev0,,0.24.0,,,,3.10.12,Linux, AMD EPYC 7742 64-Core Processor,128,540684,"['NVIDIA A100-SXM4-80GB', 'NVIDIA A100-SXM4-80GB']",0.94,17.0,13155,76.0,211.0,15254,0.0,True,float16
-fp16-batch_size(2)-sequence_length(512)-new_tokens(1000),pytorch,2.1.0+cu118,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,42,,,True,True,False,False,,float16,True,True,False,,False,False,,False,,inference,optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark,10,10,True,False,2,512,1,80,3000,16000,1000,False,True,1000,1000,False,True,0,1,mistralai/Mistral-7B-v0.1,cuda,text-generation,main,,False,False,1.13.2,,4.35.0.dev0,,0.24.0,,,,3.10.12,Linux, AMD EPYC 7742 64-Core Processor,128,540684,"['NVIDIA A100-SXM4-80GB', 'NVIDIA A100-SXM4-80GB']",0.118,16.9,17158,29.4,68.0,17523,,,
-gptq-batch_size(16)-sequence_length(512)-new_tokens(1000),pytorch,2.1.0+cu118,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,42,,,True,True,False,False,,float16,True,True,False,,False,False,,False,,inference,optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark,10,10,True,False,16,512,1,80,3000,16000,1000,False,True,1000,1000,False,True,0,1,TheBloke/Mistral-7B-v0.1-GPTQ,cuda,text-generation,main,,False,False,1.13.2,,4.34.1,,0.24.0,,,,3.10.12,Linux, AMD EPYC 7742 64-Core Processor,128,540684,"['NVIDIA A100-SXM4-80GB', 'NVIDIA A100-SXM4-80GB']",0.963,16.6,13314,66.1,242.0,15235,,,
-bnb-batch_size(8)-sequence_length(512)-new_tokens(1000),pytorch,2.1.0+cu118,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,42,,,True,True,False,False,,float16,True,True,False,,False,False,bnb,False,,inference,optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark,10,10,True,False,8,512,1,80,3000,16000,1000,False,True,1000,1000,False,True,0,1,mistralai/Mistral-7B-v0.1,cuda,text-generation,main,,False,False,1.13.2,,4.35.0.dev0,,0.24.0,,,,3.10.12,Linux, AMD EPYC 7742 64-Core Processor,128,540684,"['NVIDIA A100-SXM4-80GB', 'NVIDIA A100-SXM4-80GB']",0.483,16.6,10261,59.9,134.0,11330,0.0,True,float16
-gptq-batch_size(8)-sequence_length(512)-new_tokens(1000),pytorch,2.1.0+cu118,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,42,,,True,True,False,False,,float16,True,True,False,,False,False,,False,,inference,optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark,10,10,True,False,8,512,1,80,3000,16000,1000,False,True,1000,1000,False,True,0,1,TheBloke/Mistral-7B-v0.1-GPTQ,cuda,text-generation,main,,False,False,1.13.2,,4.34.1,,0.24.0,,,,3.10.12,Linux, AMD EPYC 7742 64-Core Processor,128,540684,"['NVIDIA A100-SXM4-80GB', 'NVIDIA A100-SXM4-80GB']",0.486,16.5,10303,48.9,164.0,11280,,,
-fp16-batch_size(1)-sequence_length(512)-new_tokens(1000),pytorch,2.1.0+cu118,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,42,,,True,True,False,False,,float16,True,True,False,,False,False,,False,,inference,optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark,10,10,True,False,1,512,1,80,3000,16000,1000,False,True,1000,1000,False,True,0,1,mistralai/Mistral-7B-v0.1,cuda,text-generation,main,,False,False,1.13.2,,4.35.0.dev0,,0.24.0,,,,3.10.12,Linux, AMD EPYC 7742 64-Core Processor,128,540684,"['NVIDIA A100-SXM4-80GB', 'NVIDIA A100-SXM4-80GB']",0.0633,15.8,16907,28.3,35.3,17066,,,
-bnb-batch_size(4)-sequence_length(512)-new_tokens(1000),pytorch,2.1.0+cu118,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,42,,,True,True,False,False,,float16,True,True,False,,False,False,bnb,False,,inference,optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark,10,10,True,False,4,512,1,80,3000,16000,1000,False,True,1000,1000,False,True,0,1,mistralai/Mistral-7B-v0.1,cuda,text-generation,main,,False,False,1.13.2,,4.35.0.dev0,,0.24.0,,,,3.10.12,Linux, AMD EPYC 7742 64-Core Processor,128,540684,"['NVIDIA A100-SXM4-80GB', 'NVIDIA A100-SXM4-80GB']",0.254,15.7,8797,58.4,68.5,9317,0.0,True,float16
-gptq-batch_size(4)-sequence_length(512)-new_tokens(1000),pytorch,2.1.0+cu118,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,42,,,True,True,False,False,,float16,True,True,False,,False,False,,False,,inference,optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark,10,10,True,False,4,512,1,80,3000,16000,1000,False,True,1000,1000,False,True,0,1,TheBloke/Mistral-7B-v0.1-GPTQ,cuda,text-generation,main,,False,False,1.13.2,,4.34.1,,0.24.0,,,,3.10.12,Linux, AMD EPYC 7742 64-Core Processor,128,540684,"['NVIDIA A100-SXM4-80GB', 'NVIDIA A100-SXM4-80GB']",0.261,15.3,8747,36.0,111.0,9239,,,
-bnb-batch_size(2)-sequence_length(512)-new_tokens(1000),pytorch,2.1.0+cu118,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,42,,,True,True,False,False,,float16,True,True,False,,False,False,bnb,False,,inference,optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark,10,10,True,False,2,512,1,80,3000,16000,1000,False,True,1000,1000,False,True,0,1,mistralai/Mistral-7B-v0.1,cuda,text-generation,main,,False,False,1.13.2,,4.35.0.dev0,,0.24.0,,,,3.10.12,Linux, AMD EPYC 7742 64-Core Processor,128,540684,"['NVIDIA A100-SXM4-80GB', 'NVIDIA A100-SXM4-80GB']",0.141,14.2,8166,57.2,35.0,8401,0.0,True,float16
-gptq-batch_size(2)-sequence_length(512)-new_tokens(1000),pytorch,2.1.0+cu118,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,42,,,True,True,False,False,,float16,True,True,False,,False,False,,False,,inference,optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark,10,10,True,False,2,512,1,80,3000,16000,1000,False,True,1000,1000,False,True,0,1,TheBloke/Mistral-7B-v0.1-GPTQ,cuda,text-generation,main,,False,False,1.13.2,,4.34.1,,0.24.0,,,,3.10.12,Linux, AMD EPYC 7742 64-Core Processor,128,540684,"['NVIDIA A100-SXM4-80GB', 'NVIDIA A100-SXM4-80GB']",0.142,14.1,7918,31.8,62.9,8279,,,
-gptq-batch_size(1)-sequence_length(512)-new_tokens(1000),pytorch,2.1.0+cu118,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,42,,,True,True,False,False,,float16,True,True,False,,False,False,,False,,inference,optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark,10,10,True,False,1,512,1,80,3000,16000,1000,False,True,1000,1000,False,True,0,1,TheBloke/Mistral-7B-v0.1-GPTQ,cuda,text-generation,main,,False,False,1.13.2,,4.34.1,,0.24.0,,,,3.10.12,Linux, AMD EPYC 7742 64-Core Processor,128,540684,"['NVIDIA A100-SXM4-80GB', 'NVIDIA A100-SXM4-80GB']",0.0828,12.1,7669,31.5,31.7,7824,,,
-bnb-batch_size(1)-sequence_length(512)-new_tokens(1000),pytorch,2.1.0+cu118,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,42,,,True,True,False,False,,float16,True,True,False,,False,False,bnb,False,,inference,optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark,10,10,True,False,1,512,1,80,3000,16000,1000,False,True,1000,1000,False,True,0,1,mistralai/Mistral-7B-v0.1,cuda,text-generation,main,,False,False,1.13.2,,4.35.0.dev0,,0.24.0,,,,3.10.12,Linux, AMD EPYC 7742 64-Core Processor,128,540684,"['NVIDIA A100-SXM4-80GB', 'NVIDIA A100-SXM4-80GB']",0.0865,11.6,7822,43.4,23.0,7960,0.0,True,float16
-awq-batch_size(16)-sequence_length(512)-new_tokens(1000),pytorch,2.1.0+cu118,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,42,,,True,True,False,False,,float16,True,True,False,,False,False,,False,,inference,optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark,10,10,True,False,16,512,1,80,3000,16000,1000,False,True,1000,1000,False,True,0,1,ybelkada/test-mistral-7b-v0.1-awq,cuda,text-generation,main,,False,False,1.13.2,,4.35.0.dev0,,0.24.0,,,,3.10.12,Linux, AMD EPYC 7742 64-Core Processor,128,540684,"['NVIDIA A100-SXM4-80GB', 'NVIDIA A100-SXM4-80GB']",2.52,6.35,14486,51.3,312.0,15080,,,
-awq-batch_size(8)-sequence_length(512)-new_tokens(1000),pytorch,2.1.0+cu118,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,42,,,True,True,False,False,,float16,True,True,False,,False,False,,False,,inference,optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark,10,10,True,False,8,512,1,80,3000,16000,1000,False,True,1000,1000,False,True,0,1,ybelkada/test-mistral-7b-v0.1-awq,cuda,text-generation,main,,False,False,1.13.2,,4.35.0.dev0,,0.24.0,,,,3.10.12,Linux, AMD EPYC 7742 64-Core Processor,128,540684,"['NVIDIA A100-SXM4-80GB', 'NVIDIA A100-SXM4-80GB']",1.27,6.3,10787,34.9,229.0,11175,,,
-awq-batch_size(4)-sequence_length(512)-new_tokens(1000),pytorch,2.1.0+cu118,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,42,,,True,True,False,False,,float16,True,True,False,,False,False,,False,,inference,optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark,10,10,True,False,4,512,1,80,3000,16000,1000,False,True,1000,1000,False,True,0,1,ybelkada/test-mistral-7b-v0.1-awq,cuda,text-generation,main,,False,False,1.13.2,,4.35.0.dev0,,0.24.0,,,,3.10.12,Linux, AMD EPYC 7742 64-Core Processor,128,540684,"['NVIDIA A100-SXM4-80GB', 'NVIDIA A100-SXM4-80GB']",0.638,6.27,8958,32.8,122.0,9101,,,
-awq-batch_size(2)-sequence_length(512)-new_tokens(1000),pytorch,2.1.0+cu118,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,42,,,True,True,False,False,,float16,True,True,False,,False,False,,False,,inference,optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark,10,10,True,False,2,512,1,80,3000,16000,1000,False,True,1000,1000,False,True,0,1,ybelkada/test-mistral-7b-v0.1-awq,cuda,text-generation,main,,False,False,1.13.2,,4.35.0.dev0,,0.24.0,,,,3.10.12,Linux, AMD EPYC 7742 64-Core Processor,128,540684,"['NVIDIA A100-SXM4-80GB', 'NVIDIA A100-SXM4-80GB']",0.321,6.23,7824,31.8,62.9,8136,,,
-awq-batch_size(1)-sequence_length(512)-new_tokens(1000),pytorch,2.1.0+cu118,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,42,,,True,True,False,False,,float16,True,True,False,,False,False,,False,,inference,optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark,10,10,True,False,1,512,1,80,3000,16000,1000,False,True,1000,1000,False,True,0,1,ybelkada/test-mistral-7b-v0.1-awq,cuda,text-generation,main,,False,False,1.13.2,,4.35.0.dev0,,0.24.0,,,,3.10.12,Linux, AMD EPYC 7742 64-Core Processor,128,540684,"['NVIDIA A100-SXM4-80GB', 'NVIDIA A100-SXM4-80GB']",0.164,6.1,7589,32.1,31.2,7660,,,
diff --git a/examples/running-mistrals/artifacts/A100-80GB/generate_memory_plot.png b/examples/running-mistrals/artifacts/A100-80GB/generate_memory_plot.png
deleted file mode 100644
index 74a91ef85b175756eb245901d66e37ca6a8c96dd..0000000000000000000000000000000000000000
[GIT binary patch data omitted]
ztsL5W_q+>?ekal4Aiu+Tdv0K8Xse|9G+<|E$F%7P9y@ZanY~l1tL-z*!R%_#wqa3YDm9p0cMcnu9-GeuIYvnXGHGQ~rl~|)>lvXANmEd`P*4y&OlnXwT!_2Ig z?Tsgesod^z*E344${c2oFrVBI4GhA7vG;gn#>2%-?ngU$2ZEcMn>*{{l<>be&ok$h zl@*mDQ=;T&L38EnRnzBZ>*sAtva$R`Mn*;myLC`Ueg>zVOLsj0;#BaNwgylNjjfsW>@9kQe&Yts=8Gva~9rDk>hvInUjW z;xcO-t8{gSL&xj5MxLphgY~NWoA^Ci+MBm;H^V2ZqOF4OOHXb87M{;*qR<;EG`UAZ zlc(Q&!{hwa5h*j~=H`|&!&epKRv2VV!ziZVolRq7agiKGx>i=dq2?a$SD#BODG^&$ z9nQTBD$)6gwN|;WMd37oA#}XL;h!A`&rI;)=}wQ^`RTq?4DWTi>ZATVy=Olh*R{|d zT3Qsa{BtLV+fq@SABmaNa$WZ==h`BlNJ!xL6Em@AR_iw4z+sQzw`W*fTzsIC`v&eh zKJm$i8$?76D}x1m9w)qTcN7JN?NbH6g*Ynrq1NlHjg(!ye0jT#Q3QzXN0vFTpaYYLoA{6JCn^XCNt0fDN6sjJ22gLUb0$x`+KV${PWR`avr+Er`& z^INm6pUC;XBEJ*PPo8Nn1G21~qydY^FX32@I*zwzYnD4LWuz_q$+-F9+MTIb$Fa$U zuH-Du@;>u;_#~s=?=PV}T~b!=UmknU#Kbf^)!>gJBq6aq*w9fJ$Rhm)m)5j4T7Fx= z;X!*e&-|YZ1-FCA54gA4y}i9J?zjkgVd3e*gQ+-PD@Trw-O?ZInw|Cec+Zadk2g2> z_l-JY_;~sFHsK-*oVN`Ngs(7bS4pX;+}asr}|VvFG@3jym16fAn(=;&x6`tNRzxcihkEHW|!)dIJa8LC<3 zrYbHe$>BabI~&;on>-ahli|{(OQ{wH1_mu~>luY+{T+oCP)BhR;0Z9o%c-kS(_Z{B zE&BT9OCp_LlD?4 zx39}Jc#tq_7!N6_sHBhBG~MwlFE5u@RQv{u>jt&bFElh1`bu>~rRQRI3QocaG}Baz zH@XG}DfLZF+S>!3mOtaG`R$h!(WhMQmPyIUM2`-q{8^-im2$MusFuJx0&Po%`ucQO zUXK@h(t^Im$6F~W?ar4bNx$4zh2II4mH~i5nW1M(ON-R{J}h-3+*if<*|8xMUWMWhmix;Q zZfRLrqvO5R5x_pMNo+T#M47cJe!-DNY9HW>7-7#bb8T6{-H<_xzPA@H%d4rizy)IX ze)@Een%WBvXyl$F{4FTZPZpVG1q9;M4GD=Te10n_fpCznS?MmDy$^RJ8rA$odh0jk4R8QeVz!%*4JlGpAPKArTRq z`;lG2*~7z`U|wEcg6r3H9LUj1t$IO%hQ;kIEq<3N-9GOw_Xo$uS`1Y|sa&hrmN_{& z$v&>kD=s#KOHb`54R|jeeBWlW?h>@%S4VMBKR&`Ej&he}c#nd@%i*-~ir;a~ZB$R$ zxc58JmoHzQiizc$^xTJP{M69!KDF2#Hi9rdVDWZ(?F%ObsaAcdic(4t`5b zjXHfN3}xRhX(Pw;)Zg=TS(|HZzs~3C!NEaEaUrCI9`sFD8LGL zfE5f340KjF+d>JpRcP`J3YrJ*pbPu7H}%V*t>@RTU!4{bKa{Rl9XmlIdSE&q|MFAD z$=J=CWep-g@*qPSE*uw_ml?nhJGz zc-R&%91l;x=Iq3oP?W8*>1(`~AJx^cw$1(u=WWx(PFU!qjO0Q!CYGv$yT~7Udis@- zvfu`Pk^*RgfC>(lGIO%agZ$!ye0_bR&-iX8F#~xGh)PbT*3#0dZ)kYQm{}^aJs<4Z zfI3$#FuYDmsy6L${!nP%RIigkMvCYd0l(5-Lzze02mhdz)K{03U?2SafKMwZ7&};C z)B_*^z!I3Junw0pv2oW`FQ=_jHE#l*z05FA%q z`&L=oJrpo_LeHY_$B#(g>phyDo%IP1Cjlm?59DHJWw0lySSh8~v}``kRi(;}k5)FW zVvds!niVdkPz=17-MHmvtvG2K|907(+fuPNQ~RYouu7;88@ae2X?{UrYsm1B}_STjz5UNkNIk12`DxuXHpqY5I=e;%S&qfg6tFa1aZy%q?(B8h^ z`TrqY09>FV$PSb{vQ)b6S6QC+q17+E($mv(;F`)wKWK#xeww=PywFJnHLV3e46>7? 
zhN-EkGkYtCG~6tP3MbRzJ(2*=7QKFb#@Ga4cnbOsg3)kEIe!&aPKi$m_bl3(59Gc@ zz!ouaV@Iq2w9e|uy#Pw#xZgs@xIkbV1Mj%DdjLEAGh{Sj%0NL3JJffv;PUMD1Pu+% zXy+!S^UQU_ju>FKekLx_ewkhU!7(u!7C#aJ|6)c^j2Qz_@jThQ{PR6tX8$3Dq@-kf z_Ee5)!F%kR_am~eU=k7%Dwd;N(D4`El$Ms7dbag)k&%)8uv@Il(`zbq+SCp3BoH~= z_o(y6{zM`8V~CUeg|Kj|h=%_s1cn3h{GgR%G*$l*s)iEd`L_1f+S(d|tYAm^hJ=Ke z?wUo7&ANy@d17O2JyF!Jm1*<+n&h6U4Na1%E0L(q=2S!Pl2T0lxtj>m-2e(0z|-g* zQ;?U}-7NiPmbU}&A}u}L7hmXL;`_0Kot=Jb7*lC^IfvVUbq~6N>^dP~J)9_&eEsH^ zvQj4>}mNk9BHpt`T4plof<;Jy6Y`i8@{w$fJf$tJ9aI3b#se`SNAJZ{?)( zC*OH>WTUwQIy(Vja=|SaY)sT5R08m$?Z(7o5fPExyu3;10|+aE>SYT{F!o~ntgTyb z#CblJS3rP6r|x2^bo6_PlqmpB6>Hc7y}hBoemzI3CbW%mhgEj6AZI}(C8dg;ZpnW1 z{z}2`e2-HXu>jJB- zAUR(hv}=T9TGG{*m6ctGL$%bKY4iH^kHn1y=$k;eYoS^gk5^Ti#LPg467I3P3uKQq zQwcq_m=XRFQFrdFn+1YUlpY8+?j<}#4M{F*e7De%lIeE3UNkNr=^5I&;=qLaq=MS`TPVclT zUD!NMoB*&K!bAGi+NyWBHOn*l0S7s%h0?8lW{fEvAHnBoV8s8qIBih&z=h53brs<89335%A0&Y)-du{C=1Wz)GoW(asNz zMt4KGpM{%Lx&{~>efIGtZOAhU=ck)HJBrY42!TBt{7IK<0Y)oj&yZ?0P+}E7=<2vK zAUC~pQ@~;61)O0bxKq)z9(vd)a)_vB0L4C)|79A}VF55H8eU$Nypj@S4Nc8Z(6;1} zTO)j=T5KMfp(QTZ$pblA>7`q3<}S?mhZFBpQi$w0hfxQ43V;JYv1B>qq6F3}6r7jQB~l8>8mpB=@oxb)Mq5-5 z85uTzrICPVPXwi};pfjM&U2AQA+6yo(Y+aq#~VhlJLFNb8EJ7p3@fAd%016LQnaf* z{AAJ;)ZjAU{Craz1(_1*B`;1+Pl?bKhtHEn&0($FfStQ;a+l zO_8T(W@c^yZi^=8wXVrpn{5pbZK80Mvaz>+3HUA)4jT;{n;gh}DeQ1~3Vwk4nAiQo z-b@s!G8$;@YOS*E4gn%{b@hhY+Q*BdS=TozF8=Le6y2r3|F+YV*+__E7Jm z;yuqIfe5JW#8EwYZw=L&lrrpwh;P)dTcD8}KNUJV&;!Y;9ZC@C4|);b;Rt98BEDi1 z(NsZqSLgQKIUj%jH*FWC#hVPOd7 z6L4J9?@W07=rCUlsa_yceBTs=(xMAR0pzUd$w`bF05Fk)uH2vmKub0J9Vh+tsrO*_ zbLa{*Ko5biU+7GerF7X{Y5*bl&6fHP&^o1J`FqVLw=$bsTI#yGzCbzr>$B7|iFAwy zOi+D>7f>2Ne2Q~a@-RWM#Kgpm;4r%8?ChMHzp!A8dEtWeB>q33zhz}Tn3|rJwtwg4 z)xLL|lvD!hElN~W^zRW03=JiMPM^x}37q}8l2X`UIAZG5qf;MKV$w4*>K*MYB9;Bk z8&uEU=5$j}zJVlcnU4Vh1^{5m`CfakSD#a$lDmwF&rm&P$z4wE{IE>mSJ)nI8AwP< znq>t?M5ySw&i{`8JyK?Su9o)KH6m5vvQjc)POQ01>`A5wL~^v2t_XJ z5d#=|VsWt<)IG{CK&_`(E4L_YEHc8n8F!gfI1 z0y$N}!Y&lbpuD_%TO*6#P@;T>@mK{Pm@9argkY#mg(<9KAV@1qqwFgH6&==FQhIyj zckVz3m<6E?c^S5Cca|y#(6~r8JuJ}Wk|e{KQ6STGmf0FncpT^uKTvtoWjuorMEIms zMmt&IQ%*RYlfX3e0EU{6SH-@}RJsm4-n13WgGg2#OhMPZZ*XWIs1^ExKtbWUl2<+Q z8!01SzrM)qMfChGWBvREHIUn3XU7OQ1_JDA4n<7BDY4%DdM^zF_C`PPsNXSs$$;IT zgH<61i``vpp*}lk6#o*iLHnNV(jR&Q=-No91^eau$#(pCmd)J-fwj^nUB;Xe%YA6G zm@7wsjTF=hjT7Pg{4FF_)u)Cw+VBZP32MV2Y;A3Cha#<%ho^t)6uT8)y`v7gld{mB))rx3AX_%%9FGnt`}FWw6{))U}F{NQ`CsD;{SC) zz0p60z_|pTfm4?LR^e+uCu&_S5)^ z`hB)}vLEy%G}v!S-W;7VF2K`ua(1?vk9C|{T>MzHyGu#@8_b}ZHy9P0jbzH`n|JO= zgPcl5MYS?o9uB4n;yq+#&FDc-!0gcMnC>i-n(8pxnxun!<3Ldf23mF+>v7Ki37rHoO1vLy*9#|av{({8V_w8Aig zhf&~jE`2|agS`fR%{15-JQmAQv;DAW4^J1%>-)?BRg?QP z6{LPzdV}YtfZ3o4m=+D0i$B!X)*f@7zXN9M9e%qBxYG)N5s3Chyt)tg1XTeBZpCh& zIwE?yZ8ngALi?$-ly|7e%<<>x4cJ&_0?~0zbG+2iQ}Vui!8Ywb%xL#%Ynt*rNc%2= zIzhZRT#_>=asTvaS8uY;CnPLPt?4Ph$2%tl5J zv$(ZqMpmt~*@ZGM2fE4Ix%5r zSYnaof&cB2o7isxR5)DmEcb@QHkCV}aYXRh-iIZ;d;k7>FiV4cu98Hx^z`)bfza06 zJd2;Dcu`PRRu)urK-UkCR|+aX`vG1W3gyQO&?WS)UjXcvDiH)=s%#QjH}=hryD24? 
zMu1~FTkzT{+y*2MkPV9xj0@wJX&DJta2NW_CZ=5Z?xmY8X#db=x1CZo%e7x-X zSS7TblWEHH34p}xp*h;sW1PkKuRvl!A-@*cLnx_up;)L*hHVj#U0nDoraO+g#$8G8E&ZBTccAY1s*69ip%(~B&ExC%CHq04 znVF6)lyA}E)>yOG^|x_5R?9=iIU}0OC!>nb$w?4+z%@%4mMgf489gmO@Fhab!zLr6 ztv!L}&$!oh46U6_+gddy1>A9*f3qS}X9?ZuegZ`MNfY+|HzTQZuevGTk9P`yiZp)be|^Bu8yqI?2L;2dnM9R z-3=p*wmT^sO=VIA`Po%6@UDApL^`t_TYTg^UDc zMiMa%)qtPF&BYk%3`_ohlP*)--{^4i=Gh0ywJDCiGS5Y|WT6#z%z8e-8sM(4yp4W5 z{Dz4L@A2O>j7>BhlLD{x(d)yADktiBjxA{eQZq)ZB3tSQHNg`yYrKmF7Ne9#(j^(P26}+;&um8f8$BsL|mg`zqF4Cb8<_QJV(q(mON(6C~b6;U-{;Kd=%_3p^e8=&)u+2CAj!q|sBKR-RWplF^yHVcx#N>3!e|PfonL zuf0>H)s%c{=pJeer=#i7OtsEI=0s1W*0tLpALJXfqF6}-5MkmIuzi4UBb^=~Gyn*_ z04nbZXe89&Xn_yGRPonH?Y6yoMeaN7`P;^%J};G~H|#pE6ThH1aw&TK zm?_i7m+T*m4W*4H9_u9%-_znT%mk&(hMu0AC^*B2g^hKM{JEMMDM%PdDh7Ds=9uex zp3!d#D8x+w!Wa8<;s-rOMn^%~zk|9|B;4eiSsJ47TwjC%xnCihFH_wuk&)`9 z3$61DS2whN74chq`@80u32=6;eoR`UEWm0l&smY;@p&WqBrcx5$GnfhF;IZ}+d zoZ~!xa=gu4XQab(Rz>plv8`S~nbklp29QM*IE_GIbwR~cQGFT62xN^IC=m+O9}>`X z11W`NfVD~6NAa-WXZZN|fMC1{@;SS4X9C%}7lFu#VMpqG9zC;9yN#G`hkiupr2}5p zyEXnVJY|P|4D(1`Xt0pROh}h|@qz#e1fjxNwC5mO^9m|DAd-H?w51DFpfR8{O08=j zpw2*7zlDp1bM@*V=)(%BQnd{YK1OlQsc|lFy8%s`R|n1c_4RElz8uUvD9EQ^ij~f^ zIry<+nG!Z9_$_9gvTmT|2nSRXdhfJcW+6Eu=xpM!&cYC4YEKRE_^fr&WU>NVOP zhcwX5tTZ`Q0R>2jo7{GlO(~%1_!V}b_(}CrDtl}I(UYNaj=Q;TT-PqgwsY;>dwTjA zH4wpfRI|hT!mK5SV$TUz!oTLn8p$fi#swxC@wKlKfQ}+sB+?-Ol>6`ZfYpcs@nds$ z*8mJVq(9kr>H-ng7Ns#jjtFdu;``se#;NNw6!fccWGNipF!vHDP3-@*sKm((5yOyW z<(pjpo4^7sYHNW2*mDVqy5GO?V`Uoc|8O6k;ZTu^{HOmMw%jluycC*g!@CUKXV5Db zC3{q2FKWglnaicr&+o}Yv_<<7VSsppml{@hzc$X55<1*V{i3a5%3#Z3jJ>=6>ggL2 z_Y;j=rPdqfQ+@yZ%`qMX{~rxV4;m0UX0oQKISf8s?4nb5rB4BzCQI>u1C0|R4jAgw ztxvRfnk7S&R0ETXGDnd!_a+EAbC%A3-^Hm46&xyqzogFjVyogEqVeX|w@{tMF1mDg zjB1I*m!grHa;59rJQrdncG=ANvq%p8ToALVCpg|E@$he|%_vb#cR_Gc%DJ4?M+ zy=vcpn1FTV6JdeHgXUa4zUkD-rkwuzHwG>ku#Qqf}~0?5cHl*WfqWcohE%qK!O3LfdQut zge1^a=a5ncF})Ze_b4z3qGo3FK?T<<&H?}KIRdYx7D(NqHt4&ZwU5Zjf(`KtKY*ncm0`xuQ9U)7<6ZAu(ItYWpsqfjE}81YPU` z))7Sp_%0ZNk;MUb1j0Uj!UYb8`v{!wh2BgOAjcRj{dv2v`Ir7=JUlF6?L}sSTbtNhuCW~ZO1Yt7HZr_k%aN@9aBRc0ntpi)ymSdq-^8NH= z0l1Xg-L-dIExh8lyn#9Iq$Or$eS%;@Pqn8f=tZVkNR$CmYE25Vf(FH>K(-K(b79w| z*mAT5gl0!DSt1~Ch!RV_j*pLDuW^ zEN6t&Ffk!vc>eN$D0fqMPUDv^%H|BVovJrS%G`A4-mYqN4gNAE7y0w;@r4PXP9OdK z_4>0lxx$PL46Z{a5guJ8n(`R@^VXel5L2mnc*1$c-FgU7%XKk)wt(kTRgc}=-IrIK z@S2S|**(wP?%utNWXJ*o1Cev^fx^WXJi3JbWfKSuq{+r(J<91vN*P#4Ki&67jg*vv z5f6`|1xb%WIP5+f8ygU=srB{Wa6QgDUC)*i(*P4cw$#Cvd)7TbZl!-bsrcHTL3!G` zsGqf>fK~@1ZpUmCH=qa#Lj5kM3MvLNaY&#O_Z zq)6}sYAqo#aZ;}-2$uJ_xO%f>Oida7yC?-e$=bZ7h9*WApIyG%r2T1r^@0^gL6g*p z=IQc@6BxqZz%;XV+H-M^uUV`urkY%^wbR=~dBaux+AOgh(VYT*S+Eh=c zL;gEDqk$^z#J6;1mj&OwQmrf@te4ljlxmj(1@0sq7*GlDpNfhC^H4_1Knf2|JBYwY zMz!mob1=lGKzqh9M))oSEDY`MQd0vsqeMX_1ib8Ai(&2_uX|v7n-3QUKzjD1L8~u& zm$t8K`ubFCVdb%EB1!WQdiGU=7m9VLJBQ7ctMbN`J-dy zLlDrM5+j1J7eXWb{g3{H;Yw#!CWBJ41EG9JKe!#g(hPVXTwM}GK>{igV!`>Aqx>Kn zz=1)5fM9N60qyTDR0X+S_R0NWn0RMsq^k%4T;oyQ~$sA?V;T+N&68B0sOH(DYULa!Nrm?_$ zHQgNGU5NYmajp2WcX({L=y85gagh_&F-U}YklogDUF9m+Q10#BGqA7-2nfIfZJ79u z86-&|3mARsnaOI>eNW42@=_H9ia>)NnfnxZ2}oH!)V@mbMMc%O0ecjCK!LB%t&9JQ zOrJ7k?4yuMi^;10i=bc?jOA&FztI2-2T>8#2?}5VKQsmicjJK6iZm@?yl<1}kON#Y z?hy?(Shr$7B&Wd$fuGbuGSLJy(RjD5k0#e1y}3-`^ae6qXCS)jagvrz2MP>84yNex z6H`eKaUpl5F}H{^4W~%#XxfpDYaU^a*{v*n!Ek<{YxI&u(s|YL{n`pDo>NWlB`oBIe_dyxph(;Z$9Uq?Itc6+W6++4stZ1i=+RN>!bx1E_soc2%_-|67ZmM7`ARdK0FXkcXLXvgJ54P$bNp6m4Xx$6g+mWslZHN3BJa} zd;-T%RTb=fh!?Y#>oR(m7J-NE_L?|)y2WZe?s=w0z!Qs2XEW`aO6{(nNWZk+E#{hYF-JBD(Q4eF2;Bfj& zT;t^yUlhwb0oMs#TIB;DW4aqCGB%RftL=8}8-?35aSr<%r+MpabEGz1n(nq2R~cmIAjfG;&Z88}b~F$T^ilGd{cRSd!_KY!RSBM1nI 
zirMyJ*FjZy_Alzz22tR-snRU#_5X#rJA)Gf*LWB7vAL;8FMu8ukrqpUTUe{>(;|jnsbhtal_;nhZ%ZBL@&^P7polG%Z4gL|~;hA-!DL7s#d8h>KJBLuv!?4Jf3iUl@({H70x0BPEPl9C{n)I%Q)X5&7teA%5G|%OO^}o) zN8j%{C8E(=L>$aaNB0jsJ+`p0u>Twz;cRzL&uwn=ThN8fhYGPPtLD^4K?_ehnT@Z{`TSQfT*7YWB99Wn|yv;ZOcs*Lvd0~71ZcWo;xb-RSzZ@Vc-8OuC;6{845Qe?@DyiPbG&PCP|@4mRO459C2UZUs?@1l(eQ3U2Rz3tcXnM-hpPfZBmWRc`_D+rV#OEVacT zn0@(Ut`HfD0}=%rQ|;WU{^9q>fF7a|iKw(4&J27IcLV5|;dL33&Z+EXj6{yb_B0?m z5qRQjtQO%7$WuLi@^{Xv_h86pty)|+9Mofiuk35TB8*$ zOCEj4uf79C<~c>?+SLAZ>?M^d(q}I&JFXONEo)to6ZfD-{Eu%5r6HP4%=z@$Zd`$F zC%OueUL8>rjF~pL=JS3X$R-i_Bb91F!sQnjD27&o@?$x~4Pmgly1KR{i?NE^5SD}^ zqg#9mG4@0tB!ap-C+Q9l)%EA{JVSas8($`m7r={hVOPl}9N0z;9 z%{D&vZGRiUPuY%Ft0D%XPg(HhSC7|UIT_(&dy4p)ePsmCo_Dx}yM`V{>osJ{_GeoDT4X-xb*(K0`cbuw)@}Z?Ja;Q$WZ7g^QqB--ECR zs&$@c+mvEK6w)>}Y})R-M2MNK0%r(xhm3F!Y@5tqZ7hbvlUpAn9Vw)fbHBajy2kP> z)&#pa4z2M5S!Z9edl(zUpUW)oIJ@qBF7t4&yjX*%I;VT;d+XytV5cF|2|grR|G-7T z58y>vC?yz3WDP8l`S|k_S}5rM4m}MN7~uyA?%hH`VwwzMoHL+KA;cne`UG};Nd1r^eJ+~lHx(#=O1cLpC>FQVOB2;VOm~l6e4MvB^gc1!Huj|OFydeQPvbbRK z8E_&&kr?JHLHVt1zV6B<>FR3EQ}#4Uw}toP{Zw-(hRU(2qj--adVE1ej~K`(fy*~o zW=s3zJuVe2bb2}+QY;}qBmz-aNUiFD`wmDmvgP#T#25y>P+%#fNJn!cLDiZTqq4Yf?o*P z*4p9W;X-LSLdZe^BSjVfyjWx~1Cr8CdxJ)Aw@|z`Q%|9a1AGq}TpdHAZ)W|>z#hL} zssu_7h!e?eHCPZ%DJ9V1fLEj}qK!k_{aX63)TLfdFbDGW$Q`@Cg-GGw7XF>=2T^AC zBE#4fKiwnclpY=X3&(kikn+=Z+q?w37sin4v&_NGc7y0H60m?nh(ymIBK2#c)(Zm& zM=h`suv;MhukUcl8Zv_j&2R&DWr+V4g|G@Z7Z6W>tF|6Qu!{!YXTRWfDw{jf+fPjY zDB&6_gC=Kfgs-tO^Sy*1X%fV;`mAWKC#y?-0co@{`(u)gfKbT-RCvskXq$$C%OT<; zKfP!PA~?g?2%ICx?XAIqVi1yeEEYW;j>ymLXvWF zUj{i@Aui1f=^eiIrF2=5KSf`=+4tfkjBNj^5Ap>-0y_s)uK%fx1@Q>9`00dTXki2a zUSRgLHTDD-50@n7=RHo5pWLZArE6$V>FshV12i83Q(UH#_$?PEV4%-8Abm#Mqa z__sekjFE7${F2d@P-Li*w$;f~S}9OW8u?lEO4mVp$nvKSVYdU;akp(eaB;5!6NY(A zd>$Si05f8svH%HuoY~u_>`E325fQ+;&24S|2xA&{{ zy4;haEesXU>;w8ZRk<5uTi9FDb!PAm%R zqIV5XFMM3o5`gh~xT*(gJS+Hd@(? z_1I<_q%2WIMM8*_&BH3~EZA)KOka^x&cEt680YjB^v9&YZ~uyaJjS3E@9ZJY6Qdap zmE|2}yf+BVg`j}}P#GZ1n)a7rXFSlVh=6Um1+k((qGSygEx&)~43XA}a2B?fi9Xd* zeP_|1=T8`_1W;vKzZGx_BI&jFt0E0@BtM%J1*uBEfcD+X5B8V)gFH@;lSY}$C$P?_Qgvtnc4;cwY1@yWz`Bw07Eox$QwV9&Itfwj$Z zpWR6R*$+-fc18h5zTZ1DYiB*-8T_O_g2R7;>?p48=!)$Q0kIDIc+8HD4upi*Ep#H< zzOJsWN}=&}a8r=L83Z_Ug@)+833yiicbH(Yd*ma6Xnx!{(*11^)m!B!+bQH!0s1}A0R)Vnf#L7xrzqV&Pq%Hh|g76`U3F)3VWMmI)_9N4ErnIcA zm7~ylyz;T`*GY@~j(kC_Wew?y8%V&wt$h7~zzr5_e!<_0za=h#CIC(WfE?M1*azBd z&#q8e+V{s`8A7#&QLI2D3=diDPYBxs3*bFO&+y(L6dcK_f{$S|VxIcRoPnL?>(h40HRB9P}xPOtoqTq7&9=q*Z0_iVO7i?ou z3o|qIkQNbxf=SQ7P}kQN77=mlhr=pP4aAui`_N=CQLx)L(KzEmnecB42V$j}lcQ5P zFESKi(23mD^%$H|<9mU(3O@Qu&n<)F-O8Mm7dRx*PPTNvLYZZ$jke}==lyT{6T_fk*DT3 zP0TY9#p;2>d2M#DN(4w3#wq%9 z(}p<>t>R4=D%}np##QCnaf}VMd6tc@Lgr&GN)H9vomrO*%!ZE z+C%+}c<9Uy72qcfhqCE4evXPVRhkEz4Mb^}%C~WJoQD#Dc97&}VDN)sM`T7B=7^vJ zI2Y|9(RxIqh?RK;sxS;SSfTe-uh`gw_!QY-DJm2NaA*i=uMO6P9Xz_fx|Vesm3iMm@&tbilC^Oim&{zRdv!VEYj zDC^KvUl}P29iPF(1!T-$?Jo7g1R^^`de+M3DUg|06a?z9@$s8L*%~c(4F3AHjymY! 
zJJ4)Q0_&h+LlV*%dY)>Y?j_**(;sQ$Y9LGpp;#1xSz+3t8Sy}S((n<{2$F#Sb0*f- zEN$YUYTW0ej)&*(%Nqhjn_X%ct=L&&&jC;BKA8H3?6UE|4?dcWVHW4-BVPvqtEat9 z-v_zu`Bbvc|1Aj#`T-J@ZwX~AgN~2!TuSO2WY3?%G+Bw2HbfcfVVd>1j7%epxV{11 zpWSuO9AbBHVA|pYqe1VPnwpYoB88dL+qZ9<1MzqRN?a$n5CG{hLuAKRmX}*$64m+a za6V_)q0IY%_pE#YjuifDa$fhrLd}$SLLSWCj+^(}#&7a?c?(fJ9sRn!Z=a2-g2dDV z_2N&EUEYR4aCQJqhy-4Q5g9oVe zg$Y;_>W0qvqFs;T0|ufU(?t;{5*9F?rq&Y72HxXPJ-un$jyizw{ucZ}W)L9dBL)fa z#m>YFH)&<`PYyOc`MAPELlK01w7+hJ69)2PfS6ZsaImT-=<=V$k~3s_H>{8`d6fDj}nEPxi-#Q(9P~MGY5iPG`b_S)B~9$ra01PzNs_vGE~vi ztvNfV;71s|6w%%%mVu{XsGmTq6LvlfMz~T5GFSe8 zXy~b{adB}lW(ae9Iu1mvTwFxp@hij%3!~LHL#%uGJw0CY!ix~41C14OHSo0wq|1sc z)4vgN0LNQ8ryxInV|TZ*Htwp8>+|1Cx~-v|B!By|ytcA7t=J_Ow!59da~(%m<|&Xi ziPGRf-KVA92I7YN2?p`}3BF)G1ePJmxh;L?g&ZLmaxF0Q@CX12ME4A5A*Wu`*!UjF za~ihYLLp-mz~PWI=nD_gE#%7`U|Nq=5KVUb_D3%-uMExqrFI_&e)`0@`XsrlGx-@l z;)p?0f4imL4Tn|_^g*s-&CK4U-WHU7mz}{3Q6862D5YNTpnKiJ9eb0t=S~SE)E6o@ z?7wu<0`MHLzz=$k#n!GrmGn9An6FB9_%I0!Q60S;PfF=F9? zr_u@ELV-+I!nXtFZL^UXn&R3CmFFAkQmC&}njlw@b1v zorM3o{Cs5THIC$f18=DB0AW&Ks+^+y?IXxha4fZ~jyh1h|GN->nE5=%ClqQxL(7aF&Z)VO*$zWejQ16WuvDV=lDhBI06Arq?x zu|1$gA`uFR9EgI$0E&xPT8|AxxnXirsM&GU$>$2 zv4G$hBLi1xk#$%2_}J={s1eI$fUfY}K^nHWuXJ>%5Hl7=5e>lZOSN}>$v^~M@IJI5 zh-<{amrL}jdy(C|{p26gX!uMIAvKC44~geAB@zo}YvyJ@Dwo%p;EucOnh%e$C^3Pn z+!Gd#H*AkOO?mmqk_4Vcz+)Y8`fBpZ=kIsC%iSY#BGAAPTv7Ys z5l|f)JG-pR;ww}S6cyzgk@OT5w+|XyEdMIQllbuDz%OsLJ}pf(+^LeeaRjkDMQT}$ zt2X|0%uM+1*YoJX`}q2jX2+S=SUNE$I)_6^>F>KVVnOmF%>%-iB&&7zF1JtnrWpH{ zpR|D`7SrV0S^U>K##G2N^PU9_vFlezNLU^%eJq}G_D;$?ab%0eD%YngbpP?vkfS|y zA9T^A>ku0ohz{v^KcEk($TCHm9j<~N0ucV8ha4Sn1^;~~O9359CW7@XvXTf-w0HG3 zW|wFMrBi}->IDuN{_U~|1!F|Q1v%-F)lsoSt(rwIL|Z_HD9@uq85$bi0DBp18t0X~ zrU=M>!=SJIC^wa*fPn!Hn-OyXtPh}r($AW=kH{@7u57bZclYN=bC}=Tfq+3eHSzBY z5n*AC$Txx@Ggr@_{{u}Bxti+JRV!%+nMaY&o~tDNOY)y;V7oVFVg3~$I~fymJk^zv zZ6b$1bK>rG^RY5A91CC5fCPIHFQoGcKB9%kg09gkN=C+Xi29fb1i%Y=%WFe}1rB5M z?X~sVbY?AZ1Ku}+FbzP37LHK;6HvM7VPgCSlwUQp;mofuE$=NQ4& z!JJPhv*r(0=*aLTHC7TG!Y3O@vkpPgML`_hSGGeJKJWU6uYA#3w~uy6A(HnHz;wgJ zp5?;7i)Kvz0Ot*4qabp{W;w#Uw>H{($8kgsEdyC9m|`y)c}PV{2|2`=ukG;c?_Nu> zK9Y~iXzLSe&y`t2cIW47Yk&70rv&st7Bj0nN_S4%~icoa&xhXg7 z+NbrY)?2#Sev8lu1 z>ga&J%(s{Ch0sMrkME3N!)?F^444?m*dvuAa5=6B2nhsAFQufU>~>L5ExlYR!=1B| z((m{cXlJr^v1aos->mY{H5yw^t|mogJqyUpS^_H2RiVXSp+R;~si>+QmMa9Ooe`G> zVq)MtXDoAL%UM+g@yUX+?{>CKf1E!!)m3TO&+tVU9E?F9KV}3sV#|>rk@(k^Y|W8}Swo)0Z`M79tr$v*-#uO( z#m3*cTUu~Au=wwij%dNN0nIRb+VF~;aG>@ts)Nk{<>-%I z@nr7KcA32di@h99JU1M5D^0S>C%fE4JwTc+Khn=nAR{OCuG|6ua z_o9y0-a}4HZe7vu_31JNfQ}Qu4?9Z+LaZJTgoeqK&tJaCDX*IS0euoi&+lkUPPJAB3Z#lbll4(XwYIe6 z=4A*Gy$2f`(gQ_fbX4CN7!`c^Y$F4}Jq?tk;wz|8G&D;#gv%Gha=QMf3$L?~rL=B!~hNU8opA*Dx5Qf*e35zeR#Ezl22V zfGa~R;_C+Td<}!zimPS@ZYC=`QhOjM^Be#NH2E-5jL!c4aL_G0juvTQ!WIMhP9Ko! 
z?1y-5jv48hrb~hs%|A)%NXC4TEED}30sblAdSk3180hjWF&cRik`G+2JpChu^<^w( zsv8NB*EbFj7T>>B7K7w-x`zp1UV1#%LZeGXo$Hk(?2wT_D99IXK3*R#2?A@y8r*SH zd=F&iqG*iO#uZ}R)BV#VgHCkNIWzR`XtGG$>GpzXjuLY^dM4;H)TSgV z6+#p`D+|6*$WyaKbR=oZUt-W^1`|0WkY0BdO-W^!y?lon6XJeB#2qT)f7hEdMij%k z@zd@9hqJeg%Cha&e{VnmB~=6@MMO#@q*Dn+L_$&;=?)1cMG%xmN=gJ&kd|&xN>GsQ z5~Ne5bI%js_1|l)vB%zH>`(9eJda-YmFGF*IDQjl|6qQ~2u)zLJzxHu$y%|h?!$lS z4KM6^Bw9l&rsc&A&Wau3|NPAhLAZKQYY2kOx&ykqD;0~^Q0`usNBtXmMUVP*U4Jew zhk$Kid8pWQnk_oBDG!O8RMWu{(c22G73ac(hNdPSfU-CV)51FgT&cHO+JL{g2lXp3 zBm>r{0B|zY3*VMmpnN!WxJ3;L^vq}z3bMkLi6>V)%ad3BUW(h@ShW^4zFb=tHPc=q zPt_A&9JyC$I2ILBIox&|KIu?mdgU*(>nxB&J!%_P`bqfGJVy@}7LW$$Gpu;8De?-B~ zhglk{PJol;28|Z-q(epEf|wgG#PugmoEULBvUMJBPGsKw#e~-Q*ck=m8H&PXdp3wx zc6KzWWSF8ApO#vGFf&H^P8I%|Lpa_ucp+YB`^o&pP!Zyyg%2toR$CKn=h5y9>vfG~ zQfJz2V9vBbRwX|S)#FA`Q^$#Wx`iCG44=Rb1JYWM!eNkHyf0VRD*o%P4<6k`kkAlO z^S>VJhZ@C7*%UEcjivIQUF;LT|LhrT=jFGpIb$7z^7}LAe@)k|nq5<2`rA3wdmJp{ zufHpdg<=D2hi`_b_YX8QolbUuxEVP&oV7DS<->I8(laoWXt29O$0Sa`j*dgC$N(~X z=u_Z;G;sy$BiGG4E~;$f{D-GD0f7{_p~2$S2I=gvb5m1B80ZL1pF5Tt34*& zix%}m+&mqT#B{jF8+ijqJA81h?0IXji53FgW8KYDx3t9=Z1R+Z{+6YI>Gu+DfoK%= zS3~$8IC4N#6j`@l4^$LD5pb;VgED?n65OT_HfoEMAx&8&>G}ELU%s?%SU^gBc>jp% zRVoXOLd#5g{m1Q!0Y`qZ+9o$pO%tpxw$9m#Z;VL7ZIJ9a58o}Q(Y}FSer;{-Exh1> zt9c#W+|bZ-X9^M6=sebLIteEYZf0t%05{{=tu#YW83X@?9+)f4HdwC#4+-khoWg_t zVh}lOUe=vV_fz!}_VD?8fiIvYv+v%D;?poAu_6U{nMpSqQ!$ToxZRwAkLHEMZAe<$ zpEC@729U49Mn?w1pE@X<_4KF~3ZKst6BE04)Tf%BT6F~}ED4hTnR~TH(*48ZG~|0a z38b&QEBGr*HK=yQv~N(8`)v+&y-4NMbjDP~kIny6T7?Hr|9|3uj2U1@zK8ZBIEXeX zoPp`}0_?Q__Q0Sz3WdH$1cX@Ky{+I7MGQa?34Uyc7xg>c<(I+1@1WeGAr`8hmD9~0 zTs-~#y{MOPq8#`UK-;aNr5^VSv;rqw9hu7_@eYP79%d=ygujXlOQV=us=z|ppX-=}LC4`0JL9>ORgP#p9!aqPpy#_IP+?P>cCB%uiUIV&^AV}&_IS5Kc z;EP**z4f=bdxka>Y!N+&Ff4Aal=MUJq$Vn)b)ncKu|b7Od{yz z%b!R|2r5=kW=_t|HiKaEJNn>q+X2FcinLJZ2*}D}hX4s#o=x|(s@?dIRttU<895O7 z5A3&EBeDJ-!BwEL&=1qU{HOJCO^{GDNqPo2f-I-MgLMs_T{;vy+-wry0yq?nCnuT>_MLuLI-)Qeriddo~^me{jg$ulep!pNqR*z^EH9f@RO0 z9=;sFN%}-ET0iu?vXYL_&E)F7_|LQ>$g#yhJ7VAUEk_&ee^<_8II2!ofpt=RcP`Cx z*we!!SI8!??8$j{$E$mMFE{Ck_niczUtf=Hqx-(*P{2X|qkqs>+zY%t+C>GBNr&m5~$$Vr*=k6n26E$KSBnd z1*ihrvhr$LGu5)5{$Uyc>_|PxF0*nv*=_aiu{-V$hNV^eF}h zm(ZXNQ-1ORy|K*knfF9Px8&~ZmOGG%EZ=iu#}8Wl*MIr=uK@mKY>W=LZMo|A-rtXS z^G45%$ol)j!dXYdJ0C4%h-H-j{qa=t7LJKU9jqmY)c`3Wy51oLl*tIl8hbP<q$`s@)^00ld9P zrV4B7&*|w#sDjBF8vfiEyM}Hg1J#9~e&Ry#hJ$(z@g~<-M!_zy;}5m93TyQ8LBfs$ zDI*|u{z7EdnX4-TYO=eaX zor2tFuo?HlX<0z(2UVwwpdr*4Z2?c$!aIkMB+*;wd{Dj2GYK=0sm89F8Q=ebZ~4ai zNzhprkL7FocsxBmMp$BP-Xnr-sOoR@5Te84E9+nNI<1M;b;%LBf2O?uC*t&Xed4Fk z|19o2esK88ijcNxB+PQcZVsPhpQSH47IXj#STMJwrO}p{VDqi5jp^#uU~qp6TfF{v zH>mp)@$$a_sp^8D{Y7Z9A~7_`lL6dWQE#Ff=h~ghVMs zJqvbsY!Q*l3fr`QN0cEydE+TO>3pD$0>KZ+ubv3B3~FoVLH>V-!h>FNCCG25gzL0& zVnC>YD*Hp^=gz4J9A}Lnl3B^bvImXD#=w8w6jdNV5{TB?MKk=`@f^ox248oxmOn|A2`z=Cx;oIDsIu zi37JRVsj#-5OVvBNdNnz32@oKHk$x2%?wgCf$kc4*A&Hf{JddBMNGN>af@!HMby>Z zzyOj3>U1Xzpt%fhc8~7jzsHdSnJhRh5+U$->!B77(pqd}?k>`@87)5E%kSOa|GD~j z|L>ay`a?i7J8Hx8I~+ecpoi4}#{h}T7xj}EYzR}fi>RR<=(wN}dLS5=a$_WNggK)G?B3jh1pUJIU z`7%6A4Wc}_vbI)O6o3>SJ*c6n`T9W;n1@hs1+{&9N5@InRhXHXo3jOjUD!moR1djqQa0xcF7AQ@=ntFa>XkDZI9*2>)RjVh7|3 z5^$;m-4e9jTY+hS=ysOb=l;p#7`$aBOkl*tFSp$R-`1tr`DdNZXTv#qh`IXm| z-yNqfMYqHy5I0J~ zX9?Dy)7}`k%bE`!Aj2%u<{;@Ga4P(6rE&M7ZVTw-|5s8HN)7BpNJ`W|8VKQ2ydIc% zdhds!^3t%h16J#An})`mi+{d&m00#BmVUzB`&0QuE1PoQ>eHXJ&0YEwH|9h2Tf8>h z?-N(yo0w>f+GMGXKU`eZ*RL*HhKm4xa0_e|tSl@yl*v$m6lqhS)rbg#mc=)r?|~4` z;Oj)N801iZ$8!@JwwA?IX`L^CJ^O??I|niy0NG(rv&SL>XfNP*xuS(cM8G$2Z#WYe zFt_i`{6o`Nr6s_PZ&*%aA(_^^{Ky56CUU-lLO0@IG~0Q?+nTw5wWq&5Gq3}#T1wo 
z!L%YJ>eQ;v-q2Ns$Bi;XAj&ktMq#<3DPRd9lBovKzt*Kq9XWb~iQaJq6r?4*4B3n4 zh&`NO)}{{b>odqdSx3(2J5q6`%i^l1oOVn=jmbAbdJ3}=>S5baEABIPPydfS}huJ)Ie~ zF|&XyDCy=_VKMR+{td)4rp|17`g!fWO>~kw1JVz6Vda*NR13c|7}2&*cf=PlAMm26 z*FHsVt~;xT&}H?GZ1di{+MHCUqO!5~Iz!9tP}PuiEVs7zz<1NPl?>&_X&D zPNz6IIWfB%i?;!~2{_WXkcwfDb`Jc%pxHFa{0lH)U{wW&+}pX){lBs>$OG_X^rZ-F9MG?*(7Ha!6fbdO5#M{ce0QALTk`;A8fM$TiKsh%w zLEHdhR)F*3z11$YMHMy(E)a1hD2M=!L4X>;V>dTPc*#-o6$-Xco(!K~7=CevQTEjkBg+6%VOeD-DwV>W%%my!w9F15N!gknBnq?y_B1p`sJ8z%C9?n!pDV56c-@X2}o3LL6!u);Ocqplz)%` zwPA56F(dFfNVD5OUZw!g3KmPK2y@wui5BAVgB&mpv;Y!^+l&y0fELCd<`7UoM^hzf zU_cYvn%@CxXYcHc)YBjt8D4sg8jC&*k__HfZnPwp|*MNDy^7+i3i! znzU)ML4w}xiVC^8!Y;0!MFq*BuoSCIj*}A07ug-znOays2(#soSNh=C;}bP{eI@O| zm6BTjRdF1`W~gTgc^_5{mO)iU1c+Kt)m8H>%2SRc2}gmjmip<&Q&xknkNLD8MGkx@ zWlEEqdsNiL)8md|SYBaRtK!><82%a>{O&SGo9gZN{YDaxm7dH|3zD*nRQk`g5Jmhg zJxSt|k>x=O-xcF^6!oWae-pgx@P%aQj!u1FRiSnN@T-sNzA5|H;v=JD4BPO|b?w@1 zCo6vq8d2D+oe58TV>|Rbx>{^g%GVzM)Mwi|Otw>n|3XOFoqocnfvFuep6)PXM05m% zejUJ*${(u|RBq?Nyr?8j`=Y7@K~%IsU|7&q(vf09x?tI?Nu!nQ^33YrmTF+DhM$q~tGbm?4&BB^1A zb$wq5q~~o~NXD;-rILV^KZ78p)K~i@ zx2;>wCZ17 zIm#vAddtg^TkY*hEIT_Ng!@)jDE&#A(b?T5QT3)e?IIEm9fET=3DA8#bDvB$B>m!w zhzqU$kG`&NOFhG{(hIR$>6`3JOUxc;kxGgNOi{SLOtibyGRhYn#bz};{MbEK@jDF_ z=tFZeGW~8jxW76cjWZ%gcJe;)D~c`J%Fv^)>P!jA8zsMn)&0d4sqy?vPct=eyom9B z_v%3Aa(U%Z=%=!QBI<+~4tPF|q-UR9s~fkjsx3P>wz4o#iOBf;#rqnhUH^mm(m-ab zQEX1AM`dfrY>tC***Lzx2ua=67MY{?pxnwEGp?AJc$YMbVcY=VrGmHeXgxgLTt#$c)TfHiUKp34YTs)7yE zecgtk*E)!6-YvA~riYaqh)e&Fi|5R&{NiRPiVZOJ#a+}l>tU9Pd}O%P`}ogXQ(a5 ztL@yk)NjG+TykA)-k?7*oZt2O)nSS~eUgZc&s$NK(^a8KOuly$huhEZp@cD)rEiMi z?j>7o2&X~c8*+y6782ut;t|kZR~Q(4piEN;8{EINxwGuN@#jtsonZYo#gFbUdj|9M z)8Bs|98-NC=}92DzhY|cPqC}(9bUa{71-4!TYEpZYmuAqLNMBbV5&_)Vb+eOWDho! zoq(!|f?t?}BMea%YSn|V6S2|^3=DKjl_A41_xQtEAJL}sLcd;-VzaPUT`E{1Xj11% zar^k^uyFM>e7Q$bTi;fVIrpD^S$AS1H!LhDZT2!JB~D2h*jNpVR?ztS$uGl%P39wy z_Pa8Mg4ab5EpV5|PRO2U+UDDL`h79Njic78m_@Cr9#6Q4p-4^8S0%A?|E$#J=5myY zX?$yMXy^bQNj;prXRVi}1us!2V5-{3$@Q>Qvw8t{rs_`m3)Grrm6a-5CUF;F)dD`w zBdoIm;b)I%Bj!I@_YVIFP$0M#`hiV1Y3SU7$g`WXgqlZH$%zoS?>Wol*x;O0NefC{1+z$9JVwen@o13ZpUwRd)`r0d!wXbp=e^# zIJ2v*oY7F+kWkPlsCTpa*Zw}%6IfUstw>1VYewH0T39Weah#-I#Oq$ipg!k~9hF+J z=$_i#jP`v1#Oxgq(#-?mc(epf6U!h0WR9Vj*|{1ePi$wBBZCQ+?o+)M*B-8&ouKqV%yK*kfH_^R9+g z4(js2omwn;j`lI#a*xwz`yO5SDN-_%Xq=Vwrd}(Ab^Sp~>g__zmyv+Z)iSE-=Fwlb zIIon#T1@a?YcW+|du`$OFm~HhuvdaJ8G}qC;Mkp7TGHVR0fHOD!uR**!t-NP0=zHs zZ1HL@uU|f7UEgqAZk6r7=tj#bwc{mF?motl-ul?}-Y`FSZ19IR0X_9nJ5e0&UUo0+)k;E!(Ze06ATqUCr&Am3H zkYo=x8{QL}1b(8PGYlwjC!K(JH(S5<4OF_7R86XI>JZ|6k?6Qv9G_bE`~>{O?*{r8 zS9uG16@EBq6AcJs8HAko6m{e3T2ZK=zxK{}=h`2V8u*FPIcyvUe4@{=9et1q!krH! 
zb^Z)18aVI~8ydW)_Z~jHgoT9#$iLGv%5#7Z`1c8wiOcvzNhnSBJ1%_{E{V~()TQv_ zPBqb75++~$L1%v&#m^z}k%(}`-^~&#q_yEofu0$v11^jH490@Q@B&Rubt_e$NmYO} z57gN8p!$Cey#!oeAMoE01I7;(Fu;4bP!WQjY8%3Phs)v`&SElT5wU#6t8QB*m zJegh3<8^TJ`#$4zRq}92pO;H+ZNjZQJb<;;fRf5@sO4jd8>6%@6w{zX5ewC@&W|e0 zw?b)JkVq18fChjcAV1>b@hyx`?K&%1O~B;E5fFNt)Iv@AUDBJs3VkwaJGsWvRDY+G7ZEqf5;Tl8<5&3)I{A}sJI4z=an1MBQ(5WG`UHIxA%4iR9SRx-z29Qmbmyz@Zp~qy5l?)^`y)U=^RLn`N&7XMAemeofam|S9)0a4?yj(S*RRXG7=8im zbgc&u)M`>%pY?;6^!`VdoPrxJDwqD^gLQ%E&-IBpQ@P3hg01~)mz?{oOS>cWn977S zhtDkVzs~>Lwl%H07b^x^MJcvsFAu=Urh>v8Izi6d$Hsq`283pJmmIP#d3=C?DsHDN=x!OWJ zJASrBBA=M-`9Ayf@uGdaq?R}@`p*ZD-UCU=8k(vGOpF=uA_8u}|H~Ii4Ak5a^kbmR z!6=FUJ}h)Nt)h4V-tV{!IVw31KdXy~DZ6PqX8MV1q z*aJyCQE1(ndboGdkISetV+dKOP^A>QvXlvIVIx}rZ+`vyHMQ^r0HVG_3H*6!sQ`co zimm$NTVe7OB2Pe&G%z>_%9J*+lSn!`77U6bh7|B__uN3O(6;dZB}tJB2Z-e9SjGC4 zEtYL#%3<+^mL(iJ9!=*^3p!it{8Gl|*3+&@1d75TlUYGW&FNXA`0g$oLnO5yCc-3c zy#G>in?!U*RB?EDcujkQqm9m6=Swnah@XrGCJaRN&<@MAaRrSSBjc1m@#}c{y3$I$t$dq^mv3X_ zFznTIdkj*oQsZxZH0L*ndM+?X+Y~;gyMX}*BQ${*GdZrD|G5gXDk!WTRD3If{}=G; z)mcz0$N*IUxJ&=QR*j2+4!#;$kBS|hW3+Ng6mUL}O71h6nAqFW)95Mo!S};*hZ#af z+uO8_nXMiDTH}d(kM`%i$^a;}$6Bq!w0?n*(jV6l>TtQ<-r2ug-;kCvK^kmpdky7v z&gpt~N;`fH~+LEU}>mE3&{cGjw3U%-gF4nBCO&=;$wT<8Z&?UM>#m#WccaUMn=uqO5j! zXq{#E`QSm)9C>osLjOUO$t{f1>5Qbrd=ff;D4n=??xkk(cxizW`9D1XLJ~(@Lq)pF1+fTnIot@lyw|J2!_`NPEl0&KuF z_63$C>A7?DOLZy8YAWtV5Lp+p89=({2{jP4 zLl%;Jo3?5S?Q^8DA!liQUnSeI6>hH8L1+a9M(av&7^w?LEBcw+Lx|3W*@c;|FxnXP z$mne*6E`_Q(IR}QR!3L%BW7uzC4L+Xd2BW;(o}89P>7Rpm;sEifMOl%O zxa91+FN?Y#)s2o_UEk;^nW9VMq&4DHsaU;8#(bY!SQ~~d?9g~JFwE_X9KAObfm%`| zC{y9VCJH)WgYOSEX_FE{L4GF zWb{L{Rk9ZHSEJ}Ytjoof=xP%(Os6Og*Cc^%v4IcrFzA-(Hn%}x>BQGxBKW9os zTgLlVGVhW&YTbnr)E4bVeL82W%{|w!-`CP(Go|pXJFoaj$;v*NtNd&t^qllw0_3_f zZN`K?q?7(+#$O`Z!t+Cay<3)E$R4yzXI3{8?;kVBQW){<3sJA7W)+JvQ2(&lbuysi zPe*#h%em)etd55@J}=+GpeM~jQzA&1CSaEXQ|5EfK7(`^uo54?@dJP-hnq3Q*#!k;i03b}(3#NByR0i;FFzx4bXh~PR3&Ad&ra{OgQ#D0sf zd|+ccDH){@EV?ZJ_h6PqOpocfHrPvWTpKj79vQU~4~5<9GHjf1I7AhE0IxBw@8=>< zaH0g$OVqSeL4B<>cu7@u|IFDwhusl76u=gDKN@JRS8bG2<}N5YmvV>r@v4N>{71Hw zA461-ZhVs(&o|VpEaR1z@FTtV3|E4lApOH+#8Ujz~X5+ zjA#c53ED)fsFJDLbU{{=ihb(x1T8tOjc|Onot~J;Pj*Q!Vbb2pn;fA&r8Z4r=*YH zS$Fkc6ASY1n3IP-+-_v1%e28vHG3&V7c{TIsJ}Fue_@?rXhF=7TcFSi$ZGCo4GaL~(u)!XbSN$DuHSs$$*W`r3Dc1ArMX%d-vhj-bQS|qc$OS=_uwd4c+0Sv+DwLxA*)5I{Juio`*8lThMclc#|evRKt;S?8$ z%h90wCAg~ejnqf=mf3Mwto5yuXH{rUww5qRPmdol1S69tYRow1{!F`PLJTjO-!mz&d8 zwD$S5znPo##yGu|Ra+;JFSvRjX6&WOoKdKxvdLVb#Te>S5Hr+wI?q3AZO!y?WAY>Q z9);>);Cc4PAh`ctk^@6g`ZUOCkAP;4uw+bt?V<_`z8>y+icHhn3(tMoDO;lO`J~_K zH@j8gIqnLmT$f6671O zNi=-n_LiTdQ-H#ac?K0&hbw_VB_-8SV#N&ZKnLv#Q_Oc51yB7qHYUbY9q<>X>Y$_B zP*9#2@p^HD#bn5lQX~E$wYenb>|-rMgQ_nk?$5<^*_x9Q0sp&`>zXq1z5q&LZytxXO&yb44y{FP2$DIcyE^m|-ZXHPN{(y!y0c75m zin}wjvnVzJ)$d}Fk0d^ps(3Z^5l%zi^kY4_Cv%1Uk6PdVnIBc7-}iQRrK2#Z-tyk( zBOv^a`|LB<>1UJIY%Oxj3s@pe2Jd{!j!f^?l~^`TZ>;EU|JdE~CsAQ~l(n#;aIeTt z%ktTc)Q@GCu4Ui{PU4n*VkErYzZ&L))%E>8<+C4ue@`$x@~nN-HcJ4nx4UltR%ZF! 
zM=i6d<_N3WX7tgfXi z6(6mh+o{?+k?~e1Bwvurir0FZ;&Oo}5_)OBk)sjRP0F$7A<~D`6`8he_<<7)(@JhcB=dz*-= z_i8Yw6hgF3s0$Pm+_RZ^#q9PhS9yY`Td%?X^Q3Auh)jNecU z291YSB72RQ@{Jo9ff0Ktl|I-mtWIxLN{z=o%5`78Lb>rSpf^A~MLxoz_z;Xt4q5zAZdF8LadlcUhyD!h>$}sJ&T>H|jd-bAZ zD2E|!#zzACKuLKMGh5-MFz0V)qMqU8wmrRa5%bqi@cZxI+r%`FWl9aLC^zAlYyhjn z|9J(l;+?$hbkDx$F+37F|2!_cckh)!M1$;MAC13i9r%!)27`HFL`JJ3D{cf(S zpX1(d(o3b&X~we{A(p3d>GgsxS537=o0QX(7McV~HGJisgCyen?{nPEO{p+( z^(z~UQ9X$D%RE5U3_$*eM)=qV|Ro>PbArpPvOvR2gN&^ zX5B`yazn-QdI_#+s=2PE^A)bH9K#QTnk$WND+a?M8p6$Ka}BhL$a&hL`dv0o*Gvyez2(lve;fD6M1CezUoZdhO~ z9rc~x4Yhq(TN-o1*N2ZbCvh#)JAuoBXheB5Ss722Qedgc!G#v1aD$pA_{pU7cYbQH z7aje=ZiDj>RfKD?QF~wrYXtFa{_y4ZOrPH}e@c6vrl+@JTSG-}Yjkm{ufx3u=H=4pxb&H}g|paYM~}&`RTkKcnO;Vlu}o ze8mFqY-|p*y$U&Bd2W3r-_s!jdyWo|NTT|Vlng3A8K_p)62V(xzTEobi_iJoj@Uh% zX1;Lqj!4q84OqIKbKRN6@@p6Tmh&&%)gs=+k-+43q<778gKDIiR7&cnz87=%SXj+XQMRBoqx{{-r{o@4-s=8m1 z3{GQi&lYU7>*n84=@9Ue&;`RnK}PLcqAL2WN$n-AHaYuzs#UW+Nq`FK@b5sIM=v!qtP~u3Q+4I5weOH&oC%ZQ{u11PAipAyry4%^jzh;GboT0c`dFxxN zZOgNa^VNSv`4+pC*j&p@cQLyCxvy>b`?1?nQs^!O!sq%!i8V0w@lvB28w|^Af^G*^ z|59?VC4zlS!5QMcEh~HdA2%TA+>q%9CXGLYat#3a@t^wnodD&O5JXSvS<$6qZb1nB z1y+wLt2}EDQ1og-q71@43=(-q#l{wb0N7OwLYDX5FmG3{YCCX&la=)|)DXZ(_1*Mu z@$M&lhfkUJzslJ^uSqm+P3lDpv3Wbcr1vfB$+5}BA*`13lH zzLz{ktGd(Tj1N-F&$i6E5{Te&foA|tBsDEc4%iLr&uyyAqUHPyya87u$2JFupx z+o)oqqDa|h6<%hk>0$}p$?b6EI#XHwtgS};^7i7+7^Un#unVGi`x=D+A{bwx!}b71ZX zA0Hn~8EilB2x^qPdGltPxO13yuA=VVdq7D#36Cn45I8h0MTbW&o%Vcis%PNa;7l50 z=Z_l&DPoO8xj#s{@En3HPD~Y4b$S;@W_s#ll!!8_s&xr?o12Yju4^6NUI!U1RZQ@U z7j+Lmg}-_=N$dgB!mS4jp2JiD{HHLfNLf|20ZNQzpl!$6KHOV%5Ke-r-3_C&ab|Bm z+m=qLi4eSsijoFvrG-Xi5t8-7FhE1dwpRKT<>VfM38alzt8-?6vo0lSsH2q1%=yB+ zxbWHUZm)uU60Jygu|2R~_t=tg2M}%N3`$KCYs_U~cbIbpQ-;EQQgJG}+5yzQNeZh? zjHbH!Ed_f!ci-d_$C# zqwP>-i-WNa>RIQXR<2Y8fgC?nV*e)znh6b4+y4N5{4eB975ZDqLWHFJU0QmI2=2T zC*OS#U%yZt+;Adn@`DQbRSO~2>?5p3Ti4P(X&J-Nux3#QY`s_^v6~QgL`VLxBgw2n5ir}34%n{$7AcX_TElL;gfGdWq&OCFd>8fJfh+zkykxQq#jNf(p!KQpuYYi(K~-pC-5Y zgLdHGS7{1t>=(&X89r&$R#M}oN)9m|A#kKOes#o48=3mX53;&=F8{){Ro?A3(>SAc zS0z7L)*(Q=IGKKCKojDl-Os+pKBf?t&@o^s71=O%G0Z=B@q|v8FnfH(sjlh!x+)!dchcCt|YoE3jl4YrR1y6c@;(JCEyAytUh_85kP<`ofnjD$MJx05L=a#n-k-k_ocwDMOy2B;zlRGYNGEFpS{6( z=4wwNMIuNiE5EIO*H^2*ZA9o{o0I%0Xg%z*xUuL|=w6C;VRV6WP`Q%svhCLVD0N}m zjMm%o&z@PxtTB!a*SjI?j$fcI)#n->iyIjEusFU#To-Qto&Ji0?dRBEtwj?Sy1cf#@C>s1JDsGuc1K#y<1|=K`zhD{o2TAjaHB}`j)SlMDq0@iYb#%6EMh=%yWlnbKj%L z2PL4AMP6vySMutWYwzE^I{_d8=u|=o{^r_j>BsF8G&)3hHIGJLwf#J88_jk;VSgx+ zhO9~0jP-pe!Akn;s(Zmiw0DCoxT<-+m4#Qu{K?Ty`Oq}loTWku3kaOTwzQ1yA4%f> zmDo>b zVqc)(52Z}Tl3o#)+L27f5Aoj7UhRbsj#N{zt+;f3**7?5Il_ zeyF^IMU3mpC?xpI*zAu2MD-zS;zi<|C$pl-&_TR)TmrsE8avsHF%YET2L>7 zUcREJ!$s!L zA}<)$Z1jKcElxJ<5>pjFZ2Mg8=iKxAIkR=9Y>eF-h%P4_Y}YK|r*r-O^_Q=|xS5PL1YV1SiO(5TRq^goaKoh&pssX7nJ((QNzVFM|mypMzauQ7hGC5NGAyIBtgp5`3|y`&_juL2@T1PjK13Z0~VEKxOVoNJwM4YhJSM zYgDixZIi+_&`xDh3O|K+?xH&p%{c5Iky=Evy_TwVuD8T6$#Eeb5BVpv32fRezsMU( zy9Qnv%Hm`Y3BAARb~;+*sD9qxb!u+#oJ#QGTCS_{yJL__>d9A_fgv@GvfQB@0#7Dp zDn9nl)i1kaE9hOt->vV__`_1 zg}7>Js)Wg+Q`ESFZD6GuH49&ydkFh}uTF+t_wK_m%EnD%u{*sz`MLIZQN4I|o$iU! 
zw9l2@69s!L{Y_crgWZ4ZRVV0V+XDoq$J+ zraPy%_-wZb-U~X?P-ADmQ>Q5SsY+#Xd#f@b=e^!~x8EzZ#QKo9-1>5d`P08e7TRao zF`S%)_5Ke-LmCBbX@;IYHKj^K^{`yNQ%^a1-}g;fWlYlidUwO+k}>n==c&8=(NvNF z_E#TLT5Y)9|FOzkZ7&x3XoJH1#P$iUD^7$)R|Ouv^rjRXFyEqj-zimLGxVa#xWZ)$ zov2?k(DOeo^i2&P#Ia-8PLr+VB~R~l4tZ>s)OVVQx(lx}sOicqd~Bm>-7Dz4#(2PZ zfm%4Gv!94*{|5K;_&cR|VG`!pluiRr3m;U|AWc0FSN@7@`ex&U8z~CV-#jzQ7UCiu$HHN`t5B2iZtaIeeW~l}2TDY(Vi1+aHn{soD{608-p;Q7+4tGNx$2{Y zU2$C_R`7aej>Yy|uN-i+y7+QOe|~OM~AJy z3msT0PP`vpn|t69PJHpf$;D*cFE^!LJVYC%bQU94K2!NJt>z)Apw)JtxjpCCkwqOo zylUrcTCV4+%};m=$F0_jh?|IEuln}7gIih+PdVjANriQ^_2?C zJ-Jji_N-38BX2b7Q14dN*K_Xewl7$!>ghp^j7;X{r(tDxcm0hrSRP-*{L=FFIwEf& zG$1&;rIxsuK+NDIK5y(|Ir{ATM(u~$0Er*6i}-{Om|1j_l8XekI(1b_575b&%zI|@qsma4Q#c2}?O znp4QI<~hzd9aIZ~*2zDDkz3|AB-&Iql`eEN=r%BD3B#){n`nzks@xAr=ND~*n2_;r z+7^pudoS0;aXhs~5whM*HR=$`=HSm{=X`%RMPzKah4pfvD;g&+PAAy@1a8LupUw21 zfUhEW3iMK)o=qsK@ekbwT_Aq~QMH~Tu&oqDf1ltZ%6)ng`6VaneE!W_dr;wI@rdX* z^K@|7t&ITYCp(^aGpGCcrOp$I5@w$6Umn?CE%WVg9ZMPh5aP5wE5X&=D0d9(XyELc zG{uzK4ExtBdp8F{&J607`c+XVSzFx4+CSR(T#z{Cg5+bYnBgZdmzb3l_U*&wgAf%+GWV=1qvzX7@= zD4zhOqGAuEf&y+iWo3eZ`i_4rU3+8nu^L6&$z!&OPBghI92w|XDo|a4PNMr(2`Wo~ z{Kv3ZYY;sCaJo$F>_K_8E~CO65)!q_+c+a((9Y%@PJj2#ueDVUly0046xGzutEs82 zG{cW-h&9nio{T&G*m|^_FdksdM`$wjtlsKn0Djqf)5G*s5P||Kau((%f))Gyokb+k zfG4CAS>qquDEVuPb>(E-LHZa20S~ebPR3h)3U~^N2suYb9uU_bnd}i=;5qRWRPdOq zR zxRetE3K*C@8w5zFd{8gIJB+|&|0bVdt-)91xV>fwqaW@N(+Ji9L#74D6YVfBZXx3L z@q^<{C%=FJ2sw;uGXl|JNC}v9BNG$lp+YU@wmtzRVT7{=48oVu+i9KoP|O7m0w5Hp zRw`F#z{+eY5ZeiQbFQ|6_ zI_I}N6Z-RI_nni6ZJscRo_eYQLm9}Pf@PUR1A0M{FZ46vpCOA;jQx40fOK8WUN|XH z^vT)C7C*W7f1HH+rD5HMHUf=^OZL61p{!FjqsCP^3Bje#R;|C;0wpahSfR95Tw(Ku zi23?c4VVkv{Rui%v7PWBA0<5Y{mA&={zm5SlHRylVVmol$M>|z@V{MFDv&vv^$-70 z04FW|t_3;ISl_*-;fj{W+NCA@V>4GMPyriaUv3^86a(XTHlZ#Sx;aMwf#wu(|F^>d z^Mjo0@tT4Bm2*%&m4|_qAQDxK?X^Z?J7{hK2<^8M>%AF3bRhXOum&d=7w-b@e7MwZ z%jv}y{vi&O7^gc|;nG#43Vq(TOx#b+Q_ z0!RW}>tZTUt0@0HwCpm|o`epI>x-4u+lpRD&`yYHtf}pmkK0Khk`SzBU>M99T&V2J zHLlv*TAc-gSQE^Q_WfK3x=DE9tPUS)pE>pP8Ndp-L4yoD8F_%1xl|&LcUR}MX>GALn+Da{xEepEXyjEG7g$k5E`GtJDl*dKXJ0)8DJn9X;l;Q?C# zffJ#dF!g^mcJ5&{=Xo4Io_4Jja!F+-CkD|yks%$eAvQ*-4PA86MJXyNO>7s7$)#kv zOf!d0!z_oAT+?kmjgY3&rpV}|kuhnaFfLK2_VvZH&+Z@d?E0_g(dqnd-_Pg!`@G+; zHzui?%tEU@Bkf$=+>S}U2S{S;wItox%xncHm0TddMliWnN@IT-<&@PpnbVq=%!s!k zaHh$D+M`g}&(;?#1W=2-(&vDWK}jIQZ?i(wZh!`E^{hl{P`Dx zhK5`T7`%M--;0X$5`mm*t-5gzI`_`nwIV8^;mFM6T#7G&Q$H{w)2Y)I!@(Q zUqeVelfue|2Q$AG2~Auo9aFa-J6z{&XwQkwZ z$CU~{xD&h>*WhLn2O^S*Img^ck?^P_sJ}kGBh}Y`6c$>`ZBdR}uNVfy+czRghqAtB z)nwuJKUdntmSh;h?D@01`#6D=8)NvXFMW(^OJ}}nbMpq!{nr~FsE@9bc_jwBb2hJV z0Z1+0&RSpv4^-eVCK?$XUv_XoTa?YXnZYB4Zu~9LjPoFpj(DHmRB_Wf1sVK7QLAx< ze8p#^Lj~k3Gfd6jTyrUU{x!(cq_2upE`_<$K`3_xD1tKrR7hF9F49psCebsjc;{k+ zIPFCEMrT2S>R5Q!5L_U0zZ)(`S#bdwpP1JH)m+MV6IVfm5@0kPrayaR{>r@w?+MNT zs5*onD(*dr+TG+z0j98poXclMk^g3B$yCK>mt>lH08!vR2;6J)<>Z2Q1EFu~#WX0! 
zGt^bLe;+m2#l4Ow)=dBpfJzx#BZ0%r0yi-DHx`Tn52p&v!xvjE1A&1STo z-ouVGc@0NoZWBHSOd|C*f~juovD~8q6vb`dtlH|fiIF?$zdB?UPZyzGhUryt3UF`+ zU|^*uyY9^R`cvKP)?iD$A;X5P&uo_oqMyi96O<2$R% zS66+;3`#b|%E|>CpED>KrK!(F)u!KH7;S97Zpza8q7VFJm&r%}uDt(IUjJtk@(w!i zzDn=%pnzU!Z5@zzzbZ&j=k*q@r+|+Z1^nDJ!b7?-({vSpQ6ucRoat6VeY|`|6hNnB zD7%0vld1v^$8_ICTwK`#;2@PWpc|&=#Y~F(Xh-ly@oOy3f_zHP!+H}|(qZV7740D4 zf!Q{hq-vjnZ^hj0J0Z`1w7ImDNXxQ^{rL!-9K>On?Q>LC_?`AW?FK*D&Pnz{TdX-O znO(y2as;g$5mz=bT{RTemP4U*(C-7@s%7a#Q5`NK!M>+p;Ee^b_MFW_PC4^VtS&xN z-#L}AU62F#XyzvOXN)yjX$e|O@mbp+VPQI62x}3EO`_1Y#Kqfw_(R0!XS+;nZB;E7 ziDU#zysJKVGD2f&3f#{oy;;=8YCc|dm1_dfaI)K6@7uLW6$ccaINjg1xA zDR%BTSNtYI+Tp?Se)`ofAvg!L=ggUd3fL;7aa#G<<&`NPRze}TeKBq>Es(ahc0}d) zn7$hZ*}STqoUhd69y0JOj+_E@H}ToCNH%?L{1g(cZaqO;uqnYnD!q<;5nGccfT9hgEs z2X2y*P90{o6K5nMW(=Y{1Ox;KvMl6|_lO}l9*r&Sq$v7RRcS7g7fs?oCRtc?pXsf= zcI_J|M_I$2xBTPo_^_#FIFJ`ScH8@)eLJM+f~|JNmE>4P*ADX7-GP0ocNtDrn0DC8h4{}x!R(<_h>>QyYCT6-_{q>e}dS0YG{}7fmFKByFgi1 z)|d8}&9f?DKrQTi!^Yd3b8ar*rtz9t{17dz#VBTiQA6z?Cyw?nD^}Q}1EJf%<(NZZ zyDghBSWLJ{cN1BqAfiSR^W{f>n(}g#ROfKjeI&=TSCruUpNK#FUWV^OMMS1G(d9V|2l zW^M3e-A;Kqo$J?q$5@m&eOn@OME_WrnOaVBwg}FlWvqvpH?kVag}XGZ}P+?$>54UyGHJIOGu7@YGD!$`L}By zRPLbM;XC<{``2CIo7=6oewUvjKk}jzyA+T7LN zc!a`4ep!z7|M!>ix}#ia!%7$GgMRg9h_%8-dshsfbcURphew#TLIe7n47r!l-1!}G z9|zK<2Y$TG?1&S;ruhv;L+2k6ai2;m`gxVzeqMflT~m{Xm_H%whx*F_Bz*Eylk{($ zFOI3%bvkH@cVnJ7lZHxY_tfkzb%&4b%D=5SKOjr0n@D0uW6 zy`Zpgb2U2UT#zYaeqKw>=~$ca@h6w!Vvd}goXZ#(Y?do>Z0zjq+3G^~U6pLDpL+G> z8BOwf+$8adj3jvdjJxdy?YFbPhY;gQC(fVFgYl*y%guX)5$d* zGUl1)>x)%$w&SZdK2c^~X0uHZ=X_xsq}k|tyk0WRZ901yF2J;z6Vfsl_@vA0_Cr)l zM~Bwo#$@Gg7ay^JT}UXsLfDZ5@;<#oLU6~5bcOBgc$35fQ=hqFs8rd7YBmStzpuO0 z6i5~r6GMV}a<+v|=-;&Lh?~gp0)>mGW?}L8mk0K*;o)$@@t-p@GYQsz#mO8S&W?9k zS#52uRu&c%(O3uZFudlxpc<;n2a z*Z_BT_l=+M*ZCFHB9ZnsPj0)j^fZ$>zm%81=X;Nl?#>0Oqoc#-PfGaK!NHLJ6Wg6f zgN&dw*Q3ssxb7hN`gM)Yb%{I>(gVx>Z4gKLM|h}&`>-hBO^Z|t^obvoX{JE|D?tFtoIjax7(bqmt-gv zQgL!}Ca0xM7n3!bT3d^!$b8VTGUrbynrhx%>`c<_PgPV>nwgyy%2liKiLpL`N7$46 z;_l??WU1+#z$pm{iGM~0?fO_Ty;+>AtE*Z>mE+C#up*n>q8i(?jpcJPva*rV zQ;P52$)rh<2%aoIBzT33bA}^a?qT;PI`_qAUL5@xw~!L9MnOg80qgz!mnTl) z$L?4el1`TS)k`KT1GKRYJ1?62iJHy31;pWLymH9`nWb8XEQbxm9BC z-Vu^HpRpnv`t)#fm~UgOSby(NzbCn{$JTuCYhDq~+?-yT!$ zbm@Q%m-G2PetS6c61@BRzP@kAyG#9<3e0fFpZb!oqrAPni}Z)3@b9atsEp)&K-X;Y zBNq^;YN1>@oOUL|!opIsU+37_+1V4T@(GTLYKIbqT2J4P6|h$<(7OIwU_V3+#o&Lq z9l@%p2qnMD{!nLaBp>(Qy#o6~ql@DuGI*Xzd>flJe|7d(1}l$deMiQ}cj1Ated>9j zp{bdnR&_$6?wgy-tXKT*&|LYBan=4ksB?q*%HT1%EXB8-{~?kGJFzg zB*er#X6NUV`41<|?Hr#!fBuG;gk*nAJb*X@)mEh2@3VJ2Bsj%le_$Y*B14PMq|#IT zDuU%1?MAaT)O5`Fvy;_ayLBQiBZi;nMkfdBnMWrl_w3^m)bT~7an;q;ue{+jnn!5v@i4b(NRs>2QJhvEU1tB3mC zu>wBOJ=pB_KBNF`!F(9XG^h1v|+Hp3EI0BKzTs>(WE>ck+sAjOQr0NxVSi}^O;oxz;;Fp$`rd(PJMrD#r+nwF2>yLS)iQ_0DXZj$mVCOS>!!Y%igTk205l97>}9By`} zzKVL2IY{GE4<+;(CZ>=yZq=qUtnLd*$vJrUc1y%OW?J5h8V8%xag548k-HztQEi99 zGFoc-5Dqr3PvUZbeb3=GTmS%D<>^KR9IX9wjGiic>$CH7%aSM#eGgYx;h@c#^@)iI zZo54i=E|*004L_gjcXP%(qiH4hC&%p-`t;gB_wnS8hz2H-Y-yrR>u@_R9ZCy1;pSs z_p0$HE3Cz{ zXlcE_bLg)aogN)6PdYz*GZCR+z1Ts3Dz{pHw{}cSOuT0l6&ZdAcvzVHTA8sW{3_H*i|_=a@Q@ITZu5|as%j>#qvK;d+ckGAe4i?V>AVlZljmGa zhJ>?X%UQB%0csVIk)tF_BLrj4uU~gl_*$$CW?uayxs1ydm{l~Gtz7b48K7%RR~Hoz z55e8LcVR;vUz~0tU%mp(>(hT{tC_9m78XRILRgkSmkcQ@(&DNnQ zpt#uBSb!DNyCdT@p7G~`gltQLzkazwQ)6+yaOjHW`2|n+2zJuZ^xq3lPfzin_R11^ z1_rH`ujJ{2dH~Y=Q&Lhks@XqwnLst>%lXc{9w$mJA#o`_y$6ce_SV+#2JgG@IBzo< z#f3O)UOIqABd;a5;xR7lmXV>G-#mGrvedLo-Rby>we=xI^CU0D6@XAmTE!{vvKP0T z$*3tQ-7{pZ5{UJvO zt1J%dx(sj+AkMl*9S{@MwuK8B}-1ovSfoNi*=imi)P~$=EMg^HsKwC;`>I#YVsrDjKbDV&E z7?W!G@))#CIMxn9Te5e;nbnzOek{R}LY4Q53ZGuLzPOm|^p{>#juCVuBqAbG%_}J> 
zfsG$Nb~<(ofOBo9^iiK3nekW=g8yB`&?IF6VP(i?6K-#BTMHPC7HA{D2l=vC8OUfk zzqq*2($OK@w4ni?y}2x6co!Wi#a&2|4sGPuv(T>57!e928|T|Eb&W0gr3<7tWJq zq3g8(mQHFZP2d`qDf+0L-QDAjicJ$!)85W@_M*7>^9uqq+C6#XqEWx_@$s9Xx%}x( zx(c-y8Zm+qjX7VbT1p2LLGYz zv?S=j%j;T>S`~tbH;+aDVzF4QDaP6^5&-R6&$ycui?E^8TNwqG& zk+VT|R>`!}Tq|{4GQj=vn5RIA0Pg|x#)hL@CHZ*TVHZ14@Py`76z4+OQf!jd`q(2X zssiI_K_<1zmfj?B$$_iz5AefU=nT4pKkyJb*4XGiGNL$?tKJP8sUG&OUE}8Jik>N-O~+X|agBtzYVQsp*@l6EkPp8+dNX9H5m*Tvi^pvKnt#_QLw5!Bul_vAI4 zKE-MW9soZx(3OWn2U-6;Jx2BFXj<9STeohx#KgoHT};G0*6njc8eZsUv^}A|Xdd&C ze9fx>^ZzZFbdp$da_DG2?>cdM`!G(e6!G!Wu7W8&I4Wk04ER*ndi(ePxO>s z!HvEYEWp}J?aVdv9g*yND}z}6V!t*fEB^GQNM@}4J%t}^t>)J3G~9jN1MLq$#}Q!7 zBVapu1qHJ}%PZ$J;~Wo^P`p-a>8ExRNaF@N)(L+jrKorxc8Mlv5YV*0XJs)2kUf$5 z^oeGvJ1z}m2b?orw^CyHIEH<8&U<)AMGMl}o~2L`-Z&;Ad$cDc6|&!CD44FE1TzRG*rf z8aBNFnyMWVfIf0iM138x0Srh$XTam*<4b{e&<128jb1+=Q0k;9)Q7g@7s0r2N&s)- z(WW3LkRDo&d{%Zgp1r+&3XrxoBIC*gm1>949_F-X+V5{1mjmdeb38p@JmBx`?Ui`` z+zWxU!0l*(jkW;`^AX6gKiu$velfeam|A(f7}XZWsO)o2>HxILCpR}&LsvHgfD0YG zcjfQ!*$hmviIahGvqX#LZ$$Qy*8 zC-wfGovp9^jD9@hNhWc21T5ctp)FK{Ig`*=B1NO#?fN0H=slrp!mY$l$wg9vqN0?w z0!F%@{Cx@MAYZxeI=RSON^{T`Lz|xbJ^KYzr2co^KX?GzipEU6J94n(AG+O| zO&|xEQCb&>0+z9{arN1b*oKfHrYb)^=f?Y zCxq@E930>jwYwlHrk}VpE*l%$*52O0IctvH!Ke_Yt7CwGj$*WIq zDbTjifY70jaeW0O$a5%$XrCU>U%zf1A6Gu)QNw!yf6K*m%FE6DpA1SD@q^znQ%~&( zmI5@<`v0dU8i}shge#hmmv^i9o$8coh8*k!Rt^rvO(W#mc;7D9vW3LOy>@Wm9~~WC zm<*Kw?E*NndrXX~t+gm}QIkigu-pD(!l2PXAF;8s)A;Z~Iz@pfluA_kxs=r6Y@;v2 z{=_bUnCo@t8%j|O`cm(w$2Ru%pybF-O=eht?8FV={K}Op|Fl~8Ezt+sSwDl@vBr=`9m4& zPZagt*$6}Q;`ejjHaZ^Sx8uSx^>o#hLP% zPw4dgWfr^3Oy|VzF{^dJne_?`Oaq1J(4hm)BGN7gGZkPJOracU1danFQmwFR92&X@ zy$H%4+_DsH%_U^ESKXFfxFl)S*^^oN`?){8kU_N^e>rK4N)xiw!Blak;( z#KSJfctQnS2lT@bL}-{W@5Zz5sCGPE9?Zn=PkX(TnOj}o(cuS@LmU4KxOO@RCi|}> z8lwndG`ZT2EgDked8Tkz$C%?tjw z`ZZRvpCAgGS8$~&c0}?1LAI5JMZ?q-sjRFlvdsa&@|u)!FWxb^`}z-JucM)}LZ7gORTQLwAmnv__Fsyt3kS6kPbF+3OY`yeVh5}wvjxO|B zxCU9~>KD%>yz9DUja}%Ku#Yq?Rs9ZJ^PK-WRgM2W+kNeQFq8c@^B(Y~XkJT++;hO( zdw>!KPH3T6&LsBIoZU4K)xK?NaK`odOlTdCT$26n3FrQM#dsXF;vau<*viNzeMirj zO}eB8V;(j(dDr7p_rF*{g%MtN!6BEui?@lvp>ftHoc@&g@|2f-(*->ZP)a~PEJhRD zpwc#gywM1vzvW?Ca`G)AqIM7_(JY>IPfn6R6O*IEgI>tHS$)a`PC}Zf?>z>k0>n+Y zLrADsGz_Pwp8H#$xsvS`1^vwi(;2;=IaNWP=&v#C?oK391q?0P1-_WBBp5DZpZVG7 z+r^{HVomi&aL{DA_ZQ81?%E@iV6q*@_WL^aY@Y!%3+6PvWxyj3sHr_`YoES+^~x4H zxh?E6%R^6h_dkmVXg7#BZ@2)p4kuvOL9y8GXqlOt+b)DEJb&>b0i+{?(Sj=gPLrzd z!8_AVdXqhq+uzFI>!*3z`e<_TidC4-p7Bg;9*XSyfCY`!6Km0ID$}7gika8;X_x5m zybEl&xaA5 zd{A(ZZO&YMRG;2$&y}*jk0*3(`}eF&fs*NifY7T<-@ZDT_ayoobHLu|FNtgxNPDWi z9{9j&zQtZJy!#vF$WLsC_=~Ca-HIPyZ`XQ8XLw6ss)6BVvpkRYWAsjFOMw^ng`mCY z@P>hYU2PZ#!M){IBntMf-3k%WTE9(ODK|1{fFL9N?}il>tE+a9X_lZxkpAeE@`L7; zq4$a|IFm$W8oD}1lw10eAjWCxpf20tz%q&k1WU2=h)bknyZ{NG^K~Ga&#RN6M4G* zCr)Z#O8x&c7I713Fq@fE@)_6y@*ak_cFlBvSG?5inb^9?1FW1&hZ>)IAKEEbkVsPiZ zhN0+e)+jF3qvbJpzb5wPh~k=Gr+m$E;u z_5A776%q2ji1}~*6&YcLpp$08fmSYj?-8(6`hVYAv)`+27%Oj?BL<37t;|(`L!8XQ zL%@!YZ!lIA7E~ic_0L#MPyEKdqiDHI(f)AGtL$5fzVUCP?mUz+Mp2e`u)llVQvduy zP`)izMnZztbb)I~Mj?B^>l?MYZ?1(EpKsWbhGzUh54XE*P5s~1ga!!RRX=`HBE8)_ z^!E;JrCQeLP<_bnd$BjLw-U<2HS-(4Z$3I4(3KGueu0H092}NvD3_7vxF;PgB$T(= zM-b{h^Gqg0;GZ2Vf@iY*j*}ioMQh%FF8pv>ew4w3C8R=28`H&S5`V0`nb+s zthBu`3nb|;jECW+>^ZuBJP>G;(6oNnux*}qPKt*rr$P#jTfdCYW9V`xm zbLJq}&ufCgS;~s5TAE$8VQO%Psnr!TjoZz)1Rfp?%D+fLaN;2Phy23cx9l9_^y#NfX%)a^Quj&@iVtP zNoC7mmV@f)L(3n?W+U~V$YbL=D-??`6M0Ve;wu+UiVj)mB^%V2S#z`0BWG7i1@3AiNzP{i-z z;WZ9rE5idwf^uF74%W*U-cYEg@3-?hWW6w5{bk9|cRD$*zFywH{TP#}KWzhm6I|!g z19I}uATvaP#BiI4NI7vAIu<4%H8V4_YdAQ7IJS>YPy2z_6)gXR7O%u}tarhm!#G52 zDhRbp&$#tBr`iqOIe%kIW!LVsPmi_4MZT7^h4?Wlzu75%CZnzEEu0(6?e6w|GWC`B 
zBBqb0xcd#45)fxVrv3&bcdW#?8+ZgQ4NW`{zj95)3J(m#0Z}9gVB^|&Ng#-eE2?)k zMhmIungf@no&Tn2w}&lk%j}7|*ZJIbNt}))%{3(3F^}|k3Z4t#7&H@W>yIGM^B;_8 zKBA@$4h?mO+71+J7Eq$b--~mA+u_DF=V2gHuf{ZkqeD$ijZ(AwLy0gKXrGUe(|$RP1c4z>yb;cwya6Q+pB@An<5za&5E8#;To^5ZS;#Ey84_AigI z-fE|F7E2SF{b6#y%zBJkc+36Oj`q;e@(T+!JHK;0)*J8whW-A-hXgDVE^tJHOsdkg zgoM7R9aWhA^*`Ns#LOHBM{06?Z*Px}RGC5a^OrSyvNY*m9$YkcE{dqp*P#$vE5 zhz*RAG83JifC>-vmDX})Ai-{bMN3;(@=E3YkTQbQkQ*(Tvh$^mbNB|Q@E%{Fj5IE% z;h2bl0RxC2Rh!ko9bEP%E)9{Eb?me#N~x3|E6uMPbV4`9DBh``%)i|D9Sf^o~XOX*Nb}!_Ym?cqeZ68%;k<9}=u*iqILLzyuZqEry(t(H~?6u>j&$P+Z@pc7sk~ zpwks?`c=^)k0`GWC zKexBUSRDm_jrhfIAqH&1$2}w7)yaONmie4$6Kll5I?QqfcrAzlqs zrz-hvm#-sx*N)TlqoNDfy1oLlQvPBow~RtIh2cISX7D$Q^`Y!{Gq`uYz}}1)xnJF! zo6ZflQIwY#59$F5(O+6xo`as%3<6DScQ-W$$6I4u7KGq`(-9GIn>q%-V0k#%3cxOs zP1|R~3X&01Qy3n3n5mNCObm*7C>PMMSV4J3i2tEwh2=9ZPtU8U7cXAep_zcD0xUH+ zJX{O zohT0-=y_9cumNS8muH6iJxYq)HLvpGn2s(NlL%#;*88cYy*&ve#E7Vc^Gdy#s~C_HWGNlHP!($#(au zE&Z0NKzm-f3c8W6ON``@bWp9uID$e5wRPT;%;`Ny`eXgyqgFR+OqHIRI|eorot>Ro zJgYRFYg(JGiGx>qgEaaHcd?|c!%=#`-J0fdm#*lohdj_Xm;LhHo(1kMY)Xo<{85zk z;*E@i%;(A#_bl~(Mak?fAck%{2Nk>(`b@b$Y?{ED>| z_@QlilKB^^#hH%%bF@yR@e=km&ckfX*OTnULNP&Tg+fTW%n!^R$KfWh`K;M~GsWAs zL3meZOGE5-t+maW#XYW^^yEffJ&<3t=5ZB#WySf2h!ZJ-(GSHf)^&`GxUmVn=4wAH zs>J^}O-j;;`D04qil zsxT;7q_FnL*#efRpvL?n&O77hKs!`G%=23|=ebpC=53G978Vx%+-pDxK9UOn_DDf# zvq1@j5zOXE%HQCixPQHcF2>~!kuaJ!`XdMGciJCn9!qfGCRs3=L0 zVw{|u6qN|7acjKHMy@AQ&=lw9-V(YmZS?uOKlPm>0gAA5RxV$T;k?2}$;DRsoHc}$ zG(%Z5TH<2g2ni*cQafpeB||e981goI(bMW&^RIt$ZEM)#b$MGiDOi)~gUNGAF(o46 z%e3agBv9E~I1g6LcwwQH%#ZH;DT51J=WMq1pp45oO&Tka8k?&^z8?7GxhjFgg*#Tc zRlD832_gUe=6I)wDR~?PFJ%Z+0TXC@DrST>BR;1@O3>cxt6P5XU1|t`6IF6Ceyr3G zM%-)Z%+`}rLuZwNJH%NO50Y80WQ`` z+q04W!RSY#_(N+TGrON+2r21?R80Qd$;gxsF1O=wFIkhAm=V0Z zvprAl!zRjF8Kj3c`Qzq4xZ2>@^9lDrOkG<^iy`Xot4HeDjBdu-MnW|@vGBZ8i;aun zc?()gK_3cy@+NIU<&j2z4heE=n^Ppg1z8I8KW_Yod#{6*bjOY*VBzYlmt>hzR+;WA zak#I$6ZLmJzv1e4wURr|Q2~N(<*|7)Y_vH01&$_iLfr#jXH;X5|0e++zDXa-o0DUM zXEy4@_Cg_RbgIn{jZu&7acvY82Z^LbSG4nAN{+`Nds~*D*mBi5Ia%v+WU1GO3`qDS z@^z$j=3^)U?oQOXXTsB_)}CM(B35jVOhB93(_Q&oEPCCC6-U|I z&fv0LYo?B^o#@P#13{_rzsTkUMak37ROh>jqy)`V%bDc|D?jhK6&+|icj3BVvO=CY zuGviz3d=@`J}yI{*3K(WwT%MK=K>Mh74_3FMX`{D?%%>(HlZVT)gJccNYlYb#mhWv zE@whkR#}*fp+n+L77wL+5r^|v*xgPq{c%bPmwnKZ89Jdc2A& zBxeY^QWl?8Bya}efLDXX_Pp;{OZ~Zkyw1GsZ2G-RONgyxf1u~fVLW~F%CKv4F~?0u zyE$m@mcI97vXnyj;GO4ZRrysMZ}eQMJlPn_>j$h3aI{e%frCm^YaZ^ zN`;@sifF^a!ib+ZMT2t_3x@t3V&WUMP;>ufsi4op?`tpjBn)IJ1p`PJ&@)6EfPRFS zgMb}U#?5=j-YK77AW(B>z7>JtfZ_q})q$62JL8Uz1TW*9PIM!yq{u}ElW8!;nN#x_ zD8jaq!du&hFLKz}rF*Lqh<;tFvdzlDN~NS7A|`vivND*Q{q9xWLVLMYX6h?b%? 
zKia`>j=DHMO`_pC)iO2?tf=4vHUveRi$S$KWNXU=9%MFO)95GxNgehS zO{bC~b+o6uOe9=uw>yp+-sS*@b9}X(d63tu_whd!cjp2%HMHNZe;bsja`(3=W9!`!@eq%v|KGy$5@%GfLYf!aUbXi&)9M6=(e9N%%Hkp&gx86@j^ zK0W3?-vcFLGRvIcDTMiaR6R3eFAfk&?@G|;-MG!&$yRD zj#cjpV^Rp}7$H1a>j{-l;_+Cx{mvsOTQ9J1#DimV3?H-B9psB=aN4j~#qcJuaq&OL z+|K)o`Xn->GqmQc_g5g0FT&<4eo2*0_uQg&)NB2cAq;`s1V{|HN~Fv}sj!2sGCUO9 z{_d{|qDunjhV1N3F0PV0=Ndpfk+lL3kpS%9s#CB=yaNJKAu#}z@0v76V~SSc>{DWp zIUbP9kLDKvdZ%a<`?>p~}2I$GLWS16nj?uKMy0-s!P1Et{rbGm$v zzm`aEApIGH+%{IrgoK15Ank?}1(3Gw4jm!S3NcG|jRvn%-jZ91X6^KHg_Lwb3qhu5 zjt7`rL#b~ISwFs^XNHy}3!?&1y?lhaf*vO4+@N&)g%J|Hx?9}YnZG%N4K31uuP9uj zGqLY0>gX052dnnG-;Yz*qQ7848gS*jIbT+Cn?4q6q(GVW5ur0QnybwIyZ-jUgQwk-r?aAq!2p3bNE71(tLYXl!1{^ zcOd;X5;tP5w%Kg&_pk%eH(j-Y6Io1fJhZg6(YCRMx4e7pyhKAouOP+)1A?P%@F;CkhB_uE>%PoJ^)qTb$;(V_ud&a!q zcXEPBPAcV-XmTb)*iJEbjn6Eyqf!X<@|B#0{_nQDr^dWPmUSHiTapD=yuFjNm4x+{ z=`fkp28xwA^xXQt)^YEx`({*u&m-DsS<=1vLr4roW|m5odb#v!h&7iVRNP3|>qv4x z4+0$Es>x5O-T^@R&I(OGP)!4vt@QSva;xbT&VPU}6hG&t$KT#G+A|K zr$*<&0XN3}FVmg}Swvh+fUX*TNnMJ!S(eLHi$M}+3zOL7Zb39YLLx6Q;VzDLZTM{g z5hzDQ_EdQD!O78GQXmPwW6&8!MkEP$&^MHoz`~<6Y&4k)DEMcr0Bb&HrO=mKq5lcO z3luyL1K)Zw6;QARm@5=mtnW{*fsoyKmr;LKOVft&%_}uh0_C3wWIlNEcX{kpOpGZp z3o7mFGetFKT}qL+|LLB+;MP@iJu2j<2+00RDPsdVt-m{MU8ewCOW2&b89{!I-+}S= z4SOU=N%nL8kQ)))xhCa$RdYIposO1{*^Trc#A%yo)E~{_Hwg*_b{1__sAvwFr^gjU zpr^J9^KgEoTTt+PI`6YSy}~#9y@;I2j$i0$pV3VTk1j8A=!dQ7-%4crhq@A9&g1I= zIii3?8nL8O0{qw76ixkZNb8de7xo0U`sWN9%(CVHUGdYQzur`*C!{qxIwRJ9zJPRW zKxTV6RCgge!Qb~Q~nlFrhbV*&r`m!;asxRfPj<>Ubl2TzdIncG%|S9BEWAIU(UI_4u!wOXmctj&mFfdjbO6v(GfgrKt%0u8!#M@m zwP`Znt?|3{CKZPT9yG%*@339Gvl(xmpCFRV+u$a)F=`Q7wzKYnQ#9bj*mwH&0Z03Y7QW=_|qq9eG1TM4kAa3 zt~i44A)yY*?L5dlAvxVj+uaCmQ;o=KQu2oLvhR`2E5hN5_Xs2{cA&Ues;=L^xBLq^ z-3H2EG&+xuwT#BaKp@aspG?!|vNWsDcXmbu8>KWUq!oxr(}|Ylv()UV!n6=%1HwnQ z7uJoUUM&4z3^xyT%-$ILT^*(>egdPv2P%K*OBXAd+ZZkbW}Tgxse>r8&EcfA?obwi zarH43q#{#6sz!8Sh`XWn{51S0i2x>T0}ufrD+-tz0y($({6HC&v=PE9!k{G|@3b>V zKGyv3lA-(UgkJ_ z|H%a>+aX%wFu2^xcLMCS9EO(zorfZEN!djm{Z9iT@8nF=@ncdXV% zo}nBCq59L>P?ZvKiHU*R8xyJy1WH>q-Gd40@j3Qn}b00ZoyN( zx0&*+V5PdA(zww$ZtLgxiBQpc7BS^3m2f@)_asEg9yAddZvmk0hk-VyzvtGFn(!YC zHA|YGH~H~)RfhXEE)|s-?}MKBU#n!iUTsZHzx$b0O1}al#BhQ6t)Q^5CIC~SkSRd6 zE1&g-7Thy3iv;>LGUNmMbhOMoZNurwlP8mhdmyrc2caq5p?-)Yk-51|PQu32gs#oe zf`H}3=B<4@|ICs%a+R-e=DoHRG!Gnqeo4iwsRmBLlb zfeCEE6u3em)Jf-;y8w>>KSPaO!9e6Ih&Awl;D*e=e1HLwrWK!3UkF#+zJ0q6U=@l5 z0`EZm7~p398=Lw_^*Z+Dvu9{<56OU9P>_T524DT=Q~b-umbsA_jqbwaBBS1A-x{Ib z3gM{DOX$SrRAtApzs5M}$@I2w_2EcM3Hk{#Ktg8A>E;vFin2f)DVRXJxg1!QsgE3@ zhI96JC_i7{q^^DMDx^5TcA16r{!)-QX|KCio0PWUt8?M2o3<(kpV1ox)c4H(tXJ0j zy$QoAH|*4e8R5M`1`SGsrJ+wEgr-{|^7#q|vRc~ODA?G(!92khOopJ3p&&8;Nk&Fy zq{g{sY5|#u+JTJ~92cklXj!$|A#!gINtb^#A#=ZVNDY!i)7|Ufjx5BPgTC>fOolH! 
zW+Oe`*!3Tz{fw@nT=vcHRh0Bj5%@qKK05}IB0kXF`z~$CFR`Sqk#D&6UWy0tUm;hm z5!760TkJ4mhXQ=s!unoKMLS+5<3>5I{%Om@h_VXVhzP|q{->ds>|7->)4K(Ed0&wX z3QVr`&HcPWg^qFKH@w{?nBhR`U0xnb05OjcOq-z6VYm#Lz-Pq(1%4h0fBfmc>f-$O zJp5g;9ziA%-j}uk+!C%deFd~kxfAlO6OEhjU*RVnOUT{>2Ixn?j?p|$w+^vlFR1N1 z^;pb3aO4-e<6Z;nfW%KfNk1fUt4~(wCpChI5DyKKryWR&)XAw@jUa-#<_r;56i%)@ z6}qpiqelKgoT-qXEi4JY-(sC3CX1l2DXPw}a?674Y{ex}KDhWJ4kAR2Xy1)cWGT3>d^<fU3Bn{FK1nmdGFUo=3gz4P)q!bJy>f;P0i;a!R zc`;Ey;Pt}^tt~yA-y_}+X9NRF2_ONwcPlia*K-O2*hoSUVtdrEZ6UxQ12Y0s&VL;R zPuIZ3X9a%}fqW$;-~xxDp<^JUlU16$)*D%j6=v-Lmzrh0;MS)4fC}q8^j6pr* z-GZn_wOUuj6wthdW`bGgWzNHlv`1b#wiG`4{AuU4xRJpE) zd!gI2mgIgLazYsG6vd6-Weba)Z)D1CDXQWloDM|Ndph9CM1K<+Bh!BY(WSo=JgOC< zB+j71pPy>Ns0Jb`iZs42Lft1MY-8`{M}KN#!wtuJX}&f13?L!mlfx*(QgwGsSSq90 z%Gz4i@CB@N0iah-_0@jp2PJhuN^4&iW;tVeByla+O5XQ3Tt{y^v@ zzak?E1`8w)lAwWIB!TlQEw4tt8lVbPh}ct8)^a^IK>kKGgnKf_s;qI!pMhIDRuKsN zmhGpZ>#D+AxuGHMy$yfhl3B_{U#CR@>RhRXF~4Q8xeG`aLOT2<3_#o$uzL*SCl#B! zdXEVKhjhR^hY$!<7VF5k9E8jvYHYSP!kQ`-(+0CqidA+TsO#6S+s)wgOiGoeZ0%&j zAZ<%~MVYRho!tq^skVm37syF~DG6%ZakY9JO{_#peS3Rm!#=t@jWI|cdm4mL^sa;S zk+s_Rz4N7G!qn8%tOb68`i+Vfu)hZZMc6Pf z)z!mC#maj7cfc+NP?fF_T3L&d{{DU#A*f#6ohp4v8&2n*Zig$XoMj6-Ig!+(uj=X8 zx?wt0U<=hjaO~1~w4}ug&cA22NLsZmY=2nY<>!r;kV}HF|2!DtRFF=B*&B8knsRNgZBhFI*ZxnS z)&WIqT#728S}l9DtIrR~$bp%q`>Oel|G`2mujqIBHpb_B|+%F;n|Hq<`^OwqHRfMd;PC5;+CXpp5imUYe<()^78eagU_Uxfx$%3 zA!pXtJMX^cuY3PnYjc-`Xdk+m_={1lC3%rxsxX@ zeJ*5|lT%gvKN-=Ct3Sg$)D5IR>I7XZ*~ut)e6v3*1qrGj9Xh`WA@{A_RRuN(7z_|{ zcz(MX6dmK|>yMSKqWnQm#%{NIVRzgs+y&+-5c^*o&W6Pw?s*Vz;eM$G+tzh9|xLC7?X1`~#-5VP;Z!=Gk! zn3*UlN^`oH9+A|>t4ZjTjvoDR`9C{mW@}0d8b!CC;^5#Qin0d|nUP-_JLWMKyzYX5 z43z3wML3n3#H{ti;pLJCHLv7zu@YbQij{p<1^0LYww+$1QB&K z6VH}rr`C`P+PWGaA1~A>ew(bfu|hIK?Pksozk4X4pY0G2YvS9i)XYkQ?R=U@gGtSl zvU|F((HUv$j*9r6zC2bN`T=dq;>XgL$c!1~LcB**K|ggkfELh{+DH!@4W{4zXP1xT z_tWv-A-`M;uBlzPqj6gN*RL_ie}L|LKmTKBaPK8(&PJQZO zmJlvIW10&xb-_RpkbV@!cYY*a@&N=tjFN-;!2~2X0NJc!U&I5t*hl1FD=Y5H`v}xq zZSt1{(^k4yrLD4|Ah`VoFDDOQnV-EZ2lQ`JJ}bX_jPFn;klwF4otN>1;ne@pgdff+ zI`hX?LV0I=`D%HvA?Cy9siNJ?fEU+)>a+*?Q(6w{^~vX^vK%jZhC;ju@@|MG{1c>R z3K7w7k#VhISjyL);7e%UN48Ot^9b#S< z>ZUcb6zw9E%`fO9h>ZmKF-2wNBpM$-um&XtVH~C7Mj(aYx{j(KjyV0)pYdx%lCp?Z z2nJ3)tcfAR%j-rPtq|OJXTukorMUa2A8c7sfFIInDY`^&=|KAxQO*KIUhba9)feP+ zN$kU2w@694PRv(dw7uvn9yUuC{mXm)XLT4KhYx3x?)_6^))xP_wX*1GZ7^b>QN+dq zY*-rxry*5^9xBlYpAdjJBFLxi0C((&;Tyg`N%OYAPsPi#`@iX#^ax8g(c&#D4T!p( zf4A3A0AcQX7DRgu9zbZCM;zw+J0~Vhp}wR&LubuZ+7+~87FjUmSfV4+;JkH@q{*4~ z@1HReMv_0OnOt=jb&oT8lUvAC8VAuVN=!fM{T`6XM5l?H79&P;0DFdoi;EfpzVOiw zD`PNx(gqCIU{Y2?M(Mxs69L_lXXsfcqXCcn21tX_u(ArMqWxIN9#*m(|Ptnexm( zl-{x*MT12nvy$RRz=+b(*8ZR=TcoH5_7EflnpQk+CEk&2E44pj-{Xeyo&c{$5OZPU zG!|<2YP(1 z^=Y1=fIHwL2$~=_)d467=aXa&deXiUTG*dck(-9`ahk%Pl22+ktjuZNGx~Z{p>Tswj@HOcdct1F@_-|PcYs$G) zB>jBUfejM-;>IL$9?LYT(~Hf+UK$vfEDs}_G?z~ zfL*M#jpD^0oS&ceccu|>E_xVy)3n7w=>xdR4F))3t-`<`43D#!{C-OHGQtHuM&REQ zbu+^nczJTpqMUe#Rr<3SnyjdB68(}%SNns;zSwla;@o~7nAITNNe8eg zm1TQTPZ(79e=!M#*DnnW+qd6dm#&sl=gX#_{1$Cq7+)5bL*m0UeTX zjX9B-QtzU@Y$-q;TI=Iihn>$OA(uH)Q}a{Ne6oDuf3bDeQB|$qyWe0SARy8pAkrNY z(t-lgAt@yyEg;|;I^;69hGo(yywBO*26m9`#YNezOjhrPC;d~O_urJe+7j&xNg9r z+L5PUY2KZJh5`|D4xF{dkVO9XfPZ|X&}DL1cb2Hs{y9EWP+l6Nx3B(Fp|RMU0VGRVd1+Fzl03k_*sJc?&e2S#j*dVkG``$lr8TfEWvneFs%H7_#KR$ zI&+Vmr218|BeEA_wQ=7Uxp3(zdy6NT#(P1nu^tK6gfeWE{oCpRb8Npvo9zz7R>VuiEZph3qE^6E2p0zJl9=}w|r z$6zLX>7+}p^`R0nZg;_WW3rbTTt(%6y;wTc%vhkt^9Oyj=`q^+NDvWSdn}soPGQ%x z@w6-N3e8s7uW>s#)US0c-P>I>YG21zWw6I~7$z%?fL~$XU?LkdPn%~-W*eQC`*CM_ z;=?t+^|GfqlJTM7aNOkyz#NGAU-gAv!WIbIcfs*%>&O`6_Qkt24bi6Nt1j`7*H=8hzSeY z&QS;aN_z45dcv^=-qkOQBG0}xR>St-4MiIQf&mVgs@DD-n8)TpjGlrJO4W8ND^kwQ 
z&_LO3Xf57mrmJ*mbCtH(^{Xwdg2HY-`RBYRil@RCmr_PHr^}QQzEu85;o)m6u}-lQ z=6Fc=ezC;`6Zm*1_$jZ?4!nXLW}#+GdjW{_m@*W{)( z@U1*~m&x0^^Jm!8`|fnlw0nl`i}n|bs@WLu#1uJOv1jP6M|gTu*j?rVSqZ#)gccj! zsh2=7K$eOb8qLfBF0Va1O{%^y`_p_wT50hoMe+BaXvcD#3W^FU)Fo#UV(|s6R#$21 zR?s^OAH{p36x10idfD}>E9I7mRav0+Ug^uK4P>tNKP{`iGfCJwFSjZ=P zlBa}z?e47vC*%8W#W5T2Oo4wscW(-$!i04~OrL+BBPk=DEv2QS{sB7U=;LbRgoEKT^6 zp$TQX%h@TKvxf}?uAYalU>T(`C&USiz$Ji6^~+8|3tLOy_~2NEAhJ;8^gos>C5`*{ zvv&go1VU2~8rT~_Kzx95u`C|Z*2k9=xy)hEHXlcQ;Baoq&#@nU%ER#f>bjH<{#S76 zWeghpc~g6Swskn=h3PYsN|Hb&e^`zbLgkTevV-duE3&XhNSsR?3LQop<4I|lL5cqL zRTHFOK)(&D2A`K8T1l6=c^5~4z_g~z0+*+V!SnjIbYI<2KSeQYx}WE zkYI42rn|RLLZ|H=SW@Aoby_Q#FWKE(lX&m*KGFssFO&z*7i{qKDdYQVQ2#V^b}G3f zKgl%(Vv9EEWta{k$lR`J!0Drnlb507Wm@h z;B>nMz6wH1!U_S$xs|U^j7$t1A96Yz*l;MhV`ikFK=jHy`rUl<{9$=3bGwnuDsEk8 zK*7egST!shRh2hZsTBg_DL*^PTXbg@)B2PKikD4%@eBQvB4Jp5OAo7cS6~J90|fE; zy`cP{gb$#F3$Q37wnw%~y4PGkGA{I{l8PBs2uZu^B9yNRG>;W6(jB(f9=x=xdkFsZ zRq73;j`HL*o)|n0BTsUv`d=|oy24`(G zQcUl?WjzunjipyKO*Fosaj~#q0b>tQZdFunKq>O8vGIy8sF$GNBbp}?;DXg|9^xMy zZR>!%cyZoY=vlQzh@u@i;i1jsW?+!w3^!RDZ)?OPP}$xoSQ!Y7cUX`+B#?HYIYWoV zI!CxvDc@0Eu`(Vu`(po1yG`b1D$QpBZZDUW>%%$}*hXvA@lD84K&&+b1YRq2fjb^6 zi%Nl^8L}-P{C!MH!a;KQNZS&zV77i+lmj$KBY0n^twkD3eWWgN706Q>tf}S?m8

OEuEyP&~`X4B8i_ywN0qQqN(wK^+ zr{M=WwDw^`+3s1iI8a)Vc)S3g&rDP3gWnyE;Jdf<-lJ!LM}&;EoACQWFBMXQLcxHp zi1xDDG1$xlsk94S%-~aKs&)SAnS1#{P%pTA*k%FuC~m~3QO!c@lCs$OPX`ntvjqcA zSHZj|?F##oA`mUGXhPU(Rq7*&WVBt_s0fJKsfHp`(HaeT+QLHU)&odX>>Uc$4r5^% z{Q|FIZREwZa9mEciVU0L3iv6!yg_K?8j4zsWY8jL!Ucd!0m5gBe}sP z`1ZtEkT1}Tczr?7p19wd{m@O}cBp7@erUAA-?tp6!$nqsg94LJ@33ApUJ^=n{||v!NT6}mhlI_%~wWb!l@LcrKLaYL#kCe zoJv2$nO%DcBC!cfs{(4#O9QJENmBbouIx=mmvB zj!8g9nYe|U1)F$z;@1n$_!M!!!}Fm7z0Qh|q;g9CW!z_W`He3~%U+S_RxwTI7ZJ@{ zlv?TT^LjqP8>syTx(&hR#??zczJpyq2`2q~NHBfyxYMIv0qh34h*VlyXB`FkdgXqV zl^*Z>K%}ZB<$po?f7_Avpe4d$_Pw}YiGfnwTV!Q0-wc|6_k@Ia>}DRq3pG?|hKGXg zCJ2vH>+78mNtFa>4v-T??Lp3;BU*5Z5AP?%zmtKM}5u(CQGp;u0llA zi}qp3x1D#*dCAC-g#N+7q|vjV>-QzczTJQPbCTqO;Z)D{LoYj}h%lV7wc~zg+Gjl` zd^?IK6Z5OqYTedg@_{rQv2UQD7o$oT0F7unz%7m|-A=(V0y4Ei0)J>*%tpys+c-TV zBY;&}5q}csz<{SjfN8*73w{6o2GaPH1%?`e`Gd^2G5qW9%3wH{4`=7Uu(KffYy{r@ zF(RUX>#0;))vJdZj#HjkT^wO_w59KxKH;*FQ9i<{oLShKJ3Zkn=n9jL_!I^z>Y?Wa zB|}L`BNc1I8bIDy=;d@P=Ejos+A6J6<} zpzJM9R9CLYLM(I&tdH2fNZcBDT@{-c-Gye#!0+t>dYe4#2EiiA1_o3J#dUOPX$h~- zmWCxBmLylBB?)_M-pkPJu2V(3r%hCAqHCj0SdiXqUT@eI?drogtxU?m&HBMSuRxCCm_vHvMd^3{H@~_)nflxnIMy`LO{h>Vt3$8S zm>?h#g*Z(l$$cSj@?S%9;^2sn90zQ2^I(5U1?murj*bqRuCD;+HBkV(Z1|sxVdZ?M z<7F33UX5{(`o9#YLBsNGuX!e}>Xhq>WkBbizV@c0RStX5#f=tGH#OnynCLf<6mq@T z6#e3S^SmL+gyi{3l#Oa030etPK}xo=mfxAb#m2CKTxn!Loyfgv)ymk}cGEL=FNlrO zdf^-|4z3Uo8N^-CK?WPs7nt#$o}O~mL-VgO36Qi87=K){K9$}BPLUZ1rvL7wt4G^q zW7Q*eu(TX6&zci14z~Muy(#)xmvhMXyc`^7!pk~+78crkw$7rb!(`>nX?MDr$Z_dz z-G;*rJ4wl6l#RvQND!>?RQY>lkv7cO6>eU&hRK65&O2fUrXzPc6eWKe?2qT>gzSIq z0}oNz%jr7GjSbjn-*pMX8wom7Gl)K$+uOaxm6es?^HR37{5HV^nCCl?2&ym!A!E1G z#W-p+)Y!%haOD|&Yy=vB_UYW3PQE~cBnlq&b#uQg*)5-zxldxbI6hff+I-eQUE$)- zENyMu*MqQbGqAsbEny%@SH@GbDsQ~4GZ;SaasN7Fwo6-U()^xa-|H{_#lO0Wjmn*r zNy9-8gpL}*wI$Eg%*GLmNn=9Htzh2z4zvXb74NRwj#uzOZ~-~4fm;R*v1>rU%wn=e z6b1F%0#F|5n3-iudEDU91*2ehrfOqiAIn1vFc6g|DDUjyuI=k^I&Pa;<_p`#MY+s} z@So~ezR`$aIAn|5WVDfMlQ(|Qeicq8g3fbb51$zOL{;qGU905tq^#&Y`*5pY`wBrL z*Ar9~5S=I1{QS^ysm0o%PgD;(PN#KOPXk9@5^2$3A^I!X(xFAu`9XO(1Oh~v*h5|x0#FRDR+U9ahL()Q zWVJ>@uH{-Dfr1$)6h92t;|B*x*gn%e)-Wj&@^#sUDv%pIcR=km-!xv|C7A&v7%AYe zQrIt_0fBrQN>e1J4kzR|ai5wA2(yxKZ~=G_BG>P7bJ+o8@EjsUv2kS4!6szonITwx zO&TjleXfhx(z< zpmD?B@ZvTfr-qB+rG=|V#i+a{1{D+=6IR1{`gow${N8w!r?>cJv`sK>0PFe<1e>>4 zo4h^)IbSj?r{1O{UGmMT6hbI5#{3PglW4)`Tf=Xhi?b)U4o~4N=dvdZ zkd@UQ$PMnAk-B;Q?OJFW4G)c)Rt5c6E*Td?$X-EBS`%(;OM~qzr!SG`Y%p0sv2&bf zoBEBXzjMV(?E3P-2RE;}+n|zwF6dsWsGtCR)L*S~3>R3cpKkWrlYypS_K_#c)VQ-N zeYCf)Z7ZyKp}EPj-tgHSVyDyM>5XXgjWm0q7-uE8D$R;%fv2s9E!<&HNLDi(<&k}P z{43>B@bLp5gJhYVgpLmK;22|I+3Av!$W? 
z%ViU8e;F=jq^u9+AN{PGq@HXH=FawB_#mlTkTL(|$8=SRW7`&Mv(wuC{QmCliY%Of zd0i|xp#H6c>XVNmt-67A=eXe#*K$J$*fzGnL%TESGm`cLLO#zSs_E{r52ZdZ!G!8b zq~9lgkjY)gcgz&~WS&$f+)x$qx z43?s0ti-VcY4#vGX#ZYa-#-;$xS@3-tlZ30<{3BiftI<**VK(3MO3Iq)+3{IdEur!#T{O=je<8MP7$3`h8>4UjzO&BRv?K1f+?-0;p3R5%< z4CNxKPg!wKlube5cUCy+sp)9tZ`4;_=NSJn8x?T2=jQdm#2}f81b+Nba=sbZrJln^ zhY%gQJJH6VE(Tja3usaq>q9XH?(mY|Oq0_tVtfPrR210q8gm%qpP9NZ;3fJs&^?n0 zOPi8pgSe7qeKOJ{W0m_|-skXIDzIREOz37Ifzz7UzS!j>45YcJSSh@*duRQz1V`dT z%6Q)@Ey!m}+MTKSnn+7*b||2nQdi^57L7THJ%%RL((ZT(+ZRHAC)K zeG9^mw=^f;xUi3^zW)}4Lv(u&RD*GstDR0i;-;N3D6Fh$v&K}qv?t{yU z|Fkw{Z1T_&SYJ#wH%3P1_lnDE@HTG84IaFI-?d@ScA5P(lP-B^-N{P2cF}?eDqqYX z{GI;F*q(pwW-HM%8Buv}h3lrDs18r79@ZINd7s~DcF;JU6Kc@59$d++UPP;8%5qnK zDOzbF30b?}yLqMgeblLuRF@2}#QCF1&A2-yg)+U&_pFcBg*t$<$~_7@UHXI1`YqG- zb$FKi0%BsgD_Gb40fkU&p9rl5{!f#SM%ZbUaFQr)f~aI$JE2}0EjgZ{1TT-=PjfUC z=pF%$7;Hn*YHByhL!~#)q2{2-W|dU8bPKRFfG%aixne`#!^UR+c<#)-aeqA{IjjQY zqWt7{1q3{O4EzF0U##qL1$BiCM%f4^dm8{jX-&@Xq6XRXKrfgZgoPHl9B9Uyutu1| z2?~U&7zk&!oDu;30epmrGCgUQ|J2B`Zcw@Sv&?K|{uq0ebLfz27r1KJO!M;K+l2&# zhktNqyZ%Ared7SOE;KG^VHW^>NMO?D^58(U4L4u;&Nmx7>gNW@nGK-3$FS=*f$qNe z=WBSuC$+aM^nAj}W+XE9_xOn@upfKh>Uo3fv|i)j&vBY078)0fEhbNFtePEdC&acc zm#bPRZX^z6$BW?cFRG% zn%sqGzD0fx(UWEinR+gHRf&I|&71!{@r06#SBMZ_)KHIci?9bV4#-W$#H1 zzf%ewh_aDwRq8E15jsSED(Hr67VWTs-UV-7(av!ghksk3O^NQ%gW2^MTIr5!xWu@-`+uTsEHu;OPS@G2uatw-wuUuPsB4=r zIH+G4o9<1`Rmw5xjiq|El@|p#VwH`3@H`L0ToZ62H;w7}Z1eysHO55ib!KEIo#|$K z-5K4Ng=xsiiJOfU3>>X<^cda<=4nR@C zkJK|P8PF|-^0IY%7cgQcY5Ch=nQm!k{ajv35G->=rDK1cR%*Fj;aRdr*w}0{H>AnT zww?hb&h%Z&lNjAV##L zQ@wj1Fh5i{Tivy)IC6v@?Awxcc!v9k{{GCu;eil@2RydGqBJQWh}B>r0%kTsxlm1) zM#5OePO@*Y7Q*XeT%G1r!3on$QNGn5!Ct^q>*1?nW{Q6>w)6Lvpx^J%&B6O6Us$j$ zL*vi3PB4W@no6)oTQnAy*RdV(5NT<>X1su}lpQt^N#vS0A^Wom26k}Droj0ez}M(E z$jE*}MT%7Ez~^?{m`H+W=ufSWkFk^dTddg_uZ1JSp;{)a>o0U7!XLyNnYtgyYk?sM z#8TRYO(TU{w>FO~Xf$ys$g`R?nuo2-kWX>ExJNfP#x}0jmP(9teg3tFBLh4-S17J> z@bt1jw1YJQ)So&eXb0z=^uhCA7}Rj2ynZD&+nB2t7-gr-+o%`5?kNQCG0nO}22;?p5Jr2zPXUrzDo6qub^yoO2=L?p zqyqEWwcE6`^`!dlPeHK&{pn+LG%^>&%|%9C5;iuEdfA#uCtTl{ih=2<=9Tfg>gGQR z-+35`4CCX+4h>HiE;8mM@bgK>uojCrQTa5UNr8pJ$FP@kd^>i~co#B~#s=ZCOVPcW z@6&S!U3QPF#H4bq1-Snp4?5xUK{Qm)Pq&Y(8}RS5va&)-PnQk_>{k+2R_i6KQ0erO z+8Y{4PtrFB;%C96d}(T}28Zlqs5+R>%HIAh#l=j=X6dF3Xm9sb3Bwqu`=f0HYhQ(& zCitZA=S^s9Rb<3#6x*+w(0@)CJP>{uK$J8%cq+SAB{o`V>jF`~PhhVA>#r*U8gbpc z4f>p~Gwu}N3tAw)gZUHe!60l06jabPJYWS;o3$|qt2U1ADcW$5 zFe=>kxbOyJ&*u5GM^1u(fMhspvV2?&DBFzCRT~OP*lE+V)zZvv(090UUr8yOuZR& zIbP6!5_0Js2C&>r2OJNUl;D%RT7LzvY|-y3F{9`36Z}PK54TV}iYzHgL~mjX`};G% zN;@H8dwg@iF~?=2mngnUghMIz(ZHln`j3KN3xOL27ULt=M7>?XQ3!UAJA%SwBori1 z%1c=I%iuqOwjBs{0+61C>lb$73wh5~>pNkGyWHD7JK+;$xxG`)yF~xPc}?2HB;cTQ@|lBI zn6*anUY#aFnD9Gq4E?-HXE1z(b?Fr2LmNuO0}{Zuqdh~t&sf)7k)EZHPNCxq-BZgk zkFVG0#gpYZl^J&OYx4qYuK+QZS2CR10MexU+bnhFmg##^mZ5ZqPZzmp3dm10G>`MpxF9fvWc(nM8f_-< znyUhGQlz!Jx67}PLxMo(-VdC8&4ZfDSvWrV9|}ye`A@xeaGb+Mp24>|z4_zjkgIn( z@SldpJWPtq0zEpAbH8ODd+j+U&3daj+IO&;x)WrBb(5C%5)6icQ8JcbKvZ3?3pbdL znv-GbV?uxdr(boAOuRQL?B$GTVhnuzLpl;v;}T}~_OymX_cQU`PhZNn$W1lRivL({ zX94Gji>PJ*7ND1&l*@TFoZNv2Hj@{&m0Fm0SUt+XzyR?XB+va#BSM}#%@Sd-2@Kwn zQN3MO&gc-x+dQ^AK^=`;YBE-?sVVfSjZ#N`O+%is4DIgWv=i-Nu2^9_H2hFtalBlC z!E@2#iK=eA%~Is!tAvxt5|Ah_IlqUg$_!Q=h=%rRE7LbTSgNc|i>scdLn)W;O;?{Q za`BbBl;ca~&TkhjeZDwij7^y3v!&Wk5{|L6r8aQE@}5lyXZ)$Ji~cq2aniFD6#U_D z;ErAY_m`K*oN%97TC#vd_P6$;Xs;B8IJQGfADN(J3cnExoY9~EVV>UzxjNO@d0sV9 zSR+w9Pgoq^_UCH^kicT&ruP~bhFsRv2K2p|$Znq6EdJ+uy*`5ZaO}>~=aNS|1yS$4 zw@fP|!b>y_3saE_L9WhR_ZQ{8m%g7wJzxw7!Yb0#dH$RZPzf(4s^T<>UUsPs%Sn#~ z1NW%*I|17nBeAbVHN~bRqmMsTc4mJ=?Gdqs)u3M-8K3yY^}uFo&bD>2NV%J*4PGq@ 
zezU0~QzSmSrj;4Ayi_Ur?bGA`PD4`v?26PLtK2~Gf#d9{Z(N-7Q~_jip)#>w9}|RZ zzDbBpB#Yanca@ORcD0^U1;l8G+!bnE^VY}KUf11o(t2)O42%LP<mv@ z;#}s*B8&B5!U;1jzzOWasi{B~WD}`*uVJPsyUD6Zj*~cZql zZ^Q`*EWNkwqA-1Kp0>WVF_^@9RO?u%FOY9b+L!ql^a;sG?DQ)?Qy&B(!8)FsE+^B@ygq0#`5sH|gA$*niFwc-%{qZgZmhQlGuh(szAaE(DC@eO1y7uTu( z8^F-xp#VbjaM{*8zbZ2`Got+ikPKRfq`c-gAn8^Q5aucGmRsm{^+&8aB4@MS92xUd zOXiK7#-lSE&xSl?RS5yL@ZPHS9E*jBs?coOvF)V%$A>AcAke)9ZoJ5jF{BP~y@tep zq*02BipZ>r3pnEe3zFA*WcO8POjMKp~SOZzO`UJtY&CJfi z82}r0IgyjqLS(H!-5<5Z>l0AjJ0(f(+zi0i``um-sWbWJl8i4A#S-BfgaHjYg|=^M z@x*hArj`8+ndOc*=EbVqcaA-6vj&lk`-Y2Yk^E1^c;0D%_QS+C(M=#ptt&zncEdd+ zBZD3qGK~`uZ@(yjsT+4_Y2hedRnS55tv|CGMt=&e=lDY|bn#>On~=DQ!OHsz1n<(^N!3wZqrw;$V^lYfw7 zo$K@M5){u(+x7Z{m#e0gQR&WmHibr0e{lbH_@$aD%}})O1J`dv5;1P|we+zK+bIN=!_2oW?`o&M=+_8IkbGHSG{@8q8e~ zKl~StlP~T{dp7LF5C<0PM@p~aA;JugHA=?H0Um26ELmzF$=1CrESTCE2(BKJVg2-o zDJ`t+!hafDn#&kyi+aNO`9~_5qmN;Ur;%%iEv##=Kjhc1U+zc(K_WvfRWnHc zwFrKfd@VJNVQ#9iC`pai7b1S*H)r<%-?cqf{#h?MZA#360hozk;33zr`;{MM#X6p0 zfa!gZqH49UN2!l{5g_3`-*Ezx%Z;>1P$Sld3hK3$8v#n=cmYG!n1K#mKRKBUtd?Ft zL7j+g{J(VJ3A{cn)h#gNH}`2YcQbXDW?>}bS-%R1Gn(UtuJKV}SNZX*Bx-E(jJp2~ zE;D?J(95+Lk)%!z)Dt0X0;5QomUbPk>%83eTU%M3yFx2 zZw2}{INteCFeyeRy$33QQJ$5+%rejzc5=-Q?MKN_pEk4;mZ~Wtna1>vmaX!JD znW?FigXx#)_lr^ioDE#*oONfw!9u+L7eEgI91O(i-zgBtb8%rro{8ZlE#LiZn+S=g#zT4BFk~VnMC}@R z3amWv^9O=fJ+8I2pu09!9sxc*LJMjD>Vq19V0)0Q59UuH)E`*Lz;G-E{qG9n$7*VT z@JTbES%eBv4NC%B6a-P*9HFM=2P03cHV$mtH}{L>9K-$|?UZlNX`#xLlH~TxWTNaj zLm&7}dX>R`H(y`$E5E|$Xixbqi`>7x-rCw)5QY*$W0Sw>4_QFfE0)qRbUs9~?@)aQ zdAL~NGQmqv2H1K8^E^)|e;azD`L3+wt9rG^@+?{mf1J{iBzrf4-q9Q_QMZ+_rWrQU zU)d4(%0>5I#s0C&mev3rNmgUv-pW)*Q&Bg7n4QSQ4jKSq8|&I%A1N27dC-=6h;I^W zc^^uUoa-4RsZG}a_qbHNbG{%{F;-W6NbEQ%>Um00;s3c9?F^3u& zzK@t^=wBPretxZdTlW8?CmMhsM&EU{cWgB4;X+x3Ikzme*JaFpA6xB=hf!QR52Mz&L-K{8VKr!pi@nuwsgdSsE%P%&7BA`mxOvhuPK z-RbqGjbK-8`0{*q{^@T{;}Ny4j+%!H3OYX?lAK#ABx=*tO(ywG9um)5tW0S7QQw#L zutT$gYozJ-@2s3;xoiH&f(N6M0C??W&A`&lG&TF8J|DK) zHN7AG%go{zk~6DzNXumT1h_LG!$W!_r~T3ta4P`E$+_|szwRl8a>HGi z?qrjEl2Sgwm0IZYyM8ji_TxP(VpP8&sZ6MBgk0)Zlk~Ew!QNte&JtHuzscO442Cu-)fV|17n50gOH)vWDo}_c#75Vn)uu|fRXTjQa8tW>YiJ7$#=v5z{W1!T1 z)&=!oykhEgRSk|qi@ujeoX;8jk|++HQ3QWx!uvZ`bTP@c1V*#_du^HrUc7}{E2+*R zjp++=8nWu|#X_2cES)ti2<8g)!1+)3%3I|&`a2AbDN(h9(fPNH<~>mA)WTO|nDa#( zC0GQ7btq1i^68Y5Fx0oFFh+VRkzotbQslUwx>sn;9jj;l-EPL^x5SxvCagTdVqHeLrb~7JV|N+x{P1O!=X_Q1V_R2%RiC%P%|^nLcZRd$f=|`f8UTRz2pAY@Pw~*ak1Qp7mpT3p{5ach*Z++tT2w~GaG5x+wjAR_}2uvp4VR{@T+>8%)41%5k6t zvrPLtUDyFN0ood#JL>Xm9YuRn*O~V|o-ZWpeG($#B~Jvz^4a)BD$mR0m0r4E8@L=) zzMH=VU} z)@xRpqx$@+Iq$vyf&%G*wXn@>u{ZshNcZZ;@fI9>I_mNADSnEt6QAp;sQK3QL_L>4 zAzufZ0T|2omPG+Z@%BLrg3CI}`MJ4TjF4lN?V;uFu3|#@?y}YYWCf}YKB?Bjnk%eA zc#jGz;mZTfA0@d*kGp5RzUQV^k28^y@A7%V3;H_qSqK#hK({a_`Pbwm_1(L_lhan8 z&k=3c_4@xM%ycVK!ZBDk=$dN&Vmn-yp=H?rztaKf0kt!Jh7K|FRYNn4-zZN}d5Hmg zk8jU&uC0;prEHi@9ccF$D=Gxrl1N@NszSvkC3PU21Hj{;5HK8^I*~&Ih$1}JBfQW^ zS4RWE*Lu$`Eus){5lnn1zPhoNP+(0FV-SZP5OD7@#x(cJ1NAQ^;`Wa83s~Ywhwq5K zYChVm)R*8`JwkWxy5b}jt0U1om{Q;NltJSvOnCVWiUmS;fbpupC}RW;M{ltUi10uW z1h{&}a*Pkh4VYGD%D__NRzcstc71e&)%XOLL&;l;;{158-eG^02Moo2dIX*;c zX~m^yl@gQ6zsjAD_T?cj#s#M{$cXsE$gV$mN#~Z0FeHtDlF~ZI<}J$<*a9XDNv{0d z6~nPu(>=|jf;8s-D}O=;Mq>b(3$aFkp7zEM{nqjBF9)UJH{wuzzK^?{F!6epwYYR` zEOqdO`RQf#z5LJ{k|MX^LVy4MeOBt<*4p|JPe|1=&t&fI^xy-+Orm36C$nAJ;vi9u z|AQV(ZSmT#_Lx4}lF4i}4_l`6H{2ny*kJyJ7kU+!!!P$P4JVF@#MDmr9{YEDwv7qM z=w|?>VhE1p$SfZCnNW_@_VC4Mp=m0hDe#c~OAnKRf%oe+i}J(%?{n!A9YHvcml6c5 zWR3|l7G+U^U{a?$mEF91zs}BDu~r_Vs8lrmK`m#BY4-MN6ub;VP}jlWo;0}o52d73 zwY6IyGWj+r=ouWF5RwF#bTS)iChVeXl^BgpY!HKvk*$frU(){ugbBY9S7*5?eoP7h z1G%;Bji^LcHlc#I@cX~d97S#e7<^_$dege^FFtu zq 
zXLe@m)qIwS0}cl5wE+wRYW6n;tv3StUbS(zk9%SIX4&?o{0MxUx2C3)*uGa{%P&R4 z%JVUjfU&@~Z!^I1Ch+0*{oAZrY$-=Ll9PU3CsRiV zMy-NKe7HJop45;gs!Iv%KCv ze+qtle5cpA6IZi_{%hLq-mtm$SUMGdw6Zr0p_Fv{{@u6vEZ{+gN_n0Om94Fadh1`_lvyaspL+UwYFbozZj{2Id|=(A}GKoQ+J0S>lZw`0%K5y zn~R>XLq_DQy~)uW=Xqr=4Zi1&87Zo;vahLfF~TF<(ZXWv*1gSl(|AA6{`7S5q8|E9 z`nq(Hrgx`c*t%NYq~074?Nk-mjTdPEF_ufd{nyBfb1} z{Wna{C5=ClZncr2#B4OfG&RPPhYWfw%SAo!WicwHIXiyVzK<$k)tV>0k!I)8441oC zY;)GoRT1GH7-golJ=;i44LcCu(dUxOwK7ati8>g<;{K79$6ikE64|m(V@@uQBlF|B zrqH<96eb`1sd96Gmr?n{9jWVHa;hyE&!2DoUWTAp5iiS%+|!jH#%*APhvhQZq3 zO1Ntl#x0M3r4O&!=KFacJ6j(Mv2(tELV5?mX!BXcr&u>b^r`b1HNki09KhdgLrlnQ-TMPTwSS>?K}tyY5UKzWrGTi4jnI-o#(-gWPAjcE$}>yn z6=Yqn6@4yoRVmVw9}#*zJ87u)ctkBT$UXhjlE-Q8BTKn0+w0^f05TW?ob0P@D|8TF#Z^Vqxdkq z+52oyYiw)`gkxk*HJ>I~ns1Qu@6VI*5zoz?ioPv($|!n$8WQ89d#(H6jh|-l4u!3X zXUo*BW*5WBj$PwF@8sUKPH2sl-0Kh4SlC>vzj^v8N(sBU_Ki5h19$YyzV1wFUTU0y zurGN^*Jv7yGb5W7yJMPfQBn#*5FbVar$72|2LaOlk4GLkH-T(t8p5-!4%SpqMjD!% zXa7c?^cv4JE;(X(X(ruXl;oYrbACZJ6ZZrP4@&VpP-|;zg~=y7 zySsF}ypfjuyV?98j5TYUyhS(veXioi@z6L!bw?w-LixIn4mbDM;yA`ywa1WpzS3_u z2Zz_!~iek|)AIzf!cmFIHMV>(vp4Vn<2)X$DOF&@`+2Q-jQf8}Gzuqw( zwT$?9?P8IZ$9BB#N0C5AG>3$>ZF0KeOUIXCqc!E_G2Y>ixxboE^-MK#rf{he-Ydl~ z3HAHzGq0SpC2lTCbSqf|FCCWQ(?$r1f|Hhxo*s=0+}R&LendtrWvQh4g`kMbgOnZ_ z|HS*+&Ah2u{n61{Wx2pQD&)EiF6sT_@*QnL_Mn69IrAVfp+|HK^z(6@XE|%_(l1C) z#o&?ewXHn9e=@AHuJLC8MheW7lhQ*9?iV#BEAwHt(K;=+;#B{^Gu7=m zB#!-~_h%i}=^MYv8{a**2}NMp+2%~Yd>V!3909l5&W^i@Sqe?Ss#z*bKe+MN zlX@-DKtYb$@izC()kmH)m|fT$a0fQWjvyHlP)wPntqcjzygCqJ-_=U?c=?P)4{ z1__Iltc}}h-2^s2%2hET$UoUOZw+@`tN}w<>^28&*hgG?G_N6#p|iCQI5N+SI{nE$ zZqzj?KQ$5dIxOVGO^U7;PM|x5l7_|zz*}ILM!$3kQGtOA3n#|Eu9#>q&un;JiGyD7 zp!Ub%;HKw`(L;9Kcb~W~p-O)m-E&!uwW^%VKUcc1g38j#;@yAI&PnNGw9e{aSLo2z z(>dOmB$(fxYO)mV=E!#?L$~ikgZ$(&B(uFr%nG*&;$)a?GfXU(cKc~MrRY@h9q|YDtV8CE2aXg$CEBc^mM~?I znpg@rS?nnnqb)lgvoyUm82Ipl=UibgA!tr~rAN$${zf68mw`{E!0C7I(dr6srCK#* zLsp$@G+5+E=lnBFQxK*gT~SZ-CBGP*ObmXMl37g@Jdt-Yv>D@kE+TM(=i9Du>uhvJ z%(4%Hsj7tY$?;u5wVJ3+=ebg{;g@ul{r+7=D!MJ|oMg=3eHd;%K~=DQFB;6B>B*#S z5qT?lP1T%iO~4asy6x3M0IqdGgW#UqHXYn$m;mq!q&V7>vNWa;YVl7{921a`1m@>+ z29t1pkqp4-;%sbc;(7WL1%8we7$F0@E;2Fx-aV0Xx4TC7#FNT?g)`cusQo%!ciMc} z>Gj&8sUzX_!CEkfAXz1N(&M==MG2XywD+2DAGJImFdWF27tUP2EtYehnzOiN8{_3cdFctJE5XTq5nR2n@07)Pi zCru1_#@4+XUJMF>X;|E00Nfv_jeC20e~co-$sli&0L|k9lQv@b>dF9AOum?D za1eG8epNuxh0axEBIVge(pIcLQ0;N7A@*$jseaZ?dJ^`B)+SNuicd7jAME2Aoz3lK zYFIMx$35C0uckj0WjiN69U>TT?gj0u3;SJ&re5sooS&b=7_|Tp$ag*%!x74MLIiLE^++fU77d+ZOQKTSNmCfqg-R6n z`2+=h;j>08au{>~5CTM;1SZcI4CfBW*AS_EU8EqUnDXX zeQK(rb3fwMjhBQhrFWg2V37%F$xQc+VbiID`nAB*+*%V-wtoKpJF6m~5x@}b-XC-R z9};2L;)S`3^?`eMWz=Gxq$DIH0HjME{{oXFVH!2$VdKq;-!cN_0be}u<(dr^z21ws z(F}gey>wA0IZfiyH`nRc6s{3>cDiyJhGlqk;#aBuR;?;M6ZK2?w=1%LZjqhAdthUe zBmeut=F{uB4x?uCmPdXcai-JHy4`uS+l=v`im?gVzdi+2UW<{UCEAmuq@;Jj!EJ>* z52cp}nVej@%K5V(Ten{TLxsl1?5|!6mG;lhwsKAwgMk%h2YTo{ULDxS$4^N|FTxsn zB^zP!RBM=6{s2P!4Q_7{*^~X$A0=P@&NOPru*Rf!^dDw>R;?J{`AGY+(8<^?yHTA@ ztw%fNLv2RWFY1Wt!$DtkevWR-JAG~#ebQd&58-JRdsbwN0<2q}Uio&*IY0%W*JYf; zW6**Or7J9S*w0pHb~9LZqpT%vTNvd)ajh4B>tmVwXjLhUO?=~G#a$(vncD@|*=gnP zy>qR0!>Y`HBH&WODcqiDFj(6B8I) znlR2F0RKM*kg86c;IS0R9!+eZ`$F)YfizcXdT+#F_l2I&pz{IHl{>Y9(KchbsG7A5 zJM2!Ua{WEab{@?1&Sn}E2~8gP$Y2KEyqYgQSxJF4S&LcgeN9$Xr3k&Ml1zyG3WmL^Kcez`yt`O zD{Nk~+biGD*l4%?R~A6p7(c0fPG2c~O$P^I_}L39E32H`+?^avhtK`Y*PplqEjBo| zRtj>hb&!{bbt{flC$z{$pfL^9ic3{E{t_!Uy%)_!AIxB&7<G18gVdZsBI0KB_l%+;Ww597c<1`yz<-*pxm$@ zQ^ExpFZ3Kh?3)4HA*^69+7gS1MLh*vk0F%8#GIV@tL5m~>RFPbCDt&+1rL@on!9%+ z02d4cVg;(Is4~WqC%y1^c4MJXN9e#kJS}{!xa^HCAa~V7pFece{PL*y-Az?b)6fr**jRY(5#pB@C_UYQzqnKXg2HH4Jg@9*E|fWDOnA|`Zn0U%9Dk$@3_ 
zoeynh(*G>k->P-r`^Mec^7u#eRAD84LY);h?#<-*-<6zErZX%3 z)D{uo=qz5hjUd8*WxY1y>T`l)sjGXxwxap<1A;^LT>;e9LCSWFesQiz%uh~2*UoXB z!vl+4?_a@jug^CAIndbG50yy`!{ns&D%$FJr2rDw{cGxlCXF}_+ad2=@`zzs5!(aA=Yvp9& z!AbOndzrz-O?R5@^xcI7#4&H5ZJY2m2G3S|lEvSVLY*yBo%tU-2+i~I6Z3oatYXJU zwN^T1U`OuK4GHMi#s{WdVa4zYH}n^KYP9=R5DsaCPo+-=nAARB0|axEP*Mvc|EG$^ zW!mXXb*_9XYq_$a`FnZh#bZ?5i0+{xd{4!)(Vk6y@vHqig|NS~`qTvE)IkwZ1i??( z(n@r#3G}4IUK+8UilN`MSuV|XynI16Mdu$4*Ew~O_Hg%xG*AeIKv-lXp>dL0>`+GL zL+ShrY+tl|&8a9kK`ldw5NBE>N0b?3{5vJ;EUkk-+?C1>L81V*r zDChds(@p*y4_)eWy{hT8zwO4*EzrbQa#V-^Y$M1eZTMfb-!LK2Z+Iw78Ok0 zLxE1%XOPPU-dJc1RC>GT=PznNsvT>n{(_3WKwRD?BRb|Lg_&9yQABBFwHNNeP%c&! zQ9&Gkp+jzBz?M=c&AFH7Ua3Z8zR{qGF4GOp49G0vsV{|OiutWo%r!m@$vl4+?BpfK zLcX0Nk{~ka??`G9?X>y20fIBR(<=?XV$DV4ZDWVa?ochq2l&j~vAto@eGT>deni91 zNqRqw-~qqw25*0<^&qF_hafI0^`?wheFgPt_SXhTw_l0mOz!&-8h$3Wk0437h$L$e zO1ZD#Q6VplBs+OU51OhgcVJ`;Lvt+s-v%TNnU^VTeMuqO5k`22}U>R#Hvb11? zN%B&f3j*m&K6l+I_$8*kQNK>;_&nWbe27w4HCWf7W2^?bOniJ8LIPl$E#SE)12Sij zl7gOkW+5SJH#awhkMC~l69-IfdMaTsqNv339O;^VVb2Zqv*WPO0eAtLVH&L@6la#? z8;~{OL(kcw40k-j<$ruAeDwL}GF|9728sH&stogQeT>!;dTq!@F5kAZgO?I9!-PTK zw{E%Z>yi?FV2DWtCk6Jg7@mTIBn-i^R8a`2tg+RFX$vZKuEp{{<>R2nNG1lv3l<(?iQE zINzx^L)h*ohtB}zcieVL&a40kbL(8bK!47|Sw4L3$@&)cE-kT@8LOSC{k%KuaaX-1 z7K%S>E#fdn33Vz)7D8pslPEP@60*e7pP@?FWI+l$b<~tvmG+8CsnO8 zr8`vWLdX!vdlk~QfZKw5lW6f-f4U+CkKu3aCj{6dzU;v~0o>S#%X_r%UYMWqKC(Vf zD&WCgHxgnkt?w~BOh2V$>hi?{n*xo^Od40UECx!=H8i3eL|;EVUO4GKO__Av4J;o> zUS7Gb$OJBrmofFv)0L4TCg`?henMYK9vIArFE#-C+_vn7)CU%ybZ z^r*so|CVg#?0)R68vy>}(9=`kGHCc?I`q8+Lo016?O6QW)MI5nNaAFv6bM8(ZIeiS zIc^Iq<72fZja~hBuBpGk5dip;yn;dlP+dcH9*PlifI&rs=m9H-yfpa@CoR^~fXbNn z@_XbeZ#L9!w{FD>J3SaFrQzgcJDL}@k19sLn zS^sQ(4$1Bj_DNKDLLNId5O-Ms{D1>{i^QyE+sz+w|OC$L<;%;0EAk6028!!TJcs%hIb+tA_*st>15EJp5-lRK;&JYK=E z%kcjY$7Kndl5vvH9kkVync3K04I6r1@Pl&82M~Yp{tXg|0PP{OML+W1yq&|2~t*+T*``f=EE$EjB|=^KVAxU*${!u z2=@jYI(F=3Xu4{ofqLt{u$U% zSru~U5(!UW4gd9_Tmdn%v7%70UDDl6>Fi=E7@u}pOsb^kwxTpN0e(iwmOXf#rJkr$Yjp(ikDR#h`&%vR4^z{+@@GJuZ!$(dXzF{*) zUtr=+np`&KD;3eza>;biz^r*P-we}}35k4hEWp8fpX!H3pjb9*;jaST(oQEF4bUYZ2M6wf8kn~p z`AiM!;{h7`=5+O{EZUM&En?sjg#LDw_7764jB#lPpi&n>1rCUzW_%_dWj`vl`4tLXt6wF9C7c}Dc1cRy3~`LNSAG>YPS()-_J>Fq zN7Y`a?zo{okuDmh$IUI8<9Xv=-Tp&I`&6ItY@EZM!1lE#??NE;d9H`;rvFDSy=X`y zT)KL-BkHCWip}VUXny{7ua^%vyKBT@%p$igC=%Z`6Fd!4@vm5pN>jwBqi&pT;YVI# zJy+P*^PcNAf7YyK2yOv4n|qGuVyPpB?1rp=z}=7bDU-flxH!teZ|*_rVAf}|-Kx-) z*A59O?$O;^R(FJWQowekH#U=^#|k}4z+x|c|DIR3&)L*JTd)lVf zyELMq*rh8$Tkz4gyVHW7cw(7u;ZFU2*1V4?p~c_`;-#C|){&z~lh#PvFw zWoW0>59Ah4P&DM-xNw_(aJjKg1REKmNZyZyjUuh{BO)?22S^Sxkgas?YIi3sahUg9 z`9|X^pE^yU8H3Kni@#liXwFKE3pd#Ce$hVKY@lCEJn}p&cd#dC+FgSezmisXf$GwC zo|$b-OurLW)cPaYR{F-ls)?N7P4TDv;rrsa?h`GULAB`jDd+WIX$efE$x&UuF?(KE zg9=o)Kafm+dKROts5NHrO*-$cNt$;3s(rcA;`etf%%5Tl$1Xdg1y(F2!UnJBeft)5gH&+~g5-1Q(>Y+$O*RIPL| z(blU$BSOWHG$RlRNQoE-{VeQ z8@_hu!bxU*qts@i<&|+2{IZ~1If&f)8wEW7*rKq=*Esoi<>B>ytA`%~&J7+~^JB%$?3Hz#BJ;jvXc ziHPTuONp2;pMndgsedD*;-uQvU2ez)&Mqz~H%9;2*6J}jQ9kq*(mye%UDG8I=?o}V zR@RM%Q9kT_fa08^kTq$@s1^3W(7nBLC3^2`>#uiTgj5WJb^Sl{XeG_JRidmo^R^cr z@rZC;Gs2SkaGi+|2@G(Mz~GLmZ!J8C;rTn1c3YnKm2N0-LgE^{aiJ11tNrGK-~9|J z@x%q3hZi6F_*ignbK24oRH!UhDBgZaV)Rmv@?TK|Zp`E;;-XZo4Zddslc+bgdT~|E zx39V9u!H#=DWGdg~yE1-jx&zt~~6Z4S0MAUVW6cA{pAp$xYA3s^b3e6QT zoOs{$PubVx^vo2m_DzrucR99Pe1Ghk@-W1ZC4bw90=GLifHqOuhBN79u=n(@TgJ2I z%gg_4VWt|sBxGnqLRy*7v$L~D(C4TF(Hp|#fC362uE$>=O}9ldA(g|C^uKZG=z`*< zPCK{`LuRC-tfcm#>EDPf%2)OeKV-;c#8x%MuLncm5sWFh_C@bD=D3D=<#RbVSiuys zo237C;UB9~YU01q&$5b=vDyK7aDPrbgK7urRyb+r*Pcj^C`> zN4|86_UlaN@Wkx;x{A5^oS6i_nAn%y-QCr- zz(5?CsIx?aaCuWGRH{)dP83q)UCl z)n)Onm5?{prP@=tY?4+>T649os(oaWQnj}92lz~@P 
z_463<`+*lJm-XsMot`~QHf;{ouY8URId6azfZ|I>9}8rchb{3ou0qNd&;X+apcG5} zZ8>vbi6Z#K{nO1pd3T}F!mRstca)Z7zsw%=W+%0E{-ns!9Upws5HxA>kV&YVLM=(t zZPE9fjav+I!Iq|u8^x&u7(D3GZ|&};0kRwDHpwX{K6;-(L>CwmveT~^kx17(Hg<)T zHS-#e#^(w&d3DcysBxE+YKgt{xgyhOwAX`Wv9}>RdgqIhp0P9zQr2+^3Di?dJHI?PKuCi5gF4LTt=U#H6B8*o8P8l@|047wKz@v*dFSSu zAZ3kkqY@1gOxgL$@}5Q9L?K5{@3K!)`-xqa|6zH!)$UKL0m{sQ`kxNQ2lpPRqM~pQ zNdhyN>e{=Vkt;n}$|hdjXF-R45K3B~s8Iyc3h1zyh20N#`)ht6(U_u{v-G=?ABs!} zc@vzG_NK=Y)x?p6=g~*!QS`@A&ExxwKh3C&KVua=uCd~KTDc^5B9|D_6)@HE5aas# z#qpk}m`!X39lT6>E=iq2_Lj><+{gcgN4UD@x>+-KUW8kuEqvtBN&96pY5dFGdtPBg zg2k`gN>V3U4735lnwMZ3XYDveEF?tg9A`X_;%bo!m`Ez;^$~+@agSSR5A!9FVvMe? zfkQEe$=Hrp&{Si~wamED>Jkr8U>KA2>@}<)Tl0{F9sm$IuB_Ox^*X+PeQ_z&9WF=$?q@-7Sb$kj}w7s4Q{(s@?nzH@&GLzsS+1%xAnF zd#o#vgZ7DY0T|0jRk-m<`Urb`a5z1WZM=VG^V|{R_{n~#z}{4Nj_La+w8YYyhvLJj z)q+BAiW{wq2fxlFAzPXkwzZ6Sj3!?etU^B_Z>G1Zf+x6jbySqEs`qKbqv@nCq`7zn ziFo5(Er|6THeTWVR;*WRko(>TW4Q5>|D`UnZ!E!G_$>wsh##Ng(CCQhK6c5p=p)OO zFbq;=xiQ447?8aauJP&)@mnukPl=FDio(K|8IARBWLDch`J9kr<<3Fqcl1s((~*-i zda+3$bVP^Ga+o=Vygz&DuVi8dQDjZ41|t@^SbSE+3H~E!%I8_$X9r6c$#A^Z@6&5S zN0DQE=tK;&$O!Y$X<42aZankX=^pf=jW|5bwiwf6hImv@pemcGOH*dgVaRxTHQtSN z*n0Yy;X&v*d^ff~ID8r=Jpw{i)D*NM-y~Pq)iN{BZzD59#0Fz)+b_Aet4FJ3e~CZC zMq`wJM@%GMUM2@`<7lU7hdt4aRic8hJR&e~oOFK_Q}Rf!-2fwzQ;TaSfsbGD{VuXV z3~yA<8H*?XVpsa@#XY=j8kum^bb{u8^4qcKyKT1vI94ndk8 zZYs#WjeRpVh)YmoW>N&fUrj;r3)%qJN!0bKwVaP08>!##7OCWH9C<43;z*j|&Kq`B zJ3)GYtRXE8pEvQ9iWk}Gf5p(3^ISM4!}MC zU918btdcYgcKWXsc3sCodH=zjcqlVcjWDzPSt^oP*w8G#q4J61!u=Rgv+=C|`THp; zz*qVp=05#7PE+X6!_-QVWTn8O%zw~VXuEJ(LZ_2i=sQ_=F1FoqNlj}#rn%=P;tjo| zkZG~vU-H-`MV*ROh7$E+iA9|Hr*z?z{c?(z?3-*6D@AeJc@|FQ(ETP381$Swy_Q52 z|9w}Z0Xa4)YVV}3AKil(>l@1=9ii_*Hr6l>EkpG%rh;_HqJDI*@Z_=f=z8L2=F9ei zdi=Xds$|ZG+leievZ_gI&T4um6+6geF;2dxtMg*K#dvMp()K{9|HIUvjZiVc%&A92 z3!Y8wg>dYo5g8`}%{2l8O)(MDqz^ITCy&<-JBN|wd3Bp{?6# z%_|3^ltIDX$x;r5Jke%dX2sismTbdT z3PQx^XMT%2j#dmOdzY;EchUMCQXfv$a_(x2%bcZ*NoTx3?kAmoICWmqX4XOWz340! 
z-|)Bbq?XHvme<>mU|1ZGg%7XW*7_K?Mj4Rqd)DG8<4MZ;s&$wpz*hs?1kAHOQ_Dbs4kZMF zhu0sVI#s*vUM5aR1xWv4BwbTg@K1c<-$GKXzKdQBlWB}dstbMfldbg`mpM?&r(@yK3?v*O!8w_WOm4SB;h}j z%l*pvPe%{k2u>`k*PL}eAr6v^M+yE*y^QHU*R+GX9!m#WYgV5C4hp|sRvX*YNruh+ ztlx$b97600XPrh%Ed4u(8-FFrgvBeNj@+q~4UmB|5V2R{aw1VYNI^q6tP;)FG!#D4?dv|gi5B) z$;(RB|NScl1(o|Je=HM=0@x3db&!0o~Jbbht`@b_Ihal!*4U#m#o%yY@5~PV79JgjA zo5t`OUVz-BKno)Umo9~#o*tw%>w(=)$soGks!X@#Ev+KL*i%-P4so7Rf@o!LF1iwW zP@%Kh55j4n#|4fEC>^p90cxOeVBiuw0;)Yf0biu7VN2)UzXvk(h`=8_*T1Gl4EXa9 zjSraQacA7xq)g0__$tU0%E52|Ba`|oW;kK~MNguHDu78L)@qrUNPy;aGCDf@6I0@g zgnlo9^z$MaSzRR4eMCbtE-tRr{?8{^8jTnW$JPjXOAtgK%+(nfOsDRw@jTev+iULc ze-Aj&XV6q{(UYJI&|M{iPxYO#w*ml3QsOu*4e;e2V7Pz^0t-NIcwMYL7=?x7fWZM8 zms9j3dg6sY0Q#;3#M=Sp5(>WqKztauxVqY+{)V#}IDnWHw8=twKYD7)7(wd8W0yvX zQVBV_uBxJUY8(It2?%!_9v&X1#}?G8)e}&%1TNMr{YpMWU=~;mq30h+Sk2}Xh-qYB z`GT&RCh7^`mom7UlgKY!iUJ4$8q#in$v^Yv+2Kom2iKTj_n52b~;s%rsoUZLC&1;lIpfCU58uhn2y-zA5zFiR#ZwYj8$ zC2CQ3ULZaOK=Pl#dFoD(9wi_l`3pLaf-dZMZhOm*sdx;Pf`}-A))yaes%BGl{)nu3 z26vaST=*4PI2*`1fm#{DRRxW<10WiVY&94w3L1VFMmE;h6TEYIp}pN~t}PPSq-a@L zSv6wJ$oMTfB~$x&L5>R!nIm9r0GfKs``CG`+^z%e>>fZKxipx438szT#RKXsn2Qzc)fw06S|O-e%UA+m?mt)bq$tEc>i&>db(Of(p7OUcUK-` zxSArW(lWv4Z-vnr!R-+Rou$uLBLJQxu1Woe)OWbR$P)HjQ2D_Iw3`DCWOi-s3kJOc z8x`7x-|#(YkFEMHs+FsK3tFQ`F(mwWOIQ@ZoCTM$5nzXdbCnL5n+)!sNxG$0^$1eW zY!Yd?2b>EH^#6id2+P+_UXc8xj~iNm8tO-GgE2TYhH#i|T)Jx~b+xt44_GKH{!b*5 zB;1#Pq(0rK)+d9jTXH?57MROnz34)zxNMf+pU;5J86jHn8@3|SC=V}mNcnocwY@zQ zY*_4{4PLW%C^iBD0%NF4AfyMttz{v>ItPr#d3kxSb2>c1R7Z$TS~)nVg19^Jyi)|s z5W>-oP>w?LvLlEQLq)Qs_(=z1!<-j;{Xv%)utv&&>k)EUrT})`=>Sske>RMr{WT~a zn>&OA1wA|7wFb=@(9pHmoT^8FiC|LH%58PPyoZ4mclf>)ycFt#rCMdEs;WBiVucG3 zfXGto=G{TN#R4w@khNuSpex1JdJZPBhWq=;Va+%me>GoK-)S1icRSpiM!3d-vZ5b4 zB+4BoyP?-$wA!s;DU|!XlGu5jSh{yBE1=bK+r?T=BR13jA%*PdcaIWvKpexZT{m05 zvbflGluskZTn#*KJG75e04WF{Qsp3r@DR}&XBHFtGnhXA8X&ZD$7id+n3ls9>cbHW zwvz+c9YV{>%KDKxgM>f^*D(u>B120HXzTL6}3aAPV(k)abE3-Io<|3MZ-`a7j?q#_JhjMx9=Zpl&qf|brUIFd++LckBU<)8EJgod^M z<-GrIA2}_0KChs!ufKib1oF)9YB)c`3)QceR6X0?L_g` z*yN|+E+FSc4Pgni6l81WT>%_nw>TX&^#e7vcMokYO24vXfcHmkl-i9)zj{RgG~)zB zMAE5!n`2K4;;M4>fmx%k?5P&$h1iq0ku*Re0v6{w5D8Uxfq%)?E@E_bbv-;h!RhtE~b_4m93z(Ui zi$G|nJ%%SS^>SBVUnJy15w@HkC)ni~y2RL|m;^pG7yH8Ki4q#%@ox{$x z88FQV-Vb0106k?-BF#dwl)zPwh#Lb;?h7b#gMHmLIs;|hCNpa#13v&ljl)rObaX^r z7Z#?0V+B+5QWrC|k!#lS=I7}Fm@qN`Jc2`_zGaG2US1Bf1r`((v)k+|pjYiJ?cEj# zJCH>{LDEvV`%7HhPq6oFTKU&d(41`9pC%uX=Yp^p0Cy2Uht;Z_t${E}30OrCHUn7N z${!0p%AqN}l}?uNU%ns;IVhl3N`ulT*b}a544{)`z$||Qt#;QxNUk~!AK6y|4ioV( zLr!2cU=VVkl)QOxumF~K)XwK*>pmFGt}C22$b&ja zd{F>20GlmQUIWn*l0k-iC`4Fz`1tss+a(991MmtDQ!$`}VFy3|w_?(#acGDNQ58dU zGp}8{R%`?-i+5c>VE1(*TpfB8;4;8CSyWMx>2P@bAdK7V+)D19wl z6rUj1R&3IU2O&^ojRcrb5NR{VpF$Uap|_l5fewPp%oTocenHt?I!_bWH;53vAvS|5 zfkf|p9oNqxp7>a~xw*YIWy1Tva&^nKjGUYZXLk{V&kOl34S|FR z+!F$1aw@78@F5UBlD2hrrU9HaqBP>oo4GE*$uw9w$TqT|MK&TFNq`g*WNU$k(Fu@F zkcb_HfC(5gPMQ2Q!B|425fGFm-e^}K(?D{x0Lr~4;S&#sE>6qP1J@6B5}$b++3~@K zDwrB!=S4D53hmmWffJqq<3jcvSneH2t)eyoCEuSUg?np>t##JD*?Dew%! 
zwF!>}S`MxJH^7JlA=oOAdVoz4>pz92Gw5O6La?kxshOFq7p@4P5X_0h*~z6mO+`2q z2vZA~Qh>lQw_c})Y}-YSc^x|Ye^)B9px_f1_5YSlN5IzqAG81e{>YB6EEVUv#F32z Q3jWB;s@%($Hh%d(08m&`r2qf` diff --git a/examples/running-mistrals/artifacts/A100-80GB/rich_table.svg b/examples/running-mistrals/artifacts/A100-80GB/rich_table.svg deleted file mode 100644 index 2c839c7a..00000000 --- a/examples/running-mistrals/artifacts/A100-80GB/rich_table.svg +++ /dev/null @@ -1,235 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - Inference Report - - - - - - - - - - ┏━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━┓ -          Generate -Forward ThroughputForward Peak Memory        Throughput      Generate Peak      Quantization -Experiment Name    Batch SizeForward Latency (s)       (samples/s)               (MB)        (tokens/s)        Memory (MB)            Scheme -┡━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━┩ -fp16-batch_size(4)…         4           2.33e-01             17.20              17865            137.00              18509              fp16 -├─────────────────────┼────────────┼─────────────────────┼────────────────────┼─────────────────────┼────────────────────┼─────────────────────┼────────────────────┤ -fp16-batch_size(8)…         8           4.67e-01             17.10              19463            236.00              20524              fp16 -├─────────────────────┼────────────┼─────────────────────┼────────────────────┼─────────────────────┼────────────────────┼─────────────────────┼────────────────────┤ -fp16-batch_size(16…        16           9.37e-01             17.10              22458            319.00              24393              fp16 -├─────────────────────┼────────────┼─────────────────────┼────────────────────┼─────────────────────┼────────────────────┼─────────────────────┼────────────────────┤ -bnb-batch_size(16)…        16           9.40e-01             17.00              13155            211.00              15254               BnB -├─────────────────────┼────────────┼─────────────────────┼────────────────────┼─────────────────────┼────────────────────┼─────────────────────┼────────────────────┤ -fp16-batch_size(2)…         2           1.18e-01             16.90              17158             68.00              17523              fp16 -├─────────────────────┼────────────┼─────────────────────┼────────────────────┼─────────────────────┼────────────────────┼─────────────────────┼────────────────────┤ -gptq-batch_size(16…        16           9.63e-01             16.60              13314            242.00              15235              GPTQ -├─────────────────────┼────────────┼─────────────────────┼────────────────────┼─────────────────────┼────────────────────┼─────────────────────┼────────────────────┤ -bnb-batch_size(8)-…         8           4.83e-01             16.60              10261            134.00              11330               BnB -├─────────────────────┼────────────┼─────────────────────┼────────────────────┼─────────────────────┼────────────────────┼─────────────────────┼────────────────────┤ -gptq-batch_size(8)…         8   
        4.86e-01             16.50              10303            164.00              11280              GPTQ -├─────────────────────┼────────────┼─────────────────────┼────────────────────┼─────────────────────┼────────────────────┼─────────────────────┼────────────────────┤ -fp16-batch_size(1)…         1           6.33e-02             15.80              16907             35.30              17066              fp16 -├─────────────────────┼────────────┼─────────────────────┼────────────────────┼─────────────────────┼────────────────────┼─────────────────────┼────────────────────┤ -bnb-batch_size(4)-…         4           2.54e-01             15.70               8797             68.50               9317               BnB -├─────────────────────┼────────────┼─────────────────────┼────────────────────┼─────────────────────┼────────────────────┼─────────────────────┼────────────────────┤ -gptq-batch_size(4)…         4           2.61e-01             15.30               8747            111.00               9239              GPTQ -├─────────────────────┼────────────┼─────────────────────┼────────────────────┼─────────────────────┼────────────────────┼─────────────────────┼────────────────────┤ -bnb-batch_size(2)-…         2           1.41e-01             14.20               8166             35.00               8401               BnB -├─────────────────────┼────────────┼─────────────────────┼────────────────────┼─────────────────────┼────────────────────┼─────────────────────┼────────────────────┤ -gptq-batch_size(2)…         2           1.42e-01             14.10               7918             62.90               8279              GPTQ -├─────────────────────┼────────────┼─────────────────────┼────────────────────┼─────────────────────┼────────────────────┼─────────────────────┼────────────────────┤ -gptq-batch_size(1)…         1           8.28e-02             12.10               7669             31.70               7824              GPTQ -├─────────────────────┼────────────┼─────────────────────┼────────────────────┼─────────────────────┼────────────────────┼─────────────────────┼────────────────────┤ -bnb-batch_size(1)-…         1           8.65e-02             11.60               7822             23.00               7960               BnB -├─────────────────────┼────────────┼─────────────────────┼────────────────────┼─────────────────────┼────────────────────┼─────────────────────┼────────────────────┤ -awq-batch_size(16)…        16               2.52              6.35              14486            312.00              15080               AWQ -├─────────────────────┼────────────┼─────────────────────┼────────────────────┼─────────────────────┼────────────────────┼─────────────────────┼────────────────────┤ -awq-batch_size(8)-…         8               1.27              6.30              10787            229.00              11175               AWQ -├─────────────────────┼────────────┼─────────────────────┼────────────────────┼─────────────────────┼────────────────────┼─────────────────────┼────────────────────┤ -awq-batch_size(4)-…         4           6.38e-01              6.27               8958            122.00               9101               AWQ -├─────────────────────┼────────────┼─────────────────────┼────────────────────┼─────────────────────┼────────────────────┼─────────────────────┼────────────────────┤ -awq-batch_size(2)-…         2           3.21e-01              6.23               7824             62.90               8136               AWQ 
-├─────────────────────┼────────────┼─────────────────────┼────────────────────┼─────────────────────┼────────────────────┼─────────────────────┼────────────────────┤ -awq-batch_size(1)-…         1           1.64e-01              6.10               7589             31.20               7660               AWQ -└─────────────────────┴────────────┴─────────────────────┴────────────────────┴─────────────────────┴────────────────────┴─────────────────────┴────────────────────┘ - - - - diff --git a/examples/running-mistrals/artifacts/A100-80GB/short_report.csv b/examples/running-mistrals/artifacts/A100-80GB/short_report.csv deleted file mode 100644 index 197e1c68..00000000 --- a/examples/running-mistrals/artifacts/A100-80GB/short_report.csv +++ /dev/null @@ -1,21 +0,0 @@ -experiment_name,Batch Size,Forward Latency (s),Forward Throughput (samples/s),Forward Peak Memory (MB),Generate Throughput (tokens/s),Generate Peak Memory (MB),Quantization Scheme -fp16-batch_size(4)-sequence_length(512)-new_tokens(1000),4,0.233,17.2,17865,137.0,18509,fp16 -fp16-batch_size(8)-sequence_length(512)-new_tokens(1000),8,0.467,17.1,19463,236.0,20524,fp16 -fp16-batch_size(16)-sequence_length(512)-new_tokens(1000),16,0.937,17.1,22458,319.0,24393,fp16 -bnb-batch_size(16)-sequence_length(512)-new_tokens(1000),16,0.94,17.0,13155,211.0,15254,bnb -fp16-batch_size(2)-sequence_length(512)-new_tokens(1000),2,0.118,16.9,17158,68.0,17523,fp16 -gptq-batch_size(16)-sequence_length(512)-new_tokens(1000),16,0.963,16.6,13314,242.0,15235,gptq -bnb-batch_size(8)-sequence_length(512)-new_tokens(1000),8,0.483,16.6,10261,134.0,11330,bnb -gptq-batch_size(8)-sequence_length(512)-new_tokens(1000),8,0.486,16.5,10303,164.0,11280,gptq -fp16-batch_size(1)-sequence_length(512)-new_tokens(1000),1,0.0633,15.8,16907,35.3,17066,fp16 -bnb-batch_size(4)-sequence_length(512)-new_tokens(1000),4,0.254,15.7,8797,68.5,9317,bnb -gptq-batch_size(4)-sequence_length(512)-new_tokens(1000),4,0.261,15.3,8747,111.0,9239,gptq -bnb-batch_size(2)-sequence_length(512)-new_tokens(1000),2,0.141,14.2,8166,35.0,8401,bnb -gptq-batch_size(2)-sequence_length(512)-new_tokens(1000),2,0.142,14.1,7918,62.9,8279,gptq -gptq-batch_size(1)-sequence_length(512)-new_tokens(1000),1,0.0828,12.1,7669,31.7,7824,gptq -bnb-batch_size(1)-sequence_length(512)-new_tokens(1000),1,0.0865,11.6,7822,23.0,7960,bnb -awq-batch_size(16)-sequence_length(512)-new_tokens(1000),16,2.52,6.35,14486,312.0,15080,awq -awq-batch_size(8)-sequence_length(512)-new_tokens(1000),8,1.27,6.3,10787,229.0,11175,awq -awq-batch_size(4)-sequence_length(512)-new_tokens(1000),4,0.638,6.27,8958,122.0,9101,awq -awq-batch_size(2)-sequence_length(512)-new_tokens(1000),2,0.321,6.23,7824,62.9,8136,awq -awq-batch_size(1)-sequence_length(512)-new_tokens(1000),1,0.164,6.1,7589,31.2,7660,awq diff --git a/examples/running-mistrals/configs/_base_.yaml b/examples/running-mistrals/configs/_base_.yaml deleted file mode 100644 index b02223bb..00000000 --- a/examples/running-mistrals/configs/_base_.yaml +++ /dev/null @@ -1,36 +0,0 @@ -defaults: - - backend: pytorch # default backend - - benchmark: inference # default benchmark - - experiment # inheriting from experiment config - - _self_ # for hydra 1.1 compatibility - - override hydra/job_logging: colorlog # colorful logging - - override hydra/hydra_logging: colorlog # colorful logging - -hydra: - run: - dir: experiments/${experiment_name} - sweep: - dir: experiments/${experiment_name} - job: - chdir: true - env_set: - CUDA_VISIBLE_DEVICES: 0 - CUDA_DEVICE_ORDER: PCI_BUS_ID - sweeper: - 
params: - benchmark.input_shapes.batch_size: 1,2,4,8,16 - -experiment_name: fp16-batch_size(${benchmark.input_shapes.batch_size})-sequence_length(${benchmark.input_shapes.sequence_length})-new_tokens(${benchmark.new_tokens}) -model: mistralai/Mistral-7B-v0.1 -device: cuda - -backend: - torch_dtype: float16 - -benchmark: - memory: true - warmup_runs: 10 - - new_tokens: 1000 - input_shapes: - sequence_length: 512 diff --git a/examples/running-mistrals/configs/awq.yaml b/examples/running-mistrals/configs/awq.yaml deleted file mode 100644 index 88f22cb5..00000000 --- a/examples/running-mistrals/configs/awq.yaml +++ /dev/null @@ -1,6 +0,0 @@ -defaults: - - _base_ - - _self_ - -experiment_name: awq-batch_size(${benchmark.input_shapes.batch_size})-sequence_length(${benchmark.input_shapes.sequence_length})-new_tokens(${benchmark.new_tokens}) -model: ybelkada/test-mistral-7b-v0.1-awq diff --git a/examples/running-mistrals/configs/bnb.yaml b/examples/running-mistrals/configs/bnb.yaml deleted file mode 100644 index 61cf1ebd..00000000 --- a/examples/running-mistrals/configs/bnb.yaml +++ /dev/null @@ -1,11 +0,0 @@ -defaults: - - _base_ - - _self_ - -experiment_name: bnb-batch_size(${benchmark.input_shapes.batch_size})-sequence_length(${benchmark.input_shapes.sequence_length})-new_tokens(${benchmark.new_tokens}) - -backend: - quantization_scheme: bnb - quantization_config: - load_in_4bit: true - bnb_4bit_compute_dtype: float16 diff --git a/examples/running-mistrals/configs/gptq.yaml b/examples/running-mistrals/configs/gptq.yaml deleted file mode 100644 index 927a172e..00000000 --- a/examples/running-mistrals/configs/gptq.yaml +++ /dev/null @@ -1,6 +0,0 @@ -defaults: - - _base_ - - _self_ - -experiment_name: gptq-batch_size(${benchmark.input_shapes.batch_size})-sequence_length(${benchmark.input_shapes.sequence_length})-new_tokens(${benchmark.new_tokens}) -model: TheBloke/Mistral-7B-v0.1-GPTQ diff --git a/examples/running-mistrals/experiments/A100-80GB/awq-batch_size(1)-sequence_length(512)-new_tokens(1000)/0/.hydra/config.yaml b/examples/running-mistrals/experiments/A100-80GB/awq-batch_size(1)-sequence_length(512)-new_tokens(1000)/0/.hydra/config.yaml deleted file mode 100644 index dd8a2808..00000000 --- a/examples/running-mistrals/experiments/A100-80GB/awq-batch_size(1)-sequence_length(512)-new_tokens(1000)/0/.hydra/config.yaml +++ /dev/null @@ -1,71 +0,0 @@ -backend: - name: pytorch - version: ${pytorch_version:} - _target_: optimum_benchmark.backends.pytorch.backend.PyTorchBackend - seed: 42 - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: true - continous_isolation_check: true - delete_cache: false - no_weights: false - device_map: null - torch_dtype: float16 - disable_grad: ${is_inference:${benchmark.name}} - eval_mode: ${is_inference:${benchmark.name}} - amp_autocast: false - amp_dtype: null - torch_compile: false - torch_compile_config: {} - bettertransformer: false - quantization_scheme: null - quantization_config: {} - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 1 - sequence_length: 512 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: 1000 - can_diffuse: ${can_diffuse:${task}} - can_generate: ${can_generate:${task}} - forward_kwargs: {} - generate_kwargs: {} -experiment_name: 
awq-batch_size(${benchmark.input_shapes.batch_size})-sequence_length(${benchmark.input_shapes.sequence_length})-new_tokens(${benchmark.new_tokens}) -model: ybelkada/test-mistral-7b-v0.1-awq -device: cuda -task: ${infer_task:${model}} -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.2 - optimum_commit: null - transformers_version: 4.35.0.dev0 - transformers_commit: null - accelerate_version: 0.24.0 - accelerate_commit: null - diffusers_version: null - diffusers_commit: null - python_version: 3.10.12 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB diff --git a/examples/running-mistrals/experiments/A100-80GB/awq-batch_size(1)-sequence_length(512)-new_tokens(1000)/0/.hydra/hydra.yaml b/examples/running-mistrals/experiments/A100-80GB/awq-batch_size(1)-sequence_length(512)-new_tokens(1000)/0/.hydra/hydra.yaml deleted file mode 100644 index bcfd32be..00000000 --- a/examples/running-mistrals/experiments/A100-80GB/awq-batch_size(1)-sequence_length(512)-new_tokens(1000)/0/.hydra/hydra.yaml +++ /dev/null @@ -1,174 +0,0 @@ -hydra: - run: - dir: experiments/${experiment_name} - sweep: - dir: experiments/${experiment_name} - subdir: ${hydra.job.num} - launcher: - _target_: hydra._internal.core_plugins.basic_launcher.BasicLauncher - sweeper: - _target_: hydra._internal.core_plugins.basic_sweeper.BasicSweeper - max_batch_size: null - params: - benchmark.input_shapes.batch_size: 1,2,4,8,16 - help: - app_name: ${hydra.job.name} - header: '${hydra.help.app_name} is powered by Hydra. - - ' - footer: 'Powered by Hydra (https://hydra.cc) - - Use --hydra-help to view Hydra specific help - - ' - template: '${hydra.help.header} - - == Configuration groups == - - Compose your configuration from those groups (group=option) - - - $APP_CONFIG_GROUPS - - - == Config == - - Override anything in the config (foo.bar=value) - - - $CONFIG - - - ${hydra.help.footer} - - ' - hydra_help: - template: 'Hydra (${hydra.runtime.version}) - - See https://hydra.cc for more info. - - - == Flags == - - $FLAGS_HELP - - - == Configuration groups == - - Compose your configuration from those groups (For example, append hydra/job_logging=disabled - to command line) - - - $HYDRA_CONFIG_GROUPS - - - Use ''--cfg hydra'' to Show the Hydra config. - - ' - hydra_help: ??? 
- hydra_logging: - version: 1 - formatters: - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s' - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - root: - level: INFO - handlers: - - console - disable_existing_loggers: false - job_logging: - version: 1 - formatters: - simple: - format: '[%(asctime)s][%(name)s][%(levelname)s] - %(message)s' - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s] - - %(message)s' - log_colors: - DEBUG: purple - INFO: green - WARNING: yellow - ERROR: red - CRITICAL: red - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - file: - class: logging.FileHandler - formatter: simple - filename: ${hydra.job.name}.log - root: - level: INFO - handlers: - - console - - file - disable_existing_loggers: false - env: {} - mode: MULTIRUN - searchpath: [] - callbacks: {} - output_subdir: .hydra - overrides: - hydra: - - hydra.mode=MULTIRUN - task: - - benchmark.input_shapes.batch_size=1 - job: - name: experiment - chdir: true - override_dirname: benchmark.input_shapes.batch_size=1 - id: '0' - num: 0 - config_name: awq - env_set: - CUDA_VISIBLE_DEVICES: '1' - CUDA_DEVICE_ORDER: PCI_BUS_ID - env_copy: [] - config: - override_dirname: - kv_sep: '=' - item_sep: ',' - exclude_keys: [] - runtime: - version: 1.3.2 - version_base: '1.3' - cwd: /workspace/optimum-benchmark/examples/running-mistral - config_sources: - - path: hydra.conf - schema: pkg - provider: hydra - - path: optimum_benchmark - schema: pkg - provider: main - - path: hydra_plugins.hydra_colorlog.conf - schema: pkg - provider: hydra-colorlog - - path: /workspace/optimum-benchmark/examples/running-mistral/configs - schema: file - provider: command-line - - path: '' - schema: structured - provider: schema - output_dir: /workspace/optimum-benchmark/examples/running-mistral/experiments/awq-batch_size(1)-sequence_length(512)-new_tokens(1000)/0 - choices: - benchmark: inference - backend: pytorch - hydra/env: default - hydra/callbacks: null - hydra/job_logging: colorlog - hydra/hydra_logging: colorlog - hydra/hydra_help: default - hydra/help: default - hydra/sweeper: basic - hydra/launcher: basic - hydra/output: default - verbose: false diff --git a/examples/running-mistrals/experiments/A100-80GB/awq-batch_size(1)-sequence_length(512)-new_tokens(1000)/0/.hydra/overrides.yaml b/examples/running-mistrals/experiments/A100-80GB/awq-batch_size(1)-sequence_length(512)-new_tokens(1000)/0/.hydra/overrides.yaml deleted file mode 100644 index 989520ff..00000000 --- a/examples/running-mistrals/experiments/A100-80GB/awq-batch_size(1)-sequence_length(512)-new_tokens(1000)/0/.hydra/overrides.yaml +++ /dev/null @@ -1 +0,0 @@ -- benchmark.input_shapes.batch_size=1 diff --git a/examples/running-mistrals/experiments/A100-80GB/awq-batch_size(1)-sequence_length(512)-new_tokens(1000)/0/hydra_config.yaml b/examples/running-mistrals/experiments/A100-80GB/awq-batch_size(1)-sequence_length(512)-new_tokens(1000)/0/hydra_config.yaml deleted file mode 100644 index 8b6686c5..00000000 --- a/examples/running-mistrals/experiments/A100-80GB/awq-batch_size(1)-sequence_length(512)-new_tokens(1000)/0/hydra_config.yaml +++ /dev/null @@ -1,77 +0,0 @@ -backend: - name: pytorch - version: 2.1.0+cu118 - _target_: optimum_benchmark.backends.pytorch.backend.PyTorchBackend - seed: 42 - 
inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: true - continous_isolation_check: true - delete_cache: false - no_weights: false - device_map: null - torch_dtype: float16 - disable_grad: true - eval_mode: true - amp_autocast: false - amp_dtype: null - torch_compile: false - torch_compile_config: {} - bettertransformer: false - quantization_scheme: null - quantization_config: {} - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 1 - sequence_length: 512 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: 1000 - can_diffuse: false - can_generate: true - forward_kwargs: {} - generate_kwargs: - max_new_tokens: 1000 - min_new_tokens: 1000 - do_sample: false - use_cache: true - pad_token_id: 0 - num_beams: 1 -experiment_name: awq-batch_size(1)-sequence_length(512)-new_tokens(1000) -model: ybelkada/test-mistral-7b-v0.1-awq -device: cuda -task: text-generation -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.2 - optimum_commit: null - transformers_version: 4.35.0.dev0 - transformers_commit: null - accelerate_version: 0.24.0 - accelerate_commit: null - diffusers_version: null - diffusers_commit: null - python_version: 3.10.12 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB diff --git a/examples/running-mistrals/experiments/A100-80GB/awq-batch_size(1)-sequence_length(512)-new_tokens(1000)/0/inference_results.csv b/examples/running-mistrals/experiments/A100-80GB/awq-batch_size(1)-sequence_length(512)-new_tokens(1000)/0/inference_results.csv deleted file mode 100644 index 5b69d36b..00000000 --- a/examples/running-mistrals/experiments/A100-80GB/awq-batch_size(1)-sequence_length(512)-new_tokens(1000)/0/inference_results.csv +++ /dev/null @@ -1,2 +0,0 @@ -forward.latency(s),forward.throughput(samples/s),forward.peak_memory(MB),generate.latency(s),generate.throughput(tokens/s),generate.peak_memory(MB) -0.164,6.1,7589,32.1,31.2,7660 diff --git a/examples/running-mistrals/experiments/A100-80GB/awq-batch_size(16)-sequence_length(512)-new_tokens(1000)/4/.hydra/config.yaml b/examples/running-mistrals/experiments/A100-80GB/awq-batch_size(16)-sequence_length(512)-new_tokens(1000)/4/.hydra/config.yaml deleted file mode 100644 index 23deebd5..00000000 --- a/examples/running-mistrals/experiments/A100-80GB/awq-batch_size(16)-sequence_length(512)-new_tokens(1000)/4/.hydra/config.yaml +++ /dev/null @@ -1,71 +0,0 @@ -backend: - name: pytorch - version: ${pytorch_version:} - _target_: optimum_benchmark.backends.pytorch.backend.PyTorchBackend - seed: 42 - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: true - continous_isolation_check: true - delete_cache: false - no_weights: false - device_map: null - torch_dtype: float16 - disable_grad: ${is_inference:${benchmark.name}} - eval_mode: ${is_inference:${benchmark.name}} - amp_autocast: false - amp_dtype: null - torch_compile: false - torch_compile_config: {} - bettertransformer: false - quantization_scheme: null - quantization_config: {} - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference 
- _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 16 - sequence_length: 512 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: 1000 - can_diffuse: ${can_diffuse:${task}} - can_generate: ${can_generate:${task}} - forward_kwargs: {} - generate_kwargs: {} -experiment_name: awq-batch_size(${benchmark.input_shapes.batch_size})-sequence_length(${benchmark.input_shapes.sequence_length})-new_tokens(${benchmark.new_tokens}) -model: ybelkada/test-mistral-7b-v0.1-awq -device: cuda -task: ${infer_task:${model}} -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.2 - optimum_commit: null - transformers_version: 4.35.0.dev0 - transformers_commit: null - accelerate_version: 0.24.0 - accelerate_commit: null - diffusers_version: null - diffusers_commit: null - python_version: 3.10.12 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB diff --git a/examples/running-mistrals/experiments/A100-80GB/awq-batch_size(16)-sequence_length(512)-new_tokens(1000)/4/.hydra/hydra.yaml b/examples/running-mistrals/experiments/A100-80GB/awq-batch_size(16)-sequence_length(512)-new_tokens(1000)/4/.hydra/hydra.yaml deleted file mode 100644 index 472713b3..00000000 --- a/examples/running-mistrals/experiments/A100-80GB/awq-batch_size(16)-sequence_length(512)-new_tokens(1000)/4/.hydra/hydra.yaml +++ /dev/null @@ -1,174 +0,0 @@ -hydra: - run: - dir: experiments/${experiment_name} - sweep: - dir: experiments/${experiment_name} - subdir: ${hydra.job.num} - launcher: - _target_: hydra._internal.core_plugins.basic_launcher.BasicLauncher - sweeper: - _target_: hydra._internal.core_plugins.basic_sweeper.BasicSweeper - max_batch_size: null - params: - benchmark.input_shapes.batch_size: 1,2,4,8,16 - help: - app_name: ${hydra.job.name} - header: '${hydra.help.app_name} is powered by Hydra. - - ' - footer: 'Powered by Hydra (https://hydra.cc) - - Use --hydra-help to view Hydra specific help - - ' - template: '${hydra.help.header} - - == Configuration groups == - - Compose your configuration from those groups (group=option) - - - $APP_CONFIG_GROUPS - - - == Config == - - Override anything in the config (foo.bar=value) - - - $CONFIG - - - ${hydra.help.footer} - - ' - hydra_help: - template: 'Hydra (${hydra.runtime.version}) - - See https://hydra.cc for more info. - - - == Flags == - - $FLAGS_HELP - - - == Configuration groups == - - Compose your configuration from those groups (For example, append hydra/job_logging=disabled - to command line) - - - $HYDRA_CONFIG_GROUPS - - - Use ''--cfg hydra'' to Show the Hydra config. - - ' - hydra_help: ??? 
- hydra_logging: - version: 1 - formatters: - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s' - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - root: - level: INFO - handlers: - - console - disable_existing_loggers: false - job_logging: - version: 1 - formatters: - simple: - format: '[%(asctime)s][%(name)s][%(levelname)s] - %(message)s' - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s] - - %(message)s' - log_colors: - DEBUG: purple - INFO: green - WARNING: yellow - ERROR: red - CRITICAL: red - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - file: - class: logging.FileHandler - formatter: simple - filename: ${hydra.job.name}.log - root: - level: INFO - handlers: - - console - - file - disable_existing_loggers: false - env: {} - mode: MULTIRUN - searchpath: [] - callbacks: {} - output_subdir: .hydra - overrides: - hydra: - - hydra.mode=MULTIRUN - task: - - benchmark.input_shapes.batch_size=16 - job: - name: experiment - chdir: true - override_dirname: benchmark.input_shapes.batch_size=16 - id: '4' - num: 4 - config_name: awq - env_set: - CUDA_VISIBLE_DEVICES: '1' - CUDA_DEVICE_ORDER: PCI_BUS_ID - env_copy: [] - config: - override_dirname: - kv_sep: '=' - item_sep: ',' - exclude_keys: [] - runtime: - version: 1.3.2 - version_base: '1.3' - cwd: /workspace/optimum-benchmark/examples/running-mistral - config_sources: - - path: hydra.conf - schema: pkg - provider: hydra - - path: optimum_benchmark - schema: pkg - provider: main - - path: hydra_plugins.hydra_colorlog.conf - schema: pkg - provider: hydra-colorlog - - path: /workspace/optimum-benchmark/examples/running-mistral/configs - schema: file - provider: command-line - - path: '' - schema: structured - provider: schema - output_dir: /workspace/optimum-benchmark/examples/running-mistral/experiments/awq-batch_size(16)-sequence_length(512)-new_tokens(1000)/4 - choices: - benchmark: inference - backend: pytorch - hydra/env: default - hydra/callbacks: null - hydra/job_logging: colorlog - hydra/hydra_logging: colorlog - hydra/hydra_help: default - hydra/help: default - hydra/sweeper: basic - hydra/launcher: basic - hydra/output: default - verbose: false diff --git a/examples/running-mistrals/experiments/A100-80GB/awq-batch_size(16)-sequence_length(512)-new_tokens(1000)/4/.hydra/overrides.yaml b/examples/running-mistrals/experiments/A100-80GB/awq-batch_size(16)-sequence_length(512)-new_tokens(1000)/4/.hydra/overrides.yaml deleted file mode 100644 index fdb7f01d..00000000 --- a/examples/running-mistrals/experiments/A100-80GB/awq-batch_size(16)-sequence_length(512)-new_tokens(1000)/4/.hydra/overrides.yaml +++ /dev/null @@ -1 +0,0 @@ -- benchmark.input_shapes.batch_size=16 diff --git a/examples/running-mistrals/experiments/A100-80GB/awq-batch_size(16)-sequence_length(512)-new_tokens(1000)/4/hydra_config.yaml b/examples/running-mistrals/experiments/A100-80GB/awq-batch_size(16)-sequence_length(512)-new_tokens(1000)/4/hydra_config.yaml deleted file mode 100644 index 56f693fb..00000000 --- a/examples/running-mistrals/experiments/A100-80GB/awq-batch_size(16)-sequence_length(512)-new_tokens(1000)/4/hydra_config.yaml +++ /dev/null @@ -1,77 +0,0 @@ -backend: - name: pytorch - version: 2.1.0+cu118 - _target_: optimum_benchmark.backends.pytorch.backend.PyTorchBackend - seed: 42 - 
inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: true - continous_isolation_check: true - delete_cache: false - no_weights: false - device_map: null - torch_dtype: float16 - disable_grad: true - eval_mode: true - amp_autocast: false - amp_dtype: null - torch_compile: false - torch_compile_config: {} - bettertransformer: false - quantization_scheme: null - quantization_config: {} - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 16 - sequence_length: 512 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: 1000 - can_diffuse: false - can_generate: true - forward_kwargs: {} - generate_kwargs: - max_new_tokens: 1000 - min_new_tokens: 1000 - do_sample: false - use_cache: true - pad_token_id: 0 - num_beams: 1 -experiment_name: awq-batch_size(16)-sequence_length(512)-new_tokens(1000) -model: ybelkada/test-mistral-7b-v0.1-awq -device: cuda -task: text-generation -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.2 - optimum_commit: null - transformers_version: 4.35.0.dev0 - transformers_commit: null - accelerate_version: 0.24.0 - accelerate_commit: null - diffusers_version: null - diffusers_commit: null - python_version: 3.10.12 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB diff --git a/examples/running-mistrals/experiments/A100-80GB/awq-batch_size(16)-sequence_length(512)-new_tokens(1000)/4/inference_results.csv b/examples/running-mistrals/experiments/A100-80GB/awq-batch_size(16)-sequence_length(512)-new_tokens(1000)/4/inference_results.csv deleted file mode 100644 index 709cdc10..00000000 --- a/examples/running-mistrals/experiments/A100-80GB/awq-batch_size(16)-sequence_length(512)-new_tokens(1000)/4/inference_results.csv +++ /dev/null @@ -1,2 +0,0 @@ -forward.latency(s),forward.throughput(samples/s),forward.peak_memory(MB),generate.latency(s),generate.throughput(tokens/s),generate.peak_memory(MB) -2.52,6.35,14486,51.3,312.0,15080 diff --git a/examples/running-mistrals/experiments/A100-80GB/awq-batch_size(2)-sequence_length(512)-new_tokens(1000)/1/.hydra/config.yaml b/examples/running-mistrals/experiments/A100-80GB/awq-batch_size(2)-sequence_length(512)-new_tokens(1000)/1/.hydra/config.yaml deleted file mode 100644 index 15ca6c32..00000000 --- a/examples/running-mistrals/experiments/A100-80GB/awq-batch_size(2)-sequence_length(512)-new_tokens(1000)/1/.hydra/config.yaml +++ /dev/null @@ -1,71 +0,0 @@ -backend: - name: pytorch - version: ${pytorch_version:} - _target_: optimum_benchmark.backends.pytorch.backend.PyTorchBackend - seed: 42 - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: true - continous_isolation_check: true - delete_cache: false - no_weights: false - device_map: null - torch_dtype: float16 - disable_grad: ${is_inference:${benchmark.name}} - eval_mode: ${is_inference:${benchmark.name}} - amp_autocast: false - amp_dtype: null - torch_compile: false - torch_compile_config: {} - bettertransformer: false - quantization_scheme: null - quantization_config: {} - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: 
inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 2 - sequence_length: 512 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: 1000 - can_diffuse: ${can_diffuse:${task}} - can_generate: ${can_generate:${task}} - forward_kwargs: {} - generate_kwargs: {} -experiment_name: awq-batch_size(${benchmark.input_shapes.batch_size})-sequence_length(${benchmark.input_shapes.sequence_length})-new_tokens(${benchmark.new_tokens}) -model: ybelkada/test-mistral-7b-v0.1-awq -device: cuda -task: ${infer_task:${model}} -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.2 - optimum_commit: null - transformers_version: 4.35.0.dev0 - transformers_commit: null - accelerate_version: 0.24.0 - accelerate_commit: null - diffusers_version: null - diffusers_commit: null - python_version: 3.10.12 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB diff --git a/examples/running-mistrals/experiments/A100-80GB/awq-batch_size(2)-sequence_length(512)-new_tokens(1000)/1/.hydra/hydra.yaml b/examples/running-mistrals/experiments/A100-80GB/awq-batch_size(2)-sequence_length(512)-new_tokens(1000)/1/.hydra/hydra.yaml deleted file mode 100644 index ab5e3ddb..00000000 --- a/examples/running-mistrals/experiments/A100-80GB/awq-batch_size(2)-sequence_length(512)-new_tokens(1000)/1/.hydra/hydra.yaml +++ /dev/null @@ -1,174 +0,0 @@ -hydra: - run: - dir: experiments/${experiment_name} - sweep: - dir: experiments/${experiment_name} - subdir: ${hydra.job.num} - launcher: - _target_: hydra._internal.core_plugins.basic_launcher.BasicLauncher - sweeper: - _target_: hydra._internal.core_plugins.basic_sweeper.BasicSweeper - max_batch_size: null - params: - benchmark.input_shapes.batch_size: 1,2,4,8,16 - help: - app_name: ${hydra.job.name} - header: '${hydra.help.app_name} is powered by Hydra. - - ' - footer: 'Powered by Hydra (https://hydra.cc) - - Use --hydra-help to view Hydra specific help - - ' - template: '${hydra.help.header} - - == Configuration groups == - - Compose your configuration from those groups (group=option) - - - $APP_CONFIG_GROUPS - - - == Config == - - Override anything in the config (foo.bar=value) - - - $CONFIG - - - ${hydra.help.footer} - - ' - hydra_help: - template: 'Hydra (${hydra.runtime.version}) - - See https://hydra.cc for more info. - - - == Flags == - - $FLAGS_HELP - - - == Configuration groups == - - Compose your configuration from those groups (For example, append hydra/job_logging=disabled - to command line) - - - $HYDRA_CONFIG_GROUPS - - - Use ''--cfg hydra'' to Show the Hydra config. - - ' - hydra_help: ??? 
- hydra_logging: - version: 1 - formatters: - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s' - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - root: - level: INFO - handlers: - - console - disable_existing_loggers: false - job_logging: - version: 1 - formatters: - simple: - format: '[%(asctime)s][%(name)s][%(levelname)s] - %(message)s' - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s] - - %(message)s' - log_colors: - DEBUG: purple - INFO: green - WARNING: yellow - ERROR: red - CRITICAL: red - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - file: - class: logging.FileHandler - formatter: simple - filename: ${hydra.job.name}.log - root: - level: INFO - handlers: - - console - - file - disable_existing_loggers: false - env: {} - mode: MULTIRUN - searchpath: [] - callbacks: {} - output_subdir: .hydra - overrides: - hydra: - - hydra.mode=MULTIRUN - task: - - benchmark.input_shapes.batch_size=2 - job: - name: experiment - chdir: true - override_dirname: benchmark.input_shapes.batch_size=2 - id: '1' - num: 1 - config_name: awq - env_set: - CUDA_VISIBLE_DEVICES: '1' - CUDA_DEVICE_ORDER: PCI_BUS_ID - env_copy: [] - config: - override_dirname: - kv_sep: '=' - item_sep: ',' - exclude_keys: [] - runtime: - version: 1.3.2 - version_base: '1.3' - cwd: /workspace/optimum-benchmark/examples/running-mistral - config_sources: - - path: hydra.conf - schema: pkg - provider: hydra - - path: optimum_benchmark - schema: pkg - provider: main - - path: hydra_plugins.hydra_colorlog.conf - schema: pkg - provider: hydra-colorlog - - path: /workspace/optimum-benchmark/examples/running-mistral/configs - schema: file - provider: command-line - - path: '' - schema: structured - provider: schema - output_dir: /workspace/optimum-benchmark/examples/running-mistral/experiments/awq-batch_size(2)-sequence_length(512)-new_tokens(1000)/1 - choices: - benchmark: inference - backend: pytorch - hydra/env: default - hydra/callbacks: null - hydra/job_logging: colorlog - hydra/hydra_logging: colorlog - hydra/hydra_help: default - hydra/help: default - hydra/sweeper: basic - hydra/launcher: basic - hydra/output: default - verbose: false diff --git a/examples/running-mistrals/experiments/A100-80GB/awq-batch_size(2)-sequence_length(512)-new_tokens(1000)/1/.hydra/overrides.yaml b/examples/running-mistrals/experiments/A100-80GB/awq-batch_size(2)-sequence_length(512)-new_tokens(1000)/1/.hydra/overrides.yaml deleted file mode 100644 index 8211b85f..00000000 --- a/examples/running-mistrals/experiments/A100-80GB/awq-batch_size(2)-sequence_length(512)-new_tokens(1000)/1/.hydra/overrides.yaml +++ /dev/null @@ -1 +0,0 @@ -- benchmark.input_shapes.batch_size=2 diff --git a/examples/running-mistrals/experiments/A100-80GB/awq-batch_size(2)-sequence_length(512)-new_tokens(1000)/1/hydra_config.yaml b/examples/running-mistrals/experiments/A100-80GB/awq-batch_size(2)-sequence_length(512)-new_tokens(1000)/1/hydra_config.yaml deleted file mode 100644 index 52724f0c..00000000 --- a/examples/running-mistrals/experiments/A100-80GB/awq-batch_size(2)-sequence_length(512)-new_tokens(1000)/1/hydra_config.yaml +++ /dev/null @@ -1,77 +0,0 @@ -backend: - name: pytorch - version: 2.1.0+cu118 - _target_: optimum_benchmark.backends.pytorch.backend.PyTorchBackend - seed: 42 - 
inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: true - continous_isolation_check: true - delete_cache: false - no_weights: false - device_map: null - torch_dtype: float16 - disable_grad: true - eval_mode: true - amp_autocast: false - amp_dtype: null - torch_compile: false - torch_compile_config: {} - bettertransformer: false - quantization_scheme: null - quantization_config: {} - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 2 - sequence_length: 512 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: 1000 - can_diffuse: false - can_generate: true - forward_kwargs: {} - generate_kwargs: - max_new_tokens: 1000 - min_new_tokens: 1000 - do_sample: false - use_cache: true - pad_token_id: 0 - num_beams: 1 -experiment_name: awq-batch_size(2)-sequence_length(512)-new_tokens(1000) -model: ybelkada/test-mistral-7b-v0.1-awq -device: cuda -task: text-generation -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.2 - optimum_commit: null - transformers_version: 4.35.0.dev0 - transformers_commit: null - accelerate_version: 0.24.0 - accelerate_commit: null - diffusers_version: null - diffusers_commit: null - python_version: 3.10.12 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB diff --git a/examples/running-mistrals/experiments/A100-80GB/awq-batch_size(2)-sequence_length(512)-new_tokens(1000)/1/inference_results.csv b/examples/running-mistrals/experiments/A100-80GB/awq-batch_size(2)-sequence_length(512)-new_tokens(1000)/1/inference_results.csv deleted file mode 100644 index 44139984..00000000 --- a/examples/running-mistrals/experiments/A100-80GB/awq-batch_size(2)-sequence_length(512)-new_tokens(1000)/1/inference_results.csv +++ /dev/null @@ -1,2 +0,0 @@ -forward.latency(s),forward.throughput(samples/s),forward.peak_memory(MB),generate.latency(s),generate.throughput(tokens/s),generate.peak_memory(MB) -0.321,6.23,7824,31.8,62.9,8136 diff --git a/examples/running-mistrals/experiments/A100-80GB/awq-batch_size(4)-sequence_length(512)-new_tokens(1000)/2/.hydra/config.yaml b/examples/running-mistrals/experiments/A100-80GB/awq-batch_size(4)-sequence_length(512)-new_tokens(1000)/2/.hydra/config.yaml deleted file mode 100644 index c5659988..00000000 --- a/examples/running-mistrals/experiments/A100-80GB/awq-batch_size(4)-sequence_length(512)-new_tokens(1000)/2/.hydra/config.yaml +++ /dev/null @@ -1,71 +0,0 @@ -backend: - name: pytorch - version: ${pytorch_version:} - _target_: optimum_benchmark.backends.pytorch.backend.PyTorchBackend - seed: 42 - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: true - continous_isolation_check: true - delete_cache: false - no_weights: false - device_map: null - torch_dtype: float16 - disable_grad: ${is_inference:${benchmark.name}} - eval_mode: ${is_inference:${benchmark.name}} - amp_autocast: false - amp_dtype: null - torch_compile: false - torch_compile_config: {} - bettertransformer: false - quantization_scheme: null - quantization_config: {} - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - 
_target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 4 - sequence_length: 512 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: 1000 - can_diffuse: ${can_diffuse:${task}} - can_generate: ${can_generate:${task}} - forward_kwargs: {} - generate_kwargs: {} -experiment_name: awq-batch_size(${benchmark.input_shapes.batch_size})-sequence_length(${benchmark.input_shapes.sequence_length})-new_tokens(${benchmark.new_tokens}) -model: ybelkada/test-mistral-7b-v0.1-awq -device: cuda -task: ${infer_task:${model}} -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.2 - optimum_commit: null - transformers_version: 4.35.0.dev0 - transformers_commit: null - accelerate_version: 0.24.0 - accelerate_commit: null - diffusers_version: null - diffusers_commit: null - python_version: 3.10.12 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB diff --git a/examples/running-mistrals/experiments/A100-80GB/awq-batch_size(4)-sequence_length(512)-new_tokens(1000)/2/.hydra/hydra.yaml b/examples/running-mistrals/experiments/A100-80GB/awq-batch_size(4)-sequence_length(512)-new_tokens(1000)/2/.hydra/hydra.yaml deleted file mode 100644 index 676eb33b..00000000 --- a/examples/running-mistrals/experiments/A100-80GB/awq-batch_size(4)-sequence_length(512)-new_tokens(1000)/2/.hydra/hydra.yaml +++ /dev/null @@ -1,174 +0,0 @@ -hydra: - run: - dir: experiments/${experiment_name} - sweep: - dir: experiments/${experiment_name} - subdir: ${hydra.job.num} - launcher: - _target_: hydra._internal.core_plugins.basic_launcher.BasicLauncher - sweeper: - _target_: hydra._internal.core_plugins.basic_sweeper.BasicSweeper - max_batch_size: null - params: - benchmark.input_shapes.batch_size: 1,2,4,8,16 - help: - app_name: ${hydra.job.name} - header: '${hydra.help.app_name} is powered by Hydra. - - ' - footer: 'Powered by Hydra (https://hydra.cc) - - Use --hydra-help to view Hydra specific help - - ' - template: '${hydra.help.header} - - == Configuration groups == - - Compose your configuration from those groups (group=option) - - - $APP_CONFIG_GROUPS - - - == Config == - - Override anything in the config (foo.bar=value) - - - $CONFIG - - - ${hydra.help.footer} - - ' - hydra_help: - template: 'Hydra (${hydra.runtime.version}) - - See https://hydra.cc for more info. - - - == Flags == - - $FLAGS_HELP - - - == Configuration groups == - - Compose your configuration from those groups (For example, append hydra/job_logging=disabled - to command line) - - - $HYDRA_CONFIG_GROUPS - - - Use ''--cfg hydra'' to Show the Hydra config. - - ' - hydra_help: ??? 
- hydra_logging: - version: 1 - formatters: - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s' - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - root: - level: INFO - handlers: - - console - disable_existing_loggers: false - job_logging: - version: 1 - formatters: - simple: - format: '[%(asctime)s][%(name)s][%(levelname)s] - %(message)s' - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s] - - %(message)s' - log_colors: - DEBUG: purple - INFO: green - WARNING: yellow - ERROR: red - CRITICAL: red - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - file: - class: logging.FileHandler - formatter: simple - filename: ${hydra.job.name}.log - root: - level: INFO - handlers: - - console - - file - disable_existing_loggers: false - env: {} - mode: MULTIRUN - searchpath: [] - callbacks: {} - output_subdir: .hydra - overrides: - hydra: - - hydra.mode=MULTIRUN - task: - - benchmark.input_shapes.batch_size=4 - job: - name: experiment - chdir: true - override_dirname: benchmark.input_shapes.batch_size=4 - id: '2' - num: 2 - config_name: awq - env_set: - CUDA_VISIBLE_DEVICES: '1' - CUDA_DEVICE_ORDER: PCI_BUS_ID - env_copy: [] - config: - override_dirname: - kv_sep: '=' - item_sep: ',' - exclude_keys: [] - runtime: - version: 1.3.2 - version_base: '1.3' - cwd: /workspace/optimum-benchmark/examples/running-mistral - config_sources: - - path: hydra.conf - schema: pkg - provider: hydra - - path: optimum_benchmark - schema: pkg - provider: main - - path: hydra_plugins.hydra_colorlog.conf - schema: pkg - provider: hydra-colorlog - - path: /workspace/optimum-benchmark/examples/running-mistral/configs - schema: file - provider: command-line - - path: '' - schema: structured - provider: schema - output_dir: /workspace/optimum-benchmark/examples/running-mistral/experiments/awq-batch_size(4)-sequence_length(512)-new_tokens(1000)/2 - choices: - benchmark: inference - backend: pytorch - hydra/env: default - hydra/callbacks: null - hydra/job_logging: colorlog - hydra/hydra_logging: colorlog - hydra/hydra_help: default - hydra/help: default - hydra/sweeper: basic - hydra/launcher: basic - hydra/output: default - verbose: false diff --git a/examples/running-mistrals/experiments/A100-80GB/awq-batch_size(4)-sequence_length(512)-new_tokens(1000)/2/.hydra/overrides.yaml b/examples/running-mistrals/experiments/A100-80GB/awq-batch_size(4)-sequence_length(512)-new_tokens(1000)/2/.hydra/overrides.yaml deleted file mode 100644 index eef8c9ca..00000000 --- a/examples/running-mistrals/experiments/A100-80GB/awq-batch_size(4)-sequence_length(512)-new_tokens(1000)/2/.hydra/overrides.yaml +++ /dev/null @@ -1 +0,0 @@ -- benchmark.input_shapes.batch_size=4 diff --git a/examples/running-mistrals/experiments/A100-80GB/awq-batch_size(4)-sequence_length(512)-new_tokens(1000)/2/hydra_config.yaml b/examples/running-mistrals/experiments/A100-80GB/awq-batch_size(4)-sequence_length(512)-new_tokens(1000)/2/hydra_config.yaml deleted file mode 100644 index 528e56a1..00000000 --- a/examples/running-mistrals/experiments/A100-80GB/awq-batch_size(4)-sequence_length(512)-new_tokens(1000)/2/hydra_config.yaml +++ /dev/null @@ -1,77 +0,0 @@ -backend: - name: pytorch - version: 2.1.0+cu118 - _target_: optimum_benchmark.backends.pytorch.backend.PyTorchBackend - seed: 42 - 
inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: true - continous_isolation_check: true - delete_cache: false - no_weights: false - device_map: null - torch_dtype: float16 - disable_grad: true - eval_mode: true - amp_autocast: false - amp_dtype: null - torch_compile: false - torch_compile_config: {} - bettertransformer: false - quantization_scheme: null - quantization_config: {} - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 4 - sequence_length: 512 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: 1000 - can_diffuse: false - can_generate: true - forward_kwargs: {} - generate_kwargs: - max_new_tokens: 1000 - min_new_tokens: 1000 - do_sample: false - use_cache: true - pad_token_id: 0 - num_beams: 1 -experiment_name: awq-batch_size(4)-sequence_length(512)-new_tokens(1000) -model: ybelkada/test-mistral-7b-v0.1-awq -device: cuda -task: text-generation -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.2 - optimum_commit: null - transformers_version: 4.35.0.dev0 - transformers_commit: null - accelerate_version: 0.24.0 - accelerate_commit: null - diffusers_version: null - diffusers_commit: null - python_version: 3.10.12 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB diff --git a/examples/running-mistrals/experiments/A100-80GB/awq-batch_size(4)-sequence_length(512)-new_tokens(1000)/2/inference_results.csv b/examples/running-mistrals/experiments/A100-80GB/awq-batch_size(4)-sequence_length(512)-new_tokens(1000)/2/inference_results.csv deleted file mode 100644 index 8aafc741..00000000 --- a/examples/running-mistrals/experiments/A100-80GB/awq-batch_size(4)-sequence_length(512)-new_tokens(1000)/2/inference_results.csv +++ /dev/null @@ -1,2 +0,0 @@ -forward.latency(s),forward.throughput(samples/s),forward.peak_memory(MB),generate.latency(s),generate.throughput(tokens/s),generate.peak_memory(MB) -0.638,6.27,8958,32.8,122.0,9101 diff --git a/examples/running-mistrals/experiments/A100-80GB/awq-batch_size(8)-sequence_length(512)-new_tokens(1000)/3/.hydra/config.yaml b/examples/running-mistrals/experiments/A100-80GB/awq-batch_size(8)-sequence_length(512)-new_tokens(1000)/3/.hydra/config.yaml deleted file mode 100644 index 37a3bdb1..00000000 --- a/examples/running-mistrals/experiments/A100-80GB/awq-batch_size(8)-sequence_length(512)-new_tokens(1000)/3/.hydra/config.yaml +++ /dev/null @@ -1,71 +0,0 @@ -backend: - name: pytorch - version: ${pytorch_version:} - _target_: optimum_benchmark.backends.pytorch.backend.PyTorchBackend - seed: 42 - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: true - continous_isolation_check: true - delete_cache: false - no_weights: false - device_map: null - torch_dtype: float16 - disable_grad: ${is_inference:${benchmark.name}} - eval_mode: ${is_inference:${benchmark.name}} - amp_autocast: false - amp_dtype: null - torch_compile: false - torch_compile_config: {} - bettertransformer: false - quantization_scheme: null - quantization_config: {} - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - 
_target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 8 - sequence_length: 512 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: 1000 - can_diffuse: ${can_diffuse:${task}} - can_generate: ${can_generate:${task}} - forward_kwargs: {} - generate_kwargs: {} -experiment_name: awq-batch_size(${benchmark.input_shapes.batch_size})-sequence_length(${benchmark.input_shapes.sequence_length})-new_tokens(${benchmark.new_tokens}) -model: ybelkada/test-mistral-7b-v0.1-awq -device: cuda -task: ${infer_task:${model}} -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.2 - optimum_commit: null - transformers_version: 4.35.0.dev0 - transformers_commit: null - accelerate_version: 0.24.0 - accelerate_commit: null - diffusers_version: null - diffusers_commit: null - python_version: 3.10.12 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB diff --git a/examples/running-mistrals/experiments/A100-80GB/awq-batch_size(8)-sequence_length(512)-new_tokens(1000)/3/.hydra/hydra.yaml b/examples/running-mistrals/experiments/A100-80GB/awq-batch_size(8)-sequence_length(512)-new_tokens(1000)/3/.hydra/hydra.yaml deleted file mode 100644 index 6403b0cb..00000000 --- a/examples/running-mistrals/experiments/A100-80GB/awq-batch_size(8)-sequence_length(512)-new_tokens(1000)/3/.hydra/hydra.yaml +++ /dev/null @@ -1,174 +0,0 @@ -hydra: - run: - dir: experiments/${experiment_name} - sweep: - dir: experiments/${experiment_name} - subdir: ${hydra.job.num} - launcher: - _target_: hydra._internal.core_plugins.basic_launcher.BasicLauncher - sweeper: - _target_: hydra._internal.core_plugins.basic_sweeper.BasicSweeper - max_batch_size: null - params: - benchmark.input_shapes.batch_size: 1,2,4,8,16 - help: - app_name: ${hydra.job.name} - header: '${hydra.help.app_name} is powered by Hydra. - - ' - footer: 'Powered by Hydra (https://hydra.cc) - - Use --hydra-help to view Hydra specific help - - ' - template: '${hydra.help.header} - - == Configuration groups == - - Compose your configuration from those groups (group=option) - - - $APP_CONFIG_GROUPS - - - == Config == - - Override anything in the config (foo.bar=value) - - - $CONFIG - - - ${hydra.help.footer} - - ' - hydra_help: - template: 'Hydra (${hydra.runtime.version}) - - See https://hydra.cc for more info. - - - == Flags == - - $FLAGS_HELP - - - == Configuration groups == - - Compose your configuration from those groups (For example, append hydra/job_logging=disabled - to command line) - - - $HYDRA_CONFIG_GROUPS - - - Use ''--cfg hydra'' to Show the Hydra config. - - ' - hydra_help: ??? 
- hydra_logging: - version: 1 - formatters: - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s' - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - root: - level: INFO - handlers: - - console - disable_existing_loggers: false - job_logging: - version: 1 - formatters: - simple: - format: '[%(asctime)s][%(name)s][%(levelname)s] - %(message)s' - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s] - - %(message)s' - log_colors: - DEBUG: purple - INFO: green - WARNING: yellow - ERROR: red - CRITICAL: red - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - file: - class: logging.FileHandler - formatter: simple - filename: ${hydra.job.name}.log - root: - level: INFO - handlers: - - console - - file - disable_existing_loggers: false - env: {} - mode: MULTIRUN - searchpath: [] - callbacks: {} - output_subdir: .hydra - overrides: - hydra: - - hydra.mode=MULTIRUN - task: - - benchmark.input_shapes.batch_size=8 - job: - name: experiment - chdir: true - override_dirname: benchmark.input_shapes.batch_size=8 - id: '3' - num: 3 - config_name: awq - env_set: - CUDA_VISIBLE_DEVICES: '1' - CUDA_DEVICE_ORDER: PCI_BUS_ID - env_copy: [] - config: - override_dirname: - kv_sep: '=' - item_sep: ',' - exclude_keys: [] - runtime: - version: 1.3.2 - version_base: '1.3' - cwd: /workspace/optimum-benchmark/examples/running-mistral - config_sources: - - path: hydra.conf - schema: pkg - provider: hydra - - path: optimum_benchmark - schema: pkg - provider: main - - path: hydra_plugins.hydra_colorlog.conf - schema: pkg - provider: hydra-colorlog - - path: /workspace/optimum-benchmark/examples/running-mistral/configs - schema: file - provider: command-line - - path: '' - schema: structured - provider: schema - output_dir: /workspace/optimum-benchmark/examples/running-mistral/experiments/awq-batch_size(8)-sequence_length(512)-new_tokens(1000)/3 - choices: - benchmark: inference - backend: pytorch - hydra/env: default - hydra/callbacks: null - hydra/job_logging: colorlog - hydra/hydra_logging: colorlog - hydra/hydra_help: default - hydra/help: default - hydra/sweeper: basic - hydra/launcher: basic - hydra/output: default - verbose: false diff --git a/examples/running-mistrals/experiments/A100-80GB/awq-batch_size(8)-sequence_length(512)-new_tokens(1000)/3/.hydra/overrides.yaml b/examples/running-mistrals/experiments/A100-80GB/awq-batch_size(8)-sequence_length(512)-new_tokens(1000)/3/.hydra/overrides.yaml deleted file mode 100644 index 8cd14374..00000000 --- a/examples/running-mistrals/experiments/A100-80GB/awq-batch_size(8)-sequence_length(512)-new_tokens(1000)/3/.hydra/overrides.yaml +++ /dev/null @@ -1 +0,0 @@ -- benchmark.input_shapes.batch_size=8 diff --git a/examples/running-mistrals/experiments/A100-80GB/awq-batch_size(8)-sequence_length(512)-new_tokens(1000)/3/hydra_config.yaml b/examples/running-mistrals/experiments/A100-80GB/awq-batch_size(8)-sequence_length(512)-new_tokens(1000)/3/hydra_config.yaml deleted file mode 100644 index c830c5f3..00000000 --- a/examples/running-mistrals/experiments/A100-80GB/awq-batch_size(8)-sequence_length(512)-new_tokens(1000)/3/hydra_config.yaml +++ /dev/null @@ -1,77 +0,0 @@ -backend: - name: pytorch - version: 2.1.0+cu118 - _target_: optimum_benchmark.backends.pytorch.backend.PyTorchBackend - seed: 42 - 
inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: true - continous_isolation_check: true - delete_cache: false - no_weights: false - device_map: null - torch_dtype: float16 - disable_grad: true - eval_mode: true - amp_autocast: false - amp_dtype: null - torch_compile: false - torch_compile_config: {} - bettertransformer: false - quantization_scheme: null - quantization_config: {} - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 8 - sequence_length: 512 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: 1000 - can_diffuse: false - can_generate: true - forward_kwargs: {} - generate_kwargs: - max_new_tokens: 1000 - min_new_tokens: 1000 - do_sample: false - use_cache: true - pad_token_id: 0 - num_beams: 1 -experiment_name: awq-batch_size(8)-sequence_length(512)-new_tokens(1000) -model: ybelkada/test-mistral-7b-v0.1-awq -device: cuda -task: text-generation -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.2 - optimum_commit: null - transformers_version: 4.35.0.dev0 - transformers_commit: null - accelerate_version: 0.24.0 - accelerate_commit: null - diffusers_version: null - diffusers_commit: null - python_version: 3.10.12 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB diff --git a/examples/running-mistrals/experiments/A100-80GB/awq-batch_size(8)-sequence_length(512)-new_tokens(1000)/3/inference_results.csv b/examples/running-mistrals/experiments/A100-80GB/awq-batch_size(8)-sequence_length(512)-new_tokens(1000)/3/inference_results.csv deleted file mode 100644 index 7f765da2..00000000 --- a/examples/running-mistrals/experiments/A100-80GB/awq-batch_size(8)-sequence_length(512)-new_tokens(1000)/3/inference_results.csv +++ /dev/null @@ -1,2 +0,0 @@ -forward.latency(s),forward.throughput(samples/s),forward.peak_memory(MB),generate.latency(s),generate.throughput(tokens/s),generate.peak_memory(MB) -1.27,6.3,10787,34.9,229.0,11175 diff --git a/examples/running-mistrals/experiments/A100-80GB/bnb-batch_size(1)-sequence_length(512)-new_tokens(1000)/0/.hydra/config.yaml b/examples/running-mistrals/experiments/A100-80GB/bnb-batch_size(1)-sequence_length(512)-new_tokens(1000)/0/.hydra/config.yaml deleted file mode 100644 index 5aa1ffa3..00000000 --- a/examples/running-mistrals/experiments/A100-80GB/bnb-batch_size(1)-sequence_length(512)-new_tokens(1000)/0/.hydra/config.yaml +++ /dev/null @@ -1,73 +0,0 @@ -backend: - name: pytorch - version: ${pytorch_version:} - _target_: optimum_benchmark.backends.pytorch.backend.PyTorchBackend - seed: 42 - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: true - continous_isolation_check: true - delete_cache: false - no_weights: false - device_map: null - torch_dtype: float16 - disable_grad: ${is_inference:${benchmark.name}} - eval_mode: ${is_inference:${benchmark.name}} - amp_autocast: false - amp_dtype: null - torch_compile: false - torch_compile_config: {} - bettertransformer: false - quantization_scheme: bnb - quantization_config: - load_in_4bit: true - bnb_4bit_compute_dtype: float16 - use_ddp: false - ddp_config: {} - peft_strategy: 
null - peft_config: {} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 1 - sequence_length: 512 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: 1000 - can_diffuse: ${can_diffuse:${task}} - can_generate: ${can_generate:${task}} - forward_kwargs: {} - generate_kwargs: {} -experiment_name: bnb-batch_size(${benchmark.input_shapes.batch_size})-sequence_length(${benchmark.input_shapes.sequence_length})-new_tokens(${benchmark.new_tokens}) -model: mistralai/Mistral-7B-v0.1 -device: cuda -task: ${infer_task:${model}} -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.2 - optimum_commit: null - transformers_version: 4.35.0.dev0 - transformers_commit: null - accelerate_version: 0.24.0 - accelerate_commit: null - diffusers_version: null - diffusers_commit: null - python_version: 3.10.12 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB diff --git a/examples/running-mistrals/experiments/A100-80GB/bnb-batch_size(1)-sequence_length(512)-new_tokens(1000)/0/.hydra/hydra.yaml b/examples/running-mistrals/experiments/A100-80GB/bnb-batch_size(1)-sequence_length(512)-new_tokens(1000)/0/.hydra/hydra.yaml deleted file mode 100644 index 19bb99b5..00000000 --- a/examples/running-mistrals/experiments/A100-80GB/bnb-batch_size(1)-sequence_length(512)-new_tokens(1000)/0/.hydra/hydra.yaml +++ /dev/null @@ -1,174 +0,0 @@ -hydra: - run: - dir: experiments/${experiment_name} - sweep: - dir: experiments/${experiment_name} - subdir: ${hydra.job.num} - launcher: - _target_: hydra._internal.core_plugins.basic_launcher.BasicLauncher - sweeper: - _target_: hydra._internal.core_plugins.basic_sweeper.BasicSweeper - max_batch_size: null - params: - benchmark.input_shapes.batch_size: 1,2,4,8,16 - help: - app_name: ${hydra.job.name} - header: '${hydra.help.app_name} is powered by Hydra. - - ' - footer: 'Powered by Hydra (https://hydra.cc) - - Use --hydra-help to view Hydra specific help - - ' - template: '${hydra.help.header} - - == Configuration groups == - - Compose your configuration from those groups (group=option) - - - $APP_CONFIG_GROUPS - - - == Config == - - Override anything in the config (foo.bar=value) - - - $CONFIG - - - ${hydra.help.footer} - - ' - hydra_help: - template: 'Hydra (${hydra.runtime.version}) - - See https://hydra.cc for more info. - - - == Flags == - - $FLAGS_HELP - - - == Configuration groups == - - Compose your configuration from those groups (For example, append hydra/job_logging=disabled - to command line) - - - $HYDRA_CONFIG_GROUPS - - - Use ''--cfg hydra'' to Show the Hydra config. - - ' - hydra_help: ??? 
- hydra_logging: - version: 1 - formatters: - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s' - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - root: - level: INFO - handlers: - - console - disable_existing_loggers: false - job_logging: - version: 1 - formatters: - simple: - format: '[%(asctime)s][%(name)s][%(levelname)s] - %(message)s' - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s] - - %(message)s' - log_colors: - DEBUG: purple - INFO: green - WARNING: yellow - ERROR: red - CRITICAL: red - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - file: - class: logging.FileHandler - formatter: simple - filename: ${hydra.job.name}.log - root: - level: INFO - handlers: - - console - - file - disable_existing_loggers: false - env: {} - mode: MULTIRUN - searchpath: [] - callbacks: {} - output_subdir: .hydra - overrides: - hydra: - - hydra.mode=MULTIRUN - task: - - benchmark.input_shapes.batch_size=1 - job: - name: experiment - chdir: true - override_dirname: benchmark.input_shapes.batch_size=1 - id: '0' - num: 0 - config_name: bnb - env_set: - CUDA_VISIBLE_DEVICES: '0' - CUDA_DEVICE_ORDER: PCI_BUS_ID - env_copy: [] - config: - override_dirname: - kv_sep: '=' - item_sep: ',' - exclude_keys: [] - runtime: - version: 1.3.2 - version_base: '1.3' - cwd: /workspace/optimum-benchmark/examples/running-mistral - config_sources: - - path: hydra.conf - schema: pkg - provider: hydra - - path: optimum_benchmark - schema: pkg - provider: main - - path: hydra_plugins.hydra_colorlog.conf - schema: pkg - provider: hydra-colorlog - - path: /workspace/optimum-benchmark/examples/running-mistral/configs - schema: file - provider: command-line - - path: '' - schema: structured - provider: schema - output_dir: /workspace/optimum-benchmark/examples/running-mistral/experiments/bnb-batch_size(1)-sequence_length(512)-new_tokens(1000)/0 - choices: - benchmark: inference - backend: pytorch - hydra/env: default - hydra/callbacks: null - hydra/job_logging: colorlog - hydra/hydra_logging: colorlog - hydra/hydra_help: default - hydra/help: default - hydra/sweeper: basic - hydra/launcher: basic - hydra/output: default - verbose: false diff --git a/examples/running-mistrals/experiments/A100-80GB/bnb-batch_size(1)-sequence_length(512)-new_tokens(1000)/0/.hydra/overrides.yaml b/examples/running-mistrals/experiments/A100-80GB/bnb-batch_size(1)-sequence_length(512)-new_tokens(1000)/0/.hydra/overrides.yaml deleted file mode 100644 index 989520ff..00000000 --- a/examples/running-mistrals/experiments/A100-80GB/bnb-batch_size(1)-sequence_length(512)-new_tokens(1000)/0/.hydra/overrides.yaml +++ /dev/null @@ -1 +0,0 @@ -- benchmark.input_shapes.batch_size=1 diff --git a/examples/running-mistrals/experiments/A100-80GB/bnb-batch_size(1)-sequence_length(512)-new_tokens(1000)/0/hydra_config.yaml b/examples/running-mistrals/experiments/A100-80GB/bnb-batch_size(1)-sequence_length(512)-new_tokens(1000)/0/hydra_config.yaml deleted file mode 100644 index 4c3edbf9..00000000 --- a/examples/running-mistrals/experiments/A100-80GB/bnb-batch_size(1)-sequence_length(512)-new_tokens(1000)/0/hydra_config.yaml +++ /dev/null @@ -1,80 +0,0 @@ -backend: - name: pytorch - version: 2.1.0+cu118 - _target_: optimum_benchmark.backends.pytorch.backend.PyTorchBackend - seed: 42 - 
inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: true - continous_isolation_check: true - delete_cache: false - no_weights: false - device_map: null - torch_dtype: float16 - disable_grad: true - eval_mode: true - amp_autocast: false - amp_dtype: null - torch_compile: false - torch_compile_config: {} - bettertransformer: false - quantization_scheme: bnb - quantization_config: - llm_int8_threshold: 0.0 - load_in_4bit: true - bnb_4bit_compute_dtype: float16 - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 1 - sequence_length: 512 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: 1000 - can_diffuse: false - can_generate: true - forward_kwargs: {} - generate_kwargs: - max_new_tokens: 1000 - min_new_tokens: 1000 - do_sample: false - use_cache: true - pad_token_id: 0 - num_beams: 1 -experiment_name: bnb-batch_size(1)-sequence_length(512)-new_tokens(1000) -model: mistralai/Mistral-7B-v0.1 -device: cuda -task: text-generation -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.2 - optimum_commit: null - transformers_version: 4.35.0.dev0 - transformers_commit: null - accelerate_version: 0.24.0 - accelerate_commit: null - diffusers_version: null - diffusers_commit: null - python_version: 3.10.12 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB diff --git a/examples/running-mistrals/experiments/A100-80GB/bnb-batch_size(1)-sequence_length(512)-new_tokens(1000)/0/inference_results.csv b/examples/running-mistrals/experiments/A100-80GB/bnb-batch_size(1)-sequence_length(512)-new_tokens(1000)/0/inference_results.csv deleted file mode 100644 index ce9e3c78..00000000 --- a/examples/running-mistrals/experiments/A100-80GB/bnb-batch_size(1)-sequence_length(512)-new_tokens(1000)/0/inference_results.csv +++ /dev/null @@ -1,2 +0,0 @@ -forward.latency(s),forward.throughput(samples/s),forward.peak_memory(MB),generate.latency(s),generate.throughput(tokens/s),generate.peak_memory(MB) -0.0865,11.6,7822,43.4,23.0,7960 diff --git a/examples/running-mistrals/experiments/A100-80GB/bnb-batch_size(16)-sequence_length(512)-new_tokens(1000)/4/.hydra/config.yaml b/examples/running-mistrals/experiments/A100-80GB/bnb-batch_size(16)-sequence_length(512)-new_tokens(1000)/4/.hydra/config.yaml deleted file mode 100644 index 18995afe..00000000 --- a/examples/running-mistrals/experiments/A100-80GB/bnb-batch_size(16)-sequence_length(512)-new_tokens(1000)/4/.hydra/config.yaml +++ /dev/null @@ -1,73 +0,0 @@ -backend: - name: pytorch - version: ${pytorch_version:} - _target_: optimum_benchmark.backends.pytorch.backend.PyTorchBackend - seed: 42 - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: true - continous_isolation_check: true - delete_cache: false - no_weights: false - device_map: null - torch_dtype: float16 - disable_grad: ${is_inference:${benchmark.name}} - eval_mode: ${is_inference:${benchmark.name}} - amp_autocast: false - amp_dtype: null - torch_compile: false - torch_compile_config: {} - bettertransformer: false - quantization_scheme: bnb - quantization_config: - load_in_4bit: true - 
bnb_4bit_compute_dtype: float16 - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 16 - sequence_length: 512 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: 1000 - can_diffuse: ${can_diffuse:${task}} - can_generate: ${can_generate:${task}} - forward_kwargs: {} - generate_kwargs: {} -experiment_name: bnb-batch_size(${benchmark.input_shapes.batch_size})-sequence_length(${benchmark.input_shapes.sequence_length})-new_tokens(${benchmark.new_tokens}) -model: mistralai/Mistral-7B-v0.1 -device: cuda -task: ${infer_task:${model}} -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.2 - optimum_commit: null - transformers_version: 4.35.0.dev0 - transformers_commit: null - accelerate_version: 0.24.0 - accelerate_commit: null - diffusers_version: null - diffusers_commit: null - python_version: 3.10.12 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB diff --git a/examples/running-mistrals/experiments/A100-80GB/bnb-batch_size(16)-sequence_length(512)-new_tokens(1000)/4/.hydra/hydra.yaml b/examples/running-mistrals/experiments/A100-80GB/bnb-batch_size(16)-sequence_length(512)-new_tokens(1000)/4/.hydra/hydra.yaml deleted file mode 100644 index 9e9c5d39..00000000 --- a/examples/running-mistrals/experiments/A100-80GB/bnb-batch_size(16)-sequence_length(512)-new_tokens(1000)/4/.hydra/hydra.yaml +++ /dev/null @@ -1,174 +0,0 @@ -hydra: - run: - dir: experiments/${experiment_name} - sweep: - dir: experiments/${experiment_name} - subdir: ${hydra.job.num} - launcher: - _target_: hydra._internal.core_plugins.basic_launcher.BasicLauncher - sweeper: - _target_: hydra._internal.core_plugins.basic_sweeper.BasicSweeper - max_batch_size: null - params: - benchmark.input_shapes.batch_size: 1,2,4,8,16 - help: - app_name: ${hydra.job.name} - header: '${hydra.help.app_name} is powered by Hydra. - - ' - footer: 'Powered by Hydra (https://hydra.cc) - - Use --hydra-help to view Hydra specific help - - ' - template: '${hydra.help.header} - - == Configuration groups == - - Compose your configuration from those groups (group=option) - - - $APP_CONFIG_GROUPS - - - == Config == - - Override anything in the config (foo.bar=value) - - - $CONFIG - - - ${hydra.help.footer} - - ' - hydra_help: - template: 'Hydra (${hydra.runtime.version}) - - See https://hydra.cc for more info. - - - == Flags == - - $FLAGS_HELP - - - == Configuration groups == - - Compose your configuration from those groups (For example, append hydra/job_logging=disabled - to command line) - - - $HYDRA_CONFIG_GROUPS - - - Use ''--cfg hydra'' to Show the Hydra config. - - ' - hydra_help: ??? 
- hydra_logging: - version: 1 - formatters: - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s' - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - root: - level: INFO - handlers: - - console - disable_existing_loggers: false - job_logging: - version: 1 - formatters: - simple: - format: '[%(asctime)s][%(name)s][%(levelname)s] - %(message)s' - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s] - - %(message)s' - log_colors: - DEBUG: purple - INFO: green - WARNING: yellow - ERROR: red - CRITICAL: red - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - file: - class: logging.FileHandler - formatter: simple - filename: ${hydra.job.name}.log - root: - level: INFO - handlers: - - console - - file - disable_existing_loggers: false - env: {} - mode: MULTIRUN - searchpath: [] - callbacks: {} - output_subdir: .hydra - overrides: - hydra: - - hydra.mode=MULTIRUN - task: - - benchmark.input_shapes.batch_size=16 - job: - name: experiment - chdir: true - override_dirname: benchmark.input_shapes.batch_size=16 - id: '4' - num: 4 - config_name: bnb - env_set: - CUDA_VISIBLE_DEVICES: '1' - CUDA_DEVICE_ORDER: PCI_BUS_ID - env_copy: [] - config: - override_dirname: - kv_sep: '=' - item_sep: ',' - exclude_keys: [] - runtime: - version: 1.3.2 - version_base: '1.3' - cwd: /workspace/optimum-benchmark/examples/running-mistral - config_sources: - - path: hydra.conf - schema: pkg - provider: hydra - - path: optimum_benchmark - schema: pkg - provider: main - - path: hydra_plugins.hydra_colorlog.conf - schema: pkg - provider: hydra-colorlog - - path: /workspace/optimum-benchmark/examples/running-mistral/configs - schema: file - provider: command-line - - path: '' - schema: structured - provider: schema - output_dir: /workspace/optimum-benchmark/examples/running-mistral/experiments/bnb-batch_size(16)-sequence_length(512)-new_tokens(1000)/4 - choices: - benchmark: inference - backend: pytorch - hydra/env: default - hydra/callbacks: null - hydra/job_logging: colorlog - hydra/hydra_logging: colorlog - hydra/hydra_help: default - hydra/help: default - hydra/sweeper: basic - hydra/launcher: basic - hydra/output: default - verbose: false diff --git a/examples/running-mistrals/experiments/A100-80GB/bnb-batch_size(16)-sequence_length(512)-new_tokens(1000)/4/.hydra/overrides.yaml b/examples/running-mistrals/experiments/A100-80GB/bnb-batch_size(16)-sequence_length(512)-new_tokens(1000)/4/.hydra/overrides.yaml deleted file mode 100644 index fdb7f01d..00000000 --- a/examples/running-mistrals/experiments/A100-80GB/bnb-batch_size(16)-sequence_length(512)-new_tokens(1000)/4/.hydra/overrides.yaml +++ /dev/null @@ -1 +0,0 @@ -- benchmark.input_shapes.batch_size=16 diff --git a/examples/running-mistrals/experiments/A100-80GB/bnb-batch_size(16)-sequence_length(512)-new_tokens(1000)/4/hydra_config.yaml b/examples/running-mistrals/experiments/A100-80GB/bnb-batch_size(16)-sequence_length(512)-new_tokens(1000)/4/hydra_config.yaml deleted file mode 100644 index 0ba30c00..00000000 --- a/examples/running-mistrals/experiments/A100-80GB/bnb-batch_size(16)-sequence_length(512)-new_tokens(1000)/4/hydra_config.yaml +++ /dev/null @@ -1,80 +0,0 @@ -backend: - name: pytorch - version: 2.1.0+cu118 - _target_: optimum_benchmark.backends.pytorch.backend.PyTorchBackend - seed: 42 - 
inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: true - continous_isolation_check: true - delete_cache: false - no_weights: false - device_map: null - torch_dtype: float16 - disable_grad: true - eval_mode: true - amp_autocast: false - amp_dtype: null - torch_compile: false - torch_compile_config: {} - bettertransformer: false - quantization_scheme: bnb - quantization_config: - llm_int8_threshold: 0.0 - load_in_4bit: true - bnb_4bit_compute_dtype: float16 - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 16 - sequence_length: 512 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: 1000 - can_diffuse: false - can_generate: true - forward_kwargs: {} - generate_kwargs: - max_new_tokens: 1000 - min_new_tokens: 1000 - do_sample: false - use_cache: true - pad_token_id: 0 - num_beams: 1 -experiment_name: bnb-batch_size(16)-sequence_length(512)-new_tokens(1000) -model: mistralai/Mistral-7B-v0.1 -device: cuda -task: text-generation -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.2 - optimum_commit: null - transformers_version: 4.35.0.dev0 - transformers_commit: null - accelerate_version: 0.24.0 - accelerate_commit: null - diffusers_version: null - diffusers_commit: null - python_version: 3.10.12 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB diff --git a/examples/running-mistrals/experiments/A100-80GB/bnb-batch_size(16)-sequence_length(512)-new_tokens(1000)/4/inference_results.csv b/examples/running-mistrals/experiments/A100-80GB/bnb-batch_size(16)-sequence_length(512)-new_tokens(1000)/4/inference_results.csv deleted file mode 100644 index ea01621e..00000000 --- a/examples/running-mistrals/experiments/A100-80GB/bnb-batch_size(16)-sequence_length(512)-new_tokens(1000)/4/inference_results.csv +++ /dev/null @@ -1,2 +0,0 @@ -forward.latency(s),forward.throughput(samples/s),forward.peak_memory(MB),generate.latency(s),generate.throughput(tokens/s),generate.peak_memory(MB) -0.94,17.0,13155,76.0,211.0,15254 diff --git a/examples/running-mistrals/experiments/A100-80GB/bnb-batch_size(2)-sequence_length(512)-new_tokens(1000)/1/.hydra/config.yaml b/examples/running-mistrals/experiments/A100-80GB/bnb-batch_size(2)-sequence_length(512)-new_tokens(1000)/1/.hydra/config.yaml deleted file mode 100644 index f1495501..00000000 --- a/examples/running-mistrals/experiments/A100-80GB/bnb-batch_size(2)-sequence_length(512)-new_tokens(1000)/1/.hydra/config.yaml +++ /dev/null @@ -1,73 +0,0 @@ -backend: - name: pytorch - version: ${pytorch_version:} - _target_: optimum_benchmark.backends.pytorch.backend.PyTorchBackend - seed: 42 - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: true - continous_isolation_check: true - delete_cache: false - no_weights: false - device_map: null - torch_dtype: float16 - disable_grad: ${is_inference:${benchmark.name}} - eval_mode: ${is_inference:${benchmark.name}} - amp_autocast: false - amp_dtype: null - torch_compile: false - torch_compile_config: {} - bettertransformer: false - quantization_scheme: bnb - quantization_config: - load_in_4bit: true - 
bnb_4bit_compute_dtype: float16 - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 2 - sequence_length: 512 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: 1000 - can_diffuse: ${can_diffuse:${task}} - can_generate: ${can_generate:${task}} - forward_kwargs: {} - generate_kwargs: {} -experiment_name: bnb-batch_size(${benchmark.input_shapes.batch_size})-sequence_length(${benchmark.input_shapes.sequence_length})-new_tokens(${benchmark.new_tokens}) -model: mistralai/Mistral-7B-v0.1 -device: cuda -task: ${infer_task:${model}} -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.2 - optimum_commit: null - transformers_version: 4.35.0.dev0 - transformers_commit: null - accelerate_version: 0.24.0 - accelerate_commit: null - diffusers_version: null - diffusers_commit: null - python_version: 3.10.12 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB diff --git a/examples/running-mistrals/experiments/A100-80GB/bnb-batch_size(2)-sequence_length(512)-new_tokens(1000)/1/.hydra/hydra.yaml b/examples/running-mistrals/experiments/A100-80GB/bnb-batch_size(2)-sequence_length(512)-new_tokens(1000)/1/.hydra/hydra.yaml deleted file mode 100644 index f841c4ac..00000000 --- a/examples/running-mistrals/experiments/A100-80GB/bnb-batch_size(2)-sequence_length(512)-new_tokens(1000)/1/.hydra/hydra.yaml +++ /dev/null @@ -1,174 +0,0 @@ -hydra: - run: - dir: experiments/${experiment_name} - sweep: - dir: experiments/${experiment_name} - subdir: ${hydra.job.num} - launcher: - _target_: hydra._internal.core_plugins.basic_launcher.BasicLauncher - sweeper: - _target_: hydra._internal.core_plugins.basic_sweeper.BasicSweeper - max_batch_size: null - params: - benchmark.input_shapes.batch_size: 1,2,4,8,16 - help: - app_name: ${hydra.job.name} - header: '${hydra.help.app_name} is powered by Hydra. - - ' - footer: 'Powered by Hydra (https://hydra.cc) - - Use --hydra-help to view Hydra specific help - - ' - template: '${hydra.help.header} - - == Configuration groups == - - Compose your configuration from those groups (group=option) - - - $APP_CONFIG_GROUPS - - - == Config == - - Override anything in the config (foo.bar=value) - - - $CONFIG - - - ${hydra.help.footer} - - ' - hydra_help: - template: 'Hydra (${hydra.runtime.version}) - - See https://hydra.cc for more info. - - - == Flags == - - $FLAGS_HELP - - - == Configuration groups == - - Compose your configuration from those groups (For example, append hydra/job_logging=disabled - to command line) - - - $HYDRA_CONFIG_GROUPS - - - Use ''--cfg hydra'' to Show the Hydra config. - - ' - hydra_help: ??? 
- hydra_logging: - version: 1 - formatters: - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s' - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - root: - level: INFO - handlers: - - console - disable_existing_loggers: false - job_logging: - version: 1 - formatters: - simple: - format: '[%(asctime)s][%(name)s][%(levelname)s] - %(message)s' - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s] - - %(message)s' - log_colors: - DEBUG: purple - INFO: green - WARNING: yellow - ERROR: red - CRITICAL: red - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - file: - class: logging.FileHandler - formatter: simple - filename: ${hydra.job.name}.log - root: - level: INFO - handlers: - - console - - file - disable_existing_loggers: false - env: {} - mode: MULTIRUN - searchpath: [] - callbacks: {} - output_subdir: .hydra - overrides: - hydra: - - hydra.mode=MULTIRUN - task: - - benchmark.input_shapes.batch_size=2 - job: - name: experiment - chdir: true - override_dirname: benchmark.input_shapes.batch_size=2 - id: '1' - num: 1 - config_name: bnb - env_set: - CUDA_VISIBLE_DEVICES: '0' - CUDA_DEVICE_ORDER: PCI_BUS_ID - env_copy: [] - config: - override_dirname: - kv_sep: '=' - item_sep: ',' - exclude_keys: [] - runtime: - version: 1.3.2 - version_base: '1.3' - cwd: /workspace/optimum-benchmark/examples/running-mistral - config_sources: - - path: hydra.conf - schema: pkg - provider: hydra - - path: optimum_benchmark - schema: pkg - provider: main - - path: hydra_plugins.hydra_colorlog.conf - schema: pkg - provider: hydra-colorlog - - path: /workspace/optimum-benchmark/examples/running-mistral/configs - schema: file - provider: command-line - - path: '' - schema: structured - provider: schema - output_dir: /workspace/optimum-benchmark/examples/running-mistral/experiments/bnb-batch_size(2)-sequence_length(512)-new_tokens(1000)/1 - choices: - benchmark: inference - backend: pytorch - hydra/env: default - hydra/callbacks: null - hydra/job_logging: colorlog - hydra/hydra_logging: colorlog - hydra/hydra_help: default - hydra/help: default - hydra/sweeper: basic - hydra/launcher: basic - hydra/output: default - verbose: false diff --git a/examples/running-mistrals/experiments/A100-80GB/bnb-batch_size(2)-sequence_length(512)-new_tokens(1000)/1/.hydra/overrides.yaml b/examples/running-mistrals/experiments/A100-80GB/bnb-batch_size(2)-sequence_length(512)-new_tokens(1000)/1/.hydra/overrides.yaml deleted file mode 100644 index 8211b85f..00000000 --- a/examples/running-mistrals/experiments/A100-80GB/bnb-batch_size(2)-sequence_length(512)-new_tokens(1000)/1/.hydra/overrides.yaml +++ /dev/null @@ -1 +0,0 @@ -- benchmark.input_shapes.batch_size=2 diff --git a/examples/running-mistrals/experiments/A100-80GB/bnb-batch_size(2)-sequence_length(512)-new_tokens(1000)/1/hydra_config.yaml b/examples/running-mistrals/experiments/A100-80GB/bnb-batch_size(2)-sequence_length(512)-new_tokens(1000)/1/hydra_config.yaml deleted file mode 100644 index 53535c91..00000000 --- a/examples/running-mistrals/experiments/A100-80GB/bnb-batch_size(2)-sequence_length(512)-new_tokens(1000)/1/hydra_config.yaml +++ /dev/null @@ -1,80 +0,0 @@ -backend: - name: pytorch - version: 2.1.0+cu118 - _target_: optimum_benchmark.backends.pytorch.backend.PyTorchBackend - seed: 42 - 
inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: true - continous_isolation_check: true - delete_cache: false - no_weights: false - device_map: null - torch_dtype: float16 - disable_grad: true - eval_mode: true - amp_autocast: false - amp_dtype: null - torch_compile: false - torch_compile_config: {} - bettertransformer: false - quantization_scheme: bnb - quantization_config: - llm_int8_threshold: 0.0 - load_in_4bit: true - bnb_4bit_compute_dtype: float16 - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 2 - sequence_length: 512 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: 1000 - can_diffuse: false - can_generate: true - forward_kwargs: {} - generate_kwargs: - max_new_tokens: 1000 - min_new_tokens: 1000 - do_sample: false - use_cache: true - pad_token_id: 0 - num_beams: 1 -experiment_name: bnb-batch_size(2)-sequence_length(512)-new_tokens(1000) -model: mistralai/Mistral-7B-v0.1 -device: cuda -task: text-generation -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.2 - optimum_commit: null - transformers_version: 4.35.0.dev0 - transformers_commit: null - accelerate_version: 0.24.0 - accelerate_commit: null - diffusers_version: null - diffusers_commit: null - python_version: 3.10.12 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB diff --git a/examples/running-mistrals/experiments/A100-80GB/bnb-batch_size(2)-sequence_length(512)-new_tokens(1000)/1/inference_results.csv b/examples/running-mistrals/experiments/A100-80GB/bnb-batch_size(2)-sequence_length(512)-new_tokens(1000)/1/inference_results.csv deleted file mode 100644 index b2ba03ea..00000000 --- a/examples/running-mistrals/experiments/A100-80GB/bnb-batch_size(2)-sequence_length(512)-new_tokens(1000)/1/inference_results.csv +++ /dev/null @@ -1,2 +0,0 @@ -forward.latency(s),forward.throughput(samples/s),forward.peak_memory(MB),generate.latency(s),generate.throughput(tokens/s),generate.peak_memory(MB) -0.141,14.2,8166,57.2,35.0,8401 diff --git a/examples/running-mistrals/experiments/A100-80GB/bnb-batch_size(4)-sequence_length(512)-new_tokens(1000)/2/.hydra/config.yaml b/examples/running-mistrals/experiments/A100-80GB/bnb-batch_size(4)-sequence_length(512)-new_tokens(1000)/2/.hydra/config.yaml deleted file mode 100644 index 8e5f788e..00000000 --- a/examples/running-mistrals/experiments/A100-80GB/bnb-batch_size(4)-sequence_length(512)-new_tokens(1000)/2/.hydra/config.yaml +++ /dev/null @@ -1,73 +0,0 @@ -backend: - name: pytorch - version: ${pytorch_version:} - _target_: optimum_benchmark.backends.pytorch.backend.PyTorchBackend - seed: 42 - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: true - continous_isolation_check: true - delete_cache: false - no_weights: false - device_map: null - torch_dtype: float16 - disable_grad: ${is_inference:${benchmark.name}} - eval_mode: ${is_inference:${benchmark.name}} - amp_autocast: false - amp_dtype: null - torch_compile: false - torch_compile_config: {} - bettertransformer: false - quantization_scheme: bnb - quantization_config: - load_in_4bit: true - 
bnb_4bit_compute_dtype: float16 - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 4 - sequence_length: 512 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: 1000 - can_diffuse: ${can_diffuse:${task}} - can_generate: ${can_generate:${task}} - forward_kwargs: {} - generate_kwargs: {} -experiment_name: bnb-batch_size(${benchmark.input_shapes.batch_size})-sequence_length(${benchmark.input_shapes.sequence_length})-new_tokens(${benchmark.new_tokens}) -model: mistralai/Mistral-7B-v0.1 -device: cuda -task: ${infer_task:${model}} -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.2 - optimum_commit: null - transformers_version: 4.35.0.dev0 - transformers_commit: null - accelerate_version: 0.24.0 - accelerate_commit: null - diffusers_version: null - diffusers_commit: null - python_version: 3.10.12 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB diff --git a/examples/running-mistrals/experiments/A100-80GB/bnb-batch_size(4)-sequence_length(512)-new_tokens(1000)/2/.hydra/hydra.yaml b/examples/running-mistrals/experiments/A100-80GB/bnb-batch_size(4)-sequence_length(512)-new_tokens(1000)/2/.hydra/hydra.yaml deleted file mode 100644 index f998b572..00000000 --- a/examples/running-mistrals/experiments/A100-80GB/bnb-batch_size(4)-sequence_length(512)-new_tokens(1000)/2/.hydra/hydra.yaml +++ /dev/null @@ -1,174 +0,0 @@ -hydra: - run: - dir: experiments/${experiment_name} - sweep: - dir: experiments/${experiment_name} - subdir: ${hydra.job.num} - launcher: - _target_: hydra._internal.core_plugins.basic_launcher.BasicLauncher - sweeper: - _target_: hydra._internal.core_plugins.basic_sweeper.BasicSweeper - max_batch_size: null - params: - benchmark.input_shapes.batch_size: 1,2,4,8,16 - help: - app_name: ${hydra.job.name} - header: '${hydra.help.app_name} is powered by Hydra. - - ' - footer: 'Powered by Hydra (https://hydra.cc) - - Use --hydra-help to view Hydra specific help - - ' - template: '${hydra.help.header} - - == Configuration groups == - - Compose your configuration from those groups (group=option) - - - $APP_CONFIG_GROUPS - - - == Config == - - Override anything in the config (foo.bar=value) - - - $CONFIG - - - ${hydra.help.footer} - - ' - hydra_help: - template: 'Hydra (${hydra.runtime.version}) - - See https://hydra.cc for more info. - - - == Flags == - - $FLAGS_HELP - - - == Configuration groups == - - Compose your configuration from those groups (For example, append hydra/job_logging=disabled - to command line) - - - $HYDRA_CONFIG_GROUPS - - - Use ''--cfg hydra'' to Show the Hydra config. - - ' - hydra_help: ??? 
- hydra_logging: - version: 1 - formatters: - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s' - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - root: - level: INFO - handlers: - - console - disable_existing_loggers: false - job_logging: - version: 1 - formatters: - simple: - format: '[%(asctime)s][%(name)s][%(levelname)s] - %(message)s' - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s] - - %(message)s' - log_colors: - DEBUG: purple - INFO: green - WARNING: yellow - ERROR: red - CRITICAL: red - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - file: - class: logging.FileHandler - formatter: simple - filename: ${hydra.job.name}.log - root: - level: INFO - handlers: - - console - - file - disable_existing_loggers: false - env: {} - mode: MULTIRUN - searchpath: [] - callbacks: {} - output_subdir: .hydra - overrides: - hydra: - - hydra.mode=MULTIRUN - task: - - benchmark.input_shapes.batch_size=4 - job: - name: experiment - chdir: true - override_dirname: benchmark.input_shapes.batch_size=4 - id: '2' - num: 2 - config_name: bnb - env_set: - CUDA_VISIBLE_DEVICES: '0' - CUDA_DEVICE_ORDER: PCI_BUS_ID - env_copy: [] - config: - override_dirname: - kv_sep: '=' - item_sep: ',' - exclude_keys: [] - runtime: - version: 1.3.2 - version_base: '1.3' - cwd: /workspace/optimum-benchmark/examples/running-mistral - config_sources: - - path: hydra.conf - schema: pkg - provider: hydra - - path: optimum_benchmark - schema: pkg - provider: main - - path: hydra_plugins.hydra_colorlog.conf - schema: pkg - provider: hydra-colorlog - - path: /workspace/optimum-benchmark/examples/running-mistral/configs - schema: file - provider: command-line - - path: '' - schema: structured - provider: schema - output_dir: /workspace/optimum-benchmark/examples/running-mistral/experiments/bnb-batch_size(4)-sequence_length(512)-new_tokens(1000)/2 - choices: - benchmark: inference - backend: pytorch - hydra/env: default - hydra/callbacks: null - hydra/job_logging: colorlog - hydra/hydra_logging: colorlog - hydra/hydra_help: default - hydra/help: default - hydra/sweeper: basic - hydra/launcher: basic - hydra/output: default - verbose: false diff --git a/examples/running-mistrals/experiments/A100-80GB/bnb-batch_size(4)-sequence_length(512)-new_tokens(1000)/2/.hydra/overrides.yaml b/examples/running-mistrals/experiments/A100-80GB/bnb-batch_size(4)-sequence_length(512)-new_tokens(1000)/2/.hydra/overrides.yaml deleted file mode 100644 index eef8c9ca..00000000 --- a/examples/running-mistrals/experiments/A100-80GB/bnb-batch_size(4)-sequence_length(512)-new_tokens(1000)/2/.hydra/overrides.yaml +++ /dev/null @@ -1 +0,0 @@ -- benchmark.input_shapes.batch_size=4 diff --git a/examples/running-mistrals/experiments/A100-80GB/bnb-batch_size(4)-sequence_length(512)-new_tokens(1000)/2/hydra_config.yaml b/examples/running-mistrals/experiments/A100-80GB/bnb-batch_size(4)-sequence_length(512)-new_tokens(1000)/2/hydra_config.yaml deleted file mode 100644 index 57923542..00000000 --- a/examples/running-mistrals/experiments/A100-80GB/bnb-batch_size(4)-sequence_length(512)-new_tokens(1000)/2/hydra_config.yaml +++ /dev/null @@ -1,80 +0,0 @@ -backend: - name: pytorch - version: 2.1.0+cu118 - _target_: optimum_benchmark.backends.pytorch.backend.PyTorchBackend - seed: 42 - 
inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: true - continous_isolation_check: true - delete_cache: false - no_weights: false - device_map: null - torch_dtype: float16 - disable_grad: true - eval_mode: true - amp_autocast: false - amp_dtype: null - torch_compile: false - torch_compile_config: {} - bettertransformer: false - quantization_scheme: bnb - quantization_config: - llm_int8_threshold: 0.0 - load_in_4bit: true - bnb_4bit_compute_dtype: float16 - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 4 - sequence_length: 512 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: 1000 - can_diffuse: false - can_generate: true - forward_kwargs: {} - generate_kwargs: - max_new_tokens: 1000 - min_new_tokens: 1000 - do_sample: false - use_cache: true - pad_token_id: 0 - num_beams: 1 -experiment_name: bnb-batch_size(4)-sequence_length(512)-new_tokens(1000) -model: mistralai/Mistral-7B-v0.1 -device: cuda -task: text-generation -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.2 - optimum_commit: null - transformers_version: 4.35.0.dev0 - transformers_commit: null - accelerate_version: 0.24.0 - accelerate_commit: null - diffusers_version: null - diffusers_commit: null - python_version: 3.10.12 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB diff --git a/examples/running-mistrals/experiments/A100-80GB/bnb-batch_size(4)-sequence_length(512)-new_tokens(1000)/2/inference_results.csv b/examples/running-mistrals/experiments/A100-80GB/bnb-batch_size(4)-sequence_length(512)-new_tokens(1000)/2/inference_results.csv deleted file mode 100644 index 332ef801..00000000 --- a/examples/running-mistrals/experiments/A100-80GB/bnb-batch_size(4)-sequence_length(512)-new_tokens(1000)/2/inference_results.csv +++ /dev/null @@ -1,2 +0,0 @@ -forward.latency(s),forward.throughput(samples/s),forward.peak_memory(MB),generate.latency(s),generate.throughput(tokens/s),generate.peak_memory(MB) -0.254,15.7,8797,58.4,68.5,9317 diff --git a/examples/running-mistrals/experiments/A100-80GB/bnb-batch_size(8)-sequence_length(512)-new_tokens(1000)/3/.hydra/config.yaml b/examples/running-mistrals/experiments/A100-80GB/bnb-batch_size(8)-sequence_length(512)-new_tokens(1000)/3/.hydra/config.yaml deleted file mode 100644 index a53fdf3f..00000000 --- a/examples/running-mistrals/experiments/A100-80GB/bnb-batch_size(8)-sequence_length(512)-new_tokens(1000)/3/.hydra/config.yaml +++ /dev/null @@ -1,73 +0,0 @@ -backend: - name: pytorch - version: ${pytorch_version:} - _target_: optimum_benchmark.backends.pytorch.backend.PyTorchBackend - seed: 42 - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: true - continous_isolation_check: true - delete_cache: false - no_weights: false - device_map: null - torch_dtype: float16 - disable_grad: ${is_inference:${benchmark.name}} - eval_mode: ${is_inference:${benchmark.name}} - amp_autocast: false - amp_dtype: null - torch_compile: false - torch_compile_config: {} - bettertransformer: false - quantization_scheme: bnb - quantization_config: - load_in_4bit: true - 
bnb_4bit_compute_dtype: float16 - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 8 - sequence_length: 512 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: 1000 - can_diffuse: ${can_diffuse:${task}} - can_generate: ${can_generate:${task}} - forward_kwargs: {} - generate_kwargs: {} -experiment_name: bnb-batch_size(${benchmark.input_shapes.batch_size})-sequence_length(${benchmark.input_shapes.sequence_length})-new_tokens(${benchmark.new_tokens}) -model: mistralai/Mistral-7B-v0.1 -device: cuda -task: ${infer_task:${model}} -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.2 - optimum_commit: null - transformers_version: 4.35.0.dev0 - transformers_commit: null - accelerate_version: 0.24.0 - accelerate_commit: null - diffusers_version: null - diffusers_commit: null - python_version: 3.10.12 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB diff --git a/examples/running-mistrals/experiments/A100-80GB/bnb-batch_size(8)-sequence_length(512)-new_tokens(1000)/3/.hydra/hydra.yaml b/examples/running-mistrals/experiments/A100-80GB/bnb-batch_size(8)-sequence_length(512)-new_tokens(1000)/3/.hydra/hydra.yaml deleted file mode 100644 index e54389aa..00000000 --- a/examples/running-mistrals/experiments/A100-80GB/bnb-batch_size(8)-sequence_length(512)-new_tokens(1000)/3/.hydra/hydra.yaml +++ /dev/null @@ -1,174 +0,0 @@ -hydra: - run: - dir: experiments/${experiment_name} - sweep: - dir: experiments/${experiment_name} - subdir: ${hydra.job.num} - launcher: - _target_: hydra._internal.core_plugins.basic_launcher.BasicLauncher - sweeper: - _target_: hydra._internal.core_plugins.basic_sweeper.BasicSweeper - max_batch_size: null - params: - benchmark.input_shapes.batch_size: 1,2,4,8,16 - help: - app_name: ${hydra.job.name} - header: '${hydra.help.app_name} is powered by Hydra. - - ' - footer: 'Powered by Hydra (https://hydra.cc) - - Use --hydra-help to view Hydra specific help - - ' - template: '${hydra.help.header} - - == Configuration groups == - - Compose your configuration from those groups (group=option) - - - $APP_CONFIG_GROUPS - - - == Config == - - Override anything in the config (foo.bar=value) - - - $CONFIG - - - ${hydra.help.footer} - - ' - hydra_help: - template: 'Hydra (${hydra.runtime.version}) - - See https://hydra.cc for more info. - - - == Flags == - - $FLAGS_HELP - - - == Configuration groups == - - Compose your configuration from those groups (For example, append hydra/job_logging=disabled - to command line) - - - $HYDRA_CONFIG_GROUPS - - - Use ''--cfg hydra'' to Show the Hydra config. - - ' - hydra_help: ??? 
- hydra_logging: - version: 1 - formatters: - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s' - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - root: - level: INFO - handlers: - - console - disable_existing_loggers: false - job_logging: - version: 1 - formatters: - simple: - format: '[%(asctime)s][%(name)s][%(levelname)s] - %(message)s' - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s] - - %(message)s' - log_colors: - DEBUG: purple - INFO: green - WARNING: yellow - ERROR: red - CRITICAL: red - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - file: - class: logging.FileHandler - formatter: simple - filename: ${hydra.job.name}.log - root: - level: INFO - handlers: - - console - - file - disable_existing_loggers: false - env: {} - mode: MULTIRUN - searchpath: [] - callbacks: {} - output_subdir: .hydra - overrides: - hydra: - - hydra.mode=MULTIRUN - task: - - benchmark.input_shapes.batch_size=8 - job: - name: experiment - chdir: true - override_dirname: benchmark.input_shapes.batch_size=8 - id: '3' - num: 3 - config_name: bnb - env_set: - CUDA_VISIBLE_DEVICES: '0' - CUDA_DEVICE_ORDER: PCI_BUS_ID - env_copy: [] - config: - override_dirname: - kv_sep: '=' - item_sep: ',' - exclude_keys: [] - runtime: - version: 1.3.2 - version_base: '1.3' - cwd: /workspace/optimum-benchmark/examples/running-mistral - config_sources: - - path: hydra.conf - schema: pkg - provider: hydra - - path: optimum_benchmark - schema: pkg - provider: main - - path: hydra_plugins.hydra_colorlog.conf - schema: pkg - provider: hydra-colorlog - - path: /workspace/optimum-benchmark/examples/running-mistral/configs - schema: file - provider: command-line - - path: '' - schema: structured - provider: schema - output_dir: /workspace/optimum-benchmark/examples/running-mistral/experiments/bnb-batch_size(8)-sequence_length(512)-new_tokens(1000)/3 - choices: - benchmark: inference - backend: pytorch - hydra/env: default - hydra/callbacks: null - hydra/job_logging: colorlog - hydra/hydra_logging: colorlog - hydra/hydra_help: default - hydra/help: default - hydra/sweeper: basic - hydra/launcher: basic - hydra/output: default - verbose: false diff --git a/examples/running-mistrals/experiments/A100-80GB/bnb-batch_size(8)-sequence_length(512)-new_tokens(1000)/3/.hydra/overrides.yaml b/examples/running-mistrals/experiments/A100-80GB/bnb-batch_size(8)-sequence_length(512)-new_tokens(1000)/3/.hydra/overrides.yaml deleted file mode 100644 index 8cd14374..00000000 --- a/examples/running-mistrals/experiments/A100-80GB/bnb-batch_size(8)-sequence_length(512)-new_tokens(1000)/3/.hydra/overrides.yaml +++ /dev/null @@ -1 +0,0 @@ -- benchmark.input_shapes.batch_size=8 diff --git a/examples/running-mistrals/experiments/A100-80GB/bnb-batch_size(8)-sequence_length(512)-new_tokens(1000)/3/hydra_config.yaml b/examples/running-mistrals/experiments/A100-80GB/bnb-batch_size(8)-sequence_length(512)-new_tokens(1000)/3/hydra_config.yaml deleted file mode 100644 index 730f327e..00000000 --- a/examples/running-mistrals/experiments/A100-80GB/bnb-batch_size(8)-sequence_length(512)-new_tokens(1000)/3/hydra_config.yaml +++ /dev/null @@ -1,80 +0,0 @@ -backend: - name: pytorch - version: 2.1.0+cu118 - _target_: optimum_benchmark.backends.pytorch.backend.PyTorchBackend - seed: 42 - 
inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: true - continous_isolation_check: true - delete_cache: false - no_weights: false - device_map: null - torch_dtype: float16 - disable_grad: true - eval_mode: true - amp_autocast: false - amp_dtype: null - torch_compile: false - torch_compile_config: {} - bettertransformer: false - quantization_scheme: bnb - quantization_config: - llm_int8_threshold: 0.0 - load_in_4bit: true - bnb_4bit_compute_dtype: float16 - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 8 - sequence_length: 512 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: 1000 - can_diffuse: false - can_generate: true - forward_kwargs: {} - generate_kwargs: - max_new_tokens: 1000 - min_new_tokens: 1000 - do_sample: false - use_cache: true - pad_token_id: 0 - num_beams: 1 -experiment_name: bnb-batch_size(8)-sequence_length(512)-new_tokens(1000) -model: mistralai/Mistral-7B-v0.1 -device: cuda -task: text-generation -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.2 - optimum_commit: null - transformers_version: 4.35.0.dev0 - transformers_commit: null - accelerate_version: 0.24.0 - accelerate_commit: null - diffusers_version: null - diffusers_commit: null - python_version: 3.10.12 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB diff --git a/examples/running-mistrals/experiments/A100-80GB/bnb-batch_size(8)-sequence_length(512)-new_tokens(1000)/3/inference_results.csv b/examples/running-mistrals/experiments/A100-80GB/bnb-batch_size(8)-sequence_length(512)-new_tokens(1000)/3/inference_results.csv deleted file mode 100644 index 454db2cb..00000000 --- a/examples/running-mistrals/experiments/A100-80GB/bnb-batch_size(8)-sequence_length(512)-new_tokens(1000)/3/inference_results.csv +++ /dev/null @@ -1,2 +0,0 @@ -forward.latency(s),forward.throughput(samples/s),forward.peak_memory(MB),generate.latency(s),generate.throughput(tokens/s),generate.peak_memory(MB) -0.483,16.6,10261,59.9,134.0,11330 diff --git a/examples/running-mistrals/experiments/A100-80GB/fp16-batch_size(1)-sequence_length(512)-new_tokens(1000)/0/.hydra/config.yaml b/examples/running-mistrals/experiments/A100-80GB/fp16-batch_size(1)-sequence_length(512)-new_tokens(1000)/0/.hydra/config.yaml deleted file mode 100644 index 76483dd5..00000000 --- a/examples/running-mistrals/experiments/A100-80GB/fp16-batch_size(1)-sequence_length(512)-new_tokens(1000)/0/.hydra/config.yaml +++ /dev/null @@ -1,70 +0,0 @@ -backend: - name: pytorch - version: ${pytorch_version:} - _target_: optimum_benchmark.backends.pytorch.backend.PyTorchBackend - seed: 42 - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: true - continous_isolation_check: true - delete_cache: false - no_weights: false - device_map: null - torch_dtype: float16 - disable_grad: ${is_inference:${benchmark.name}} - eval_mode: ${is_inference:${benchmark.name}} - amp_autocast: false - amp_dtype: null - torch_compile: false - torch_compile_config: {} - bettertransformer: false - quantization_scheme: null - quantization_config: {} - use_ddp: false - ddp_config: 
{} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 1 - sequence_length: 512 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: 1000 - can_diffuse: ${can_diffuse:${task}} - can_generate: ${can_generate:${task}} - forward_kwargs: {} - generate_kwargs: {} -experiment_name: fp16-batch_size(${benchmark.input_shapes.batch_size})-sequence_length(${benchmark.input_shapes.sequence_length})-new_tokens(${benchmark.new_tokens}) -model: mistralai/Mistral-7B-v0.1 -device: cuda -task: ${infer_task:${model}} -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.2 - optimum_commit: null - transformers_version: 4.35.0.dev0 - transformers_commit: null - accelerate_version: 0.24.0 - accelerate_commit: null - diffusers_version: null - diffusers_commit: null - python_version: 3.10.12 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB diff --git a/examples/running-mistrals/experiments/A100-80GB/fp16-batch_size(1)-sequence_length(512)-new_tokens(1000)/0/.hydra/hydra.yaml b/examples/running-mistrals/experiments/A100-80GB/fp16-batch_size(1)-sequence_length(512)-new_tokens(1000)/0/.hydra/hydra.yaml deleted file mode 100644 index b6c71a98..00000000 --- a/examples/running-mistrals/experiments/A100-80GB/fp16-batch_size(1)-sequence_length(512)-new_tokens(1000)/0/.hydra/hydra.yaml +++ /dev/null @@ -1,174 +0,0 @@ -hydra: - run: - dir: experiments/${experiment_name} - sweep: - dir: experiments/${experiment_name} - subdir: ${hydra.job.num} - launcher: - _target_: hydra._internal.core_plugins.basic_launcher.BasicLauncher - sweeper: - _target_: hydra._internal.core_plugins.basic_sweeper.BasicSweeper - max_batch_size: null - params: - benchmark.input_shapes.batch_size: 1,2,4,8,16 - help: - app_name: ${hydra.job.name} - header: '${hydra.help.app_name} is powered by Hydra. - - ' - footer: 'Powered by Hydra (https://hydra.cc) - - Use --hydra-help to view Hydra specific help - - ' - template: '${hydra.help.header} - - == Configuration groups == - - Compose your configuration from those groups (group=option) - - - $APP_CONFIG_GROUPS - - - == Config == - - Override anything in the config (foo.bar=value) - - - $CONFIG - - - ${hydra.help.footer} - - ' - hydra_help: - template: 'Hydra (${hydra.runtime.version}) - - See https://hydra.cc for more info. - - - == Flags == - - $FLAGS_HELP - - - == Configuration groups == - - Compose your configuration from those groups (For example, append hydra/job_logging=disabled - to command line) - - - $HYDRA_CONFIG_GROUPS - - - Use ''--cfg hydra'' to Show the Hydra config. - - ' - hydra_help: ??? 
- hydra_logging: - version: 1 - formatters: - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s' - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - root: - level: INFO - handlers: - - console - disable_existing_loggers: false - job_logging: - version: 1 - formatters: - simple: - format: '[%(asctime)s][%(name)s][%(levelname)s] - %(message)s' - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s] - - %(message)s' - log_colors: - DEBUG: purple - INFO: green - WARNING: yellow - ERROR: red - CRITICAL: red - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - file: - class: logging.FileHandler - formatter: simple - filename: ${hydra.job.name}.log - root: - level: INFO - handlers: - - console - - file - disable_existing_loggers: false - env: {} - mode: MULTIRUN - searchpath: [] - callbacks: {} - output_subdir: .hydra - overrides: - hydra: - - hydra.mode=MULTIRUN - task: - - benchmark.input_shapes.batch_size=1 - job: - name: experiment - chdir: true - override_dirname: benchmark.input_shapes.batch_size=1 - id: '0' - num: 0 - config_name: _base_ - env_set: - CUDA_VISIBLE_DEVICES: '1' - CUDA_DEVICE_ORDER: PCI_BUS_ID - env_copy: [] - config: - override_dirname: - kv_sep: '=' - item_sep: ',' - exclude_keys: [] - runtime: - version: 1.3.2 - version_base: '1.3' - cwd: /workspace/optimum-benchmark/examples/quantized-mistrals - config_sources: - - path: hydra.conf - schema: pkg - provider: hydra - - path: optimum_benchmark - schema: pkg - provider: main - - path: hydra_plugins.hydra_colorlog.conf - schema: pkg - provider: hydra-colorlog - - path: /workspace/optimum-benchmark/examples/quantized-mistrals/configs - schema: file - provider: command-line - - path: '' - schema: structured - provider: schema - output_dir: /workspace/optimum-benchmark/examples/quantized-mistrals/experiments/fp16-batch_size(1)-sequence_length(512)-new_tokens(1000)/0 - choices: - benchmark: inference - backend: pytorch - hydra/env: default - hydra/callbacks: null - hydra/job_logging: colorlog - hydra/hydra_logging: colorlog - hydra/hydra_help: default - hydra/help: default - hydra/sweeper: basic - hydra/launcher: basic - hydra/output: default - verbose: false diff --git a/examples/running-mistrals/experiments/A100-80GB/fp16-batch_size(1)-sequence_length(512)-new_tokens(1000)/0/.hydra/overrides.yaml b/examples/running-mistrals/experiments/A100-80GB/fp16-batch_size(1)-sequence_length(512)-new_tokens(1000)/0/.hydra/overrides.yaml deleted file mode 100644 index 989520ff..00000000 --- a/examples/running-mistrals/experiments/A100-80GB/fp16-batch_size(1)-sequence_length(512)-new_tokens(1000)/0/.hydra/overrides.yaml +++ /dev/null @@ -1 +0,0 @@ -- benchmark.input_shapes.batch_size=1 diff --git a/examples/running-mistrals/experiments/A100-80GB/fp16-batch_size(1)-sequence_length(512)-new_tokens(1000)/0/hydra_config.yaml b/examples/running-mistrals/experiments/A100-80GB/fp16-batch_size(1)-sequence_length(512)-new_tokens(1000)/0/hydra_config.yaml deleted file mode 100644 index 3387d1ab..00000000 --- a/examples/running-mistrals/experiments/A100-80GB/fp16-batch_size(1)-sequence_length(512)-new_tokens(1000)/0/hydra_config.yaml +++ /dev/null @@ -1,77 +0,0 @@ -backend: - name: pytorch - version: 2.1.0+cu118 - _target_: optimum_benchmark.backends.pytorch.backend.PyTorchBackend - 
seed: 42 - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: true - continous_isolation_check: true - delete_cache: false - no_weights: false - device_map: null - torch_dtype: float16 - disable_grad: true - eval_mode: true - amp_autocast: false - amp_dtype: null - torch_compile: false - torch_compile_config: {} - bettertransformer: false - quantization_scheme: null - quantization_config: {} - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 1 - sequence_length: 512 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: 1000 - can_diffuse: false - can_generate: true - forward_kwargs: {} - generate_kwargs: - max_new_tokens: 1000 - min_new_tokens: 1000 - do_sample: false - use_cache: true - pad_token_id: 0 - num_beams: 1 -experiment_name: fp16-batch_size(1)-sequence_length(512)-new_tokens(1000) -model: mistralai/Mistral-7B-v0.1 -device: cuda -task: text-generation -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.2 - optimum_commit: null - transformers_version: 4.35.0.dev0 - transformers_commit: null - accelerate_version: 0.24.0 - accelerate_commit: null - diffusers_version: null - diffusers_commit: null - python_version: 3.10.12 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB diff --git a/examples/running-mistrals/experiments/A100-80GB/fp16-batch_size(1)-sequence_length(512)-new_tokens(1000)/0/inference_results.csv b/examples/running-mistrals/experiments/A100-80GB/fp16-batch_size(1)-sequence_length(512)-new_tokens(1000)/0/inference_results.csv deleted file mode 100644 index d6eb5712..00000000 --- a/examples/running-mistrals/experiments/A100-80GB/fp16-batch_size(1)-sequence_length(512)-new_tokens(1000)/0/inference_results.csv +++ /dev/null @@ -1,2 +0,0 @@ -forward.latency(s),forward.throughput(samples/s),forward.peak_memory(MB),generate.latency(s),generate.throughput(tokens/s),generate.peak_memory(MB) -0.0633,15.8,16907,28.3,35.3,17066 diff --git a/examples/running-mistrals/experiments/A100-80GB/fp16-batch_size(16)-sequence_length(512)-new_tokens(1000)/4/.hydra/config.yaml b/examples/running-mistrals/experiments/A100-80GB/fp16-batch_size(16)-sequence_length(512)-new_tokens(1000)/4/.hydra/config.yaml deleted file mode 100644 index 82aec418..00000000 --- a/examples/running-mistrals/experiments/A100-80GB/fp16-batch_size(16)-sequence_length(512)-new_tokens(1000)/4/.hydra/config.yaml +++ /dev/null @@ -1,70 +0,0 @@ -backend: - name: pytorch - version: ${pytorch_version:} - _target_: optimum_benchmark.backends.pytorch.backend.PyTorchBackend - seed: 42 - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: true - continous_isolation_check: true - delete_cache: false - no_weights: false - device_map: null - torch_dtype: float16 - disable_grad: ${is_inference:${benchmark.name}} - eval_mode: ${is_inference:${benchmark.name}} - amp_autocast: false - amp_dtype: null - torch_compile: false - torch_compile_config: {} - bettertransformer: false - quantization_scheme: null - quantization_config: {} - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - 
name: inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 16 - sequence_length: 512 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: 1000 - can_diffuse: ${can_diffuse:${task}} - can_generate: ${can_generate:${task}} - forward_kwargs: {} - generate_kwargs: {} -experiment_name: fp16-batch_size(${benchmark.input_shapes.batch_size})-sequence_length(${benchmark.input_shapes.sequence_length})-new_tokens(${benchmark.new_tokens}) -model: mistralai/Mistral-7B-v0.1 -device: cuda -task: ${infer_task:${model}} -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.2 - optimum_commit: null - transformers_version: 4.35.0.dev0 - transformers_commit: null - accelerate_version: 0.24.0 - accelerate_commit: null - diffusers_version: null - diffusers_commit: null - python_version: 3.10.12 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB diff --git a/examples/running-mistrals/experiments/A100-80GB/fp16-batch_size(16)-sequence_length(512)-new_tokens(1000)/4/.hydra/hydra.yaml b/examples/running-mistrals/experiments/A100-80GB/fp16-batch_size(16)-sequence_length(512)-new_tokens(1000)/4/.hydra/hydra.yaml deleted file mode 100644 index 385d4305..00000000 --- a/examples/running-mistrals/experiments/A100-80GB/fp16-batch_size(16)-sequence_length(512)-new_tokens(1000)/4/.hydra/hydra.yaml +++ /dev/null @@ -1,174 +0,0 @@ -hydra: - run: - dir: experiments/${experiment_name} - sweep: - dir: experiments/${experiment_name} - subdir: ${hydra.job.num} - launcher: - _target_: hydra._internal.core_plugins.basic_launcher.BasicLauncher - sweeper: - _target_: hydra._internal.core_plugins.basic_sweeper.BasicSweeper - max_batch_size: null - params: - benchmark.input_shapes.batch_size: 1,2,4,8,16 - help: - app_name: ${hydra.job.name} - header: '${hydra.help.app_name} is powered by Hydra. - - ' - footer: 'Powered by Hydra (https://hydra.cc) - - Use --hydra-help to view Hydra specific help - - ' - template: '${hydra.help.header} - - == Configuration groups == - - Compose your configuration from those groups (group=option) - - - $APP_CONFIG_GROUPS - - - == Config == - - Override anything in the config (foo.bar=value) - - - $CONFIG - - - ${hydra.help.footer} - - ' - hydra_help: - template: 'Hydra (${hydra.runtime.version}) - - See https://hydra.cc for more info. - - - == Flags == - - $FLAGS_HELP - - - == Configuration groups == - - Compose your configuration from those groups (For example, append hydra/job_logging=disabled - to command line) - - - $HYDRA_CONFIG_GROUPS - - - Use ''--cfg hydra'' to Show the Hydra config. - - ' - hydra_help: ??? 
- hydra_logging: - version: 1 - formatters: - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s' - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - root: - level: INFO - handlers: - - console - disable_existing_loggers: false - job_logging: - version: 1 - formatters: - simple: - format: '[%(asctime)s][%(name)s][%(levelname)s] - %(message)s' - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s] - - %(message)s' - log_colors: - DEBUG: purple - INFO: green - WARNING: yellow - ERROR: red - CRITICAL: red - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - file: - class: logging.FileHandler - formatter: simple - filename: ${hydra.job.name}.log - root: - level: INFO - handlers: - - console - - file - disable_existing_loggers: false - env: {} - mode: MULTIRUN - searchpath: [] - callbacks: {} - output_subdir: .hydra - overrides: - hydra: - - hydra.mode=MULTIRUN - task: - - benchmark.input_shapes.batch_size=16 - job: - name: experiment - chdir: true - override_dirname: benchmark.input_shapes.batch_size=16 - id: '4' - num: 4 - config_name: _base_ - env_set: - CUDA_VISIBLE_DEVICES: '1' - CUDA_DEVICE_ORDER: PCI_BUS_ID - env_copy: [] - config: - override_dirname: - kv_sep: '=' - item_sep: ',' - exclude_keys: [] - runtime: - version: 1.3.2 - version_base: '1.3' - cwd: /workspace/optimum-benchmark/examples/quantized-mistrals - config_sources: - - path: hydra.conf - schema: pkg - provider: hydra - - path: optimum_benchmark - schema: pkg - provider: main - - path: hydra_plugins.hydra_colorlog.conf - schema: pkg - provider: hydra-colorlog - - path: /workspace/optimum-benchmark/examples/quantized-mistrals/configs - schema: file - provider: command-line - - path: '' - schema: structured - provider: schema - output_dir: /workspace/optimum-benchmark/examples/quantized-mistrals/experiments/fp16-batch_size(16)-sequence_length(512)-new_tokens(1000)/4 - choices: - benchmark: inference - backend: pytorch - hydra/env: default - hydra/callbacks: null - hydra/job_logging: colorlog - hydra/hydra_logging: colorlog - hydra/hydra_help: default - hydra/help: default - hydra/sweeper: basic - hydra/launcher: basic - hydra/output: default - verbose: false diff --git a/examples/running-mistrals/experiments/A100-80GB/fp16-batch_size(16)-sequence_length(512)-new_tokens(1000)/4/.hydra/overrides.yaml b/examples/running-mistrals/experiments/A100-80GB/fp16-batch_size(16)-sequence_length(512)-new_tokens(1000)/4/.hydra/overrides.yaml deleted file mode 100644 index fdb7f01d..00000000 --- a/examples/running-mistrals/experiments/A100-80GB/fp16-batch_size(16)-sequence_length(512)-new_tokens(1000)/4/.hydra/overrides.yaml +++ /dev/null @@ -1 +0,0 @@ -- benchmark.input_shapes.batch_size=16 diff --git a/examples/running-mistrals/experiments/A100-80GB/fp16-batch_size(16)-sequence_length(512)-new_tokens(1000)/4/hydra_config.yaml b/examples/running-mistrals/experiments/A100-80GB/fp16-batch_size(16)-sequence_length(512)-new_tokens(1000)/4/hydra_config.yaml deleted file mode 100644 index e8f7d723..00000000 --- a/examples/running-mistrals/experiments/A100-80GB/fp16-batch_size(16)-sequence_length(512)-new_tokens(1000)/4/hydra_config.yaml +++ /dev/null @@ -1,77 +0,0 @@ -backend: - name: pytorch - version: 2.1.0+cu118 - _target_: 
optimum_benchmark.backends.pytorch.backend.PyTorchBackend - seed: 42 - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: true - continous_isolation_check: true - delete_cache: false - no_weights: false - device_map: null - torch_dtype: float16 - disable_grad: true - eval_mode: true - amp_autocast: false - amp_dtype: null - torch_compile: false - torch_compile_config: {} - bettertransformer: false - quantization_scheme: null - quantization_config: {} - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 16 - sequence_length: 512 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: 1000 - can_diffuse: false - can_generate: true - forward_kwargs: {} - generate_kwargs: - max_new_tokens: 1000 - min_new_tokens: 1000 - do_sample: false - use_cache: true - pad_token_id: 0 - num_beams: 1 -experiment_name: fp16-batch_size(16)-sequence_length(512)-new_tokens(1000) -model: mistralai/Mistral-7B-v0.1 -device: cuda -task: text-generation -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.2 - optimum_commit: null - transformers_version: 4.35.0.dev0 - transformers_commit: null - accelerate_version: 0.24.0 - accelerate_commit: null - diffusers_version: null - diffusers_commit: null - python_version: 3.10.12 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB diff --git a/examples/running-mistrals/experiments/A100-80GB/fp16-batch_size(16)-sequence_length(512)-new_tokens(1000)/4/inference_results.csv b/examples/running-mistrals/experiments/A100-80GB/fp16-batch_size(16)-sequence_length(512)-new_tokens(1000)/4/inference_results.csv deleted file mode 100644 index c5489483..00000000 --- a/examples/running-mistrals/experiments/A100-80GB/fp16-batch_size(16)-sequence_length(512)-new_tokens(1000)/4/inference_results.csv +++ /dev/null @@ -1,2 +0,0 @@ -forward.latency(s),forward.throughput(samples/s),forward.peak_memory(MB),generate.latency(s),generate.throughput(tokens/s),generate.peak_memory(MB) -0.937,17.1,22458,50.1,319.0,24393 diff --git a/examples/running-mistrals/experiments/A100-80GB/fp16-batch_size(2)-sequence_length(512)-new_tokens(1000)/1/.hydra/config.yaml b/examples/running-mistrals/experiments/A100-80GB/fp16-batch_size(2)-sequence_length(512)-new_tokens(1000)/1/.hydra/config.yaml deleted file mode 100644 index 9431c98f..00000000 --- a/examples/running-mistrals/experiments/A100-80GB/fp16-batch_size(2)-sequence_length(512)-new_tokens(1000)/1/.hydra/config.yaml +++ /dev/null @@ -1,70 +0,0 @@ -backend: - name: pytorch - version: ${pytorch_version:} - _target_: optimum_benchmark.backends.pytorch.backend.PyTorchBackend - seed: 42 - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: true - continous_isolation_check: true - delete_cache: false - no_weights: false - device_map: null - torch_dtype: float16 - disable_grad: ${is_inference:${benchmark.name}} - eval_mode: ${is_inference:${benchmark.name}} - amp_autocast: false - amp_dtype: null - torch_compile: false - torch_compile_config: {} - bettertransformer: false - quantization_scheme: null - quantization_config: {} - use_ddp: false - 
ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 2 - sequence_length: 512 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: 1000 - can_diffuse: ${can_diffuse:${task}} - can_generate: ${can_generate:${task}} - forward_kwargs: {} - generate_kwargs: {} -experiment_name: fp16-batch_size(${benchmark.input_shapes.batch_size})-sequence_length(${benchmark.input_shapes.sequence_length})-new_tokens(${benchmark.new_tokens}) -model: mistralai/Mistral-7B-v0.1 -device: cuda -task: ${infer_task:${model}} -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.2 - optimum_commit: null - transformers_version: 4.35.0.dev0 - transformers_commit: null - accelerate_version: 0.24.0 - accelerate_commit: null - diffusers_version: null - diffusers_commit: null - python_version: 3.10.12 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB diff --git a/examples/running-mistrals/experiments/A100-80GB/fp16-batch_size(2)-sequence_length(512)-new_tokens(1000)/1/.hydra/hydra.yaml b/examples/running-mistrals/experiments/A100-80GB/fp16-batch_size(2)-sequence_length(512)-new_tokens(1000)/1/.hydra/hydra.yaml deleted file mode 100644 index 52ce2bdf..00000000 --- a/examples/running-mistrals/experiments/A100-80GB/fp16-batch_size(2)-sequence_length(512)-new_tokens(1000)/1/.hydra/hydra.yaml +++ /dev/null @@ -1,174 +0,0 @@ -hydra: - run: - dir: experiments/${experiment_name} - sweep: - dir: experiments/${experiment_name} - subdir: ${hydra.job.num} - launcher: - _target_: hydra._internal.core_plugins.basic_launcher.BasicLauncher - sweeper: - _target_: hydra._internal.core_plugins.basic_sweeper.BasicSweeper - max_batch_size: null - params: - benchmark.input_shapes.batch_size: 1,2,4,8,16 - help: - app_name: ${hydra.job.name} - header: '${hydra.help.app_name} is powered by Hydra. - - ' - footer: 'Powered by Hydra (https://hydra.cc) - - Use --hydra-help to view Hydra specific help - - ' - template: '${hydra.help.header} - - == Configuration groups == - - Compose your configuration from those groups (group=option) - - - $APP_CONFIG_GROUPS - - - == Config == - - Override anything in the config (foo.bar=value) - - - $CONFIG - - - ${hydra.help.footer} - - ' - hydra_help: - template: 'Hydra (${hydra.runtime.version}) - - See https://hydra.cc for more info. - - - == Flags == - - $FLAGS_HELP - - - == Configuration groups == - - Compose your configuration from those groups (For example, append hydra/job_logging=disabled - to command line) - - - $HYDRA_CONFIG_GROUPS - - - Use ''--cfg hydra'' to Show the Hydra config. - - ' - hydra_help: ??? 
- hydra_logging: - version: 1 - formatters: - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s' - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - root: - level: INFO - handlers: - - console - disable_existing_loggers: false - job_logging: - version: 1 - formatters: - simple: - format: '[%(asctime)s][%(name)s][%(levelname)s] - %(message)s' - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s] - - %(message)s' - log_colors: - DEBUG: purple - INFO: green - WARNING: yellow - ERROR: red - CRITICAL: red - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - file: - class: logging.FileHandler - formatter: simple - filename: ${hydra.job.name}.log - root: - level: INFO - handlers: - - console - - file - disable_existing_loggers: false - env: {} - mode: MULTIRUN - searchpath: [] - callbacks: {} - output_subdir: .hydra - overrides: - hydra: - - hydra.mode=MULTIRUN - task: - - benchmark.input_shapes.batch_size=2 - job: - name: experiment - chdir: true - override_dirname: benchmark.input_shapes.batch_size=2 - id: '1' - num: 1 - config_name: _base_ - env_set: - CUDA_VISIBLE_DEVICES: '1' - CUDA_DEVICE_ORDER: PCI_BUS_ID - env_copy: [] - config: - override_dirname: - kv_sep: '=' - item_sep: ',' - exclude_keys: [] - runtime: - version: 1.3.2 - version_base: '1.3' - cwd: /workspace/optimum-benchmark/examples/quantized-mistrals - config_sources: - - path: hydra.conf - schema: pkg - provider: hydra - - path: optimum_benchmark - schema: pkg - provider: main - - path: hydra_plugins.hydra_colorlog.conf - schema: pkg - provider: hydra-colorlog - - path: /workspace/optimum-benchmark/examples/quantized-mistrals/configs - schema: file - provider: command-line - - path: '' - schema: structured - provider: schema - output_dir: /workspace/optimum-benchmark/examples/quantized-mistrals/experiments/fp16-batch_size(2)-sequence_length(512)-new_tokens(1000)/1 - choices: - benchmark: inference - backend: pytorch - hydra/env: default - hydra/callbacks: null - hydra/job_logging: colorlog - hydra/hydra_logging: colorlog - hydra/hydra_help: default - hydra/help: default - hydra/sweeper: basic - hydra/launcher: basic - hydra/output: default - verbose: false diff --git a/examples/running-mistrals/experiments/A100-80GB/fp16-batch_size(2)-sequence_length(512)-new_tokens(1000)/1/.hydra/overrides.yaml b/examples/running-mistrals/experiments/A100-80GB/fp16-batch_size(2)-sequence_length(512)-new_tokens(1000)/1/.hydra/overrides.yaml deleted file mode 100644 index 8211b85f..00000000 --- a/examples/running-mistrals/experiments/A100-80GB/fp16-batch_size(2)-sequence_length(512)-new_tokens(1000)/1/.hydra/overrides.yaml +++ /dev/null @@ -1 +0,0 @@ -- benchmark.input_shapes.batch_size=2 diff --git a/examples/running-mistrals/experiments/A100-80GB/fp16-batch_size(2)-sequence_length(512)-new_tokens(1000)/1/hydra_config.yaml b/examples/running-mistrals/experiments/A100-80GB/fp16-batch_size(2)-sequence_length(512)-new_tokens(1000)/1/hydra_config.yaml deleted file mode 100644 index efdcc561..00000000 --- a/examples/running-mistrals/experiments/A100-80GB/fp16-batch_size(2)-sequence_length(512)-new_tokens(1000)/1/hydra_config.yaml +++ /dev/null @@ -1,77 +0,0 @@ -backend: - name: pytorch - version: 2.1.0+cu118 - _target_: optimum_benchmark.backends.pytorch.backend.PyTorchBackend - 
seed: 42 - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: true - continous_isolation_check: true - delete_cache: false - no_weights: false - device_map: null - torch_dtype: float16 - disable_grad: true - eval_mode: true - amp_autocast: false - amp_dtype: null - torch_compile: false - torch_compile_config: {} - bettertransformer: false - quantization_scheme: null - quantization_config: {} - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 2 - sequence_length: 512 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: 1000 - can_diffuse: false - can_generate: true - forward_kwargs: {} - generate_kwargs: - max_new_tokens: 1000 - min_new_tokens: 1000 - do_sample: false - use_cache: true - pad_token_id: 0 - num_beams: 1 -experiment_name: fp16-batch_size(2)-sequence_length(512)-new_tokens(1000) -model: mistralai/Mistral-7B-v0.1 -device: cuda -task: text-generation -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.2 - optimum_commit: null - transformers_version: 4.35.0.dev0 - transformers_commit: null - accelerate_version: 0.24.0 - accelerate_commit: null - diffusers_version: null - diffusers_commit: null - python_version: 3.10.12 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB diff --git a/examples/running-mistrals/experiments/A100-80GB/fp16-batch_size(2)-sequence_length(512)-new_tokens(1000)/1/inference_results.csv b/examples/running-mistrals/experiments/A100-80GB/fp16-batch_size(2)-sequence_length(512)-new_tokens(1000)/1/inference_results.csv deleted file mode 100644 index caa1d8d5..00000000 --- a/examples/running-mistrals/experiments/A100-80GB/fp16-batch_size(2)-sequence_length(512)-new_tokens(1000)/1/inference_results.csv +++ /dev/null @@ -1,2 +0,0 @@ -forward.latency(s),forward.throughput(samples/s),forward.peak_memory(MB),generate.latency(s),generate.throughput(tokens/s),generate.peak_memory(MB) -0.118,16.9,17158,29.4,68.0,17523 diff --git a/examples/running-mistrals/experiments/A100-80GB/fp16-batch_size(4)-sequence_length(512)-new_tokens(1000)/2/.hydra/config.yaml b/examples/running-mistrals/experiments/A100-80GB/fp16-batch_size(4)-sequence_length(512)-new_tokens(1000)/2/.hydra/config.yaml deleted file mode 100644 index be5a83f3..00000000 --- a/examples/running-mistrals/experiments/A100-80GB/fp16-batch_size(4)-sequence_length(512)-new_tokens(1000)/2/.hydra/config.yaml +++ /dev/null @@ -1,70 +0,0 @@ -backend: - name: pytorch - version: ${pytorch_version:} - _target_: optimum_benchmark.backends.pytorch.backend.PyTorchBackend - seed: 42 - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: true - continous_isolation_check: true - delete_cache: false - no_weights: false - device_map: null - torch_dtype: float16 - disable_grad: ${is_inference:${benchmark.name}} - eval_mode: ${is_inference:${benchmark.name}} - amp_autocast: false - amp_dtype: null - torch_compile: false - torch_compile_config: {} - bettertransformer: false - quantization_scheme: null - quantization_config: {} - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: 
inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 4 - sequence_length: 512 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: 1000 - can_diffuse: ${can_diffuse:${task}} - can_generate: ${can_generate:${task}} - forward_kwargs: {} - generate_kwargs: {} -experiment_name: fp16-batch_size(${benchmark.input_shapes.batch_size})-sequence_length(${benchmark.input_shapes.sequence_length})-new_tokens(${benchmark.new_tokens}) -model: mistralai/Mistral-7B-v0.1 -device: cuda -task: ${infer_task:${model}} -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.2 - optimum_commit: null - transformers_version: 4.35.0.dev0 - transformers_commit: null - accelerate_version: 0.24.0 - accelerate_commit: null - diffusers_version: null - diffusers_commit: null - python_version: 3.10.12 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB diff --git a/examples/running-mistrals/experiments/A100-80GB/fp16-batch_size(4)-sequence_length(512)-new_tokens(1000)/2/.hydra/hydra.yaml b/examples/running-mistrals/experiments/A100-80GB/fp16-batch_size(4)-sequence_length(512)-new_tokens(1000)/2/.hydra/hydra.yaml deleted file mode 100644 index 565f1bd3..00000000 --- a/examples/running-mistrals/experiments/A100-80GB/fp16-batch_size(4)-sequence_length(512)-new_tokens(1000)/2/.hydra/hydra.yaml +++ /dev/null @@ -1,174 +0,0 @@ -hydra: - run: - dir: experiments/${experiment_name} - sweep: - dir: experiments/${experiment_name} - subdir: ${hydra.job.num} - launcher: - _target_: hydra._internal.core_plugins.basic_launcher.BasicLauncher - sweeper: - _target_: hydra._internal.core_plugins.basic_sweeper.BasicSweeper - max_batch_size: null - params: - benchmark.input_shapes.batch_size: 1,2,4,8,16 - help: - app_name: ${hydra.job.name} - header: '${hydra.help.app_name} is powered by Hydra. - - ' - footer: 'Powered by Hydra (https://hydra.cc) - - Use --hydra-help to view Hydra specific help - - ' - template: '${hydra.help.header} - - == Configuration groups == - - Compose your configuration from those groups (group=option) - - - $APP_CONFIG_GROUPS - - - == Config == - - Override anything in the config (foo.bar=value) - - - $CONFIG - - - ${hydra.help.footer} - - ' - hydra_help: - template: 'Hydra (${hydra.runtime.version}) - - See https://hydra.cc for more info. - - - == Flags == - - $FLAGS_HELP - - - == Configuration groups == - - Compose your configuration from those groups (For example, append hydra/job_logging=disabled - to command line) - - - $HYDRA_CONFIG_GROUPS - - - Use ''--cfg hydra'' to Show the Hydra config. - - ' - hydra_help: ??? 
- hydra_logging: - version: 1 - formatters: - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s' - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - root: - level: INFO - handlers: - - console - disable_existing_loggers: false - job_logging: - version: 1 - formatters: - simple: - format: '[%(asctime)s][%(name)s][%(levelname)s] - %(message)s' - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s] - - %(message)s' - log_colors: - DEBUG: purple - INFO: green - WARNING: yellow - ERROR: red - CRITICAL: red - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - file: - class: logging.FileHandler - formatter: simple - filename: ${hydra.job.name}.log - root: - level: INFO - handlers: - - console - - file - disable_existing_loggers: false - env: {} - mode: MULTIRUN - searchpath: [] - callbacks: {} - output_subdir: .hydra - overrides: - hydra: - - hydra.mode=MULTIRUN - task: - - benchmark.input_shapes.batch_size=4 - job: - name: experiment - chdir: true - override_dirname: benchmark.input_shapes.batch_size=4 - id: '2' - num: 2 - config_name: _base_ - env_set: - CUDA_VISIBLE_DEVICES: '1' - CUDA_DEVICE_ORDER: PCI_BUS_ID - env_copy: [] - config: - override_dirname: - kv_sep: '=' - item_sep: ',' - exclude_keys: [] - runtime: - version: 1.3.2 - version_base: '1.3' - cwd: /workspace/optimum-benchmark/examples/quantized-mistrals - config_sources: - - path: hydra.conf - schema: pkg - provider: hydra - - path: optimum_benchmark - schema: pkg - provider: main - - path: hydra_plugins.hydra_colorlog.conf - schema: pkg - provider: hydra-colorlog - - path: /workspace/optimum-benchmark/examples/quantized-mistrals/configs - schema: file - provider: command-line - - path: '' - schema: structured - provider: schema - output_dir: /workspace/optimum-benchmark/examples/quantized-mistrals/experiments/fp16-batch_size(4)-sequence_length(512)-new_tokens(1000)/2 - choices: - benchmark: inference - backend: pytorch - hydra/env: default - hydra/callbacks: null - hydra/job_logging: colorlog - hydra/hydra_logging: colorlog - hydra/hydra_help: default - hydra/help: default - hydra/sweeper: basic - hydra/launcher: basic - hydra/output: default - verbose: false diff --git a/examples/running-mistrals/experiments/A100-80GB/fp16-batch_size(4)-sequence_length(512)-new_tokens(1000)/2/.hydra/overrides.yaml b/examples/running-mistrals/experiments/A100-80GB/fp16-batch_size(4)-sequence_length(512)-new_tokens(1000)/2/.hydra/overrides.yaml deleted file mode 100644 index eef8c9ca..00000000 --- a/examples/running-mistrals/experiments/A100-80GB/fp16-batch_size(4)-sequence_length(512)-new_tokens(1000)/2/.hydra/overrides.yaml +++ /dev/null @@ -1 +0,0 @@ -- benchmark.input_shapes.batch_size=4 diff --git a/examples/running-mistrals/experiments/A100-80GB/fp16-batch_size(4)-sequence_length(512)-new_tokens(1000)/2/hydra_config.yaml b/examples/running-mistrals/experiments/A100-80GB/fp16-batch_size(4)-sequence_length(512)-new_tokens(1000)/2/hydra_config.yaml deleted file mode 100644 index a6c8cec3..00000000 --- a/examples/running-mistrals/experiments/A100-80GB/fp16-batch_size(4)-sequence_length(512)-new_tokens(1000)/2/hydra_config.yaml +++ /dev/null @@ -1,77 +0,0 @@ -backend: - name: pytorch - version: 2.1.0+cu118 - _target_: optimum_benchmark.backends.pytorch.backend.PyTorchBackend - 
seed: 42 - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: true - continous_isolation_check: true - delete_cache: false - no_weights: false - device_map: null - torch_dtype: float16 - disable_grad: true - eval_mode: true - amp_autocast: false - amp_dtype: null - torch_compile: false - torch_compile_config: {} - bettertransformer: false - quantization_scheme: null - quantization_config: {} - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 4 - sequence_length: 512 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: 1000 - can_diffuse: false - can_generate: true - forward_kwargs: {} - generate_kwargs: - max_new_tokens: 1000 - min_new_tokens: 1000 - do_sample: false - use_cache: true - pad_token_id: 0 - num_beams: 1 -experiment_name: fp16-batch_size(4)-sequence_length(512)-new_tokens(1000) -model: mistralai/Mistral-7B-v0.1 -device: cuda -task: text-generation -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.2 - optimum_commit: null - transformers_version: 4.35.0.dev0 - transformers_commit: null - accelerate_version: 0.24.0 - accelerate_commit: null - diffusers_version: null - diffusers_commit: null - python_version: 3.10.12 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB diff --git a/examples/running-mistrals/experiments/A100-80GB/fp16-batch_size(4)-sequence_length(512)-new_tokens(1000)/2/inference_results.csv b/examples/running-mistrals/experiments/A100-80GB/fp16-batch_size(4)-sequence_length(512)-new_tokens(1000)/2/inference_results.csv deleted file mode 100644 index 6073280e..00000000 --- a/examples/running-mistrals/experiments/A100-80GB/fp16-batch_size(4)-sequence_length(512)-new_tokens(1000)/2/inference_results.csv +++ /dev/null @@ -1,2 +0,0 @@ -forward.latency(s),forward.throughput(samples/s),forward.peak_memory(MB),generate.latency(s),generate.throughput(tokens/s),generate.peak_memory(MB) -0.233,17.2,17865,29.2,137.0,18509 diff --git a/examples/running-mistrals/experiments/A100-80GB/fp16-batch_size(8)-sequence_length(512)-new_tokens(1000)/3/.hydra/config.yaml b/examples/running-mistrals/experiments/A100-80GB/fp16-batch_size(8)-sequence_length(512)-new_tokens(1000)/3/.hydra/config.yaml deleted file mode 100644 index 3cbb0127..00000000 --- a/examples/running-mistrals/experiments/A100-80GB/fp16-batch_size(8)-sequence_length(512)-new_tokens(1000)/3/.hydra/config.yaml +++ /dev/null @@ -1,70 +0,0 @@ -backend: - name: pytorch - version: ${pytorch_version:} - _target_: optimum_benchmark.backends.pytorch.backend.PyTorchBackend - seed: 42 - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: true - continous_isolation_check: true - delete_cache: false - no_weights: false - device_map: null - torch_dtype: float16 - disable_grad: ${is_inference:${benchmark.name}} - eval_mode: ${is_inference:${benchmark.name}} - amp_autocast: false - amp_dtype: null - torch_compile: false - torch_compile_config: {} - bettertransformer: false - quantization_scheme: null - quantization_config: {} - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: 
inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 8 - sequence_length: 512 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: 1000 - can_diffuse: ${can_diffuse:${task}} - can_generate: ${can_generate:${task}} - forward_kwargs: {} - generate_kwargs: {} -experiment_name: fp16-batch_size(${benchmark.input_shapes.batch_size})-sequence_length(${benchmark.input_shapes.sequence_length})-new_tokens(${benchmark.new_tokens}) -model: mistralai/Mistral-7B-v0.1 -device: cuda -task: ${infer_task:${model}} -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.2 - optimum_commit: null - transformers_version: 4.35.0.dev0 - transformers_commit: null - accelerate_version: 0.24.0 - accelerate_commit: null - diffusers_version: null - diffusers_commit: null - python_version: 3.10.12 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB diff --git a/examples/running-mistrals/experiments/A100-80GB/fp16-batch_size(8)-sequence_length(512)-new_tokens(1000)/3/.hydra/hydra.yaml b/examples/running-mistrals/experiments/A100-80GB/fp16-batch_size(8)-sequence_length(512)-new_tokens(1000)/3/.hydra/hydra.yaml deleted file mode 100644 index 0f896a1f..00000000 --- a/examples/running-mistrals/experiments/A100-80GB/fp16-batch_size(8)-sequence_length(512)-new_tokens(1000)/3/.hydra/hydra.yaml +++ /dev/null @@ -1,174 +0,0 @@ -hydra: - run: - dir: experiments/${experiment_name} - sweep: - dir: experiments/${experiment_name} - subdir: ${hydra.job.num} - launcher: - _target_: hydra._internal.core_plugins.basic_launcher.BasicLauncher - sweeper: - _target_: hydra._internal.core_plugins.basic_sweeper.BasicSweeper - max_batch_size: null - params: - benchmark.input_shapes.batch_size: 1,2,4,8,16 - help: - app_name: ${hydra.job.name} - header: '${hydra.help.app_name} is powered by Hydra. - - ' - footer: 'Powered by Hydra (https://hydra.cc) - - Use --hydra-help to view Hydra specific help - - ' - template: '${hydra.help.header} - - == Configuration groups == - - Compose your configuration from those groups (group=option) - - - $APP_CONFIG_GROUPS - - - == Config == - - Override anything in the config (foo.bar=value) - - - $CONFIG - - - ${hydra.help.footer} - - ' - hydra_help: - template: 'Hydra (${hydra.runtime.version}) - - See https://hydra.cc for more info. - - - == Flags == - - $FLAGS_HELP - - - == Configuration groups == - - Compose your configuration from those groups (For example, append hydra/job_logging=disabled - to command line) - - - $HYDRA_CONFIG_GROUPS - - - Use ''--cfg hydra'' to Show the Hydra config. - - ' - hydra_help: ??? 
- hydra_logging: - version: 1 - formatters: - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s' - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - root: - level: INFO - handlers: - - console - disable_existing_loggers: false - job_logging: - version: 1 - formatters: - simple: - format: '[%(asctime)s][%(name)s][%(levelname)s] - %(message)s' - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s] - - %(message)s' - log_colors: - DEBUG: purple - INFO: green - WARNING: yellow - ERROR: red - CRITICAL: red - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - file: - class: logging.FileHandler - formatter: simple - filename: ${hydra.job.name}.log - root: - level: INFO - handlers: - - console - - file - disable_existing_loggers: false - env: {} - mode: MULTIRUN - searchpath: [] - callbacks: {} - output_subdir: .hydra - overrides: - hydra: - - hydra.mode=MULTIRUN - task: - - benchmark.input_shapes.batch_size=8 - job: - name: experiment - chdir: true - override_dirname: benchmark.input_shapes.batch_size=8 - id: '3' - num: 3 - config_name: _base_ - env_set: - CUDA_VISIBLE_DEVICES: '1' - CUDA_DEVICE_ORDER: PCI_BUS_ID - env_copy: [] - config: - override_dirname: - kv_sep: '=' - item_sep: ',' - exclude_keys: [] - runtime: - version: 1.3.2 - version_base: '1.3' - cwd: /workspace/optimum-benchmark/examples/quantized-mistrals - config_sources: - - path: hydra.conf - schema: pkg - provider: hydra - - path: optimum_benchmark - schema: pkg - provider: main - - path: hydra_plugins.hydra_colorlog.conf - schema: pkg - provider: hydra-colorlog - - path: /workspace/optimum-benchmark/examples/quantized-mistrals/configs - schema: file - provider: command-line - - path: '' - schema: structured - provider: schema - output_dir: /workspace/optimum-benchmark/examples/quantized-mistrals/experiments/fp16-batch_size(8)-sequence_length(512)-new_tokens(1000)/3 - choices: - benchmark: inference - backend: pytorch - hydra/env: default - hydra/callbacks: null - hydra/job_logging: colorlog - hydra/hydra_logging: colorlog - hydra/hydra_help: default - hydra/help: default - hydra/sweeper: basic - hydra/launcher: basic - hydra/output: default - verbose: false diff --git a/examples/running-mistrals/experiments/A100-80GB/fp16-batch_size(8)-sequence_length(512)-new_tokens(1000)/3/.hydra/overrides.yaml b/examples/running-mistrals/experiments/A100-80GB/fp16-batch_size(8)-sequence_length(512)-new_tokens(1000)/3/.hydra/overrides.yaml deleted file mode 100644 index 8cd14374..00000000 --- a/examples/running-mistrals/experiments/A100-80GB/fp16-batch_size(8)-sequence_length(512)-new_tokens(1000)/3/.hydra/overrides.yaml +++ /dev/null @@ -1 +0,0 @@ -- benchmark.input_shapes.batch_size=8 diff --git a/examples/running-mistrals/experiments/A100-80GB/fp16-batch_size(8)-sequence_length(512)-new_tokens(1000)/3/hydra_config.yaml b/examples/running-mistrals/experiments/A100-80GB/fp16-batch_size(8)-sequence_length(512)-new_tokens(1000)/3/hydra_config.yaml deleted file mode 100644 index f5a98c47..00000000 --- a/examples/running-mistrals/experiments/A100-80GB/fp16-batch_size(8)-sequence_length(512)-new_tokens(1000)/3/hydra_config.yaml +++ /dev/null @@ -1,77 +0,0 @@ -backend: - name: pytorch - version: 2.1.0+cu118 - _target_: optimum_benchmark.backends.pytorch.backend.PyTorchBackend - 
seed: 42 - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: true - continous_isolation_check: true - delete_cache: false - no_weights: false - device_map: null - torch_dtype: float16 - disable_grad: true - eval_mode: true - amp_autocast: false - amp_dtype: null - torch_compile: false - torch_compile_config: {} - bettertransformer: false - quantization_scheme: null - quantization_config: {} - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 8 - sequence_length: 512 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: 1000 - can_diffuse: false - can_generate: true - forward_kwargs: {} - generate_kwargs: - max_new_tokens: 1000 - min_new_tokens: 1000 - do_sample: false - use_cache: true - pad_token_id: 0 - num_beams: 1 -experiment_name: fp16-batch_size(8)-sequence_length(512)-new_tokens(1000) -model: mistralai/Mistral-7B-v0.1 -device: cuda -task: text-generation -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.2 - optimum_commit: null - transformers_version: 4.35.0.dev0 - transformers_commit: null - accelerate_version: 0.24.0 - accelerate_commit: null - diffusers_version: null - diffusers_commit: null - python_version: 3.10.12 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB diff --git a/examples/running-mistrals/experiments/A100-80GB/fp16-batch_size(8)-sequence_length(512)-new_tokens(1000)/3/inference_results.csv b/examples/running-mistrals/experiments/A100-80GB/fp16-batch_size(8)-sequence_length(512)-new_tokens(1000)/3/inference_results.csv deleted file mode 100644 index 8a714ff6..00000000 --- a/examples/running-mistrals/experiments/A100-80GB/fp16-batch_size(8)-sequence_length(512)-new_tokens(1000)/3/inference_results.csv +++ /dev/null @@ -1,2 +0,0 @@ -forward.latency(s),forward.throughput(samples/s),forward.peak_memory(MB),generate.latency(s),generate.throughput(tokens/s),generate.peak_memory(MB) -0.467,17.1,19463,33.9,236.0,20524 diff --git a/examples/running-mistrals/experiments/A100-80GB/fp16-batch_size(None)-sequence_length(512)-new_tokens(1000)/multirun.yaml b/examples/running-mistrals/experiments/A100-80GB/fp16-batch_size(None)-sequence_length(512)-new_tokens(1000)/multirun.yaml deleted file mode 100644 index 1dd373a8..00000000 --- a/examples/running-mistrals/experiments/A100-80GB/fp16-batch_size(None)-sequence_length(512)-new_tokens(1000)/multirun.yaml +++ /dev/null @@ -1,243 +0,0 @@ -hydra: - run: - dir: experiments/${experiment_name} - sweep: - dir: experiments/${experiment_name} - subdir: ${hydra.job.num} - launcher: - _target_: hydra._internal.core_plugins.basic_launcher.BasicLauncher - sweeper: - _target_: hydra._internal.core_plugins.basic_sweeper.BasicSweeper - max_batch_size: null - params: - benchmark.input_shapes.batch_size: 1,2,4,8,16 - help: - app_name: ${hydra.job.name} - header: '${hydra.help.app_name} is powered by Hydra. 
- - ' - footer: 'Powered by Hydra (https://hydra.cc) - - Use --hydra-help to view Hydra specific help - - ' - template: '${hydra.help.header} - - == Configuration groups == - - Compose your configuration from those groups (group=option) - - - $APP_CONFIG_GROUPS - - - == Config == - - Override anything in the config (foo.bar=value) - - - $CONFIG - - - ${hydra.help.footer} - - ' - hydra_help: - template: 'Hydra (${hydra.runtime.version}) - - See https://hydra.cc for more info. - - - == Flags == - - $FLAGS_HELP - - - == Configuration groups == - - Compose your configuration from those groups (For example, append hydra/job_logging=disabled - to command line) - - - $HYDRA_CONFIG_GROUPS - - - Use ''--cfg hydra'' to Show the Hydra config. - - ' - hydra_help: ??? - hydra_logging: - version: 1 - formatters: - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s' - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - root: - level: INFO - handlers: - - console - disable_existing_loggers: false - job_logging: - version: 1 - formatters: - simple: - format: '[%(asctime)s][%(name)s][%(levelname)s] - %(message)s' - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s] - - %(message)s' - log_colors: - DEBUG: purple - INFO: green - WARNING: yellow - ERROR: red - CRITICAL: red - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - file: - class: logging.FileHandler - formatter: simple - filename: ${hydra.job.name}.log - root: - level: INFO - handlers: - - console - - file - disable_existing_loggers: false - env: {} - mode: MULTIRUN - searchpath: [] - callbacks: {} - output_subdir: .hydra - overrides: - hydra: - - hydra.mode=MULTIRUN - task: [] - job: - name: experiment - chdir: true - override_dirname: '' - id: ??? - num: ??? - config_name: _base_ - env_set: - CUDA_VISIBLE_DEVICES: '1' - CUDA_DEVICE_ORDER: PCI_BUS_ID - env_copy: [] - config: - override_dirname: - kv_sep: '=' - item_sep: ',' - exclude_keys: [] - runtime: - version: 1.3.2 - version_base: '1.3' - cwd: /workspace/optimum-benchmark/examples/quantized-mistrals - config_sources: - - path: hydra.conf - schema: pkg - provider: hydra - - path: optimum_benchmark - schema: pkg - provider: main - - path: hydra_plugins.hydra_colorlog.conf - schema: pkg - provider: hydra-colorlog - - path: /workspace/optimum-benchmark/examples/quantized-mistrals/configs - schema: file - provider: command-line - - path: '' - schema: structured - provider: schema - output_dir: ??? 
- choices: - benchmark: inference - backend: pytorch - hydra/env: default - hydra/callbacks: null - hydra/job_logging: colorlog - hydra/hydra_logging: colorlog - hydra/hydra_help: default - hydra/help: default - hydra/sweeper: basic - hydra/launcher: basic - hydra/output: default - verbose: false -backend: - name: pytorch - version: ${pytorch_version:} - _target_: optimum_benchmark.backends.pytorch.backend.PyTorchBackend - seed: 42 - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: true - continous_isolation_check: true - delete_cache: false - no_weights: false - device_map: null - torch_dtype: float16 - disable_grad: ${is_inference:${benchmark.name}} - eval_mode: ${is_inference:${benchmark.name}} - amp_autocast: false - amp_dtype: null - torch_compile: false - torch_compile_config: {} - bettertransformer: false - quantization_scheme: null - quantization_config: {} - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: null - sequence_length: 512 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: 1000 - can_diffuse: ${can_diffuse:${task}} - can_generate: ${can_generate:${task}} - forward_kwargs: {} - generate_kwargs: {} -experiment_name: fp16-batch_size(${benchmark.input_shapes.batch_size})-sequence_length(${benchmark.input_shapes.sequence_length})-new_tokens(${benchmark.new_tokens}) -model: mistralai/Mistral-7B-v0.1 -device: cuda -task: ${infer_task:${model}} -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.2 - optimum_commit: null - transformers_version: 4.35.0.dev0 - transformers_commit: null - accelerate_version: 0.24.0 - accelerate_commit: null - diffusers_version: null - diffusers_commit: null - python_version: 3.10.12 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB diff --git a/examples/running-mistrals/experiments/A100-80GB/gptq-batch_size(1)-sequence_length(512)-new_tokens(1000)/0/.hydra/config.yaml b/examples/running-mistrals/experiments/A100-80GB/gptq-batch_size(1)-sequence_length(512)-new_tokens(1000)/0/.hydra/config.yaml deleted file mode 100644 index 41b761b2..00000000 --- a/examples/running-mistrals/experiments/A100-80GB/gptq-batch_size(1)-sequence_length(512)-new_tokens(1000)/0/.hydra/config.yaml +++ /dev/null @@ -1,71 +0,0 @@ -backend: - name: pytorch - version: ${pytorch_version:} - _target_: optimum_benchmark.backends.pytorch.backend.PyTorchBackend - seed: 42 - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: true - continous_isolation_check: true - delete_cache: false - no_weights: false - device_map: null - torch_dtype: float16 - disable_grad: ${is_inference:${benchmark.name}} - eval_mode: ${is_inference:${benchmark.name}} - amp_autocast: false - amp_dtype: null - torch_compile: false - torch_compile_config: {} - bettertransformer: false - quantization_scheme: null - quantization_config: {} - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 1 - 
sequence_length: 512 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: 1000 - can_diffuse: ${can_diffuse:${task}} - can_generate: ${can_generate:${task}} - forward_kwargs: {} - generate_kwargs: {} -experiment_name: gptq-batch_size(${benchmark.input_shapes.batch_size})-sequence_length(${benchmark.input_shapes.sequence_length})-new_tokens(${benchmark.new_tokens}) -model: TheBloke/Mistral-7B-v0.1-GPTQ -device: cuda -task: ${infer_task:${model}} -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.2 - optimum_commit: null - transformers_version: 4.34.1 - transformers_commit: null - accelerate_version: 0.24.0 - accelerate_commit: null - diffusers_version: null - diffusers_commit: null - python_version: 3.10.12 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB diff --git a/examples/running-mistrals/experiments/A100-80GB/gptq-batch_size(1)-sequence_length(512)-new_tokens(1000)/0/.hydra/hydra.yaml b/examples/running-mistrals/experiments/A100-80GB/gptq-batch_size(1)-sequence_length(512)-new_tokens(1000)/0/.hydra/hydra.yaml deleted file mode 100644 index 6f61abfc..00000000 --- a/examples/running-mistrals/experiments/A100-80GB/gptq-batch_size(1)-sequence_length(512)-new_tokens(1000)/0/.hydra/hydra.yaml +++ /dev/null @@ -1,174 +0,0 @@ -hydra: - run: - dir: experiments/${experiment_name} - sweep: - dir: experiments/${experiment_name} - subdir: ${hydra.job.num} - launcher: - _target_: hydra._internal.core_plugins.basic_launcher.BasicLauncher - sweeper: - _target_: hydra._internal.core_plugins.basic_sweeper.BasicSweeper - max_batch_size: null - params: - benchmark.input_shapes.batch_size: 1,2,4,8,16 - help: - app_name: ${hydra.job.name} - header: '${hydra.help.app_name} is powered by Hydra. - - ' - footer: 'Powered by Hydra (https://hydra.cc) - - Use --hydra-help to view Hydra specific help - - ' - template: '${hydra.help.header} - - == Configuration groups == - - Compose your configuration from those groups (group=option) - - - $APP_CONFIG_GROUPS - - - == Config == - - Override anything in the config (foo.bar=value) - - - $CONFIG - - - ${hydra.help.footer} - - ' - hydra_help: - template: 'Hydra (${hydra.runtime.version}) - - See https://hydra.cc for more info. - - - == Flags == - - $FLAGS_HELP - - - == Configuration groups == - - Compose your configuration from those groups (For example, append hydra/job_logging=disabled - to command line) - - - $HYDRA_CONFIG_GROUPS - - - Use ''--cfg hydra'' to Show the Hydra config. - - ' - hydra_help: ??? 
- hydra_logging: - version: 1 - formatters: - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s' - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - root: - level: INFO - handlers: - - console - disable_existing_loggers: false - job_logging: - version: 1 - formatters: - simple: - format: '[%(asctime)s][%(name)s][%(levelname)s] - %(message)s' - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s] - - %(message)s' - log_colors: - DEBUG: purple - INFO: green - WARNING: yellow - ERROR: red - CRITICAL: red - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - file: - class: logging.FileHandler - formatter: simple - filename: ${hydra.job.name}.log - root: - level: INFO - handlers: - - console - - file - disable_existing_loggers: false - env: {} - mode: MULTIRUN - searchpath: [] - callbacks: {} - output_subdir: .hydra - overrides: - hydra: - - hydra.mode=MULTIRUN - task: - - benchmark.input_shapes.batch_size=1 - job: - name: experiment - chdir: true - override_dirname: benchmark.input_shapes.batch_size=1 - id: '0' - num: 0 - config_name: gptq - env_set: - CUDA_VISIBLE_DEVICES: '0' - CUDA_DEVICE_ORDER: PCI_BUS_ID - env_copy: [] - config: - override_dirname: - kv_sep: '=' - item_sep: ',' - exclude_keys: [] - runtime: - version: 1.3.2 - version_base: '1.3' - cwd: /workspace/optimum-benchmark/examples/running-mistral - config_sources: - - path: hydra.conf - schema: pkg - provider: hydra - - path: optimum_benchmark - schema: pkg - provider: main - - path: hydra_plugins.hydra_colorlog.conf - schema: pkg - provider: hydra-colorlog - - path: /workspace/optimum-benchmark/examples/running-mistral/configs - schema: file - provider: command-line - - path: '' - schema: structured - provider: schema - output_dir: /workspace/optimum-benchmark/examples/running-mistral/experiments/gptq-batch_size(1)-sequence_length(512)-new_tokens(1000)/0 - choices: - benchmark: inference - backend: pytorch - hydra/env: default - hydra/callbacks: null - hydra/job_logging: colorlog - hydra/hydra_logging: colorlog - hydra/hydra_help: default - hydra/help: default - hydra/sweeper: basic - hydra/launcher: basic - hydra/output: default - verbose: false diff --git a/examples/running-mistrals/experiments/A100-80GB/gptq-batch_size(1)-sequence_length(512)-new_tokens(1000)/0/.hydra/overrides.yaml b/examples/running-mistrals/experiments/A100-80GB/gptq-batch_size(1)-sequence_length(512)-new_tokens(1000)/0/.hydra/overrides.yaml deleted file mode 100644 index 989520ff..00000000 --- a/examples/running-mistrals/experiments/A100-80GB/gptq-batch_size(1)-sequence_length(512)-new_tokens(1000)/0/.hydra/overrides.yaml +++ /dev/null @@ -1 +0,0 @@ -- benchmark.input_shapes.batch_size=1 diff --git a/examples/running-mistrals/experiments/A100-80GB/gptq-batch_size(1)-sequence_length(512)-new_tokens(1000)/0/hydra_config.yaml b/examples/running-mistrals/experiments/A100-80GB/gptq-batch_size(1)-sequence_length(512)-new_tokens(1000)/0/hydra_config.yaml deleted file mode 100644 index 94c0b06a..00000000 --- a/examples/running-mistrals/experiments/A100-80GB/gptq-batch_size(1)-sequence_length(512)-new_tokens(1000)/0/hydra_config.yaml +++ /dev/null @@ -1,77 +0,0 @@ -backend: - name: pytorch - version: 2.1.0+cu118 - _target_: optimum_benchmark.backends.pytorch.backend.PyTorchBackend - seed: 42 - 
inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: true - continous_isolation_check: true - delete_cache: false - no_weights: false - device_map: null - torch_dtype: float16 - disable_grad: true - eval_mode: true - amp_autocast: false - amp_dtype: null - torch_compile: false - torch_compile_config: {} - bettertransformer: false - quantization_scheme: null - quantization_config: {} - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 1 - sequence_length: 512 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: 1000 - can_diffuse: false - can_generate: true - forward_kwargs: {} - generate_kwargs: - max_new_tokens: 1000 - min_new_tokens: 1000 - do_sample: false - use_cache: true - pad_token_id: 0 - num_beams: 1 -experiment_name: gptq-batch_size(1)-sequence_length(512)-new_tokens(1000) -model: TheBloke/Mistral-7B-v0.1-GPTQ -device: cuda -task: text-generation -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.2 - optimum_commit: null - transformers_version: 4.34.1 - transformers_commit: null - accelerate_version: 0.24.0 - accelerate_commit: null - diffusers_version: null - diffusers_commit: null - python_version: 3.10.12 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB diff --git a/examples/running-mistrals/experiments/A100-80GB/gptq-batch_size(1)-sequence_length(512)-new_tokens(1000)/0/inference_results.csv b/examples/running-mistrals/experiments/A100-80GB/gptq-batch_size(1)-sequence_length(512)-new_tokens(1000)/0/inference_results.csv deleted file mode 100644 index 56fdbc88..00000000 --- a/examples/running-mistrals/experiments/A100-80GB/gptq-batch_size(1)-sequence_length(512)-new_tokens(1000)/0/inference_results.csv +++ /dev/null @@ -1,2 +0,0 @@ -forward.latency(s),forward.throughput(samples/s),forward.peak_memory(MB),generate.latency(s),generate.throughput(tokens/s),generate.peak_memory(MB) -0.0828,12.1,7669,31.5,31.7,7824 diff --git a/examples/running-mistrals/experiments/A100-80GB/gptq-batch_size(16)-sequence_length(512)-new_tokens(1000)/4/.hydra/config.yaml b/examples/running-mistrals/experiments/A100-80GB/gptq-batch_size(16)-sequence_length(512)-new_tokens(1000)/4/.hydra/config.yaml deleted file mode 100644 index 7779b85a..00000000 --- a/examples/running-mistrals/experiments/A100-80GB/gptq-batch_size(16)-sequence_length(512)-new_tokens(1000)/4/.hydra/config.yaml +++ /dev/null @@ -1,71 +0,0 @@ -backend: - name: pytorch - version: ${pytorch_version:} - _target_: optimum_benchmark.backends.pytorch.backend.PyTorchBackend - seed: 42 - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: true - continous_isolation_check: true - delete_cache: false - no_weights: false - device_map: null - torch_dtype: float16 - disable_grad: ${is_inference:${benchmark.name}} - eval_mode: ${is_inference:${benchmark.name}} - amp_autocast: false - amp_dtype: null - torch_compile: false - torch_compile_config: {} - bettertransformer: false - quantization_scheme: null - quantization_config: {} - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference 
- _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 16 - sequence_length: 512 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: 1000 - can_diffuse: ${can_diffuse:${task}} - can_generate: ${can_generate:${task}} - forward_kwargs: {} - generate_kwargs: {} -experiment_name: gptq-batch_size(${benchmark.input_shapes.batch_size})-sequence_length(${benchmark.input_shapes.sequence_length})-new_tokens(${benchmark.new_tokens}) -model: TheBloke/Mistral-7B-v0.1-GPTQ -device: cuda -task: ${infer_task:${model}} -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.2 - optimum_commit: null - transformers_version: 4.34.1 - transformers_commit: null - accelerate_version: 0.24.0 - accelerate_commit: null - diffusers_version: null - diffusers_commit: null - python_version: 3.10.12 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB diff --git a/examples/running-mistrals/experiments/A100-80GB/gptq-batch_size(16)-sequence_length(512)-new_tokens(1000)/4/.hydra/hydra.yaml b/examples/running-mistrals/experiments/A100-80GB/gptq-batch_size(16)-sequence_length(512)-new_tokens(1000)/4/.hydra/hydra.yaml deleted file mode 100644 index 83e7db7c..00000000 --- a/examples/running-mistrals/experiments/A100-80GB/gptq-batch_size(16)-sequence_length(512)-new_tokens(1000)/4/.hydra/hydra.yaml +++ /dev/null @@ -1,174 +0,0 @@ -hydra: - run: - dir: experiments/${experiment_name} - sweep: - dir: experiments/${experiment_name} - subdir: ${hydra.job.num} - launcher: - _target_: hydra._internal.core_plugins.basic_launcher.BasicLauncher - sweeper: - _target_: hydra._internal.core_plugins.basic_sweeper.BasicSweeper - max_batch_size: null - params: - benchmark.input_shapes.batch_size: 1,2,4,8,16 - help: - app_name: ${hydra.job.name} - header: '${hydra.help.app_name} is powered by Hydra. - - ' - footer: 'Powered by Hydra (https://hydra.cc) - - Use --hydra-help to view Hydra specific help - - ' - template: '${hydra.help.header} - - == Configuration groups == - - Compose your configuration from those groups (group=option) - - - $APP_CONFIG_GROUPS - - - == Config == - - Override anything in the config (foo.bar=value) - - - $CONFIG - - - ${hydra.help.footer} - - ' - hydra_help: - template: 'Hydra (${hydra.runtime.version}) - - See https://hydra.cc for more info. - - - == Flags == - - $FLAGS_HELP - - - == Configuration groups == - - Compose your configuration from those groups (For example, append hydra/job_logging=disabled - to command line) - - - $HYDRA_CONFIG_GROUPS - - - Use ''--cfg hydra'' to Show the Hydra config. - - ' - hydra_help: ??? 
- hydra_logging: - version: 1 - formatters: - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s' - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - root: - level: INFO - handlers: - - console - disable_existing_loggers: false - job_logging: - version: 1 - formatters: - simple: - format: '[%(asctime)s][%(name)s][%(levelname)s] - %(message)s' - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s] - - %(message)s' - log_colors: - DEBUG: purple - INFO: green - WARNING: yellow - ERROR: red - CRITICAL: red - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - file: - class: logging.FileHandler - formatter: simple - filename: ${hydra.job.name}.log - root: - level: INFO - handlers: - - console - - file - disable_existing_loggers: false - env: {} - mode: MULTIRUN - searchpath: [] - callbacks: {} - output_subdir: .hydra - overrides: - hydra: - - hydra.mode=MULTIRUN - task: - - benchmark.input_shapes.batch_size=16 - job: - name: experiment - chdir: true - override_dirname: benchmark.input_shapes.batch_size=16 - id: '4' - num: 4 - config_name: gptq - env_set: - CUDA_VISIBLE_DEVICES: '0' - CUDA_DEVICE_ORDER: PCI_BUS_ID - env_copy: [] - config: - override_dirname: - kv_sep: '=' - item_sep: ',' - exclude_keys: [] - runtime: - version: 1.3.2 - version_base: '1.3' - cwd: /workspace/optimum-benchmark/examples/running-mistral - config_sources: - - path: hydra.conf - schema: pkg - provider: hydra - - path: optimum_benchmark - schema: pkg - provider: main - - path: hydra_plugins.hydra_colorlog.conf - schema: pkg - provider: hydra-colorlog - - path: /workspace/optimum-benchmark/examples/running-mistral/configs - schema: file - provider: command-line - - path: '' - schema: structured - provider: schema - output_dir: /workspace/optimum-benchmark/examples/running-mistral/experiments/gptq-batch_size(16)-sequence_length(512)-new_tokens(1000)/4 - choices: - benchmark: inference - backend: pytorch - hydra/env: default - hydra/callbacks: null - hydra/job_logging: colorlog - hydra/hydra_logging: colorlog - hydra/hydra_help: default - hydra/help: default - hydra/sweeper: basic - hydra/launcher: basic - hydra/output: default - verbose: false diff --git a/examples/running-mistrals/experiments/A100-80GB/gptq-batch_size(16)-sequence_length(512)-new_tokens(1000)/4/.hydra/overrides.yaml b/examples/running-mistrals/experiments/A100-80GB/gptq-batch_size(16)-sequence_length(512)-new_tokens(1000)/4/.hydra/overrides.yaml deleted file mode 100644 index fdb7f01d..00000000 --- a/examples/running-mistrals/experiments/A100-80GB/gptq-batch_size(16)-sequence_length(512)-new_tokens(1000)/4/.hydra/overrides.yaml +++ /dev/null @@ -1 +0,0 @@ -- benchmark.input_shapes.batch_size=16 diff --git a/examples/running-mistrals/experiments/A100-80GB/gptq-batch_size(16)-sequence_length(512)-new_tokens(1000)/4/hydra_config.yaml b/examples/running-mistrals/experiments/A100-80GB/gptq-batch_size(16)-sequence_length(512)-new_tokens(1000)/4/hydra_config.yaml deleted file mode 100644 index c6285181..00000000 --- a/examples/running-mistrals/experiments/A100-80GB/gptq-batch_size(16)-sequence_length(512)-new_tokens(1000)/4/hydra_config.yaml +++ /dev/null @@ -1,77 +0,0 @@ -backend: - name: pytorch - version: 2.1.0+cu118 - _target_: optimum_benchmark.backends.pytorch.backend.PyTorchBackend - seed: 
42 - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: true - continous_isolation_check: true - delete_cache: false - no_weights: false - device_map: null - torch_dtype: float16 - disable_grad: true - eval_mode: true - amp_autocast: false - amp_dtype: null - torch_compile: false - torch_compile_config: {} - bettertransformer: false - quantization_scheme: null - quantization_config: {} - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 16 - sequence_length: 512 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: 1000 - can_diffuse: false - can_generate: true - forward_kwargs: {} - generate_kwargs: - max_new_tokens: 1000 - min_new_tokens: 1000 - do_sample: false - use_cache: true - pad_token_id: 0 - num_beams: 1 -experiment_name: gptq-batch_size(16)-sequence_length(512)-new_tokens(1000) -model: TheBloke/Mistral-7B-v0.1-GPTQ -device: cuda -task: text-generation -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.2 - optimum_commit: null - transformers_version: 4.34.1 - transformers_commit: null - accelerate_version: 0.24.0 - accelerate_commit: null - diffusers_version: null - diffusers_commit: null - python_version: 3.10.12 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB diff --git a/examples/running-mistrals/experiments/A100-80GB/gptq-batch_size(16)-sequence_length(512)-new_tokens(1000)/4/inference_results.csv b/examples/running-mistrals/experiments/A100-80GB/gptq-batch_size(16)-sequence_length(512)-new_tokens(1000)/4/inference_results.csv deleted file mode 100644 index ebe0ae57..00000000 --- a/examples/running-mistrals/experiments/A100-80GB/gptq-batch_size(16)-sequence_length(512)-new_tokens(1000)/4/inference_results.csv +++ /dev/null @@ -1,2 +0,0 @@ -forward.latency(s),forward.throughput(samples/s),forward.peak_memory(MB),generate.latency(s),generate.throughput(tokens/s),generate.peak_memory(MB) -0.963,16.6,13314,66.1,242.0,15235 diff --git a/examples/running-mistrals/experiments/A100-80GB/gptq-batch_size(2)-sequence_length(512)-new_tokens(1000)/1/.hydra/config.yaml b/examples/running-mistrals/experiments/A100-80GB/gptq-batch_size(2)-sequence_length(512)-new_tokens(1000)/1/.hydra/config.yaml deleted file mode 100644 index c431c470..00000000 --- a/examples/running-mistrals/experiments/A100-80GB/gptq-batch_size(2)-sequence_length(512)-new_tokens(1000)/1/.hydra/config.yaml +++ /dev/null @@ -1,71 +0,0 @@ -backend: - name: pytorch - version: ${pytorch_version:} - _target_: optimum_benchmark.backends.pytorch.backend.PyTorchBackend - seed: 42 - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: true - continous_isolation_check: true - delete_cache: false - no_weights: false - device_map: null - torch_dtype: float16 - disable_grad: ${is_inference:${benchmark.name}} - eval_mode: ${is_inference:${benchmark.name}} - amp_autocast: false - amp_dtype: null - torch_compile: false - torch_compile_config: {} - bettertransformer: false - quantization_scheme: null - quantization_config: {} - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: 
inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 2 - sequence_length: 512 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: 1000 - can_diffuse: ${can_diffuse:${task}} - can_generate: ${can_generate:${task}} - forward_kwargs: {} - generate_kwargs: {} -experiment_name: gptq-batch_size(${benchmark.input_shapes.batch_size})-sequence_length(${benchmark.input_shapes.sequence_length})-new_tokens(${benchmark.new_tokens}) -model: TheBloke/Mistral-7B-v0.1-GPTQ -device: cuda -task: ${infer_task:${model}} -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.2 - optimum_commit: null - transformers_version: 4.34.1 - transformers_commit: null - accelerate_version: 0.24.0 - accelerate_commit: null - diffusers_version: null - diffusers_commit: null - python_version: 3.10.12 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB diff --git a/examples/running-mistrals/experiments/A100-80GB/gptq-batch_size(2)-sequence_length(512)-new_tokens(1000)/1/.hydra/hydra.yaml b/examples/running-mistrals/experiments/A100-80GB/gptq-batch_size(2)-sequence_length(512)-new_tokens(1000)/1/.hydra/hydra.yaml deleted file mode 100644 index ab240c24..00000000 --- a/examples/running-mistrals/experiments/A100-80GB/gptq-batch_size(2)-sequence_length(512)-new_tokens(1000)/1/.hydra/hydra.yaml +++ /dev/null @@ -1,174 +0,0 @@ -hydra: - run: - dir: experiments/${experiment_name} - sweep: - dir: experiments/${experiment_name} - subdir: ${hydra.job.num} - launcher: - _target_: hydra._internal.core_plugins.basic_launcher.BasicLauncher - sweeper: - _target_: hydra._internal.core_plugins.basic_sweeper.BasicSweeper - max_batch_size: null - params: - benchmark.input_shapes.batch_size: 1,2,4,8,16 - help: - app_name: ${hydra.job.name} - header: '${hydra.help.app_name} is powered by Hydra. - - ' - footer: 'Powered by Hydra (https://hydra.cc) - - Use --hydra-help to view Hydra specific help - - ' - template: '${hydra.help.header} - - == Configuration groups == - - Compose your configuration from those groups (group=option) - - - $APP_CONFIG_GROUPS - - - == Config == - - Override anything in the config (foo.bar=value) - - - $CONFIG - - - ${hydra.help.footer} - - ' - hydra_help: - template: 'Hydra (${hydra.runtime.version}) - - See https://hydra.cc for more info. - - - == Flags == - - $FLAGS_HELP - - - == Configuration groups == - - Compose your configuration from those groups (For example, append hydra/job_logging=disabled - to command line) - - - $HYDRA_CONFIG_GROUPS - - - Use ''--cfg hydra'' to Show the Hydra config. - - ' - hydra_help: ??? 
- hydra_logging: - version: 1 - formatters: - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s' - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - root: - level: INFO - handlers: - - console - disable_existing_loggers: false - job_logging: - version: 1 - formatters: - simple: - format: '[%(asctime)s][%(name)s][%(levelname)s] - %(message)s' - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s] - - %(message)s' - log_colors: - DEBUG: purple - INFO: green - WARNING: yellow - ERROR: red - CRITICAL: red - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - file: - class: logging.FileHandler - formatter: simple - filename: ${hydra.job.name}.log - root: - level: INFO - handlers: - - console - - file - disable_existing_loggers: false - env: {} - mode: MULTIRUN - searchpath: [] - callbacks: {} - output_subdir: .hydra - overrides: - hydra: - - hydra.mode=MULTIRUN - task: - - benchmark.input_shapes.batch_size=2 - job: - name: experiment - chdir: true - override_dirname: benchmark.input_shapes.batch_size=2 - id: '1' - num: 1 - config_name: gptq - env_set: - CUDA_VISIBLE_DEVICES: '0' - CUDA_DEVICE_ORDER: PCI_BUS_ID - env_copy: [] - config: - override_dirname: - kv_sep: '=' - item_sep: ',' - exclude_keys: [] - runtime: - version: 1.3.2 - version_base: '1.3' - cwd: /workspace/optimum-benchmark/examples/running-mistral - config_sources: - - path: hydra.conf - schema: pkg - provider: hydra - - path: optimum_benchmark - schema: pkg - provider: main - - path: hydra_plugins.hydra_colorlog.conf - schema: pkg - provider: hydra-colorlog - - path: /workspace/optimum-benchmark/examples/running-mistral/configs - schema: file - provider: command-line - - path: '' - schema: structured - provider: schema - output_dir: /workspace/optimum-benchmark/examples/running-mistral/experiments/gptq-batch_size(2)-sequence_length(512)-new_tokens(1000)/1 - choices: - benchmark: inference - backend: pytorch - hydra/env: default - hydra/callbacks: null - hydra/job_logging: colorlog - hydra/hydra_logging: colorlog - hydra/hydra_help: default - hydra/help: default - hydra/sweeper: basic - hydra/launcher: basic - hydra/output: default - verbose: false diff --git a/examples/running-mistrals/experiments/A100-80GB/gptq-batch_size(2)-sequence_length(512)-new_tokens(1000)/1/.hydra/overrides.yaml b/examples/running-mistrals/experiments/A100-80GB/gptq-batch_size(2)-sequence_length(512)-new_tokens(1000)/1/.hydra/overrides.yaml deleted file mode 100644 index 8211b85f..00000000 --- a/examples/running-mistrals/experiments/A100-80GB/gptq-batch_size(2)-sequence_length(512)-new_tokens(1000)/1/.hydra/overrides.yaml +++ /dev/null @@ -1 +0,0 @@ -- benchmark.input_shapes.batch_size=2 diff --git a/examples/running-mistrals/experiments/A100-80GB/gptq-batch_size(2)-sequence_length(512)-new_tokens(1000)/1/hydra_config.yaml b/examples/running-mistrals/experiments/A100-80GB/gptq-batch_size(2)-sequence_length(512)-new_tokens(1000)/1/hydra_config.yaml deleted file mode 100644 index 3cb39040..00000000 --- a/examples/running-mistrals/experiments/A100-80GB/gptq-batch_size(2)-sequence_length(512)-new_tokens(1000)/1/hydra_config.yaml +++ /dev/null @@ -1,77 +0,0 @@ -backend: - name: pytorch - version: 2.1.0+cu118 - _target_: optimum_benchmark.backends.pytorch.backend.PyTorchBackend - seed: 42 - 
inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: true - continous_isolation_check: true - delete_cache: false - no_weights: false - device_map: null - torch_dtype: float16 - disable_grad: true - eval_mode: true - amp_autocast: false - amp_dtype: null - torch_compile: false - torch_compile_config: {} - bettertransformer: false - quantization_scheme: null - quantization_config: {} - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 2 - sequence_length: 512 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: 1000 - can_diffuse: false - can_generate: true - forward_kwargs: {} - generate_kwargs: - max_new_tokens: 1000 - min_new_tokens: 1000 - do_sample: false - use_cache: true - pad_token_id: 0 - num_beams: 1 -experiment_name: gptq-batch_size(2)-sequence_length(512)-new_tokens(1000) -model: TheBloke/Mistral-7B-v0.1-GPTQ -device: cuda -task: text-generation -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.2 - optimum_commit: null - transformers_version: 4.34.1 - transformers_commit: null - accelerate_version: 0.24.0 - accelerate_commit: null - diffusers_version: null - diffusers_commit: null - python_version: 3.10.12 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB diff --git a/examples/running-mistrals/experiments/A100-80GB/gptq-batch_size(2)-sequence_length(512)-new_tokens(1000)/1/inference_results.csv b/examples/running-mistrals/experiments/A100-80GB/gptq-batch_size(2)-sequence_length(512)-new_tokens(1000)/1/inference_results.csv deleted file mode 100644 index e8493eb9..00000000 --- a/examples/running-mistrals/experiments/A100-80GB/gptq-batch_size(2)-sequence_length(512)-new_tokens(1000)/1/inference_results.csv +++ /dev/null @@ -1,2 +0,0 @@ -forward.latency(s),forward.throughput(samples/s),forward.peak_memory(MB),generate.latency(s),generate.throughput(tokens/s),generate.peak_memory(MB) -0.142,14.1,7918,31.8,62.9,8279 diff --git a/examples/running-mistrals/experiments/A100-80GB/gptq-batch_size(4)-sequence_length(512)-new_tokens(1000)/2/.hydra/config.yaml b/examples/running-mistrals/experiments/A100-80GB/gptq-batch_size(4)-sequence_length(512)-new_tokens(1000)/2/.hydra/config.yaml deleted file mode 100644 index e2d6f89f..00000000 --- a/examples/running-mistrals/experiments/A100-80GB/gptq-batch_size(4)-sequence_length(512)-new_tokens(1000)/2/.hydra/config.yaml +++ /dev/null @@ -1,71 +0,0 @@ -backend: - name: pytorch - version: ${pytorch_version:} - _target_: optimum_benchmark.backends.pytorch.backend.PyTorchBackend - seed: 42 - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: true - continous_isolation_check: true - delete_cache: false - no_weights: false - device_map: null - torch_dtype: float16 - disable_grad: ${is_inference:${benchmark.name}} - eval_mode: ${is_inference:${benchmark.name}} - amp_autocast: false - amp_dtype: null - torch_compile: false - torch_compile_config: {} - bettertransformer: false - quantization_scheme: null - quantization_config: {} - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - 
_target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 4 - sequence_length: 512 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: 1000 - can_diffuse: ${can_diffuse:${task}} - can_generate: ${can_generate:${task}} - forward_kwargs: {} - generate_kwargs: {} -experiment_name: gptq-batch_size(${benchmark.input_shapes.batch_size})-sequence_length(${benchmark.input_shapes.sequence_length})-new_tokens(${benchmark.new_tokens}) -model: TheBloke/Mistral-7B-v0.1-GPTQ -device: cuda -task: ${infer_task:${model}} -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.2 - optimum_commit: null - transformers_version: 4.34.1 - transformers_commit: null - accelerate_version: 0.24.0 - accelerate_commit: null - diffusers_version: null - diffusers_commit: null - python_version: 3.10.12 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB diff --git a/examples/running-mistrals/experiments/A100-80GB/gptq-batch_size(4)-sequence_length(512)-new_tokens(1000)/2/.hydra/hydra.yaml b/examples/running-mistrals/experiments/A100-80GB/gptq-batch_size(4)-sequence_length(512)-new_tokens(1000)/2/.hydra/hydra.yaml deleted file mode 100644 index 4506944a..00000000 --- a/examples/running-mistrals/experiments/A100-80GB/gptq-batch_size(4)-sequence_length(512)-new_tokens(1000)/2/.hydra/hydra.yaml +++ /dev/null @@ -1,174 +0,0 @@ -hydra: - run: - dir: experiments/${experiment_name} - sweep: - dir: experiments/${experiment_name} - subdir: ${hydra.job.num} - launcher: - _target_: hydra._internal.core_plugins.basic_launcher.BasicLauncher - sweeper: - _target_: hydra._internal.core_plugins.basic_sweeper.BasicSweeper - max_batch_size: null - params: - benchmark.input_shapes.batch_size: 1,2,4,8,16 - help: - app_name: ${hydra.job.name} - header: '${hydra.help.app_name} is powered by Hydra. - - ' - footer: 'Powered by Hydra (https://hydra.cc) - - Use --hydra-help to view Hydra specific help - - ' - template: '${hydra.help.header} - - == Configuration groups == - - Compose your configuration from those groups (group=option) - - - $APP_CONFIG_GROUPS - - - == Config == - - Override anything in the config (foo.bar=value) - - - $CONFIG - - - ${hydra.help.footer} - - ' - hydra_help: - template: 'Hydra (${hydra.runtime.version}) - - See https://hydra.cc for more info. - - - == Flags == - - $FLAGS_HELP - - - == Configuration groups == - - Compose your configuration from those groups (For example, append hydra/job_logging=disabled - to command line) - - - $HYDRA_CONFIG_GROUPS - - - Use ''--cfg hydra'' to Show the Hydra config. - - ' - hydra_help: ??? 
- hydra_logging: - version: 1 - formatters: - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s' - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - root: - level: INFO - handlers: - - console - disable_existing_loggers: false - job_logging: - version: 1 - formatters: - simple: - format: '[%(asctime)s][%(name)s][%(levelname)s] - %(message)s' - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s] - - %(message)s' - log_colors: - DEBUG: purple - INFO: green - WARNING: yellow - ERROR: red - CRITICAL: red - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - file: - class: logging.FileHandler - formatter: simple - filename: ${hydra.job.name}.log - root: - level: INFO - handlers: - - console - - file - disable_existing_loggers: false - env: {} - mode: MULTIRUN - searchpath: [] - callbacks: {} - output_subdir: .hydra - overrides: - hydra: - - hydra.mode=MULTIRUN - task: - - benchmark.input_shapes.batch_size=4 - job: - name: experiment - chdir: true - override_dirname: benchmark.input_shapes.batch_size=4 - id: '2' - num: 2 - config_name: gptq - env_set: - CUDA_VISIBLE_DEVICES: '0' - CUDA_DEVICE_ORDER: PCI_BUS_ID - env_copy: [] - config: - override_dirname: - kv_sep: '=' - item_sep: ',' - exclude_keys: [] - runtime: - version: 1.3.2 - version_base: '1.3' - cwd: /workspace/optimum-benchmark/examples/running-mistral - config_sources: - - path: hydra.conf - schema: pkg - provider: hydra - - path: optimum_benchmark - schema: pkg - provider: main - - path: hydra_plugins.hydra_colorlog.conf - schema: pkg - provider: hydra-colorlog - - path: /workspace/optimum-benchmark/examples/running-mistral/configs - schema: file - provider: command-line - - path: '' - schema: structured - provider: schema - output_dir: /workspace/optimum-benchmark/examples/running-mistral/experiments/gptq-batch_size(4)-sequence_length(512)-new_tokens(1000)/2 - choices: - benchmark: inference - backend: pytorch - hydra/env: default - hydra/callbacks: null - hydra/job_logging: colorlog - hydra/hydra_logging: colorlog - hydra/hydra_help: default - hydra/help: default - hydra/sweeper: basic - hydra/launcher: basic - hydra/output: default - verbose: false diff --git a/examples/running-mistrals/experiments/A100-80GB/gptq-batch_size(4)-sequence_length(512)-new_tokens(1000)/2/.hydra/overrides.yaml b/examples/running-mistrals/experiments/A100-80GB/gptq-batch_size(4)-sequence_length(512)-new_tokens(1000)/2/.hydra/overrides.yaml deleted file mode 100644 index eef8c9ca..00000000 --- a/examples/running-mistrals/experiments/A100-80GB/gptq-batch_size(4)-sequence_length(512)-new_tokens(1000)/2/.hydra/overrides.yaml +++ /dev/null @@ -1 +0,0 @@ -- benchmark.input_shapes.batch_size=4 diff --git a/examples/running-mistrals/experiments/A100-80GB/gptq-batch_size(4)-sequence_length(512)-new_tokens(1000)/2/hydra_config.yaml b/examples/running-mistrals/experiments/A100-80GB/gptq-batch_size(4)-sequence_length(512)-new_tokens(1000)/2/hydra_config.yaml deleted file mode 100644 index 5ac36e22..00000000 --- a/examples/running-mistrals/experiments/A100-80GB/gptq-batch_size(4)-sequence_length(512)-new_tokens(1000)/2/hydra_config.yaml +++ /dev/null @@ -1,77 +0,0 @@ -backend: - name: pytorch - version: 2.1.0+cu118 - _target_: optimum_benchmark.backends.pytorch.backend.PyTorchBackend - seed: 42 - 
inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: true - continous_isolation_check: true - delete_cache: false - no_weights: false - device_map: null - torch_dtype: float16 - disable_grad: true - eval_mode: true - amp_autocast: false - amp_dtype: null - torch_compile: false - torch_compile_config: {} - bettertransformer: false - quantization_scheme: null - quantization_config: {} - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 4 - sequence_length: 512 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: 1000 - can_diffuse: false - can_generate: true - forward_kwargs: {} - generate_kwargs: - max_new_tokens: 1000 - min_new_tokens: 1000 - do_sample: false - use_cache: true - pad_token_id: 0 - num_beams: 1 -experiment_name: gptq-batch_size(4)-sequence_length(512)-new_tokens(1000) -model: TheBloke/Mistral-7B-v0.1-GPTQ -device: cuda -task: text-generation -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.2 - optimum_commit: null - transformers_version: 4.34.1 - transformers_commit: null - accelerate_version: 0.24.0 - accelerate_commit: null - diffusers_version: null - diffusers_commit: null - python_version: 3.10.12 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB diff --git a/examples/running-mistrals/experiments/A100-80GB/gptq-batch_size(4)-sequence_length(512)-new_tokens(1000)/2/inference_results.csv b/examples/running-mistrals/experiments/A100-80GB/gptq-batch_size(4)-sequence_length(512)-new_tokens(1000)/2/inference_results.csv deleted file mode 100644 index 3ebc2394..00000000 --- a/examples/running-mistrals/experiments/A100-80GB/gptq-batch_size(4)-sequence_length(512)-new_tokens(1000)/2/inference_results.csv +++ /dev/null @@ -1,2 +0,0 @@ -forward.latency(s),forward.throughput(samples/s),forward.peak_memory(MB),generate.latency(s),generate.throughput(tokens/s),generate.peak_memory(MB) -0.261,15.3,8747,36.0,111.0,9239 diff --git a/examples/running-mistrals/experiments/A100-80GB/gptq-batch_size(8)-sequence_length(512)-new_tokens(1000)/3/.hydra/config.yaml b/examples/running-mistrals/experiments/A100-80GB/gptq-batch_size(8)-sequence_length(512)-new_tokens(1000)/3/.hydra/config.yaml deleted file mode 100644 index b7e4ef0d..00000000 --- a/examples/running-mistrals/experiments/A100-80GB/gptq-batch_size(8)-sequence_length(512)-new_tokens(1000)/3/.hydra/config.yaml +++ /dev/null @@ -1,71 +0,0 @@ -backend: - name: pytorch - version: ${pytorch_version:} - _target_: optimum_benchmark.backends.pytorch.backend.PyTorchBackend - seed: 42 - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: true - continous_isolation_check: true - delete_cache: false - no_weights: false - device_map: null - torch_dtype: float16 - disable_grad: ${is_inference:${benchmark.name}} - eval_mode: ${is_inference:${benchmark.name}} - amp_autocast: false - amp_dtype: null - torch_compile: false - torch_compile_config: {} - bettertransformer: false - quantization_scheme: null - quantization_config: {} - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - 
_target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 8 - sequence_length: 512 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: 1000 - can_diffuse: ${can_diffuse:${task}} - can_generate: ${can_generate:${task}} - forward_kwargs: {} - generate_kwargs: {} -experiment_name: gptq-batch_size(${benchmark.input_shapes.batch_size})-sequence_length(${benchmark.input_shapes.sequence_length})-new_tokens(${benchmark.new_tokens}) -model: TheBloke/Mistral-7B-v0.1-GPTQ -device: cuda -task: ${infer_task:${model}} -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.2 - optimum_commit: null - transformers_version: 4.34.1 - transformers_commit: null - accelerate_version: 0.24.0 - accelerate_commit: null - diffusers_version: null - diffusers_commit: null - python_version: 3.10.12 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB diff --git a/examples/running-mistrals/experiments/A100-80GB/gptq-batch_size(8)-sequence_length(512)-new_tokens(1000)/3/.hydra/hydra.yaml b/examples/running-mistrals/experiments/A100-80GB/gptq-batch_size(8)-sequence_length(512)-new_tokens(1000)/3/.hydra/hydra.yaml deleted file mode 100644 index 237d4fbf..00000000 --- a/examples/running-mistrals/experiments/A100-80GB/gptq-batch_size(8)-sequence_length(512)-new_tokens(1000)/3/.hydra/hydra.yaml +++ /dev/null @@ -1,174 +0,0 @@ -hydra: - run: - dir: experiments/${experiment_name} - sweep: - dir: experiments/${experiment_name} - subdir: ${hydra.job.num} - launcher: - _target_: hydra._internal.core_plugins.basic_launcher.BasicLauncher - sweeper: - _target_: hydra._internal.core_plugins.basic_sweeper.BasicSweeper - max_batch_size: null - params: - benchmark.input_shapes.batch_size: 1,2,4,8,16 - help: - app_name: ${hydra.job.name} - header: '${hydra.help.app_name} is powered by Hydra. - - ' - footer: 'Powered by Hydra (https://hydra.cc) - - Use --hydra-help to view Hydra specific help - - ' - template: '${hydra.help.header} - - == Configuration groups == - - Compose your configuration from those groups (group=option) - - - $APP_CONFIG_GROUPS - - - == Config == - - Override anything in the config (foo.bar=value) - - - $CONFIG - - - ${hydra.help.footer} - - ' - hydra_help: - template: 'Hydra (${hydra.runtime.version}) - - See https://hydra.cc for more info. - - - == Flags == - - $FLAGS_HELP - - - == Configuration groups == - - Compose your configuration from those groups (For example, append hydra/job_logging=disabled - to command line) - - - $HYDRA_CONFIG_GROUPS - - - Use ''--cfg hydra'' to Show the Hydra config. - - ' - hydra_help: ??? 
- hydra_logging: - version: 1 - formatters: - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s' - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - root: - level: INFO - handlers: - - console - disable_existing_loggers: false - job_logging: - version: 1 - formatters: - simple: - format: '[%(asctime)s][%(name)s][%(levelname)s] - %(message)s' - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s] - - %(message)s' - log_colors: - DEBUG: purple - INFO: green - WARNING: yellow - ERROR: red - CRITICAL: red - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - file: - class: logging.FileHandler - formatter: simple - filename: ${hydra.job.name}.log - root: - level: INFO - handlers: - - console - - file - disable_existing_loggers: false - env: {} - mode: MULTIRUN - searchpath: [] - callbacks: {} - output_subdir: .hydra - overrides: - hydra: - - hydra.mode=MULTIRUN - task: - - benchmark.input_shapes.batch_size=8 - job: - name: experiment - chdir: true - override_dirname: benchmark.input_shapes.batch_size=8 - id: '3' - num: 3 - config_name: gptq - env_set: - CUDA_VISIBLE_DEVICES: '0' - CUDA_DEVICE_ORDER: PCI_BUS_ID - env_copy: [] - config: - override_dirname: - kv_sep: '=' - item_sep: ',' - exclude_keys: [] - runtime: - version: 1.3.2 - version_base: '1.3' - cwd: /workspace/optimum-benchmark/examples/running-mistral - config_sources: - - path: hydra.conf - schema: pkg - provider: hydra - - path: optimum_benchmark - schema: pkg - provider: main - - path: hydra_plugins.hydra_colorlog.conf - schema: pkg - provider: hydra-colorlog - - path: /workspace/optimum-benchmark/examples/running-mistral/configs - schema: file - provider: command-line - - path: '' - schema: structured - provider: schema - output_dir: /workspace/optimum-benchmark/examples/running-mistral/experiments/gptq-batch_size(8)-sequence_length(512)-new_tokens(1000)/3 - choices: - benchmark: inference - backend: pytorch - hydra/env: default - hydra/callbacks: null - hydra/job_logging: colorlog - hydra/hydra_logging: colorlog - hydra/hydra_help: default - hydra/help: default - hydra/sweeper: basic - hydra/launcher: basic - hydra/output: default - verbose: false diff --git a/examples/running-mistrals/experiments/A100-80GB/gptq-batch_size(8)-sequence_length(512)-new_tokens(1000)/3/.hydra/overrides.yaml b/examples/running-mistrals/experiments/A100-80GB/gptq-batch_size(8)-sequence_length(512)-new_tokens(1000)/3/.hydra/overrides.yaml deleted file mode 100644 index 8cd14374..00000000 --- a/examples/running-mistrals/experiments/A100-80GB/gptq-batch_size(8)-sequence_length(512)-new_tokens(1000)/3/.hydra/overrides.yaml +++ /dev/null @@ -1 +0,0 @@ -- benchmark.input_shapes.batch_size=8 diff --git a/examples/running-mistrals/experiments/A100-80GB/gptq-batch_size(8)-sequence_length(512)-new_tokens(1000)/3/hydra_config.yaml b/examples/running-mistrals/experiments/A100-80GB/gptq-batch_size(8)-sequence_length(512)-new_tokens(1000)/3/hydra_config.yaml deleted file mode 100644 index a8d78a9e..00000000 --- a/examples/running-mistrals/experiments/A100-80GB/gptq-batch_size(8)-sequence_length(512)-new_tokens(1000)/3/hydra_config.yaml +++ /dev/null @@ -1,77 +0,0 @@ -backend: - name: pytorch - version: 2.1.0+cu118 - _target_: optimum_benchmark.backends.pytorch.backend.PyTorchBackend - seed: 42 - 
inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: true - continous_isolation_check: true - delete_cache: false - no_weights: false - device_map: null - torch_dtype: float16 - disable_grad: true - eval_mode: true - amp_autocast: false - amp_dtype: null - torch_compile: false - torch_compile_config: {} - bettertransformer: false - quantization_scheme: null - quantization_config: {} - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 8 - sequence_length: 512 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: 1000 - can_diffuse: false - can_generate: true - forward_kwargs: {} - generate_kwargs: - max_new_tokens: 1000 - min_new_tokens: 1000 - do_sample: false - use_cache: true - pad_token_id: 0 - num_beams: 1 -experiment_name: gptq-batch_size(8)-sequence_length(512)-new_tokens(1000) -model: TheBloke/Mistral-7B-v0.1-GPTQ -device: cuda -task: text-generation -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.2 - optimum_commit: null - transformers_version: 4.34.1 - transformers_commit: null - accelerate_version: 0.24.0 - accelerate_commit: null - diffusers_version: null - diffusers_commit: null - python_version: 3.10.12 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB - - NVIDIA A100-SXM4-80GB diff --git a/examples/running-mistrals/experiments/A100-80GB/gptq-batch_size(8)-sequence_length(512)-new_tokens(1000)/3/inference_results.csv b/examples/running-mistrals/experiments/A100-80GB/gptq-batch_size(8)-sequence_length(512)-new_tokens(1000)/3/inference_results.csv deleted file mode 100644 index 4c6fecd9..00000000 --- a/examples/running-mistrals/experiments/A100-80GB/gptq-batch_size(8)-sequence_length(512)-new_tokens(1000)/3/inference_results.csv +++ /dev/null @@ -1,2 +0,0 @@ -forward.latency(s),forward.throughput(samples/s),forward.peak_memory(MB),generate.latency(s),generate.throughput(tokens/s),generate.peak_memory(MB) -0.486,16.5,10303,48.9,164.0,11280 diff --git a/examples/running-mistrals/report.py b/examples/running-mistrals/report.py deleted file mode 100644 index 559ea695..00000000 --- a/examples/running-mistrals/report.py +++ /dev/null @@ -1,229 +0,0 @@ -from argparse import ArgumentParser -from pathlib import Path - -import matplotlib.pyplot as plt -import pandas as pd -from flatten_dict import flatten -from omegaconf import OmegaConf -from pandas import DataFrame -from rich.console import Console -from rich.table import Table -from rich.terminal_theme import MONOKAI - - -def gather_inference_report(root_folder: Path) -> DataFrame: - # key is path to inference file as string, value is dataframe - inference_dfs = { - f.parent.absolute().as_posix(): pd.read_csv(f) for f in root_folder.glob("**/inference_results.csv") - } - - # key is path to config file as string, value is flattened dict - config_dfs = { - f.parent.absolute() - .as_posix(): pd.DataFrame.from_dict(flatten(OmegaConf.load(f), reducer="dot"), orient="index") - .T - for f in root_folder.glob("**/hydra_config.yaml") - if f.parent.absolute().as_posix() in inference_dfs.keys() - } - - if len(inference_dfs) == 0 or len(config_dfs) == 0: - raise ValueError(f"No 
results found in {root_folder}") - - # Merge inference and config dataframes - inference_reports = [ - config_dfs[name].merge(inference_dfs[name], left_index=True, right_index=True) for name in inference_dfs.keys() - ] - - # Concatenate all reports - inference_report = pd.concat(inference_reports, axis=0, ignore_index=True) - inference_report.set_index("experiment_name", inplace=True) - return inference_report - - -def style_element(element, style=""): - if style: - return f"[{style}]{element}[/{style}]" - else: - return element - - -def format_element(element, style=""): - if isinstance(element, float): - if element != element: # nan - formated_element = "" - elif abs(element) >= 1: - formated_element = f"{element:.2f}" - elif abs(element) > 1e-6: - formated_element = f"{element:.2e}" - else: - formated_element = f"{element}" - elif element is None: - formated_element = "" - elif isinstance(element, bool): - if element: - formated_element = style_element("✔", style="green") - else: - formated_element = style_element("✘", style="red") - else: - formated_element = str(element) - - return style_element(formated_element, style=style) - - -def format_row(row, style=""): - formated_row = [] - for element in row: - formated_row.append(format_element(element, style=style)) - return formated_row - - -def get_short_report(inference_report): - short_columns = { - "benchmark.input_shapes.batch_size": "Batch Size", - "forward.latency(s)": "Forward Latency (s)", - "forward.throughput(samples/s)": "Forward Throughput (samples/s)", - "forward.peak_memory(MB)": "Forward Peak Memory (MB)", - "generate.throughput(tokens/s)": "Generate Throughput (tokens/s)", - "generate.peak_memory(MB)": "Generate Peak Memory (MB)", - } - short_report = inference_report[list(short_columns.keys())].rename(columns=short_columns) - short_report["Quantization Scheme"] = inference_report.index.str.split("-").str[0] - - return short_report - - -def get_rich_table(short_report): - # create rich table - rich_table = Table(show_header=True, show_lines=True) - # we add a column for the index - rich_table.add_column("Experiment Name", justify="left", header_style="") - # we populate the table with values - for column in short_report.columns: - rich_table.add_column(column, justify="right", header_style="bold") - # we add rows - for index, row in short_report.iterrows(): - rich_table.add_row(index, *format_row(row.values, style="")) - - return rich_table - - -def get_throughput_plot(short_report): - # for each quantization scheme we plot the throughput vs batch size - fig1, ax1 = plt.subplots() - fig2, ax2 = plt.subplots() - fig3, ax3 = plt.subplots() - fig4, ax4 = plt.subplots() - - short_report["Quantization Scheme"].fillna("unquantized", inplace=True) - short_report["Quantization Scheme"].replace("bnb", "BnB", inplace=True) - short_report["Quantization Scheme"].replace("awq", "AWQ", inplace=True) - short_report["Quantization Scheme"].replace("gptq", "GPTQ", inplace=True) - - for quantization_scheme in short_report["Quantization Scheme"].unique(): - mask = short_report["Quantization Scheme"] == quantization_scheme - - forward_latency = short_report[mask][["Batch Size", "Forward Latency (s)"]].sort_values(by="Batch Size") - generate_throughput = short_report[mask][["Batch Size", "Generate Throughput (tokens/s)"]].sort_values( - by="Batch Size" - ) - forward_memory = short_report[mask][["Batch Size", "Forward Peak Memory (MB)"]].sort_values(by="Batch Size") - generate_memory = short_report[mask][["Batch Size", "Generate Peak Memory 
(MB)"]].sort_values(by="Batch Size") - ax1.plot( - forward_latency["Batch Size"], - forward_latency["Forward Latency (s)"], - label=quantization_scheme, - marker="o", - ) - ax2.plot( - generate_throughput["Batch Size"], - generate_throughput["Generate Throughput (tokens/s)"], - label=quantization_scheme, - marker="o", - ) - ax3.plot( - forward_memory["Batch Size"], - forward_memory["Forward Peak Memory (MB)"], - label=quantization_scheme, - marker="*", - ) - ax4.plot( - generate_memory["Batch Size"], - generate_memory["Generate Peak Memory (MB)"], - label=quantization_scheme, - marker="*", - ) - - ax1.set_xlabel("Batch Size") - ax1.set_ylabel("Forward Latency (s)") - ax1.set_title("Forward Latency per Batch Size") - - ax2.set_xlabel("Batch Size") - ax2.set_ylabel("Generate Throughput (tokens/s)") - ax2.set_title("Generate Throughput per Batch Size") - - ax3.set_xlabel("Batch Size") - ax3.set_ylabel("Forward Peak Memory (MB)") - ax3.set_title("Forward Peak Memory per Batch Size") - - ax4.set_xlabel("Batch Size") - ax4.set_ylabel("Generate Peak Memory (MB)") - ax4.set_title("Generate Peak Memory per Batch Size") - - ax1.legend() - ax2.legend() - ax3.legend() - ax4.legend() - - return fig1, fig2, fig3, fig4 - - -def generate_report(): - parser = ArgumentParser() - parser.add_argument( - "--experiments", - "-e", - type=Path, - required=True, - help="The folder containing the results of experiments.", - ) - parser.add_argument( - "--report-name", - "-r", - type=str, - required=False, - help="The name of the report.", - ) - - args = parser.parse_args() - experiments_folders = args.experiments - - if args.report_name: - report_folder = f"artifacts/{args.report_name}" - else: - report_folder = "artifacts" - Path(report_folder).mkdir(parents=True, exist_ok=True) - - # gather experiments results - inference_report = gather_inference_report(experiments_folders) - inference_report.sort_values(by="forward.throughput(samples/s)", ascending=False, inplace=True) - inference_report.to_csv(f"{report_folder}/full_report.csv") - - short_report = get_short_report(inference_report) - short_report.to_csv(f"{report_folder}/short_report.csv") - - forward_throughput_plot, generate_throughput_plot, forward_memory_plot, generate_memory_plot = get_throughput_plot( - short_report - ) - forward_throughput_plot.savefig(f"{report_folder}/forward_latency_plot.png") - generate_throughput_plot.savefig(f"{report_folder}/generate_throughput_plot.png") - forward_memory_plot.savefig(f"{report_folder}/forward_memory_plot.png") - generate_memory_plot.savefig(f"{report_folder}/generate_memory_plot.png") - - rich_table = get_rich_table(short_report) - console = Console(record=True) - console.print(rich_table, justify="center") - console.save_svg(f"{report_folder}/rich_table.svg", theme=MONOKAI, title="Inference Report") - - -if __name__ == "__main__": - generate_report() diff --git a/examples/running-vicunas/README.md b/examples/running-vicunas/README.md deleted file mode 100644 index 755172b8..00000000 --- a/examples/running-vicunas/README.md +++ /dev/null @@ -1,61 +0,0 @@ -# Optimum-Benchmark x Vicuna x BnB & GPTQ & (AWQ+GEMM vs AWQ+GEMV) - -A set of benchmarks on quantizing Vicuna. 
- -## Setup - -You will need to install these quantization packages: - -```bash -pip install autoawq -pip install auto-gptq -pip install bitsandbytes -``` - -## Running - -Then run these commands from this directory: - -```bash -optimum-benchmark --config-dir configs/ --config-name _base_ --multirun -optimum-benchmark --config-dir configs/ --config-name bnb --multirun -optimum-benchmark --config-dir configs/ --config-name gptq --multirun -optimum-benchmark --config-dir configs/ --config-name awq+gemm --multirun -optimum-benchmark --config-dir configs/ --config-name awq+gemv --multirun -``` - -This will create a folder called `experiments` with the results of the benchmarks with an inference `batch_size` ranging from 1 to 16 and an input `sequence_length` (prompt size) of 128. - -## Reporting - -To create a report run: - -```bash -python report.py -e experiments -``` - -Which will create some quick reporting artifacts like a `full_report.csv`, `short_report.csv`, some plots and a `rich_table.svg`. - -## Results - -### On A100-80GB - -

-(result figures embedded here in the original README: latency_plot, throughput_plot, memory_plot, memory_plot, rich_table)
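
The sketch below is not part of the original example; it is a minimal illustration of how the reporting artifacts described above could be reloaded for a quick sanity check. It assumes `python report.py -e experiments` has already been run from this directory, so that `artifacts/short_report.csv` exists; the path and column names are taken from `report.py` above, while the output filename is hypothetical.

```python
# Minimal sketch: reload the short report written by report.py and re-plot
# generate throughput per batch size, one line per quantization scheme.
# Assumes `python report.py -e experiments` was already run from this directory.
import matplotlib.pyplot as plt
import pandas as pd

# experiment_name is the index column written by report.py
short_report = pd.read_csv("artifacts/short_report.csv", index_col=0)

fig, ax = plt.subplots()
for scheme, group in short_report.groupby("Quantization Scheme"):
    group = group.sort_values(by="Batch Size")
    ax.plot(
        group["Batch Size"],
        group["Generate Throughput (tokens/s)"],
        marker="o",
        label=str(scheme),
    )

ax.set_xlabel("Batch Size")
ax.set_ylabel("Generate Throughput (tokens/s)")
ax.set_title("Generate Throughput per Batch Size")
ax.legend()
# hypothetical output name, not produced by report.py itself
fig.savefig("artifacts/generate_throughput_check.png")
```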

diff --git a/examples/running-vicunas/artifacts/A100-80GB/forward_latency_plot.png b/examples/running-vicunas/artifacts/A100-80GB/forward_latency_plot.png deleted file mode 100644 index 2b407eb29d5c83cfabd59faed0fd3046d2b45024..0000000000000000000000000000000000000000 GIT binary patch (base85-encoded data for the deleted 52063-byte PNG omitted)
z-{1Ua3yxdM9RHLk{E)#oD|EzZtBIDULOS%P-V0B7%zX+^RhfwG>zv;DgeA_>l%D?Y71(GQDlQ7YEUsmY`d zk}tjlQ(tyQ$VE#ttjdv;Ah5JGOYom;nX#%BspmRB4c9I{7&08duJUmgT2xckc3tiL zqgCEJXrX8xe1({=`w7WG(gXa6IA1x=7xzo+8buh~BHOM>U7?|=78&|{Bla=Z<}Sgn zaknpv4Yo|H_xxF7hJH@Yx-*p!7Z22Yex=QU(}~f@@h0>Olf7y`Rn5qPl^llZ6x+1@ zd*=_{+U4q(H(!=L{g^nEk1_QCGmv6S$gLjx)H;;%+|ko7CLi~*fQBrTU97Uydn0e} zcE_jp`g^(o7ZC`h0AY)4bAt)n+WqY$F*!nXHJnE<0e1Blw1A{N~JQr=wwg+e>htIvW!^jpMlk^$KqPaDiQxGTrLYncZ08U?o{K|@+ zp2f|t56uf1{Y9`4e*Ea-%EPtQL0uZuO#?m1!{*DxCf_q*l5onVeTvJo`xRNq->vtp zKlybD;kCcVd-dzr?I=s^9{n(v>+}@_6oR|Bs;V+*~2=$p^Vq{@?HNUT^5f(t|uZ}blHP0ct44CvB-ZC@0Zn_aXE>- zcbdx-cePrUh>RqVU%ESVMD*&R6oRoux{RlK_^4A4*LiPS{&&c$?-AcuXU8f`@diXr z@8oOWK*N+#Xx(X&3E5q2kE7wpu+XwN6R@HvqJHvSr1m?7iF&1@BWra?I9a%QAJPyz zf4`xVEXnvq7_nHmPef5w!Ao{)HqDZh!aGg%W@h%zd20j8O6th4bet;pJ+)x$8?Q{X zLTWM-J`+iQR0z`xa7@1DBbwOsNyypiwIkmj)70?7F`KLfKho^WzU1$#;)G-}moM#3j+kxu4i42-cV~iMQ#O?PeVV%9T{zvKXXMxp(N_$Z zd`--LeeHTbF1h&NJqs<%vsSb2hfF3BTJPD1B4l;UL~6$SskaNiyPiFnj*Z>EVDV7S zxr%!PVZdvzWhI;a*_U#&EwOI%v9XPlZaJ5!krPrf=dQm}`-o5q(b6jv9eDdDo>HNi z7KhG6!l2N?WWv_fzOGl}x50{ETL)!^aKj&c&(M8S5pdofN|?oR2l{@XqI)268KN&6 zu%SamS{t^hqH!2xCC+^ym5tep!NFtmt)?c%x|88vRk+ACCTt3kIsJ%C8awn`j?vyY z4FPYHbN62pA3W##Hm`~OdnMa^ud92&;-$kYy5r@Bf%>>*juy$Ab+(20i=0@9pV>~? z!>x_*yV}_qr!c&@@M+4l-nZJH+0qd@6<=% zo*+xaS^2G(TK-W8c{r_p_CDQAtW00FJdlI1$MW;ktH?z;| z(NGgpDmhWcTZ75*rTY05ab>f!*OV1mUz(j^UNAU5)}-h9hbgY$L43{^%W*~Hg1oW=?oeoXQcD_$}d;j4`!q|X| zh4$~2VYiOAS(5%Xa<3mAFZWPt*A&YsggjxtQzN<;c+KaPT8@CyX65==0z+-&x2yYf ziXp=5tL5uc;roI2&5u`D5PN}vnj4Mb0ph`PDb=^}#g!RdqjJBaUeWKbzkTdyA z;7z+jZ5wqdDSGoG>M_h+5+5I}ZdMU)b8{>0_O=gS9!YM!?!I=N(0$Ch7B>;=}s^ollbapl~%Ic=0!K-SN>yEBL2n7=n7*6Nv`$4!QlGO zKEqlb2D;YDq_JCD@&R9b3JO_anzO(=8k%;bSm%(silthsex%ZMH61El|D2Pm<^~J0 zB(q?|CyU~)w?Y+4iEOHJxa>M477NnK_i>K7#MYM$Psh~~qMLWh-^%Rp@6Lu| zWgQurZ|vMa=3$H+@$7gSeoc2K{y0z$0s!DWy^-f}fb=Y@ zbn7N}Iz#fr$$g2*ZpaDi;^1YkIu zV9I2|?MXOQ%2L&ICs@izR0ed2AF|Ohi{TtOuvXLg9;RRQubqg=NFAI$#ZJVzFx;bg z$k50l<|-$A?Qk~aC7$FutHq<}cfA6*1Kig`qyw?mWrn4~4XkvkjFzxDLZo3&L{%`5 zl21*vr{2@2?(?<()Ny%1l-6N@W(fDTG|JZV<59Psv;f<^jgB}udfjn2B{}EXKx^xg z{&8&8B-xYNZ@`aeB#5zsSanE!kxwG4hlAcF2o58|P8w#qZ=0m%w%eZ#mUaFL))4Cm zy&1)je6q4uK-`c)(T(@;le99wnWf6G zCmgS$HU&Bxr$)tO#RRqT{mBJYb*ny8;BWdA{vY}nBH zoBQBOVY|1SSqSXrcJ6MRwXy24S>qv&5BfdE?!lM)PFgsskPO#PeP)uMJbm`6(#MvnMV`VAr-3)-Lj>)Juij>_nl97Vz9}3&B=#vkAU>LG=EqDM&*B%($cl>c=vUgs@kPa54S2TUFCPeV zPZs`=kWiDD;&MWxK%NouS{z`61uAT+zqe(37EWo0BP^K^+{{^9lUaLiCd3x*-6~-e zFd38AQ{2qQI`XVYPwN{NM+7XxPHC#}m}c+KrTfXojCy`sj8}JU@I*PYDmdZ)HM7mW1mnk5i%lSoX&U>MJQYc=tDuA`A;($$PG~R}59Z^gMo}PMp^~ z`sz7#sqYKTN0#(AdPxusr8b}CKSOU1ryHl_izds=XBpYw^}f)DM+HPRBp<2~6~(x) z)SS@|UoCukW+Ww8n2V9j&vbkF+$-EwNzccHq!x$AL(`UPpV0GqPEHnF3cr(538uI|C-S3p&Zo?EM~wg6yp=(mDBy3AjIq6ANx>Ri{MpMAEQiHGLE?ZBMMwO20|PhoM#dexcg2Lk73r? 
ze?hN*F@M)4hEy!}%Z-9$n0CCPx|>MPo=?_C`51q)5&bGp`}gk$%p{IkZd2Ay1~|8y z4@ohux!g}1T8bm!6JJkFu&?y^4b+?fjl2PA9?B#ZH;6f=Xv)O;G>imVKYTC;P&2@I?^~j5n6b36RrqaId zm1JLnjJ}7gBN+u7^O>zLu_L`}MFS!u5lT1RKLlBM>~!iS{;_d-s?}NR@VE~QrhgEf z56Z1r_aM+Ov@-SWVA`u@(=M{5sGl#o*DSt5`AHoZGERrHm@qDZJLW~8=g~<8-;W+v z-CTFcN}pOu`N!|ER2Jcoq3=YTH+2S8RJb!1EXfQr7G!4)%qORklV#Adv(^~O^2H}c znTd8i>5asH<@5=jN4ryCW(hPHiz`p!rH}(18d7}T~)4W%Yg67 zQ|g6-Q*efqWZ5V_@wYLuqYHCEGU>AjuU#`0&|W>a+@fT`C>-;a)6~%@is2=F@S;2U z!L$vTmJTJ0yE9SLK=G{E)eS14fwbm#v2i5nHp?dT;sj0@7TVY}5`1vl6{%Hsb)HOZ ze$D9&l(sMQy4!V=fc(3!d_&m&~9weI_+D@1NXJ;Io(#TXZwtAQSgr{)^p z=RF5oeflu5KG8H#)j;!mXIrAJn^-i_o&XVK@N<1`aDOGWmeXFi9k1a%hEff>MSNJ{F}5c`jrW5?bkFkBtM>`HCcXXGnVp}YC8se) zQQ9s`4R4`)5<9b^&9?sZJH65n7HwgeCPpu&fyuE1gXs^6^&W}Aa~D2mq=)f?uZhVm zb-7%axOav{RKtHfUC%94K0Fwkc6h5Gxwk(=7{TNB?K<@i&I%Q#OWm>-l?NhJp;HZq zp>Ss2?%crHxv*O+Ab((iPa?O669(1z)C9ve8iqL{XR3NC5~8}Fr8a`OuhNTGYA@C` zA5*DQRFqZm`c3 z!Ieh<$p1^@9QUS zSUa)``QF-~-?I94rt-Ue2zrw^Yd(?R?QFOJa&}zra^+T57jzS)E?J`n>BH)B!DHU1Tx!(6LyqoPH1V<8kkLSe z*|)EB{cMyoyvEYj%bMMfc5K}_nJjqB8N0XcHKAJCC$H=W=Ll;-HgfquQ4|Yr@KXXw zzRPgzCjxP)k7o%4qBlig@zSI+V!QKc>>ei{%v$W?8+F@XltbH?z@M_K*!d%K_={Kk z*HCh_?l`%Y31`xt;6ic7cW=DNk`!d>6KPlrNMtN(;2}ny)>cl~%0095g(0`CI1;P( zz>ue7r2lzD<}1fd`WJht(>6l;mUmdQC|=pxXpH4pGvyjCa$NsHb*F~o?{}C}0B36D ze8h339(qNZPBeJ3(t`5hq|nW^S-Zf(ewkPPf#_4oqDDF**gGy;Fx#EB>#H1g)=s;; z*Hhf{)>AwS_YPrWUb)$&<78pS3x_7OIxf^xp_1E$8l>a|x&L(AoatOW7ALnT%KMM6 z)OY%9YGUnGVHgxQs9Tutdel43W;Mx#qp~U#m>FKR7UL3VoqQ_=X7ErW4Y2A$`iV-^_Oe$ zlbPIFm-zov+gkut8GZe~M-Y%ukOl=rKv7DNk`z!-8Ug9q@|=w zx+J9=)-G!^Vg!H zk9&J|$!=RuqFJ?Eo$d=rD$!W!o^Lx1$uDT=#TDOq|74abpgtvFAt?agz+>$}yzn_5 zLQ9Ew*ZI;f+oC`ugGS*pst+pRc&Fwda%Cc{bv?cyf;n2iuKURk|F)y^yW03;PRPO? zE@X^{NXC0VGl=pI@4M5qU63rYGUjJ|OACEyT-^I4Qf;CxHm6;j7-Qz8%Ye_QRKlcG zk+zhRWK?_+%(l>hJC|X81Q{)Xf2}HA56w}`8;ioaHV|J#JuIOHRhpmx+G5F#6=H2> zrZ-h(t7+fE1w0hXYsLb5Zw0g~E;2&EPfcUg4dH%t8QmEYH{4KiEBVybT>CG>k@GA@ zIM#uyzSZ|nRW!3Y9jfL&rey2VfKh9~={#1Ugc4&)7yl^CvBw<83qBT**&_?*8 zV0u$td9b`$W%sj=KP#d}_eJlz`aHy*z)^DLo6sbVo_u?pTcG@mO}&0k_K)M);NQ3@ z&jA-aSJZ|VB1`%FiGq}$eHSzW6x~*qV&E~z^+S~Ux|Z{FD`4s5PN#ru2}3inP4YCL zGaRWPT4Z&pu^S`M3!gmd~$G0D>N>1?B za6P)kGq#Gm$A@0OC~$2}Vm!vmoHEH1`<7cSl?!q=X(L=XvE}Uove@T=AGyNfxtm`e zqN1`=5wzLR@dZ+aGDpnSigd3e0HdQhE>!4)ie7t`#Q4Fen2V-1o}Me~SznN0uYlwD z^+PV=_se}kEvLgnWi&!~-g?bfnep@J&%W{LzK5^B-f91h6lb>%UQ=~az+rhTJ76Ru@SL#Ddv49$fB1q_3g@l{SH zB$W+XSLal&W!m2h*W$*|5-#n1PoV#W;^vjP@jamR?ZLw@_=lYSU(q%4-A}iK5?QPc zMAVnHKQ|5IMj|yaf4r$wL_$bUrSK?e9~bBD@^V5TvhY%rY~WIQ{!?B>`D?*i6;l?& z1`lD$@OWqJd2EQCDWTUNbR5*j7%Dt=;76}#Ytkm=W#t}f2-teS;UM>=^y|bZ6@!|q zMGh&r;2zRZ>RxPYT$n@HVMNUF@g2E5%X=rY&Wa=b%8YzX#LWgVGI|5Uo`y`gv_GzK z-9>AJnkW!Jc%AFD45IZ6w0gRs({B5X==endMn3Gj5#S4)v&ToLE9n)+WGQu(Dd-yR zUdPL2*GQS;L>)w3Qj!bHDh)0dXmZ{z#klalR-&sfN9@-f6DCG;o9+w6+_vCbj!Dy4 zX2d0rq>kIFY>qLMUPEGh*Dl&zd)WS}m?Hk{k}Zd#CTU3(0$T>UcFH=OPo{}KPmVlx zXONJcdX;bQ$X1)eGD0dNv#6m+$jF%A@@vfhNps~U6O)(dwfV=;hPgRiAew_1KDvsg zEykP_+Bm!fRNe|jZCI0TqwdEVU4U%nFU&T~ibYNbiRaR8M{pW@@>6NisOzEPNo}EH z$*%E7B^(GmU=XQQN9;T)#d^Ek5(CaDpCoJ_x$(LG(i*g`K9n0v>UKf6q3g)!!+6tS z`)(5TJ{3fUt4>biyrFp06Zh_GbgeNm9%6XZ=E}~zdh=_HJd<8L*-45k=`~#tHAvf_ z(cMK{RR#6`$|*Zsc#zEyq-s$75>6hTQXn)Utm^+7M?^-x-Rr!6Mdx=1R5p*ys=R&; zLfJk8ru1}l1|3@qnsZjiJ0k%B<44t>&e#MzWf(>sjJuP#S)87u=LpT%{ z|6r?2$lms_9!}cdcM;sE+|KHF{RPv*@r|RW!2zY9;_u9CjF{4%@mM?$WG$vUMU6xy z9IFna({2&p$SLK2eS(>smJ51$oB0s36H%iIus|c?ZhIEM!8ARpMJFuizq>m>_zHCI zded(bAyCiv#m7@pdmJcy%gyZswF6KRHrIdg{5gjDn^908GHCDU>{NiZrd%ebT%N6H zy3D_~oT}Qjl)Fq+P#oxh=x&?#lAB^*&7HWnDnRFwZc9l-TQwhCF!b%-dEUtLt|3Ye& zq;QA5i?wC)kl9woS~GLv#SZ8E_)$-PcG+C$a{tPdEWQXWA8VBr8j;EU#J%sYC+j^_ 
zQ#7AK^fAMg%d<_*_?eHz$5k>G~JcRo7*V*->E_IXNkF&ux7wx=BL%qq~e;2Y35xMNsAQT$xiIH1aQ&=5LmJKpTr);nM`6*Bad~Q`yh{pwHyDD@Y%Zw zo%#=z<1u##|87bJ>UiQe8AM>2{La$*?`Yqk@^&wwvEEYNXXY0gh?9z z8eErrh@0(ozg=WFDREe6`Hd9%bQB$iH!V}2y4?{y^VmVr_-}>%Q15z2|E3q=dl2_K zkQyseRGX!I#0qXZF6&BG`=fg`G}w%&Zp9`Ni1$K3_iy^q>8XZr?ZfmgR9-tywA+ zN2j*Dgw?OBz5HbUGCIy|;0NW`UtbACLmW`|23C1q;o&x9=={ntHF+^;z%#sZb7qr& zrP2r&6~h0$trqrYuWJDBPRoJij*DR4Lsb%jvny)`G`G*5dN8Zr>MlDmgV{`#GTjit z96F-aNlB8>R1-`Dvj)?{mIx~1>$!d(|6N7?NkvxKH}5G&veV??U`>^2Z7+-3p)6mWj?`|Ux2fK}-c5O{ zEAPM*PJ54@_jY4Tj`yiYa~w%%HHj?VBEggeO&dB>B_Y@p=syd zLJgkJX~SD)Cda$e#FHN$RFIVB8*4n3;@V+wJut#38hv&n9QRv=<7b1MF}yd27Y1b_ z_4J`-TY?VA6x5y;V3APE;n20l`-&{Kh@F*H6N0{~1Ki}|k ztR$28`R?Iv51(+5jF;dlfBY{&7kT3TMn8kgFXrPqSNb)WQA>V@6dTo`y(jFhQDg2* zZmkviPE@n+yVhqv-33d50WwS|?POnhMCXb5sp%85yBBQ*sRdgWLIhp4nXrRvRU>y zJLNLu;^@dWnJ8oaY%Ql!`2QTy!~O4w-qMTFlfW+pcFbn`n2BkV{cGvjQw0S(Z-ghF z*0wi%RreE-({?RWw|RR?PK0^h@h2W+;`C2GB5eHjtLiK-b_gbaA${Vji0N>x6C|Tr zD?f~uloL3Yv)PLHiaL1c$~^s=e|>b|rwA^>Lc^lA!rtP;xQDy*(@Ra?-bS#5#aR2{ z%_+~3*?m`cmXQy$nCsM7eXdG@ExoNKnEFmZfFCe>As6ST&DdW@$Ju~D{6Z5yTH zuBkWKjSe6DDL>`gQu_WOrsXi;LBWI9ecM!K0)&rPSvh+vOGb;o@;@*p6geL^cjRs! z$hTRv$UgabjC8upQ=|Gg>*PZp>z6~2G?OI-KJ)l4X@g(r-U38y2Mt=z2)PIKTj=#b zclXCfR3A+=e2s_LSIfw96G4Fq9M~xMTZBtTv=qf!F33nCV}H0LWoo)fz)$^6Quf?cybD`+Y4_nq zzEk$*@PF!a@aefIW1|Riq7C9cQ~N&Q__BQrN-Cmwcd-!?$+m;DWku|h2oV6ZYIKpsAYMS{}*rY(g+Z%C8GSW8`mzJIV7jvlh%eIbq-2bhMq3{b1EcDe)`GSqAc>xK4SEYN+To>m!P$ zg+rU7i&R&qLP(z&DeB%azVR6OW<@qI?&-%0nkI#W78RebkLPlk{f_QkvQtvgOZyH# zsn=z5yz%TvJF$k>g5fdu`Be-Zv(K{H7*Er5n?1^?c?(E2S)ZuO)lPbG4Vd;vCXjAw z%u9vqiAxgGibNE_1pYmB=gQop#a&m%5)y@>S7Pp}0+5Svm>|L;18&56sbu}hzKDPo z+kB=j=~R4ctW}*Gc_jKIZ5fGVm z8s3suIj$J7%;;RA2{hwU=g8m~8L7K%e4*UCax zmDZ0JV70v6d2Bj;-+Yukrynxwk-qL#@AN6dxUw)AujRgXq;yQf0bv)RKtK{Zu>;j|&^ma-g6ko;8E zJ*=iVh9W6QdonzQPJP{DvO1|r+elB>`FcJzp@~EO7Sw|T`T<-lp*Pwb+Cv=mtJpT> zx?AviRG-^?jb(j;htnE04va)??*a0-`o(KR05;@GVK}Zd(RlxslF?OedRX$i#MI1M zZ`)t}CpV`rS`8yxTJYKlpR4dCt7V&NRf^fR6~oUpshw;|ocVKWiDCn3TA370>VA%V zHc!rT2~ELybHZli&A2_=rA z&PwejJ2K24 zeXDf7>ZbD-tszr;>w&Rm<+Jkk*fTQ<^>81Rtk$C>_uHdvi-Xy!q*xT#_&3_g4e13sjxAK>$<;%nSw=Lf z4l^|5-fo^0X|EwFhB2&GY9`OxO?~e?sI_opGvX%i@i^1K9v&B~d_2d*TsgG1Cmcgg zokzze=s#}YW}S0%vZOY3;q? z)n?`~^}PF*W%>*q+;E@wA-+q`7Mn}SLV_CoiQUtRZcG3 z#(gk=|LWbOcd93!t}aJw1r^uj`UOiYh-aPDE2)vu+g^;s@#5k#o_opt7MNR~w&>_? 
z+-pSl>r0wkUrz~?%{BOs`ve+bzgUKphd$AirKe#`mqw<-bE!YaLZ~OJ~1>-K)ulPa$- zU^fZDM2$aRei*+rQ_d6tx%VouAPx?DDamp#?pO2iY0fuebI8QbXH(O{w5Jmm1FKj& zO9!s1@;jt`HJf=KLfa-bxtOW(nZhR3`UeMcRme7^`d4SP=me~u9#3|znJQM=<4UfN zK@Bl^Bq7nyyaQFo_uJBk{Uc^vi9A9_WjYeT1LGY9Pej_K4(Iyijpt&ww6X?jOuZ}e zt!@aqJ*k=y$)Y|(`qgnQ+G>0^+S``Gn68xX^~v6l!HW-+C!{6d2hpOTrl}HDc2@PwOY5eTaR1kG zkB)XY+oa2N-|8~>?=HxlH*Y!V%uf9j&1T*^SFcU+e#Pv4qr=)BQ{cu^Rp|y_VMwkh z4GrJi8OeDbd%h)!u1H8X;%#m0if7yQ4l53>;m6B`jR*)bZpTI|qz+jAl(IiOf@^O4H??&EXc~t#ndebOC-FKT;qichc59$>t;K-?% z0YUx5VN+3Gxv5cAXaa$+99pAZvyvBqSOQfR?YCoQ_2*4| zUnI_u>@ckS#bHZfG!xDBeJ+=F_Q#kw)oquUt^T@MPr7pI$N(I2tC|a}{*clX4=wMp zb?S4j#>K(sK>z2is&LWWK2(>H-$vQyjQXuOtjq4+Jet!AyIL$uswGtp{0odiW4bTp zP9CgQ`Vy%z>l)LKP2LGr>5OWx_pG!W)xPCiWr^T)Tw55CWDc*j5*?3!w%Nuut}yQ1 zwa_3*PDT17FU*d)6i$&(*k4KJpc>bb2wmtmW6`Qv8=6z=+4?TT!p7fW+fzS0+En1jP7Z7&pN+#sS|rL7CUwdfuk;>GsE-W8{Khi5=41?iGDbu^zf>0m_8@ z$-qE^%c`%+1m4BwueeLJ^{YTy%_A1eKFl{2TfuX@N=B9^pcca=z`~;;n%X*ZdUJE> zu24`WL0kJR3No6o*BSFYGDca6$xrSsI&8`Q>(f9~pJK7V5G)6=%Yi4Gk2pCDQs!UNxJVrc2kD=sz~ zGYy`asS4M~^*8q)qKWx*I&aCLE<@*wvVIXWs%=N$=H zI>;E$pvRQtG&H?cR`0aMzkm9Y-7jRcuvSJ%U2BO0c^W^=OG)pfK2O+eBE})tBpPxD z)uR>Sv*t4rGirJK3f!zkx+QVX6^Wp!y(heMnMFg}>BXkzcME4?*Sj#cetl#4pM;5G z3O_OQ=#OeKm@nx_irPq{g^Rf}ThxjdqT@9%{H&gf;T@EYT86%frE{OGFWCb{Ptgzo z)HO@n+CG}%Pz;v3@Z}rJ`$$3iMDf@LGc00P1%k1(Ot$$t)dWy)CccC}r%N)~C6vBk zP$872{qAZ|kl2c{haVKQ8jUR>|MiP*B*KO~qSOWc!;f6L1Ou3_;GFz~YLoxh7a`e* z9YB)M3JNBHJh5?KvW$pG)64eDzkOcDK*<4`8wf*lUcj%(U*o_XgIi64$bJEk`Ifq3 z_Qwpu6+qew^4chk9`1zv(cJ{@H@`s-b_GlLHExmpxOqfgpEPQ z4}%)( z_D*?uxr+cbGimVb0%`DjkYYPJOFFZH*c0; z^WA~mjerKV%iI-6h8J8^my|>R`SD{!1czTKA!?kSa&mL)bHw?(Qi|z6yXp-}D?s|n zd!0MG@H+Q1^saVFoOX*HH}BrP3qE(2EmliF%lsV%hW8-l z67s?Rrd&xdL<7{9+_wwaSlLN;-2DlZx8#nHO-Y*cnZCXiG5cTvQ zQ7>1LoS`}Uz5J;ED02Ao>aZA;@@yd?cKEjjdWqejEwOhvARoF@VfT7wsSG^Og>*E#|4U^sXULv;YkyfnV zcSJpRgLQ16*kKQ_Kyba+GcF6EY2Q(iDbYX?9(}Y%P@z;*RBXuz7m{>zt(_8eb-Ly*1es#@B-CgsP;hrO`6t%tjZ5!A5zrt6L>tn ziz9;j@2=`0kdAswKMG0%|F180Af+1t>VzPgYg`(~R8ZyPfjqi?Sk@b_oclU$!1U~c zPdMl2&!4BDp+DN%2tn~2gepNga9v)`q94qQ`t9OP(BF^(XHsS1hnUYYYE(N6tI*C3LFkA3Y;GdeLonK zf-ifVUR@mraxgnk5p!_*H!H<69-A*hIe88=~|+-iPJZvpu;KwX!r>MG7<%H zLCHd*KK$|H$5Mfzo8mXqu@ZVR5kKL=e9#u9(9lpxW8=&cX!KGuE}Ifq?p9jzV5(}d z{(CzpXo1{)?>8y7I;HBvUmEAs$eH8%tSOt8>tdW4P3Vj??mO>JO$tYI>FMbi0U#W5 zs_NlUr~imNO888T9g&!r2g{-2ODQQu zDEp}{6{>9NpFD?bscJQ7;|3Uu(#1mUE&?ix5y0}m+m*%~rE;EX`WmDC(+30B-}RyI z+U3ReW$Y9Cd!w56*TEV=$I0_BR|l*Sa|Ez+sDwB2cpFQ1vf>R&^GnmNlR-?Nv`Ix> z9p+m3Y=cE)=MPwWL62~spm$@#%+&O0#sE+#xS(H%wF_estZIUgTeabNd3kBcy{UVy zb!Q7Sm%0Gc0<~3w((!Kiuv`rb>!~=})A{!G!xC6k0LRCZKKUEO?%e?AkW-f?Q36!X z{k7b}h`6{;2^xnZrYCs}`7x4jD{e&g_<*NpBPNg}U@^OP+XsCh1e))M(z4qiO0 ztu~-_Q))F`qhRa*WBriBepx!%ub>>^UnalA+(>=T87$-tL4&p)LH?jU~9p5;R67SkdlV{K}h;loQhHj%61UOcH^z zp1>YsIvyS}z;b98u7G^x5-=B0lBByX{;nTPlz_pim?^#MX1TZ6Nyq1lvSvkO0yo;Y*U21@|4AeSDf>3MmxaF73%x|*| z@OJe|vfB$eIhwUUNw9C(t{$E$91Ct(ek6Gc8X8lMqgp7+GNkK2K_36cVHD=b+28%~ zVuOHL0}v_K1MeYEwHPf)mgm{yTetU@dvF=oOT8{V2L}iJO~ri+zkbEY|8=>dktCZ{ za4*CV(9#UFwC`Yt`k0h--MeLe4&-B(k0z~YCOWT2^v-R=66g*k5U{eMuGIr=BDS%5 zJKy|%%3(+bb|J$+V=fmE#)Z|5#&Nc#7ZXyrsF6nPAzsMISy17V@57Fhz9%Xs7U1G! 
zq7CZjjepx<)d9$o#A0@MvS>dLR2fwhF6E28*>1= z7qtOEWm(nl4@~}6gIV*2#|g2q9k5`^0N4t~yJ=xJd$vViUmpnK)ZfhT$8zfH1<|Kc zQ||y~JC@h>_jvLeh3z+F9kfaHy9+y+rp*;HEr**HqXkRbUYGp9m%Cl_GW@Dol0UOQ zb^m(%w&!szOeM9)5nYXpAJWgxx4l$A!s;729;OkD&(B-d-`R&1DV1&nh*iUM|2#ab z#6VB~jDp81aj#zt|AB0leRdW+Y6bmXcac)J6s-Yyug^Yj7e7f52e+o9zUJcW@6UP< zq`%X@_GqZ9yEbp@rNAfT zro={mQEedLj@pdxzP$7AMMelb1dV#<|HF%c`5vBG`iBqKLK?Q@yiOI%SrWi&#Ucc5 zg#TygCZs7R`;H{|oPx6Qb7f_cTilj_KHN{*b~k}14RTTLmCLSvLCxAjupUNc=0%jj zu*OJH8E{gfpH||~{*Z^t)obh<&~q@PFJ8PrtpJ~yR8T&jxw-i;JJaht)G7;DB6h1O zUX;Obt-7M*LeF;Ffhw0!6LGSfV2viPqth4ec|gZZiVOV-KGA1Tw>r_f1ZPSgrayV8 zI2XC#CqOc6cY~r8Y9$9yH3t;-6;%Dk(SEx`;warW(y_&qm_!3$g|!vut*K6i7)>ZB z`k`NJGbF>KprPSL>6HN$RZ#d%`=jXB@bC}dq{xVi`_Dd92N&ocJ;-?*o=exBQ)xIQ z=@UG@jR5+?GVExyOiWT*FEznJVJneht@dMUIEw%_TmlG4mI6K8fiLN_1`wZTXDh-h z_+BUIDIP}@BqStLZrgSD6LnyXg4$Sp&>{7x&+2`mtgMXEyyoEGsG9S^Md)~+f+vpO#jH>Ug)EMp0hAF|fDRJR_!L0n8T;YGhsb)XmoKpm+CrX~5s;9QW@36F zf`Wp^EGvk@fFQI&b9sV=g-^XWA3)OvXA2mnM;5=B)z#Hy`BDJv^$`W@t!y|C1wRu3 zO4n^5XbM)KBn%qVS5Yui*;7}re3k!LJ}j0n3~)}kBP^Vvb9p)nHER30I_EejHobkz zb)KC~Yp(?8YQp}N;ERJrtU=g)eA$Me%N^0eBRTUp6apZOt5t*Tzdt@SUd} zV`?}sKgPwqgbFqk?;O;*|ACKv@r8EWLZM7pQc4PxV$&)rNVA4IVQO?>kZCq5t7x^t zDS(lMWeNI>3|4EZ9Zj#(p%gnHsFd0+h@(Em%DVhZ!@70JZa3fBgGtvNjzlWjZyCVA zsK5l2wo4zk{0-yZN-Zb|gWFC9PL~bv4unou>7tp{2nqUOZFvgM0{W3V{VLFs0$%`( zpgUPG0se4jMfft#+I`^`0(Ay}dh8JX?eCEr8V`G8`^(G7DlpFMf`VnhrW*q# z?8*Yrx`Ic{myR1Q>s-U(9^f0$KSo9U0I}Sl_J#T1!E7i96SG!LRCYFF13fvMz^N#| yvIS_MLKTn$pcR(=7iI|IK$#r8`TysOOPa8KTed^YXcq+hd-+`US%Ijo&;J6Iq8xPq diff --git a/examples/running-vicunas/artifacts/A100-80GB/forward_memory_plot.png b/examples/running-vicunas/artifacts/A100-80GB/forward_memory_plot.png deleted file mode 100644 index a7011009c793b4cc2dde8ee3077c711766da187b..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 44235 zcmdpecRZH=`}SofTLYnxqU@0o8Hvg)vbV_IZX+u~lu%@al##tRnTe2!%w&_j%F60F z-+h12c)gzI&*%B)>GkXUx&uuK|v190C zLgd(EbfHuKpL{%Z_~R={Z+iiU6HhPpqYrRBA`cfkE^Lg|t9l|YN-6z)7)rD7u_1<|3mysZs zA3wGp?9lsY!atr{STFnKmE^?FpTry-9Ayq84AC67S{wczdf!=)>m58HT8l3ydUw7j zgXHK@fua6--@QbQMEzTRvv;(#9v$p$n{6)i4OZAQs-{R$czf?98rCrk4Gx-2wT9~y znFO?NbtQ_9r89K*+1c5xJ@~u*0K<}1Lw?VGG3$CMJXr55ao5ajVs=)$@OK8>T6@%a ztL{`HagUWBB*F#5rC+4NBBc-Mw?{mf*;SKBf@K4Cc0Bjj+nB9}O3r(2E)ZdJV-3=Z zN=l~o_9N47=WMLVvH1%3)w=Ce@9h-|ztPFbyJBKumv-(K%98|?lnBBY34i>P^FZpB z)Sq)dMn={T_g1IkjQyH|jvi;2W}vWd38fJx_8;@xg(sr5(Rs)n!b zuhSYAPQ+ibj%u$ks`E1K%~IPr*dAN&y>m<0U6@f>ON& z?6}YPZm3n&)TA0W`0Z>C7{g+4`ZcYPy>v_ZfG|P%(UCKrjnho_-^-e+oTt*a7yI+y zOK$%l;N#;P92|UYKU|?x=T)wIpjT!Upq6%h;*0cOx$Tt^Q%lPbxWEa_X};(D)6){P zl$4ZfzkW6U{vsXu>(?*F-772Avu7|Fb@iySv`eAQ^$`J+`4s(oSq+(mzN*l{yUczd6u1>*xA{+ zr{dEm<)!cCWRX(4R^e?g;^L;t2aK*+S!HQ75RuWw`x zzJvLrn|<@_m0gM!)HLh)oFW}b!*;!~ zz5VEM4%HUh!aCv%b?ajq8VORq;syo=x3#pi7A^0%jgryvnhX}1qESytVUFcBBxMa! 
z&<$+zr;^^MUjAMl2)pcEU7dhgEBW;7Y~qXDMF|~dDJd!JX_INpyLVSUC5lp=7RULg zrLk#no?NNhHcAq6mgOw9{BmYvuA2bkzt?dh+xGs!?xuRewGc{ESW8ZBZoz?p1mf;X zp(nYt;`>;6cqq);B7)%(S0p6`97nIrZSIOMCck(=$V#}owQO5)i#@ujv$He3psx+< z7d~;X(I1a}wq$zyY1YMJqGt}aHsuU)e8wU}KPM*eu)%6q4s;)7c0F zFnA1`baH2X3bs;kyE8l-e{*wlk=>9W3G3HFW9g6Pf=eT{bf+cU2~Eg@Ab@PGj`5w2 zi;fPgsuGKKw$9Zqyk6_R%y03D{=x~;{jFigcwt*wa$*9DFR$!fMqmdj$MfR}S$t}| zb4xOKZO!RLZ0y~yudm)zP-uc3OC~KX?Q^g>Fj#IoK;dgN`z7TFR#jU|fQ@=iQM~)r z!R0yq{KndJ^sTcT97njc3*`PD9?+aVO+iOThqb=WOiw3-nZE>k(|KpjG|!;=SRAim z^IMffo4V2GY&ZDr2KjNTqo*78e}&2OBtihXAtQs;A8hG!8`hk_)?h=v@9w5yyAg4T z%aNZRJ;c&j1M|1cTQ^{lIW6>F^jT{VB}O(H_fa2$|`OGlMM+Q40@k z<4-Sz$^QKL)N+7Qd9Az4|mwkz_;60@Xv30kuys%)p>0;4?8x5 zFArCuO%bS?87;+!#S54{(ipQ$@ugKzP|zv2IhmT4<~-hrcOgbG+fHSy$u@hp?%U#K zmD2=1Ew2HIhlj^g24O``*NJAL{r%rx{_W9Y#~$Y8ar^AfXNEp~`l9B^aG7;rhEm*Q zpSJObjrniR5K#%R-HmwyY_Qnu$n|grB4cCYE89O@*VfmYhDt4q-4?abKiN|{i%j2t zEw#M$Zf~U)Ew@qcRmn1&K2n$@T>dr<6BBvk&glmb7Z!ytr9Sw3xU;)hs8V70&XeZv z!RiAQ{2NLSzJ$Jf!BRt!uV1N(Wvy#wxXylV?(9^W{`pgJST#wEb=svm{>uH5vSk>9 z<5&xX;!g$N3JfLIKe9#MT!p2`dB$(Y6i)_Xq!tNl%g?SvBFuTRPHle z?0OY;)+Ib*VmcW0aHXT@(BVV~GbuJ1!tD2~;v^ki*w?SO@nn`p>xNUmSk8Vavu5Co@R)4gZkQw*zkDdIQ)t&g+x% z*27bg(!5MeOhMGLu(FJ^G&luZrf;f2kg@a)uKKn8EIOJEZjnRw+O-T{Nv{p+J8zX~ zWJ&O3KE0CE|Go{2jjY;$cWZgr(xqP5wx0x>gf-M7f;G3cUO#vKyuz^5!NwgtnVp>- zlJM(n_Qqva&oP#m{H!c$BO{|!-@UEWvN8?MN|+!#nRmMjx$G$f*m{o=U&4sx>S&!j zu*pO#b7A&`Y$&f`O{l|2jp6TC?6+QVVcO8XT@%;I7)*$N5T}d_@#U=c=!{6JOA3%O`kAH`;u(1&c2ncvi zKIRzz`I9|nb+DN25&5ZykmC}PZGV1Buozyc-*Z?RuB5_l>*$0+_&0$?y|%XYtYQ^j ztiPJ1szkdDtWOFdt?c6^00e~`1u&k9U;6fDj_4S@}Z$i@Tx3;$8rF?5X zq)=ewXRgq{diAP@Tjj=$8+Jn_L@+Re@9qmh5Il2#n#Q2sN7w7m&JGJVH+ikcYQehS zldv%7Kfmt5);c0fB6s6PqM-*oD7}asJuwAC5MZG9t*vj3HvKlbZ#<8W4~6h_Q`&a% zaHP!o_;=Wvp`j$vdsz!B1%AvJztvZk=>9Q)i})CXi>qRnFDKYHz-GtB=epBAe*WB= zr(3G5H;?cMd~P2QjD=fn_Kfwf>2@*P+_PQ=GK^8gPR7K9d6T5VZs_6t-#_0#2B*Jd^-Oo$&(NalW$>L$)rrg$zo?p)#oyl7 zdtVF^DOZ-)uGHkVDzMP;84>z3)Rv$Dg1h==cl)rf^fBHPfjL0Oo65>9e}2ybHu?&I z_9DN}LUv}6|mwI#PHV2|tUF1e=^Z4aT87_AwNHV$fL2ZuWxBj?|8LA z1Vn$ct)-!lU0okw5Q{9j7!cs5m-c(Nky2NKQi@Kc!)X+xto!+IH;f6TzHPytWvQ!7 zeS^y72sT*j!N+S*}kx+kRBLcGZiD z=2SE^?7Db(v_WD=cm$#3zx%(bc#Ym&2tS$EKJh+e{fFz9iFS^ZOST!68vymP1+a7p z^*-tT_4Rd&y~)YR@remjHN}Y6uUQC*h)e;rK=yxlNu~}GuIEHBU6J)S_PI^3jny$@ z@t?2?uid|Y+3#ReJG%evE^d?X$dM_COw_XRWu$#L>499$TrFx@mUHLiY_8_CLz;W` z?3q7b!~QX-{9-G#-71Y$*rQF~Kcc9D{J6IEo(eMFA%7x`_2V1Nz9$OncR2+G1wRpg zH}+-bP)%xleKnBV=4!1`U&9^k%ImFdZ3CxR4D2Bqp^rOFf26S+EW$Ia!Ci$+&Mn># zH7CSj_6L@lBL$jwKA5gXS;lz{Ml~>W&6C9yz`Ce|T(sA;{iJQ^o z{x54?@uqyS#`$lVU3yH3vJha`0mMO)Nb7GHOG;w8vhnrW+>dG@w5dxEVh>-@2n$Ex zrAtH=7V<$!`4l}O5fM?(7NEhY!iK|U*dyFh10x<|;fA&D6KfOi)e)k$gor!QB{~+A zoO~K0BzsKZuCd={EL*u=xZ7gCu1^=NFcwM4)37Yl%JS3GTexoxg?o+%95K0fFZjGt zY<#)PQPyXPiDa*T?$OBh#y?7XQeG}B;yC*BjHH(dR0?7~J5F_r138*pJ`h24>U}P+ z)GQY0bxQvF$aYgn>HTcV1BZ&jva%B&K781$cUP(LSfzyO4#Ixxxw=m3sl@OkryZdFZZ*$>$IhU-g>>7l(m&>)Iu|v7{ zEG+t~9xBMoLnhpF{5azh^ypE)XP!}E8`K$UYH9(HGO}9FX zFevEEAcU(k&|6wsavQxHocxVyUns-~%movA&)dPMN%`)M!&J6Fp{IN}!7Pru0P#S; zYs2bOyZ`}IlskZHCuU}XpFSlIEBFHiw`pIFCSXRvh|?0pfLu|%K~dv%1Cq@e;7_ye zRQa2#s;vNJCZ?x@($mkO91gSI_U`^~Y8sjVt$aPx(RcS@umcq{nh})kD%Ny&cOR^A z<3eTd5AW5w9hioNIz^?;OU&i>mx|k9A4r=3<`}H=7KKWUtj;JB>ZN^nS>3OeF++oM z9cIC_cY)y53~)^VicthdV2Ya|n|x~JuK?5zh)Ac@f+8s?3B^so7-!F& zBZPvl6`(~oEPdq!K_UP|p zQFOB9@$q;Gj{*hTdv3P97{A!gqJwKfz|@gc|w+ zKom%5JT3Z;b?57MCds19&dSo?aeB&hxukW`=$)t9(^Ep%2Zx8rsHmt=OjWSH!mUE! 
zn*Zen2`f-J9Nkd;x5uA`;3^+{9U5i$Tn6B84xue=s6q5`9| z_jKR4MvZEpKP6Vf75Pvb(ZD?9mY*5e_0D>j+tWy5s@(kxrv0(|^7o@aicBmlaH5bA z%dLC1*TyZmjs5)jqu3;D1%V->m|te!-m0(}g!F6?YYg=2ig>_pmb!$$t1|fn;4j3AUQOQm& zF2{hDpeCc`rkWG&qB!qoe0WYAexBRq5$6NG5)}urFL!P%o|I2lZc+awxIQ4UV&C z!;;0E@slMyTD6URgWJw{e@Zob zl*xVc5kO;pfYSI8FrlrfLvwO+6yd-qs~vwSWJy_4Qo;|Am0P!D+Cn=d z^yNi#DMTuxkN=;@X0fBw#PsCE>6w{;4zkq-Sk41Q{fI3{#?+K`e}A7PPV`F!Ke4|k ziuM5k0iv}GuxfCpfpDdvrgkdLb-u%oy~9&gRmGkfv&1O6McZD3H^hFmxoPw)L+ zs<&EQ{qa%QPU=5z3>2a#Jm&i3X_Jv6Ug{mvXU_~tSd}|<_1n8gje~iw7R)hBlX}~7 zDeFEU6gl6ra(al+e@CFpj9h-g#AGMlV zBxDSZDU-ftNP6oA<4(=sU4uv&p9kkoo+7&E>9%N$RTX%_pA}KlfjaKd( z^=t=sH`IU#QgxIuuy|9-_X9^U^iq)N)x8-bGK1EPphW|UETD5`{M~Ta3s-1oK zIe^!x*iv+P`6244s0p19lF_@GeMx~$T$tIi=Tg>JM&*unbD65j=wdF>k3@!n248#L z)3{VGvG*l8oIulv1x4IC5M-8pX9o z5=Z}vK&iPRBTI5_HkL*FJaZdYt;>`k*&EUL&j%VG?^!Ko%Eoo9{NTOHOX5-*qTJeF zCvocES4jB3RSr9zO{Cqrl0p0W^`mp0qnwO5iW)(+w7Lc%yO<>N?jQeJKQ&!*6!?#a z`l7fqe@+>=KffP!FPQ1{esUphmbkicU})v1#!H8b|GGjB&i6l@3{|KrNxiOYgZISd zW(m`@g`;v2lTM7eE|IPL!MCEX$&qMOx_RVsMOmaui(Hmo6-$U-u58Imsw+P!PE>W1 z%LWEAATzp4B}-r=;y=4nuJv4}w%)et+mo-AZTYzdI=)?9Xk9+$d7u#c82h&^RNgA< z$C&@=@TwL*ud6l0`L$ZG$t8|O8%OCBP+Iqky*GYEL*>voq|SRACAPKSzXO%o-y^26 zq#-MdD^s`*LbYgL!(DA80JIv%wl7QZK$M(I>o>uac2rOo1 zmTCL&V4|t-$olT8<@#PbdHTs0vb+RmeUjF!zOcOeef!3e59=|3Ldp-?cUGH!H09F_ zeUENp7OrA<#N+1Wy$+~>A6U;TYmEdJd&ZsJ`rc{-N^ekPw+el^5(rHS>G>GY=whC0EKoS`-~VL}$dnsuChXaZ z7eP=<-$k$r@F2pLSN=>#uWfFA0Dk7>3IXfcR0&yyU`h_YY?Yp(-WTRWBTc)C+78di z#EJRa1%)LOsI_k~I5vlMnh%!TBWsN~&1G*pRxjBa&KRO?bYK2^f4;sbLUW#gCtzbw z%+Eh|Y&f8$lH8!iSa^9Up^b7?L*oSWl7dm03E)ZA%S#-(0C*U}Gf=5KkB%m=wYB9o zsG^3Al6m%8JMAvxbL_19bgr zQ%--yvk0dwCr9}3;lp%{AIJaPcc2Z@&VaHe%z z-PY41m5}%?m(Ur<*A{hNiD6pmap+usx+qCfC-1wSm_&8|7N+0lIz==UVgth$XWT8K z?$w0)HU|gwz>FDV_fx7>ceuZ7r$LDN#7;uPk%F5)U zzT1X~&w~}1m6auP^=g0umkIRh0K{S#T9!ULMeWtO7Du)DTsJ>W0BxS7k;9<>7i(;7 zB^D7;E>V!f)%Gk^XLJ#ntGjC~D}-K?4ss16D!7W6*UCftnUbTU0lr z&Tlj9S}F|f*)r>IFyMTRU0oEMF{G|PKV1bfxjR>z4{HSc*{YFjAr@W*N zEmpTFcM2EzOc^?{VSz7ag!xon5yR|D(u+C09@+x>mk9#f4On=zJbFPW*Z|u@LP^;Q z<;*&)OQml=fu3HQ4CA{Cgw6#%zBY&zYrvk|9I3^efTF*0u<>oM)RGEN{uMlHC755{?08un7Y6g;EvA<^{bP}S zocsv8?%Uk8KfYn%2!RUH6GpidDwZT=H)&pueNz_oCO^e>W>C`1!@-{`n)|r!zZ5>A zv7l~$kAc>#h%Q9?HF42F}%#9K0uQ9*s3q|Lt6z)tr|Uk`~_ngyA$nlBr`?8%g(aXhSu;F|>)r zX~Lx#L80@^M|FdR*&+JQ8rUzrBHK5FXfdbS1Q2q)^rWI5-6n?_JTorjD9&6@YuPwN9$x12ef@<` z`p=k`F|K85uS-uXor=5d>RMEPRG2+gAHkG+X1JDCn)?=A=Ypyrd5Ffz%xMAh=9+x zhXmUNY5JJamD$e)$tW7N#}s>K6NRb_$XI4wZg=T>!T_V*-8}NjS|>E?D#B$pa`H3c zNy@@+O|oq!I(bBHl`PF`@U*+UCKHGBOCx;ta8XvSSLUfgY#8Tp?txe4L*Er6+z&ae zmyGDxb=gzdThUO9pM0UAQL%I?ur9tA|3rV3d9~8Bu-@e1{#!(hL@Nc=#>`&o)uSx3>Or3?Dx_F2^~c+B3IRq?xu|B^+9;R&_};1TGQC^) zW4yWUXEgXnOkja{yiodc{v%(%EIWap#B)k)kjeL`Am*G9EEZ=N+A06jTE}2H2Bv{C zwsGESKf**g5JqNxH43)OKoGcYYI3qHJC+Qhzeu1d;V5<2>Cw3R$IC^lsp z_^DplZqnB+VCI4{BsZlWhkgu21MAXJ=EYPSA9?UN&Y5LhP&Ff41z(J-njG%;t7tEQ zN67uxcV3gd=yO@Um!&i@Z$)5Cr~h=3CUiA2{cPH6@|6hXAz3*>OptZw@8n`ex7GO4 z5_!2E1r*Pvxz}*RISg&uC)ZA^c3k_TX|1d0Ey19_BP;2o9Xc#*ITyRQQB)CijXnA* zNdReGKxZKh4JOrqSAiYtR*8D4{m^&zDc)Gq@3^kkrTe*d>HdX|Sx4S2RwwCgnt-Z8 zV2AB&4w9bJ#yqR+q`MnP$n`K?#Ia-}=$=hP00uO~%UY$k}0L`<=r ztyx|(c1X2QCJ6|uZguwA`*P~{x^)MR{hm*0sWrE?S#)y}rwL?IC7zD5IB}|kAI!QGNyG3NbISye;+TvXjZf|d*Q7bl)YNSC z?`QglW?i0W*p;ii%t{!=OQ$5{miBq}W%$%@*NT`L++Nu)e^krm@W4=$LNzz?$c3~s z!^h{yF+Mg!LyMBJcC7%Z{2$$XpN=2MN`8e9H?C`WC&%cT7Ks&0a*}k`TI!`5fKs!iVx5mW1Nb8?4SeHOzl+Z`fk4t`ClhKuXn}A zzxdJxs2(qgXMlj3FW(UrRaMa^ zi@P-g+*)rWpo(9XesFjNE+1n3?X4*&=zvufbNxwfXJ459>i=13_VYbUPOz$DWBaA@ zOLw+C6{X6&Ox54!7b-6FFLyV`pwaz!SCBQR%3VMvx9_Zw&~a$Pu5D}tB_K;-R0H0mIlsjp9p;QtKW}2M|WRvBxP*C3XhwcyG3-&ClFd$lp$2i 
z*P#OvME$JYoP>k~0b>E*T^tnpX;(ST`03YB4czpQ2o-0b*#UvXL)3n~I$3Pi_PWnT zz;n%Hz6x6Q0LK;ZOrYu%$IY=2J;0Er31L<=Yc;s}hgn${qFps&5+pnv@;ABo_-@?4 zU%g?J*4?WYJEvDd*6GAApsnY|HU(Snv%BJk^}VF*VZL61CbaeBDAN1?si4|G#PYAt z0k`M*XC@9R%p)LknVy-M031D7>B#(u^7Iqn6YkD*#E_7Y)xPO4x3o+Hmcp_A54+KW z1GjgZ{rX6udB$@b52Q1Uz!ZP{^eG5>Ssu6u ztcgAb?962sLTISUL!;2<$AbYw57UL-Y}BuSDn101FC$Kv6+gXe40P;#(aOfd5R(kV zeKB;Y<*r{J{rbnTH&s3iRWdhMg&8RD2?!kThRTGra7KVZ z>3AD@UJoDQM{6}g!H63E_b8dA>|7#pbesD+4J2K~Kpw^!lhfKS-RM~;BEsY$FMM2f z+((sWE2Z}7!Q<+kZCTzY1q^L;<2D@mRo%7cQTXrfa#m7dEUT!v9(hKRnwi&H9sUGL%bAXqonuO2>d`5A&4Dn#FkG9 zLLVTM1VNh(qDkK32@ChtQ64P5?#zRI=j>0=gO&%y2nbP6LfAcqGDQ1j&>a8!Om1#2 zaCCIrV-FfWe*DOPF&ZRfNRbIrRu1UQ{_D7u>E-JhLIau`f|Pt`EYAmM5iPj!*_7*H z)HdZcMs3~sQ*t)-^na~ih&_L4JatH#Y{aC{aguo{l#VK2ibaoS*N~9!2WOq$yE4tE zyH+VW^~-sKanINkxo|4p1afj3jy`^5YL;gZA7uuVTs$;GDjdfmAgxXPJzV{Zs$c?4 z#Qu8_#G{0=Ibn`ZY`8uB{l!yx((z9@cFP(k1(sQYWbV2@hhRQNR(+x1Ya-L}X$!ut z-epMt4wU_(*;~MCN$%{|U#D9OCZj__2CNa9RY;u-J5^!Ndu!=&Z*OmFKC2o*?F-|F z`*(TF1;;Co&tv|~k%rA5e%x(Ir)6o7vsEAcVDz{?|EZAMUd1njQUHSVox);cViKON z)_T$Ag@=a^?bgUuat2;y$$x#qqoHot&RDG)(xz+rwkCWm*z0wEA*l;OFR2nYlkr|F zvtNUj4H7V&lzH;xN%~3ts$;|>IlscVsN-})ca$k6OG#ir*dz3 zKaw!^9C&R5H)4NT5|9CXpY{c@sq(0+HD^r%0NryIbgWXPw@Ei9g@ix$ycp0ivOa%} z>(-5(EW09(iISni|H9PahW0?GC+2*{oVFc)7YL>%HknXYy)b@KrL1!AnRaNEGDVcD z(reM)E0Gr)`zUFoR7tj-Wve1d(cjjc{CgB5pjKW|7^F%dr^fHO1sD7I`ZKm>!B=;^ z>yD8#(WIbot5)5oj+Nm&*%!S%zKeF5a#fMhCzj-Q#NL!w9#681DK~mQ(#`NHLS)UY z_(Rp1fStpeoPwks<0~Ie)xpfo<_b}e_l1A^=%}o@ZLe8rui0tjZrU|c*{|hN_RHnc z(T%e{8f#iZ1_QT)T$0bgtDtuzVZHrEwDEdVx##216`YTsaT?Lrj*gO$H%7+Wjb1jR zs#SeGZ8l$*BK4~xQSHD^U%bH-DWL+twx^-hiBfacT|d^*lZ z`Ow=A6nuDcl8YLWDkxU&LgN{<)%H2NYR4t+XJeg}A56PLNq`B-E;j(`!BtmYyLCbH zb7NO_+MY<6-nM;JoO&t@5(I1k3RJs^Yf2Pi(Op-{s1cm0^;U+#1J`)32=>SM*`B7Tq|c8WwX$qh+F^ zqWB@k0ZSzf%Kw1GM18Zqv!U`O!^;9eSI-+lJ<=4W7ip|1uhJnvbplP zuij>BIi_H^e{`uH5|RFJ@~{1R8d~XWKEs-z&BXyT&>mm7c(G~!&+o+L5cgUx&H+}| z+}nz!C5I*@x(P1Hc(Wy8>D$Zu8hz^dSzlY*j=n+{I$Og78%s+=!vvy^POUo+#D@?d z!|9m)c34rA3bQ2n)^(@g+mx(o@ulm0Zv-=qpo#QhwkWD|Z7cBfnu#cH_;nbp%q$^n zV{;RlyvI>Y^!e4!0p0<8>;xTM;tMk+llP@KL1Ju+EKkHm;rsDY2aUfXB7k3>Iy6<~WVLxYQ*`%eJ|L%?bMaqrD)kh)g%ybsql*pq*P%-)yt22M z;1}Ed;HM)S~L8qN3vCmoGQKK$4o8`ug^T zB4E*>h)KJ+=-W1BPL4X^+~vgnAaK}$oc=5->LF-;kd_QJW+0!z4+22FI}U5tQnit| zwVfV);UH>~U+QO3S8Zj1D_lt;RFBLU&zg4f6F+{{ zwpK~ui~0$Wuo0+2Km~eS??J7x23j36&{gb!CV?@x07eR0J|I^HF%|UwgA6|Z zd5R0B#)qN@6uZRO&hGB$G{eYCZc;3gSvmrAWTpT#q0xdwOj0}Jki_L~-dkz->xZg| zZcat$`C#J7l&ENeCSqOPL019g$it8DQv!+Qi-bSk_*^UciIu8i3W+I+0tO~ zZO#~rd!XIo_uJngE`%9Ta|6{bW@2ye8*M9FS(5XY$*NqB*k2fsUe>_k`Ug5=6IAKC zx#gE9L9Gi&d*F2p?9Ln&R|Z$r&9>DWBL$b8nmam3!SMmzldH!y{xgG#@@$rX?;-&* zU2->sIbG3_B|%Lj2L}fzkicSAJ*1gx@hKHsgW3)$uZlpkZfZSv8nOSaE9->y`@fX1*7lsSF;K#K^&}TFj za`Xt~r123F8Z6~cK$Y5Ez4SthwoAmY_%4xNxlJ%YT5`};1;oUhL?#IzsPy7R>=~dK zd75@3@-C?1Nk~a^%F9=6VA+5?oEm&|p{Vf)sRZVr5mkwP2fMmBN}E^QMn}_Mzs69e z1rabF29~Jzu-zt>mVQ{FB~cTro`1&P_~7@W&(F0D0z&%b`u3^@Gv1px`YzwdhMEs# zWuSp1VrFKx_6AKaTGxOgfqsTsFMaH z*Q?&CKbunV@NYz(kw;=a;8#SgKUxQUEl~D3NjUZX9LqfI#v5O9uUYzoa2XC?hs&wz@RRKSL>KuToJ7ti;8O z7e&()afwA$L{1aU!Rl*AFwl{sP8a}*MYpE^O$A_JI|QmaSukmWmhjThtGKujSZ=Zz z>P;U$5J6zMrl$5>ApumllVAya1a35_U)LePBql$ApTczc!)qv#!9DaX8ij_={RGK< zQKeSz;ywLSRhq)}ng?Z?dVPymL~Fjcytx*lJo*+t56{Ws;$rahY&`mW z=WXB7z$TU;>Sh!EP{I5H)*S6zP=v^UfLm#mjg1XeoTyq2hDx=dpkQ()&Nz1ATo5(P z#BuDC5mE=XSA#o{7alMOB!fth2TYG~+GywU>w?vzDPyuamx$MZJ_!a{%5;15n{V@& zOo#Ty&qPNND;XQ0um;T{O%M0~SPc{;4;=y#RaI5>8scD21IWIweTCg6^@yZh)0&9!#mjh&Gn#MaE`Ugm zhk;!DVO^bXp!QsT<>*6**WftRpE?AOsptLs33hiboB(bi`elodkPz^1chy^mhxNbD zE-XBOcUbFh3Tn<~DS$Y>qq8$(+khB2{7{D*w6l1Gn76k#Ztm-w;)RFc&oqTnLkuz~ 
z1y!Cs`F{x;9NdNB-JuSl@^~gd1S6m2NUaB(goJ*Fghs|27rYg496^wUcpFs#2ETVZ zlVLm13I|geIYdMtXA+q*zs*03i{m&yu(7^=6}V%Cgq-aG&O4G z(|exw007W60_>cF`^l_Rr5CUy2A4cK=)Au+6j z%jV@Y>C(4MD#SYbYzCDj{|>jAb^{%2+isN&JEc1X+Ozs>vNU4(PQ!2w6z1iTgUbd?W&!#6%LuonF6_Qq`(tlP=LDVwa0)&G z=Basf)QCY_`#c|Cp&BRfw!qweNyVI+gN`B#K*_z%@#SS-rMFhd<8rnWK=hd_p45m3 zhgAAML3A`I0?vX2=lh@=ys>5qbo!Ikv#-EjjkMAe%gf>51-=UweW)Jsurk2m(Bpvr z!2CT2B7dNS%qt4tYHwytRXB`1`S|fBcpsWZN5QDD(FjP%vDxrjek&k2kWe5hD->yz zL`CTs!ds`oxrEbKl%ULGzPCR_h#Ws?@R6+oZl3z|>2Qc1+&V=V4D<)&J1apI!1e3I zz>GtBqpd6}5{OX$pdDB_GKfxO{VBeH{iyTyFdG0@-`p+< zo|O_eUtvAri)lPTEHj+PN^Fzus{=O+_@wYYmTe91!RM&KFC&V1dRw+^9u@=6fa(ddGIYe-zeC?C~>3x z-gwx=ru51TuCeB)$%@V6$B(IHZ*u8tuCJ}hK-9uaz}n@xm|v;9rLCbc4*rZlz5G@< zKESNI{MfN$s2dA*ic;VH{(f}C0=&d6PB)SOTnPyV82x>^LiE?nTg>Sh8F*Oz;jS$> zU>my8^2KFJ`0VgQF!&0_U6cfe>A;%|dV1)JO+lz`M#V17waB0w7|s}%3~+nJSww5# z{cARhlzap*2sze!=rq9bHdJyCPL$A3(~5#3K}*rwOv%94{8e7{2ge2X#!Rz=0{b_> z-~zI2X=_WgbA@Q?CP_ay z1Gc8p9Ut5*`gT;8z&2ika|{ym@OvOrGD(kK63T?tV{hKeV1=3q_w-r znP;vh)YeL%VhapZZ>p+xUd~8fxkAmrK+4RV;>Fjv_VpSST4FG&Lc^8Nl6djrg$eke zHpMwcbh&Nto&skNy85mP=)TOn*tgrk|=5O+`*U zXn4p&yeRfuzl&QM1nDjl0R5{5224cEc4G|>8hUY{LB|a{Z5{dfia0MXJl}KVEw)Q4WBM&C z&b}`PdCt)iM}-jt*oWca6yOaAgYPo!&(o!F&j)A78L+*z0Gkv7PJo;F;9PWQfhlBM z632N*Jzh`SG1C^QelbTVN|4nxNG8m5P zfFR8Z<`iHxL~F$>!{Ph|WM%?!+JpV|BTLIX%nhCrIg8%Svz5U-{(N;C$H3%>qJ31< zG4R1LL)$f8)X}Jv;t(Zm4k${&+QosaVRG{F2|Oz@U2uMYza}dGVO&5nhnf!&5fS0K zmHKD?9=R&lyD^A=GuDL6WCW%z@2)GCM|jzm~X`Y;6MGb@Bw|ehrjWZPXiH{XH2XL^V}O*6=3zMlE}tyP6tJ z{hyzV;DT!&895C;JuVCo%M^v{8E14t)g zeK5b}-X-@HhNe;w%6_*D+PE(lKFeIak7xdDA~IbX!-je7-hUl`11^(d=YYb%LiyQ} zJA-|>Z_EMe_w4)uH7+npci|uG?{k1P34zFpkAdsxAsCUMEr%9?dIC6euvX_?;y^r< zdLi=g6dX~j>&IsYq|orsbS{AZRe0Pl7!wFbsuIxxE%_75@TUP zN+gOz$YuzP0(Z>(U~$@prLnOY%gf|s`(qe_ykLJp8cgJ3{Ur1S2}qHju5LBpB?pph z$*Ht}3;4~OH$8iQ_Zt2N0t7?e9whU|unhH!%Y`=a?p#y2ff{Z-dKXsTZZF*6>0t-p4@UN8gz zuxHj7&9;C3d|g*Jp_gbMvcuGykTW(9eX4m-C9?77JG3#*Yf1=@m47|^qT z^FvPwS(w0aOyRf9YVNcLQBUdx|2xm%NA+D?&kF5`et(iNW5f=CLDnD$LxDzx zj=hTizVLmv@IPpEryrmdF!eX{@l1eE{2%|U)4~Tz<|?$sA*w4^Xd73(MDKI3hv$KV zFVNvFpdg84*mfmHRCV|A`_xPP;I@~)dKC{G;e-&}?8fR(8~gn^4+rL~EfzM2xSvMH zkY(ZN_TpC|X*00CbE9<?z1dAM5O+2hPrfeY6{dHpmG9) z_yqj@Na_Q|)Xf?b;BZmX1G43yow0(#Q8N+8g(3A^ zs7n!bJOcz!fb&%A1WYfu4)_rj4JTz#0nwRLeg=R;8~C4XMjpUHKDhZFq6Y_?bWkWh znwzr%$YKh@B4kf?U-@CJ?g!_wA-Tc*t^}dB^rsCC()GIwT+#Mu>FysKWNeSrxLt$H zz6RAweBZ^(mrublB}n6MlP{`!Q)e8OHZ3BJWJ#Pi=G2ho70%#60*5i*?d1PXfyIh< zGD-Qsi8_GUPRNqbFfwvs|4uHshjdub_m|Vtdr2)D3K>EUjw^fe>z7ORC#dG-Jw2;7 z4gtE*pi`KtwSao~0KQ!Af(GcOU_pBM6+2ML*9d@62%NuKaC#J+!v*$bSz`mc_1q|3jXd75JL>RDB=VR)CC24_Tog2vlym^3UgP7VQ1PvcWjGEbZ3$9!QZeSbFjp zHPrj?<>;IXunx*OI~OYq4H$W=0B3qMSyg+nJjdWbGCSgnBP&!=V%9z9=D)xUz5ftK zM?*&!0^AA6j4t7nZQ#5QkhdZ>*uQxG2mApHQ(OmfuK@ZFp*-CT+a(g}Y6NhhL{m=y z1MUP=SF(P7Qo!UOR*ufdvuRon9zke63X+6=$^{;Ip8*F^9Hp<36t8L$Zuz11QE5MN zL2vJ{&i}^TTL)G7zHPq?kPxJ#8zhugIz&nuk&uw?1_22vT_PaTh=53U2`JsAgdj*a zN=ivc2uirm+u!p%?>l?uo!NW;wP&39ejOLAweI`6uj{;y<2XOpU(lyuenW&Hgs28M z5ld*Dx1VbehW6KY5ClRTuaJ-s(89zJfdE>7ufPO?kQqP^)G{^Yt46tkFbnqqp;o~a z4fv_h1o|N2FkT0WpWmFmt83Zl-s=3krd9GAJ%YwWcsP*>ezNt6H1YsIpb9a7!K~`hfI4W)sbCisn945FgHpBB; zogk^h^bFO@VVRw!<&|I(pvZ_$Z3CVScE6AlwFGN`;{=g}w@Ung^z|h-QoHCQY@~Cy z=lENWG2=Pz1Zfuuajt@j24elUT)Iavfgt#TSDZQyq{PCx1mHO=xP``G7yg*4RHDtG zqM>;Kr5!o?2I}ptbK>hYkz;vWV(Ka?vPiGe7Pyu0e8Wf>u0h(cpq<>KbX5X=HSEg} z(+Q@B_&_8dhP2&Ol>V7);R5=PLdLB?1Wo5WU4_);99B#vNg&-(BTzq}G%SF^B4cgM z2FuN6lUYS1D3OrwbA#S|_)Oi_iNm0O!UlN}hZt27)Cx9F~}KWzJ7gIVjF5j+HiG_5`9q~RFAVu z2@y_k?BiZ%7nbtXdY)?UEyFcW33!@SW`arH!43>|OxNA!Wg!2s1ifYm5&~(_Y@
j=aD_rN`P@HZB_Zv~Yp!e}vRXJQSHSSj1lt~T$!3-2Vq`}QmaIYRV<~w5 z7~o8)J-t3B7SVu)acqb5WkGO6aDD&5L>U8I(Qla)#rWq4mk4r`uyL8Qegfv1mZm1! zs=Ms;0ldlt=Ha!e%7S~g@DI>{sGSU=Fv7V3yFoh{GBPvEvxssG|LRnl0;<40t_0*M za+SvS@iFjy)K+66dCX&aX~uiYd}RTnQ};`1Xg07aK$BenSp-a*$O8%Y#S6&v;=ZJa zfaIXD9`Q~|$;h68{)p<*6MFLGNfT^2VEVo8fRPH@C@6XyFGV7ae1OrkoUL>3^4Gft zmx`+yI_998)~=$I3*`xfomdum1bhOwMf7+T!S=x@!G%#)S{(`1Cp=HPV3{leaxVnt z4MxAUESz$+N}EsWq)_^~pP%{yhzuFP8CI5-kOnNCcpKl*AqQ|mUXVkuQ3xzF0#;)L z0|#T31@^3nl>|+Ok+2xYnvU5Wz%I!c)@}uQ2~TB&4i_8}@&cKWP9TmPTw7cF2tXnq zp`7kS>?fcEc0zf>l_2gmkA_Y0A{a9iz=x$kj2tlX7eVT!{6=60wW@G)gRRf_^Oh## zH=n?!32vEwn4je`RiR*DWE258)XG4F{s5~5l^os;Xs`wB34%UQ?*%VZ0W{qguvDcm z`~@Tif~oik_F^~)`0l@c-2a*J_f!c9MS^nMFfL)@MCoQ}nT^0E;B6NhcUs86o=BdlaB98f%2V~!yta^V+?Z87xg zAN|aRqyyrPeNGjx9B&v8u0(IzYN3$bvYO6k1_p*sNEE`)-rWBzXbBI)VZ%IRTCh=- z69{WBP}b3J>ua--gc2+zu&WLN02w(~f-3UW$SEn`7_Y*TfxuD`g5L4TNdy$1BX9X2 zANmaD?`i~l2TnE_3|{}{?T~+sdlQ-%<~p>(j<2Y1Sl*60f#RPHh0x=|Kv}zpvTYxL zl?fYdsFP4?(x~UTIerJm9X_gKdfE_4c6p7j8)9UV$N@JZ0yq?kumQt^oh)2{P7|Lw z_OHIH4DsZ=kPHyM)mXCO?}d6)J&h^D5)2M$A&MCpB6cR_U8r}i0xV@W{K(tDNOK3X z!xA(h0M$+32`?V3V4+ z^O!^5Ef7qzfLaO3v1bJZb~>Z*l!uk+C@Y6e(RSP(gIrnH$B*U^!|3Vl4I;q=6#${3 zL6rSnelf)TV936cXheeu92Of(0*?X{I8q+~6}1Z@fspPT{Iak@($Oi*N8lGDTW!RS zKrOxG1)&c38U5aaZ16jVw#o)5+h7v}Et|?aCdS4HwFV@4#d7L@ZxQXJf%zgTO2O7P z|G*Rv!6Ie9d#5=)4uSiwB8Go2sdK_XL%=}5$led=p8pZV{^JrDV*lIQS-@8TOpCBC zask2$Ux2cQgNrNr-+wizx{L=OB<z4>qGWa3#6e%!(}N8tF!c+jn`|(tkT!lq3Seagu;w^jnG?9xkZlo1^dr!2zkdA+ zI2L}ra?>bS=!=X2aRZT-&!$KcAGNgS30o%Evw<%pSPvLO9CCbY*gCz8k1sE|`LCVt ziet^{Y~BrI2Ev9A@u@mHWi?q9cBLitL=%?Tlv7g_!R`YFywLv55uMY z>!QA?;*DW4g4j3%^h2czz?dslApd{x5p&sw+$f-|A&BGJX``MAs87_M_&lja^5OO^zSf&&NiZd zTh77C#{DnnNUKBP37f^@>3~+<<-`&(rMD${s8{2cxJpwp#LO?zv$hqkj2qeNJ zs2nc@xspK$>A3s7B9qOLxyFr;pD5 zPQcY2B_SX8P&XsJUv9-$Ksi3Sc+}QZwu|RkJWmuJTpYP8jctAjRTA?T%dOvat$6-AJ3BVSY!YfVJtd$7hC*I- z#`N)T8^m0xPTvIO1WKjX5Ql-=aOYQlhBOnT7lJ?4yy@Dw12tgapM!SrK@sH-@f#uo zn}+tRlMS)jU}{o*!UrSsYDPd1I11Sa1R!#W2uam|MJepZ4zrRYYy78;F+u_q{$}r< z0BW)KZb-Fm^B0>UNIuFHjI@9Ga(9{|7!CCo)W{)#Y0L(}3EZJ}ST>Q(6RHaZsNp6Cm*NIYXH+6QLq`g8;BPd=8c465_={lq1&#reGBM ze>rTam9X6aE`g+E<;gzSi1zkt!qQNVN8;u z!X!W*RQS|ye$YWwNW^tE3>@Dp*RPWR34j|6MiLqa=dpvMqvxP{ATVO&J5`}H5N#3+ zWB3w|B1^xJhzn$;z~{hin_M)IlGz8jKP}B9N;c(xS$Gz3&8wranoOZWM4~mw-bs=3 z?u6|STP%XZ`>b9p03#4^(d9;2dRjv;%vp{vc_XSs?~Me)VqpN1&-{85r0nX7B?=0^|VOrN5|o zh_HP`#xP`a5h}4gH~-ne;4A>l-*l@aX%mNPjJqXxkhe+xWsn1BU0DFP11 z^dXq(v}gFvTsCS|?;1H^d(mEmyG-*SOLx4xW01tB>ENaL|K(G&)%2{J>4i{1&MRZ>^#3o>-Xoqd>3utw!EHk{) z7KG9)7_k7S`n7}?*E1=^os;?R+Q{T#+cQWe!8W5mRT3`nZ6CY{#U>(JA?4nC8pz*k zMI*XUFm{`fJSf5>hZ3`&hiMQr6xuuwctQ1hb0St@tJBA${n@B@j@2USs zX$Wh^Shk~{6Oa+3xV1?zWIzT*xy{WRLQI7AQs}?XYMcU;39!RJ@HKhb7wemT?i~$n zDd9bZ3Ik~r>+H;%%5Sfyw7tg$pB=Wo&%nfk_5}cFbf%avG$_>9)q$tBYOBALXycB8 zhC zg@R~+@|`;_P18s{sPzoOR6mCE2%_o5F;NJr5vH7Q2vC_rEDcHfrlx2B*w2-)fdJ_s>!=rt`Dz(67Q^ft@vH9`-mYy{Mku_tuMBP%8y5PIh!E{- zL=%FC^foZ+|C951lM;TJdFjGC*Sg;=!#C$Ed$@O~N>a*9eXd7fwn0c`si+`52qr_Q zE=fV6!N9@+HdNJr>=&QQEa)bJw!ECSJk~D#afi0dk`u9dOzOr4hmFZg2D8|yiHS&e zo;gfK2=LMkg0zUU2;$E%gaTRL z5!MxH14k(8Y{#j8&rG;Wt$|cTIYEsVXQ`%bgrUUGC0+S^HzpHS`MT9d(+R5Aw zEA{MYeS!P;Jf=9Otq4IID^jtelxZx*20j>{;Dy~n8p^?v zp#!Tdmg6bn`$0wL`tOr!>5Sy2c(3FuVKoTsdoY&XF*>@*isFvwAQ1Gmd#RPGi|RqY z3b-nBF_*kBwqXZSLbwm8>Z+aGzVBed^PGmy{ge?EtjoWLoP^L@;6{$MDMn=bfz^u@P-?mPT?O(pv&z7C7}1NL&%sl|KiilI8AvJJKc%;$sLj$8{iS@`Ag# zC`3Yl0<8@?v2QqzUJC$v6B55h95d(y`!*;AhUFYN4wjpEdlk2+B&BeH#k!|1ZK6zk z+4!ce8y}&AmU~ZRU)M?>4mq`Prp_eBJM4LG1x9=mPUB^4pzMFR+CgyM%sPMhd(7zE zjpM_Un=;f;QtC<<$(lU81nH|zsFRWA?0CI8occO-+hNRW!N}n&h);kVr-FjlASp$p 
z#|@ZjVqi=oIU_KRkkl~>4SKm1+@U%E&?1td-=QNUEB+#3Z|J4PKq0I5V^BJw4#Geo z%r^)|IL|fc7T+y5Z5{-kEmU<~*?%5hf^F8^J$B!GlX{14v`3V$_-ZEiQ&Sn@wpy!a zpNn}v^~z7T2=<7@zexS;v8*qaZR~J<^A%l{Ce7abwk7&*A@`|Ya7T^~cMpZ0tJG zYBNwBBE(;07l)k56doP~S$pzRj5esbkeY67kYyIqxbP2hkk`nYCr14r!quA)u0~)L z5C3;UK+!@{BuE%H*1TkXC3Exo7d?O4wEunO)i1Q!gd>KTPur)}P<}HwZo5BGcd7sI6<<^C(x7L6NSVfj*W5w2=W^1jJPu7w!RbZR@zSDq`n>k? zEtlqnZ{-ZBn6!L!J?}C?lU@3_`##i_!g)>wjM8J~6HWtAcihdm6$M0POcdlW)qj1W zHE*+rZLJlV7lN+~EdPvxN>^@mAT3V-IbNW9-BSYa4l3Oz;C*m&_vNaOoj4^FO`DN{ zJ;(SHuk}9ivCM4xc z+|W0A_c6GhuiRhnn~%hNREKrTf^m0ejF!(Oi1`6p=sMbc_V3DYUszv+e8A(Ii+hXn z7OPMD@`OKY;+V5nc5I%RK8SUS55YjhT)IX^6dEdW@MY#MXwJg%ZES?Sz{bLhU%I2nO0$*A|dtRVjtS=Yg)VfU%f2~r{aGpo`Rc5SA zlr;{+y+1eWBsUi*7BBr}HYN1)no7F zTKmnfHL@G>hsqaB@*Kxv&}j5#gE@#)5c|qI0!tYBQlJHx4&aT5w$lXCE41Ro54Ryk zl3)melYrC|QobQ!atP)l0V;Et7k2jrqX`~C8w>C=E+c(2&|`i zi*$Rg4gU>kQi@Rg$_kHUdX{36;KmTvSzdb5GE(%V*YB-xtYc+ml%;rtH z@f8*c3ifSi(AjZ7#(jBN_6b~<;m2V5AgVDy2t|+)1H5nKEF5hj8PwW;Mfp#UTK{&! zwjdIU1tiM<83UTe61hK>BwGK*i}fbIrjwGSgN=GRrZ>8~j(4qGdR}1dpy!3g?LTq1 zt<(5r${PGUY!ZIz^43?Rc#+;bbDp31O8hu*rsL~oSJDGAIp#_+GIw8h4qpGHa7R_VA`@Xed0k)IMyf;KdD=it53RHFXwc3ilP_FLn$5ULAvi$b@?OD|J$c$N+7 z%W&#L$C@l2MyV`;6ZfVQlRe9=R^CgfGgyydWe4H0%lZV|-XKt~Sa?fAv2#)|9zTV4 zQ1g)c$N)5bNK09(2UAYd42E9sW{b0mBA@nA=M`=dEb(NDo(YL8V~1Omnq}qa%ARM9 zC1{^Q$%C7u>x^Nxu*%4W`-;<1%zxWb)h~M@+SY#7 zz;BersW^RZV4bcm`zud-Ibm8)t?Y-zY1Rd6IbtOf#@V#0uI1i2qmI=wJcB9YCz%c; zIJp%0EPT>L?!E~ob-&$vc*?|?%Hpfz$=5;}0dJQ$z)M$U6a6L{LQJ4|AYNp=yEp9M z$G9MPTBOAV&Ge(!g0Fv{Ecu#{;*L3JCZ!^4n;zPka|mnPP?A#|EO8F7#7W+NHY>*- zPVPz~z>O;F(yw0b-LA>oz4Y$cg94f3Ll*)%yY9`F6#)~P%LJG282-AvN2d9fx9oiY z+6w(%_)Dqq6=PPC1xgGi+zgyMXNsapV4Q;<5d}F>ga)tu<`p;?kdd_n>psd5(#6?y z+FHiRp?QXF`mbzE2fm&a3-Op>%xrElLO8QWG9kE1el^yMk1A>3Mze3${9OgkynTQ! zj{o?yTpbs|!g@~dT{^SlBNwc0I_0u}`h$wmH~M20Nx3RP3meHjsJAQQcwU6U4pUBB z)3h4nqKS_~T>~kjFfO3M6-W9E;lu$fbM#HT*3!m>k_Eb%D`a_Zuj+$j~qN$uE3W!m0Yjf?5` z{6oE0n^)A#2;D)Q!6VY2zFqEq?!Vo`zC@IRgH{q%B6L#2wW3O}N~zgH$1(A3EBMQf z(*?w}0x<_KCBUhjADZ;;=ozAN83avqbKbT!rS^VB zpZFd(y=K0r2=`UNwM>EK4ig)+4>#5Ep*?nanMEx3+l|2f%Ptq;MyX>qKm5I_na?lS z%4dzIhpo1BP;B_?=GM*MH_Jb+9tk&~L9KXFydJjm7W0)Fo)mcoNZipsp!?WyPLC#0!mZ8{+Qk$MgLtIef9 zO*3Ztw3N>@Vty>G#y&p2S;0rntZ#)KHA^U0hMIQ{&`lrLD+x`xa|347g^{ns=V#OH z10R|w-tieeqg2^oi|3mSZn^+f+l0Z_z5zA-z2XKptTapu_VOnYSG4vwkFUa3Ev76x zREUSFzwf>z#<|Iz?rrf$qbD9vdTkuvGY(gk(UT+MTg4}DqwwQ4LU}`lNTyxH9U759 zo1Tt;T=g^P$u|+r>fO?bLws)hyz!>u#O9Adx9tfHD=YK_xP!%>E6bWXi7su>Yt`-qcO2+j{1N9blO#LKNpk82aegXf{^ZNEyJEywp2Npls2w5%+M~sv&kCmv` zztp?8A9RsO6*s=gPq!~;%3x~rgz?43(i@XDq~qM#InwMSxW3?_lMxZ+3#Pz{GP93_ z5%tbU;bJVQRYS3&MX0a1r07VZ)Za3+%UOdiMy9bz@WGi~toz>O3jPG<^3RNUy~WiJ zxJOFvap-#wuXOaQ;O>x6oHkSk`^30K#B=Mfal5t5H<2&vVh4sychL5oNF1v^HP!7) zZ_Qch%OMrcmB4C)RNd_I8CX`d)dCRCP9ekWX*8csNb#M%Nf_}Nf7ii`=7(4S6&}_M(Mo^bvss(*ycMjkXci%M^|sl?f0y)<+o0mkUkHB)N z&;jO4%Dn2r_iwQB@e@t4PYEab)VPamb$AgfsjVhzer+>syM5nxsJzp;0E)z`AcaXjA%lX+4@D>JpN}T zJWPy?9>>A{9j$jS74fj;MTij>Au3uWVe#KJWLAs*tVe>$SNQS6~ z(6I25uba<}w#EeR(}vn0yQD!U4*eEmKN@#m{Y1rxzL*aR{hhQ)N|o;`WM8XICU|nY zlHxtQ_$=q}u)I^vv*aHP7;S3wZzV{|ur}3LcVj11n>PV8e)OSHZB2F|ISuX?W>yZ0 zsUl?asPMj`*uN%pxi^|(m&had!35>*77a#7;MS)EFB}FwN4__2Uz27~#mrXB()Kk@ zHKkmxBpYZRu#5eHSg1;a^3*RKOb7nBq!^}TGaL5*HB;leNkWUb5BHf zebB>eqH0&Ij2TNq9uheGatSuNaAo$&yoLT>`k>oBm`F>3kAw?ntUIQ%lrb-%T7$2+ zQBuk9J5Q`1#yl;N&@cPg8Go3#lU0xfRy>rjF;_hI3>`9rwDm9Pa|_>WdbR7aN%*(L zWdWX)C103JxjqTpr{#rVR1{JqoLH+fnN{uN6rb3vIiWepS@vMdEL)NkSe)6CFG#w| zW;RUflM7HEbk|j__0T?zusJ7gYu0V%a3s&SWku$XFSaaKVg6{wHw^E6kxkzENG5*<$`upC77{4G*sWrTUFjmA1vU0%bl$k)S$28GQm zxVDh{C8SXmrUt5GZ{iyq`g(7qT}M|ONBCv~miFu$N}lObCn-6t4_{s=gG-L*L-@R; 
zwB-K%D-9yZwXG$1hEYfVK*F2D?9ilg_fZ(n6Su_WCK ze+6nGmVh`))GDjD%ox{&Z}vyu0>wP)+rZP^tr?>6=zNn>IlNrTeAQB4U0(n7PGL`Z zEVp|SZ2s;{@Ex^RUb{k(ww)`IEo4g*Hg^(XNUR}xlO zIL@6?UoAP8?XMd$DZAVu#(qo2d!WK6Ox5XWfq2} zp(Lp!G`E?Oo@2l7t32a_Lt`fp8?W^=pp>;Yfs4A5E9I*GFE?SB7wG;K*QC?8D<8GgH!B)rm_p3~JIgdUdTNy7aS`ezNgY5m&tj!PR4L z!l!q?xLtFOGU;NUzMRS}_Iz&j@vE1d8GX|Fn|t={#pawVs_>v1Hr9OKv;QyZH=zn% z=6v>zo|4|iBwzgN_D}*YqxBo1I>C@a9cgBUj$YFraq{YsppS{nS&d70D_f@cRF2yR zubu)$h1}s(V$m7xM}KD8YpqbHCp95aC1k-QJ)Hfr#-{W&87GFcV9AnfcOZ5*sv|81zo-=nX;PK@X7A9+`I z;r^@8yj@EUJ_fOf&y%p|46&H)Tq^CrKnttKb3+CEd)F=LgXCuhjKGcsdd4uzGT6pu;t4C87Qx1QOEVdc0yGc9r@6;467uCJUBK=(S4z=b; zIQhWSu?CMFfqVr z&ATupOfF85M4CkP{0ynKXG4!C9~pBMTR5|z{6m^Yi-GYR(h%IC=f354c_{5U0+y~H zj>eKS#SY~W)+j8DFtu~;Z}dEdlRjLteZ@o4ej1OByekYSzfWPJLL-npa79XjW{R`3 zhr32GuA~2EeZ}=Q*`x>RW%SFaR=)P#A2gv6(?TCtIxXzJI(%IH8(f#MsA-^Ouv8sD z9{}~Y+l1Q%F>E(qp(`&Dytd@Tf47SFW3_d>g@u3~w~GDysvEKJR_~!Bfx%eXz{kuH z)~kQ$hlw(jeuZEz7eiq&$Fhw7Pk-ceu6C?5r1~)O1*c`}Y{65XnXgxn1iroeZza;C zBSOB@fUK^SA#wq1!-gs$s5WMW>G^8-CJx<6wf2z_w#@_RDyM~Bv|viqjK&$>v8Mos z;lLfCnu8*p&oR~RIG7E(Y9EHTfbtEp*yt)B%a-}gipw6c5ASEn)X9~_|!B zc0Uy?OAVC)lkEXU&_142P(FQ4u;x3qo{?O?)DL*a@Dcu{Vb~>i+A?3&iMaW%q`f{c zP$!@w|8LX&95(Gr+!U8wT;hJ{h6e{(JNL&Z1(@Dc&33YP4~CIfJXNbJ=pM)rkZcqa zMtcc)J3;D8G8-(JW6blX->S)9Vz}&JHQW{UzBnT`=G%+!MLG=QYc)3!3f3|hU3Zs= z*_p}w#5O>U&kcEqCakg$FvjDOQHKu8z00w%$z=J2VoOf{X6PCP>utPVGbk|&)+Rp9 zkWBuESr1(IlIfLY{zWzaLoc(Z>O|mciT1VvI4#+UNQwrf2qV{HXZ8LvKkw=UCL|3Lkj_Jd_;O|S8nk$CaLjbVP9`4 zcCU7C7U0aOuqlU{1E$T}qS(kcZCVq&jMR?_V1pB}np^e}07Yc+)18lRDVA zTSQF9Ed~1BOK(74?+4M@GrX%XoIc)YpNcT zl=W$i)zetz<<5c}v2{$%Q9@1;DGU^+NTAzwb|&AxY%K$CoDjEAJquzh4;!;}Gdb=+ zn$$R4Bi$RSO$2cyvC&o{Kb1LEn=5V_RzDiiu&{&jYC4?eKifpnb33`)o>-9S8!A;X z!e={ox%B694c5vV?i2c(Ps6mjeg^RVNzS#Is@s!W$v7vt7Ac#ew;pC6iRBr4J{C{o z={9T@8r4k8zKyNCd#C+z_ejqK#k~^w6#{fYT7imbvsJCndi2vRTr+an~-JiLz}E3Y^K! zy9>@Kg2&y$IP7{W`6piJgwOaa11$Di)6>>X>5XartY5`QwMGz-zNkJ}-p6ENXH%1l z1>DUQU&Kl51-qyZun%IjtHQ$Z$?0!}5;R)H`DKw$37FZJ>Y{=$Ql|O62LGC*UebvF zDtV3Jv)*U>mEmyMHw`f$CsGMF4+k)x6I-c$`)z`5S?s+vLD_{a-wH&V>C13DBf(r* z9bj=;)nd!&k+aPr8`WqhsbJ=ukKgRer247sElwD92SZ|I04KP;Zq#t)E{%2F)m_F& zS7F)(*M(i5gH>2IM|qHoldqLZ@X-Z(;M_5FP%F?u@aP@4Y{p$&cn=Xtx{dzUE53il zbMx2ZoS4ErjF@D;V--d||Kv$i=j?-1aQfv+yW~Zjdd6`NCr>ViiwdE%9)Y*`#Lpto z9j4a}>K8*6A_fo3&P7*#y$Ewsk1(yIO6eHui3;m@k8Q zl}Y3#D|U1f^0d94Jh^}$9;~3O#c~)ZbvK>O>+W0t@zY}B)meWHCH$x^<7v6Bw);m$ zb*~H^3BPG&j0JOY2YsIOJm>S{NFGYQjjeQO7<6M&`O$6i@7D$61vq}Tw-Xmu=v!QB z*==cohP!L`!n3bYJ>c*g86K8{CK%`t0ekGYk)GwW;C;5gW34AI{kLtep2X{^&m5%l zc_f}Lb#rNMO-8LZdaKv)IFr^3DcYnNqJNX;5%}1%Z#av-VC7jRQoSlHKoRxl`Pq}| zk+Bzgr3zjfv~mSAFSdVVum>+89&@;jCy@G zw*0{GOXjqYz@hJG%62!UQ#+;q_j|H$L?YK|D;|7iYh=D?vYXHZ7L0ZWuqQSlOzZHt}~$>80$o*$O`o@{M9p$!cU>$YlU5E~u1P|&O)1$s<8h^ddy zHqA~-|H>UWRpVwJu>Qzn(i*>~n5@x_k(MZ+b`$roUChK)AcmVfVCds(E7waV;zF;t zGg%~3ShZNvM+!B=gq^4la@e%-7q7Die-d$k)=Y}SMmWb~rzsw~tC4Q_FIIqt<6vV8 zM(`uo@byX(K!{02z**?;-)*7u@qWDrJ7gJcz&eJmV+0X_U>MrKLau|eGsU5a4}J#1 zoJhBEadGkBdV>-U1S8s>Gd=Esh_jDMbuUQ;_1df})zxU{1(|g-&XU8;NlpfTzrB88ka(;UL~M6kb`;g2?*-pmP#!Un?oBAoF@DldOn~z$A5HuXh>G6 zJ2?#t?vP&<>7t>H&$RgDqgX6>^t`iUv4q*_f7rLV`~|7U$0M zTEqnBFyDBiRDLqvQfxC4|NX_RTq4o<2iOOy@SU#E>Sbpb-0U+U7q`>&V? 
zTlEWIutsp3$vd9RhNqB~UcKhTvm~>GK_o<<`SyL}Qq7VST?uo=T>on;iL&xbSBqv9 z^wkV}<^{i3JhXQ_lIts2JK%b`Z>iER8u!ZTijZ7cBbi*-dEqzgHt3m(0S+4yK1!LL zHHNNM;H>e%El02!<<)0>t)~Ql346VIl9)&dsf1I5I1(yv+j~3ctGf)&H^XxJhMZ37 z%J?c~QX|DK-uH5+E(vmHbjV^6i1)MNQ{WW$++b}^w^GI1OrP!-w9trV!O z``%t&LxY3!zAaPE`3()zCH?C=cWcFyOv{zZTiXep=lIz~+0fgI4W1M-X3F=Q z+fgNN5miW}T2FXe6kSqq5V0zE8!Q%qv6wgeN^PI;nbxRm`l_Rs<4X_q+~s8D0b$Fq z#K4CkfL9pV!OZ-yAXEB_Q5O0$I<~yAVj=%Fh3>o}-R`#ynBi z(IJ!c*Cnt2SwJBOTioVmxS%3v2FMf?;Zvpj&jhnklLRfMY1jg1sn=PKpJ)1;}`E;Cn5(_1rg zv;3J?oUKU{b3B|ZIh=G-e&R;rTThPrL-*(W z=R>=4q?)i*C$i}$W&CyNesM{`AD2Uk26Emzaq9)H^F2AdqbG_*ZaUYCd^uk3tFu`v z{2XV+1tYRmkw$5{i6W0EI+%~qw|el9!0#Z+l3iSH$UDBxx^sAqk^ z!IgeC9J9uAF8!l>ZnES+tk|ng;+g$~lK=Lk%XQl&y^MNpeTLkQmy8Ok=`ThD$h3ZD zRZv%wTw(4x@sUo&q_oUjbGf%;J|g==!@qoMNp7*{x@{~)nwRZ8egW*#KQJt2QY?=A z>%3cHns=RkTpdH+#D%m@*YOQ2j#kENwLtY-zceEzbUs}1D#-V;QvMvRNOgrD1B!>R z4 zTxG9F>y}QCEHdTEDWk}rx|q$~O8jBmVKpJG=&HW0Fq0uw?Bi#>qpftPVOBn|S!l{n zZp6#*xAt;(e$4wV$?zP-h~Yd%mdL~~@(mpihwH=z?34bl_$M>j*=tX}N9EE_dE9{+ zatRPGRf>_iw%mI}d z6nlkoScD+Xw1n=oTCNbhwT|O;`vBJ1hX`#vRTBtA35Hz{r4q460>4xUU*ZPi*B^wY(zIf38nQ!G)tlXlwsN^NW(BHP<)cfbLg^I+|H>vOfb_ z=(nSZro+SEJ>dP=;>Ep{m*+PAXCqpZtW;%cG-a=FY-L_zGs7fS@s6C{t5w66!n(n} zlMf^STX>ARh+cd%xUgaV#lM z9dzgQhtKN0uRr4AN@w<(dZ|ntak`2!*w{?Aehm-Y)T5X5$*!vPk8aeZ{qdqmDEYaj z3$-f2*S;$1NI&g;OJ(uql{cpUxkp}~^xX7iYF_>%RM0A{()SDcx;;JjY+WZfjeTxnqdsjd$geeJ{J!qW#eiRc8q_u0 z3A}kFXndJxBYVFPzc(jBaALD)jEiTzfYNFpf@S)OQQ1Y7VChdMwUMas$C=F~X@Q3) zO_*jbMlLblUFNLV@3GU1W{q|DvZnho1jew}?Pn^J-bkOlsJoiZ%qIQi;b)u5g~^sV zvOf}iea-15_M@I`R|lz+D@GHojE+_e2hOHaE=<{patxtWDXl2w@P_WHpvZ5nxJgXv zw|?0im<|0E%D81IXL*y=AHLM`YY)7k;Nx75)fZ%*V`D?^2iKHD13t885y!XulE3oR zZ>A)zi(Iv!Q(4{l?Q4WhQVgqO-qT3zt(+!*1O4 zt}L;og8%4@8-yCU`@?qYNT?}rL%MN%qDsDXYYa8AbFaC z?rBVzy;Qgknd&olJO=D%;tPz5DUy4m7aOEIIYQ!IGR-uv7#9C~3Kpl?Ff<=c>DR`v zOD6439>(}?Ma|F|6+}sS`41R3>er@xkN(cMwJ5*TobU1cb!aB>@R3JX3sm$2R^4vo z6hnH(az7Ix{- zWn(gnxRw$lJXmNA*--fWPQ;}){$t+b45q6TsJZYI)_$d%I`xf?KJJC zsGTi-3OU6W`4lYP8>lbaO?9xN;B#fzWZqm&zP0rX9{+{A6-4tCb>kDH9u2AYB#V=G z+k@2{ndr&YVVO#HI7FFqMJCrQvp0Op=sB&%1nN3JI@;96%<5Q5~-nE{Nb+Eo|m#KT=?fGTPZLr4f z!O=ZeEm9a#bw)0zYlZd%FWM<3>7Ne;v2s1N4ZQ5)s8ha*PvlMJG)5Lrsiv+&uu8D}!MX^a=XY7BWPfR$gZ+awn%#jv4Yiyz z57$Q}cyI_v8}NROZmqiq9KwOXH&zZ*GW?d_mgR5Fq>0r8su&GvO!vh+Q)|x~we){xbKT@vy;hqm#n?DP2Q#gx4@5#Q zO;onlVHrJt8v9!I)@BLZjUh8-)DNYB7RsG?`Aw98zZ)TZ1V@JID({-%(W%>cr7yYs zDbltFB2r1Oc#;|r>hV}m!k_YTUL1mab26@k@Zmq&`EN6V@G0=aS09>bH#DqWn+jdp zjGCL+3G{hYmhTO>&N=jgbEvwZ;FEA(KxT%~D2(-ym*J={nE)WUA&TnIyOABnDxl1uRZF#(No~ST2^w|la zqq`h2X5L9}+eK@8_-ESk9Z5c6V{V4aB@s(}5ep(gK~Ag7O!TK{_!?d2ODkUa&Daxh zSElj*DjI2BjILO!O0)k^7S(C1oI4i#f_J@OGL&sND`xwUyxqdlGXBa775u9lpXP-_ zW~c^;{JE+PJ2K$2pjkwp8b57m3G+Nl7xgWEbdj&K{~ha#)47GlV~;^YRqdD@xlV(= zs7RUsC7(7ygBJu*n$At8mmlRQmLZ7{GR6W`w;f|;R{E8Z+hxTEHB^1ZH~CR0F^mOlA0@K9BJF* zm4PEB+gOdhR~HCaxfB;KeX_jWi3gvd^3O*VkKS)jV=v*Ah0f#SMF-9IY{$gvm^tx1 z$+9#7S@=m{OnC&y^@gA2Y*Kz_$TC}Z~RitcY|Al=l*-yq?XB{Zkyduv{hDCU#z`&a}6^{cS1+= z{j+f#EJE2sYSFXvj;li#{Bb{2e#ly7?L9FlXqKE`zx0E9T$@QGII#1o${%YCA^On# z98X=wcCAv)iC7u08FO0YJYnYxBJx)lo(%|w(9zy@R{eON)jv=&gd2*Yus&9uHvP=< z=dotL`@anYLKB)uCu)o}@S=DY# zD>Ts<4UcE`xhkuAG{Kw2X-DhOcz1i0dXn^aO8RP>f@iRtx}nfdc^-j(U`vPmNo z&99W18-Y#9)ti1qz4`Vr*#u)t8`^6d+HUo2D?xm<__am~N!eWrB(6aX(j`K5_h`uv zlg-{6m39WcPf;8oOb-tduS%ZHAY|neH2rsv!Z2ho&7Y9b5(=RgaT2EYeb;g=*rI5D zJ-fT=&Fxw3wOCa>w7!^ild!{PsftUclbdQ0nZ|23@2`8i4>gddg&+LktGY7(K4$KRQNaOT z`Wn{$G^}$Zmdx>}$uGC<*niFs8wAOD0)bnxZmw=xVkq?R@xFrDjogBqhm?nx($^<` zAFLt+eQ_hc5nbLU(-_g?@Zjje-6LK6beK#(j)@F@ZRLL+iNcSm{E%)~s>r)=7v6z7 zcG+74eoQV51$931mRH(fO9Vd=jCcJq3K_91n2{HN>+%2gB|T6-&L4>M0+Z}<*=^)E 
zCv6;@{8~TyDZ_@bix21jVM71v@~SFwiGIpg*VJ65p;_M%IF-x0;Bne42QmZ*ftMK> zqgC_ve8dB@^YTJ~0}02(L%cM&weP3+`c50i8N+05D^?2sNy#;QKrX}Pe{`d*iB>}q zRgN|+<+yQU=$p+k=*hQbn%YwT`_%_N=KH!&u)LOcs@fu(k?%@^p^gcx6WFt%<51U{ z)(-s;@i%K?^US1Im*v0S1b&R5>fF@DgDmX7UP}7^QM6*h~s^ zJ{~u|E%^={GoWoE=W;wrS`-8oWIn0&l$|5~1@sW??eG7FQ!&m=62H_L^8vjCKyNKAK6cfl)S3$Odu=ra&ZfLiahQ{Xuvv&P8%hqp|XWwB)%G3VI^!F!|erZdKH|NP`GAV02<$01VkifW4&u_E z7Udf?d;UQddJ~`-8^Cs9&@P9DYmFS;_d9F2#r^%a9S6{577+NkW3_2sZ@`k}nG|4H zMdv)ppu5C*sv-n5{7ML`1NpMtBM&R`%13cWM@Mp*-w>`pGYP*7JTm8_qMj=yFSXd{a@O$w0AOF*1cXk#Q z*{MdKa*#8#n+}H6^qWP@)=FF9m?y2rzx&Mo6l7wkE20#L!8<`wM=46{FB$=`crjXX z+Z;|yI_K*wfWD=^FjCgzPX5OTTpO4XtJM>x`#$$?c`ZeWRsg^Ox@`V+q?iRBQb0T1 z?n9dveN$5nUEPF^GfV1e3A*ZXWivn1>zZ4@A(~h`EY;|p|AYdW@0|loqUO6@SzbZS^s3^}f3=d)< zf(fZFLo*BOP{hsIf-ut_+`@|ZK(z!fWG4=QL zdq!R4crLr85iUwCNbfORCFP_toWkV=u=G+*bLF`Zjqf3qzu4nlmm|jrQPB7vRhT-4 zCzGudk8q+_FSVdf5xp<~;Xp@(Ea8i7#)&Aq8>g@F$q5sPFA*_Yg*1UggH`*A;Oc*H zLH9dF{zT$Q?D4O5C zK2{kuf9dn0J^-*Pxd!Kbd0u}WAY6H{u{EUu1J1CjAVshKOi1lNSuB^lhlvTB{O8Lw$YT58J2xTiO?Ta?Xml z{KY_elB7F+5_VTq0&%z8iBMkstGD6-!k6&eK%MF-EQ)e3Y@?Pn9g2Lp@-+&XSYW|1a5sxy1=dsPaEAR3 zGh5^1+if^&R{!4UK4s1;#JO^o@Z3zWU%o zCuhc5%W(sr^EIp#@G>Pyw52hV5iFlldULxixH-*+XF-+!>VMvOsF>L0?3S%TLB3Bs zC+Zu<9mBeuNJ+T@V}lf^Kv-kCz#ar7Mr#@I`oQyDO;@H4`8jJLRsY<5)>o=4n?KKW zT^gvAB~bm2`jvQdf29M97_GF(4GjrdC#Z|Rv$x+o!^mhO^!dLd4Xk~nQVskqBzg;= zi_6;VX!g6Y>mxsdZJ;$*qY~wJsM9|TySzbA-(rtdPkZ%om`pY%^S{aq-F^N4e6zVB zrbb!uQ%2mSG;K9h)723X6$EE~Idg%-`Z0i=limZ8GoEQWU)xrTW*c8_@!z&9SuAFT zIRPx4m`kS5n?bUvXyO!@Uz+%@{K(!LH_|OhoOnDu{0?vf`L0_+^WqRSUgfXa#Sv{uF5*!hK@`8~N)CRX2X_A5Q)pyf|{6c!$@ z3s9BD@^Cqt%8a$Drl3R3N}g)jhML((46yJDmX(#s?L_0Yu}Otj!}YIclQ^B(L8P@{ z$-yFNw)w0aY?34^D{E94s|`k=W2q>xDOippCpC!NX%MLctmip>^fU_L`Wkw6Cahjy zkwg#+MP-=&dhqsX$Jb=fUQ#|vX>VVz7$n+jIhj_xj0Hba6#0sZ3d_;CjPzk*G@Q(8 z*%tU?(FO={k;n5Bl9ImRI}oubBc4t6q^v`5KP0E%QH$Dut|R}no=||hGyrIa5D%n6 zcmMX=G+?fAd3moZWDAV+W5?TaM``?K)uZ}(cIfLj<`lJM9j7unlfgc-69Vs31B`ke!)o2T&`)> zkeo|$pB!-541e^9{wzI_H=@x#uW=5$AVhy^qXnfjEzNj$d5_-i$|rX0heW@U{t^4a z9iGMs825z`43op4mU}xGDr+l=U&@dv|EUpwA|Xi6dH4kfT^Hk-*UiUsz*(*J^mGA$ zBEAKhFp%;%w8u5s2-+DCtOM<`-ShyYu)Qqom%$BN}y$))MxONEgGSFZl`_I5#Zs ziVsA=Q_gM(H2i~Ty+#KaBs8d3GhruLt8_{T1&yweKGKyL*}#mJ4lOt8W9 zCL!q|$2&MWzIm{#&0Ot5T6D*Ah_bHhtj4PtQHP8DCy(;~b3oLKEyRvg{2asP65|g3 z_Qd%3>55*v?Bk5P5z_`9rEu9J&V7QCwVH>^+4mI{Kw9_f9_nrF;nUigJq}fIXB|B!AL70Y~*nDvWcko00wv__SKY020 z+@xbNPHQH+r=nAyG1t&+{sc)Ooi-*LmyH`Ku_2-;*9vKoCnAv~Af_e4D^+v0b5Ztj zcGA*$?D`ssNY@hH+f1ybw9!EN1Mi14sZGTfq>f>D zOa#v;SVxks*r{l@6Q9wI{L)ck+_gxZimk{F`4335|Qg&xLr;y$*XMJuoEPGMO px741T^7#LX`~Oh?fBi?a&UJ!*x}-!xy1Ub&B&1V78U-l{X+au6K)PF`8>HUa z_uldSy&UiT0q>6CxWwU{y+5DzS!>R@=Gwu^iZVB@lU_%mP&Xbwl2k>Z(EU*;w3(|{ z;3vF&6ASP^L1!r~XEi%hXE#Gf6O@9Xv%R&Qv$e(ZJFX^Pb5e`tB)xfUo?2kT?W2kP!AYm;v7YKugT z+jdboUYtVaFH{-5?7EoS8PS=!jBz6C9^6tP_?~tLzF}_1dvHtL*v?q}Md=}P*}XC^ z{;v(<&+nF8R!vuEI~Gw&b=>rE+Wbmwqd4U?b@-7$O68WsRqEHbjswQ}(U31Bg4XD+ zr6WI-6ubNXfARmi2sB;1wi5}mvCP^+*pa=)!zZ>Y{n>fy%%AleQWx9s=1d|nlov>; zsj0Ckgkrc%It}dXBI@cyZ!@bKw7;WH5pwD5pRH3*=C?EL&w9-6(-2LsAd#&~LS<1j z`}!JwPj_O}_Ya5z8pSX*D` zE^lyJ?%7_+P3y06Fp`a7jJ%91{tP~4wA2LaS+(;A{RZExtStN4rpw5s*Lm(UYJMh1 ziF)p(!{3dJj2L@`N{FyOMoERc?f-cq! zI$xVn5mjGPdpE3$H0(;4bo3A0>)X#AHyCnm zT^#Lnu?N3>Yt#`%w-P1Jv3K(Zl~@YwVPZjp{VY0#kaN1hx0fShW2&Dt?(tcV81&N@ zx-QRrdlgD4O3KgAPfkwmzFJsW<#*v*ZDccA`X=B8+1$*Ge{?i{;nT{cwb3$q8zjCm zGw*5^>199pq#+z3(x&QO|)i{gKqNq!)d=S&i$D zEy2jh$WU^@D7bHUhgWH7R0BB*4~vG$#&sKfC*6i%Or|{P_u~A)z$Ur@842bb!VICsHg-1 z2O3{r-`2yKlg$PtRn@4+!m+X~KR-Ow{_2>$U0Y#0`S+Iq*WAj=HDX_-fr%=ISKnS? 
zs#REPudS^ys^{IJ5O(GH`m4ryMODak?dpBV0HF<-UWXgJ6-cC`p`pQk%&x7;uaA`< zBd4Lo9?o_WkHt&KBU4jT-p7may`d+620x*G73qsl@`Xu1{2=j0{<%->m92(s!U%EL zT^nybjZ-!(L$(z(0>P=)FtY94-AvobYE$uL2mtsyDmyznv7?zw5G`6|rZ?ainoL%^ z)b0;GrC$%cK_>kI{W5Z&!$taLOWlb`FsJ#QkB*qOgxucyGx)Z#@qUg%vb49i2&9VK z2d}P>i~CM2;61UlWPb7D1&{epe&(s1lkk||dUO5u+}3Qf_sM$YM7>XP zxz#W_he0Ds;^D*55+h6&jeHd9`+s1@pqdyt}yIu<9 z1y0{n_ok+%1VN{q$0zX8sFalMV#8+lz2CU{zQ=TS4QG5s^?#XR=S*Oeb-y~kfgiYPmh~?#j6mrI>fSRZ*Sva?ksfsXAde zM*;!@#oot`6*lA2$$U1UHqFh=qjg^V_za5PORM;}xDP&Q6uQIO!SY~P4HX#vOn;#D z^eLG>d!1e{nO*HSNMOIG8^jm8;+Ns>x)V5G!3oCkQ{t&lbP( zaGL{h%nsH@yAZnJQ~a_K&MCg^)A0QH3TSmG93bXVLxWZ99m zYB=4&nr{uubX@GLJs7i65{duf7zN9!+TiOGjd7qoR&EuTmX_90Ju_oq+LIDoQ&Z#c z?4Ka!lL{hdWoDK|i-Fb287IWW{V>tsH+>Wo5y5Y3HTET6^{b-^9VcgBm15B_%l@B1 zHQ9JJ8{JCd4!q0Cu>3i{_dVzTLM~$>4U-NI4OIsi^!)jA0!>ZLY$z)9oUa1|S=iXv z=vTe=|74>&F{!-Yu`RV3$#c8>QJ2bAW#YG;;P^}isUxbdX!-o?>jN<+JJh4J9vfWFA*{(XPd zlIK!VDg?-x3(@%W1Qt8@w{vxF>nFy5;el`89;gsVt^EA(s{f0H8oW#@cYrB1HMKK0 z&G#b!0!IIxK}~&X#e#BS&GG8Q4DzvI@-gL$F*DxU^BN6XC8Jl8H-x29!|cNPa)()v#585tj;#X#Va z4HrJE(Ljq4^*TuS5(fK}l9Cbw=!W^)wQKa_loz6kZ&$W`=;0G#F=PQ5q(CZ~E)XE@TQg`llp-^;&4CzKt~eg=m_X4nDlLKt zNY*vA87Za~dLIr&9aU`9b`xqjoQ1aze()FfGy9Y)ZJ)tOiDs*Z_b_B;&Jw7B0I%D5(iADEHEu;EW~?G2r@`@l z@V#7fzzrtlbpH+y9;+ej_lj+h%q{xsjoQKivJl4)c&602ZQAyqsKHf7MMsBiet0kJ zhU7cQRti12a7YA%h081k=%eQWXl?ZfZzdF|eHwIL9SXZa#&-)J-$=K?aZwg(QeaWh zCbvEr3k%B?Y+?i5ii!$T$fy%FZV_;-=dD!hy*)Z(SZEejyJUup`oE)`D%ey!=FX>iJ4z$SvQZ1$Ie0&fG(-&=>q`x4QgSXmij`B==~ETii(P;@2#yl;}!O^5)ctp zUWcDMsEz>CrZZlr@;l{(e+~gBb#9xKP?=j7=UUz-TeB;tqx?$3k(8@_6$Yash%j0YKs4F)}l>w&KAI)LqD5c%JFCl>rS7HLt7S1ZB zluBqX=knp5LgQ~Kg6AQG_mbF{SRqWc)nrB?%=%=gw&5QxAz^F1&q)HW6(JP92cHIa z<|D+#oTw}ssGw4MZcKAim%gO+r;%}TaQFaNKSj`qezM{VWzvQ3QSR-d2ommq#>PhM z!;|U1DGY<@**+ZE>rRlQS~CvX&ML*)TU+U$ZN@3)W_s){iFob>In1}JeAX2NWK`v} ze6!B$kdGu`i`KJ_(#_3H;Z#M@r7olbXaJnf7&52AXN+ujG48y?uCwMrY9c> zAn3`3T|?o0vW3%Vq(YRl9^vxYO?CHW%H%)!6sKJx)&s=Cb$zVFdX($)vzmin7LI$K zqCb-Pm(LETd+oh+9AlOqZ@w_AKJn0{bgoKzNgzV!2brqJbiIJ z>r&x7?f&4OHm*8r=-7yU{v#A#IyX1>ynkNEdW0j6Rl5mJ@%G_iTT9F3SXn8696$bw zwui>=UueRG0Z)>LRGP0H?j2`@ZD56jyo8MkZHgPA0*7sia&*5U!o!QopFf1Kg(b%)Bs5K# zqA%qCs|{FlK!M+RrJoKpKNF^!E$gte@HmOjCaG{&^pCSpwB9KIyEl-YNxU%7@bK{m zo@w}uZK5?Ma+~%0Bz9Tq6vi<>`DhP^l~Tn0-7Jv=N9_Ud&< zQ8OK-@AKQtAx(hS`pXqgZe46*&H;!EQ_P(&>lKqbO!@Uoy=*lqwROVC*jcQjqoX^S ze>3>R)xp7Vyuzlcre>hl!)ZRdw%fcv>l(6sPD|Y$bAfha9vGJ|m%xQOk3*&Uk>uG* z4u`ZE&OKD*F-}KDPAwz{vlk1Auo>ix-<`Z*1$3SWaf9})(P_f%A@zj$BY? 
[GIT binary patch data omitted: base85-encoded payload of a deleted PNG artifact]

diff --git a/examples/running-vicunas/artifacts/A100-80GB/generate_throughput_plot.png b/examples/running-vicunas/artifacts/A100-80GB/generate_throughput_plot.png
deleted file mode 100644
index ff3d6338c7fa43a16f6d1355f121abc3efbef66a..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 58622
[GIT binary patch data omitted: base85-encoded payload of the deleted generate_throughput_plot.png]
zJ9Y^?ADu`1C*Ad5mnXliS23Y{+Nr^HO-AQP9Ay8jZe-jIU>FDxG62|ds0>RZm`iKr z<$iIqFR@QML;;E&LU_^#uo_i(dh3_7)J7wvgz|6ki>rNYl0@)b?lq}(n$x37%_()QZMn24K7qBwjo+K}Jh(9> zi*sX1%UN~d&{yAf@7kclr-P>wadVD{^@Oc0!waFC_v_HgNTnlfjdX*G{{Ce=YMzg^jZSD?z{NRgh6m&#y5zo*e6U@1^7UJpawjGD$@NNC} z(@A92Ued}u)KXJ>12)483kzkHQ(oAg@r*g@w$6u8$~<~C%eRZwvQo3RB=Lt;8j^?8 ziZ%KLg+IJ%98IZF?LF2;Pz{~?yj^3t=jz#JYXkQQmLCeg{ zlB2;PKLuVDlonPn!EfydMehZc8?osfo|`ZIUUu9;&Z|i{iU6^DRBmJC_u%VOE}^aC z53A?C$&r4N{|lLRQQ$N=s@>G$pzqwq;UcI!`8&pn2FNA<=jlCc{OZlS)s z!=gg2I!E18-yLRb*O}~er7n%_+NGB=n7G>+9P+Tpg(gU}l=xc1UDd;22q`^eI*{!$ zv!&Z|5bdsHY?x!FqL%KkcV~EiFS7NOq=FJQgwcXx6IRHOf}^1ua!10a6Tdwn%VPDN zK}dU4hUpJ;J=YClO_o*KYCD38f@V7Y3M$wy+)N&@lp&#_775Fwy+L00&yR-cz3p;0 z$F-JFGgXPEX(VBL3GNCZ@6o&kdHmIFR9r!q$w+8dHZI{8<(N<-pAP*!S84&S+x7__ zBHTAzP|n|;5^s^)U0@G8t#ogNbR^ zJN{b!FIo9+d%(BQ&^8`i9={QF1IoH&V_S)ns|Ss{RARwM4*s7xUI2bK+0KY8Y-$YT z%@{1pt&qWTy2l~awm*S<-$9n>8{}}LE_{Zcq{>xz+wHC={&rShgL``T7E&;Xj<@CiK_UhY8&#`P14%lGt44B|8tLdALE(yZlDxbu}%6!+N zx1=2M_^SWr2s4WY=X6nZQlxJu@y`*_o1rPEtRU6CVHVH~ zH|m6^Gp|Y3X@=kK6;@r{L6&K@1X*zN0jk8U-FtPU3dsX=a3^Ds(SX&XEY}U|nHs58 zbBDw25ZE2*=7D^U`E?%5{FqPw{wAMK0VR$hqm>DXuMfi+OyFlq0qo$q;PU>TkN-+PZUVb$Xox&SP8#T~X$Su@pz=;A zK@6LQhNjb)^#j@s>3VoPnQQht)t)bR)XzJ;`4-5>;d*Bxj;*w+gTAi z=lS_2@Lm`8IxDmGwMH9qH%d@$1-V#6kqt3?hSoEP5o)NanpFDO^yi(Q8*Qq1pAZ8+ z1hDX1+Ch046K%T)#}1qL&h*U8o1ma;)$WIPBqXLz>Pv1yuL4Zd>%b}p5r59o&XM}j z&+WEno|%aE_jf=;Wklpw5HNB}c%b8E5cot33<@;p+}8`_tl?Xg@;q-R zp_Y9n>{AG&yxk$WG&O8oT+Isz1hHf7?d>G6&lW&_y7a!E(pprHJ3p{yHBoMJa`uSa z*Xu1#JcJRcA|T1^x2NylzIV^?c=|0tHeK9p!wuK1$}RB)!P?^xYc$K|ub8TaDgSb9 zZK&m+>h*tDG953oL)!K5*aINRw*o2@AY#u63cQY6lL^42ZWw%`Hvr=Axg&R>gqt0=4`LS7Bjc5>itBlbVC=?PQy&8W6AU3iza_;_52Q5{aXNkp7<%$)$Y? zq;q=*Oo}gn)EEM zg5Uadu_sBz%}vCn_PhiRKpA+9Y=fr^EOG}JbwrCev4Gyl$!S9-dfQf>Ig=IcKM@VQQJ{bGw&mV60gO_wJ)2JYCX-yj{TXniw zXuf3BDvBC#>b%bp2ac znh{vbPKZ#hs~JCZi!HU{qNR~;HLR>n^*$L2K$(`{FM=U;7{tA%mOi}}V4WkrV;u-9 z5qj{y+S*KDv%C@%fr9TemqM z+Q<0#PT;xYNlHnf8I-Xp8tg%eiP81?VX(hrw3%az0w4axouoKqq+b2 zZYRrW0w3D#fvUX%uPg49@&z8mvHwCGb(Q*;I65L!@qKA=@pbc=97Gup4-fr?s#aiJ z#-gHQY=cw^x9i_$P#*k(ZP^X~d$rcf_Tubl2O-rmfUz^2h$81Lqs8uo4v;Z@vDOE0b*+_Y(zy=8GWZ z_Vyfs$@!t)gzFM__u!xfhFjWCrV5QuPtTwGTA7*2!>Z7VIOe}jSDGD4JOqQE1yB?) 
[GIT binary patch data for deleted image artifacts omitted: binary content, not representable as text]
[deleted rich-table SVG artifact: text of a rendered "Inference Report" table with columns Experiment Name, Batch Size, Forward Latency (s), Forward Throughput (samples/s), Forward Peak Memory (MB), Generate Throughput (tokens/s), Generate Peak Memory (MB), Quantization Scheme; the same data is kept below in short_report.csv]
diff --git a/examples/running-vicunas/artifacts/A100-80GB/short_report.csv b/examples/running-vicunas/artifacts/A100-80GB/short_report.csv
deleted file mode 100644
index 93e417ce..00000000
--- a/examples/running-vicunas/artifacts/A100-80GB/short_report.csv
+++ /dev/null
@@ -1,26 +0,0 @@
-experiment_name,Batch Size,Forward Latency (s),Forward Throughput (samples/s),Forward Peak Memory (MB),Generate Throughput (tokens/s),Generate Peak Memory (MB),Quantization Scheme
-fp16-batch_size(16)-sequence_length(128)-new_tokens(256),16,0.2,80.0,18706,596.0,49664,fp16
-fp16-batch_size(8)-sequence_length(128)-new_tokens(256),8,0.105,76.2,16829,297.0,24551,fp16
-gptq-batch_size(16)-sequence_length(128)-new_tokens(256),16,0.212,75.5,10441,406.0,40774,gptq
-bnb-batch_size(16)-sequence_length(128)-new_tokens(256),16,0.221,72.4,9917,295.0,20260,bnb
-fp16-batch_size(4)-sequence_length(128)-new_tokens(256),4,0.0577,69.3,15690,153.0,16896,fp16
-gptq-batch_size(8)-sequence_length(128)-new_tokens(256),8,0.116,69.0,8568,231.0,16290,gptq
-bnb-batch_size(8)-sequence_length(128)-new_tokens(256),8,0.126,63.5,8323,153.0,10760,bnb
-fp16-batch_size(2)-sequence_length(128)-new_tokens(256),2,0.0337,59.3,15313,76.6,15858,fp16
-gptq-batch_size(4)-sequence_length(128)-new_tokens(256),4,0.0684,58.5,7170,140.0,8717,gptq
-bnb-batch_size(4)-sequence_length(128)-new_tokens(256),4,0.0781,51.2,7545,76.4,8539,bnb
-gptq-batch_size(2)-sequence_length(128)-new_tokens(256),2,0.0456,43.9,6800,71.4,7597,gptq
-fp16-batch_size(1)-sequence_length(128)-new_tokens(256),1,0.026,38.5,15128,38.4,15548,fp16
-bnb-batch_size(2)-sequence_length(128)-new_tokens(256),2,0.0553,36.2,7073,37.6,7704,bnb
-awq+gemv-batch_size(16)-sequence_length(128)-new_tokens(256),16,0.47,34.0,10183,442.0,31247,awq+gemv
-awq+gemv-batch_size(8)-sequence_length(128)-new_tokens(256),8,0.24,33.3,8394,191.0,14176,awq+gemv
-awq+gemv-batch_size(4)-sequence_length(128)-new_tokens(256),4,0.124,32.3,7245,146.0,8602,awq+gemv
-awq+gemv-batch_size(2)-sequence_length(128)-new_tokens(256),2,0.0675,29.6,6756,74.3,7511,awq+gemv
-gptq-batch_size(1)-sequence_length(128)-new_tokens(256),1,0.0351,28.5,6868,35.1,7077,gptq
-awq+gemm-batch_size(8)-sequence_length(128)-new_tokens(256),8,0.289,27.7,8394,261.0,14176,awq+gemm
-awq+gemm-batch_size(16)-sequence_length(128)-new_tokens(256),16,0.58,27.6,10183,492.0,31245,awq+gemm
-awq+gemm-batch_size(4)-sequence_length(128)-new_tokens(256),4,0.149,26.8,7155,134.0,8642,awq+gemm
-awq+gemv-batch_size(1)-sequence_length(128)-new_tokens(256),1,0.039,25.6,6792,37.4,6981,awq+gemv
-awq+gemm-batch_size(2)-sequence_length(128)-new_tokens(256),2,0.0801,25.0,6754,66.4,7509,awq+gemm
-awq+gemm-batch_size(1)-sequence_length(128)-new_tokens(256),1,0.0437,22.9,6792,33.6,6981,awq+gemm
-bnb-batch_size(1)-sequence_length(128)-new_tokens(256),1,0.0526,19.0,6945,23.7,7281,bnb
diff --git a/examples/running-vicunas/configs/_base_.yaml b/examples/running-vicunas/configs/_base_.yaml
deleted file mode 100644
index 82507ea4..00000000
--- a/examples/running-vicunas/configs/_base_.yaml
+++ /dev/null
@@ -1,36 +0,0 @@
-defaults:
-  - backend: pytorch # default backend
-  - benchmark: inference # default benchmark
-  - experiment # inheriting from experiment config
-  - _self_ # for hydra 1.1 compatibility
-  - override hydra/job_logging: colorlog # colorful logging
-  - override hydra/hydra_logging: colorlog # colorful logging
-
-hydra:
-  run:
-    dir: experiments/${experiment_name}
-  sweep:
-    dir: experiments/${experiment_name}
-  job:
-    chdir: true
-    env_set:
-      CUDA_VISIBLE_DEVICES: 0
-      CUDA_DEVICE_ORDER: PCI_BUS_ID
-  sweeper:
-    params:
-      benchmark.input_shapes.batch_size: 1,2,4,8,16
-
-experiment_name: fp16-batch_size(${benchmark.input_shapes.batch_size})-sequence_length(${benchmark.input_shapes.sequence_length})-new_tokens(${benchmark.new_tokens})
-model: lmsys/vicuna-7b-v1.5
-device: cuda
-
-backend:
-  torch_dtype: float16
-
-benchmark:
-  memory: true
-  warmup_runs: 10
-
-  new_tokens: 256
-  input_shapes:
-    sequence_length: 128
diff --git a/examples/running-vicunas/configs/awq+gemm.yaml b/examples/running-vicunas/configs/awq+gemm.yaml
deleted file mode 100644
index bfea3e7a..00000000
--- a/examples/running-vicunas/configs/awq+gemm.yaml
+++ /dev/null
@@ -1,6 +0,0 @@
-defaults:
-  - _base_
-  - _self_
-
-experiment_name: awq+gemm-batch_size(${benchmark.input_shapes.batch_size})-sequence_length(${benchmark.input_shapes.sequence_length})-new_tokens(${benchmark.new_tokens})
-model: IlyasMoutawwakil/vicuna-7b-v1.5-awq-gemm
diff --git a/examples/running-vicunas/configs/awq+gemv.yaml b/examples/running-vicunas/configs/awq+gemv.yaml
deleted file mode 100644
index cd6aa33a..00000000
--- a/examples/running-vicunas/configs/awq+gemv.yaml
+++ /dev/null
@@ -1,6 +0,0 @@
-defaults:
-  - _base_
-  - _self_
-
-experiment_name: awq+gemv-batch_size(${benchmark.input_shapes.batch_size})-sequence_length(${benchmark.input_shapes.sequence_length})-new_tokens(${benchmark.new_tokens})
-model: IlyasMoutawwakil/vicuna-7b-v1.5-awq-gemv
diff --git a/examples/running-vicunas/configs/bnb.yaml b/examples/running-vicunas/configs/bnb.yaml
deleted file mode 100644
index 61cf1ebd..00000000
--- a/examples/running-vicunas/configs/bnb.yaml
+++ /dev/null
@@ -1,11 +0,0 @@
-defaults:
-  - _base_
-  - _self_
-
-experiment_name: bnb-batch_size(${benchmark.input_shapes.batch_size})-sequence_length(${benchmark.input_shapes.sequence_length})-new_tokens(${benchmark.new_tokens})
-
-backend:
-  quantization_scheme: bnb
-  quantization_config:
-    load_in_4bit: true
-    bnb_4bit_compute_dtype: float16
diff --git a/examples/running-vicunas/configs/gptq.yaml b/examples/running-vicunas/configs/gptq.yaml
deleted file mode 100644
index ff6890a7..00000000
--- a/examples/running-vicunas/configs/gptq.yaml
+++ /dev/null
@@ -1,6 +0,0 @@
-defaults:
-  - _base_
-  - _self_
-
-experiment_name: gptq-batch_size(${benchmark.input_shapes.batch_size})-sequence_length(${benchmark.input_shapes.sequence_length})-new_tokens(${benchmark.new_tokens}) -model: TheBloke/vicuna-7B-v1.5-GPTQ diff --git a/examples/running-vicunas/experiments/A100-80GB/awq+gemm-batch_size(1)-sequence_length(128)-new_tokens(256)/0/.hydra/config.yaml b/examples/running-vicunas/experiments/A100-80GB/awq+gemm-batch_size(1)-sequence_length(128)-new_tokens(256)/0/.hydra/config.yaml deleted file mode 100644 index 4d10ff67..00000000 --- a/examples/running-vicunas/experiments/A100-80GB/awq+gemm-batch_size(1)-sequence_length(128)-new_tokens(256)/0/.hydra/config.yaml +++ /dev/null @@ -1,70 +0,0 @@ -backend: - name: pytorch - version: ${pytorch_version:} - _target_: optimum_benchmark.backends.pytorch.backend.PyTorchBackend - seed: 42 - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: true - continous_isolation_check: true - delete_cache: false - no_weights: false - device_map: null - torch_dtype: float16 - disable_grad: ${is_inference:${benchmark.name}} - eval_mode: ${is_inference:${benchmark.name}} - amp_autocast: false - amp_dtype: null - torch_compile: false - torch_compile_config: {} - bettertransformer: false - quantization_scheme: null - quantization_config: {} - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 1 - sequence_length: 128 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: 256 - can_diffuse: ${can_diffuse:${task}} - can_generate: ${can_generate:${task}} - forward_kwargs: {} - generate_kwargs: {} -experiment_name: awq+gemm-batch_size(${benchmark.input_shapes.batch_size})-sequence_length(${benchmark.input_shapes.sequence_length})-new_tokens(${benchmark.new_tokens}) -model: IlyasMoutawwakil/vicuna-7b-v1.5-awq-gemm -device: cuda -task: ${infer_task:${model}} -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.2 - optimum_commit: null - transformers_version: 4.35.0.dev0 - transformers_commit: null - accelerate_version: 0.24.0 - accelerate_commit: null - diffusers_version: null - diffusers_commit: null - python_version: 3.10.12 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB diff --git a/examples/running-vicunas/experiments/A100-80GB/awq+gemm-batch_size(1)-sequence_length(128)-new_tokens(256)/0/.hydra/hydra.yaml b/examples/running-vicunas/experiments/A100-80GB/awq+gemm-batch_size(1)-sequence_length(128)-new_tokens(256)/0/.hydra/hydra.yaml deleted file mode 100644 index 3ad5c0b9..00000000 --- a/examples/running-vicunas/experiments/A100-80GB/awq+gemm-batch_size(1)-sequence_length(128)-new_tokens(256)/0/.hydra/hydra.yaml +++ /dev/null @@ -1,174 +0,0 @@ -hydra: - run: - dir: experiments/${experiment_name} - sweep: - dir: experiments/${experiment_name} - subdir: ${hydra.job.num} - launcher: - _target_: hydra._internal.core_plugins.basic_launcher.BasicLauncher - sweeper: - _target_: hydra._internal.core_plugins.basic_sweeper.BasicSweeper - max_batch_size: null - params: - benchmark.input_shapes.batch_size: 1,2,4,8,16 - help: - app_name: ${hydra.job.name} - header: '${hydra.help.app_name} is powered by Hydra. 
- - ' - footer: 'Powered by Hydra (https://hydra.cc) - - Use --hydra-help to view Hydra specific help - - ' - template: '${hydra.help.header} - - == Configuration groups == - - Compose your configuration from those groups (group=option) - - - $APP_CONFIG_GROUPS - - - == Config == - - Override anything in the config (foo.bar=value) - - - $CONFIG - - - ${hydra.help.footer} - - ' - hydra_help: - template: 'Hydra (${hydra.runtime.version}) - - See https://hydra.cc for more info. - - - == Flags == - - $FLAGS_HELP - - - == Configuration groups == - - Compose your configuration from those groups (For example, append hydra/job_logging=disabled - to command line) - - - $HYDRA_CONFIG_GROUPS - - - Use ''--cfg hydra'' to Show the Hydra config. - - ' - hydra_help: ??? - hydra_logging: - version: 1 - formatters: - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s' - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - root: - level: INFO - handlers: - - console - disable_existing_loggers: false - job_logging: - version: 1 - formatters: - simple: - format: '[%(asctime)s][%(name)s][%(levelname)s] - %(message)s' - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s] - - %(message)s' - log_colors: - DEBUG: purple - INFO: green - WARNING: yellow - ERROR: red - CRITICAL: red - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - file: - class: logging.FileHandler - formatter: simple - filename: ${hydra.job.name}.log - root: - level: INFO - handlers: - - console - - file - disable_existing_loggers: false - env: {} - mode: MULTIRUN - searchpath: [] - callbacks: {} - output_subdir: .hydra - overrides: - hydra: - - hydra.mode=MULTIRUN - task: - - benchmark.input_shapes.batch_size=1 - job: - name: experiment - chdir: true - override_dirname: benchmark.input_shapes.batch_size=1 - id: '0' - num: 0 - config_name: awq+gemm - env_set: - CUDA_VISIBLE_DEVICES: '0' - CUDA_DEVICE_ORDER: PCI_BUS_ID - env_copy: [] - config: - override_dirname: - kv_sep: '=' - item_sep: ',' - exclude_keys: [] - runtime: - version: 1.3.2 - version_base: '1.3' - cwd: /workspace/optimum-benchmark/examples/gemm-vs-gemv - config_sources: - - path: hydra.conf - schema: pkg - provider: hydra - - path: optimum_benchmark - schema: pkg - provider: main - - path: hydra_plugins.hydra_colorlog.conf - schema: pkg - provider: hydra-colorlog - - path: /workspace/optimum-benchmark/examples/gemm-vs-gemv/configs - schema: file - provider: command-line - - path: '' - schema: structured - provider: schema - output_dir: /workspace/optimum-benchmark/examples/gemm-vs-gemv/experiments/awq+gemm-batch_size(1)-sequence_length(128)-new_tokens(256)/0 - choices: - benchmark: inference - backend: pytorch - hydra/env: default - hydra/callbacks: null - hydra/job_logging: colorlog - hydra/hydra_logging: colorlog - hydra/hydra_help: default - hydra/help: default - hydra/sweeper: basic - hydra/launcher: basic - hydra/output: default - verbose: false diff --git a/examples/running-vicunas/experiments/A100-80GB/awq+gemm-batch_size(1)-sequence_length(128)-new_tokens(256)/0/.hydra/overrides.yaml b/examples/running-vicunas/experiments/A100-80GB/awq+gemm-batch_size(1)-sequence_length(128)-new_tokens(256)/0/.hydra/overrides.yaml deleted file mode 100644 index 989520ff..00000000 --- 
a/examples/running-vicunas/experiments/A100-80GB/awq+gemm-batch_size(1)-sequence_length(128)-new_tokens(256)/0/.hydra/overrides.yaml +++ /dev/null @@ -1 +0,0 @@ -- benchmark.input_shapes.batch_size=1 diff --git a/examples/running-vicunas/experiments/A100-80GB/awq+gemm-batch_size(1)-sequence_length(128)-new_tokens(256)/0/hydra_config.yaml b/examples/running-vicunas/experiments/A100-80GB/awq+gemm-batch_size(1)-sequence_length(128)-new_tokens(256)/0/hydra_config.yaml deleted file mode 100644 index a7fc331f..00000000 --- a/examples/running-vicunas/experiments/A100-80GB/awq+gemm-batch_size(1)-sequence_length(128)-new_tokens(256)/0/hydra_config.yaml +++ /dev/null @@ -1,76 +0,0 @@ -backend: - name: pytorch - version: 2.1.0+cu118 - _target_: optimum_benchmark.backends.pytorch.backend.PyTorchBackend - seed: 42 - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: true - continous_isolation_check: true - delete_cache: false - no_weights: false - device_map: null - torch_dtype: float16 - disable_grad: true - eval_mode: true - amp_autocast: false - amp_dtype: null - torch_compile: false - torch_compile_config: {} - bettertransformer: false - quantization_scheme: null - quantization_config: {} - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 1 - sequence_length: 128 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: 256 - can_diffuse: false - can_generate: true - forward_kwargs: {} - generate_kwargs: - max_new_tokens: 256 - min_new_tokens: 256 - do_sample: false - use_cache: true - pad_token_id: 0 - num_beams: 1 -experiment_name: awq+gemm-batch_size(1)-sequence_length(128)-new_tokens(256) -model: IlyasMoutawwakil/vicuna-7b-v1.5-awq-gemm -device: cuda -task: text-generation -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.2 - optimum_commit: null - transformers_version: 4.35.0.dev0 - transformers_commit: null - accelerate_version: 0.24.0 - accelerate_commit: null - diffusers_version: null - diffusers_commit: null - python_version: 3.10.12 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB diff --git a/examples/running-vicunas/experiments/A100-80GB/awq+gemm-batch_size(1)-sequence_length(128)-new_tokens(256)/0/inference_results.csv b/examples/running-vicunas/experiments/A100-80GB/awq+gemm-batch_size(1)-sequence_length(128)-new_tokens(256)/0/inference_results.csv deleted file mode 100644 index 27c8266d..00000000 --- a/examples/running-vicunas/experiments/A100-80GB/awq+gemm-batch_size(1)-sequence_length(128)-new_tokens(256)/0/inference_results.csv +++ /dev/null @@ -1,2 +0,0 @@ -forward.latency(s),forward.throughput(samples/s),forward.peak_memory(MB),generate.latency(s),generate.throughput(tokens/s),generate.peak_memory(MB) -0.0437,22.9,6792,7.61,33.6,6981 diff --git a/examples/running-vicunas/experiments/A100-80GB/awq+gemm-batch_size(16)-sequence_length(128)-new_tokens(256)/4/.hydra/config.yaml b/examples/running-vicunas/experiments/A100-80GB/awq+gemm-batch_size(16)-sequence_length(128)-new_tokens(256)/4/.hydra/config.yaml deleted file mode 100644 index 35df0e1a..00000000 --- 
a/examples/running-vicunas/experiments/A100-80GB/awq+gemm-batch_size(16)-sequence_length(128)-new_tokens(256)/4/.hydra/config.yaml +++ /dev/null @@ -1,70 +0,0 @@ -backend: - name: pytorch - version: ${pytorch_version:} - _target_: optimum_benchmark.backends.pytorch.backend.PyTorchBackend - seed: 42 - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: true - continous_isolation_check: true - delete_cache: false - no_weights: false - device_map: null - torch_dtype: float16 - disable_grad: ${is_inference:${benchmark.name}} - eval_mode: ${is_inference:${benchmark.name}} - amp_autocast: false - amp_dtype: null - torch_compile: false - torch_compile_config: {} - bettertransformer: false - quantization_scheme: null - quantization_config: {} - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 16 - sequence_length: 128 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: 256 - can_diffuse: ${can_diffuse:${task}} - can_generate: ${can_generate:${task}} - forward_kwargs: {} - generate_kwargs: {} -experiment_name: awq+gemm-batch_size(${benchmark.input_shapes.batch_size})-sequence_length(${benchmark.input_shapes.sequence_length})-new_tokens(${benchmark.new_tokens}) -model: IlyasMoutawwakil/vicuna-7b-v1.5-awq-gemm -device: cuda -task: ${infer_task:${model}} -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.2 - optimum_commit: null - transformers_version: 4.35.0.dev0 - transformers_commit: null - accelerate_version: 0.24.0 - accelerate_commit: null - diffusers_version: null - diffusers_commit: null - python_version: 3.10.12 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB diff --git a/examples/running-vicunas/experiments/A100-80GB/awq+gemm-batch_size(16)-sequence_length(128)-new_tokens(256)/4/.hydra/hydra.yaml b/examples/running-vicunas/experiments/A100-80GB/awq+gemm-batch_size(16)-sequence_length(128)-new_tokens(256)/4/.hydra/hydra.yaml deleted file mode 100644 index cc8e9be9..00000000 --- a/examples/running-vicunas/experiments/A100-80GB/awq+gemm-batch_size(16)-sequence_length(128)-new_tokens(256)/4/.hydra/hydra.yaml +++ /dev/null @@ -1,174 +0,0 @@ -hydra: - run: - dir: experiments/${experiment_name} - sweep: - dir: experiments/${experiment_name} - subdir: ${hydra.job.num} - launcher: - _target_: hydra._internal.core_plugins.basic_launcher.BasicLauncher - sweeper: - _target_: hydra._internal.core_plugins.basic_sweeper.BasicSweeper - max_batch_size: null - params: - benchmark.input_shapes.batch_size: 1,2,4,8,16 - help: - app_name: ${hydra.job.name} - header: '${hydra.help.app_name} is powered by Hydra. - - ' - footer: 'Powered by Hydra (https://hydra.cc) - - Use --hydra-help to view Hydra specific help - - ' - template: '${hydra.help.header} - - == Configuration groups == - - Compose your configuration from those groups (group=option) - - - $APP_CONFIG_GROUPS - - - == Config == - - Override anything in the config (foo.bar=value) - - - $CONFIG - - - ${hydra.help.footer} - - ' - hydra_help: - template: 'Hydra (${hydra.runtime.version}) - - See https://hydra.cc for more info. 
- - - == Flags == - - $FLAGS_HELP - - - == Configuration groups == - - Compose your configuration from those groups (For example, append hydra/job_logging=disabled - to command line) - - - $HYDRA_CONFIG_GROUPS - - - Use ''--cfg hydra'' to Show the Hydra config. - - ' - hydra_help: ??? - hydra_logging: - version: 1 - formatters: - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s' - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - root: - level: INFO - handlers: - - console - disable_existing_loggers: false - job_logging: - version: 1 - formatters: - simple: - format: '[%(asctime)s][%(name)s][%(levelname)s] - %(message)s' - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s] - - %(message)s' - log_colors: - DEBUG: purple - INFO: green - WARNING: yellow - ERROR: red - CRITICAL: red - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - file: - class: logging.FileHandler - formatter: simple - filename: ${hydra.job.name}.log - root: - level: INFO - handlers: - - console - - file - disable_existing_loggers: false - env: {} - mode: MULTIRUN - searchpath: [] - callbacks: {} - output_subdir: .hydra - overrides: - hydra: - - hydra.mode=MULTIRUN - task: - - benchmark.input_shapes.batch_size=16 - job: - name: experiment - chdir: true - override_dirname: benchmark.input_shapes.batch_size=16 - id: '4' - num: 4 - config_name: awq+gemm - env_set: - CUDA_VISIBLE_DEVICES: '0' - CUDA_DEVICE_ORDER: PCI_BUS_ID - env_copy: [] - config: - override_dirname: - kv_sep: '=' - item_sep: ',' - exclude_keys: [] - runtime: - version: 1.3.2 - version_base: '1.3' - cwd: /workspace/optimum-benchmark/examples/gemm-vs-gemv - config_sources: - - path: hydra.conf - schema: pkg - provider: hydra - - path: optimum_benchmark - schema: pkg - provider: main - - path: hydra_plugins.hydra_colorlog.conf - schema: pkg - provider: hydra-colorlog - - path: /workspace/optimum-benchmark/examples/gemm-vs-gemv/configs - schema: file - provider: command-line - - path: '' - schema: structured - provider: schema - output_dir: /workspace/optimum-benchmark/examples/gemm-vs-gemv/experiments/awq+gemm-batch_size(16)-sequence_length(128)-new_tokens(256)/4 - choices: - benchmark: inference - backend: pytorch - hydra/env: default - hydra/callbacks: null - hydra/job_logging: colorlog - hydra/hydra_logging: colorlog - hydra/hydra_help: default - hydra/help: default - hydra/sweeper: basic - hydra/launcher: basic - hydra/output: default - verbose: false diff --git a/examples/running-vicunas/experiments/A100-80GB/awq+gemm-batch_size(16)-sequence_length(128)-new_tokens(256)/4/.hydra/overrides.yaml b/examples/running-vicunas/experiments/A100-80GB/awq+gemm-batch_size(16)-sequence_length(128)-new_tokens(256)/4/.hydra/overrides.yaml deleted file mode 100644 index fdb7f01d..00000000 --- a/examples/running-vicunas/experiments/A100-80GB/awq+gemm-batch_size(16)-sequence_length(128)-new_tokens(256)/4/.hydra/overrides.yaml +++ /dev/null @@ -1 +0,0 @@ -- benchmark.input_shapes.batch_size=16 diff --git a/examples/running-vicunas/experiments/A100-80GB/awq+gemm-batch_size(16)-sequence_length(128)-new_tokens(256)/4/hydra_config.yaml b/examples/running-vicunas/experiments/A100-80GB/awq+gemm-batch_size(16)-sequence_length(128)-new_tokens(256)/4/hydra_config.yaml deleted file mode 100644 index 
9e3fb6c4..00000000 --- a/examples/running-vicunas/experiments/A100-80GB/awq+gemm-batch_size(16)-sequence_length(128)-new_tokens(256)/4/hydra_config.yaml +++ /dev/null @@ -1,76 +0,0 @@ -backend: - name: pytorch - version: 2.1.0+cu118 - _target_: optimum_benchmark.backends.pytorch.backend.PyTorchBackend - seed: 42 - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: true - continous_isolation_check: true - delete_cache: false - no_weights: false - device_map: null - torch_dtype: float16 - disable_grad: true - eval_mode: true - amp_autocast: false - amp_dtype: null - torch_compile: false - torch_compile_config: {} - bettertransformer: false - quantization_scheme: null - quantization_config: {} - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 16 - sequence_length: 128 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: 256 - can_diffuse: false - can_generate: true - forward_kwargs: {} - generate_kwargs: - max_new_tokens: 256 - min_new_tokens: 256 - do_sample: false - use_cache: true - pad_token_id: 0 - num_beams: 1 -experiment_name: awq+gemm-batch_size(16)-sequence_length(128)-new_tokens(256) -model: IlyasMoutawwakil/vicuna-7b-v1.5-awq-gemm -device: cuda -task: text-generation -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.2 - optimum_commit: null - transformers_version: 4.35.0.dev0 - transformers_commit: null - accelerate_version: 0.24.0 - accelerate_commit: null - diffusers_version: null - diffusers_commit: null - python_version: 3.10.12 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB diff --git a/examples/running-vicunas/experiments/A100-80GB/awq+gemm-batch_size(16)-sequence_length(128)-new_tokens(256)/4/inference_results.csv b/examples/running-vicunas/experiments/A100-80GB/awq+gemm-batch_size(16)-sequence_length(128)-new_tokens(256)/4/inference_results.csv deleted file mode 100644 index c1055877..00000000 --- a/examples/running-vicunas/experiments/A100-80GB/awq+gemm-batch_size(16)-sequence_length(128)-new_tokens(256)/4/inference_results.csv +++ /dev/null @@ -1,2 +0,0 @@ -forward.latency(s),forward.throughput(samples/s),forward.peak_memory(MB),generate.latency(s),generate.throughput(tokens/s),generate.peak_memory(MB) -0.58,27.6,10183,8.33,492.0,31245 diff --git a/examples/running-vicunas/experiments/A100-80GB/awq+gemm-batch_size(2)-sequence_length(128)-new_tokens(256)/1/.hydra/config.yaml b/examples/running-vicunas/experiments/A100-80GB/awq+gemm-batch_size(2)-sequence_length(128)-new_tokens(256)/1/.hydra/config.yaml deleted file mode 100644 index 271b5596..00000000 --- a/examples/running-vicunas/experiments/A100-80GB/awq+gemm-batch_size(2)-sequence_length(128)-new_tokens(256)/1/.hydra/config.yaml +++ /dev/null @@ -1,70 +0,0 @@ -backend: - name: pytorch - version: ${pytorch_version:} - _target_: optimum_benchmark.backends.pytorch.backend.PyTorchBackend - seed: 42 - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: true - continous_isolation_check: true - delete_cache: false - no_weights: false - device_map: null - torch_dtype: float16 - disable_grad: ${is_inference:${benchmark.name}} - 
eval_mode: ${is_inference:${benchmark.name}} - amp_autocast: false - amp_dtype: null - torch_compile: false - torch_compile_config: {} - bettertransformer: false - quantization_scheme: null - quantization_config: {} - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 2 - sequence_length: 128 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: 256 - can_diffuse: ${can_diffuse:${task}} - can_generate: ${can_generate:${task}} - forward_kwargs: {} - generate_kwargs: {} -experiment_name: awq+gemm-batch_size(${benchmark.input_shapes.batch_size})-sequence_length(${benchmark.input_shapes.sequence_length})-new_tokens(${benchmark.new_tokens}) -model: IlyasMoutawwakil/vicuna-7b-v1.5-awq-gemm -device: cuda -task: ${infer_task:${model}} -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.2 - optimum_commit: null - transformers_version: 4.35.0.dev0 - transformers_commit: null - accelerate_version: 0.24.0 - accelerate_commit: null - diffusers_version: null - diffusers_commit: null - python_version: 3.10.12 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB diff --git a/examples/running-vicunas/experiments/A100-80GB/awq+gemm-batch_size(2)-sequence_length(128)-new_tokens(256)/1/.hydra/hydra.yaml b/examples/running-vicunas/experiments/A100-80GB/awq+gemm-batch_size(2)-sequence_length(128)-new_tokens(256)/1/.hydra/hydra.yaml deleted file mode 100644 index 4c5bf363..00000000 --- a/examples/running-vicunas/experiments/A100-80GB/awq+gemm-batch_size(2)-sequence_length(128)-new_tokens(256)/1/.hydra/hydra.yaml +++ /dev/null @@ -1,174 +0,0 @@ -hydra: - run: - dir: experiments/${experiment_name} - sweep: - dir: experiments/${experiment_name} - subdir: ${hydra.job.num} - launcher: - _target_: hydra._internal.core_plugins.basic_launcher.BasicLauncher - sweeper: - _target_: hydra._internal.core_plugins.basic_sweeper.BasicSweeper - max_batch_size: null - params: - benchmark.input_shapes.batch_size: 1,2,4,8,16 - help: - app_name: ${hydra.job.name} - header: '${hydra.help.app_name} is powered by Hydra. - - ' - footer: 'Powered by Hydra (https://hydra.cc) - - Use --hydra-help to view Hydra specific help - - ' - template: '${hydra.help.header} - - == Configuration groups == - - Compose your configuration from those groups (group=option) - - - $APP_CONFIG_GROUPS - - - == Config == - - Override anything in the config (foo.bar=value) - - - $CONFIG - - - ${hydra.help.footer} - - ' - hydra_help: - template: 'Hydra (${hydra.runtime.version}) - - See https://hydra.cc for more info. - - - == Flags == - - $FLAGS_HELP - - - == Configuration groups == - - Compose your configuration from those groups (For example, append hydra/job_logging=disabled - to command line) - - - $HYDRA_CONFIG_GROUPS - - - Use ''--cfg hydra'' to Show the Hydra config. - - ' - hydra_help: ??? 
- hydra_logging: - version: 1 - formatters: - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s' - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - root: - level: INFO - handlers: - - console - disable_existing_loggers: false - job_logging: - version: 1 - formatters: - simple: - format: '[%(asctime)s][%(name)s][%(levelname)s] - %(message)s' - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s] - - %(message)s' - log_colors: - DEBUG: purple - INFO: green - WARNING: yellow - ERROR: red - CRITICAL: red - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - file: - class: logging.FileHandler - formatter: simple - filename: ${hydra.job.name}.log - root: - level: INFO - handlers: - - console - - file - disable_existing_loggers: false - env: {} - mode: MULTIRUN - searchpath: [] - callbacks: {} - output_subdir: .hydra - overrides: - hydra: - - hydra.mode=MULTIRUN - task: - - benchmark.input_shapes.batch_size=2 - job: - name: experiment - chdir: true - override_dirname: benchmark.input_shapes.batch_size=2 - id: '1' - num: 1 - config_name: awq+gemm - env_set: - CUDA_VISIBLE_DEVICES: '0' - CUDA_DEVICE_ORDER: PCI_BUS_ID - env_copy: [] - config: - override_dirname: - kv_sep: '=' - item_sep: ',' - exclude_keys: [] - runtime: - version: 1.3.2 - version_base: '1.3' - cwd: /workspace/optimum-benchmark/examples/gemm-vs-gemv - config_sources: - - path: hydra.conf - schema: pkg - provider: hydra - - path: optimum_benchmark - schema: pkg - provider: main - - path: hydra_plugins.hydra_colorlog.conf - schema: pkg - provider: hydra-colorlog - - path: /workspace/optimum-benchmark/examples/gemm-vs-gemv/configs - schema: file - provider: command-line - - path: '' - schema: structured - provider: schema - output_dir: /workspace/optimum-benchmark/examples/gemm-vs-gemv/experiments/awq+gemm-batch_size(2)-sequence_length(128)-new_tokens(256)/1 - choices: - benchmark: inference - backend: pytorch - hydra/env: default - hydra/callbacks: null - hydra/job_logging: colorlog - hydra/hydra_logging: colorlog - hydra/hydra_help: default - hydra/help: default - hydra/sweeper: basic - hydra/launcher: basic - hydra/output: default - verbose: false diff --git a/examples/running-vicunas/experiments/A100-80GB/awq+gemm-batch_size(2)-sequence_length(128)-new_tokens(256)/1/.hydra/overrides.yaml b/examples/running-vicunas/experiments/A100-80GB/awq+gemm-batch_size(2)-sequence_length(128)-new_tokens(256)/1/.hydra/overrides.yaml deleted file mode 100644 index 8211b85f..00000000 --- a/examples/running-vicunas/experiments/A100-80GB/awq+gemm-batch_size(2)-sequence_length(128)-new_tokens(256)/1/.hydra/overrides.yaml +++ /dev/null @@ -1 +0,0 @@ -- benchmark.input_shapes.batch_size=2 diff --git a/examples/running-vicunas/experiments/A100-80GB/awq+gemm-batch_size(2)-sequence_length(128)-new_tokens(256)/1/hydra_config.yaml b/examples/running-vicunas/experiments/A100-80GB/awq+gemm-batch_size(2)-sequence_length(128)-new_tokens(256)/1/hydra_config.yaml deleted file mode 100644 index cd938d98..00000000 --- a/examples/running-vicunas/experiments/A100-80GB/awq+gemm-batch_size(2)-sequence_length(128)-new_tokens(256)/1/hydra_config.yaml +++ /dev/null @@ -1,76 +0,0 @@ -backend: - name: pytorch - version: 2.1.0+cu118 - _target_: optimum_benchmark.backends.pytorch.backend.PyTorchBackend - seed: 
42 - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: true - continous_isolation_check: true - delete_cache: false - no_weights: false - device_map: null - torch_dtype: float16 - disable_grad: true - eval_mode: true - amp_autocast: false - amp_dtype: null - torch_compile: false - torch_compile_config: {} - bettertransformer: false - quantization_scheme: null - quantization_config: {} - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 2 - sequence_length: 128 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: 256 - can_diffuse: false - can_generate: true - forward_kwargs: {} - generate_kwargs: - max_new_tokens: 256 - min_new_tokens: 256 - do_sample: false - use_cache: true - pad_token_id: 0 - num_beams: 1 -experiment_name: awq+gemm-batch_size(2)-sequence_length(128)-new_tokens(256) -model: IlyasMoutawwakil/vicuna-7b-v1.5-awq-gemm -device: cuda -task: text-generation -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.2 - optimum_commit: null - transformers_version: 4.35.0.dev0 - transformers_commit: null - accelerate_version: 0.24.0 - accelerate_commit: null - diffusers_version: null - diffusers_commit: null - python_version: 3.10.12 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB diff --git a/examples/running-vicunas/experiments/A100-80GB/awq+gemm-batch_size(2)-sequence_length(128)-new_tokens(256)/1/inference_results.csv b/examples/running-vicunas/experiments/A100-80GB/awq+gemm-batch_size(2)-sequence_length(128)-new_tokens(256)/1/inference_results.csv deleted file mode 100644 index 38cc2ad1..00000000 --- a/examples/running-vicunas/experiments/A100-80GB/awq+gemm-batch_size(2)-sequence_length(128)-new_tokens(256)/1/inference_results.csv +++ /dev/null @@ -1,2 +0,0 @@ -forward.latency(s),forward.throughput(samples/s),forward.peak_memory(MB),generate.latency(s),generate.throughput(tokens/s),generate.peak_memory(MB) -0.0801,25.0,6754,7.71,66.4,7509 diff --git a/examples/running-vicunas/experiments/A100-80GB/awq+gemm-batch_size(4)-sequence_length(128)-new_tokens(256)/2/.hydra/config.yaml b/examples/running-vicunas/experiments/A100-80GB/awq+gemm-batch_size(4)-sequence_length(128)-new_tokens(256)/2/.hydra/config.yaml deleted file mode 100644 index b28d4709..00000000 --- a/examples/running-vicunas/experiments/A100-80GB/awq+gemm-batch_size(4)-sequence_length(128)-new_tokens(256)/2/.hydra/config.yaml +++ /dev/null @@ -1,70 +0,0 @@ -backend: - name: pytorch - version: ${pytorch_version:} - _target_: optimum_benchmark.backends.pytorch.backend.PyTorchBackend - seed: 42 - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: true - continous_isolation_check: true - delete_cache: false - no_weights: false - device_map: null - torch_dtype: float16 - disable_grad: ${is_inference:${benchmark.name}} - eval_mode: ${is_inference:${benchmark.name}} - amp_autocast: false - amp_dtype: null - torch_compile: false - torch_compile_config: {} - bettertransformer: false - quantization_scheme: null - quantization_config: {} - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: 
inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 4 - sequence_length: 128 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: 256 - can_diffuse: ${can_diffuse:${task}} - can_generate: ${can_generate:${task}} - forward_kwargs: {} - generate_kwargs: {} -experiment_name: awq+gemm-batch_size(${benchmark.input_shapes.batch_size})-sequence_length(${benchmark.input_shapes.sequence_length})-new_tokens(${benchmark.new_tokens}) -model: IlyasMoutawwakil/vicuna-7b-v1.5-awq-gemm -device: cuda -task: ${infer_task:${model}} -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.2 - optimum_commit: null - transformers_version: 4.35.0.dev0 - transformers_commit: null - accelerate_version: 0.24.0 - accelerate_commit: null - diffusers_version: null - diffusers_commit: null - python_version: 3.10.12 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB diff --git a/examples/running-vicunas/experiments/A100-80GB/awq+gemm-batch_size(4)-sequence_length(128)-new_tokens(256)/2/.hydra/hydra.yaml b/examples/running-vicunas/experiments/A100-80GB/awq+gemm-batch_size(4)-sequence_length(128)-new_tokens(256)/2/.hydra/hydra.yaml deleted file mode 100644 index f986b5d8..00000000 --- a/examples/running-vicunas/experiments/A100-80GB/awq+gemm-batch_size(4)-sequence_length(128)-new_tokens(256)/2/.hydra/hydra.yaml +++ /dev/null @@ -1,174 +0,0 @@ -hydra: - run: - dir: experiments/${experiment_name} - sweep: - dir: experiments/${experiment_name} - subdir: ${hydra.job.num} - launcher: - _target_: hydra._internal.core_plugins.basic_launcher.BasicLauncher - sweeper: - _target_: hydra._internal.core_plugins.basic_sweeper.BasicSweeper - max_batch_size: null - params: - benchmark.input_shapes.batch_size: 1,2,4,8,16 - help: - app_name: ${hydra.job.name} - header: '${hydra.help.app_name} is powered by Hydra. - - ' - footer: 'Powered by Hydra (https://hydra.cc) - - Use --hydra-help to view Hydra specific help - - ' - template: '${hydra.help.header} - - == Configuration groups == - - Compose your configuration from those groups (group=option) - - - $APP_CONFIG_GROUPS - - - == Config == - - Override anything in the config (foo.bar=value) - - - $CONFIG - - - ${hydra.help.footer} - - ' - hydra_help: - template: 'Hydra (${hydra.runtime.version}) - - See https://hydra.cc for more info. - - - == Flags == - - $FLAGS_HELP - - - == Configuration groups == - - Compose your configuration from those groups (For example, append hydra/job_logging=disabled - to command line) - - - $HYDRA_CONFIG_GROUPS - - - Use ''--cfg hydra'' to Show the Hydra config. - - ' - hydra_help: ??? 
- hydra_logging: - version: 1 - formatters: - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s' - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - root: - level: INFO - handlers: - - console - disable_existing_loggers: false - job_logging: - version: 1 - formatters: - simple: - format: '[%(asctime)s][%(name)s][%(levelname)s] - %(message)s' - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s] - - %(message)s' - log_colors: - DEBUG: purple - INFO: green - WARNING: yellow - ERROR: red - CRITICAL: red - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - file: - class: logging.FileHandler - formatter: simple - filename: ${hydra.job.name}.log - root: - level: INFO - handlers: - - console - - file - disable_existing_loggers: false - env: {} - mode: MULTIRUN - searchpath: [] - callbacks: {} - output_subdir: .hydra - overrides: - hydra: - - hydra.mode=MULTIRUN - task: - - benchmark.input_shapes.batch_size=4 - job: - name: experiment - chdir: true - override_dirname: benchmark.input_shapes.batch_size=4 - id: '2' - num: 2 - config_name: awq+gemm - env_set: - CUDA_VISIBLE_DEVICES: '0' - CUDA_DEVICE_ORDER: PCI_BUS_ID - env_copy: [] - config: - override_dirname: - kv_sep: '=' - item_sep: ',' - exclude_keys: [] - runtime: - version: 1.3.2 - version_base: '1.3' - cwd: /workspace/optimum-benchmark/examples/gemm-vs-gemv - config_sources: - - path: hydra.conf - schema: pkg - provider: hydra - - path: optimum_benchmark - schema: pkg - provider: main - - path: hydra_plugins.hydra_colorlog.conf - schema: pkg - provider: hydra-colorlog - - path: /workspace/optimum-benchmark/examples/gemm-vs-gemv/configs - schema: file - provider: command-line - - path: '' - schema: structured - provider: schema - output_dir: /workspace/optimum-benchmark/examples/gemm-vs-gemv/experiments/awq+gemm-batch_size(4)-sequence_length(128)-new_tokens(256)/2 - choices: - benchmark: inference - backend: pytorch - hydra/env: default - hydra/callbacks: null - hydra/job_logging: colorlog - hydra/hydra_logging: colorlog - hydra/hydra_help: default - hydra/help: default - hydra/sweeper: basic - hydra/launcher: basic - hydra/output: default - verbose: false diff --git a/examples/running-vicunas/experiments/A100-80GB/awq+gemm-batch_size(4)-sequence_length(128)-new_tokens(256)/2/.hydra/overrides.yaml b/examples/running-vicunas/experiments/A100-80GB/awq+gemm-batch_size(4)-sequence_length(128)-new_tokens(256)/2/.hydra/overrides.yaml deleted file mode 100644 index eef8c9ca..00000000 --- a/examples/running-vicunas/experiments/A100-80GB/awq+gemm-batch_size(4)-sequence_length(128)-new_tokens(256)/2/.hydra/overrides.yaml +++ /dev/null @@ -1 +0,0 @@ -- benchmark.input_shapes.batch_size=4 diff --git a/examples/running-vicunas/experiments/A100-80GB/awq+gemm-batch_size(4)-sequence_length(128)-new_tokens(256)/2/hydra_config.yaml b/examples/running-vicunas/experiments/A100-80GB/awq+gemm-batch_size(4)-sequence_length(128)-new_tokens(256)/2/hydra_config.yaml deleted file mode 100644 index 0d38b107..00000000 --- a/examples/running-vicunas/experiments/A100-80GB/awq+gemm-batch_size(4)-sequence_length(128)-new_tokens(256)/2/hydra_config.yaml +++ /dev/null @@ -1,76 +0,0 @@ -backend: - name: pytorch - version: 2.1.0+cu118 - _target_: optimum_benchmark.backends.pytorch.backend.PyTorchBackend - seed: 
42 - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: true - continous_isolation_check: true - delete_cache: false - no_weights: false - device_map: null - torch_dtype: float16 - disable_grad: true - eval_mode: true - amp_autocast: false - amp_dtype: null - torch_compile: false - torch_compile_config: {} - bettertransformer: false - quantization_scheme: null - quantization_config: {} - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 4 - sequence_length: 128 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: 256 - can_diffuse: false - can_generate: true - forward_kwargs: {} - generate_kwargs: - max_new_tokens: 256 - min_new_tokens: 256 - do_sample: false - use_cache: true - pad_token_id: 0 - num_beams: 1 -experiment_name: awq+gemm-batch_size(4)-sequence_length(128)-new_tokens(256) -model: IlyasMoutawwakil/vicuna-7b-v1.5-awq-gemm -device: cuda -task: text-generation -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.2 - optimum_commit: null - transformers_version: 4.35.0.dev0 - transformers_commit: null - accelerate_version: 0.24.0 - accelerate_commit: null - diffusers_version: null - diffusers_commit: null - python_version: 3.10.12 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB diff --git a/examples/running-vicunas/experiments/A100-80GB/awq+gemm-batch_size(4)-sequence_length(128)-new_tokens(256)/2/inference_results.csv b/examples/running-vicunas/experiments/A100-80GB/awq+gemm-batch_size(4)-sequence_length(128)-new_tokens(256)/2/inference_results.csv deleted file mode 100644 index c4dcb12d..00000000 --- a/examples/running-vicunas/experiments/A100-80GB/awq+gemm-batch_size(4)-sequence_length(128)-new_tokens(256)/2/inference_results.csv +++ /dev/null @@ -1,2 +0,0 @@ -forward.latency(s),forward.throughput(samples/s),forward.peak_memory(MB),generate.latency(s),generate.throughput(tokens/s),generate.peak_memory(MB) -0.149,26.8,7155,7.67,134.0,8642 diff --git a/examples/running-vicunas/experiments/A100-80GB/awq+gemm-batch_size(8)-sequence_length(128)-new_tokens(256)/3/.hydra/config.yaml b/examples/running-vicunas/experiments/A100-80GB/awq+gemm-batch_size(8)-sequence_length(128)-new_tokens(256)/3/.hydra/config.yaml deleted file mode 100644 index f7b17e95..00000000 --- a/examples/running-vicunas/experiments/A100-80GB/awq+gemm-batch_size(8)-sequence_length(128)-new_tokens(256)/3/.hydra/config.yaml +++ /dev/null @@ -1,70 +0,0 @@ -backend: - name: pytorch - version: ${pytorch_version:} - _target_: optimum_benchmark.backends.pytorch.backend.PyTorchBackend - seed: 42 - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: true - continous_isolation_check: true - delete_cache: false - no_weights: false - device_map: null - torch_dtype: float16 - disable_grad: ${is_inference:${benchmark.name}} - eval_mode: ${is_inference:${benchmark.name}} - amp_autocast: false - amp_dtype: null - torch_compile: false - torch_compile_config: {} - bettertransformer: false - quantization_scheme: null - quantization_config: {} - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: 
inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 8 - sequence_length: 128 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: 256 - can_diffuse: ${can_diffuse:${task}} - can_generate: ${can_generate:${task}} - forward_kwargs: {} - generate_kwargs: {} -experiment_name: awq+gemm-batch_size(${benchmark.input_shapes.batch_size})-sequence_length(${benchmark.input_shapes.sequence_length})-new_tokens(${benchmark.new_tokens}) -model: IlyasMoutawwakil/vicuna-7b-v1.5-awq-gemm -device: cuda -task: ${infer_task:${model}} -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.2 - optimum_commit: null - transformers_version: 4.35.0.dev0 - transformers_commit: null - accelerate_version: 0.24.0 - accelerate_commit: null - diffusers_version: null - diffusers_commit: null - python_version: 3.10.12 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB diff --git a/examples/running-vicunas/experiments/A100-80GB/awq+gemm-batch_size(8)-sequence_length(128)-new_tokens(256)/3/.hydra/hydra.yaml b/examples/running-vicunas/experiments/A100-80GB/awq+gemm-batch_size(8)-sequence_length(128)-new_tokens(256)/3/.hydra/hydra.yaml deleted file mode 100644 index e312b8af..00000000 --- a/examples/running-vicunas/experiments/A100-80GB/awq+gemm-batch_size(8)-sequence_length(128)-new_tokens(256)/3/.hydra/hydra.yaml +++ /dev/null @@ -1,174 +0,0 @@ -hydra: - run: - dir: experiments/${experiment_name} - sweep: - dir: experiments/${experiment_name} - subdir: ${hydra.job.num} - launcher: - _target_: hydra._internal.core_plugins.basic_launcher.BasicLauncher - sweeper: - _target_: hydra._internal.core_plugins.basic_sweeper.BasicSweeper - max_batch_size: null - params: - benchmark.input_shapes.batch_size: 1,2,4,8,16 - help: - app_name: ${hydra.job.name} - header: '${hydra.help.app_name} is powered by Hydra. - - ' - footer: 'Powered by Hydra (https://hydra.cc) - - Use --hydra-help to view Hydra specific help - - ' - template: '${hydra.help.header} - - == Configuration groups == - - Compose your configuration from those groups (group=option) - - - $APP_CONFIG_GROUPS - - - == Config == - - Override anything in the config (foo.bar=value) - - - $CONFIG - - - ${hydra.help.footer} - - ' - hydra_help: - template: 'Hydra (${hydra.runtime.version}) - - See https://hydra.cc for more info. - - - == Flags == - - $FLAGS_HELP - - - == Configuration groups == - - Compose your configuration from those groups (For example, append hydra/job_logging=disabled - to command line) - - - $HYDRA_CONFIG_GROUPS - - - Use ''--cfg hydra'' to Show the Hydra config. - - ' - hydra_help: ??? 
- hydra_logging: - version: 1 - formatters: - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s' - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - root: - level: INFO - handlers: - - console - disable_existing_loggers: false - job_logging: - version: 1 - formatters: - simple: - format: '[%(asctime)s][%(name)s][%(levelname)s] - %(message)s' - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s] - - %(message)s' - log_colors: - DEBUG: purple - INFO: green - WARNING: yellow - ERROR: red - CRITICAL: red - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - file: - class: logging.FileHandler - formatter: simple - filename: ${hydra.job.name}.log - root: - level: INFO - handlers: - - console - - file - disable_existing_loggers: false - env: {} - mode: MULTIRUN - searchpath: [] - callbacks: {} - output_subdir: .hydra - overrides: - hydra: - - hydra.mode=MULTIRUN - task: - - benchmark.input_shapes.batch_size=8 - job: - name: experiment - chdir: true - override_dirname: benchmark.input_shapes.batch_size=8 - id: '3' - num: 3 - config_name: awq+gemm - env_set: - CUDA_VISIBLE_DEVICES: '0' - CUDA_DEVICE_ORDER: PCI_BUS_ID - env_copy: [] - config: - override_dirname: - kv_sep: '=' - item_sep: ',' - exclude_keys: [] - runtime: - version: 1.3.2 - version_base: '1.3' - cwd: /workspace/optimum-benchmark/examples/gemm-vs-gemv - config_sources: - - path: hydra.conf - schema: pkg - provider: hydra - - path: optimum_benchmark - schema: pkg - provider: main - - path: hydra_plugins.hydra_colorlog.conf - schema: pkg - provider: hydra-colorlog - - path: /workspace/optimum-benchmark/examples/gemm-vs-gemv/configs - schema: file - provider: command-line - - path: '' - schema: structured - provider: schema - output_dir: /workspace/optimum-benchmark/examples/gemm-vs-gemv/experiments/awq+gemm-batch_size(8)-sequence_length(128)-new_tokens(256)/3 - choices: - benchmark: inference - backend: pytorch - hydra/env: default - hydra/callbacks: null - hydra/job_logging: colorlog - hydra/hydra_logging: colorlog - hydra/hydra_help: default - hydra/help: default - hydra/sweeper: basic - hydra/launcher: basic - hydra/output: default - verbose: false diff --git a/examples/running-vicunas/experiments/A100-80GB/awq+gemm-batch_size(8)-sequence_length(128)-new_tokens(256)/3/.hydra/overrides.yaml b/examples/running-vicunas/experiments/A100-80GB/awq+gemm-batch_size(8)-sequence_length(128)-new_tokens(256)/3/.hydra/overrides.yaml deleted file mode 100644 index 8cd14374..00000000 --- a/examples/running-vicunas/experiments/A100-80GB/awq+gemm-batch_size(8)-sequence_length(128)-new_tokens(256)/3/.hydra/overrides.yaml +++ /dev/null @@ -1 +0,0 @@ -- benchmark.input_shapes.batch_size=8 diff --git a/examples/running-vicunas/experiments/A100-80GB/awq+gemm-batch_size(8)-sequence_length(128)-new_tokens(256)/3/hydra_config.yaml b/examples/running-vicunas/experiments/A100-80GB/awq+gemm-batch_size(8)-sequence_length(128)-new_tokens(256)/3/hydra_config.yaml deleted file mode 100644 index 4039e79c..00000000 --- a/examples/running-vicunas/experiments/A100-80GB/awq+gemm-batch_size(8)-sequence_length(128)-new_tokens(256)/3/hydra_config.yaml +++ /dev/null @@ -1,76 +0,0 @@ -backend: - name: pytorch - version: 2.1.0+cu118 - _target_: optimum_benchmark.backends.pytorch.backend.PyTorchBackend - seed: 
42 - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: true - continous_isolation_check: true - delete_cache: false - no_weights: false - device_map: null - torch_dtype: float16 - disable_grad: true - eval_mode: true - amp_autocast: false - amp_dtype: null - torch_compile: false - torch_compile_config: {} - bettertransformer: false - quantization_scheme: null - quantization_config: {} - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 8 - sequence_length: 128 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: 256 - can_diffuse: false - can_generate: true - forward_kwargs: {} - generate_kwargs: - max_new_tokens: 256 - min_new_tokens: 256 - do_sample: false - use_cache: true - pad_token_id: 0 - num_beams: 1 -experiment_name: awq+gemm-batch_size(8)-sequence_length(128)-new_tokens(256) -model: IlyasMoutawwakil/vicuna-7b-v1.5-awq-gemm -device: cuda -task: text-generation -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.2 - optimum_commit: null - transformers_version: 4.35.0.dev0 - transformers_commit: null - accelerate_version: 0.24.0 - accelerate_commit: null - diffusers_version: null - diffusers_commit: null - python_version: 3.10.12 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB diff --git a/examples/running-vicunas/experiments/A100-80GB/awq+gemm-batch_size(8)-sequence_length(128)-new_tokens(256)/3/inference_results.csv b/examples/running-vicunas/experiments/A100-80GB/awq+gemm-batch_size(8)-sequence_length(128)-new_tokens(256)/3/inference_results.csv deleted file mode 100644 index 7d352638..00000000 --- a/examples/running-vicunas/experiments/A100-80GB/awq+gemm-batch_size(8)-sequence_length(128)-new_tokens(256)/3/inference_results.csv +++ /dev/null @@ -1,2 +0,0 @@ -forward.latency(s),forward.throughput(samples/s),forward.peak_memory(MB),generate.latency(s),generate.throughput(tokens/s),generate.peak_memory(MB) -0.289,27.7,8394,7.84,261.0,14176 diff --git a/examples/running-vicunas/experiments/A100-80GB/awq+gemv-batch_size(1)-sequence_length(128)-new_tokens(256)/0/.hydra/config.yaml b/examples/running-vicunas/experiments/A100-80GB/awq+gemv-batch_size(1)-sequence_length(128)-new_tokens(256)/0/.hydra/config.yaml deleted file mode 100644 index 06a5987c..00000000 --- a/examples/running-vicunas/experiments/A100-80GB/awq+gemv-batch_size(1)-sequence_length(128)-new_tokens(256)/0/.hydra/config.yaml +++ /dev/null @@ -1,70 +0,0 @@ -backend: - name: pytorch - version: ${pytorch_version:} - _target_: optimum_benchmark.backends.pytorch.backend.PyTorchBackend - seed: 42 - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: true - continous_isolation_check: true - delete_cache: false - no_weights: false - device_map: null - torch_dtype: float16 - disable_grad: ${is_inference:${benchmark.name}} - eval_mode: ${is_inference:${benchmark.name}} - amp_autocast: false - amp_dtype: null - torch_compile: false - torch_compile_config: {} - bettertransformer: false - quantization_scheme: null - quantization_config: {} - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: 
inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 1 - sequence_length: 128 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: 256 - can_diffuse: ${can_diffuse:${task}} - can_generate: ${can_generate:${task}} - forward_kwargs: {} - generate_kwargs: {} -experiment_name: awq+gemv-batch_size(${benchmark.input_shapes.batch_size})-sequence_length(${benchmark.input_shapes.sequence_length})-new_tokens(${benchmark.new_tokens}) -model: IlyasMoutawwakil/vicuna-7b-v1.5-awq-gemv -device: cuda -task: ${infer_task:${model}} -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.2 - optimum_commit: null - transformers_version: 4.35.0.dev0 - transformers_commit: null - accelerate_version: 0.24.0 - accelerate_commit: null - diffusers_version: null - diffusers_commit: null - python_version: 3.10.12 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB diff --git a/examples/running-vicunas/experiments/A100-80GB/awq+gemv-batch_size(1)-sequence_length(128)-new_tokens(256)/0/.hydra/hydra.yaml b/examples/running-vicunas/experiments/A100-80GB/awq+gemv-batch_size(1)-sequence_length(128)-new_tokens(256)/0/.hydra/hydra.yaml deleted file mode 100644 index 6f4c8be4..00000000 --- a/examples/running-vicunas/experiments/A100-80GB/awq+gemv-batch_size(1)-sequence_length(128)-new_tokens(256)/0/.hydra/hydra.yaml +++ /dev/null @@ -1,174 +0,0 @@ -hydra: - run: - dir: experiments/${experiment_name} - sweep: - dir: experiments/${experiment_name} - subdir: ${hydra.job.num} - launcher: - _target_: hydra._internal.core_plugins.basic_launcher.BasicLauncher - sweeper: - _target_: hydra._internal.core_plugins.basic_sweeper.BasicSweeper - max_batch_size: null - params: - benchmark.input_shapes.batch_size: 1,2,4,8,16 - help: - app_name: ${hydra.job.name} - header: '${hydra.help.app_name} is powered by Hydra. - - ' - footer: 'Powered by Hydra (https://hydra.cc) - - Use --hydra-help to view Hydra specific help - - ' - template: '${hydra.help.header} - - == Configuration groups == - - Compose your configuration from those groups (group=option) - - - $APP_CONFIG_GROUPS - - - == Config == - - Override anything in the config (foo.bar=value) - - - $CONFIG - - - ${hydra.help.footer} - - ' - hydra_help: - template: 'Hydra (${hydra.runtime.version}) - - See https://hydra.cc for more info. - - - == Flags == - - $FLAGS_HELP - - - == Configuration groups == - - Compose your configuration from those groups (For example, append hydra/job_logging=disabled - to command line) - - - $HYDRA_CONFIG_GROUPS - - - Use ''--cfg hydra'' to Show the Hydra config. - - ' - hydra_help: ??? 
- hydra_logging: - version: 1 - formatters: - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s' - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - root: - level: INFO - handlers: - - console - disable_existing_loggers: false - job_logging: - version: 1 - formatters: - simple: - format: '[%(asctime)s][%(name)s][%(levelname)s] - %(message)s' - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s] - - %(message)s' - log_colors: - DEBUG: purple - INFO: green - WARNING: yellow - ERROR: red - CRITICAL: red - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - file: - class: logging.FileHandler - formatter: simple - filename: ${hydra.job.name}.log - root: - level: INFO - handlers: - - console - - file - disable_existing_loggers: false - env: {} - mode: MULTIRUN - searchpath: [] - callbacks: {} - output_subdir: .hydra - overrides: - hydra: - - hydra.mode=MULTIRUN - task: - - benchmark.input_shapes.batch_size=1 - job: - name: experiment - chdir: true - override_dirname: benchmark.input_shapes.batch_size=1 - id: '0' - num: 0 - config_name: awq+gemv - env_set: - CUDA_VISIBLE_DEVICES: '0' - CUDA_DEVICE_ORDER: PCI_BUS_ID - env_copy: [] - config: - override_dirname: - kv_sep: '=' - item_sep: ',' - exclude_keys: [] - runtime: - version: 1.3.2 - version_base: '1.3' - cwd: /workspace/optimum-benchmark/examples/gemm-vs-gemv - config_sources: - - path: hydra.conf - schema: pkg - provider: hydra - - path: optimum_benchmark - schema: pkg - provider: main - - path: hydra_plugins.hydra_colorlog.conf - schema: pkg - provider: hydra-colorlog - - path: /workspace/optimum-benchmark/examples/gemm-vs-gemv/configs - schema: file - provider: command-line - - path: '' - schema: structured - provider: schema - output_dir: /workspace/optimum-benchmark/examples/gemm-vs-gemv/experiments/awq+gemv-batch_size(1)-sequence_length(128)-new_tokens(256)/0 - choices: - benchmark: inference - backend: pytorch - hydra/env: default - hydra/callbacks: null - hydra/job_logging: colorlog - hydra/hydra_logging: colorlog - hydra/hydra_help: default - hydra/help: default - hydra/sweeper: basic - hydra/launcher: basic - hydra/output: default - verbose: false diff --git a/examples/running-vicunas/experiments/A100-80GB/awq+gemv-batch_size(1)-sequence_length(128)-new_tokens(256)/0/.hydra/overrides.yaml b/examples/running-vicunas/experiments/A100-80GB/awq+gemv-batch_size(1)-sequence_length(128)-new_tokens(256)/0/.hydra/overrides.yaml deleted file mode 100644 index 989520ff..00000000 --- a/examples/running-vicunas/experiments/A100-80GB/awq+gemv-batch_size(1)-sequence_length(128)-new_tokens(256)/0/.hydra/overrides.yaml +++ /dev/null @@ -1 +0,0 @@ -- benchmark.input_shapes.batch_size=1 diff --git a/examples/running-vicunas/experiments/A100-80GB/awq+gemv-batch_size(1)-sequence_length(128)-new_tokens(256)/0/hydra_config.yaml b/examples/running-vicunas/experiments/A100-80GB/awq+gemv-batch_size(1)-sequence_length(128)-new_tokens(256)/0/hydra_config.yaml deleted file mode 100644 index e9981adf..00000000 --- a/examples/running-vicunas/experiments/A100-80GB/awq+gemv-batch_size(1)-sequence_length(128)-new_tokens(256)/0/hydra_config.yaml +++ /dev/null @@ -1,76 +0,0 @@ -backend: - name: pytorch - version: 2.1.0+cu118 - _target_: optimum_benchmark.backends.pytorch.backend.PyTorchBackend - seed: 
42 - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: true - continous_isolation_check: true - delete_cache: false - no_weights: false - device_map: null - torch_dtype: float16 - disable_grad: true - eval_mode: true - amp_autocast: false - amp_dtype: null - torch_compile: false - torch_compile_config: {} - bettertransformer: false - quantization_scheme: null - quantization_config: {} - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 1 - sequence_length: 128 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: 256 - can_diffuse: false - can_generate: true - forward_kwargs: {} - generate_kwargs: - max_new_tokens: 256 - min_new_tokens: 256 - do_sample: false - use_cache: true - pad_token_id: 0 - num_beams: 1 -experiment_name: awq+gemv-batch_size(1)-sequence_length(128)-new_tokens(256) -model: IlyasMoutawwakil/vicuna-7b-v1.5-awq-gemv -device: cuda -task: text-generation -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.2 - optimum_commit: null - transformers_version: 4.35.0.dev0 - transformers_commit: null - accelerate_version: 0.24.0 - accelerate_commit: null - diffusers_version: null - diffusers_commit: null - python_version: 3.10.12 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB diff --git a/examples/running-vicunas/experiments/A100-80GB/awq+gemv-batch_size(1)-sequence_length(128)-new_tokens(256)/0/inference_results.csv b/examples/running-vicunas/experiments/A100-80GB/awq+gemv-batch_size(1)-sequence_length(128)-new_tokens(256)/0/inference_results.csv deleted file mode 100644 index b51690e0..00000000 --- a/examples/running-vicunas/experiments/A100-80GB/awq+gemv-batch_size(1)-sequence_length(128)-new_tokens(256)/0/inference_results.csv +++ /dev/null @@ -1,2 +0,0 @@ -forward.latency(s),forward.throughput(samples/s),forward.peak_memory(MB),generate.latency(s),generate.throughput(tokens/s),generate.peak_memory(MB) -0.039,25.6,6792,6.85,37.4,6981 diff --git a/examples/running-vicunas/experiments/A100-80GB/awq+gemv-batch_size(16)-sequence_length(128)-new_tokens(256)/4/.hydra/config.yaml b/examples/running-vicunas/experiments/A100-80GB/awq+gemv-batch_size(16)-sequence_length(128)-new_tokens(256)/4/.hydra/config.yaml deleted file mode 100644 index 853eb511..00000000 --- a/examples/running-vicunas/experiments/A100-80GB/awq+gemv-batch_size(16)-sequence_length(128)-new_tokens(256)/4/.hydra/config.yaml +++ /dev/null @@ -1,70 +0,0 @@ -backend: - name: pytorch - version: ${pytorch_version:} - _target_: optimum_benchmark.backends.pytorch.backend.PyTorchBackend - seed: 42 - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: true - continous_isolation_check: true - delete_cache: false - no_weights: false - device_map: null - torch_dtype: float16 - disable_grad: ${is_inference:${benchmark.name}} - eval_mode: ${is_inference:${benchmark.name}} - amp_autocast: false - amp_dtype: null - torch_compile: false - torch_compile_config: {} - bettertransformer: false - quantization_scheme: null - quantization_config: {} - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: 
inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 16 - sequence_length: 128 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: 256 - can_diffuse: ${can_diffuse:${task}} - can_generate: ${can_generate:${task}} - forward_kwargs: {} - generate_kwargs: {} -experiment_name: awq+gemv-batch_size(${benchmark.input_shapes.batch_size})-sequence_length(${benchmark.input_shapes.sequence_length})-new_tokens(${benchmark.new_tokens}) -model: IlyasMoutawwakil/vicuna-7b-v1.5-awq-gemv -device: cuda -task: ${infer_task:${model}} -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.2 - optimum_commit: null - transformers_version: 4.35.0.dev0 - transformers_commit: null - accelerate_version: 0.24.0 - accelerate_commit: null - diffusers_version: null - diffusers_commit: null - python_version: 3.10.12 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB diff --git a/examples/running-vicunas/experiments/A100-80GB/awq+gemv-batch_size(16)-sequence_length(128)-new_tokens(256)/4/.hydra/hydra.yaml b/examples/running-vicunas/experiments/A100-80GB/awq+gemv-batch_size(16)-sequence_length(128)-new_tokens(256)/4/.hydra/hydra.yaml deleted file mode 100644 index 33f6d907..00000000 --- a/examples/running-vicunas/experiments/A100-80GB/awq+gemv-batch_size(16)-sequence_length(128)-new_tokens(256)/4/.hydra/hydra.yaml +++ /dev/null @@ -1,174 +0,0 @@ -hydra: - run: - dir: experiments/${experiment_name} - sweep: - dir: experiments/${experiment_name} - subdir: ${hydra.job.num} - launcher: - _target_: hydra._internal.core_plugins.basic_launcher.BasicLauncher - sweeper: - _target_: hydra._internal.core_plugins.basic_sweeper.BasicSweeper - max_batch_size: null - params: - benchmark.input_shapes.batch_size: 1,2,4,8,16 - help: - app_name: ${hydra.job.name} - header: '${hydra.help.app_name} is powered by Hydra. - - ' - footer: 'Powered by Hydra (https://hydra.cc) - - Use --hydra-help to view Hydra specific help - - ' - template: '${hydra.help.header} - - == Configuration groups == - - Compose your configuration from those groups (group=option) - - - $APP_CONFIG_GROUPS - - - == Config == - - Override anything in the config (foo.bar=value) - - - $CONFIG - - - ${hydra.help.footer} - - ' - hydra_help: - template: 'Hydra (${hydra.runtime.version}) - - See https://hydra.cc for more info. - - - == Flags == - - $FLAGS_HELP - - - == Configuration groups == - - Compose your configuration from those groups (For example, append hydra/job_logging=disabled - to command line) - - - $HYDRA_CONFIG_GROUPS - - - Use ''--cfg hydra'' to Show the Hydra config. - - ' - hydra_help: ??? 
- hydra_logging: - version: 1 - formatters: - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s' - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - root: - level: INFO - handlers: - - console - disable_existing_loggers: false - job_logging: - version: 1 - formatters: - simple: - format: '[%(asctime)s][%(name)s][%(levelname)s] - %(message)s' - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s] - - %(message)s' - log_colors: - DEBUG: purple - INFO: green - WARNING: yellow - ERROR: red - CRITICAL: red - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - file: - class: logging.FileHandler - formatter: simple - filename: ${hydra.job.name}.log - root: - level: INFO - handlers: - - console - - file - disable_existing_loggers: false - env: {} - mode: MULTIRUN - searchpath: [] - callbacks: {} - output_subdir: .hydra - overrides: - hydra: - - hydra.mode=MULTIRUN - task: - - benchmark.input_shapes.batch_size=16 - job: - name: experiment - chdir: true - override_dirname: benchmark.input_shapes.batch_size=16 - id: '4' - num: 4 - config_name: awq+gemv - env_set: - CUDA_VISIBLE_DEVICES: '0' - CUDA_DEVICE_ORDER: PCI_BUS_ID - env_copy: [] - config: - override_dirname: - kv_sep: '=' - item_sep: ',' - exclude_keys: [] - runtime: - version: 1.3.2 - version_base: '1.3' - cwd: /workspace/optimum-benchmark/examples/gemm-vs-gemv - config_sources: - - path: hydra.conf - schema: pkg - provider: hydra - - path: optimum_benchmark - schema: pkg - provider: main - - path: hydra_plugins.hydra_colorlog.conf - schema: pkg - provider: hydra-colorlog - - path: /workspace/optimum-benchmark/examples/gemm-vs-gemv/configs - schema: file - provider: command-line - - path: '' - schema: structured - provider: schema - output_dir: /workspace/optimum-benchmark/examples/gemm-vs-gemv/experiments/awq+gemv-batch_size(16)-sequence_length(128)-new_tokens(256)/4 - choices: - benchmark: inference - backend: pytorch - hydra/env: default - hydra/callbacks: null - hydra/job_logging: colorlog - hydra/hydra_logging: colorlog - hydra/hydra_help: default - hydra/help: default - hydra/sweeper: basic - hydra/launcher: basic - hydra/output: default - verbose: false diff --git a/examples/running-vicunas/experiments/A100-80GB/awq+gemv-batch_size(16)-sequence_length(128)-new_tokens(256)/4/.hydra/overrides.yaml b/examples/running-vicunas/experiments/A100-80GB/awq+gemv-batch_size(16)-sequence_length(128)-new_tokens(256)/4/.hydra/overrides.yaml deleted file mode 100644 index fdb7f01d..00000000 --- a/examples/running-vicunas/experiments/A100-80GB/awq+gemv-batch_size(16)-sequence_length(128)-new_tokens(256)/4/.hydra/overrides.yaml +++ /dev/null @@ -1 +0,0 @@ -- benchmark.input_shapes.batch_size=16 diff --git a/examples/running-vicunas/experiments/A100-80GB/awq+gemv-batch_size(16)-sequence_length(128)-new_tokens(256)/4/hydra_config.yaml b/examples/running-vicunas/experiments/A100-80GB/awq+gemv-batch_size(16)-sequence_length(128)-new_tokens(256)/4/hydra_config.yaml deleted file mode 100644 index 964c8911..00000000 --- a/examples/running-vicunas/experiments/A100-80GB/awq+gemv-batch_size(16)-sequence_length(128)-new_tokens(256)/4/hydra_config.yaml +++ /dev/null @@ -1,76 +0,0 @@ -backend: - name: pytorch - version: 2.1.0+cu118 - _target_: 
optimum_benchmark.backends.pytorch.backend.PyTorchBackend - seed: 42 - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: true - continous_isolation_check: true - delete_cache: false - no_weights: false - device_map: null - torch_dtype: float16 - disable_grad: true - eval_mode: true - amp_autocast: false - amp_dtype: null - torch_compile: false - torch_compile_config: {} - bettertransformer: false - quantization_scheme: null - quantization_config: {} - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 16 - sequence_length: 128 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: 256 - can_diffuse: false - can_generate: true - forward_kwargs: {} - generate_kwargs: - max_new_tokens: 256 - min_new_tokens: 256 - do_sample: false - use_cache: true - pad_token_id: 0 - num_beams: 1 -experiment_name: awq+gemv-batch_size(16)-sequence_length(128)-new_tokens(256) -model: IlyasMoutawwakil/vicuna-7b-v1.5-awq-gemv -device: cuda -task: text-generation -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.2 - optimum_commit: null - transformers_version: 4.35.0.dev0 - transformers_commit: null - accelerate_version: 0.24.0 - accelerate_commit: null - diffusers_version: null - diffusers_commit: null - python_version: 3.10.12 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB diff --git a/examples/running-vicunas/experiments/A100-80GB/awq+gemv-batch_size(16)-sequence_length(128)-new_tokens(256)/4/inference_results.csv b/examples/running-vicunas/experiments/A100-80GB/awq+gemv-batch_size(16)-sequence_length(128)-new_tokens(256)/4/inference_results.csv deleted file mode 100644 index ee379e6e..00000000 --- a/examples/running-vicunas/experiments/A100-80GB/awq+gemv-batch_size(16)-sequence_length(128)-new_tokens(256)/4/inference_results.csv +++ /dev/null @@ -1,2 +0,0 @@ -forward.latency(s),forward.throughput(samples/s),forward.peak_memory(MB),generate.latency(s),generate.throughput(tokens/s),generate.peak_memory(MB) -0.47,34.0,10183,9.27,442.0,31247 diff --git a/examples/running-vicunas/experiments/A100-80GB/awq+gemv-batch_size(2)-sequence_length(128)-new_tokens(256)/1/.hydra/config.yaml b/examples/running-vicunas/experiments/A100-80GB/awq+gemv-batch_size(2)-sequence_length(128)-new_tokens(256)/1/.hydra/config.yaml deleted file mode 100644 index 57e25e56..00000000 --- a/examples/running-vicunas/experiments/A100-80GB/awq+gemv-batch_size(2)-sequence_length(128)-new_tokens(256)/1/.hydra/config.yaml +++ /dev/null @@ -1,70 +0,0 @@ -backend: - name: pytorch - version: ${pytorch_version:} - _target_: optimum_benchmark.backends.pytorch.backend.PyTorchBackend - seed: 42 - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: true - continous_isolation_check: true - delete_cache: false - no_weights: false - device_map: null - torch_dtype: float16 - disable_grad: ${is_inference:${benchmark.name}} - eval_mode: ${is_inference:${benchmark.name}} - amp_autocast: false - amp_dtype: null - torch_compile: false - torch_compile_config: {} - bettertransformer: false - quantization_scheme: null - quantization_config: {} - use_ddp: false - 
ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 2 - sequence_length: 128 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: 256 - can_diffuse: ${can_diffuse:${task}} - can_generate: ${can_generate:${task}} - forward_kwargs: {} - generate_kwargs: {} -experiment_name: awq+gemv-batch_size(${benchmark.input_shapes.batch_size})-sequence_length(${benchmark.input_shapes.sequence_length})-new_tokens(${benchmark.new_tokens}) -model: IlyasMoutawwakil/vicuna-7b-v1.5-awq-gemv -device: cuda -task: ${infer_task:${model}} -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.2 - optimum_commit: null - transformers_version: 4.35.0.dev0 - transformers_commit: null - accelerate_version: 0.24.0 - accelerate_commit: null - diffusers_version: null - diffusers_commit: null - python_version: 3.10.12 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB diff --git a/examples/running-vicunas/experiments/A100-80GB/awq+gemv-batch_size(2)-sequence_length(128)-new_tokens(256)/1/.hydra/hydra.yaml b/examples/running-vicunas/experiments/A100-80GB/awq+gemv-batch_size(2)-sequence_length(128)-new_tokens(256)/1/.hydra/hydra.yaml deleted file mode 100644 index b9ec1b7b..00000000 --- a/examples/running-vicunas/experiments/A100-80GB/awq+gemv-batch_size(2)-sequence_length(128)-new_tokens(256)/1/.hydra/hydra.yaml +++ /dev/null @@ -1,174 +0,0 @@ -hydra: - run: - dir: experiments/${experiment_name} - sweep: - dir: experiments/${experiment_name} - subdir: ${hydra.job.num} - launcher: - _target_: hydra._internal.core_plugins.basic_launcher.BasicLauncher - sweeper: - _target_: hydra._internal.core_plugins.basic_sweeper.BasicSweeper - max_batch_size: null - params: - benchmark.input_shapes.batch_size: 1,2,4,8,16 - help: - app_name: ${hydra.job.name} - header: '${hydra.help.app_name} is powered by Hydra. - - ' - footer: 'Powered by Hydra (https://hydra.cc) - - Use --hydra-help to view Hydra specific help - - ' - template: '${hydra.help.header} - - == Configuration groups == - - Compose your configuration from those groups (group=option) - - - $APP_CONFIG_GROUPS - - - == Config == - - Override anything in the config (foo.bar=value) - - - $CONFIG - - - ${hydra.help.footer} - - ' - hydra_help: - template: 'Hydra (${hydra.runtime.version}) - - See https://hydra.cc for more info. - - - == Flags == - - $FLAGS_HELP - - - == Configuration groups == - - Compose your configuration from those groups (For example, append hydra/job_logging=disabled - to command line) - - - $HYDRA_CONFIG_GROUPS - - - Use ''--cfg hydra'' to Show the Hydra config. - - ' - hydra_help: ??? 
- hydra_logging: - version: 1 - formatters: - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s' - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - root: - level: INFO - handlers: - - console - disable_existing_loggers: false - job_logging: - version: 1 - formatters: - simple: - format: '[%(asctime)s][%(name)s][%(levelname)s] - %(message)s' - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s] - - %(message)s' - log_colors: - DEBUG: purple - INFO: green - WARNING: yellow - ERROR: red - CRITICAL: red - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - file: - class: logging.FileHandler - formatter: simple - filename: ${hydra.job.name}.log - root: - level: INFO - handlers: - - console - - file - disable_existing_loggers: false - env: {} - mode: MULTIRUN - searchpath: [] - callbacks: {} - output_subdir: .hydra - overrides: - hydra: - - hydra.mode=MULTIRUN - task: - - benchmark.input_shapes.batch_size=2 - job: - name: experiment - chdir: true - override_dirname: benchmark.input_shapes.batch_size=2 - id: '1' - num: 1 - config_name: awq+gemv - env_set: - CUDA_VISIBLE_DEVICES: '0' - CUDA_DEVICE_ORDER: PCI_BUS_ID - env_copy: [] - config: - override_dirname: - kv_sep: '=' - item_sep: ',' - exclude_keys: [] - runtime: - version: 1.3.2 - version_base: '1.3' - cwd: /workspace/optimum-benchmark/examples/gemm-vs-gemv - config_sources: - - path: hydra.conf - schema: pkg - provider: hydra - - path: optimum_benchmark - schema: pkg - provider: main - - path: hydra_plugins.hydra_colorlog.conf - schema: pkg - provider: hydra-colorlog - - path: /workspace/optimum-benchmark/examples/gemm-vs-gemv/configs - schema: file - provider: command-line - - path: '' - schema: structured - provider: schema - output_dir: /workspace/optimum-benchmark/examples/gemm-vs-gemv/experiments/awq+gemv-batch_size(2)-sequence_length(128)-new_tokens(256)/1 - choices: - benchmark: inference - backend: pytorch - hydra/env: default - hydra/callbacks: null - hydra/job_logging: colorlog - hydra/hydra_logging: colorlog - hydra/hydra_help: default - hydra/help: default - hydra/sweeper: basic - hydra/launcher: basic - hydra/output: default - verbose: false diff --git a/examples/running-vicunas/experiments/A100-80GB/awq+gemv-batch_size(2)-sequence_length(128)-new_tokens(256)/1/.hydra/overrides.yaml b/examples/running-vicunas/experiments/A100-80GB/awq+gemv-batch_size(2)-sequence_length(128)-new_tokens(256)/1/.hydra/overrides.yaml deleted file mode 100644 index 8211b85f..00000000 --- a/examples/running-vicunas/experiments/A100-80GB/awq+gemv-batch_size(2)-sequence_length(128)-new_tokens(256)/1/.hydra/overrides.yaml +++ /dev/null @@ -1 +0,0 @@ -- benchmark.input_shapes.batch_size=2 diff --git a/examples/running-vicunas/experiments/A100-80GB/awq+gemv-batch_size(2)-sequence_length(128)-new_tokens(256)/1/hydra_config.yaml b/examples/running-vicunas/experiments/A100-80GB/awq+gemv-batch_size(2)-sequence_length(128)-new_tokens(256)/1/hydra_config.yaml deleted file mode 100644 index 2a087cb1..00000000 --- a/examples/running-vicunas/experiments/A100-80GB/awq+gemv-batch_size(2)-sequence_length(128)-new_tokens(256)/1/hydra_config.yaml +++ /dev/null @@ -1,76 +0,0 @@ -backend: - name: pytorch - version: 2.1.0+cu118 - _target_: optimum_benchmark.backends.pytorch.backend.PyTorchBackend - seed: 
42 - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: true - continous_isolation_check: true - delete_cache: false - no_weights: false - device_map: null - torch_dtype: float16 - disable_grad: true - eval_mode: true - amp_autocast: false - amp_dtype: null - torch_compile: false - torch_compile_config: {} - bettertransformer: false - quantization_scheme: null - quantization_config: {} - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 2 - sequence_length: 128 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: 256 - can_diffuse: false - can_generate: true - forward_kwargs: {} - generate_kwargs: - max_new_tokens: 256 - min_new_tokens: 256 - do_sample: false - use_cache: true - pad_token_id: 0 - num_beams: 1 -experiment_name: awq+gemv-batch_size(2)-sequence_length(128)-new_tokens(256) -model: IlyasMoutawwakil/vicuna-7b-v1.5-awq-gemv -device: cuda -task: text-generation -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.2 - optimum_commit: null - transformers_version: 4.35.0.dev0 - transformers_commit: null - accelerate_version: 0.24.0 - accelerate_commit: null - diffusers_version: null - diffusers_commit: null - python_version: 3.10.12 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB diff --git a/examples/running-vicunas/experiments/A100-80GB/awq+gemv-batch_size(2)-sequence_length(128)-new_tokens(256)/1/inference_results.csv b/examples/running-vicunas/experiments/A100-80GB/awq+gemv-batch_size(2)-sequence_length(128)-new_tokens(256)/1/inference_results.csv deleted file mode 100644 index 9af67a3d..00000000 --- a/examples/running-vicunas/experiments/A100-80GB/awq+gemv-batch_size(2)-sequence_length(128)-new_tokens(256)/1/inference_results.csv +++ /dev/null @@ -1,2 +0,0 @@ -forward.latency(s),forward.throughput(samples/s),forward.peak_memory(MB),generate.latency(s),generate.throughput(tokens/s),generate.peak_memory(MB) -0.0675,29.6,6756,6.89,74.3,7511 diff --git a/examples/running-vicunas/experiments/A100-80GB/awq+gemv-batch_size(4)-sequence_length(128)-new_tokens(256)/2/.hydra/config.yaml b/examples/running-vicunas/experiments/A100-80GB/awq+gemv-batch_size(4)-sequence_length(128)-new_tokens(256)/2/.hydra/config.yaml deleted file mode 100644 index 7b422c4d..00000000 --- a/examples/running-vicunas/experiments/A100-80GB/awq+gemv-batch_size(4)-sequence_length(128)-new_tokens(256)/2/.hydra/config.yaml +++ /dev/null @@ -1,70 +0,0 @@ -backend: - name: pytorch - version: ${pytorch_version:} - _target_: optimum_benchmark.backends.pytorch.backend.PyTorchBackend - seed: 42 - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: true - continous_isolation_check: true - delete_cache: false - no_weights: false - device_map: null - torch_dtype: float16 - disable_grad: ${is_inference:${benchmark.name}} - eval_mode: ${is_inference:${benchmark.name}} - amp_autocast: false - amp_dtype: null - torch_compile: false - torch_compile_config: {} - bettertransformer: false - quantization_scheme: null - quantization_config: {} - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: 
inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 4 - sequence_length: 128 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: 256 - can_diffuse: ${can_diffuse:${task}} - can_generate: ${can_generate:${task}} - forward_kwargs: {} - generate_kwargs: {} -experiment_name: awq+gemv-batch_size(${benchmark.input_shapes.batch_size})-sequence_length(${benchmark.input_shapes.sequence_length})-new_tokens(${benchmark.new_tokens}) -model: IlyasMoutawwakil/vicuna-7b-v1.5-awq-gemv -device: cuda -task: ${infer_task:${model}} -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.2 - optimum_commit: null - transformers_version: 4.35.0.dev0 - transformers_commit: null - accelerate_version: 0.24.0 - accelerate_commit: null - diffusers_version: null - diffusers_commit: null - python_version: 3.10.12 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB diff --git a/examples/running-vicunas/experiments/A100-80GB/awq+gemv-batch_size(4)-sequence_length(128)-new_tokens(256)/2/.hydra/hydra.yaml b/examples/running-vicunas/experiments/A100-80GB/awq+gemv-batch_size(4)-sequence_length(128)-new_tokens(256)/2/.hydra/hydra.yaml deleted file mode 100644 index 8c75d0ea..00000000 --- a/examples/running-vicunas/experiments/A100-80GB/awq+gemv-batch_size(4)-sequence_length(128)-new_tokens(256)/2/.hydra/hydra.yaml +++ /dev/null @@ -1,174 +0,0 @@ -hydra: - run: - dir: experiments/${experiment_name} - sweep: - dir: experiments/${experiment_name} - subdir: ${hydra.job.num} - launcher: - _target_: hydra._internal.core_plugins.basic_launcher.BasicLauncher - sweeper: - _target_: hydra._internal.core_plugins.basic_sweeper.BasicSweeper - max_batch_size: null - params: - benchmark.input_shapes.batch_size: 1,2,4,8,16 - help: - app_name: ${hydra.job.name} - header: '${hydra.help.app_name} is powered by Hydra. - - ' - footer: 'Powered by Hydra (https://hydra.cc) - - Use --hydra-help to view Hydra specific help - - ' - template: '${hydra.help.header} - - == Configuration groups == - - Compose your configuration from those groups (group=option) - - - $APP_CONFIG_GROUPS - - - == Config == - - Override anything in the config (foo.bar=value) - - - $CONFIG - - - ${hydra.help.footer} - - ' - hydra_help: - template: 'Hydra (${hydra.runtime.version}) - - See https://hydra.cc for more info. - - - == Flags == - - $FLAGS_HELP - - - == Configuration groups == - - Compose your configuration from those groups (For example, append hydra/job_logging=disabled - to command line) - - - $HYDRA_CONFIG_GROUPS - - - Use ''--cfg hydra'' to Show the Hydra config. - - ' - hydra_help: ??? 
- hydra_logging: - version: 1 - formatters: - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s' - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - root: - level: INFO - handlers: - - console - disable_existing_loggers: false - job_logging: - version: 1 - formatters: - simple: - format: '[%(asctime)s][%(name)s][%(levelname)s] - %(message)s' - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s] - - %(message)s' - log_colors: - DEBUG: purple - INFO: green - WARNING: yellow - ERROR: red - CRITICAL: red - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - file: - class: logging.FileHandler - formatter: simple - filename: ${hydra.job.name}.log - root: - level: INFO - handlers: - - console - - file - disable_existing_loggers: false - env: {} - mode: MULTIRUN - searchpath: [] - callbacks: {} - output_subdir: .hydra - overrides: - hydra: - - hydra.mode=MULTIRUN - task: - - benchmark.input_shapes.batch_size=4 - job: - name: experiment - chdir: true - override_dirname: benchmark.input_shapes.batch_size=4 - id: '2' - num: 2 - config_name: awq+gemv - env_set: - CUDA_VISIBLE_DEVICES: '0' - CUDA_DEVICE_ORDER: PCI_BUS_ID - env_copy: [] - config: - override_dirname: - kv_sep: '=' - item_sep: ',' - exclude_keys: [] - runtime: - version: 1.3.2 - version_base: '1.3' - cwd: /workspace/optimum-benchmark/examples/gemm-vs-gemv - config_sources: - - path: hydra.conf - schema: pkg - provider: hydra - - path: optimum_benchmark - schema: pkg - provider: main - - path: hydra_plugins.hydra_colorlog.conf - schema: pkg - provider: hydra-colorlog - - path: /workspace/optimum-benchmark/examples/gemm-vs-gemv/configs - schema: file - provider: command-line - - path: '' - schema: structured - provider: schema - output_dir: /workspace/optimum-benchmark/examples/gemm-vs-gemv/experiments/awq+gemv-batch_size(4)-sequence_length(128)-new_tokens(256)/2 - choices: - benchmark: inference - backend: pytorch - hydra/env: default - hydra/callbacks: null - hydra/job_logging: colorlog - hydra/hydra_logging: colorlog - hydra/hydra_help: default - hydra/help: default - hydra/sweeper: basic - hydra/launcher: basic - hydra/output: default - verbose: false diff --git a/examples/running-vicunas/experiments/A100-80GB/awq+gemv-batch_size(4)-sequence_length(128)-new_tokens(256)/2/.hydra/overrides.yaml b/examples/running-vicunas/experiments/A100-80GB/awq+gemv-batch_size(4)-sequence_length(128)-new_tokens(256)/2/.hydra/overrides.yaml deleted file mode 100644 index eef8c9ca..00000000 --- a/examples/running-vicunas/experiments/A100-80GB/awq+gemv-batch_size(4)-sequence_length(128)-new_tokens(256)/2/.hydra/overrides.yaml +++ /dev/null @@ -1 +0,0 @@ -- benchmark.input_shapes.batch_size=4 diff --git a/examples/running-vicunas/experiments/A100-80GB/awq+gemv-batch_size(4)-sequence_length(128)-new_tokens(256)/2/hydra_config.yaml b/examples/running-vicunas/experiments/A100-80GB/awq+gemv-batch_size(4)-sequence_length(128)-new_tokens(256)/2/hydra_config.yaml deleted file mode 100644 index abfbfcdc..00000000 --- a/examples/running-vicunas/experiments/A100-80GB/awq+gemv-batch_size(4)-sequence_length(128)-new_tokens(256)/2/hydra_config.yaml +++ /dev/null @@ -1,76 +0,0 @@ -backend: - name: pytorch - version: 2.1.0+cu118 - _target_: optimum_benchmark.backends.pytorch.backend.PyTorchBackend - seed: 
42 - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: true - continous_isolation_check: true - delete_cache: false - no_weights: false - device_map: null - torch_dtype: float16 - disable_grad: true - eval_mode: true - amp_autocast: false - amp_dtype: null - torch_compile: false - torch_compile_config: {} - bettertransformer: false - quantization_scheme: null - quantization_config: {} - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 4 - sequence_length: 128 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: 256 - can_diffuse: false - can_generate: true - forward_kwargs: {} - generate_kwargs: - max_new_tokens: 256 - min_new_tokens: 256 - do_sample: false - use_cache: true - pad_token_id: 0 - num_beams: 1 -experiment_name: awq+gemv-batch_size(4)-sequence_length(128)-new_tokens(256) -model: IlyasMoutawwakil/vicuna-7b-v1.5-awq-gemv -device: cuda -task: text-generation -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.2 - optimum_commit: null - transformers_version: 4.35.0.dev0 - transformers_commit: null - accelerate_version: 0.24.0 - accelerate_commit: null - diffusers_version: null - diffusers_commit: null - python_version: 3.10.12 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB diff --git a/examples/running-vicunas/experiments/A100-80GB/awq+gemv-batch_size(4)-sequence_length(128)-new_tokens(256)/2/inference_results.csv b/examples/running-vicunas/experiments/A100-80GB/awq+gemv-batch_size(4)-sequence_length(128)-new_tokens(256)/2/inference_results.csv deleted file mode 100644 index 1ea01ae7..00000000 --- a/examples/running-vicunas/experiments/A100-80GB/awq+gemv-batch_size(4)-sequence_length(128)-new_tokens(256)/2/inference_results.csv +++ /dev/null @@ -1,2 +0,0 @@ -forward.latency(s),forward.throughput(samples/s),forward.peak_memory(MB),generate.latency(s),generate.throughput(tokens/s),generate.peak_memory(MB) -0.124,32.3,7245,7.01,146.0,8602 diff --git a/examples/running-vicunas/experiments/A100-80GB/awq+gemv-batch_size(8)-sequence_length(128)-new_tokens(256)/3/.hydra/config.yaml b/examples/running-vicunas/experiments/A100-80GB/awq+gemv-batch_size(8)-sequence_length(128)-new_tokens(256)/3/.hydra/config.yaml deleted file mode 100644 index 6b493dfb..00000000 --- a/examples/running-vicunas/experiments/A100-80GB/awq+gemv-batch_size(8)-sequence_length(128)-new_tokens(256)/3/.hydra/config.yaml +++ /dev/null @@ -1,70 +0,0 @@ -backend: - name: pytorch - version: ${pytorch_version:} - _target_: optimum_benchmark.backends.pytorch.backend.PyTorchBackend - seed: 42 - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: true - continous_isolation_check: true - delete_cache: false - no_weights: false - device_map: null - torch_dtype: float16 - disable_grad: ${is_inference:${benchmark.name}} - eval_mode: ${is_inference:${benchmark.name}} - amp_autocast: false - amp_dtype: null - torch_compile: false - torch_compile_config: {} - bettertransformer: false - quantization_scheme: null - quantization_config: {} - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: 
inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 8 - sequence_length: 128 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: 256 - can_diffuse: ${can_diffuse:${task}} - can_generate: ${can_generate:${task}} - forward_kwargs: {} - generate_kwargs: {} -experiment_name: awq+gemv-batch_size(${benchmark.input_shapes.batch_size})-sequence_length(${benchmark.input_shapes.sequence_length})-new_tokens(${benchmark.new_tokens}) -model: IlyasMoutawwakil/vicuna-7b-v1.5-awq-gemv -device: cuda -task: ${infer_task:${model}} -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.2 - optimum_commit: null - transformers_version: 4.35.0.dev0 - transformers_commit: null - accelerate_version: 0.24.0 - accelerate_commit: null - diffusers_version: null - diffusers_commit: null - python_version: 3.10.12 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB diff --git a/examples/running-vicunas/experiments/A100-80GB/awq+gemv-batch_size(8)-sequence_length(128)-new_tokens(256)/3/.hydra/hydra.yaml b/examples/running-vicunas/experiments/A100-80GB/awq+gemv-batch_size(8)-sequence_length(128)-new_tokens(256)/3/.hydra/hydra.yaml deleted file mode 100644 index cae0cb49..00000000 --- a/examples/running-vicunas/experiments/A100-80GB/awq+gemv-batch_size(8)-sequence_length(128)-new_tokens(256)/3/.hydra/hydra.yaml +++ /dev/null @@ -1,174 +0,0 @@ -hydra: - run: - dir: experiments/${experiment_name} - sweep: - dir: experiments/${experiment_name} - subdir: ${hydra.job.num} - launcher: - _target_: hydra._internal.core_plugins.basic_launcher.BasicLauncher - sweeper: - _target_: hydra._internal.core_plugins.basic_sweeper.BasicSweeper - max_batch_size: null - params: - benchmark.input_shapes.batch_size: 1,2,4,8,16 - help: - app_name: ${hydra.job.name} - header: '${hydra.help.app_name} is powered by Hydra. - - ' - footer: 'Powered by Hydra (https://hydra.cc) - - Use --hydra-help to view Hydra specific help - - ' - template: '${hydra.help.header} - - == Configuration groups == - - Compose your configuration from those groups (group=option) - - - $APP_CONFIG_GROUPS - - - == Config == - - Override anything in the config (foo.bar=value) - - - $CONFIG - - - ${hydra.help.footer} - - ' - hydra_help: - template: 'Hydra (${hydra.runtime.version}) - - See https://hydra.cc for more info. - - - == Flags == - - $FLAGS_HELP - - - == Configuration groups == - - Compose your configuration from those groups (For example, append hydra/job_logging=disabled - to command line) - - - $HYDRA_CONFIG_GROUPS - - - Use ''--cfg hydra'' to Show the Hydra config. - - ' - hydra_help: ??? 
- hydra_logging: - version: 1 - formatters: - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s' - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - root: - level: INFO - handlers: - - console - disable_existing_loggers: false - job_logging: - version: 1 - formatters: - simple: - format: '[%(asctime)s][%(name)s][%(levelname)s] - %(message)s' - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s] - - %(message)s' - log_colors: - DEBUG: purple - INFO: green - WARNING: yellow - ERROR: red - CRITICAL: red - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - file: - class: logging.FileHandler - formatter: simple - filename: ${hydra.job.name}.log - root: - level: INFO - handlers: - - console - - file - disable_existing_loggers: false - env: {} - mode: MULTIRUN - searchpath: [] - callbacks: {} - output_subdir: .hydra - overrides: - hydra: - - hydra.mode=MULTIRUN - task: - - benchmark.input_shapes.batch_size=8 - job: - name: experiment - chdir: true - override_dirname: benchmark.input_shapes.batch_size=8 - id: '3' - num: 3 - config_name: awq+gemv - env_set: - CUDA_VISIBLE_DEVICES: '0' - CUDA_DEVICE_ORDER: PCI_BUS_ID - env_copy: [] - config: - override_dirname: - kv_sep: '=' - item_sep: ',' - exclude_keys: [] - runtime: - version: 1.3.2 - version_base: '1.3' - cwd: /workspace/optimum-benchmark/examples/gemm-vs-gemv - config_sources: - - path: hydra.conf - schema: pkg - provider: hydra - - path: optimum_benchmark - schema: pkg - provider: main - - path: hydra_plugins.hydra_colorlog.conf - schema: pkg - provider: hydra-colorlog - - path: /workspace/optimum-benchmark/examples/gemm-vs-gemv/configs - schema: file - provider: command-line - - path: '' - schema: structured - provider: schema - output_dir: /workspace/optimum-benchmark/examples/gemm-vs-gemv/experiments/awq+gemv-batch_size(8)-sequence_length(128)-new_tokens(256)/3 - choices: - benchmark: inference - backend: pytorch - hydra/env: default - hydra/callbacks: null - hydra/job_logging: colorlog - hydra/hydra_logging: colorlog - hydra/hydra_help: default - hydra/help: default - hydra/sweeper: basic - hydra/launcher: basic - hydra/output: default - verbose: false diff --git a/examples/running-vicunas/experiments/A100-80GB/awq+gemv-batch_size(8)-sequence_length(128)-new_tokens(256)/3/.hydra/overrides.yaml b/examples/running-vicunas/experiments/A100-80GB/awq+gemv-batch_size(8)-sequence_length(128)-new_tokens(256)/3/.hydra/overrides.yaml deleted file mode 100644 index 8cd14374..00000000 --- a/examples/running-vicunas/experiments/A100-80GB/awq+gemv-batch_size(8)-sequence_length(128)-new_tokens(256)/3/.hydra/overrides.yaml +++ /dev/null @@ -1 +0,0 @@ -- benchmark.input_shapes.batch_size=8 diff --git a/examples/running-vicunas/experiments/A100-80GB/awq+gemv-batch_size(8)-sequence_length(128)-new_tokens(256)/3/hydra_config.yaml b/examples/running-vicunas/experiments/A100-80GB/awq+gemv-batch_size(8)-sequence_length(128)-new_tokens(256)/3/hydra_config.yaml deleted file mode 100644 index 28b611b1..00000000 --- a/examples/running-vicunas/experiments/A100-80GB/awq+gemv-batch_size(8)-sequence_length(128)-new_tokens(256)/3/hydra_config.yaml +++ /dev/null @@ -1,76 +0,0 @@ -backend: - name: pytorch - version: 2.1.0+cu118 - _target_: optimum_benchmark.backends.pytorch.backend.PyTorchBackend - seed: 
42 - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: true - continous_isolation_check: true - delete_cache: false - no_weights: false - device_map: null - torch_dtype: float16 - disable_grad: true - eval_mode: true - amp_autocast: false - amp_dtype: null - torch_compile: false - torch_compile_config: {} - bettertransformer: false - quantization_scheme: null - quantization_config: {} - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 8 - sequence_length: 128 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: 256 - can_diffuse: false - can_generate: true - forward_kwargs: {} - generate_kwargs: - max_new_tokens: 256 - min_new_tokens: 256 - do_sample: false - use_cache: true - pad_token_id: 0 - num_beams: 1 -experiment_name: awq+gemv-batch_size(8)-sequence_length(128)-new_tokens(256) -model: IlyasMoutawwakil/vicuna-7b-v1.5-awq-gemv -device: cuda -task: text-generation -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.2 - optimum_commit: null - transformers_version: 4.35.0.dev0 - transformers_commit: null - accelerate_version: 0.24.0 - accelerate_commit: null - diffusers_version: null - diffusers_commit: null - python_version: 3.10.12 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB diff --git a/examples/running-vicunas/experiments/A100-80GB/awq+gemv-batch_size(8)-sequence_length(128)-new_tokens(256)/3/inference_results.csv b/examples/running-vicunas/experiments/A100-80GB/awq+gemv-batch_size(8)-sequence_length(128)-new_tokens(256)/3/inference_results.csv deleted file mode 100644 index 11aa5b46..00000000 --- a/examples/running-vicunas/experiments/A100-80GB/awq+gemv-batch_size(8)-sequence_length(128)-new_tokens(256)/3/inference_results.csv +++ /dev/null @@ -1,2 +0,0 @@ -forward.latency(s),forward.throughput(samples/s),forward.peak_memory(MB),generate.latency(s),generate.throughput(tokens/s),generate.peak_memory(MB) -0.24,33.3,8394,10.7,191.0,14176 diff --git a/examples/running-vicunas/experiments/A100-80GB/bnb-batch_size(1)-sequence_length(128)-new_tokens(256)/0/.hydra/config.yaml b/examples/running-vicunas/experiments/A100-80GB/bnb-batch_size(1)-sequence_length(128)-new_tokens(256)/0/.hydra/config.yaml deleted file mode 100644 index 011fb2db..00000000 --- a/examples/running-vicunas/experiments/A100-80GB/bnb-batch_size(1)-sequence_length(128)-new_tokens(256)/0/.hydra/config.yaml +++ /dev/null @@ -1,72 +0,0 @@ -backend: - name: pytorch - version: ${pytorch_version:} - _target_: optimum_benchmark.backends.pytorch.backend.PyTorchBackend - seed: 42 - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: true - continous_isolation_check: true - delete_cache: false - no_weights: false - device_map: null - torch_dtype: float16 - disable_grad: ${is_inference:${benchmark.name}} - eval_mode: ${is_inference:${benchmark.name}} - amp_autocast: false - amp_dtype: null - torch_compile: false - torch_compile_config: {} - bettertransformer: false - quantization_scheme: bnb - quantization_config: - load_in_4bit: true - bnb_4bit_compute_dtype: float16 - use_ddp: false - ddp_config: {} - peft_strategy: null - 
peft_config: {} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 1 - sequence_length: 128 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: 256 - can_diffuse: ${can_diffuse:${task}} - can_generate: ${can_generate:${task}} - forward_kwargs: {} - generate_kwargs: {} -experiment_name: bnb-batch_size(${benchmark.input_shapes.batch_size})-sequence_length(${benchmark.input_shapes.sequence_length})-new_tokens(${benchmark.new_tokens}) -model: lmsys/vicuna-7b-v1.5 -device: cuda -task: ${infer_task:${model}} -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.2 - optimum_commit: null - transformers_version: 4.35.0.dev0 - transformers_commit: null - accelerate_version: 0.24.0 - accelerate_commit: null - diffusers_version: null - diffusers_commit: null - python_version: 3.10.12 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB diff --git a/examples/running-vicunas/experiments/A100-80GB/bnb-batch_size(1)-sequence_length(128)-new_tokens(256)/0/.hydra/hydra.yaml b/examples/running-vicunas/experiments/A100-80GB/bnb-batch_size(1)-sequence_length(128)-new_tokens(256)/0/.hydra/hydra.yaml deleted file mode 100644 index 82545db0..00000000 --- a/examples/running-vicunas/experiments/A100-80GB/bnb-batch_size(1)-sequence_length(128)-new_tokens(256)/0/.hydra/hydra.yaml +++ /dev/null @@ -1,174 +0,0 @@ -hydra: - run: - dir: experiments/${experiment_name} - sweep: - dir: experiments/${experiment_name} - subdir: ${hydra.job.num} - launcher: - _target_: hydra._internal.core_plugins.basic_launcher.BasicLauncher - sweeper: - _target_: hydra._internal.core_plugins.basic_sweeper.BasicSweeper - max_batch_size: null - params: - benchmark.input_shapes.batch_size: 1,2,4,8,16 - help: - app_name: ${hydra.job.name} - header: '${hydra.help.app_name} is powered by Hydra. - - ' - footer: 'Powered by Hydra (https://hydra.cc) - - Use --hydra-help to view Hydra specific help - - ' - template: '${hydra.help.header} - - == Configuration groups == - - Compose your configuration from those groups (group=option) - - - $APP_CONFIG_GROUPS - - - == Config == - - Override anything in the config (foo.bar=value) - - - $CONFIG - - - ${hydra.help.footer} - - ' - hydra_help: - template: 'Hydra (${hydra.runtime.version}) - - See https://hydra.cc for more info. - - - == Flags == - - $FLAGS_HELP - - - == Configuration groups == - - Compose your configuration from those groups (For example, append hydra/job_logging=disabled - to command line) - - - $HYDRA_CONFIG_GROUPS - - - Use ''--cfg hydra'' to Show the Hydra config. - - ' - hydra_help: ??? 
- hydra_logging: - version: 1 - formatters: - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s' - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - root: - level: INFO - handlers: - - console - disable_existing_loggers: false - job_logging: - version: 1 - formatters: - simple: - format: '[%(asctime)s][%(name)s][%(levelname)s] - %(message)s' - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s] - - %(message)s' - log_colors: - DEBUG: purple - INFO: green - WARNING: yellow - ERROR: red - CRITICAL: red - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - file: - class: logging.FileHandler - formatter: simple - filename: ${hydra.job.name}.log - root: - level: INFO - handlers: - - console - - file - disable_existing_loggers: false - env: {} - mode: MULTIRUN - searchpath: [] - callbacks: {} - output_subdir: .hydra - overrides: - hydra: - - hydra.mode=MULTIRUN - task: - - benchmark.input_shapes.batch_size=1 - job: - name: experiment - chdir: true - override_dirname: benchmark.input_shapes.batch_size=1 - id: '0' - num: 0 - config_name: bnb - env_set: - CUDA_VISIBLE_DEVICES: '0' - CUDA_DEVICE_ORDER: PCI_BUS_ID - env_copy: [] - config: - override_dirname: - kv_sep: '=' - item_sep: ',' - exclude_keys: [] - runtime: - version: 1.3.2 - version_base: '1.3' - cwd: /workspace/optimum-benchmark/examples/gemm-vs-gemv - config_sources: - - path: hydra.conf - schema: pkg - provider: hydra - - path: optimum_benchmark - schema: pkg - provider: main - - path: hydra_plugins.hydra_colorlog.conf - schema: pkg - provider: hydra-colorlog - - path: /workspace/optimum-benchmark/examples/gemm-vs-gemv/configs - schema: file - provider: command-line - - path: '' - schema: structured - provider: schema - output_dir: /workspace/optimum-benchmark/examples/gemm-vs-gemv/experiments/bnb-batch_size(1)-sequence_length(128)-new_tokens(256)/0 - choices: - benchmark: inference - backend: pytorch - hydra/env: default - hydra/callbacks: null - hydra/job_logging: colorlog - hydra/hydra_logging: colorlog - hydra/hydra_help: default - hydra/help: default - hydra/sweeper: basic - hydra/launcher: basic - hydra/output: default - verbose: false diff --git a/examples/running-vicunas/experiments/A100-80GB/bnb-batch_size(1)-sequence_length(128)-new_tokens(256)/0/.hydra/overrides.yaml b/examples/running-vicunas/experiments/A100-80GB/bnb-batch_size(1)-sequence_length(128)-new_tokens(256)/0/.hydra/overrides.yaml deleted file mode 100644 index 989520ff..00000000 --- a/examples/running-vicunas/experiments/A100-80GB/bnb-batch_size(1)-sequence_length(128)-new_tokens(256)/0/.hydra/overrides.yaml +++ /dev/null @@ -1 +0,0 @@ -- benchmark.input_shapes.batch_size=1 diff --git a/examples/running-vicunas/experiments/A100-80GB/bnb-batch_size(1)-sequence_length(128)-new_tokens(256)/0/hydra_config.yaml b/examples/running-vicunas/experiments/A100-80GB/bnb-batch_size(1)-sequence_length(128)-new_tokens(256)/0/hydra_config.yaml deleted file mode 100644 index 9ff1ad9d..00000000 --- a/examples/running-vicunas/experiments/A100-80GB/bnb-batch_size(1)-sequence_length(128)-new_tokens(256)/0/hydra_config.yaml +++ /dev/null @@ -1,79 +0,0 @@ -backend: - name: pytorch - version: 2.1.0+cu118 - _target_: optimum_benchmark.backends.pytorch.backend.PyTorchBackend - seed: 42 - inter_op_num_threads: null - 
intra_op_num_threads: null - initial_isolation_check: true - continous_isolation_check: true - delete_cache: false - no_weights: false - device_map: null - torch_dtype: float16 - disable_grad: true - eval_mode: true - amp_autocast: false - amp_dtype: null - torch_compile: false - torch_compile_config: {} - bettertransformer: false - quantization_scheme: bnb - quantization_config: - llm_int8_threshold: 0.0 - load_in_4bit: true - bnb_4bit_compute_dtype: float16 - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 1 - sequence_length: 128 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: 256 - can_diffuse: false - can_generate: true - forward_kwargs: {} - generate_kwargs: - max_new_tokens: 256 - min_new_tokens: 256 - do_sample: false - use_cache: true - pad_token_id: 0 - num_beams: 1 -experiment_name: bnb-batch_size(1)-sequence_length(128)-new_tokens(256) -model: lmsys/vicuna-7b-v1.5 -device: cuda -task: text-generation -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.2 - optimum_commit: null - transformers_version: 4.35.0.dev0 - transformers_commit: null - accelerate_version: 0.24.0 - accelerate_commit: null - diffusers_version: null - diffusers_commit: null - python_version: 3.10.12 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB diff --git a/examples/running-vicunas/experiments/A100-80GB/bnb-batch_size(1)-sequence_length(128)-new_tokens(256)/0/inference_results.csv b/examples/running-vicunas/experiments/A100-80GB/bnb-batch_size(1)-sequence_length(128)-new_tokens(256)/0/inference_results.csv deleted file mode 100644 index bb71d438..00000000 --- a/examples/running-vicunas/experiments/A100-80GB/bnb-batch_size(1)-sequence_length(128)-new_tokens(256)/0/inference_results.csv +++ /dev/null @@ -1,2 +0,0 @@ -forward.latency(s),forward.throughput(samples/s),forward.peak_memory(MB),generate.latency(s),generate.throughput(tokens/s),generate.peak_memory(MB) -0.0526,19.0,6945,10.8,23.7,7281 diff --git a/examples/running-vicunas/experiments/A100-80GB/bnb-batch_size(16)-sequence_length(128)-new_tokens(256)/4/.hydra/config.yaml b/examples/running-vicunas/experiments/A100-80GB/bnb-batch_size(16)-sequence_length(128)-new_tokens(256)/4/.hydra/config.yaml deleted file mode 100644 index 1b66ca17..00000000 --- a/examples/running-vicunas/experiments/A100-80GB/bnb-batch_size(16)-sequence_length(128)-new_tokens(256)/4/.hydra/config.yaml +++ /dev/null @@ -1,72 +0,0 @@ -backend: - name: pytorch - version: ${pytorch_version:} - _target_: optimum_benchmark.backends.pytorch.backend.PyTorchBackend - seed: 42 - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: true - continous_isolation_check: true - delete_cache: false - no_weights: false - device_map: null - torch_dtype: float16 - disable_grad: ${is_inference:${benchmark.name}} - eval_mode: ${is_inference:${benchmark.name}} - amp_autocast: false - amp_dtype: null - torch_compile: false - torch_compile_config: {} - bettertransformer: false - quantization_scheme: bnb - quantization_config: - load_in_4bit: true - bnb_4bit_compute_dtype: float16 - use_ddp: false - ddp_config: {} - peft_strategy: null - 
peft_config: {} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 16 - sequence_length: 128 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: 256 - can_diffuse: ${can_diffuse:${task}} - can_generate: ${can_generate:${task}} - forward_kwargs: {} - generate_kwargs: {} -experiment_name: bnb-batch_size(${benchmark.input_shapes.batch_size})-sequence_length(${benchmark.input_shapes.sequence_length})-new_tokens(${benchmark.new_tokens}) -model: lmsys/vicuna-7b-v1.5 -device: cuda -task: ${infer_task:${model}} -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.2 - optimum_commit: null - transformers_version: 4.35.0.dev0 - transformers_commit: null - accelerate_version: 0.24.0 - accelerate_commit: null - diffusers_version: null - diffusers_commit: null - python_version: 3.10.12 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB diff --git a/examples/running-vicunas/experiments/A100-80GB/bnb-batch_size(16)-sequence_length(128)-new_tokens(256)/4/.hydra/hydra.yaml b/examples/running-vicunas/experiments/A100-80GB/bnb-batch_size(16)-sequence_length(128)-new_tokens(256)/4/.hydra/hydra.yaml deleted file mode 100644 index e904559d..00000000 --- a/examples/running-vicunas/experiments/A100-80GB/bnb-batch_size(16)-sequence_length(128)-new_tokens(256)/4/.hydra/hydra.yaml +++ /dev/null @@ -1,174 +0,0 @@ -hydra: - run: - dir: experiments/${experiment_name} - sweep: - dir: experiments/${experiment_name} - subdir: ${hydra.job.num} - launcher: - _target_: hydra._internal.core_plugins.basic_launcher.BasicLauncher - sweeper: - _target_: hydra._internal.core_plugins.basic_sweeper.BasicSweeper - max_batch_size: null - params: - benchmark.input_shapes.batch_size: 1,2,4,8,16 - help: - app_name: ${hydra.job.name} - header: '${hydra.help.app_name} is powered by Hydra. - - ' - footer: 'Powered by Hydra (https://hydra.cc) - - Use --hydra-help to view Hydra specific help - - ' - template: '${hydra.help.header} - - == Configuration groups == - - Compose your configuration from those groups (group=option) - - - $APP_CONFIG_GROUPS - - - == Config == - - Override anything in the config (foo.bar=value) - - - $CONFIG - - - ${hydra.help.footer} - - ' - hydra_help: - template: 'Hydra (${hydra.runtime.version}) - - See https://hydra.cc for more info. - - - == Flags == - - $FLAGS_HELP - - - == Configuration groups == - - Compose your configuration from those groups (For example, append hydra/job_logging=disabled - to command line) - - - $HYDRA_CONFIG_GROUPS - - - Use ''--cfg hydra'' to Show the Hydra config. - - ' - hydra_help: ??? 
- hydra_logging: - version: 1 - formatters: - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s' - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - root: - level: INFO - handlers: - - console - disable_existing_loggers: false - job_logging: - version: 1 - formatters: - simple: - format: '[%(asctime)s][%(name)s][%(levelname)s] - %(message)s' - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s] - - %(message)s' - log_colors: - DEBUG: purple - INFO: green - WARNING: yellow - ERROR: red - CRITICAL: red - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - file: - class: logging.FileHandler - formatter: simple - filename: ${hydra.job.name}.log - root: - level: INFO - handlers: - - console - - file - disable_existing_loggers: false - env: {} - mode: MULTIRUN - searchpath: [] - callbacks: {} - output_subdir: .hydra - overrides: - hydra: - - hydra.mode=MULTIRUN - task: - - benchmark.input_shapes.batch_size=16 - job: - name: experiment - chdir: true - override_dirname: benchmark.input_shapes.batch_size=16 - id: '4' - num: 4 - config_name: bnb - env_set: - CUDA_VISIBLE_DEVICES: '0' - CUDA_DEVICE_ORDER: PCI_BUS_ID - env_copy: [] - config: - override_dirname: - kv_sep: '=' - item_sep: ',' - exclude_keys: [] - runtime: - version: 1.3.2 - version_base: '1.3' - cwd: /workspace/optimum-benchmark/examples/gemm-vs-gemv - config_sources: - - path: hydra.conf - schema: pkg - provider: hydra - - path: optimum_benchmark - schema: pkg - provider: main - - path: hydra_plugins.hydra_colorlog.conf - schema: pkg - provider: hydra-colorlog - - path: /workspace/optimum-benchmark/examples/gemm-vs-gemv/configs - schema: file - provider: command-line - - path: '' - schema: structured - provider: schema - output_dir: /workspace/optimum-benchmark/examples/gemm-vs-gemv/experiments/bnb-batch_size(16)-sequence_length(128)-new_tokens(256)/4 - choices: - benchmark: inference - backend: pytorch - hydra/env: default - hydra/callbacks: null - hydra/job_logging: colorlog - hydra/hydra_logging: colorlog - hydra/hydra_help: default - hydra/help: default - hydra/sweeper: basic - hydra/launcher: basic - hydra/output: default - verbose: false diff --git a/examples/running-vicunas/experiments/A100-80GB/bnb-batch_size(16)-sequence_length(128)-new_tokens(256)/4/.hydra/overrides.yaml b/examples/running-vicunas/experiments/A100-80GB/bnb-batch_size(16)-sequence_length(128)-new_tokens(256)/4/.hydra/overrides.yaml deleted file mode 100644 index fdb7f01d..00000000 --- a/examples/running-vicunas/experiments/A100-80GB/bnb-batch_size(16)-sequence_length(128)-new_tokens(256)/4/.hydra/overrides.yaml +++ /dev/null @@ -1 +0,0 @@ -- benchmark.input_shapes.batch_size=16 diff --git a/examples/running-vicunas/experiments/A100-80GB/bnb-batch_size(16)-sequence_length(128)-new_tokens(256)/4/hydra_config.yaml b/examples/running-vicunas/experiments/A100-80GB/bnb-batch_size(16)-sequence_length(128)-new_tokens(256)/4/hydra_config.yaml deleted file mode 100644 index 3c85dd65..00000000 --- a/examples/running-vicunas/experiments/A100-80GB/bnb-batch_size(16)-sequence_length(128)-new_tokens(256)/4/hydra_config.yaml +++ /dev/null @@ -1,79 +0,0 @@ -backend: - name: pytorch - version: 2.1.0+cu118 - _target_: optimum_benchmark.backends.pytorch.backend.PyTorchBackend - seed: 42 - inter_op_num_threads: 
null - intra_op_num_threads: null - initial_isolation_check: true - continous_isolation_check: true - delete_cache: false - no_weights: false - device_map: null - torch_dtype: float16 - disable_grad: true - eval_mode: true - amp_autocast: false - amp_dtype: null - torch_compile: false - torch_compile_config: {} - bettertransformer: false - quantization_scheme: bnb - quantization_config: - llm_int8_threshold: 0.0 - load_in_4bit: true - bnb_4bit_compute_dtype: float16 - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 16 - sequence_length: 128 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: 256 - can_diffuse: false - can_generate: true - forward_kwargs: {} - generate_kwargs: - max_new_tokens: 256 - min_new_tokens: 256 - do_sample: false - use_cache: true - pad_token_id: 0 - num_beams: 1 -experiment_name: bnb-batch_size(16)-sequence_length(128)-new_tokens(256) -model: lmsys/vicuna-7b-v1.5 -device: cuda -task: text-generation -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.2 - optimum_commit: null - transformers_version: 4.35.0.dev0 - transformers_commit: null - accelerate_version: 0.24.0 - accelerate_commit: null - diffusers_version: null - diffusers_commit: null - python_version: 3.10.12 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB diff --git a/examples/running-vicunas/experiments/A100-80GB/bnb-batch_size(16)-sequence_length(128)-new_tokens(256)/4/inference_results.csv b/examples/running-vicunas/experiments/A100-80GB/bnb-batch_size(16)-sequence_length(128)-new_tokens(256)/4/inference_results.csv deleted file mode 100644 index 8b6dfb2e..00000000 --- a/examples/running-vicunas/experiments/A100-80GB/bnb-batch_size(16)-sequence_length(128)-new_tokens(256)/4/inference_results.csv +++ /dev/null @@ -1,2 +0,0 @@ -forward.latency(s),forward.throughput(samples/s),forward.peak_memory(MB),generate.latency(s),generate.throughput(tokens/s),generate.peak_memory(MB) -0.221,72.4,9917,13.9,295.0,20260 diff --git a/examples/running-vicunas/experiments/A100-80GB/bnb-batch_size(2)-sequence_length(128)-new_tokens(256)/1/.hydra/config.yaml b/examples/running-vicunas/experiments/A100-80GB/bnb-batch_size(2)-sequence_length(128)-new_tokens(256)/1/.hydra/config.yaml deleted file mode 100644 index a5aaa817..00000000 --- a/examples/running-vicunas/experiments/A100-80GB/bnb-batch_size(2)-sequence_length(128)-new_tokens(256)/1/.hydra/config.yaml +++ /dev/null @@ -1,72 +0,0 @@ -backend: - name: pytorch - version: ${pytorch_version:} - _target_: optimum_benchmark.backends.pytorch.backend.PyTorchBackend - seed: 42 - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: true - continous_isolation_check: true - delete_cache: false - no_weights: false - device_map: null - torch_dtype: float16 - disable_grad: ${is_inference:${benchmark.name}} - eval_mode: ${is_inference:${benchmark.name}} - amp_autocast: false - amp_dtype: null - torch_compile: false - torch_compile_config: {} - bettertransformer: false - quantization_scheme: bnb - quantization_config: - load_in_4bit: true - bnb_4bit_compute_dtype: float16 - use_ddp: false - ddp_config: {} - 
peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 2 - sequence_length: 128 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: 256 - can_diffuse: ${can_diffuse:${task}} - can_generate: ${can_generate:${task}} - forward_kwargs: {} - generate_kwargs: {} -experiment_name: bnb-batch_size(${benchmark.input_shapes.batch_size})-sequence_length(${benchmark.input_shapes.sequence_length})-new_tokens(${benchmark.new_tokens}) -model: lmsys/vicuna-7b-v1.5 -device: cuda -task: ${infer_task:${model}} -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.2 - optimum_commit: null - transformers_version: 4.35.0.dev0 - transformers_commit: null - accelerate_version: 0.24.0 - accelerate_commit: null - diffusers_version: null - diffusers_commit: null - python_version: 3.10.12 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB diff --git a/examples/running-vicunas/experiments/A100-80GB/bnb-batch_size(2)-sequence_length(128)-new_tokens(256)/1/.hydra/hydra.yaml b/examples/running-vicunas/experiments/A100-80GB/bnb-batch_size(2)-sequence_length(128)-new_tokens(256)/1/.hydra/hydra.yaml deleted file mode 100644 index 2ae6349f..00000000 --- a/examples/running-vicunas/experiments/A100-80GB/bnb-batch_size(2)-sequence_length(128)-new_tokens(256)/1/.hydra/hydra.yaml +++ /dev/null @@ -1,174 +0,0 @@ -hydra: - run: - dir: experiments/${experiment_name} - sweep: - dir: experiments/${experiment_name} - subdir: ${hydra.job.num} - launcher: - _target_: hydra._internal.core_plugins.basic_launcher.BasicLauncher - sweeper: - _target_: hydra._internal.core_plugins.basic_sweeper.BasicSweeper - max_batch_size: null - params: - benchmark.input_shapes.batch_size: 1,2,4,8,16 - help: - app_name: ${hydra.job.name} - header: '${hydra.help.app_name} is powered by Hydra. - - ' - footer: 'Powered by Hydra (https://hydra.cc) - - Use --hydra-help to view Hydra specific help - - ' - template: '${hydra.help.header} - - == Configuration groups == - - Compose your configuration from those groups (group=option) - - - $APP_CONFIG_GROUPS - - - == Config == - - Override anything in the config (foo.bar=value) - - - $CONFIG - - - ${hydra.help.footer} - - ' - hydra_help: - template: 'Hydra (${hydra.runtime.version}) - - See https://hydra.cc for more info. - - - == Flags == - - $FLAGS_HELP - - - == Configuration groups == - - Compose your configuration from those groups (For example, append hydra/job_logging=disabled - to command line) - - - $HYDRA_CONFIG_GROUPS - - - Use ''--cfg hydra'' to Show the Hydra config. - - ' - hydra_help: ??? 
- hydra_logging: - version: 1 - formatters: - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s' - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - root: - level: INFO - handlers: - - console - disable_existing_loggers: false - job_logging: - version: 1 - formatters: - simple: - format: '[%(asctime)s][%(name)s][%(levelname)s] - %(message)s' - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s] - - %(message)s' - log_colors: - DEBUG: purple - INFO: green - WARNING: yellow - ERROR: red - CRITICAL: red - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - file: - class: logging.FileHandler - formatter: simple - filename: ${hydra.job.name}.log - root: - level: INFO - handlers: - - console - - file - disable_existing_loggers: false - env: {} - mode: MULTIRUN - searchpath: [] - callbacks: {} - output_subdir: .hydra - overrides: - hydra: - - hydra.mode=MULTIRUN - task: - - benchmark.input_shapes.batch_size=2 - job: - name: experiment - chdir: true - override_dirname: benchmark.input_shapes.batch_size=2 - id: '1' - num: 1 - config_name: bnb - env_set: - CUDA_VISIBLE_DEVICES: '0' - CUDA_DEVICE_ORDER: PCI_BUS_ID - env_copy: [] - config: - override_dirname: - kv_sep: '=' - item_sep: ',' - exclude_keys: [] - runtime: - version: 1.3.2 - version_base: '1.3' - cwd: /workspace/optimum-benchmark/examples/gemm-vs-gemv - config_sources: - - path: hydra.conf - schema: pkg - provider: hydra - - path: optimum_benchmark - schema: pkg - provider: main - - path: hydra_plugins.hydra_colorlog.conf - schema: pkg - provider: hydra-colorlog - - path: /workspace/optimum-benchmark/examples/gemm-vs-gemv/configs - schema: file - provider: command-line - - path: '' - schema: structured - provider: schema - output_dir: /workspace/optimum-benchmark/examples/gemm-vs-gemv/experiments/bnb-batch_size(2)-sequence_length(128)-new_tokens(256)/1 - choices: - benchmark: inference - backend: pytorch - hydra/env: default - hydra/callbacks: null - hydra/job_logging: colorlog - hydra/hydra_logging: colorlog - hydra/hydra_help: default - hydra/help: default - hydra/sweeper: basic - hydra/launcher: basic - hydra/output: default - verbose: false diff --git a/examples/running-vicunas/experiments/A100-80GB/bnb-batch_size(2)-sequence_length(128)-new_tokens(256)/1/.hydra/overrides.yaml b/examples/running-vicunas/experiments/A100-80GB/bnb-batch_size(2)-sequence_length(128)-new_tokens(256)/1/.hydra/overrides.yaml deleted file mode 100644 index 8211b85f..00000000 --- a/examples/running-vicunas/experiments/A100-80GB/bnb-batch_size(2)-sequence_length(128)-new_tokens(256)/1/.hydra/overrides.yaml +++ /dev/null @@ -1 +0,0 @@ -- benchmark.input_shapes.batch_size=2 diff --git a/examples/running-vicunas/experiments/A100-80GB/bnb-batch_size(2)-sequence_length(128)-new_tokens(256)/1/hydra_config.yaml b/examples/running-vicunas/experiments/A100-80GB/bnb-batch_size(2)-sequence_length(128)-new_tokens(256)/1/hydra_config.yaml deleted file mode 100644 index 28ffa32f..00000000 --- a/examples/running-vicunas/experiments/A100-80GB/bnb-batch_size(2)-sequence_length(128)-new_tokens(256)/1/hydra_config.yaml +++ /dev/null @@ -1,79 +0,0 @@ -backend: - name: pytorch - version: 2.1.0+cu118 - _target_: optimum_benchmark.backends.pytorch.backend.PyTorchBackend - seed: 42 - inter_op_num_threads: null - 
intra_op_num_threads: null - initial_isolation_check: true - continous_isolation_check: true - delete_cache: false - no_weights: false - device_map: null - torch_dtype: float16 - disable_grad: true - eval_mode: true - amp_autocast: false - amp_dtype: null - torch_compile: false - torch_compile_config: {} - bettertransformer: false - quantization_scheme: bnb - quantization_config: - llm_int8_threshold: 0.0 - load_in_4bit: true - bnb_4bit_compute_dtype: float16 - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 2 - sequence_length: 128 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: 256 - can_diffuse: false - can_generate: true - forward_kwargs: {} - generate_kwargs: - max_new_tokens: 256 - min_new_tokens: 256 - do_sample: false - use_cache: true - pad_token_id: 0 - num_beams: 1 -experiment_name: bnb-batch_size(2)-sequence_length(128)-new_tokens(256) -model: lmsys/vicuna-7b-v1.5 -device: cuda -task: text-generation -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.2 - optimum_commit: null - transformers_version: 4.35.0.dev0 - transformers_commit: null - accelerate_version: 0.24.0 - accelerate_commit: null - diffusers_version: null - diffusers_commit: null - python_version: 3.10.12 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB diff --git a/examples/running-vicunas/experiments/A100-80GB/bnb-batch_size(2)-sequence_length(128)-new_tokens(256)/1/inference_results.csv b/examples/running-vicunas/experiments/A100-80GB/bnb-batch_size(2)-sequence_length(128)-new_tokens(256)/1/inference_results.csv deleted file mode 100644 index 227b9932..00000000 --- a/examples/running-vicunas/experiments/A100-80GB/bnb-batch_size(2)-sequence_length(128)-new_tokens(256)/1/inference_results.csv +++ /dev/null @@ -1,2 +0,0 @@ -forward.latency(s),forward.throughput(samples/s),forward.peak_memory(MB),generate.latency(s),generate.throughput(tokens/s),generate.peak_memory(MB) -0.0553,36.2,7073,13.6,37.6,7704 diff --git a/examples/running-vicunas/experiments/A100-80GB/bnb-batch_size(4)-sequence_length(128)-new_tokens(256)/2/.hydra/config.yaml b/examples/running-vicunas/experiments/A100-80GB/bnb-batch_size(4)-sequence_length(128)-new_tokens(256)/2/.hydra/config.yaml deleted file mode 100644 index 2ce98288..00000000 --- a/examples/running-vicunas/experiments/A100-80GB/bnb-batch_size(4)-sequence_length(128)-new_tokens(256)/2/.hydra/config.yaml +++ /dev/null @@ -1,72 +0,0 @@ -backend: - name: pytorch - version: ${pytorch_version:} - _target_: optimum_benchmark.backends.pytorch.backend.PyTorchBackend - seed: 42 - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: true - continous_isolation_check: true - delete_cache: false - no_weights: false - device_map: null - torch_dtype: float16 - disable_grad: ${is_inference:${benchmark.name}} - eval_mode: ${is_inference:${benchmark.name}} - amp_autocast: false - amp_dtype: null - torch_compile: false - torch_compile_config: {} - bettertransformer: false - quantization_scheme: bnb - quantization_config: - load_in_4bit: true - bnb_4bit_compute_dtype: float16 - use_ddp: false - ddp_config: {} - peft_strategy: null - 
peft_config: {} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 4 - sequence_length: 128 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: 256 - can_diffuse: ${can_diffuse:${task}} - can_generate: ${can_generate:${task}} - forward_kwargs: {} - generate_kwargs: {} -experiment_name: bnb-batch_size(${benchmark.input_shapes.batch_size})-sequence_length(${benchmark.input_shapes.sequence_length})-new_tokens(${benchmark.new_tokens}) -model: lmsys/vicuna-7b-v1.5 -device: cuda -task: ${infer_task:${model}} -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.2 - optimum_commit: null - transformers_version: 4.35.0.dev0 - transformers_commit: null - accelerate_version: 0.24.0 - accelerate_commit: null - diffusers_version: null - diffusers_commit: null - python_version: 3.10.12 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB diff --git a/examples/running-vicunas/experiments/A100-80GB/bnb-batch_size(4)-sequence_length(128)-new_tokens(256)/2/.hydra/hydra.yaml b/examples/running-vicunas/experiments/A100-80GB/bnb-batch_size(4)-sequence_length(128)-new_tokens(256)/2/.hydra/hydra.yaml deleted file mode 100644 index d8cd9ab9..00000000 --- a/examples/running-vicunas/experiments/A100-80GB/bnb-batch_size(4)-sequence_length(128)-new_tokens(256)/2/.hydra/hydra.yaml +++ /dev/null @@ -1,174 +0,0 @@ -hydra: - run: - dir: experiments/${experiment_name} - sweep: - dir: experiments/${experiment_name} - subdir: ${hydra.job.num} - launcher: - _target_: hydra._internal.core_plugins.basic_launcher.BasicLauncher - sweeper: - _target_: hydra._internal.core_plugins.basic_sweeper.BasicSweeper - max_batch_size: null - params: - benchmark.input_shapes.batch_size: 1,2,4,8,16 - help: - app_name: ${hydra.job.name} - header: '${hydra.help.app_name} is powered by Hydra. - - ' - footer: 'Powered by Hydra (https://hydra.cc) - - Use --hydra-help to view Hydra specific help - - ' - template: '${hydra.help.header} - - == Configuration groups == - - Compose your configuration from those groups (group=option) - - - $APP_CONFIG_GROUPS - - - == Config == - - Override anything in the config (foo.bar=value) - - - $CONFIG - - - ${hydra.help.footer} - - ' - hydra_help: - template: 'Hydra (${hydra.runtime.version}) - - See https://hydra.cc for more info. - - - == Flags == - - $FLAGS_HELP - - - == Configuration groups == - - Compose your configuration from those groups (For example, append hydra/job_logging=disabled - to command line) - - - $HYDRA_CONFIG_GROUPS - - - Use ''--cfg hydra'' to Show the Hydra config. - - ' - hydra_help: ??? 
- hydra_logging: - version: 1 - formatters: - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s' - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - root: - level: INFO - handlers: - - console - disable_existing_loggers: false - job_logging: - version: 1 - formatters: - simple: - format: '[%(asctime)s][%(name)s][%(levelname)s] - %(message)s' - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s] - - %(message)s' - log_colors: - DEBUG: purple - INFO: green - WARNING: yellow - ERROR: red - CRITICAL: red - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - file: - class: logging.FileHandler - formatter: simple - filename: ${hydra.job.name}.log - root: - level: INFO - handlers: - - console - - file - disable_existing_loggers: false - env: {} - mode: MULTIRUN - searchpath: [] - callbacks: {} - output_subdir: .hydra - overrides: - hydra: - - hydra.mode=MULTIRUN - task: - - benchmark.input_shapes.batch_size=4 - job: - name: experiment - chdir: true - override_dirname: benchmark.input_shapes.batch_size=4 - id: '2' - num: 2 - config_name: bnb - env_set: - CUDA_VISIBLE_DEVICES: '0' - CUDA_DEVICE_ORDER: PCI_BUS_ID - env_copy: [] - config: - override_dirname: - kv_sep: '=' - item_sep: ',' - exclude_keys: [] - runtime: - version: 1.3.2 - version_base: '1.3' - cwd: /workspace/optimum-benchmark/examples/gemm-vs-gemv - config_sources: - - path: hydra.conf - schema: pkg - provider: hydra - - path: optimum_benchmark - schema: pkg - provider: main - - path: hydra_plugins.hydra_colorlog.conf - schema: pkg - provider: hydra-colorlog - - path: /workspace/optimum-benchmark/examples/gemm-vs-gemv/configs - schema: file - provider: command-line - - path: '' - schema: structured - provider: schema - output_dir: /workspace/optimum-benchmark/examples/gemm-vs-gemv/experiments/bnb-batch_size(4)-sequence_length(128)-new_tokens(256)/2 - choices: - benchmark: inference - backend: pytorch - hydra/env: default - hydra/callbacks: null - hydra/job_logging: colorlog - hydra/hydra_logging: colorlog - hydra/hydra_help: default - hydra/help: default - hydra/sweeper: basic - hydra/launcher: basic - hydra/output: default - verbose: false diff --git a/examples/running-vicunas/experiments/A100-80GB/bnb-batch_size(4)-sequence_length(128)-new_tokens(256)/2/.hydra/overrides.yaml b/examples/running-vicunas/experiments/A100-80GB/bnb-batch_size(4)-sequence_length(128)-new_tokens(256)/2/.hydra/overrides.yaml deleted file mode 100644 index eef8c9ca..00000000 --- a/examples/running-vicunas/experiments/A100-80GB/bnb-batch_size(4)-sequence_length(128)-new_tokens(256)/2/.hydra/overrides.yaml +++ /dev/null @@ -1 +0,0 @@ -- benchmark.input_shapes.batch_size=4 diff --git a/examples/running-vicunas/experiments/A100-80GB/bnb-batch_size(4)-sequence_length(128)-new_tokens(256)/2/hydra_config.yaml b/examples/running-vicunas/experiments/A100-80GB/bnb-batch_size(4)-sequence_length(128)-new_tokens(256)/2/hydra_config.yaml deleted file mode 100644 index d6d04dfa..00000000 --- a/examples/running-vicunas/experiments/A100-80GB/bnb-batch_size(4)-sequence_length(128)-new_tokens(256)/2/hydra_config.yaml +++ /dev/null @@ -1,79 +0,0 @@ -backend: - name: pytorch - version: 2.1.0+cu118 - _target_: optimum_benchmark.backends.pytorch.backend.PyTorchBackend - seed: 42 - inter_op_num_threads: null - 
intra_op_num_threads: null - initial_isolation_check: true - continous_isolation_check: true - delete_cache: false - no_weights: false - device_map: null - torch_dtype: float16 - disable_grad: true - eval_mode: true - amp_autocast: false - amp_dtype: null - torch_compile: false - torch_compile_config: {} - bettertransformer: false - quantization_scheme: bnb - quantization_config: - llm_int8_threshold: 0.0 - load_in_4bit: true - bnb_4bit_compute_dtype: float16 - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 4 - sequence_length: 128 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: 256 - can_diffuse: false - can_generate: true - forward_kwargs: {} - generate_kwargs: - max_new_tokens: 256 - min_new_tokens: 256 - do_sample: false - use_cache: true - pad_token_id: 0 - num_beams: 1 -experiment_name: bnb-batch_size(4)-sequence_length(128)-new_tokens(256) -model: lmsys/vicuna-7b-v1.5 -device: cuda -task: text-generation -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.2 - optimum_commit: null - transformers_version: 4.35.0.dev0 - transformers_commit: null - accelerate_version: 0.24.0 - accelerate_commit: null - diffusers_version: null - diffusers_commit: null - python_version: 3.10.12 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB diff --git a/examples/running-vicunas/experiments/A100-80GB/bnb-batch_size(4)-sequence_length(128)-new_tokens(256)/2/inference_results.csv b/examples/running-vicunas/experiments/A100-80GB/bnb-batch_size(4)-sequence_length(128)-new_tokens(256)/2/inference_results.csv deleted file mode 100644 index 21ff66e4..00000000 --- a/examples/running-vicunas/experiments/A100-80GB/bnb-batch_size(4)-sequence_length(128)-new_tokens(256)/2/inference_results.csv +++ /dev/null @@ -1,2 +0,0 @@ -forward.latency(s),forward.throughput(samples/s),forward.peak_memory(MB),generate.latency(s),generate.throughput(tokens/s),generate.peak_memory(MB) -0.0781,51.2,7545,13.4,76.4,8539 diff --git a/examples/running-vicunas/experiments/A100-80GB/bnb-batch_size(8)-sequence_length(128)-new_tokens(256)/3/.hydra/config.yaml b/examples/running-vicunas/experiments/A100-80GB/bnb-batch_size(8)-sequence_length(128)-new_tokens(256)/3/.hydra/config.yaml deleted file mode 100644 index 15192fa9..00000000 --- a/examples/running-vicunas/experiments/A100-80GB/bnb-batch_size(8)-sequence_length(128)-new_tokens(256)/3/.hydra/config.yaml +++ /dev/null @@ -1,72 +0,0 @@ -backend: - name: pytorch - version: ${pytorch_version:} - _target_: optimum_benchmark.backends.pytorch.backend.PyTorchBackend - seed: 42 - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: true - continous_isolation_check: true - delete_cache: false - no_weights: false - device_map: null - torch_dtype: float16 - disable_grad: ${is_inference:${benchmark.name}} - eval_mode: ${is_inference:${benchmark.name}} - amp_autocast: false - amp_dtype: null - torch_compile: false - torch_compile_config: {} - bettertransformer: false - quantization_scheme: bnb - quantization_config: - load_in_4bit: true - bnb_4bit_compute_dtype: float16 - use_ddp: false - ddp_config: {} - peft_strategy: null - 
peft_config: {} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 8 - sequence_length: 128 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: 256 - can_diffuse: ${can_diffuse:${task}} - can_generate: ${can_generate:${task}} - forward_kwargs: {} - generate_kwargs: {} -experiment_name: bnb-batch_size(${benchmark.input_shapes.batch_size})-sequence_length(${benchmark.input_shapes.sequence_length})-new_tokens(${benchmark.new_tokens}) -model: lmsys/vicuna-7b-v1.5 -device: cuda -task: ${infer_task:${model}} -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.2 - optimum_commit: null - transformers_version: 4.35.0.dev0 - transformers_commit: null - accelerate_version: 0.24.0 - accelerate_commit: null - diffusers_version: null - diffusers_commit: null - python_version: 3.10.12 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB diff --git a/examples/running-vicunas/experiments/A100-80GB/bnb-batch_size(8)-sequence_length(128)-new_tokens(256)/3/.hydra/hydra.yaml b/examples/running-vicunas/experiments/A100-80GB/bnb-batch_size(8)-sequence_length(128)-new_tokens(256)/3/.hydra/hydra.yaml deleted file mode 100644 index dae7feac..00000000 --- a/examples/running-vicunas/experiments/A100-80GB/bnb-batch_size(8)-sequence_length(128)-new_tokens(256)/3/.hydra/hydra.yaml +++ /dev/null @@ -1,174 +0,0 @@ -hydra: - run: - dir: experiments/${experiment_name} - sweep: - dir: experiments/${experiment_name} - subdir: ${hydra.job.num} - launcher: - _target_: hydra._internal.core_plugins.basic_launcher.BasicLauncher - sweeper: - _target_: hydra._internal.core_plugins.basic_sweeper.BasicSweeper - max_batch_size: null - params: - benchmark.input_shapes.batch_size: 1,2,4,8,16 - help: - app_name: ${hydra.job.name} - header: '${hydra.help.app_name} is powered by Hydra. - - ' - footer: 'Powered by Hydra (https://hydra.cc) - - Use --hydra-help to view Hydra specific help - - ' - template: '${hydra.help.header} - - == Configuration groups == - - Compose your configuration from those groups (group=option) - - - $APP_CONFIG_GROUPS - - - == Config == - - Override anything in the config (foo.bar=value) - - - $CONFIG - - - ${hydra.help.footer} - - ' - hydra_help: - template: 'Hydra (${hydra.runtime.version}) - - See https://hydra.cc for more info. - - - == Flags == - - $FLAGS_HELP - - - == Configuration groups == - - Compose your configuration from those groups (For example, append hydra/job_logging=disabled - to command line) - - - $HYDRA_CONFIG_GROUPS - - - Use ''--cfg hydra'' to Show the Hydra config. - - ' - hydra_help: ??? 
- hydra_logging: - version: 1 - formatters: - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s' - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - root: - level: INFO - handlers: - - console - disable_existing_loggers: false - job_logging: - version: 1 - formatters: - simple: - format: '[%(asctime)s][%(name)s][%(levelname)s] - %(message)s' - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s] - - %(message)s' - log_colors: - DEBUG: purple - INFO: green - WARNING: yellow - ERROR: red - CRITICAL: red - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - file: - class: logging.FileHandler - formatter: simple - filename: ${hydra.job.name}.log - root: - level: INFO - handlers: - - console - - file - disable_existing_loggers: false - env: {} - mode: MULTIRUN - searchpath: [] - callbacks: {} - output_subdir: .hydra - overrides: - hydra: - - hydra.mode=MULTIRUN - task: - - benchmark.input_shapes.batch_size=8 - job: - name: experiment - chdir: true - override_dirname: benchmark.input_shapes.batch_size=8 - id: '3' - num: 3 - config_name: bnb - env_set: - CUDA_VISIBLE_DEVICES: '0' - CUDA_DEVICE_ORDER: PCI_BUS_ID - env_copy: [] - config: - override_dirname: - kv_sep: '=' - item_sep: ',' - exclude_keys: [] - runtime: - version: 1.3.2 - version_base: '1.3' - cwd: /workspace/optimum-benchmark/examples/gemm-vs-gemv - config_sources: - - path: hydra.conf - schema: pkg - provider: hydra - - path: optimum_benchmark - schema: pkg - provider: main - - path: hydra_plugins.hydra_colorlog.conf - schema: pkg - provider: hydra-colorlog - - path: /workspace/optimum-benchmark/examples/gemm-vs-gemv/configs - schema: file - provider: command-line - - path: '' - schema: structured - provider: schema - output_dir: /workspace/optimum-benchmark/examples/gemm-vs-gemv/experiments/bnb-batch_size(8)-sequence_length(128)-new_tokens(256)/3 - choices: - benchmark: inference - backend: pytorch - hydra/env: default - hydra/callbacks: null - hydra/job_logging: colorlog - hydra/hydra_logging: colorlog - hydra/hydra_help: default - hydra/help: default - hydra/sweeper: basic - hydra/launcher: basic - hydra/output: default - verbose: false diff --git a/examples/running-vicunas/experiments/A100-80GB/bnb-batch_size(8)-sequence_length(128)-new_tokens(256)/3/.hydra/overrides.yaml b/examples/running-vicunas/experiments/A100-80GB/bnb-batch_size(8)-sequence_length(128)-new_tokens(256)/3/.hydra/overrides.yaml deleted file mode 100644 index 8cd14374..00000000 --- a/examples/running-vicunas/experiments/A100-80GB/bnb-batch_size(8)-sequence_length(128)-new_tokens(256)/3/.hydra/overrides.yaml +++ /dev/null @@ -1 +0,0 @@ -- benchmark.input_shapes.batch_size=8 diff --git a/examples/running-vicunas/experiments/A100-80GB/bnb-batch_size(8)-sequence_length(128)-new_tokens(256)/3/hydra_config.yaml b/examples/running-vicunas/experiments/A100-80GB/bnb-batch_size(8)-sequence_length(128)-new_tokens(256)/3/hydra_config.yaml deleted file mode 100644 index 5c2f68dd..00000000 --- a/examples/running-vicunas/experiments/A100-80GB/bnb-batch_size(8)-sequence_length(128)-new_tokens(256)/3/hydra_config.yaml +++ /dev/null @@ -1,79 +0,0 @@ -backend: - name: pytorch - version: 2.1.0+cu118 - _target_: optimum_benchmark.backends.pytorch.backend.PyTorchBackend - seed: 42 - inter_op_num_threads: null - 
intra_op_num_threads: null - initial_isolation_check: true - continous_isolation_check: true - delete_cache: false - no_weights: false - device_map: null - torch_dtype: float16 - disable_grad: true - eval_mode: true - amp_autocast: false - amp_dtype: null - torch_compile: false - torch_compile_config: {} - bettertransformer: false - quantization_scheme: bnb - quantization_config: - llm_int8_threshold: 0.0 - load_in_4bit: true - bnb_4bit_compute_dtype: float16 - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 8 - sequence_length: 128 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: 256 - can_diffuse: false - can_generate: true - forward_kwargs: {} - generate_kwargs: - max_new_tokens: 256 - min_new_tokens: 256 - do_sample: false - use_cache: true - pad_token_id: 0 - num_beams: 1 -experiment_name: bnb-batch_size(8)-sequence_length(128)-new_tokens(256) -model: lmsys/vicuna-7b-v1.5 -device: cuda -task: text-generation -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.2 - optimum_commit: null - transformers_version: 4.35.0.dev0 - transformers_commit: null - accelerate_version: 0.24.0 - accelerate_commit: null - diffusers_version: null - diffusers_commit: null - python_version: 3.10.12 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB diff --git a/examples/running-vicunas/experiments/A100-80GB/bnb-batch_size(8)-sequence_length(128)-new_tokens(256)/3/inference_results.csv b/examples/running-vicunas/experiments/A100-80GB/bnb-batch_size(8)-sequence_length(128)-new_tokens(256)/3/inference_results.csv deleted file mode 100644 index 694df838..00000000 --- a/examples/running-vicunas/experiments/A100-80GB/bnb-batch_size(8)-sequence_length(128)-new_tokens(256)/3/inference_results.csv +++ /dev/null @@ -1,2 +0,0 @@ -forward.latency(s),forward.throughput(samples/s),forward.peak_memory(MB),generate.latency(s),generate.throughput(tokens/s),generate.peak_memory(MB) -0.126,63.5,8323,13.4,153.0,10760 diff --git a/examples/running-vicunas/experiments/A100-80GB/fp16-batch_size(1)-sequence_length(128)-new_tokens(256)/0/.hydra/config.yaml b/examples/running-vicunas/experiments/A100-80GB/fp16-batch_size(1)-sequence_length(128)-new_tokens(256)/0/.hydra/config.yaml deleted file mode 100644 index 58bc0553..00000000 --- a/examples/running-vicunas/experiments/A100-80GB/fp16-batch_size(1)-sequence_length(128)-new_tokens(256)/0/.hydra/config.yaml +++ /dev/null @@ -1,70 +0,0 @@ -backend: - name: pytorch - version: ${pytorch_version:} - _target_: optimum_benchmark.backends.pytorch.backend.PyTorchBackend - seed: 42 - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: true - continous_isolation_check: true - delete_cache: false - no_weights: false - device_map: null - torch_dtype: float16 - disable_grad: ${is_inference:${benchmark.name}} - eval_mode: ${is_inference:${benchmark.name}} - amp_autocast: false - amp_dtype: null - torch_compile: false - torch_compile_config: {} - bettertransformer: false - quantization_scheme: null - quantization_config: {} - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - 
_target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 1 - sequence_length: 128 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: 256 - can_diffuse: ${can_diffuse:${task}} - can_generate: ${can_generate:${task}} - forward_kwargs: {} - generate_kwargs: {} -experiment_name: fp16-batch_size(${benchmark.input_shapes.batch_size})-sequence_length(${benchmark.input_shapes.sequence_length})-new_tokens(${benchmark.new_tokens}) -model: lmsys/vicuna-7b-v1.5 -device: cuda -task: ${infer_task:${model}} -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.2 - optimum_commit: null - transformers_version: 4.35.0.dev0 - transformers_commit: null - accelerate_version: 0.24.0 - accelerate_commit: null - diffusers_version: null - diffusers_commit: null - python_version: 3.10.12 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB diff --git a/examples/running-vicunas/experiments/A100-80GB/fp16-batch_size(1)-sequence_length(128)-new_tokens(256)/0/.hydra/hydra.yaml b/examples/running-vicunas/experiments/A100-80GB/fp16-batch_size(1)-sequence_length(128)-new_tokens(256)/0/.hydra/hydra.yaml deleted file mode 100644 index 4a2b3ce5..00000000 --- a/examples/running-vicunas/experiments/A100-80GB/fp16-batch_size(1)-sequence_length(128)-new_tokens(256)/0/.hydra/hydra.yaml +++ /dev/null @@ -1,174 +0,0 @@ -hydra: - run: - dir: experiments/${experiment_name} - sweep: - dir: experiments/${experiment_name} - subdir: ${hydra.job.num} - launcher: - _target_: hydra._internal.core_plugins.basic_launcher.BasicLauncher - sweeper: - _target_: hydra._internal.core_plugins.basic_sweeper.BasicSweeper - max_batch_size: null - params: - benchmark.input_shapes.batch_size: 1,2,4,8,16 - help: - app_name: ${hydra.job.name} - header: '${hydra.help.app_name} is powered by Hydra. - - ' - footer: 'Powered by Hydra (https://hydra.cc) - - Use --hydra-help to view Hydra specific help - - ' - template: '${hydra.help.header} - - == Configuration groups == - - Compose your configuration from those groups (group=option) - - - $APP_CONFIG_GROUPS - - - == Config == - - Override anything in the config (foo.bar=value) - - - $CONFIG - - - ${hydra.help.footer} - - ' - hydra_help: - template: 'Hydra (${hydra.runtime.version}) - - See https://hydra.cc for more info. - - - == Flags == - - $FLAGS_HELP - - - == Configuration groups == - - Compose your configuration from those groups (For example, append hydra/job_logging=disabled - to command line) - - - $HYDRA_CONFIG_GROUPS - - - Use ''--cfg hydra'' to Show the Hydra config. - - ' - hydra_help: ??? 
- hydra_logging: - version: 1 - formatters: - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s' - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - root: - level: INFO - handlers: - - console - disable_existing_loggers: false - job_logging: - version: 1 - formatters: - simple: - format: '[%(asctime)s][%(name)s][%(levelname)s] - %(message)s' - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s] - - %(message)s' - log_colors: - DEBUG: purple - INFO: green - WARNING: yellow - ERROR: red - CRITICAL: red - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - file: - class: logging.FileHandler - formatter: simple - filename: ${hydra.job.name}.log - root: - level: INFO - handlers: - - console - - file - disable_existing_loggers: false - env: {} - mode: MULTIRUN - searchpath: [] - callbacks: {} - output_subdir: .hydra - overrides: - hydra: - - hydra.mode=MULTIRUN - task: - - benchmark.input_shapes.batch_size=1 - job: - name: experiment - chdir: true - override_dirname: benchmark.input_shapes.batch_size=1 - id: '0' - num: 0 - config_name: _base_ - env_set: - CUDA_VISIBLE_DEVICES: '0' - CUDA_DEVICE_ORDER: PCI_BUS_ID - env_copy: [] - config: - override_dirname: - kv_sep: '=' - item_sep: ',' - exclude_keys: [] - runtime: - version: 1.3.2 - version_base: '1.3' - cwd: /workspace/optimum-benchmark/examples/gemm-vs-gemv - config_sources: - - path: hydra.conf - schema: pkg - provider: hydra - - path: optimum_benchmark - schema: pkg - provider: main - - path: hydra_plugins.hydra_colorlog.conf - schema: pkg - provider: hydra-colorlog - - path: /workspace/optimum-benchmark/examples/gemm-vs-gemv/configs - schema: file - provider: command-line - - path: '' - schema: structured - provider: schema - output_dir: /workspace/optimum-benchmark/examples/gemm-vs-gemv/experiments/fp16-batch_size(1)-sequence_length(128)-new_tokens(256)/0 - choices: - benchmark: inference - backend: pytorch - hydra/env: default - hydra/callbacks: null - hydra/job_logging: colorlog - hydra/hydra_logging: colorlog - hydra/hydra_help: default - hydra/help: default - hydra/sweeper: basic - hydra/launcher: basic - hydra/output: default - verbose: false diff --git a/examples/running-vicunas/experiments/A100-80GB/fp16-batch_size(1)-sequence_length(128)-new_tokens(256)/0/.hydra/overrides.yaml b/examples/running-vicunas/experiments/A100-80GB/fp16-batch_size(1)-sequence_length(128)-new_tokens(256)/0/.hydra/overrides.yaml deleted file mode 100644 index 989520ff..00000000 --- a/examples/running-vicunas/experiments/A100-80GB/fp16-batch_size(1)-sequence_length(128)-new_tokens(256)/0/.hydra/overrides.yaml +++ /dev/null @@ -1 +0,0 @@ -- benchmark.input_shapes.batch_size=1 diff --git a/examples/running-vicunas/experiments/A100-80GB/fp16-batch_size(1)-sequence_length(128)-new_tokens(256)/0/hydra_config.yaml b/examples/running-vicunas/experiments/A100-80GB/fp16-batch_size(1)-sequence_length(128)-new_tokens(256)/0/hydra_config.yaml deleted file mode 100644 index 97782bd7..00000000 --- a/examples/running-vicunas/experiments/A100-80GB/fp16-batch_size(1)-sequence_length(128)-new_tokens(256)/0/hydra_config.yaml +++ /dev/null @@ -1,76 +0,0 @@ -backend: - name: pytorch - version: 2.1.0+cu118 - _target_: optimum_benchmark.backends.pytorch.backend.PyTorchBackend - seed: 42 - inter_op_num_threads: 
null - intra_op_num_threads: null - initial_isolation_check: true - continous_isolation_check: true - delete_cache: false - no_weights: false - device_map: null - torch_dtype: float16 - disable_grad: true - eval_mode: true - amp_autocast: false - amp_dtype: null - torch_compile: false - torch_compile_config: {} - bettertransformer: false - quantization_scheme: null - quantization_config: {} - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 1 - sequence_length: 128 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: 256 - can_diffuse: false - can_generate: true - forward_kwargs: {} - generate_kwargs: - max_new_tokens: 256 - min_new_tokens: 256 - do_sample: false - use_cache: true - pad_token_id: 0 - num_beams: 1 -experiment_name: fp16-batch_size(1)-sequence_length(128)-new_tokens(256) -model: lmsys/vicuna-7b-v1.5 -device: cuda -task: text-generation -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.2 - optimum_commit: null - transformers_version: 4.35.0.dev0 - transformers_commit: null - accelerate_version: 0.24.0 - accelerate_commit: null - diffusers_version: null - diffusers_commit: null - python_version: 3.10.12 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB diff --git a/examples/running-vicunas/experiments/A100-80GB/fp16-batch_size(1)-sequence_length(128)-new_tokens(256)/0/inference_results.csv b/examples/running-vicunas/experiments/A100-80GB/fp16-batch_size(1)-sequence_length(128)-new_tokens(256)/0/inference_results.csv deleted file mode 100644 index 531b9c5d..00000000 --- a/examples/running-vicunas/experiments/A100-80GB/fp16-batch_size(1)-sequence_length(128)-new_tokens(256)/0/inference_results.csv +++ /dev/null @@ -1,2 +0,0 @@ -forward.latency(s),forward.throughput(samples/s),forward.peak_memory(MB),generate.latency(s),generate.throughput(tokens/s),generate.peak_memory(MB) -0.026,38.5,15128,6.67,38.4,15548 diff --git a/examples/running-vicunas/experiments/A100-80GB/fp16-batch_size(16)-sequence_length(128)-new_tokens(256)/4/.hydra/config.yaml b/examples/running-vicunas/experiments/A100-80GB/fp16-batch_size(16)-sequence_length(128)-new_tokens(256)/4/.hydra/config.yaml deleted file mode 100644 index 4a397dfe..00000000 --- a/examples/running-vicunas/experiments/A100-80GB/fp16-batch_size(16)-sequence_length(128)-new_tokens(256)/4/.hydra/config.yaml +++ /dev/null @@ -1,70 +0,0 @@ -backend: - name: pytorch - version: ${pytorch_version:} - _target_: optimum_benchmark.backends.pytorch.backend.PyTorchBackend - seed: 42 - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: true - continous_isolation_check: true - delete_cache: false - no_weights: false - device_map: null - torch_dtype: float16 - disable_grad: ${is_inference:${benchmark.name}} - eval_mode: ${is_inference:${benchmark.name}} - amp_autocast: false - amp_dtype: null - torch_compile: false - torch_compile_config: {} - bettertransformer: false - quantization_scheme: null - quantization_config: {} - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: 
optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 16 - sequence_length: 128 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: 256 - can_diffuse: ${can_diffuse:${task}} - can_generate: ${can_generate:${task}} - forward_kwargs: {} - generate_kwargs: {} -experiment_name: fp16-batch_size(${benchmark.input_shapes.batch_size})-sequence_length(${benchmark.input_shapes.sequence_length})-new_tokens(${benchmark.new_tokens}) -model: lmsys/vicuna-7b-v1.5 -device: cuda -task: ${infer_task:${model}} -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.2 - optimum_commit: null - transformers_version: 4.35.0.dev0 - transformers_commit: null - accelerate_version: 0.24.0 - accelerate_commit: null - diffusers_version: null - diffusers_commit: null - python_version: 3.10.12 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB diff --git a/examples/running-vicunas/experiments/A100-80GB/fp16-batch_size(16)-sequence_length(128)-new_tokens(256)/4/.hydra/hydra.yaml b/examples/running-vicunas/experiments/A100-80GB/fp16-batch_size(16)-sequence_length(128)-new_tokens(256)/4/.hydra/hydra.yaml deleted file mode 100644 index ff9bd759..00000000 --- a/examples/running-vicunas/experiments/A100-80GB/fp16-batch_size(16)-sequence_length(128)-new_tokens(256)/4/.hydra/hydra.yaml +++ /dev/null @@ -1,174 +0,0 @@ -hydra: - run: - dir: experiments/${experiment_name} - sweep: - dir: experiments/${experiment_name} - subdir: ${hydra.job.num} - launcher: - _target_: hydra._internal.core_plugins.basic_launcher.BasicLauncher - sweeper: - _target_: hydra._internal.core_plugins.basic_sweeper.BasicSweeper - max_batch_size: null - params: - benchmark.input_shapes.batch_size: 1,2,4,8,16 - help: - app_name: ${hydra.job.name} - header: '${hydra.help.app_name} is powered by Hydra. - - ' - footer: 'Powered by Hydra (https://hydra.cc) - - Use --hydra-help to view Hydra specific help - - ' - template: '${hydra.help.header} - - == Configuration groups == - - Compose your configuration from those groups (group=option) - - - $APP_CONFIG_GROUPS - - - == Config == - - Override anything in the config (foo.bar=value) - - - $CONFIG - - - ${hydra.help.footer} - - ' - hydra_help: - template: 'Hydra (${hydra.runtime.version}) - - See https://hydra.cc for more info. - - - == Flags == - - $FLAGS_HELP - - - == Configuration groups == - - Compose your configuration from those groups (For example, append hydra/job_logging=disabled - to command line) - - - $HYDRA_CONFIG_GROUPS - - - Use ''--cfg hydra'' to Show the Hydra config. - - ' - hydra_help: ??? 
- hydra_logging: - version: 1 - formatters: - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s' - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - root: - level: INFO - handlers: - - console - disable_existing_loggers: false - job_logging: - version: 1 - formatters: - simple: - format: '[%(asctime)s][%(name)s][%(levelname)s] - %(message)s' - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s] - - %(message)s' - log_colors: - DEBUG: purple - INFO: green - WARNING: yellow - ERROR: red - CRITICAL: red - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - file: - class: logging.FileHandler - formatter: simple - filename: ${hydra.job.name}.log - root: - level: INFO - handlers: - - console - - file - disable_existing_loggers: false - env: {} - mode: MULTIRUN - searchpath: [] - callbacks: {} - output_subdir: .hydra - overrides: - hydra: - - hydra.mode=MULTIRUN - task: - - benchmark.input_shapes.batch_size=16 - job: - name: experiment - chdir: true - override_dirname: benchmark.input_shapes.batch_size=16 - id: '4' - num: 4 - config_name: _base_ - env_set: - CUDA_VISIBLE_DEVICES: '0' - CUDA_DEVICE_ORDER: PCI_BUS_ID - env_copy: [] - config: - override_dirname: - kv_sep: '=' - item_sep: ',' - exclude_keys: [] - runtime: - version: 1.3.2 - version_base: '1.3' - cwd: /workspace/optimum-benchmark/examples/gemm-vs-gemv - config_sources: - - path: hydra.conf - schema: pkg - provider: hydra - - path: optimum_benchmark - schema: pkg - provider: main - - path: hydra_plugins.hydra_colorlog.conf - schema: pkg - provider: hydra-colorlog - - path: /workspace/optimum-benchmark/examples/gemm-vs-gemv/configs - schema: file - provider: command-line - - path: '' - schema: structured - provider: schema - output_dir: /workspace/optimum-benchmark/examples/gemm-vs-gemv/experiments/fp16-batch_size(16)-sequence_length(128)-new_tokens(256)/4 - choices: - benchmark: inference - backend: pytorch - hydra/env: default - hydra/callbacks: null - hydra/job_logging: colorlog - hydra/hydra_logging: colorlog - hydra/hydra_help: default - hydra/help: default - hydra/sweeper: basic - hydra/launcher: basic - hydra/output: default - verbose: false diff --git a/examples/running-vicunas/experiments/A100-80GB/fp16-batch_size(16)-sequence_length(128)-new_tokens(256)/4/.hydra/overrides.yaml b/examples/running-vicunas/experiments/A100-80GB/fp16-batch_size(16)-sequence_length(128)-new_tokens(256)/4/.hydra/overrides.yaml deleted file mode 100644 index fdb7f01d..00000000 --- a/examples/running-vicunas/experiments/A100-80GB/fp16-batch_size(16)-sequence_length(128)-new_tokens(256)/4/.hydra/overrides.yaml +++ /dev/null @@ -1 +0,0 @@ -- benchmark.input_shapes.batch_size=16 diff --git a/examples/running-vicunas/experiments/A100-80GB/fp16-batch_size(16)-sequence_length(128)-new_tokens(256)/4/hydra_config.yaml b/examples/running-vicunas/experiments/A100-80GB/fp16-batch_size(16)-sequence_length(128)-new_tokens(256)/4/hydra_config.yaml deleted file mode 100644 index d34ba391..00000000 --- a/examples/running-vicunas/experiments/A100-80GB/fp16-batch_size(16)-sequence_length(128)-new_tokens(256)/4/hydra_config.yaml +++ /dev/null @@ -1,76 +0,0 @@ -backend: - name: pytorch - version: 2.1.0+cu118 - _target_: optimum_benchmark.backends.pytorch.backend.PyTorchBackend - seed: 42 - 
inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: true - continous_isolation_check: true - delete_cache: false - no_weights: false - device_map: null - torch_dtype: float16 - disable_grad: true - eval_mode: true - amp_autocast: false - amp_dtype: null - torch_compile: false - torch_compile_config: {} - bettertransformer: false - quantization_scheme: null - quantization_config: {} - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 16 - sequence_length: 128 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: 256 - can_diffuse: false - can_generate: true - forward_kwargs: {} - generate_kwargs: - max_new_tokens: 256 - min_new_tokens: 256 - do_sample: false - use_cache: true - pad_token_id: 0 - num_beams: 1 -experiment_name: fp16-batch_size(16)-sequence_length(128)-new_tokens(256) -model: lmsys/vicuna-7b-v1.5 -device: cuda -task: text-generation -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.2 - optimum_commit: null - transformers_version: 4.35.0.dev0 - transformers_commit: null - accelerate_version: 0.24.0 - accelerate_commit: null - diffusers_version: null - diffusers_commit: null - python_version: 3.10.12 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB diff --git a/examples/running-vicunas/experiments/A100-80GB/fp16-batch_size(16)-sequence_length(128)-new_tokens(256)/4/inference_results.csv b/examples/running-vicunas/experiments/A100-80GB/fp16-batch_size(16)-sequence_length(128)-new_tokens(256)/4/inference_results.csv deleted file mode 100644 index 6cd91656..00000000 --- a/examples/running-vicunas/experiments/A100-80GB/fp16-batch_size(16)-sequence_length(128)-new_tokens(256)/4/inference_results.csv +++ /dev/null @@ -1,2 +0,0 @@ -forward.latency(s),forward.throughput(samples/s),forward.peak_memory(MB),generate.latency(s),generate.throughput(tokens/s),generate.peak_memory(MB) -0.2,80.0,18706,6.87,596.0,49664 diff --git a/examples/running-vicunas/experiments/A100-80GB/fp16-batch_size(2)-sequence_length(128)-new_tokens(256)/1/.hydra/config.yaml b/examples/running-vicunas/experiments/A100-80GB/fp16-batch_size(2)-sequence_length(128)-new_tokens(256)/1/.hydra/config.yaml deleted file mode 100644 index d222d15c..00000000 --- a/examples/running-vicunas/experiments/A100-80GB/fp16-batch_size(2)-sequence_length(128)-new_tokens(256)/1/.hydra/config.yaml +++ /dev/null @@ -1,70 +0,0 @@ -backend: - name: pytorch - version: ${pytorch_version:} - _target_: optimum_benchmark.backends.pytorch.backend.PyTorchBackend - seed: 42 - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: true - continous_isolation_check: true - delete_cache: false - no_weights: false - device_map: null - torch_dtype: float16 - disable_grad: ${is_inference:${benchmark.name}} - eval_mode: ${is_inference:${benchmark.name}} - amp_autocast: false - amp_dtype: null - torch_compile: false - torch_compile_config: {} - bettertransformer: false - quantization_scheme: null - quantization_config: {} - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: 
optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 2 - sequence_length: 128 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: 256 - can_diffuse: ${can_diffuse:${task}} - can_generate: ${can_generate:${task}} - forward_kwargs: {} - generate_kwargs: {} -experiment_name: fp16-batch_size(${benchmark.input_shapes.batch_size})-sequence_length(${benchmark.input_shapes.sequence_length})-new_tokens(${benchmark.new_tokens}) -model: lmsys/vicuna-7b-v1.5 -device: cuda -task: ${infer_task:${model}} -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.2 - optimum_commit: null - transformers_version: 4.35.0.dev0 - transformers_commit: null - accelerate_version: 0.24.0 - accelerate_commit: null - diffusers_version: null - diffusers_commit: null - python_version: 3.10.12 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB diff --git a/examples/running-vicunas/experiments/A100-80GB/fp16-batch_size(2)-sequence_length(128)-new_tokens(256)/1/.hydra/hydra.yaml b/examples/running-vicunas/experiments/A100-80GB/fp16-batch_size(2)-sequence_length(128)-new_tokens(256)/1/.hydra/hydra.yaml deleted file mode 100644 index 9b00c477..00000000 --- a/examples/running-vicunas/experiments/A100-80GB/fp16-batch_size(2)-sequence_length(128)-new_tokens(256)/1/.hydra/hydra.yaml +++ /dev/null @@ -1,174 +0,0 @@ -hydra: - run: - dir: experiments/${experiment_name} - sweep: - dir: experiments/${experiment_name} - subdir: ${hydra.job.num} - launcher: - _target_: hydra._internal.core_plugins.basic_launcher.BasicLauncher - sweeper: - _target_: hydra._internal.core_plugins.basic_sweeper.BasicSweeper - max_batch_size: null - params: - benchmark.input_shapes.batch_size: 1,2,4,8,16 - help: - app_name: ${hydra.job.name} - header: '${hydra.help.app_name} is powered by Hydra. - - ' - footer: 'Powered by Hydra (https://hydra.cc) - - Use --hydra-help to view Hydra specific help - - ' - template: '${hydra.help.header} - - == Configuration groups == - - Compose your configuration from those groups (group=option) - - - $APP_CONFIG_GROUPS - - - == Config == - - Override anything in the config (foo.bar=value) - - - $CONFIG - - - ${hydra.help.footer} - - ' - hydra_help: - template: 'Hydra (${hydra.runtime.version}) - - See https://hydra.cc for more info. - - - == Flags == - - $FLAGS_HELP - - - == Configuration groups == - - Compose your configuration from those groups (For example, append hydra/job_logging=disabled - to command line) - - - $HYDRA_CONFIG_GROUPS - - - Use ''--cfg hydra'' to Show the Hydra config. - - ' - hydra_help: ??? 
- hydra_logging: - version: 1 - formatters: - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s' - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - root: - level: INFO - handlers: - - console - disable_existing_loggers: false - job_logging: - version: 1 - formatters: - simple: - format: '[%(asctime)s][%(name)s][%(levelname)s] - %(message)s' - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s] - - %(message)s' - log_colors: - DEBUG: purple - INFO: green - WARNING: yellow - ERROR: red - CRITICAL: red - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - file: - class: logging.FileHandler - formatter: simple - filename: ${hydra.job.name}.log - root: - level: INFO - handlers: - - console - - file - disable_existing_loggers: false - env: {} - mode: MULTIRUN - searchpath: [] - callbacks: {} - output_subdir: .hydra - overrides: - hydra: - - hydra.mode=MULTIRUN - task: - - benchmark.input_shapes.batch_size=2 - job: - name: experiment - chdir: true - override_dirname: benchmark.input_shapes.batch_size=2 - id: '1' - num: 1 - config_name: _base_ - env_set: - CUDA_VISIBLE_DEVICES: '0' - CUDA_DEVICE_ORDER: PCI_BUS_ID - env_copy: [] - config: - override_dirname: - kv_sep: '=' - item_sep: ',' - exclude_keys: [] - runtime: - version: 1.3.2 - version_base: '1.3' - cwd: /workspace/optimum-benchmark/examples/gemm-vs-gemv - config_sources: - - path: hydra.conf - schema: pkg - provider: hydra - - path: optimum_benchmark - schema: pkg - provider: main - - path: hydra_plugins.hydra_colorlog.conf - schema: pkg - provider: hydra-colorlog - - path: /workspace/optimum-benchmark/examples/gemm-vs-gemv/configs - schema: file - provider: command-line - - path: '' - schema: structured - provider: schema - output_dir: /workspace/optimum-benchmark/examples/gemm-vs-gemv/experiments/fp16-batch_size(2)-sequence_length(128)-new_tokens(256)/1 - choices: - benchmark: inference - backend: pytorch - hydra/env: default - hydra/callbacks: null - hydra/job_logging: colorlog - hydra/hydra_logging: colorlog - hydra/hydra_help: default - hydra/help: default - hydra/sweeper: basic - hydra/launcher: basic - hydra/output: default - verbose: false diff --git a/examples/running-vicunas/experiments/A100-80GB/fp16-batch_size(2)-sequence_length(128)-new_tokens(256)/1/.hydra/overrides.yaml b/examples/running-vicunas/experiments/A100-80GB/fp16-batch_size(2)-sequence_length(128)-new_tokens(256)/1/.hydra/overrides.yaml deleted file mode 100644 index 8211b85f..00000000 --- a/examples/running-vicunas/experiments/A100-80GB/fp16-batch_size(2)-sequence_length(128)-new_tokens(256)/1/.hydra/overrides.yaml +++ /dev/null @@ -1 +0,0 @@ -- benchmark.input_shapes.batch_size=2 diff --git a/examples/running-vicunas/experiments/A100-80GB/fp16-batch_size(2)-sequence_length(128)-new_tokens(256)/1/hydra_config.yaml b/examples/running-vicunas/experiments/A100-80GB/fp16-batch_size(2)-sequence_length(128)-new_tokens(256)/1/hydra_config.yaml deleted file mode 100644 index e46da2b1..00000000 --- a/examples/running-vicunas/experiments/A100-80GB/fp16-batch_size(2)-sequence_length(128)-new_tokens(256)/1/hydra_config.yaml +++ /dev/null @@ -1,76 +0,0 @@ -backend: - name: pytorch - version: 2.1.0+cu118 - _target_: optimum_benchmark.backends.pytorch.backend.PyTorchBackend - seed: 42 - inter_op_num_threads: 
null - intra_op_num_threads: null - initial_isolation_check: true - continous_isolation_check: true - delete_cache: false - no_weights: false - device_map: null - torch_dtype: float16 - disable_grad: true - eval_mode: true - amp_autocast: false - amp_dtype: null - torch_compile: false - torch_compile_config: {} - bettertransformer: false - quantization_scheme: null - quantization_config: {} - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 2 - sequence_length: 128 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: 256 - can_diffuse: false - can_generate: true - forward_kwargs: {} - generate_kwargs: - max_new_tokens: 256 - min_new_tokens: 256 - do_sample: false - use_cache: true - pad_token_id: 0 - num_beams: 1 -experiment_name: fp16-batch_size(2)-sequence_length(128)-new_tokens(256) -model: lmsys/vicuna-7b-v1.5 -device: cuda -task: text-generation -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.2 - optimum_commit: null - transformers_version: 4.35.0.dev0 - transformers_commit: null - accelerate_version: 0.24.0 - accelerate_commit: null - diffusers_version: null - diffusers_commit: null - python_version: 3.10.12 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB diff --git a/examples/running-vicunas/experiments/A100-80GB/fp16-batch_size(2)-sequence_length(128)-new_tokens(256)/1/inference_results.csv b/examples/running-vicunas/experiments/A100-80GB/fp16-batch_size(2)-sequence_length(128)-new_tokens(256)/1/inference_results.csv deleted file mode 100644 index cd257bc1..00000000 --- a/examples/running-vicunas/experiments/A100-80GB/fp16-batch_size(2)-sequence_length(128)-new_tokens(256)/1/inference_results.csv +++ /dev/null @@ -1,2 +0,0 @@ -forward.latency(s),forward.throughput(samples/s),forward.peak_memory(MB),generate.latency(s),generate.throughput(tokens/s),generate.peak_memory(MB) -0.0337,59.3,15313,6.68,76.6,15858 diff --git a/examples/running-vicunas/experiments/A100-80GB/fp16-batch_size(4)-sequence_length(128)-new_tokens(256)/2/.hydra/config.yaml b/examples/running-vicunas/experiments/A100-80GB/fp16-batch_size(4)-sequence_length(128)-new_tokens(256)/2/.hydra/config.yaml deleted file mode 100644 index 5952fe8f..00000000 --- a/examples/running-vicunas/experiments/A100-80GB/fp16-batch_size(4)-sequence_length(128)-new_tokens(256)/2/.hydra/config.yaml +++ /dev/null @@ -1,70 +0,0 @@ -backend: - name: pytorch - version: ${pytorch_version:} - _target_: optimum_benchmark.backends.pytorch.backend.PyTorchBackend - seed: 42 - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: true - continous_isolation_check: true - delete_cache: false - no_weights: false - device_map: null - torch_dtype: float16 - disable_grad: ${is_inference:${benchmark.name}} - eval_mode: ${is_inference:${benchmark.name}} - amp_autocast: false - amp_dtype: null - torch_compile: false - torch_compile_config: {} - bettertransformer: false - quantization_scheme: null - quantization_config: {} - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: 
optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 4 - sequence_length: 128 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: 256 - can_diffuse: ${can_diffuse:${task}} - can_generate: ${can_generate:${task}} - forward_kwargs: {} - generate_kwargs: {} -experiment_name: fp16-batch_size(${benchmark.input_shapes.batch_size})-sequence_length(${benchmark.input_shapes.sequence_length})-new_tokens(${benchmark.new_tokens}) -model: lmsys/vicuna-7b-v1.5 -device: cuda -task: ${infer_task:${model}} -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.2 - optimum_commit: null - transformers_version: 4.35.0.dev0 - transformers_commit: null - accelerate_version: 0.24.0 - accelerate_commit: null - diffusers_version: null - diffusers_commit: null - python_version: 3.10.12 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB diff --git a/examples/running-vicunas/experiments/A100-80GB/fp16-batch_size(4)-sequence_length(128)-new_tokens(256)/2/.hydra/hydra.yaml b/examples/running-vicunas/experiments/A100-80GB/fp16-batch_size(4)-sequence_length(128)-new_tokens(256)/2/.hydra/hydra.yaml deleted file mode 100644 index a8e88740..00000000 --- a/examples/running-vicunas/experiments/A100-80GB/fp16-batch_size(4)-sequence_length(128)-new_tokens(256)/2/.hydra/hydra.yaml +++ /dev/null @@ -1,174 +0,0 @@ -hydra: - run: - dir: experiments/${experiment_name} - sweep: - dir: experiments/${experiment_name} - subdir: ${hydra.job.num} - launcher: - _target_: hydra._internal.core_plugins.basic_launcher.BasicLauncher - sweeper: - _target_: hydra._internal.core_plugins.basic_sweeper.BasicSweeper - max_batch_size: null - params: - benchmark.input_shapes.batch_size: 1,2,4,8,16 - help: - app_name: ${hydra.job.name} - header: '${hydra.help.app_name} is powered by Hydra. - - ' - footer: 'Powered by Hydra (https://hydra.cc) - - Use --hydra-help to view Hydra specific help - - ' - template: '${hydra.help.header} - - == Configuration groups == - - Compose your configuration from those groups (group=option) - - - $APP_CONFIG_GROUPS - - - == Config == - - Override anything in the config (foo.bar=value) - - - $CONFIG - - - ${hydra.help.footer} - - ' - hydra_help: - template: 'Hydra (${hydra.runtime.version}) - - See https://hydra.cc for more info. - - - == Flags == - - $FLAGS_HELP - - - == Configuration groups == - - Compose your configuration from those groups (For example, append hydra/job_logging=disabled - to command line) - - - $HYDRA_CONFIG_GROUPS - - - Use ''--cfg hydra'' to Show the Hydra config. - - ' - hydra_help: ??? 
- hydra_logging: - version: 1 - formatters: - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s' - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - root: - level: INFO - handlers: - - console - disable_existing_loggers: false - job_logging: - version: 1 - formatters: - simple: - format: '[%(asctime)s][%(name)s][%(levelname)s] - %(message)s' - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s] - - %(message)s' - log_colors: - DEBUG: purple - INFO: green - WARNING: yellow - ERROR: red - CRITICAL: red - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - file: - class: logging.FileHandler - formatter: simple - filename: ${hydra.job.name}.log - root: - level: INFO - handlers: - - console - - file - disable_existing_loggers: false - env: {} - mode: MULTIRUN - searchpath: [] - callbacks: {} - output_subdir: .hydra - overrides: - hydra: - - hydra.mode=MULTIRUN - task: - - benchmark.input_shapes.batch_size=4 - job: - name: experiment - chdir: true - override_dirname: benchmark.input_shapes.batch_size=4 - id: '2' - num: 2 - config_name: _base_ - env_set: - CUDA_VISIBLE_DEVICES: '0' - CUDA_DEVICE_ORDER: PCI_BUS_ID - env_copy: [] - config: - override_dirname: - kv_sep: '=' - item_sep: ',' - exclude_keys: [] - runtime: - version: 1.3.2 - version_base: '1.3' - cwd: /workspace/optimum-benchmark/examples/gemm-vs-gemv - config_sources: - - path: hydra.conf - schema: pkg - provider: hydra - - path: optimum_benchmark - schema: pkg - provider: main - - path: hydra_plugins.hydra_colorlog.conf - schema: pkg - provider: hydra-colorlog - - path: /workspace/optimum-benchmark/examples/gemm-vs-gemv/configs - schema: file - provider: command-line - - path: '' - schema: structured - provider: schema - output_dir: /workspace/optimum-benchmark/examples/gemm-vs-gemv/experiments/fp16-batch_size(4)-sequence_length(128)-new_tokens(256)/2 - choices: - benchmark: inference - backend: pytorch - hydra/env: default - hydra/callbacks: null - hydra/job_logging: colorlog - hydra/hydra_logging: colorlog - hydra/hydra_help: default - hydra/help: default - hydra/sweeper: basic - hydra/launcher: basic - hydra/output: default - verbose: false diff --git a/examples/running-vicunas/experiments/A100-80GB/fp16-batch_size(4)-sequence_length(128)-new_tokens(256)/2/.hydra/overrides.yaml b/examples/running-vicunas/experiments/A100-80GB/fp16-batch_size(4)-sequence_length(128)-new_tokens(256)/2/.hydra/overrides.yaml deleted file mode 100644 index eef8c9ca..00000000 --- a/examples/running-vicunas/experiments/A100-80GB/fp16-batch_size(4)-sequence_length(128)-new_tokens(256)/2/.hydra/overrides.yaml +++ /dev/null @@ -1 +0,0 @@ -- benchmark.input_shapes.batch_size=4 diff --git a/examples/running-vicunas/experiments/A100-80GB/fp16-batch_size(4)-sequence_length(128)-new_tokens(256)/2/hydra_config.yaml b/examples/running-vicunas/experiments/A100-80GB/fp16-batch_size(4)-sequence_length(128)-new_tokens(256)/2/hydra_config.yaml deleted file mode 100644 index 2173beeb..00000000 --- a/examples/running-vicunas/experiments/A100-80GB/fp16-batch_size(4)-sequence_length(128)-new_tokens(256)/2/hydra_config.yaml +++ /dev/null @@ -1,76 +0,0 @@ -backend: - name: pytorch - version: 2.1.0+cu118 - _target_: optimum_benchmark.backends.pytorch.backend.PyTorchBackend - seed: 42 - inter_op_num_threads: 
null - intra_op_num_threads: null - initial_isolation_check: true - continous_isolation_check: true - delete_cache: false - no_weights: false - device_map: null - torch_dtype: float16 - disable_grad: true - eval_mode: true - amp_autocast: false - amp_dtype: null - torch_compile: false - torch_compile_config: {} - bettertransformer: false - quantization_scheme: null - quantization_config: {} - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 4 - sequence_length: 128 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: 256 - can_diffuse: false - can_generate: true - forward_kwargs: {} - generate_kwargs: - max_new_tokens: 256 - min_new_tokens: 256 - do_sample: false - use_cache: true - pad_token_id: 0 - num_beams: 1 -experiment_name: fp16-batch_size(4)-sequence_length(128)-new_tokens(256) -model: lmsys/vicuna-7b-v1.5 -device: cuda -task: text-generation -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.2 - optimum_commit: null - transformers_version: 4.35.0.dev0 - transformers_commit: null - accelerate_version: 0.24.0 - accelerate_commit: null - diffusers_version: null - diffusers_commit: null - python_version: 3.10.12 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB diff --git a/examples/running-vicunas/experiments/A100-80GB/fp16-batch_size(4)-sequence_length(128)-new_tokens(256)/2/inference_results.csv b/examples/running-vicunas/experiments/A100-80GB/fp16-batch_size(4)-sequence_length(128)-new_tokens(256)/2/inference_results.csv deleted file mode 100644 index 5e96af21..00000000 --- a/examples/running-vicunas/experiments/A100-80GB/fp16-batch_size(4)-sequence_length(128)-new_tokens(256)/2/inference_results.csv +++ /dev/null @@ -1,2 +0,0 @@ -forward.latency(s),forward.throughput(samples/s),forward.peak_memory(MB),generate.latency(s),generate.throughput(tokens/s),generate.peak_memory(MB) -0.0577,69.3,15690,6.69,153.0,16896 diff --git a/examples/running-vicunas/experiments/A100-80GB/fp16-batch_size(8)-sequence_length(128)-new_tokens(256)/3/.hydra/config.yaml b/examples/running-vicunas/experiments/A100-80GB/fp16-batch_size(8)-sequence_length(128)-new_tokens(256)/3/.hydra/config.yaml deleted file mode 100644 index 50cf10c9..00000000 --- a/examples/running-vicunas/experiments/A100-80GB/fp16-batch_size(8)-sequence_length(128)-new_tokens(256)/3/.hydra/config.yaml +++ /dev/null @@ -1,70 +0,0 @@ -backend: - name: pytorch - version: ${pytorch_version:} - _target_: optimum_benchmark.backends.pytorch.backend.PyTorchBackend - seed: 42 - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: true - continous_isolation_check: true - delete_cache: false - no_weights: false - device_map: null - torch_dtype: float16 - disable_grad: ${is_inference:${benchmark.name}} - eval_mode: ${is_inference:${benchmark.name}} - amp_autocast: false - amp_dtype: null - torch_compile: false - torch_compile_config: {} - bettertransformer: false - quantization_scheme: null - quantization_config: {} - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: 
optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 8 - sequence_length: 128 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: 256 - can_diffuse: ${can_diffuse:${task}} - can_generate: ${can_generate:${task}} - forward_kwargs: {} - generate_kwargs: {} -experiment_name: fp16-batch_size(${benchmark.input_shapes.batch_size})-sequence_length(${benchmark.input_shapes.sequence_length})-new_tokens(${benchmark.new_tokens}) -model: lmsys/vicuna-7b-v1.5 -device: cuda -task: ${infer_task:${model}} -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.2 - optimum_commit: null - transformers_version: 4.35.0.dev0 - transformers_commit: null - accelerate_version: 0.24.0 - accelerate_commit: null - diffusers_version: null - diffusers_commit: null - python_version: 3.10.12 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB diff --git a/examples/running-vicunas/experiments/A100-80GB/fp16-batch_size(8)-sequence_length(128)-new_tokens(256)/3/.hydra/hydra.yaml b/examples/running-vicunas/experiments/A100-80GB/fp16-batch_size(8)-sequence_length(128)-new_tokens(256)/3/.hydra/hydra.yaml deleted file mode 100644 index 08aafc5e..00000000 --- a/examples/running-vicunas/experiments/A100-80GB/fp16-batch_size(8)-sequence_length(128)-new_tokens(256)/3/.hydra/hydra.yaml +++ /dev/null @@ -1,174 +0,0 @@ -hydra: - run: - dir: experiments/${experiment_name} - sweep: - dir: experiments/${experiment_name} - subdir: ${hydra.job.num} - launcher: - _target_: hydra._internal.core_plugins.basic_launcher.BasicLauncher - sweeper: - _target_: hydra._internal.core_plugins.basic_sweeper.BasicSweeper - max_batch_size: null - params: - benchmark.input_shapes.batch_size: 1,2,4,8,16 - help: - app_name: ${hydra.job.name} - header: '${hydra.help.app_name} is powered by Hydra. - - ' - footer: 'Powered by Hydra (https://hydra.cc) - - Use --hydra-help to view Hydra specific help - - ' - template: '${hydra.help.header} - - == Configuration groups == - - Compose your configuration from those groups (group=option) - - - $APP_CONFIG_GROUPS - - - == Config == - - Override anything in the config (foo.bar=value) - - - $CONFIG - - - ${hydra.help.footer} - - ' - hydra_help: - template: 'Hydra (${hydra.runtime.version}) - - See https://hydra.cc for more info. - - - == Flags == - - $FLAGS_HELP - - - == Configuration groups == - - Compose your configuration from those groups (For example, append hydra/job_logging=disabled - to command line) - - - $HYDRA_CONFIG_GROUPS - - - Use ''--cfg hydra'' to Show the Hydra config. - - ' - hydra_help: ??? 
- hydra_logging: - version: 1 - formatters: - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s' - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - root: - level: INFO - handlers: - - console - disable_existing_loggers: false - job_logging: - version: 1 - formatters: - simple: - format: '[%(asctime)s][%(name)s][%(levelname)s] - %(message)s' - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s] - - %(message)s' - log_colors: - DEBUG: purple - INFO: green - WARNING: yellow - ERROR: red - CRITICAL: red - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - file: - class: logging.FileHandler - formatter: simple - filename: ${hydra.job.name}.log - root: - level: INFO - handlers: - - console - - file - disable_existing_loggers: false - env: {} - mode: MULTIRUN - searchpath: [] - callbacks: {} - output_subdir: .hydra - overrides: - hydra: - - hydra.mode=MULTIRUN - task: - - benchmark.input_shapes.batch_size=8 - job: - name: experiment - chdir: true - override_dirname: benchmark.input_shapes.batch_size=8 - id: '3' - num: 3 - config_name: _base_ - env_set: - CUDA_VISIBLE_DEVICES: '0' - CUDA_DEVICE_ORDER: PCI_BUS_ID - env_copy: [] - config: - override_dirname: - kv_sep: '=' - item_sep: ',' - exclude_keys: [] - runtime: - version: 1.3.2 - version_base: '1.3' - cwd: /workspace/optimum-benchmark/examples/gemm-vs-gemv - config_sources: - - path: hydra.conf - schema: pkg - provider: hydra - - path: optimum_benchmark - schema: pkg - provider: main - - path: hydra_plugins.hydra_colorlog.conf - schema: pkg - provider: hydra-colorlog - - path: /workspace/optimum-benchmark/examples/gemm-vs-gemv/configs - schema: file - provider: command-line - - path: '' - schema: structured - provider: schema - output_dir: /workspace/optimum-benchmark/examples/gemm-vs-gemv/experiments/fp16-batch_size(8)-sequence_length(128)-new_tokens(256)/3 - choices: - benchmark: inference - backend: pytorch - hydra/env: default - hydra/callbacks: null - hydra/job_logging: colorlog - hydra/hydra_logging: colorlog - hydra/hydra_help: default - hydra/help: default - hydra/sweeper: basic - hydra/launcher: basic - hydra/output: default - verbose: false diff --git a/examples/running-vicunas/experiments/A100-80GB/fp16-batch_size(8)-sequence_length(128)-new_tokens(256)/3/.hydra/overrides.yaml b/examples/running-vicunas/experiments/A100-80GB/fp16-batch_size(8)-sequence_length(128)-new_tokens(256)/3/.hydra/overrides.yaml deleted file mode 100644 index 8cd14374..00000000 --- a/examples/running-vicunas/experiments/A100-80GB/fp16-batch_size(8)-sequence_length(128)-new_tokens(256)/3/.hydra/overrides.yaml +++ /dev/null @@ -1 +0,0 @@ -- benchmark.input_shapes.batch_size=8 diff --git a/examples/running-vicunas/experiments/A100-80GB/fp16-batch_size(8)-sequence_length(128)-new_tokens(256)/3/hydra_config.yaml b/examples/running-vicunas/experiments/A100-80GB/fp16-batch_size(8)-sequence_length(128)-new_tokens(256)/3/hydra_config.yaml deleted file mode 100644 index b2ff40e5..00000000 --- a/examples/running-vicunas/experiments/A100-80GB/fp16-batch_size(8)-sequence_length(128)-new_tokens(256)/3/hydra_config.yaml +++ /dev/null @@ -1,76 +0,0 @@ -backend: - name: pytorch - version: 2.1.0+cu118 - _target_: optimum_benchmark.backends.pytorch.backend.PyTorchBackend - seed: 42 - inter_op_num_threads: 
null - intra_op_num_threads: null - initial_isolation_check: true - continous_isolation_check: true - delete_cache: false - no_weights: false - device_map: null - torch_dtype: float16 - disable_grad: true - eval_mode: true - amp_autocast: false - amp_dtype: null - torch_compile: false - torch_compile_config: {} - bettertransformer: false - quantization_scheme: null - quantization_config: {} - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 8 - sequence_length: 128 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: 256 - can_diffuse: false - can_generate: true - forward_kwargs: {} - generate_kwargs: - max_new_tokens: 256 - min_new_tokens: 256 - do_sample: false - use_cache: true - pad_token_id: 0 - num_beams: 1 -experiment_name: fp16-batch_size(8)-sequence_length(128)-new_tokens(256) -model: lmsys/vicuna-7b-v1.5 -device: cuda -task: text-generation -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.2 - optimum_commit: null - transformers_version: 4.35.0.dev0 - transformers_commit: null - accelerate_version: 0.24.0 - accelerate_commit: null - diffusers_version: null - diffusers_commit: null - python_version: 3.10.12 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB diff --git a/examples/running-vicunas/experiments/A100-80GB/fp16-batch_size(8)-sequence_length(128)-new_tokens(256)/3/inference_results.csv b/examples/running-vicunas/experiments/A100-80GB/fp16-batch_size(8)-sequence_length(128)-new_tokens(256)/3/inference_results.csv deleted file mode 100644 index 742a27ac..00000000 --- a/examples/running-vicunas/experiments/A100-80GB/fp16-batch_size(8)-sequence_length(128)-new_tokens(256)/3/inference_results.csv +++ /dev/null @@ -1,2 +0,0 @@ -forward.latency(s),forward.throughput(samples/s),forward.peak_memory(MB),generate.latency(s),generate.throughput(tokens/s),generate.peak_memory(MB) -0.105,76.2,16829,6.9,297.0,24551 diff --git a/examples/running-vicunas/experiments/A100-80GB/gptq-batch_size(1)-sequence_length(128)-new_tokens(256)/0/.hydra/config.yaml b/examples/running-vicunas/experiments/A100-80GB/gptq-batch_size(1)-sequence_length(128)-new_tokens(256)/0/.hydra/config.yaml deleted file mode 100644 index eafc68e2..00000000 --- a/examples/running-vicunas/experiments/A100-80GB/gptq-batch_size(1)-sequence_length(128)-new_tokens(256)/0/.hydra/config.yaml +++ /dev/null @@ -1,70 +0,0 @@ -backend: - name: pytorch - version: ${pytorch_version:} - _target_: optimum_benchmark.backends.pytorch.backend.PyTorchBackend - seed: 42 - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: true - continous_isolation_check: true - delete_cache: false - no_weights: false - device_map: null - torch_dtype: float16 - disable_grad: ${is_inference:${benchmark.name}} - eval_mode: ${is_inference:${benchmark.name}} - amp_autocast: false - amp_dtype: null - torch_compile: false - torch_compile_config: {} - bettertransformer: false - quantization_scheme: null - quantization_config: {} - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: 
optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 1 - sequence_length: 128 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: 256 - can_diffuse: ${can_diffuse:${task}} - can_generate: ${can_generate:${task}} - forward_kwargs: {} - generate_kwargs: {} -experiment_name: gptq-batch_size(${benchmark.input_shapes.batch_size})-sequence_length(${benchmark.input_shapes.sequence_length})-new_tokens(${benchmark.new_tokens}) -model: TheBloke/vicuna-7B-v1.5-GPTQ -device: cuda -task: ${infer_task:${model}} -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.2 - optimum_commit: null - transformers_version: 4.35.0.dev0 - transformers_commit: null - accelerate_version: 0.24.0 - accelerate_commit: null - diffusers_version: null - diffusers_commit: null - python_version: 3.10.12 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB diff --git a/examples/running-vicunas/experiments/A100-80GB/gptq-batch_size(1)-sequence_length(128)-new_tokens(256)/0/.hydra/hydra.yaml b/examples/running-vicunas/experiments/A100-80GB/gptq-batch_size(1)-sequence_length(128)-new_tokens(256)/0/.hydra/hydra.yaml deleted file mode 100644 index f5988a61..00000000 --- a/examples/running-vicunas/experiments/A100-80GB/gptq-batch_size(1)-sequence_length(128)-new_tokens(256)/0/.hydra/hydra.yaml +++ /dev/null @@ -1,174 +0,0 @@ -hydra: - run: - dir: experiments/${experiment_name} - sweep: - dir: experiments/${experiment_name} - subdir: ${hydra.job.num} - launcher: - _target_: hydra._internal.core_plugins.basic_launcher.BasicLauncher - sweeper: - _target_: hydra._internal.core_plugins.basic_sweeper.BasicSweeper - max_batch_size: null - params: - benchmark.input_shapes.batch_size: 1,2,4,8,16 - help: - app_name: ${hydra.job.name} - header: '${hydra.help.app_name} is powered by Hydra. - - ' - footer: 'Powered by Hydra (https://hydra.cc) - - Use --hydra-help to view Hydra specific help - - ' - template: '${hydra.help.header} - - == Configuration groups == - - Compose your configuration from those groups (group=option) - - - $APP_CONFIG_GROUPS - - - == Config == - - Override anything in the config (foo.bar=value) - - - $CONFIG - - - ${hydra.help.footer} - - ' - hydra_help: - template: 'Hydra (${hydra.runtime.version}) - - See https://hydra.cc for more info. - - - == Flags == - - $FLAGS_HELP - - - == Configuration groups == - - Compose your configuration from those groups (For example, append hydra/job_logging=disabled - to command line) - - - $HYDRA_CONFIG_GROUPS - - - Use ''--cfg hydra'' to Show the Hydra config. - - ' - hydra_help: ??? 
- hydra_logging: - version: 1 - formatters: - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s' - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - root: - level: INFO - handlers: - - console - disable_existing_loggers: false - job_logging: - version: 1 - formatters: - simple: - format: '[%(asctime)s][%(name)s][%(levelname)s] - %(message)s' - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s] - - %(message)s' - log_colors: - DEBUG: purple - INFO: green - WARNING: yellow - ERROR: red - CRITICAL: red - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - file: - class: logging.FileHandler - formatter: simple - filename: ${hydra.job.name}.log - root: - level: INFO - handlers: - - console - - file - disable_existing_loggers: false - env: {} - mode: MULTIRUN - searchpath: [] - callbacks: {} - output_subdir: .hydra - overrides: - hydra: - - hydra.mode=MULTIRUN - task: - - benchmark.input_shapes.batch_size=1 - job: - name: experiment - chdir: true - override_dirname: benchmark.input_shapes.batch_size=1 - id: '0' - num: 0 - config_name: gptq - env_set: - CUDA_VISIBLE_DEVICES: '0' - CUDA_DEVICE_ORDER: PCI_BUS_ID - env_copy: [] - config: - override_dirname: - kv_sep: '=' - item_sep: ',' - exclude_keys: [] - runtime: - version: 1.3.2 - version_base: '1.3' - cwd: /workspace/optimum-benchmark/examples/gemm-vs-gemv - config_sources: - - path: hydra.conf - schema: pkg - provider: hydra - - path: optimum_benchmark - schema: pkg - provider: main - - path: hydra_plugins.hydra_colorlog.conf - schema: pkg - provider: hydra-colorlog - - path: /workspace/optimum-benchmark/examples/gemm-vs-gemv/configs - schema: file - provider: command-line - - path: '' - schema: structured - provider: schema - output_dir: /workspace/optimum-benchmark/examples/gemm-vs-gemv/experiments/gptq-batch_size(1)-sequence_length(128)-new_tokens(256)/0 - choices: - benchmark: inference - backend: pytorch - hydra/env: default - hydra/callbacks: null - hydra/job_logging: colorlog - hydra/hydra_logging: colorlog - hydra/hydra_help: default - hydra/help: default - hydra/sweeper: basic - hydra/launcher: basic - hydra/output: default - verbose: false diff --git a/examples/running-vicunas/experiments/A100-80GB/gptq-batch_size(1)-sequence_length(128)-new_tokens(256)/0/.hydra/overrides.yaml b/examples/running-vicunas/experiments/A100-80GB/gptq-batch_size(1)-sequence_length(128)-new_tokens(256)/0/.hydra/overrides.yaml deleted file mode 100644 index 989520ff..00000000 --- a/examples/running-vicunas/experiments/A100-80GB/gptq-batch_size(1)-sequence_length(128)-new_tokens(256)/0/.hydra/overrides.yaml +++ /dev/null @@ -1 +0,0 @@ -- benchmark.input_shapes.batch_size=1 diff --git a/examples/running-vicunas/experiments/A100-80GB/gptq-batch_size(1)-sequence_length(128)-new_tokens(256)/0/hydra_config.yaml b/examples/running-vicunas/experiments/A100-80GB/gptq-batch_size(1)-sequence_length(128)-new_tokens(256)/0/hydra_config.yaml deleted file mode 100644 index b474c3df..00000000 --- a/examples/running-vicunas/experiments/A100-80GB/gptq-batch_size(1)-sequence_length(128)-new_tokens(256)/0/hydra_config.yaml +++ /dev/null @@ -1,76 +0,0 @@ -backend: - name: pytorch - version: 2.1.0+cu118 - _target_: optimum_benchmark.backends.pytorch.backend.PyTorchBackend - seed: 42 - inter_op_num_threads: null 
- intra_op_num_threads: null - initial_isolation_check: true - continous_isolation_check: true - delete_cache: false - no_weights: false - device_map: null - torch_dtype: float16 - disable_grad: true - eval_mode: true - amp_autocast: false - amp_dtype: null - torch_compile: false - torch_compile_config: {} - bettertransformer: false - quantization_scheme: null - quantization_config: {} - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 1 - sequence_length: 128 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: 256 - can_diffuse: false - can_generate: true - forward_kwargs: {} - generate_kwargs: - max_new_tokens: 256 - min_new_tokens: 256 - do_sample: false - use_cache: true - pad_token_id: 0 - num_beams: 1 -experiment_name: gptq-batch_size(1)-sequence_length(128)-new_tokens(256) -model: TheBloke/vicuna-7B-v1.5-GPTQ -device: cuda -task: text-generation -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.2 - optimum_commit: null - transformers_version: 4.35.0.dev0 - transformers_commit: null - accelerate_version: 0.24.0 - accelerate_commit: null - diffusers_version: null - diffusers_commit: null - python_version: 3.10.12 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB diff --git a/examples/running-vicunas/experiments/A100-80GB/gptq-batch_size(1)-sequence_length(128)-new_tokens(256)/0/inference_results.csv b/examples/running-vicunas/experiments/A100-80GB/gptq-batch_size(1)-sequence_length(128)-new_tokens(256)/0/inference_results.csv deleted file mode 100644 index 75f51632..00000000 --- a/examples/running-vicunas/experiments/A100-80GB/gptq-batch_size(1)-sequence_length(128)-new_tokens(256)/0/inference_results.csv +++ /dev/null @@ -1,2 +0,0 @@ -forward.latency(s),forward.throughput(samples/s),forward.peak_memory(MB),generate.latency(s),generate.throughput(tokens/s),generate.peak_memory(MB) -0.0351,28.5,6868,7.29,35.1,7077 diff --git a/examples/running-vicunas/experiments/A100-80GB/gptq-batch_size(16)-sequence_length(128)-new_tokens(256)/4/.hydra/config.yaml b/examples/running-vicunas/experiments/A100-80GB/gptq-batch_size(16)-sequence_length(128)-new_tokens(256)/4/.hydra/config.yaml deleted file mode 100644 index 88a1cbb3..00000000 --- a/examples/running-vicunas/experiments/A100-80GB/gptq-batch_size(16)-sequence_length(128)-new_tokens(256)/4/.hydra/config.yaml +++ /dev/null @@ -1,70 +0,0 @@ -backend: - name: pytorch - version: ${pytorch_version:} - _target_: optimum_benchmark.backends.pytorch.backend.PyTorchBackend - seed: 42 - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: true - continous_isolation_check: true - delete_cache: false - no_weights: false - device_map: null - torch_dtype: float16 - disable_grad: ${is_inference:${benchmark.name}} - eval_mode: ${is_inference:${benchmark.name}} - amp_autocast: false - amp_dtype: null - torch_compile: false - torch_compile_config: {} - bettertransformer: false - quantization_scheme: null - quantization_config: {} - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: 
optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 16 - sequence_length: 128 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: 256 - can_diffuse: ${can_diffuse:${task}} - can_generate: ${can_generate:${task}} - forward_kwargs: {} - generate_kwargs: {} -experiment_name: gptq-batch_size(${benchmark.input_shapes.batch_size})-sequence_length(${benchmark.input_shapes.sequence_length})-new_tokens(${benchmark.new_tokens}) -model: TheBloke/vicuna-7B-v1.5-GPTQ -device: cuda -task: ${infer_task:${model}} -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.2 - optimum_commit: null - transformers_version: 4.35.0.dev0 - transformers_commit: null - accelerate_version: 0.24.0 - accelerate_commit: null - diffusers_version: null - diffusers_commit: null - python_version: 3.10.12 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB diff --git a/examples/running-vicunas/experiments/A100-80GB/gptq-batch_size(16)-sequence_length(128)-new_tokens(256)/4/.hydra/hydra.yaml b/examples/running-vicunas/experiments/A100-80GB/gptq-batch_size(16)-sequence_length(128)-new_tokens(256)/4/.hydra/hydra.yaml deleted file mode 100644 index 904f449f..00000000 --- a/examples/running-vicunas/experiments/A100-80GB/gptq-batch_size(16)-sequence_length(128)-new_tokens(256)/4/.hydra/hydra.yaml +++ /dev/null @@ -1,174 +0,0 @@ -hydra: - run: - dir: experiments/${experiment_name} - sweep: - dir: experiments/${experiment_name} - subdir: ${hydra.job.num} - launcher: - _target_: hydra._internal.core_plugins.basic_launcher.BasicLauncher - sweeper: - _target_: hydra._internal.core_plugins.basic_sweeper.BasicSweeper - max_batch_size: null - params: - benchmark.input_shapes.batch_size: 1,2,4,8,16 - help: - app_name: ${hydra.job.name} - header: '${hydra.help.app_name} is powered by Hydra. - - ' - footer: 'Powered by Hydra (https://hydra.cc) - - Use --hydra-help to view Hydra specific help - - ' - template: '${hydra.help.header} - - == Configuration groups == - - Compose your configuration from those groups (group=option) - - - $APP_CONFIG_GROUPS - - - == Config == - - Override anything in the config (foo.bar=value) - - - $CONFIG - - - ${hydra.help.footer} - - ' - hydra_help: - template: 'Hydra (${hydra.runtime.version}) - - See https://hydra.cc for more info. - - - == Flags == - - $FLAGS_HELP - - - == Configuration groups == - - Compose your configuration from those groups (For example, append hydra/job_logging=disabled - to command line) - - - $HYDRA_CONFIG_GROUPS - - - Use ''--cfg hydra'' to Show the Hydra config. - - ' - hydra_help: ??? 
- hydra_logging: - version: 1 - formatters: - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s' - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - root: - level: INFO - handlers: - - console - disable_existing_loggers: false - job_logging: - version: 1 - formatters: - simple: - format: '[%(asctime)s][%(name)s][%(levelname)s] - %(message)s' - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s] - - %(message)s' - log_colors: - DEBUG: purple - INFO: green - WARNING: yellow - ERROR: red - CRITICAL: red - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - file: - class: logging.FileHandler - formatter: simple - filename: ${hydra.job.name}.log - root: - level: INFO - handlers: - - console - - file - disable_existing_loggers: false - env: {} - mode: MULTIRUN - searchpath: [] - callbacks: {} - output_subdir: .hydra - overrides: - hydra: - - hydra.mode=MULTIRUN - task: - - benchmark.input_shapes.batch_size=16 - job: - name: experiment - chdir: true - override_dirname: benchmark.input_shapes.batch_size=16 - id: '4' - num: 4 - config_name: gptq - env_set: - CUDA_VISIBLE_DEVICES: '0' - CUDA_DEVICE_ORDER: PCI_BUS_ID - env_copy: [] - config: - override_dirname: - kv_sep: '=' - item_sep: ',' - exclude_keys: [] - runtime: - version: 1.3.2 - version_base: '1.3' - cwd: /workspace/optimum-benchmark/examples/gemm-vs-gemv - config_sources: - - path: hydra.conf - schema: pkg - provider: hydra - - path: optimum_benchmark - schema: pkg - provider: main - - path: hydra_plugins.hydra_colorlog.conf - schema: pkg - provider: hydra-colorlog - - path: /workspace/optimum-benchmark/examples/gemm-vs-gemv/configs - schema: file - provider: command-line - - path: '' - schema: structured - provider: schema - output_dir: /workspace/optimum-benchmark/examples/gemm-vs-gemv/experiments/gptq-batch_size(16)-sequence_length(128)-new_tokens(256)/4 - choices: - benchmark: inference - backend: pytorch - hydra/env: default - hydra/callbacks: null - hydra/job_logging: colorlog - hydra/hydra_logging: colorlog - hydra/hydra_help: default - hydra/help: default - hydra/sweeper: basic - hydra/launcher: basic - hydra/output: default - verbose: false diff --git a/examples/running-vicunas/experiments/A100-80GB/gptq-batch_size(16)-sequence_length(128)-new_tokens(256)/4/.hydra/overrides.yaml b/examples/running-vicunas/experiments/A100-80GB/gptq-batch_size(16)-sequence_length(128)-new_tokens(256)/4/.hydra/overrides.yaml deleted file mode 100644 index fdb7f01d..00000000 --- a/examples/running-vicunas/experiments/A100-80GB/gptq-batch_size(16)-sequence_length(128)-new_tokens(256)/4/.hydra/overrides.yaml +++ /dev/null @@ -1 +0,0 @@ -- benchmark.input_shapes.batch_size=16 diff --git a/examples/running-vicunas/experiments/A100-80GB/gptq-batch_size(16)-sequence_length(128)-new_tokens(256)/4/hydra_config.yaml b/examples/running-vicunas/experiments/A100-80GB/gptq-batch_size(16)-sequence_length(128)-new_tokens(256)/4/hydra_config.yaml deleted file mode 100644 index 9f78a060..00000000 --- a/examples/running-vicunas/experiments/A100-80GB/gptq-batch_size(16)-sequence_length(128)-new_tokens(256)/4/hydra_config.yaml +++ /dev/null @@ -1,76 +0,0 @@ -backend: - name: pytorch - version: 2.1.0+cu118 - _target_: optimum_benchmark.backends.pytorch.backend.PyTorchBackend - seed: 42 - 
inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: true - continous_isolation_check: true - delete_cache: false - no_weights: false - device_map: null - torch_dtype: float16 - disable_grad: true - eval_mode: true - amp_autocast: false - amp_dtype: null - torch_compile: false - torch_compile_config: {} - bettertransformer: false - quantization_scheme: null - quantization_config: {} - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 16 - sequence_length: 128 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: 256 - can_diffuse: false - can_generate: true - forward_kwargs: {} - generate_kwargs: - max_new_tokens: 256 - min_new_tokens: 256 - do_sample: false - use_cache: true - pad_token_id: 0 - num_beams: 1 -experiment_name: gptq-batch_size(16)-sequence_length(128)-new_tokens(256) -model: TheBloke/vicuna-7B-v1.5-GPTQ -device: cuda -task: text-generation -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.2 - optimum_commit: null - transformers_version: 4.35.0.dev0 - transformers_commit: null - accelerate_version: 0.24.0 - accelerate_commit: null - diffusers_version: null - diffusers_commit: null - python_version: 3.10.12 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB diff --git a/examples/running-vicunas/experiments/A100-80GB/gptq-batch_size(16)-sequence_length(128)-new_tokens(256)/4/inference_results.csv b/examples/running-vicunas/experiments/A100-80GB/gptq-batch_size(16)-sequence_length(128)-new_tokens(256)/4/inference_results.csv deleted file mode 100644 index d8802536..00000000 --- a/examples/running-vicunas/experiments/A100-80GB/gptq-batch_size(16)-sequence_length(128)-new_tokens(256)/4/inference_results.csv +++ /dev/null @@ -1,2 +0,0 @@ -forward.latency(s),forward.throughput(samples/s),forward.peak_memory(MB),generate.latency(s),generate.throughput(tokens/s),generate.peak_memory(MB) -0.212,75.5,10441,10.1,406.0,40774 diff --git a/examples/running-vicunas/experiments/A100-80GB/gptq-batch_size(2)-sequence_length(128)-new_tokens(256)/1/.hydra/config.yaml b/examples/running-vicunas/experiments/A100-80GB/gptq-batch_size(2)-sequence_length(128)-new_tokens(256)/1/.hydra/config.yaml deleted file mode 100644 index 308bf1d2..00000000 --- a/examples/running-vicunas/experiments/A100-80GB/gptq-batch_size(2)-sequence_length(128)-new_tokens(256)/1/.hydra/config.yaml +++ /dev/null @@ -1,70 +0,0 @@ -backend: - name: pytorch - version: ${pytorch_version:} - _target_: optimum_benchmark.backends.pytorch.backend.PyTorchBackend - seed: 42 - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: true - continous_isolation_check: true - delete_cache: false - no_weights: false - device_map: null - torch_dtype: float16 - disable_grad: ${is_inference:${benchmark.name}} - eval_mode: ${is_inference:${benchmark.name}} - amp_autocast: false - amp_dtype: null - torch_compile: false - torch_compile_config: {} - bettertransformer: false - quantization_scheme: null - quantization_config: {} - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: 
optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 2 - sequence_length: 128 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: 256 - can_diffuse: ${can_diffuse:${task}} - can_generate: ${can_generate:${task}} - forward_kwargs: {} - generate_kwargs: {} -experiment_name: gptq-batch_size(${benchmark.input_shapes.batch_size})-sequence_length(${benchmark.input_shapes.sequence_length})-new_tokens(${benchmark.new_tokens}) -model: TheBloke/vicuna-7B-v1.5-GPTQ -device: cuda -task: ${infer_task:${model}} -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.2 - optimum_commit: null - transformers_version: 4.35.0.dev0 - transformers_commit: null - accelerate_version: 0.24.0 - accelerate_commit: null - diffusers_version: null - diffusers_commit: null - python_version: 3.10.12 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB diff --git a/examples/running-vicunas/experiments/A100-80GB/gptq-batch_size(2)-sequence_length(128)-new_tokens(256)/1/.hydra/hydra.yaml b/examples/running-vicunas/experiments/A100-80GB/gptq-batch_size(2)-sequence_length(128)-new_tokens(256)/1/.hydra/hydra.yaml deleted file mode 100644 index e2fd14be..00000000 --- a/examples/running-vicunas/experiments/A100-80GB/gptq-batch_size(2)-sequence_length(128)-new_tokens(256)/1/.hydra/hydra.yaml +++ /dev/null @@ -1,174 +0,0 @@ -hydra: - run: - dir: experiments/${experiment_name} - sweep: - dir: experiments/${experiment_name} - subdir: ${hydra.job.num} - launcher: - _target_: hydra._internal.core_plugins.basic_launcher.BasicLauncher - sweeper: - _target_: hydra._internal.core_plugins.basic_sweeper.BasicSweeper - max_batch_size: null - params: - benchmark.input_shapes.batch_size: 1,2,4,8,16 - help: - app_name: ${hydra.job.name} - header: '${hydra.help.app_name} is powered by Hydra. - - ' - footer: 'Powered by Hydra (https://hydra.cc) - - Use --hydra-help to view Hydra specific help - - ' - template: '${hydra.help.header} - - == Configuration groups == - - Compose your configuration from those groups (group=option) - - - $APP_CONFIG_GROUPS - - - == Config == - - Override anything in the config (foo.bar=value) - - - $CONFIG - - - ${hydra.help.footer} - - ' - hydra_help: - template: 'Hydra (${hydra.runtime.version}) - - See https://hydra.cc for more info. - - - == Flags == - - $FLAGS_HELP - - - == Configuration groups == - - Compose your configuration from those groups (For example, append hydra/job_logging=disabled - to command line) - - - $HYDRA_CONFIG_GROUPS - - - Use ''--cfg hydra'' to Show the Hydra config. - - ' - hydra_help: ??? 
- hydra_logging: - version: 1 - formatters: - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s' - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - root: - level: INFO - handlers: - - console - disable_existing_loggers: false - job_logging: - version: 1 - formatters: - simple: - format: '[%(asctime)s][%(name)s][%(levelname)s] - %(message)s' - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s] - - %(message)s' - log_colors: - DEBUG: purple - INFO: green - WARNING: yellow - ERROR: red - CRITICAL: red - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - file: - class: logging.FileHandler - formatter: simple - filename: ${hydra.job.name}.log - root: - level: INFO - handlers: - - console - - file - disable_existing_loggers: false - env: {} - mode: MULTIRUN - searchpath: [] - callbacks: {} - output_subdir: .hydra - overrides: - hydra: - - hydra.mode=MULTIRUN - task: - - benchmark.input_shapes.batch_size=2 - job: - name: experiment - chdir: true - override_dirname: benchmark.input_shapes.batch_size=2 - id: '1' - num: 1 - config_name: gptq - env_set: - CUDA_VISIBLE_DEVICES: '0' - CUDA_DEVICE_ORDER: PCI_BUS_ID - env_copy: [] - config: - override_dirname: - kv_sep: '=' - item_sep: ',' - exclude_keys: [] - runtime: - version: 1.3.2 - version_base: '1.3' - cwd: /workspace/optimum-benchmark/examples/gemm-vs-gemv - config_sources: - - path: hydra.conf - schema: pkg - provider: hydra - - path: optimum_benchmark - schema: pkg - provider: main - - path: hydra_plugins.hydra_colorlog.conf - schema: pkg - provider: hydra-colorlog - - path: /workspace/optimum-benchmark/examples/gemm-vs-gemv/configs - schema: file - provider: command-line - - path: '' - schema: structured - provider: schema - output_dir: /workspace/optimum-benchmark/examples/gemm-vs-gemv/experiments/gptq-batch_size(2)-sequence_length(128)-new_tokens(256)/1 - choices: - benchmark: inference - backend: pytorch - hydra/env: default - hydra/callbacks: null - hydra/job_logging: colorlog - hydra/hydra_logging: colorlog - hydra/hydra_help: default - hydra/help: default - hydra/sweeper: basic - hydra/launcher: basic - hydra/output: default - verbose: false diff --git a/examples/running-vicunas/experiments/A100-80GB/gptq-batch_size(2)-sequence_length(128)-new_tokens(256)/1/.hydra/overrides.yaml b/examples/running-vicunas/experiments/A100-80GB/gptq-batch_size(2)-sequence_length(128)-new_tokens(256)/1/.hydra/overrides.yaml deleted file mode 100644 index 8211b85f..00000000 --- a/examples/running-vicunas/experiments/A100-80GB/gptq-batch_size(2)-sequence_length(128)-new_tokens(256)/1/.hydra/overrides.yaml +++ /dev/null @@ -1 +0,0 @@ -- benchmark.input_shapes.batch_size=2 diff --git a/examples/running-vicunas/experiments/A100-80GB/gptq-batch_size(2)-sequence_length(128)-new_tokens(256)/1/hydra_config.yaml b/examples/running-vicunas/experiments/A100-80GB/gptq-batch_size(2)-sequence_length(128)-new_tokens(256)/1/hydra_config.yaml deleted file mode 100644 index 1a41cd98..00000000 --- a/examples/running-vicunas/experiments/A100-80GB/gptq-batch_size(2)-sequence_length(128)-new_tokens(256)/1/hydra_config.yaml +++ /dev/null @@ -1,76 +0,0 @@ -backend: - name: pytorch - version: 2.1.0+cu118 - _target_: optimum_benchmark.backends.pytorch.backend.PyTorchBackend - seed: 42 - inter_op_num_threads: null 
- intra_op_num_threads: null - initial_isolation_check: true - continous_isolation_check: true - delete_cache: false - no_weights: false - device_map: null - torch_dtype: float16 - disable_grad: true - eval_mode: true - amp_autocast: false - amp_dtype: null - torch_compile: false - torch_compile_config: {} - bettertransformer: false - quantization_scheme: null - quantization_config: {} - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 2 - sequence_length: 128 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: 256 - can_diffuse: false - can_generate: true - forward_kwargs: {} - generate_kwargs: - max_new_tokens: 256 - min_new_tokens: 256 - do_sample: false - use_cache: true - pad_token_id: 0 - num_beams: 1 -experiment_name: gptq-batch_size(2)-sequence_length(128)-new_tokens(256) -model: TheBloke/vicuna-7B-v1.5-GPTQ -device: cuda -task: text-generation -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.2 - optimum_commit: null - transformers_version: 4.35.0.dev0 - transformers_commit: null - accelerate_version: 0.24.0 - accelerate_commit: null - diffusers_version: null - diffusers_commit: null - python_version: 3.10.12 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB diff --git a/examples/running-vicunas/experiments/A100-80GB/gptq-batch_size(2)-sequence_length(128)-new_tokens(256)/1/inference_results.csv b/examples/running-vicunas/experiments/A100-80GB/gptq-batch_size(2)-sequence_length(128)-new_tokens(256)/1/inference_results.csv deleted file mode 100644 index 8c7d08c5..00000000 --- a/examples/running-vicunas/experiments/A100-80GB/gptq-batch_size(2)-sequence_length(128)-new_tokens(256)/1/inference_results.csv +++ /dev/null @@ -1,2 +0,0 @@ -forward.latency(s),forward.throughput(samples/s),forward.peak_memory(MB),generate.latency(s),generate.throughput(tokens/s),generate.peak_memory(MB) -0.0456,43.9,6800,7.17,71.4,7597 diff --git a/examples/running-vicunas/experiments/A100-80GB/gptq-batch_size(4)-sequence_length(128)-new_tokens(256)/2/.hydra/config.yaml b/examples/running-vicunas/experiments/A100-80GB/gptq-batch_size(4)-sequence_length(128)-new_tokens(256)/2/.hydra/config.yaml deleted file mode 100644 index 9bb84dff..00000000 --- a/examples/running-vicunas/experiments/A100-80GB/gptq-batch_size(4)-sequence_length(128)-new_tokens(256)/2/.hydra/config.yaml +++ /dev/null @@ -1,70 +0,0 @@ -backend: - name: pytorch - version: ${pytorch_version:} - _target_: optimum_benchmark.backends.pytorch.backend.PyTorchBackend - seed: 42 - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: true - continous_isolation_check: true - delete_cache: false - no_weights: false - device_map: null - torch_dtype: float16 - disable_grad: ${is_inference:${benchmark.name}} - eval_mode: ${is_inference:${benchmark.name}} - amp_autocast: false - amp_dtype: null - torch_compile: false - torch_compile_config: {} - bettertransformer: false - quantization_scheme: null - quantization_config: {} - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: 
optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 4 - sequence_length: 128 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: 256 - can_diffuse: ${can_diffuse:${task}} - can_generate: ${can_generate:${task}} - forward_kwargs: {} - generate_kwargs: {} -experiment_name: gptq-batch_size(${benchmark.input_shapes.batch_size})-sequence_length(${benchmark.input_shapes.sequence_length})-new_tokens(${benchmark.new_tokens}) -model: TheBloke/vicuna-7B-v1.5-GPTQ -device: cuda -task: ${infer_task:${model}} -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.2 - optimum_commit: null - transformers_version: 4.35.0.dev0 - transformers_commit: null - accelerate_version: 0.24.0 - accelerate_commit: null - diffusers_version: null - diffusers_commit: null - python_version: 3.10.12 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB diff --git a/examples/running-vicunas/experiments/A100-80GB/gptq-batch_size(4)-sequence_length(128)-new_tokens(256)/2/.hydra/hydra.yaml b/examples/running-vicunas/experiments/A100-80GB/gptq-batch_size(4)-sequence_length(128)-new_tokens(256)/2/.hydra/hydra.yaml deleted file mode 100644 index fe585a3d..00000000 --- a/examples/running-vicunas/experiments/A100-80GB/gptq-batch_size(4)-sequence_length(128)-new_tokens(256)/2/.hydra/hydra.yaml +++ /dev/null @@ -1,174 +0,0 @@ -hydra: - run: - dir: experiments/${experiment_name} - sweep: - dir: experiments/${experiment_name} - subdir: ${hydra.job.num} - launcher: - _target_: hydra._internal.core_plugins.basic_launcher.BasicLauncher - sweeper: - _target_: hydra._internal.core_plugins.basic_sweeper.BasicSweeper - max_batch_size: null - params: - benchmark.input_shapes.batch_size: 1,2,4,8,16 - help: - app_name: ${hydra.job.name} - header: '${hydra.help.app_name} is powered by Hydra. - - ' - footer: 'Powered by Hydra (https://hydra.cc) - - Use --hydra-help to view Hydra specific help - - ' - template: '${hydra.help.header} - - == Configuration groups == - - Compose your configuration from those groups (group=option) - - - $APP_CONFIG_GROUPS - - - == Config == - - Override anything in the config (foo.bar=value) - - - $CONFIG - - - ${hydra.help.footer} - - ' - hydra_help: - template: 'Hydra (${hydra.runtime.version}) - - See https://hydra.cc for more info. - - - == Flags == - - $FLAGS_HELP - - - == Configuration groups == - - Compose your configuration from those groups (For example, append hydra/job_logging=disabled - to command line) - - - $HYDRA_CONFIG_GROUPS - - - Use ''--cfg hydra'' to Show the Hydra config. - - ' - hydra_help: ??? 
- hydra_logging: - version: 1 - formatters: - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s' - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - root: - level: INFO - handlers: - - console - disable_existing_loggers: false - job_logging: - version: 1 - formatters: - simple: - format: '[%(asctime)s][%(name)s][%(levelname)s] - %(message)s' - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s] - - %(message)s' - log_colors: - DEBUG: purple - INFO: green - WARNING: yellow - ERROR: red - CRITICAL: red - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - file: - class: logging.FileHandler - formatter: simple - filename: ${hydra.job.name}.log - root: - level: INFO - handlers: - - console - - file - disable_existing_loggers: false - env: {} - mode: MULTIRUN - searchpath: [] - callbacks: {} - output_subdir: .hydra - overrides: - hydra: - - hydra.mode=MULTIRUN - task: - - benchmark.input_shapes.batch_size=4 - job: - name: experiment - chdir: true - override_dirname: benchmark.input_shapes.batch_size=4 - id: '2' - num: 2 - config_name: gptq - env_set: - CUDA_VISIBLE_DEVICES: '0' - CUDA_DEVICE_ORDER: PCI_BUS_ID - env_copy: [] - config: - override_dirname: - kv_sep: '=' - item_sep: ',' - exclude_keys: [] - runtime: - version: 1.3.2 - version_base: '1.3' - cwd: /workspace/optimum-benchmark/examples/gemm-vs-gemv - config_sources: - - path: hydra.conf - schema: pkg - provider: hydra - - path: optimum_benchmark - schema: pkg - provider: main - - path: hydra_plugins.hydra_colorlog.conf - schema: pkg - provider: hydra-colorlog - - path: /workspace/optimum-benchmark/examples/gemm-vs-gemv/configs - schema: file - provider: command-line - - path: '' - schema: structured - provider: schema - output_dir: /workspace/optimum-benchmark/examples/gemm-vs-gemv/experiments/gptq-batch_size(4)-sequence_length(128)-new_tokens(256)/2 - choices: - benchmark: inference - backend: pytorch - hydra/env: default - hydra/callbacks: null - hydra/job_logging: colorlog - hydra/hydra_logging: colorlog - hydra/hydra_help: default - hydra/help: default - hydra/sweeper: basic - hydra/launcher: basic - hydra/output: default - verbose: false diff --git a/examples/running-vicunas/experiments/A100-80GB/gptq-batch_size(4)-sequence_length(128)-new_tokens(256)/2/.hydra/overrides.yaml b/examples/running-vicunas/experiments/A100-80GB/gptq-batch_size(4)-sequence_length(128)-new_tokens(256)/2/.hydra/overrides.yaml deleted file mode 100644 index eef8c9ca..00000000 --- a/examples/running-vicunas/experiments/A100-80GB/gptq-batch_size(4)-sequence_length(128)-new_tokens(256)/2/.hydra/overrides.yaml +++ /dev/null @@ -1 +0,0 @@ -- benchmark.input_shapes.batch_size=4 diff --git a/examples/running-vicunas/experiments/A100-80GB/gptq-batch_size(4)-sequence_length(128)-new_tokens(256)/2/hydra_config.yaml b/examples/running-vicunas/experiments/A100-80GB/gptq-batch_size(4)-sequence_length(128)-new_tokens(256)/2/hydra_config.yaml deleted file mode 100644 index 8515f46c..00000000 --- a/examples/running-vicunas/experiments/A100-80GB/gptq-batch_size(4)-sequence_length(128)-new_tokens(256)/2/hydra_config.yaml +++ /dev/null @@ -1,76 +0,0 @@ -backend: - name: pytorch - version: 2.1.0+cu118 - _target_: optimum_benchmark.backends.pytorch.backend.PyTorchBackend - seed: 42 - inter_op_num_threads: null 
- intra_op_num_threads: null - initial_isolation_check: true - continous_isolation_check: true - delete_cache: false - no_weights: false - device_map: null - torch_dtype: float16 - disable_grad: true - eval_mode: true - amp_autocast: false - amp_dtype: null - torch_compile: false - torch_compile_config: {} - bettertransformer: false - quantization_scheme: null - quantization_config: {} - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 4 - sequence_length: 128 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: 256 - can_diffuse: false - can_generate: true - forward_kwargs: {} - generate_kwargs: - max_new_tokens: 256 - min_new_tokens: 256 - do_sample: false - use_cache: true - pad_token_id: 0 - num_beams: 1 -experiment_name: gptq-batch_size(4)-sequence_length(128)-new_tokens(256) -model: TheBloke/vicuna-7B-v1.5-GPTQ -device: cuda -task: text-generation -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.2 - optimum_commit: null - transformers_version: 4.35.0.dev0 - transformers_commit: null - accelerate_version: 0.24.0 - accelerate_commit: null - diffusers_version: null - diffusers_commit: null - python_version: 3.10.12 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB diff --git a/examples/running-vicunas/experiments/A100-80GB/gptq-batch_size(4)-sequence_length(128)-new_tokens(256)/2/inference_results.csv b/examples/running-vicunas/experiments/A100-80GB/gptq-batch_size(4)-sequence_length(128)-new_tokens(256)/2/inference_results.csv deleted file mode 100644 index 38edc30a..00000000 --- a/examples/running-vicunas/experiments/A100-80GB/gptq-batch_size(4)-sequence_length(128)-new_tokens(256)/2/inference_results.csv +++ /dev/null @@ -1,2 +0,0 @@ -forward.latency(s),forward.throughput(samples/s),forward.peak_memory(MB),generate.latency(s),generate.throughput(tokens/s),generate.peak_memory(MB) -0.0684,58.5,7170,7.3,140.0,8717 diff --git a/examples/running-vicunas/experiments/A100-80GB/gptq-batch_size(8)-sequence_length(128)-new_tokens(256)/3/.hydra/config.yaml b/examples/running-vicunas/experiments/A100-80GB/gptq-batch_size(8)-sequence_length(128)-new_tokens(256)/3/.hydra/config.yaml deleted file mode 100644 index aad9d7b2..00000000 --- a/examples/running-vicunas/experiments/A100-80GB/gptq-batch_size(8)-sequence_length(128)-new_tokens(256)/3/.hydra/config.yaml +++ /dev/null @@ -1,70 +0,0 @@ -backend: - name: pytorch - version: ${pytorch_version:} - _target_: optimum_benchmark.backends.pytorch.backend.PyTorchBackend - seed: 42 - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: true - continous_isolation_check: true - delete_cache: false - no_weights: false - device_map: null - torch_dtype: float16 - disable_grad: ${is_inference:${benchmark.name}} - eval_mode: ${is_inference:${benchmark.name}} - amp_autocast: false - amp_dtype: null - torch_compile: false - torch_compile_config: {} - bettertransformer: false - quantization_scheme: null - quantization_config: {} - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: 
optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 8 - sequence_length: 128 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: 256 - can_diffuse: ${can_diffuse:${task}} - can_generate: ${can_generate:${task}} - forward_kwargs: {} - generate_kwargs: {} -experiment_name: gptq-batch_size(${benchmark.input_shapes.batch_size})-sequence_length(${benchmark.input_shapes.sequence_length})-new_tokens(${benchmark.new_tokens}) -model: TheBloke/vicuna-7B-v1.5-GPTQ -device: cuda -task: ${infer_task:${model}} -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.2 - optimum_commit: null - transformers_version: 4.35.0.dev0 - transformers_commit: null - accelerate_version: 0.24.0 - accelerate_commit: null - diffusers_version: null - diffusers_commit: null - python_version: 3.10.12 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB diff --git a/examples/running-vicunas/experiments/A100-80GB/gptq-batch_size(8)-sequence_length(128)-new_tokens(256)/3/.hydra/hydra.yaml b/examples/running-vicunas/experiments/A100-80GB/gptq-batch_size(8)-sequence_length(128)-new_tokens(256)/3/.hydra/hydra.yaml deleted file mode 100644 index 516c3ed5..00000000 --- a/examples/running-vicunas/experiments/A100-80GB/gptq-batch_size(8)-sequence_length(128)-new_tokens(256)/3/.hydra/hydra.yaml +++ /dev/null @@ -1,174 +0,0 @@ -hydra: - run: - dir: experiments/${experiment_name} - sweep: - dir: experiments/${experiment_name} - subdir: ${hydra.job.num} - launcher: - _target_: hydra._internal.core_plugins.basic_launcher.BasicLauncher - sweeper: - _target_: hydra._internal.core_plugins.basic_sweeper.BasicSweeper - max_batch_size: null - params: - benchmark.input_shapes.batch_size: 1,2,4,8,16 - help: - app_name: ${hydra.job.name} - header: '${hydra.help.app_name} is powered by Hydra. - - ' - footer: 'Powered by Hydra (https://hydra.cc) - - Use --hydra-help to view Hydra specific help - - ' - template: '${hydra.help.header} - - == Configuration groups == - - Compose your configuration from those groups (group=option) - - - $APP_CONFIG_GROUPS - - - == Config == - - Override anything in the config (foo.bar=value) - - - $CONFIG - - - ${hydra.help.footer} - - ' - hydra_help: - template: 'Hydra (${hydra.runtime.version}) - - See https://hydra.cc for more info. - - - == Flags == - - $FLAGS_HELP - - - == Configuration groups == - - Compose your configuration from those groups (For example, append hydra/job_logging=disabled - to command line) - - - $HYDRA_CONFIG_GROUPS - - - Use ''--cfg hydra'' to Show the Hydra config. - - ' - hydra_help: ??? 
- hydra_logging: - version: 1 - formatters: - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s' - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - root: - level: INFO - handlers: - - console - disable_existing_loggers: false - job_logging: - version: 1 - formatters: - simple: - format: '[%(asctime)s][%(name)s][%(levelname)s] - %(message)s' - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s] - - %(message)s' - log_colors: - DEBUG: purple - INFO: green - WARNING: yellow - ERROR: red - CRITICAL: red - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - file: - class: logging.FileHandler - formatter: simple - filename: ${hydra.job.name}.log - root: - level: INFO - handlers: - - console - - file - disable_existing_loggers: false - env: {} - mode: MULTIRUN - searchpath: [] - callbacks: {} - output_subdir: .hydra - overrides: - hydra: - - hydra.mode=MULTIRUN - task: - - benchmark.input_shapes.batch_size=8 - job: - name: experiment - chdir: true - override_dirname: benchmark.input_shapes.batch_size=8 - id: '3' - num: 3 - config_name: gptq - env_set: - CUDA_VISIBLE_DEVICES: '0' - CUDA_DEVICE_ORDER: PCI_BUS_ID - env_copy: [] - config: - override_dirname: - kv_sep: '=' - item_sep: ',' - exclude_keys: [] - runtime: - version: 1.3.2 - version_base: '1.3' - cwd: /workspace/optimum-benchmark/examples/gemm-vs-gemv - config_sources: - - path: hydra.conf - schema: pkg - provider: hydra - - path: optimum_benchmark - schema: pkg - provider: main - - path: hydra_plugins.hydra_colorlog.conf - schema: pkg - provider: hydra-colorlog - - path: /workspace/optimum-benchmark/examples/gemm-vs-gemv/configs - schema: file - provider: command-line - - path: '' - schema: structured - provider: schema - output_dir: /workspace/optimum-benchmark/examples/gemm-vs-gemv/experiments/gptq-batch_size(8)-sequence_length(128)-new_tokens(256)/3 - choices: - benchmark: inference - backend: pytorch - hydra/env: default - hydra/callbacks: null - hydra/job_logging: colorlog - hydra/hydra_logging: colorlog - hydra/hydra_help: default - hydra/help: default - hydra/sweeper: basic - hydra/launcher: basic - hydra/output: default - verbose: false diff --git a/examples/running-vicunas/experiments/A100-80GB/gptq-batch_size(8)-sequence_length(128)-new_tokens(256)/3/.hydra/overrides.yaml b/examples/running-vicunas/experiments/A100-80GB/gptq-batch_size(8)-sequence_length(128)-new_tokens(256)/3/.hydra/overrides.yaml deleted file mode 100644 index 8cd14374..00000000 --- a/examples/running-vicunas/experiments/A100-80GB/gptq-batch_size(8)-sequence_length(128)-new_tokens(256)/3/.hydra/overrides.yaml +++ /dev/null @@ -1 +0,0 @@ -- benchmark.input_shapes.batch_size=8 diff --git a/examples/running-vicunas/experiments/A100-80GB/gptq-batch_size(8)-sequence_length(128)-new_tokens(256)/3/hydra_config.yaml b/examples/running-vicunas/experiments/A100-80GB/gptq-batch_size(8)-sequence_length(128)-new_tokens(256)/3/hydra_config.yaml deleted file mode 100644 index 7e234d3a..00000000 --- a/examples/running-vicunas/experiments/A100-80GB/gptq-batch_size(8)-sequence_length(128)-new_tokens(256)/3/hydra_config.yaml +++ /dev/null @@ -1,76 +0,0 @@ -backend: - name: pytorch - version: 2.1.0+cu118 - _target_: optimum_benchmark.backends.pytorch.backend.PyTorchBackend - seed: 42 - inter_op_num_threads: null 
- intra_op_num_threads: null - initial_isolation_check: true - continous_isolation_check: true - delete_cache: false - no_weights: false - device_map: null - torch_dtype: float16 - disable_grad: true - eval_mode: true - amp_autocast: false - amp_dtype: null - torch_compile: false - torch_compile_config: {} - bettertransformer: false - quantization_scheme: null - quantization_config: {} - use_ddp: false - ddp_config: {} - peft_strategy: null - peft_config: {} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark - duration: 10 - warmup_runs: 10 - memory: true - energy: false - input_shapes: - batch_size: 8 - sequence_length: 128 - num_choices: 1 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: 256 - can_diffuse: false - can_generate: true - forward_kwargs: {} - generate_kwargs: - max_new_tokens: 256 - min_new_tokens: 256 - do_sample: false - use_cache: true - pad_token_id: 0 - num_beams: 1 -experiment_name: gptq-batch_size(8)-sequence_length(128)-new_tokens(256) -model: TheBloke/vicuna-7B-v1.5-GPTQ -device: cuda -task: text-generation -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false -environment: - optimum_version: 1.13.2 - optimum_commit: null - transformers_version: 4.35.0.dev0 - transformers_commit: null - accelerate_version: 0.24.0 - accelerate_commit: null - diffusers_version: null - diffusers_commit: null - python_version: 3.10.12 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 540684 - gpus: - - NVIDIA A100-SXM4-80GB diff --git a/examples/running-vicunas/experiments/A100-80GB/gptq-batch_size(8)-sequence_length(128)-new_tokens(256)/3/inference_results.csv b/examples/running-vicunas/experiments/A100-80GB/gptq-batch_size(8)-sequence_length(128)-new_tokens(256)/3/inference_results.csv deleted file mode 100644 index 544369d4..00000000 --- a/examples/running-vicunas/experiments/A100-80GB/gptq-batch_size(8)-sequence_length(128)-new_tokens(256)/3/inference_results.csv +++ /dev/null @@ -1,2 +0,0 @@ -forward.latency(s),forward.throughput(samples/s),forward.peak_memory(MB),generate.latency(s),generate.throughput(tokens/s),generate.peak_memory(MB) -0.116,69.0,8568,8.88,231.0,16290 diff --git a/examples/running-vicunas/report.py b/examples/running-vicunas/report.py deleted file mode 100644 index 7313c910..00000000 --- a/examples/running-vicunas/report.py +++ /dev/null @@ -1,231 +0,0 @@ -from argparse import ArgumentParser -from pathlib import Path - -import matplotlib.pyplot as plt -import pandas as pd -from flatten_dict import flatten -from omegaconf import OmegaConf -from pandas import DataFrame -from rich.console import Console -from rich.table import Table -from rich.terminal_theme import MONOKAI - - -def gather_inference_report(root_folder: Path) -> DataFrame: - # key is path to inference file as string, value is dataframe - inference_dfs = { - f.parent.absolute().as_posix(): pd.read_csv(f) for f in root_folder.glob("**/inference_results.csv") - } - - # key is path to config file as string, value is flattened dict - config_dfs = { - f.parent.absolute() - .as_posix(): pd.DataFrame.from_dict(flatten(OmegaConf.load(f), reducer="dot"), orient="index") - .T - for f in root_folder.glob("**/hydra_config.yaml") - if f.parent.absolute().as_posix() in inference_dfs.keys() - } - - if len(inference_dfs) == 0 or len(config_dfs) == 0: - raise ValueError(f"No results found in {root_folder}") - - # Merge inference and config 
dataframes - inference_reports = [ - config_dfs[name].merge(inference_dfs[name], left_index=True, right_index=True) for name in inference_dfs.keys() - ] - - # Concatenate all reports - inference_report = pd.concat(inference_reports, axis=0, ignore_index=True) - inference_report.set_index("experiment_name", inplace=True) - return inference_report - - -def style_element(element, style=""): - if style: - return f"[{style}]{element}[/{style}]" - else: - return element - - -def format_element(element, style=""): - if isinstance(element, float): - if element != element: # nan - formated_element = "" - elif abs(element) >= 1: - formated_element = f"{element:.2f}" - elif abs(element) > 1e-6: - formated_element = f"{element:.2e}" - else: - formated_element = f"{element}" - elif element is None: - formated_element = "" - elif isinstance(element, bool): - if element: - formated_element = style_element("✔", style="green") - else: - formated_element = style_element("✘", style="red") - else: - formated_element = str(element) - - return style_element(formated_element, style=style) - - -def format_row(row, style=""): - formated_row = [] - for element in row: - formated_row.append(format_element(element, style=style)) - return formated_row - - -def get_short_report(inference_report): - short_columns = { - "benchmark.input_shapes.batch_size": "Batch Size", - "forward.latency(s)": "Forward Latency (s)", - "forward.throughput(samples/s)": "Forward Throughput (samples/s)", - "forward.peak_memory(MB)": "Forward Peak Memory (MB)", - "generate.throughput(tokens/s)": "Generate Throughput (tokens/s)", - "generate.peak_memory(MB)": "Generate Peak Memory (MB)", - } - short_report = inference_report[list(short_columns.keys())].rename(columns=short_columns) - short_report["Quantization Scheme"] = inference_report.index.str.split("-").str[0] - - return short_report - - -def get_rich_table(short_report): - # create rich table - rich_table = Table(show_header=True, show_lines=True) - # we add a column for the index - rich_table.add_column("Experiment Name", justify="left", header_style="") - # we populate the table with values - for column in short_report.columns: - rich_table.add_column(column, justify="right", header_style="bold") - # we add rows - for index, row in short_report.iterrows(): - rich_table.add_row(index, *format_row(row.values, style="")) - - return rich_table - - -def get_throughput_plot(short_report): - # for each quantization scheme we plot the throughput vs batch size - fig1, ax1 = plt.subplots() - fig2, ax2 = plt.subplots() - fig3, ax3 = plt.subplots() - fig4, ax4 = plt.subplots() - - short_report["Quantization Scheme"].fillna("unquantized", inplace=True) - short_report["Quantization Scheme"].replace("bnb", "BnB", inplace=True) - short_report["Quantization Scheme"].replace("awq", "AWQ", inplace=True) - short_report["Quantization Scheme"].replace("gptq", "GPTQ", inplace=True) - short_report["Quantization Scheme"].replace("awq+gemm", "AWQ+GEMM", inplace=True) - short_report["Quantization Scheme"].replace("awq+gemv", "AWQ+GEMV", inplace=True) - - for quantization_scheme in short_report["Quantization Scheme"].unique(): - mask = short_report["Quantization Scheme"] == quantization_scheme - - forward_latency = short_report[mask][["Batch Size", "Forward Latency (s)"]].sort_values(by="Batch Size") - generate_throughput = short_report[mask][["Batch Size", "Generate Throughput (tokens/s)"]].sort_values( - by="Batch Size" - ) - forward_memory = short_report[mask][["Batch Size", "Forward Peak Memory 
(MB)"]].sort_values(by="Batch Size") - generate_memory = short_report[mask][["Batch Size", "Generate Peak Memory (MB)"]].sort_values(by="Batch Size") - ax1.plot( - forward_latency["Batch Size"], - forward_latency["Forward Latency (s)"], - label=quantization_scheme, - marker="o", - ) - ax2.plot( - generate_throughput["Batch Size"], - generate_throughput["Generate Throughput (tokens/s)"], - label=quantization_scheme, - marker="o", - ) - ax3.plot( - forward_memory["Batch Size"], - forward_memory["Forward Peak Memory (MB)"], - label=quantization_scheme, - marker="*", - ) - ax4.plot( - generate_memory["Batch Size"], - generate_memory["Generate Peak Memory (MB)"], - label=quantization_scheme, - marker="*", - ) - - ax1.set_xlabel("Batch Size") - ax1.set_ylabel("Forward Latency (s)") - ax1.set_title("Forward Latency per Batch Size") - - ax2.set_xlabel("Batch Size") - ax2.set_ylabel("Generate Throughput (tokens/s)") - ax2.set_title("Generate Throughput per Batch Size") - - ax3.set_xlabel("Batch Size") - ax3.set_ylabel("Forward Peak Memory (MB)") - ax3.set_title("Forward Peak Memory per Batch Size") - - ax4.set_xlabel("Batch Size") - ax4.set_ylabel("Generate Peak Memory (MB)") - ax4.set_title("Generate Peak Memory per Batch Size") - - ax1.legend() - ax2.legend() - ax3.legend() - ax4.legend() - - return fig1, fig2, fig3, fig4 - - -def generate_report(): - parser = ArgumentParser() - parser.add_argument( - "--experiments", - "-e", - type=Path, - required=True, - help="The folder containing the results of experiments.", - ) - parser.add_argument( - "--report-name", - "-r", - type=str, - required=False, - help="The name of the report.", - ) - - args = parser.parse_args() - experiments_folders = args.experiments - - if args.report_name: - report_folder = f"artifacts/{args.report_name}" - else: - report_folder = "artifacts" - Path(report_folder).mkdir(parents=True, exist_ok=True) - - # gather experiments results - inference_report = gather_inference_report(experiments_folders) - inference_report.sort_values(by="forward.throughput(samples/s)", ascending=False, inplace=True) - inference_report.to_csv(f"{report_folder}/full_report.csv") - - short_report = get_short_report(inference_report) - short_report.to_csv(f"{report_folder}/short_report.csv") - - forward_throughput_plot, generate_throughput_plot, forward_memory_plot, generate_memory_plot = get_throughput_plot( - short_report - ) - forward_throughput_plot.savefig(f"{report_folder}/forward_latency_plot.png") - generate_throughput_plot.savefig(f"{report_folder}/generate_throughput_plot.png") - forward_memory_plot.savefig(f"{report_folder}/forward_memory_plot.png") - generate_memory_plot.savefig(f"{report_folder}/generate_memory_plot.png") - - rich_table = get_rich_table(short_report) - console = Console(record=True) - console.print(rich_table, justify="center") - console.save_svg(f"{report_folder}/rich_table.svg", theme=MONOKAI, title="Inference Report") - - -if __name__ == "__main__": - generate_report() diff --git a/examples/training-llamas/README.md b/examples/training-llamas/README.md deleted file mode 100644 index ea8bfb12..00000000 --- a/examples/training-llamas/README.md +++ /dev/null @@ -1,54 +0,0 @@ -# Optimum-Benchmark x LLaMAs x PEFT - -A set of benchmarks on Meta's LLaMA2's training. - -## Setup - -You will need to install any necessary third-party libraries like `deepspeed` or `auto-gptq` depending on the hardware and benchmarks you want to run. - -For example running PEFT on two devices with Model Parallelism (i.e. 
`fp16+peft+dp=2+zero3`) will require: `peft` and `deepspeed` - -## Running - -Then run the benchmarks from this directory with: - -```bash -optimum-benchmark --config-dir configs/ --config-name fp16 --multirun -optimum-benchmark --config-dir configs/ --config-name fp16+peft+dp=2+zero3 --multirun -[...] -``` - -This will create a folder called `experiments` with the results of the benchmarks with a training `batch_size` ranging from 1 to 128 and a `sequence_length` (sample size) of 256. - -## Reporting - -To create a report for 7B models on A100-80GB, run: - -```bash -python report.py -e experiments/hf-dgx-01/NousResearch/Llama-2-7b-hf/ -r artifacts/Llama-2-7b-hf/ -python report.py -e experiments/hf-dgx-01/NousResearch/Llama-2-13b-hf/ -r artifacts/Llama-2-13b-hf/ -``` - -Which will create some quick reporting artifacts like a `full_report.csv`, `short_report.csv`, and some interesting analysis plots. - -## Results - -### LLaMA-7B on A100-80GB - -
-[figures: throughput_plot, latency_plot]
- -### LLaMA-13B on A100-80GB - -
-[figures: throughput_plot, latency_plot]
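A minimal sketch (not part of the original patch) of how the throughput numbers behind these plots could be pulled back out of a `full_report.csv` such as the one removed below. It assumes pandas is installed, the column names are copied from that CSV's header, and the file path is only illustrative.

```python
# Sketch: summarize training throughput per batch size from a full_report.csv
# produced by the (now removed) report.py. The path is illustrative; the column
# names come from the header of the deleted CSV below.
import pandas as pd

report = pd.read_csv("artifacts/Llama-2-13b-hf/full_report.csv", index_col=0)

summary = report[
    [
        "model",
        "benchmark.training_arguments.per_device_train_batch_size",
        "training.throughput(samples/s)",
    ]
].sort_values("benchmark.training_arguments.per_device_train_batch_size")

print(summary.to_string(index=False))
```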
diff --git a/examples/training-llamas/artifacts/Llama-2-13b-hf/full_report.csv b/examples/training-llamas/artifacts/Llama-2-13b-hf/full_report.csv deleted file mode 100644 index bb28621b..00000000 --- a/examples/training-llamas/artifacts/Llama-2-13b-hf/full_report.csv +++ /dev/null @@ -1,393 +0,0 @@ -,launcher.name,launcher._target_,launcher.start_method,backend.name,backend.version,backend._target_,backend.seed,backend.inter_op_num_threads,backend.intra_op_num_threads,backend.continuous_isolation,backend.isolation_check_interval,backend.delete_cache,backend.no_weights,backend.device_map,backend.torch_dtype,backend.eval_mode,backend.disable_grad,backend.amp_autocast,backend.amp_dtype,backend.torch_compile,backend.to_bettertransformer,backend.use_flash_attention_2,backend.quantization_scheme,backend.data_parallel,backend.deepspeed_inference,backend.peft_strategy,backend.peft_config.base_model_name_or_path,backend.peft_config.revision,backend.peft_config.peft_type,backend.peft_config.task_type,backend.peft_config.inference_mode,backend.peft_config.auto_mapping,backend.peft_config.r,backend.peft_config.target_modules,backend.peft_config.lora_alpha,backend.peft_config.lora_dropout,backend.peft_config.fan_in_fan_out,backend.peft_config.bias,backend.peft_config.modules_to_save,backend.peft_config.init_lora_weights,backend.peft_config.layers_to_transform,backend.peft_config.layers_pattern,benchmark.name,benchmark._target_,benchmark.warmup_steps,benchmark.dataset_shapes.dataset_size,benchmark.dataset_shapes.sequence_length,benchmark.dataset_shapes.num_choices,benchmark.dataset_shapes.feature_size,benchmark.dataset_shapes.nb_max_frames,benchmark.dataset_shapes.audio_sequence_length,benchmark.training_arguments.skip_memory_metrics,benchmark.training_arguments.output_dir,benchmark.training_arguments.use_cpu,benchmark.training_arguments.ddp_find_unused_parameters,benchmark.training_arguments.do_train,benchmark.training_arguments.do_eval,benchmark.training_arguments.do_predict,benchmark.training_arguments.report_to,benchmark.training_arguments.max_steps,benchmark.training_arguments.per_device_train_batch_size,experiment_name,device,model,task,hub_kwargs.revision,hub_kwargs.cache_dir,hub_kwargs.force_download,hub_kwargs.local_files_only,environment.optimum_version,environment.optimum_commit,environment.transformers_version,environment.transformers_commit,environment.accelerate_version,environment.accelerate_commit,environment.diffusers_version,environment.diffusers_commit,environment.python_version,environment.system,environment.cpu,environment.cpu_count,environment.cpu_ram_mb,environment.gpus,warmup.runtime(s),warmup.throughput(samples/s),training.runtime(s),training.throughput(samples/s),overall_training.runtime(s),overall_training.throughput(samples/s),hydra.run.dir,hydra.sweep.dir,hydra.sweep.subdir,hydra.launcher._target_,hydra.sweeper._target_,hydra.sweeper.max_batch_size,hydra.sweeper.params.benchmark.training_arguments.per_device_train_batch_size,hydra.sweeper.params.model,hydra.help.app_name,hydra.help.header,hydra.help.footer,hydra.help.template,hydra.hydra_help.template,hydra.hydra_help.hydra_help,hydra.hydra_logging.version,hydra.hydra_logging.formatters.colorlog.(),hydra.hydra_logging.formatters.colorlog.format,hydra.hydra_logging.handlers.console.class,hydra.hydra_logging.handlers.console.formatter,hydra.hydra_logging.handlers.console.stream,hydra.hydra_logging.root.level,hydra.hydra_logging.root.handlers,hydra.hydra_logging.disable_existing_loggers,hydra.job_logging.version,hydra.job_loggi
ng.formatters.simple.format,hydra.job_logging.formatters.colorlog.(),hydra.job_logging.formatters.colorlog.format,hydra.job_logging.formatters.colorlog.log_colors.DEBUG,hydra.job_logging.formatters.colorlog.log_colors.INFO,hydra.job_logging.formatters.colorlog.log_colors.WARNING,hydra.job_logging.formatters.colorlog.log_colors.ERROR,hydra.job_logging.formatters.colorlog.log_colors.CRITICAL,hydra.job_logging.handlers.console.class,hydra.job_logging.handlers.console.formatter,hydra.job_logging.handlers.console.stream,hydra.job_logging.handlers.file.class,hydra.job_logging.handlers.file.formatter,hydra.job_logging.handlers.file.filename,hydra.job_logging.root.level,hydra.job_logging.root.handlers,hydra.job_logging.disable_existing_loggers,hydra.mode,hydra.searchpath,hydra.output_subdir,hydra.overrides.hydra,hydra.overrides.task,hydra.job.name,hydra.job.chdir,hydra.job.override_dirname,hydra.job.id,hydra.job.num,hydra.job.config_name,hydra.job.env_set.CUDA_VISIBLE_DEVICES,hydra.job.env_set.CUDA_DEVICE_ORDER,hydra.job.env_copy,hydra.job.config.override_dirname.kv_sep,hydra.job.config.override_dirname.item_sep,hydra.job.config.override_dirname.exclude_keys,hydra.runtime.version,hydra.runtime.version_base,hydra.runtime.cwd,hydra.runtime.config_sources,hydra.runtime.output_dir,hydra.runtime.choices.benchmark,hydra.runtime.choices.launcher,hydra.runtime.choices.backend,hydra.runtime.choices.hydra/env,hydra.runtime.choices.hydra/callbacks,hydra.runtime.choices.hydra/job_logging,hydra.runtime.choices.hydra/hydra_logging,hydra.runtime.choices.hydra/hydra_help,hydra.runtime.choices.hydra/help,hydra.runtime.choices.hydra/sweeper,hydra.runtime.choices.hydra/launcher,hydra.runtime.choices.hydra/output,hydra.verbose,backend.quantization_config.llm_int8_threshold,backend.quantization_config.load_in_4bit,backend.quantization_config.bnb_4bit_compute_dtype,backend.quantization_config.bits,backend.quantization_config.disable_exllama -0,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,spawn,pytorch,2.1.1+cu118,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,42,,,True,1.0,False,True,,float16,False,False,False,,False,False,False,,False,False,lora,,,,CAUSAL_LM,False,,8,,8,0,False,none,,True,,,training,optimum_benchmark.benchmarks.training.benchmark.TrainingBenchmark,40,160,256,1,80,3000,16000,True,./trainer_output,False,False,True,False,False,none,140,16,fp16+peft,cuda,NousResearch/Llama-2-13b-hf,text-generation,main,,False,False,1.14.1,,4.35.2,,0.24.1,,,,3.10.12,Linux, AMD EPYC 7742 64-Core Processor,128,540671,['NVIDIA A100-SXM4-80GB'],63.59784197807312,10.06323453900614,159.16368579864502,10.0525442846563,222.7615296840668,7.182568741870341,experiments/${benchmark.name}/${oc.env:HOSTNAME}/${model}/${experiment_name},experiments/${benchmark.name}/${oc.env:HOSTNAME}/${model}/${experiment_name},${benchmark.training_arguments.per_device_train_batch_size},hydra._internal.core_plugins.basic_launcher.BasicLauncher,hydra._internal.core_plugins.basic_sweeper.BasicSweeper,,"1,2,4,8,16,32,64","NousResearch/Llama-2-7b-hf,NousResearch/Llama-2-13b-hf",${hydra.job.name},"${hydra.help.app_name} is powered by Hydra. 
-","Powered by Hydra (https://hydra.cc) -Use --hydra-help to view Hydra specific help -","${hydra.help.header} -== Configuration groups == -Compose your configuration from those groups (group=option) - -$APP_CONFIG_GROUPS - -== Config == -Override anything in the config (foo.bar=value) - -$CONFIG - -${hydra.help.footer} -","Hydra (${hydra.runtime.version}) -See https://hydra.cc for more info. - -== Flags == -$FLAGS_HELP - -== Configuration groups == -Compose your configuration from those groups (For example, append hydra/job_logging=disabled to command line) - -$HYDRA_CONFIG_GROUPS - -Use '--cfg hydra' to Show the Hydra config. -",???,1,colorlog.ColoredFormatter,[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s,logging.StreamHandler,colorlog,ext://sys.stdout,INFO,['console'],False,1,[%(asctime)s][%(name)s][%(levelname)s] - %(message)s,colorlog.ColoredFormatter,[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s] - %(message)s,purple,green,yellow,red,red,logging.StreamHandler,colorlog,ext://sys.stdout,logging.FileHandler,simple,${hydra.job.name}.log,INFO,"['console', 'file']",False,MULTIRUN,[],.hydra,['hydra.mode=MULTIRUN'],"['benchmark.training_arguments.per_device_train_batch_size=16', 'model=NousResearch/Llama-2-13b-hf']",cli,True,"benchmark.training_arguments.per_device_train_batch_size=16,model=NousResearch/Llama-2-13b-hf",9,9,fp16+peft,0,PCI_BUS_ID,[],=,",",[],1.3.2,1.3,/workspace/optimum-benchmark/examples/training-llamas,"[{'path': 'hydra.conf', 'schema': 'pkg', 'provider': 'hydra'}, {'path': 'optimum_benchmark', 'schema': 'pkg', 'provider': 'main'}, {'path': 'hydra_plugins.hydra_colorlog.conf', 'schema': 'pkg', 'provider': 'hydra-colorlog'}, {'path': '/workspace/optimum-benchmark/examples/training-llamas/configs', 'schema': 'file', 'provider': 'command-line'}, {'path': '', 'schema': 'structured', 'provider': 'schema'}]",/workspace/optimum-benchmark/examples/training-llamas/experiments/training/hf-dgx-01/NousResearch/Llama-2-13b-hf/fp16+peft/16,training,process,pytorch,default,,colorlog,colorlog,default,default,basic,basic,default,False,,,,, -1,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,spawn,pytorch,2.1.1+cu118,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,42,,,True,1.0,False,True,,float16,False,False,False,,False,False,False,,False,False,lora,,,,CAUSAL_LM,False,,8,,8,0,False,none,,True,,,training,optimum_benchmark.benchmarks.training.benchmark.TrainingBenchmark,40,160,256,1,80,3000,16000,True,./trainer_output,False,False,True,False,False,none,140,4,fp16+peft,cuda,NousResearch/Llama-2-13b-hf,text-generation,main,,False,False,1.14.1,,4.35.2,,0.24.1,,,,3.10.12,Linux, AMD EPYC 7742 64-Core Processor,128,540671,['NVIDIA A100-SXM4-80GB'],18.260884523391724,8.761897584700462,44.20054507255554,9.049662155600046,62.4614315032959,6.403951852734807,experiments/${benchmark.name}/${oc.env:HOSTNAME}/${model}/${experiment_name},experiments/${benchmark.name}/${oc.env:HOSTNAME}/${model}/${experiment_name},${benchmark.training_arguments.per_device_train_batch_size},hydra._internal.core_plugins.basic_launcher.BasicLauncher,hydra._internal.core_plugins.basic_sweeper.BasicSweeper,,"1,2,4,8,16,32,64","NousResearch/Llama-2-7b-hf,NousResearch/Llama-2-13b-hf",${hydra.job.name},"${hydra.help.app_name} is powered by Hydra. 
-","Powered by Hydra (https://hydra.cc) -Use --hydra-help to view Hydra specific help -","${hydra.help.header} -== Configuration groups == -Compose your configuration from those groups (group=option) - -$APP_CONFIG_GROUPS - -== Config == -Override anything in the config (foo.bar=value) - -$CONFIG - -${hydra.help.footer} -","Hydra (${hydra.runtime.version}) -See https://hydra.cc for more info. - -== Flags == -$FLAGS_HELP - -== Configuration groups == -Compose your configuration from those groups (For example, append hydra/job_logging=disabled to command line) - -$HYDRA_CONFIG_GROUPS - -Use '--cfg hydra' to Show the Hydra config. -",???,1,colorlog.ColoredFormatter,[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s,logging.StreamHandler,colorlog,ext://sys.stdout,INFO,['console'],False,1,[%(asctime)s][%(name)s][%(levelname)s] - %(message)s,colorlog.ColoredFormatter,[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s] - %(message)s,purple,green,yellow,red,red,logging.StreamHandler,colorlog,ext://sys.stdout,logging.FileHandler,simple,${hydra.job.name}.log,INFO,"['console', 'file']",False,MULTIRUN,[],.hydra,['hydra.mode=MULTIRUN'],"['benchmark.training_arguments.per_device_train_batch_size=4', 'model=NousResearch/Llama-2-13b-hf']",cli,True,"benchmark.training_arguments.per_device_train_batch_size=4,model=NousResearch/Llama-2-13b-hf",5,5,fp16+peft,0,PCI_BUS_ID,[],=,",",[],1.3.2,1.3,/workspace/optimum-benchmark/examples/training-llamas,"[{'path': 'hydra.conf', 'schema': 'pkg', 'provider': 'hydra'}, {'path': 'optimum_benchmark', 'schema': 'pkg', 'provider': 'main'}, {'path': 'hydra_plugins.hydra_colorlog.conf', 'schema': 'pkg', 'provider': 'hydra-colorlog'}, {'path': '/workspace/optimum-benchmark/examples/training-llamas/configs', 'schema': 'file', 'provider': 'command-line'}, {'path': '', 'schema': 'structured', 'provider': 'schema'}]",/workspace/optimum-benchmark/examples/training-llamas/experiments/training/hf-dgx-01/NousResearch/Llama-2-13b-hf/fp16+peft/4,training,process,pytorch,default,,colorlog,colorlog,default,default,basic,basic,default,False,,,,, -2,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,spawn,pytorch,2.1.1+cu118,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,42,,,True,1.0,False,True,,float16,False,False,False,,False,False,False,,False,False,lora,,,,CAUSAL_LM,False,,8,,8,0,False,none,,True,,,training,optimum_benchmark.benchmarks.training.benchmark.TrainingBenchmark,40,160,256,1,80,3000,16000,True,./trainer_output,False,False,True,False,False,none,140,1,fp16+peft,cuda,NousResearch/Llama-2-13b-hf,text-generation,main,,False,False,1.14.1,,4.35.2,,0.24.1,,,,3.10.12,Linux, AMD EPYC 7742 64-Core Processor,128,540671,['NVIDIA A100-SXM4-80GB'],6.025439023971558,6.6385204199833945,14.0109965801239,7.137251046214708,20.036437034606934,4.990907306886948,experiments/${benchmark.name}/${oc.env:HOSTNAME}/${model}/${experiment_name},experiments/${benchmark.name}/${oc.env:HOSTNAME}/${model}/${experiment_name},${benchmark.training_arguments.per_device_train_batch_size},hydra._internal.core_plugins.basic_launcher.BasicLauncher,hydra._internal.core_plugins.basic_sweeper.BasicSweeper,,"1,2,4,8,16,32,64","NousResearch/Llama-2-7b-hf,NousResearch/Llama-2-13b-hf",${hydra.job.name},"${hydra.help.app_name} is powered by Hydra. 
-","Powered by Hydra (https://hydra.cc) -Use --hydra-help to view Hydra specific help -","${hydra.help.header} -== Configuration groups == -Compose your configuration from those groups (group=option) - -$APP_CONFIG_GROUPS - -== Config == -Override anything in the config (foo.bar=value) - -$CONFIG - -${hydra.help.footer} -","Hydra (${hydra.runtime.version}) -See https://hydra.cc for more info. - -== Flags == -$FLAGS_HELP - -== Configuration groups == -Compose your configuration from those groups (For example, append hydra/job_logging=disabled to command line) - -$HYDRA_CONFIG_GROUPS - -Use '--cfg hydra' to Show the Hydra config. -",???,1,colorlog.ColoredFormatter,[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s,logging.StreamHandler,colorlog,ext://sys.stdout,INFO,['console'],False,1,[%(asctime)s][%(name)s][%(levelname)s] - %(message)s,colorlog.ColoredFormatter,[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s] - %(message)s,purple,green,yellow,red,red,logging.StreamHandler,colorlog,ext://sys.stdout,logging.FileHandler,simple,${hydra.job.name}.log,INFO,"['console', 'file']",False,MULTIRUN,[],.hydra,['hydra.mode=MULTIRUN'],"['benchmark.training_arguments.per_device_train_batch_size=1', 'model=NousResearch/Llama-2-13b-hf']",cli,True,"benchmark.training_arguments.per_device_train_batch_size=1,model=NousResearch/Llama-2-13b-hf",1,1,fp16+peft,0,PCI_BUS_ID,[],=,",",[],1.3.2,1.3,/workspace/optimum-benchmark/examples/training-llamas,"[{'path': 'hydra.conf', 'schema': 'pkg', 'provider': 'hydra'}, {'path': 'optimum_benchmark', 'schema': 'pkg', 'provider': 'main'}, {'path': 'hydra_plugins.hydra_colorlog.conf', 'schema': 'pkg', 'provider': 'hydra-colorlog'}, {'path': '/workspace/optimum-benchmark/examples/training-llamas/configs', 'schema': 'file', 'provider': 'command-line'}, {'path': '', 'schema': 'structured', 'provider': 'schema'}]",/workspace/optimum-benchmark/examples/training-llamas/experiments/training/hf-dgx-01/NousResearch/Llama-2-13b-hf/fp16+peft/1,training,process,pytorch,default,,colorlog,colorlog,default,default,basic,basic,default,False,,,,, -3,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,spawn,pytorch,2.1.1+cu118,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,42,,,True,1.0,False,True,,float16,False,False,False,,False,False,False,,False,False,lora,,,,CAUSAL_LM,False,,8,,8,0,False,none,,True,,,training,optimum_benchmark.benchmarks.training.benchmark.TrainingBenchmark,40,160,256,1,80,3000,16000,True,./trainer_output,False,False,True,False,False,none,140,2,fp16+peft,cuda,NousResearch/Llama-2-13b-hf,text-generation,main,,False,False,1.14.1,,4.35.2,,0.24.1,,,,3.10.12,Linux, AMD EPYC 7742 64-Core Processor,128,540671,['NVIDIA A100-SXM4-80GB'],10.59528875350952,7.550525696951984,25.423195123672485,7.866831805643994,36.01848530769348,5.552704348655116,experiments/${benchmark.name}/${oc.env:HOSTNAME}/${model}/${experiment_name},experiments/${benchmark.name}/${oc.env:HOSTNAME}/${model}/${experiment_name},${benchmark.training_arguments.per_device_train_batch_size},hydra._internal.core_plugins.basic_launcher.BasicLauncher,hydra._internal.core_plugins.basic_sweeper.BasicSweeper,,"1,2,4,8,16,32,64","NousResearch/Llama-2-7b-hf,NousResearch/Llama-2-13b-hf",${hydra.job.name},"${hydra.help.app_name} is powered by Hydra. 
-","Powered by Hydra (https://hydra.cc) -Use --hydra-help to view Hydra specific help -","${hydra.help.header} -== Configuration groups == -Compose your configuration from those groups (group=option) - -$APP_CONFIG_GROUPS - -== Config == -Override anything in the config (foo.bar=value) - -$CONFIG - -${hydra.help.footer} -","Hydra (${hydra.runtime.version}) -See https://hydra.cc for more info. - -== Flags == -$FLAGS_HELP - -== Configuration groups == -Compose your configuration from those groups (For example, append hydra/job_logging=disabled to command line) - -$HYDRA_CONFIG_GROUPS - -Use '--cfg hydra' to Show the Hydra config. -",???,1,colorlog.ColoredFormatter,[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s,logging.StreamHandler,colorlog,ext://sys.stdout,INFO,['console'],False,1,[%(asctime)s][%(name)s][%(levelname)s] - %(message)s,colorlog.ColoredFormatter,[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s] - %(message)s,purple,green,yellow,red,red,logging.StreamHandler,colorlog,ext://sys.stdout,logging.FileHandler,simple,${hydra.job.name}.log,INFO,"['console', 'file']",False,MULTIRUN,[],.hydra,['hydra.mode=MULTIRUN'],"['benchmark.training_arguments.per_device_train_batch_size=2', 'model=NousResearch/Llama-2-13b-hf']",cli,True,"benchmark.training_arguments.per_device_train_batch_size=2,model=NousResearch/Llama-2-13b-hf",3,3,fp16+peft,0,PCI_BUS_ID,[],=,",",[],1.3.2,1.3,/workspace/optimum-benchmark/examples/training-llamas,"[{'path': 'hydra.conf', 'schema': 'pkg', 'provider': 'hydra'}, {'path': 'optimum_benchmark', 'schema': 'pkg', 'provider': 'main'}, {'path': 'hydra_plugins.hydra_colorlog.conf', 'schema': 'pkg', 'provider': 'hydra-colorlog'}, {'path': '/workspace/optimum-benchmark/examples/training-llamas/configs', 'schema': 'file', 'provider': 'command-line'}, {'path': '', 'schema': 'structured', 'provider': 'schema'}]",/workspace/optimum-benchmark/examples/training-llamas/experiments/training/hf-dgx-01/NousResearch/Llama-2-13b-hf/fp16+peft/2,training,process,pytorch,default,,colorlog,colorlog,default,default,basic,basic,default,False,,,,, -4,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,spawn,pytorch,2.1.1+cu118,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,42,,,True,1.0,False,True,,float16,False,False,False,,False,False,False,,False,False,lora,,,,CAUSAL_LM,False,,8,,8,0,False,none,,True,,,training,optimum_benchmark.benchmarks.training.benchmark.TrainingBenchmark,40,160,256,1,80,3000,16000,True,./trainer_output,False,False,True,False,False,none,140,8,fp16+peft,cuda,NousResearch/Llama-2-13b-hf,text-generation,main,,False,False,1.14.1,,4.35.2,,0.24.1,,,,3.10.12,Linux, AMD EPYC 7742 64-Core Processor,128,540671,['NVIDIA A100-SXM4-80GB'],32.72411274909973,9.77872196118758,80.59089398384094,9.926679807775873,113.31500816345216,7.059965074052975,experiments/${benchmark.name}/${oc.env:HOSTNAME}/${model}/${experiment_name},experiments/${benchmark.name}/${oc.env:HOSTNAME}/${model}/${experiment_name},${benchmark.training_arguments.per_device_train_batch_size},hydra._internal.core_plugins.basic_launcher.BasicLauncher,hydra._internal.core_plugins.basic_sweeper.BasicSweeper,,"1,2,4,8,16,32,64","NousResearch/Llama-2-7b-hf,NousResearch/Llama-2-13b-hf",${hydra.job.name},"${hydra.help.app_name} is powered by Hydra. 
-","Powered by Hydra (https://hydra.cc) -Use --hydra-help to view Hydra specific help -","${hydra.help.header} -== Configuration groups == -Compose your configuration from those groups (group=option) - -$APP_CONFIG_GROUPS - -== Config == -Override anything in the config (foo.bar=value) - -$CONFIG - -${hydra.help.footer} -","Hydra (${hydra.runtime.version}) -See https://hydra.cc for more info. - -== Flags == -$FLAGS_HELP - -== Configuration groups == -Compose your configuration from those groups (For example, append hydra/job_logging=disabled to command line) - -$HYDRA_CONFIG_GROUPS - -Use '--cfg hydra' to Show the Hydra config. -",???,1,colorlog.ColoredFormatter,[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s,logging.StreamHandler,colorlog,ext://sys.stdout,INFO,['console'],False,1,[%(asctime)s][%(name)s][%(levelname)s] - %(message)s,colorlog.ColoredFormatter,[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s] - %(message)s,purple,green,yellow,red,red,logging.StreamHandler,colorlog,ext://sys.stdout,logging.FileHandler,simple,${hydra.job.name}.log,INFO,"['console', 'file']",False,MULTIRUN,[],.hydra,['hydra.mode=MULTIRUN'],"['benchmark.training_arguments.per_device_train_batch_size=8', 'model=NousResearch/Llama-2-13b-hf']",cli,True,"benchmark.training_arguments.per_device_train_batch_size=8,model=NousResearch/Llama-2-13b-hf",7,7,fp16+peft,0,PCI_BUS_ID,[],=,",",[],1.3.2,1.3,/workspace/optimum-benchmark/examples/training-llamas,"[{'path': 'hydra.conf', 'schema': 'pkg', 'provider': 'hydra'}, {'path': 'optimum_benchmark', 'schema': 'pkg', 'provider': 'main'}, {'path': 'hydra_plugins.hydra_colorlog.conf', 'schema': 'pkg', 'provider': 'hydra-colorlog'}, {'path': '/workspace/optimum-benchmark/examples/training-llamas/configs', 'schema': 'file', 'provider': 'command-line'}, {'path': '', 'schema': 'structured', 'provider': 'schema'}]",/workspace/optimum-benchmark/examples/training-llamas/experiments/training/hf-dgx-01/NousResearch/Llama-2-13b-hf/fp16+peft/8,training,process,pytorch,default,,colorlog,colorlog,default,default,basic,basic,default,False,,,,, -5,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,spawn,pytorch,2.1.1+cu118,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,42,,,True,1.0,False,True,,float16,False,False,False,,False,False,False,bnb,False,False,lora,,,,CAUSAL_LM,False,,8,,8,0,False,none,,True,,,training,optimum_benchmark.benchmarks.training.benchmark.TrainingBenchmark,40,160,256,1,80,3000,16000,True,./trainer_output,False,False,True,False,False,none,140,16,fp16+peft+bnb-4bit,cuda,NousResearch/Llama-2-13b-hf,text-generation,main,,False,False,1.14.1,,4.35.2,,0.24.1,,,,3.10.12,Linux, AMD EPYC 7742 64-Core Processor,128,540671,['NVIDIA A100-SXM4-80GB'],56.82151055335999,11.263340128893413,140.7813069820404,11.365145233408818,197.6028189659119,8.097050479204011,experiments/${benchmark.name}/${oc.env:HOSTNAME}/${model}/${experiment_name},experiments/${benchmark.name}/${oc.env:HOSTNAME}/${model}/${experiment_name},${benchmark.training_arguments.per_device_train_batch_size},hydra._internal.core_plugins.basic_launcher.BasicLauncher,hydra._internal.core_plugins.basic_sweeper.BasicSweeper,,"1,2,4,8,16,32,64,128","NousResearch/Llama-2-7b-hf,NousResearch/Llama-2-13b-hf",${hydra.job.name},"${hydra.help.app_name} is powered by Hydra. 
-","Powered by Hydra (https://hydra.cc) -Use --hydra-help to view Hydra specific help -","${hydra.help.header} -== Configuration groups == -Compose your configuration from those groups (group=option) - -$APP_CONFIG_GROUPS - -== Config == -Override anything in the config (foo.bar=value) - -$CONFIG - -${hydra.help.footer} -","Hydra (${hydra.runtime.version}) -See https://hydra.cc for more info. - -== Flags == -$FLAGS_HELP - -== Configuration groups == -Compose your configuration from those groups (For example, append hydra/job_logging=disabled to command line) - -$HYDRA_CONFIG_GROUPS - -Use '--cfg hydra' to Show the Hydra config. -",???,1,colorlog.ColoredFormatter,[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s,logging.StreamHandler,colorlog,ext://sys.stdout,INFO,['console'],False,1,[%(asctime)s][%(name)s][%(levelname)s] - %(message)s,colorlog.ColoredFormatter,[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s] - %(message)s,purple,green,yellow,red,red,logging.StreamHandler,colorlog,ext://sys.stdout,logging.FileHandler,simple,${hydra.job.name}.log,INFO,"['console', 'file']",False,MULTIRUN,[],.hydra,['hydra.mode=MULTIRUN'],"['benchmark.training_arguments.per_device_train_batch_size=16', 'model=NousResearch/Llama-2-13b-hf']",cli,True,"benchmark.training_arguments.per_device_train_batch_size=16,model=NousResearch/Llama-2-13b-hf",9,9,fp16+peft+bnb-4bit,0,PCI_BUS_ID,[],=,",",[],1.3.2,1.3,/workspace/optimum-benchmark/examples/training-llamas,"[{'path': 'hydra.conf', 'schema': 'pkg', 'provider': 'hydra'}, {'path': 'optimum_benchmark', 'schema': 'pkg', 'provider': 'main'}, {'path': 'hydra_plugins.hydra_colorlog.conf', 'schema': 'pkg', 'provider': 'hydra-colorlog'}, {'path': '/workspace/optimum-benchmark/examples/training-llamas/configs', 'schema': 'file', 'provider': 'command-line'}, {'path': '', 'schema': 'structured', 'provider': 'schema'}]",/workspace/optimum-benchmark/examples/training-llamas/experiments/training/hf-dgx-01/NousResearch/Llama-2-13b-hf/fp16+peft+bnb-4bit/16,training,process,pytorch,default,,colorlog,colorlog,default,default,basic,basic,default,False,0.0,True,float16,, -6,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,spawn,pytorch,2.1.1+cu118,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,42,,,True,1.0,False,True,,float16,False,False,False,,False,False,False,bnb,False,False,lora,,,,CAUSAL_LM,False,,8,,8,0,False,none,,True,,,training,optimum_benchmark.benchmarks.training.benchmark.TrainingBenchmark,40,160,256,1,80,3000,16000,True,./trainer_output,False,False,True,False,False,none,140,4,fp16+peft+bnb-4bit,cuda,NousResearch/Llama-2-13b-hf,text-generation,main,,False,False,1.14.1,,4.35.2,,0.24.1,,,,3.10.12,Linux, AMD EPYC 7742 64-Core Processor,128,540671,['NVIDIA A100-SXM4-80GB'],18.608861923217773,8.59805401642388,45.62080240249634,8.767929955964831,64.22966575622559,6.227651900262757,experiments/${benchmark.name}/${oc.env:HOSTNAME}/${model}/${experiment_name},experiments/${benchmark.name}/${oc.env:HOSTNAME}/${model}/${experiment_name},${benchmark.training_arguments.per_device_train_batch_size},hydra._internal.core_plugins.basic_launcher.BasicLauncher,hydra._internal.core_plugins.basic_sweeper.BasicSweeper,,"1,2,4,8,16,32,64,128","NousResearch/Llama-2-7b-hf,NousResearch/Llama-2-13b-hf",${hydra.job.name},"${hydra.help.app_name} is powered by Hydra. 
-","Powered by Hydra (https://hydra.cc) -Use --hydra-help to view Hydra specific help -","${hydra.help.header} -== Configuration groups == -Compose your configuration from those groups (group=option) - -$APP_CONFIG_GROUPS - -== Config == -Override anything in the config (foo.bar=value) - -$CONFIG - -${hydra.help.footer} -","Hydra (${hydra.runtime.version}) -See https://hydra.cc for more info. - -== Flags == -$FLAGS_HELP - -== Configuration groups == -Compose your configuration from those groups (For example, append hydra/job_logging=disabled to command line) - -$HYDRA_CONFIG_GROUPS - -Use '--cfg hydra' to Show the Hydra config. -",???,1,colorlog.ColoredFormatter,[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s,logging.StreamHandler,colorlog,ext://sys.stdout,INFO,['console'],False,1,[%(asctime)s][%(name)s][%(levelname)s] - %(message)s,colorlog.ColoredFormatter,[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s] - %(message)s,purple,green,yellow,red,red,logging.StreamHandler,colorlog,ext://sys.stdout,logging.FileHandler,simple,${hydra.job.name}.log,INFO,"['console', 'file']",False,MULTIRUN,[],.hydra,['hydra.mode=MULTIRUN'],"['benchmark.training_arguments.per_device_train_batch_size=4', 'model=NousResearch/Llama-2-13b-hf']",cli,True,"benchmark.training_arguments.per_device_train_batch_size=4,model=NousResearch/Llama-2-13b-hf",5,5,fp16+peft+bnb-4bit,0,PCI_BUS_ID,[],=,",",[],1.3.2,1.3,/workspace/optimum-benchmark/examples/training-llamas,"[{'path': 'hydra.conf', 'schema': 'pkg', 'provider': 'hydra'}, {'path': 'optimum_benchmark', 'schema': 'pkg', 'provider': 'main'}, {'path': 'hydra_plugins.hydra_colorlog.conf', 'schema': 'pkg', 'provider': 'hydra-colorlog'}, {'path': '/workspace/optimum-benchmark/examples/training-llamas/configs', 'schema': 'file', 'provider': 'command-line'}, {'path': '', 'schema': 'structured', 'provider': 'schema'}]",/workspace/optimum-benchmark/examples/training-llamas/experiments/training/hf-dgx-01/NousResearch/Llama-2-13b-hf/fp16+peft+bnb-4bit/4,training,process,pytorch,default,,colorlog,colorlog,default,default,basic,basic,default,False,0.0,True,float16,, -7,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,spawn,pytorch,2.1.1+cu118,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,42,,,True,1.0,False,True,,float16,False,False,False,,False,False,False,bnb,False,False,lora,,,,CAUSAL_LM,False,,8,,8,0,False,none,,True,,,training,optimum_benchmark.benchmarks.training.benchmark.TrainingBenchmark,40,160,256,1,80,3000,16000,True,./trainer_output,False,False,True,False,False,none,140,1,fp16+peft+bnb-4bit,cuda,NousResearch/Llama-2-13b-hf,text-generation,main,,False,False,1.14.1,,4.35.2,,0.24.1,,,,3.10.12,Linux, AMD EPYC 7742 64-Core Processor,128,540671,['NVIDIA A100-SXM4-80GB'],8.35726809501648,4.78625306083604,19.78277015686035,5.054903797955798,28.140039443969727,3.553655288902927,experiments/${benchmark.name}/${oc.env:HOSTNAME}/${model}/${experiment_name},experiments/${benchmark.name}/${oc.env:HOSTNAME}/${model}/${experiment_name},${benchmark.training_arguments.per_device_train_batch_size},hydra._internal.core_plugins.basic_launcher.BasicLauncher,hydra._internal.core_plugins.basic_sweeper.BasicSweeper,,"1,2,4,8,16,32,64,128","NousResearch/Llama-2-7b-hf,NousResearch/Llama-2-13b-hf",${hydra.job.name},"${hydra.help.app_name} is powered by Hydra. 
-","Powered by Hydra (https://hydra.cc) -Use --hydra-help to view Hydra specific help -","${hydra.help.header} -== Configuration groups == -Compose your configuration from those groups (group=option) - -$APP_CONFIG_GROUPS - -== Config == -Override anything in the config (foo.bar=value) - -$CONFIG - -${hydra.help.footer} -","Hydra (${hydra.runtime.version}) -See https://hydra.cc for more info. - -== Flags == -$FLAGS_HELP - -== Configuration groups == -Compose your configuration from those groups (For example, append hydra/job_logging=disabled to command line) - -$HYDRA_CONFIG_GROUPS - -Use '--cfg hydra' to Show the Hydra config. -",???,1,colorlog.ColoredFormatter,[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s,logging.StreamHandler,colorlog,ext://sys.stdout,INFO,['console'],False,1,[%(asctime)s][%(name)s][%(levelname)s] - %(message)s,colorlog.ColoredFormatter,[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s] - %(message)s,purple,green,yellow,red,red,logging.StreamHandler,colorlog,ext://sys.stdout,logging.FileHandler,simple,${hydra.job.name}.log,INFO,"['console', 'file']",False,MULTIRUN,[],.hydra,['hydra.mode=MULTIRUN'],"['benchmark.training_arguments.per_device_train_batch_size=1', 'model=NousResearch/Llama-2-13b-hf']",cli,True,"benchmark.training_arguments.per_device_train_batch_size=1,model=NousResearch/Llama-2-13b-hf",1,1,fp16+peft+bnb-4bit,0,PCI_BUS_ID,[],=,",",[],1.3.2,1.3,/workspace/optimum-benchmark/examples/training-llamas,"[{'path': 'hydra.conf', 'schema': 'pkg', 'provider': 'hydra'}, {'path': 'optimum_benchmark', 'schema': 'pkg', 'provider': 'main'}, {'path': 'hydra_plugins.hydra_colorlog.conf', 'schema': 'pkg', 'provider': 'hydra-colorlog'}, {'path': '/workspace/optimum-benchmark/examples/training-llamas/configs', 'schema': 'file', 'provider': 'command-line'}, {'path': '', 'schema': 'structured', 'provider': 'schema'}]",/workspace/optimum-benchmark/examples/training-llamas/experiments/training/hf-dgx-01/NousResearch/Llama-2-13b-hf/fp16+peft+bnb-4bit/1,training,process,pytorch,default,,colorlog,colorlog,default,default,basic,basic,default,False,0.0,True,float16,, -8,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,spawn,pytorch,2.1.1+cu118,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,42,,,True,1.0,False,True,,float16,False,False,False,,False,False,False,bnb,False,False,lora,,,,CAUSAL_LM,False,,8,,8,0,False,none,,True,,,training,optimum_benchmark.benchmarks.training.benchmark.TrainingBenchmark,40,160,256,1,80,3000,16000,True,./trainer_output,False,False,True,False,False,none,140,2,fp16+peft+bnb-4bit,cuda,NousResearch/Llama-2-13b-hf,text-generation,main,,False,False,1.14.1,,4.35.2,,0.24.1,,,,3.10.12,Linux, AMD EPYC 7742 64-Core Processor,128,540671,['NVIDIA A100-SXM4-80GB'],12.181357860565186,6.567412345628946,29.56349492073059,6.765100017310723,41.74485445022583,4.791009637809817,experiments/${benchmark.name}/${oc.env:HOSTNAME}/${model}/${experiment_name},experiments/${benchmark.name}/${oc.env:HOSTNAME}/${model}/${experiment_name},${benchmark.training_arguments.per_device_train_batch_size},hydra._internal.core_plugins.basic_launcher.BasicLauncher,hydra._internal.core_plugins.basic_sweeper.BasicSweeper,,"1,2,4,8,16,32,64,128","NousResearch/Llama-2-7b-hf,NousResearch/Llama-2-13b-hf",${hydra.job.name},"${hydra.help.app_name} is powered by Hydra. 
-","Powered by Hydra (https://hydra.cc) -Use --hydra-help to view Hydra specific help -","${hydra.help.header} -== Configuration groups == -Compose your configuration from those groups (group=option) - -$APP_CONFIG_GROUPS - -== Config == -Override anything in the config (foo.bar=value) - -$CONFIG - -${hydra.help.footer} -","Hydra (${hydra.runtime.version}) -See https://hydra.cc for more info. - -== Flags == -$FLAGS_HELP - -== Configuration groups == -Compose your configuration from those groups (For example, append hydra/job_logging=disabled to command line) - -$HYDRA_CONFIG_GROUPS - -Use '--cfg hydra' to Show the Hydra config. -",???,1,colorlog.ColoredFormatter,[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s,logging.StreamHandler,colorlog,ext://sys.stdout,INFO,['console'],False,1,[%(asctime)s][%(name)s][%(levelname)s] - %(message)s,colorlog.ColoredFormatter,[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s] - %(message)s,purple,green,yellow,red,red,logging.StreamHandler,colorlog,ext://sys.stdout,logging.FileHandler,simple,${hydra.job.name}.log,INFO,"['console', 'file']",False,MULTIRUN,[],.hydra,['hydra.mode=MULTIRUN'],"['benchmark.training_arguments.per_device_train_batch_size=2', 'model=NousResearch/Llama-2-13b-hf']",cli,True,"benchmark.training_arguments.per_device_train_batch_size=2,model=NousResearch/Llama-2-13b-hf",3,3,fp16+peft+bnb-4bit,0,PCI_BUS_ID,[],=,",",[],1.3.2,1.3,/workspace/optimum-benchmark/examples/training-llamas,"[{'path': 'hydra.conf', 'schema': 'pkg', 'provider': 'hydra'}, {'path': 'optimum_benchmark', 'schema': 'pkg', 'provider': 'main'}, {'path': 'hydra_plugins.hydra_colorlog.conf', 'schema': 'pkg', 'provider': 'hydra-colorlog'}, {'path': '/workspace/optimum-benchmark/examples/training-llamas/configs', 'schema': 'file', 'provider': 'command-line'}, {'path': '', 'schema': 'structured', 'provider': 'schema'}]",/workspace/optimum-benchmark/examples/training-llamas/experiments/training/hf-dgx-01/NousResearch/Llama-2-13b-hf/fp16+peft+bnb-4bit/2,training,process,pytorch,default,,colorlog,colorlog,default,default,basic,basic,default,False,0.0,True,float16,, -9,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,spawn,pytorch,2.1.1+cu118,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,42,,,True,1.0,False,True,,float16,False,False,False,,False,False,False,bnb,False,False,lora,,,,CAUSAL_LM,False,,8,,8,0,False,none,,True,,,training,optimum_benchmark.benchmarks.training.benchmark.TrainingBenchmark,40,160,256,1,80,3000,16000,True,./trainer_output,False,False,True,False,False,none,140,8,fp16+peft+bnb-4bit,cuda,NousResearch/Llama-2-13b-hf,text-generation,main,,False,False,1.14.1,,4.35.2,,0.24.1,,,,3.10.12,Linux, AMD EPYC 7742 64-Core Processor,128,540671,['NVIDIA A100-SXM4-80GB'],30.924877405166622,10.34765621889054,76.05532646179199,10.518658419037845,106.98020553588869,7.4780189100648515,experiments/${benchmark.name}/${oc.env:HOSTNAME}/${model}/${experiment_name},experiments/${benchmark.name}/${oc.env:HOSTNAME}/${model}/${experiment_name},${benchmark.training_arguments.per_device_train_batch_size},hydra._internal.core_plugins.basic_launcher.BasicLauncher,hydra._internal.core_plugins.basic_sweeper.BasicSweeper,,"1,2,4,8,16,32,64,128","NousResearch/Llama-2-7b-hf,NousResearch/Llama-2-13b-hf",${hydra.job.name},"${hydra.help.app_name} is powered by Hydra. 
-","Powered by Hydra (https://hydra.cc) -Use --hydra-help to view Hydra specific help -","${hydra.help.header} -== Configuration groups == -Compose your configuration from those groups (group=option) - -$APP_CONFIG_GROUPS - -== Config == -Override anything in the config (foo.bar=value) - -$CONFIG - -${hydra.help.footer} -","Hydra (${hydra.runtime.version}) -See https://hydra.cc for more info. - -== Flags == -$FLAGS_HELP - -== Configuration groups == -Compose your configuration from those groups (For example, append hydra/job_logging=disabled to command line) - -$HYDRA_CONFIG_GROUPS - -Use '--cfg hydra' to Show the Hydra config. -",???,1,colorlog.ColoredFormatter,[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s,logging.StreamHandler,colorlog,ext://sys.stdout,INFO,['console'],False,1,[%(asctime)s][%(name)s][%(levelname)s] - %(message)s,colorlog.ColoredFormatter,[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s] - %(message)s,purple,green,yellow,red,red,logging.StreamHandler,colorlog,ext://sys.stdout,logging.FileHandler,simple,${hydra.job.name}.log,INFO,"['console', 'file']",False,MULTIRUN,[],.hydra,['hydra.mode=MULTIRUN'],"['benchmark.training_arguments.per_device_train_batch_size=8', 'model=NousResearch/Llama-2-13b-hf']",cli,True,"benchmark.training_arguments.per_device_train_batch_size=8,model=NousResearch/Llama-2-13b-hf",7,7,fp16+peft+bnb-4bit,0,PCI_BUS_ID,[],=,",",[],1.3.2,1.3,/workspace/optimum-benchmark/examples/training-llamas,"[{'path': 'hydra.conf', 'schema': 'pkg', 'provider': 'hydra'}, {'path': 'optimum_benchmark', 'schema': 'pkg', 'provider': 'main'}, {'path': 'hydra_plugins.hydra_colorlog.conf', 'schema': 'pkg', 'provider': 'hydra-colorlog'}, {'path': '/workspace/optimum-benchmark/examples/training-llamas/configs', 'schema': 'file', 'provider': 'command-line'}, {'path': '', 'schema': 'structured', 'provider': 'schema'}]",/workspace/optimum-benchmark/examples/training-llamas/experiments/training/hf-dgx-01/NousResearch/Llama-2-13b-hf/fp16+peft+bnb-4bit/8,training,process,pytorch,default,,colorlog,colorlog,default,default,basic,basic,default,False,0.0,True,float16,, -10,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,spawn,pytorch,2.1.1+cu118,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,42,,,True,1.0,False,True,,float16,False,False,False,,False,False,False,gptq,False,False,lora,,,,CAUSAL_LM,False,,8,,8,0,False,none,,True,,,training,optimum_benchmark.benchmarks.training.benchmark.TrainingBenchmark,40,160,256,1,80,3000,16000,True,./trainer_output,False,False,True,False,False,none,140,4,fp16+peft+gptq-4bit,cuda,NousResearch/Llama-2-13b-hf,text-generation,main,,False,False,1.14.1,,4.35.2,,0.24.1,,,,3.10.12,Linux, AMD EPYC 7742 64-Core Processor,128,540671,['NVIDIA A100-SXM4-80GB'],27.047818660736084,5.915449301361359,66.39001893997192,6.02500204679365,93.4378387928009,4.280920932760473,experiments/${oc.env:HOSTNAME}/${model}/${experiment_name},experiments/${oc.env:HOSTNAME}/${model}/${experiment_name},${benchmark.training_arguments.per_device_train_batch_size},hydra._internal.core_plugins.basic_launcher.BasicLauncher,hydra._internal.core_plugins.basic_sweeper.BasicSweeper,,"1,2,4,8,16,32,64,128","NousResearch/Llama-2-7b-hf,NousResearch/Llama-2-13b-hf,NousResearch/Llama-2-70b-hf",${hydra.job.name},"${hydra.help.app_name} is powered by Hydra. 
-","Powered by Hydra (https://hydra.cc) -Use --hydra-help to view Hydra specific help -","${hydra.help.header} -== Configuration groups == -Compose your configuration from those groups (group=option) - -$APP_CONFIG_GROUPS - -== Config == -Override anything in the config (foo.bar=value) - -$CONFIG - -${hydra.help.footer} -","Hydra (${hydra.runtime.version}) -See https://hydra.cc for more info. - -== Flags == -$FLAGS_HELP - -== Configuration groups == -Compose your configuration from those groups (For example, append hydra/job_logging=disabled to command line) - -$HYDRA_CONFIG_GROUPS - -Use '--cfg hydra' to Show the Hydra config. -",???,1,colorlog.ColoredFormatter,[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s,logging.StreamHandler,colorlog,ext://sys.stdout,INFO,['console'],False,1,[%(asctime)s][%(name)s][%(levelname)s] - %(message)s,colorlog.ColoredFormatter,[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s] - %(message)s,purple,green,yellow,red,red,logging.StreamHandler,colorlog,ext://sys.stdout,logging.FileHandler,simple,${hydra.job.name}.log,INFO,"['console', 'file']",False,MULTIRUN,[],.hydra,['hydra.mode=MULTIRUN'],"['benchmark.training_arguments.per_device_train_batch_size=4', 'model=NousResearch/Llama-2-13b-hf']",cli,True,"benchmark.training_arguments.per_device_train_batch_size=4,model=NousResearch/Llama-2-13b-hf",7,7,fp16+peft+gptq-4bit,0,PCI_BUS_ID,[],=,",",[],1.3.2,1.3,/workspace/optimum-benchmark/examples/training-llamas,"[{'path': 'hydra.conf', 'schema': 'pkg', 'provider': 'hydra'}, {'path': 'optimum_benchmark', 'schema': 'pkg', 'provider': 'main'}, {'path': 'hydra_plugins.hydra_colorlog.conf', 'schema': 'pkg', 'provider': 'hydra-colorlog'}, {'path': '/workspace/optimum-benchmark/examples/training-llamas/configs', 'schema': 'file', 'provider': 'command-line'}, {'path': '', 'schema': 'structured', 'provider': 'schema'}]",/workspace/optimum-benchmark/examples/training-llamas/experiments/hf-dgx-01/NousResearch/Llama-2-13b-hf/fp16+peft+gptq-4bit/4,training,process,pytorch,default,,colorlog,colorlog,default,default,basic,basic,default,False,,,,4,True -11,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,spawn,pytorch,2.1.1+cu118,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,42,,,True,1.0,False,True,,float16,False,False,False,,False,False,False,gptq,False,False,lora,,,,CAUSAL_LM,False,,8,,8,0,False,none,,True,,,training,optimum_benchmark.benchmarks.training.benchmark.TrainingBenchmark,40,160,256,1,80,3000,16000,True,./trainer_output,False,False,True,False,False,none,140,1,fp16+peft+gptq-4bit,cuda,NousResearch/Llama-2-13b-hf,text-generation,main,,False,False,1.14.1,,4.35.2,,0.24.1,,,,3.10.12,Linux, AMD EPYC 7742 64-Core Processor,128,540671,['NVIDIA A100-SXM4-80GB'],16.171680450439453,2.473459707702364,39.22436714172363,2.54943565153479,55.39604926109314,1.805182884589461,experiments/${oc.env:HOSTNAME}/${model}/${experiment_name},experiments/${oc.env:HOSTNAME}/${model}/${experiment_name},${benchmark.training_arguments.per_device_train_batch_size},hydra._internal.core_plugins.basic_launcher.BasicLauncher,hydra._internal.core_plugins.basic_sweeper.BasicSweeper,,"1,2,4,8,16,32,64,128","NousResearch/Llama-2-7b-hf,NousResearch/Llama-2-13b-hf,NousResearch/Llama-2-70b-hf",${hydra.job.name},"${hydra.help.app_name} is powered by Hydra. 
-","Powered by Hydra (https://hydra.cc) -Use --hydra-help to view Hydra specific help -","${hydra.help.header} -== Configuration groups == -Compose your configuration from those groups (group=option) - -$APP_CONFIG_GROUPS - -== Config == -Override anything in the config (foo.bar=value) - -$CONFIG - -${hydra.help.footer} -","Hydra (${hydra.runtime.version}) -See https://hydra.cc for more info. - -== Flags == -$FLAGS_HELP - -== Configuration groups == -Compose your configuration from those groups (For example, append hydra/job_logging=disabled to command line) - -$HYDRA_CONFIG_GROUPS - -Use '--cfg hydra' to Show the Hydra config. -",???,1,colorlog.ColoredFormatter,[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s,logging.StreamHandler,colorlog,ext://sys.stdout,INFO,['console'],False,1,[%(asctime)s][%(name)s][%(levelname)s] - %(message)s,colorlog.ColoredFormatter,[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s] - %(message)s,purple,green,yellow,red,red,logging.StreamHandler,colorlog,ext://sys.stdout,logging.FileHandler,simple,${hydra.job.name}.log,INFO,"['console', 'file']",False,MULTIRUN,[],.hydra,['hydra.mode=MULTIRUN'],"['benchmark.training_arguments.per_device_train_batch_size=1', 'model=NousResearch/Llama-2-13b-hf']",cli,True,"benchmark.training_arguments.per_device_train_batch_size=1,model=NousResearch/Llama-2-13b-hf",1,1,fp16+peft+gptq-4bit,0,PCI_BUS_ID,[],=,",",[],1.3.2,1.3,/workspace/optimum-benchmark/examples/training-llamas,"[{'path': 'hydra.conf', 'schema': 'pkg', 'provider': 'hydra'}, {'path': 'optimum_benchmark', 'schema': 'pkg', 'provider': 'main'}, {'path': 'hydra_plugins.hydra_colorlog.conf', 'schema': 'pkg', 'provider': 'hydra-colorlog'}, {'path': '/workspace/optimum-benchmark/examples/training-llamas/configs', 'schema': 'file', 'provider': 'command-line'}, {'path': '', 'schema': 'structured', 'provider': 'schema'}]",/workspace/optimum-benchmark/examples/training-llamas/experiments/hf-dgx-01/NousResearch/Llama-2-13b-hf/fp16+peft+gptq-4bit/1,training,process,pytorch,default,,colorlog,colorlog,default,default,basic,basic,default,False,,,,4,True -12,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,spawn,pytorch,2.1.1+cu118,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,42,,,True,1.0,False,True,,float16,False,False,False,,False,False,False,gptq,False,False,lora,,,,CAUSAL_LM,False,,8,,8,0,False,none,,True,,,training,optimum_benchmark.benchmarks.training.benchmark.TrainingBenchmark,40,160,256,1,80,3000,16000,True,./trainer_output,False,False,True,False,False,none,140,2,fp16+peft+gptq-4bit,cuda,NousResearch/Llama-2-13b-hf,text-generation,main,,False,False,1.14.1,,4.35.2,,0.24.1,,,,3.10.12,Linux, AMD EPYC 7742 64-Core Processor,128,540671,['NVIDIA A100-SXM4-80GB'],20.308139324188232,3.939307226670201,49.545104026794434,4.036725806284275,69.85324501991272,2.863145440744907,experiments/${oc.env:HOSTNAME}/${model}/${experiment_name},experiments/${oc.env:HOSTNAME}/${model}/${experiment_name},${benchmark.training_arguments.per_device_train_batch_size},hydra._internal.core_plugins.basic_launcher.BasicLauncher,hydra._internal.core_plugins.basic_sweeper.BasicSweeper,,"1,2,4,8,16,32,64,128","NousResearch/Llama-2-7b-hf,NousResearch/Llama-2-13b-hf,NousResearch/Llama-2-70b-hf",${hydra.job.name},"${hydra.help.app_name} is powered by Hydra. 
-","Powered by Hydra (https://hydra.cc) -Use --hydra-help to view Hydra specific help -","${hydra.help.header} -== Configuration groups == -Compose your configuration from those groups (group=option) - -$APP_CONFIG_GROUPS - -== Config == -Override anything in the config (foo.bar=value) - -$CONFIG - -${hydra.help.footer} -","Hydra (${hydra.runtime.version}) -See https://hydra.cc for more info. - -== Flags == -$FLAGS_HELP - -== Configuration groups == -Compose your configuration from those groups (For example, append hydra/job_logging=disabled to command line) - -$HYDRA_CONFIG_GROUPS - -Use '--cfg hydra' to Show the Hydra config. -",???,1,colorlog.ColoredFormatter,[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s,logging.StreamHandler,colorlog,ext://sys.stdout,INFO,['console'],False,1,[%(asctime)s][%(name)s][%(levelname)s] - %(message)s,colorlog.ColoredFormatter,[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s] - %(message)s,purple,green,yellow,red,red,logging.StreamHandler,colorlog,ext://sys.stdout,logging.FileHandler,simple,${hydra.job.name}.log,INFO,"['console', 'file']",False,MULTIRUN,[],.hydra,['hydra.mode=MULTIRUN'],"['benchmark.training_arguments.per_device_train_batch_size=2', 'model=NousResearch/Llama-2-13b-hf']",cli,True,"benchmark.training_arguments.per_device_train_batch_size=2,model=NousResearch/Llama-2-13b-hf",4,4,fp16+peft+gptq-4bit,0,PCI_BUS_ID,[],=,",",[],1.3.2,1.3,/workspace/optimum-benchmark/examples/training-llamas,"[{'path': 'hydra.conf', 'schema': 'pkg', 'provider': 'hydra'}, {'path': 'optimum_benchmark', 'schema': 'pkg', 'provider': 'main'}, {'path': 'hydra_plugins.hydra_colorlog.conf', 'schema': 'pkg', 'provider': 'hydra-colorlog'}, {'path': '/workspace/optimum-benchmark/examples/training-llamas/configs', 'schema': 'file', 'provider': 'command-line'}, {'path': '', 'schema': 'structured', 'provider': 'schema'}]",/workspace/optimum-benchmark/examples/training-llamas/experiments/hf-dgx-01/NousResearch/Llama-2-13b-hf/fp16+peft+gptq-4bit/2,training,process,pytorch,default,,colorlog,colorlog,default,default,basic,basic,default,False,,,,4,True -13,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,spawn,pytorch,2.1.1+cu118,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,42,,,True,1.0,False,True,,float16,False,False,False,,False,False,False,gptq,False,False,lora,,,,CAUSAL_LM,False,,8,,8,0,False,none,,True,,,training,optimum_benchmark.benchmarks.training.benchmark.TrainingBenchmark,40,160,256,1,80,3000,16000,True,./trainer_output,False,False,True,False,False,none,140,8,fp16+peft+gptq-4bit,cuda,NousResearch/Llama-2-13b-hf,text-generation,main,,False,False,1.14.1,,4.35.2,,0.24.1,,,,3.10.12,Linux, AMD EPYC 7742 64-Core Processor,128,540671,['NVIDIA A100-SXM4-80GB'],39.81309175491333,8.037557142507247,98.25630164146423,8.141971422038537,138.06939482688904,5.794187777841986,experiments/${benchmark.name}/${oc.env:HOSTNAME}/${model}/${experiment_name},experiments/${benchmark.name}/${oc.env:HOSTNAME}/${model}/${experiment_name},${benchmark.training_arguments.per_device_train_batch_size},hydra._internal.core_plugins.basic_launcher.BasicLauncher,hydra._internal.core_plugins.basic_sweeper.BasicSweeper,,"1,2,4,8,16,32,64,128","NousResearch/Llama-2-7b-hf,NousResearch/Llama-2-13b-hf",${hydra.job.name},"${hydra.help.app_name} is powered by Hydra. 
-","Powered by Hydra (https://hydra.cc) -Use --hydra-help to view Hydra specific help -","${hydra.help.header} -== Configuration groups == -Compose your configuration from those groups (group=option) - -$APP_CONFIG_GROUPS - -== Config == -Override anything in the config (foo.bar=value) - -$CONFIG - -${hydra.help.footer} -","Hydra (${hydra.runtime.version}) -See https://hydra.cc for more info. - -== Flags == -$FLAGS_HELP - -== Configuration groups == -Compose your configuration from those groups (For example, append hydra/job_logging=disabled to command line) - -$HYDRA_CONFIG_GROUPS - -Use '--cfg hydra' to Show the Hydra config. -",???,1,colorlog.ColoredFormatter,[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s,logging.StreamHandler,colorlog,ext://sys.stdout,INFO,['console'],False,1,[%(asctime)s][%(name)s][%(levelname)s] - %(message)s,colorlog.ColoredFormatter,[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s] - %(message)s,purple,green,yellow,red,red,logging.StreamHandler,colorlog,ext://sys.stdout,logging.FileHandler,simple,${hydra.job.name}.log,INFO,"['console', 'file']",False,MULTIRUN,[],.hydra,['hydra.mode=MULTIRUN'],"['benchmark.training_arguments.per_device_train_batch_size=8', 'model=NousResearch/Llama-2-13b-hf']",cli,True,"benchmark.training_arguments.per_device_train_batch_size=8,model=NousResearch/Llama-2-13b-hf",7,7,fp16+peft+gptq-4bit,0,PCI_BUS_ID,[],=,",",[],1.3.2,1.3,/workspace/optimum-benchmark/examples/training-llamas,"[{'path': 'hydra.conf', 'schema': 'pkg', 'provider': 'hydra'}, {'path': 'optimum_benchmark', 'schema': 'pkg', 'provider': 'main'}, {'path': 'hydra_plugins.hydra_colorlog.conf', 'schema': 'pkg', 'provider': 'hydra-colorlog'}, {'path': '/workspace/optimum-benchmark/examples/training-llamas/configs', 'schema': 'file', 'provider': 'command-line'}, {'path': '', 'schema': 'structured', 'provider': 'schema'}]",/workspace/optimum-benchmark/examples/training-llamas/experiments/training/hf-dgx-01/NousResearch/Llama-2-13b-hf/fp16+peft+gptq-4bit/8,training,process,pytorch,default,,colorlog,colorlog,default,default,basic,basic,default,False,,,,4,True diff --git a/examples/training-llamas/artifacts/Llama-2-13b-hf/peak_training_throughput.png b/examples/training-llamas/artifacts/Llama-2-13b-hf/peak_training_throughput.png deleted file mode 100644 index c5be42f5f38090908e5723326f875ec233cd33b1..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 37616 zcmb5W2{e`O+XZ~kJW***qLf(~5@kp!W5zO+C<-aF%tNEnAVf%}3`s~xW`&4kDwJf% zEb}~`Z{Nr7KYZW2*0ltS^18GZeVnsC_{oIlX?@ zb%Ve4&KEwMTl-|qp**SCWc|~XFX^s*7;y@9^?m;Qx!C=b6ytu0iAM@*%!}D>G07|& z`o6Eyjv})Re_%J1r{|>ooxjf_Rq{8jJiC^M<3ntPTF2K(lh5Bxda&Dr_G$es@|4@; z-*N?gDJ#goxm?^$$4vfhvQ+N>c%dTq*7o+>_`k%0$({E0_P-h%^SW}*ojcc75hkVl zN8W6Deu66cr`Y>MwE9qM(Mmnf;m;?Zi{DO8-apabSYyZ8y7E_!^Co|VqSrbp)pk-r zEwes9UTSi?%?=d{xKTSp1e$VP6_d{8l=$;(;`aACa&w8*SkI}q7t21imp#opxWtux zxxvQd%ip;*x&1rNU%p%_+T>E;Ej3?K6RTp!5*iS-elYfM|L>gmUQ3~GGt@_{P9=oNuOYoAQSZ=-jl6Q>e}8c4rg9#= zv#MpRw=OLEVbT3fH8INDhC9mdD@1=RE93b-cH_nklb@+v{QUfZ!NI;fTKrpuP4bfs zi>o^dTq3wMV&341rpNorex{jqS4XeJ6^d(W{H*?b>K1;N>Z@EwcQ;kUs`c0kW=?4( zrBxq3eBeEo9q2kUZZ$Q0@vU)1V(%d3O?tX<-_JDNvP%`AF2s^Js$L~^a)qNW45CdhuO*fWlsfS&!n3de)=># zH__C4P<+mIu=xmHE8az&9p@JE0|&*#sxoYh@j}X?6`yLqF`%0n@7uL|_aY&o6!*qY zqun)*BOR-8H?2c|idoMNbd*2aym|9r0i)DoysD2MKWe9&v*5Oy)0^C0AS6tusw96@ zy*Lr49=#$rH&?gJeX?cgR*}nvQqUYy`QGXda+1#dFH$J?NhCpo=9FzqYM(7m_me01ptuF0vNd-&3& zOZ7i~ROPxl_c!GwjpgE%XR)>-T>msTd)&LX91&}b+u(XPZk`lSoN#_!} 
zou^D*AkrYZ^MaG;R^q<@ea2`LDBnZW*`)12AZN1i_Kpz+_K#W(mhZQ>j$-h9mFb?VR7~I z<&8!yTB~rxS|JVb@U$jRlGS&}2hH}y8O%*A@HjIG$Q7h9Ot?_s+TH`CgW`0ec$IWQA0G#4^7Iw#_AAd(O`d*NX5)1pDg^g^7^w0bXtW9FZ?b#!`@ zXsHlaiH(r{Jo-fROtWaWfEd1cy~FrsgSGFaE$&K_{`SgWOgHwte5%_x(6VWphhUTE z4724tcrf?eY21w(o0=-Pgqe1i44u#C`joMkBfDhzCK}1R(~Ubpu#J4P9g-j-xpFlB1 zO97Us7nFntS(@+L^0AoL&_02b@4D`%`kxYNeqX`M z6kf~l568K48~>S=M}wsAf4o!0%kaM^E8_EXcG<&GWY>K$m9Z3L!gV9aiCgDXztv?= zHnYLTWz-6M-QUqs85l<0r#)0Hb@ssHb+d}ivpX|}PVl^G|VgyL<0CbW8Ufaz70y4Mh@Ny$ak*Jr zg)6uMbga;DBCJ9H%FWHYh=y)Ct@@fZ<*jne`sXDUy1C`B<{zoPq$$GDV>_UOA4&cu z5?RuX1wgwjSz?J?$gpN+;+<8ySNGK)w?91_=BKkralw+L<|0~9{LOW~4L0e=L0W^I zE*}=#z8*bpU2iTc|8ir7au=;M4lgtBo2qCm((J!Dz##0-vLW9}9hFcxh?X`>mg)wtDqy;X~%n@5^ae zHFoLM1t~@42Je`sFEzHPyNAHtA$JMT#xpF{&Z4np9J={wXxaq zaM_Jz7sOg&Z$!k7Yfl~%9%Ww(35Xmfg_{p#sF1WZT;=$*$=X_4%}xz2sq3vD#-9)a zq$~(EZQDktrF9Oz9+Rb0(RV`SO}qRZLAhC${lETF zMCsrH4=|=a_Xp`y6&ioq_2I*sZr!^Z0q}Q^{cy0?nSK@HC%AR4<>>~Y6EQJ4O4C8f zcdSy*f|TiV=1jGnUl%Jc%q>VRgopbDazs!brh7p#Xx^_KBd23-|k(-;V@U{Jx zj~_plmX_9HI{SncZkCQ#y#kk*ZOL~Nh8AFO4n{`$W^ANS+pCmvBjQ;1AJpajk-{o7&l_n~QKc$sa zi+6}C@nFa;~KxDAY1az;T{Z(Z{4coFlWxE(!&oHFI*00iSKD< z)>KUiGvCr_P<;YOhMhi3+usMhEYD7Gn#3}5Ex=qMh>)CzBzi)RrLAhu`ft3Qtm zBr@ppj-b!VgS#kG`~D4z{By~V1SN9^hi+>wMZrXfe+DD9?X!6JGH^t9$i7j%Zo#~< zN+;z()d*g;wX?ei1FNa2>AhwRL(HZ`p$u$p;z3`CgVcXOFpMnqZpejyOq zf5wJ74Gyc_G^DY6{`~np2M@~Y?ftIk()RSk#9+A5I$Wi^pm9fy8ikx~6tD5;>orWL z6yWP;>f>|qFYmLz9}Q}#zppy(+r!`gaG6MplCSVpLh{{Q(Cdl}?9An+odAswM%I5V z-nq`?Os45f;!K4RqFOCIy%1np`h8GV4gyy)QA|e0=Oa%#Om9@zY?<+4P?|#$kHmJ? ztVt6mw&DP?Jf5DO8fb}O*q`jPpEn=iB5>{2Ez4VO=YH*~rh2hMrj9=?!5ji~75bA6 zCjI&aP=;Mr7WV^Hqn$A*e$*Xpb~6B&-b7T@YBXKtXbu9W>6+bN4!gwihulip=>MI;eI5Ouee$QzUaM2==(RvUVpxPFeIDm?_i)2Y zOsvdakRr-#>cGOW4Ly#1d&xBB1x_U<(!>(cs*I5ZUQ_(M$!lqv!)%IvAnNU3Benbw z+UqG{&CrRXCWWa8D zCPvBc{CR`OLx&PwJP7X~^A4Nd*`Skk2@CeHn!4q2# z-`zUvh@lRe^<{=Sa5~8kjk&S5@v(mgvG)=BGw=t(+n_;%a&O*Lg4S(uD1Q>P=gZ~i z?(Hh6)*d`qCHVU9&<9N$152uBI1Sbh^2umDof=!K#f>K`Y6-U@AA=&6&J2-`&7jt; z+lxq#;VS-MW!o7u%+NdkRsP~d;KbHf6g&83F1DuC7!t|(vLww@0`%C-bL%3e9v5YmWxC%2#=$L*X*FX;8TFIyJR zqMbT5sJ_1b-Nu=F`?_T%yMH?>fp2Qd?%WBOuaVJM5v<_4XpxJ4#Lrx+>*M1yhL*s_ zcef|A6?^VC+kQ|{Q_*{5k7t#YOo2T0z7_0m64Qk{Mg zb?{IQr#^iiX_TOSa!&bq%%+`M^&M4mUmupA%&;rGY^bXnELBU@@!hK$Yv*%rCVe&A zep2z`DdZ@#)4dZG-+vO&bzWArcUV+ZNJ+JKqLEBzQdch^FSS_d$?d3x!&Hd9;uWxT z>C$$-TE@;<$xFU{x1l^y;E^?um+;V`Hh0#FZ-IUuKQ-a#jSot1E}lePM>8G?fmVxb zm%*2$egvq3&q$9>-RrfZV&som?D1mG=x3=y-a^yicm=Qz+nHRFM z`W~>3KKwB3zz3t`%*>v_*DJP+=%Tvvgz7ley_;|5=U-p)qs*_SrltZ*#O)o-DJIm# zB_)c(RQ#Fkqy;;Lo;_=-bnN{3UqBe2zkDG{|NY0%E8d&dZrU_<`+VW+?dMFLs>~m0 zIV!+iZJni}T9NL|cF=ok+-TB>T&z_003(EOHjn}{d1HP#)?d{RoYVcV-mM_0j^Qgbp zr$Jv+n&J`@H=8g-8b_ zk;CWN+1eJ@c=^<1HcYXyg0>#`%Yg%OjUX7RA?u2b*>>Yrte)>xYIfSEgN0RrL{0v{ zK*xnbM@HwKJ#)SAEB`~4`)7^sh=Bw1-g8&wZ6u%NWeq_=HMg}ovikB+Suben3m=G{ z3dm>P-2FPOB)d7=wdv*M2i4x`_YIPCwVHnnVJp0w02E+hHrD6q(8N6P=*Erf*HkLM>mE+7>cK?+yD%+tSxH;4AY`-))!U= z^ijU4;o*kOWkfRCLw!-p$HZuvPMDCFo4ZcKWVYFM4OyzqsGWgyBc!FvabAhcUiK%& z6$q1n@%3p6h(MGWVPR@)cl0!_G{P97{p)>1YO222F3`wh)5ZPv+d-0G!^dPm{GI~` zZpdh&`m`}unarzeXz=~#J+Iw6SHv00D5swx^MCpI`7rexn!L?iJKBE#f1bKXr7qu2 sUpK}7@e~FHHOklc|2x9};g`QE??}9@*wAdhL4p66Pqa2mHg(?mU&+?<2mk;8 diff --git a/examples/training-llamas/artifacts/Llama-2-13b-hf/short_report.csv b/examples/training-llamas/artifacts/Llama-2-13b-hf/short_report.csv deleted file mode 100644 index eb2a3c8a..00000000 --- a/examples/training-llamas/artifacts/Llama-2-13b-hf/short_report.csv +++ /dev/null @@ -1,15 +0,0 @@ -,Model,GPUs,Experiment Name,CUDAs,Per Process Batch Size,Sequence Length,Training Throughput (samples/s),GPU Name,Num 
GPUs,Num CUDAs,Num Processes,Effective Batch Size,Group -0,NousResearch/Llama-2-13b-hf,['NVIDIA A100-SXM4-80GB'],fp16+peft,0,16,256,10.0525442846563,1xA100,1,1,1,16,1xA100-fp16+peft -1,NousResearch/Llama-2-13b-hf,['NVIDIA A100-SXM4-80GB'],fp16+peft,0,4,256,9.049662155600046,1xA100,1,1,1,4,1xA100-fp16+peft -2,NousResearch/Llama-2-13b-hf,['NVIDIA A100-SXM4-80GB'],fp16+peft,0,1,256,7.137251046214708,1xA100,1,1,1,1,1xA100-fp16+peft -3,NousResearch/Llama-2-13b-hf,['NVIDIA A100-SXM4-80GB'],fp16+peft,0,2,256,7.866831805643994,1xA100,1,1,1,2,1xA100-fp16+peft -4,NousResearch/Llama-2-13b-hf,['NVIDIA A100-SXM4-80GB'],fp16+peft,0,8,256,9.926679807775873,1xA100,1,1,1,8,1xA100-fp16+peft -5,NousResearch/Llama-2-13b-hf,['NVIDIA A100-SXM4-80GB'],fp16+peft+bnb-4bit,0,16,256,11.365145233408818,1xA100,1,1,1,16,1xA100-fp16+peft+bnb-4bit -6,NousResearch/Llama-2-13b-hf,['NVIDIA A100-SXM4-80GB'],fp16+peft+bnb-4bit,0,4,256,8.767929955964831,1xA100,1,1,1,4,1xA100-fp16+peft+bnb-4bit -7,NousResearch/Llama-2-13b-hf,['NVIDIA A100-SXM4-80GB'],fp16+peft+bnb-4bit,0,1,256,5.054903797955798,1xA100,1,1,1,1,1xA100-fp16+peft+bnb-4bit -8,NousResearch/Llama-2-13b-hf,['NVIDIA A100-SXM4-80GB'],fp16+peft+bnb-4bit,0,2,256,6.765100017310723,1xA100,1,1,1,2,1xA100-fp16+peft+bnb-4bit -9,NousResearch/Llama-2-13b-hf,['NVIDIA A100-SXM4-80GB'],fp16+peft+bnb-4bit,0,8,256,10.518658419037845,1xA100,1,1,1,8,1xA100-fp16+peft+bnb-4bit -10,NousResearch/Llama-2-13b-hf,['NVIDIA A100-SXM4-80GB'],fp16+peft+gptq-4bit,0,4,256,6.02500204679365,1xA100,1,1,1,4,1xA100-fp16+peft+gptq-4bit -11,NousResearch/Llama-2-13b-hf,['NVIDIA A100-SXM4-80GB'],fp16+peft+gptq-4bit,0,1,256,2.54943565153479,1xA100,1,1,1,1,1xA100-fp16+peft+gptq-4bit -12,NousResearch/Llama-2-13b-hf,['NVIDIA A100-SXM4-80GB'],fp16+peft+gptq-4bit,0,2,256,4.036725806284275,1xA100,1,1,1,2,1xA100-fp16+peft+gptq-4bit -13,NousResearch/Llama-2-13b-hf,['NVIDIA A100-SXM4-80GB'],fp16+peft+gptq-4bit,0,8,256,8.141971422038537,1xA100,1,1,1,8,1xA100-fp16+peft+gptq-4bit diff --git a/examples/training-llamas/artifacts/Llama-2-13b-hf/training_throughput_bar_plot.png b/examples/training-llamas/artifacts/Llama-2-13b-hf/training_throughput_bar_plot.png deleted file mode 100644 index 76fe3f70b642ce4521fe893783e636af4035a844..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 32054 zcmd?RbySw^w=Vip(k0y>NJyuMG$@KFAV@cYqO^2(3!=1ABA|44r-F!p2nfzdcR!_^)r5#rI}p-?EoyLS{HqEP65 zC=}W>&L#L8(SeD1_|FZeTRKh}c9u@Arj8b<`=(Cz)^<+TR%XmD7LG5h>}-Yk#QB7H zn4da1*}stB=ePOSH}KgxKH+CRIq!fUf@^NCr`zVuZg+h5{-Bpy+bbGh{ z+gXQV>;il1Ae&B|78eVxv>uC0|7)e-&&qoJHJ#W;u7#E6>pJx%WLSoDw@3*THD%=$ zlct!?lZr?1J^Wj_Cl6NL-JHICeYcaGOqcHY=WCP-2_~8%e0>lT(PTi8!k4$P3MxD- zEbIpXnd}w#5^cwP1^;`ePXLV#{`aBq|E7O#+05`f*synW>=~%j$bAs3!v02#IozUy znv5)KMa}W?`mf4YUmn+Z?B(StWYXgi6Njp>w{JFIq%GRGySumc_Hy@+A3cu=*YPSzIelHmD*VWb8 zJ3DvIe|fWge&+co^gtnBEff9tV3T-y$VWs-=%It6I|nBx4hA-nV)*5&CQqN@U}Ixf z*iA8JW@ajnIP5N|@2(8vU%i(Ql#^qbkjmemE~AvlXE^=m&n0tnb8=2KR6MtKSDo`J zGfse_ew;TvZ0FKImU`B0!UEm$OXZe*0aaCEA;e74$-+;Rk9`JS7vy+7Z4V)I99kVI z?k+X&Chv));!n$W(JC?s-`(A%FTCk6s~E*_Q%rmPiT3tvC&hkijl-PDjyJdq z+y1T#&Qu6hreCg>@;W4mzxF7gsY%*ne+`#|gQLN18@;u)HR+}+-|6XTheZsd)ba1R zd-v~mGWnkRo3@9@X=rGe`Zzg>B=8wVkh0$PIv9=1&d-l*zBs3A$sOsPZ1fcLK6VNU z4ldOe#^*O`kiByUC+Gg#oE5dE$l4c+v!_QpPe+OvloR=0e*DPEqg#f#(3{wueJ?R? 
diff --git a/examples/training-llamas/artifacts/Llama-2-13b-hf/training_throughput_line_plot.png b/examples/training-llamas/artifacts/Llama-2-13b-hf/training_throughput_line_plot.png
deleted file mode 100644
index 967ec40547f48a9daa9021f5dd85c9aac4b1907c..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 47250
[binary patch data omitted]
zNq;Rq2oH@P*J`&9<}9rG#U`-Untt5Eq9qGbxBwBY|yx zDbZZmF`U`7bZ--3poP-X<)*yEo;`bh7KSY}dQo(A;u90o!FL!inBvbZC>V{(mu=0Q zJ&)iJlO_VFsok7t#?PY@qSo#bAkcx^5YU=9NtQmQ8$9R!k&NsqhgUooZAn;77%bSvH{ z3xJj%D!ndB>{!L83*N6`LOU`x}nT7ToEbM zV!7+swJgrRG>Dv5m(K$OkGi@l?`2*(lTY90pZM@`cSd>JR~z>`zHe^dA1xPm+j07@#X-!%V zLl~1)uCbc9-gKmQaUsQd$$4#Mm~fSy3x={{Rg32mp~Iz zCR?885f)ySZ^iE6;W0E3bVJAAe6*OnB{j}UkXSCN6^_bBQEe|q@LX*1_0C?RvpNwT?WjE)xO0QqPoah@JDhUY*S$TQ)A1bY0O-BgD8F%p`mNs&d z(Nv4&zHRZWtb*teLOI;i2qyO7`*Slh4ZApT)642xw`~K;uo!*O)t9d5(6b9~oYkMd zeqGplGc|R&ghXo4#lY{hc4hxj47e(wBI>sq>;xK+ z{gT_2Z5i;hUT{F^%=J?ZPIW6is!m~MJgSOiw6uiK819~)IDGr+clS*r@acF~u4IRf zd{9v#t*XlV{{8zV2RA+PuFxcyVE;H;;-rHA>Xw&Wf1`r4lEeT*ySC*Se0Ug2-nIes zX=J|?39!Jcv99o0jR}=CeG*I4(cO10b9R3%*@RT&(uBj(8F9TJ$71a`9L=wbfpHUWx1B*M0x(Mg?ReUG`JZ*MmRfC;~MoB~jeSAW~0qm*u15YUov^oc` zyT92)q8{=|HocW^b0TDm6JQ(Xq4m)YbE*5zE;S?;AEB$E2qsM6_l@NmsZGh6hrrST zI$;@#p&=%C#tJ%w@SyhrLvxF}2Q1W7lbeWeG}Tm%zuiWJkBxcGk3C*amV+=zTG(oM zf7u@LyGfHYWT2szCf|arP{h+gc&)qe&|KoWF(|f4D=y~6xqZskaMeC^V(-gTeF-c! zL0}X&3mEU}$kro*|1R`EQAvp*63{NRDU?0d?_c3a$o_SSn;taGBz039_+Jp#y|Cle zNav9;mXwpbW7P%LhBqMid6}$vvA-~`^28_Rmf_BeZLaDfBdwCVxzK!p8Q^V6%-`l` zeveH}C1&SzLXOk&@;JVJ{o2-!Qk+}f!zr_zDQ4B-!swC zCXvTW$G~sZIX=Jc6jGLt950RE04D+lds4i-iZce~qytq5-pjH#PtG38vdr1&kZv4T zJo+2_4l=0OfGBE(ErvuM-@(t#9geWL-DRcz$ttHo4^zOo`Kdkc-wR_or-#xnF1`u8 zuI7Ufj7$QIO-!7DHH5fDKrDex(-HIY^Ous78)>m;JB4)0*(Gn69%{+5zzT-uZ!j@2 z8JQ9m79MIZl)$m~>)K%J>E}mgYIe*$z~nt|uv!@g25Ogje7JyUl~jZ>#<9*<7k^lh z&LpZ4PM<#Qh;=0~PcoDvC&UAuB8WI+2GCXovdc+-ZKxJf;h}LN1tb8{Qcm}&sj0#e z$Dq*T6xI{SO8)#-^7`BbF&(yiD@E+Tk(ZZO{%`PV?HEUSh}Oo9=a4C2t+ro(n)~4N zSii5pQ9(+~#7H}-eWgxw557v_BaB>JT=Kn@Y)iK@GBMdWWM*WLJZa1J?b`<}zwTzh z$7L`(9eu9@yiZJnZ=ua)ITx2e*i55CULhEWR!4CNO9O~I13e}L+HljIJImZ_Xp4xM zY|HX(%gjMjx=l+kD%=z=@g_Iq`eOL0(L`hGx=_8CIS!9e- z>}Z5-^9w)13d=YN@v!+50Hhu9o?6H%z)&iK_hW^iAOqp|t>Y9LaSo0THIv7Rmyb^c zJz+#GD#x4eY!j^d{YvL~Q_~i>0Vz~c(~`Cs+Kkqz>DCbr5Qv^kLr1ZQ2M_@9)|=nZ zIt48$rLQmX>1kZ$NU9C%pU*lvq;5{7f%kuewDRQlpw_cmU1G&_1dfyl6Q18XRES#t z52569lSl$l1RI9Ay@%innVOkBDm~tn_U^08L^(QtV^?Z&kv0p8TjkhW*^wXLt|DL?A(VX_l4>&U%nhfD1hXpF`8)z0)v#EfJPt&Ii!9lzkvCl&s1Dm z6!t&x>@NuOFlyoWY@TM+vH=4-ZGZ>&4-ZpET+vt27W(ny$Gm+rA}(_{TW6A!le6$c zbMW*m0?AJtHzmd(lKCtwhlcg}nwpw`rG+V1+QK4R8aJg1StDsJt&Z8*w%iCf00jX< zx1wEAWaQ;d2vYEJ%RVIUBRm$rp~sj4j0pC0gLN)9WrctM8zGGecdQmuIbAzoFGx+~ zF{VpASz7$DM0H3^NLWT3#1%NjOTbdo5C5(D?d&U^A2G4993zuaEj;~ z+G1cpw+IRT@Kd`E=nl2UA4)Mfc_kogk#7A>o5XkOO{_qjtmo(FJRvg!i2*;Vr-}gl zk@}NU+#g3r7hIgy_YN}QY|;}DB6wsuzbmuRhWYy>@8v4Ed#0*DUwqxo7A>IrNd1HB z4ZBrp$DLmhsT~p$GFHp5+!3NIq~$4zE+}VCXU!4#gOA{2jfi%Ydk=Qq)KOM`Lv;li z#szV`bEv4Wc`d=m#}F14hT*OP*f!a*uYBiCj`-o~AC%9|dA{5cA_{!yUzk z+0ifJc(na~*cuHH&KHf1bVSYKtRgs~qRG53nrnz~*x%M{iOu<`28ewnC5f|V&tm$U z%Wl20^xfH{M?m^26YhmsS)0+_6*N9{3TSI&=3{b9}gkT=e z+ByprHfuPDIOu7bL1bF{$UU?}LFw`6&yO+Khb{Qt&^kd+jJ4T%^gArg3ibw#tONdp zqj0A!p&~q-5Y-Lf1`AC2@uiEKW)%?RkWyr{Wy_=X_V$W(kP9T0rhlg{VgAKMv{*2J zq4g>Jmr)@lb~d(4iHXrDS_mGsti;RF)-Vyx9bL`x%4{QBgPb8JKq; z!zqH|F+gL`Awa9sU;KGde|&DC`txU}C-wFHxrGnw$Xvnr zxDV#zagiY)t=G1Ctd9LT977U$hc`Vkw}0>6Gr**p_tDJdze92_eES)Kz}TQXYa|HHl+Lza+!R+ydq zu81SUNc@CyvYoQ2^vfUn{hQB)R?()e3@a%BUZA?(UVHRU*q{QrvEqB1H>=d?%F0U8 zcR)-nAaD-cQ|Hm_a31u;0HG97@goCiUEGO4Sjwo!K(frdC(`!rsqCB_UXY^RMXX-b zhSZhy2~^4jxk^e0W}W7~U2Z53j|-=wEc{<$xc}XcUzX?s7-bE2Qe&qH{FrSqH!7ew GM*JJ6tEMRc diff --git a/examples/training-llamas/artifacts/Llama-2-7b-hf/full_report.csv b/examples/training-llamas/artifacts/Llama-2-7b-hf/full_report.csv deleted file mode 100644 index 4dfdee19..00000000 --- a/examples/training-llamas/artifacts/Llama-2-7b-hf/full_report.csv +++ /dev/null @@ -1,561 +0,0 @@ 
-,launcher.name,launcher._target_,launcher.start_method,backend.name,backend.version,backend._target_,backend.seed,backend.inter_op_num_threads,backend.intra_op_num_threads,backend.continuous_isolation,backend.isolation_check_interval,backend.delete_cache,backend.no_weights,backend.device_map,backend.torch_dtype,backend.eval_mode,backend.disable_grad,backend.amp_autocast,backend.amp_dtype,backend.torch_compile,backend.to_bettertransformer,backend.use_flash_attention_2,backend.quantization_scheme,backend.data_parallel,backend.deepspeed_inference,backend.peft_strategy,backend.peft_config.base_model_name_or_path,backend.peft_config.revision,backend.peft_config.peft_type,backend.peft_config.task_type,backend.peft_config.inference_mode,backend.peft_config.auto_mapping,backend.peft_config.r,backend.peft_config.target_modules,backend.peft_config.lora_alpha,backend.peft_config.lora_dropout,backend.peft_config.fan_in_fan_out,backend.peft_config.bias,backend.peft_config.modules_to_save,backend.peft_config.init_lora_weights,backend.peft_config.layers_to_transform,backend.peft_config.layers_pattern,benchmark.name,benchmark._target_,benchmark.warmup_steps,benchmark.dataset_shapes.dataset_size,benchmark.dataset_shapes.sequence_length,benchmark.dataset_shapes.num_choices,benchmark.dataset_shapes.feature_size,benchmark.dataset_shapes.nb_max_frames,benchmark.dataset_shapes.audio_sequence_length,benchmark.training_arguments.skip_memory_metrics,benchmark.training_arguments.output_dir,benchmark.training_arguments.use_cpu,benchmark.training_arguments.ddp_find_unused_parameters,benchmark.training_arguments.do_train,benchmark.training_arguments.do_eval,benchmark.training_arguments.do_predict,benchmark.training_arguments.report_to,benchmark.training_arguments.max_steps,benchmark.training_arguments.per_device_train_batch_size,experiment_name,device,model,task,hub_kwargs.revision,hub_kwargs.cache_dir,hub_kwargs.force_download,hub_kwargs.local_files_only,environment.optimum_version,environment.optimum_commit,environment.transformers_version,environment.transformers_commit,environment.accelerate_version,environment.accelerate_commit,environment.diffusers_version,environment.diffusers_commit,environment.python_version,environment.system,environment.cpu,environment.cpu_count,environment.cpu_ram_mb,environment.gpus,warmup.runtime(s),warmup.throughput(samples/s),training.runtime(s),training.throughput(samples/s),overall_training.runtime(s),overall_training.throughput(samples/s),hydra.run.dir,hydra.sweep.dir,hydra.sweep.subdir,hydra.launcher._target_,hydra.sweeper._target_,hydra.sweeper.max_batch_size,hydra.sweeper.params.benchmark.training_arguments.per_device_train_batch_size,hydra.sweeper.params.model,hydra.help.app_name,hydra.help.header,hydra.help.footer,hydra.help.template,hydra.hydra_help.template,hydra.hydra_help.hydra_help,hydra.hydra_logging.version,hydra.hydra_logging.formatters.colorlog.(),hydra.hydra_logging.formatters.colorlog.format,hydra.hydra_logging.handlers.console.class,hydra.hydra_logging.handlers.console.formatter,hydra.hydra_logging.handlers.console.stream,hydra.hydra_logging.root.level,hydra.hydra_logging.root.handlers,hydra.hydra_logging.disable_existing_loggers,hydra.job_logging.version,hydra.job_logging.formatters.simple.format,hydra.job_logging.formatters.colorlog.(),hydra.job_logging.formatters.colorlog.format,hydra.job_logging.formatters.colorlog.log_colors.DEBUG,hydra.job_logging.formatters.colorlog.log_colors.INFO,hydra.job_logging.formatters.colorlog.log_colors.WARNING,hydra.job_logging.for
matters.colorlog.log_colors.ERROR,hydra.job_logging.formatters.colorlog.log_colors.CRITICAL,hydra.job_logging.handlers.console.class,hydra.job_logging.handlers.console.formatter,hydra.job_logging.handlers.console.stream,hydra.job_logging.handlers.file.class,hydra.job_logging.handlers.file.formatter,hydra.job_logging.handlers.file.filename,hydra.job_logging.root.level,hydra.job_logging.root.handlers,hydra.job_logging.disable_existing_loggers,hydra.mode,hydra.searchpath,hydra.output_subdir,hydra.overrides.hydra,hydra.overrides.task,hydra.job.name,hydra.job.chdir,hydra.job.override_dirname,hydra.job.id,hydra.job.num,hydra.job.config_name,hydra.job.env_set.CUDA_VISIBLE_DEVICES,hydra.job.env_set.CUDA_DEVICE_ORDER,hydra.job.env_copy,hydra.job.config.override_dirname.kv_sep,hydra.job.config.override_dirname.item_sep,hydra.job.config.override_dirname.exclude_keys,hydra.runtime.version,hydra.runtime.version_base,hydra.runtime.cwd,hydra.runtime.config_sources,hydra.runtime.output_dir,hydra.runtime.choices.benchmark,hydra.runtime.choices.launcher,hydra.runtime.choices.backend,hydra.runtime.choices.hydra/env,hydra.runtime.choices.hydra/callbacks,hydra.runtime.choices.hydra/job_logging,hydra.runtime.choices.hydra/hydra_logging,hydra.runtime.choices.hydra/hydra_help,hydra.runtime.choices.hydra/help,hydra.runtime.choices.hydra/sweeper,hydra.runtime.choices.hydra/launcher,hydra.runtime.choices.hydra/output,hydra.verbose,backend.quantization_config.llm_int8_threshold,backend.quantization_config.load_in_4bit,backend.quantization_config.bnb_4bit_compute_dtype,backend.quantization_config.bits,backend.quantization_config.disable_exllama -0,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,spawn,pytorch,2.1.1+cu118,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,42,,,True,1.0,False,True,,float16,False,False,False,,False,False,False,,False,False,lora,,,,CAUSAL_LM,False,,8,,8,0,False,none,,True,,,training,optimum_benchmark.benchmarks.training.benchmark.TrainingBenchmark,40,160,256,1,80,3000,16000,True,./trainer_output,False,False,True,False,False,none,140,16,fp16+peft,cuda,NousResearch/Llama-2-7b-hf,text-generation,main,,False,False,1.14.1,,4.35.2,,0.24.1,,,,3.10.12,Linux, AMD EPYC 7742 64-Core Processor,128,540671,['NVIDIA A100-SXM4-80GB'],36.81096434593201,17.38612425324105,90.82892441749571,17.61553393108114,127.63988995552064,12.535266213074618,experiments/${benchmark.name}/${oc.env:HOSTNAME}/${model}/${experiment_name},experiments/${benchmark.name}/${oc.env:HOSTNAME}/${model}/${experiment_name},${benchmark.training_arguments.per_device_train_batch_size},hydra._internal.core_plugins.basic_launcher.BasicLauncher,hydra._internal.core_plugins.basic_sweeper.BasicSweeper,,"1,2,4,8,16,32,64","NousResearch/Llama-2-7b-hf,NousResearch/Llama-2-13b-hf",${hydra.job.name},"${hydra.help.app_name} is powered by Hydra. -","Powered by Hydra (https://hydra.cc) -Use --hydra-help to view Hydra specific help -","${hydra.help.header} -== Configuration groups == -Compose your configuration from those groups (group=option) - -$APP_CONFIG_GROUPS - -== Config == -Override anything in the config (foo.bar=value) - -$CONFIG - -${hydra.help.footer} -","Hydra (${hydra.runtime.version}) -See https://hydra.cc for more info. - -== Flags == -$FLAGS_HELP - -== Configuration groups == -Compose your configuration from those groups (For example, append hydra/job_logging=disabled to command line) - -$HYDRA_CONFIG_GROUPS - -Use '--cfg hydra' to Show the Hydra config. 
-",???,1,colorlog.ColoredFormatter,[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s,logging.StreamHandler,colorlog,ext://sys.stdout,INFO,['console'],False,1,[%(asctime)s][%(name)s][%(levelname)s] - %(message)s,colorlog.ColoredFormatter,[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s] - %(message)s,purple,green,yellow,red,red,logging.StreamHandler,colorlog,ext://sys.stdout,logging.FileHandler,simple,${hydra.job.name}.log,INFO,"['console', 'file']",False,MULTIRUN,[],.hydra,['hydra.mode=MULTIRUN'],"['benchmark.training_arguments.per_device_train_batch_size=16', 'model=NousResearch/Llama-2-7b-hf']",cli,True,"benchmark.training_arguments.per_device_train_batch_size=16,model=NousResearch/Llama-2-7b-hf",8,8,fp16+peft,0,PCI_BUS_ID,[],=,",",[],1.3.2,1.3,/workspace/optimum-benchmark/examples/training-llamas,"[{'path': 'hydra.conf', 'schema': 'pkg', 'provider': 'hydra'}, {'path': 'optimum_benchmark', 'schema': 'pkg', 'provider': 'main'}, {'path': 'hydra_plugins.hydra_colorlog.conf', 'schema': 'pkg', 'provider': 'hydra-colorlog'}, {'path': '/workspace/optimum-benchmark/examples/training-llamas/configs', 'schema': 'file', 'provider': 'command-line'}, {'path': '', 'schema': 'structured', 'provider': 'schema'}]",/workspace/optimum-benchmark/examples/training-llamas/experiments/training/hf-dgx-01/NousResearch/Llama-2-7b-hf/fp16+peft/16,training,process,pytorch,default,,colorlog,colorlog,default,default,basic,basic,default,False,,,,, -1,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,spawn,pytorch,2.1.1+cu118,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,42,,,True,1.0,False,True,,float16,False,False,False,,False,False,False,,False,False,lora,,,,CAUSAL_LM,False,,8,,8,0,False,none,,True,,,training,optimum_benchmark.benchmarks.training.benchmark.TrainingBenchmark,40,160,256,1,80,3000,16000,True,./trainer_output,False,False,True,False,False,none,140,4,fp16+peft,cuda,NousResearch/Llama-2-7b-hf,text-generation,main,,False,False,1.14.1,,4.35.2,,0.24.1,,,,3.10.12,Linux, AMD EPYC 7742 64-Core Processor,128,540671,['NVIDIA A100-SXM4-80GB'],10.65952181816101,15.01005417779644,26.17747592926025,15.280312016365723,36.83699917793274,10.858647797772315,experiments/${benchmark.name}/${oc.env:HOSTNAME}/${model}/${experiment_name},experiments/${benchmark.name}/${oc.env:HOSTNAME}/${model}/${experiment_name},${benchmark.training_arguments.per_device_train_batch_size},hydra._internal.core_plugins.basic_launcher.BasicLauncher,hydra._internal.core_plugins.basic_sweeper.BasicSweeper,,"1,2,4,8,16,32,64","NousResearch/Llama-2-7b-hf,NousResearch/Llama-2-13b-hf",${hydra.job.name},"${hydra.help.app_name} is powered by Hydra. -","Powered by Hydra (https://hydra.cc) -Use --hydra-help to view Hydra specific help -","${hydra.help.header} -== Configuration groups == -Compose your configuration from those groups (group=option) - -$APP_CONFIG_GROUPS - -== Config == -Override anything in the config (foo.bar=value) - -$CONFIG - -${hydra.help.footer} -","Hydra (${hydra.runtime.version}) -See https://hydra.cc for more info. - -== Flags == -$FLAGS_HELP - -== Configuration groups == -Compose your configuration from those groups (For example, append hydra/job_logging=disabled to command line) - -$HYDRA_CONFIG_GROUPS - -Use '--cfg hydra' to Show the Hydra config. 
-",???,1,colorlog.ColoredFormatter,[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s,logging.StreamHandler,colorlog,ext://sys.stdout,INFO,['console'],False,1,[%(asctime)s][%(name)s][%(levelname)s] - %(message)s,colorlog.ColoredFormatter,[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s] - %(message)s,purple,green,yellow,red,red,logging.StreamHandler,colorlog,ext://sys.stdout,logging.FileHandler,simple,${hydra.job.name}.log,INFO,"['console', 'file']",False,MULTIRUN,[],.hydra,['hydra.mode=MULTIRUN'],"['benchmark.training_arguments.per_device_train_batch_size=4', 'model=NousResearch/Llama-2-7b-hf']",cli,True,"benchmark.training_arguments.per_device_train_batch_size=4,model=NousResearch/Llama-2-7b-hf",4,4,fp16+peft,0,PCI_BUS_ID,[],=,",",[],1.3.2,1.3,/workspace/optimum-benchmark/examples/training-llamas,"[{'path': 'hydra.conf', 'schema': 'pkg', 'provider': 'hydra'}, {'path': 'optimum_benchmark', 'schema': 'pkg', 'provider': 'main'}, {'path': 'hydra_plugins.hydra_colorlog.conf', 'schema': 'pkg', 'provider': 'hydra-colorlog'}, {'path': '/workspace/optimum-benchmark/examples/training-llamas/configs', 'schema': 'file', 'provider': 'command-line'}, {'path': '', 'schema': 'structured', 'provider': 'schema'}]",/workspace/optimum-benchmark/examples/training-llamas/experiments/training/hf-dgx-01/NousResearch/Llama-2-7b-hf/fp16+peft/4,training,process,pytorch,default,,colorlog,colorlog,default,default,basic,basic,default,False,,,,, -2,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,spawn,pytorch,2.1.1+cu118,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,42,,,True,1.0,False,True,,float16,False,False,False,,False,False,False,,False,False,lora,,,,CAUSAL_LM,False,,8,,8,0,False,none,,True,,,training,optimum_benchmark.benchmarks.training.benchmark.TrainingBenchmark,40,160,256,1,80,3000,16000,True,./trainer_output,False,False,True,False,False,none,140,1,fp16+peft,cuda,NousResearch/Llama-2-7b-hf,text-generation,main,,False,False,1.14.1,,4.35.2,,0.24.1,,,,3.10.12,Linux, AMD EPYC 7742 64-Core Processor,128,540671,['NVIDIA A100-SXM4-80GB'],4.55856728553772,8.774686758908217,10.863666772842407,9.204995154121027,15.422235250473022,6.484144378288669,experiments/${benchmark.name}/${oc.env:HOSTNAME}/${model}/${experiment_name},experiments/${benchmark.name}/${oc.env:HOSTNAME}/${model}/${experiment_name},${benchmark.training_arguments.per_device_train_batch_size},hydra._internal.core_plugins.basic_launcher.BasicLauncher,hydra._internal.core_plugins.basic_sweeper.BasicSweeper,,"1,2,4,8,16,32,64","NousResearch/Llama-2-7b-hf,NousResearch/Llama-2-13b-hf",${hydra.job.name},"${hydra.help.app_name} is powered by Hydra. -","Powered by Hydra (https://hydra.cc) -Use --hydra-help to view Hydra specific help -","${hydra.help.header} -== Configuration groups == -Compose your configuration from those groups (group=option) - -$APP_CONFIG_GROUPS - -== Config == -Override anything in the config (foo.bar=value) - -$CONFIG - -${hydra.help.footer} -","Hydra (${hydra.runtime.version}) -See https://hydra.cc for more info. - -== Flags == -$FLAGS_HELP - -== Configuration groups == -Compose your configuration from those groups (For example, append hydra/job_logging=disabled to command line) - -$HYDRA_CONFIG_GROUPS - -Use '--cfg hydra' to Show the Hydra config. 
-",???,1,colorlog.ColoredFormatter,[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s,logging.StreamHandler,colorlog,ext://sys.stdout,INFO,['console'],False,1,[%(asctime)s][%(name)s][%(levelname)s] - %(message)s,colorlog.ColoredFormatter,[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s] - %(message)s,purple,green,yellow,red,red,logging.StreamHandler,colorlog,ext://sys.stdout,logging.FileHandler,simple,${hydra.job.name}.log,INFO,"['console', 'file']",False,MULTIRUN,[],.hydra,['hydra.mode=MULTIRUN'],"['benchmark.training_arguments.per_device_train_batch_size=1', 'model=NousResearch/Llama-2-7b-hf']",cli,True,"benchmark.training_arguments.per_device_train_batch_size=1,model=NousResearch/Llama-2-7b-hf",0,0,fp16+peft,0,PCI_BUS_ID,[],=,",",[],1.3.2,1.3,/workspace/optimum-benchmark/examples/training-llamas,"[{'path': 'hydra.conf', 'schema': 'pkg', 'provider': 'hydra'}, {'path': 'optimum_benchmark', 'schema': 'pkg', 'provider': 'main'}, {'path': 'hydra_plugins.hydra_colorlog.conf', 'schema': 'pkg', 'provider': 'hydra-colorlog'}, {'path': '/workspace/optimum-benchmark/examples/training-llamas/configs', 'schema': 'file', 'provider': 'command-line'}, {'path': '', 'schema': 'structured', 'provider': 'schema'}]",/workspace/optimum-benchmark/examples/training-llamas/experiments/training/hf-dgx-01/NousResearch/Llama-2-7b-hf/fp16+peft/1,training,process,pytorch,default,,colorlog,colorlog,default,default,basic,basic,default,False,,,,, -3,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,spawn,pytorch,2.1.1+cu118,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,42,,,True,1.0,False,True,,float16,False,False,False,,False,False,False,,False,False,lora,,,,CAUSAL_LM,False,,8,,8,0,False,none,,True,,,training,optimum_benchmark.benchmarks.training.benchmark.TrainingBenchmark,40,160,256,1,80,3000,16000,True,./trainer_output,False,False,True,False,False,none,140,2,fp16+peft,cuda,NousResearch/Llama-2-7b-hf,text-generation,main,,False,False,1.14.1,,4.35.2,,0.24.1,,,,3.10.12,Linux, AMD EPYC 7742 64-Core Processor,128,540671,['NVIDIA A100-SXM4-80GB'],6.038196802139282,13.248988501278507,14.59524655342102,13.703091569434395,20.63344502449036,9.693000842206184,experiments/${benchmark.name}/${oc.env:HOSTNAME}/${model}/${experiment_name},experiments/${benchmark.name}/${oc.env:HOSTNAME}/${model}/${experiment_name},${benchmark.training_arguments.per_device_train_batch_size},hydra._internal.core_plugins.basic_launcher.BasicLauncher,hydra._internal.core_plugins.basic_sweeper.BasicSweeper,,"1,2,4,8,16,32,64","NousResearch/Llama-2-7b-hf,NousResearch/Llama-2-13b-hf",${hydra.job.name},"${hydra.help.app_name} is powered by Hydra. -","Powered by Hydra (https://hydra.cc) -Use --hydra-help to view Hydra specific help -","${hydra.help.header} -== Configuration groups == -Compose your configuration from those groups (group=option) - -$APP_CONFIG_GROUPS - -== Config == -Override anything in the config (foo.bar=value) - -$CONFIG - -${hydra.help.footer} -","Hydra (${hydra.runtime.version}) -See https://hydra.cc for more info. - -== Flags == -$FLAGS_HELP - -== Configuration groups == -Compose your configuration from those groups (For example, append hydra/job_logging=disabled to command line) - -$HYDRA_CONFIG_GROUPS - -Use '--cfg hydra' to Show the Hydra config. 
-",???,1,colorlog.ColoredFormatter,[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s,logging.StreamHandler,colorlog,ext://sys.stdout,INFO,['console'],False,1,[%(asctime)s][%(name)s][%(levelname)s] - %(message)s,colorlog.ColoredFormatter,[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s] - %(message)s,purple,green,yellow,red,red,logging.StreamHandler,colorlog,ext://sys.stdout,logging.FileHandler,simple,${hydra.job.name}.log,INFO,"['console', 'file']",False,MULTIRUN,[],.hydra,['hydra.mode=MULTIRUN'],"['benchmark.training_arguments.per_device_train_batch_size=2', 'model=NousResearch/Llama-2-7b-hf']",cli,True,"benchmark.training_arguments.per_device_train_batch_size=2,model=NousResearch/Llama-2-7b-hf",2,2,fp16+peft,0,PCI_BUS_ID,[],=,",",[],1.3.2,1.3,/workspace/optimum-benchmark/examples/training-llamas,"[{'path': 'hydra.conf', 'schema': 'pkg', 'provider': 'hydra'}, {'path': 'optimum_benchmark', 'schema': 'pkg', 'provider': 'main'}, {'path': 'hydra_plugins.hydra_colorlog.conf', 'schema': 'pkg', 'provider': 'hydra-colorlog'}, {'path': '/workspace/optimum-benchmark/examples/training-llamas/configs', 'schema': 'file', 'provider': 'command-line'}, {'path': '', 'schema': 'structured', 'provider': 'schema'}]",/workspace/optimum-benchmark/examples/training-llamas/experiments/training/hf-dgx-01/NousResearch/Llama-2-7b-hf/fp16+peft/2,training,process,pytorch,default,,colorlog,colorlog,default,default,basic,basic,default,False,,,,, -4,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,spawn,pytorch,2.1.1+cu118,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,42,,,True,1.0,False,True,,float16,False,False,False,,False,False,False,,False,False,lora,,,,CAUSAL_LM,False,,8,,8,0,False,none,,True,,,training,optimum_benchmark.benchmarks.training.benchmark.TrainingBenchmark,40,160,256,1,80,3000,16000,True,./trainer_output,False,False,True,False,False,none,140,8,fp16+peft,cuda,NousResearch/Llama-2-7b-hf,text-generation,main,,False,False,1.14.1,,4.35.2,,0.24.1,,,,3.10.12,Linux, AMD EPYC 7742 64-Core Processor,128,540671,['NVIDIA A100-SXM4-80GB'],19.43790316581726,16.4626810448742,47.72488141059876,16.762744638739655,67.16278600692749,11.911358172626793,experiments/${benchmark.name}/${oc.env:HOSTNAME}/${model}/${experiment_name},experiments/${benchmark.name}/${oc.env:HOSTNAME}/${model}/${experiment_name},${benchmark.training_arguments.per_device_train_batch_size},hydra._internal.core_plugins.basic_launcher.BasicLauncher,hydra._internal.core_plugins.basic_sweeper.BasicSweeper,,"1,2,4,8,16,32,64","NousResearch/Llama-2-7b-hf,NousResearch/Llama-2-13b-hf",${hydra.job.name},"${hydra.help.app_name} is powered by Hydra. -","Powered by Hydra (https://hydra.cc) -Use --hydra-help to view Hydra specific help -","${hydra.help.header} -== Configuration groups == -Compose your configuration from those groups (group=option) - -$APP_CONFIG_GROUPS - -== Config == -Override anything in the config (foo.bar=value) - -$CONFIG - -${hydra.help.footer} -","Hydra (${hydra.runtime.version}) -See https://hydra.cc for more info. - -== Flags == -$FLAGS_HELP - -== Configuration groups == -Compose your configuration from those groups (For example, append hydra/job_logging=disabled to command line) - -$HYDRA_CONFIG_GROUPS - -Use '--cfg hydra' to Show the Hydra config. 
-",???,1,colorlog.ColoredFormatter,[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s,logging.StreamHandler,colorlog,ext://sys.stdout,INFO,['console'],False,1,[%(asctime)s][%(name)s][%(levelname)s] - %(message)s,colorlog.ColoredFormatter,[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s] - %(message)s,purple,green,yellow,red,red,logging.StreamHandler,colorlog,ext://sys.stdout,logging.FileHandler,simple,${hydra.job.name}.log,INFO,"['console', 'file']",False,MULTIRUN,[],.hydra,['hydra.mode=MULTIRUN'],"['benchmark.training_arguments.per_device_train_batch_size=8', 'model=NousResearch/Llama-2-7b-hf']",cli,True,"benchmark.training_arguments.per_device_train_batch_size=8,model=NousResearch/Llama-2-7b-hf",6,6,fp16+peft,0,PCI_BUS_ID,[],=,",",[],1.3.2,1.3,/workspace/optimum-benchmark/examples/training-llamas,"[{'path': 'hydra.conf', 'schema': 'pkg', 'provider': 'hydra'}, {'path': 'optimum_benchmark', 'schema': 'pkg', 'provider': 'main'}, {'path': 'hydra_plugins.hydra_colorlog.conf', 'schema': 'pkg', 'provider': 'hydra-colorlog'}, {'path': '/workspace/optimum-benchmark/examples/training-llamas/configs', 'schema': 'file', 'provider': 'command-line'}, {'path': '', 'schema': 'structured', 'provider': 'schema'}]",/workspace/optimum-benchmark/examples/training-llamas/experiments/training/hf-dgx-01/NousResearch/Llama-2-7b-hf/fp16+peft/8,training,process,pytorch,default,,colorlog,colorlog,default,default,basic,basic,default,False,,,,, -5,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,spawn,pytorch,2.1.1+cu118,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,42,,,True,1.0,False,True,,float16,False,False,False,,False,False,False,bnb,False,False,lora,,,,CAUSAL_LM,False,,8,,8,0,False,none,,True,,,training,optimum_benchmark.benchmarks.training.benchmark.TrainingBenchmark,40,160,256,1,80,3000,16000,True,./trainer_output,False,False,True,False,False,none,140,16,fp16+peft+bnb-4bit,cuda,NousResearch/Llama-2-7b-hf,text-generation,main,,False,False,1.14.1,,4.35.2,,0.24.1,,,,3.10.12,Linux, AMD EPYC 7742 64-Core Processor,128,540671,['NVIDIA A100-SXM4-80GB'],33.6070408821106,19.043628454080263,83.09710359573364,19.25458205840699,116.70414614677428,13.70988137805954,experiments/${benchmark.name}/${oc.env:HOSTNAME}/${model}/${experiment_name},experiments/${benchmark.name}/${oc.env:HOSTNAME}/${model}/${experiment_name},${benchmark.training_arguments.per_device_train_batch_size},hydra._internal.core_plugins.basic_launcher.BasicLauncher,hydra._internal.core_plugins.basic_sweeper.BasicSweeper,,"1,2,4,8,16,32,64,128","NousResearch/Llama-2-7b-hf,NousResearch/Llama-2-13b-hf",${hydra.job.name},"${hydra.help.app_name} is powered by Hydra. -","Powered by Hydra (https://hydra.cc) -Use --hydra-help to view Hydra specific help -","${hydra.help.header} -== Configuration groups == -Compose your configuration from those groups (group=option) - -$APP_CONFIG_GROUPS - -== Config == -Override anything in the config (foo.bar=value) - -$CONFIG - -${hydra.help.footer} -","Hydra (${hydra.runtime.version}) -See https://hydra.cc for more info. - -== Flags == -$FLAGS_HELP - -== Configuration groups == -Compose your configuration from those groups (For example, append hydra/job_logging=disabled to command line) - -$HYDRA_CONFIG_GROUPS - -Use '--cfg hydra' to Show the Hydra config. 
-",???,1,colorlog.ColoredFormatter,[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s,logging.StreamHandler,colorlog,ext://sys.stdout,INFO,['console'],False,1,[%(asctime)s][%(name)s][%(levelname)s] - %(message)s,colorlog.ColoredFormatter,[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s] - %(message)s,purple,green,yellow,red,red,logging.StreamHandler,colorlog,ext://sys.stdout,logging.FileHandler,simple,${hydra.job.name}.log,INFO,"['console', 'file']",False,MULTIRUN,[],.hydra,['hydra.mode=MULTIRUN'],"['benchmark.training_arguments.per_device_train_batch_size=16', 'model=NousResearch/Llama-2-7b-hf']",cli,True,"benchmark.training_arguments.per_device_train_batch_size=16,model=NousResearch/Llama-2-7b-hf",8,8,fp16+peft+bnb-4bit,0,PCI_BUS_ID,[],=,",",[],1.3.2,1.3,/workspace/optimum-benchmark/examples/training-llamas,"[{'path': 'hydra.conf', 'schema': 'pkg', 'provider': 'hydra'}, {'path': 'optimum_benchmark', 'schema': 'pkg', 'provider': 'main'}, {'path': 'hydra_plugins.hydra_colorlog.conf', 'schema': 'pkg', 'provider': 'hydra-colorlog'}, {'path': '/workspace/optimum-benchmark/examples/training-llamas/configs', 'schema': 'file', 'provider': 'command-line'}, {'path': '', 'schema': 'structured', 'provider': 'schema'}]",/workspace/optimum-benchmark/examples/training-llamas/experiments/training/hf-dgx-01/NousResearch/Llama-2-7b-hf/fp16+peft+bnb-4bit/16,training,process,pytorch,default,,colorlog,colorlog,default,default,basic,basic,default,False,0.0,True,float16,, -6,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,spawn,pytorch,2.1.1+cu118,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,42,,,True,1.0,False,True,,float16,False,False,False,,False,False,False,bnb,False,False,lora,,,,CAUSAL_LM,False,,8,,8,0,False,none,,True,,,training,optimum_benchmark.benchmarks.training.benchmark.TrainingBenchmark,40,160,256,1,80,3000,16000,True,./trainer_output,False,False,True,False,False,none,140,4,fp16+peft+bnb-4bit,cuda,NousResearch/Llama-2-7b-hf,text-generation,main,,False,False,1.14.1,,4.35.2,,0.24.1,,,,3.10.12,Linux, AMD EPYC 7742 64-Core Processor,128,540671,['NVIDIA A100-SXM4-80GB'],11.067278146743774,14.45703251319073,26.702176094055176,14.980052509243013,37.769455671310425,10.590568301566467,experiments/${benchmark.name}/${oc.env:HOSTNAME}/${model}/${experiment_name},experiments/${benchmark.name}/${oc.env:HOSTNAME}/${model}/${experiment_name},${benchmark.training_arguments.per_device_train_batch_size},hydra._internal.core_plugins.basic_launcher.BasicLauncher,hydra._internal.core_plugins.basic_sweeper.BasicSweeper,,"1,2,4,8,16,32,64,128","NousResearch/Llama-2-7b-hf,NousResearch/Llama-2-13b-hf",${hydra.job.name},"${hydra.help.app_name} is powered by Hydra. -","Powered by Hydra (https://hydra.cc) -Use --hydra-help to view Hydra specific help -","${hydra.help.header} -== Configuration groups == -Compose your configuration from those groups (group=option) - -$APP_CONFIG_GROUPS - -== Config == -Override anything in the config (foo.bar=value) - -$CONFIG - -${hydra.help.footer} -","Hydra (${hydra.runtime.version}) -See https://hydra.cc for more info. - -== Flags == -$FLAGS_HELP - -== Configuration groups == -Compose your configuration from those groups (For example, append hydra/job_logging=disabled to command line) - -$HYDRA_CONFIG_GROUPS - -Use '--cfg hydra' to Show the Hydra config. 
-",???,1,colorlog.ColoredFormatter,[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s,logging.StreamHandler,colorlog,ext://sys.stdout,INFO,['console'],False,1,[%(asctime)s][%(name)s][%(levelname)s] - %(message)s,colorlog.ColoredFormatter,[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s] - %(message)s,purple,green,yellow,red,red,logging.StreamHandler,colorlog,ext://sys.stdout,logging.FileHandler,simple,${hydra.job.name}.log,INFO,"['console', 'file']",False,MULTIRUN,[],.hydra,['hydra.mode=MULTIRUN'],"['benchmark.training_arguments.per_device_train_batch_size=4', 'model=NousResearch/Llama-2-7b-hf']",cli,True,"benchmark.training_arguments.per_device_train_batch_size=4,model=NousResearch/Llama-2-7b-hf",4,4,fp16+peft+bnb-4bit,0,PCI_BUS_ID,[],=,",",[],1.3.2,1.3,/workspace/optimum-benchmark/examples/training-llamas,"[{'path': 'hydra.conf', 'schema': 'pkg', 'provider': 'hydra'}, {'path': 'optimum_benchmark', 'schema': 'pkg', 'provider': 'main'}, {'path': 'hydra_plugins.hydra_colorlog.conf', 'schema': 'pkg', 'provider': 'hydra-colorlog'}, {'path': '/workspace/optimum-benchmark/examples/training-llamas/configs', 'schema': 'file', 'provider': 'command-line'}, {'path': '', 'schema': 'structured', 'provider': 'schema'}]",/workspace/optimum-benchmark/examples/training-llamas/experiments/training/hf-dgx-01/NousResearch/Llama-2-7b-hf/fp16+peft+bnb-4bit/4,training,process,pytorch,default,,colorlog,colorlog,default,default,basic,basic,default,False,0.0,True,float16,, -7,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,spawn,pytorch,2.1.1+cu118,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,42,,,True,1.0,False,True,,float16,False,False,False,,False,False,False,bnb,False,False,lora,,,,CAUSAL_LM,False,,8,,8,0,False,none,,True,,,training,optimum_benchmark.benchmarks.training.benchmark.TrainingBenchmark,40,160,256,1,80,3000,16000,True,./trainer_output,False,False,True,False,False,none,140,1,fp16+peft+bnb-4bit,cuda,NousResearch/Llama-2-7b-hf,text-generation,main,,False,False,1.14.1,,4.35.2,,0.24.1,,,,3.10.12,Linux, AMD EPYC 7742 64-Core Processor,128,540671,['NVIDIA A100-SXM4-80GB'],6.390275239944458,6.259511288335002,15.36588716506958,6.507922316865925,21.75616407394409,4.596398503896343,experiments/${benchmark.name}/${oc.env:HOSTNAME}/${model}/${experiment_name},experiments/${benchmark.name}/${oc.env:HOSTNAME}/${model}/${experiment_name},${benchmark.training_arguments.per_device_train_batch_size},hydra._internal.core_plugins.basic_launcher.BasicLauncher,hydra._internal.core_plugins.basic_sweeper.BasicSweeper,,"1,2,4,8,16,32,64,128","NousResearch/Llama-2-7b-hf,NousResearch/Llama-2-13b-hf",${hydra.job.name},"${hydra.help.app_name} is powered by Hydra. -","Powered by Hydra (https://hydra.cc) -Use --hydra-help to view Hydra specific help -","${hydra.help.header} -== Configuration groups == -Compose your configuration from those groups (group=option) - -$APP_CONFIG_GROUPS - -== Config == -Override anything in the config (foo.bar=value) - -$CONFIG - -${hydra.help.footer} -","Hydra (${hydra.runtime.version}) -See https://hydra.cc for more info. - -== Flags == -$FLAGS_HELP - -== Configuration groups == -Compose your configuration from those groups (For example, append hydra/job_logging=disabled to command line) - -$HYDRA_CONFIG_GROUPS - -Use '--cfg hydra' to Show the Hydra config. 
-",???,1,colorlog.ColoredFormatter,[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s,logging.StreamHandler,colorlog,ext://sys.stdout,INFO,['console'],False,1,[%(asctime)s][%(name)s][%(levelname)s] - %(message)s,colorlog.ColoredFormatter,[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s] - %(message)s,purple,green,yellow,red,red,logging.StreamHandler,colorlog,ext://sys.stdout,logging.FileHandler,simple,${hydra.job.name}.log,INFO,"['console', 'file']",False,MULTIRUN,[],.hydra,['hydra.mode=MULTIRUN'],"['benchmark.training_arguments.per_device_train_batch_size=1', 'model=NousResearch/Llama-2-7b-hf']",cli,True,"benchmark.training_arguments.per_device_train_batch_size=1,model=NousResearch/Llama-2-7b-hf",0,0,fp16+peft+bnb-4bit,0,PCI_BUS_ID,[],=,",",[],1.3.2,1.3,/workspace/optimum-benchmark/examples/training-llamas,"[{'path': 'hydra.conf', 'schema': 'pkg', 'provider': 'hydra'}, {'path': 'optimum_benchmark', 'schema': 'pkg', 'provider': 'main'}, {'path': 'hydra_plugins.hydra_colorlog.conf', 'schema': 'pkg', 'provider': 'hydra-colorlog'}, {'path': '/workspace/optimum-benchmark/examples/training-llamas/configs', 'schema': 'file', 'provider': 'command-line'}, {'path': '', 'schema': 'structured', 'provider': 'schema'}]",/workspace/optimum-benchmark/examples/training-llamas/experiments/training/hf-dgx-01/NousResearch/Llama-2-7b-hf/fp16+peft+bnb-4bit/1,training,process,pytorch,default,,colorlog,colorlog,default,default,basic,basic,default,False,0.0,True,float16,, -8,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,spawn,pytorch,2.1.1+cu118,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,42,,,True,1.0,False,True,,float16,False,False,False,,False,False,False,bnb,False,False,lora,,,,CAUSAL_LM,False,,8,,8,0,False,none,,True,,,training,optimum_benchmark.benchmarks.training.benchmark.TrainingBenchmark,40,160,256,1,80,3000,16000,True,./trainer_output,False,False,True,False,False,none,140,2,fp16+peft+bnb-4bit,cuda,NousResearch/Llama-2-7b-hf,text-generation,main,,False,False,1.14.1,,4.35.2,,0.24.1,,,,3.10.12,Linux, AMD EPYC 7742 64-Core Processor,128,540671,['NVIDIA A100-SXM4-80GB'],7.136318206787109,11.210262446525258,16.748042583465576,11.94169402204942,23.88436245918274,8.37367965512124,experiments/${benchmark.name}/${oc.env:HOSTNAME}/${model}/${experiment_name},experiments/${benchmark.name}/${oc.env:HOSTNAME}/${model}/${experiment_name},${benchmark.training_arguments.per_device_train_batch_size},hydra._internal.core_plugins.basic_launcher.BasicLauncher,hydra._internal.core_plugins.basic_sweeper.BasicSweeper,,"1,2,4,8,16,32,64,128","NousResearch/Llama-2-7b-hf,NousResearch/Llama-2-13b-hf",${hydra.job.name},"${hydra.help.app_name} is powered by Hydra. -","Powered by Hydra (https://hydra.cc) -Use --hydra-help to view Hydra specific help -","${hydra.help.header} -== Configuration groups == -Compose your configuration from those groups (group=option) - -$APP_CONFIG_GROUPS - -== Config == -Override anything in the config (foo.bar=value) - -$CONFIG - -${hydra.help.footer} -","Hydra (${hydra.runtime.version}) -See https://hydra.cc for more info. - -== Flags == -$FLAGS_HELP - -== Configuration groups == -Compose your configuration from those groups (For example, append hydra/job_logging=disabled to command line) - -$HYDRA_CONFIG_GROUPS - -Use '--cfg hydra' to Show the Hydra config. 
-",???,1,colorlog.ColoredFormatter,[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s,logging.StreamHandler,colorlog,ext://sys.stdout,INFO,['console'],False,1,[%(asctime)s][%(name)s][%(levelname)s] - %(message)s,colorlog.ColoredFormatter,[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s] - %(message)s,purple,green,yellow,red,red,logging.StreamHandler,colorlog,ext://sys.stdout,logging.FileHandler,simple,${hydra.job.name}.log,INFO,"['console', 'file']",False,MULTIRUN,[],.hydra,['hydra.mode=MULTIRUN'],"['benchmark.training_arguments.per_device_train_batch_size=2', 'model=NousResearch/Llama-2-7b-hf']",cli,True,"benchmark.training_arguments.per_device_train_batch_size=2,model=NousResearch/Llama-2-7b-hf",2,2,fp16+peft+bnb-4bit,0,PCI_BUS_ID,[],=,",",[],1.3.2,1.3,/workspace/optimum-benchmark/examples/training-llamas,"[{'path': 'hydra.conf', 'schema': 'pkg', 'provider': 'hydra'}, {'path': 'optimum_benchmark', 'schema': 'pkg', 'provider': 'main'}, {'path': 'hydra_plugins.hydra_colorlog.conf', 'schema': 'pkg', 'provider': 'hydra-colorlog'}, {'path': '/workspace/optimum-benchmark/examples/training-llamas/configs', 'schema': 'file', 'provider': 'command-line'}, {'path': '', 'schema': 'structured', 'provider': 'schema'}]",/workspace/optimum-benchmark/examples/training-llamas/experiments/training/hf-dgx-01/NousResearch/Llama-2-7b-hf/fp16+peft+bnb-4bit/2,training,process,pytorch,default,,colorlog,colorlog,default,default,basic,basic,default,False,0.0,True,float16,, -9,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,spawn,pytorch,2.1.1+cu118,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,42,,,True,1.0,False,True,,float16,False,False,False,,False,False,False,bnb,False,False,lora,,,,CAUSAL_LM,False,,8,,8,0,False,none,,True,,,training,optimum_benchmark.benchmarks.training.benchmark.TrainingBenchmark,40,160,256,1,80,3000,16000,True,./trainer_output,False,False,True,False,False,none,140,8,fp16+peft+bnb-4bit,cuda,NousResearch/Llama-2-7b-hf,text-generation,main,,False,False,1.14.1,,4.35.2,,0.24.1,,,,3.10.12,Linux, AMD EPYC 7742 64-Core Processor,128,540671,['NVIDIA A100-SXM4-80GB'],18.616926193237305,17.188659216806784,45.44451022148132,17.60388650028503,64.06143808364868,12.488011882521189,experiments/${benchmark.name}/${oc.env:HOSTNAME}/${model}/${experiment_name},experiments/${benchmark.name}/${oc.env:HOSTNAME}/${model}/${experiment_name},${benchmark.training_arguments.per_device_train_batch_size},hydra._internal.core_plugins.basic_launcher.BasicLauncher,hydra._internal.core_plugins.basic_sweeper.BasicSweeper,,"1,2,4,8,16,32,64,128","NousResearch/Llama-2-7b-hf,NousResearch/Llama-2-13b-hf",${hydra.job.name},"${hydra.help.app_name} is powered by Hydra. -","Powered by Hydra (https://hydra.cc) -Use --hydra-help to view Hydra specific help -","${hydra.help.header} -== Configuration groups == -Compose your configuration from those groups (group=option) - -$APP_CONFIG_GROUPS - -== Config == -Override anything in the config (foo.bar=value) - -$CONFIG - -${hydra.help.footer} -","Hydra (${hydra.runtime.version}) -See https://hydra.cc for more info. - -== Flags == -$FLAGS_HELP - -== Configuration groups == -Compose your configuration from those groups (For example, append hydra/job_logging=disabled to command line) - -$HYDRA_CONFIG_GROUPS - -Use '--cfg hydra' to Show the Hydra config. 
-",???,1,colorlog.ColoredFormatter,[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s,logging.StreamHandler,colorlog,ext://sys.stdout,INFO,['console'],False,1,[%(asctime)s][%(name)s][%(levelname)s] - %(message)s,colorlog.ColoredFormatter,[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s] - %(message)s,purple,green,yellow,red,red,logging.StreamHandler,colorlog,ext://sys.stdout,logging.FileHandler,simple,${hydra.job.name}.log,INFO,"['console', 'file']",False,MULTIRUN,[],.hydra,['hydra.mode=MULTIRUN'],"['benchmark.training_arguments.per_device_train_batch_size=8', 'model=NousResearch/Llama-2-7b-hf']",cli,True,"benchmark.training_arguments.per_device_train_batch_size=8,model=NousResearch/Llama-2-7b-hf",6,6,fp16+peft+bnb-4bit,0,PCI_BUS_ID,[],=,",",[],1.3.2,1.3,/workspace/optimum-benchmark/examples/training-llamas,"[{'path': 'hydra.conf', 'schema': 'pkg', 'provider': 'hydra'}, {'path': 'optimum_benchmark', 'schema': 'pkg', 'provider': 'main'}, {'path': 'hydra_plugins.hydra_colorlog.conf', 'schema': 'pkg', 'provider': 'hydra-colorlog'}, {'path': '/workspace/optimum-benchmark/examples/training-llamas/configs', 'schema': 'file', 'provider': 'command-line'}, {'path': '', 'schema': 'structured', 'provider': 'schema'}]",/workspace/optimum-benchmark/examples/training-llamas/experiments/training/hf-dgx-01/NousResearch/Llama-2-7b-hf/fp16+peft+bnb-4bit/8,training,process,pytorch,default,,colorlog,colorlog,default,default,basic,basic,default,False,0.0,True,float16,, -10,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,spawn,pytorch,2.1.1+cu118,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,42,,,True,1.0,False,True,,float16,False,False,False,,False,False,False,,False,False,,,,,,,,,,,,,,,,,,training,optimum_benchmark.benchmarks.training.benchmark.TrainingBenchmark,40,160,256,1,80,3000,16000,True,./trainer_output,False,False,True,False,False,none,140,16,fp16,cuda,NousResearch/Llama-2-7b-hf,text-generation,main,,False,False,1.14.1,,4.35.2,,0.25.0,,,,3.10.12,Linux, AMD EPYC 7742 64-Core Processor,128,540671,['NVIDIA A100-SXM4-80GB'],45.30532383918762,14.12637513135754,111.96182560920715,14.290585128404912,157.26715087890625,10.173771134392712,experiments/${oc.env:HOSTNAME}/${model}/${experiment_name},experiments/${oc.env:HOSTNAME}/${model}/${experiment_name},${benchmark.training_arguments.per_device_train_batch_size},hydra._internal.core_plugins.basic_launcher.BasicLauncher,hydra._internal.core_plugins.basic_sweeper.BasicSweeper,,"1,2,4,8,16,32,64,128","NousResearch/Llama-2-7b-hf,NousResearch/Llama-2-13b-hf,NousResearch/Llama-2-70b-hf",${hydra.job.name},"${hydra.help.app_name} is powered by Hydra. -","Powered by Hydra (https://hydra.cc) -Use --hydra-help to view Hydra specific help -","${hydra.help.header} -== Configuration groups == -Compose your configuration from those groups (group=option) - -$APP_CONFIG_GROUPS - -== Config == -Override anything in the config (foo.bar=value) - -$CONFIG - -${hydra.help.footer} -","Hydra (${hydra.runtime.version}) -See https://hydra.cc for more info. - -== Flags == -$FLAGS_HELP - -== Configuration groups == -Compose your configuration from those groups (For example, append hydra/job_logging=disabled to command line) - -$HYDRA_CONFIG_GROUPS - -Use '--cfg hydra' to Show the Hydra config. 
-",???,1,colorlog.ColoredFormatter,[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s,logging.StreamHandler,colorlog,ext://sys.stdout,INFO,['console'],False,1,[%(asctime)s][%(name)s][%(levelname)s] - %(message)s,colorlog.ColoredFormatter,[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s] - %(message)s,purple,green,yellow,red,red,logging.StreamHandler,colorlog,ext://sys.stdout,logging.FileHandler,simple,${hydra.job.name}.log,INFO,"['console', 'file']",False,MULTIRUN,[],.hydra,['hydra.mode=MULTIRUN'],"['benchmark.training_arguments.per_device_train_batch_size=16', 'model=NousResearch/Llama-2-7b-hf']",cli,True,"benchmark.training_arguments.per_device_train_batch_size=16,model=NousResearch/Llama-2-7b-hf",12,12,fp16,0,PCI_BUS_ID,[],=,",",[],1.3.2,1.3,/workspace/optimum-benchmark/examples/training-llamas,"[{'path': 'hydra.conf', 'schema': 'pkg', 'provider': 'hydra'}, {'path': 'optimum_benchmark', 'schema': 'pkg', 'provider': 'main'}, {'path': 'hydra_plugins.hydra_colorlog.conf', 'schema': 'pkg', 'provider': 'hydra-colorlog'}, {'path': '/workspace/optimum-benchmark/examples/training-llamas/configs', 'schema': 'file', 'provider': 'command-line'}, {'path': '', 'schema': 'structured', 'provider': 'schema'}]",/workspace/optimum-benchmark/examples/training-llamas/experiments/hf-dgx-01/NousResearch/Llama-2-7b-hf/fp16/16,training,process,pytorch,default,,colorlog,colorlog,default,default,basic,basic,default,False,,,,, -11,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,spawn,pytorch,2.1.1+cu118,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,42,,,True,1.0,False,True,,float16,False,False,False,,False,False,False,,False,False,,,,,,,,,,,,,,,,,,training,optimum_benchmark.benchmarks.training.benchmark.TrainingBenchmark,40,160,256,1,80,3000,16000,True,./trainer_output,False,False,True,False,False,none,140,4,fp16,cuda,NousResearch/Llama-2-7b-hf,text-generation,main,,False,False,1.14.1,,4.35.2,,0.25.0,,,,3.10.12,Linux, AMD EPYC 7742 64-Core Processor,128,540671,['NVIDIA A100-SXM4-80GB'],19.096375942230225,8.378553107879062,46.270344257354736,8.644845989803057,65.36672186851501,6.1193217063049135,experiments/${oc.env:HOSTNAME}/${model}/${experiment_name},experiments/${oc.env:HOSTNAME}/${model}/${experiment_name},${benchmark.training_arguments.per_device_train_batch_size},hydra._internal.core_plugins.basic_launcher.BasicLauncher,hydra._internal.core_plugins.basic_sweeper.BasicSweeper,,"1,2,4,8,16,32,64,128","NousResearch/Llama-2-7b-hf,NousResearch/Llama-2-13b-hf,NousResearch/Llama-2-70b-hf",${hydra.job.name},"${hydra.help.app_name} is powered by Hydra. -","Powered by Hydra (https://hydra.cc) -Use --hydra-help to view Hydra specific help -","${hydra.help.header} -== Configuration groups == -Compose your configuration from those groups (group=option) - -$APP_CONFIG_GROUPS - -== Config == -Override anything in the config (foo.bar=value) - -$CONFIG - -${hydra.help.footer} -","Hydra (${hydra.runtime.version}) -See https://hydra.cc for more info. - -== Flags == -$FLAGS_HELP - -== Configuration groups == -Compose your configuration from those groups (For example, append hydra/job_logging=disabled to command line) - -$HYDRA_CONFIG_GROUPS - -Use '--cfg hydra' to Show the Hydra config. 
-",???,1,colorlog.ColoredFormatter,[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s,logging.StreamHandler,colorlog,ext://sys.stdout,INFO,['console'],False,1,[%(asctime)s][%(name)s][%(levelname)s] - %(message)s,colorlog.ColoredFormatter,[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s] - %(message)s,purple,green,yellow,red,red,logging.StreamHandler,colorlog,ext://sys.stdout,logging.FileHandler,simple,${hydra.job.name}.log,INFO,"['console', 'file']",False,MULTIRUN,[],.hydra,['hydra.mode=MULTIRUN'],"['benchmark.training_arguments.per_device_train_batch_size=4', 'model=NousResearch/Llama-2-7b-hf']",cli,True,"benchmark.training_arguments.per_device_train_batch_size=4,model=NousResearch/Llama-2-7b-hf",6,6,fp16,0,PCI_BUS_ID,[],=,",",[],1.3.2,1.3,/workspace/optimum-benchmark/examples/training-llamas,"[{'path': 'hydra.conf', 'schema': 'pkg', 'provider': 'hydra'}, {'path': 'optimum_benchmark', 'schema': 'pkg', 'provider': 'main'}, {'path': 'hydra_plugins.hydra_colorlog.conf', 'schema': 'pkg', 'provider': 'hydra-colorlog'}, {'path': '/workspace/optimum-benchmark/examples/training-llamas/configs', 'schema': 'file', 'provider': 'command-line'}, {'path': '', 'schema': 'structured', 'provider': 'schema'}]",/workspace/optimum-benchmark/examples/training-llamas/experiments/hf-dgx-01/NousResearch/Llama-2-7b-hf/fp16/4,training,process,pytorch,default,,colorlog,colorlog,default,default,basic,basic,default,False,,,,, -12,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,spawn,pytorch,2.1.1+cu118,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,42,,,True,1.0,False,True,,float16,False,False,False,,False,False,False,,False,False,,,,,,,,,,,,,,,,,,training,optimum_benchmark.benchmarks.training.benchmark.TrainingBenchmark,40,160,256,1,80,3000,16000,True,./trainer_output,False,False,True,False,False,none,140,1,fp16,cuda,NousResearch/Llama-2-7b-hf,text-generation,main,,False,False,1.14.1,,4.35.2,,0.25.0,,,,3.10.12,Linux, AMD EPYC 7742 64-Core Processor,128,540671,['NVIDIA A100-SXM4-80GB'],12.218624591827393,3.273690888805487,29.746723175048828,3.361714815159161,41.965349197387695,2.3829183341151587,experiments/${oc.env:HOSTNAME}/${model}/${experiment_name},experiments/${oc.env:HOSTNAME}/${model}/${experiment_name},${benchmark.training_arguments.per_device_train_batch_size},hydra._internal.core_plugins.basic_launcher.BasicLauncher,hydra._internal.core_plugins.basic_sweeper.BasicSweeper,,"1,2,4,8,16,32,64,128","NousResearch/Llama-2-7b-hf,NousResearch/Llama-2-13b-hf",${hydra.job.name},"${hydra.help.app_name} is powered by Hydra. -","Powered by Hydra (https://hydra.cc) -Use --hydra-help to view Hydra specific help -","${hydra.help.header} -== Configuration groups == -Compose your configuration from those groups (group=option) - -$APP_CONFIG_GROUPS - -== Config == -Override anything in the config (foo.bar=value) - -$CONFIG - -${hydra.help.footer} -","Hydra (${hydra.runtime.version}) -See https://hydra.cc for more info. - -== Flags == -$FLAGS_HELP - -== Configuration groups == -Compose your configuration from those groups (For example, append hydra/job_logging=disabled to command line) - -$HYDRA_CONFIG_GROUPS - -Use '--cfg hydra' to Show the Hydra config. 
-",???,1,colorlog.ColoredFormatter,[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s,logging.StreamHandler,colorlog,ext://sys.stdout,INFO,['console'],False,1,[%(asctime)s][%(name)s][%(levelname)s] - %(message)s,colorlog.ColoredFormatter,[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s] - %(message)s,purple,green,yellow,red,red,logging.StreamHandler,colorlog,ext://sys.stdout,logging.FileHandler,simple,${hydra.job.name}.log,INFO,"['console', 'file']",False,MULTIRUN,[],.hydra,['hydra.mode=MULTIRUN'],"['benchmark.training_arguments.per_device_train_batch_size=1', 'model=NousResearch/Llama-2-7b-hf']",cli,True,"benchmark.training_arguments.per_device_train_batch_size=1,model=NousResearch/Llama-2-7b-hf",0,0,fp16,0,PCI_BUS_ID,[],=,",",[],1.3.2,1.3,/workspace/optimum-benchmark/examples/training-llamas,"[{'path': 'hydra.conf', 'schema': 'pkg', 'provider': 'hydra'}, {'path': 'optimum_benchmark', 'schema': 'pkg', 'provider': 'main'}, {'path': 'hydra_plugins.hydra_colorlog.conf', 'schema': 'pkg', 'provider': 'hydra-colorlog'}, {'path': '/workspace/optimum-benchmark/examples/training-llamas/configs', 'schema': 'file', 'provider': 'command-line'}, {'path': '', 'schema': 'structured', 'provider': 'schema'}]",/workspace/optimum-benchmark/examples/training-llamas/experiments/hf-dgx-01/NousResearch/Llama-2-7b-hf/fp16/1,training,process,pytorch,default,,colorlog,colorlog,default,default,basic,basic,default,False,,,,, -13,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,spawn,pytorch,2.1.1+cu118,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,42,,,True,1.0,False,True,,float16,False,False,False,,False,False,False,,False,False,,,,,,,,,,,,,,,,,,training,optimum_benchmark.benchmarks.training.benchmark.TrainingBenchmark,40,160,256,1,80,3000,16000,True,./trainer_output,False,False,True,False,False,none,140,2,fp16,cuda,NousResearch/Llama-2-7b-hf,text-generation,main,,False,False,1.14.1,,4.35.2,,0.25.0,,,,3.10.12,Linux, AMD EPYC 7742 64-Core Processor,128,540671,['NVIDIA A100-SXM4-80GB'],14.054343223571776,5.692190572507505,34.34596490859985,5.823100341837308,48.40031003952026,4.132204934982734,experiments/${oc.env:HOSTNAME}/${model}/${experiment_name},experiments/${oc.env:HOSTNAME}/${model}/${experiment_name},${benchmark.training_arguments.per_device_train_batch_size},hydra._internal.core_plugins.basic_launcher.BasicLauncher,hydra._internal.core_plugins.basic_sweeper.BasicSweeper,,"1,2,4,8,16,32,64,128","NousResearch/Llama-2-7b-hf,NousResearch/Llama-2-13b-hf",${hydra.job.name},"${hydra.help.app_name} is powered by Hydra. -","Powered by Hydra (https://hydra.cc) -Use --hydra-help to view Hydra specific help -","${hydra.help.header} -== Configuration groups == -Compose your configuration from those groups (group=option) - -$APP_CONFIG_GROUPS - -== Config == -Override anything in the config (foo.bar=value) - -$CONFIG - -${hydra.help.footer} -","Hydra (${hydra.runtime.version}) -See https://hydra.cc for more info. - -== Flags == -$FLAGS_HELP - -== Configuration groups == -Compose your configuration from those groups (For example, append hydra/job_logging=disabled to command line) - -$HYDRA_CONFIG_GROUPS - -Use '--cfg hydra' to Show the Hydra config. 
-",???,1,colorlog.ColoredFormatter,[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s,logging.StreamHandler,colorlog,ext://sys.stdout,INFO,['console'],False,1,[%(asctime)s][%(name)s][%(levelname)s] - %(message)s,colorlog.ColoredFormatter,[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s] - %(message)s,purple,green,yellow,red,red,logging.StreamHandler,colorlog,ext://sys.stdout,logging.FileHandler,simple,${hydra.job.name}.log,INFO,"['console', 'file']",False,MULTIRUN,[],.hydra,['hydra.mode=MULTIRUN'],"['benchmark.training_arguments.per_device_train_batch_size=2', 'model=NousResearch/Llama-2-7b-hf']",cli,True,"benchmark.training_arguments.per_device_train_batch_size=2,model=NousResearch/Llama-2-7b-hf",2,2,fp16,0,PCI_BUS_ID,[],=,",",[],1.3.2,1.3,/workspace/optimum-benchmark/examples/training-llamas,"[{'path': 'hydra.conf', 'schema': 'pkg', 'provider': 'hydra'}, {'path': 'optimum_benchmark', 'schema': 'pkg', 'provider': 'main'}, {'path': 'hydra_plugins.hydra_colorlog.conf', 'schema': 'pkg', 'provider': 'hydra-colorlog'}, {'path': '/workspace/optimum-benchmark/examples/training-llamas/configs', 'schema': 'file', 'provider': 'command-line'}, {'path': '', 'schema': 'structured', 'provider': 'schema'}]",/workspace/optimum-benchmark/examples/training-llamas/experiments/hf-dgx-01/NousResearch/Llama-2-7b-hf/fp16/2,training,process,pytorch,default,,colorlog,colorlog,default,default,basic,basic,default,False,,,,, -14,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,spawn,pytorch,2.1.1+cu118,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,42,,,True,1.0,False,True,,float16,False,False,False,,False,False,False,,False,False,,,,,,,,,,,,,,,,,,training,optimum_benchmark.benchmarks.training.benchmark.TrainingBenchmark,40,160,256,1,80,3000,16000,True,./trainer_output,False,False,True,False,False,none,140,8,fp16,cuda,NousResearch/Llama-2-7b-hf,text-generation,main,,False,False,1.14.1,,4.35.2,,0.25.0,,,,3.10.12,Linux, AMD EPYC 7742 64-Core Processor,128,540671,['NVIDIA A100-SXM4-80GB'],27.57860779762268,11.603196301576345,67.7247965335846,11.812512417121573,95.30340623855592,8.394243517356594,experiments/${oc.env:HOSTNAME}/${model}/${experiment_name},experiments/${oc.env:HOSTNAME}/${model}/${experiment_name},${benchmark.training_arguments.per_device_train_batch_size},hydra._internal.core_plugins.basic_launcher.BasicLauncher,hydra._internal.core_plugins.basic_sweeper.BasicSweeper,,"1,2,4,8,16,32,64,128","NousResearch/Llama-2-7b-hf,NousResearch/Llama-2-13b-hf,NousResearch/Llama-2-70b-hf",${hydra.job.name},"${hydra.help.app_name} is powered by Hydra. -","Powered by Hydra (https://hydra.cc) -Use --hydra-help to view Hydra specific help -","${hydra.help.header} -== Configuration groups == -Compose your configuration from those groups (group=option) - -$APP_CONFIG_GROUPS - -== Config == -Override anything in the config (foo.bar=value) - -$CONFIG - -${hydra.help.footer} -","Hydra (${hydra.runtime.version}) -See https://hydra.cc for more info. - -== Flags == -$FLAGS_HELP - -== Configuration groups == -Compose your configuration from those groups (For example, append hydra/job_logging=disabled to command line) - -$HYDRA_CONFIG_GROUPS - -Use '--cfg hydra' to Show the Hydra config. 
-",???,1,colorlog.ColoredFormatter,[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s,logging.StreamHandler,colorlog,ext://sys.stdout,INFO,['console'],False,1,[%(asctime)s][%(name)s][%(levelname)s] - %(message)s,colorlog.ColoredFormatter,[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s] - %(message)s,purple,green,yellow,red,red,logging.StreamHandler,colorlog,ext://sys.stdout,logging.FileHandler,simple,${hydra.job.name}.log,INFO,"['console', 'file']",False,MULTIRUN,[],.hydra,['hydra.mode=MULTIRUN'],"['benchmark.training_arguments.per_device_train_batch_size=8', 'model=NousResearch/Llama-2-7b-hf']",cli,True,"benchmark.training_arguments.per_device_train_batch_size=8,model=NousResearch/Llama-2-7b-hf",9,9,fp16,0,PCI_BUS_ID,[],=,",",[],1.3.2,1.3,/workspace/optimum-benchmark/examples/training-llamas,"[{'path': 'hydra.conf', 'schema': 'pkg', 'provider': 'hydra'}, {'path': 'optimum_benchmark', 'schema': 'pkg', 'provider': 'main'}, {'path': 'hydra_plugins.hydra_colorlog.conf', 'schema': 'pkg', 'provider': 'hydra-colorlog'}, {'path': '/workspace/optimum-benchmark/examples/training-llamas/configs', 'schema': 'file', 'provider': 'command-line'}, {'path': '', 'schema': 'structured', 'provider': 'schema'}]",/workspace/optimum-benchmark/examples/training-llamas/experiments/hf-dgx-01/NousResearch/Llama-2-7b-hf/fp16/8,training,process,pytorch,default,,colorlog,colorlog,default,default,basic,basic,default,False,,,,, -15,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,spawn,pytorch,2.1.1+cu118,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,42,,,True,1.0,False,True,,float16,False,False,False,,False,False,False,gptq,False,False,lora,,,,CAUSAL_LM,False,,8,,8,0,False,none,,True,,,training,optimum_benchmark.benchmarks.training.benchmark.TrainingBenchmark,40,160,256,1,80,3000,16000,True,./trainer_output,False,False,True,False,False,none,140,16,fp16+peft+gptq-4bit,cuda,NousResearch/Llama-2-7b-hf,text-generation,main,,False,False,1.14.1,,4.35.2,,0.24.1,,,,3.10.12,Linux, AMD EPYC 7742 64-Core Processor,128,540671,['NVIDIA A100-SXM4-80GB'],38.72175240516663,16.528177580997216,95.55383205413818,16.744488060860643,134.27558636665344,11.91579231410715,experiments/${benchmark.name}/${oc.env:HOSTNAME}/${model}/${experiment_name},experiments/${benchmark.name}/${oc.env:HOSTNAME}/${model}/${experiment_name},${benchmark.training_arguments.per_device_train_batch_size},hydra._internal.core_plugins.basic_launcher.BasicLauncher,hydra._internal.core_plugins.basic_sweeper.BasicSweeper,,"1,2,4,8,16,32,64,128","NousResearch/Llama-2-7b-hf,NousResearch/Llama-2-13b-hf",${hydra.job.name},"${hydra.help.app_name} is powered by Hydra. -","Powered by Hydra (https://hydra.cc) -Use --hydra-help to view Hydra specific help -","${hydra.help.header} -== Configuration groups == -Compose your configuration from those groups (group=option) - -$APP_CONFIG_GROUPS - -== Config == -Override anything in the config (foo.bar=value) - -$CONFIG - -${hydra.help.footer} -","Hydra (${hydra.runtime.version}) -See https://hydra.cc for more info. - -== Flags == -$FLAGS_HELP - -== Configuration groups == -Compose your configuration from those groups (For example, append hydra/job_logging=disabled to command line) - -$HYDRA_CONFIG_GROUPS - -Use '--cfg hydra' to Show the Hydra config. 
-",???,1,colorlog.ColoredFormatter,[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s,logging.StreamHandler,colorlog,ext://sys.stdout,INFO,['console'],False,1,[%(asctime)s][%(name)s][%(levelname)s] - %(message)s,colorlog.ColoredFormatter,[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s] - %(message)s,purple,green,yellow,red,red,logging.StreamHandler,colorlog,ext://sys.stdout,logging.FileHandler,simple,${hydra.job.name}.log,INFO,"['console', 'file']",False,MULTIRUN,[],.hydra,['hydra.mode=MULTIRUN'],"['benchmark.training_arguments.per_device_train_batch_size=16', 'model=NousResearch/Llama-2-7b-hf']",cli,True,"benchmark.training_arguments.per_device_train_batch_size=16,model=NousResearch/Llama-2-7b-hf",8,8,fp16+peft+gptq-4bit,0,PCI_BUS_ID,[],=,",",[],1.3.2,1.3,/workspace/optimum-benchmark/examples/training-llamas,"[{'path': 'hydra.conf', 'schema': 'pkg', 'provider': 'hydra'}, {'path': 'optimum_benchmark', 'schema': 'pkg', 'provider': 'main'}, {'path': 'hydra_plugins.hydra_colorlog.conf', 'schema': 'pkg', 'provider': 'hydra-colorlog'}, {'path': '/workspace/optimum-benchmark/examples/training-llamas/configs', 'schema': 'file', 'provider': 'command-line'}, {'path': '', 'schema': 'structured', 'provider': 'schema'}]",/workspace/optimum-benchmark/examples/training-llamas/experiments/training/hf-dgx-01/NousResearch/Llama-2-7b-hf/fp16+peft+gptq-4bit/16,training,process,pytorch,default,,colorlog,colorlog,default,default,basic,basic,default,False,,,,4,True -16,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,spawn,pytorch,2.1.1+cu118,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,42,,,True,1.0,False,True,,float16,False,False,False,,False,False,False,gptq,False,False,lora,,,,CAUSAL_LM,False,,8,,8,0,False,none,,True,,,training,optimum_benchmark.benchmarks.training.benchmark.TrainingBenchmark,40,160,256,1,80,3000,16000,True,./trainer_output,False,False,True,False,False,none,140,4,fp16+peft+gptq-4bit,cuda,NousResearch/Llama-2-7b-hf,text-generation,main,,False,False,1.14.1,,4.35.2,,0.24.1,,,,3.10.12,Linux, AMD EPYC 7742 64-Core Processor,128,540671,['NVIDIA A100-SXM4-80GB'],15.456857919692991,10.351392296629063,37.6063711643219,10.63649556220649,53.06323051452637,7.538176551284372,experiments/${oc.env:HOSTNAME}/${model}/${experiment_name},experiments/${oc.env:HOSTNAME}/${model}/${experiment_name},${benchmark.training_arguments.per_device_train_batch_size},hydra._internal.core_plugins.basic_launcher.BasicLauncher,hydra._internal.core_plugins.basic_sweeper.BasicSweeper,,"1,2,4,8,16,32,64,128","NousResearch/Llama-2-7b-hf,NousResearch/Llama-2-13b-hf,NousResearch/Llama-2-70b-hf",${hydra.job.name},"${hydra.help.app_name} is powered by Hydra. -","Powered by Hydra (https://hydra.cc) -Use --hydra-help to view Hydra specific help -","${hydra.help.header} -== Configuration groups == -Compose your configuration from those groups (group=option) - -$APP_CONFIG_GROUPS - -== Config == -Override anything in the config (foo.bar=value) - -$CONFIG - -${hydra.help.footer} -","Hydra (${hydra.runtime.version}) -See https://hydra.cc for more info. - -== Flags == -$FLAGS_HELP - -== Configuration groups == -Compose your configuration from those groups (For example, append hydra/job_logging=disabled to command line) - -$HYDRA_CONFIG_GROUPS - -Use '--cfg hydra' to Show the Hydra config. 
-",???,1,colorlog.ColoredFormatter,[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s,logging.StreamHandler,colorlog,ext://sys.stdout,INFO,['console'],False,1,[%(asctime)s][%(name)s][%(levelname)s] - %(message)s,colorlog.ColoredFormatter,[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s] - %(message)s,purple,green,yellow,red,red,logging.StreamHandler,colorlog,ext://sys.stdout,logging.FileHandler,simple,${hydra.job.name}.log,INFO,"['console', 'file']",False,MULTIRUN,[],.hydra,['hydra.mode=MULTIRUN'],"['benchmark.training_arguments.per_device_train_batch_size=4', 'model=NousResearch/Llama-2-7b-hf']",cli,True,"benchmark.training_arguments.per_device_train_batch_size=4,model=NousResearch/Llama-2-7b-hf",6,6,fp16+peft+gptq-4bit,0,PCI_BUS_ID,[],=,",",[],1.3.2,1.3,/workspace/optimum-benchmark/examples/training-llamas,"[{'path': 'hydra.conf', 'schema': 'pkg', 'provider': 'hydra'}, {'path': 'optimum_benchmark', 'schema': 'pkg', 'provider': 'main'}, {'path': 'hydra_plugins.hydra_colorlog.conf', 'schema': 'pkg', 'provider': 'hydra-colorlog'}, {'path': '/workspace/optimum-benchmark/examples/training-llamas/configs', 'schema': 'file', 'provider': 'command-line'}, {'path': '', 'schema': 'structured', 'provider': 'schema'}]",/workspace/optimum-benchmark/examples/training-llamas/experiments/hf-dgx-01/NousResearch/Llama-2-7b-hf/fp16+peft+gptq-4bit/4,training,process,pytorch,default,,colorlog,colorlog,default,default,basic,basic,default,False,,,,4,True -17,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,spawn,pytorch,2.1.1+cu118,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,42,,,True,1.0,False,True,,float16,False,False,False,,False,False,False,gptq,False,False,lora,,,,CAUSAL_LM,False,,8,,8,0,False,none,,True,,,training,optimum_benchmark.benchmarks.training.benchmark.TrainingBenchmark,40,160,256,1,80,3000,16000,True,./trainer_output,False,False,True,False,False,none,140,1,fp16+peft+gptq-4bit,cuda,NousResearch/Llama-2-7b-hf,text-generation,main,,False,False,1.14.1,,4.35.2,,0.24.1,,,,3.10.12,Linux, AMD EPYC 7742 64-Core Processor,128,540671,['NVIDIA A100-SXM4-80GB'],9.378505945205688,4.26507166852606,22.25546908378601,4.493277568022772,31.63397645950317,3.161158070912042,experiments/${oc.env:HOSTNAME}/${model}/${experiment_name},experiments/${oc.env:HOSTNAME}/${model}/${experiment_name},${benchmark.training_arguments.per_device_train_batch_size},hydra._internal.core_plugins.basic_launcher.BasicLauncher,hydra._internal.core_plugins.basic_sweeper.BasicSweeper,,"1,2,4,8,16,32,64,128","NousResearch/Llama-2-7b-hf,NousResearch/Llama-2-13b-hf,NousResearch/Llama-2-70b-hf",${hydra.job.name},"${hydra.help.app_name} is powered by Hydra. -","Powered by Hydra (https://hydra.cc) -Use --hydra-help to view Hydra specific help -","${hydra.help.header} -== Configuration groups == -Compose your configuration from those groups (group=option) - -$APP_CONFIG_GROUPS - -== Config == -Override anything in the config (foo.bar=value) - -$CONFIG - -${hydra.help.footer} -","Hydra (${hydra.runtime.version}) -See https://hydra.cc for more info. - -== Flags == -$FLAGS_HELP - -== Configuration groups == -Compose your configuration from those groups (For example, append hydra/job_logging=disabled to command line) - -$HYDRA_CONFIG_GROUPS - -Use '--cfg hydra' to Show the Hydra config. 
-",???,1,colorlog.ColoredFormatter,[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s,logging.StreamHandler,colorlog,ext://sys.stdout,INFO,['console'],False,1,[%(asctime)s][%(name)s][%(levelname)s] - %(message)s,colorlog.ColoredFormatter,[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s] - %(message)s,purple,green,yellow,red,red,logging.StreamHandler,colorlog,ext://sys.stdout,logging.FileHandler,simple,${hydra.job.name}.log,INFO,"['console', 'file']",False,MULTIRUN,[],.hydra,['hydra.mode=MULTIRUN'],"['benchmark.training_arguments.per_device_train_batch_size=1', 'model=NousResearch/Llama-2-7b-hf']",cli,True,"benchmark.training_arguments.per_device_train_batch_size=1,model=NousResearch/Llama-2-7b-hf",0,0,fp16+peft+gptq-4bit,0,PCI_BUS_ID,[],=,",",[],1.3.2,1.3,/workspace/optimum-benchmark/examples/training-llamas,"[{'path': 'hydra.conf', 'schema': 'pkg', 'provider': 'hydra'}, {'path': 'optimum_benchmark', 'schema': 'pkg', 'provider': 'main'}, {'path': 'hydra_plugins.hydra_colorlog.conf', 'schema': 'pkg', 'provider': 'hydra-colorlog'}, {'path': '/workspace/optimum-benchmark/examples/training-llamas/configs', 'schema': 'file', 'provider': 'command-line'}, {'path': '', 'schema': 'structured', 'provider': 'schema'}]",/workspace/optimum-benchmark/examples/training-llamas/experiments/hf-dgx-01/NousResearch/Llama-2-7b-hf/fp16+peft+gptq-4bit/1,training,process,pytorch,default,,colorlog,colorlog,default,default,basic,basic,default,False,,,,4,True -18,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,spawn,pytorch,2.1.1+cu118,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,42,,,True,1.0,False,True,,float16,False,False,False,,False,False,False,gptq,False,False,lora,,,,CAUSAL_LM,False,,8,,8,0,False,none,,True,,,training,optimum_benchmark.benchmarks.training.benchmark.TrainingBenchmark,40,160,256,1,80,3000,16000,True,./trainer_output,False,False,True,False,False,none,140,2,fp16+peft+gptq-4bit,cuda,NousResearch/Llama-2-7b-hf,text-generation,main,,False,False,1.14.1,,4.35.2,,0.24.1,,,,3.10.12,Linux, AMD EPYC 7742 64-Core Processor,128,540671,['NVIDIA A100-SXM4-80GB'],11.28474497795105,7.089216473771431,27.20784854888916,7.350820100333349,38.49259519577026,5.195804517279647,experiments/${oc.env:HOSTNAME}/${model}/${experiment_name},experiments/${oc.env:HOSTNAME}/${model}/${experiment_name},${benchmark.training_arguments.per_device_train_batch_size},hydra._internal.core_plugins.basic_launcher.BasicLauncher,hydra._internal.core_plugins.basic_sweeper.BasicSweeper,,"1,2,4,8,16,32,64,128","NousResearch/Llama-2-7b-hf,NousResearch/Llama-2-13b-hf,NousResearch/Llama-2-70b-hf",${hydra.job.name},"${hydra.help.app_name} is powered by Hydra. -","Powered by Hydra (https://hydra.cc) -Use --hydra-help to view Hydra specific help -","${hydra.help.header} -== Configuration groups == -Compose your configuration from those groups (group=option) - -$APP_CONFIG_GROUPS - -== Config == -Override anything in the config (foo.bar=value) - -$CONFIG - -${hydra.help.footer} -","Hydra (${hydra.runtime.version}) -See https://hydra.cc for more info. - -== Flags == -$FLAGS_HELP - -== Configuration groups == -Compose your configuration from those groups (For example, append hydra/job_logging=disabled to command line) - -$HYDRA_CONFIG_GROUPS - -Use '--cfg hydra' to Show the Hydra config. 
-",???,1,colorlog.ColoredFormatter,[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s,logging.StreamHandler,colorlog,ext://sys.stdout,INFO,['console'],False,1,[%(asctime)s][%(name)s][%(levelname)s] - %(message)s,colorlog.ColoredFormatter,[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s] - %(message)s,purple,green,yellow,red,red,logging.StreamHandler,colorlog,ext://sys.stdout,logging.FileHandler,simple,${hydra.job.name}.log,INFO,"['console', 'file']",False,MULTIRUN,[],.hydra,['hydra.mode=MULTIRUN'],"['benchmark.training_arguments.per_device_train_batch_size=2', 'model=NousResearch/Llama-2-7b-hf']",cli,True,"benchmark.training_arguments.per_device_train_batch_size=2,model=NousResearch/Llama-2-7b-hf",3,3,fp16+peft+gptq-4bit,0,PCI_BUS_ID,[],=,",",[],1.3.2,1.3,/workspace/optimum-benchmark/examples/training-llamas,"[{'path': 'hydra.conf', 'schema': 'pkg', 'provider': 'hydra'}, {'path': 'optimum_benchmark', 'schema': 'pkg', 'provider': 'main'}, {'path': 'hydra_plugins.hydra_colorlog.conf', 'schema': 'pkg', 'provider': 'hydra-colorlog'}, {'path': '/workspace/optimum-benchmark/examples/training-llamas/configs', 'schema': 'file', 'provider': 'command-line'}, {'path': '', 'schema': 'structured', 'provider': 'schema'}]",/workspace/optimum-benchmark/examples/training-llamas/experiments/hf-dgx-01/NousResearch/Llama-2-7b-hf/fp16+peft+gptq-4bit/2,training,process,pytorch,default,,colorlog,colorlog,default,default,basic,basic,default,False,,,,4,True -19,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,spawn,pytorch,2.1.1+cu118,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,42,,,True,1.0,False,True,,float16,False,False,False,,False,False,False,gptq,False,False,lora,,,,CAUSAL_LM,False,,8,,8,0,False,none,,True,,,training,optimum_benchmark.benchmarks.training.benchmark.TrainingBenchmark,40,160,256,1,80,3000,16000,True,./trainer_output,False,False,True,False,False,none,140,8,fp16+peft+gptq-4bit,cuda,NousResearch/Llama-2-7b-hf,text-generation,main,,False,False,1.14.1,,4.35.2,,0.24.1,,,,3.10.12,Linux, AMD EPYC 7742 64-Core Processor,128,540671,['NVIDIA A100-SXM4-80GB'],23.256840229034424,13.759392800080551,57.13273501396179,14.002480360943691,80.38957738876343,9.951538818660604,experiments/${benchmark.name}/${oc.env:HOSTNAME}/${model}/${experiment_name},experiments/${benchmark.name}/${oc.env:HOSTNAME}/${model}/${experiment_name},${benchmark.training_arguments.per_device_train_batch_size},hydra._internal.core_plugins.basic_launcher.BasicLauncher,hydra._internal.core_plugins.basic_sweeper.BasicSweeper,,"1,2,4,8,16,32,64,128","NousResearch/Llama-2-7b-hf,NousResearch/Llama-2-13b-hf",${hydra.job.name},"${hydra.help.app_name} is powered by Hydra. -","Powered by Hydra (https://hydra.cc) -Use --hydra-help to view Hydra specific help -","${hydra.help.header} -== Configuration groups == -Compose your configuration from those groups (group=option) - -$APP_CONFIG_GROUPS - -== Config == -Override anything in the config (foo.bar=value) - -$CONFIG - -${hydra.help.footer} -","Hydra (${hydra.runtime.version}) -See https://hydra.cc for more info. - -== Flags == -$FLAGS_HELP - -== Configuration groups == -Compose your configuration from those groups (For example, append hydra/job_logging=disabled to command line) - -$HYDRA_CONFIG_GROUPS - -Use '--cfg hydra' to Show the Hydra config. 
-",???,1,colorlog.ColoredFormatter,[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s,logging.StreamHandler,colorlog,ext://sys.stdout,INFO,['console'],False,1,[%(asctime)s][%(name)s][%(levelname)s] - %(message)s,colorlog.ColoredFormatter,[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s] - %(message)s,purple,green,yellow,red,red,logging.StreamHandler,colorlog,ext://sys.stdout,logging.FileHandler,simple,${hydra.job.name}.log,INFO,"['console', 'file']",False,MULTIRUN,[],.hydra,['hydra.mode=MULTIRUN'],"['benchmark.training_arguments.per_device_train_batch_size=8', 'model=NousResearch/Llama-2-7b-hf']",cli,True,"benchmark.training_arguments.per_device_train_batch_size=8,model=NousResearch/Llama-2-7b-hf",6,6,fp16+peft+gptq-4bit,0,PCI_BUS_ID,[],=,",",[],1.3.2,1.3,/workspace/optimum-benchmark/examples/training-llamas,"[{'path': 'hydra.conf', 'schema': 'pkg', 'provider': 'hydra'}, {'path': 'optimum_benchmark', 'schema': 'pkg', 'provider': 'main'}, {'path': 'hydra_plugins.hydra_colorlog.conf', 'schema': 'pkg', 'provider': 'hydra-colorlog'}, {'path': '/workspace/optimum-benchmark/examples/training-llamas/configs', 'schema': 'file', 'provider': 'command-line'}, {'path': '', 'schema': 'structured', 'provider': 'schema'}]",/workspace/optimum-benchmark/examples/training-llamas/experiments/training/hf-dgx-01/NousResearch/Llama-2-7b-hf/fp16+peft+gptq-4bit/8,training,process,pytorch,default,,colorlog,colorlog,default,default,basic,basic,default,False,,,,4,True diff --git a/examples/training-llamas/artifacts/Llama-2-7b-hf/peak_training_throughput.png b/examples/training-llamas/artifacts/Llama-2-7b-hf/peak_training_throughput.png deleted file mode 100644 index ed4569499cab04da07cfab0e7dcd3cced420e094..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 40152 zcma&O2{_i>+CKa<&r@j<85&4Ll!%Za(u7n(h9Z)XnjFhR#tY|WnsZ1e?%8-f( zg($OR9=>yV_WQj1_)JuiMXxE=UbepkV{);Ug>S(rW$|A@8MU00+; z@K3ezJ}L-58C|u(M+5&%u`gA{kCQyEvIz0;@aQR*(_bfl5N|=NjbBIBO4F<(e`Xy^ zEy3@*W&iL0T%qja$E8=VUUdo>+4c14Q-|~C=aSCu+__Wlv0)bfHPHv2eclTTGFH{R zV!W({?tgU#b(Nb7oO=?UJh`S7zUHj}tEX=Om!MjruKE1zRDt{5q3_cpl6V*Yn>RnM zurE3Fo)3R$lJD^S=I8kB_G^MyNOA7pzu(}g$*PSTH)iJLRb^Y%6w6;7;#jJ>?dFK^ z8`s|7zmr^kHLO%}m5x*K-h9zjTxN4={HW{XaANECpXn3rRa`qKU*C{D_2F*NJ;jS& zpXEBM!UF;Vn1Yu}iboZ^c!6h0u&RkZ_PO?vU*{p-O-J^1E6GSpt6t;VmU&^os6c(@ zzNVH|vZ^m_Q??bG&sfRHkmZt=Vd4{Z?ZpE3cAQrm`SCh2BO@GN+Pv#cL|{cIi^*NN1H{ zzJo>d`I5?4SC~p%TcS1{RZBPU`ZS_)^xKQ`&t}g~j}L_KZ9AP{R^p}KJYmu|A$d!| zd4t#VFVW|=AAjEb%*Do5{=GcF@pogoUCdDPGo}?%rW&6f9(I6tk={Nv)hej*TmFkT9mi4&LxMf*~!Ym!7({CwaeJp zSbXTyqZO5*tFCaYmP@!e)RtWl9TQ7EOuxVXPLfg1X{upqz-0!u6R$2aH07LGp`@f_ z8MgVJuP@Du^L_XAb8SCv(9qJd-dMYj*|(yiVxe%-GI~mVzTo-u=f$oqQils&9eJfp zO;Y<~%)EAC%T%WGrk13qr|XJGnf={fZGR-g^qPofaKe!%M#rL;ZPcfCbaaf}abEnw zP@AN>OZhtW|!_UQkJKv9&u9SBW;@fINtzLcSF6Y$ELe~*mcX#(=Pu=d{ zzi-o;zvc7eV>hLisJ7955G608T!%Y+4aO(hy^~LWdRX`IGo8<6LuWN#?U>SDrrI z%qM3T`10b=L%SKT)p76gMvpu;^j)%EM^#IU>C>a5Ra1X@p7CGm`}{=g=u>`|^XI>J zb@>S>Ug$Dn-~ zf?Kw186F$+!yoS3x3BKeQK@z7)}=j8))kX>9X5Y#RtsLy}O9q zk&%(%iHVqV``z5#Pfheh^|ux*<>1@;X4IXg^vxUQwYx)A&CP|>)YVm92vYgp3(7lO zdMRgT=ia`)WW>m1PrSFm=ePM$<6egXL;p-RWu4S-Mt-R)_Po^8)TDa!=!y#$F7&n* z$euWPQiU%7-(#4Q!Zk6y4+hVCd3xNYtx&=4?AgTR2FmQWdG-QQT(uIMq4GPaarbcVOdLwZZ z78a6lq@$yIezs+~lv#0P^d?@`$EHPm*olT7&-b^tHH;!Kgx9U3*VEH88=Ec`eN6LC zRA#d~1KVRs#>x}~BAG^)Y`aGuNxJ*Jl6Q+6ZMIsR%`|7*qI=O@My*RT7~&@m?9 z`^aA$Q#9;-Q3JXRjtZGC^9%j z%};i6k*#@Rl%wWL%fP*IWuPw|6B6r*x`bT|1?|nxtku=k>5_L}Q?s*^`1#>p2$uIp 
zI0S$ujkCxH6^fA-3DWWz;=sB*0nmVVdsa{o4d-TMMj+wYo6a#gO^tkg&5lk5G$jMC z0$f^biUYY>igdgs`d-VT=BqpJ+9hXjki4_)ap{$84QWNwLoGG;TIa9FEN(&{8*C>n+|D40xS#|aBF+K240O9O#67~p!HQ`D z1;`4~_GDkcbgO|lc|(~*d>Q210c_?}$Um)@8Idm=7&9Vu3Fel--hcZXM}Yjp0rLdH zJ^0Pap^>_mPNit%gW0M z^{1ku;@89gQ$7<58jKnbL4M)0l5+t1Ji=CVdv%3Ii2uI1hrzNWtU~BAaZ5D3`>*7y zAkMP6ZoUTi4z$mpG;$EN17293iM>QsiXZdjJ$7s&xLXmIV0BZ@gbt;51X|jO8~{O$ zv6)%OwQDj2JpA?bjgE{=9Jd7Q0+VKK9}m_o4kLmdgBM81UA>=+lWzR6bN8FHAShZR zqk$@FQ&PcGNtx!t1!SbB>zODh#V=d2!qC=MFx@VT2kRb7pd&;1UqaM;3xMH(qVROZ z<&iUBO{-H*+RPF z6$+n1=s|~;1nYG4gAcaiaUAbpi8Zygu{t?@ekps3bk>`h&5j49_!<}V`Qd5yCZ1g0 z&84AeXlJ1o*=JKTX3UtQ?W?i1$oC{LAZ9fNr;(pQI}6hvd-eMDv)s#Yt^#fo5;R32Sy=%|FmO461o%`F zn%LdeheZM|+<13QapGAeJ06(Gwcg&|w+{o>f(F8^?=6{oSQHN^V6@STxIL6O$ZpuM z6#$F%H-bJi(CO&!Di?s8SO!HgfY+hEd1ro!Vh8RmCu_$+5^D!YB7innsFB{rnZN}Q zmkQ9PC#uDhjI8@O=fQc30BnOQ0U?JEe+Re;x*|d@&iDVh3Oqe^@ZH2^WoH0y;prvz zM`Ph~sIe4O)8EhJI^-6>A~Nu0X4C5%M6#kzH&ps-O*K+7W> zo(aLvtE(on@gX#RGyQifn~(bVAd@q{czQ~cBG%lq{t_Tbn3X|cxCaNe-pMZbg~32x zHpAqh2CqjN@Bw57sk7b7j(N9M;|u^%ss%$Oppst-30V+=0~&?Q<(a;Lqj)hsR*Aad zq|NclOO{ygTM0?1lN#CZKjX;~!&)Ww^Hydi9A)w{)~gx(|7^=UVmAu%BB%+Eb8I@ zN^W+{lPj4nMq7sKLB#*+-XAvPuz)Ki*q1v2ZXA4Eb`cdvA`J$~CUWSUhrV>yyLX!y zc}JgpKiB3iz;87m??Knt|49qD@@C);%Kb%T=CVkHzUuqk-6FZ<}%jz}(-c0Zl=Jf>Tc z59}~v58sO7mhlS_=K{Pg1xg?kKsgn0VY_#_osG?G zrH!}b&3`#QcaZBw2La&Dkj-Fv(@lXI?vAO>>LPnfn&VmSUeu0sv z=MtQ^(Ks=h1EwdE{<@@yZVPePXt24?l-8^v&p~mkfsc{~)E`>MMsCSJV7f%Uj2+xi z7*DI@E#jbG)(JR`u4l+au3M8q2yi*iNYjS;`(rkrtJ*;}0Kum3{jlUWAI|ev_d@O3 zLAAhOaa}&vn-_rzC7$Ft0yp6Y^aAev44k)?IE^P^*R7YY|Mk*sf&@W?!rci1MX##j zq>xQ=C7}^y^MqO_2-9e#()-aa{^5&nqrIk|WclN`VP45Z6Qs#sp`rQCEo1`HAk_jx zjZ`pkcC(&o6ZF7II&a0^G|NwD{YO9b8Wd{q$5;}-?Kd$(1~Uj&gPs&E=0En&_Y&zd za#ZLmj}A&-SoQ~?H#ZK% zSq<;hoZ!!T4$&>7GMoL4cztT=^`| zF@WVL3TrzAQ8H#^gd-F7CI1pK1Yz8BgJ~=?CHBd_*YBedf)~mq@<*r>M31t{SdabFC-6lnvG4H%@J;6!5x3V zD2W%Fo4ee0!-n>6aHU1GN?lsO()opY!d)=-WV4`FYiUVyOYJuYD4l@m2$|;@(h*ij zZ;9ps`ar6*F55wq#TUy*JeRnCTvniYURqYRw6->fuZC&6xbZq2&l99vMUPq+c)B74 zZXq#tY5e)~;h7fXo5rH;59x z5a5g2_2-z~8BY&CLNx(KObyz5$1Y5sQapR~=uy9sv#MlEIw0)>&yMYRl8zX9pJSDr zfR-d6N*oKxuvvsnR`lzPwW?w4SXhF%?diIEiJ7pv8V6cfDdM4*qaZA0;Ke)&IB*o8 zbcc(aOtzfDf5N>upqPZr4d)l$k(>gs^k|eJ4kQNtNS&rVJkm4oG+>;__|PekE}H>d z`|ahBAOmlm1uoA?M5-(l8J^PHuo5Q)3x!EC;s#7BZfneV!SWvF8?-Rn^eirG?HpL_`~m`W9UUF`xw^ldlfcqh z$?tieEdeh8?RnM&HyN=n?aIU9$SW^czt+PLr)23dj#3A=1MrJno{%fk5}sYp5T$`S zd3LifD-G?PCy=lQW&2uZF!_jRL8aS+P=!#6bxj~*3Ob#B^`CEf10`7CiUwL3PeMbt zv^ujlooT%g(0!P@f54C&zdpuCyb(^&F}Q>X>!^~)k7>wE1Vu$f+5XCLuu2zjjUnTJ zfw{>jexPmSzC=rCdOwOD&D-?U5*k8Xh!*8U}`*u^}Xe@GT7BJ*b zV?AUfxY5sao zg+uhQ<|pighKrJAMgDR8JIKF0ts3aBw+#yU*`6T!dM~CE<8`=-{M@H+VsE%HZd3_Sb5o4 z9Gq(-_K^o_OaglxFpQMB#I%ibVYw+`-j=~q^7=zUSdsJ+B!v&)$)YQo4&|(87Yv&6 z26gfSSjEzTNQjhx%C`+h6yXA9KODGZE=xL8@%>=D0IZ_eOyNaDCUDgaV(Relh#2Yb zRMFAzGjM_NDGE%Z>Vj9o|5hSfdCe_?`BV1rf~PT|r)3(RO9av{&4>cipp9o9=l)tc zWLzJ=P!?S-5ShLB&F^YzNPtduTY+bSdBwjx3Iz}P)2bwkJbV%^2*f$RlTf(%TZUZQdY&tGc=BYWnOg{`t}(%Hm_57pqB_wD^of| z_Xg)1*s_587HCT3s$X}Tm_Q^1oMEn$R;R}!9z!D}TBTZE~?{>K^aX52XR@=LuIc)%3X2&j)N{2-zz7+~={jb;Sfa*IJ z*;_+U;0`ua0E`B@w1FTq>KjlP(tic>j1Gb4!&Yy$vtpHTAe09ttz%OqTsuH!*J6SU z4!U3*Y3O|CKyK>U;fud_T^fv$1>v+M`7|Xw8Y)O)SOl5bsIBXT7eX>kM)l0*Ceezws9YTt?gW+D$MU@aQO)> zCW|yql&eT4d0`TQLZK9SziBPb#5t+DCgWCTs#{%m_e;F(za%@_+BaS{QRdE-Otw+8HtF07M1dt;p1L(4|7ytcfq-m2NtM@iO zxMAtv_h~hPe2PM35b^WVWAZ4aYZRJ_{3cw$<2$eIw+gy6Rtkc&d7w^)BQBy*IC@Xu ztHytSSk5N6hud6Sn6%3kD}*U${2h+Mw(7GAz$7;SbpOx2kfzx}+>Hw5V$2$X??tBo z^=bQtys1fz5cBF8ZKL$5WdqE6$3R5{`C(z$|1#vfFr{lIN3eLF_Jkhacwbs>E+XMi zgAZknUc?8#YRq6I>lFOKQk>w9;Jb>6BP3W# 
zVf?FzidY7Ui>rb!$fvR5XCOjCRU#U}JsFgVL?G(no?Td*za7oG0RaID7f3QJB?}QF z_Os1j;Rd2~40e#W7*F&Yjc)MpB(X;dBejI{3evU+_IR*akO@+$ESQEZHkWPHe?=aH zjB(KHk}Frl&#F2tq>z7}TSdn&N~WZ2XkvX;UM^EPO%}dYm28!PIQmA!qKQ+6v^oiB z8{h|yo4oy%E@r@igIg4YAd7}MUHE9R^La6s@H|x;5ca!;)K8MBx}*!&CJdlO1c4{`H9>@k;P?ffRDXm?x8(U+Dk|69Gs()RA!C5^K+@sR_QY$DKr-Ak8flVA z_=BX?7dD=y7)?OCy5P7KUisx(m2o9fDws~FGuh9Ab)1bNmSb1djwR?f`|CcU5xjjY zfZRXD52lTCbocMy5BpYd;?bj@yYevpg}1w>2Qgd_K4~+gTuus`$gO4=F){*aO|WER znB#-o=dXP6_wV0h(@`=+-iA!mJK$g-Gunt2Y5^RLY{2?LI8MWn0H2>7hZRYGnzScEqLQ|2nB?tKkEH~C^1vcNHW3Ic#I@3z&OVMG;YF!urK!aJ5$bd!5Bj099Iy* zlA0(=IL7=hMOGgjWJTUJ7;}PWML?7K3={#c_TZg=+6nq_H&x#7B`0}W zo6^vR66DM$Tm;5&drFV|3J>6p!DGFU{;fgvHm-Q41p z(|Ae#IPFkV>TVXD*!Gm#RLdKISeDl!NXp!U#&+(A;~P9W%+ z6{uOWK8cgMS@m`6cs8{N_lxK46#g~jq$MNBymDY>Bz<(%*5(Gf&Xd7OdgPd*-`Iq z?(RMX>O)&$6m}J^wfw}jYpzz^bllY%J3G7tv@U4&A1&{WN0sZ@!M%}jVB48InDlTp<3Vvu&&8ODQt#?&=WncyHDhCD1Kq&%E8Q3Fd)%; zT^dFfylu|23`OVC%8NltLhak4mNHs91_QlQCJ1#h1W zuwf?3Fl-OLF)Kw1g@6h}T*5|?d6O7Hvj*FMA9f#pYzvA?=$P=h)5DI-v^t=0n6n9q z9*RQx!?PW(uOW)_Gztk_OY8B`rZ4~28O|+by1QzoTj=r}r6OIB#{P1I?NJ8hXU3|w zh2$=f4{fXQAAI5e^7-q(_RD{IeEa{8U%w7o4Wis&Br8dEv$HcCep%Pu;nD67EXR%= zgVg>C=0R%UxVDQpOBU4*g*eV86iyc-*?@c~mh6#Jsxei|}AL$KTz|4}Oq zOw9rq?6n93G0~$sOLQe!EdxqeEs#OMVV{LHZ@364aXqkoPzGoh%`5Qzk>lt`0gC-B zml$EBbWIPi$2egY4?=39&u}b2P8r@teCrCto-5x)LzbAEbno6WcmOA#F?TqkuHdlC zatVLjWaK5t&z9c5wADSV&0Thvas5uKhZ}wUuF4GWDXTNubgg;mzy4!;&M7JJqsZ~# z)x|ioCN}|U)ZE&dYPk>ObnY6#)Bb1f`PdB#&o^tl;$PEU@TR)Ei{)%YU3k{|s*1WB zZL!*>McQI!rl!IEv~k75oS4HVfu>ph`SX?R7S5lKdgyDUgqJUWVdqC7#GHo#FsdrM z(7u9Toy;}fvBW`Q_b}7siwYqSl(g@Hcdq z7dko=U=-{8sep*fm$xQNQ|EU4^A?P#to7%DMSnco_)S2UA|ez_R;@wJnoNR5>W!!w zm^wZ@)ewvX#*Ly;Bvy=!oC#+w3>A1O5O7dtA(QPQ52};nH^Sg1#(ZJE5YeXqm19T* zlJ7Djkq3(u@qzEK#^megnTeM!UBVo9i|2_eXT$GDs#=-U4sfyv*ZoRxU$jm@g9Wbd z_6=WH+YHNhm{kv!*`x4O#repEtAXYdYSU)Bq<>i9!#}sK_6#T6BY!YnVF<2St25ny zej%R!(cBJpL-vZsOethP5y^UB%nhjLQ%N}5NnDD_&zQmm1Uw7siK@NX@*-#~fa?)^ z@#5JCJGf$Gh#>r$wI~uIlXD)5{Gk1J4MJn%KVJkC8Hh#;fMp?~S%Fu>;G(oFvAqf? zU=1#kkzqsT^)ZUd5t@SIWpHoEC}w~;*&v4iOv4DZ6r|*>89MV6(r-BoIDUGrToZS zXt7W@lIIA*${yUYtQv&G23JW`kB}i70CqdAir|c7l~C~|F#I=}n?RE(p^_D$wxs&&=ygb!NiRH>M5JoA^=TvObcI!$-$wEY1_C}8yk`>c`&y+qO43G^V*1X0DtVWooW7K z%_mxTqO?hnk7i&ZX(cnDFs$no)@3SmykzJmCJRJpmRSdJiq`}@M*)INr9feQvX{^Tf`&h?sBv#JXaElH{z=+fflKJ|EAiy+-JReIN| zx;X-e@74reyt>uoPybWNHP3UGUpTidb0g!+&Z?OEVYv!BmYw)1?yxFAyrRl|ZYi6~ zk-BZWk3@;h+WJ|i$Vt2F$$>D1hCJ<;l_MX<9)9k7Iq`kmci-y{ZLg%4?EYr(eX`A7 zKfI$YLRo_u*If&&UmzrXvrUJv2O^XbE&zuXef9bjjvN)~TJ;og_-T_J@T;+WbL_+sZ1B)oVj3_3Yp-}7bS)XAwJy^t*w{~sZ5 zcN-i&oSUvJDXI6PzTLC=)3Ltt9%x!#+&}5Ocn_JUPZ+a z&b_Cnr>yIA>o96_K@%FS`HafDv((u!kt>(N^eS$C`vq^#j&tI~khmPy1u2 zpv&D4^bt9X&VAJHcWVXz8Jt$nV1;%-rt7jKL0VIDvnbDr-9_W$J_+jkFXIUqWAJ%f zM@MA1@y3nkK0j+W&`;0EkXf~AlfFJa99tk{mWIna5be97B3NDc`m12ZTSdcED>5_t zPUXVbPoF*oR%#4-PT-v~KE}qzw!cvSZ2NBhym>otDhVuDpbS@Mu>T3K*{)qTYIO>y zu$Hc6rKg+rKKyRM$;%s0#Jma$R_=BV4hlbh{BT)32i@2P+CPV02M$9I!bN@U?d8$9 zo{?cj8SmMX5k5J3aN!m)YUQC$B)x`4N1tKBq3X`N4%f{SBvB={8xME$98hwWVAmN)HL+5}-r=aLfa$5Jtqn%Ly@j}w z+qO05M>~L-*R%$?+oqz4UYm~rXgs*(RJ;)5K;8E4)i*YV$c<~UmKHbVwr}4I%6Is! zoyUVlJaQCL5#yk?=na`<^PyE%PEJHN{#M)RXuA*wr=&m`N#^M`?z8)|-YxHwkCL9A zUN(veP|`lL)naIW9&0GqObDQ#6$j? 
zbpqY2roYxvN?ljE+p=`ruKV)a_pZ3;8K-Jlj~u*=qN3^e#KeYHsN&j@>J#BV1L1W< z<33({Gk~JNs89Pi<;!NNe>EjV4?hz(Tw}-P~P9&OEiVZY`=xU4XXIRDS>&~umKvfOu+J`kNGwK1RQc6#dl`@+JJY{|*$BQMjoW*I@5O974GzvkF6O+_W8 z1F*g<6f{6Bb~@Bswe#k(_KSqE-z7CQC&IoJ2w$=|f#eMmtcRY%RrG(qcLia{q5PL# zL~?hi`EJd!Te(7#YvCtWJ^ff0XJ=|*v{9}jip7Oryqkj9vpt&iaHO_Isut;bd8rfO z1d@?Voh$S{$D_5!IS3FIGxC}at=eGsZISRyv}?`BQf9;oLSv=o) z7g{tE7nhXxJx8D)w5oXX?p^ij(FQ3bhFkAoZu=*=CQxk#8|%X0v?O5 zix1UP=QYyppdc?FgZk{r$IdZ$fJb`^I(8rd@ciIn&)H9(3e20Qh?RU6y3z2L9X1ty zKicCxD;Idd3ptLup0SNhEER(#sXx~Sh6uqrj-?bI?&rt)9?8K13kVC#B0$gC*SPB1 z{vlCG$(ouPct@*uKEj{D?z=Cmu?KdbI0-A;cKbmu1IMu$)S~)j)3Px&(Y=q;DoeO| zc&d@)D|fjgzg$>Q5Tx$<^W;RTIT>z#q?4pvQh*;Sfl=1~PrFC|x4-*cY(giaX3kX3 QM+$#T3@r3->p2JiH|@3AumAu6 diff --git a/examples/training-llamas/artifacts/Llama-2-7b-hf/training_throughput_line_plot.png b/examples/training-llamas/artifacts/Llama-2-7b-hf/training_throughput_line_plot.png deleted file mode 100644 index 340767f195ab64591bc7683510f79343d8ffa0da..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 61746 zcmbrmbySsI`!%`+5dj4irAtB@q(mA~x?5VhK^kdL5JW&4q(!=0y1P_bkdTz_j_=yf z^S;0Ho$-xx#yI2r!9X{gd*An3SIjxDxd>8_6Tgi~go!{PZc9qMP(mP3d=Q9hvo~+R zD_lRO7T_C?qv%UVWm^+R7kztUgsi^fYb#qvD{})1XJdN@b6Xn@mZvQ2OcZ90j;|ef zSy`?B=M5~j_NJ^9XP0eo5scRo8V(2qjz041TDDM@IRderBKhK(ifiI}lIhF)>i9l0 z{uwS`sZuHM)(wJ48Hs+38y4x^Gu#p==^zS`lri|MW4K-)T#se3uJi1x)K)RyOjz`Y z)3ayK6t>Rz`FUMzcsTeL`*@C$sUoQ*MJVpvypG~4y!|6pc^u`Rf5I!%%p%wB|NEcg zt~;2i!vDOhSv)`ifd+4e{|U0cL!tZ6uZBOoFZ|Do*Zsn7r6Dkp*Q2=OVRREs;oqyz zQaz{h`St0@7XuM*hd+T$p@6mV%&ITqMs&-HNCTd++RdZFEzkk0IFC%7U#iUc?nC7rNV9V*{ zd=u?4PM1-s_5~@A{nuhq&nyxid+M=evF*e2)3qYFmAc~gE-nq62K=kO{{9@L49sAB zRkoIu|ne?UqnwBt;k%Dw;y_*9k#c&qCS8AoP>nL?0f1p}+c>a?rvZMgvTFzbxu&g^fM6%1mzeCvdaU{m|<^*;|&Bka*Sck&x5+mx_*Vg@tOadeML%8rI8d z`$erVs{2j+-_yl$LN+)|2VT!MT`Sm)m6Vg~7$1K+Hox-sZ+*B2!oHfM`uz80K5V(> z@~eSFp7HVVy2Gi32N_C|Zf?8*0Rbn!ie@(?q@-Hf+7_an;I1<51YqAkX;xU2IB)#o zRzhG!l6xKdwza*0_<5I*kkHW(7DKg z*44E_DjmzWU|&6Ypp+#?dU>);j>n=MSYPk?Q0i!DVM+Im^5Oc#+qATYA;jD)pDu%m zsKi5!HhxtQ9gQ!ic+0hacmRuo4SVKg?2d~YX*HwR8_U``HI+D8ptW4vSN(8}(7@b0 zr=ke5NG6`$U(fsEprD|(1ub##`1n{MO#}@E1qGP{9UUDqiBIqPSWi~|$W>)bOiXs#0_v+5?@WYpt7Q@*gxs5m${w{G3aR!qOKJeV?OmhhWI7>dAb^V^9UC&mX1 zDossIKD&#(WiDH~%qL+aJR%3HBW#?UZE!6p4fRb+58$(YX33K?F*BDG7pL0JG-%r7 zFini*YdnXjXM6luq1SWgkEmv)RZw*`4}>zO`OkN-cV^{-l85U{^>oovnQ}=VA|r9( zLYx-Efoo$$%3odtSru5Do11?m;kl=!r6nUHV>VVu8Xg`_)GJnJ>fS_#lJXf? zTZh0Nz)PlQhZ_?;j{7T$H?TL%)H?61{Gd8` zI~}PwPu04F{b>)jr1wTRJ3A9ivJPHj zQvC9wFf;7kyLaD<^n}(oHb#!te`ZMQF%mbyrqV+el97>3Y+gYP!oa~A7!60d4~(x*>N+jFhRi`=+z1FG_m zd<_m1R8&3s_cD*tb?erG{eggp=+EY6a{7VkSfLK=>`FoO z=~P2r>9ot$#S!^!;wNq6<8e?MO6=yv5TRrOQgCCB+1bB9WRvi_Dx_bei-!?BeNAE9 z6V=?+6==}@;YW^gmiC}{zH(ORD0)m*7VTnh>~cp4v904e)G`u5kGR8)NmEO+zz-k3 zK#IgABq-;Pob4;~Xvp=7({l9V*RRyrBu}@+I*%w@ zP+lVVJx`-zn6+S!@ZI)T(k_*B8a$9;i;P5NW##jW3uM0t3JV*IMnmQw$zI;t8iZ{m9J+JBmV%yFKqPaFKUYpjFi|HWmv-WPWWeVZL9$ zsOt{wTpYVGCJwov5p>a8Wc+d7R~PE((@A_T$LD*4gpXe#>fMh_CQ6MRSBHb(pFi_8 z%J{p#nf;&)xJCG7PIoYb#t7OMJojU>?-WYP$|pOW;y4r4g_8IKbo|Uymx!GNSD@4XZ=i8|83raJ=b8~YZyWO;_I23RTP>i2n zU0%RmdKXNd0VYz(Q)BMDI9MCYvYYSdOW+PGYP{qX_kR{a{j1!(70}J!nMNUiH2O0Q zo*xN00zQ4Zb5K2qg?i)GtND%)SW5y{R@P)e&z>M$+HY_PM&ld9AAF;uqid{2a#i;? 
ze-rEJ=`9T;N!#1ABYP^;(s`&Ct2l4rkzQ~7;(`?xfnpXI7&zPHi+UXmEoW_^vIv^X z>#Ix8)5DG6w{KA)ndZZUE@vTy6L}oO)(i`kv*cXe+}JHgnJWJ9TmRyOdMX8d7a3Hi zr>BVm?s(qb-nYl*f7iPF9WSARl91)HH4ShqO3?Fkc4I>p3iZeV^s}d~+Z1Ic{XKiO zyXR{|25xE4*&84g zvWS(L4leb@bDZxF$+tmc{zY?EV><9vG7?za(A{S;hLT0rx1FHA-LPc|lmIO{nX6(hw38OXuX|td>sKyGz^H*sN}# z5uY)L8Dlup;HyVTd zF4U>bRGhA~){=y+uW{K*u37Jnq>YG=j~`BbhFVafi_Elp6auc>!V%Pxm{?emfFFk8 z{UL6!tFr^1_2?VWEi=ST5O7dyBf;ZyHsM?y`MSFDOK}dD$Z-FFhnaslu*md z%QMGA8K*TiFLx&4>_$1R5%+}p^yt>@H>c&l9ym)8y>Q(B& z85ysm#F|F96Pcc<^I+W6^jnRs+6v3D++LgMx*RW?6i8N`2HP3I0!!*zJGm4==4(HD zq8at*UF49Q0Oc$6+_R8h?(Xi=@Hv`|(xbK$nX<$oyS}LCQa+B5qw0qVv8HmPw)U6j zCst<+IyL3PrN+H=WS{X`(6kW*1O(r!Z1oCu@f%BV@7+W8Kp{ zoQxLfkqh^dMvww689Dxtt&(#O;%K!J)87wh&QAzD0E@#1Qqc^ey)n#slUE6kUo{~@ zHlT{{?f+H&Ge6JOL;S=lFg7;UO)E{I*!#-sSEY4uZEbDM6ok9Y?;5*5Z5RcDx}9my z-*@%(sT3vm8gzYn$WwW;+sn$;un(0WOeAdjsNj8srTbCiWJxWlbcLs= zsCayOdJiA}NXo_B(UCKP+MGn)nbYjM4}>+F?KHoVl2U0wnfsBAqmvUMuVc2@34rZS z)IkF7hYP1K%Zkg(7uF}r_}}anRVT$6JQP@EHlKqTCL(<3t%9O zn>SOXV_6`G@!sCJ{d0W4ZNNxCTCa1muCuJTw6rJou>~?@pYrjU^6i{KCzDC$Cxj-& zj7G!4vRQFaQ&VGMWvcUnN8r2e$df!zwMk%Ku#$ADxlwBM>pi<~)k(vk*Voh``@sPv z6%}y6OyrYz#xSa51J8T?`gO84LPKh@x6{JfI%z(bv#L=XbcAxU)ePy_8!9Z!=;-gY zdrvZ(=eMV*A23f~rub&(FHF0?N?%lMs7Y*mN3t|EI>g5%-Gh{%HzK8~| zKtMcTRJ%TXDOl~YMFt49d&D5Ssya(5mZcx)qypW2?MAO8z*OuI0uAT8(o!)@Id=II zcge}+fppk8JDUK8TD1fg88JTo^742d2fo6rCL8L$Q}cbrUs_pg13+RvUi?rxhDpCI z@UG?=xq#cd$jHcF#<8MmU*Exuxel<(?O-*d;mUpzKRaq2 zU}*_-=zu+g60k%@W z{=KdWwkb}){RvbLDoD)v&;3N=P%rxn0N;+fbq4tRBN-_q`uI1y^o~w*ZSBMk@z90_ z!O_uC&CaBX8h+X%Rx=@K0e=4PKYl36%R}Nn5)ja-pMiGcxxVc@n8-ve*%SNbMC2M1 zqBKDs7uq{CAU1ApQxLdT$Mt=EeL;6&QP z!3)bg%o02f&qoiqA8+dblPU;hKEX(7X9nA8(}9hp*UMjl==f2AN31Fy?wAJ8NM4wG9* z@ojkcK4e2M9&>5E*vyh{cR$zZY%?l!r1(5hfg-A+qgQgfi(;D(p9d5}Qw_i&mjg}% zL|*t&8`_p)_*gskOJa|0p$<1_6r4>-<{)KGv)8V*UJ=B_j2|0eM-? z9{}2kVK?ppOqo=$KC!b~0e9$*?CH}`U#dF(WS_e(0kktZQ%q5RhjJYH)PCV6&`e`k z8Yr2Y+3MIL0|Zs#ag!BRa#hooZ5w{@_DhayCCt?Uhko*kp{Q!0#L-& zow9sZH#fH;Rqfr#zCIbm!I!|gDO#Q+bdW;^k_Dv2#5%P6Rorkvdcw|DyVvl6$6=}1 zX-(~k?X(QwHLk;nU%w1YOaesGT+?SnSYqer=EB!Zdm&?ifz2Jwc#r*xtr(|y`0!aP zX58Y9z8YKIIxjtHTCBA6bSi-JMgZ?$ym+Bdpvl#6wppt#$8G26XardP^N+j{=^*$7Tc2kK|KMZ=8?D z^A;2sqLI}HqHv)`M-FloAQD!{gAUuCS`T&Wsc+$s&1J8eQJgd^EOI-X7G`HtEB;j2 zFZK`}KveZZ{pNAqepRV9QexO?@y@+_AkX1&U1M{){_GdQ2WYrIT*X05#LrO5@rq%2 zSu8xCv{TA{T4`ira*u=rp1^v#hLsI{n>0M*pfP(fD=TXcR7<=V2_hp;Yun#7s)jZL zfct06lI5R$fy9E%pQyA>5c}q{z5RN$P)EAn?O+%$I{*DCeEu-9DRMU4cxD_F(YvUr z_Z@U#FUvP6{BK%-r4LWSdgp6bN5HdYGvBU`##JfF)Ajn%5DL`SqHhj#Iug^tWYxyk z8|&-CQo_zXy}c}P=ajweu?NC~y`IoCBMNE@U9q%A}oLRJKKWdMS zn+P6|1*6@CZct!@pcPwejE#(Bz%N++DyN@R5U4(3wiqi+T2t{y3L3z4svVXs3S6&% z!zj|-*IeEO9SU+Or-bWdlz1}JSP-(lg@d(L}V8_l}0vbWG|ZEZtJ?B|yTvBrw@Xt}ted7W14ob*~C zd9#(Xu+`MmfYv4e)}xH<2GLtJt>%@U2}oq?GmR-VPHVddV|sby=K#NFK~eE%PQC%r zy#U;HI8QwcFb!pF?+NG@lG4(K({*oF-2h5-j*LWugu-FcM*t6@QR7Gt+5Zm}o|v5E z0H%e<^imjt%lfh&Gq`o`i5e%O&)adO!kRlXTUoQaogNC0!(YQBeQA18a z;FZ_e2DiyTq9lNT;<7S#Yq|&kS&Buv0!Y5|^XJc(pcX)lH(VbtS>y`?-iL8Xg?Dw?i5DG!CEZMEP3yXp~hlfib*BLK# zg%4*dqJm_kS!E+zq+1XAH4UK91rWxEi}X^EvJ)A97c9ledfDLg>QmupUkVBeA1F_G zze}KpL0I3jv|LjUF#ikP7S;fXRM2v*NAn3m*alAYzV$thGAI@e$A7Tu>gtev^6}$O zfa33wl3qxiAjLc&QSFe{m`sR?|K_p!sFEP}lufBKh9+J}oDf$YPBJT5Nd(c^6sf+Z{dV-MJ z9B>b>ivx9=sc+9fsXyMRnpv4BqXpd~9d=^X-2`-t;R3B#P#Yc24y}QQia^%{%n9YQ z%(xe?rL}bgivYNQ!$>R$#kYw#UqM7bCeE&}_dz`D89iiF&AktIZ2-do0dwT;AbD$p5(D=iulmRNVIx;mfdhD|}TEMcA3-;9W zloOI$<^6LvQb0lmfXm8let!m-Q*cIK6?F4HCK@oZq5xGMS~7muR4?&FXf3cHNd5D6 zp3yTT%lH@)vPUAqq&4kEx+5+lr~eA%fE4QsAPQ0`9=kzs`_w!4$jsn5dCD<7nL6+#%bKBEDkMC_n-sb%NdP?N7)d3v82OL|U3Yw&_E6 zd)yn3*x8lR`D$`W?Lr$yf zn>+Y

ApSKP z>FYlP?M4zBaSSCTCFg)CT!j-#RSqNtQq?gsFrZW|)YdqK!pC*$rg|LVezYlx#B_s& z5Sh{q4Gj_g{{BX0W^~ZkB|J~}!&$VyQAkEmi;9X$0$GaaiD71pkBgJQ!^2YoMf@x9 z63c_Dng(jg2r?lrCMeQ!rRC*O@YrAFl6WQMlli-M-X#7$iJ?~Q`U2cT3id1lG{@m$ z{pWw7ysTcZzF+MA({5=2`G(Y6Ak!*V4>o`Ebz1hfwmy&ex}H*7U61bIHnT{5)H}Er z7e|l-W1NVPkPstalE@eX%++`JJBn0e%u3F?+89LIdlVPY;&?{8PnnDK8d;py#}(N* zI3ht^{@VN&U7DVrzKE2Hbudv~z|e9em*xN_C@$!SL`H)*@Ne3Gh2EuI>&z_gpe=YG z&}2zzsR1Z|wozl>UIg7!$yV@%^~LjXELw&Fg%raf$2mZa7YTo+@U$RJJ$WW8IP02Sbt`}S<>(sat9dan|=QxiiI2%!ZZekbg3|Y?EB2csAquv zzzI;zJ8c$IsDpqk^6&$UxP3K|t^mIyLN@gO@du=L^P>6`3q{U1UX~hR0N4W6_&U_E zg4xvnx)yjvr_o3V34;Ld06#(|Zd-f1zNKX^5Pzf~ohl^_U5E!Hv3}YVk1r$dsU@HC z@UQ{qb)x(D?b}nBnrSq6RP)|h$pdH$Qpt4=jxrC)7N-ArC^97?UWkaG0G3)=S@@l- zzJTv*lySSXvhoJ9p?>^$D?~d4BBp(0gi9Q48^RN&(dOmIX(jYWDC)dgXpfnhWvRFn zB_t#ovK;UJ>)voB_Ivx5UboIQdGdidnnI2WOwwP2A|vgO`2cYf3+uU_9=Wu%bVv=f zW4NO6q`X=+%z6tafsgS`j!OY^TIDbM&m#Cgg8>H=%DDLWdc`x!0t7Y@XCRC%?Eka{ zxd>HFJRb=^&j)cv+!Bjtc(ZMq7)j)K@B8@Jyhu1LzyrJfU4Nm(P1$R zjHR==`QcD$AXs?W0v zmTm=d40AEn^h>z&VaC8~9igPsN=opR12wVxb ze1#Q!R@T<2V)3ih3umb^yE?BiF~EZw>gzuP9U=~gt_z=9l9G-tFgrUNY6-=iI#j^y zubrG8lo+9%v`ntj{Bd|eK}TI0CB(sDx_=-W-u+?fBV6|rjzk2six*$Mh#+1&I4~Lo zT}MH>dHh98Ofof3IJo+~XljHhj$eNt<3^)kV7P8-YC4CFcb~Uqf#S}66b*;VQe8|G z-`Q5$H7|SzFEr6>I~bIA?o+mGFy8Ut4?Me=7fw^TLvUBZ$ey~gqf(_TFQ70KN=t_S z|G0{Fecy&*voBAPv_z)RCKq#8ntJr_a5`CP21b}u35tWUXqHAvDdOZ$Fm@EZVQ5(y zq}F%~o+Ny}9kw8ER~Ho(SCo)X;KEHYGQ#d=ada%n()GLGU)m;1SCG0c=+$$n^$@;& z5YgCHOoGBFwl!Ulqt=gMYDx(5rf0erR0Nrzd#9W>+7Bl$z+6CT{cE_nwKdLuk*TZz zBG}5npQ{z&FyThs{2=$wdWP31$vEsWu{>ON$FX1 zZ?#WNQL1;AyB&}_uCMHAw(ra~>l~kqKo`5o%F1|pV40A2iH7p*$BQ(T$ME#j91mOt!YeDgu`JBp%md9LBO&U-f~XO*2#`FQ~Mc1u>2p>Ie+V&c14b>kYx zznxc?W0xUk8jW5&1zIi;r-s*WBxh(=XcC=qu>aiITYFdKVm(@Lrj=JOl9sAoq&EJG zw-|1bl2U~5Nj|=PH5u-sJ`@VNm235G6<cl3X;`Q1TxKYpt24VU~p6#OsG z%?#+)Yzl@-&+7xcCtt*(r+8`hclkZZwm6^&TGNj$FIa=cKtAL*4#GtTBGVR08y5dqAAw6dFrr{Dm&`9CCB=TQ#+~7LMh32?(1nGs{2>qS zy#8Ej%qXaQsRFeq;?teMjmfcLr`3_6MAwb^d1>LMza(cDdxQOsTbF3l(-qaOY*9j2 zQ~A?XM%^BDbqr;;GopYz2dieV=f_J>t7A@jqJ>*pL_pO{d$X6TQm`r!M#k(9rJkkq{LM=zdXCt9ue5DU=?#aIigz`GNrh@tR-asitgri3a$06*F}KYutwFU z?XU8`euafB#=UAyBO^(qd{00P@U$0@Gct+`*hSB-qM~dWzG~3!?C3~-_s$ihTrgz( zTpjD_0l$kO;GL4Pp<6$7(#3Rc;E>_?ycahI2G|YmHQ~26`t4~P6ux^R@R;IYE{A1j zN8h~F$eL;>P993$7!n=rRCo%F6;0gHf^)fbAn$6k^Y_?z5!FyyKIl-q!(RiQ-_D&- zZI!TjxAjPmGJ>i!37zhFKz)h8$a-#lUaRZBw#t)%7Jum+y+%%6Uf$y~y`e5oB7FSb zWPyZ<(#3Z*`On{z&7^^y1}+Rgo7RbV(*RjBh6cB!KB>eN!Puv3{K z8gYx1H%P637g79rjy+eMbzmTgcx7TQPn`=N?o-&xg6Z1aHDu6L7duRa)aRAx1$<;5 zHO`(>c>5WfE5}6dwu9%!RPDTFsOds@c-JR$I$w5`!A3i-&IdWJK1iqL2Sk9q4z_2~ z-j{dy)%Wl9XKyU)Y1uwYAX2gOsu%56NWQ}P3RxEkH7GO`8-@+zlR_zB-;8@T!B(-e z=PtM`A3+UMa&Q=Rw5B^2b`)1Q&Hib3ATkaQ&y^h4hykpJ?xX=js5!tDhT!bmTe8m9 zJg6qY#vXXX_nqIsG>aF9wWjs>r;OJQ)nmefJ3*J9ax zF{gBUVl1`hxf9bZU2=@n94%$MQ>MNr*t6e0&wtP+5k}S+QAE5wBIS;vXtbdGzFTTu54Gao0X$urYF6;KT!VR3wf$4f*3zs#hCVM05=_^y!VPdtA zfxBCc((zC24h#F`g*?SoRIWdH;*22Sb4GA*IPeS94?CS-xLB6+Ij{TtiZHdX z2*}CNC=6qMy!~;sGRw4DyQ)Z~n;5K!AS;XZe?CBTb#+lZJ$~38T)S994(-_e-ViRv z$D0e!lkLgF zWrx<#D=z}`c&z|9Yb33@FrZ)PNua@2G96>^gbfB>R{cFx? 
zs~b(oA;w`j%Bo%O7R0EIqEY91J3}gZWaX%Au%^`Phhs-*Qjj(aY(QXKoLMqIhDe&i z>w{Gq1n^t`pdfmH)LD+^tO3{?-Crg50zcjYHLQDoQ{W~%BEs#Eq^Z<6IG)D=EF1gt zZ5ZBQ_!;bpuAN>PAB6NPY8+@Ct77g9BY(9#aGS_e^u;@v_jI(r&)tL3?*kc`zSqvq z?U2H-v~K$ZO-D2G&E4H-rl#C6Oz}8ADSBDl5r6{BA`-)8UcE?l!=5?F8dH?`ymKTK z7pqX~n8&Bm+0pfArekd62Qzz`t}YUD_#NFzQ3~xlE!RwIyH@9Mf|A6`rDQ9g@Z_G4 zT#}n|m5m{6AhHB5*z?#|C#HDo%GNp`nEe{Y@xJ;U#j=r+ael4bY9h2d5`BKFF=45H z^0D`2mDPB$Nd3%M6$66!2}!0LDLGOBEHR+PedGZ{*9x{LPtugLronon^SfiDf|^l% z(_nS@?)k-0*i&9S)64TlT|(4tRgZ%aIu^Z#rWXI9#{koMqWp3=XJ$z=HN62p^60V>X=EcdInI)w`~X^v;E0`d$mFs{F$p`wb<&L)n60jA+R zcjK0j^UwR{+M(a_lUtAUhP{m#xE=_aDw|f6szdx@Kf>nOKfK(nbPFm*wtqH=+}sDO zYU2ig*Pr=@7fLKQc3lnQHC$0%QR&1NDt1nf>srs{{e6jRI^?JG{ooPu|(#6*WNd=G|H>o9C6H+v@82VG?#h3Nc=Ji+&$zs1*L_-`>IvEx5%)g?eAXBJY>X?C#*Y%Uy; zT&ph$Un>4itI?cusCtc$sV7MF-(^~ar>E){C zY1{RN|9$&xNH}%SncdNd!!gS{9g0WHV(Fnl$r#XRa+Y_ZD_FFs%M3ka{%gU1RtWEY zzk@T9=Q#L2iI$g3Gd(nXD$v4tWccq$-G?_1$L=zSD~g;G{mZ!QUqn#*;rfjPKS5rb zod(K|Z{ao7_1x^%(x@u>u;ftPhmuTPxBs#7`^8E~N}8;EPlnRx1H`#dFRZzxrL?%6 zK>^uWk9SbNxgR}}{akMF_h*4Wb42Q$H;LOXlcbNd$9Y;JTe%L`)>f1!3QOIhZA243 ztgNl#N0j?ordA7}=^wFd4b2fR5>^+aJ%-3CipVNXx>mu5@7F^~YoW zwN#x#D^CVUoNse8W9O(bx8kakym9_}nhuCG7@nS|8uI)eYY_v;wCK3F%1AIVU9ZLS zdR8f|hsKrr9gTkulSdju>}KRwTB5T>UVDZKl`om;JuT|pmG^Gc-0-I^pb$cyy4X%O zIDW;4m(`JW*78eMWxNT+Cla>{G^mRJ`r_#_V6`x|qz-&0bAkKslO9i8^WkMrU|5_bHTSLRh@#R?~ z7_NyClT}3^`%JG-;KE4>T?QVSpc*G>Hp|hXMDQ1mkDD#^pHWS+U0%5Gz%Xd66mfdC zmF8kAgo2v#%F+_2sV7=Gj?0qd>HMQJ#9ZlK+`E%uz^V0I>sX10j{xwQPnR@+)h3+V)Zz5qe(n zy`QD zm)K2jomD+#pphzAhqWJUJ$_Gg=44#s`e089I^-|a_T$^hvVJa|AQ7~{;hy_w)EE=e z;oA-`a+>}uywp{@L>Ng(bQM443gx6t(XM_=Q_Z%Z{O&?2qi46DdxpdAkFdpP{(G6s z)^`}FaZl|EHBV5%-uRfFzI(W6<~};Qso+^YwsO{`Pf3YnVJGIzr?L~J4@P>(!C1fw z-4TYiV{od1;o7yV9QC3cF+Or~y-XRxWWz@9%j)=vvQ9t(sxar`aB~}6=3>@hPk}XM zw;1LOCHvh845jsVJtvG@dZYW)3$Ne9!}9|?+Ml3K!_NM3cQNnro85DMEn>X|h;6W$ zAvekIEjhcrkzRiO2R5&B7sUDbp4Agu-Tp)#X*!LcksPL>Y>s9)A0CutnDo18*AYm) zySi?2w=2VDP%ss=PX2dd;Pf&E>g?kY zZe|E--6z#p=~%W0)goe0xm~t^ydsB}2?|K&b&p_zZd0=88YqX$@Pc-pv{TzDFMK%jr12hG;APe3mdmCANDQ~< zJ3KR{;k*D2_(umz8bIeL*l4`ej9S|P@_~5%Uha!(9=1@8t=%(rUK%4uLYG2fEEUFSxr7v&t+HU=#rlHwgkbEpD83dEgp^CiBh|l5Sa{idA z4$QTwW_MG_Xk>N&k%eMxYWv2d2>Uk(*975BYC zURUq#cN_uHl6q1-tu6E5kgt5B-6P{C{ReBv^FJcu;_smKZjLLICw!yUtZEorzuPK`v#lg|X>!DoZaV0|NW{uI6Y z_xIr-0IR;SaY$Ji<9sI$oUicLZ+#S*oGb>_FP_biv&`h81xAzK>}fu=2ahS_s%V6G z37zFL2II+^u8+q98xRrk0~)*39Z3P>U-cqg6qTOv|965GZJdV``?MA5}(*FGT8e~uE5A_!>2=3n1=l96w72>wlZEkD3WooJp zqcb)Rjx6!8YaUk@_hChWwHbCtoFI-*oZP75g%<}%1!{y^17sfo(Z#}(Nwb}41_Q_| z3yThQy%6wR+=Zi5&_u*VL`dK)SKIJ#;y{wS>10;0G{%AL+JXuRE-uHxn+FS_hGAhl zz3qC#8XQt_Pxo{x<`QoUNK=K05jiQdD}TFhCuu!bi{}&~L$9138XO<2IgzKYhw6^o ze^TH_*>tc`>_XDkIQxUm#ShP+Id7JVV*%}71u}IoF#T|Z!ph%tdLAL;dBmI;LnWua*&{7m?zoZ z?7jj5IaK$Ccxmt|<~Fff_R-mC?oyuh`0F@cCl9FO#_b=HWa75DKxBr~fA4^Gf@Q?f ziCBFs0+v1)5Y+b3jL@Hzmx(8PS8m3)n$&kdg!l$j6$@XkwXZLcUK!6~A1dBGJeI`F zX;3(?r4ouPCfF`f#r6{Nmec0s(5n<^1|O_ll$4jl4j@)XEJeB_lFMypW;F=n3gJYM zYQ6XRIN^Opliok=Wf+(>;7O$c+n{0Bry!oglfPM{#lwfU=CU6t5fn^ZJEH`i8hQSsyIF{Aw^ znc-(sTVuZ1+h2;1tLF0;&j~C4H9*PT-aH`rV1Y^sjVU+P{A5Ttv-CQ4 zV}$MC3JweQD;UE>Gpc9mHF}lX79|-|aB`{`x`RarIUa&TE_J0daGKBotRn}9!8u=G zl8HyE^E1g_w}JsqP7A1iDKw+^p3qOTeWtTqGwi64q0VTd#xG9wJ~i;6s%%CE2%Jew}BzjON9!r4?X%)z|&vd3%DzFJM| zp9$w~0=gZJy{fH90YudShpB55#l=Be zY>z5H^=cZL;W!e~0R!_x1Ctof zn}Kjx%(9>OpHt_vqF*OIS4qT~_LDa@jxozD;@)#&;(_iA03tJY#msxJ&z z(l075@j>1AD+QCf8XQ!IXg^a`6sDAmgagw#6t{p}w^bgWG&5$DYr_tR-@4=p2Rb{t zj6PuH$pAwGm@^wnr-NA*!6Tvww+6rk=$^pEnQ6)=&z5%>kQz$$q~zpb!P1*bmKey{ zQ>!RFef1sZXVx3N9|lb`Z3ApE^farh{wSVPWhN;R-YwO_;4fTy>e%jAP(@3nx|pfT 
z=!&DKa7^~>mt~y`uiLA?yAggzel4a((Y4xtd24bNSl8UIv2#W@nQmy^*LE%Bo~u_y zz;TxW7z!SP1#@p|^ykkAD98@b%}CY&w+69yP7ika&vp*A}8iRqM;mT;jtH!uJH)BSWcC}Ua_NE``TBlkuhGQ(qO326z7x%$2BIFk7TU!_CmV?)P z5Y9fnEPnN@(i3^=76!a*VC+PQg^|gE3CzyHVIGV$;F?E+BSkgN8=Bkb)c?2(LRX^9 z4h5z<3(;#`BaWv&&#U$Yk8T#2&;CSEovb~;(jnE!9ap_Z@vmToMq~ejhC`NXhUGA= z(hq;{j?%pDZ(VV0ymkkLZ7H7;r_HJIElkqil(?&lsxU;}Zt=(BwAi2GOy_G&z z9sQqsd|O)!u3dIcCfK+FxSwmDv=6r8;7Esw#FXCrqLQ1hGoi6FV&@)VI55Y8Nn;d>gC&AX8p%f0Vtx3mOj`m%E$AD!3ri zr{Vm2Mihv;o43YV>B|{TqxnA>Q+0%Aqw~fZ2#`VEm+ZJZ&t!EhuV;q5x&EOX_LpNo zqq)o;)icsh&<`gRhXprKn4(JnQ?hv^PWa0ZWyzD`3_n#aM`JX;u>}}Xo*zzn6l!Bo z@bgDCUQLSx<5}-b4Z?gYN4b;K<>)8ONJf5L&4Z!@(g~ zn{;S(#2XCJn()aU8Gl^2XIb!=KM#ZuU+KSk)kT}a$+WLx1Cmf%Zysiz+6Ew;C^W3h zQkN|?Y?5y(+4hU*5!64*ma|9%>(o8#r6StuUyeAYd%2&ba;j9{30V;1bh4JR3>%Fq zWe}D6eLuj3uxNAbIWH@bQRZ{)*8^&GQE}>3v48#i9S-M|n0`04Wtvr+qx}OD)#cui zFH~tc1WLJ+B^UL6%u4ngLLuhn;lW@n>T&%0GwkYOYlQ8oUi;}mFL>#}l1f7L zc$;Y?*Ie%7V3$PaU=c2iijecNfjcmb-NbbV7;(>Qd80Q zS4Tc13!oRf?ht|Rr!9`{s&#rAGH-^&*nNM6EIE0RPODNRIh3A;hLAi3@A$a<)!{nE z#OOqYC9YwoSMxW!G8lZbVTwNDwzXhv(g0oM-Ce5F`>qtL?{T(ZjSz{6AC_~HU|i2( zN;dmP>C3C@z37+c4&l7*n)YJs#J09ZV;4G8-yAV~ZqY~Dt1a$09Hj?Y;(pY9$q9e! z;a&I-KJ6C@3KI9I#pvJ$`3KLN{3* zHyW?`)0^J{6jr(zm*^t~F*w}XT34?toB zCyrm9rDUdzA~V6>K2vA41MTtNAi2E?g705>$VaL=N2m57F*jKn02kQA6h4>2GVd#A zFv1bC8&Lx9SE{-IEaM|(-ur)_A}_N^lnwMIJpO+cht^P2RT#jSRh_S~haMiT51zBn zbFGcdgGE%$%{V6uxFZas;Z$dA_C`W*3bF$R3&YEZS92}KMHPulZ6Htrxww^<*6gt> z`5f>nmZ_<#_6Sq(F97BSm*=*D!NF;Y=`npNSG1X#gQy3L_v67%9w(ci_AqwsN8Z>N zOf)VksvW>3+y*n#+4o#|fL&Y9vG&-cN>GIL)SbLeHTGYPOz9E0dGqM%sv`Bd;hw8t z8!+qYBp>a4QE8gT`gA|%^Kz!TPtou4Ht6he;-BJoR}bx}qLpGJ6Gkb{+A#c%aQ~g- zNk&L0H!RJa-}3)DJEm3E?WI(ayADZ{t(mV@IMsZESL`Ilr$HsB6poi10jK=*nQQG~ z^QiFs)YiTrQkz`d|KX8YmoTaFY@wf8y7-c93AkM?{}P~?;=t=dg{QZPY; zdL(oF8AL?IG!a$g4~mjADc9%cd!y)E9oCNqg}4Djxx#6b9zAbsF?o5tdRt&TNK=4f zdZyj}iXhSo0HQP;@YE|+Tzm%<4dCev$ZcZP^Y4yaUY_`MOi3$8($ds~bQvHS zD{Pj5vFgX2`9{>)Rp|?MrqL;owZN3!Dv4Wj?@GdG0viv|`E^}DsxjfrKflLnqPWrpnJA8ipuHD@JWhqx^ zQqSXqMnZ%U0|&ja2BwgUkPPYm%E?d`lh!`fP}WsoK>;e*m%qxVjFJd>5eo>&2CF4J zNm<^go0>*{-{_vK)R%j`JQ3|EOvFFYVR|``67>(+a{dC1kf$sw3-}}sRx7T-pnLiJ1UFyzg0I4oyKP|LTD(j^ ze7yf@oUDw@kCAB0q_=eu;o;4F@xdc_)1tv@-;4EmSakWtM{>EYMf<~njhH-UFH+up zq}ti!fc704q4^N<5Qk!}3L(U53-}#UGZ4_V4p!FI>#8D0_VCkRD|}jK$wB4xR{bk| z%1!)H=Fz-L+g*g%$G9(-xAB=ZpB=2W14_rT{^dBjGTGHdceK2CC^*jc(Mo6-+XIW2 z@RzSI4MxNl`!0)}JAXuGHAcfp*h<~}6GqV%PK#_F(a^ z`aRFge`a^g-ZT5fJ>%Rj^m~2dJlDC_aeR&~{KM}co(c;~W8Q@gQ$|*Hu{+ivCI%k> zCOKB>@^suZG^sd51^={;Sioo34Si_t9#U?m(yQ=LVwZ)TIjc2K7!EV3TIN;B#WRKa z`_qBFNQv2pm6eX`Z^PTS5kTV*EM*m~v`n|qk%E5$%pw>Jgg9QOrmV=u=?j_n;z_}O zs!6l_X=Cip8E@sfWa*wBA*+X^Ofy0r$HDS<=8T;gpdj2~Xq&3D#Vb|JSJJ$4!1bhV z>7N@Yauxe@uOTc;{kYg_$C9UbE?LtO!&dR)>e3JQL5a$uKGhRJ9o4Ia&(zuII7p=q zD#;2-S@G_K&PW43LGxlL8t6I9M)dP7mQoAL_LKi-Vh(|VW1ETc1TfQHT%>6W(#eSD zzjD}~whM|mK}I` zd6zdu_kmBdCRo36n+HffeSN@wp>m$Mx=b9tZhE)Nf*h^D(wEo!^QQyl-a|%4JfC=G zm)GR!jb773y8Y?b#QFIHxy-|b%iRU4bt9fl8&CUgIn?7m>Nfcr6K!BjD{@B3gg z1*(fM=_b7b8L|R5op0x>A^q{E?y1P-0lBg=9@v0T?7;O3yp`R91C_DeIp2-zyA8Bx}%G0c%$Z&|p#1hGRXV2r@5JM-Ie@#hx zMpC0wqI|>&#-~xc*I1ZoP{tBIdPEEo0l*qjftCXXR{ylrgoHffWNlo4zwkXqr@mS{ zqRMfG8Csb4M70g%5NoW?zt+1AEB~h& zZkmxtNh^5Dq;OJGiH6NIL$hL(bO{B;-0rQK>8FtJ@VTqYKXFX)4IX@{#J^usUEuhk z=lyM}X~`#z%RB!0S$#g~^D?SKbl{aI&8fIgz#sLSGSp94!WM6$d>k$~-9mT&ulo3j zlWDDouGvL}YBL<3MdPikM3d|>axQ?~us7SX?-c(3iqJ(gn+!Y+zrPwf$|fEe!Mq#l zpUAE*c38n)Y)^jxN>qcW`~L?zmm9CZzWC)FY-A=d!74XsAv`-G4})P>PL3JKMl3*c z@?IrB3wqx$1+u@oka;2BxB9on<-2EXb)3@mcLjwGI553TAFI#E&HceMgKKzpkRH{$ zge>PQM}(D`{c7mo#V}krH#!R@4;nA-pfDC>4pxddp&S`V({;Mz(Yv1@^6% 
zSNR0$=GO|6>RbjtuWD!#wT+IZLE#I>_gG_VxkFbzcf=!Vj^KuHu|=Yw>Kg-l=*yqX zhGs(J3}HA)LD)V7%?(waC!FOz+1*0_IsH6ao8VbWmc3aykC-GKf0FUD8Z{j4-RT|% zzq4CfiTxy(YHjELbA66Fx4unVK-+Ns!Uqr&GOKNvwE97hH|J?*Cp}Puk~xfX3jZKQ zp-VlwK&tWu00W-Xk9nwn$C!-~jUlCtq#aAW#2VFhq`(|`kyNhqrBt;rA+OC30nJs! zFI*xRitb1EF5!|-#)5+xoXv1lyVB6oYKR82?izQdiHzc8mF*4-?hAFvh?Ih9k+&PT zFl{v5ou5F*m;z*4ObAgShe=mE**TPMw>PF6l0e-DG@6;gGmYv3yU9= zwiO2N-|GBGk?xkBd)@QW+N#EWcG?U7so#eP+^QTsTypl{dvsa`y9xW#^e;ZPLG20$ z-rGyP>V~I#Zs;~mP_crkt-+}Mk~ki}3Z zNfH%SgL!z%^%ai}`Fd$y6cN_fuTO#(d0dRQ%J;7nZexf9xLm0eHGqYVy{!OU`LVh3 z);J=}vuRKdgN0#_VSYYns^0&j_tit1mD76N1-C=#IyWK-9i5||j28iar_)pdZ~yy4 zCK`>SKy?x{7{97AQQNuB3?d{H#w13vTf#jN=K|Hc2d7LrAXkDOUu%^gn_7VF8^*qS zGQKP4JB#!CO)Kt`hqymgWz${~+3jHlhGblaL{|N7{BQ zVT0w%RI+zYKJB<81*RaAfQIsk>xuG62@`+0ht}z*N)4Lc+GASiR()I7j355CrCGD= zd1&)mpJC+H69Q}>_buWDY@9ocLaBZqu!8j}65@Oi&NX7Izcrk%snF`o290N92r?$2iXBz7+#Z9JJH}1VB_%nPm3bJ{OUODq=Ns7y zO#Gzo)_DnWtY1suom$h?=tO8%5TM?_a0Gv`v)7rvls#Xx#ue2)X9b|c4KZ6BRe zZa#V_grX#Gnz!%@3pSd1^7@&-let}fT@MCW+y@97{T@vomfy4D6{^^pNFch;DeM3S z2s$dC6>ckOUc0-Ts+2cszdzM*z9Q@GPaUkN>Q&X})cvnk-hb<(Fl1|~b!|4!+4e-7 zYe24B{8xLh+(a(qK2fjx4@Qoi#U}HuzoDi2MRTu~PK|Cw@2epgVq=5Z)=; zsOseGuu2+B8|0dyav+ZCSflZUbX*-BE%riyp#@9q?2A-;(PiLxU$QcN9?Kx-vu7+? zRhby$)LprgSf1c~1wfE9G+!62QVob;CFzY(f@mR)Q6KM9oIG zS_1v4pC*jUC##m7Q;4V$^_YC*%8PV9-ugIP7`Q=mVe#LWJ{^Q>ikT~}P_}S&8?_B> zWa(+Z({E0;-MMrk9n?0$_xf^QrgIJu7SnnYF|9=cSs@F8Xz) zU%y7Id`vc;$V5}qt;^V6)9Qj**ne*R|7l&Co2gm!qhQ7HO9LMFt?h^Ok1-MVISW8B zI?xt58BrO%Uccie6YT%%nu>OoJCwUe&~aJq@o)!eO?S!fvM+a_mjRfvL4jDg9k;9l zr}^W}M`oK^Dy+ikz6MI)Sw@9wji(EU_$4L%84L&iBWrUH%Tmgy#`oEKP0Qb5Jy@*4 zzMG@>f3FcL%=4z*as;4IMi(cge9mj^^z=E%?dt}C8BL;y_cx$p`N8DU;rxgj#<3*) zPJ334lO_MdoKjS5!_G4>Ri80Zcx|vgS_!JU6WRaxQ&_aPQXlYKwLwEdQTVmSO8#FU zIR!(LSgm;S1B{bldb-Q3Y6tz9c9RKazU!-Krssz(+NHO(s}5qd;#Rb zG;2XVs>nUg*0$iwV|}AhTT#42!MEu!AJjNpiBKLXz)0lZ5t2X(1>I#k%PzAnI3FB+#T});h(WAu1j-m?gtxuh)22eq)T%$a~p17Lxqly zceoDoYybY4P9CKwUezjmC}O=8yfCuKW-=NyLeX5%elwK2{-$S{BHO^h~GP zj%i@CxTyVaTdQOYZOlOWBeOm&{M!`h{-}tWRPuy$m(KE+j}A5*i0ihE7Z;Kb8SZ)@ zOvwHy?hVh8!gP}Gz5@IS{)fJSF%Ot<>=(>_mzxR{d&0y6=&2tcYmXN6LI*W6Fc1&g z?cS`pv}InGocvBJGIejK%2juu!_k?jACX}_5i-#Oy|Djya0Wz5bHx3bxZp43FgE*Y z)WMdOn~U}phS)l~azI`|6%^_ntlBn!qgofVDWVp3oi|q6kUd3cmhcMnu8a?2DQUj* zIx)nA9#-}%MuPf=RL#Z<;_&bK9vmqxAAYKM1|j7vEk=yf?+Za2rpr1mZQ#eGL$aApW&AwXc4?wtlE&N%pmK{@iw(EpM| zK%qUq!9qj-kC8^|sBJ>CQ)KXDRZZr4BK#k6cA8R0V0e0faOI&Iw(N0PvqkAOzV3Q;9j)t z-X;LhE_*Dk%5|T9VNUs;NK?wdWnus89}&VH=bLti*OVe&z+<8r=ET9dZK{ILu|B$+ zA^*VF?U3GZMp%h~y+n_ESwcirG-_3(^I=c|~W{!89Q_7$Nc`HX(Zu+P6N+)##Cg-*A} zk}MT?-JzNEtKL-=RB{aZjW}S+8mOq;m4Hcy-~0E-Ui?GQqQFz_=qVt9c@kFhfXIN^ z{=bo*NfmxEbV5i)?X$q63=Ix#5T^X-2+icOPzpeK_JgX_pfjgyO5?OT%IOJdVkz^> zmS1TD2l5tV6u#3_bd#tgL}&w(1Al9#*E&c+%wlrUiU~)oM6`o%NbHdd5=x6%?ITRh(|GlgLheUs&Nr=a#qliTAlh6iFxbOE+|fJ z;J7tI5%1vIc!6wfox}Qgywzvxq@ceDtLIRWrfIFqb%zyYL(Jk%bTgPFBv{nI3_e+St#nG8VCQub%&_Lv3cb z-itX}eR3B}54iV#qd}{HNxej@$?F-{8u*lWpc^O#FgMT!16jO3g8YG=0ME9>|F$Mk z`DtuRG3bQ2xFo@R4CbG3PjG#k?#%8YK;ZTdsWk%cU5y&zgYOm^Q{m|U!iRO)`9=aR z0)`kN(a{|6YM39atO9i=pv>4j3Ywzu{EVRU{i;;&0fL9e1vKa_Ya>lBE7XLx%!O%H zqdV8mtvITb$t$AYlX=pP(ss_{6Cbu(@Ee?4mjyU-YmAfDmST;?XgNIAJ$n*PT4~bk zGq~n>@2*`4oVF~7!7jb@gHnBdWd*nSXOhTJp*k~k_)*@zy$${0=(Mz)UH2mWCbDdm z?&FzjB%~Rx@DbMm2Pj-GdmM?#FXNy&v;&Wj<4G@{_gZ0pe!pCa6*IFH_(V9kxrb`a z&}{>=l%n1pFn~xo#%V9BR%fQ~R^c1nGK;a>Q#@TCn7bbR#{v#5^%O9pN2jEO6&F7S zx>hz!$KfLW{kRJyn~)K7nn7VfN={CGaajtA<%M}8Xx~23{+^aL*aw*7W zs-Dt(LxWW;_>N*9wSbBe_(w0sNdMAO zf!bC1qB>JN06Una{MX=o4Av8w1@is9?V&tnSAb2RgF)p`6Lg_4zYv36>0~h$bK)pD 
z*-@*$`VaFToawZmA4%q)+EANqikJwx-lLYLZyWxBRrqF#JzsnIOW0ImdVM@{4|9(~peF(S`)wW``!;NJJ|l~)^7bX)pVzEWf^&sq zqfgQ<1@>e|9J$|mIl_1`CN>rfln<$?3EkbL4jW+?jrsH`Bz4;80AmfV(hJI`(vs(?Iy z`1;N*&oCX zx!*G04?TJnkEK*JhrYr`f@t_tp@jMOw^si7=WNzckh0p@GiIu8%_#@SCH7mzHPqiK zuoE^kR0LwAN^j|xFCPM>?jvB%r!1&Fm*fv-?WZIqb( zaDDGopmXMO2_wpda_Mux`(kl;gDG*hmlMY!kQ4d_4ero42pI0Y&hNd|Q2%vpntWd^ z{WDE{@M-@8W*=ZIwT$sBv*Q4<0k-eig@rbk8P6X;NtkhB0HqKrr><@oraYDFnw@As z4t}0P+x=Uglni?G|4Z;1xqk2ey5Kd=to(35c)bT~H}?hJ(!=lt<_JbcpCPFM+h06W zZvyxaj9~8p@FXAD!a>BZ2<{^(6BCmCfATpl+hQQ2Li_?%0>IaPLP-hn8I+F{PC!iW z27}{6$sbJv1Six6W2G(it5=>v9@xkP1@Phznk^D2?(sZ+y@~HzxTk+XU`wcGcj=#CQ z4~|4SM}Hw6xL8?srG5=6>*(uC{-=9Z$Um5L>ug}^Crprrn~Gy!+JpUZc$vHEV4cVB zX!F6&`S#7%qea}06we&NSW_$^g^3ve^=vZI9NymkuJP220o?y;+77^(Yk?^;*st!5 zj&`s=smK7={LfT@U=HI(gRZC>=vX4Jv=8+54K5U#o#yGzEr8m$vP!N`DTs^-`=w+wdMURNaEsyd7gc{Zw~tj8 z@12j|kf@cg7#$N)*%l!hWl!Cc0IvlUmogr5Ss+sQ>3#S|qNp1uPbPYf8u{PNUCXTL zG|=rJQuz;a03C30zHK(s7z%o3uwRN?*MEAuxiLY%ig;?x3td~`m9@2UqYee+1pldxrLna&SZ^92K%%xb9F!XH zxI3UChJT3?Nes9+^={zAoGm;7MY>L-$9wQvf^(rC3J~ryfKlB5&FxFuD%;Xe=HJ0> zeR$Dn+n5vyY#SH_WaZ=l`0DX!>G6->znPAGv)aX0OjUn21U|}J&U@B9{^T2FO(GY@ zt0Iez$|F-gi#)9s>&twBi0}Q+({TTdcDnn0(gxr*_ZrVe%);GtugUj-L>;Peq=CT; zcf`XgAY&!+Ilfvp0Z%P_XL}vQG#ZT_Ws(i}bh3c6bay*EivuguC$3#2{$S+1&gI${ zKvFxaxg#*qe5H>D6{Z1bw}GC}F*Fn^6Z;ST3i3T#V};?uF=UwwArL5F0|pgtd^An& zKMilFsla(o9?b$e;YH{iB=4MfFi{xo26O~#483knJxBxpxCS){Tou}7v2n$mHy$s@B*om1V&~0Ua zBoC*{=FS{4H#gfx)GBt)_Z~%yayeMps1*7>S<>uf{_`VZ%iK+gv|>MLRB^-cR>XiSayVRml{Oui9d3>h6-JXIjGqoNe1U@3q==(;Wu<*q}d+;mG!0le?ns z8=VSzwW&Je(JT(m8tfM_#M5eQ-H8>Kz%~W%&^sWIbpe15R+5|G05b-ZEBMtafbMre zB8hz6U#Jti#{czq$0OvdI&PWUrrPzFV9YuCWq}{()k2*VjXF2>YP6x~Cn+PxusPl!PXvD8_X70(NMVLUMg~ur7I@B^d2P`HxMD+F~Q0hI;p8{1C#Q?4n$#@3D zjRUw`fN4REG_da2JGxVDkOYi=PHo3lt#Q$hieqBdKXH!YD&lH{3kD6NV z$tn3rCneER@QMF~+=*2FV%9M>O?;@fzFM5$uQjXD_n>1S+3Ucte)C2_R1}exmInD2 zxIp#=6{w_=(p@N~AJWme+`k>d360_|#l2n(mUnlQm}UngKP5Dc8CsvS6b3LIQ5@;D zGpkHoTZq(cZ6o!$&Z>%XiWhXF+^d_qDe%fTo(_d z?kx|Z%T_5ddfw}&ihU@9KbEFvD%Fe{AuKFgD^5h*r}RdeAdw!~yzi`ifkNpgx?`zQ zqVv$~kaXTxWp5HEMq>u1g1GMWe8wPNL(W{c9K4EcPzS!%|%sm3JVG0T@o9W z-k5%(Lhx$;h$1~O>gHWH3HPhCG)9xWd*%#2%$R3_BZJu=Rm3{JIhChH#gyIRNz*e& z4oGhEzxYIb3xTGKMq2Z<6+V22(Ivl|n?m$Ly3XYF1JN9u;^Odop1ygG_eCp(c4(;W zVkiMQ0a4DD^=HYXs)7PvBVUD1HjY6I3FZ9ry*NSqklxh?EOdD~HJp$g^93ThCPrTa zt`n>ZFKwGxc~@Z|*xMCN1I5M-Fkl!#5D7rk9RTmUv9Yno@mHP+? 
zFTUa7xAu-%{sto#Yhn2V_{z>Gy(sDi2Js46b-#kNpeE?>e0o3;~KC zMkyk!?inOO<$D1=MqgiFP(CwRE2E>f2sC$gVydf;9e0KlO243evtBXodAwcN8*b(8 ziyQ@yaA5wo+TLk%G9ErW$cK0U0MhW{;>|y} z*-+6)0sOSBrzZqjcHlz%N%kZhI*9NJHUole*c7NLkh>KJ=LYjXn6GNWhJOG44`cU~ zzDX2OaEA<7l_{yIGTsRXI!Zy(`0ast|91TK)cR)Rut!SFHSx)q-y55bq^}mH+klNk zED#efl{D3uoIL5aP|smTrrfii2NLhX?%+X=)^X=v;@e|Z4y!R(av$_8njoCQ!jZ>x zIIVMlT*u@F#;I7NZa8OB7<{ox(76L96Ov&*-smX+kv7W!6^1sVKQ1ZPO7Rcm>jgDA zG9Lt)!UO><&~23iq+IHzM;pWi5aRfcTABdXABnzR;Dx9WGcZx`Z4qPdg%-uV_H_wU zxuIM&J1FzcN%ru#y!Fu#N}%kl8u8yXA1fR7?&QLCk8xMlPCXBshM4bkWmSaA1nB-B z;*w;1n?#VU29Kj6Xf?E$)n-{W=tla+~*9S-lbK9=K;n!ljVk~vv!!}*2VoQFey3dUAUgvaUqnC=u(5kTM z)4OeI;U8^y!q+ybMO3%F-@n4gM^ql?>r1as*bFq}{0|~PTmJ-*H-BU_8@vddKyCyg zcM4^jNiTrzUC#~}Avt4WYiw-n$DcpRK#R*Kjs;U3_>S5EEPnF^EAm##DEiJi4ouZV z4t$`NuZYO>?+IU5lgd8eQ^VA4@F19*9I|3*D3ALfhNUbVdaBl)gD-YNtUbHqVM7Ye z?Wl)s)8lDiN#2vc{hmr7EJ68GWJl)ARaSD3j*cQ3;Bd*av$G&h zItGO!3!r3L$Btb&58)m81py?LzB~?#(rzHfK3p3?#%}@Notlv`7^tm{mn}EZeOn*W zVfyw9s-K9nz2|&c5fMD)XgW|F!6X=}a24S@NV4}r4f(yb39@!pBMU+`)b8s z*G6xB2_eGK6c;m$V#CP*&m8dFfwUM8C0*fi|9$AF@!i>hP)Q_v*bTBffIYqsITX9V zNQRIc9H4&!z4tTBrGV@CtWDP)(geeQk;41O2WfB6lCA=@BJ4FpM_W_OzlU$2Zxh^@ zyX&-8(@N}hOYeK0X0>p7)3O?Ih^r_{pS}tM5g9grb9rHJ_0<7CqLx!vE5TZ32FH`p zMpa9<>c6N)CjB*hevd@bBaCdV2LNvbDI4EJO$a^erDf@4auo=m*Agrhk*8LJ=BA*a z@pjYK^hZQjaS@KXv{>eznmwXoWx7ECh^Scr8HJ1x5nxjz8Jpj2XZqWN32NL9Exo$5 z@`89_v?f6RbS0_$*!uR&P~D_+h-;(4T;Z*R5)4+gPt@TyE>&golQ_Qkz6kbV?yLrL zcenU$|BBcBu8Ksm7{GUXPA_F-Wi1Det=7FqUFw84ZnWD>6ogD2?_}2g%~BvQa&!9e z&G4Ct>(5_w44()s!La0YxNaBFdkW&snpbthx)5iNA8%ZZr z&n7Z;H#fH4yw}KYaJI#HN}Kg)XS>*2)6 zwoFN}P^J@XnFdd7qB`62+gU~9o;f)Mj!4$; zH#sJuqaqk-KKMPmg0kw_-s%3D1KNPfe{>h?qbnaj5`Omc_05vjK%hi`-)p<+x*y&) zIKQF+YZy9VAt2yWfqv`-De?dj$-p_=oxR+n_v^lN=ui<2kKa7EUQmlV+QH53r9<|u zhrOkb&CeS)O&C-SIb}5yKI{8?ePftF6-vF(7Gk-(=>M}z+UTh@)!V5i6Upa!IM8Xf znk0m(o4rKyS^F0Ab3)l*$2*^crKLOie@1D@0D6Z5ub;Gs+itM8BeLCuIcJX zHkpT$QiD;|CtdC{)fK7B`NP5JrEMmo_f$NIoKM-SlnQt{P5h)*)fUx|6rJYjRYieh9>^#%(*>nxN^kR*s!>;NZSSReL{u>sr}C+&77 zE}am2_bo;mhOnCPzZ1c$G;Q6ljH%lkC({WqMaQDZ7q9snq}HIb#L1Ul>n%I8=gDZQ zS^zJmM^P3Q7Qy}izY&j(coI?FKr_Oz$-L|wXx&g|uzgmNeeuU$C|PN{z(AGLU@~Hz zYu|x^b8FT^89`1)20b8FC~$u9}~li`E(1`@u2#A-5&*uH*xgIzcMKs zATMS3=d15iQ=NNOy{;)IBnUX%kqZj^>MbOW$dTw(r8(h^ij$2|NTY@G3w|WIJ4IdT zJNu&Aw;i871@P)hgB-TRz6Ja_Hl?NFVvUCvD}Pg&J>Py`7G5ZQk6jEnc_O=RP;mlo zvlWQ+_l4ZJW<2)gy>K6bg_bA=o#Regps}kHz7d7xSjFSq)!*D~XEo0je$SsAk2V$6 zlm!531Hp2OWmT=@fo`u;$M0X|b{K>ncG5T)-NdL}VPVhTScTd<+ZOYBgjDkyRarj- zi$K>q!Yo#ve0@q}b!PKR6khz#CQNZ;O#~g0fp3-zyq=5q%g)y!_iD)f`|^tE_3x}3 z-QC@AN=86q9V~}^Q4gQ({pV3CF@V;4I#ly1zBu=Km(wd(#|jpYlFF9xTDwJ0wAR>G zO=nM{2XAbt_+D0$bab}|PcOL-^;eKfI(!jNP}+Hj-{dVaS78==+A8?2<^$Rh!6QJ$ z^W|5)fj0SLdx5u9LcO1|jm-i~-BmY)FB9u6eLuHdc0GTJE@ZjzT_Ki_G|=<@q|oyP1gmix-pS^v2@WlDoS zcDD)v?o8AAmDgX-1byB9h;xB1kAz3#Lp`xxL}zo_t?GUbc!VUo=f?z`*42~Yg_87W zGU79BsR!=IBORCk2nd~84vZ)ZdaOw(^P<;e%v({zdyWGoGEL-3aoC~7%Z^~(YyUki z4xvA;KAg=htx*IEq|1NEnyJgMX>Wp63NCW5LJpRX?JYY9$_{dk1@UF+vAigWm&S@F?41iwsp5XQ(sXOT6UwB{FEUY{a-3=wJTBG1h4Hz zrnmb0X8$E6tEy%VrIO8VpTHWl#>O@qh-X*fBn0VIb8Z-UYJKQXbqYDAgRW&A@xuDZ z`>tOpqjiL(SgZ?|KwRA)5fuhCddyjxOZ zKyyJ_FSy>`dgxD7>zw&fD*9IUUQU)t&ex^>dun?6GSWuah)xG%{)8=Q#nLq1k3mJ) zzAlQA@6dPM9TG2P-&kpbi(L88gef_SyF!oz0y>#^YT7ty0IZu zJpo+azoA^g%Y8PZt5Yn-P3}T*Q_jVsBNFvvFP_#L--2Xc7_WES9o5JF_x5C;Z?Hc> z(rJfP;fH01{o^A;w$5&140d7RG{A%-Bd7>*M$l!zpDcWR;fSnm=2-7x>n)zW)tI^Y zxKP(4ub_TOM^)i3%T0Zq9Xax@=1gBR>3c(Wu8o;l7{iFh)V{9v)2nc*K75iQpSt?8 z@s5pLWrvcUQ!6vJFa@E#y`XMw*otalRE(-)M_7FW2RLH$IzmlFee&XAT*!u=; 
z9!GR5G4MGs#~c#-@_9V*eUZK9!V$Ip>$5LfYQ1jWf--I@^-9TO?7hc#4UA*Ps2(Z&qCmYzVw3~plp#UndKKtAK9_TpQu%6NZnRYP1iorv3wtGigHuS zshZe5G8^;&=T~ny>EhjU!c^~{sc$GgsWR(w4v`c#Z2932dhm~`%&xUg_j~-%5xC5+ zn%|DIzgQcQ094SdrmVp2jD`mO+ZnS{b^?O$ZxQ=WIB@vowL3Yxni9+LyAtO*n?7O* zo8FeNvlVY_N8#e&K!%wDGF$NrB%3UJvl#Tv%F3!D|NINj9s$Vf4+~#gw+|0Dn8ha) z(>rDhQO@Ww*tGE$RNCpT8qav7e8F>K$vd3k?SD0FUqB;nhK++^DvQHmbv?J5?)~{) z!uoWE(`)O%TOHr*jf>Ygd6GoYPZf1!F$Gvf44*X{vts=y=(Txq_*#7=Nnu2n!NykY z9qKzw^NB7R z5Jq1jzoMQ=No2!(uiVc!lE~DJzigf7?%_E;VYD)0C6VF%=C6kzD=pr$1gajwXzhjN zh)3dQDD~p;bGL*{pNX=&mj*;9{d%DiSAcL2OArh<)6U0 z3#_XCA3j(v=>q=TxHAl|#s|ZfxLdkgSLo%rZ~T{_=**u6+aDTO8fxR}$_gY{9mx4? z1SfyxdQi%0#X%<-gOXWA60=OZ3jc83)+C@ng?5ck!1#&9-Fr4#G_j$K*#7c%PMi-= zDJmFeIUbe`q?cuH%CG87`F~eUE#u&3czz?4{&XI-1V+AaD5rb;_?Ei*$XZ)Q1{F~B zb`B1P^B$0poP2L1!htUDJLWhX-UgD}eF)P|W~QE3h3^CPyLF{M9;YmZnSB2{)f!^O zj4iLypd)|qVnwI+iBP?pf%(N8+Z#MWd0Kzplc~C3o%)YqQeud%&5f`3Pq;yTO*1qG zWs}GomES}gHKaXfH(OF_D2rKnp2Y2I5qa{*Fh(EX{o6 zDschfB`PT6(b}`lJmFzp!5H%Jlgpvgm+`r3=pI@BNydJYfWEj6u zD7(5W+ZrS%kBzi_sHXgoS~y<*{X^I||9LI$OJtXfcEWWiU~SLyYBx63^v=Xl8=`nb zL!}WXn=P57^bwtsJQ(k*C&S+F? z@`f;mNzH<9-2qu&o0atax)hDn-tmfZ`iC1G>P5Tso9;jTv#lYlb>W6VqU}=ghe9vA zGat-ZW54@3_$Qhntz+ZK@1bl{2`qTCGtF+SN79?=i@~9N7P0|IAX8c@g6tvpp)0D1 zObjH9ug(5KZINmL+>awKtq6v$r^UssVT!EczpE?a-Dm#P^L5nhf>FrAzwF14Vbe-7 ztHDxn5ley5tBmDk0Cso3Ck_VDN6bJPhNziEkaOgg#_@Kf&hxJfge4-u=J2^`vjyCZ zz3^ysa^_)1Zk&a(3Li;>{CS}3m(l?-6d;}hmPN<(w4UHF2IZrC`>*C?I#*JaHu`vz z*SC+BZmO3+*G|QO>x4nBlyymEkxrDTCmYWA$U?_8Ida_tJA?u$o;V$fq$L-)?ejwM zPVaz7?s%ep#v_%FjgCgKLlbXTZ>+PSJ8sI<*Z!UU>vJ>`GBjAnOYuF&pJ6Ug25N3# z7|}zD1ZXQ4rCeYE|8e={t20%*2xijlVe;F;3#yD$!uXto_cy%0Cc~4mhXTM=58OTj zvYwLZd!BYrT$qucyg+`HEy3Zw5&ms5C8CuzqlreLr}}q92fFj^{Lj0T80aEqf=$}| ze~?vEDAbB`JHJp=mwdwJj{KNrhA?Y_Ce#g}m!$DU@)m?r_@gqMoODUQe>ODtv$Vjb z5dRlYZ!WWxI@}Uz&1oxkk0gy+E^dOJ zB>$dyw+V>Z!uy)vuha$KiFN7zkVpGe2iwfi0NZePAUDjF@6%Aeo4s)1d{|{LcG*5_ z7cs^TNJ)&EeNZ6NUMW)?Q=hOao3sY|+c~i^JOP-@AC`dMlu{1IQSEotbhKL%1hl19^BQNTeon1G zSx_vbWt?%oTit*m5gWFHopyC4!13UzXn^Iz_xX``zCQ~zSXol-`t3<39n)TeEREm& z++xn8%`H4Ykl*KGv60JNW94Er4}7ol9KpZ>qrwe{2nAW$5_GcgLPgOjpDMc;_9ouN z&zL+q!_~S((0Aa#``cW+qK(`rJntOmKdXsjQ%i$0#9%wz>m_OiC(bWlT+f#E;ea0+ zmw@RblR7zmn><7rqF0&!O0~w06F$WfHiJ!+LP%aMvF^{uUuACglj@HnKKKV6YZ4+K z6~yc!<2F=Uvsh1*qG@@ri8rn>IbSyaLKW6U5D65t{_@U%Uj)QlFAf{E=!#cgP5CP_^BWYg866R+?ky!ELo;b4 z!)5{J3DMumR(R6MPbuG6GPhub(YB1pf3?bD>P-KO{YgHM^cpSo&J!-N0O}8^$1dd? zlbjUoLI_cPsr2vJlfPjf*ZHXn9nbd&WcwFPCb_UTQA<|lH8&3t{4spSVe|VrgBM2E zP&?Om9b7}>X5%*jba}n0EV!$)#UT^RYA>?-+$p;fb(;B0-`)3O2o-(KIvW|bXc0f} zViRilC!h4&HJY^`EN`gPj5IRjJ#^H3sE8**PlVad`Jm7S7GS6}Ms0hOR6$0WK|#Jg8NN1|Wj*+M>SD1g90w-O<8=Qd^mWS0U*Gcz3&PHld&b69{#ZM_fXE$X zruw+_uJh#ST5n6*vVQ)y`7r}CyckL!DQa+?g7CgK+pR=kss%Z?3o_U&

-
-### Batch Size = 64, New Tokens = 100
-
-
-### Batch Size = 128, New Tokens = 10
-
-
-### Batch Size = 128, New Tokens = 100
-
-
diff --git a/examples/whisper/configs/whisper_auto_opt+qnt.yaml b/examples/whisper/configs/whisper_auto_opt+qnt.yaml deleted file mode 100644 index 614e734e..00000000 --- a/examples/whisper/configs/whisper_auto_opt+qnt.yaml +++ /dev/null @@ -1,13 +0,0 @@ -defaults: - - whisper_baseline - - _self_ - - override backend: onnxruntime - -hydra: - sweeper: - params: - backend.auto_optimization: O1,O2,O3,O4 - backend.auto_quantization: arm64,avx2,avx512,avx512_vnni - backend.auto_quantization_config.per_channel: false # blocking decoder quantization - -experiment_name: whisper_auto_opt(${backend.auto_optimization})+auto_qnt(${backend.auto_quantization}) diff --git a/examples/whisper/configs/whisper_auto_opt.yaml b/examples/whisper/configs/whisper_auto_opt.yaml deleted file mode 100644 index e3da38b1..00000000 --- a/examples/whisper/configs/whisper_auto_opt.yaml +++ /dev/null @@ -1,16 +0,0 @@ -defaults: - - whisper_baseline - - _self_ - - override backend: onnxruntime - -hydra: - sweeper: - params: - backend.auto_optimization: null,O1,O2,O3,O4 - -experiment_name: whisper_auto_opt(${backend.auto_optimization}) - -backend: - no_weights: false - use_cache: true - use_merged: false \ No newline at end of file diff --git a/examples/whisper/configs/whisper_auto_qnt.yaml b/examples/whisper/configs/whisper_auto_qnt.yaml deleted file mode 100644 index bfe447c9..00000000 --- a/examples/whisper/configs/whisper_auto_qnt.yaml +++ /dev/null @@ -1,12 +0,0 @@ -defaults: - - whisper_baseline - - _self_ - - override backend: onnxruntime - -hydra: - sweeper: - params: - backend.auto_quantization: arm64,avx2,avx512,avx512_vnni - backend.auto_quantization_config.per_channel: false # blocking decoder quantization - -experiment_name: whisper_auto_qnt(${backend.auto_quantization}) diff --git a/examples/whisper/configs/whisper_baseline.yaml b/examples/whisper/configs/whisper_baseline.yaml deleted file mode 100644 index dbfb5606..00000000 --- a/examples/whisper/configs/whisper_baseline.yaml +++ /dev/null @@ -1,29 +0,0 @@ -defaults: - - backend: pytorch # default backend - - benchmark: inference # default benchmark - - experiment # inheriting from experiment config - - _self_ # for hydra 1.1 compatibility - - override hydra/job_logging: colorlog # colorful logging - - override hydra/hydra_logging: colorlog # colorful logging - -# for whisper experiments I would like to separate them based -# on device + backend + batch size + new tokens -hydra: - run: - dir: experiments/${device}_${backend.name}_${benchmark.input_shapes.batch_size}_${benchmark.new_tokens}/${experiment_name} - sweep: - dir: experiments/${device}_${backend.name}_${benchmark.input_shapes.batch_size}_${benchmark.new_tokens}/${experiment_name} - job: - chdir: true # to change the working directory during the run/sweep directory - sweeper: - params: - benchmark.new_tokens: 10,100 - benchmark.input_shapes.batch_size: 64,128 - -experiment_name: whisper_baseline -model: openai/whisper-base - -backend: - no_weights: false - initial_isolation_check: false - continous_isolation_check: false diff --git a/examples/whisper/experiments/cuda_onnxruntime_128_10/whisper_auto_opt(None)/5/.hydra/config.yaml b/examples/whisper/experiments/cuda_onnxruntime_128_10/whisper_auto_opt(None)/5/.hydra/config.yaml deleted file mode 100644 index bc53e9dd..00000000 --- a/examples/whisper/experiments/cuda_onnxruntime_128_10/whisper_auto_opt(None)/5/.hydra/config.yaml +++ /dev/null @@ -1,110 +0,0 @@ -backend: - name: onnxruntime - version: 1.15.1 - _target_: 
optimum_benchmark.backends.onnxruntime.ORTBackend - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: false - continous_isolation_check: false - delete_cache: false - export: true - no_weights: false - use_merged: false - use_cache: true - torch_dtype: null - provider: ${infer_provider:${device}} - device_id: ${infer_device_id:${device}} - use_io_binding: ${is_gpu:${device}} - enable_profiling: ${is_profiling:${benchmark.name}} - optimization: false - optimization_config: - optimization_level: 1 - optimize_for_gpu: ${is_gpu:${device}} - fp16: false - enable_transformers_specific_optimizations: true - enable_gelu_approximation: false - disable_gelu_fusion: false - disable_layer_norm_fusion: false - disable_attention_fusion: false - disable_skip_layer_norm_fusion: true - disable_bias_skip_layer_norm_fusion: false - disable_bias_gelu_fusion: false - use_mask_index: false - no_attention_mask: false - disable_embed_layer_norm_fusion: true - disable_shape_inference: false - use_multi_head_attention: false - enable_gemm_fast_gelu_fusion: false - use_raw_attention_mask: false - disable_group_norm_fusion: true - disable_packed_kv: true - auto_optimization: null - auto_optimization_config: - for_gpu: ${is_gpu:${device}} - quantization: false - quantization_config: - is_static: false - format: QOperator - mode: IntegerOps - activations_dtype: QUInt8 - activations_symmetric: false - weights_dtype: QInt8 - weights_symmetric: true - per_channel: false - reduce_range: false - operators_to_quantize: - - MatMul - - Add - auto_quantization: null - auto_quantization_config: - is_static: false - calibration: ${requires_calibration:${backend.auto_quantization_config.is_static}, - ${backend.quantization_config.is_static}} - calibration_config: - dataset_name: glue - num_samples: 300 - dataset_config_name: sst2 - dataset_split: train - preprocess_batch: true - preprocess_class: optimum_benchmark.preprocessors.glue.GluePreprocessor - use_ortmodel: ${is_inference:${benchmark.name}} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.InferenceBenchmark - seed: 42 - memory: false - warmup_runs: 10 - benchmark_duration: 10 - input_shapes: - batch_size: 128 - sequence_length: 16 - num_choices: 1 - width: 64 - height: 64 - num_channels: 3 - point_batch_size: 3 - nb_points_per_image: 2 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: 10 -experiment_name: whisper_auto_opt(${backend.auto_optimization}) -model: openai/whisper-base -device: cuda:1 -task: ${infer_task:${model}, ${hub_kwargs.revision}} -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false - use_auth_token: false -environment: - optimum_version: 1.11.1.dev0 - transformers_version: 4.32.0.dev0 - accelerate_version: 0.22.0.dev0 - diffusers_version: 0.20.0.dev0 - python_version: 3.9.17 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 515637 diff --git a/examples/whisper/experiments/cuda_onnxruntime_128_10/whisper_auto_opt(None)/5/.hydra/hydra.yaml b/examples/whisper/experiments/cuda_onnxruntime_128_10/whisper_auto_opt(None)/5/.hydra/hydra.yaml deleted file mode 100644 index 735d6ff2..00000000 --- a/examples/whisper/experiments/cuda_onnxruntime_128_10/whisper_auto_opt(None)/5/.hydra/hydra.yaml +++ /dev/null @@ -1,177 +0,0 @@ -hydra: - run: - dir: experiments/${device}_${backend.name}_${benchmark.input_shapes.batch_size}_${benchmark.new_tokens}/${experiment_name} - sweep: - dir: 
experiments/${device}_${backend.name}_${benchmark.input_shapes.batch_size}_${benchmark.new_tokens}/${experiment_name} - subdir: ${hydra.job.num} - launcher: - _target_: hydra._internal.core_plugins.basic_launcher.BasicLauncher - sweeper: - _target_: hydra._internal.core_plugins.basic_sweeper.BasicSweeper - max_batch_size: null - params: - benchmark.new_tokens: 10,100 - benchmark.input_shapes.batch_size: 64,128 - backend.auto_optimization: null,O1,O2,O3,O4 - help: - app_name: ${hydra.job.name} - header: '${hydra.help.app_name} is powered by Hydra. - - ' - footer: 'Powered by Hydra (https://hydra.cc) - - Use --hydra-help to view Hydra specific help - - ' - template: '${hydra.help.header} - - == Configuration groups == - - Compose your configuration from those groups (group=option) - - - $APP_CONFIG_GROUPS - - - == Config == - - Override anything in the config (foo.bar=value) - - - $CONFIG - - - ${hydra.help.footer} - - ' - hydra_help: - template: 'Hydra (${hydra.runtime.version}) - - See https://hydra.cc for more info. - - - == Flags == - - $FLAGS_HELP - - - == Configuration groups == - - Compose your configuration from those groups (For example, append hydra/job_logging=disabled - to command line) - - - $HYDRA_CONFIG_GROUPS - - - Use ''--cfg hydra'' to Show the Hydra config. - - ' - hydra_help: ??? - hydra_logging: - version: 1 - formatters: - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s' - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - root: - level: INFO - handlers: - - console - disable_existing_loggers: false - job_logging: - version: 1 - formatters: - simple: - format: '[%(asctime)s][%(name)s][%(levelname)s] - %(message)s' - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s] - - %(message)s' - log_colors: - DEBUG: purple - INFO: green - WARNING: yellow - ERROR: red - CRITICAL: red - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - file: - class: logging.FileHandler - formatter: simple - filename: ${hydra.job.name}.log - root: - level: INFO - handlers: - - console - - file - disable_existing_loggers: false - env: {} - mode: MULTIRUN - searchpath: [] - callbacks: {} - output_subdir: .hydra - overrides: - hydra: - - hydra.mode=MULTIRUN - task: - - benchmark.new_tokens=10 - - benchmark.input_shapes.batch_size=128 - - backend.auto_optimization=null - - device=cuda\:1 - job: - name: main - chdir: true - override_dirname: backend.auto_optimization=null,benchmark.input_shapes.batch_size=128,benchmark.new_tokens=10,device=cuda\:1 - id: '5' - num: 5 - config_name: whisper_auto_opt - env_set: {} - env_copy: [] - config: - override_dirname: - kv_sep: '=' - item_sep: ',' - exclude_keys: [] - runtime: - version: 1.3.2 - version_base: '1.3' - cwd: /home/ilyas/optimum-benchmark/examples/whisper - config_sources: - - path: hydra.conf - schema: pkg - provider: hydra - - path: optimum_benchmark - schema: pkg - provider: main - - path: hydra_plugins.hydra_colorlog.conf - schema: pkg - provider: hydra-colorlog - - path: /home/ilyas/optimum-benchmark/examples/whisper/configs - schema: file - provider: command-line - - path: '' - schema: structured - provider: schema - output_dir: /home/ilyas/optimum-benchmark/examples/whisper/experiments/cuda:1_onnxruntime_128_10/whisper_auto_opt(None)/5 - choices: - benchmark: inference - 
backend: onnxruntime - hydra/env: default - hydra/callbacks: null - hydra/job_logging: colorlog - hydra/hydra_logging: colorlog - hydra/hydra_help: default - hydra/help: default - hydra/sweeper: basic - hydra/launcher: basic - hydra/output: default - verbose: false diff --git a/examples/whisper/experiments/cuda_onnxruntime_128_10/whisper_auto_opt(None)/5/.hydra/overrides.yaml b/examples/whisper/experiments/cuda_onnxruntime_128_10/whisper_auto_opt(None)/5/.hydra/overrides.yaml deleted file mode 100644 index 674a028b..00000000 --- a/examples/whisper/experiments/cuda_onnxruntime_128_10/whisper_auto_opt(None)/5/.hydra/overrides.yaml +++ /dev/null @@ -1,4 +0,0 @@ -- benchmark.new_tokens=10 -- benchmark.input_shapes.batch_size=128 -- backend.auto_optimization=null -- device=cuda\:1 diff --git a/examples/whisper/experiments/cuda_onnxruntime_128_10/whisper_auto_opt(None)/5/hydra_config.yaml b/examples/whisper/experiments/cuda_onnxruntime_128_10/whisper_auto_opt(None)/5/hydra_config.yaml deleted file mode 100644 index 2b3b39a0..00000000 --- a/examples/whisper/experiments/cuda_onnxruntime_128_10/whisper_auto_opt(None)/5/hydra_config.yaml +++ /dev/null @@ -1,109 +0,0 @@ -backend: - name: onnxruntime - version: 1.15.1 - _target_: optimum_benchmark.backends.onnxruntime.ORTBackend - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: false - continous_isolation_check: false - delete_cache: false - export: true - no_weights: false - use_merged: false - use_cache: true - torch_dtype: null - provider: CUDAExecutionProvider - device_id: 1 - use_io_binding: true - enable_profiling: false - optimization: false - optimization_config: - optimization_level: 1 - optimize_for_gpu: true - fp16: false - enable_transformers_specific_optimizations: true - enable_gelu_approximation: false - disable_gelu_fusion: false - disable_layer_norm_fusion: false - disable_attention_fusion: false - disable_skip_layer_norm_fusion: true - disable_bias_skip_layer_norm_fusion: false - disable_bias_gelu_fusion: false - use_mask_index: false - no_attention_mask: false - disable_embed_layer_norm_fusion: true - disable_shape_inference: false - use_multi_head_attention: false - enable_gemm_fast_gelu_fusion: false - use_raw_attention_mask: false - disable_group_norm_fusion: true - disable_packed_kv: true - auto_optimization: null - auto_optimization_config: - for_gpu: true - quantization: false - quantization_config: - is_static: false - format: QOperator - mode: IntegerOps - activations_dtype: QUInt8 - activations_symmetric: false - weights_dtype: QInt8 - weights_symmetric: true - per_channel: false - reduce_range: false - operators_to_quantize: - - MatMul - - Add - auto_quantization: null - auto_quantization_config: - is_static: false - calibration: false - calibration_config: - dataset_name: glue - num_samples: 300 - dataset_config_name: sst2 - dataset_split: train - preprocess_batch: true - preprocess_class: optimum_benchmark.preprocessors.glue.GluePreprocessor - use_ortmodel: true -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.InferenceBenchmark - seed: 42 - memory: false - warmup_runs: 10 - benchmark_duration: 10 - input_shapes: - batch_size: 128 - sequence_length: 16 - num_choices: 1 - width: 64 - height: 64 - num_channels: 3 - point_batch_size: 3 - nb_points_per_image: 2 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: 10 -experiment_name: whisper_auto_opt(None) -model: openai/whisper-base -device: cuda:1 -task: 
automatic-speech-recognition -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false - use_auth_token: false -environment: - optimum_version: 1.11.1.dev0 - transformers_version: 4.32.0.dev0 - accelerate_version: 0.22.0.dev0 - diffusers_version: 0.20.0.dev0 - python_version: 3.9.17 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 515637 diff --git a/examples/whisper/experiments/cuda_onnxruntime_128_10/whisper_auto_opt(None)/5/inference_results.csv b/examples/whisper/experiments/cuda_onnxruntime_128_10/whisper_auto_opt(None)/5/inference_results.csv deleted file mode 100644 index f882c464..00000000 --- a/examples/whisper/experiments/cuda_onnxruntime_128_10/whisper_auto_opt(None)/5/inference_results.csv +++ /dev/null @@ -1,2 +0,0 @@ -,forward.latency(s),forward.throughput(samples/s),generate.latency(s),generate.throughput(tokens/s) -0,0.655,195.0,0.696,1840.0 diff --git a/examples/whisper/experiments/cuda_onnxruntime_128_10/whisper_auto_opt(O1)/6/.hydra/config.yaml b/examples/whisper/experiments/cuda_onnxruntime_128_10/whisper_auto_opt(O1)/6/.hydra/config.yaml deleted file mode 100644 index a7997175..00000000 --- a/examples/whisper/experiments/cuda_onnxruntime_128_10/whisper_auto_opt(O1)/6/.hydra/config.yaml +++ /dev/null @@ -1,110 +0,0 @@ -backend: - name: onnxruntime - version: 1.15.1 - _target_: optimum_benchmark.backends.onnxruntime.ORTBackend - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: false - continous_isolation_check: false - delete_cache: false - export: true - no_weights: false - use_merged: false - use_cache: true - torch_dtype: null - provider: ${infer_provider:${device}} - device_id: ${infer_device_id:${device}} - use_io_binding: ${is_gpu:${device}} - enable_profiling: ${is_profiling:${benchmark.name}} - optimization: false - optimization_config: - optimization_level: 1 - optimize_for_gpu: ${is_gpu:${device}} - fp16: false - enable_transformers_specific_optimizations: true - enable_gelu_approximation: false - disable_gelu_fusion: false - disable_layer_norm_fusion: false - disable_attention_fusion: false - disable_skip_layer_norm_fusion: true - disable_bias_skip_layer_norm_fusion: false - disable_bias_gelu_fusion: false - use_mask_index: false - no_attention_mask: false - disable_embed_layer_norm_fusion: true - disable_shape_inference: false - use_multi_head_attention: false - enable_gemm_fast_gelu_fusion: false - use_raw_attention_mask: false - disable_group_norm_fusion: true - disable_packed_kv: true - auto_optimization: O1 - auto_optimization_config: - for_gpu: ${is_gpu:${device}} - quantization: false - quantization_config: - is_static: false - format: QOperator - mode: IntegerOps - activations_dtype: QUInt8 - activations_symmetric: false - weights_dtype: QInt8 - weights_symmetric: true - per_channel: false - reduce_range: false - operators_to_quantize: - - MatMul - - Add - auto_quantization: null - auto_quantization_config: - is_static: false - calibration: ${requires_calibration:${backend.auto_quantization_config.is_static}, - ${backend.quantization_config.is_static}} - calibration_config: - dataset_name: glue - num_samples: 300 - dataset_config_name: sst2 - dataset_split: train - preprocess_batch: true - preprocess_class: optimum_benchmark.preprocessors.glue.GluePreprocessor - use_ortmodel: ${is_inference:${benchmark.name}} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.InferenceBenchmark - seed: 42 - memory: false - 
warmup_runs: 10 - benchmark_duration: 10 - input_shapes: - batch_size: 128 - sequence_length: 16 - num_choices: 1 - width: 64 - height: 64 - num_channels: 3 - point_batch_size: 3 - nb_points_per_image: 2 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: 10 -experiment_name: whisper_auto_opt(${backend.auto_optimization}) -model: openai/whisper-base -device: cuda:1 -task: ${infer_task:${model}, ${hub_kwargs.revision}} -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false - use_auth_token: false -environment: - optimum_version: 1.11.1.dev0 - transformers_version: 4.32.0.dev0 - accelerate_version: 0.22.0.dev0 - diffusers_version: 0.20.0.dev0 - python_version: 3.9.17 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 515637 diff --git a/examples/whisper/experiments/cuda_onnxruntime_128_10/whisper_auto_opt(O1)/6/.hydra/hydra.yaml b/examples/whisper/experiments/cuda_onnxruntime_128_10/whisper_auto_opt(O1)/6/.hydra/hydra.yaml deleted file mode 100644 index b210edeb..00000000 --- a/examples/whisper/experiments/cuda_onnxruntime_128_10/whisper_auto_opt(O1)/6/.hydra/hydra.yaml +++ /dev/null @@ -1,177 +0,0 @@ -hydra: - run: - dir: experiments/${device}_${backend.name}_${benchmark.input_shapes.batch_size}_${benchmark.new_tokens}/${experiment_name} - sweep: - dir: experiments/${device}_${backend.name}_${benchmark.input_shapes.batch_size}_${benchmark.new_tokens}/${experiment_name} - subdir: ${hydra.job.num} - launcher: - _target_: hydra._internal.core_plugins.basic_launcher.BasicLauncher - sweeper: - _target_: hydra._internal.core_plugins.basic_sweeper.BasicSweeper - max_batch_size: null - params: - benchmark.new_tokens: 10,100 - benchmark.input_shapes.batch_size: 64,128 - backend.auto_optimization: null,O1,O2,O3,O4 - help: - app_name: ${hydra.job.name} - header: '${hydra.help.app_name} is powered by Hydra. - - ' - footer: 'Powered by Hydra (https://hydra.cc) - - Use --hydra-help to view Hydra specific help - - ' - template: '${hydra.help.header} - - == Configuration groups == - - Compose your configuration from those groups (group=option) - - - $APP_CONFIG_GROUPS - - - == Config == - - Override anything in the config (foo.bar=value) - - - $CONFIG - - - ${hydra.help.footer} - - ' - hydra_help: - template: 'Hydra (${hydra.runtime.version}) - - See https://hydra.cc for more info. - - - == Flags == - - $FLAGS_HELP - - - == Configuration groups == - - Compose your configuration from those groups (For example, append hydra/job_logging=disabled - to command line) - - - $HYDRA_CONFIG_GROUPS - - - Use ''--cfg hydra'' to Show the Hydra config. - - ' - hydra_help: ??? 
- hydra_logging: - version: 1 - formatters: - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s' - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - root: - level: INFO - handlers: - - console - disable_existing_loggers: false - job_logging: - version: 1 - formatters: - simple: - format: '[%(asctime)s][%(name)s][%(levelname)s] - %(message)s' - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s] - - %(message)s' - log_colors: - DEBUG: purple - INFO: green - WARNING: yellow - ERROR: red - CRITICAL: red - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - file: - class: logging.FileHandler - formatter: simple - filename: ${hydra.job.name}.log - root: - level: INFO - handlers: - - console - - file - disable_existing_loggers: false - env: {} - mode: MULTIRUN - searchpath: [] - callbacks: {} - output_subdir: .hydra - overrides: - hydra: - - hydra.mode=MULTIRUN - task: - - benchmark.new_tokens=10 - - benchmark.input_shapes.batch_size=128 - - backend.auto_optimization=O1 - - device=cuda\:1 - job: - name: main - chdir: true - override_dirname: backend.auto_optimization=O1,benchmark.input_shapes.batch_size=128,benchmark.new_tokens=10,device=cuda\:1 - id: '6' - num: 6 - config_name: whisper_auto_opt - env_set: {} - env_copy: [] - config: - override_dirname: - kv_sep: '=' - item_sep: ',' - exclude_keys: [] - runtime: - version: 1.3.2 - version_base: '1.3' - cwd: /home/ilyas/optimum-benchmark/examples/whisper - config_sources: - - path: hydra.conf - schema: pkg - provider: hydra - - path: optimum_benchmark - schema: pkg - provider: main - - path: hydra_plugins.hydra_colorlog.conf - schema: pkg - provider: hydra-colorlog - - path: /home/ilyas/optimum-benchmark/examples/whisper/configs - schema: file - provider: command-line - - path: '' - schema: structured - provider: schema - output_dir: /home/ilyas/optimum-benchmark/examples/whisper/experiments/cuda:1_onnxruntime_128_10/whisper_auto_opt(O1)/6 - choices: - benchmark: inference - backend: onnxruntime - hydra/env: default - hydra/callbacks: null - hydra/job_logging: colorlog - hydra/hydra_logging: colorlog - hydra/hydra_help: default - hydra/help: default - hydra/sweeper: basic - hydra/launcher: basic - hydra/output: default - verbose: false diff --git a/examples/whisper/experiments/cuda_onnxruntime_128_10/whisper_auto_opt(O1)/6/.hydra/overrides.yaml b/examples/whisper/experiments/cuda_onnxruntime_128_10/whisper_auto_opt(O1)/6/.hydra/overrides.yaml deleted file mode 100644 index f7b480cf..00000000 --- a/examples/whisper/experiments/cuda_onnxruntime_128_10/whisper_auto_opt(O1)/6/.hydra/overrides.yaml +++ /dev/null @@ -1,4 +0,0 @@ -- benchmark.new_tokens=10 -- benchmark.input_shapes.batch_size=128 -- backend.auto_optimization=O1 -- device=cuda\:1 diff --git a/examples/whisper/experiments/cuda_onnxruntime_128_10/whisper_auto_opt(O1)/6/hydra_config.yaml b/examples/whisper/experiments/cuda_onnxruntime_128_10/whisper_auto_opt(O1)/6/hydra_config.yaml deleted file mode 100644 index 379f39e6..00000000 --- a/examples/whisper/experiments/cuda_onnxruntime_128_10/whisper_auto_opt(O1)/6/hydra_config.yaml +++ /dev/null @@ -1,109 +0,0 @@ -backend: - name: onnxruntime - version: 1.15.1 - _target_: optimum_benchmark.backends.onnxruntime.ORTBackend - inter_op_num_threads: null - intra_op_num_threads: 
null - initial_isolation_check: false - continous_isolation_check: false - delete_cache: false - export: true - no_weights: false - use_merged: false - use_cache: true - torch_dtype: null - provider: CUDAExecutionProvider - device_id: 1 - use_io_binding: true - enable_profiling: false - optimization: false - optimization_config: - optimization_level: 1 - optimize_for_gpu: true - fp16: false - enable_transformers_specific_optimizations: true - enable_gelu_approximation: false - disable_gelu_fusion: false - disable_layer_norm_fusion: false - disable_attention_fusion: false - disable_skip_layer_norm_fusion: true - disable_bias_skip_layer_norm_fusion: false - disable_bias_gelu_fusion: false - use_mask_index: false - no_attention_mask: false - disable_embed_layer_norm_fusion: true - disable_shape_inference: false - use_multi_head_attention: false - enable_gemm_fast_gelu_fusion: false - use_raw_attention_mask: false - disable_group_norm_fusion: true - disable_packed_kv: true - auto_optimization: O1 - auto_optimization_config: - for_gpu: true - quantization: false - quantization_config: - is_static: false - format: QOperator - mode: IntegerOps - activations_dtype: QUInt8 - activations_symmetric: false - weights_dtype: QInt8 - weights_symmetric: true - per_channel: false - reduce_range: false - operators_to_quantize: - - MatMul - - Add - auto_quantization: null - auto_quantization_config: - is_static: false - calibration: false - calibration_config: - dataset_name: glue - num_samples: 300 - dataset_config_name: sst2 - dataset_split: train - preprocess_batch: true - preprocess_class: optimum_benchmark.preprocessors.glue.GluePreprocessor - use_ortmodel: true -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.InferenceBenchmark - seed: 42 - memory: false - warmup_runs: 10 - benchmark_duration: 10 - input_shapes: - batch_size: 128 - sequence_length: 16 - num_choices: 1 - width: 64 - height: 64 - num_channels: 3 - point_batch_size: 3 - nb_points_per_image: 2 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: 10 -experiment_name: whisper_auto_opt(O1) -model: openai/whisper-base -device: cuda:1 -task: automatic-speech-recognition -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false - use_auth_token: false -environment: - optimum_version: 1.11.1.dev0 - transformers_version: 4.32.0.dev0 - accelerate_version: 0.22.0.dev0 - diffusers_version: 0.20.0.dev0 - python_version: 3.9.17 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 515637 diff --git a/examples/whisper/experiments/cuda_onnxruntime_128_10/whisper_auto_opt(O1)/6/inference_results.csv b/examples/whisper/experiments/cuda_onnxruntime_128_10/whisper_auto_opt(O1)/6/inference_results.csv deleted file mode 100644 index a3e0ccea..00000000 --- a/examples/whisper/experiments/cuda_onnxruntime_128_10/whisper_auto_opt(O1)/6/inference_results.csv +++ /dev/null @@ -1,2 +0,0 @@ -,forward.latency(s),forward.throughput(samples/s),generate.latency(s),generate.throughput(tokens/s) -0,0.656,195.0,0.7,1830.0 diff --git a/examples/whisper/experiments/cuda_onnxruntime_128_10/whisper_auto_opt(O2)/7/.hydra/config.yaml b/examples/whisper/experiments/cuda_onnxruntime_128_10/whisper_auto_opt(O2)/7/.hydra/config.yaml deleted file mode 100644 index b4121189..00000000 --- a/examples/whisper/experiments/cuda_onnxruntime_128_10/whisper_auto_opt(O2)/7/.hydra/config.yaml +++ /dev/null @@ -1,110 +0,0 @@ -backend: - name: onnxruntime - 
version: 1.15.1 - _target_: optimum_benchmark.backends.onnxruntime.ORTBackend - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: false - continous_isolation_check: false - delete_cache: false - export: true - no_weights: false - use_merged: false - use_cache: true - torch_dtype: null - provider: ${infer_provider:${device}} - device_id: ${infer_device_id:${device}} - use_io_binding: ${is_gpu:${device}} - enable_profiling: ${is_profiling:${benchmark.name}} - optimization: false - optimization_config: - optimization_level: 1 - optimize_for_gpu: ${is_gpu:${device}} - fp16: false - enable_transformers_specific_optimizations: true - enable_gelu_approximation: false - disable_gelu_fusion: false - disable_layer_norm_fusion: false - disable_attention_fusion: false - disable_skip_layer_norm_fusion: true - disable_bias_skip_layer_norm_fusion: false - disable_bias_gelu_fusion: false - use_mask_index: false - no_attention_mask: false - disable_embed_layer_norm_fusion: true - disable_shape_inference: false - use_multi_head_attention: false - enable_gemm_fast_gelu_fusion: false - use_raw_attention_mask: false - disable_group_norm_fusion: true - disable_packed_kv: true - auto_optimization: O2 - auto_optimization_config: - for_gpu: ${is_gpu:${device}} - quantization: false - quantization_config: - is_static: false - format: QOperator - mode: IntegerOps - activations_dtype: QUInt8 - activations_symmetric: false - weights_dtype: QInt8 - weights_symmetric: true - per_channel: false - reduce_range: false - operators_to_quantize: - - MatMul - - Add - auto_quantization: null - auto_quantization_config: - is_static: false - calibration: ${requires_calibration:${backend.auto_quantization_config.is_static}, - ${backend.quantization_config.is_static}} - calibration_config: - dataset_name: glue - num_samples: 300 - dataset_config_name: sst2 - dataset_split: train - preprocess_batch: true - preprocess_class: optimum_benchmark.preprocessors.glue.GluePreprocessor - use_ortmodel: ${is_inference:${benchmark.name}} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.InferenceBenchmark - seed: 42 - memory: false - warmup_runs: 10 - benchmark_duration: 10 - input_shapes: - batch_size: 128 - sequence_length: 16 - num_choices: 1 - width: 64 - height: 64 - num_channels: 3 - point_batch_size: 3 - nb_points_per_image: 2 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: 10 -experiment_name: whisper_auto_opt(${backend.auto_optimization}) -model: openai/whisper-base -device: cuda:1 -task: ${infer_task:${model}, ${hub_kwargs.revision}} -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false - use_auth_token: false -environment: - optimum_version: 1.11.1.dev0 - transformers_version: 4.32.0.dev0 - accelerate_version: 0.22.0.dev0 - diffusers_version: 0.20.0.dev0 - python_version: 3.9.17 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 515637 diff --git a/examples/whisper/experiments/cuda_onnxruntime_128_10/whisper_auto_opt(O2)/7/.hydra/hydra.yaml b/examples/whisper/experiments/cuda_onnxruntime_128_10/whisper_auto_opt(O2)/7/.hydra/hydra.yaml deleted file mode 100644 index a587623c..00000000 --- a/examples/whisper/experiments/cuda_onnxruntime_128_10/whisper_auto_opt(O2)/7/.hydra/hydra.yaml +++ /dev/null @@ -1,177 +0,0 @@ -hydra: - run: - dir: experiments/${device}_${backend.name}_${benchmark.input_shapes.batch_size}_${benchmark.new_tokens}/${experiment_name} - 
sweep: - dir: experiments/${device}_${backend.name}_${benchmark.input_shapes.batch_size}_${benchmark.new_tokens}/${experiment_name} - subdir: ${hydra.job.num} - launcher: - _target_: hydra._internal.core_plugins.basic_launcher.BasicLauncher - sweeper: - _target_: hydra._internal.core_plugins.basic_sweeper.BasicSweeper - max_batch_size: null - params: - benchmark.new_tokens: 10,100 - benchmark.input_shapes.batch_size: 64,128 - backend.auto_optimization: null,O1,O2,O3,O4 - help: - app_name: ${hydra.job.name} - header: '${hydra.help.app_name} is powered by Hydra. - - ' - footer: 'Powered by Hydra (https://hydra.cc) - - Use --hydra-help to view Hydra specific help - - ' - template: '${hydra.help.header} - - == Configuration groups == - - Compose your configuration from those groups (group=option) - - - $APP_CONFIG_GROUPS - - - == Config == - - Override anything in the config (foo.bar=value) - - - $CONFIG - - - ${hydra.help.footer} - - ' - hydra_help: - template: 'Hydra (${hydra.runtime.version}) - - See https://hydra.cc for more info. - - - == Flags == - - $FLAGS_HELP - - - == Configuration groups == - - Compose your configuration from those groups (For example, append hydra/job_logging=disabled - to command line) - - - $HYDRA_CONFIG_GROUPS - - - Use ''--cfg hydra'' to Show the Hydra config. - - ' - hydra_help: ??? - hydra_logging: - version: 1 - formatters: - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s' - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - root: - level: INFO - handlers: - - console - disable_existing_loggers: false - job_logging: - version: 1 - formatters: - simple: - format: '[%(asctime)s][%(name)s][%(levelname)s] - %(message)s' - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s] - - %(message)s' - log_colors: - DEBUG: purple - INFO: green - WARNING: yellow - ERROR: red - CRITICAL: red - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - file: - class: logging.FileHandler - formatter: simple - filename: ${hydra.job.name}.log - root: - level: INFO - handlers: - - console - - file - disable_existing_loggers: false - env: {} - mode: MULTIRUN - searchpath: [] - callbacks: {} - output_subdir: .hydra - overrides: - hydra: - - hydra.mode=MULTIRUN - task: - - benchmark.new_tokens=10 - - benchmark.input_shapes.batch_size=128 - - backend.auto_optimization=O2 - - device=cuda\:1 - job: - name: main - chdir: true - override_dirname: backend.auto_optimization=O2,benchmark.input_shapes.batch_size=128,benchmark.new_tokens=10,device=cuda\:1 - id: '7' - num: 7 - config_name: whisper_auto_opt - env_set: {} - env_copy: [] - config: - override_dirname: - kv_sep: '=' - item_sep: ',' - exclude_keys: [] - runtime: - version: 1.3.2 - version_base: '1.3' - cwd: /home/ilyas/optimum-benchmark/examples/whisper - config_sources: - - path: hydra.conf - schema: pkg - provider: hydra - - path: optimum_benchmark - schema: pkg - provider: main - - path: hydra_plugins.hydra_colorlog.conf - schema: pkg - provider: hydra-colorlog - - path: /home/ilyas/optimum-benchmark/examples/whisper/configs - schema: file - provider: command-line - - path: '' - schema: structured - provider: schema - output_dir: /home/ilyas/optimum-benchmark/examples/whisper/experiments/cuda:1_onnxruntime_128_10/whisper_auto_opt(O2)/7 - choices: - benchmark: inference 
- backend: onnxruntime - hydra/env: default - hydra/callbacks: null - hydra/job_logging: colorlog - hydra/hydra_logging: colorlog - hydra/hydra_help: default - hydra/help: default - hydra/sweeper: basic - hydra/launcher: basic - hydra/output: default - verbose: false diff --git a/examples/whisper/experiments/cuda_onnxruntime_128_10/whisper_auto_opt(O2)/7/.hydra/overrides.yaml b/examples/whisper/experiments/cuda_onnxruntime_128_10/whisper_auto_opt(O2)/7/.hydra/overrides.yaml deleted file mode 100644 index 7cda9ff6..00000000 --- a/examples/whisper/experiments/cuda_onnxruntime_128_10/whisper_auto_opt(O2)/7/.hydra/overrides.yaml +++ /dev/null @@ -1,4 +0,0 @@ -- benchmark.new_tokens=10 -- benchmark.input_shapes.batch_size=128 -- backend.auto_optimization=O2 -- device=cuda\:1 diff --git a/examples/whisper/experiments/cuda_onnxruntime_128_10/whisper_auto_opt(O2)/7/hydra_config.yaml b/examples/whisper/experiments/cuda_onnxruntime_128_10/whisper_auto_opt(O2)/7/hydra_config.yaml deleted file mode 100644 index ce13e5b6..00000000 --- a/examples/whisper/experiments/cuda_onnxruntime_128_10/whisper_auto_opt(O2)/7/hydra_config.yaml +++ /dev/null @@ -1,109 +0,0 @@ -backend: - name: onnxruntime - version: 1.15.1 - _target_: optimum_benchmark.backends.onnxruntime.ORTBackend - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: false - continous_isolation_check: false - delete_cache: false - export: true - no_weights: false - use_merged: false - use_cache: true - torch_dtype: null - provider: CUDAExecutionProvider - device_id: 1 - use_io_binding: true - enable_profiling: false - optimization: false - optimization_config: - optimization_level: 1 - optimize_for_gpu: true - fp16: false - enable_transformers_specific_optimizations: true - enable_gelu_approximation: false - disable_gelu_fusion: false - disable_layer_norm_fusion: false - disable_attention_fusion: false - disable_skip_layer_norm_fusion: true - disable_bias_skip_layer_norm_fusion: false - disable_bias_gelu_fusion: false - use_mask_index: false - no_attention_mask: false - disable_embed_layer_norm_fusion: true - disable_shape_inference: false - use_multi_head_attention: false - enable_gemm_fast_gelu_fusion: false - use_raw_attention_mask: false - disable_group_norm_fusion: true - disable_packed_kv: true - auto_optimization: O2 - auto_optimization_config: - for_gpu: true - quantization: false - quantization_config: - is_static: false - format: QOperator - mode: IntegerOps - activations_dtype: QUInt8 - activations_symmetric: false - weights_dtype: QInt8 - weights_symmetric: true - per_channel: false - reduce_range: false - operators_to_quantize: - - MatMul - - Add - auto_quantization: null - auto_quantization_config: - is_static: false - calibration: false - calibration_config: - dataset_name: glue - num_samples: 300 - dataset_config_name: sst2 - dataset_split: train - preprocess_batch: true - preprocess_class: optimum_benchmark.preprocessors.glue.GluePreprocessor - use_ortmodel: true -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.InferenceBenchmark - seed: 42 - memory: false - warmup_runs: 10 - benchmark_duration: 10 - input_shapes: - batch_size: 128 - sequence_length: 16 - num_choices: 1 - width: 64 - height: 64 - num_channels: 3 - point_batch_size: 3 - nb_points_per_image: 2 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: 10 -experiment_name: whisper_auto_opt(O2) -model: openai/whisper-base -device: cuda:1 -task: automatic-speech-recognition 
-hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false - use_auth_token: false -environment: - optimum_version: 1.11.1.dev0 - transformers_version: 4.32.0.dev0 - accelerate_version: 0.22.0.dev0 - diffusers_version: 0.20.0.dev0 - python_version: 3.9.17 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 515637 diff --git a/examples/whisper/experiments/cuda_onnxruntime_128_10/whisper_auto_opt(O2)/7/inference_results.csv b/examples/whisper/experiments/cuda_onnxruntime_128_10/whisper_auto_opt(O2)/7/inference_results.csv deleted file mode 100644 index a9089161..00000000 --- a/examples/whisper/experiments/cuda_onnxruntime_128_10/whisper_auto_opt(O2)/7/inference_results.csv +++ /dev/null @@ -1,2 +0,0 @@ -,forward.latency(s),forward.throughput(samples/s),generate.latency(s),generate.throughput(tokens/s) -0,0.343,373.0,0.384,3330.0 diff --git a/examples/whisper/experiments/cuda_onnxruntime_128_10/whisper_auto_opt(O3)/8/.hydra/config.yaml b/examples/whisper/experiments/cuda_onnxruntime_128_10/whisper_auto_opt(O3)/8/.hydra/config.yaml deleted file mode 100644 index d23f77c8..00000000 --- a/examples/whisper/experiments/cuda_onnxruntime_128_10/whisper_auto_opt(O3)/8/.hydra/config.yaml +++ /dev/null @@ -1,110 +0,0 @@ -backend: - name: onnxruntime - version: 1.15.1 - _target_: optimum_benchmark.backends.onnxruntime.ORTBackend - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: false - continous_isolation_check: false - delete_cache: false - export: true - no_weights: false - use_merged: false - use_cache: true - torch_dtype: null - provider: ${infer_provider:${device}} - device_id: ${infer_device_id:${device}} - use_io_binding: ${is_gpu:${device}} - enable_profiling: ${is_profiling:${benchmark.name}} - optimization: false - optimization_config: - optimization_level: 1 - optimize_for_gpu: ${is_gpu:${device}} - fp16: false - enable_transformers_specific_optimizations: true - enable_gelu_approximation: false - disable_gelu_fusion: false - disable_layer_norm_fusion: false - disable_attention_fusion: false - disable_skip_layer_norm_fusion: true - disable_bias_skip_layer_norm_fusion: false - disable_bias_gelu_fusion: false - use_mask_index: false - no_attention_mask: false - disable_embed_layer_norm_fusion: true - disable_shape_inference: false - use_multi_head_attention: false - enable_gemm_fast_gelu_fusion: false - use_raw_attention_mask: false - disable_group_norm_fusion: true - disable_packed_kv: true - auto_optimization: O3 - auto_optimization_config: - for_gpu: ${is_gpu:${device}} - quantization: false - quantization_config: - is_static: false - format: QOperator - mode: IntegerOps - activations_dtype: QUInt8 - activations_symmetric: false - weights_dtype: QInt8 - weights_symmetric: true - per_channel: false - reduce_range: false - operators_to_quantize: - - MatMul - - Add - auto_quantization: null - auto_quantization_config: - is_static: false - calibration: ${requires_calibration:${backend.auto_quantization_config.is_static}, - ${backend.quantization_config.is_static}} - calibration_config: - dataset_name: glue - num_samples: 300 - dataset_config_name: sst2 - dataset_split: train - preprocess_batch: true - preprocess_class: optimum_benchmark.preprocessors.glue.GluePreprocessor - use_ortmodel: ${is_inference:${benchmark.name}} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.InferenceBenchmark - seed: 42 - memory: false - warmup_runs: 10 - benchmark_duration: 
10 - input_shapes: - batch_size: 128 - sequence_length: 16 - num_choices: 1 - width: 64 - height: 64 - num_channels: 3 - point_batch_size: 3 - nb_points_per_image: 2 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: 10 -experiment_name: whisper_auto_opt(${backend.auto_optimization}) -model: openai/whisper-base -device: cuda:1 -task: ${infer_task:${model}, ${hub_kwargs.revision}} -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false - use_auth_token: false -environment: - optimum_version: 1.11.1.dev0 - transformers_version: 4.32.0.dev0 - accelerate_version: 0.22.0.dev0 - diffusers_version: 0.20.0.dev0 - python_version: 3.9.17 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 515637 diff --git a/examples/whisper/experiments/cuda_onnxruntime_128_10/whisper_auto_opt(O3)/8/.hydra/hydra.yaml b/examples/whisper/experiments/cuda_onnxruntime_128_10/whisper_auto_opt(O3)/8/.hydra/hydra.yaml deleted file mode 100644 index 05534707..00000000 --- a/examples/whisper/experiments/cuda_onnxruntime_128_10/whisper_auto_opt(O3)/8/.hydra/hydra.yaml +++ /dev/null @@ -1,177 +0,0 @@ -hydra: - run: - dir: experiments/${device}_${backend.name}_${benchmark.input_shapes.batch_size}_${benchmark.new_tokens}/${experiment_name} - sweep: - dir: experiments/${device}_${backend.name}_${benchmark.input_shapes.batch_size}_${benchmark.new_tokens}/${experiment_name} - subdir: ${hydra.job.num} - launcher: - _target_: hydra._internal.core_plugins.basic_launcher.BasicLauncher - sweeper: - _target_: hydra._internal.core_plugins.basic_sweeper.BasicSweeper - max_batch_size: null - params: - benchmark.new_tokens: 10,100 - benchmark.input_shapes.batch_size: 64,128 - backend.auto_optimization: null,O1,O2,O3,O4 - help: - app_name: ${hydra.job.name} - header: '${hydra.help.app_name} is powered by Hydra. - - ' - footer: 'Powered by Hydra (https://hydra.cc) - - Use --hydra-help to view Hydra specific help - - ' - template: '${hydra.help.header} - - == Configuration groups == - - Compose your configuration from those groups (group=option) - - - $APP_CONFIG_GROUPS - - - == Config == - - Override anything in the config (foo.bar=value) - - - $CONFIG - - - ${hydra.help.footer} - - ' - hydra_help: - template: 'Hydra (${hydra.runtime.version}) - - See https://hydra.cc for more info. - - - == Flags == - - $FLAGS_HELP - - - == Configuration groups == - - Compose your configuration from those groups (For example, append hydra/job_logging=disabled - to command line) - - - $HYDRA_CONFIG_GROUPS - - - Use ''--cfg hydra'' to Show the Hydra config. - - ' - hydra_help: ??? 
- hydra_logging: - version: 1 - formatters: - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s' - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - root: - level: INFO - handlers: - - console - disable_existing_loggers: false - job_logging: - version: 1 - formatters: - simple: - format: '[%(asctime)s][%(name)s][%(levelname)s] - %(message)s' - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s] - - %(message)s' - log_colors: - DEBUG: purple - INFO: green - WARNING: yellow - ERROR: red - CRITICAL: red - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - file: - class: logging.FileHandler - formatter: simple - filename: ${hydra.job.name}.log - root: - level: INFO - handlers: - - console - - file - disable_existing_loggers: false - env: {} - mode: MULTIRUN - searchpath: [] - callbacks: {} - output_subdir: .hydra - overrides: - hydra: - - hydra.mode=MULTIRUN - task: - - benchmark.new_tokens=10 - - benchmark.input_shapes.batch_size=128 - - backend.auto_optimization=O3 - - device=cuda\:1 - job: - name: main - chdir: true - override_dirname: backend.auto_optimization=O3,benchmark.input_shapes.batch_size=128,benchmark.new_tokens=10,device=cuda\:1 - id: '8' - num: 8 - config_name: whisper_auto_opt - env_set: {} - env_copy: [] - config: - override_dirname: - kv_sep: '=' - item_sep: ',' - exclude_keys: [] - runtime: - version: 1.3.2 - version_base: '1.3' - cwd: /home/ilyas/optimum-benchmark/examples/whisper - config_sources: - - path: hydra.conf - schema: pkg - provider: hydra - - path: optimum_benchmark - schema: pkg - provider: main - - path: hydra_plugins.hydra_colorlog.conf - schema: pkg - provider: hydra-colorlog - - path: /home/ilyas/optimum-benchmark/examples/whisper/configs - schema: file - provider: command-line - - path: '' - schema: structured - provider: schema - output_dir: /home/ilyas/optimum-benchmark/examples/whisper/experiments/cuda:1_onnxruntime_128_10/whisper_auto_opt(O3)/8 - choices: - benchmark: inference - backend: onnxruntime - hydra/env: default - hydra/callbacks: null - hydra/job_logging: colorlog - hydra/hydra_logging: colorlog - hydra/hydra_help: default - hydra/help: default - hydra/sweeper: basic - hydra/launcher: basic - hydra/output: default - verbose: false diff --git a/examples/whisper/experiments/cuda_onnxruntime_128_10/whisper_auto_opt(O3)/8/.hydra/overrides.yaml b/examples/whisper/experiments/cuda_onnxruntime_128_10/whisper_auto_opt(O3)/8/.hydra/overrides.yaml deleted file mode 100644 index 77aec7a3..00000000 --- a/examples/whisper/experiments/cuda_onnxruntime_128_10/whisper_auto_opt(O3)/8/.hydra/overrides.yaml +++ /dev/null @@ -1,4 +0,0 @@ -- benchmark.new_tokens=10 -- benchmark.input_shapes.batch_size=128 -- backend.auto_optimization=O3 -- device=cuda\:1 diff --git a/examples/whisper/experiments/cuda_onnxruntime_128_10/whisper_auto_opt(O3)/8/hydra_config.yaml b/examples/whisper/experiments/cuda_onnxruntime_128_10/whisper_auto_opt(O3)/8/hydra_config.yaml deleted file mode 100644 index e393f1e0..00000000 --- a/examples/whisper/experiments/cuda_onnxruntime_128_10/whisper_auto_opt(O3)/8/hydra_config.yaml +++ /dev/null @@ -1,109 +0,0 @@ -backend: - name: onnxruntime - version: 1.15.1 - _target_: optimum_benchmark.backends.onnxruntime.ORTBackend - inter_op_num_threads: null - intra_op_num_threads: 
null - initial_isolation_check: false - continous_isolation_check: false - delete_cache: false - export: true - no_weights: false - use_merged: false - use_cache: true - torch_dtype: null - provider: CUDAExecutionProvider - device_id: 1 - use_io_binding: true - enable_profiling: false - optimization: false - optimization_config: - optimization_level: 1 - optimize_for_gpu: true - fp16: false - enable_transformers_specific_optimizations: true - enable_gelu_approximation: false - disable_gelu_fusion: false - disable_layer_norm_fusion: false - disable_attention_fusion: false - disable_skip_layer_norm_fusion: true - disable_bias_skip_layer_norm_fusion: false - disable_bias_gelu_fusion: false - use_mask_index: false - no_attention_mask: false - disable_embed_layer_norm_fusion: true - disable_shape_inference: false - use_multi_head_attention: false - enable_gemm_fast_gelu_fusion: false - use_raw_attention_mask: false - disable_group_norm_fusion: true - disable_packed_kv: true - auto_optimization: O3 - auto_optimization_config: - for_gpu: true - quantization: false - quantization_config: - is_static: false - format: QOperator - mode: IntegerOps - activations_dtype: QUInt8 - activations_symmetric: false - weights_dtype: QInt8 - weights_symmetric: true - per_channel: false - reduce_range: false - operators_to_quantize: - - MatMul - - Add - auto_quantization: null - auto_quantization_config: - is_static: false - calibration: false - calibration_config: - dataset_name: glue - num_samples: 300 - dataset_config_name: sst2 - dataset_split: train - preprocess_batch: true - preprocess_class: optimum_benchmark.preprocessors.glue.GluePreprocessor - use_ortmodel: true -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.InferenceBenchmark - seed: 42 - memory: false - warmup_runs: 10 - benchmark_duration: 10 - input_shapes: - batch_size: 128 - sequence_length: 16 - num_choices: 1 - width: 64 - height: 64 - num_channels: 3 - point_batch_size: 3 - nb_points_per_image: 2 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: 10 -experiment_name: whisper_auto_opt(O3) -model: openai/whisper-base -device: cuda:1 -task: automatic-speech-recognition -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false - use_auth_token: false -environment: - optimum_version: 1.11.1.dev0 - transformers_version: 4.32.0.dev0 - accelerate_version: 0.22.0.dev0 - diffusers_version: 0.20.0.dev0 - python_version: 3.9.17 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 515637 diff --git a/examples/whisper/experiments/cuda_onnxruntime_128_10/whisper_auto_opt(O3)/8/inference_results.csv b/examples/whisper/experiments/cuda_onnxruntime_128_10/whisper_auto_opt(O3)/8/inference_results.csv deleted file mode 100644 index b7893057..00000000 --- a/examples/whisper/experiments/cuda_onnxruntime_128_10/whisper_auto_opt(O3)/8/inference_results.csv +++ /dev/null @@ -1,2 +0,0 @@ -,forward.latency(s),forward.throughput(samples/s),generate.latency(s),generate.throughput(tokens/s) -0,0.338,379.0,0.378,3390.0 diff --git a/examples/whisper/experiments/cuda_onnxruntime_128_10/whisper_auto_opt(O4)/9/.hydra/config.yaml b/examples/whisper/experiments/cuda_onnxruntime_128_10/whisper_auto_opt(O4)/9/.hydra/config.yaml deleted file mode 100644 index b3adf76b..00000000 --- a/examples/whisper/experiments/cuda_onnxruntime_128_10/whisper_auto_opt(O4)/9/.hydra/config.yaml +++ /dev/null @@ -1,110 +0,0 @@ -backend: - name: onnxruntime - 
version: 1.15.1 - _target_: optimum_benchmark.backends.onnxruntime.ORTBackend - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: false - continous_isolation_check: false - delete_cache: false - export: true - no_weights: false - use_merged: false - use_cache: true - torch_dtype: null - provider: ${infer_provider:${device}} - device_id: ${infer_device_id:${device}} - use_io_binding: ${is_gpu:${device}} - enable_profiling: ${is_profiling:${benchmark.name}} - optimization: false - optimization_config: - optimization_level: 1 - optimize_for_gpu: ${is_gpu:${device}} - fp16: false - enable_transformers_specific_optimizations: true - enable_gelu_approximation: false - disable_gelu_fusion: false - disable_layer_norm_fusion: false - disable_attention_fusion: false - disable_skip_layer_norm_fusion: true - disable_bias_skip_layer_norm_fusion: false - disable_bias_gelu_fusion: false - use_mask_index: false - no_attention_mask: false - disable_embed_layer_norm_fusion: true - disable_shape_inference: false - use_multi_head_attention: false - enable_gemm_fast_gelu_fusion: false - use_raw_attention_mask: false - disable_group_norm_fusion: true - disable_packed_kv: true - auto_optimization: O4 - auto_optimization_config: - for_gpu: ${is_gpu:${device}} - quantization: false - quantization_config: - is_static: false - format: QOperator - mode: IntegerOps - activations_dtype: QUInt8 - activations_symmetric: false - weights_dtype: QInt8 - weights_symmetric: true - per_channel: false - reduce_range: false - operators_to_quantize: - - MatMul - - Add - auto_quantization: null - auto_quantization_config: - is_static: false - calibration: ${requires_calibration:${backend.auto_quantization_config.is_static}, - ${backend.quantization_config.is_static}} - calibration_config: - dataset_name: glue - num_samples: 300 - dataset_config_name: sst2 - dataset_split: train - preprocess_batch: true - preprocess_class: optimum_benchmark.preprocessors.glue.GluePreprocessor - use_ortmodel: ${is_inference:${benchmark.name}} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.InferenceBenchmark - seed: 42 - memory: false - warmup_runs: 10 - benchmark_duration: 10 - input_shapes: - batch_size: 128 - sequence_length: 16 - num_choices: 1 - width: 64 - height: 64 - num_channels: 3 - point_batch_size: 3 - nb_points_per_image: 2 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: 10 -experiment_name: whisper_auto_opt(${backend.auto_optimization}) -model: openai/whisper-base -device: cuda:1 -task: ${infer_task:${model}, ${hub_kwargs.revision}} -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false - use_auth_token: false -environment: - optimum_version: 1.11.1.dev0 - transformers_version: 4.32.0.dev0 - accelerate_version: 0.22.0.dev0 - diffusers_version: 0.20.0.dev0 - python_version: 3.9.17 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 515637 diff --git a/examples/whisper/experiments/cuda_onnxruntime_128_10/whisper_auto_opt(O4)/9/.hydra/hydra.yaml b/examples/whisper/experiments/cuda_onnxruntime_128_10/whisper_auto_opt(O4)/9/.hydra/hydra.yaml deleted file mode 100644 index 3ccab146..00000000 --- a/examples/whisper/experiments/cuda_onnxruntime_128_10/whisper_auto_opt(O4)/9/.hydra/hydra.yaml +++ /dev/null @@ -1,177 +0,0 @@ -hydra: - run: - dir: experiments/${device}_${backend.name}_${benchmark.input_shapes.batch_size}_${benchmark.new_tokens}/${experiment_name} - 
sweep: - dir: experiments/${device}_${backend.name}_${benchmark.input_shapes.batch_size}_${benchmark.new_tokens}/${experiment_name} - subdir: ${hydra.job.num} - launcher: - _target_: hydra._internal.core_plugins.basic_launcher.BasicLauncher - sweeper: - _target_: hydra._internal.core_plugins.basic_sweeper.BasicSweeper - max_batch_size: null - params: - benchmark.new_tokens: 10,100 - benchmark.input_shapes.batch_size: 64,128 - backend.auto_optimization: null,O1,O2,O3,O4 - help: - app_name: ${hydra.job.name} - header: '${hydra.help.app_name} is powered by Hydra. - - ' - footer: 'Powered by Hydra (https://hydra.cc) - - Use --hydra-help to view Hydra specific help - - ' - template: '${hydra.help.header} - - == Configuration groups == - - Compose your configuration from those groups (group=option) - - - $APP_CONFIG_GROUPS - - - == Config == - - Override anything in the config (foo.bar=value) - - - $CONFIG - - - ${hydra.help.footer} - - ' - hydra_help: - template: 'Hydra (${hydra.runtime.version}) - - See https://hydra.cc for more info. - - - == Flags == - - $FLAGS_HELP - - - == Configuration groups == - - Compose your configuration from those groups (For example, append hydra/job_logging=disabled - to command line) - - - $HYDRA_CONFIG_GROUPS - - - Use ''--cfg hydra'' to Show the Hydra config. - - ' - hydra_help: ??? - hydra_logging: - version: 1 - formatters: - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s' - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - root: - level: INFO - handlers: - - console - disable_existing_loggers: false - job_logging: - version: 1 - formatters: - simple: - format: '[%(asctime)s][%(name)s][%(levelname)s] - %(message)s' - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s] - - %(message)s' - log_colors: - DEBUG: purple - INFO: green - WARNING: yellow - ERROR: red - CRITICAL: red - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - file: - class: logging.FileHandler - formatter: simple - filename: ${hydra.job.name}.log - root: - level: INFO - handlers: - - console - - file - disable_existing_loggers: false - env: {} - mode: MULTIRUN - searchpath: [] - callbacks: {} - output_subdir: .hydra - overrides: - hydra: - - hydra.mode=MULTIRUN - task: - - benchmark.new_tokens=10 - - benchmark.input_shapes.batch_size=128 - - backend.auto_optimization=O4 - - device=cuda\:1 - job: - name: main - chdir: true - override_dirname: backend.auto_optimization=O4,benchmark.input_shapes.batch_size=128,benchmark.new_tokens=10,device=cuda\:1 - id: '9' - num: 9 - config_name: whisper_auto_opt - env_set: {} - env_copy: [] - config: - override_dirname: - kv_sep: '=' - item_sep: ',' - exclude_keys: [] - runtime: - version: 1.3.2 - version_base: '1.3' - cwd: /home/ilyas/optimum-benchmark/examples/whisper - config_sources: - - path: hydra.conf - schema: pkg - provider: hydra - - path: optimum_benchmark - schema: pkg - provider: main - - path: hydra_plugins.hydra_colorlog.conf - schema: pkg - provider: hydra-colorlog - - path: /home/ilyas/optimum-benchmark/examples/whisper/configs - schema: file - provider: command-line - - path: '' - schema: structured - provider: schema - output_dir: /home/ilyas/optimum-benchmark/examples/whisper/experiments/cuda:1_onnxruntime_128_10/whisper_auto_opt(O4)/9 - choices: - benchmark: inference 
- backend: onnxruntime - hydra/env: default - hydra/callbacks: null - hydra/job_logging: colorlog - hydra/hydra_logging: colorlog - hydra/hydra_help: default - hydra/help: default - hydra/sweeper: basic - hydra/launcher: basic - hydra/output: default - verbose: false diff --git a/examples/whisper/experiments/cuda_onnxruntime_128_10/whisper_auto_opt(O4)/9/.hydra/overrides.yaml b/examples/whisper/experiments/cuda_onnxruntime_128_10/whisper_auto_opt(O4)/9/.hydra/overrides.yaml deleted file mode 100644 index 2187c855..00000000 --- a/examples/whisper/experiments/cuda_onnxruntime_128_10/whisper_auto_opt(O4)/9/.hydra/overrides.yaml +++ /dev/null @@ -1,4 +0,0 @@ -- benchmark.new_tokens=10 -- benchmark.input_shapes.batch_size=128 -- backend.auto_optimization=O4 -- device=cuda\:1 diff --git a/examples/whisper/experiments/cuda_onnxruntime_128_10/whisper_auto_opt(O4)/9/hydra_config.yaml b/examples/whisper/experiments/cuda_onnxruntime_128_10/whisper_auto_opt(O4)/9/hydra_config.yaml deleted file mode 100644 index da439386..00000000 --- a/examples/whisper/experiments/cuda_onnxruntime_128_10/whisper_auto_opt(O4)/9/hydra_config.yaml +++ /dev/null @@ -1,109 +0,0 @@ -backend: - name: onnxruntime - version: 1.15.1 - _target_: optimum_benchmark.backends.onnxruntime.ORTBackend - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: false - continous_isolation_check: false - delete_cache: false - export: true - no_weights: false - use_merged: false - use_cache: true - torch_dtype: null - provider: CUDAExecutionProvider - device_id: 1 - use_io_binding: true - enable_profiling: false - optimization: false - optimization_config: - optimization_level: 1 - optimize_for_gpu: true - fp16: false - enable_transformers_specific_optimizations: true - enable_gelu_approximation: false - disable_gelu_fusion: false - disable_layer_norm_fusion: false - disable_attention_fusion: false - disable_skip_layer_norm_fusion: true - disable_bias_skip_layer_norm_fusion: false - disable_bias_gelu_fusion: false - use_mask_index: false - no_attention_mask: false - disable_embed_layer_norm_fusion: true - disable_shape_inference: false - use_multi_head_attention: false - enable_gemm_fast_gelu_fusion: false - use_raw_attention_mask: false - disable_group_norm_fusion: true - disable_packed_kv: true - auto_optimization: O4 - auto_optimization_config: - for_gpu: true - quantization: false - quantization_config: - is_static: false - format: QOperator - mode: IntegerOps - activations_dtype: QUInt8 - activations_symmetric: false - weights_dtype: QInt8 - weights_symmetric: true - per_channel: false - reduce_range: false - operators_to_quantize: - - MatMul - - Add - auto_quantization: null - auto_quantization_config: - is_static: false - calibration: false - calibration_config: - dataset_name: glue - num_samples: 300 - dataset_config_name: sst2 - dataset_split: train - preprocess_batch: true - preprocess_class: optimum_benchmark.preprocessors.glue.GluePreprocessor - use_ortmodel: true -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.InferenceBenchmark - seed: 42 - memory: false - warmup_runs: 10 - benchmark_duration: 10 - input_shapes: - batch_size: 128 - sequence_length: 16 - num_choices: 1 - width: 64 - height: 64 - num_channels: 3 - point_batch_size: 3 - nb_points_per_image: 2 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: 10 -experiment_name: whisper_auto_opt(O4) -model: openai/whisper-base -device: cuda:1 -task: automatic-speech-recognition 
-hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false - use_auth_token: false -environment: - optimum_version: 1.11.1.dev0 - transformers_version: 4.32.0.dev0 - accelerate_version: 0.22.0.dev0 - diffusers_version: 0.20.0.dev0 - python_version: 3.9.17 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 515637 diff --git a/examples/whisper/experiments/cuda_onnxruntime_128_10/whisper_auto_opt(O4)/9/inference_results.csv b/examples/whisper/experiments/cuda_onnxruntime_128_10/whisper_auto_opt(O4)/9/inference_results.csv deleted file mode 100644 index a485ad31..00000000 --- a/examples/whisper/experiments/cuda_onnxruntime_128_10/whisper_auto_opt(O4)/9/inference_results.csv +++ /dev/null @@ -1,2 +0,0 @@ -,forward.latency(s),forward.throughput(samples/s),generate.latency(s),generate.throughput(tokens/s) -0,0.133,962.0,0.209,6120.0 diff --git a/examples/whisper/experiments/cuda_onnxruntime_128_100/whisper_auto_opt(None)/15/.hydra/config.yaml b/examples/whisper/experiments/cuda_onnxruntime_128_100/whisper_auto_opt(None)/15/.hydra/config.yaml deleted file mode 100644 index ef40f58e..00000000 --- a/examples/whisper/experiments/cuda_onnxruntime_128_100/whisper_auto_opt(None)/15/.hydra/config.yaml +++ /dev/null @@ -1,110 +0,0 @@ -backend: - name: onnxruntime - version: 1.15.1 - _target_: optimum_benchmark.backends.onnxruntime.ORTBackend - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: false - continous_isolation_check: false - delete_cache: false - export: true - no_weights: false - use_merged: false - use_cache: true - torch_dtype: null - provider: ${infer_provider:${device}} - device_id: ${infer_device_id:${device}} - use_io_binding: ${is_gpu:${device}} - enable_profiling: ${is_profiling:${benchmark.name}} - optimization: false - optimization_config: - optimization_level: 1 - optimize_for_gpu: ${is_gpu:${device}} - fp16: false - enable_transformers_specific_optimizations: true - enable_gelu_approximation: false - disable_gelu_fusion: false - disable_layer_norm_fusion: false - disable_attention_fusion: false - disable_skip_layer_norm_fusion: true - disable_bias_skip_layer_norm_fusion: false - disable_bias_gelu_fusion: false - use_mask_index: false - no_attention_mask: false - disable_embed_layer_norm_fusion: true - disable_shape_inference: false - use_multi_head_attention: false - enable_gemm_fast_gelu_fusion: false - use_raw_attention_mask: false - disable_group_norm_fusion: true - disable_packed_kv: true - auto_optimization: null - auto_optimization_config: - for_gpu: ${is_gpu:${device}} - quantization: false - quantization_config: - is_static: false - format: QOperator - mode: IntegerOps - activations_dtype: QUInt8 - activations_symmetric: false - weights_dtype: QInt8 - weights_symmetric: true - per_channel: false - reduce_range: false - operators_to_quantize: - - MatMul - - Add - auto_quantization: null - auto_quantization_config: - is_static: false - calibration: ${requires_calibration:${backend.auto_quantization_config.is_static}, - ${backend.quantization_config.is_static}} - calibration_config: - dataset_name: glue - num_samples: 300 - dataset_config_name: sst2 - dataset_split: train - preprocess_batch: true - preprocess_class: optimum_benchmark.preprocessors.glue.GluePreprocessor - use_ortmodel: ${is_inference:${benchmark.name}} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.InferenceBenchmark - seed: 42 - memory: false - warmup_runs: 10 - 
benchmark_duration: 10 - input_shapes: - batch_size: 128 - sequence_length: 16 - num_choices: 1 - width: 64 - height: 64 - num_channels: 3 - point_batch_size: 3 - nb_points_per_image: 2 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: 100 -experiment_name: whisper_auto_opt(${backend.auto_optimization}) -model: openai/whisper-base -device: cuda:1 -task: ${infer_task:${model}, ${hub_kwargs.revision}} -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false - use_auth_token: false -environment: - optimum_version: 1.11.1.dev0 - transformers_version: 4.32.0.dev0 - accelerate_version: 0.22.0.dev0 - diffusers_version: 0.20.0.dev0 - python_version: 3.9.17 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 515637 diff --git a/examples/whisper/experiments/cuda_onnxruntime_128_100/whisper_auto_opt(None)/15/.hydra/hydra.yaml b/examples/whisper/experiments/cuda_onnxruntime_128_100/whisper_auto_opt(None)/15/.hydra/hydra.yaml deleted file mode 100644 index 5d8d8e81..00000000 --- a/examples/whisper/experiments/cuda_onnxruntime_128_100/whisper_auto_opt(None)/15/.hydra/hydra.yaml +++ /dev/null @@ -1,177 +0,0 @@ -hydra: - run: - dir: experiments/${device}_${backend.name}_${benchmark.input_shapes.batch_size}_${benchmark.new_tokens}/${experiment_name} - sweep: - dir: experiments/${device}_${backend.name}_${benchmark.input_shapes.batch_size}_${benchmark.new_tokens}/${experiment_name} - subdir: ${hydra.job.num} - launcher: - _target_: hydra._internal.core_plugins.basic_launcher.BasicLauncher - sweeper: - _target_: hydra._internal.core_plugins.basic_sweeper.BasicSweeper - max_batch_size: null - params: - benchmark.new_tokens: 10,100 - benchmark.input_shapes.batch_size: 64,128 - backend.auto_optimization: null,O1,O2,O3,O4 - help: - app_name: ${hydra.job.name} - header: '${hydra.help.app_name} is powered by Hydra. - - ' - footer: 'Powered by Hydra (https://hydra.cc) - - Use --hydra-help to view Hydra specific help - - ' - template: '${hydra.help.header} - - == Configuration groups == - - Compose your configuration from those groups (group=option) - - - $APP_CONFIG_GROUPS - - - == Config == - - Override anything in the config (foo.bar=value) - - - $CONFIG - - - ${hydra.help.footer} - - ' - hydra_help: - template: 'Hydra (${hydra.runtime.version}) - - See https://hydra.cc for more info. - - - == Flags == - - $FLAGS_HELP - - - == Configuration groups == - - Compose your configuration from those groups (For example, append hydra/job_logging=disabled - to command line) - - - $HYDRA_CONFIG_GROUPS - - - Use ''--cfg hydra'' to Show the Hydra config. - - ' - hydra_help: ??? 
- hydra_logging: - version: 1 - formatters: - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s' - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - root: - level: INFO - handlers: - - console - disable_existing_loggers: false - job_logging: - version: 1 - formatters: - simple: - format: '[%(asctime)s][%(name)s][%(levelname)s] - %(message)s' - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s] - - %(message)s' - log_colors: - DEBUG: purple - INFO: green - WARNING: yellow - ERROR: red - CRITICAL: red - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - file: - class: logging.FileHandler - formatter: simple - filename: ${hydra.job.name}.log - root: - level: INFO - handlers: - - console - - file - disable_existing_loggers: false - env: {} - mode: MULTIRUN - searchpath: [] - callbacks: {} - output_subdir: .hydra - overrides: - hydra: - - hydra.mode=MULTIRUN - task: - - benchmark.new_tokens=100 - - benchmark.input_shapes.batch_size=128 - - backend.auto_optimization=null - - device=cuda\:1 - job: - name: main - chdir: true - override_dirname: backend.auto_optimization=null,benchmark.input_shapes.batch_size=128,benchmark.new_tokens=100,device=cuda\:1 - id: '15' - num: 15 - config_name: whisper_auto_opt - env_set: {} - env_copy: [] - config: - override_dirname: - kv_sep: '=' - item_sep: ',' - exclude_keys: [] - runtime: - version: 1.3.2 - version_base: '1.3' - cwd: /home/ilyas/optimum-benchmark/examples/whisper - config_sources: - - path: hydra.conf - schema: pkg - provider: hydra - - path: optimum_benchmark - schema: pkg - provider: main - - path: hydra_plugins.hydra_colorlog.conf - schema: pkg - provider: hydra-colorlog - - path: /home/ilyas/optimum-benchmark/examples/whisper/configs - schema: file - provider: command-line - - path: '' - schema: structured - provider: schema - output_dir: /home/ilyas/optimum-benchmark/examples/whisper/experiments/cuda:1_onnxruntime_128_100/whisper_auto_opt(None)/15 - choices: - benchmark: inference - backend: onnxruntime - hydra/env: default - hydra/callbacks: null - hydra/job_logging: colorlog - hydra/hydra_logging: colorlog - hydra/hydra_help: default - hydra/help: default - hydra/sweeper: basic - hydra/launcher: basic - hydra/output: default - verbose: false diff --git a/examples/whisper/experiments/cuda_onnxruntime_128_100/whisper_auto_opt(None)/15/.hydra/overrides.yaml b/examples/whisper/experiments/cuda_onnxruntime_128_100/whisper_auto_opt(None)/15/.hydra/overrides.yaml deleted file mode 100644 index 0c7954e5..00000000 --- a/examples/whisper/experiments/cuda_onnxruntime_128_100/whisper_auto_opt(None)/15/.hydra/overrides.yaml +++ /dev/null @@ -1,4 +0,0 @@ -- benchmark.new_tokens=100 -- benchmark.input_shapes.batch_size=128 -- backend.auto_optimization=null -- device=cuda\:1 diff --git a/examples/whisper/experiments/cuda_onnxruntime_128_100/whisper_auto_opt(None)/15/hydra_config.yaml b/examples/whisper/experiments/cuda_onnxruntime_128_100/whisper_auto_opt(None)/15/hydra_config.yaml deleted file mode 100644 index 36667af5..00000000 --- a/examples/whisper/experiments/cuda_onnxruntime_128_100/whisper_auto_opt(None)/15/hydra_config.yaml +++ /dev/null @@ -1,109 +0,0 @@ -backend: - name: onnxruntime - version: 1.15.1 - _target_: optimum_benchmark.backends.onnxruntime.ORTBackend - 
inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: false - continous_isolation_check: false - delete_cache: false - export: true - no_weights: false - use_merged: false - use_cache: true - torch_dtype: null - provider: CUDAExecutionProvider - device_id: 1 - use_io_binding: true - enable_profiling: false - optimization: false - optimization_config: - optimization_level: 1 - optimize_for_gpu: true - fp16: false - enable_transformers_specific_optimizations: true - enable_gelu_approximation: false - disable_gelu_fusion: false - disable_layer_norm_fusion: false - disable_attention_fusion: false - disable_skip_layer_norm_fusion: true - disable_bias_skip_layer_norm_fusion: false - disable_bias_gelu_fusion: false - use_mask_index: false - no_attention_mask: false - disable_embed_layer_norm_fusion: true - disable_shape_inference: false - use_multi_head_attention: false - enable_gemm_fast_gelu_fusion: false - use_raw_attention_mask: false - disable_group_norm_fusion: true - disable_packed_kv: true - auto_optimization: null - auto_optimization_config: - for_gpu: true - quantization: false - quantization_config: - is_static: false - format: QOperator - mode: IntegerOps - activations_dtype: QUInt8 - activations_symmetric: false - weights_dtype: QInt8 - weights_symmetric: true - per_channel: false - reduce_range: false - operators_to_quantize: - - MatMul - - Add - auto_quantization: null - auto_quantization_config: - is_static: false - calibration: false - calibration_config: - dataset_name: glue - num_samples: 300 - dataset_config_name: sst2 - dataset_split: train - preprocess_batch: true - preprocess_class: optimum_benchmark.preprocessors.glue.GluePreprocessor - use_ortmodel: true -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.InferenceBenchmark - seed: 42 - memory: false - warmup_runs: 10 - benchmark_duration: 10 - input_shapes: - batch_size: 128 - sequence_length: 16 - num_choices: 1 - width: 64 - height: 64 - num_channels: 3 - point_batch_size: 3 - nb_points_per_image: 2 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: 100 -experiment_name: whisper_auto_opt(None) -model: openai/whisper-base -device: cuda:1 -task: automatic-speech-recognition -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false - use_auth_token: false -environment: - optimum_version: 1.11.1.dev0 - transformers_version: 4.32.0.dev0 - accelerate_version: 0.22.0.dev0 - diffusers_version: 0.20.0.dev0 - python_version: 3.9.17 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 515637 diff --git a/examples/whisper/experiments/cuda_onnxruntime_128_100/whisper_auto_opt(None)/15/inference_results.csv b/examples/whisper/experiments/cuda_onnxruntime_128_100/whisper_auto_opt(None)/15/inference_results.csv deleted file mode 100644 index 8298cbc1..00000000 --- a/examples/whisper/experiments/cuda_onnxruntime_128_100/whisper_auto_opt(None)/15/inference_results.csv +++ /dev/null @@ -1,2 +0,0 @@ -,forward.latency(s),forward.throughput(samples/s),generate.latency(s),generate.throughput(tokens/s) -0,0.655,195.0,1.3,9850.0 diff --git a/examples/whisper/experiments/cuda_onnxruntime_128_100/whisper_auto_opt(O1)/16/.hydra/config.yaml b/examples/whisper/experiments/cuda_onnxruntime_128_100/whisper_auto_opt(O1)/16/.hydra/config.yaml deleted file mode 100644 index e14dd026..00000000 --- 
a/examples/whisper/experiments/cuda_onnxruntime_128_100/whisper_auto_opt(O1)/16/.hydra/config.yaml +++ /dev/null @@ -1,110 +0,0 @@ -backend: - name: onnxruntime - version: 1.15.1 - _target_: optimum_benchmark.backends.onnxruntime.ORTBackend - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: false - continous_isolation_check: false - delete_cache: false - export: true - no_weights: false - use_merged: false - use_cache: true - torch_dtype: null - provider: ${infer_provider:${device}} - device_id: ${infer_device_id:${device}} - use_io_binding: ${is_gpu:${device}} - enable_profiling: ${is_profiling:${benchmark.name}} - optimization: false - optimization_config: - optimization_level: 1 - optimize_for_gpu: ${is_gpu:${device}} - fp16: false - enable_transformers_specific_optimizations: true - enable_gelu_approximation: false - disable_gelu_fusion: false - disable_layer_norm_fusion: false - disable_attention_fusion: false - disable_skip_layer_norm_fusion: true - disable_bias_skip_layer_norm_fusion: false - disable_bias_gelu_fusion: false - use_mask_index: false - no_attention_mask: false - disable_embed_layer_norm_fusion: true - disable_shape_inference: false - use_multi_head_attention: false - enable_gemm_fast_gelu_fusion: false - use_raw_attention_mask: false - disable_group_norm_fusion: true - disable_packed_kv: true - auto_optimization: O1 - auto_optimization_config: - for_gpu: ${is_gpu:${device}} - quantization: false - quantization_config: - is_static: false - format: QOperator - mode: IntegerOps - activations_dtype: QUInt8 - activations_symmetric: false - weights_dtype: QInt8 - weights_symmetric: true - per_channel: false - reduce_range: false - operators_to_quantize: - - MatMul - - Add - auto_quantization: null - auto_quantization_config: - is_static: false - calibration: ${requires_calibration:${backend.auto_quantization_config.is_static}, - ${backend.quantization_config.is_static}} - calibration_config: - dataset_name: glue - num_samples: 300 - dataset_config_name: sst2 - dataset_split: train - preprocess_batch: true - preprocess_class: optimum_benchmark.preprocessors.glue.GluePreprocessor - use_ortmodel: ${is_inference:${benchmark.name}} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.InferenceBenchmark - seed: 42 - memory: false - warmup_runs: 10 - benchmark_duration: 10 - input_shapes: - batch_size: 128 - sequence_length: 16 - num_choices: 1 - width: 64 - height: 64 - num_channels: 3 - point_batch_size: 3 - nb_points_per_image: 2 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: 100 -experiment_name: whisper_auto_opt(${backend.auto_optimization}) -model: openai/whisper-base -device: cuda:1 -task: ${infer_task:${model}, ${hub_kwargs.revision}} -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false - use_auth_token: false -environment: - optimum_version: 1.11.1.dev0 - transformers_version: 4.32.0.dev0 - accelerate_version: 0.22.0.dev0 - diffusers_version: 0.20.0.dev0 - python_version: 3.9.17 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 515637 diff --git a/examples/whisper/experiments/cuda_onnxruntime_128_100/whisper_auto_opt(O1)/16/.hydra/hydra.yaml b/examples/whisper/experiments/cuda_onnxruntime_128_100/whisper_auto_opt(O1)/16/.hydra/hydra.yaml deleted file mode 100644 index a71c9a31..00000000 --- a/examples/whisper/experiments/cuda_onnxruntime_128_100/whisper_auto_opt(O1)/16/.hydra/hydra.yaml +++ 
/dev/null @@ -1,177 +0,0 @@ -hydra: - run: - dir: experiments/${device}_${backend.name}_${benchmark.input_shapes.batch_size}_${benchmark.new_tokens}/${experiment_name} - sweep: - dir: experiments/${device}_${backend.name}_${benchmark.input_shapes.batch_size}_${benchmark.new_tokens}/${experiment_name} - subdir: ${hydra.job.num} - launcher: - _target_: hydra._internal.core_plugins.basic_launcher.BasicLauncher - sweeper: - _target_: hydra._internal.core_plugins.basic_sweeper.BasicSweeper - max_batch_size: null - params: - benchmark.new_tokens: 10,100 - benchmark.input_shapes.batch_size: 64,128 - backend.auto_optimization: null,O1,O2,O3,O4 - help: - app_name: ${hydra.job.name} - header: '${hydra.help.app_name} is powered by Hydra. - - ' - footer: 'Powered by Hydra (https://hydra.cc) - - Use --hydra-help to view Hydra specific help - - ' - template: '${hydra.help.header} - - == Configuration groups == - - Compose your configuration from those groups (group=option) - - - $APP_CONFIG_GROUPS - - - == Config == - - Override anything in the config (foo.bar=value) - - - $CONFIG - - - ${hydra.help.footer} - - ' - hydra_help: - template: 'Hydra (${hydra.runtime.version}) - - See https://hydra.cc for more info. - - - == Flags == - - $FLAGS_HELP - - - == Configuration groups == - - Compose your configuration from those groups (For example, append hydra/job_logging=disabled - to command line) - - - $HYDRA_CONFIG_GROUPS - - - Use ''--cfg hydra'' to Show the Hydra config. - - ' - hydra_help: ??? - hydra_logging: - version: 1 - formatters: - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s' - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - root: - level: INFO - handlers: - - console - disable_existing_loggers: false - job_logging: - version: 1 - formatters: - simple: - format: '[%(asctime)s][%(name)s][%(levelname)s] - %(message)s' - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s] - - %(message)s' - log_colors: - DEBUG: purple - INFO: green - WARNING: yellow - ERROR: red - CRITICAL: red - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - file: - class: logging.FileHandler - formatter: simple - filename: ${hydra.job.name}.log - root: - level: INFO - handlers: - - console - - file - disable_existing_loggers: false - env: {} - mode: MULTIRUN - searchpath: [] - callbacks: {} - output_subdir: .hydra - overrides: - hydra: - - hydra.mode=MULTIRUN - task: - - benchmark.new_tokens=100 - - benchmark.input_shapes.batch_size=128 - - backend.auto_optimization=O1 - - device=cuda\:1 - job: - name: main - chdir: true - override_dirname: backend.auto_optimization=O1,benchmark.input_shapes.batch_size=128,benchmark.new_tokens=100,device=cuda\:1 - id: '16' - num: 16 - config_name: whisper_auto_opt - env_set: {} - env_copy: [] - config: - override_dirname: - kv_sep: '=' - item_sep: ',' - exclude_keys: [] - runtime: - version: 1.3.2 - version_base: '1.3' - cwd: /home/ilyas/optimum-benchmark/examples/whisper - config_sources: - - path: hydra.conf - schema: pkg - provider: hydra - - path: optimum_benchmark - schema: pkg - provider: main - - path: hydra_plugins.hydra_colorlog.conf - schema: pkg - provider: hydra-colorlog - - path: /home/ilyas/optimum-benchmark/examples/whisper/configs - schema: file - provider: command-line - - path: '' - schema: structured 
- provider: schema - output_dir: /home/ilyas/optimum-benchmark/examples/whisper/experiments/cuda:1_onnxruntime_128_100/whisper_auto_opt(O1)/16 - choices: - benchmark: inference - backend: onnxruntime - hydra/env: default - hydra/callbacks: null - hydra/job_logging: colorlog - hydra/hydra_logging: colorlog - hydra/hydra_help: default - hydra/help: default - hydra/sweeper: basic - hydra/launcher: basic - hydra/output: default - verbose: false diff --git a/examples/whisper/experiments/cuda_onnxruntime_128_100/whisper_auto_opt(O1)/16/.hydra/overrides.yaml b/examples/whisper/experiments/cuda_onnxruntime_128_100/whisper_auto_opt(O1)/16/.hydra/overrides.yaml deleted file mode 100644 index ff8539d2..00000000 --- a/examples/whisper/experiments/cuda_onnxruntime_128_100/whisper_auto_opt(O1)/16/.hydra/overrides.yaml +++ /dev/null @@ -1,4 +0,0 @@ -- benchmark.new_tokens=100 -- benchmark.input_shapes.batch_size=128 -- backend.auto_optimization=O1 -- device=cuda\:1 diff --git a/examples/whisper/experiments/cuda_onnxruntime_128_100/whisper_auto_opt(O1)/16/hydra_config.yaml b/examples/whisper/experiments/cuda_onnxruntime_128_100/whisper_auto_opt(O1)/16/hydra_config.yaml deleted file mode 100644 index 5d62e515..00000000 --- a/examples/whisper/experiments/cuda_onnxruntime_128_100/whisper_auto_opt(O1)/16/hydra_config.yaml +++ /dev/null @@ -1,109 +0,0 @@ -backend: - name: onnxruntime - version: 1.15.1 - _target_: optimum_benchmark.backends.onnxruntime.ORTBackend - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: false - continous_isolation_check: false - delete_cache: false - export: true - no_weights: false - use_merged: false - use_cache: true - torch_dtype: null - provider: CUDAExecutionProvider - device_id: 1 - use_io_binding: true - enable_profiling: false - optimization: false - optimization_config: - optimization_level: 1 - optimize_for_gpu: true - fp16: false - enable_transformers_specific_optimizations: true - enable_gelu_approximation: false - disable_gelu_fusion: false - disable_layer_norm_fusion: false - disable_attention_fusion: false - disable_skip_layer_norm_fusion: true - disable_bias_skip_layer_norm_fusion: false - disable_bias_gelu_fusion: false - use_mask_index: false - no_attention_mask: false - disable_embed_layer_norm_fusion: true - disable_shape_inference: false - use_multi_head_attention: false - enable_gemm_fast_gelu_fusion: false - use_raw_attention_mask: false - disable_group_norm_fusion: true - disable_packed_kv: true - auto_optimization: O1 - auto_optimization_config: - for_gpu: true - quantization: false - quantization_config: - is_static: false - format: QOperator - mode: IntegerOps - activations_dtype: QUInt8 - activations_symmetric: false - weights_dtype: QInt8 - weights_symmetric: true - per_channel: false - reduce_range: false - operators_to_quantize: - - MatMul - - Add - auto_quantization: null - auto_quantization_config: - is_static: false - calibration: false - calibration_config: - dataset_name: glue - num_samples: 300 - dataset_config_name: sst2 - dataset_split: train - preprocess_batch: true - preprocess_class: optimum_benchmark.preprocessors.glue.GluePreprocessor - use_ortmodel: true -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.InferenceBenchmark - seed: 42 - memory: false - warmup_runs: 10 - benchmark_duration: 10 - input_shapes: - batch_size: 128 - sequence_length: 16 - num_choices: 1 - width: 64 - height: 64 - num_channels: 3 - point_batch_size: 3 - nb_points_per_image: 2 - feature_size: 80 - 
nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: 100 -experiment_name: whisper_auto_opt(O1) -model: openai/whisper-base -device: cuda:1 -task: automatic-speech-recognition -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false - use_auth_token: false -environment: - optimum_version: 1.11.1.dev0 - transformers_version: 4.32.0.dev0 - accelerate_version: 0.22.0.dev0 - diffusers_version: 0.20.0.dev0 - python_version: 3.9.17 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 515637 diff --git a/examples/whisper/experiments/cuda_onnxruntime_128_100/whisper_auto_opt(O1)/16/inference_results.csv b/examples/whisper/experiments/cuda_onnxruntime_128_100/whisper_auto_opt(O1)/16/inference_results.csv deleted file mode 100644 index c260de79..00000000 --- a/examples/whisper/experiments/cuda_onnxruntime_128_100/whisper_auto_opt(O1)/16/inference_results.csv +++ /dev/null @@ -1,2 +0,0 @@ -,forward.latency(s),forward.throughput(samples/s),generate.latency(s),generate.throughput(tokens/s) -0,0.657,195.0,1.29,9920.0 diff --git a/examples/whisper/experiments/cuda_onnxruntime_128_100/whisper_auto_opt(O2)/17/.hydra/config.yaml b/examples/whisper/experiments/cuda_onnxruntime_128_100/whisper_auto_opt(O2)/17/.hydra/config.yaml deleted file mode 100644 index 23bf174c..00000000 --- a/examples/whisper/experiments/cuda_onnxruntime_128_100/whisper_auto_opt(O2)/17/.hydra/config.yaml +++ /dev/null @@ -1,110 +0,0 @@ -backend: - name: onnxruntime - version: 1.15.1 - _target_: optimum_benchmark.backends.onnxruntime.ORTBackend - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: false - continous_isolation_check: false - delete_cache: false - export: true - no_weights: false - use_merged: false - use_cache: true - torch_dtype: null - provider: ${infer_provider:${device}} - device_id: ${infer_device_id:${device}} - use_io_binding: ${is_gpu:${device}} - enable_profiling: ${is_profiling:${benchmark.name}} - optimization: false - optimization_config: - optimization_level: 1 - optimize_for_gpu: ${is_gpu:${device}} - fp16: false - enable_transformers_specific_optimizations: true - enable_gelu_approximation: false - disable_gelu_fusion: false - disable_layer_norm_fusion: false - disable_attention_fusion: false - disable_skip_layer_norm_fusion: true - disable_bias_skip_layer_norm_fusion: false - disable_bias_gelu_fusion: false - use_mask_index: false - no_attention_mask: false - disable_embed_layer_norm_fusion: true - disable_shape_inference: false - use_multi_head_attention: false - enable_gemm_fast_gelu_fusion: false - use_raw_attention_mask: false - disable_group_norm_fusion: true - disable_packed_kv: true - auto_optimization: O2 - auto_optimization_config: - for_gpu: ${is_gpu:${device}} - quantization: false - quantization_config: - is_static: false - format: QOperator - mode: IntegerOps - activations_dtype: QUInt8 - activations_symmetric: false - weights_dtype: QInt8 - weights_symmetric: true - per_channel: false - reduce_range: false - operators_to_quantize: - - MatMul - - Add - auto_quantization: null - auto_quantization_config: - is_static: false - calibration: ${requires_calibration:${backend.auto_quantization_config.is_static}, - ${backend.quantization_config.is_static}} - calibration_config: - dataset_name: glue - num_samples: 300 - dataset_config_name: sst2 - dataset_split: train - preprocess_batch: true - preprocess_class: optimum_benchmark.preprocessors.glue.GluePreprocessor - use_ortmodel: 
${is_inference:${benchmark.name}} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.InferenceBenchmark - seed: 42 - memory: false - warmup_runs: 10 - benchmark_duration: 10 - input_shapes: - batch_size: 128 - sequence_length: 16 - num_choices: 1 - width: 64 - height: 64 - num_channels: 3 - point_batch_size: 3 - nb_points_per_image: 2 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: 100 -experiment_name: whisper_auto_opt(${backend.auto_optimization}) -model: openai/whisper-base -device: cuda:1 -task: ${infer_task:${model}, ${hub_kwargs.revision}} -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false - use_auth_token: false -environment: - optimum_version: 1.11.1.dev0 - transformers_version: 4.32.0.dev0 - accelerate_version: 0.22.0.dev0 - diffusers_version: 0.20.0.dev0 - python_version: 3.9.17 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 515637 diff --git a/examples/whisper/experiments/cuda_onnxruntime_128_100/whisper_auto_opt(O2)/17/.hydra/hydra.yaml b/examples/whisper/experiments/cuda_onnxruntime_128_100/whisper_auto_opt(O2)/17/.hydra/hydra.yaml deleted file mode 100644 index 189ef951..00000000 --- a/examples/whisper/experiments/cuda_onnxruntime_128_100/whisper_auto_opt(O2)/17/.hydra/hydra.yaml +++ /dev/null @@ -1,177 +0,0 @@ -hydra: - run: - dir: experiments/${device}_${backend.name}_${benchmark.input_shapes.batch_size}_${benchmark.new_tokens}/${experiment_name} - sweep: - dir: experiments/${device}_${backend.name}_${benchmark.input_shapes.batch_size}_${benchmark.new_tokens}/${experiment_name} - subdir: ${hydra.job.num} - launcher: - _target_: hydra._internal.core_plugins.basic_launcher.BasicLauncher - sweeper: - _target_: hydra._internal.core_plugins.basic_sweeper.BasicSweeper - max_batch_size: null - params: - benchmark.new_tokens: 10,100 - benchmark.input_shapes.batch_size: 64,128 - backend.auto_optimization: null,O1,O2,O3,O4 - help: - app_name: ${hydra.job.name} - header: '${hydra.help.app_name} is powered by Hydra. - - ' - footer: 'Powered by Hydra (https://hydra.cc) - - Use --hydra-help to view Hydra specific help - - ' - template: '${hydra.help.header} - - == Configuration groups == - - Compose your configuration from those groups (group=option) - - - $APP_CONFIG_GROUPS - - - == Config == - - Override anything in the config (foo.bar=value) - - - $CONFIG - - - ${hydra.help.footer} - - ' - hydra_help: - template: 'Hydra (${hydra.runtime.version}) - - See https://hydra.cc for more info. - - - == Flags == - - $FLAGS_HELP - - - == Configuration groups == - - Compose your configuration from those groups (For example, append hydra/job_logging=disabled - to command line) - - - $HYDRA_CONFIG_GROUPS - - - Use ''--cfg hydra'' to Show the Hydra config. - - ' - hydra_help: ??? 
- hydra_logging: - version: 1 - formatters: - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s' - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - root: - level: INFO - handlers: - - console - disable_existing_loggers: false - job_logging: - version: 1 - formatters: - simple: - format: '[%(asctime)s][%(name)s][%(levelname)s] - %(message)s' - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s] - - %(message)s' - log_colors: - DEBUG: purple - INFO: green - WARNING: yellow - ERROR: red - CRITICAL: red - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - file: - class: logging.FileHandler - formatter: simple - filename: ${hydra.job.name}.log - root: - level: INFO - handlers: - - console - - file - disable_existing_loggers: false - env: {} - mode: MULTIRUN - searchpath: [] - callbacks: {} - output_subdir: .hydra - overrides: - hydra: - - hydra.mode=MULTIRUN - task: - - benchmark.new_tokens=100 - - benchmark.input_shapes.batch_size=128 - - backend.auto_optimization=O2 - - device=cuda\:1 - job: - name: main - chdir: true - override_dirname: backend.auto_optimization=O2,benchmark.input_shapes.batch_size=128,benchmark.new_tokens=100,device=cuda\:1 - id: '17' - num: 17 - config_name: whisper_auto_opt - env_set: {} - env_copy: [] - config: - override_dirname: - kv_sep: '=' - item_sep: ',' - exclude_keys: [] - runtime: - version: 1.3.2 - version_base: '1.3' - cwd: /home/ilyas/optimum-benchmark/examples/whisper - config_sources: - - path: hydra.conf - schema: pkg - provider: hydra - - path: optimum_benchmark - schema: pkg - provider: main - - path: hydra_plugins.hydra_colorlog.conf - schema: pkg - provider: hydra-colorlog - - path: /home/ilyas/optimum-benchmark/examples/whisper/configs - schema: file - provider: command-line - - path: '' - schema: structured - provider: schema - output_dir: /home/ilyas/optimum-benchmark/examples/whisper/experiments/cuda:1_onnxruntime_128_100/whisper_auto_opt(O2)/17 - choices: - benchmark: inference - backend: onnxruntime - hydra/env: default - hydra/callbacks: null - hydra/job_logging: colorlog - hydra/hydra_logging: colorlog - hydra/hydra_help: default - hydra/help: default - hydra/sweeper: basic - hydra/launcher: basic - hydra/output: default - verbose: false diff --git a/examples/whisper/experiments/cuda_onnxruntime_128_100/whisper_auto_opt(O2)/17/.hydra/overrides.yaml b/examples/whisper/experiments/cuda_onnxruntime_128_100/whisper_auto_opt(O2)/17/.hydra/overrides.yaml deleted file mode 100644 index b907d194..00000000 --- a/examples/whisper/experiments/cuda_onnxruntime_128_100/whisper_auto_opt(O2)/17/.hydra/overrides.yaml +++ /dev/null @@ -1,4 +0,0 @@ -- benchmark.new_tokens=100 -- benchmark.input_shapes.batch_size=128 -- backend.auto_optimization=O2 -- device=cuda\:1 diff --git a/examples/whisper/experiments/cuda_onnxruntime_128_100/whisper_auto_opt(O2)/17/hydra_config.yaml b/examples/whisper/experiments/cuda_onnxruntime_128_100/whisper_auto_opt(O2)/17/hydra_config.yaml deleted file mode 100644 index a016c199..00000000 --- a/examples/whisper/experiments/cuda_onnxruntime_128_100/whisper_auto_opt(O2)/17/hydra_config.yaml +++ /dev/null @@ -1,109 +0,0 @@ -backend: - name: onnxruntime - version: 1.15.1 - _target_: optimum_benchmark.backends.onnxruntime.ORTBackend - inter_op_num_threads: null - 
intra_op_num_threads: null - initial_isolation_check: false - continous_isolation_check: false - delete_cache: false - export: true - no_weights: false - use_merged: false - use_cache: true - torch_dtype: null - provider: CUDAExecutionProvider - device_id: 1 - use_io_binding: true - enable_profiling: false - optimization: false - optimization_config: - optimization_level: 1 - optimize_for_gpu: true - fp16: false - enable_transformers_specific_optimizations: true - enable_gelu_approximation: false - disable_gelu_fusion: false - disable_layer_norm_fusion: false - disable_attention_fusion: false - disable_skip_layer_norm_fusion: true - disable_bias_skip_layer_norm_fusion: false - disable_bias_gelu_fusion: false - use_mask_index: false - no_attention_mask: false - disable_embed_layer_norm_fusion: true - disable_shape_inference: false - use_multi_head_attention: false - enable_gemm_fast_gelu_fusion: false - use_raw_attention_mask: false - disable_group_norm_fusion: true - disable_packed_kv: true - auto_optimization: O2 - auto_optimization_config: - for_gpu: true - quantization: false - quantization_config: - is_static: false - format: QOperator - mode: IntegerOps - activations_dtype: QUInt8 - activations_symmetric: false - weights_dtype: QInt8 - weights_symmetric: true - per_channel: false - reduce_range: false - operators_to_quantize: - - MatMul - - Add - auto_quantization: null - auto_quantization_config: - is_static: false - calibration: false - calibration_config: - dataset_name: glue - num_samples: 300 - dataset_config_name: sst2 - dataset_split: train - preprocess_batch: true - preprocess_class: optimum_benchmark.preprocessors.glue.GluePreprocessor - use_ortmodel: true -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.InferenceBenchmark - seed: 42 - memory: false - warmup_runs: 10 - benchmark_duration: 10 - input_shapes: - batch_size: 128 - sequence_length: 16 - num_choices: 1 - width: 64 - height: 64 - num_channels: 3 - point_batch_size: 3 - nb_points_per_image: 2 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: 100 -experiment_name: whisper_auto_opt(O2) -model: openai/whisper-base -device: cuda:1 -task: automatic-speech-recognition -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false - use_auth_token: false -environment: - optimum_version: 1.11.1.dev0 - transformers_version: 4.32.0.dev0 - accelerate_version: 0.22.0.dev0 - diffusers_version: 0.20.0.dev0 - python_version: 3.9.17 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 515637 diff --git a/examples/whisper/experiments/cuda_onnxruntime_128_100/whisper_auto_opt(O2)/17/inference_results.csv b/examples/whisper/experiments/cuda_onnxruntime_128_100/whisper_auto_opt(O2)/17/inference_results.csv deleted file mode 100644 index ef286a15..00000000 --- a/examples/whisper/experiments/cuda_onnxruntime_128_100/whisper_auto_opt(O2)/17/inference_results.csv +++ /dev/null @@ -1,2 +0,0 @@ -,forward.latency(s),forward.throughput(samples/s),generate.latency(s),generate.throughput(tokens/s) -0,0.344,372.0,0.961,13300.0 diff --git a/examples/whisper/experiments/cuda_onnxruntime_128_100/whisper_auto_opt(O3)/18/.hydra/config.yaml b/examples/whisper/experiments/cuda_onnxruntime_128_100/whisper_auto_opt(O3)/18/.hydra/config.yaml deleted file mode 100644 index 15156ba3..00000000 --- a/examples/whisper/experiments/cuda_onnxruntime_128_100/whisper_auto_opt(O3)/18/.hydra/config.yaml +++ /dev/null @@ -1,110 +0,0 
@@ -backend: - name: onnxruntime - version: 1.15.1 - _target_: optimum_benchmark.backends.onnxruntime.ORTBackend - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: false - continous_isolation_check: false - delete_cache: false - export: true - no_weights: false - use_merged: false - use_cache: true - torch_dtype: null - provider: ${infer_provider:${device}} - device_id: ${infer_device_id:${device}} - use_io_binding: ${is_gpu:${device}} - enable_profiling: ${is_profiling:${benchmark.name}} - optimization: false - optimization_config: - optimization_level: 1 - optimize_for_gpu: ${is_gpu:${device}} - fp16: false - enable_transformers_specific_optimizations: true - enable_gelu_approximation: false - disable_gelu_fusion: false - disable_layer_norm_fusion: false - disable_attention_fusion: false - disable_skip_layer_norm_fusion: true - disable_bias_skip_layer_norm_fusion: false - disable_bias_gelu_fusion: false - use_mask_index: false - no_attention_mask: false - disable_embed_layer_norm_fusion: true - disable_shape_inference: false - use_multi_head_attention: false - enable_gemm_fast_gelu_fusion: false - use_raw_attention_mask: false - disable_group_norm_fusion: true - disable_packed_kv: true - auto_optimization: O3 - auto_optimization_config: - for_gpu: ${is_gpu:${device}} - quantization: false - quantization_config: - is_static: false - format: QOperator - mode: IntegerOps - activations_dtype: QUInt8 - activations_symmetric: false - weights_dtype: QInt8 - weights_symmetric: true - per_channel: false - reduce_range: false - operators_to_quantize: - - MatMul - - Add - auto_quantization: null - auto_quantization_config: - is_static: false - calibration: ${requires_calibration:${backend.auto_quantization_config.is_static}, - ${backend.quantization_config.is_static}} - calibration_config: - dataset_name: glue - num_samples: 300 - dataset_config_name: sst2 - dataset_split: train - preprocess_batch: true - preprocess_class: optimum_benchmark.preprocessors.glue.GluePreprocessor - use_ortmodel: ${is_inference:${benchmark.name}} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.InferenceBenchmark - seed: 42 - memory: false - warmup_runs: 10 - benchmark_duration: 10 - input_shapes: - batch_size: 128 - sequence_length: 16 - num_choices: 1 - width: 64 - height: 64 - num_channels: 3 - point_batch_size: 3 - nb_points_per_image: 2 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: 100 -experiment_name: whisper_auto_opt(${backend.auto_optimization}) -model: openai/whisper-base -device: cuda:1 -task: ${infer_task:${model}, ${hub_kwargs.revision}} -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false - use_auth_token: false -environment: - optimum_version: 1.11.1.dev0 - transformers_version: 4.32.0.dev0 - accelerate_version: 0.22.0.dev0 - diffusers_version: 0.20.0.dev0 - python_version: 3.9.17 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 515637 diff --git a/examples/whisper/experiments/cuda_onnxruntime_128_100/whisper_auto_opt(O3)/18/.hydra/hydra.yaml b/examples/whisper/experiments/cuda_onnxruntime_128_100/whisper_auto_opt(O3)/18/.hydra/hydra.yaml deleted file mode 100644 index 0aad17e0..00000000 --- a/examples/whisper/experiments/cuda_onnxruntime_128_100/whisper_auto_opt(O3)/18/.hydra/hydra.yaml +++ /dev/null @@ -1,177 +0,0 @@ -hydra: - run: - dir: 
experiments/${device}_${backend.name}_${benchmark.input_shapes.batch_size}_${benchmark.new_tokens}/${experiment_name} - sweep: - dir: experiments/${device}_${backend.name}_${benchmark.input_shapes.batch_size}_${benchmark.new_tokens}/${experiment_name} - subdir: ${hydra.job.num} - launcher: - _target_: hydra._internal.core_plugins.basic_launcher.BasicLauncher - sweeper: - _target_: hydra._internal.core_plugins.basic_sweeper.BasicSweeper - max_batch_size: null - params: - benchmark.new_tokens: 10,100 - benchmark.input_shapes.batch_size: 64,128 - backend.auto_optimization: null,O1,O2,O3,O4 - help: - app_name: ${hydra.job.name} - header: '${hydra.help.app_name} is powered by Hydra. - - ' - footer: 'Powered by Hydra (https://hydra.cc) - - Use --hydra-help to view Hydra specific help - - ' - template: '${hydra.help.header} - - == Configuration groups == - - Compose your configuration from those groups (group=option) - - - $APP_CONFIG_GROUPS - - - == Config == - - Override anything in the config (foo.bar=value) - - - $CONFIG - - - ${hydra.help.footer} - - ' - hydra_help: - template: 'Hydra (${hydra.runtime.version}) - - See https://hydra.cc for more info. - - - == Flags == - - $FLAGS_HELP - - - == Configuration groups == - - Compose your configuration from those groups (For example, append hydra/job_logging=disabled - to command line) - - - $HYDRA_CONFIG_GROUPS - - - Use ''--cfg hydra'' to Show the Hydra config. - - ' - hydra_help: ??? - hydra_logging: - version: 1 - formatters: - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s' - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - root: - level: INFO - handlers: - - console - disable_existing_loggers: false - job_logging: - version: 1 - formatters: - simple: - format: '[%(asctime)s][%(name)s][%(levelname)s] - %(message)s' - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s] - - %(message)s' - log_colors: - DEBUG: purple - INFO: green - WARNING: yellow - ERROR: red - CRITICAL: red - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - file: - class: logging.FileHandler - formatter: simple - filename: ${hydra.job.name}.log - root: - level: INFO - handlers: - - console - - file - disable_existing_loggers: false - env: {} - mode: MULTIRUN - searchpath: [] - callbacks: {} - output_subdir: .hydra - overrides: - hydra: - - hydra.mode=MULTIRUN - task: - - benchmark.new_tokens=100 - - benchmark.input_shapes.batch_size=128 - - backend.auto_optimization=O3 - - device=cuda\:1 - job: - name: main - chdir: true - override_dirname: backend.auto_optimization=O3,benchmark.input_shapes.batch_size=128,benchmark.new_tokens=100,device=cuda\:1 - id: '18' - num: 18 - config_name: whisper_auto_opt - env_set: {} - env_copy: [] - config: - override_dirname: - kv_sep: '=' - item_sep: ',' - exclude_keys: [] - runtime: - version: 1.3.2 - version_base: '1.3' - cwd: /home/ilyas/optimum-benchmark/examples/whisper - config_sources: - - path: hydra.conf - schema: pkg - provider: hydra - - path: optimum_benchmark - schema: pkg - provider: main - - path: hydra_plugins.hydra_colorlog.conf - schema: pkg - provider: hydra-colorlog - - path: /home/ilyas/optimum-benchmark/examples/whisper/configs - schema: file - provider: command-line - - path: '' - schema: structured - provider: schema - output_dir: 
/home/ilyas/optimum-benchmark/examples/whisper/experiments/cuda:1_onnxruntime_128_100/whisper_auto_opt(O3)/18 - choices: - benchmark: inference - backend: onnxruntime - hydra/env: default - hydra/callbacks: null - hydra/job_logging: colorlog - hydra/hydra_logging: colorlog - hydra/hydra_help: default - hydra/help: default - hydra/sweeper: basic - hydra/launcher: basic - hydra/output: default - verbose: false diff --git a/examples/whisper/experiments/cuda_onnxruntime_128_100/whisper_auto_opt(O3)/18/.hydra/overrides.yaml b/examples/whisper/experiments/cuda_onnxruntime_128_100/whisper_auto_opt(O3)/18/.hydra/overrides.yaml deleted file mode 100644 index 2698a733..00000000 --- a/examples/whisper/experiments/cuda_onnxruntime_128_100/whisper_auto_opt(O3)/18/.hydra/overrides.yaml +++ /dev/null @@ -1,4 +0,0 @@ -- benchmark.new_tokens=100 -- benchmark.input_shapes.batch_size=128 -- backend.auto_optimization=O3 -- device=cuda\:1 diff --git a/examples/whisper/experiments/cuda_onnxruntime_128_100/whisper_auto_opt(O3)/18/hydra_config.yaml b/examples/whisper/experiments/cuda_onnxruntime_128_100/whisper_auto_opt(O3)/18/hydra_config.yaml deleted file mode 100644 index fde6745e..00000000 --- a/examples/whisper/experiments/cuda_onnxruntime_128_100/whisper_auto_opt(O3)/18/hydra_config.yaml +++ /dev/null @@ -1,109 +0,0 @@ -backend: - name: onnxruntime - version: 1.15.1 - _target_: optimum_benchmark.backends.onnxruntime.ORTBackend - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: false - continous_isolation_check: false - delete_cache: false - export: true - no_weights: false - use_merged: false - use_cache: true - torch_dtype: null - provider: CUDAExecutionProvider - device_id: 1 - use_io_binding: true - enable_profiling: false - optimization: false - optimization_config: - optimization_level: 1 - optimize_for_gpu: true - fp16: false - enable_transformers_specific_optimizations: true - enable_gelu_approximation: false - disable_gelu_fusion: false - disable_layer_norm_fusion: false - disable_attention_fusion: false - disable_skip_layer_norm_fusion: true - disable_bias_skip_layer_norm_fusion: false - disable_bias_gelu_fusion: false - use_mask_index: false - no_attention_mask: false - disable_embed_layer_norm_fusion: true - disable_shape_inference: false - use_multi_head_attention: false - enable_gemm_fast_gelu_fusion: false - use_raw_attention_mask: false - disable_group_norm_fusion: true - disable_packed_kv: true - auto_optimization: O3 - auto_optimization_config: - for_gpu: true - quantization: false - quantization_config: - is_static: false - format: QOperator - mode: IntegerOps - activations_dtype: QUInt8 - activations_symmetric: false - weights_dtype: QInt8 - weights_symmetric: true - per_channel: false - reduce_range: false - operators_to_quantize: - - MatMul - - Add - auto_quantization: null - auto_quantization_config: - is_static: false - calibration: false - calibration_config: - dataset_name: glue - num_samples: 300 - dataset_config_name: sst2 - dataset_split: train - preprocess_batch: true - preprocess_class: optimum_benchmark.preprocessors.glue.GluePreprocessor - use_ortmodel: true -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.InferenceBenchmark - seed: 42 - memory: false - warmup_runs: 10 - benchmark_duration: 10 - input_shapes: - batch_size: 128 - sequence_length: 16 - num_choices: 1 - width: 64 - height: 64 - num_channels: 3 - point_batch_size: 3 - nb_points_per_image: 2 - feature_size: 80 - nb_max_frames: 3000 - 
audio_sequence_length: 16000 - new_tokens: 100 -experiment_name: whisper_auto_opt(O3) -model: openai/whisper-base -device: cuda:1 -task: automatic-speech-recognition -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false - use_auth_token: false -environment: - optimum_version: 1.11.1.dev0 - transformers_version: 4.32.0.dev0 - accelerate_version: 0.22.0.dev0 - diffusers_version: 0.20.0.dev0 - python_version: 3.9.17 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 515637 diff --git a/examples/whisper/experiments/cuda_onnxruntime_128_100/whisper_auto_opt(O3)/18/inference_results.csv b/examples/whisper/experiments/cuda_onnxruntime_128_100/whisper_auto_opt(O3)/18/inference_results.csv deleted file mode 100644 index d4ea576c..00000000 --- a/examples/whisper/experiments/cuda_onnxruntime_128_100/whisper_auto_opt(O3)/18/inference_results.csv +++ /dev/null @@ -1,2 +0,0 @@ -,forward.latency(s),forward.throughput(samples/s),generate.latency(s),generate.throughput(tokens/s) -0,0.338,379.0,0.959,13300.0 diff --git a/examples/whisper/experiments/cuda_onnxruntime_128_100/whisper_auto_opt(O4)/19/.hydra/config.yaml b/examples/whisper/experiments/cuda_onnxruntime_128_100/whisper_auto_opt(O4)/19/.hydra/config.yaml deleted file mode 100644 index 6662c5b8..00000000 --- a/examples/whisper/experiments/cuda_onnxruntime_128_100/whisper_auto_opt(O4)/19/.hydra/config.yaml +++ /dev/null @@ -1,110 +0,0 @@ -backend: - name: onnxruntime - version: 1.15.1 - _target_: optimum_benchmark.backends.onnxruntime.ORTBackend - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: false - continous_isolation_check: false - delete_cache: false - export: true - no_weights: false - use_merged: false - use_cache: true - torch_dtype: null - provider: ${infer_provider:${device}} - device_id: ${infer_device_id:${device}} - use_io_binding: ${is_gpu:${device}} - enable_profiling: ${is_profiling:${benchmark.name}} - optimization: false - optimization_config: - optimization_level: 1 - optimize_for_gpu: ${is_gpu:${device}} - fp16: false - enable_transformers_specific_optimizations: true - enable_gelu_approximation: false - disable_gelu_fusion: false - disable_layer_norm_fusion: false - disable_attention_fusion: false - disable_skip_layer_norm_fusion: true - disable_bias_skip_layer_norm_fusion: false - disable_bias_gelu_fusion: false - use_mask_index: false - no_attention_mask: false - disable_embed_layer_norm_fusion: true - disable_shape_inference: false - use_multi_head_attention: false - enable_gemm_fast_gelu_fusion: false - use_raw_attention_mask: false - disable_group_norm_fusion: true - disable_packed_kv: true - auto_optimization: O4 - auto_optimization_config: - for_gpu: ${is_gpu:${device}} - quantization: false - quantization_config: - is_static: false - format: QOperator - mode: IntegerOps - activations_dtype: QUInt8 - activations_symmetric: false - weights_dtype: QInt8 - weights_symmetric: true - per_channel: false - reduce_range: false - operators_to_quantize: - - MatMul - - Add - auto_quantization: null - auto_quantization_config: - is_static: false - calibration: ${requires_calibration:${backend.auto_quantization_config.is_static}, - ${backend.quantization_config.is_static}} - calibration_config: - dataset_name: glue - num_samples: 300 - dataset_config_name: sst2 - dataset_split: train - preprocess_batch: true - preprocess_class: optimum_benchmark.preprocessors.glue.GluePreprocessor - use_ortmodel: 
${is_inference:${benchmark.name}} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.InferenceBenchmark - seed: 42 - memory: false - warmup_runs: 10 - benchmark_duration: 10 - input_shapes: - batch_size: 128 - sequence_length: 16 - num_choices: 1 - width: 64 - height: 64 - num_channels: 3 - point_batch_size: 3 - nb_points_per_image: 2 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: 100 -experiment_name: whisper_auto_opt(${backend.auto_optimization}) -model: openai/whisper-base -device: cuda:1 -task: ${infer_task:${model}, ${hub_kwargs.revision}} -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false - use_auth_token: false -environment: - optimum_version: 1.11.1.dev0 - transformers_version: 4.32.0.dev0 - accelerate_version: 0.22.0.dev0 - diffusers_version: 0.20.0.dev0 - python_version: 3.9.17 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 515637 diff --git a/examples/whisper/experiments/cuda_onnxruntime_128_100/whisper_auto_opt(O4)/19/.hydra/hydra.yaml b/examples/whisper/experiments/cuda_onnxruntime_128_100/whisper_auto_opt(O4)/19/.hydra/hydra.yaml deleted file mode 100644 index 01fdc713..00000000 --- a/examples/whisper/experiments/cuda_onnxruntime_128_100/whisper_auto_opt(O4)/19/.hydra/hydra.yaml +++ /dev/null @@ -1,177 +0,0 @@ -hydra: - run: - dir: experiments/${device}_${backend.name}_${benchmark.input_shapes.batch_size}_${benchmark.new_tokens}/${experiment_name} - sweep: - dir: experiments/${device}_${backend.name}_${benchmark.input_shapes.batch_size}_${benchmark.new_tokens}/${experiment_name} - subdir: ${hydra.job.num} - launcher: - _target_: hydra._internal.core_plugins.basic_launcher.BasicLauncher - sweeper: - _target_: hydra._internal.core_plugins.basic_sweeper.BasicSweeper - max_batch_size: null - params: - benchmark.new_tokens: 10,100 - benchmark.input_shapes.batch_size: 64,128 - backend.auto_optimization: null,O1,O2,O3,O4 - help: - app_name: ${hydra.job.name} - header: '${hydra.help.app_name} is powered by Hydra. - - ' - footer: 'Powered by Hydra (https://hydra.cc) - - Use --hydra-help to view Hydra specific help - - ' - template: '${hydra.help.header} - - == Configuration groups == - - Compose your configuration from those groups (group=option) - - - $APP_CONFIG_GROUPS - - - == Config == - - Override anything in the config (foo.bar=value) - - - $CONFIG - - - ${hydra.help.footer} - - ' - hydra_help: - template: 'Hydra (${hydra.runtime.version}) - - See https://hydra.cc for more info. - - - == Flags == - - $FLAGS_HELP - - - == Configuration groups == - - Compose your configuration from those groups (For example, append hydra/job_logging=disabled - to command line) - - - $HYDRA_CONFIG_GROUPS - - - Use ''--cfg hydra'' to Show the Hydra config. - - ' - hydra_help: ??? 
- hydra_logging: - version: 1 - formatters: - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s' - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - root: - level: INFO - handlers: - - console - disable_existing_loggers: false - job_logging: - version: 1 - formatters: - simple: - format: '[%(asctime)s][%(name)s][%(levelname)s] - %(message)s' - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s] - - %(message)s' - log_colors: - DEBUG: purple - INFO: green - WARNING: yellow - ERROR: red - CRITICAL: red - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - file: - class: logging.FileHandler - formatter: simple - filename: ${hydra.job.name}.log - root: - level: INFO - handlers: - - console - - file - disable_existing_loggers: false - env: {} - mode: MULTIRUN - searchpath: [] - callbacks: {} - output_subdir: .hydra - overrides: - hydra: - - hydra.mode=MULTIRUN - task: - - benchmark.new_tokens=100 - - benchmark.input_shapes.batch_size=128 - - backend.auto_optimization=O4 - - device=cuda\:1 - job: - name: main - chdir: true - override_dirname: backend.auto_optimization=O4,benchmark.input_shapes.batch_size=128,benchmark.new_tokens=100,device=cuda\:1 - id: '19' - num: 19 - config_name: whisper_auto_opt - env_set: {} - env_copy: [] - config: - override_dirname: - kv_sep: '=' - item_sep: ',' - exclude_keys: [] - runtime: - version: 1.3.2 - version_base: '1.3' - cwd: /home/ilyas/optimum-benchmark/examples/whisper - config_sources: - - path: hydra.conf - schema: pkg - provider: hydra - - path: optimum_benchmark - schema: pkg - provider: main - - path: hydra_plugins.hydra_colorlog.conf - schema: pkg - provider: hydra-colorlog - - path: /home/ilyas/optimum-benchmark/examples/whisper/configs - schema: file - provider: command-line - - path: '' - schema: structured - provider: schema - output_dir: /home/ilyas/optimum-benchmark/examples/whisper/experiments/cuda:1_onnxruntime_128_100/whisper_auto_opt(O4)/19 - choices: - benchmark: inference - backend: onnxruntime - hydra/env: default - hydra/callbacks: null - hydra/job_logging: colorlog - hydra/hydra_logging: colorlog - hydra/hydra_help: default - hydra/help: default - hydra/sweeper: basic - hydra/launcher: basic - hydra/output: default - verbose: false diff --git a/examples/whisper/experiments/cuda_onnxruntime_128_100/whisper_auto_opt(O4)/19/.hydra/overrides.yaml b/examples/whisper/experiments/cuda_onnxruntime_128_100/whisper_auto_opt(O4)/19/.hydra/overrides.yaml deleted file mode 100644 index 88c44fa3..00000000 --- a/examples/whisper/experiments/cuda_onnxruntime_128_100/whisper_auto_opt(O4)/19/.hydra/overrides.yaml +++ /dev/null @@ -1,4 +0,0 @@ -- benchmark.new_tokens=100 -- benchmark.input_shapes.batch_size=128 -- backend.auto_optimization=O4 -- device=cuda\:1 diff --git a/examples/whisper/experiments/cuda_onnxruntime_128_100/whisper_auto_opt(O4)/19/hydra_config.yaml b/examples/whisper/experiments/cuda_onnxruntime_128_100/whisper_auto_opt(O4)/19/hydra_config.yaml deleted file mode 100644 index 2cf2c43e..00000000 --- a/examples/whisper/experiments/cuda_onnxruntime_128_100/whisper_auto_opt(O4)/19/hydra_config.yaml +++ /dev/null @@ -1,109 +0,0 @@ -backend: - name: onnxruntime - version: 1.15.1 - _target_: optimum_benchmark.backends.onnxruntime.ORTBackend - inter_op_num_threads: null - 
intra_op_num_threads: null - initial_isolation_check: false - continous_isolation_check: false - delete_cache: false - export: true - no_weights: false - use_merged: false - use_cache: true - torch_dtype: null - provider: CUDAExecutionProvider - device_id: 1 - use_io_binding: true - enable_profiling: false - optimization: false - optimization_config: - optimization_level: 1 - optimize_for_gpu: true - fp16: false - enable_transformers_specific_optimizations: true - enable_gelu_approximation: false - disable_gelu_fusion: false - disable_layer_norm_fusion: false - disable_attention_fusion: false - disable_skip_layer_norm_fusion: true - disable_bias_skip_layer_norm_fusion: false - disable_bias_gelu_fusion: false - use_mask_index: false - no_attention_mask: false - disable_embed_layer_norm_fusion: true - disable_shape_inference: false - use_multi_head_attention: false - enable_gemm_fast_gelu_fusion: false - use_raw_attention_mask: false - disable_group_norm_fusion: true - disable_packed_kv: true - auto_optimization: O4 - auto_optimization_config: - for_gpu: true - quantization: false - quantization_config: - is_static: false - format: QOperator - mode: IntegerOps - activations_dtype: QUInt8 - activations_symmetric: false - weights_dtype: QInt8 - weights_symmetric: true - per_channel: false - reduce_range: false - operators_to_quantize: - - MatMul - - Add - auto_quantization: null - auto_quantization_config: - is_static: false - calibration: false - calibration_config: - dataset_name: glue - num_samples: 300 - dataset_config_name: sst2 - dataset_split: train - preprocess_batch: true - preprocess_class: optimum_benchmark.preprocessors.glue.GluePreprocessor - use_ortmodel: true -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.InferenceBenchmark - seed: 42 - memory: false - warmup_runs: 10 - benchmark_duration: 10 - input_shapes: - batch_size: 128 - sequence_length: 16 - num_choices: 1 - width: 64 - height: 64 - num_channels: 3 - point_batch_size: 3 - nb_points_per_image: 2 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: 100 -experiment_name: whisper_auto_opt(O4) -model: openai/whisper-base -device: cuda:1 -task: automatic-speech-recognition -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false - use_auth_token: false -environment: - optimum_version: 1.11.1.dev0 - transformers_version: 4.32.0.dev0 - accelerate_version: 0.22.0.dev0 - diffusers_version: 0.20.0.dev0 - python_version: 3.9.17 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 515637 diff --git a/examples/whisper/experiments/cuda_onnxruntime_128_100/whisper_auto_opt(O4)/19/inference_results.csv b/examples/whisper/experiments/cuda_onnxruntime_128_100/whisper_auto_opt(O4)/19/inference_results.csv deleted file mode 100644 index aa15d519..00000000 --- a/examples/whisper/experiments/cuda_onnxruntime_128_100/whisper_auto_opt(O4)/19/inference_results.csv +++ /dev/null @@ -1,2 +0,0 @@ -,forward.latency(s),forward.throughput(samples/s),generate.latency(s),generate.throughput(tokens/s) -0,0.133,962.0,1.04,12300.0 diff --git a/examples/whisper/experiments/cuda_onnxruntime_64_10/whisper_auto_opt(None)/0/.hydra/config.yaml b/examples/whisper/experiments/cuda_onnxruntime_64_10/whisper_auto_opt(None)/0/.hydra/config.yaml deleted file mode 100644 index ca0f6233..00000000 --- a/examples/whisper/experiments/cuda_onnxruntime_64_10/whisper_auto_opt(None)/0/.hydra/config.yaml +++ /dev/null @@ -1,110 +0,0 @@ 
-backend: - name: onnxruntime - version: 1.15.1 - _target_: optimum_benchmark.backends.onnxruntime.ORTBackend - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: false - continous_isolation_check: false - delete_cache: false - export: true - no_weights: false - use_merged: false - use_cache: true - torch_dtype: null - provider: ${infer_provider:${device}} - device_id: ${infer_device_id:${device}} - use_io_binding: ${is_gpu:${device}} - enable_profiling: ${is_profiling:${benchmark.name}} - optimization: false - optimization_config: - optimization_level: 1 - optimize_for_gpu: ${is_gpu:${device}} - fp16: false - enable_transformers_specific_optimizations: true - enable_gelu_approximation: false - disable_gelu_fusion: false - disable_layer_norm_fusion: false - disable_attention_fusion: false - disable_skip_layer_norm_fusion: true - disable_bias_skip_layer_norm_fusion: false - disable_bias_gelu_fusion: false - use_mask_index: false - no_attention_mask: false - disable_embed_layer_norm_fusion: true - disable_shape_inference: false - use_multi_head_attention: false - enable_gemm_fast_gelu_fusion: false - use_raw_attention_mask: false - disable_group_norm_fusion: true - disable_packed_kv: true - auto_optimization: null - auto_optimization_config: - for_gpu: ${is_gpu:${device}} - quantization: false - quantization_config: - is_static: false - format: QOperator - mode: IntegerOps - activations_dtype: QUInt8 - activations_symmetric: false - weights_dtype: QInt8 - weights_symmetric: true - per_channel: false - reduce_range: false - operators_to_quantize: - - MatMul - - Add - auto_quantization: null - auto_quantization_config: - is_static: false - calibration: ${requires_calibration:${backend.auto_quantization_config.is_static}, - ${backend.quantization_config.is_static}} - calibration_config: - dataset_name: glue - num_samples: 300 - dataset_config_name: sst2 - dataset_split: train - preprocess_batch: true - preprocess_class: optimum_benchmark.preprocessors.glue.GluePreprocessor - use_ortmodel: ${is_inference:${benchmark.name}} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.InferenceBenchmark - seed: 42 - memory: false - warmup_runs: 10 - benchmark_duration: 10 - input_shapes: - batch_size: 64 - sequence_length: 16 - num_choices: 1 - width: 64 - height: 64 - num_channels: 3 - point_batch_size: 3 - nb_points_per_image: 2 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: 10 -experiment_name: whisper_auto_opt(${backend.auto_optimization}) -model: openai/whisper-base -device: cuda:1 -task: ${infer_task:${model}, ${hub_kwargs.revision}} -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false - use_auth_token: false -environment: - optimum_version: 1.11.1.dev0 - transformers_version: 4.32.0.dev0 - accelerate_version: 0.22.0.dev0 - diffusers_version: 0.20.0.dev0 - python_version: 3.9.17 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 515637 diff --git a/examples/whisper/experiments/cuda_onnxruntime_64_10/whisper_auto_opt(None)/0/.hydra/hydra.yaml b/examples/whisper/experiments/cuda_onnxruntime_64_10/whisper_auto_opt(None)/0/.hydra/hydra.yaml deleted file mode 100644 index 2330730d..00000000 --- a/examples/whisper/experiments/cuda_onnxruntime_64_10/whisper_auto_opt(None)/0/.hydra/hydra.yaml +++ /dev/null @@ -1,177 +0,0 @@ -hydra: - run: - dir: 
experiments/${device}_${backend.name}_${benchmark.input_shapes.batch_size}_${benchmark.new_tokens}/${experiment_name} - sweep: - dir: experiments/${device}_${backend.name}_${benchmark.input_shapes.batch_size}_${benchmark.new_tokens}/${experiment_name} - subdir: ${hydra.job.num} - launcher: - _target_: hydra._internal.core_plugins.basic_launcher.BasicLauncher - sweeper: - _target_: hydra._internal.core_plugins.basic_sweeper.BasicSweeper - max_batch_size: null - params: - benchmark.new_tokens: 10,100 - benchmark.input_shapes.batch_size: 64,128 - backend.auto_optimization: null,O1,O2,O3,O4 - help: - app_name: ${hydra.job.name} - header: '${hydra.help.app_name} is powered by Hydra. - - ' - footer: 'Powered by Hydra (https://hydra.cc) - - Use --hydra-help to view Hydra specific help - - ' - template: '${hydra.help.header} - - == Configuration groups == - - Compose your configuration from those groups (group=option) - - - $APP_CONFIG_GROUPS - - - == Config == - - Override anything in the config (foo.bar=value) - - - $CONFIG - - - ${hydra.help.footer} - - ' - hydra_help: - template: 'Hydra (${hydra.runtime.version}) - - See https://hydra.cc for more info. - - - == Flags == - - $FLAGS_HELP - - - == Configuration groups == - - Compose your configuration from those groups (For example, append hydra/job_logging=disabled - to command line) - - - $HYDRA_CONFIG_GROUPS - - - Use ''--cfg hydra'' to Show the Hydra config. - - ' - hydra_help: ??? - hydra_logging: - version: 1 - formatters: - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s' - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - root: - level: INFO - handlers: - - console - disable_existing_loggers: false - job_logging: - version: 1 - formatters: - simple: - format: '[%(asctime)s][%(name)s][%(levelname)s] - %(message)s' - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s] - - %(message)s' - log_colors: - DEBUG: purple - INFO: green - WARNING: yellow - ERROR: red - CRITICAL: red - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - file: - class: logging.FileHandler - formatter: simple - filename: ${hydra.job.name}.log - root: - level: INFO - handlers: - - console - - file - disable_existing_loggers: false - env: {} - mode: MULTIRUN - searchpath: [] - callbacks: {} - output_subdir: .hydra - overrides: - hydra: - - hydra.mode=MULTIRUN - task: - - benchmark.new_tokens=10 - - benchmark.input_shapes.batch_size=64 - - backend.auto_optimization=null - - device=cuda\:1 - job: - name: main - chdir: true - override_dirname: backend.auto_optimization=null,benchmark.input_shapes.batch_size=64,benchmark.new_tokens=10,device=cuda\:1 - id: '0' - num: 0 - config_name: whisper_auto_opt - env_set: {} - env_copy: [] - config: - override_dirname: - kv_sep: '=' - item_sep: ',' - exclude_keys: [] - runtime: - version: 1.3.2 - version_base: '1.3' - cwd: /home/ilyas/optimum-benchmark/examples/whisper - config_sources: - - path: hydra.conf - schema: pkg - provider: hydra - - path: optimum_benchmark - schema: pkg - provider: main - - path: hydra_plugins.hydra_colorlog.conf - schema: pkg - provider: hydra-colorlog - - path: /home/ilyas/optimum-benchmark/examples/whisper/configs - schema: file - provider: command-line - - path: '' - schema: structured - provider: schema - output_dir: 
/home/ilyas/optimum-benchmark/examples/whisper/experiments/cuda:1_onnxruntime_64_10/whisper_auto_opt(None)/0 - choices: - benchmark: inference - backend: onnxruntime - hydra/env: default - hydra/callbacks: null - hydra/job_logging: colorlog - hydra/hydra_logging: colorlog - hydra/hydra_help: default - hydra/help: default - hydra/sweeper: basic - hydra/launcher: basic - hydra/output: default - verbose: false diff --git a/examples/whisper/experiments/cuda_onnxruntime_64_10/whisper_auto_opt(None)/0/.hydra/overrides.yaml b/examples/whisper/experiments/cuda_onnxruntime_64_10/whisper_auto_opt(None)/0/.hydra/overrides.yaml deleted file mode 100644 index 36d158bf..00000000 --- a/examples/whisper/experiments/cuda_onnxruntime_64_10/whisper_auto_opt(None)/0/.hydra/overrides.yaml +++ /dev/null @@ -1,4 +0,0 @@ -- benchmark.new_tokens=10 -- benchmark.input_shapes.batch_size=64 -- backend.auto_optimization=null -- device=cuda\:1 diff --git a/examples/whisper/experiments/cuda_onnxruntime_64_10/whisper_auto_opt(None)/0/hydra_config.yaml b/examples/whisper/experiments/cuda_onnxruntime_64_10/whisper_auto_opt(None)/0/hydra_config.yaml deleted file mode 100644 index 2180e94b..00000000 --- a/examples/whisper/experiments/cuda_onnxruntime_64_10/whisper_auto_opt(None)/0/hydra_config.yaml +++ /dev/null @@ -1,109 +0,0 @@ -backend: - name: onnxruntime - version: 1.15.1 - _target_: optimum_benchmark.backends.onnxruntime.ORTBackend - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: false - continous_isolation_check: false - delete_cache: false - export: true - no_weights: false - use_merged: false - use_cache: true - torch_dtype: null - provider: CUDAExecutionProvider - device_id: 1 - use_io_binding: true - enable_profiling: false - optimization: false - optimization_config: - optimization_level: 1 - optimize_for_gpu: true - fp16: false - enable_transformers_specific_optimizations: true - enable_gelu_approximation: false - disable_gelu_fusion: false - disable_layer_norm_fusion: false - disable_attention_fusion: false - disable_skip_layer_norm_fusion: true - disable_bias_skip_layer_norm_fusion: false - disable_bias_gelu_fusion: false - use_mask_index: false - no_attention_mask: false - disable_embed_layer_norm_fusion: true - disable_shape_inference: false - use_multi_head_attention: false - enable_gemm_fast_gelu_fusion: false - use_raw_attention_mask: false - disable_group_norm_fusion: true - disable_packed_kv: true - auto_optimization: null - auto_optimization_config: - for_gpu: true - quantization: false - quantization_config: - is_static: false - format: QOperator - mode: IntegerOps - activations_dtype: QUInt8 - activations_symmetric: false - weights_dtype: QInt8 - weights_symmetric: true - per_channel: false - reduce_range: false - operators_to_quantize: - - MatMul - - Add - auto_quantization: null - auto_quantization_config: - is_static: false - calibration: false - calibration_config: - dataset_name: glue - num_samples: 300 - dataset_config_name: sst2 - dataset_split: train - preprocess_batch: true - preprocess_class: optimum_benchmark.preprocessors.glue.GluePreprocessor - use_ortmodel: true -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.InferenceBenchmark - seed: 42 - memory: false - warmup_runs: 10 - benchmark_duration: 10 - input_shapes: - batch_size: 64 - sequence_length: 16 - num_choices: 1 - width: 64 - height: 64 - num_channels: 3 - point_batch_size: 3 - nb_points_per_image: 2 - feature_size: 80 - nb_max_frames: 3000 - 
audio_sequence_length: 16000 - new_tokens: 10 -experiment_name: whisper_auto_opt(None) -model: openai/whisper-base -device: cuda:1 -task: automatic-speech-recognition -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false - use_auth_token: false -environment: - optimum_version: 1.11.1.dev0 - transformers_version: 4.32.0.dev0 - accelerate_version: 0.22.0.dev0 - diffusers_version: 0.20.0.dev0 - python_version: 3.9.17 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 515637 diff --git a/examples/whisper/experiments/cuda_onnxruntime_64_10/whisper_auto_opt(None)/0/inference_results.csv b/examples/whisper/experiments/cuda_onnxruntime_64_10/whisper_auto_opt(None)/0/inference_results.csv deleted file mode 100644 index b3d26ba0..00000000 --- a/examples/whisper/experiments/cuda_onnxruntime_64_10/whisper_auto_opt(None)/0/inference_results.csv +++ /dev/null @@ -1,2 +0,0 @@ -,forward.latency(s),forward.throughput(samples/s),generate.latency(s),generate.throughput(tokens/s) -0,0.325,197.0,0.359,1780.0 diff --git a/examples/whisper/experiments/cuda_onnxruntime_64_10/whisper_auto_opt(O1)/1/.hydra/config.yaml b/examples/whisper/experiments/cuda_onnxruntime_64_10/whisper_auto_opt(O1)/1/.hydra/config.yaml deleted file mode 100644 index 9b99b8a7..00000000 --- a/examples/whisper/experiments/cuda_onnxruntime_64_10/whisper_auto_opt(O1)/1/.hydra/config.yaml +++ /dev/null @@ -1,110 +0,0 @@ -backend: - name: onnxruntime - version: 1.15.1 - _target_: optimum_benchmark.backends.onnxruntime.ORTBackend - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: false - continous_isolation_check: false - delete_cache: false - export: true - no_weights: false - use_merged: false - use_cache: true - torch_dtype: null - provider: ${infer_provider:${device}} - device_id: ${infer_device_id:${device}} - use_io_binding: ${is_gpu:${device}} - enable_profiling: ${is_profiling:${benchmark.name}} - optimization: false - optimization_config: - optimization_level: 1 - optimize_for_gpu: ${is_gpu:${device}} - fp16: false - enable_transformers_specific_optimizations: true - enable_gelu_approximation: false - disable_gelu_fusion: false - disable_layer_norm_fusion: false - disable_attention_fusion: false - disable_skip_layer_norm_fusion: true - disable_bias_skip_layer_norm_fusion: false - disable_bias_gelu_fusion: false - use_mask_index: false - no_attention_mask: false - disable_embed_layer_norm_fusion: true - disable_shape_inference: false - use_multi_head_attention: false - enable_gemm_fast_gelu_fusion: false - use_raw_attention_mask: false - disable_group_norm_fusion: true - disable_packed_kv: true - auto_optimization: O1 - auto_optimization_config: - for_gpu: ${is_gpu:${device}} - quantization: false - quantization_config: - is_static: false - format: QOperator - mode: IntegerOps - activations_dtype: QUInt8 - activations_symmetric: false - weights_dtype: QInt8 - weights_symmetric: true - per_channel: false - reduce_range: false - operators_to_quantize: - - MatMul - - Add - auto_quantization: null - auto_quantization_config: - is_static: false - calibration: ${requires_calibration:${backend.auto_quantization_config.is_static}, - ${backend.quantization_config.is_static}} - calibration_config: - dataset_name: glue - num_samples: 300 - dataset_config_name: sst2 - dataset_split: train - preprocess_batch: true - preprocess_class: optimum_benchmark.preprocessors.glue.GluePreprocessor - use_ortmodel: ${is_inference:${benchmark.name}} 
-benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.InferenceBenchmark - seed: 42 - memory: false - warmup_runs: 10 - benchmark_duration: 10 - input_shapes: - batch_size: 64 - sequence_length: 16 - num_choices: 1 - width: 64 - height: 64 - num_channels: 3 - point_batch_size: 3 - nb_points_per_image: 2 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: 10 -experiment_name: whisper_auto_opt(${backend.auto_optimization}) -model: openai/whisper-base -device: cuda:1 -task: ${infer_task:${model}, ${hub_kwargs.revision}} -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false - use_auth_token: false -environment: - optimum_version: 1.11.1.dev0 - transformers_version: 4.32.0.dev0 - accelerate_version: 0.22.0.dev0 - diffusers_version: 0.20.0.dev0 - python_version: 3.9.17 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 515637 diff --git a/examples/whisper/experiments/cuda_onnxruntime_64_10/whisper_auto_opt(O1)/1/.hydra/hydra.yaml b/examples/whisper/experiments/cuda_onnxruntime_64_10/whisper_auto_opt(O1)/1/.hydra/hydra.yaml deleted file mode 100644 index 82f48574..00000000 --- a/examples/whisper/experiments/cuda_onnxruntime_64_10/whisper_auto_opt(O1)/1/.hydra/hydra.yaml +++ /dev/null @@ -1,177 +0,0 @@ -hydra: - run: - dir: experiments/${device}_${backend.name}_${benchmark.input_shapes.batch_size}_${benchmark.new_tokens}/${experiment_name} - sweep: - dir: experiments/${device}_${backend.name}_${benchmark.input_shapes.batch_size}_${benchmark.new_tokens}/${experiment_name} - subdir: ${hydra.job.num} - launcher: - _target_: hydra._internal.core_plugins.basic_launcher.BasicLauncher - sweeper: - _target_: hydra._internal.core_plugins.basic_sweeper.BasicSweeper - max_batch_size: null - params: - benchmark.new_tokens: 10,100 - benchmark.input_shapes.batch_size: 64,128 - backend.auto_optimization: null,O1,O2,O3,O4 - help: - app_name: ${hydra.job.name} - header: '${hydra.help.app_name} is powered by Hydra. - - ' - footer: 'Powered by Hydra (https://hydra.cc) - - Use --hydra-help to view Hydra specific help - - ' - template: '${hydra.help.header} - - == Configuration groups == - - Compose your configuration from those groups (group=option) - - - $APP_CONFIG_GROUPS - - - == Config == - - Override anything in the config (foo.bar=value) - - - $CONFIG - - - ${hydra.help.footer} - - ' - hydra_help: - template: 'Hydra (${hydra.runtime.version}) - - See https://hydra.cc for more info. - - - == Flags == - - $FLAGS_HELP - - - == Configuration groups == - - Compose your configuration from those groups (For example, append hydra/job_logging=disabled - to command line) - - - $HYDRA_CONFIG_GROUPS - - - Use ''--cfg hydra'' to Show the Hydra config. - - ' - hydra_help: ??? 
- hydra_logging: - version: 1 - formatters: - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s' - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - root: - level: INFO - handlers: - - console - disable_existing_loggers: false - job_logging: - version: 1 - formatters: - simple: - format: '[%(asctime)s][%(name)s][%(levelname)s] - %(message)s' - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s] - - %(message)s' - log_colors: - DEBUG: purple - INFO: green - WARNING: yellow - ERROR: red - CRITICAL: red - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - file: - class: logging.FileHandler - formatter: simple - filename: ${hydra.job.name}.log - root: - level: INFO - handlers: - - console - - file - disable_existing_loggers: false - env: {} - mode: MULTIRUN - searchpath: [] - callbacks: {} - output_subdir: .hydra - overrides: - hydra: - - hydra.mode=MULTIRUN - task: - - benchmark.new_tokens=10 - - benchmark.input_shapes.batch_size=64 - - backend.auto_optimization=O1 - - device=cuda\:1 - job: - name: main - chdir: true - override_dirname: backend.auto_optimization=O1,benchmark.input_shapes.batch_size=64,benchmark.new_tokens=10,device=cuda\:1 - id: '1' - num: 1 - config_name: whisper_auto_opt - env_set: {} - env_copy: [] - config: - override_dirname: - kv_sep: '=' - item_sep: ',' - exclude_keys: [] - runtime: - version: 1.3.2 - version_base: '1.3' - cwd: /home/ilyas/optimum-benchmark/examples/whisper - config_sources: - - path: hydra.conf - schema: pkg - provider: hydra - - path: optimum_benchmark - schema: pkg - provider: main - - path: hydra_plugins.hydra_colorlog.conf - schema: pkg - provider: hydra-colorlog - - path: /home/ilyas/optimum-benchmark/examples/whisper/configs - schema: file - provider: command-line - - path: '' - schema: structured - provider: schema - output_dir: /home/ilyas/optimum-benchmark/examples/whisper/experiments/cuda:1_onnxruntime_64_10/whisper_auto_opt(O1)/1 - choices: - benchmark: inference - backend: onnxruntime - hydra/env: default - hydra/callbacks: null - hydra/job_logging: colorlog - hydra/hydra_logging: colorlog - hydra/hydra_help: default - hydra/help: default - hydra/sweeper: basic - hydra/launcher: basic - hydra/output: default - verbose: false diff --git a/examples/whisper/experiments/cuda_onnxruntime_64_10/whisper_auto_opt(O1)/1/.hydra/overrides.yaml b/examples/whisper/experiments/cuda_onnxruntime_64_10/whisper_auto_opt(O1)/1/.hydra/overrides.yaml deleted file mode 100644 index 0fec0b72..00000000 --- a/examples/whisper/experiments/cuda_onnxruntime_64_10/whisper_auto_opt(O1)/1/.hydra/overrides.yaml +++ /dev/null @@ -1,4 +0,0 @@ -- benchmark.new_tokens=10 -- benchmark.input_shapes.batch_size=64 -- backend.auto_optimization=O1 -- device=cuda\:1 diff --git a/examples/whisper/experiments/cuda_onnxruntime_64_10/whisper_auto_opt(O1)/1/hydra_config.yaml b/examples/whisper/experiments/cuda_onnxruntime_64_10/whisper_auto_opt(O1)/1/hydra_config.yaml deleted file mode 100644 index 167e904b..00000000 --- a/examples/whisper/experiments/cuda_onnxruntime_64_10/whisper_auto_opt(O1)/1/hydra_config.yaml +++ /dev/null @@ -1,109 +0,0 @@ -backend: - name: onnxruntime - version: 1.15.1 - _target_: optimum_benchmark.backends.onnxruntime.ORTBackend - inter_op_num_threads: null - intra_op_num_threads: null - 
initial_isolation_check: false - continous_isolation_check: false - delete_cache: false - export: true - no_weights: false - use_merged: false - use_cache: true - torch_dtype: null - provider: CUDAExecutionProvider - device_id: 1 - use_io_binding: true - enable_profiling: false - optimization: false - optimization_config: - optimization_level: 1 - optimize_for_gpu: true - fp16: false - enable_transformers_specific_optimizations: true - enable_gelu_approximation: false - disable_gelu_fusion: false - disable_layer_norm_fusion: false - disable_attention_fusion: false - disable_skip_layer_norm_fusion: true - disable_bias_skip_layer_norm_fusion: false - disable_bias_gelu_fusion: false - use_mask_index: false - no_attention_mask: false - disable_embed_layer_norm_fusion: true - disable_shape_inference: false - use_multi_head_attention: false - enable_gemm_fast_gelu_fusion: false - use_raw_attention_mask: false - disable_group_norm_fusion: true - disable_packed_kv: true - auto_optimization: O1 - auto_optimization_config: - for_gpu: true - quantization: false - quantization_config: - is_static: false - format: QOperator - mode: IntegerOps - activations_dtype: QUInt8 - activations_symmetric: false - weights_dtype: QInt8 - weights_symmetric: true - per_channel: false - reduce_range: false - operators_to_quantize: - - MatMul - - Add - auto_quantization: null - auto_quantization_config: - is_static: false - calibration: false - calibration_config: - dataset_name: glue - num_samples: 300 - dataset_config_name: sst2 - dataset_split: train - preprocess_batch: true - preprocess_class: optimum_benchmark.preprocessors.glue.GluePreprocessor - use_ortmodel: true -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.InferenceBenchmark - seed: 42 - memory: false - warmup_runs: 10 - benchmark_duration: 10 - input_shapes: - batch_size: 64 - sequence_length: 16 - num_choices: 1 - width: 64 - height: 64 - num_channels: 3 - point_batch_size: 3 - nb_points_per_image: 2 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: 10 -experiment_name: whisper_auto_opt(O1) -model: openai/whisper-base -device: cuda:1 -task: automatic-speech-recognition -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false - use_auth_token: false -environment: - optimum_version: 1.11.1.dev0 - transformers_version: 4.32.0.dev0 - accelerate_version: 0.22.0.dev0 - diffusers_version: 0.20.0.dev0 - python_version: 3.9.17 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 515637 diff --git a/examples/whisper/experiments/cuda_onnxruntime_64_10/whisper_auto_opt(O1)/1/inference_results.csv b/examples/whisper/experiments/cuda_onnxruntime_64_10/whisper_auto_opt(O1)/1/inference_results.csv deleted file mode 100644 index f35e5774..00000000 --- a/examples/whisper/experiments/cuda_onnxruntime_64_10/whisper_auto_opt(O1)/1/inference_results.csv +++ /dev/null @@ -1,2 +0,0 @@ -,forward.latency(s),forward.throughput(samples/s),generate.latency(s),generate.throughput(tokens/s) -0,0.325,197.0,0.362,1770.0 diff --git a/examples/whisper/experiments/cuda_onnxruntime_64_10/whisper_auto_opt(O2)/2/.hydra/config.yaml b/examples/whisper/experiments/cuda_onnxruntime_64_10/whisper_auto_opt(O2)/2/.hydra/config.yaml deleted file mode 100644 index ea8da583..00000000 --- a/examples/whisper/experiments/cuda_onnxruntime_64_10/whisper_auto_opt(O2)/2/.hydra/config.yaml +++ /dev/null @@ -1,110 +0,0 @@ -backend: - name: onnxruntime - version: 1.15.1 
- _target_: optimum_benchmark.backends.onnxruntime.ORTBackend - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: false - continous_isolation_check: false - delete_cache: false - export: true - no_weights: false - use_merged: false - use_cache: true - torch_dtype: null - provider: ${infer_provider:${device}} - device_id: ${infer_device_id:${device}} - use_io_binding: ${is_gpu:${device}} - enable_profiling: ${is_profiling:${benchmark.name}} - optimization: false - optimization_config: - optimization_level: 1 - optimize_for_gpu: ${is_gpu:${device}} - fp16: false - enable_transformers_specific_optimizations: true - enable_gelu_approximation: false - disable_gelu_fusion: false - disable_layer_norm_fusion: false - disable_attention_fusion: false - disable_skip_layer_norm_fusion: true - disable_bias_skip_layer_norm_fusion: false - disable_bias_gelu_fusion: false - use_mask_index: false - no_attention_mask: false - disable_embed_layer_norm_fusion: true - disable_shape_inference: false - use_multi_head_attention: false - enable_gemm_fast_gelu_fusion: false - use_raw_attention_mask: false - disable_group_norm_fusion: true - disable_packed_kv: true - auto_optimization: O2 - auto_optimization_config: - for_gpu: ${is_gpu:${device}} - quantization: false - quantization_config: - is_static: false - format: QOperator - mode: IntegerOps - activations_dtype: QUInt8 - activations_symmetric: false - weights_dtype: QInt8 - weights_symmetric: true - per_channel: false - reduce_range: false - operators_to_quantize: - - MatMul - - Add - auto_quantization: null - auto_quantization_config: - is_static: false - calibration: ${requires_calibration:${backend.auto_quantization_config.is_static}, - ${backend.quantization_config.is_static}} - calibration_config: - dataset_name: glue - num_samples: 300 - dataset_config_name: sst2 - dataset_split: train - preprocess_batch: true - preprocess_class: optimum_benchmark.preprocessors.glue.GluePreprocessor - use_ortmodel: ${is_inference:${benchmark.name}} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.InferenceBenchmark - seed: 42 - memory: false - warmup_runs: 10 - benchmark_duration: 10 - input_shapes: - batch_size: 64 - sequence_length: 16 - num_choices: 1 - width: 64 - height: 64 - num_channels: 3 - point_batch_size: 3 - nb_points_per_image: 2 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: 10 -experiment_name: whisper_auto_opt(${backend.auto_optimization}) -model: openai/whisper-base -device: cuda:1 -task: ${infer_task:${model}, ${hub_kwargs.revision}} -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false - use_auth_token: false -environment: - optimum_version: 1.11.1.dev0 - transformers_version: 4.32.0.dev0 - accelerate_version: 0.22.0.dev0 - diffusers_version: 0.20.0.dev0 - python_version: 3.9.17 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 515637 diff --git a/examples/whisper/experiments/cuda_onnxruntime_64_10/whisper_auto_opt(O2)/2/.hydra/hydra.yaml b/examples/whisper/experiments/cuda_onnxruntime_64_10/whisper_auto_opt(O2)/2/.hydra/hydra.yaml deleted file mode 100644 index 8b2a3ef1..00000000 --- a/examples/whisper/experiments/cuda_onnxruntime_64_10/whisper_auto_opt(O2)/2/.hydra/hydra.yaml +++ /dev/null @@ -1,177 +0,0 @@ -hydra: - run: - dir: experiments/${device}_${backend.name}_${benchmark.input_shapes.batch_size}_${benchmark.new_tokens}/${experiment_name} - sweep: - dir: 
experiments/${device}_${backend.name}_${benchmark.input_shapes.batch_size}_${benchmark.new_tokens}/${experiment_name} - subdir: ${hydra.job.num} - launcher: - _target_: hydra._internal.core_plugins.basic_launcher.BasicLauncher - sweeper: - _target_: hydra._internal.core_plugins.basic_sweeper.BasicSweeper - max_batch_size: null - params: - benchmark.new_tokens: 10,100 - benchmark.input_shapes.batch_size: 64,128 - backend.auto_optimization: null,O1,O2,O3,O4 - help: - app_name: ${hydra.job.name} - header: '${hydra.help.app_name} is powered by Hydra. - - ' - footer: 'Powered by Hydra (https://hydra.cc) - - Use --hydra-help to view Hydra specific help - - ' - template: '${hydra.help.header} - - == Configuration groups == - - Compose your configuration from those groups (group=option) - - - $APP_CONFIG_GROUPS - - - == Config == - - Override anything in the config (foo.bar=value) - - - $CONFIG - - - ${hydra.help.footer} - - ' - hydra_help: - template: 'Hydra (${hydra.runtime.version}) - - See https://hydra.cc for more info. - - - == Flags == - - $FLAGS_HELP - - - == Configuration groups == - - Compose your configuration from those groups (For example, append hydra/job_logging=disabled - to command line) - - - $HYDRA_CONFIG_GROUPS - - - Use ''--cfg hydra'' to Show the Hydra config. - - ' - hydra_help: ??? - hydra_logging: - version: 1 - formatters: - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s' - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - root: - level: INFO - handlers: - - console - disable_existing_loggers: false - job_logging: - version: 1 - formatters: - simple: - format: '[%(asctime)s][%(name)s][%(levelname)s] - %(message)s' - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s] - - %(message)s' - log_colors: - DEBUG: purple - INFO: green - WARNING: yellow - ERROR: red - CRITICAL: red - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - file: - class: logging.FileHandler - formatter: simple - filename: ${hydra.job.name}.log - root: - level: INFO - handlers: - - console - - file - disable_existing_loggers: false - env: {} - mode: MULTIRUN - searchpath: [] - callbacks: {} - output_subdir: .hydra - overrides: - hydra: - - hydra.mode=MULTIRUN - task: - - benchmark.new_tokens=10 - - benchmark.input_shapes.batch_size=64 - - backend.auto_optimization=O2 - - device=cuda\:1 - job: - name: main - chdir: true - override_dirname: backend.auto_optimization=O2,benchmark.input_shapes.batch_size=64,benchmark.new_tokens=10,device=cuda\:1 - id: '2' - num: 2 - config_name: whisper_auto_opt - env_set: {} - env_copy: [] - config: - override_dirname: - kv_sep: '=' - item_sep: ',' - exclude_keys: [] - runtime: - version: 1.3.2 - version_base: '1.3' - cwd: /home/ilyas/optimum-benchmark/examples/whisper - config_sources: - - path: hydra.conf - schema: pkg - provider: hydra - - path: optimum_benchmark - schema: pkg - provider: main - - path: hydra_plugins.hydra_colorlog.conf - schema: pkg - provider: hydra-colorlog - - path: /home/ilyas/optimum-benchmark/examples/whisper/configs - schema: file - provider: command-line - - path: '' - schema: structured - provider: schema - output_dir: /home/ilyas/optimum-benchmark/examples/whisper/experiments/cuda:1_onnxruntime_64_10/whisper_auto_opt(O2)/2 - choices: - benchmark: inference - backend: 
onnxruntime - hydra/env: default - hydra/callbacks: null - hydra/job_logging: colorlog - hydra/hydra_logging: colorlog - hydra/hydra_help: default - hydra/help: default - hydra/sweeper: basic - hydra/launcher: basic - hydra/output: default - verbose: false diff --git a/examples/whisper/experiments/cuda_onnxruntime_64_10/whisper_auto_opt(O2)/2/.hydra/overrides.yaml b/examples/whisper/experiments/cuda_onnxruntime_64_10/whisper_auto_opt(O2)/2/.hydra/overrides.yaml deleted file mode 100644 index 254f2807..00000000 --- a/examples/whisper/experiments/cuda_onnxruntime_64_10/whisper_auto_opt(O2)/2/.hydra/overrides.yaml +++ /dev/null @@ -1,4 +0,0 @@ -- benchmark.new_tokens=10 -- benchmark.input_shapes.batch_size=64 -- backend.auto_optimization=O2 -- device=cuda\:1 diff --git a/examples/whisper/experiments/cuda_onnxruntime_64_10/whisper_auto_opt(O2)/2/hydra_config.yaml b/examples/whisper/experiments/cuda_onnxruntime_64_10/whisper_auto_opt(O2)/2/hydra_config.yaml deleted file mode 100644 index 3bfd6858..00000000 --- a/examples/whisper/experiments/cuda_onnxruntime_64_10/whisper_auto_opt(O2)/2/hydra_config.yaml +++ /dev/null @@ -1,109 +0,0 @@ -backend: - name: onnxruntime - version: 1.15.1 - _target_: optimum_benchmark.backends.onnxruntime.ORTBackend - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: false - continous_isolation_check: false - delete_cache: false - export: true - no_weights: false - use_merged: false - use_cache: true - torch_dtype: null - provider: CUDAExecutionProvider - device_id: 1 - use_io_binding: true - enable_profiling: false - optimization: false - optimization_config: - optimization_level: 1 - optimize_for_gpu: true - fp16: false - enable_transformers_specific_optimizations: true - enable_gelu_approximation: false - disable_gelu_fusion: false - disable_layer_norm_fusion: false - disable_attention_fusion: false - disable_skip_layer_norm_fusion: true - disable_bias_skip_layer_norm_fusion: false - disable_bias_gelu_fusion: false - use_mask_index: false - no_attention_mask: false - disable_embed_layer_norm_fusion: true - disable_shape_inference: false - use_multi_head_attention: false - enable_gemm_fast_gelu_fusion: false - use_raw_attention_mask: false - disable_group_norm_fusion: true - disable_packed_kv: true - auto_optimization: O2 - auto_optimization_config: - for_gpu: true - quantization: false - quantization_config: - is_static: false - format: QOperator - mode: IntegerOps - activations_dtype: QUInt8 - activations_symmetric: false - weights_dtype: QInt8 - weights_symmetric: true - per_channel: false - reduce_range: false - operators_to_quantize: - - MatMul - - Add - auto_quantization: null - auto_quantization_config: - is_static: false - calibration: false - calibration_config: - dataset_name: glue - num_samples: 300 - dataset_config_name: sst2 - dataset_split: train - preprocess_batch: true - preprocess_class: optimum_benchmark.preprocessors.glue.GluePreprocessor - use_ortmodel: true -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.InferenceBenchmark - seed: 42 - memory: false - warmup_runs: 10 - benchmark_duration: 10 - input_shapes: - batch_size: 64 - sequence_length: 16 - num_choices: 1 - width: 64 - height: 64 - num_channels: 3 - point_batch_size: 3 - nb_points_per_image: 2 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: 10 -experiment_name: whisper_auto_opt(O2) -model: openai/whisper-base -device: cuda:1 -task: automatic-speech-recognition -hub_kwargs: - revision: 
main - cache_dir: null - force_download: false - local_files_only: false - use_auth_token: false -environment: - optimum_version: 1.11.1.dev0 - transformers_version: 4.32.0.dev0 - accelerate_version: 0.22.0.dev0 - diffusers_version: 0.20.0.dev0 - python_version: 3.9.17 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 515637 diff --git a/examples/whisper/experiments/cuda_onnxruntime_64_10/whisper_auto_opt(O2)/2/inference_results.csv b/examples/whisper/experiments/cuda_onnxruntime_64_10/whisper_auto_opt(O2)/2/inference_results.csv deleted file mode 100644 index 83b7c49a..00000000 --- a/examples/whisper/experiments/cuda_onnxruntime_64_10/whisper_auto_opt(O2)/2/inference_results.csv +++ /dev/null @@ -1,2 +0,0 @@ -,forward.latency(s),forward.throughput(samples/s),generate.latency(s),generate.throughput(tokens/s) -0,0.171,374.0,0.203,3150.0 diff --git a/examples/whisper/experiments/cuda_onnxruntime_64_10/whisper_auto_opt(O3)/3/.hydra/config.yaml b/examples/whisper/experiments/cuda_onnxruntime_64_10/whisper_auto_opt(O3)/3/.hydra/config.yaml deleted file mode 100644 index 115e66c3..00000000 --- a/examples/whisper/experiments/cuda_onnxruntime_64_10/whisper_auto_opt(O3)/3/.hydra/config.yaml +++ /dev/null @@ -1,110 +0,0 @@ -backend: - name: onnxruntime - version: 1.15.1 - _target_: optimum_benchmark.backends.onnxruntime.ORTBackend - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: false - continous_isolation_check: false - delete_cache: false - export: true - no_weights: false - use_merged: false - use_cache: true - torch_dtype: null - provider: ${infer_provider:${device}} - device_id: ${infer_device_id:${device}} - use_io_binding: ${is_gpu:${device}} - enable_profiling: ${is_profiling:${benchmark.name}} - optimization: false - optimization_config: - optimization_level: 1 - optimize_for_gpu: ${is_gpu:${device}} - fp16: false - enable_transformers_specific_optimizations: true - enable_gelu_approximation: false - disable_gelu_fusion: false - disable_layer_norm_fusion: false - disable_attention_fusion: false - disable_skip_layer_norm_fusion: true - disable_bias_skip_layer_norm_fusion: false - disable_bias_gelu_fusion: false - use_mask_index: false - no_attention_mask: false - disable_embed_layer_norm_fusion: true - disable_shape_inference: false - use_multi_head_attention: false - enable_gemm_fast_gelu_fusion: false - use_raw_attention_mask: false - disable_group_norm_fusion: true - disable_packed_kv: true - auto_optimization: O3 - auto_optimization_config: - for_gpu: ${is_gpu:${device}} - quantization: false - quantization_config: - is_static: false - format: QOperator - mode: IntegerOps - activations_dtype: QUInt8 - activations_symmetric: false - weights_dtype: QInt8 - weights_symmetric: true - per_channel: false - reduce_range: false - operators_to_quantize: - - MatMul - - Add - auto_quantization: null - auto_quantization_config: - is_static: false - calibration: ${requires_calibration:${backend.auto_quantization_config.is_static}, - ${backend.quantization_config.is_static}} - calibration_config: - dataset_name: glue - num_samples: 300 - dataset_config_name: sst2 - dataset_split: train - preprocess_batch: true - preprocess_class: optimum_benchmark.preprocessors.glue.GluePreprocessor - use_ortmodel: ${is_inference:${benchmark.name}} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.InferenceBenchmark - seed: 42 - memory: false - warmup_runs: 10 - benchmark_duration: 10 - input_shapes: - batch_size: 
64 - sequence_length: 16 - num_choices: 1 - width: 64 - height: 64 - num_channels: 3 - point_batch_size: 3 - nb_points_per_image: 2 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: 10 -experiment_name: whisper_auto_opt(${backend.auto_optimization}) -model: openai/whisper-base -device: cuda:1 -task: ${infer_task:${model}, ${hub_kwargs.revision}} -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false - use_auth_token: false -environment: - optimum_version: 1.11.1.dev0 - transformers_version: 4.32.0.dev0 - accelerate_version: 0.22.0.dev0 - diffusers_version: 0.20.0.dev0 - python_version: 3.9.17 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 515637 diff --git a/examples/whisper/experiments/cuda_onnxruntime_64_10/whisper_auto_opt(O3)/3/.hydra/hydra.yaml b/examples/whisper/experiments/cuda_onnxruntime_64_10/whisper_auto_opt(O3)/3/.hydra/hydra.yaml deleted file mode 100644 index 28f3f1af..00000000 --- a/examples/whisper/experiments/cuda_onnxruntime_64_10/whisper_auto_opt(O3)/3/.hydra/hydra.yaml +++ /dev/null @@ -1,177 +0,0 @@ -hydra: - run: - dir: experiments/${device}_${backend.name}_${benchmark.input_shapes.batch_size}_${benchmark.new_tokens}/${experiment_name} - sweep: - dir: experiments/${device}_${backend.name}_${benchmark.input_shapes.batch_size}_${benchmark.new_tokens}/${experiment_name} - subdir: ${hydra.job.num} - launcher: - _target_: hydra._internal.core_plugins.basic_launcher.BasicLauncher - sweeper: - _target_: hydra._internal.core_plugins.basic_sweeper.BasicSweeper - max_batch_size: null - params: - benchmark.new_tokens: 10,100 - benchmark.input_shapes.batch_size: 64,128 - backend.auto_optimization: null,O1,O2,O3,O4 - help: - app_name: ${hydra.job.name} - header: '${hydra.help.app_name} is powered by Hydra. - - ' - footer: 'Powered by Hydra (https://hydra.cc) - - Use --hydra-help to view Hydra specific help - - ' - template: '${hydra.help.header} - - == Configuration groups == - - Compose your configuration from those groups (group=option) - - - $APP_CONFIG_GROUPS - - - == Config == - - Override anything in the config (foo.bar=value) - - - $CONFIG - - - ${hydra.help.footer} - - ' - hydra_help: - template: 'Hydra (${hydra.runtime.version}) - - See https://hydra.cc for more info. - - - == Flags == - - $FLAGS_HELP - - - == Configuration groups == - - Compose your configuration from those groups (For example, append hydra/job_logging=disabled - to command line) - - - $HYDRA_CONFIG_GROUPS - - - Use ''--cfg hydra'' to Show the Hydra config. - - ' - hydra_help: ??? 
- hydra_logging: - version: 1 - formatters: - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s' - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - root: - level: INFO - handlers: - - console - disable_existing_loggers: false - job_logging: - version: 1 - formatters: - simple: - format: '[%(asctime)s][%(name)s][%(levelname)s] - %(message)s' - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s] - - %(message)s' - log_colors: - DEBUG: purple - INFO: green - WARNING: yellow - ERROR: red - CRITICAL: red - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - file: - class: logging.FileHandler - formatter: simple - filename: ${hydra.job.name}.log - root: - level: INFO - handlers: - - console - - file - disable_existing_loggers: false - env: {} - mode: MULTIRUN - searchpath: [] - callbacks: {} - output_subdir: .hydra - overrides: - hydra: - - hydra.mode=MULTIRUN - task: - - benchmark.new_tokens=10 - - benchmark.input_shapes.batch_size=64 - - backend.auto_optimization=O3 - - device=cuda\:1 - job: - name: main - chdir: true - override_dirname: backend.auto_optimization=O3,benchmark.input_shapes.batch_size=64,benchmark.new_tokens=10,device=cuda\:1 - id: '3' - num: 3 - config_name: whisper_auto_opt - env_set: {} - env_copy: [] - config: - override_dirname: - kv_sep: '=' - item_sep: ',' - exclude_keys: [] - runtime: - version: 1.3.2 - version_base: '1.3' - cwd: /home/ilyas/optimum-benchmark/examples/whisper - config_sources: - - path: hydra.conf - schema: pkg - provider: hydra - - path: optimum_benchmark - schema: pkg - provider: main - - path: hydra_plugins.hydra_colorlog.conf - schema: pkg - provider: hydra-colorlog - - path: /home/ilyas/optimum-benchmark/examples/whisper/configs - schema: file - provider: command-line - - path: '' - schema: structured - provider: schema - output_dir: /home/ilyas/optimum-benchmark/examples/whisper/experiments/cuda:1_onnxruntime_64_10/whisper_auto_opt(O3)/3 - choices: - benchmark: inference - backend: onnxruntime - hydra/env: default - hydra/callbacks: null - hydra/job_logging: colorlog - hydra/hydra_logging: colorlog - hydra/hydra_help: default - hydra/help: default - hydra/sweeper: basic - hydra/launcher: basic - hydra/output: default - verbose: false diff --git a/examples/whisper/experiments/cuda_onnxruntime_64_10/whisper_auto_opt(O3)/3/.hydra/overrides.yaml b/examples/whisper/experiments/cuda_onnxruntime_64_10/whisper_auto_opt(O3)/3/.hydra/overrides.yaml deleted file mode 100644 index 2a84d5af..00000000 --- a/examples/whisper/experiments/cuda_onnxruntime_64_10/whisper_auto_opt(O3)/3/.hydra/overrides.yaml +++ /dev/null @@ -1,4 +0,0 @@ -- benchmark.new_tokens=10 -- benchmark.input_shapes.batch_size=64 -- backend.auto_optimization=O3 -- device=cuda\:1 diff --git a/examples/whisper/experiments/cuda_onnxruntime_64_10/whisper_auto_opt(O3)/3/hydra_config.yaml b/examples/whisper/experiments/cuda_onnxruntime_64_10/whisper_auto_opt(O3)/3/hydra_config.yaml deleted file mode 100644 index 8e581058..00000000 --- a/examples/whisper/experiments/cuda_onnxruntime_64_10/whisper_auto_opt(O3)/3/hydra_config.yaml +++ /dev/null @@ -1,109 +0,0 @@ -backend: - name: onnxruntime - version: 1.15.1 - _target_: optimum_benchmark.backends.onnxruntime.ORTBackend - inter_op_num_threads: null - intra_op_num_threads: null - 
initial_isolation_check: false - continous_isolation_check: false - delete_cache: false - export: true - no_weights: false - use_merged: false - use_cache: true - torch_dtype: null - provider: CUDAExecutionProvider - device_id: 1 - use_io_binding: true - enable_profiling: false - optimization: false - optimization_config: - optimization_level: 1 - optimize_for_gpu: true - fp16: false - enable_transformers_specific_optimizations: true - enable_gelu_approximation: false - disable_gelu_fusion: false - disable_layer_norm_fusion: false - disable_attention_fusion: false - disable_skip_layer_norm_fusion: true - disable_bias_skip_layer_norm_fusion: false - disable_bias_gelu_fusion: false - use_mask_index: false - no_attention_mask: false - disable_embed_layer_norm_fusion: true - disable_shape_inference: false - use_multi_head_attention: false - enable_gemm_fast_gelu_fusion: false - use_raw_attention_mask: false - disable_group_norm_fusion: true - disable_packed_kv: true - auto_optimization: O3 - auto_optimization_config: - for_gpu: true - quantization: false - quantization_config: - is_static: false - format: QOperator - mode: IntegerOps - activations_dtype: QUInt8 - activations_symmetric: false - weights_dtype: QInt8 - weights_symmetric: true - per_channel: false - reduce_range: false - operators_to_quantize: - - MatMul - - Add - auto_quantization: null - auto_quantization_config: - is_static: false - calibration: false - calibration_config: - dataset_name: glue - num_samples: 300 - dataset_config_name: sst2 - dataset_split: train - preprocess_batch: true - preprocess_class: optimum_benchmark.preprocessors.glue.GluePreprocessor - use_ortmodel: true -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.InferenceBenchmark - seed: 42 - memory: false - warmup_runs: 10 - benchmark_duration: 10 - input_shapes: - batch_size: 64 - sequence_length: 16 - num_choices: 1 - width: 64 - height: 64 - num_channels: 3 - point_batch_size: 3 - nb_points_per_image: 2 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: 10 -experiment_name: whisper_auto_opt(O3) -model: openai/whisper-base -device: cuda:1 -task: automatic-speech-recognition -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false - use_auth_token: false -environment: - optimum_version: 1.11.1.dev0 - transformers_version: 4.32.0.dev0 - accelerate_version: 0.22.0.dev0 - diffusers_version: 0.20.0.dev0 - python_version: 3.9.17 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 515637 diff --git a/examples/whisper/experiments/cuda_onnxruntime_64_10/whisper_auto_opt(O3)/3/inference_results.csv b/examples/whisper/experiments/cuda_onnxruntime_64_10/whisper_auto_opt(O3)/3/inference_results.csv deleted file mode 100644 index e2041dcd..00000000 --- a/examples/whisper/experiments/cuda_onnxruntime_64_10/whisper_auto_opt(O3)/3/inference_results.csv +++ /dev/null @@ -1,2 +0,0 @@ -,forward.latency(s),forward.throughput(samples/s),generate.latency(s),generate.throughput(tokens/s) -0,0.169,379.0,0.2,3200.0 diff --git a/examples/whisper/experiments/cuda_onnxruntime_64_10/whisper_auto_opt(O4)/4/.hydra/config.yaml b/examples/whisper/experiments/cuda_onnxruntime_64_10/whisper_auto_opt(O4)/4/.hydra/config.yaml deleted file mode 100644 index 59d5cad1..00000000 --- a/examples/whisper/experiments/cuda_onnxruntime_64_10/whisper_auto_opt(O4)/4/.hydra/config.yaml +++ /dev/null @@ -1,110 +0,0 @@ -backend: - name: onnxruntime - version: 1.15.1 - 
_target_: optimum_benchmark.backends.onnxruntime.ORTBackend - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: false - continous_isolation_check: false - delete_cache: false - export: true - no_weights: false - use_merged: false - use_cache: true - torch_dtype: null - provider: ${infer_provider:${device}} - device_id: ${infer_device_id:${device}} - use_io_binding: ${is_gpu:${device}} - enable_profiling: ${is_profiling:${benchmark.name}} - optimization: false - optimization_config: - optimization_level: 1 - optimize_for_gpu: ${is_gpu:${device}} - fp16: false - enable_transformers_specific_optimizations: true - enable_gelu_approximation: false - disable_gelu_fusion: false - disable_layer_norm_fusion: false - disable_attention_fusion: false - disable_skip_layer_norm_fusion: true - disable_bias_skip_layer_norm_fusion: false - disable_bias_gelu_fusion: false - use_mask_index: false - no_attention_mask: false - disable_embed_layer_norm_fusion: true - disable_shape_inference: false - use_multi_head_attention: false - enable_gemm_fast_gelu_fusion: false - use_raw_attention_mask: false - disable_group_norm_fusion: true - disable_packed_kv: true - auto_optimization: O4 - auto_optimization_config: - for_gpu: ${is_gpu:${device}} - quantization: false - quantization_config: - is_static: false - format: QOperator - mode: IntegerOps - activations_dtype: QUInt8 - activations_symmetric: false - weights_dtype: QInt8 - weights_symmetric: true - per_channel: false - reduce_range: false - operators_to_quantize: - - MatMul - - Add - auto_quantization: null - auto_quantization_config: - is_static: false - calibration: ${requires_calibration:${backend.auto_quantization_config.is_static}, - ${backend.quantization_config.is_static}} - calibration_config: - dataset_name: glue - num_samples: 300 - dataset_config_name: sst2 - dataset_split: train - preprocess_batch: true - preprocess_class: optimum_benchmark.preprocessors.glue.GluePreprocessor - use_ortmodel: ${is_inference:${benchmark.name}} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.InferenceBenchmark - seed: 42 - memory: false - warmup_runs: 10 - benchmark_duration: 10 - input_shapes: - batch_size: 64 - sequence_length: 16 - num_choices: 1 - width: 64 - height: 64 - num_channels: 3 - point_batch_size: 3 - nb_points_per_image: 2 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: 10 -experiment_name: whisper_auto_opt(${backend.auto_optimization}) -model: openai/whisper-base -device: cuda:1 -task: ${infer_task:${model}, ${hub_kwargs.revision}} -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false - use_auth_token: false -environment: - optimum_version: 1.11.1.dev0 - transformers_version: 4.32.0.dev0 - accelerate_version: 0.22.0.dev0 - diffusers_version: 0.20.0.dev0 - python_version: 3.9.17 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 515637 diff --git a/examples/whisper/experiments/cuda_onnxruntime_64_10/whisper_auto_opt(O4)/4/.hydra/hydra.yaml b/examples/whisper/experiments/cuda_onnxruntime_64_10/whisper_auto_opt(O4)/4/.hydra/hydra.yaml deleted file mode 100644 index 231ae6c0..00000000 --- a/examples/whisper/experiments/cuda_onnxruntime_64_10/whisper_auto_opt(O4)/4/.hydra/hydra.yaml +++ /dev/null @@ -1,177 +0,0 @@ -hydra: - run: - dir: experiments/${device}_${backend.name}_${benchmark.input_shapes.batch_size}_${benchmark.new_tokens}/${experiment_name} - sweep: - dir: 
experiments/${device}_${backend.name}_${benchmark.input_shapes.batch_size}_${benchmark.new_tokens}/${experiment_name} - subdir: ${hydra.job.num} - launcher: - _target_: hydra._internal.core_plugins.basic_launcher.BasicLauncher - sweeper: - _target_: hydra._internal.core_plugins.basic_sweeper.BasicSweeper - max_batch_size: null - params: - benchmark.new_tokens: 10,100 - benchmark.input_shapes.batch_size: 64,128 - backend.auto_optimization: null,O1,O2,O3,O4 - help: - app_name: ${hydra.job.name} - header: '${hydra.help.app_name} is powered by Hydra. - - ' - footer: 'Powered by Hydra (https://hydra.cc) - - Use --hydra-help to view Hydra specific help - - ' - template: '${hydra.help.header} - - == Configuration groups == - - Compose your configuration from those groups (group=option) - - - $APP_CONFIG_GROUPS - - - == Config == - - Override anything in the config (foo.bar=value) - - - $CONFIG - - - ${hydra.help.footer} - - ' - hydra_help: - template: 'Hydra (${hydra.runtime.version}) - - See https://hydra.cc for more info. - - - == Flags == - - $FLAGS_HELP - - - == Configuration groups == - - Compose your configuration from those groups (For example, append hydra/job_logging=disabled - to command line) - - - $HYDRA_CONFIG_GROUPS - - - Use ''--cfg hydra'' to Show the Hydra config. - - ' - hydra_help: ??? - hydra_logging: - version: 1 - formatters: - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s' - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - root: - level: INFO - handlers: - - console - disable_existing_loggers: false - job_logging: - version: 1 - formatters: - simple: - format: '[%(asctime)s][%(name)s][%(levelname)s] - %(message)s' - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s] - - %(message)s' - log_colors: - DEBUG: purple - INFO: green - WARNING: yellow - ERROR: red - CRITICAL: red - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - file: - class: logging.FileHandler - formatter: simple - filename: ${hydra.job.name}.log - root: - level: INFO - handlers: - - console - - file - disable_existing_loggers: false - env: {} - mode: MULTIRUN - searchpath: [] - callbacks: {} - output_subdir: .hydra - overrides: - hydra: - - hydra.mode=MULTIRUN - task: - - benchmark.new_tokens=10 - - benchmark.input_shapes.batch_size=64 - - backend.auto_optimization=O4 - - device=cuda\:1 - job: - name: main - chdir: true - override_dirname: backend.auto_optimization=O4,benchmark.input_shapes.batch_size=64,benchmark.new_tokens=10,device=cuda\:1 - id: '4' - num: 4 - config_name: whisper_auto_opt - env_set: {} - env_copy: [] - config: - override_dirname: - kv_sep: '=' - item_sep: ',' - exclude_keys: [] - runtime: - version: 1.3.2 - version_base: '1.3' - cwd: /home/ilyas/optimum-benchmark/examples/whisper - config_sources: - - path: hydra.conf - schema: pkg - provider: hydra - - path: optimum_benchmark - schema: pkg - provider: main - - path: hydra_plugins.hydra_colorlog.conf - schema: pkg - provider: hydra-colorlog - - path: /home/ilyas/optimum-benchmark/examples/whisper/configs - schema: file - provider: command-line - - path: '' - schema: structured - provider: schema - output_dir: /home/ilyas/optimum-benchmark/examples/whisper/experiments/cuda:1_onnxruntime_64_10/whisper_auto_opt(O4)/4 - choices: - benchmark: inference - backend: 
onnxruntime - hydra/env: default - hydra/callbacks: null - hydra/job_logging: colorlog - hydra/hydra_logging: colorlog - hydra/hydra_help: default - hydra/help: default - hydra/sweeper: basic - hydra/launcher: basic - hydra/output: default - verbose: false diff --git a/examples/whisper/experiments/cuda_onnxruntime_64_10/whisper_auto_opt(O4)/4/.hydra/overrides.yaml b/examples/whisper/experiments/cuda_onnxruntime_64_10/whisper_auto_opt(O4)/4/.hydra/overrides.yaml deleted file mode 100644 index a5dbc9cb..00000000 --- a/examples/whisper/experiments/cuda_onnxruntime_64_10/whisper_auto_opt(O4)/4/.hydra/overrides.yaml +++ /dev/null @@ -1,4 +0,0 @@ -- benchmark.new_tokens=10 -- benchmark.input_shapes.batch_size=64 -- backend.auto_optimization=O4 -- device=cuda\:1 diff --git a/examples/whisper/experiments/cuda_onnxruntime_64_10/whisper_auto_opt(O4)/4/hydra_config.yaml b/examples/whisper/experiments/cuda_onnxruntime_64_10/whisper_auto_opt(O4)/4/hydra_config.yaml deleted file mode 100644 index 964686aa..00000000 --- a/examples/whisper/experiments/cuda_onnxruntime_64_10/whisper_auto_opt(O4)/4/hydra_config.yaml +++ /dev/null @@ -1,109 +0,0 @@ -backend: - name: onnxruntime - version: 1.15.1 - _target_: optimum_benchmark.backends.onnxruntime.ORTBackend - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: false - continous_isolation_check: false - delete_cache: false - export: true - no_weights: false - use_merged: false - use_cache: true - torch_dtype: null - provider: CUDAExecutionProvider - device_id: 1 - use_io_binding: true - enable_profiling: false - optimization: false - optimization_config: - optimization_level: 1 - optimize_for_gpu: true - fp16: false - enable_transformers_specific_optimizations: true - enable_gelu_approximation: false - disable_gelu_fusion: false - disable_layer_norm_fusion: false - disable_attention_fusion: false - disable_skip_layer_norm_fusion: true - disable_bias_skip_layer_norm_fusion: false - disable_bias_gelu_fusion: false - use_mask_index: false - no_attention_mask: false - disable_embed_layer_norm_fusion: true - disable_shape_inference: false - use_multi_head_attention: false - enable_gemm_fast_gelu_fusion: false - use_raw_attention_mask: false - disable_group_norm_fusion: true - disable_packed_kv: true - auto_optimization: O4 - auto_optimization_config: - for_gpu: true - quantization: false - quantization_config: - is_static: false - format: QOperator - mode: IntegerOps - activations_dtype: QUInt8 - activations_symmetric: false - weights_dtype: QInt8 - weights_symmetric: true - per_channel: false - reduce_range: false - operators_to_quantize: - - MatMul - - Add - auto_quantization: null - auto_quantization_config: - is_static: false - calibration: false - calibration_config: - dataset_name: glue - num_samples: 300 - dataset_config_name: sst2 - dataset_split: train - preprocess_batch: true - preprocess_class: optimum_benchmark.preprocessors.glue.GluePreprocessor - use_ortmodel: true -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.InferenceBenchmark - seed: 42 - memory: false - warmup_runs: 10 - benchmark_duration: 10 - input_shapes: - batch_size: 64 - sequence_length: 16 - num_choices: 1 - width: 64 - height: 64 - num_channels: 3 - point_batch_size: 3 - nb_points_per_image: 2 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: 10 -experiment_name: whisper_auto_opt(O4) -model: openai/whisper-base -device: cuda:1 -task: automatic-speech-recognition -hub_kwargs: - revision: 
main - cache_dir: null - force_download: false - local_files_only: false - use_auth_token: false -environment: - optimum_version: 1.11.1.dev0 - transformers_version: 4.32.0.dev0 - accelerate_version: 0.22.0.dev0 - diffusers_version: 0.20.0.dev0 - python_version: 3.9.17 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 515637 diff --git a/examples/whisper/experiments/cuda_onnxruntime_64_10/whisper_auto_opt(O4)/4/inference_results.csv b/examples/whisper/experiments/cuda_onnxruntime_64_10/whisper_auto_opt(O4)/4/inference_results.csv deleted file mode 100644 index 05dd1385..00000000 --- a/examples/whisper/experiments/cuda_onnxruntime_64_10/whisper_auto_opt(O4)/4/inference_results.csv +++ /dev/null @@ -1,2 +0,0 @@ -,forward.latency(s),forward.throughput(samples/s),generate.latency(s),generate.throughput(tokens/s) -0,0.0668,958.0,0.116,5520.0 diff --git a/examples/whisper/experiments/cuda_onnxruntime_64_100/whisper_auto_opt(None)/10/.hydra/config.yaml b/examples/whisper/experiments/cuda_onnxruntime_64_100/whisper_auto_opt(None)/10/.hydra/config.yaml deleted file mode 100644 index f951b3db..00000000 --- a/examples/whisper/experiments/cuda_onnxruntime_64_100/whisper_auto_opt(None)/10/.hydra/config.yaml +++ /dev/null @@ -1,110 +0,0 @@ -backend: - name: onnxruntime - version: 1.15.1 - _target_: optimum_benchmark.backends.onnxruntime.ORTBackend - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: false - continous_isolation_check: false - delete_cache: false - export: true - no_weights: false - use_merged: false - use_cache: true - torch_dtype: null - provider: ${infer_provider:${device}} - device_id: ${infer_device_id:${device}} - use_io_binding: ${is_gpu:${device}} - enable_profiling: ${is_profiling:${benchmark.name}} - optimization: false - optimization_config: - optimization_level: 1 - optimize_for_gpu: ${is_gpu:${device}} - fp16: false - enable_transformers_specific_optimizations: true - enable_gelu_approximation: false - disable_gelu_fusion: false - disable_layer_norm_fusion: false - disable_attention_fusion: false - disable_skip_layer_norm_fusion: true - disable_bias_skip_layer_norm_fusion: false - disable_bias_gelu_fusion: false - use_mask_index: false - no_attention_mask: false - disable_embed_layer_norm_fusion: true - disable_shape_inference: false - use_multi_head_attention: false - enable_gemm_fast_gelu_fusion: false - use_raw_attention_mask: false - disable_group_norm_fusion: true - disable_packed_kv: true - auto_optimization: null - auto_optimization_config: - for_gpu: ${is_gpu:${device}} - quantization: false - quantization_config: - is_static: false - format: QOperator - mode: IntegerOps - activations_dtype: QUInt8 - activations_symmetric: false - weights_dtype: QInt8 - weights_symmetric: true - per_channel: false - reduce_range: false - operators_to_quantize: - - MatMul - - Add - auto_quantization: null - auto_quantization_config: - is_static: false - calibration: ${requires_calibration:${backend.auto_quantization_config.is_static}, - ${backend.quantization_config.is_static}} - calibration_config: - dataset_name: glue - num_samples: 300 - dataset_config_name: sst2 - dataset_split: train - preprocess_batch: true - preprocess_class: optimum_benchmark.preprocessors.glue.GluePreprocessor - use_ortmodel: ${is_inference:${benchmark.name}} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.InferenceBenchmark - seed: 42 - memory: false - warmup_runs: 10 - benchmark_duration: 10 - 
input_shapes: - batch_size: 64 - sequence_length: 16 - num_choices: 1 - width: 64 - height: 64 - num_channels: 3 - point_batch_size: 3 - nb_points_per_image: 2 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: 100 -experiment_name: whisper_auto_opt(${backend.auto_optimization}) -model: openai/whisper-base -device: cuda:1 -task: ${infer_task:${model}, ${hub_kwargs.revision}} -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false - use_auth_token: false -environment: - optimum_version: 1.11.1.dev0 - transformers_version: 4.32.0.dev0 - accelerate_version: 0.22.0.dev0 - diffusers_version: 0.20.0.dev0 - python_version: 3.9.17 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 515637 diff --git a/examples/whisper/experiments/cuda_onnxruntime_64_100/whisper_auto_opt(None)/10/.hydra/hydra.yaml b/examples/whisper/experiments/cuda_onnxruntime_64_100/whisper_auto_opt(None)/10/.hydra/hydra.yaml deleted file mode 100644 index 794cedd2..00000000 --- a/examples/whisper/experiments/cuda_onnxruntime_64_100/whisper_auto_opt(None)/10/.hydra/hydra.yaml +++ /dev/null @@ -1,177 +0,0 @@ -hydra: - run: - dir: experiments/${device}_${backend.name}_${benchmark.input_shapes.batch_size}_${benchmark.new_tokens}/${experiment_name} - sweep: - dir: experiments/${device}_${backend.name}_${benchmark.input_shapes.batch_size}_${benchmark.new_tokens}/${experiment_name} - subdir: ${hydra.job.num} - launcher: - _target_: hydra._internal.core_plugins.basic_launcher.BasicLauncher - sweeper: - _target_: hydra._internal.core_plugins.basic_sweeper.BasicSweeper - max_batch_size: null - params: - benchmark.new_tokens: 10,100 - benchmark.input_shapes.batch_size: 64,128 - backend.auto_optimization: null,O1,O2,O3,O4 - help: - app_name: ${hydra.job.name} - header: '${hydra.help.app_name} is powered by Hydra. - - ' - footer: 'Powered by Hydra (https://hydra.cc) - - Use --hydra-help to view Hydra specific help - - ' - template: '${hydra.help.header} - - == Configuration groups == - - Compose your configuration from those groups (group=option) - - - $APP_CONFIG_GROUPS - - - == Config == - - Override anything in the config (foo.bar=value) - - - $CONFIG - - - ${hydra.help.footer} - - ' - hydra_help: - template: 'Hydra (${hydra.runtime.version}) - - See https://hydra.cc for more info. - - - == Flags == - - $FLAGS_HELP - - - == Configuration groups == - - Compose your configuration from those groups (For example, append hydra/job_logging=disabled - to command line) - - - $HYDRA_CONFIG_GROUPS - - - Use ''--cfg hydra'' to Show the Hydra config. - - ' - hydra_help: ??? 
- hydra_logging: - version: 1 - formatters: - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s' - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - root: - level: INFO - handlers: - - console - disable_existing_loggers: false - job_logging: - version: 1 - formatters: - simple: - format: '[%(asctime)s][%(name)s][%(levelname)s] - %(message)s' - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s] - - %(message)s' - log_colors: - DEBUG: purple - INFO: green - WARNING: yellow - ERROR: red - CRITICAL: red - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - file: - class: logging.FileHandler - formatter: simple - filename: ${hydra.job.name}.log - root: - level: INFO - handlers: - - console - - file - disable_existing_loggers: false - env: {} - mode: MULTIRUN - searchpath: [] - callbacks: {} - output_subdir: .hydra - overrides: - hydra: - - hydra.mode=MULTIRUN - task: - - benchmark.new_tokens=100 - - benchmark.input_shapes.batch_size=64 - - backend.auto_optimization=null - - device=cuda\:1 - job: - name: main - chdir: true - override_dirname: backend.auto_optimization=null,benchmark.input_shapes.batch_size=64,benchmark.new_tokens=100,device=cuda\:1 - id: '10' - num: 10 - config_name: whisper_auto_opt - env_set: {} - env_copy: [] - config: - override_dirname: - kv_sep: '=' - item_sep: ',' - exclude_keys: [] - runtime: - version: 1.3.2 - version_base: '1.3' - cwd: /home/ilyas/optimum-benchmark/examples/whisper - config_sources: - - path: hydra.conf - schema: pkg - provider: hydra - - path: optimum_benchmark - schema: pkg - provider: main - - path: hydra_plugins.hydra_colorlog.conf - schema: pkg - provider: hydra-colorlog - - path: /home/ilyas/optimum-benchmark/examples/whisper/configs - schema: file - provider: command-line - - path: '' - schema: structured - provider: schema - output_dir: /home/ilyas/optimum-benchmark/examples/whisper/experiments/cuda:1_onnxruntime_64_100/whisper_auto_opt(None)/10 - choices: - benchmark: inference - backend: onnxruntime - hydra/env: default - hydra/callbacks: null - hydra/job_logging: colorlog - hydra/hydra_logging: colorlog - hydra/hydra_help: default - hydra/help: default - hydra/sweeper: basic - hydra/launcher: basic - hydra/output: default - verbose: false diff --git a/examples/whisper/experiments/cuda_onnxruntime_64_100/whisper_auto_opt(None)/10/.hydra/overrides.yaml b/examples/whisper/experiments/cuda_onnxruntime_64_100/whisper_auto_opt(None)/10/.hydra/overrides.yaml deleted file mode 100644 index aeedf8a5..00000000 --- a/examples/whisper/experiments/cuda_onnxruntime_64_100/whisper_auto_opt(None)/10/.hydra/overrides.yaml +++ /dev/null @@ -1,4 +0,0 @@ -- benchmark.new_tokens=100 -- benchmark.input_shapes.batch_size=64 -- backend.auto_optimization=null -- device=cuda\:1 diff --git a/examples/whisper/experiments/cuda_onnxruntime_64_100/whisper_auto_opt(None)/10/hydra_config.yaml b/examples/whisper/experiments/cuda_onnxruntime_64_100/whisper_auto_opt(None)/10/hydra_config.yaml deleted file mode 100644 index 2ab3377b..00000000 --- a/examples/whisper/experiments/cuda_onnxruntime_64_100/whisper_auto_opt(None)/10/hydra_config.yaml +++ /dev/null @@ -1,109 +0,0 @@ -backend: - name: onnxruntime - version: 1.15.1 - _target_: optimum_benchmark.backends.onnxruntime.ORTBackend - inter_op_num_threads: 
null - intra_op_num_threads: null - initial_isolation_check: false - continous_isolation_check: false - delete_cache: false - export: true - no_weights: false - use_merged: false - use_cache: true - torch_dtype: null - provider: CUDAExecutionProvider - device_id: 1 - use_io_binding: true - enable_profiling: false - optimization: false - optimization_config: - optimization_level: 1 - optimize_for_gpu: true - fp16: false - enable_transformers_specific_optimizations: true - enable_gelu_approximation: false - disable_gelu_fusion: false - disable_layer_norm_fusion: false - disable_attention_fusion: false - disable_skip_layer_norm_fusion: true - disable_bias_skip_layer_norm_fusion: false - disable_bias_gelu_fusion: false - use_mask_index: false - no_attention_mask: false - disable_embed_layer_norm_fusion: true - disable_shape_inference: false - use_multi_head_attention: false - enable_gemm_fast_gelu_fusion: false - use_raw_attention_mask: false - disable_group_norm_fusion: true - disable_packed_kv: true - auto_optimization: null - auto_optimization_config: - for_gpu: true - quantization: false - quantization_config: - is_static: false - format: QOperator - mode: IntegerOps - activations_dtype: QUInt8 - activations_symmetric: false - weights_dtype: QInt8 - weights_symmetric: true - per_channel: false - reduce_range: false - operators_to_quantize: - - MatMul - - Add - auto_quantization: null - auto_quantization_config: - is_static: false - calibration: false - calibration_config: - dataset_name: glue - num_samples: 300 - dataset_config_name: sst2 - dataset_split: train - preprocess_batch: true - preprocess_class: optimum_benchmark.preprocessors.glue.GluePreprocessor - use_ortmodel: true -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.InferenceBenchmark - seed: 42 - memory: false - warmup_runs: 10 - benchmark_duration: 10 - input_shapes: - batch_size: 64 - sequence_length: 16 - num_choices: 1 - width: 64 - height: 64 - num_channels: 3 - point_batch_size: 3 - nb_points_per_image: 2 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: 100 -experiment_name: whisper_auto_opt(None) -model: openai/whisper-base -device: cuda:1 -task: automatic-speech-recognition -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false - use_auth_token: false -environment: - optimum_version: 1.11.1.dev0 - transformers_version: 4.32.0.dev0 - accelerate_version: 0.22.0.dev0 - diffusers_version: 0.20.0.dev0 - python_version: 3.9.17 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 515637 diff --git a/examples/whisper/experiments/cuda_onnxruntime_64_100/whisper_auto_opt(None)/10/inference_results.csv b/examples/whisper/experiments/cuda_onnxruntime_64_100/whisper_auto_opt(None)/10/inference_results.csv deleted file mode 100644 index 0b435dae..00000000 --- a/examples/whisper/experiments/cuda_onnxruntime_64_100/whisper_auto_opt(None)/10/inference_results.csv +++ /dev/null @@ -1,2 +0,0 @@ -,forward.latency(s),forward.throughput(samples/s),generate.latency(s),generate.throughput(tokens/s) -0,0.325,197.0,0.796,8040.0 diff --git a/examples/whisper/experiments/cuda_onnxruntime_64_100/whisper_auto_opt(O1)/11/.hydra/config.yaml b/examples/whisper/experiments/cuda_onnxruntime_64_100/whisper_auto_opt(O1)/11/.hydra/config.yaml deleted file mode 100644 index 2367ab85..00000000 --- a/examples/whisper/experiments/cuda_onnxruntime_64_100/whisper_auto_opt(O1)/11/.hydra/config.yaml +++ /dev/null @@ 
-1,110 +0,0 @@ -backend: - name: onnxruntime - version: 1.15.1 - _target_: optimum_benchmark.backends.onnxruntime.ORTBackend - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: false - continous_isolation_check: false - delete_cache: false - export: true - no_weights: false - use_merged: false - use_cache: true - torch_dtype: null - provider: ${infer_provider:${device}} - device_id: ${infer_device_id:${device}} - use_io_binding: ${is_gpu:${device}} - enable_profiling: ${is_profiling:${benchmark.name}} - optimization: false - optimization_config: - optimization_level: 1 - optimize_for_gpu: ${is_gpu:${device}} - fp16: false - enable_transformers_specific_optimizations: true - enable_gelu_approximation: false - disable_gelu_fusion: false - disable_layer_norm_fusion: false - disable_attention_fusion: false - disable_skip_layer_norm_fusion: true - disable_bias_skip_layer_norm_fusion: false - disable_bias_gelu_fusion: false - use_mask_index: false - no_attention_mask: false - disable_embed_layer_norm_fusion: true - disable_shape_inference: false - use_multi_head_attention: false - enable_gemm_fast_gelu_fusion: false - use_raw_attention_mask: false - disable_group_norm_fusion: true - disable_packed_kv: true - auto_optimization: O1 - auto_optimization_config: - for_gpu: ${is_gpu:${device}} - quantization: false - quantization_config: - is_static: false - format: QOperator - mode: IntegerOps - activations_dtype: QUInt8 - activations_symmetric: false - weights_dtype: QInt8 - weights_symmetric: true - per_channel: false - reduce_range: false - operators_to_quantize: - - MatMul - - Add - auto_quantization: null - auto_quantization_config: - is_static: false - calibration: ${requires_calibration:${backend.auto_quantization_config.is_static}, - ${backend.quantization_config.is_static}} - calibration_config: - dataset_name: glue - num_samples: 300 - dataset_config_name: sst2 - dataset_split: train - preprocess_batch: true - preprocess_class: optimum_benchmark.preprocessors.glue.GluePreprocessor - use_ortmodel: ${is_inference:${benchmark.name}} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.InferenceBenchmark - seed: 42 - memory: false - warmup_runs: 10 - benchmark_duration: 10 - input_shapes: - batch_size: 64 - sequence_length: 16 - num_choices: 1 - width: 64 - height: 64 - num_channels: 3 - point_batch_size: 3 - nb_points_per_image: 2 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: 100 -experiment_name: whisper_auto_opt(${backend.auto_optimization}) -model: openai/whisper-base -device: cuda:1 -task: ${infer_task:${model}, ${hub_kwargs.revision}} -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false - use_auth_token: false -environment: - optimum_version: 1.11.1.dev0 - transformers_version: 4.32.0.dev0 - accelerate_version: 0.22.0.dev0 - diffusers_version: 0.20.0.dev0 - python_version: 3.9.17 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 515637 diff --git a/examples/whisper/experiments/cuda_onnxruntime_64_100/whisper_auto_opt(O1)/11/.hydra/hydra.yaml b/examples/whisper/experiments/cuda_onnxruntime_64_100/whisper_auto_opt(O1)/11/.hydra/hydra.yaml deleted file mode 100644 index efe66ce2..00000000 --- a/examples/whisper/experiments/cuda_onnxruntime_64_100/whisper_auto_opt(O1)/11/.hydra/hydra.yaml +++ /dev/null @@ -1,177 +0,0 @@ -hydra: - run: - dir: 
experiments/${device}_${backend.name}_${benchmark.input_shapes.batch_size}_${benchmark.new_tokens}/${experiment_name} - sweep: - dir: experiments/${device}_${backend.name}_${benchmark.input_shapes.batch_size}_${benchmark.new_tokens}/${experiment_name} - subdir: ${hydra.job.num} - launcher: - _target_: hydra._internal.core_plugins.basic_launcher.BasicLauncher - sweeper: - _target_: hydra._internal.core_plugins.basic_sweeper.BasicSweeper - max_batch_size: null - params: - benchmark.new_tokens: 10,100 - benchmark.input_shapes.batch_size: 64,128 - backend.auto_optimization: null,O1,O2,O3,O4 - help: - app_name: ${hydra.job.name} - header: '${hydra.help.app_name} is powered by Hydra. - - ' - footer: 'Powered by Hydra (https://hydra.cc) - - Use --hydra-help to view Hydra specific help - - ' - template: '${hydra.help.header} - - == Configuration groups == - - Compose your configuration from those groups (group=option) - - - $APP_CONFIG_GROUPS - - - == Config == - - Override anything in the config (foo.bar=value) - - - $CONFIG - - - ${hydra.help.footer} - - ' - hydra_help: - template: 'Hydra (${hydra.runtime.version}) - - See https://hydra.cc for more info. - - - == Flags == - - $FLAGS_HELP - - - == Configuration groups == - - Compose your configuration from those groups (For example, append hydra/job_logging=disabled - to command line) - - - $HYDRA_CONFIG_GROUPS - - - Use ''--cfg hydra'' to Show the Hydra config. - - ' - hydra_help: ??? - hydra_logging: - version: 1 - formatters: - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s' - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - root: - level: INFO - handlers: - - console - disable_existing_loggers: false - job_logging: - version: 1 - formatters: - simple: - format: '[%(asctime)s][%(name)s][%(levelname)s] - %(message)s' - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s] - - %(message)s' - log_colors: - DEBUG: purple - INFO: green - WARNING: yellow - ERROR: red - CRITICAL: red - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - file: - class: logging.FileHandler - formatter: simple - filename: ${hydra.job.name}.log - root: - level: INFO - handlers: - - console - - file - disable_existing_loggers: false - env: {} - mode: MULTIRUN - searchpath: [] - callbacks: {} - output_subdir: .hydra - overrides: - hydra: - - hydra.mode=MULTIRUN - task: - - benchmark.new_tokens=100 - - benchmark.input_shapes.batch_size=64 - - backend.auto_optimization=O1 - - device=cuda\:1 - job: - name: main - chdir: true - override_dirname: backend.auto_optimization=O1,benchmark.input_shapes.batch_size=64,benchmark.new_tokens=100,device=cuda\:1 - id: '11' - num: 11 - config_name: whisper_auto_opt - env_set: {} - env_copy: [] - config: - override_dirname: - kv_sep: '=' - item_sep: ',' - exclude_keys: [] - runtime: - version: 1.3.2 - version_base: '1.3' - cwd: /home/ilyas/optimum-benchmark/examples/whisper - config_sources: - - path: hydra.conf - schema: pkg - provider: hydra - - path: optimum_benchmark - schema: pkg - provider: main - - path: hydra_plugins.hydra_colorlog.conf - schema: pkg - provider: hydra-colorlog - - path: /home/ilyas/optimum-benchmark/examples/whisper/configs - schema: file - provider: command-line - - path: '' - schema: structured - provider: schema - output_dir: 
/home/ilyas/optimum-benchmark/examples/whisper/experiments/cuda:1_onnxruntime_64_100/whisper_auto_opt(O1)/11 - choices: - benchmark: inference - backend: onnxruntime - hydra/env: default - hydra/callbacks: null - hydra/job_logging: colorlog - hydra/hydra_logging: colorlog - hydra/hydra_help: default - hydra/help: default - hydra/sweeper: basic - hydra/launcher: basic - hydra/output: default - verbose: false diff --git a/examples/whisper/experiments/cuda_onnxruntime_64_100/whisper_auto_opt(O1)/11/.hydra/overrides.yaml b/examples/whisper/experiments/cuda_onnxruntime_64_100/whisper_auto_opt(O1)/11/.hydra/overrides.yaml deleted file mode 100644 index ffacc743..00000000 --- a/examples/whisper/experiments/cuda_onnxruntime_64_100/whisper_auto_opt(O1)/11/.hydra/overrides.yaml +++ /dev/null @@ -1,4 +0,0 @@ -- benchmark.new_tokens=100 -- benchmark.input_shapes.batch_size=64 -- backend.auto_optimization=O1 -- device=cuda\:1 diff --git a/examples/whisper/experiments/cuda_onnxruntime_64_100/whisper_auto_opt(O1)/11/hydra_config.yaml b/examples/whisper/experiments/cuda_onnxruntime_64_100/whisper_auto_opt(O1)/11/hydra_config.yaml deleted file mode 100644 index 53e6489e..00000000 --- a/examples/whisper/experiments/cuda_onnxruntime_64_100/whisper_auto_opt(O1)/11/hydra_config.yaml +++ /dev/null @@ -1,109 +0,0 @@ -backend: - name: onnxruntime - version: 1.15.1 - _target_: optimum_benchmark.backends.onnxruntime.ORTBackend - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: false - continous_isolation_check: false - delete_cache: false - export: true - no_weights: false - use_merged: false - use_cache: true - torch_dtype: null - provider: CUDAExecutionProvider - device_id: 1 - use_io_binding: true - enable_profiling: false - optimization: false - optimization_config: - optimization_level: 1 - optimize_for_gpu: true - fp16: false - enable_transformers_specific_optimizations: true - enable_gelu_approximation: false - disable_gelu_fusion: false - disable_layer_norm_fusion: false - disable_attention_fusion: false - disable_skip_layer_norm_fusion: true - disable_bias_skip_layer_norm_fusion: false - disable_bias_gelu_fusion: false - use_mask_index: false - no_attention_mask: false - disable_embed_layer_norm_fusion: true - disable_shape_inference: false - use_multi_head_attention: false - enable_gemm_fast_gelu_fusion: false - use_raw_attention_mask: false - disable_group_norm_fusion: true - disable_packed_kv: true - auto_optimization: O1 - auto_optimization_config: - for_gpu: true - quantization: false - quantization_config: - is_static: false - format: QOperator - mode: IntegerOps - activations_dtype: QUInt8 - activations_symmetric: false - weights_dtype: QInt8 - weights_symmetric: true - per_channel: false - reduce_range: false - operators_to_quantize: - - MatMul - - Add - auto_quantization: null - auto_quantization_config: - is_static: false - calibration: false - calibration_config: - dataset_name: glue - num_samples: 300 - dataset_config_name: sst2 - dataset_split: train - preprocess_batch: true - preprocess_class: optimum_benchmark.preprocessors.glue.GluePreprocessor - use_ortmodel: true -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.InferenceBenchmark - seed: 42 - memory: false - warmup_runs: 10 - benchmark_duration: 10 - input_shapes: - batch_size: 64 - sequence_length: 16 - num_choices: 1 - width: 64 - height: 64 - num_channels: 3 - point_batch_size: 3 - nb_points_per_image: 2 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 
16000 - new_tokens: 100 -experiment_name: whisper_auto_opt(O1) -model: openai/whisper-base -device: cuda:1 -task: automatic-speech-recognition -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false - use_auth_token: false -environment: - optimum_version: 1.11.1.dev0 - transformers_version: 4.32.0.dev0 - accelerate_version: 0.22.0.dev0 - diffusers_version: 0.20.0.dev0 - python_version: 3.9.17 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 515637 diff --git a/examples/whisper/experiments/cuda_onnxruntime_64_100/whisper_auto_opt(O1)/11/inference_results.csv b/examples/whisper/experiments/cuda_onnxruntime_64_100/whisper_auto_opt(O1)/11/inference_results.csv deleted file mode 100644 index c85e57bd..00000000 --- a/examples/whisper/experiments/cuda_onnxruntime_64_100/whisper_auto_opt(O1)/11/inference_results.csv +++ /dev/null @@ -1,2 +0,0 @@ -,forward.latency(s),forward.throughput(samples/s),generate.latency(s),generate.throughput(tokens/s) -0,0.325,197.0,0.791,8090.0 diff --git a/examples/whisper/experiments/cuda_onnxruntime_64_100/whisper_auto_opt(O2)/12/.hydra/config.yaml b/examples/whisper/experiments/cuda_onnxruntime_64_100/whisper_auto_opt(O2)/12/.hydra/config.yaml deleted file mode 100644 index 1420fa94..00000000 --- a/examples/whisper/experiments/cuda_onnxruntime_64_100/whisper_auto_opt(O2)/12/.hydra/config.yaml +++ /dev/null @@ -1,110 +0,0 @@ -backend: - name: onnxruntime - version: 1.15.1 - _target_: optimum_benchmark.backends.onnxruntime.ORTBackend - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: false - continous_isolation_check: false - delete_cache: false - export: true - no_weights: false - use_merged: false - use_cache: true - torch_dtype: null - provider: ${infer_provider:${device}} - device_id: ${infer_device_id:${device}} - use_io_binding: ${is_gpu:${device}} - enable_profiling: ${is_profiling:${benchmark.name}} - optimization: false - optimization_config: - optimization_level: 1 - optimize_for_gpu: ${is_gpu:${device}} - fp16: false - enable_transformers_specific_optimizations: true - enable_gelu_approximation: false - disable_gelu_fusion: false - disable_layer_norm_fusion: false - disable_attention_fusion: false - disable_skip_layer_norm_fusion: true - disable_bias_skip_layer_norm_fusion: false - disable_bias_gelu_fusion: false - use_mask_index: false - no_attention_mask: false - disable_embed_layer_norm_fusion: true - disable_shape_inference: false - use_multi_head_attention: false - enable_gemm_fast_gelu_fusion: false - use_raw_attention_mask: false - disable_group_norm_fusion: true - disable_packed_kv: true - auto_optimization: O2 - auto_optimization_config: - for_gpu: ${is_gpu:${device}} - quantization: false - quantization_config: - is_static: false - format: QOperator - mode: IntegerOps - activations_dtype: QUInt8 - activations_symmetric: false - weights_dtype: QInt8 - weights_symmetric: true - per_channel: false - reduce_range: false - operators_to_quantize: - - MatMul - - Add - auto_quantization: null - auto_quantization_config: - is_static: false - calibration: ${requires_calibration:${backend.auto_quantization_config.is_static}, - ${backend.quantization_config.is_static}} - calibration_config: - dataset_name: glue - num_samples: 300 - dataset_config_name: sst2 - dataset_split: train - preprocess_batch: true - preprocess_class: optimum_benchmark.preprocessors.glue.GluePreprocessor - use_ortmodel: ${is_inference:${benchmark.name}} -benchmark: - name: 
inference - _target_: optimum_benchmark.benchmarks.inference.InferenceBenchmark - seed: 42 - memory: false - warmup_runs: 10 - benchmark_duration: 10 - input_shapes: - batch_size: 64 - sequence_length: 16 - num_choices: 1 - width: 64 - height: 64 - num_channels: 3 - point_batch_size: 3 - nb_points_per_image: 2 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: 100 -experiment_name: whisper_auto_opt(${backend.auto_optimization}) -model: openai/whisper-base -device: cuda:1 -task: ${infer_task:${model}, ${hub_kwargs.revision}} -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false - use_auth_token: false -environment: - optimum_version: 1.11.1.dev0 - transformers_version: 4.32.0.dev0 - accelerate_version: 0.22.0.dev0 - diffusers_version: 0.20.0.dev0 - python_version: 3.9.17 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 515637 diff --git a/examples/whisper/experiments/cuda_onnxruntime_64_100/whisper_auto_opt(O2)/12/.hydra/hydra.yaml b/examples/whisper/experiments/cuda_onnxruntime_64_100/whisper_auto_opt(O2)/12/.hydra/hydra.yaml deleted file mode 100644 index 0a511123..00000000 --- a/examples/whisper/experiments/cuda_onnxruntime_64_100/whisper_auto_opt(O2)/12/.hydra/hydra.yaml +++ /dev/null @@ -1,177 +0,0 @@ -hydra: - run: - dir: experiments/${device}_${backend.name}_${benchmark.input_shapes.batch_size}_${benchmark.new_tokens}/${experiment_name} - sweep: - dir: experiments/${device}_${backend.name}_${benchmark.input_shapes.batch_size}_${benchmark.new_tokens}/${experiment_name} - subdir: ${hydra.job.num} - launcher: - _target_: hydra._internal.core_plugins.basic_launcher.BasicLauncher - sweeper: - _target_: hydra._internal.core_plugins.basic_sweeper.BasicSweeper - max_batch_size: null - params: - benchmark.new_tokens: 10,100 - benchmark.input_shapes.batch_size: 64,128 - backend.auto_optimization: null,O1,O2,O3,O4 - help: - app_name: ${hydra.job.name} - header: '${hydra.help.app_name} is powered by Hydra. - - ' - footer: 'Powered by Hydra (https://hydra.cc) - - Use --hydra-help to view Hydra specific help - - ' - template: '${hydra.help.header} - - == Configuration groups == - - Compose your configuration from those groups (group=option) - - - $APP_CONFIG_GROUPS - - - == Config == - - Override anything in the config (foo.bar=value) - - - $CONFIG - - - ${hydra.help.footer} - - ' - hydra_help: - template: 'Hydra (${hydra.runtime.version}) - - See https://hydra.cc for more info. - - - == Flags == - - $FLAGS_HELP - - - == Configuration groups == - - Compose your configuration from those groups (For example, append hydra/job_logging=disabled - to command line) - - - $HYDRA_CONFIG_GROUPS - - - Use ''--cfg hydra'' to Show the Hydra config. - - ' - hydra_help: ??? 
- hydra_logging: - version: 1 - formatters: - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s' - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - root: - level: INFO - handlers: - - console - disable_existing_loggers: false - job_logging: - version: 1 - formatters: - simple: - format: '[%(asctime)s][%(name)s][%(levelname)s] - %(message)s' - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s] - - %(message)s' - log_colors: - DEBUG: purple - INFO: green - WARNING: yellow - ERROR: red - CRITICAL: red - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - file: - class: logging.FileHandler - formatter: simple - filename: ${hydra.job.name}.log - root: - level: INFO - handlers: - - console - - file - disable_existing_loggers: false - env: {} - mode: MULTIRUN - searchpath: [] - callbacks: {} - output_subdir: .hydra - overrides: - hydra: - - hydra.mode=MULTIRUN - task: - - benchmark.new_tokens=100 - - benchmark.input_shapes.batch_size=64 - - backend.auto_optimization=O2 - - device=cuda\:1 - job: - name: main - chdir: true - override_dirname: backend.auto_optimization=O2,benchmark.input_shapes.batch_size=64,benchmark.new_tokens=100,device=cuda\:1 - id: '12' - num: 12 - config_name: whisper_auto_opt - env_set: {} - env_copy: [] - config: - override_dirname: - kv_sep: '=' - item_sep: ',' - exclude_keys: [] - runtime: - version: 1.3.2 - version_base: '1.3' - cwd: /home/ilyas/optimum-benchmark/examples/whisper - config_sources: - - path: hydra.conf - schema: pkg - provider: hydra - - path: optimum_benchmark - schema: pkg - provider: main - - path: hydra_plugins.hydra_colorlog.conf - schema: pkg - provider: hydra-colorlog - - path: /home/ilyas/optimum-benchmark/examples/whisper/configs - schema: file - provider: command-line - - path: '' - schema: structured - provider: schema - output_dir: /home/ilyas/optimum-benchmark/examples/whisper/experiments/cuda:1_onnxruntime_64_100/whisper_auto_opt(O2)/12 - choices: - benchmark: inference - backend: onnxruntime - hydra/env: default - hydra/callbacks: null - hydra/job_logging: colorlog - hydra/hydra_logging: colorlog - hydra/hydra_help: default - hydra/help: default - hydra/sweeper: basic - hydra/launcher: basic - hydra/output: default - verbose: false diff --git a/examples/whisper/experiments/cuda_onnxruntime_64_100/whisper_auto_opt(O2)/12/.hydra/overrides.yaml b/examples/whisper/experiments/cuda_onnxruntime_64_100/whisper_auto_opt(O2)/12/.hydra/overrides.yaml deleted file mode 100644 index 40bc5479..00000000 --- a/examples/whisper/experiments/cuda_onnxruntime_64_100/whisper_auto_opt(O2)/12/.hydra/overrides.yaml +++ /dev/null @@ -1,4 +0,0 @@ -- benchmark.new_tokens=100 -- benchmark.input_shapes.batch_size=64 -- backend.auto_optimization=O2 -- device=cuda\:1 diff --git a/examples/whisper/experiments/cuda_onnxruntime_64_100/whisper_auto_opt(O2)/12/hydra_config.yaml b/examples/whisper/experiments/cuda_onnxruntime_64_100/whisper_auto_opt(O2)/12/hydra_config.yaml deleted file mode 100644 index d8cc8673..00000000 --- a/examples/whisper/experiments/cuda_onnxruntime_64_100/whisper_auto_opt(O2)/12/hydra_config.yaml +++ /dev/null @@ -1,109 +0,0 @@ -backend: - name: onnxruntime - version: 1.15.1 - _target_: optimum_benchmark.backends.onnxruntime.ORTBackend - inter_op_num_threads: null - 
intra_op_num_threads: null - initial_isolation_check: false - continous_isolation_check: false - delete_cache: false - export: true - no_weights: false - use_merged: false - use_cache: true - torch_dtype: null - provider: CUDAExecutionProvider - device_id: 1 - use_io_binding: true - enable_profiling: false - optimization: false - optimization_config: - optimization_level: 1 - optimize_for_gpu: true - fp16: false - enable_transformers_specific_optimizations: true - enable_gelu_approximation: false - disable_gelu_fusion: false - disable_layer_norm_fusion: false - disable_attention_fusion: false - disable_skip_layer_norm_fusion: true - disable_bias_skip_layer_norm_fusion: false - disable_bias_gelu_fusion: false - use_mask_index: false - no_attention_mask: false - disable_embed_layer_norm_fusion: true - disable_shape_inference: false - use_multi_head_attention: false - enable_gemm_fast_gelu_fusion: false - use_raw_attention_mask: false - disable_group_norm_fusion: true - disable_packed_kv: true - auto_optimization: O2 - auto_optimization_config: - for_gpu: true - quantization: false - quantization_config: - is_static: false - format: QOperator - mode: IntegerOps - activations_dtype: QUInt8 - activations_symmetric: false - weights_dtype: QInt8 - weights_symmetric: true - per_channel: false - reduce_range: false - operators_to_quantize: - - MatMul - - Add - auto_quantization: null - auto_quantization_config: - is_static: false - calibration: false - calibration_config: - dataset_name: glue - num_samples: 300 - dataset_config_name: sst2 - dataset_split: train - preprocess_batch: true - preprocess_class: optimum_benchmark.preprocessors.glue.GluePreprocessor - use_ortmodel: true -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.InferenceBenchmark - seed: 42 - memory: false - warmup_runs: 10 - benchmark_duration: 10 - input_shapes: - batch_size: 64 - sequence_length: 16 - num_choices: 1 - width: 64 - height: 64 - num_channels: 3 - point_batch_size: 3 - nb_points_per_image: 2 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: 100 -experiment_name: whisper_auto_opt(O2) -model: openai/whisper-base -device: cuda:1 -task: automatic-speech-recognition -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false - use_auth_token: false -environment: - optimum_version: 1.11.1.dev0 - transformers_version: 4.32.0.dev0 - accelerate_version: 0.22.0.dev0 - diffusers_version: 0.20.0.dev0 - python_version: 3.9.17 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 515637 diff --git a/examples/whisper/experiments/cuda_onnxruntime_64_100/whisper_auto_opt(O2)/12/inference_results.csv b/examples/whisper/experiments/cuda_onnxruntime_64_100/whisper_auto_opt(O2)/12/inference_results.csv deleted file mode 100644 index c234c739..00000000 --- a/examples/whisper/experiments/cuda_onnxruntime_64_100/whisper_auto_opt(O2)/12/inference_results.csv +++ /dev/null @@ -1,2 +0,0 @@ -,forward.latency(s),forward.throughput(samples/s),generate.latency(s),generate.throughput(tokens/s) -0,0.172,372.0,0.622,10300.0 diff --git a/examples/whisper/experiments/cuda_onnxruntime_64_100/whisper_auto_opt(O3)/13/.hydra/config.yaml b/examples/whisper/experiments/cuda_onnxruntime_64_100/whisper_auto_opt(O3)/13/.hydra/config.yaml deleted file mode 100644 index 853d4167..00000000 --- a/examples/whisper/experiments/cuda_onnxruntime_64_100/whisper_auto_opt(O3)/13/.hydra/config.yaml +++ /dev/null @@ -1,110 +0,0 @@ 
-backend: - name: onnxruntime - version: 1.15.1 - _target_: optimum_benchmark.backends.onnxruntime.ORTBackend - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: false - continous_isolation_check: false - delete_cache: false - export: true - no_weights: false - use_merged: false - use_cache: true - torch_dtype: null - provider: ${infer_provider:${device}} - device_id: ${infer_device_id:${device}} - use_io_binding: ${is_gpu:${device}} - enable_profiling: ${is_profiling:${benchmark.name}} - optimization: false - optimization_config: - optimization_level: 1 - optimize_for_gpu: ${is_gpu:${device}} - fp16: false - enable_transformers_specific_optimizations: true - enable_gelu_approximation: false - disable_gelu_fusion: false - disable_layer_norm_fusion: false - disable_attention_fusion: false - disable_skip_layer_norm_fusion: true - disable_bias_skip_layer_norm_fusion: false - disable_bias_gelu_fusion: false - use_mask_index: false - no_attention_mask: false - disable_embed_layer_norm_fusion: true - disable_shape_inference: false - use_multi_head_attention: false - enable_gemm_fast_gelu_fusion: false - use_raw_attention_mask: false - disable_group_norm_fusion: true - disable_packed_kv: true - auto_optimization: O3 - auto_optimization_config: - for_gpu: ${is_gpu:${device}} - quantization: false - quantization_config: - is_static: false - format: QOperator - mode: IntegerOps - activations_dtype: QUInt8 - activations_symmetric: false - weights_dtype: QInt8 - weights_symmetric: true - per_channel: false - reduce_range: false - operators_to_quantize: - - MatMul - - Add - auto_quantization: null - auto_quantization_config: - is_static: false - calibration: ${requires_calibration:${backend.auto_quantization_config.is_static}, - ${backend.quantization_config.is_static}} - calibration_config: - dataset_name: glue - num_samples: 300 - dataset_config_name: sst2 - dataset_split: train - preprocess_batch: true - preprocess_class: optimum_benchmark.preprocessors.glue.GluePreprocessor - use_ortmodel: ${is_inference:${benchmark.name}} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.InferenceBenchmark - seed: 42 - memory: false - warmup_runs: 10 - benchmark_duration: 10 - input_shapes: - batch_size: 64 - sequence_length: 16 - num_choices: 1 - width: 64 - height: 64 - num_channels: 3 - point_batch_size: 3 - nb_points_per_image: 2 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: 100 -experiment_name: whisper_auto_opt(${backend.auto_optimization}) -model: openai/whisper-base -device: cuda:1 -task: ${infer_task:${model}, ${hub_kwargs.revision}} -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false - use_auth_token: false -environment: - optimum_version: 1.11.1.dev0 - transformers_version: 4.32.0.dev0 - accelerate_version: 0.22.0.dev0 - diffusers_version: 0.20.0.dev0 - python_version: 3.9.17 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 515637 diff --git a/examples/whisper/experiments/cuda_onnxruntime_64_100/whisper_auto_opt(O3)/13/.hydra/hydra.yaml b/examples/whisper/experiments/cuda_onnxruntime_64_100/whisper_auto_opt(O3)/13/.hydra/hydra.yaml deleted file mode 100644 index eace09c3..00000000 --- a/examples/whisper/experiments/cuda_onnxruntime_64_100/whisper_auto_opt(O3)/13/.hydra/hydra.yaml +++ /dev/null @@ -1,177 +0,0 @@ -hydra: - run: - dir: 
experiments/${device}_${backend.name}_${benchmark.input_shapes.batch_size}_${benchmark.new_tokens}/${experiment_name} - sweep: - dir: experiments/${device}_${backend.name}_${benchmark.input_shapes.batch_size}_${benchmark.new_tokens}/${experiment_name} - subdir: ${hydra.job.num} - launcher: - _target_: hydra._internal.core_plugins.basic_launcher.BasicLauncher - sweeper: - _target_: hydra._internal.core_plugins.basic_sweeper.BasicSweeper - max_batch_size: null - params: - benchmark.new_tokens: 10,100 - benchmark.input_shapes.batch_size: 64,128 - backend.auto_optimization: null,O1,O2,O3,O4 - help: - app_name: ${hydra.job.name} - header: '${hydra.help.app_name} is powered by Hydra. - - ' - footer: 'Powered by Hydra (https://hydra.cc) - - Use --hydra-help to view Hydra specific help - - ' - template: '${hydra.help.header} - - == Configuration groups == - - Compose your configuration from those groups (group=option) - - - $APP_CONFIG_GROUPS - - - == Config == - - Override anything in the config (foo.bar=value) - - - $CONFIG - - - ${hydra.help.footer} - - ' - hydra_help: - template: 'Hydra (${hydra.runtime.version}) - - See https://hydra.cc for more info. - - - == Flags == - - $FLAGS_HELP - - - == Configuration groups == - - Compose your configuration from those groups (For example, append hydra/job_logging=disabled - to command line) - - - $HYDRA_CONFIG_GROUPS - - - Use ''--cfg hydra'' to Show the Hydra config. - - ' - hydra_help: ??? - hydra_logging: - version: 1 - formatters: - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s' - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - root: - level: INFO - handlers: - - console - disable_existing_loggers: false - job_logging: - version: 1 - formatters: - simple: - format: '[%(asctime)s][%(name)s][%(levelname)s] - %(message)s' - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s] - - %(message)s' - log_colors: - DEBUG: purple - INFO: green - WARNING: yellow - ERROR: red - CRITICAL: red - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - file: - class: logging.FileHandler - formatter: simple - filename: ${hydra.job.name}.log - root: - level: INFO - handlers: - - console - - file - disable_existing_loggers: false - env: {} - mode: MULTIRUN - searchpath: [] - callbacks: {} - output_subdir: .hydra - overrides: - hydra: - - hydra.mode=MULTIRUN - task: - - benchmark.new_tokens=100 - - benchmark.input_shapes.batch_size=64 - - backend.auto_optimization=O3 - - device=cuda\:1 - job: - name: main - chdir: true - override_dirname: backend.auto_optimization=O3,benchmark.input_shapes.batch_size=64,benchmark.new_tokens=100,device=cuda\:1 - id: '13' - num: 13 - config_name: whisper_auto_opt - env_set: {} - env_copy: [] - config: - override_dirname: - kv_sep: '=' - item_sep: ',' - exclude_keys: [] - runtime: - version: 1.3.2 - version_base: '1.3' - cwd: /home/ilyas/optimum-benchmark/examples/whisper - config_sources: - - path: hydra.conf - schema: pkg - provider: hydra - - path: optimum_benchmark - schema: pkg - provider: main - - path: hydra_plugins.hydra_colorlog.conf - schema: pkg - provider: hydra-colorlog - - path: /home/ilyas/optimum-benchmark/examples/whisper/configs - schema: file - provider: command-line - - path: '' - schema: structured - provider: schema - output_dir: 
/home/ilyas/optimum-benchmark/examples/whisper/experiments/cuda:1_onnxruntime_64_100/whisper_auto_opt(O3)/13 - choices: - benchmark: inference - backend: onnxruntime - hydra/env: default - hydra/callbacks: null - hydra/job_logging: colorlog - hydra/hydra_logging: colorlog - hydra/hydra_help: default - hydra/help: default - hydra/sweeper: basic - hydra/launcher: basic - hydra/output: default - verbose: false diff --git a/examples/whisper/experiments/cuda_onnxruntime_64_100/whisper_auto_opt(O3)/13/.hydra/overrides.yaml b/examples/whisper/experiments/cuda_onnxruntime_64_100/whisper_auto_opt(O3)/13/.hydra/overrides.yaml deleted file mode 100644 index 6267be90..00000000 --- a/examples/whisper/experiments/cuda_onnxruntime_64_100/whisper_auto_opt(O3)/13/.hydra/overrides.yaml +++ /dev/null @@ -1,4 +0,0 @@ -- benchmark.new_tokens=100 -- benchmark.input_shapes.batch_size=64 -- backend.auto_optimization=O3 -- device=cuda\:1 diff --git a/examples/whisper/experiments/cuda_onnxruntime_64_100/whisper_auto_opt(O3)/13/hydra_config.yaml b/examples/whisper/experiments/cuda_onnxruntime_64_100/whisper_auto_opt(O3)/13/hydra_config.yaml deleted file mode 100644 index 3098e267..00000000 --- a/examples/whisper/experiments/cuda_onnxruntime_64_100/whisper_auto_opt(O3)/13/hydra_config.yaml +++ /dev/null @@ -1,109 +0,0 @@ -backend: - name: onnxruntime - version: 1.15.1 - _target_: optimum_benchmark.backends.onnxruntime.ORTBackend - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: false - continous_isolation_check: false - delete_cache: false - export: true - no_weights: false - use_merged: false - use_cache: true - torch_dtype: null - provider: CUDAExecutionProvider - device_id: 1 - use_io_binding: true - enable_profiling: false - optimization: false - optimization_config: - optimization_level: 1 - optimize_for_gpu: true - fp16: false - enable_transformers_specific_optimizations: true - enable_gelu_approximation: false - disable_gelu_fusion: false - disable_layer_norm_fusion: false - disable_attention_fusion: false - disable_skip_layer_norm_fusion: true - disable_bias_skip_layer_norm_fusion: false - disable_bias_gelu_fusion: false - use_mask_index: false - no_attention_mask: false - disable_embed_layer_norm_fusion: true - disable_shape_inference: false - use_multi_head_attention: false - enable_gemm_fast_gelu_fusion: false - use_raw_attention_mask: false - disable_group_norm_fusion: true - disable_packed_kv: true - auto_optimization: O3 - auto_optimization_config: - for_gpu: true - quantization: false - quantization_config: - is_static: false - format: QOperator - mode: IntegerOps - activations_dtype: QUInt8 - activations_symmetric: false - weights_dtype: QInt8 - weights_symmetric: true - per_channel: false - reduce_range: false - operators_to_quantize: - - MatMul - - Add - auto_quantization: null - auto_quantization_config: - is_static: false - calibration: false - calibration_config: - dataset_name: glue - num_samples: 300 - dataset_config_name: sst2 - dataset_split: train - preprocess_batch: true - preprocess_class: optimum_benchmark.preprocessors.glue.GluePreprocessor - use_ortmodel: true -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.InferenceBenchmark - seed: 42 - memory: false - warmup_runs: 10 - benchmark_duration: 10 - input_shapes: - batch_size: 64 - sequence_length: 16 - num_choices: 1 - width: 64 - height: 64 - num_channels: 3 - point_batch_size: 3 - nb_points_per_image: 2 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 
16000 - new_tokens: 100 -experiment_name: whisper_auto_opt(O3) -model: openai/whisper-base -device: cuda:1 -task: automatic-speech-recognition -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false - use_auth_token: false -environment: - optimum_version: 1.11.1.dev0 - transformers_version: 4.32.0.dev0 - accelerate_version: 0.22.0.dev0 - diffusers_version: 0.20.0.dev0 - python_version: 3.9.17 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 515637 diff --git a/examples/whisper/experiments/cuda_onnxruntime_64_100/whisper_auto_opt(O3)/13/inference_results.csv b/examples/whisper/experiments/cuda_onnxruntime_64_100/whisper_auto_opt(O3)/13/inference_results.csv deleted file mode 100644 index 7c50c820..00000000 --- a/examples/whisper/experiments/cuda_onnxruntime_64_100/whisper_auto_opt(O3)/13/inference_results.csv +++ /dev/null @@ -1,2 +0,0 @@ -,forward.latency(s),forward.throughput(samples/s),generate.latency(s),generate.throughput(tokens/s) -0,0.169,379.0,0.621,10300.0 diff --git a/examples/whisper/experiments/cuda_onnxruntime_64_100/whisper_auto_opt(O4)/14/.hydra/config.yaml b/examples/whisper/experiments/cuda_onnxruntime_64_100/whisper_auto_opt(O4)/14/.hydra/config.yaml deleted file mode 100644 index 1aadfd30..00000000 --- a/examples/whisper/experiments/cuda_onnxruntime_64_100/whisper_auto_opt(O4)/14/.hydra/config.yaml +++ /dev/null @@ -1,110 +0,0 @@ -backend: - name: onnxruntime - version: 1.15.1 - _target_: optimum_benchmark.backends.onnxruntime.ORTBackend - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: false - continous_isolation_check: false - delete_cache: false - export: true - no_weights: false - use_merged: false - use_cache: true - torch_dtype: null - provider: ${infer_provider:${device}} - device_id: ${infer_device_id:${device}} - use_io_binding: ${is_gpu:${device}} - enable_profiling: ${is_profiling:${benchmark.name}} - optimization: false - optimization_config: - optimization_level: 1 - optimize_for_gpu: ${is_gpu:${device}} - fp16: false - enable_transformers_specific_optimizations: true - enable_gelu_approximation: false - disable_gelu_fusion: false - disable_layer_norm_fusion: false - disable_attention_fusion: false - disable_skip_layer_norm_fusion: true - disable_bias_skip_layer_norm_fusion: false - disable_bias_gelu_fusion: false - use_mask_index: false - no_attention_mask: false - disable_embed_layer_norm_fusion: true - disable_shape_inference: false - use_multi_head_attention: false - enable_gemm_fast_gelu_fusion: false - use_raw_attention_mask: false - disable_group_norm_fusion: true - disable_packed_kv: true - auto_optimization: O4 - auto_optimization_config: - for_gpu: ${is_gpu:${device}} - quantization: false - quantization_config: - is_static: false - format: QOperator - mode: IntegerOps - activations_dtype: QUInt8 - activations_symmetric: false - weights_dtype: QInt8 - weights_symmetric: true - per_channel: false - reduce_range: false - operators_to_quantize: - - MatMul - - Add - auto_quantization: null - auto_quantization_config: - is_static: false - calibration: ${requires_calibration:${backend.auto_quantization_config.is_static}, - ${backend.quantization_config.is_static}} - calibration_config: - dataset_name: glue - num_samples: 300 - dataset_config_name: sst2 - dataset_split: train - preprocess_batch: true - preprocess_class: optimum_benchmark.preprocessors.glue.GluePreprocessor - use_ortmodel: ${is_inference:${benchmark.name}} -benchmark: - 
name: inference - _target_: optimum_benchmark.benchmarks.inference.InferenceBenchmark - seed: 42 - memory: false - warmup_runs: 10 - benchmark_duration: 10 - input_shapes: - batch_size: 64 - sequence_length: 16 - num_choices: 1 - width: 64 - height: 64 - num_channels: 3 - point_batch_size: 3 - nb_points_per_image: 2 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: 100 -experiment_name: whisper_auto_opt(${backend.auto_optimization}) -model: openai/whisper-base -device: cuda:1 -task: ${infer_task:${model}, ${hub_kwargs.revision}} -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false - use_auth_token: false -environment: - optimum_version: 1.11.1.dev0 - transformers_version: 4.32.0.dev0 - accelerate_version: 0.22.0.dev0 - diffusers_version: 0.20.0.dev0 - python_version: 3.9.17 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 515637 diff --git a/examples/whisper/experiments/cuda_onnxruntime_64_100/whisper_auto_opt(O4)/14/.hydra/hydra.yaml b/examples/whisper/experiments/cuda_onnxruntime_64_100/whisper_auto_opt(O4)/14/.hydra/hydra.yaml deleted file mode 100644 index 3ae446cc..00000000 --- a/examples/whisper/experiments/cuda_onnxruntime_64_100/whisper_auto_opt(O4)/14/.hydra/hydra.yaml +++ /dev/null @@ -1,177 +0,0 @@ -hydra: - run: - dir: experiments/${device}_${backend.name}_${benchmark.input_shapes.batch_size}_${benchmark.new_tokens}/${experiment_name} - sweep: - dir: experiments/${device}_${backend.name}_${benchmark.input_shapes.batch_size}_${benchmark.new_tokens}/${experiment_name} - subdir: ${hydra.job.num} - launcher: - _target_: hydra._internal.core_plugins.basic_launcher.BasicLauncher - sweeper: - _target_: hydra._internal.core_plugins.basic_sweeper.BasicSweeper - max_batch_size: null - params: - benchmark.new_tokens: 10,100 - benchmark.input_shapes.batch_size: 64,128 - backend.auto_optimization: null,O1,O2,O3,O4 - help: - app_name: ${hydra.job.name} - header: '${hydra.help.app_name} is powered by Hydra. - - ' - footer: 'Powered by Hydra (https://hydra.cc) - - Use --hydra-help to view Hydra specific help - - ' - template: '${hydra.help.header} - - == Configuration groups == - - Compose your configuration from those groups (group=option) - - - $APP_CONFIG_GROUPS - - - == Config == - - Override anything in the config (foo.bar=value) - - - $CONFIG - - - ${hydra.help.footer} - - ' - hydra_help: - template: 'Hydra (${hydra.runtime.version}) - - See https://hydra.cc for more info. - - - == Flags == - - $FLAGS_HELP - - - == Configuration groups == - - Compose your configuration from those groups (For example, append hydra/job_logging=disabled - to command line) - - - $HYDRA_CONFIG_GROUPS - - - Use ''--cfg hydra'' to Show the Hydra config. - - ' - hydra_help: ??? 
- hydra_logging: - version: 1 - formatters: - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s' - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - root: - level: INFO - handlers: - - console - disable_existing_loggers: false - job_logging: - version: 1 - formatters: - simple: - format: '[%(asctime)s][%(name)s][%(levelname)s] - %(message)s' - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s] - - %(message)s' - log_colors: - DEBUG: purple - INFO: green - WARNING: yellow - ERROR: red - CRITICAL: red - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - file: - class: logging.FileHandler - formatter: simple - filename: ${hydra.job.name}.log - root: - level: INFO - handlers: - - console - - file - disable_existing_loggers: false - env: {} - mode: MULTIRUN - searchpath: [] - callbacks: {} - output_subdir: .hydra - overrides: - hydra: - - hydra.mode=MULTIRUN - task: - - benchmark.new_tokens=100 - - benchmark.input_shapes.batch_size=64 - - backend.auto_optimization=O4 - - device=cuda\:1 - job: - name: main - chdir: true - override_dirname: backend.auto_optimization=O4,benchmark.input_shapes.batch_size=64,benchmark.new_tokens=100,device=cuda\:1 - id: '14' - num: 14 - config_name: whisper_auto_opt - env_set: {} - env_copy: [] - config: - override_dirname: - kv_sep: '=' - item_sep: ',' - exclude_keys: [] - runtime: - version: 1.3.2 - version_base: '1.3' - cwd: /home/ilyas/optimum-benchmark/examples/whisper - config_sources: - - path: hydra.conf - schema: pkg - provider: hydra - - path: optimum_benchmark - schema: pkg - provider: main - - path: hydra_plugins.hydra_colorlog.conf - schema: pkg - provider: hydra-colorlog - - path: /home/ilyas/optimum-benchmark/examples/whisper/configs - schema: file - provider: command-line - - path: '' - schema: structured - provider: schema - output_dir: /home/ilyas/optimum-benchmark/examples/whisper/experiments/cuda:1_onnxruntime_64_100/whisper_auto_opt(O4)/14 - choices: - benchmark: inference - backend: onnxruntime - hydra/env: default - hydra/callbacks: null - hydra/job_logging: colorlog - hydra/hydra_logging: colorlog - hydra/hydra_help: default - hydra/help: default - hydra/sweeper: basic - hydra/launcher: basic - hydra/output: default - verbose: false diff --git a/examples/whisper/experiments/cuda_onnxruntime_64_100/whisper_auto_opt(O4)/14/.hydra/overrides.yaml b/examples/whisper/experiments/cuda_onnxruntime_64_100/whisper_auto_opt(O4)/14/.hydra/overrides.yaml deleted file mode 100644 index dafdebbc..00000000 --- a/examples/whisper/experiments/cuda_onnxruntime_64_100/whisper_auto_opt(O4)/14/.hydra/overrides.yaml +++ /dev/null @@ -1,4 +0,0 @@ -- benchmark.new_tokens=100 -- benchmark.input_shapes.batch_size=64 -- backend.auto_optimization=O4 -- device=cuda\:1 diff --git a/examples/whisper/experiments/cuda_onnxruntime_64_100/whisper_auto_opt(O4)/14/hydra_config.yaml b/examples/whisper/experiments/cuda_onnxruntime_64_100/whisper_auto_opt(O4)/14/hydra_config.yaml deleted file mode 100644 index 21fd8d41..00000000 --- a/examples/whisper/experiments/cuda_onnxruntime_64_100/whisper_auto_opt(O4)/14/hydra_config.yaml +++ /dev/null @@ -1,109 +0,0 @@ -backend: - name: onnxruntime - version: 1.15.1 - _target_: optimum_benchmark.backends.onnxruntime.ORTBackend - inter_op_num_threads: null - 
intra_op_num_threads: null - initial_isolation_check: false - continous_isolation_check: false - delete_cache: false - export: true - no_weights: false - use_merged: false - use_cache: true - torch_dtype: null - provider: CUDAExecutionProvider - device_id: 1 - use_io_binding: true - enable_profiling: false - optimization: false - optimization_config: - optimization_level: 1 - optimize_for_gpu: true - fp16: false - enable_transformers_specific_optimizations: true - enable_gelu_approximation: false - disable_gelu_fusion: false - disable_layer_norm_fusion: false - disable_attention_fusion: false - disable_skip_layer_norm_fusion: true - disable_bias_skip_layer_norm_fusion: false - disable_bias_gelu_fusion: false - use_mask_index: false - no_attention_mask: false - disable_embed_layer_norm_fusion: true - disable_shape_inference: false - use_multi_head_attention: false - enable_gemm_fast_gelu_fusion: false - use_raw_attention_mask: false - disable_group_norm_fusion: true - disable_packed_kv: true - auto_optimization: O4 - auto_optimization_config: - for_gpu: true - quantization: false - quantization_config: - is_static: false - format: QOperator - mode: IntegerOps - activations_dtype: QUInt8 - activations_symmetric: false - weights_dtype: QInt8 - weights_symmetric: true - per_channel: false - reduce_range: false - operators_to_quantize: - - MatMul - - Add - auto_quantization: null - auto_quantization_config: - is_static: false - calibration: false - calibration_config: - dataset_name: glue - num_samples: 300 - dataset_config_name: sst2 - dataset_split: train - preprocess_batch: true - preprocess_class: optimum_benchmark.preprocessors.glue.GluePreprocessor - use_ortmodel: true -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.InferenceBenchmark - seed: 42 - memory: false - warmup_runs: 10 - benchmark_duration: 10 - input_shapes: - batch_size: 64 - sequence_length: 16 - num_choices: 1 - width: 64 - height: 64 - num_channels: 3 - point_batch_size: 3 - nb_points_per_image: 2 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: 100 -experiment_name: whisper_auto_opt(O4) -model: openai/whisper-base -device: cuda:1 -task: automatic-speech-recognition -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false - use_auth_token: false -environment: - optimum_version: 1.11.1.dev0 - transformers_version: 4.32.0.dev0 - accelerate_version: 0.22.0.dev0 - diffusers_version: 0.20.0.dev0 - python_version: 3.9.17 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 515637 diff --git a/examples/whisper/experiments/cuda_onnxruntime_64_100/whisper_auto_opt(O4)/14/inference_results.csv b/examples/whisper/experiments/cuda_onnxruntime_64_100/whisper_auto_opt(O4)/14/inference_results.csv deleted file mode 100644 index edee3b53..00000000 --- a/examples/whisper/experiments/cuda_onnxruntime_64_100/whisper_auto_opt(O4)/14/inference_results.csv +++ /dev/null @@ -1,2 +0,0 @@ -,forward.latency(s),forward.throughput(samples/s),generate.latency(s),generate.throughput(tokens/s) -0,0.0666,961.0,0.65,9850.0 diff --git a/examples/whisper/experiments/cuda_pytorch_128_10/whisper_baseline/1/.hydra/config.yaml b/examples/whisper/experiments/cuda_pytorch_128_10/whisper_baseline/1/.hydra/config.yaml deleted file mode 100644 index c5b06a83..00000000 --- a/examples/whisper/experiments/cuda_pytorch_128_10/whisper_baseline/1/.hydra/config.yaml +++ /dev/null @@ -1,66 +0,0 @@ -backend: - name: pytorch - 
version: 2.0.1+cu117 - _target_: optimum_benchmark.backends.pytorch.PyTorchBackend - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: false - continous_isolation_check: false - delete_cache: false - no_weights: false - torch_dtype: null - load_in_8bit: false - load_in_4bit: false - bettertransformer: false - torch_compile: false - torch_compile_config: - fullgraph: false - dynamic: false - backend: inductor - mode: null - options: null - disable: false - amp_autocast: false - amp_dtype: null - disable_grad: ${is_inference:${benchmark.name}} - eval_mode: ${is_inference:${benchmark.name}} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.InferenceBenchmark - seed: 42 - memory: false - warmup_runs: 10 - benchmark_duration: 10 - input_shapes: - batch_size: 128 - sequence_length: 16 - num_choices: 1 - width: 64 - height: 64 - num_channels: 3 - point_batch_size: 3 - nb_points_per_image: 2 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: 10 -experiment_name: whisper_baseline -model: openai/whisper-base -device: cuda:1 -task: ${infer_task:${model}, ${hub_kwargs.revision}} -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false - use_auth_token: false -environment: - optimum_version: 1.11.1.dev0 - transformers_version: 4.32.0.dev0 - accelerate_version: 0.22.0.dev0 - diffusers_version: 0.20.0.dev0 - python_version: 3.9.17 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 515637 diff --git a/examples/whisper/experiments/cuda_pytorch_128_10/whisper_baseline/1/.hydra/hydra.yaml b/examples/whisper/experiments/cuda_pytorch_128_10/whisper_baseline/1/.hydra/hydra.yaml deleted file mode 100644 index 1acaa16d..00000000 --- a/examples/whisper/experiments/cuda_pytorch_128_10/whisper_baseline/1/.hydra/hydra.yaml +++ /dev/null @@ -1,175 +0,0 @@ -hydra: - run: - dir: experiments/${device}_${backend.name}_${benchmark.input_shapes.batch_size}_${benchmark.new_tokens}/${experiment_name} - sweep: - dir: experiments/${device}_${backend.name}_${benchmark.input_shapes.batch_size}_${benchmark.new_tokens}/${experiment_name} - subdir: ${hydra.job.num} - launcher: - _target_: hydra._internal.core_plugins.basic_launcher.BasicLauncher - sweeper: - _target_: hydra._internal.core_plugins.basic_sweeper.BasicSweeper - max_batch_size: null - params: - benchmark.new_tokens: 10,100 - benchmark.input_shapes.batch_size: 64,128 - help: - app_name: ${hydra.job.name} - header: '${hydra.help.app_name} is powered by Hydra. - - ' - footer: 'Powered by Hydra (https://hydra.cc) - - Use --hydra-help to view Hydra specific help - - ' - template: '${hydra.help.header} - - == Configuration groups == - - Compose your configuration from those groups (group=option) - - - $APP_CONFIG_GROUPS - - - == Config == - - Override anything in the config (foo.bar=value) - - - $CONFIG - - - ${hydra.help.footer} - - ' - hydra_help: - template: 'Hydra (${hydra.runtime.version}) - - See https://hydra.cc for more info. - - - == Flags == - - $FLAGS_HELP - - - == Configuration groups == - - Compose your configuration from those groups (For example, append hydra/job_logging=disabled - to command line) - - - $HYDRA_CONFIG_GROUPS - - - Use ''--cfg hydra'' to Show the Hydra config. - - ' - hydra_help: ??? 
- hydra_logging: - version: 1 - formatters: - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s' - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - root: - level: INFO - handlers: - - console - disable_existing_loggers: false - job_logging: - version: 1 - formatters: - simple: - format: '[%(asctime)s][%(name)s][%(levelname)s] - %(message)s' - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s] - - %(message)s' - log_colors: - DEBUG: purple - INFO: green - WARNING: yellow - ERROR: red - CRITICAL: red - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - file: - class: logging.FileHandler - formatter: simple - filename: ${hydra.job.name}.log - root: - level: INFO - handlers: - - console - - file - disable_existing_loggers: false - env: {} - mode: MULTIRUN - searchpath: [] - callbacks: {} - output_subdir: .hydra - overrides: - hydra: - - hydra.mode=MULTIRUN - task: - - benchmark.new_tokens=10 - - benchmark.input_shapes.batch_size=128 - - device=cuda\:1 - job: - name: main - chdir: true - override_dirname: benchmark.input_shapes.batch_size=128,benchmark.new_tokens=10,device=cuda\:1 - id: '1' - num: 1 - config_name: whisper_baseline - env_set: {} - env_copy: [] - config: - override_dirname: - kv_sep: '=' - item_sep: ',' - exclude_keys: [] - runtime: - version: 1.3.2 - version_base: '1.3' - cwd: /home/ilyas/optimum-benchmark/examples/whisper - config_sources: - - path: hydra.conf - schema: pkg - provider: hydra - - path: optimum_benchmark - schema: pkg - provider: main - - path: hydra_plugins.hydra_colorlog.conf - schema: pkg - provider: hydra-colorlog - - path: /home/ilyas/optimum-benchmark/examples/whisper/configs - schema: file - provider: command-line - - path: '' - schema: structured - provider: schema - output_dir: /home/ilyas/optimum-benchmark/examples/whisper/experiments/cuda:1_pytorch_128_10/whisper_baseline/1 - choices: - benchmark: inference - backend: pytorch - hydra/env: default - hydra/callbacks: null - hydra/job_logging: colorlog - hydra/hydra_logging: colorlog - hydra/hydra_help: default - hydra/help: default - hydra/sweeper: basic - hydra/launcher: basic - hydra/output: default - verbose: false diff --git a/examples/whisper/experiments/cuda_pytorch_128_10/whisper_baseline/1/.hydra/overrides.yaml b/examples/whisper/experiments/cuda_pytorch_128_10/whisper_baseline/1/.hydra/overrides.yaml deleted file mode 100644 index 1af87e77..00000000 --- a/examples/whisper/experiments/cuda_pytorch_128_10/whisper_baseline/1/.hydra/overrides.yaml +++ /dev/null @@ -1,3 +0,0 @@ -- benchmark.new_tokens=10 -- benchmark.input_shapes.batch_size=128 -- device=cuda\:1 diff --git a/examples/whisper/experiments/cuda_pytorch_128_10/whisper_baseline/1/hydra_config.yaml b/examples/whisper/experiments/cuda_pytorch_128_10/whisper_baseline/1/hydra_config.yaml deleted file mode 100644 index d069d6c4..00000000 --- a/examples/whisper/experiments/cuda_pytorch_128_10/whisper_baseline/1/hydra_config.yaml +++ /dev/null @@ -1,66 +0,0 @@ -backend: - name: pytorch - version: 2.0.1+cu117 - _target_: optimum_benchmark.backends.pytorch.PyTorchBackend - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: false - continous_isolation_check: false - delete_cache: false - no_weights: false - torch_dtype: null - load_in_8bit: false 
- load_in_4bit: false - bettertransformer: false - torch_compile: false - torch_compile_config: - fullgraph: false - dynamic: false - backend: inductor - mode: null - options: null - disable: false - amp_autocast: false - amp_dtype: null - disable_grad: true - eval_mode: true -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.InferenceBenchmark - seed: 42 - memory: false - warmup_runs: 10 - benchmark_duration: 10 - input_shapes: - batch_size: 128 - sequence_length: 16 - num_choices: 1 - width: 64 - height: 64 - num_channels: 3 - point_batch_size: 3 - nb_points_per_image: 2 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: 10 -experiment_name: whisper_baseline -model: openai/whisper-base -device: cuda:1 -task: automatic-speech-recognition -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false - use_auth_token: false -environment: - optimum_version: 1.11.1.dev0 - transformers_version: 4.32.0.dev0 - accelerate_version: 0.22.0.dev0 - diffusers_version: 0.20.0.dev0 - python_version: 3.9.17 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 515637 diff --git a/examples/whisper/experiments/cuda_pytorch_128_10/whisper_baseline/1/inference_results.csv b/examples/whisper/experiments/cuda_pytorch_128_10/whisper_baseline/1/inference_results.csv deleted file mode 100644 index 4d412b59..00000000 --- a/examples/whisper/experiments/cuda_pytorch_128_10/whisper_baseline/1/inference_results.csv +++ /dev/null @@ -1,2 +0,0 @@ -,forward.latency(s),forward.throughput(samples/s),generate.latency(s),generate.throughput(tokens/s) -0,0.41,312.0,1.07,1200.0 diff --git a/examples/whisper/experiments/cuda_pytorch_128_100/whisper_baseline/3/.hydra/config.yaml b/examples/whisper/experiments/cuda_pytorch_128_100/whisper_baseline/3/.hydra/config.yaml deleted file mode 100644 index b9a983d0..00000000 --- a/examples/whisper/experiments/cuda_pytorch_128_100/whisper_baseline/3/.hydra/config.yaml +++ /dev/null @@ -1,66 +0,0 @@ -backend: - name: pytorch - version: 2.0.1+cu117 - _target_: optimum_benchmark.backends.pytorch.PyTorchBackend - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: false - continous_isolation_check: false - delete_cache: false - no_weights: false - torch_dtype: null - load_in_8bit: false - load_in_4bit: false - bettertransformer: false - torch_compile: false - torch_compile_config: - fullgraph: false - dynamic: false - backend: inductor - mode: null - options: null - disable: false - amp_autocast: false - amp_dtype: null - disable_grad: ${is_inference:${benchmark.name}} - eval_mode: ${is_inference:${benchmark.name}} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.InferenceBenchmark - seed: 42 - memory: false - warmup_runs: 10 - benchmark_duration: 10 - input_shapes: - batch_size: 128 - sequence_length: 16 - num_choices: 1 - width: 64 - height: 64 - num_channels: 3 - point_batch_size: 3 - nb_points_per_image: 2 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: 100 -experiment_name: whisper_baseline -model: openai/whisper-base -device: cuda:1 -task: ${infer_task:${model}, ${hub_kwargs.revision}} -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false - use_auth_token: false -environment: - optimum_version: 1.11.1.dev0 - transformers_version: 4.32.0.dev0 - accelerate_version: 0.22.0.dev0 - diffusers_version: 0.20.0.dev0 - 
python_version: 3.9.17 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 515637 diff --git a/examples/whisper/experiments/cuda_pytorch_128_100/whisper_baseline/3/.hydra/hydra.yaml b/examples/whisper/experiments/cuda_pytorch_128_100/whisper_baseline/3/.hydra/hydra.yaml deleted file mode 100644 index a62ef43a..00000000 --- a/examples/whisper/experiments/cuda_pytorch_128_100/whisper_baseline/3/.hydra/hydra.yaml +++ /dev/null @@ -1,175 +0,0 @@ -hydra: - run: - dir: experiments/${device}_${backend.name}_${benchmark.input_shapes.batch_size}_${benchmark.new_tokens}/${experiment_name} - sweep: - dir: experiments/${device}_${backend.name}_${benchmark.input_shapes.batch_size}_${benchmark.new_tokens}/${experiment_name} - subdir: ${hydra.job.num} - launcher: - _target_: hydra._internal.core_plugins.basic_launcher.BasicLauncher - sweeper: - _target_: hydra._internal.core_plugins.basic_sweeper.BasicSweeper - max_batch_size: null - params: - benchmark.new_tokens: 10,100 - benchmark.input_shapes.batch_size: 64,128 - help: - app_name: ${hydra.job.name} - header: '${hydra.help.app_name} is powered by Hydra. - - ' - footer: 'Powered by Hydra (https://hydra.cc) - - Use --hydra-help to view Hydra specific help - - ' - template: '${hydra.help.header} - - == Configuration groups == - - Compose your configuration from those groups (group=option) - - - $APP_CONFIG_GROUPS - - - == Config == - - Override anything in the config (foo.bar=value) - - - $CONFIG - - - ${hydra.help.footer} - - ' - hydra_help: - template: 'Hydra (${hydra.runtime.version}) - - See https://hydra.cc for more info. - - - == Flags == - - $FLAGS_HELP - - - == Configuration groups == - - Compose your configuration from those groups (For example, append hydra/job_logging=disabled - to command line) - - - $HYDRA_CONFIG_GROUPS - - - Use ''--cfg hydra'' to Show the Hydra config. - - ' - hydra_help: ??? 
- hydra_logging: - version: 1 - formatters: - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s' - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - root: - level: INFO - handlers: - - console - disable_existing_loggers: false - job_logging: - version: 1 - formatters: - simple: - format: '[%(asctime)s][%(name)s][%(levelname)s] - %(message)s' - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s] - - %(message)s' - log_colors: - DEBUG: purple - INFO: green - WARNING: yellow - ERROR: red - CRITICAL: red - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - file: - class: logging.FileHandler - formatter: simple - filename: ${hydra.job.name}.log - root: - level: INFO - handlers: - - console - - file - disable_existing_loggers: false - env: {} - mode: MULTIRUN - searchpath: [] - callbacks: {} - output_subdir: .hydra - overrides: - hydra: - - hydra.mode=MULTIRUN - task: - - benchmark.new_tokens=100 - - benchmark.input_shapes.batch_size=128 - - device=cuda\:1 - job: - name: main - chdir: true - override_dirname: benchmark.input_shapes.batch_size=128,benchmark.new_tokens=100,device=cuda\:1 - id: '3' - num: 3 - config_name: whisper_baseline - env_set: {} - env_copy: [] - config: - override_dirname: - kv_sep: '=' - item_sep: ',' - exclude_keys: [] - runtime: - version: 1.3.2 - version_base: '1.3' - cwd: /home/ilyas/optimum-benchmark/examples/whisper - config_sources: - - path: hydra.conf - schema: pkg - provider: hydra - - path: optimum_benchmark - schema: pkg - provider: main - - path: hydra_plugins.hydra_colorlog.conf - schema: pkg - provider: hydra-colorlog - - path: /home/ilyas/optimum-benchmark/examples/whisper/configs - schema: file - provider: command-line - - path: '' - schema: structured - provider: schema - output_dir: /home/ilyas/optimum-benchmark/examples/whisper/experiments/cuda:1_pytorch_128_100/whisper_baseline/3 - choices: - benchmark: inference - backend: pytorch - hydra/env: default - hydra/callbacks: null - hydra/job_logging: colorlog - hydra/hydra_logging: colorlog - hydra/hydra_help: default - hydra/help: default - hydra/sweeper: basic - hydra/launcher: basic - hydra/output: default - verbose: false diff --git a/examples/whisper/experiments/cuda_pytorch_128_100/whisper_baseline/3/.hydra/overrides.yaml b/examples/whisper/experiments/cuda_pytorch_128_100/whisper_baseline/3/.hydra/overrides.yaml deleted file mode 100644 index 8463a032..00000000 --- a/examples/whisper/experiments/cuda_pytorch_128_100/whisper_baseline/3/.hydra/overrides.yaml +++ /dev/null @@ -1,3 +0,0 @@ -- benchmark.new_tokens=100 -- benchmark.input_shapes.batch_size=128 -- device=cuda\:1 diff --git a/examples/whisper/experiments/cuda_pytorch_128_100/whisper_baseline/3/hydra_config.yaml b/examples/whisper/experiments/cuda_pytorch_128_100/whisper_baseline/3/hydra_config.yaml deleted file mode 100644 index 6561339e..00000000 --- a/examples/whisper/experiments/cuda_pytorch_128_100/whisper_baseline/3/hydra_config.yaml +++ /dev/null @@ -1,66 +0,0 @@ -backend: - name: pytorch - version: 2.0.1+cu117 - _target_: optimum_benchmark.backends.pytorch.PyTorchBackend - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: false - continous_isolation_check: false - delete_cache: false - no_weights: false - torch_dtype: null - 
load_in_8bit: false - load_in_4bit: false - bettertransformer: false - torch_compile: false - torch_compile_config: - fullgraph: false - dynamic: false - backend: inductor - mode: null - options: null - disable: false - amp_autocast: false - amp_dtype: null - disable_grad: true - eval_mode: true -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.InferenceBenchmark - seed: 42 - memory: false - warmup_runs: 10 - benchmark_duration: 10 - input_shapes: - batch_size: 128 - sequence_length: 16 - num_choices: 1 - width: 64 - height: 64 - num_channels: 3 - point_batch_size: 3 - nb_points_per_image: 2 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: 100 -experiment_name: whisper_baseline -model: openai/whisper-base -device: cuda:1 -task: automatic-speech-recognition -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false - use_auth_token: false -environment: - optimum_version: 1.11.1.dev0 - transformers_version: 4.32.0.dev0 - accelerate_version: 0.22.0.dev0 - diffusers_version: 0.20.0.dev0 - python_version: 3.9.17 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 515637 diff --git a/examples/whisper/experiments/cuda_pytorch_128_100/whisper_baseline/3/inference_results.csv b/examples/whisper/experiments/cuda_pytorch_128_100/whisper_baseline/3/inference_results.csv deleted file mode 100644 index 190a3cbc..00000000 --- a/examples/whisper/experiments/cuda_pytorch_128_100/whisper_baseline/3/inference_results.csv +++ /dev/null @@ -1,2 +0,0 @@ -,forward.latency(s),forward.throughput(samples/s),generate.latency(s),generate.throughput(tokens/s) -0,0.411,311.0,1.66,7710.0 diff --git a/examples/whisper/experiments/cuda_pytorch_64_10/whisper_baseline/0/.hydra/config.yaml b/examples/whisper/experiments/cuda_pytorch_64_10/whisper_baseline/0/.hydra/config.yaml deleted file mode 100644 index b0df60af..00000000 --- a/examples/whisper/experiments/cuda_pytorch_64_10/whisper_baseline/0/.hydra/config.yaml +++ /dev/null @@ -1,66 +0,0 @@ -backend: - name: pytorch - version: 2.0.1+cu117 - _target_: optimum_benchmark.backends.pytorch.PyTorchBackend - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: false - continous_isolation_check: false - delete_cache: false - no_weights: false - torch_dtype: null - load_in_8bit: false - load_in_4bit: false - bettertransformer: false - torch_compile: false - torch_compile_config: - fullgraph: false - dynamic: false - backend: inductor - mode: null - options: null - disable: false - amp_autocast: false - amp_dtype: null - disable_grad: ${is_inference:${benchmark.name}} - eval_mode: ${is_inference:${benchmark.name}} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.InferenceBenchmark - seed: 42 - memory: false - warmup_runs: 10 - benchmark_duration: 10 - input_shapes: - batch_size: 64 - sequence_length: 16 - num_choices: 1 - width: 64 - height: 64 - num_channels: 3 - point_batch_size: 3 - nb_points_per_image: 2 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: 10 -experiment_name: whisper_baseline -model: openai/whisper-base -device: cuda:1 -task: ${infer_task:${model}, ${hub_kwargs.revision}} -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false - use_auth_token: false -environment: - optimum_version: 1.11.1.dev0 - transformers_version: 4.32.0.dev0 - accelerate_version: 0.22.0.dev0 - 
diffusers_version: 0.20.0.dev0 - python_version: 3.9.17 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 515637 diff --git a/examples/whisper/experiments/cuda_pytorch_64_10/whisper_baseline/0/.hydra/hydra.yaml b/examples/whisper/experiments/cuda_pytorch_64_10/whisper_baseline/0/.hydra/hydra.yaml deleted file mode 100644 index 6e5db4cb..00000000 --- a/examples/whisper/experiments/cuda_pytorch_64_10/whisper_baseline/0/.hydra/hydra.yaml +++ /dev/null @@ -1,175 +0,0 @@ -hydra: - run: - dir: experiments/${device}_${backend.name}_${benchmark.input_shapes.batch_size}_${benchmark.new_tokens}/${experiment_name} - sweep: - dir: experiments/${device}_${backend.name}_${benchmark.input_shapes.batch_size}_${benchmark.new_tokens}/${experiment_name} - subdir: ${hydra.job.num} - launcher: - _target_: hydra._internal.core_plugins.basic_launcher.BasicLauncher - sweeper: - _target_: hydra._internal.core_plugins.basic_sweeper.BasicSweeper - max_batch_size: null - params: - benchmark.new_tokens: 10,100 - benchmark.input_shapes.batch_size: 64,128 - help: - app_name: ${hydra.job.name} - header: '${hydra.help.app_name} is powered by Hydra. - - ' - footer: 'Powered by Hydra (https://hydra.cc) - - Use --hydra-help to view Hydra specific help - - ' - template: '${hydra.help.header} - - == Configuration groups == - - Compose your configuration from those groups (group=option) - - - $APP_CONFIG_GROUPS - - - == Config == - - Override anything in the config (foo.bar=value) - - - $CONFIG - - - ${hydra.help.footer} - - ' - hydra_help: - template: 'Hydra (${hydra.runtime.version}) - - See https://hydra.cc for more info. - - - == Flags == - - $FLAGS_HELP - - - == Configuration groups == - - Compose your configuration from those groups (For example, append hydra/job_logging=disabled - to command line) - - - $HYDRA_CONFIG_GROUPS - - - Use ''--cfg hydra'' to Show the Hydra config. - - ' - hydra_help: ??? 
- hydra_logging: - version: 1 - formatters: - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s' - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - root: - level: INFO - handlers: - - console - disable_existing_loggers: false - job_logging: - version: 1 - formatters: - simple: - format: '[%(asctime)s][%(name)s][%(levelname)s] - %(message)s' - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s] - - %(message)s' - log_colors: - DEBUG: purple - INFO: green - WARNING: yellow - ERROR: red - CRITICAL: red - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - file: - class: logging.FileHandler - formatter: simple - filename: ${hydra.job.name}.log - root: - level: INFO - handlers: - - console - - file - disable_existing_loggers: false - env: {} - mode: MULTIRUN - searchpath: [] - callbacks: {} - output_subdir: .hydra - overrides: - hydra: - - hydra.mode=MULTIRUN - task: - - benchmark.new_tokens=10 - - benchmark.input_shapes.batch_size=64 - - device=cuda\:1 - job: - name: main - chdir: true - override_dirname: benchmark.input_shapes.batch_size=64,benchmark.new_tokens=10,device=cuda\:1 - id: '0' - num: 0 - config_name: whisper_baseline - env_set: {} - env_copy: [] - config: - override_dirname: - kv_sep: '=' - item_sep: ',' - exclude_keys: [] - runtime: - version: 1.3.2 - version_base: '1.3' - cwd: /home/ilyas/optimum-benchmark/examples/whisper - config_sources: - - path: hydra.conf - schema: pkg - provider: hydra - - path: optimum_benchmark - schema: pkg - provider: main - - path: hydra_plugins.hydra_colorlog.conf - schema: pkg - provider: hydra-colorlog - - path: /home/ilyas/optimum-benchmark/examples/whisper/configs - schema: file - provider: command-line - - path: '' - schema: structured - provider: schema - output_dir: /home/ilyas/optimum-benchmark/examples/whisper/experiments/cuda:1_pytorch_64_10/whisper_baseline/0 - choices: - benchmark: inference - backend: pytorch - hydra/env: default - hydra/callbacks: null - hydra/job_logging: colorlog - hydra/hydra_logging: colorlog - hydra/hydra_help: default - hydra/help: default - hydra/sweeper: basic - hydra/launcher: basic - hydra/output: default - verbose: false diff --git a/examples/whisper/experiments/cuda_pytorch_64_10/whisper_baseline/0/.hydra/overrides.yaml b/examples/whisper/experiments/cuda_pytorch_64_10/whisper_baseline/0/.hydra/overrides.yaml deleted file mode 100644 index 458105c5..00000000 --- a/examples/whisper/experiments/cuda_pytorch_64_10/whisper_baseline/0/.hydra/overrides.yaml +++ /dev/null @@ -1,3 +0,0 @@ -- benchmark.new_tokens=10 -- benchmark.input_shapes.batch_size=64 -- device=cuda\:1 diff --git a/examples/whisper/experiments/cuda_pytorch_64_10/whisper_baseline/0/hydra_config.yaml b/examples/whisper/experiments/cuda_pytorch_64_10/whisper_baseline/0/hydra_config.yaml deleted file mode 100644 index eae87554..00000000 --- a/examples/whisper/experiments/cuda_pytorch_64_10/whisper_baseline/0/hydra_config.yaml +++ /dev/null @@ -1,66 +0,0 @@ -backend: - name: pytorch - version: 2.0.1+cu117 - _target_: optimum_benchmark.backends.pytorch.PyTorchBackend - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: false - continous_isolation_check: false - delete_cache: false - no_weights: false - torch_dtype: null - load_in_8bit: false - 
load_in_4bit: false - bettertransformer: false - torch_compile: false - torch_compile_config: - fullgraph: false - dynamic: false - backend: inductor - mode: null - options: null - disable: false - amp_autocast: false - amp_dtype: null - disable_grad: true - eval_mode: true -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.InferenceBenchmark - seed: 42 - memory: false - warmup_runs: 10 - benchmark_duration: 10 - input_shapes: - batch_size: 64 - sequence_length: 16 - num_choices: 1 - width: 64 - height: 64 - num_channels: 3 - point_batch_size: 3 - nb_points_per_image: 2 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: 10 -experiment_name: whisper_baseline -model: openai/whisper-base -device: cuda:1 -task: automatic-speech-recognition -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false - use_auth_token: false -environment: - optimum_version: 1.11.1.dev0 - transformers_version: 4.32.0.dev0 - accelerate_version: 0.22.0.dev0 - diffusers_version: 0.20.0.dev0 - python_version: 3.9.17 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 515637 diff --git a/examples/whisper/experiments/cuda_pytorch_64_10/whisper_baseline/0/inference_results.csv b/examples/whisper/experiments/cuda_pytorch_64_10/whisper_baseline/0/inference_results.csv deleted file mode 100644 index 1de7caf5..00000000 --- a/examples/whisper/experiments/cuda_pytorch_64_10/whisper_baseline/0/inference_results.csv +++ /dev/null @@ -1,2 +0,0 @@ -,forward.latency(s),forward.throughput(samples/s),generate.latency(s),generate.throughput(tokens/s) -0,0.102,627.0,0.549,1170.0 diff --git a/examples/whisper/experiments/cuda_pytorch_64_100/whisper_baseline/2/.hydra/config.yaml b/examples/whisper/experiments/cuda_pytorch_64_100/whisper_baseline/2/.hydra/config.yaml deleted file mode 100644 index 6c25cb74..00000000 --- a/examples/whisper/experiments/cuda_pytorch_64_100/whisper_baseline/2/.hydra/config.yaml +++ /dev/null @@ -1,66 +0,0 @@ -backend: - name: pytorch - version: 2.0.1+cu117 - _target_: optimum_benchmark.backends.pytorch.PyTorchBackend - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: false - continous_isolation_check: false - delete_cache: false - no_weights: false - torch_dtype: null - load_in_8bit: false - load_in_4bit: false - bettertransformer: false - torch_compile: false - torch_compile_config: - fullgraph: false - dynamic: false - backend: inductor - mode: null - options: null - disable: false - amp_autocast: false - amp_dtype: null - disable_grad: ${is_inference:${benchmark.name}} - eval_mode: ${is_inference:${benchmark.name}} -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.InferenceBenchmark - seed: 42 - memory: false - warmup_runs: 10 - benchmark_duration: 10 - input_shapes: - batch_size: 64 - sequence_length: 16 - num_choices: 1 - width: 64 - height: 64 - num_channels: 3 - point_batch_size: 3 - nb_points_per_image: 2 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: 100 -experiment_name: whisper_baseline -model: openai/whisper-base -device: cuda:1 -task: ${infer_task:${model}, ${hub_kwargs.revision}} -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false - use_auth_token: false -environment: - optimum_version: 1.11.1.dev0 - transformers_version: 4.32.0.dev0 - accelerate_version: 0.22.0.dev0 - diffusers_version: 0.20.0.dev0 - 
python_version: 3.9.17 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 515637 diff --git a/examples/whisper/experiments/cuda_pytorch_64_100/whisper_baseline/2/.hydra/hydra.yaml b/examples/whisper/experiments/cuda_pytorch_64_100/whisper_baseline/2/.hydra/hydra.yaml deleted file mode 100644 index e5d45fe7..00000000 --- a/examples/whisper/experiments/cuda_pytorch_64_100/whisper_baseline/2/.hydra/hydra.yaml +++ /dev/null @@ -1,175 +0,0 @@ -hydra: - run: - dir: experiments/${device}_${backend.name}_${benchmark.input_shapes.batch_size}_${benchmark.new_tokens}/${experiment_name} - sweep: - dir: experiments/${device}_${backend.name}_${benchmark.input_shapes.batch_size}_${benchmark.new_tokens}/${experiment_name} - subdir: ${hydra.job.num} - launcher: - _target_: hydra._internal.core_plugins.basic_launcher.BasicLauncher - sweeper: - _target_: hydra._internal.core_plugins.basic_sweeper.BasicSweeper - max_batch_size: null - params: - benchmark.new_tokens: 10,100 - benchmark.input_shapes.batch_size: 64,128 - help: - app_name: ${hydra.job.name} - header: '${hydra.help.app_name} is powered by Hydra. - - ' - footer: 'Powered by Hydra (https://hydra.cc) - - Use --hydra-help to view Hydra specific help - - ' - template: '${hydra.help.header} - - == Configuration groups == - - Compose your configuration from those groups (group=option) - - - $APP_CONFIG_GROUPS - - - == Config == - - Override anything in the config (foo.bar=value) - - - $CONFIG - - - ${hydra.help.footer} - - ' - hydra_help: - template: 'Hydra (${hydra.runtime.version}) - - See https://hydra.cc for more info. - - - == Flags == - - $FLAGS_HELP - - - == Configuration groups == - - Compose your configuration from those groups (For example, append hydra/job_logging=disabled - to command line) - - - $HYDRA_CONFIG_GROUPS - - - Use ''--cfg hydra'' to Show the Hydra config. - - ' - hydra_help: ??? 
- hydra_logging: - version: 1 - formatters: - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s' - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - root: - level: INFO - handlers: - - console - disable_existing_loggers: false - job_logging: - version: 1 - formatters: - simple: - format: '[%(asctime)s][%(name)s][%(levelname)s] - %(message)s' - colorlog: - (): colorlog.ColoredFormatter - format: '[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s] - - %(message)s' - log_colors: - DEBUG: purple - INFO: green - WARNING: yellow - ERROR: red - CRITICAL: red - handlers: - console: - class: logging.StreamHandler - formatter: colorlog - stream: ext://sys.stdout - file: - class: logging.FileHandler - formatter: simple - filename: ${hydra.job.name}.log - root: - level: INFO - handlers: - - console - - file - disable_existing_loggers: false - env: {} - mode: MULTIRUN - searchpath: [] - callbacks: {} - output_subdir: .hydra - overrides: - hydra: - - hydra.mode=MULTIRUN - task: - - benchmark.new_tokens=100 - - benchmark.input_shapes.batch_size=64 - - device=cuda\:1 - job: - name: main - chdir: true - override_dirname: benchmark.input_shapes.batch_size=64,benchmark.new_tokens=100,device=cuda\:1 - id: '2' - num: 2 - config_name: whisper_baseline - env_set: {} - env_copy: [] - config: - override_dirname: - kv_sep: '=' - item_sep: ',' - exclude_keys: [] - runtime: - version: 1.3.2 - version_base: '1.3' - cwd: /home/ilyas/optimum-benchmark/examples/whisper - config_sources: - - path: hydra.conf - schema: pkg - provider: hydra - - path: optimum_benchmark - schema: pkg - provider: main - - path: hydra_plugins.hydra_colorlog.conf - schema: pkg - provider: hydra-colorlog - - path: /home/ilyas/optimum-benchmark/examples/whisper/configs - schema: file - provider: command-line - - path: '' - schema: structured - provider: schema - output_dir: /home/ilyas/optimum-benchmark/examples/whisper/experiments/cuda:1_pytorch_64_100/whisper_baseline/2 - choices: - benchmark: inference - backend: pytorch - hydra/env: default - hydra/callbacks: null - hydra/job_logging: colorlog - hydra/hydra_logging: colorlog - hydra/hydra_help: default - hydra/help: default - hydra/sweeper: basic - hydra/launcher: basic - hydra/output: default - verbose: false diff --git a/examples/whisper/experiments/cuda_pytorch_64_100/whisper_baseline/2/.hydra/overrides.yaml b/examples/whisper/experiments/cuda_pytorch_64_100/whisper_baseline/2/.hydra/overrides.yaml deleted file mode 100644 index c4bb64aa..00000000 --- a/examples/whisper/experiments/cuda_pytorch_64_100/whisper_baseline/2/.hydra/overrides.yaml +++ /dev/null @@ -1,3 +0,0 @@ -- benchmark.new_tokens=100 -- benchmark.input_shapes.batch_size=64 -- device=cuda\:1 diff --git a/examples/whisper/experiments/cuda_pytorch_64_100/whisper_baseline/2/hydra_config.yaml b/examples/whisper/experiments/cuda_pytorch_64_100/whisper_baseline/2/hydra_config.yaml deleted file mode 100644 index f3c7fe0f..00000000 --- a/examples/whisper/experiments/cuda_pytorch_64_100/whisper_baseline/2/hydra_config.yaml +++ /dev/null @@ -1,66 +0,0 @@ -backend: - name: pytorch - version: 2.0.1+cu117 - _target_: optimum_benchmark.backends.pytorch.PyTorchBackend - inter_op_num_threads: null - intra_op_num_threads: null - initial_isolation_check: false - continous_isolation_check: false - delete_cache: false - no_weights: false - torch_dtype: null - load_in_8bit: false 
- load_in_4bit: false - bettertransformer: false - torch_compile: false - torch_compile_config: - fullgraph: false - dynamic: false - backend: inductor - mode: null - options: null - disable: false - amp_autocast: false - amp_dtype: null - disable_grad: true - eval_mode: true -benchmark: - name: inference - _target_: optimum_benchmark.benchmarks.inference.InferenceBenchmark - seed: 42 - memory: false - warmup_runs: 10 - benchmark_duration: 10 - input_shapes: - batch_size: 64 - sequence_length: 16 - num_choices: 1 - width: 64 - height: 64 - num_channels: 3 - point_batch_size: 3 - nb_points_per_image: 2 - feature_size: 80 - nb_max_frames: 3000 - audio_sequence_length: 16000 - new_tokens: 100 -experiment_name: whisper_baseline -model: openai/whisper-base -device: cuda:1 -task: automatic-speech-recognition -hub_kwargs: - revision: main - cache_dir: null - force_download: false - local_files_only: false - use_auth_token: false -environment: - optimum_version: 1.11.1.dev0 - transformers_version: 4.32.0.dev0 - accelerate_version: 0.22.0.dev0 - diffusers_version: 0.20.0.dev0 - python_version: 3.9.17 - system: Linux - cpu: ' AMD EPYC 7742 64-Core Processor' - cpu_count: 128 - cpu_ram_mb: 515637 diff --git a/examples/whisper/experiments/cuda_pytorch_64_100/whisper_baseline/2/inference_results.csv b/examples/whisper/experiments/cuda_pytorch_64_100/whisper_baseline/2/inference_results.csv deleted file mode 100644 index 34f95b1d..00000000 --- a/examples/whisper/experiments/cuda_pytorch_64_100/whisper_baseline/2/inference_results.csv +++ /dev/null @@ -1,2 +0,0 @@ -,forward.latency(s),forward.throughput(samples/s),generate.latency(s),generate.throughput(tokens/s) -0,0.0911,703.0,0.966,6630.0 diff --git a/examples/whisper/reports/cuda_128_10/forward_throughput.png b/examples/whisper/reports/cuda_128_10/forward_throughput.png deleted file mode 100644 index b43723e76f3709f1baf49cae7e0e3bcfb41ede1f..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 57908 zcmeEvXH=AD7cIsZBdB~aAQ}snQKUyvkZv@V0R^Q=6O zzuBTAmi7+&?QCVF6Yrv{X9G$t}=iBP3LH6^Lv-_QioC1cFy~({lEEc36Zqe&N#R!@;~Mwv6d^063@&% zvuyg}v%fuGwc)H~){zrGCECrn=)T~0gM1e6t79IXFG>f}-g|$pd>@$mF}={qsVbt6 zGveW1=w6s670wxP2@>S>UXOdASB+4dp{~%EuX%V>fB4RE{OiAX7L0$z`mt!zmoFEc z{(t(-8Y|A#Qw!A$3=9??YrM)j=UlQ)F2EwwvL(#9iiz)@oH0+YG-~*k|Db!PxvrjG zpth#6Q!sl-d#qAId}OdkJw(PM zYgA|MGMt zQfqx)M5t!R6!dGzSQWn5e={tQ@#)PSqte-vaZ3tc#-J?8itX51nO67FW1v1L+BDlE zFLy-6d;MZl&k?83Z|-C~HE+h_*N2aBRLAlztuzmCAR$ zkSV#fb>x+XuO*apJeD>Cb6DI!WGd^3t;e7FUi?#=S8h*>SB@Q%%aC$53G%>*@ zM?ZL*RfpJR$y-`lzI7_wRdDvz)eWbHdsDI{RkAhppQXrKf!R)MZZLcb8SR#|W(x5mC>I;kM3r&k=FO=DQmVldXgCIXB&hdI!29#zI}X8UmV9 zYVLbr?I!lTF}i8;;Fy6+eSnN(V-c^p)XSl|J1JH93Qe&|MFB4`eBynXv9S`9r_OwF zzsw|LO_{Nb)92Sr+;QjgrRR0l_Koyav)erSGFIDFDcwIPT6gPvMa-~?cgO6WQT_J0ro~ML z{NE+VT$6XZ8ZB@am_iJ_Z*E~9*owWC*8@8!n;euaxulq&LHZ(qabZ{y=Wd)vc z8+N9cTS`yktEJm6ug;q)>~^7U%+9?7w_8_pg1?YgJoW;j!``uxKDJeayWXD4SJyWS zWvd3^PMYsNndyS1U(jRFo^}kgQG9!6yhKW+{ad`#CpT-L54ZP=thK&WR)6iigJn`Z zpGymlRM-SD{gy{Q9Cgq$unc&8$4E}GEz{wKMp(Lz(}S_mp|@CD#aDhivD)U{ z>fD8{)*W5Fg^F#F`%7*xStS~Zclzg!Mpy0l4spUm?+Ui0s%yzjO}0pcn^v67Bej>< zKhnysy<0Jk_Mf9~HXu0Uw|{t&^mcEE<#2z8;2+7#z2!^s-D4-G&te3k90`v3AdP;2{&#M+5jDHY#eDc)U6`Rh?x#;x9K;(gIpT^l=*4XDR zaIV7`6buh^IrM)x6%^9fQ)bFGacv1W!LPD*WT=mQ`CwddeAb{?g>}-l712}~ut@WW@EF{G zthX8eFlbTj-nXr*;bm9pPSYr(6kC_BmvSytkJu<2X}*+Hs30EU-fJ*Zsr_MWW@kQMi|2c%vidHzD*Mq?;n2jy#OVtZR%f)P zAFT2iYO%k)oAWXxs3oOZCB!7p4e$wr@F zM|VBwl92d4ZpZaa9jEvy-z+>@bG*ByLJrTk4O_thtGpg@^d;sqEKt%YP|>;O;qm>I 

kp5cQ?z_e<(f-xehJXZ8=>NL%vdY^+(K)g)3}jYu9@$?J!073<)9xl*{v?z!!gpk zL)hE#*1clqov01Be&B~j`1HK8+5h%RneRYfNc)(9I-<~tg4DIYFVl#) zGjYGOl}Ysy@2={DW4~Izz}(Ke{jzJ7kCh2bWZK6==E=!5$2C9O-{er`-MKtKNwcNv z7O*wr>ojIT^t;Pdw>1-VrXM!MqS~YDvi?G=iiTB5k7Bs3)dhze2-D(wcQU=oQegx z+5PPaZ?2TuTDe?#WO{vHd;GQ6v9GGKeI1L!tQsZim#+R(nCDWcSC#46+7gm2ncQ$G zWBb;J7mlA_9$Ne3mq+e8MS7>6^ys)#czRaDlBWFEaS0LO9?x{S46~nZ{`FzJl@3;5 zx2UGCv#Qv3|Jxu%bIVCrwm!;p40Q>HIV4teE9}?ml+>LxY=5{~oRzaI&NaTI1It+O*xHplf8MjTRu}f5gjQA8@jK{eH z7VdfT?5OmpS{61p$tJYPCn5Fe*0_hU zX6n9Ow^R=8uJSgl^*z{l_2rR~pMCX3Tzs+-Em6O-LgzuWNpZYYiksx!ooHd;lo)K* zDBYT41+bi{h1=Td@4b3@W!u(Au|a0$R+H-@eSOlqoipFv7K#>)(mmIuKXKl!`WM$f zl#{#Rc1K?7DXZz1)~0O_hlAO5yx6m~a83$t)CnJun>#f!3?EmA@38dfM18l;N)IO- zWcAYilbenRZ~gF*d(W$ACoD&IzFoiQ*P{mW(yDy+Cl%tOPJBOV-Zbn;zGq!{2rNd; zV%=`D)F&GQGF$T0#ot}kb+0{VowmJfV}PbxwZHOtoO@wb<>$SyuDs(*CZ1BXu$iu% z)wL1cHrcEBw-M>BMe_p%OUCc~hCX(rib3Yh)P~lEgQF&##y1ntdEGuTAv9k*TQ4EA z#V7*Dd(FkQyLWDH8ol;JuSno}z1_T{+%g3HcW;SGxMo_#jK1E^Vnm|*zb-jC)A`9) zg`sm2O8P6@iU^=WZ;y6Yk%BlP#z`YYQ`D{V|EHppKw|LuwEozLd?zD={^ zo{smdIZj763K1yh^h=e2t~<*j4X#&obnSlqu=`%aT93-zDY*W5qaZcU=Lbf(JzTZK z0Y~;#X@XsPDi3T2F51>uIe)~2T`MnT_xFs)61^%pyXf&ck4iyqO>W?WE89v7PU%E< zb!XR}-lpVf;n$s^&vjJDOsMjBd3a83Yq6n8wohM6d#RmM$(gUqaQa(&8WN&$P=rc- z^Sd8e*hRV56?MZrLccsR_n&bWQ`bi9Z%hw!%TqlybBpP_vKIe^ld`)DT^S|MxmIOu zDNl82#P=M(tK{o|U55?Z_xarA%6;(B=QTg;6y1fh2#ULOO^t5kgPai)_paX#%hW@Z zE`yN-)Gd$D(>2k1_st5!zP81u>!wC}nVyY@*XPm9%gb}aA#%b$CRJJ9?#23@0wG_R z30w7i9TTng=1@}Sv;9u(k1UFH>%szI`UWAk`?fIVxEYpowMGtv0awi0-*nw$)ciMz zUN}JMZ<6zqADLT=gDh-?-+%DYAs2k$gE!crGLI^6_Zve-Nb0Z8uM2ZGO>ry?^J>ZU z!~t+Ga|nsT0gFK#=Z7=gd-iwN>Fla6x$Imv1A*b`Cxs2%cUP?U?v9GG9UWKF_E48p)ywcl&DcWa{My1$i+ULIhQe8;8o`x2Zhho0 z@OMe8O#KQTcBAJ&f1g5-x`o!V(AgJPP8%a1>D%vl_UrFI@V_3wGH) z=`A?77ml6V(ULpW;$*8w-P|gy=IUwc>gqqM7-m^MPO!_q`CY$jB}3jq(M_BR(@PQ@ zSgZBjqhCF{zW?mv040TZ>xaFjxQOQ3%hLp^dM$Uq6!*-9t5y)o5A>8~A99dh-622N zA69n#E04wsw@rs9%ulW8HZkb7pCqh?Ky_DR zMf%Njtf3}6U!0}E=8`kGn}&pJTaAW_ba#9Nkdj(30%}}ImUr*Mz$we+5vpzDJ(c7Q zy0*J%(UR_=9>!Gfd0N- z<$R^~`JMnciohA`_QDC|AKs1SRaUdOWOMSX4GV$O$24X%W#J3Jci1`WI2DE647T2G zwbb+~aLplc_hZWA$9m=a5QLsx@6@bP8y&b<8~+adeQkG< z{g~=K!`qU%PG=8JG)mrEm5X)OvEH8UfV;MA)X93ATJDHL=?O#-P-=;LcB@K(ozwv- zSe@G@nPj*}-w@u+K)mO>xo`-p@roDSL2713#S!}Mu$ejpB<<7;aj-R49GX1j+oQaoYAuw}R<+YHkc0y$rBx#*XJVhIe%LRgS8y%I|K?0@$AcXZHp% zNc=3+vGBA5d|`ZZXEcBv7Il$N+uGU&9PjkrRD2PjxiwFQZ+x(PPT}uAeab^O`LqQb zaGt(l7NQQ9NlmhjUf5e^j*l){&k?rdDBG>MkO9a*>m!5^-#mtJF$d8wI%40}75eQD zhj;WM@C7N{HMe%T;?~|etgO3O(+8=UNPr;NKD0LuAuau-npv#Ij)sO0Kb=x@`N4{O z?44cpDRI?kBjp;v|6J~){=uG%tls#Srd>(BGpfEDn*HwO9qk?EeW@u*DJi zl9H_54Uffnacg3dOPqezel4-E^%TpOTP0k13{d5*)qyl2^=_-pCx{@<44OZGHvONB zA?ep%J&2C>>FddP*<`o64mEd=rvnD->}`Jj2KG;c zn_m3<^tMY@)e>An-J@dX5>Za?iY#C>PhjQyz)1e7p}vRJFJ3nFS1CR5qPrT~AF$sC z%bR@PNZkQWBq!PS`aUr}E+5+!%D*qBJRgjn*Vbt|A$y2=NWt9%Y3m%6ezLLHfHzJN@K&%4*936VkOYgL(4`(NuG zQth!QMsRA5tF5TUBfEE-hx@2Oh%aky7T zOR{NH%ee0g;c)0A&aVhRT?KapShbvaCOwmG*YocKHc5^l1&RWON3LnDWSN~qAizo;6WTpK#yqw$kY-Kt-Td{=2)^vnXjDZ^IYN7`uP^)AN>-);59wO91O zW2D!JjMcofj69bI`~qZ2G?9XjJ&yy#T%I{?&c4T8FTAEQ2gF&`ni{_A{?PKmfep7_ zAi7&W?^Dfjau!}pDVj$hxgE($+v`W_bE>@DvC_fVql}tOFzwZ+W^FSU)c|ssz?mz* z%53tjUt8%crC0{%L;zD1<<-l`SuX(SkJEbA%vpxoV`UPTB9#7@= z;f*F`N3sEJa@KvCwe9PsJ1?GW{`Ig(09c`Bk##2N8N$hn>(*gS_RI$1AtakKcuPQQ z#maVGtF*pYS2An1^OGG1%H6f=uN(VF_W2aRfv#@U>A|b*@rBE_M2F{Hp?g{}xB6ep zkOQYZ+qdnSM^jeGosTtQU?*a@(p&`8NW=j}-`~F)=xyus^!1qY6TJ@tzPhakt`6LF zKiMIFa?hk&_mgMVUYT13^6`1dEuE4fa(;IHfh*>zk~x4EKdrj@{F(~`{jtAIS$_JQ zafntroSOPSyoD{eZS9Wn*LF7q!*e=3zj1)MaPWoV?zoBB^Q#t~JqF-twt2qc-98`7 
z45Ds0e%*%n6tytAQTewaQ2@cJOy9o!`Lk6Ty;x#*xMF*P^LY*Nfi(ktb2s#-Tnu!z z_`nYTN8D46G6MZG`h6fk>Ds^ZkRWYvkh>Vql~@Bj{Xk4;KPjs~hL2r1qE192Bg-?-grZ4yg7ahr`XHtB93jQnh6 zwFD~HxaU*e{lC2I%&s97=CB6NzbxM^_uLHdB3Ix_x-+@kF)iLs< z0Jj5?(PLKV+~EbBlX8@{0FV-bkSjVh!@&rWfPsN=1V z<`m#wLT?`)=XCR5A9^B{Bu!9V$ovcI=#;5^^j+nAyJS%1-Jm!tX} zPj)pt)*V-%lQy&@MGP|8ht^_tZ3?dr;+sY2Mg+wHMVH9R|{ zp5Vylzr1}oA3p8@99!h&jk}Qk6_KSNJiB=1ao*dsstDYc7y(Yzz|a#80fttI6RunR zuTPWT-5wVW77!M&PHAq3BrvK1YX!rEERwEBSJ?Uh@J{Ve+N-aRj+fx&TZ% z0Lv1wwr?(inC13y9gF)`MC^U>Q~E%EZwgkfj^rd#0#Z@6bev!Ug2o~VB<5fNGGHEehD!wajbn{SQQ&V;K&at4z1 z?33+qBL16>=$kUb@ZUDO1n2D^J_jYqisIzGoj&0VBJ>}aoT+YJl`vYVs z^*0|h90LHJb))n7ePeCS%bV~0vJy)neZyW9GZZGw+ck|_uUL!q@Ccsofz42C++9}` zu0WDo`ek?FIq4Gc7LfULe^T5Upq(}CP4+;)KggjmB8@hKVh`j1%8%`Q`=r|d8gO4V z5TCk@Z(lps0fwPeuKC6mxovt$x_h0Q1FAS{&w`-xM_p!&xs^lEO_UJKKAT~`)Zak4 zCc(FF+XmDrG=V2#r7OhEMTlBajU%>c!|k8P=Im*0svE9sv+`>FF9Gr(rnXAIHyDj% zCc9eA{e4e7l98JmAiut;8Xac38LNC2se%8uhd)=GxBK;)t4Q({Kr3ja_O?ARQ6Qy7 zr|I9*-k>~6uldGJ^@L2*YJDx_kIb?Y!Gw>2(=p!+3ac6!f+ndlq$)YrBbq`7u6NfN z4p`O0j?V(|_~Qa&_I13m>cU4W?=70(QLze4(1YmWqaB7-ptRR4O)e>g7gYFk>Iw~- zSt1 zG*hSXeJjCGEexIQBH!23uAYg2k~6zC+d`qGuejl6R)RwB{F%O4td!)|w)Yj2ypX(g z^;JjaIM1I6O|uT@D@n@j zc5fcZ;-!ySR)0@}eDT~YIB6Xu=DS|qcPLvX@AKWZgzK}n#V37B0@^_z)S922elY?6 za|qC2V?j-5sQ^ zX2UDK*W~@}<4?ICoz4v7zvbi>e8P&G^db0|Pu_6bLj1&vV|+BqU~&VirI&6ttOd|1 zfCCL0HDTU4*t;t5{Q8WaKAq}#!*2-l*!zhaZq3~Ka2l&ThW+o34o_9lb+g}tsvUfq z25u-9G)|(-m`p_vQeW+3b52)eUU-012>)b1yt) z^pc|!w}8NAp#bE{m#C%fIig#8GUs#FyG7^Xae{-}1^C;qg(Xec#AL=F&#KkumkHs_ zNbZjJZo8Kb_?diWK(84==R6|77P#UhXJmF;a9Tm41+Z3Q`3zBJm61>hUWz znt%u44}y?MYqv<4?%{-=2e+6G=ivV0r>TrjRse|Vml2Y8uSNjLl(gi>0Pl%vOniG* ze=utNN$|VTz?uPiTDVV3;yF3v=U4_;hGekQx=0I5fEC8fTAP>j0s z65DBG+~AC(fhJk!zP9J>Y2FW`HHuqS@iwl6$U2Qors^`W%<1GZF42 z_x8|Lr#I^fV=Yj41aepLYW|vDZ)5k*V|7dsAs8uadXX1zIexq65+N8EhrWAzPzWgk zInB_!ac_iR$%Nf&;Bm-x=X|b=FA*!*%UyV$!J>_Ga_*E0LGFVb`X!2qsp9z3m^ zs`k4PEFxfujgS`~a-h8v&9QzlHEhR1oQ) z0n|J|iFTttZdpT3di*rbuU?@$IdG~;k%D&SO07(NQ1;4K6?<;7exQpbdT@PzCp`VI z9J2;r$x9U-P*QF#|ny~dAfd^{-`;*~y zqCj=zw19`#bIs~@yaYdD149vt$esZcvfdGrXM-H=1hVADsx%NNV!YZ$?IjLfbAy9| z2kzH$Br?E!@6O7Qb$iO9Zsyeq6e_NHZ@^*qwWq<{Bv8Y^bTZw|zP!dD*dn{Ap4!5UY!;M74t_Tr-_-I@(QgVGM=*5RtAaocz7$EIi z*7EjxcNT+j1Dw4)_$znD* zyOE#IoQAD%U=0sDs?=a^voj}BFGNKLxA_L?`&j*byU1e5Ti=d^N2L#j`w9nSnkq~N z`in*(g!M5xHMe3_ZU63K^GHw?{?`(+`}ZY9Ugw!IrypJLlcuv`kH?~>oLSf`Y5>Z% z#$9wnaiI<&xv;tVRtl`r5hRguMbj_yV7RTF*#r9U8HGq7)}8H#(1BI#bt8-G${|r{ zMIz@2LThs51IfkmI#+T3Iyi?Ot4M{>aM}sqD>6PLe)jpC?P0)GVid3~H-~n|-NAtm zCi=;*OPCv(0B%7jB9+B3l_cS+!IPO^dP^LSsO*R|QJly3z}aZtyP^^Msfph7ITbq= z^+Qr%pWWACy*?BPy+-qhp;cb3%SHA8jguoSpaTgMXlF~@r~}y84eLM249~l#O||06RefDWxl>r~|u?J3?v|3LjUSoUdh!@E<&Y ztR$wy6+C+W6`_cCwY_*;!F_5bT-`l*G5Cq;`EGR?nSz+KBhcaF`s3?Tpp|q zX5C0b!*q)EbPkSpTgoU1y^22S2*&9_4FOvvUGLICAdAj*ed$M!m75m#0!iBqK`yCG zGAu~*9G|ATgc9}HNQ(uhXS!yF-V8kw6p#uL{M_lm;h+NavJpz^ zxVR#63PKTU+K@3UMTOF>?cNgQa@!rh4KIV`rBIsykXwjq%5lEx;DkMvxNOFgP6*lj z23#Ecn+Cin_3B#n>6B%6;e1Zh{pmOskkc0dQcef|u@jpa>^sm~rThBAuPAdQoI@3h z%8Cbo4ddPJP9l52&?(4;cVwh-LZOGAe0Jro2R1(4Vix|cK_mik!xSNS_aK4{auGT% zPcJ*8h$W4MsCi8lM(leXvpAxc5-(V_R^lZfj+`tRtXqG!PY+A<>VA~*yDlC^G6V=^ zI=o}Jq;OExtbkb7q)*O8E<~bh_~R?cN^Z73Y=V~{G=#pzmkcy(;<|4yKTuM?44~)t z4cSNOL#Q`wQ2r*``vF0gWGmhIDbTG88{)7S2yoR-VXHhDKi`z4D6h3kcN5RtUeVLWvu4Y?(m;ihkP@DC2y!4)lR-+qrc-lwZp6oAJ*oS! 
zBxEJhnLw}oFTpvc{p=)r%?1|yUbp5_BP1Ry!_ENdsS?gy2M>wpKT0-yxck+8@seS0=?jQV{9E9t&f{e0c0AdX zLi)YA0x9izWa!lE5I@4>8}R&21Qhi@GH286pPk{0?D^Al;jvl zGdrR~0vKdT)aSH7WhLmI?Pmz9cSG?(%q+OSE!t2_x?Ti5$W-s%j>6}^eb4HfO2*>% zcibHC&mrGO)gX)&Z91wSjAJyP3_|)f+*uvg#7OFgc;5Y6M~t2r2K1$5;g{x8KD$`2 zb(t1Q4)ttj3wXVaY{%iCW+3aNH}e{Rpr{6~G_k3Ai4Ls{9`TLBUS z;dEjdUK`**izT;+GT|=^FJuB7M>!Qoj79z8pNC5!$yj!3MwCt23Dwo63AWb(6Wx=F zPT6%Xa;Yu#tzsd1$gmG;UPVU|G9P2OXEc0l<6cMs2bcW;j@067Q{I1H%ZRd@6O`^D zmpX=1sceUgWEqpIAT#J>so){G{`2Rv)R2bcEgG$VLcm&NE2t9B<98ase~{OAa{#1Z zLCUu|468Yh2q@j1uogP49Y6KFMKasd*%S#Tg0%{9PXy@v+M@7`yQLK8;*qbTUUK$# zX}$IiQza1#Q4n}(2jgJP!}1gXYD!a9q=iCJ$J_F$-FtF+LFPs9kRP15Z!?z+RfGxT z#sj<=7fDM79Oxp0i8VRr(e$hv$S88ov%Oy;nCDFnu`$)F1N<=tU&e$t3jQD+-k}9& zq7M9C%nnULB>?VqHnXxpYB&{~obHLDmCnu=zpuGuqYLlVVUi6|IUkAR|MSeP8OcC0 z4nQb5FF%_7sb!q`rDJP*uq1KD?&r8V9acnQ{E#i*hJ`?uZKn|t{|3nxK zncy*;T`?*j#sDgZTnK`2LT35)Jcw*LXegP)O{Hoj9%U+V?Hrh?G>OLEtd6;#7HT%r z^_d=W5i80W)vHXE-tKNe@Es2j*ACB zkn3YEOrf+?iVFAz*Bb(=G~Ln;eh10019a9#zzK{Uwc#GmR7>x`9;rU2&Xnj7lu|2r zpcC6#$yw!VrY%G8VkEBxT{2*GN#`Loe{VzBziBI^3 zVmOb}Q2*93#DQ3T0Dw*vYtC#+*8yu!(REWgN@Y19c(kSA|0ZNbw(pbS1xo`kgZ?*@ zBYyV?r=kuw(#kM(pdI=NIvyX$m}*%^64}EjwxF`~12mrWVFY&4(ghvcx8qz+2%w`} zLVFj(xbFhEhEZN9O~XA~YUuRmcFzRn~e`iDCl1J*mIR*KM;;mKzvuKu3yztExYvJwUai)K* zkf8@ZyQ5;Q1-`hjJFADtg9;$4J&h8@3`$&?ehW_NdAfh=Mf(mka z5BOJmsNJWPI)3f#k9S+uv1A&x%71OhIQl=^;L{C+4s7qcpk>^?Nmo z4lq8^(qtGg<2+7+Cy;qSI}-qglOq%6PZ7@B^X4SJK6NQMv#A&*!Hi1VK9tI55IU~r(A@}f>F$xpMZw)uP3?|| zN3YB3Veq3U*-9pkG_!jAX;&6N&0Kh2*A=?r6JegUr?!+OJ20a^hlraq8zK$UEF|O1 zaj9B^oD6a;PXW;Cv{dRwm=HQa-6I5`ONNEiuuSt94%`q$S4()-5OTsm#mt26gQokh zPNMcb(tNNJsE{jLkr@2%jz#&+!Q?4;_#X`Q|2a?m-{D~Y=QCmc{|BSx|E%Z#tmhwO z%73ph*z1lgDMrl?G+@yFD{%Uj1IT5gp;TwH2Kbq9rY+R2q#I0GcJdgSwW3k@XEJnKaAVH6tT4xNX#jvaCScPa&WVCXcbw)bQ9@E9ksdu zV7+WWU^mGJ?^E+y2;YrFm;aibzjeB9J{ST==Zodr$TSmeQUBidrc4L);8M}g<8@~+ zb4rC?V`Di=G}3;@A3y$5BtR~`6m{YIsAFslMRCgk$vJ~tfJ;{S9m(w0UfHKV_bqly z8|qC_EoxKJCL3%8C5Wj|VWZt+9MKC2M^N7<<}-no!WnyD81c>4?)KIOp9_TuM>CL6 zAhk@nnMpnoI6(0z0NlI41olPX;U~TW9NUECDaRKKe@_`?m=Atg9KZxjVPvYW7NE}x zsHPGNZi9g1WBseOT4_55F5$xVl=&cck!pld9U-@oKAZ5xK<7HHMSJq^>E3PlkU}Pf zk6xJ|Iof>7Rjog;^zUveXMCai@r0&=!qzBM31UE+Gbi2HhrJAfxc3~AZ=ndt zMXJkzczcZ$oe;QFkdRd9(R;GW(MLsrt%G-Kq7J#Q2-uD&SYJkkKnW7#jih{w;z8RcjE3 z*rRSrq{|P3%p1ZlieTCJ^a?3El~C*tbBgeEOj?e*W-HW7=Y0qIS=IqR83F1@3+!ZX zS%M5E`iU2@pIM_pDEWk40qDq}&fTRh>tFN2_P$Jx7&`Flx3(qQtW4e?Yrm-H(Y06e zE0r4G-@aU;#(zKnDSdqL|5m3num1mDpY{J-{eSiyHn8;nZg>Cx7pzVg6#4WwL@qaY z=~}yi9t&2X*^|mDAPHCmv?ySSsFPE9yLf1r8D|ddLoKiXsa`Pq0Vpd$JNGF>UTMCC zYI`fRevTBdP|^&ic>RPs=SSe-C$fH_SU=7=@#|6Z>d!E&!qFEEEr;4l2O=mtY037* z?>}|JwWt05sf=gxi`w$jur`EwC&-W_zs&|WPyp3w2t`9^7J4NbC3~Tqapfnj)%}~Q zyG>kVp-9d!dh)1}>}}9>N$JNs%p5JBJ{znh1;>j1Q9o?NsAXW20vXru zqSf{|${vQ`i;N(IGM+BODO?4~$hIcNJT?dm(xFCXl1Zw7?9qcL;}Fx4GOsFUY$h~6 zQ;K>#8=FKmfMxmU-v|-JAp2w^{Gmzj7e{V=j9RXI8;c;Ucg%jW{`*XPMe5(trV9lG zOO1c~SUZ3{1`Y#MB`c7!6a3-k5$v!)X}|X|EJF2^58=9D)Cs5+=dogu-U5cT4hpRTP;{q|eb<5JF9J&%0rQzHoFRjb zSowTHM`-%l*FeBP5s-j&RtWwh(6CsGF05RXgd%Q@ny53=OGPKFD}Ri zX+piquGbG$QTVn6BwH;KdIOHUf@J0r$PdzamIkQ3P|wCnZz`B4e78x|4RFmEyU`)^ zK`%r+?tpXAC?u32_Pe89Q4FE+T~t2<1<(Vpp!kRkHQQEEibz2TsuuBHHCH0r|1NR2 za7YhC3CG#0aN*Y@?Bz+xof2wK63Z3ciF)WNL?_L&Puk0|JH*fwo2Iv*l?{@sELDqV z(45AjUh{7v4Fx>F-L=l7A3>9SN%Kf5?Cfm>v`88>{r*nLIn9c|b3o#{&;5o`QcTH< zC$v{?Akw12q@2=qcci9>o!$I?tAS-z-ZlY9<%4-`%51xDtoWLQ9(fyxhQ)bMA)H2q zN4eZ{ss5T35Cm}2hlg<%8^ht&oVJeV~c9!|N-H#mfC(Az6=)$=n zJzgl37ebKCRwxoho={D>qoh7i@5yt(oHBl?K+4XvX2qaVPh=?BrLI zCd}WnOay80bn0^qU{V`HK`BQAZxgN_8q6y!kKxYvta$KNB>N;aPG_d(QW!IA)Z8zG zEocX{2LL5_-3}gB0?iX?xs0WKER|EonVt+noo-bl80UXJwP5C!8~h2u?~Hd}MnbW0 
zcDaU-0!`^4ivuGjo}^Et@+%T0RL+f|a}crG24dcG#k^PN#-pQBYGqLO*WlD^(5_3eJCL}Yuphn;oS_b6 z^6{7N8<8t_0}VV`2s?~J@u+)hyS}eNS2wks=!^TO1r%2ci&=1Hr>M%CG;{>Pd@X$L zSI9oIt9_c#&*$!>gZ2odMPz}|_2rBRC=@~DGYJKA)tvv7y*6=8kYIr!WQr7iIUv@R zp;mRM`11rLE-MVWYyRTkk$;!LA*!F406lcn>xr_@K&TpCf7GylFViV6@_z`SBd1j;flF9DJWGcMt(oYO=Jrue^|nbHFPPpFcOW$RLb$uoD#R@&Tz0b$mn}kHY#{3kR#2t0cH-7fud4ck z%m?~Kofi!0p`?KIL8-dgZ8xMEe~Z3sa#y`J_#TJzg5m{{O&}MfxfvRbrU;?uP|c9# zt?xe+Rn61vz}R;2ia9+Q8^nb7Gd!B4t?GYW_9j$2W6dv~X6^|`l+yw(d+JfWmZhpc z$iApwR4gv)3+|)^5sI@|#CfHt(6YyW%p%!~ZkU>h6zi$`N)M`QT^>Gf7EXZnUoTaK zn!h`m^OcU~=jE~aHq5ubo05jVT*~??8veVbt^t)zC+ZPjoeOM%*QZjNHK0F(j>%RB zvS2h~C^w5pcvnek=dd{@1r&Q7M>W8^8vO~R;Pw50V&_4+bYqx?<=I9=uBD)}ji#eN zO8U^4#Sg{%olrAS%o_pj0R@hzzkF4>J_-#m>I8FxZjZ@lIV=n-O*r+JyMrm9;UOR8 zH)%1Bax-d74-6Z@*62KZDce6o9id-@fMCo3(a-tvy>Fp1+D?rJTe1Ia>VR+B*W2#P zJ8;?y!cfq~)Te4}p<|zYK#F$GSF%M@@Apg3^x!M5S8-PsALQimdGw>gOa}Wsl97A(qB>|rkAce-C4bfhq69smA40X!o zAP27xdRUgZqvGpz;I~1Ql!mKO@=$M-MWlr!7Pl!UW_^%7mdHWC>kIf#r;z`;f% zv4}c7h|)F83{@9ICP2kXz5dfQSGz-l+|P5TwBM!O9{f^%cAx%XdH-eE>fAR&de|Kc zjZNmlZp8XiP@P=<(;k{3;2_W9o%pxFUDx95{Mj1&&dGTLi=P6kE7Rq%!rRq@MQXuZ z2)2AjomQ7i4C~=g8U!E?s6);8x1r?(qS6*`bkT9b5IC@W?oPZfW{tFjR!OEX7a)B` z1{gU)_q`-IUbRe^ZmOFL#g&v>#sDudV?*2vj>|@wkUD_##K)*X<~>5%tHn41@kP?+ zInsippIv<9m@{)Wmwds=ROoI==HNWMBgX=|8#l~}I1kQUdfOwRZHrpitXGBHfK4;v zyI&r)S>Ff8aGafvoD*cbx7uG1@t7uo#3dN8eTuLJGm9J`At*+q3&;C3sjWpdIEV<` za9$d;d6OXgQOg2gSrtD{mJ2~DWQb;_q`v)7W#Oyzd!U zQfafy+iYwg7mf4SIIf8kF%Au6P_R&jL_PmX6MQTy7J^(zk4}*v;Uq1LBRq~oLaK0* z&O3sQd1C!Ty51w0E(hj)UKp-Y-1Y9I^4e_Z9D+7}Cih%97J!tanK&*h40-`!)<>6A z5H>KT;`xBC8$mOb9Pkf)-+M=5tttIuZcY)BRErF!Tmx{tT(k^b9q^5s9xI4wk#Uhp zLGwTX6v}L};(x-Y7-4ORi5Q%iOz(l795HGR+hy2M9Q^zUU4UGoo|@3_8Q2SXhpKgl z*$H7fa+sXtXJ+Au<4Q;2-cE6X6tb#aNzQ01;$%Oc%LzE`=TX*Y;A<_-?w`e+3<{Z( zsQFNCM}wELx7QYvOMo5XiXyHe7kku(riX$bUH)QGMb7 zOH9O#XyMbKfq|teWp7rfbFz>MM$i@Z$9tvj@p;Iv0GY5H^TsZu&ABM!h`?XWq6rbG zH0v~g(dP)d7dz1co&v(=#Quq0kjG>$^{&JcvyQ+{Z%h|c%*XVWlkCnaLnB9k3ney= zP;KQh>cEL;c4T5I+ym|&=RUAr0PBY6QdRsEl*(jC3giB>VrniY(xIPCd_Hu$w622b zZevueIeb%$<+;n=tS7Bfys^I9B4$iD+Z8L~;J?e@xZKdPAx5*b8Y23QVrPzP1P2q4 zz$t;`67rN%`+7QI%@&-wl|Er>tqKNuBv^JlTx~+doV2kF(uT>WJDDHDB&atN-Uswy z$|-)znLkK3ZJu{y)%Gwir;{WQBl{EI3&{2+epn5a+myM1jv)u2_!`pQ)5vg!XED+X za@l!zqetcG?oCKG74+)RIH+CPh)w~_)N)~$6x*zr?4|cWs`3?N!nph8ZeUG<;`YN+ z-G@7N^OaW0`a=MX>`o#vuZ8Mm?P);n`~!gN_HcElFcON#7^~xe|LlGdpe+(`ln75B z^H6RQv2*RQMWdMO-$x&@plW+LMEIQ?V^@Z1;S30%cxXU>X&!M$x`s>5&Uwk97M%LY zzDsEf65;}<=!|zSn4UA6;$U?6MN-g+GnNaieG1bHzXYmF*fD|HQvzMKMF=^A1j7VW z+0#U2At19og1xO;=YLve$y)5zt*`uRh`<*J#F7Nm zL#!{wMOfubcb~tjeq}s3s=8zhG?0pEkc`rou*;9*SwuC^V&$`K4ChGGx39sjHQKOG zm3)G<`-O!q6h$-d8XlwRbzM=-^;P72n!E|ogi#sHid4uC_ygFjYD$8=p@PIsI)h6a0+yLA_4VNoi8)t z&TE;GfLmRS;T#BvY`CY^>fgycmbe%(|32tEHh-~nmc9)W)F7-g(L(bf8gu6h3C>4i zRxTQI^LgShp^9S~Ro@m4^Vx}7=P`l8M^At`tr|c6m<_TNq=NZ7XF1!q^HMj(niQQ$ zCn4>UaKIwmmG$~cTsIjsW6kfMnk*CXsE;-9+N+f5yTGiMl$voFkv+N;ECAs~&*`3TB>OVU%PS0Al7Gq8$MdLvws|Yrw2)?P3s(m;=m`3envaj1JOCOb3$Q0A?2x^kb;V zEUBUpEGo$=OvxZ{Br8hsI_!QEh4m z3G>Rf^-O?b5m95H27Y_(=1>?v9Ur>D?u_nu1YgSL zxEvhoXLD72x;Ajf-Qr4omtmP6U_64`%Tmy5aI6Igbm7DZ((*9i-Om%|&jwduSCd5m zLm)ZawL< z#i8K#K$o0&qX%*%PdH33tJu^Yw>$cQseT*}NQDOeROBTbIiDDQ%1om1)<%kw(<6C}0@_ z_;60j6{pT2c#;lq#pb`Cg}n*$T29T|y_NbHw4o4A3wdiCW0JFn6I)>(fuK^?pY{F- zg@VxS*S4@5LVh=i&PFr{FpLVbDz{#T$(61D{v(lq9klH8ptH(xM`C^z6B%`t*X>%x z0tWptdk)#HBHuR};g=m2Oi63XHLLHSuY+x9V8E9l8Kg)=yZy zi%(gtR1?3i~CBrDH{v0fhgPm2c$=E7ChhOlmSWaR$ z5qPXIl`9vSM1ovaJSouO zsA`b_1A{h8F;uneU1w_GLI+-%_!_Zh2#ivP=wc+I7@OmfQXs0HDkNdZxjSQS$(DIR zE=w_zrXQ`B$XBOI7c}iD1j|S50E%A4SfGFE;RN2pocJquwl!n zL2H&98%VgVPOvU9|oRi1WGrv4L 
zJpQyWJh2E2={6YnfNEo|aBBF8!?T}W_zOd(S%I$75M(rKrbVLu$(b{M{dQU4tJx4G z9W04%CJfh@&LJzD%}jIs%MHQ`IU^inNRZ^`h3UGB1ZD$<9gXt~0$%%!Yfj0(uVK7O zRqaW}lX27cjLDd0z9xSC5QQIy-ad4_^T?s$Uy9Bo{qo($#m@h^{LSrez8tDH^t=C1 z$Y@f@^73_l_>IE`tB$OMZQ5nn)gtGcPNS=wYL``BBLHBj)*?sN79zJk>vWx*H!I6X zMBIvW>uOke!wQ87Fn-7^8Ctl)Ryo-=tpECC-QhIqJnkbD_3u8MccMw>iTxx!{ zkWFNq^2I!reVZ7pt4s2PgRs=n2i@-0h+huj;rZ4cidm*-NS-t41+9<}slV0M)C`aA zXXTwV2pcMbtsGBr#01%iKxr@srErmg8f)jI#aysAz|y38s2UWcQxSn<%%6l{lL`A{ z=`N+Ck?ZDSOa~>tNTJ%44{rg9!4WU0`s)>!T3EYyy{>$u|BtOPbF><4Di@tm>bYze z`g-EfD{C53o?U93w9?qcvocuvijWn1W3CTtH`)uP^XNFz|gXQ1wzc3Iq&`%><+8krdO%itJIMf-H9VBybBcSC4kckw3VytnT zwPH|9d$=v2Tca>)Dd;I=7<$Z_L_d$4bIEkCfzv>k_rS$M^mNXETm;l}^O8--SOVM- zH_abSmR%_ESPZytw41~n+AdG&Foe2O2212pp)+0{+crMioGAhn(lX-@rkQheRA^8uh!w4WI37pS}rtPJm zwCJM5z##>|5hn$l25D+z6GnA0%!vsZNBG}-hqcV5u$TibIg10Fl~lRJ5n`9EwnPQB zfX0nNDT#en$!KVbhS(Qx@6$VqX?%S6osKaY+QzB|eG-yBy<%_9TpZZ#%efb5RHG=q zt86yv6E}|rZyqoKYIH#=+ZTFU7FuuoQa5KrJzLkSi%|}-CT!x+U>+t0pqRHv?z_vE z3`Syw8?K{Dz%n)e>f){)Xs~3}s#Q<~^jQy%OOR9e9WThw&yRB(giR|i9;y>R2McC^ z^Bw!k+4uJ@x1jx0w467Hbk>B-4iC!oVhi<#*UMg#c%iGXX+h*m2*WtJqJLW`BnGaw ze_X5ek=}@UHpi=}srg*Ke&ND2MMXtbHMKlYXm$K)BkAKSyFzzA11L5$QVM36O&?Sp zU6|cE{tUS9c_4J+4kgK6yx@xJrN%*2aO39sh@ov%5RkKt{r785MNh8akKx3AJ_x+m zs2g9E!Jq~IWp_vH+y#;Nq7;o&0;QHQh~~R5VGn^iwP?90-6wG>TsMwFpn%3pA}e<8 z;OZ1u;52qZPD8)Rk*gdsm3SQmOsc@m!el;2t~O2-TVKP{I*F z8^vhZ*Y)d;HHug;0O0cgD;KRC{_`Jdu@ ztY~rtr#$dba-QJ~kZCp`3;4IRC!1*b&}!KQZ+MwaHaWwA&c!Sk;kBIdWTM}`hSDmi z+8jZhT>9I~r{;3<3T*dc?6tu>VK{F*0Z9Q4U-PeTQD?$=idp@aZ#J|Ifitqg0DK%< z1k_dxZY&H(v)TuqV2Tj0xR2gOjilc{@%Ltb3Nu7xwC>No{kP4-B>7V;B9#cKfKXkt z)D~U*)JigX*17yk_U#ho3Re3u1rZ35x@J&8h(H!2`dW738{f?~8PYSJvIDbSgQ)q@ zc^H788}pLFnc=fGPU+{CDgbdSGwIYie7s-xFL)W$UQQ^S6|ip?9gy181@t4mJ^#xu z{~YIXdDWjS;6-dbbC6dcl{jh{mhuJ59jz#!IUpS6;yH`IEf;+09B|AlL?=$1prFla z<4=>>;YpgrZ0BH!`(#2ahw&=iW^xnk9)y1nveGfdVj|o>9jKxXF_TY ziYrhrt@WJv+54BsX)JA(jA}z=!XCdZl1B551C+dsqv}Bz28N6SMwg+znj}5st!a5_+3Qy*obBDe;f8MUc4A)Gy``OJ9jV- zESST%DTNn6i-8`I<|fg+{G@(GU*FRvTzTGvv?{8n|OM9vZ&+*;z(M;1nA)Gbh^C1eRja1L(<_1{Gk9Am6(MeLhFLPzOGJkJqeoTx(i-@iWm)YR72cFO?gE&Xl3SZU?Bd}-|- zqcssrVh?SXZT|WLL?oDub(fuG9C@ntZ*O$zs@gojWFblEzZn$5X+Gz+z{g+u^E-@! 
zcVuIHA_s?a3Zb+&5An#T7wu$)f4eacV1Kng)Nfm92{^{<6f0%szTirX$OC*a&@;Um z1(#$BTW;Nd%b$%g3;!c9gsOQC?E`&h#LhI()oj#Th8axkQlr13`ytoEWRS~&M5x5hP`26e z^`9*X0g*l39L&}VsKpv$9`$I<0uevW?&-_cvcLl;xA+}K`x#JW5tnQ%`yZBH)Iu%8 zzQfTf5n$|5E(I8+P9-rLw6L_Kig4>WZt~zKIkk>CQ-wTI8b60f%$Ez6il>2djx|kZ<=pnM*yz$*v2q#oABS#*S z<%+uCp@4yee9Ce$IJ7A94?LjsqvzV2-muT0XP*5K7MVY_;~6lN-Ka)@JS=dHcrLk zlypuydp`<|g;EwFkOAZ&Gq;^Q><<*Q1xSlzF|Jxfi8mDZ=MdC{rfb%m;LJ75f7yh5 z_3v*H5Gc=pJfB<$j_)dwJ%n@1fnMu82Z_PNGX(&EOYj^go|~Bs3R)8vC|a-zC4H>l z@CB!i@!|LHuD72CXL!kBLY2%9eDVM5CpG>xq=$t=jE0zr%9A~h7RDj)R}>Q-GCwwf zk4k=MYV7dS|-_bl&;BLhnuL(@F=FFf*Ct2gHe4L@3@mA?17k-Q?G+RXG%p;*Lz_Z_LV=$ z;6M8>JVZg|ov_*NJzXYfC^6ZxWlPkcohVIEsGMJt0a+dxHvZDBT{1{wrDs;p5JD+B zWQP-{Nt265?yRb*S;+QcT+b087}W%D3RWPBrN16U8XWm7e`A5a=NN-vC!%Jn5NUiQ z3U-#q!ytskBw8L8c?}d zf!XmSzHjye2iH&ph|-;Opc+3}p0K{9-Et3x}9Pa;r+I#b`ob$eY{7OSc$XGH6Vi(xOOVELUmY zNQpsOl(e9vV#Hu@jUlEMqSrudB0!p*Xumb*ZC?txR6nm4ofinu?OHTL>^?^9R+@!Nd=LSk=%~K zk2$l7Wg!PNA>uSGNiILQP-NVs7rR|}C&kJP@qh$B1%0It54|u0KUks5hrh|HzTk5i za!QIegAWOMQY0b6uDRgNvyn;mIj5qqnqvin`F?5y%$ZgyxMfO-CG1y3+UrWTnH@%; z2)u}lw^cj$k#`{&#YrkOBh*Qy)RHEqm0yv)C4Q_y_MwSIEa1^h47h&r=pW(3iC=d% z-Gm>*23i+qHB+41*2UYrdF9+TZf?ivNJjl5KuUe zXbvF82~UrtiLI5BKJ!!sB#=*54;Uw#9f>GVu|TZ=%|SUpwGG^5Nb}fljN^nUTTU_O z6X6L-yJM=ZI-I|DJ3n?NJqOsj9?ZM&iB!q~vkTmTc09IHOil)3v4e-)x%z_(+3rVy zCs4mjZZ8WFoH|D(3>AI+hV2QuQ@2vdoF%KG?5l%J6iassGtD(m5Ez00U0V6veNS+J zhcGAEFss2G4*!f`sRWgT3rFNsV<%H--w`zbYXyS@J*-;ctiaTsCLhc@&#P1CPn&ug zhvK`0B7x^h$|fAjF9J< z8=nThLDVMLRUrgb3f4QvP~yP>!aWJzSMY5H#1mI7(@S(>=B^`{p`WdYY986ps1@q;2d2fHft3W%L3?CIz#7UYgG zmC(`97>!(CXU!D=g5u4aKlu+>fB(S)THw~#*LRML=&YHMBR9Xr-?_o#D>@n4C@!aQ z=}ZIyJruLB)drK9;h>Ni>F8H6yOHTAdt;c)VQ+NS&2RzS!a8s!AxE4`d>$kslSkK45H%ILiML2bpQ7wyjB%zwnp;C|QfPMi;9ip$^+$REG%`MPNb#)k&P;*X&x-k($!>lj2r38DnAIr&bnqHV^hyf5ZUZeW&&XIGD?UW1sl3=m?!ntA^fqcry5BOb>;?IEB>o}JVngWfLD4jbwC((Hr4qu$zFe#vfuY;{Rg`(yLwLDA=}!<)8v z$?ddudhq62Jek&8o>}?=(^_-w*@d*dx>K*_H56Dh4_bZn3LaB6rhXK_t$3B=SHgze zb(DD_Mxaqg75^xTL z(`?hurKF@3l2hl~Abqp~XQBsO`%SI+@a{i;7hyIUe?0!i$om}cuB5%+2RE4){-I?G z*IWEGy3^x;MdANsmjClSJN`+p{|EY&{5QV$YEu)Fqd3r}pp;vNdVyBYzSxVM4JqvJ z0&wsPD3aeC^uQf#iGort7zw&}y3MTIaXm!~x7{ZtfQT!;O1sc^*LNLeo7EpzN{7+EZ zpr8p>%mIQLDlIp6I$8pKn6QxJ5))TpnE*1x1GM3+=l*?)U3-=g+!2T#0VAPif6Yo# z`V_q5VAk=@1ztfJkh#`{l$66Lfy|;}%U%?WiV7jyE1Q;ZGy+e&sr1HL*{XeL5gz;fe0I*+}SPfpJ3JTkn z0amh|p*;)LDTZi1N6l3_%MbJ>wh9UMp6(g?7%Gdsev3!y;a zSCI1J$d0(}ePwNMnd-~a7Y&@Zy3;v)Zkm{Pp4Zc>{xmxFZApx@$KVw>*}Zc z)bGibjY;-~S|}gdJHB4sSfnJgF@4RK@S=;CGg{x4JFdu3yz#CA*!=Mg$?HQm95&Es zdsxttedT3++sY5?@_fT(JN!FmZOFU-yUN}5*|6bv*VoATa$uMWrpLBbKGQ+2;WWNy zX&4&=x8L&SQV{;XA<4zZ>#wRMPp&OrTe9)Womidi%bsGW^_J{M-+b-4Exs=PV za4En4VbLWkQ;GETFQ0<>!adSqjkPBf*b2pWK)0d@oj+^t7I3VzHXf3 zRuKJj`U9sg__=q|eH_)Y5YrfK!5+_j_vCs31ax)|Y!Ld#dYp552}6-%%Ie@gB1z6X zn=bn_peK4wi|2;+9zE9b*}>+`qK-#wB0_aoHL=;k-24?vwR~b&M7_lB;Bv`idZY7M zuuepEAgJBZ>aw1=f`pw9<>14paNYnUDGx-~ESOf(I*}m`4oasekKCU9zW3?^K@$UaEmDzn^Q-BZlr96aRZ3!tt=MAqNDc{3h>g+mgaA#HF1 z)%hGqK2anvu|ONu8O^PV*o46U95^&`Sj1s~NMbgV={*Sr(exK)ZKZI0X;BB_EjH~{ z)6np`weLOJq7F<;Qv%>S*%!tFPR~@jbs_;6aoMjh&I|y?EOZiPm7!p@e7dlktBdGY z5-qwVm~`W)wt74dv`JPv!y;hl;Lio{M*`dtUtAsa<-}Ko&KO`dSQ8f;yYbh|$g_)k z*Af$=fKLFm4bV$Uz8$4jkRktVImBo_9rg^mU&|Js?T#1s*Ud?lnz$h(5fr}GT)E%pX90Evh~6N5lpqBXM%oPs_} zlVjs>#D=h?iD*2DN{rA|7Z*XfPtPFAn>wKTp{=*z9F9=vl;+g+0z|PZ#a?nRbZ((p zj)gGAM#*xJb`e6w;pgU2Dx4~ zQsX=p)Ur6B=+Jc@t4H?zg6_Lt9l5~1}U3S)}yq7XRj`DOe!nz?C%2mG%@Tj&P6+vSuHl3k;%+B#E zKwLUyk4^r74h&(!bUJkJBq-Q(0>Z6z-YQHBMSDa^tOZR6z}8tdyNRmjc*#W@6wi&g zX3`w+c=1zdry**G>saukyHg#N#M`bb2cYw9}IR7N=2I-_?0^`$6Z 
z?+N(&o1(@2bS_@KTCAcFVad<@8MiSCU*b}%9ieX05Ir!XDyKoy$psc#|5ZieCD$!W z4CoziDN`$>;HJx=8A2;NicvP}v27lzhD-0ybf0sV14`;d+G21&9YOlnva@1wRuUla zZ`6|tSQMNlQ#`?GjPaA`?Yb^!-%BO$3t3`dVU7&K7OD~|#(&nEEDHF8!b}S!Vs<_$Zjt2-%RiNzRXBU)*hfZGiyl)y!%etDG=`johTyX zMI<|opMay7M3*pH7}Dxw(zK4s^ke(?_ZzZl_0hA{QppE-`E-S(02ryodg^SME+GxN zW&BknuSVM?T7Q7hsOF3(keOycm+R}@ljW8x#%wB<@30$KLi2Ea5bbmtsNqZ=UK+;$ zy)TlH7xz3LR(=)8U%kPE(VMCUC2=k}I-sM`*nZRPIQ5JjPPC}+X=A6Z^6$TY@z7p< zbk^2w^N{<3UHpk)3vn5zF(b@xMfOwKurn|V>?Nn{(g2Vl!vj6o>p&=ZEE#@zscA#N zabE@b7AETOF^ss8*6k|)v3-6^4Tn+b`x(HkVeLZg?FQnCD51ShJS)}$f zEKLKFTSc93YAFq6GYR$+iiCMH%B1B-csMqH zJM+hP6cf^&mVHBFvNC9N9gp3)$Ot;uGl!U}b40LY=;;*YY18t-0x<+|LXjBTcJbGO zBhkP~0*~AT07rI=zW7zRm1#HAWFjtvn$>Wi8<(w+{93o{k&p3!>SUKX7 zAU>vSmBb+uW2nK0)`f(~DC^NM1Mr-NU;ql+7hn{#a^%LPrO7BefyL-Izm?8DY-(e# z8M`CtBLiY(sia3W!r;hNTcWuxz}p?hD(kVYlyWPpPc-y9pvanzI@1kszoa?AU=Qtx z>QJ+n*h4qy{wFvxpuQAZNeNVe)T`o(2@zBjWQEvYoYbb{bg5cO?I{FkEYGVb z38SD5K`rZO;x+>Ji&)VCX@otEdL)bnTwyGC!$7)~ z%>5<6ZW9M$pfQzBO=L?)%-!Zj)uf(F_31aG0TB=rrF5$9`)QeT5dc*xlr*nX9f$Ho zANYbRIA%f#VhN|%BMLo0ObG+^=QUq$cx>3j!ZlKpRaA`dll1VUrjO5!svjy?*ia%& zN@wfb4bx2s7VHQl?HB9)N|Ygtyd$zoB8`*oC>B6e$)(W=Br0>+Wy%jjh!}pzq#+;# zVuyJTxCP1d0H(CFM8_weXE_Qqq95{)u})9Ekvi}PBq7SAg(y2B9|bkd3eN84403v_ zRXUz0)(x3%9-!PKbeRiTGT`GIEEWyIt2BER`ZAfo5R0Ix#`!I{)xe1=&h~x%4s>roooQmv>wsAfU+SFaqEB`r9wKyL9B{7=phdw(+9*UzS=~(O$)H2aM_q^LDf?M z&9Z@%w6FVZ-bByIlP2W=@@YQ1^@6l=ZnfVcwn4a~p?W=^4EGuv*dd;{M`EpaEayUIN^bb=u0{Q_YPG7T0`(u z^^h^rD*JY*d_c%*!&7lM#Id0G?ZfJxVM+Xf4E7anGM~x{MZPl+C~as!FOJ0K%&%y; zQB+h!v;bQ>;p!!1CIpluW1szcT9NHWN!W>WuB#hkd^Y!%>L&#Pt$KQTf(4h6{}(fA zZKz-p*W)Ct#-4gWNemTC`csq5ks)6q2Bft|?qgi8c3B+0nJ8b@!6WGK5qw(M&PonL zFskR%MFJi~I0Wa&+GA*;!R*mV49l?%Gev$!5v(A08&>TD4Cg-k?V;48u9^eXJm^P2 zFO8}{%QQf-wv!(gL*SB6~gDIkY(WvrV2&`s+o8|Kt!P}mau~x;dc}C7##E3@oJ8$l)Sx8^QMZ_@ccGxtt14g zDzxe5As0G^sCN#kE(p(lL)P{^!tbT43ksqS1Rp7+de)%4Hd%#O7I9rK^o8& z9ge3o+*Nf~NdBhHCyeOb$;KN5Vy{FrV+dgrFGJGicDxRIk;7WqRZ2A;ZO(8yDC4Cu zrIKxL(H$23ZAVh|J5VhnU`23d&(!2%QzE(b*+Wi1ZY~I8xx$0LFd+V2Q@09#qvtVi z`qXLDrYS#72UE_bQ3`)nL~q5?9FjX)o@VBZ5fef;N=Vn>G@`LzA|C*~{}4G(lPRG7 zu>*t9VT;hJkE%@e!#90!Jx?+=*A!=K8rdSktK!4A`JqRhj!-;M)_AE01lm&lfHnx| zyY}Qzy8zc4T5{K8res`U0))GC8csxCYBlZMIETJ1TBTC+z+fq@Tp74-89|4agrqeJ zieAB-8$0F#G8ZbA67l3~==ICX=4NiH*ROjPmTH4A!1h^Y8r1hwZcR85vZUC2eNoHpYUP@fiWJ1s zyq}S1#oB{vR2xu;hDnBNrlF7vgB=4^T{EHHW9dv{l#COy2k*f{qG$N!Wo!=W3~f%v z2q>JtlZv%9^m@tOKtfAVdtg-V24sa)Au=n1L?VRhM7jW-X%8E;gCj1{^Z!)oUq9vz zes6r6(T?h{GCThr03?_H)q&!=8sVe>no;4fk4|g@6CLm{4#l+a#2Nxti@Ye%P})zh^nfp3cPVy z{jtMYK6{3K54hyN!h%%VU9+!h4sO}Mozk6&ub`@N30NW5etDk|H)N(>sqIf}GDw;#^0o)J85^?DyK{1HNx* z{lbHDAeJJjd*Y3h+8sWq=k4#eLdD%7K>&m7YznnMtbCYyA_H0hqT&%+M%m(tnNI$! 
z;j%uVQ&=C>?D^pRt4)5=-8-2be~LM*7ZE&}-{N%z`|T7gMxfkoz!tE_#w$@X5Ago1 z@BB%A@mG&UlFV`n(3!!TjH+4Lc_zrnT>Gt)LMaWyFj^#O#rk zrCq)R+BFsP%{UaLx7xCR$tJ-b+Vck+n+atkWTqSiLwPVI##D@Pb^#e~f-;kS*1_ZB zfbk7r2x8x#$?o;?x>oM5pf6^k4`it%OJax{BX zbX~6D@TjI+0gbNsXxVB@(1wFX=;p-wjBXF~V^A+&7et_?+NXGOFV7P)peXV|Sh?4f3$ty0c^DF}OaG~5WG zA6@Uq8OZp^*+b0k4lIDhKY3_7$vXGV+5~jxD4To9{xC};_l*S1P{QDQ60jc?7s4q3 zR*W^=T-4&si&<^|{rBH`Tbq$<_S^*aVr0=Aza%_X!&EWIze+4*yul)Mm4_Lc-~C)K4bn&U`TR zm`nKf^uEbEpMU%~imSkH%|>|6(pHYbj-j7n+)~d~n-4`OFNK0!*81E8I4i_RkY>Sk zHfUqSVq+jH#Q1_Vn-ZWZ5ZA*cVgu#*Jj)fC8SEW6+8xB_dbnip(j~80O(JnIgQ9NR zkKaa2m|iCB6q;%^;tQSC6op3!`3}`V$TP*v$muK1Y88>43ygjD$6TUHC&zxgq|jnf zoxai;B2^-n1GO6q*_nhJaHsF4-qw$huri4NQb4&=>9DSc)L_&k0Ox z!fC~(20}ZD=qDWdn0q~s2o*67nl7xlF>(6Nkbfc4UZhnS3k?oBr>zc$JJL~=jBriF zuLJcYM0<$Ra2w2RDmK%xj_K<(q%&)$T_uMBP`#q4iG*5KKLm;T)D^JW=3J2)ww*QV zpNGD%NbyEwWR~Kn=@RbIxA?=1LdS;#EO_u(zvUiODKBRVA=U;iBYI* z!OoB@TxuS(P>NqUqMG-HY`X@zecpVT(SNWofue-urU!Q#?n12m#PD79x1KG%0dLUe zrAJo-xJj_6qg5{y-=T`FsCR|Q!f>gipqAt_r>Ddh@3ot)YY@@>oGp<)dZYT`NrP-% z7~X`UEr{Mh_=J31%PF3Ul;6_-l6{S+5C)HtQrnNy`$=1IzxmfI-1XTT!iH(ez1SV1 zfN>vU3B77rPta>S%V449h{$V0BC;<{M;sK;6TA;jd7>4jjuQ_!+5#bNvm1zxYMi%I zGTl1By9W{N1Hr8!J`sd0PDwh*j{A8m7A`6$3|wv-4~PAHnwTJY&}5y&sUs*J%Ia_H z9$bg**vCWWIV*d@u5@~!;|fLjWD1}!QdWM_fmck`Ic4mwa6HtI{j+nT?q0iSY#_Qo za!qolizN#OYAOn18kR;vbtlyP==3^DV1l(5feFN;xW{(}esqrU?!uw}BxV3^S_`eG ze)P>(g^>>}!6obPt^_l%RjW<|n3#eymzJ+I{{n!w9vhVeI1xce0G!eX2iV=k8X8c& zYU){x^Ahq06y2|2{zN$wr-1W_qfH{bf$y#ZhDD~E$<0j&$R1Dr>Nx1tBeTOIA< zZS5cY3lLsIk+f;xm+m8c9_RUiT<`(zy5nqA?$oMpixqiPmC(=u4@(#X&*3k1))HDI z8Uo0gg%!?PktY(K+UnqOEpoWr5gyMS)gVI`;A9Z#g0KSJ3$4%=5N4)gnZ}}m5g7uA zVC4vXaH^SXv`No){Tt~}J`qJl_F#Kp+$py8Y$!8~;B~~IQyZ|~SS?_uB?6|XA|%c+ zaXim@c|zj46-6$RjqCrX(V&QI zqZ8XDzEA{kW+tu4IW3jJE)|6iO2ECZh*x1MQwUcAN=h~WqQxpXOJXEa+u5BbteguO z3hNx9vF;iUMrCRU(-B5&0MW9JtmoG=s1cGR)+9K>V*n9a4U5f|>K^1VWzQT?z)nyr zl>>tnq?m}I+-&s-1RNBWgR8NA{XMS#aO$aV{IbHtpCQc`n#WLa$V9vpIe)*EU2qooD@a*L+-i zT6b^w4ETAF#Uz%2v`03^cmu&l~(>)EAs@Qo zmdhSr;yYk8T)hU2a_WCQIJ2OIDkF%p==9B4AZeSv&rpKaia7|}c&MBFJj2B5#zVS; z%Yj6s{-|6yw~(<9n!{1zj!UKJW17baHbj_^0My`p8Jec%)e?srV&K!Y#&j7q!D{7AlW?zAWYRdhjqTfPpaBq3gCyH)!(`*LfS3dZF$Wk{5RqxJ z)b4Clg8VfVFeLYwfO}G_gfL(as5IKMf-+|GsQ7e_H-3pyFS&$9gWyo3Ky zK5Ukw4RG(GeRuAps4xlZaM(^;GL=|xP?03ycB`FRIs^a?6q1dSM`Zy%7`& zAWx%7J{oC;t$-^I2LRU4(h!?4m_7xR83P(;-`zHi!NzNQZUu+zq6pklw_eNt0#Ke@jwgJIeNZTLo#{=76iEfZ ziva|v!V!Wd$vjq>cr$ZgaOMg6q(!a5e!)fc4U!$`_c@2yCmdUJYGWrBN2>u=t>*ki zw5d`CMMpJ9l&#c#G2JG6gA*yPb+X8B1SmQZ&DWHGgcEEhQOaT4^fv|OIxMR}kqkK9 z*qEI#Qz0)wqhs#T%6*<=qkF0CLnbo{8LzodO{)Ua1~~d1r>L8A!jh1OP^Bf7+&MUE z17fYoOcip1Y2KA_Px9Y`ruw$1L-(jz2Tv;wqZ zWdXa9G4-JV19Ga^0*WK!BsZ&2{SqVt2*V13^(LnkP!C5UtY5*-kq>O-mYbi#19%O! 
zUgcK?VYYd4GvGX&h9)6zEWnG37)H5K!jzwB1^o@!B*tDctQ5ct)8_7do;mn^u}^nr zv_Lcibfa>>i7veW@MUq;aCP4fcQ53whb)J>f^i8=<0gE0YhM55@M3KibG@qz1 zkvbs2`dBXP%41_PIZok>C5w*)kEPA}^TC$)Q7oY0SCV^Ld*#JnpWhq&{>a#4URvnq zV|zBoDX@>2#1xBS;{MRD%5HV`HtK8qsOLZkNj+Z43g;1gc<5cl3WcC0$yJUKs7D~8 z4QU>3GMF9B)u+5>^hRM`ln>aB_2EI-#Blnz9q&#(q< z3b|x-*pW&bHI~*kNb*?oGM^^%f~JdH;1C2`DCh?;C9hfSlNnk2yt88~H+TgyPWm5l zNQPn#sI71|9%G&l?$C&SIe4^Jd}L$~d@rSy{H$U~EyFz=8}6~cm!=!266-5_U0w>H zoF%lp1sf6Bsfcw(d-mXCYjv|0rybH@|3d`9d$?|XiTgs_dcO(V)5>f`Vnv>-D0NK|AIy8RJ;ACjU0Vtx0qkh+z z$-2Qn23mxMQa0TYu_L_9-fN-b8WCqE@FS2hov~@(wrx7CyWyxgJbq66a2SGW3YFMK z&T|gDj?UL-XH&0ZH{PWQuY7Hal%H(R^_;v>eT&5k799{L9ND3YVq!)FgbAlJqSDWn z`;XSzIQU9}j(26X6LhQ|`Q3+BirIY?(34!`J*T0_&=lW{?Nu#({l%4g$o-U-C4NO4 z3lS;mj@YV7=8Xsal&43vnm2p+nLGswz`VEt2+;POhsVAs`v+Xf(gioXCFbM>$wBG=0j8*rFagN3NgomCJ$z@lyOT-w6lLGFE;n4Qo%Kf7|b zqE4|+rKY*Il-7YT%?vl%b<+Nxg+e`!iy9yd!Z(t9=jENhk=EfMd5SERg*AI050)xYt>a~<5#RyH<~sid{27A8~)zz4QTI7c1+|4v@?x*MP6d6N@1YL6kV zjq|l=pVc{+T7r8x@DZ5n+G3;1cG@io$p3L|{;4P|Hx72@nT#zqo2fii;~e#cXljJh z?>#81tQnn}IrH9KeMA=fJSy;#EmK9E2f7O#;)TuvEIQePl>9(1C$TOQKkK_~#V)g_=vGh8dQc|CLbReF7Td-!{;E1H9 zC$`4~VUv?`6th&;%lI7t%yDLm3_(mR5yG-IljC9c6Fmd4xe9h=^bN41h>z;GyzWEQ z+~-F}paZloHPmTuWFThJE)m8_5>*9eRE3KAn(rLrpZHw!!XA5pc&=ib8Cm=rUwV!sQ00<;IfUy3;yOt4HxWi}=GIl^BO7_bev z&_EaZfY6H;7=`*kcFB232cr>S3?9k5!V-WhaQ>sHlAEu*qX+r}# z#a%b?LWbiha2VgDwt`6nb*3mp7|Cg{!it>7)y<`a9u*B@Ik0c)AyKgS#Zk~t571(!CvPyj0_ z!6}Nk5z9apA4o!)Dtj5f>^|SxR-?I$P2wCOxSrA*9QLTl`l)StFJ~py1%98nu?jc5 zolljjtMw<>9}|Of|_v%+nI`mzkP|eaEg7{a<|e&T3#fjrB)PXH=A`K%{yss z071+$W@PY{oPjDkp;7-e{96ieR+s{+Re-Ytf!TiplsdcD#EZ_{(Z3(Yq&(6pL>)ND zOlGv_5p2iVU0ccKJ$lOv2)1Fu>5oA4#PIi_Pkt1Va82Oq%)JCn53o%0n8R3gQ^{q3 z%`>djRQ{otr_)u+5tzJ5=p{|9)gv^GaAXnM=Oh)V$-#-*@j(>B;7hpgE&&H-hve&rb`G$GTpD_}2HuN+1dyUr6wIH%}06+qdGZy*O zA#<^$0^DcWOBa9E%*^?~2gbPlZe^j{RDb>O$TjcfzO7z$AqH?HPi)>QVkrqxW=H~O zgH(=I@f-(_Jg_BDy7+siZj~-Xh$ejtcowtIlW+%@DaXecA<<0&E8nT~bNJVyh5L%%JGo@Jco}3AdAE92v z>mGx1W^PWSm4B|tT5ymuwQ2-8wq++lH92g;6MDPxbrVn~Ai(?*;Z^=n!vUG-eLd-9 zdDtKXzPS*?=l}aLED=V+g0Tq&I5-_?vOgne)>prpfdxh|?O~Edpd$92wa8CaxcuhH z3kkBLGNttqp*hGu9fKH&*M=*Dg@cr|(y9~vj!X5X_eW;Q=sim(>AwH1%@nyGdL_CP&qmi5^5 z6nsya`qY~l9s?3Uk8yKluMQIVKPmWYUt&RRZL72s4k)fS<5B%tB zCr+#r$^O(67Z^i`ps@+_HRQ;QxYX_t5}?Qsbq^V!$ya$`3uAJz09d64DfS%N021yd zzRO%tfNnEYH?W*fjEt2Tq#ggBG2GRuWI%Tv@&FI&eS=n<6 zSJdWe=gb3YNotoJoy1!>?m+(7^*6W%Y`ApR8JE9##%d4+p56}8fAFDH^YNIbkr_Ej zrbC=;O(I2{2)C*A=pE$Pb&7EWo)Pi&6a;BCgH+&(iwYJ+ALRs-cgzS z`z#cT8v!73_6XG~Dn zriBCJkU+T2b$@ZZUS)>79t4g75vP>_K7@9@SD{K0I?7Q*kP2g@nlgHWWI*xH zUC%f((luWumOL*7$DGoL1dLPF1-KF6!w?ipzjs@aMCq+b<+YyVwT3jPwuYrX;|xc> zL!bi>fHoave*==Z`+z##(Q!`$!+S)55alRFOT~_x+$61hGhX`60!O;7;xdK97Ace= zeS*PIL&4f%x4n6+g~iDao7<8!Ya$l>w(HDawVoRWZMeKRD(&}U9%~m*yrr3(c+@gl zY0vJ;g8oBC&ro|bW6`oxGo+*+UWps#xWMVPgU^85V~-DVxLJE^k7h`A#ECZJ*)cjb@qIei^0phduJ2J*kPe2ygK6Z*G`-Yv|y?krK(^Ve<0VK2Mi=ap8vs(TM@6 zG7%R&v`!Ju@B{woRGAg{1;MK%#%$udXSY@!SFtNal^WC88s7O33C+;&zB>c0Xlj0t z!;eyKn~Hrzk7Mr`d{A>PRr=|tg{i5jbVcnYEp1_CHBoSo$6GE*+F%66LkudzWX+m0 zbG5bcw@Vrn+@$B`|Gv@`wFT!<+NQcMswoIRe(BPs{@KP}Udg+D{WbW+iDX^-aN}&L zbYbbg?%Z+0_V>X89WQQbYJ#lS#MARChDXJrlA!_|Yrz!o9CSia2x`mV1VO9~mDd@& z_ByLHP+}|A_jMc67QTO_H9j3x(>Yv8+=UB^u{R%*+(Fdy-nQ+s#^3<;z+CCo>aWHG zT#NiPGwfi}lo>Nj#*7)`1J0w$Q)}3eA!o<7y^zt`{P%A3uaEcMzTFwwP@?t4x84II z^3LFT<#GB>`r!v%34QX zb8Ntq?)93P3vlJq@4xq7ziyq<+_^W!Lw10y_JJIK=%`U~I2CrRpVuq%;(lw~1Um7R zCQMiiF_p!}jq#CZ&)R;_Sa;^BYZ+?GS1(_lMOabw{wq2$(KJ0h{o>3mqUU3R`&rv8 zJgV(ozfMzO;M7$&mjlCYD_T3sZ?&(lZ^+zWy`uv%ZI5`1a`#k7ua>t{m!74qZ98w? 
zJRjiHc(F|&PEtZ@QT17|*(E&u63W@@caBKC*p5YrU$B{!T~J^I+q_+pprGK>rxViB zv%-;C4;DDLoWbJVR63(qX5NDC*LQNZ!a%FFyfN68$Gk~GpUUlj{Yyz*-4c*a zDawx@#a|$A493VB+@EUdnSP@80eE63i%Uz>663C3HAl*6W?>P$V8H^_9bY_~-alQ5 zBMI7$AVu4B-xYy@f%VPJWms|RlFH5P>{PO+r++#&T;w`cx$=2dm-&-8WY&g5SpKi7 zs?Gy|azJLv0|rZx<`THo$yf@=>tkbMCC85+@1K3-sq69Ezc^NHl78{^g`Km531;|N zd3&dpCfLRS|MGqL;~X_Li%*|Ejhwlz?;>^4_4(aTcZ8~HI=p7+K5-BkHBZ!Jj~YYB zY4v?jc8=dMZwSu%MIekVw{1&_^LD-d#PPJWn?A#3B2qP%XE#^i>?uV+Z3I2xjPu3% zmQONjJH;DM6M(f^(QVul=+mTe(5&z*u=nIViK=SJm-%XwADr%W_Z?4FG;d`m+ zaaf+tfMoGHAEB6xz=`_$dB?lwjc3QUFO`y#+JE?P8Omv&Z13E~y5Fb@**0QN<^AhOEtX0^Uout}6`5iYE!(+s=VT=%V^`ON!9#|`dcInVK}FY*P`KB}P5-B^<0&&2 z$4L{Q_*!rlh1g7L?yr`fkBV}J^1t7J0VSyB;sLKQH*<0yqhTqPBI~3rvj5v{Hdf|P zdHD+{rks!)vpW}^MYeYVPAviFYJ%Ee!NP^XFvMd^1f7Pr84q@FMkHD>`;?*8QS^vz zta|mTwym`x4M{)=mf$T^7=Muac3GQ69J(aB4uT&Nd<&R!d!M zU~sVXlAY1f0WXd%>0a68SdxYv9UZZkE|s-5)KtBBGbMYvxvlNR7cW+yzi`3n)8Dr* zYA+MD2y5c@@U63_DJxUz$WdU3G2T8tla-a1Sy)&kto4KBZL)@jRe5>2q(f(x)YYkP zcOJ5@^whF2Gdl}wM<*nU`}gg8h*vsn%$NlH_QA0_t8n}V-Tvk1Q&+3Si}xVVtHcE_ z`c9NGcBWiF`H}6SkEsWSt4y9d*9NQfOPPPW)w}YfB}m$-czIP>`KRe=QN>_9$yp93 zG^dGCsns_$1StaV*irX5InLd~gYO$%V2B}3DCB`GIJpmS(vEodOCPO}z%}3FU*1A| zD88|EQ&n{}zX$|*32=L)EK$gwYtd400Lgt6*8e@H&wLvUkfMy9yUiGqJv`kFWT2OKn=BzP^6Rm9vofyj@FhMj7>gsi=pGGHs3DNKLiWZ--^Oeb4g zTlJPMHMX&_3EKP9iff~%%Z#i|aYX28f>`?vlm_)pO(g&ahK(2z11;5*AAel%=bwMp zfBa~?ci?ExraFtNnwkgpNuPd|&YXI*>pU+kReL6rgu{=z{w5^tb%;4FcWSU`;;;_# z=2?nT%7tjCr?)@MFbZ|uw7uD9aHu_)-d_j~s0ut^WJ&>1GL$r;^r}N4>ss}lw1Z=O z+^-+vKN;-8{$}EWqyf5ylX%vIg3)aa3vvszaU50O_}35kOdeoJYZ8|$D?7_%y7VL* z>H|)8AFhr-9MXVTWYmYTe-U@`tH~fy5pwk;|@N%IAj-Cx^I9YmSN8g z9X%Qy@~VM*_BdgE@$voq^KRrd<~BC*M~)mR9Wyh)GPA#@?*3^j_0i3x#Y zKcdW~j~_oC8#ztn%bFoKHYH^_8kouP0A4r?nAQ5($OW090U&C8-r4>fSD2iftmR#Q4jb}wse-!5cj7-6_~^Cg`e(qmnY=nc zfX$c$!=D73)JQOY^Y`uBH*%K!?*!jRB^*V{h$X=lPW8C;mnovm>!<=mb_r{azYLTe z&bW$FY->=n)QKSjJbf?j8M}YakJ|xvQMn!Q;JC^g9I29Xzx~!1DQPXfw(faG2B^Pr z5CX*{CKg#7X~3Qw-LvEu516&DJ5>f#{Td+ija|ETvGU&mrEq;?V=+EL&7*o5m>E4< zw+1;E7Z*F=-KZ8wRTN>nzKBy(el$&d=FAyvr^F}ka_7n{%Xc$vf3b3TlKsKz{8P+_ z;$Vv_Ifw01z7f_;(2S`;3yXgS(d-E2nTCS2=>4v)1tv;z(>fI!6Y~(lJT*{LxD5xS z%l+i!tPmGiolu4D8ezX~odcvq%4_;(UNm_Fs|ep&xw?0ChM`PB6exT1<%>`7AAcxm zX;}jRDS;1o@X3=g*gJ0!I4p4{V7lTAZGeh6!yW-{^sLSuA^v-Ubim3n-KUMc{r7u7 zF0qtjf{sm#YbZmOqvqGPt)!&H5h?H&^zuZa{&ECzgYpvFWz)Me(wQ23WulKyAL`yc zK6%O%GhN*Ppkm*ZZMTfGK%7QqXy)USh8CDO&_}jRRsp_V00?4Y7-dFE6!+eQ`f?$9 zoeCjkSn>Xw%&7-^p3mml3I?m@oH9hhzT%=K5G4;Rsp7=-4_KOti?B+rdNKxPM=d@z5&4<{=m&ti^MX$m#Gu!WMhept0a`Iu!#A3UC}*S1}?@KZ3bz@ezx0bEZ;8HGGMp}D=SO;Nn% zhAkd*(BZ>}uWc+ZdKFWQi*Nn%<=Xwfq+UGvCqn*TpG!He*B|{XBcuu~5g*k}{HSO_9#A-F3cW5k9m0~6}?`Rxjqzb?NSv;TVBxN)#mPdhfvHWrtGY=`^0zUl!|!9M^~prl2@ zoSG{7_hT$CyrRMeFHRrjRYH5fl0ynHjHoC|*jh6N)Hw3h8#%25wx5#@m=jS7^T`zN z6FUdOjBO#pX8os6M-}scNT4W15@&jNjP}(v*?owdB8pf;Q;Zc%kOjadveBFe5A^q} zxg0-Qbp4-ds#uG_Eq)U-EZ zDlm#7G(4(p`?wldh5_LeM2jc}Ur>h#FKHl6n=onJ01V7=oHdO>;cY*&z{O*$65xzi zo?4RakIKvmd_l!qQd+>;R(C3{n9rv3MU!)HUBD%c`qLO#&^jKG~XOPcAL@@ z_Qfx6`J-EM>Veylz_Axg8lmQMgxyYE(fI>>mq__bN=xfBGFlG;i%SNmRMt5FOHeIp!2lh=;ii;!JHRv;X)c+l8q`h))+FIWNSG0k0HQupSKg{f&|ZLLGoyC+sq zPng@=UjXZJ2|3kr;$dJk8zJDhkDH#e~C~yv|zT4k3$gyp=BMQSd zh8{Gngrkp)-$@3q3qYjW01R7*&97CbL@Bb}g;Q>1eEc#0i1`8aXEZ@)Ehad8Q9HeM&~HoWH*ECL+f z1UmeleG=a|5c)(#$JY9)ezLMG(}nB~w6JyJx_wY~{3ItQXM1y-i9{7m!`k4Xu_>5m zIQtC~wcHWbgLNP0KTZPCv6?@BClOmz3xqhh1RzyAgX9V0)6GWCc8ehR2?gt~J6?!V z)OsEd7_04<+k$LMiZzZkg|<_qT%-vjG`Eb zu+)go>>oCT`6_a~)|QrNWc@fRSTnZdcgH@52SlmR?uEZ!D#5GfNd^sX3<3SiYkp;U z0L%$M?Xr_6PvW&RPM}E_D}3Ged){u@9<*mLR(aQoyp5Lt&Nm(($6>8t!5>0*d=>Ho 
diff --git a/examples/whisper/reports/cuda_128_10/inference_report.csv b/examples/whisper/reports/cuda_128_10/inference_report.csv
deleted file mode 100644
index 00aff69d..00000000
--- a/examples/whisper/reports/cuda_128_10/inference_report.csv
+++ /dev/null
@@ -1,7 +0,0 @@
-experiment_name,backend.name,backend.version,backend._target_,backend.inter_op_num_threads,backend.intra_op_num_threads,backend.initial_isolation_check,backend.continous_isolation_check,backend.delete_cache,backend.export,backend.no_weights,backend.use_merged,backend.use_cache,backend.torch_dtype,backend.provider,backend.device_id,backend.use_io_binding,backend.enable_profiling,backend.optimization,backend.optimization_config.optimization_level,backend.optimization_config.optimize_for_gpu,backend.optimization_config.fp16,backend.optimization_config.enable_transformers_specific_optimizations,backend.optimization_config.enable_gelu_approximation,backend.optimization_config.disable_gelu_fusion,backend.optimization_config.disable_layer_norm_fusion,backend.optimization_config.disable_attention_fusion,backend.optimization_config.disable_skip_layer_norm_fusion,backend.optimization_config.disable_bias_skip_layer_norm_fusion,backend.optimization_config.disable_bias_gelu_fusion,backend.optimization_config.use_mask_index,backend.optimization_config.no_attention_mask,backend.optimization_config.disable_embed_layer_norm_fusion,backend.optimization_config.disable_shape_inference,backend.optimization_config.use_multi_head_attention,backend.optimization_config.enable_gemm_fast_gelu_fusion,backend.optimization_config.use_raw_attention_mask,backend.optimization_config.disable_group_norm_fusion,backend.optimization_config.disable_packed_kv,backend.auto_optimization,backend.auto_optimization_config.for_gpu,backend.quantization,backend.quantization_config.is_static,backend.quantization_config.format,backend.quantization_config.mode,backend.quantization_config.activations_dtype,backend.quantization_config.activations_symmetric,backend.quantization_config.weights_dtype,backend.quantization_config.weights_symmetric,backend.quantization_config.per_channel,backend.quantization_config.reduce_range,backend.quantization_config.operators_to_quantize,backend.auto_quantization,backend.auto_quantization_config.is_static,backend.calibration,backend.calibration_config.dataset_name,backend.calibration_config.num_samples,backend.calibration_config.dataset_config_name,backend.calibration_config.dataset_split,backend.calibration_config.preprocess_batch,backend.calibration_config.preprocess_class,backend.use_ortmodel,benchmark.name,benchmark._target_,benchmark.seed,benchmark.memory,benchmark.warmup_runs,benchmark.benchmark_duration,benchmark.input_shapes.batch_size,benchmark.input_shapes.sequence_length,benchmark.input_shapes.num_choices,benchmark.input_shapes.width,benchmark.input_shapes.height,benchmark.input_shapes.num_channels,benchmark.input_shapes.point_batch_size,benchmark.input_shapes.nb_points_per_image,benchmark.input_shapes.feature_size,benchmark.input_shapes.nb_max_frames,benchmark.input_shapes.audio_sequence_length,benchmark.new_tokens,model,device,task,hub_kwargs.revision,hub_kwargs.cache_dir,hub_kwargs.force_download,hub_kwargs.local_files_only,hub_kwargs.use_auth_token,environment.optimum_version,environment.transformers_version,environment.accelerate_version,environment.diffusers_version,environment.python_version,environment.system,environment.cpu,environment.cpu_count,environment.cpu_ram_mb,Unnamed: 
0,forward.latency(s),forward.throughput(samples/s),generate.latency(s),generate.throughput(tokens/s),backend.load_in_8bit,backend.load_in_4bit,backend.bettertransformer,backend.torch_compile,backend.torch_compile_config.fullgraph,backend.torch_compile_config.dynamic,backend.torch_compile_config.backend,backend.torch_compile_config.mode,backend.torch_compile_config.options,backend.torch_compile_config.disable,backend.amp_autocast,backend.amp_dtype,backend.disable_grad,backend.eval_mode,forward.speedup(%),generate.speedup(%) -whisper_auto_opt(O4),onnxruntime,1.15.1,optimum_benchmark.backends.onnxruntime.ORTBackend,,,False,False,False,True,False,False,True,,CUDAExecutionProvider,1,True,False,False,1,True,False,True,False,False,False,False,True,False,False,False,False,True,False,False,False,False,True,True,O4,True,False,False,QOperator,IntegerOps,QUInt8,False,QInt8,True,False,False,"['MatMul', 'Add']",,False,False,glue,300,sst2,train,True,optimum_benchmark.preprocessors.glue.GluePreprocessor,True,inference,optimum_benchmark.benchmarks.inference.InferenceBenchmark,42,False,10,10,128,16,1,64,64,3,3,2,80,3000,16000,10,openai/whisper-base,cuda:1,automatic-speech-recognition,main,,False,False,False,1.11.1.dev0,4.32.0.dev0,0.22.0.dev0,0.20.0.dev0,3.9.17,Linux, AMD EPYC 7742 64-Core Processor,128,515637,0,0.133,962.0,0.209,6120.0,,,,,,,,,,,,,,,208.33333333333334,409.99999999999994 -whisper_auto_opt(O3),onnxruntime,1.15.1,optimum_benchmark.backends.onnxruntime.ORTBackend,,,False,False,False,True,False,False,True,,CUDAExecutionProvider,1,True,False,False,1,True,False,True,False,False,False,False,True,False,False,False,False,True,False,False,False,False,True,True,O3,True,False,False,QOperator,IntegerOps,QUInt8,False,QInt8,True,False,False,"['MatMul', 'Add']",,False,False,glue,300,sst2,train,True,optimum_benchmark.preprocessors.glue.GluePreprocessor,True,inference,optimum_benchmark.benchmarks.inference.InferenceBenchmark,42,False,10,10,128,16,1,64,64,3,3,2,80,3000,16000,10,openai/whisper-base,cuda:1,automatic-speech-recognition,main,,False,False,False,1.11.1.dev0,4.32.0.dev0,0.22.0.dev0,0.20.0.dev0,3.9.17,Linux, AMD EPYC 7742 64-Core Processor,128,515637,0,0.338,379.0,0.378,3390.0,,,,,,,,,,,,,,,21.474358974358964,182.50000000000003 -whisper_auto_opt(O2),onnxruntime,1.15.1,optimum_benchmark.backends.onnxruntime.ORTBackend,,,False,False,False,True,False,False,True,,CUDAExecutionProvider,1,True,False,False,1,True,False,True,False,False,False,False,True,False,False,False,False,True,False,False,False,False,True,True,O2,True,False,False,QOperator,IntegerOps,QUInt8,False,QInt8,True,False,False,"['MatMul', 'Add']",,False,False,glue,300,sst2,train,True,optimum_benchmark.preprocessors.glue.GluePreprocessor,True,inference,optimum_benchmark.benchmarks.inference.InferenceBenchmark,42,False,10,10,128,16,1,64,64,3,3,2,80,3000,16000,10,openai/whisper-base,cuda:1,automatic-speech-recognition,main,,False,False,False,1.11.1.dev0,4.32.0.dev0,0.22.0.dev0,0.20.0.dev0,3.9.17,Linux, AMD EPYC 7742 64-Core Processor,128,515637,0,0.343,373.0,0.384,3330.0,,,,,,,,,,,,,,,19.551282051282048,177.5 -whisper_auto_opt(None),onnxruntime,1.15.1,optimum_benchmark.backends.onnxruntime.ORTBackend,,,False,False,False,True,False,False,True,,CUDAExecutionProvider,1,True,False,False,1,True,False,True,False,False,False,False,True,False,False,False,False,True,False,False,False,False,True,True,,True,False,False,QOperator,IntegerOps,QUInt8,False,QInt8,True,False,False,"['MatMul', 
'Add']",,False,False,glue,300,sst2,train,True,optimum_benchmark.preprocessors.glue.GluePreprocessor,True,inference,optimum_benchmark.benchmarks.inference.InferenceBenchmark,42,False,10,10,128,16,1,64,64,3,3,2,80,3000,16000,10,openai/whisper-base,cuda:1,automatic-speech-recognition,main,,False,False,False,1.11.1.dev0,4.32.0.dev0,0.22.0.dev0,0.20.0.dev0,3.9.17,Linux, AMD EPYC 7742 64-Core Processor,128,515637,0,0.655,195.0,0.696,1840.0,,,,,,,,,,,,,,,-37.5,53.33333333333334 -whisper_auto_opt(O1),onnxruntime,1.15.1,optimum_benchmark.backends.onnxruntime.ORTBackend,,,False,False,False,True,False,False,True,,CUDAExecutionProvider,1,True,False,False,1,True,False,True,False,False,False,False,True,False,False,False,False,True,False,False,False,False,True,True,O1,True,False,False,QOperator,IntegerOps,QUInt8,False,QInt8,True,False,False,"['MatMul', 'Add']",,False,False,glue,300,sst2,train,True,optimum_benchmark.preprocessors.glue.GluePreprocessor,True,inference,optimum_benchmark.benchmarks.inference.InferenceBenchmark,42,False,10,10,128,16,1,64,64,3,3,2,80,3000,16000,10,openai/whisper-base,cuda:1,automatic-speech-recognition,main,,False,False,False,1.11.1.dev0,4.32.0.dev0,0.22.0.dev0,0.20.0.dev0,3.9.17,Linux, AMD EPYC 7742 64-Core Processor,128,515637,0,0.656,195.0,0.7,1830.0,,,,,,,,,,,,,,,-37.5,52.49999999999999 -whisper_baseline,pytorch,2.0.1+cu117,optimum_benchmark.backends.pytorch.PyTorchBackend,,,False,False,False,,False,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,inference,optimum_benchmark.benchmarks.inference.InferenceBenchmark,42,False,10,10,128,16,1,64,64,3,3,2,80,3000,16000,10,openai/whisper-base,cuda:1,automatic-speech-recognition,main,,False,False,False,1.11.1.dev0,4.32.0.dev0,0.22.0.dev0,0.20.0.dev0,3.9.17,Linux, AMD EPYC 7742 64-Core Processor,128,515637,0,0.41,312.0,1.07,1200.0,False,False,False,False,False,False,inductor,,,False,False,,True,True,0.0,0.0 diff --git a/examples/whisper/reports/cuda_128_10/rich_table.svg b/examples/whisper/reports/cuda_128_10/rich_table.svg deleted file mode 100644 index ed21a49c..00000000 --- a/examples/whisper/reports/cuda_128_10/rich_table.svg +++ /dev/null @@ -1,129 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - Rich - - - - - - - - - -                                                                     cuda_128_10                                                                      -┏━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━┳━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━┓ -Experiment Name  backend    backend          forward   forward           forward   generate  generate         generate   -┡━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━╇━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━┩ -name       auto_optimizationlatency(s)throughput(sample…speedup(%)latency(s)throughput(token…speedup(%) -├───────────────────┼─────────────┼───────────────────┼────────────┼────────────────────┼────────────┼────────────┼───────────────────┼────────────┤ -whisper_auto_opt…onnxruntimeO4               1.33e-01  962.00            208.33    2.09e-01  6120.00          410.00     -├───────────────────┼─────────────┼───────────────────┼────────────┼────────────────────┼────────────┼────────────┼───────────────────┼────────────┤ -whisper_auto_opt…onnxruntimeO3               3.38e-01  379.00            21.47     3.78e-01  3390.00          182.50     
-whisper_auto_opt…  onnxruntime  O2   3.43e-01  373.00   19.55   3.84e-01  3330.00  177.50
-whisper_auto_opt…  onnxruntime       6.55e-01  195.00   -37.50  6.96e-01  1840.00  53.33
-whisper_auto_opt…  onnxruntime  O1   6.56e-01  195.00   -37.50  7.00e-01  1830.00  52.50
-whisper_baseline   pytorch           4.10e-01  312.00   0.0     1.07      1200.00  0.0
diff --git a/examples/whisper/reports/cuda_128_100/forward_throughput.png b/examples/whisper/reports/cuda_128_100/forward_throughput.png
deleted file mode 100644
index de97128f446f669098fa0c92f693a2ef1fae35f7..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 58341

    *Lqd!ZoWPqNcGLzeD4&;m#a>SN>iQmFazL zARwq-=JDeojlPGIZk{71jY@ziir85-I2gd(nvlL18GmDo{*TF{W3i@-zi04As1K17Z^j7N5ur1LdHKy4CB zMbMSf}V>+t@Oxize+hkTZh;RLeac5IAb9lECDr9h@1v*o%nYyPbb((DqKqY7@k{88$yyC=&a>wx&W9# z%S*hz2}w>Jr~J^!uTce#9q>c4+=wGGMh>+V)VM?pj(97ZCIrU$-nzJyp>uA>Il=1! zD!#$S;_-R2#Z7YFs`RW54K~ zB4ht26vLc-E%rdN7vRh6#f#N`{dICjM~8sqew^cSx)8yYdF(G4)P1pYWlU$>Qt?xX zOUFrDXcnCT85@TKeU7(Q=0IsI%vBH0I&S6fzP4(ToYxc1B};oF#i!~C%qYPG2W&$F{v zAACF+&ALZ40f=+F0L^CS1xV8o!;`C_yZMM+t$}C@XSS`W1nMCnE=knGGqUu@KbFA~ zm|wu)9kA8`G5v(yw^Zi9JH*jnq@ltTP&4f8C2GLT@)jjgjN%B*y^@9*PV2t?_FHj( zA!&o4EvKf(8D&rB(~8=^Ifo^h6S%?pXfSFpW_$H!jPAJ)E&HtU_lHSYUg8*uVIH&= za$>$2bAR?gK%}`35-5?B2+Jz)-lc07in$+Ww5TUK_a0ee{9=>WLRk$-HclkL%Fm2p zS~$3McKL(`GhAF0uq}_;W-V55SRek=m(^rEco^ivm!X$x^Jt{NM{E|qpyWoOG{u`p z@vq|!ZN~wjzhY{0IIf19jDs9Ha86%Pa?RiGW);LT*PI%}nH~MG{(|h7;c%4m2{@=U z2Ca>y%a)xzf8H1?PG$CE^peVOqc#L%=MXvhH{&C zB7<+$)6;8j3Ux|O`+L{L?Qol}=;sQo()Gnsz6xXf2%!`k&ce=)QKv59y@OAt;-5m? zmD({2;Zf6v?ExND{UyIWl7Pkr=Mrj$K5zRa{moMIo zT>xq=&`1Y+k^_z4QAZqlg7@&J+SDzTlei_JZ;@Fqlx2}b3d8vKuz)aue5T!B%2tqG5x960 zeIOv$Pr^sqPzfVSi?|9)m;Qn7{Olio*o=oauS{7DX&lwpoD3}ntB7}Q4o3G`aD^Z^ z<=(IzKcKXFe)dPPKbjUQy9J?c-WzYsz)qsBF9?0xiyz$8Pg{>e7k+s4>QyB^o=)8h zkKW+eX{_S6ax1Lm1?-3>NB0=k<@)8IMeHY`U5{R%{W&X;n~P~yI?Jwg%+|y1w*)Di z7zRp+vu)JyM}raE8xci!HEvBuj$Ds7D+!(HQ(mS2U~P6Uc=L~NKbqPZdSZoIYfU{n zbYzzHa)!o$9U4ne$1~}uG^2j>_S9``*5IIv>zKoLLDoda?Ht<6KjTbnJ{F7h0!J!f zFuzar;Aeo@CsgrZhjv0!=!o;rQoz1r`bPLgfV40l8n3_Ms51a)-|2H!9>rPrzC%m!2ktPa(BA+=XwGBqH8Tu$V8U=xJF*YXMmaH(S_KL}!6Z)*gytqJ z5Z_z{ho7Me`G_99s9~lH7vtXOs-6MwbQB&jIax7P=^^@~IAd%wCWb{pk%i+v`4({*TnnnlaOqnil4d@a!81``4OupLtHa5Pr9PH<$^IB0V%!97-T0)LECmOqRMt`3eKKU!l9{C zkPvruygxp^wFCBQ6Ky><{qzvYYybVYb?z8#5;jtbtNXfX#J)(;@#Oki+) zB4T1b7cV}9+W1!-rB9PIG|^Iq02S)e`7Iw2vPiEcxc19n`2bI;2Y)?Z?0g;v6<>fo zdto7Yq0qAYG02002vf%OLl`cl$vop6R^2Q^^F-sE*_d@(j$o#SxC{Ztw6kZ=jxehv zw4$nEU1DhU{SE?ryaLC$F4{`{2Xo0k)r<5 zly*y-Bf?X|AS`aE%L|}$?*t^Y&ST{C9%9jJse&D>u={thuu+&5?$eCXlck1{Q8}9N zv-R9D_gM+{nQrquJ~YaE1cobv9Yem zYV5v#^go_}?%IWo!UL{CrA4P^UPCoH!db12gibq%OGD1`5go1@D4YL0Gs00%9kFYM z!0IRt%|uRkLSliBr*DI1W+AFkpMPYqcGTLp{92n@#_0H%4L0ICOat=+&~S_bSR#!R zeI&GF1lm^9InwA^Q!opUPCN8`CaUB0x&Ev2&>+#6tms~fNHIaJLfgS%V@^&E-@b@_ z-+I6wrWh{!5nF^KWJz0~R>46y0OdFpQsPNJ0df8L=VZRwvuE#6QITtUAbRRFO7^MM zvnKTb%P?MTRv7~`gJ@yN)Sy!f9g0$@~ zqwI69P=ri73G^E%keu%Kmkc1( zK5!pp>5ek!>T*EX+`wEMA6XgPiLeZTN*t1yPIP#NUExU`HCRl75v zGX7M_`XiMu(6>E#B3&dny06b+4FuIjI!Fw%ZrqJHc3P=sJto9WkliTQK`4I~as&+k zLelqV*)}7*5%Bl9eEG%WQ~ctZ5dwTDFh9^38Gx31Ti7VBDY~98v+(AP8~d}|hEnfr zMU!KLX;CUUhuLWLEbd})hAzsYL2 z;kp0bZxeuh>*1y0_!yGQ&>Wtxl)hZbJ3P+a-Pb3sJ6sZmEdA@&t#Q9Snt0>J4UL8F zi0-xdfdQbq#8R4p;_5-F@4K`61kL~`0;qTYxLrhivSzG7FdUbTKf37a`xL&9zp9|j zhd;j{SF;{!vSsbg))vJPb0|h*Dwl&Y@i(9!_kl8SR3dBEu$FE;*N%;Zf@3m5#bcoR zhKgiqcs$MhQf9#31`p+o#fyUnPi*?>Xp%hWUKu(e0E89i#_hEqa75tKr%%5Z650-+ zW)a}C+^subR ziK&N>hK}cAxXmksCW0>osCeSvS&Lo z?j4=lG_+Oq|;=~^hRo5E&_(#5ywTLWs2bMUbj!IfiSH>(GT@ z0cQ~JP;m&kY#&9|8h7NxCJewQ@(@60JtVB>A}A<$Tx#YLhQ`&TX!o2qQk$1V6z*N1 zkv3U8AGlHoPr- zuzvp%8lTU}URl;vN(k%1F5XTW8XE1I^AH3?L`8?9hsJhY2w7qTxf7^}JVbmK4wYE| zx3{cFx)2s7f;2pB6FxJ?f7Jp$Fa@_jS^s(FjEr-SCHRn+IPh^Rs!u*3^=dLd^b38Z z4MTtipn_Gyx~>3%geM<_bdHidp*8o$GBKprv#@g;s6r<0zN;Y$6tOMQqIfQvrhY3m z2hO-_jN&7qBviVgpCF8yaBb6xi5w@8-VB!QW>6&Immv^q9|QwEvv0_>26r;Y1GMo) z#QyJvh1HR5=y0UH3XBlsZd`w(Q}^#jb;t@4P*5o>U%%d;FcBcrIRWv`0I%=v9sRH+ z>BzfWNV5LO!!agoXfAf0nm%WRVWt9Nu>1z)??-KG?Mlh-znNTcTns^<#4VU34TAV@ z(nSz07(C7e5@9$v^yvsWU6-v0VDsl5554+Ubj7dHi{Gw=<9uT?-Tszf_|bsTkR+f| zu>QHCnEEvmQec|?1PHik;oLzS;OB_ShvFYj&)O(-=F;fv>3;JVPURf=JJ4W|}{c<3PQAU~8oZ4ebME*>@V4P&_}mRRpDeoVD8ud{By#Cudd(C|> 
z6oX&w&2X~oX??dqFZ{ou=v+`NER?hT3ozm+729ZORhvOmBnpU18ikP;vZ`Y9zOs{v zZteqIh+3|J+;|hC9|(}N8pr<-=ZC&(@0kkZzd!K6i0b^HLmq*(-sUf&Ux$iezz^t+!3|5puhFtdg4bB zk3`AXv13P^8bG-?@X*mI9?*N!oFYUI$(=>Tac@QyG_(JGBQGGHx*hQ8Q`y*zn-0q6mlBc z2GG*BZU|yvu71T5(;vl(D8AdB|CPN=--Enogr?1-SX7SRcp!d-?YG8kN^M?1+xfv1 zfXu)C`m0YlIR~VOm|74hiAnBGS>Uji0zlFJqH~8o0=Edjk_uV`?x6){5<}Kg8@|Le zjlXhHltZ-sotxi^J%70LcgCUgtd^TLYq2qwabMK_j`M@)yG%lvwkvKI7#tphN83+u zHEN8?JN|5_wIbOYWla~XVG=N@fQy{ck|ig?!om`i9UEsX-J8bucQ_L|4H$q9{!>t7 z&xz>)TcCy}ADy#7e_HEk2HF}YQE7Phv{Xzgyrn9HT(S*dhw?2B?wN?^yN;WF*!DUh z8FU$rT)8NrEp_AOP3n)|1*HhlXN|cXdffjfEWvS~JDu^>VzB-Qarj^Dy?I#9Yy0+n zWeP2s=b4g7<{?U%T_Kc=WhS)@Ayc9Z8JFoQvnCp3$W%*ZNGuc@B$A;*R7j;vX?Q1|9Q7C9kO%ePR&+^us6T4J|Lc|1oB7XwmG6pwop9f|c?EZZn;U0-&0U%ii8g%aC&?^z=UdF@u8yAGiK=new24uK8wklI76&7Eqnn zAy^uGC_49hKbrpE;sI~eP{ zT!}D<0;fnBT6L<6GKD(gV!WzW2KIng&`IbJy`g8i3#(#7aTx%NqqHt*T^K#m^xVg_ zDlONb^CoTTjLdEqD<38TklZ1kz_%^sSut6fo|%tE-n{+m69=c)0J9 zRzh?2wo9A|@jf@sv-p_D72o)|scwUxyWFYmOOhNv_Ik-myMuc>cr4m|^qr?k=@JsE z0D1yK~*-UQxq(+6QL|BD#Bsd62cf0Kee?lQh!-f6-C&fe&@^5bMs9qdIx)1)8J zQr}@~=y$s9m6F4X5t>Juwa`wgl)UJDOzs380ik}%#kn<5aw>TPApx6z)lnu)xs-#D zJvG6>#q-!Pp}|fa}hi2Nz)B zRGz?HcffI>XY>Jz{KdCSLHB@Vf7mq1138Sl4S*pjT^<1zX~&M3d;h>L;X^=BRc+LZ zdM4S6c8h2=W?#Eu0`}n7X5-0}=iYpA=mwsJMeu_0s07RV1a#G{lFVR>nqzGHn=C;j z!aChWP6ahye}+I%w|@QlMg7-|@Ggs-=8odNn0elfOQY+ir$R__c%!?*KaMA;&v90@5U=Os3kM3T25((AR;A_9DW;mZpKRx(R&BZP@gUBBfStLx2+f zTc#3nK3AQlO=bG4+4J>GtWA&gI7(Sl9Qfw;Uw_$S5->-i!AmP4YORzNX*t%nM2sd{ z9_QugS%QTRA{-jto9);B~Oe zv;&Zc)DQKn{``N7>LZt4PJ=eywtKffg@=vdVr zz90Z@-?L}z1t_d;wh3BC`YbY?Fd=cqz?E(4w1L(7_H)v?SHguuT41tXE2=F;j-pLD z&X2Hi^DT(Gu0!Y9{&$rM;5qZ4p5;A>6i=O(d~I!CNZ`urBi5edEz)7&%2e&mEYC;O zo6tm2grMps6g?fsAFslxGp}gBn)T|Yt7xeFxP3Um6Ag{Zmb|ZqXq=fZuk@9ASIR4d zfOiPQ^Z5jEN#72~aNnJFv-)y4nC(vyK1nJz?0H2IQ|##O+9uuzX2GCD(9-rr!R5{Nqn=?K1|zzwys6T@MRi*wE2#h}}W&I+j`Ux+dyY5P5?W zFW$2>zp~kRw9~R4aqA-X?b@nY_uyU=n)q&Cnirt-8Qg&K@pgFJ`iOl-d3jgO!yoWu zRuuKCb$#mXD3xER{s_&lcklLexIe#7>wyn^{u;V$_^|Q)J8bcH)ju)rfL>(w%GSoQ zQD>)3hVu8%t$r#aX4HrKU3ET31l8SDoTXE*vRbPvuXm(foUyy*o3NY+o1Yg>Oo(yY zx_j7;dL`;f2NI5V^3oqAh0FA;BiB<;d+c4kF*<*1UVcza3xoB0^$s5o)0;8=)TGsW z?|-&FJo{yI{+7CH7j(65)oj_b_3L*%zxAm8)Q3JPiXgqos?b{zJ>Hy{^z{0w&Lj4; zZoBqCiw0fnYxQ32wD)RQN&b<9r<>Lu4qM%8+WKUZJ10lGcU?LlWU=k=p{s}PD0a?G zZlF@Rg`T^2D{|h?_0OZzf80v``CRj!wY96`<;!gi^lum6|dm;K5zFiCQ~udoi(zNVfTZr{|q zb4Zi2U)ZOQzVY{(7FU5S$t?Xf{=EwQr!cE{H2)*?{HI|3RlWc7l>S*S|C=)HpS|+W zUiqI3`F~c(|Je$O#ZY7lCgo)A`l45WCjB~;NTlDvI7l4_Z?>@EO9_=#l`PInpK>O`qjXa#c8u}fWlQ%-R z=#D=<&F|-J-Ym%I-t}+ajunMft+XK`WL;FXOy|cm@vY|QT(fD@p~fei9G;N8ZB5uB zv4mvAc^^s)GEv2zKGX{E+*a$3r;cv0VV<4cB>pmK`4WSa0nN8+wruxq$sdP@^f3`H z4Kl|}fUE9-oa9Se(I`CrT+jIxnN&%+Khx9a&Ym+zoNTjZ%_?1eEkTKP;X5;OBi=@j z%*@P0{SgSRe3H*!d{E-kZ(6R+etPC&wV=H*N0RlDXZM}DYg6i0SJ&j9TCSVVpFc0(rsdIoU!b*1gZG?aIuiaN z6kptG8q})SE-k)(z@$yujcMlLbK=|@*vWH_#z^p;J`7PyDHLBb8{|00Ge@TAD6__3 zC6glVM}9^I1JT=*nTF4=4tv`O3&@dYe#!)6isnZ0HB@d(QC>ge^}et? 
z!|dj*TP;5*O;?%Kio=E&iNRI2KjySCZ}0go@2Zz}e~PchO-9LbM1~PASwH(%Hw`hV z$uTo254ClzFY>sm1`M3HIMvNP#pB}|ozibep<=kf2EP1z?kq9)%0&KW^Gp*A@>6@( zG-8gCc*|N0SosGAAEe1i{hqkFFFy7xpUJJ!kfT)m9jMU7KSo_lnSJP>k^tvV(3zacv7D$iWVkB4$iR4XfhKOg2Nh(Ep-7 zof}mLmG1S=?%IB~%(F8P>Xn*WxFp24=lNQGzkf(NXCs{sh=>aOb^G@2Rj~E_jPd0} z4qQo-2U1?`I!>>97#bdf7pPyo%4+=OYOJw@;8M-)?J(-MT?4f!J{viVTPDZkJ~mRh zKk@juT#Thc#$rr+hud}zWxaGE(6{SGi}?j+sZ0o((7*3*@qYOo>r}^6yB_ZB7CiJ_ zxZUqixEoT36u32>_~G&CyO^-LW3CvW)gUJIY@g=SKV*qX_v&l&>_QSB{lD!%aRa=?QejUbE-u1SS( zgCwte@EX!`Wp_YNX(Ok5$;_L`?k|)&GiIq_$*YQA-)!MRiGgY@Sj<1ozJ@aV&HTF4 z8ZlpUlGaUZ3=8nZihb^ApIr>B_#-_%eL~R0{%fySwl=NG>e0e&tv%|%xDY!_4rDUn zb(>8)htm;0a}%8arA3W(l{Q&1eN%lg3&<9vV;H`zpICaNBL{1r*|b#R_1UAne{aMO zlxe~;Vz7n5vdxe$9K}_e8jebsaT(2)yBQ2MBZMtW|M=t!vvr>stT-bCq?jUPCF?D&Ut z@yg+yl5PPJDERQImkF(=_y1gyDr5Q9(9J3m|F9hmd?o~%i}ue$tCFOlp|Sjoj;?hw z-YYSpGm3_e+RbpG%hPVCO$}avpV7LJ(^TD6pI!tL+;C6q#K!Bp?6ImKN!l0mtU~vS zcZ$(yM_5SX5Lx;xNjWzA8qqt=?VwPvoLy9VpsT*p_IBW_AK2;d&<|7h<0@>Kud&!_ z!#A@%Ht}-V=)+*3CHY~pGmA;^TYg7j-G2{JvxbJo8`(?{y8$c{tT(QRF0yMRFwM##8EHQ^s1dn(b?MMy zOJ?Q*8R^vEzKn}VbKT;0?!)$N+Z1i^5wLG;r?Uq5?JL@NX91X`b4T_wPMD>{Tsn$Y z5N;~nU@XNAW;hPVE@TmAr{uy;oM5Ha{AGNZQ&RK;JM6LULyepEA4Y&YH6id(%3*zF z%Ziw}vVu_|p1M6jx5&VNk2Xg?O=4z@DjPtKaE->A`WuAVcZ>Rh)SWIomYVqDcq zYSM&Hk2mBcbcKhAID-Rd*r+}z(C>wkUc?%iS~Et4H=aI$Hk zLEOm68KnLlhejyvHPHvVdXWQ;EPJ2$myR`Qi$uT_gHb|lZ>ind43C4FbO6!@e*#)t z`UHdu$MY?|`NWPhR3$2lyN-41))iSUvUyuM)urf1zO+0`BNJ0x24VqvgA4Wz?HXw` zf@tBi;xO6$0NQ|ewj&d~=ocA1dGciYGnbdWHI>#9WJR$;qC#{)ph+iV%GZ-Gy3+;4 zUPT=7Cm;?TI&|-$LnG(7RNESJmplW)Ymlr*uiin6mGib}0i?2$?8ZgfaRIjVFotoT9t6qv-6G!6|DIp{9>-GH7UQ!$Xq4^21O*1v121Gle8)?OkVtGmpofE&MebaF_o8GNf z?b@}?UQVYEi=c6vBS<$E#}ene^y{tn9-})+L#+GEqw^rHAQ~x=L}gs27%&lX#fn@+ z1Qo+QfIvO9hMv+kvh+*ePtogFe-6AakQ$|B@%10`+1NMn+ikpbNtxgp#1AgOareD= zOEhk)Zg{jfwR8x6@{BY`zR8zI_=0{pX$+Q;?g82NbFH$rtA!WTZeqONkl^D-3gH~h zP0|mDv~X(Dxmf&PV%6fgH>Ke#gQi!ImLa=Jec+m{ry4gdyYYMxhDi^Kjg2)$QzqV; zVk{OD5^kX}F$vmPOMdiRmy-w#QD;}#DHY_G6t;rRFf{$(vMi$F=m2 zpCZSI(zPzGxwf`-CQ=(-`}w)L`G5U26sOaD(&&8|IV0jXX9(CAyR~&Bo`D_%`qtF7 zUS4xmR*Pxq8;tr$!{Oyae2#pJU^WZfeU-^{4{+Q{173Pm-HY>MYW7=n!8bT7uHY(X zy-D5!wM!=-BW2qF&`^u&GSa5NOqJjsk?>7H^Y9?1E{D=@wP{`4s+&sFoqZNQy}|R7 zwjXTQwZ)Jw+3p3;hXcv?5n{al^q51ekDk%;gSM8IAP5Os;_ABV$nKCfAe5T!henM? 
zzQI4u9%)^tWy=^bX$*fE;)CKE>xeWgv4d=K71I<+S>T$O^9+#$0Rq#)>(`LlPlMjuD&2OIaiz0j7nngBxFRc8N4@W9ycHh7U#AEQb> zPDYruzL8_}NoJeTd$SA-5YwdZ;ua#2N{A(oYIW!!DzX$KY#M6jbeS9pz!2{%O+qqB zH0>)bPHpw{^lpzHX6`u0Mc<=6QA{CSx6=8hYtez+ZBbMyjCN}oWGans$`IwLF_U}`8*9%^T_hwnor&l|0vY%G*I_DK7rKoJF`nHKHi;F(9tNr^ku@7 zLay7T^o6A}y;K+0Cnr964~*e&0*;D~f0Ao6EW>kae3$jKpY60Bz}lqgP-ZvrelD(t%9b z^t{i4co&M~G>c;@2OmL@?hRQLv}GiOE}bK=QC=BpvhCCJZRKX6EKU#}#JG>kB#-0o z44znw>~3-}cCgQ2*U2~^b~f&+X_l1;4>6C{QYDf#@Y%7CLvCCg9jn`7i!Goy}si9fxg z^fSva?A2hVjUC8Je5p$l05`8h0G$!duD%-!7#Bruiq1-5JWP6KUuJ!VnYsM;&nQKsGLa_RB7& z5-Szw=?5fAo8rJ#h?@XzOq#D*c;{DUPgRITnjA`7gvLNRQEWF5XMNr!PTAt=^jWFE z2P@6QOpa&(ruQYnNC|=qB%iI}#<(?_=kx;>O^T|i@qNn#og#^t*pT|6^lEXHYW~ytDK6t;{1{>fLv)Ik-kYxH)-Kw z43S1o4kp%Tn)3+zu(2~uO-IQ?wv;Y8Mzg#jQorMxY-pSkbSiZzgW?qDVRq4wM$&v7grSoCmpV0>50@G!-lxU%8`g4OIwk7k$H{Uwa0dk(yUTR^P;w2 zjf!=VNR?pET#b8}P@FDif?ND)Bb8bjproD@gI|m7JuusL`K{$jTUU9SdbA{wte0XN z?g_cfSkyRaI%e(mbNb#5XpM7Pe->SCG*&C@pRIK5<7INNWHU>dUQX#0EjJU(qZA`f z5a%E&q;_QP%uRL>M=3TDBnls;`}I}V7QDj>Lep}CiHlT!7#iy1aA64wdj^Os_>i=x zp0=rUwu>{1Cp8q>fJ^CRsB-s*glb?H+iOTX`~1*=Rr=W2-$>UqyGVxmD+C;$u?kbM zH+!spwD;0)x`}iuI7i~@5R-TFzKeZnMaKD&5xfE%atGdz^>u=J?*((A=_A!^c#7Ee zH!(QK%BlrTx{)~;kf8WhbQ?N@)?kpmU_hob?*uI`gB;rn0+nw62@~8OH6CT^CJUfy zK($oT@LQ?F@H~m9n?-X0+J-3*t&aU!jQdFXlW881hRG$3R3*lh9kRw^@t;YerSEnF za6>J=JT|J`U|fT^s1)#uy`k&p;|G$893sG;GA&?gwyI##-{CEnA~}!!KaNTcKqQ2H z7;Q^x(%Fm$M*7xS74ql_HwL_H+Vp5a)TUb4C)Lc{xt+YWYZdo~U31-NVRJtdkENt| zvc+oxLeGY;>XgRa#4$BLvQdhHpe`M&^{#F2r&aLZDO#@ca@#e z)Z*hB)!%ae=xB?rWSlBKSp~Q80L`-Ee<-!+K_Br|E?<#QhT3{hu!593GD zj^2=osr?rjZ^=l=a;n5)kqH4jMs0j;JS_^)OTq23xyA!t?9!s|OM!gk|ERh}PRM?8ofO9pudG@EyHm;c<8p$!?S39;)=TIwLaA&~ z25mK^JCwnl^#%V4kybYU02yF^SoWXKMH(|dy-+tM_s@D+wS^mJw_OCc2~aK?xUE_! zX$rMQzA<1?Mw@BoQ|JE5a|xH=S}7Q$a~8gdE~#eU$DTV)qY1p7baqqdiQ%-(Jg-X- zgr|kVjia%on_&)22AJN55GwOJq%)No!i}*x(&`2Nn!<1k%B8-f3V4vd`%fQCs8-ow zdumtxqRWhHPN1_|TKlnd)5^KlzK#dq3jW`|bLW6mKS)wOuU!60PYl?HH*f*tZD`p& z4izD{7-^gQ%DAl~IRYkgY|KR}T0xeH$F7cFMkAY4T?Ee0R6%(+D%b92pJpQYl>M+F z)uv^${tg}Co26|A#;MxfjjEDE7NMQfJsg+LqV{L=(%wH(@JSOR$6n-QA(jl$MvnDc zx{;hB@ri2qtw*3Lp#iMmxH+kA4NS#JDb3;_+Cgj%gOwkf@U5?8XTZD2Ma!HLrXqB@ zy;5@Qmgb9diw~+2T2N>VevF0IgkUM_4Nlvb50q#7f`O?5?<6Y#m{sr$PtMI4i}JV0 zs0aUq+&`$lx>{yUTbJh1j`{OK^AO{5r`!J6bF0M=wpRm(P4*JC5L}b6=bgLmc zyX6d5xzGPQ?$)hdr5{&KhS1U1-wn+vV-knr!emu3fK%M{yS?75 z)^VUy#^kZ)9e)ywWfW;Bl_qsoON56LG_nUTN%h1Geu%6ot@jC>3e36V?QUG~ESR+V z==QH9Xn=ItO#Hu{K`SloQF3J9*7}MZ6*a-Nh20mw=Uh+yi|)H zdRkd=71E$jJ3YNW;Fi0AYd}PC(8q(g*G;bsOaNk2Lhajv9p*5~hG8QI1Xc;2tDiM} z+XiX0HSYmQM?Tz(CQ z8nQmAZ)Y)%CUPD#sM2`7-kce4$k^-~Ih8*XU6xV+=CKScP_Yv`O;MHfsaQKTVMH+S zH)do}8Z*Ienl9ezAiG8cNJ$wuq(>U8f%FD{XFFVEuI`5P^(Q zpg|UWL$5M&VeUQm?S!&@XmMiBXA1XHW@ic2xufX-tzv_e#mDy^q+=Z_Xy!@}Q*E7JK!4;)+8~iU*H#`NyZ-$yh(Qxs9(F zN}?_@>Ij*C#-tFDH>C%L30E?Fhq+)VDeX~vnSHREOxvB&JCFVxJ9@NMj|7_qei0Z_ z3rl6H7gU)Pw<5N4W9`Wsg7H}vsrB3GT32k%tcN7DECz!}OAZ9Dkx<>zf5{@51qhTO z7OXx>!)7!pk3!s`HV^{K;Vx^uAQN3}mRu#gm~`^sfUay$MinKJdQp(7I4vkT0;ipE zkHsIG?$O^R1vkD713;^62T|H^RQbWJmU~dlu%7uL*CCm#JI@9f>wU0BG86rb+H4-bs9*g$~cbln7&S!Bn>xs^WXx z%%7uIop4+_2&$_h+nykuN}YO%fqvJEzfJ?lHU4M||KLYflQ8eDH>F1iEQ_;y83uqC zTm!~mL!JtxNgST#E#eg`iyn7vU>4D-%=G@gOyy+nsxhziET1(6OpTP zS$2mCffBsH`~?F3eklb*M|0n( zHD{-(N;E4%ovB&&8YvVYVj+J)`06*vknHT5Y@T-_V1aXFi$R&6=NKigtM2aaBr=

    ~IES){| z4(RumD&#E*ONc^xk|zw*P!{HwF)TvXrZjBc{HJk}3~I!dGm}PY!;{`uN!f-5<#QatHW{2HWVl^!?BH3Y0~m&Kc5JE|B%D`SrHXlPQ4Cp7TsQLNgghO5s%x;)CX zzFb{zowZ4L)S57Gh+6k8bLygj~C|25@WxS0E5JqgQ^9FzY1L`iaM<9%l~%o6(HOE&oA*nYC)xfbLwDe9L^3n@wXc*B zA~K;izd_x81A)^XFi9iL%(wwxUY|l*E2l+%J_e!3$<#H1NEkI-H2jP-Hljue?*=>U zxl|k^KHxvbTtp`K@y%y;hiyXijD9Ronv7gQ7(}?0f}5nH2VNV3?})MUK0y?9KRZfKZ zV|1JA8fNB4Dl%Ka6}QII;|kovxQ3~3Jf?e})k9aDihe2}((j6JuuM=#^1$8R}1dJ0YVds#%`MF@MunO5Cr z-cV3~--M+RtiKvMi{mDzZQHX)N8SX38UAS2dk0ETm)ip|AkAE@?_o1=Lr^anwJ*#y zrTs%(3u0YE_q!>lJi;GR47{SCS@`t)hLa~xChAX8#qRh;gPNY>ES1&&P1;=d?myFJ zthVgiVJv9SP5@3Y5#wbLAC45Mi6|RnlnCs=aaMceA}32%_wb^mfL^Z>cUB27dz5&| zW-J*SprYV*DOtyRo2ZGniE0sbq<3eE3D8=?jDD(w-~LX}<@G&yVae6JYOG*w9)iq* z6}}Cn%d63Gufk%8SQ$KX2*Atq&yoA{QH$?ODYESSE>-T@+M`WP+kpdFf1i;XWY{JK zw8}R?G|~WVn`3l2Lm7BqCw{-bEJks1k+@X+RfGE+hdYFl&6ae5BxAcH+o>;nRa6D% zqY{FK3Q{1d&}bAecTu{e+!h-Qb4|zCzbw^^BYECogdf0ZvEBZzO6S8BvWas*Z{Qf5hl_~<}PBlPyNOWcMZMve)6`oULY+vW2e$Ke0dkZCAX=F$?_@98Jz zXG)>R2pstysYjDPBA{eMEFjSS1?CdZRu*I4e z`K{QqnWr)51c+k^y}|f#>@bj%)E8GQY??xrH)1f0*dr*I;u(_636k1qPhur9LAn$G z#bi!e(sxkyENP31l6IW#BshNC2vW}c(M)5=0M;EvEM z9JMf#@Yoj?zwH0Z0|@+_OKg?E!vi6fjO4#CPUTuQOsIOtc~^KF_!n9gcXP3B43kz2 zNBo^VyWOH^!EI?>$G!!(c7veufQS|PYD;r!jR9{=tTE91jf5*dLBb@99;d_xsDpr}Z+tWmS)`dJ%=%s6R~d`{MuC_zuoHV0yL zRnU9yi>p!C7-3sQQwJXeJS|Bdkf3ed&q+O8zh8jnMUEFS5GxTjTTYy)fwj1%*^(p- zH1i)E8U+8m(IV|Eed%PD20!=uY8?I*7HVqCcyqF=n4BnUWJ~~4Czq3gYuu0s$Y(WW zavheY_CRQHBamae@m|x`^8$yEe5usU&RXpAHNGu-F@%RtT)54O2V<*)INH^zPVX%P zL0?m%wqqsn1S&N-G^%mG$`01&IqvK}D;aFljf`;u>x9Vn@X+cxpte)ij;UEDC`^>K{^Fcij_Rfmk$_90EZrr%pM zYoUH*d^P}(o`zvS9E*kjNq3T_CRE;_;0J0`e?X;su6frKdE1QqI}(`7?HIlN zmc3P#=#|!cR8^PCxs24nPKxs<{`{;VlvLXD)43A6pbIB zh3q8MNs5Fdk`pq;>!OPk4LobdhJ)b`c)G_|<6yo^*2W1qu?6Jvhtgd@UxbEqPv7f& z&X>M#r*AJ;ok0N%x$o|~{ALCJu!`&=mX~RZ!(3$+F#wzKSDLa1U@lhk5Wi!?pdCB_ zocCLce%(S_Hfg6ceNG}K7uLE)mWjbkafm?-vPgMaAU8!k^S))Mnt&wy;DS)6M65Rw zi&)#Xk-1i;m9KhCY3xfcfC}Lky*`R`vyFCV)&Z$qa4A)gxVbE@j8^v7si;OA8#iin zM+Q^~1mUeA2FRFn(!Wd?6oyp1T$0XF%MWvcs{H_^=?A&iB?O2eT!u{TP{SpaP6Rjs zWM>ZR5%9R20Wf|2k9lHg5^E<4#Jx4$3^ym#V3gJ2nf z_az5*?UNYyTZv)>17#08Q|%xwQu!PSLd;}qipLB>R@#5W#K}0zaf~1#$yB=m6Jz`Q z&n|N8Gag_i0i~ev=Bj3~wL5r>aDLmeHR1EkVH?j^KrsnbpkXie94@O(InDO{TRgKA z1PCAI4U;{Wis1HV29w7}I6LDSxd)s-GJAkTfbB#8A3L5mb<#shESXq^jwaUjH9R+& zdnE#e^zP@RdJ!ASvI9@`7Q;K_fGV@*>Zak{ntFe@I$(uMi{{NoQS_GBzjUko8AG4c zL_i$~!<55abz{y~lzJcW+|MFWiBv#8m0$IT$ixruvI2XmMq{*#BOXm{}E$gWHEaZ!jppklPD@x@VpsyS(*p54(n?*Mldzi={7$ z1aIe#(O!blxV+S!^O!%fQh9{CC;ZVgFe^1hv>@az96CJM^ zwKOz(8Xin&k`euI&GPqi)iG4R5-S)5p>w~efh(@KMVzEGE8X*?qcWocuh!HSUSnoQ zLIpd!ig9y;Wm>ZMSn|?aK|NE|h0ck?uvmGJKK`NJoqqW?(uszVDIrwuqG*a7&oBl` z+E=vN%!aw(AP-H6YUkG%J2DPY2J^xGx5L7NCEU39o`$k915A_F^j#3&MhOv9!uQV(X zpIlTfh-Y1K#xN#ysEve9ga>UBt3swl@DM528i|;T%|;Fl5UVNaWeyub)&($OxmXY9 zMi1sGQyfvb`F>6w*}r*7h=2Zjm;{_>B+xsIzusbO6xE6(4*EaAvJ)#u(+c>~;Mi~1 z;Z*LE&!l{u)?{E4U*!siJYObX3Mm+Ed4+d>%EWAma2s6P=nOHcNEY1z7Q z-TP2ef_e(Lbsc2ffJS0SLb?`I%$fbp@dD?=}3GA1baH@oST@gw+r> zU0hQ#n+a)xV6)=c%7MGIbi0W3>Q0!K0}$k|7{0o(_$=NRMVZ(ST`!LpN%DxmbdU^B zc?7ah%Qwob(R1w@dl!629Q5_SRN_J}3y5L|5pWJuiTgkOnmA}BOao;2@l6YrjeAuT zI`_oR%-T0a0+RXKavB0APwB*B!{vk4m*Eb=Md6N~d&bnXb6MHQPM`y*tWOJL8o{)Q zOv2rw*|M2c21lID0u_rTMWz$eOex-Px?(D?KH=-0(!~2iYa})}N`Rb8xPJ^h~*UxKUw)V4L`LU2QD(}@Am3NYEiC;#=hKPf`j3ovJKvc#uC3O-2@8A21-;Urcw|NO z7^4eWM}DuZGxg)~O5L+ZCgl{hU(IE&o1e{->O5qYQ_ju)^u7Kg0VjjOTWPX|ce< zJyKU!S9N`H>hu{ia(bwTwTfSvH@QtvjVmud8R`rzES$e7X1?3jty>wp6bOv7qZB#P zZPrFDX_^CnJ+jPbpXb|2vC8B@<(a+yGExjr==HAm%UuPj{aR?^eOc!SM4i{?=-ce< zE^g7jckT?I-AZwJP>}kKlMDMC-LZ4${(}btNMj2~KQ(LD=1hf965ctM)&c_U)KGgL 
zab!->JMd(B50^%RHC7eP9pm=#-aS+74$75Rtku-iPMtg%Pa6RZpM3+py-jM>s&(r0 z=|@l^4zF%jeVdVy@7_ppd2q$$^`3XHL2~xWE>kWJ6hBS&t7%sa!TdQXpKCp|s)$99%hR>$t&+84Y?{f`Zm`Uf4R>y9(5r-eKHxahQQ zU2ozxc+QUXL9st>=1gzaTL7#6J@u;mfZMZQsjIt8YxhpATeH`#T>cEoGB~t<|9(zw z-CDq)|4i(2#VJXalP29yO^rxSs;Nk77S>A9p5C+rw_e4AspnpP-WyDDcyxzrC~nU} zm!3eCJ569!s-$)ACHvv*)!B+JyRP^D{_seOR#a^4L`TQ)goK0wc4c5_=wv*grvd}f zWgiv5ve}Y|FFwzHpL{7moScVa=0AI;X5aiTL&fkRFL;xaW~Y#4CsS+_Bz9^bdD-{RhsJ(sn?gwZg8D;P8<#mq0o@#Js zZjb0!_I2_Vm5;mZZk>L#Z{upeHSvbzr|)k%w>4qSny}Fw-1whfqn-U%eLD00z1^DC ztK$=DxIF)LIg0ido@d)7efsdcoioSVeMyQav4ox)N3YWULx-flBa6c3366j@k3+`n z%z!t02-WR_oBqtOq}mGO2CTQx&)>dHnmqXshhfeQnQA$0+G*<5us_07-_*J_s+svc zpvcy4+ZWwQY*&j4`~nc&My$>i|ctz6*y4<9VYkN4rEj=&Tscb3349IIhq+)wdBt>WSy?)Ml<{xKaj>JcNY{pAV1 ze-yBXU1_wAPKD3BWQrTv<-3~p9Nv2Ucc1$$R;*aTtMOf}nHD}gNL`Wo$@J(-Wlc@Z z1IY^y#LS4zg z{0>eE?p7`B3F}OuxZZlD_Zw%4(HAdW@;KMLd2`F@(}PIuUvg4s@Wg*bIOx1U{31q} zI~>veh?4Jq{=0z_Y%(dOR@Pa`ECIFohrZ)s~OtUgAMD zUpU@&80$ZYt@ifSE1x@e?mU^^U*Xg>-H|IYZdKKxh(lW~-#-gE-D>^!Ss80vwQ5C> zw%|Pzp?cAFE5Q_=r`E2A980PtQcg zO+sURaWAzJ&-&N3Ecozo<(2%8ssC4r>Hfk#mp}JaxU}A5#UY$&TTi&`USersA?_8( z2rC+-#*H5RNB%lvV`HT%X8t3VK}QCBEK0eW{5CKVaXLCF>9XY$O*U`ELgw_rt0|5j zx>#R#nsWX6^&>rPcC*hy=r7>Fj*i{Bje~TxASSFJH8yP2=-%uLJiPHk)zc1?t)2BP zYG&F$EQl`ZeDD7K{qF9*pFb~SN5&>6d?Lu*i;f=8vC3wihJggosM!}X?>U&Du%9$c(Ctny$e0>Cq6GDAS4*o*n&6Ggj z2lFpAYu?=Ux#o>Up-6rNx_aPJ_7F-A4 zCY$@~$0=OtNp=I*w~vm%zKs>3wJO%gkFy+$}l};QE`3#{8<$}_xRbCipzgPqKP~KzQS_iL_2sFtmS)ey^{a& zPV0{~Y&&DdjI-8PCaOBNP_rLUwquf}U&(2;@y|0XN$jo(6sk;A)LsYUkAcDi<;XAE zP3QjnOIaLNln_z!r7K5BFSDh(Pp4q)B4G+aFn zG}cL9e zI}1j=Q^$_+aqGYK%&=iirs;26qxfS>sQQhLEi||0UtON{VdWjVi2ZTx`9hCXtx{#A z3d^Q|UvbG;R%0IP6k>)6oT@mlp(R)a8al7w`jiY0yUcDx8#HqvXACJ$i_hVttv#}XfPuT}ZT@kN^Xs~&q z5~vNf?%binaGYzCh7EJ&ZX49Acbo;^Nk?Y{Mw*kN{DXOQyLKH*OmNeRHC9jSQ~t(V z^Tz$N>ihTZ-TQNXzMIxd`~)1dcy!tF<(A{dc_Z{o$VDDMz@E6@;y<4_XZ#BMV_3_T zk1aSg`Knk3IaP835#fD;-%c47yN-TAuvQ{Q9z6e zStB_q$>{wscPM@22VAqwY0X& zc%;%%u=>XVPdtMHLXxS9NfbWu@#DaQqv0ZHc0U-*p_rIW#u6bPA&zS+S)q?O!cc$b z?c6j-Ybl z=FP!V&Q4@21YWrCaE)e*Y!5|f&x-7Hn(Xu}WcechNvv;TF$?2&Y%G#hwX*)Z$lfom z_PgI^Dn9KfyWcerZ@x5vP!6GxRwl3#bS@>IVVh)cStALF=NM<3-8!!;K*TMzs= z`l9x;0c!Tfzp9D&C%lvaJ2NPF`Y(OGS%3P~ibyVif=F&|VWF*rOt(}jx6@8}919eg z>EwkA<2$%*e{G<+Jfv(JE?e+waUCh|&YpdY^W_|e!!v=mWAEOUk&%(6N+mUfC5X?- z$!WsGiT>W+-qehWq6HOKaUjOiXc|n%=e)dqTJol+OqoKoHiyNMghWsuONvM!qVM0g zuPtw#FhTi1v>DaGP1)yYjGuQE@LBl^;EBv6_PK+t>GGfpYdtphlbj!O9EGh`i+-G7 zZ{P1&-^Qy4H*^{BEBLzn=}$nX0o0q8vmlDzOpc6t$Q$-y0`|#Mr%Z`46tm+fpd1S_ zeW+d}XX4S_$Fu)J|Hi1o(x>W*Pk)qO!Z?$@-V_4aFRx6JeZjNi&U?`~!@b2BY&oi- zcviDw0gP(atQo%VfkaQa83+tjx?#rEGiOHA5pe3vnMAyh4eQkLe*Jp(+js9eL0P`8 z>(Zxu^Zi&_EnMx#&!3(3^hT59NHg*R5O=KZ?44VALFZo!+xzVK=h+u0>(#7P>oH5} zF-uqiGbu5)JH_TB$Ycra)P(D>bkIucBHb4hp2-XUoLE#%VV_OSrcm@UUg_O@!XusW zbR42`uZmvSCx3G%mpW2 z>m8W{MGIEP_7#fXHk3W{ALD)%T00+r%KNKomJ$ZS7&I z1`Qhc78W`lH0bDAc69stnl&@!n?dHYE+q@30D@$>iZ)UV%UNGKK-|atxJ%Qv6K~DrKM%}$T z0*84LOy#jTK@A?3y_C=={VKOC`*N$kU=pygKjDRl3aep9BcT4AUoYZ_#(;{~=$ECL zPuP9QGVO6R=A&WRMfMu-_lwGpnQdTG{`#g?`gJj7AEEfGY5B`6|F4n9T{O#oC;xWQ z`t<|;Kl@89fu^`77iG^Y`w5Id5}v&S6L9 zb!PPDFvbKRr$c^B-}}#>Ph$YY9^3%gMTV?kP69fCli?I;KV(VjbSlhAA@;S2Kg`yX zK)B-5izz5OQf5lhp@rth_wOe;Ad8s3{KtAfKfl74IqL(MgFi^V3}iu3YW)sg@D#3z zF4SEsa#E~`5cca+u<$9(p)I)6{N81E*##o7>^DgpKoVoT23jK*z|I^i`Fc4*w|n>T zf_8vR%dO_m_ct^&oG^LvNo+2bobNks>eM6o`C$0d5SGp*`CgXh=3+RorC)_;?65nK z2#dRG|A7PkEcq4L&%Q(i>GtetQBqO@otVh|6>va2{-yEFu)YFMd=VWx_vm5H=2H1$ z`gxBYl!W(9Dzyc`_CgHSUohv*=zZ?;<;y~`a(cxu{Qetd3X0O4oTH^{WMs{moetN_ zww<^3p%!CWTa@p-7|a-^Ag*zce^x1816Bp$XFQD;;sZf^JbWkA$qHm`JJ3_?QXe0m 
z{_6^Z*~Tm`EoEL_-m~x(r+MhSAwMtbc>tn0Fy~kj4B2Rs_tdMaF9FTHf)#_(^<(!gp)G9@DN577XDTM+E1D{!tmE?{1ulM42t*> zvm}_#o`c^g1+e1cZkH(at-iYFr8EYr4yn~5+s;U4wu9LCx3?Sew%jGSN z923KWmXtiFf{HaLyRHRe8k7f`1%pyjQlO*aN{foXqX)Fecqje+hx4bYYd_)Gj9S>| z11O9br0>08A`2KG{b0GV62{hjJt! z;vZRhyX=xeXRDX*=Pt5g^s^sm2jcPMjFe!peL?+1!2=Gc- zUWCt zznYi**#fr(ul_xk{0j7Cf2U}(z0s%g-}NTJ-yMto zI&r&F+4sRgUJbejzkOKRWpnh~&QBLxPnx|)ct2zMZw*VMC#a3V&9zJ3YRn{mp!_*N&9dy;cbq&#sy|?mb)pzcO8q2k#m2_TQyiwtu;B)Z2ge z{qVu?cfb5qYVz>U1OEU0=D9By|NOBKUmNzm@Qb$0{o1|1HGA!vH318n(_HOC+AgYh ziRXLwcjHH&Y2cqhK|x#YAN?w`JJqK?D%@&$%8sWGFRf3Bw#1WbSs=N!VEd|jhw@(R zpBmZOSRQL~Y2A6_izX&kUT@^~lx}i=^Vl?GkGst4A4W~N?>pjs1&55((=UHa3$EGu zCd8%wQcCShf7K07Y?OjyqrB&rJ0z_Lw({2`vPaIa4A}$ zdEEZy^B<+#{#ZJ-=8V6Jf@W}*D=+p1P2K}3NE7M;;z9^nL)V(ydGxxZns^7Npg?Gm4rCrtU zHs?JU>J4;UwJ(}cAAhsyp$fK&*>~^ll9JjO){!|hpz6_ZW$C(<9VeGgaTh4ByE?PT zr%+2=-Rv{up ze+ynC(&?&LYnxx8>&{%(mb}&8eTp z$#(wk*Fmw7`?h6xD1-zD z*E;$1tv4~-rUXI@=T)va&rt`DA5VxjTd(j$f;Dwb=KYzwYBmd`b{-2M3L z$7MKM6+S})?n47z{(3j(IzGuCnjxP$*jt~3`-`2jyxES@;OHrBQ_KTPZOx!)e*fId3ER=%$})5o?jBGqhnIv&TX zXXzf>9$z@lUlK=p>_*O)lDceg1FMtRma7=+ zDlU}v%;zJ*!1_!<1ByQVRa5g0>b?AB+CxN^h&@m2 zC+K;5s$_J(l=JR>k&0#ax88B%RZIhx=T%vf9Rfo{eyG|s(+%!6bIY~_rgc2jxB2lp zeiCaZio`coxD57)8K8w3KC?RyPS9|<|JD92sWriQt|Fa~Je8YDQ@uJ?H(jqfEXd)* zLLd{|**{jbtMpCk0MZ6;EQ)7-g60kjmCXJ&e!>K`&EH)xxpd>3lrDMY`(DBTHS2p@ zaQ5^04&DR3Hjj->3-12Zf=Se&6A70QDio}ytA=>yYH?&rN z%blcGG2yj!za1&;ZmlZ1XqqtFq4XQYxXX)DTkWE}_8_7MRUKZSvNqPRAXOY0p5FCr z0iLjeMiZ7X|62Kua6A+{e*S~gi(PSlXV)cf(|Ydv{`d>`0$1v4;rEo=_x9t!^f&Cj zbmiW}GUx2cYonJRukdJ2*Xnq%crC)loiPjI@=M(+y)+rqdOI7#adI_FEZr=Tf-)Q)IdbNSAgJVs%C zl~h5vQ-=EnkH+NL+l!TQe)O4Pmmhj8-tuN`)e*fyt{L)>nn=6dz;OPnv!1=LVzbg~ zr2Q7*>BnjhbQEcIKmRG|$+n9%NN|Lhf^-+XmMw|A@Ig2t;OFStHDE3A2fAR4^y z&Fkp1X9c{^VqBq|XUp%Y$Vvf%*pqwKJ=)1p36~F_HNW2>TkE$Q)W}&yW*dV9}8&I-{ebqq91HJED7I zRjNzP?Vwp}NBb>0-GZktGjaCTRdpo^R$&uo7=*4mXO-z4)`<<3a`fy@+-&g0p8azQ zcNd%6$t+*Kd~|c>koT2FY#7D-`k`8#H;>l(3H;XYNR2k=`)RE1JW*#<;?q6lGQb2n zqKuL0s`!uxNGXc~ZsW&hwugT^JpaPcvn{w?nevR@*B2L+Uvrdpd~$VGewapVe(JqL zlT2&75f9_=qEhQ4GPuf3h;n|2%<-3h|6tNM0H;^{M$F`e;k@5g-Q3f78&PF# zt%ZE#-p!|7YI6O?>UN(3LLYtS=T8&`vm!j2QnS>O>_jRKh@bfV`h?7k(r3xrOVl%` zx;EklY`hJnU3J>kyWGxNXCKb03}1A5)x53>S83tsp~0?99LUq(*cC?HMrvJEif5T8 z=kvl(cD&>PA|$Phv}>vibx+^d*}3SB zV0%5BPrQ9XTHEr|xYT<`rdpk|NOgI^6BXvnH9rjqYx6|x9Up(=^#zwVL)~JvE{&_T zC$l*fUMn&_d z0Py8ipEe;usoUYX32W1TOj)0@!yfUVbxzk6aiILlLM-eFL^$!qgpAy?!a#NW(6cp5 zmg(l+;mP4g^aiizTMYDdskVo9tk8Tq8yg`9Nis?>LDzF{48Q?EVA8xw{OTHvclJDLj{(+JqUEY|R3V$O{4apkWcoevxw=(FqZZgp=j(3HIpEW2Uv zSBx~hZ|Xx7j0Y{l}K|+ z$fsFdC+F{8E-pOTe(U|Q0`u#l>d9^u$GEPaOq#3IzTg=!BOawZD~#5O7md!ZsdY8M zy2?4<{Zx9_A-x)Lyja}PT$!04_$5~uFNogjiR6|DEDMCvlw)e;Wi02Gr<`(DKSag8 z{92UQezkLxli@(-h@P%zHIkn zJk0#TR$xheJiOciOGIGrKJDA6h|OvrU0SaxY(tq9QhyWCi`moW<=tOxZXabj1}uz? 
za9PfX;9l5=>plL_eG?1kEcF4`_sdfI#vhryV6HF`2TkOffoyi%-3mAFuG`y-*m(bZ zY~%?Xlh}&L8oe>g#WoG4R~Yugyw3U5?e~~$`p>AFs;&i!Egrz$;-1oh$0qA9m1g32 zsq9pCUEBxIn`+>JZBlzPyZguD%0!zymm^6#Dl@zSn!B=|?Y#4e?DfJWYMbx=B7tgJ zE?aE#Ls!Q*VPBp{$Zs7uk}#`h@P_x^740{>q?1zih$3~05Q7Mc@LUT~n?_!Yzo7tx z;YDPtp83)<3?k%08lc4Ua*dFue&l0fn193cH-TKYnusB2#{#rC=H#f$gpIpmcje z`drjyd356rB%&M9B?Sq0@DPDJxaII>kmGs2fndPoN z&Fho58ONtzy*itK3?Mw^!R||!Z;!Ug_#()2Xu>{J|FYLjkY`iA^&Nu}-Su@zLezNW zb=7#nswJA5U-oaBd0c?~Bn-fzkFU5;m+CEd+a%t?J+caWCrrx;2W7U?v$TrqQ{#Uz z%&>AdKfk7IlwSc(0g|H4jFpiQLw%V;`uoRBUW2&r2{xn5DRba}xO*1f)gEze2LP^n zr$}{|g%P92x(DgW;+pJli!BhV%&WD;>mKyV-VM0W&a&uQ>1O|3bq~HmbhAQdyFePQ>g5dQ#!ag^g zw<)r!*89q)O`uS)ct&@dOV6xByePtYR(Q4-;Eb%?fjY$q@h07)E55~52kti;J~bw)ooi5|yEWW}hSgSXGm)1NvK^-q?Nb;DojD1K6f2o$^CY z#@&E8xd;1NtzU`{`=F95!cA=MYbZ-1Z4{T@`NZF%UOvh*0B;;(?l)MtpjjYv-re_O zaajJ4SW^`d$qAq$j3+XGbVZiCoDbL-|CKAcPrDCjkH5!ST+q`|v@-9isNRB?+4@X5 z-!2i`vIjr&*<*p1x8I+3q#DUv<@2M3^%Ez}b(km0b`{mzh4lBo>5srd2lq@Kz^mI) z+u4>orYLK~uZ+^wstvVn!*^t3z|7Uk9 zL`6sH2VWEkSh)uU2aiwSshBlxG$ZHNUdJ{G%|F|Xi~2%Js{2jiC`pkw|HVH)Uvtsq zlHAT)Bf{o6Z%!SYnlRA;`;c`%(&!C~2O$EmA3cjo9^mvp0t&K=bhicSN?zt#X!)l5 z&OV?EmDK70Jc$od^~P-X=xREtPP@{0BTRs*z?~v7rYXE_K!V?f@t~jD7o}j&V zCq5+*0aGVusL$ngMf#<{X44PGPSL)e?};_?#$Ec2V!WLz z4{0SIVJaeS5pp#u9Ou)o4Kjv$lVR@$az#5ctjBP)S8f%ES{V>I;(mnoK#>)eW zxoJ@zs^a4G_QD0RfQMQrtnwyX%^cU3C2M80cR+fx@X?&i%-?aIocv|p9;@TVCOMZ< zo}B=o(Lk8qMks-jWj=NT%i$gAZ;Fk;pyfwanOZn6Vf`JKT)gfTU_Y_1LBu?@X-g01 zFP-WY#QOa7wsWX8;?vr40flvY?o1s#s?%9IjW}Y|RL`S;P zsOUNa+12tOXEYJ@pD)+L-xury@{NxPt^tLLQ4oAe*pY zj{|Fa3zwYfWxGo(WnVuD4*Gnhwpz;5J?mMF_4AE@23+oZqPeBwx^quS=Ag0X-GPbQ z}+R<>#XsR+LgITx_P? z$v`E6a$FbZYvnaCN)4#|ULiZitc;3e&~$s|ZU=lGg<{HX|KvUQ!B|FcC2@};Wamh- z29Z`4^FTYepe%?1{qhP~oQTq5_hVyo>qB$YQIfe-$yV?4&IPauQNC9CHR5L>O82&| z%psj{heY9?3g>L~suQ#A1F-MrlWj&Jc@&3b#}BnDGlm{!liw3%FxGnER$>vQYvYK}sJ-tj}i=`%{d-oRpq;MLW@yvvap)*|x)HTsp%QoSls-RB$Z54RFMM%`6vBh`3z$Q@99Awr#jhO6LTM#Y_9%-GKcTbHp zG0+QS45h=QK2a5k^oBgctf!(}M4@YaM8_mP9gFG*WG@SDclB1Bj!l^Zor-BKx23bx zK#Mq?M)+t?`&qNnGwA`~)gmD4(T8w19rYy%ExC#b;{wKU#a@yhlRZ4|qGWyEkoc15~?wZ{qfGRB&zsB^Y68znAP_m;M#xstex5K;Z1}l1; zm0)`hWCUq!DP=yj%b-JKS+m^68?4_^=$?vOA5C52G+yF|TN%$sUU!aR>5)^z`%>LOwTxz^iu&*JD!}C12;i38P@ihu$rWOg;ip4oJ>no2_zIqjeRA2fupF@faUbVju=02u7lY; zFqzVop6x4AtA-G&Wf*A|;mJvZWKaRbJiQarmJA`4oJN=H996wRualMH-QGU?Ym57k zQccY)^#(2R0e(0km43+T>8n`F3>rd0%=Dh_cUwC$b0iL0-sOi6AI`RaD4>AH2v3h=>ft;)ny47M<93{fzRV`W{4?laLZ>a^$1(p}_R|N3>^C z94cC=r$ccDcb{go4Ig0kS;G&2?zhw%aVnKlvRlDfhvo#z$Kq4;V`rLuTw>w6f;r9s zb-4abh(^9PhjplQn#JfK;%R{lO`={yq8WlrC{yzG_*7dUFX)Iu z1=N1dLzR!HflZ@!x-qOcW<~hbZI2arPKHxZG=Uu6el2#F$?i?5Vi&wy3^@o?1R!{7 zwVJRg@~IX;pNige=VyBaR-!vHL+x^U`#S0z5R2@erMZ)nz^Pfl>jue|wCc^F2^up1 z0(N1G2Ov0ru@NBLMm2W>2h00(-2A3+*C?GasQS&j*-*9V= z#EEb|z3qK0O_@CJ&$sdz$ks21y0i7x003Jo zB7xXCq;gZiC_Myw@$tmjGndLae;4MAs*5U~MA~h+Ggd^B9>-M)5t0R_>;+vcY_Q{W zFld*r`txxNMnUE}i`~S`~P}%7PGRqK*!&>CD2!L-blv)}{h@i1! zDoW=Rn@t1aFOXJEI|_k8ARO#1$*g5546;6K%;fn3;m>0xpFlj(k%+!ecb5oqz@ky` zwj;^RD1GYJyRS)fZ!v7fi{Hl=b22XZN;!>GwA$OSHXYNuw6LPO!&N~n8twmn(~VJ`Qe(;sKD-aA>pADb=+n{}|^I#Qzsgfd&? 
zcB_v?fz^naD?q`WbbDs*a1+S5D9${oA+5bI>l%2kvu@4LBrzjQ!%oHM5N=WF=7s+L z|G>X&jHgh9{HoA&{watGLey0Jz6SUPzZ3Ofq6dtXI|1PEC5gmnfhuebG+F`3yZn%uWvum7v#3db#%KESs4c#8Hs7M3LU zr>~L6tz;@o&b`@(60BmYREj=v~@tGr)YT`aDo4^9Tyg))*iv?WR zmF<_3O`hT7)kn0DJHxiAezHBGU{-8IVyJ2IM%IuO*t67>NrrCrK#4-JyBd(OkSI=`9He%p9}ab zm;^osM+o4&IQ6(lSld(A+0z4?KsbPhisWQg9iIRCoQpLM>xPd-;{32c)r{`15im(y zo?t+Ep}OT$l2O1h>$erhg`qIHX<&s*B4^OiP^O_-v2*bbL^go)F z2(fSx4kCiuA(~>78D4Qh!dTz1&bu%|%T=P_BFW}6SGXKGsE5e!nln9Q3rIXXNy7l{ zPik?f7cMjU+H}rJ_|Ub>B&uYvhMI6#ddc?UIIYURUE1i`nZ|W`AE(O}huKAj(V~FE zY)(>!Vi`%%Vl(SQDEw90(xfkd#7o6$szVxaz)Na+8Sm||NmYLqyEdY-0y135P&zun zzNxB2teZQqUl*07`d2>wVD*0u3dqEHZ1 zEpmvu`O-7K_|kfLA!&BR^Yf+9ZllBqXi6HLNaO~H6RwTPCBgQR-$AloR+4z?Fgyts z$oi?kHAyJX3tjVZpu{~Re*li1E~HTTV)rGWQ(*wfah}<0K5aSjJ&3{TdR6ep_}jBPFDTc~48DV>525sht>3J}zOTUs@!UKNxGAb3DE(1g-8 zWM&=im9Q&1`#P2(>bjQMghT@^m1E1jbZG~@mzp0cNJT->)|x$4wXmi?4)H=2r^_3f zd}cM6gox(r46fZRFM0sc4mgq)07_&0eyMYQ8THrsyXuaT3izlQnn^B=N>DD9uQdXO zk$7psKR1_y&2Y({?6w7ETBvvsrMy(Xt~v3Hl=KS*c%+=1Qgk6v0A@LOT+y@;2RH=buo{H&%tCS{a>xi;^>Ilwk=QZ5LBa?c>KE(Pm>dTY z**Rg}&W)XwK10w9r&Gis9jUHZBKwFk35dj5!UIs6I~SQi zA&9&7WE%^$ynKowF?fu^0G66Qy+u(EQ(ejS>oHRzs09D<+fus+r@jSFdHr6ACXP0R zS7E@X)4mr7$s|x8{$%3p`^c&r=(M=hfa zRb@M>`xxh{dp#&`BFRR9nX@|A2kdbx@zU9rs)Wnf(NcP^NVFs31#ktz<~pvUn+wwK z+*A~1glcKnnUvYvsRg^Y*Gk+KAd4s+kVhd76U+jMij?63q8iSVS+5`@@X8#(RJ_8W z`1#`r0RoonhA75p4-3Pw8Eo0FSKZ$(68$P487iQwP4;>xlCU;}iYPlP>`s5zPrmr( zx%{0PsGo!*iWxpYb}kTJYXE8Q0}3R*zHqUYT0M05tGAM;=g@+1&9U6QGZ{5th4*X7 zhj|2kDbMyPlB)qt9|C1E*<$Mu%GuiPsv{!YOB({!7LW}0ablxDbo-osErDt?{xc~0 z7L~%z2FwkJs8Klxt|l6JCv)Qxn-!7TTF8@6J^K{F)??rG|0si#xX^vd3vS^&Um&My z$ox@GLu0!%ttJwMQuE9hXn#^EhgW#pzYR6&&%yrXFymJaBHa%D_&CVz_mQ&I1V~Ok zkW+nrC)VHiOcJy4Z&JR3znK`Hf7+zK%HH0d2Lngoa7meTT-{|L4$Hd1krJu-rOTPYN1OpVm1fRZp{ld@w z_kT0<6n7fdq}@{ob!(OyoE2-=UEO+&tQ1aB0U&oWuKpuVzpR)#jUO}K(9PGp4u z*dKy^N<*p71L^Yohf%dK#4Qh?G)o&tq25rxHgL8i6h*WHDi%O}I|<0C{H$O^@T|dS zESr?pUr=g%57&5vdl;uXs|ye{HxtYoCCXW|W(foOYgBz^x#p9y+dtKNp4-#irli=S zA~I1<*whZ4rUphNI8xT)s6Y~v5o*LRVJ-*B*^Z(;8m^);9Ev~zj<#sMQ~GhR#JXfL z!5^Ife04vTFuB{SwTnmOc}mXcT~Zs*qYUvNySoK7=1`6^AOoue?>!l)>y^L@8G;dT z!INT|>ihn%To|OWpb3>==nu+isY~$67F6r*z?JImZPnU%%I4r$+Ij3*SK*%-ADVvH zqT|zyPnGhX#Df$pp`Ud;I%&cCrc#YAvv!eQ6wrCEym%rq!(31V#I3HA? 
z<75w@Y=j`}n@x*I6V--78n8iFL=teK5v&4ABef$ykZ`JcczXlZyxlMM6FQOM(*V># zjqxQCef`TyspoEYhWE|~*gKa?3$@$xBiw7lvbso6(7{7PS=Kd%2j;uh0n*g_r|g@F z3qIMLM#-KjjN$>gOYg*=*BWX?cQ?a$qlzmLk?Hw3X#8E6T(kRIkN7BM_<(I`1Fo%w z;UJHj90Y(e90)71%!w#sqEUBAYinM1Xpuanjb*hkBHY@%0Pd!CD2%dJgzg`VkpnbD z0c9hh0qGiJ#)2BXQoHp#dV?)?{jmgVAzD=O2)WR+VZfoe zPiBtQZdn+n21A_)q}^;$)hJYbWOiBg(;8(1F(WGj8Qunn*WXCCrM$M8wdAeD70^!m zUrvF^m3%DEV`>JI7-`bO+HLn~>)?I~pxG_!!E1WMgz_hYP$Vr7x{&gQgkFYyvle^) zHXuP;&{L^HgME3ATd?P9!HS3oY6PzU!05jtfrDo$ z!pEQdWNzuH+s)FK8{QqaC6a56hyF*zGkN~5nI&!%6mWP5lbCL0%E1?(V8j%v1Es)- z7*lGB$__%l4B4W!Gsn>;DF(DOK#g?3n#D6E5fUd<#^Rtb_rOJb3|@s`EuYh?ma+OL z(++A571O5q!02?Jb2sqrXs(cP*Y~vv%5>xL%v2ucAIwZ>frWcj1leINILc^CMh$%3 zQg?LLc(Ppo)Qq?PE!AD{uaxpMhAXG3VkuTNE+&h|sL`5NqX0d|aU| zgx`DyY(4rqt7Uv$?l3ZcGV)fl5LHehlA;ajhj6>0TR*&C1lIdyACms~p#f-b^6)IV zsoZQxbVz<|;tmPyA%6ZS&Rhd2wR>e`S8Oey_BN=(fqMNf#@eGOAngw6LlyL#7EB&= zx#EZ_lIMVzDhy2#b0CwdM7H1kd}aRcqozgxXRGEaGphr1SbZ{Le*XdDKAig~yGmG+ zf=C{Nr!WkV5QgCJT@d@Kfu8k2-+o)20?hm(DUfJMzZ>`ynh9>D}vugI|N3 zgGL5YBuiND&em6YbDEE&Be=V_{64A(WV-+#xZ>fNCH{mjVQSXjN20=|Js}Lql_n_* z2p8uqQrk9L%z68n{qsM3@WS*ZSWZ`JtF_QnSr7zih_p3WmR-q&^h9GUNYW!`!Ca6- zB6BLpfrsG6{r1Ag@H!)FOrw8?K9P^*=6xe1F$;FA7Bc>QN~%nrw1yReEZWulOb2%& z5fYJ&9xo^Rj4$t^<@9IT%U+2)fntj08L%BOSbBi(;2(KK4CSo@#s$K zubf=qR`||Z0`nt~WDeZ!fn0#kXBaHQBj&?Eucb@{PbO?}B0fjE^2{`iQy~jwyEwad3&L!Hh+XOathd{2Q55dumEg9@k){D zFwE~HWo_;~1>hioy$0$0kMAedfa)RqJ21dz5D47|6mQFJc?LjHO_2g|G!Uo?VMk2a z03^0$d?EKHfR000b|Khu0d-xK^p1zzkYyWx`b43(b9miU&#(b6c+aPK7qXt7n&Mlz zf9wbG=+b)bwl!<5)%qvXTNYexwV9;a_*wVKiOzR_!M?-9K81MvvbgzBwyaH z?>2zc|97RNkVVl(@A*@FswAi25AhrGw^jbc{SDcFD%zH9nr=xR)QqwwkH&`@+ql z{&Q%FW0w`uVhtL_luh$!T?Gez7F*sN`>qX^R;6Rx-&Ogq`W}z0W9`7!JQN*gEGmHm z+bGAfoT5m8>P3;3MQm&{+X@7QlbX-=P9uIs@fU*as7^VzlPU*2TkMhp115%Ik%vY@ z^}Q|6Uxu;{rZt)u3lVM>YJuhKuYp9an(9BS3~oF>3oagXL*DHNi&zcxY>3y?E~o{6 z!yVg(;@J*;W%;Z%8B^#+D2DVT$N}9I3hHex&FEY$HWpF@htdBiYT)TYMq3i|paS;< zY=I?0#1=XnI~h^(i=QXik{m$9PhR+j_wg4Xx+G0H(`xdOOlJnn2X*be2$%wN3|Vg4+k+z2?by9yR2`XhG+PX+a*bQd-)Ag_w=nPEMTAEx zO&@(eV_Y^mUE=aXQ=5Hu4g7ZO>Rzu`O5$i`mw^32+w#q5}1ZKN)Whf&m>E*teKr0A0rsYybx z3RKdtZR(4fq>&WAuKw{onZ@sq zOtEzEKy69TgbXR0tqo?2!P_~%<;9&OkcKf(fwR<(Xtym3OWi+qY9z|NtwPI*!?&jY zM}o@2A=(g;IB0${3}79nn%VE7i-H#*QIUob`Z-YIDpUO6{=wQ+g9_FUAO~Cq(35|m27Te%d7n#XO^_S z0f#&!xEx^DcXbmqz$z9Qc z2`dp7XMGx|H?m-VoIEhH4F$fUsT!P{ia&W5Npq&_kQeP2R zL6*g6pD{p6&pkwo340q^oq_o&fIZYCCPUH@yuG9l;<`yw1X(rm=E}Lz@84=ZJUpo( zfbBbIf}K?|2*e$WvJbFlH@hN9LZ%`KK)LLXo@YxA);{(myHF%X*TrLXf>`tduMg(9 z4%S-@<9V#T~6??I;^qY?(WyzhN0qG0`3Mj=%NKNVKS|Xg8rukcehHiAx3LgTFOs z#*_WfhyXKX!JmLz9FL3SFYfH8NJDQ9D2gcc-78>}CzlMm(&~4& zk=;6bf%&L^bM}D3wPeTgx#apObJMUxAkBpOF^JNPc8(lCbNhNaujkn7VPrqKKI)iN}$6#}CDy1a0CDV>PeqAX=a(q(^$Tn1n>~dBH5ej zRX)Nu@okyYe>SV^^KLLOc_1#<)h5IKZ->%8yt#*kp-VPRA1DJ>;C=n8_$I6a;TU!V znHxw(pGNoqQXsUOjscjVSfZNNvIuPl7TTSE;gU>mO@7b&B9#X~(%5crFtILRXhQ7! zQ1|D`@hRJJ%d#Gaz9UmE4+O;aaa@Q*Gl8u6HK}%X=A-#i)Q$-YGlW!t$raF@O*=2~ zJL-j*hY8PED)JUMxdHEcjY~rKRV$^IRm^JBwD+M2#6>d_IFL#lBkvTE0(NpN)baq%u5Oa%NXaqxhhQIgeTk5B=PUffy{1ZU3 z#y~!@d2+$1$9wV<8trgKWK4fGy%9qf0wKtZKqxUX;z$%eKURJTD`+nY*8ikx z4jTk(0S~|&c(j%=J;76njdm@c9$7m)G(&v+L%123!f00vQ-k`qjkF+>5XcvW_Di=? 
z*_px4GqQjjbpes*)a?Xxs7q}$S4+_O!KV)4o-%R(Oob|s(s&B7R0--AA69}4=4b#$ zMOW-**{+ega99KiSl7?rwUyrOI7~WV1a-MJVpKIknj!2%6oVX)Xw1=)aOt~I-o+Iy zo~&}|TEHIEMlpB)pC5gGz#1I~Hz|11%0fF5%wlSYZ$7NJC3Qj@D*k9_g*`Ap1PVBg zgS!?j7|M)w3>LIL3J^(su=t>3G^`ZjW1%efxpU)nFz&( zZC+A71#?QOA(Tlbn|xe7!q@%ZxP#4+15kH&@VK+Rg8O}2N&HDRvSQJc50_zq=&9TS zBAyPeHV_&#{A3Ms8^P4N9Ia#;u*KU) zh*5HjB{4JyWUi>)KdYo4Gbzwdy`6Q8Jh&a(UDp1(kZ^QR>?gj`4DM_{_n>wP9 zrP=)N<WEGW2s;& zWv>V0sMf-BkPr3LD8`GK?3Yicwc%pNm)8!uoyKmHm$dk%($h_b7fOjf`rb95E7KLg znikZ$hR1vFLyz1DJ<{sw=#D6L7?-&aaM#VehM@_1WXG}Z;H9}#{a&Yweye_uUAU1Gko*|ag#i4#OjTRjEbEcJVL#-h;7+;{ki;5_kz|tuK7;+ zW4o^QTmcK92ntL!1B`s~<&Y0~J>ge?&P2Ou%~;|;9)g2o?wh9|04YYNVh3jGKnp6w z2$5J9FQO23Inqo<_i#HNk;IfWVwBt}+k_4Q(VY6$2dCZ|SzKn$$b9Di!)$;n+0g<)tAZ+!y>L!~hQS0fc1Iufjp-Ve{5a> z8xNRbEc!_D!-Ap(pWVz7Q;>(0+tMJv)Zbddc4Zet>MXz(Rhd$m z=H8B8!z=%y+o<}0@q#&p)N4!q8=BOTym0AxI67TP*~pr&~+T?oNbLh}38ZJ1|;PCJsoWiBWSFkcEC>SdWX`X;aq9x>BCyea@ zOr(N_xfu%dv!mN15IInp4vf5DWey4wNM^mzQGp{Q8EFLPV>QKuDIN{0Jq_^k#^W?5 zIaOx3R~^!HVJjlqO>)$-<<`bkQO6>ki-{&*lC5Pk40RGps;XZ4Z?90Qdu0FXDK7?L zO*jj6CacSJaU(XPCZ{8PBp-1UK`0iHo3tT2ezXDp2=H1%nFa7pd)Z-bbirsAYBv3_ zfdqmz+Mr-hHABByD4H9@!;@F{t!}9|n981VL%9m+Fk^ry0EruBnt7FKhbGR+#R~NXiY4wPeAGa7D82Ux zHRUU1KYn|cx71&lc6}&uO5hu26TQJDlPmSEk1UO=mwXmH|5vVJQG6@4-~J3g{_=k~ zG_1n^Eu>`kIwd8N)9BU%~QKR{zFDP-WU7!)QltN&z}SNo^EH-I`i2Qw8y zF$IDQ1Z{1JFjUz3D_^@0o7_`#B(it+iQQ)c@IqOX?a7PQov1hhKT?GvlvD z^s#mP{u)7F85pG^ zz={L&hHpShX|+t^d+D5oh|3KS5K>=*P9PBCFw<&mba&S9`)>U8!cmzzF5VdJmRoUf zDWh_8W9Q8bA(<)&I5B^@3M0R`g_-Dvm#DMwC5p(`umxHeKr1;ax46IDU$U9vg9PyY zbZoHWjU}qf0EmO$mj+{pFCa5CYuyjt$h>wi`7R4lv6Og1kkaDM-57q!bJ=9G5xvX| zn1wT&c0AHixI0Y4G0B0V5l&?fT!)PDU-nJx^T6jE@B{^vcD7b?)j5s_i!sA4!ufj@^b8-+mU;VnDWh`= z`kNop;EqhL51$$VIQ<)N0Id5gc6h|A@95QEDLy!Yh8hcC4q?VvktRl$!fNBQ4D3=H zQmnj?&CH+v`3uv}EPAg48n*`2FT;Rp@@FuYw*$b4K@}7tIa+)gJA4hk(=Xt~;0$S@ z6$Y@gk4O-W7*X$!nfE2T2$n>=8#Wt!ZuCbF!r?pO7H5|Pa~O8Ix=;ujMk7xmFE4~~ze6g3mGsZoDIE@8dksu%Js?Av0=15R@m;d@e{ThB4 z1jLbyFGciR730NYME3D-+sWhij_hej+bmX?LHA89BFC&+Zx}WRsm1$Edw-Jpp=rd{IH&&qe?Qy3)S$yJ@l0U znCtTuXNpn&=P0hRIvuM>t{ph>Cx|__WXuDMF?;CZ2o&o{CXD(5qNxQYaHR@z;NKH3 zNnqzBzr08I%bl`10)SQ=rsP*i_-sE*~M2ENcce9yM+k8uv|uG6P&N7 zSDmY~A5JWY|Cgl($RdzJB2NbQ5rLj79ndfUkVDv6bgjj{ek$?)afN?7dok2+3KqQlMs(ThYBduR#Iv4>kcyOvuS zaR>otpwRM16QCqpAUAXNqlug=darBYi>5!~I0F>+3kT0-K;3>B%36vo!BhdT4^>Ic zv6HnS3iSvC^NSp1%eh+EEFttKL64T%r}K8vrTXV9wN0th+r=aTdpgF0cpY$L^~#1n zTIXpsUJ@}w$N_Cz(Bz*BZ8=1M{JDZmGF5o^jJ>?c#bZ|McB&_W%7nwT&=8@sp$bKl z2@{Q^R{&AOzvMD9aV;p0m8e);0Y@nB=dq&1KF5e@FzVWfR)1l z#nXWo?g1Tje@vrz1kd{cYj6xn7(;bw9w(Jd(6BY&2u$~4j&(E$pL;2P*q-WJ`7@$1 zsF)ha6;~|~ODiLdP0XXeHe9_63BeYJcNyn!!HGcZ!A=qmo|6~S%^iA;XNYLRK)9npLXpw};XZ7;SX!9R3 z0KK;0Du`zSkS-Xv}pDT@vI^dLvUyiO>qMYdlozi zE64e6O%VW*@diV1ZiKU01^dQ=a|JB-ltmj%(L~&3KQ5bKap&ohp}&Xq@GTegcNyg` zx$TL37%7;%U{5}>oyb&r6>3SaXg5 z+j~GfFUKqlPQUZSE^612^cbKF4Sm#juN<$dE%B{T%$$N{I&2 zH`JMu$xioNy2T&?M1$wc%E059ix0@TFsRs^b|+2Kz#=R?znN=hTZ>9vEwqydZg; z2V`9mai$Knh(u?jBnEnX9VuJ6T@Sw1sye7~z}gVAm)Oqrsxs4uEET=Qq?)NGyp#|f z5Hs&X9X>{g!%0R|0~=so@}?3?>l1e)p+0pJnD`PN)W6MOzE=J?cs87?jE#Eb67HVO zpKz{eiL41XFp%>`z8V78a!GWG%TG({gIVH)DIwTfEYGD;r7(?aMyAulm#Z9wofLN zk2%7&&u{(EKDh{ClZ~f(?(rP&f$I0snc!jQoH>iZH-M%$15Rl;;H5VgS|7lIU1W9| za#xl+oNvM4Y4hElv6YuIk~mO_y`0!L1rXmj$zlseG;!K z@RyNb2He;WgH=msaIc|HA%{-Fy)wh4f&N{}MHrrSu6qzPzty&64zj6uuM}}Wn?e9v zktsPd;#2P9nB*U&Rm?CiRB3&jlo!IGkn*A!n(J~N=d&~4rq7Kz<&f~m{lDzPZOstQ zFpOY5KdxLJ|AOu57U{~_h-{cf|9Rfkl^B1t0py1D5BCnk*LYsHX&7>KyQ4H}N62wjPx>Cv0@BF^yX7c?bJAF`s7eT~sfat^#Xo;w*I6p=h z0Q@Az^Ur60JE~_b{sjukKBE!O``b%R#M6 z+~`Omk`_$hDCN@^#a;3AbB}-XoTL9h1iLrEi2^L5j4ee6hQDG_39=n~)j94n*YlLO 
zDxW>Ju^k7lqNDhsidZwYsx@QR@2XB6?q0!rKI$Li^`EpPv={6QKK!<`s%1gPU0cf# zzH+x$+pm)5h5vUtSIW?76I|z5P4RLMrnE5I*eu!&k7ePhU~IM6EK+=qZf0yxkn| zr>XHT9`*C5qy~K7AHyze<9JXYuh`6|@NX3*_Z=QOEKJw})uU>=BkQ1XEJJ4$DwT&&x?IRKbCRO0_+1^_ z;%^owHGn{`kMaT~aU_{+;=L;gU}V@n1sg+Z>5}1qbt~YS*@%6pNdg5_MGq*(hXw=$ zjGpz5lkm1j*b}{>WXYuMG(^AFr?JB)(N|Jo0|p%5-uGf#2ZFiL-G5OG=Z!uMpJ9h`M@%NH&bO$5h_Hy`eIj%qsN0wT~l89mBIusxvMqNGVS$PNt!PT>d zSLD8sSeQ*Sj+yWBWQ#>$Kdb4@v#S48-x1uKeJY)2^V`qN+?9ja*zq&VZ z{lS@|KlpX>Dy0v%ty=x<2@|L#kW@H7Yu+crcV`s-aP!Hn z2!PbYlSsir5z0e78-_eBTi-GKvdcy?(-&p}{Ujk*+kuJTG%iZ$%mz&Q$YE;_zSB}} ztuYRoRUrZ^#sZL2Sx1)>YOuXX3r$rFqnj=MT=)}b(tcpaZ3BQn<&zA^!!`)wS=awD z{NYN*OFnwA0C*X4f7kkQ*un=%^8W1wVYj`2u?opzur+`Zb3iq0DTRcPKpQm}hNEy* z2UzofvvBx*XI?n-k2=k#yVjcEPqa{XMSh>@;Dj=QA?&s|AmCs98hO!pEt>(<{-i7( z$IRUR=PZI1ra_tyWK}FAC>hI16sMq|aG}8msqTRDgyD}g+lD95)mW~HZU;%rG@Lat zm=*R#>^oN5CT8Y{Uj8W~vhP?~rF*Z2e|KY;V(!E`nlK2r$Ufd>wC8oA7A+Ejd%T2xzMTtO?B-c9X+>uAaIaKNA}kv3A_h=Wt%PD z_0k@QpXqc8Nuo(IZiG!!r<{xMPi|dA1HwTtpIDh{$Sp_ zh5Q41+zHAB2$H@_jTj8K0;eZHAR{B=r}vKkXp5P-_BzK^5*l#G(-xF%1_5dKP%d@V5z z0{cqV3|MWbe1OVoarE#HPt_er{9yj@e>~c?#Ql8^>qev=+B0<9PulqoP4$2Fn4;ds5#QHqL+lCkgnV@9ADHvBDKNDUw@ zoNG(PO>hR#HH4x#(8GT_OF+RFuFC-%x@pit^41z$imIsT5-+f{7tmVELU=d`hX21JD zM|_}s4%)7W(QJth&z(D$m;kyZ>&Bm(Rt6^zwB+yX#6{7z9025E+)rQv{%jH1m}-j6 z!5(Vv8@6@Qal~e+ev)Kg{JEQ>FW{=^%!Cf9BC%N-zB{UU7T5qB42Q6%dhh)36`oqx z|9s8^hMe0`3-Fh`ID?lpB)Ta%?@GZDNv;2m2kkKY%DXjo%!8Y|UbS6vM9cPI@wK<> z((-Rn+SJHsT++KWU3DpV>n9!L8h8wj-FT-SJ+}Mxy$)u=cCkp+#Omz_NS%73wEEAv z28su!rhU-+H7@8r98g2n;b5~JgsOk;>Bpaqs-v4hK~XU?IsGl_V1F*6OGtA9Fqi+@m>GFf;bc*h5;N0pMT?xEH^foadZKQqmz^Olzv614T9F? zk0r7)0~?0dz)W`>&=%4pjk0zi(pDm=Zt7!HhDeNaGP4^EXGySkL?e|mt!rS6B747S z`IL6#AAF->k~5au@odh5=cBDE1OL4!oP|HBZ5qj`X^{?PZ{F=el>D}UXnAbiWJ@N8 z2Qpw(Kw@5!{%*?!kG08~mxD|jj9c;A&c6(B@gJ|Vc5+MyUCF=-o7Qs_u}?gfg;EdM zAX>Nr*cCJMD>^sf08$7w3Q7h%WIv9S=3no!1qlW^*n#7DcyaubkBIR%XnX{E zSlb5pQUgj6{e6hmX zd#e0XaAdF*3J(p(g3AUnHJ^0Qen#PfiGuqU)6|wB9&$cSjcjK36726)t5-|@v>+)Y zZ;Z2w<>*D6P5ST8Y!L7N0>kY1r%$@WFoI{d+i=z4+k74fOP{oo?(~FZoPUu}!EwCThDWtl zbU?y2IP`|Z{c1J1wxNYPM|Rk4qeV@A=Z;EFngo&_PIcK{pO`4<)0B)tXGnwhG( zt2sg^N7E^eP?$@+0UlY&1U=MB9MWYAYqWu+GYNZ5pYyTUIf6fxa34513~2Ytxvt?W zyVbVo*O;A|pnU^8KVLU?@_Y_6aM@IeFwGmPfwkgQz?&8;2f)5mkb)YBY8;SCT9Jnn z;Xw9D1Y!Y+RrZ3AxI)YkAwrVO9QTArkVTxt$pU;vx&DQB0K6JA*Z{$5uK%TM=KUky z9lu2&aPvpJz68-{0@& zb6w|kUgvdQ)gAruiW>;?WO^iy&vbB--bO>PhnX#onleBgyRf&^2B#z+MQ4-ar(>{t zL`M(J{153H1g`>A2h9f`KIWb|wAfmod$ylzLnkD&7HFJDh*@OphXU@&^Y+_UXud!u zM2)R8`G3a?+;d3YgPQh9UMg#qq12(anCA7RxHKMhny`Lu z3E&Dimg|2<1(yem8i=bwDxThMNFgXgcTmwu{$JQy^Uo4J9mj+xK@x^?4Lc(XXru+l zOBy4o{u2>C167lG?G?;2@_|mwAAN#|U;ZC3unm&4ZI9XHvkaY7NkW4%2|8R3Rs9_a z0t~6)+j1UE%o4y3oF22AXRUv5Io_|z-+sHr3z0`zrjHiuLi#~T(rozVWM z%mV_p^{$7ly)Cl_-tp4?E$#nGC(--`iWg2p2=w6JFcET_Hm(G1Nd-v-UJ~^!%G&LJ zH17Jc7wUKXxt~oe0se;GJS;9XKAv_4ORsLS`8U{2YnB1uxFQFmBO(v9b(Q~I+y3K1 zp2*nsu2*tHTy(-7unOoc?WlED$^jA0Rbpq%|Ap{~JE=gWzX1|CwZ@Y$d#;oN5%3lq zGTFaB`nK<}1r$C@1cGmyFYG2mu#@oj^MRjMN5&mD zaNS%jj@{LN{)FwgF~Sr${IdwcD3rou?*Bpabbd|4V!@XV(US6L)P?&tTXY;JdjNWEd>gWNL^rZ8ojcA% z{!zJ?N~|Q|T9UZ(WCwE?r3>7Rk%#Fx1>5mWjrQMf2Z|XJ&A>rXu0fF#J=@7+qq~rl z+tT)eIzTCljqT7H?fa$_ikO10bQ}7Kf0UXJbPNPiA0}$N?qsFQgCdKyah6sJt|R=c z0VK18K~msG&aDbA3P=~Z+*b)4R)PBO7Q_-3`}5krTgDP2a7S>y`-5X+To8O8vm%)v z{S>BGB9Q$3N}@z;Av{0>xB0)+E@K^rjH!86Znbg{r83rmI>wKdQl6c~!IRcM>w)JuHdtwP9sU2RN&sU`U z9JJ?8AswkMRcn87E5AEED2|yiWg{GAFP>ODsOO8YE-! 
z4qKTEwzNYak@?7gH9bDzTAMAcYsPVmj!zE_4bAPqg67M|-W~i}u}67qUn7tqhOgVPH*I-<`Vd0|o)4@UOww>!J=sHtm5 zfxN;^SSimR?)@*r2ACqX|GNLv<_7-r|NKKfg&+Snd`dYzz7Mem%fwWK3190+A8{^# z17~vBD*-tQ<>AL1G#R6YeBzupL@Z0!o;%7w!Ys2iZpF_>r_dNq@JR*Er!6QHUunJG zx3eMMRfcD!>p2O+xjG7&l#30}z+@+qyV00MXkv>P=W0X7^FEX^`N7)SfDOVpB}eK2 zEz~Xx9_aX&TzMdf(Ks*8y;g|CY5DKl6|LTO;eU30O@;^)l#^gHN%qm_9WAYBipM3| zR8iOYS~vfMmbtJ~5k>eY@oCg;Y#t!s?O?Z`FFQQAds)Gke^EP0Evzx&Bs}qJ-o2#5>eS zkYwZVzTA`Mx5?9;UmH&AxOFN4@m23L`ff(!zJkqRgrJWx!paO}Bv!Y|#X(aYsRfjO zqB``n+LQD(rCtt1V+{A;o0}gbzA6YhhR7j16NCJ9Ug_C)WXTq;^Bxorx68;Fn6 zRQGB%nw%znTp=fvjkKeIs-QOJtMA9J;ZtDg+j1-Ewm(M<==xW-8lNHSs$JVt6b0-Y zBp*@@B+HfRf2AX`)j%aSEGboQxz7 zK_o7g;4j8<*p4Ihw3pzVvOf9(C`d=4)HRS$3&6Is6J%F{UmCa*p%nz;=R# z4PVnsk!2N>UC20JY>_BQNl-eJ*#b;n4xi^-R7a;M*5P;%MG46J@+ge&kL0ic^#E)d zQK2*mGs@gvq`9%9tUtHj=es+>puv=b2cw^Z#Xcjk5U3!1hCSAb%>OYn)Q1-jZ4_sW#vE}5hozQ)+g!FX;2ecF9$80)9eA0T%w5VTmk(bI6j1Sp&s(` zmV4q+cE9iHV)8^7;fFHP2tk?3KkRd9yq7jZI0hn7l=wi;J3t?g8B^Glr2$GF zxayu882A2%VZ_4fjx=s}_QJL4AJVr2G)K7)#iki3fCEUTQ-qGn<}g=-V2ok4CnPhY z#FQm-z?L5rc6~ts}oZlmzUK+Lw4-1t{vIK-l2XlK~Ro0jC8nEsi@?rQqKP^ zq_`yYnc`w9&$xh{B~|7}lD;j?P3(sq{fG%|F=*u;8~#@iVuAC)_5-9kU40y>UU=jU z@dvz|${9!`O(VNjHP0Gf3KT(ej9Je;n1a0x@WUdBFbe_90Y3tZHJnf9QWf&xJvYc{ zXNOMly;Wnyp6`;iH~Mk_ab*+rZf}qHz?x0;yy7 z1HR<%;uwrXiCtO>1_kBs>rh|51?u)tiW=~@DG6I2&_#NMPjri#JP6a3ar0cM*mX#8 zZU4>zrw4uN`kDG;XhzG;(m$J!sc$4Tc%}d{n$uju$E!(48}1tb^_1?^bO9lX?jw!C!nY>2kD!*_3WQfz;65pE7o2fuFr@c*#@>kCMGYN0%bav|bxD)cS{coo1*z$W-0O@NLGQrk80+b4DbjI; z5w1ELLh<{2ws$L(t5Psr7U62V7u&IvdVB`}Q01p7g~q*AG&k zy*8tNS$Sx7pB$0|Tnk5|HEh#6#p%>*DII<1AG+UZ_2q~*HOBhdU+Xzqib1D5_DI>9 zcYP?_HIZoaPg0*hZ31*~e|6+toECQr@}xPA9VLnjU0m(`WKMy!Lr*-{v4zt^t{7HF z{H6zejY@P%|MnkXSfUr5(jk0_)B0}}(5lkla`EE5&*{fVdj}eL_fcPnNAKbab2~%) z&RMC_rpAOAT#+E?TGjiSmmNKF#Mo6HJ_^RB5aqNaKuRKrf2sZ$?la zR;SMe!?sWB{p2~l^&u5LMtLAwms~#M1U6d6VQ9uaSiAKRgB;^>Pj-~q62p63vUu^C z_fjfD5)VAd@G6U=59ikpLWXnwI71P1UZ6$1;bR_?0%D8 z3V1Et&=-N*CsTs?hX9fr++=$Z7y)kc8eESIM&v-Xw`?x3C#R7yl{%j5P>IY6X6$~U z9C>+k&%*E6oFKxZRp8s$l#R3tpg;QQkDW}XRVl>%w5)loQ$ru~#bcvfd|O;`!PzlR zz^pNC?C2;5MyJ@=b*j|97uKtP(1foTcP84x6Nihm*~5F(*vEXB>hfw>v>fO20wNrv zrgoc-L2PXRH^x*hpS8I$rgvT>v^R1s*CBlQaua=?}ZC)PlzYUNaE3|OIyt{P6U z%4J>aDJW>7T{C*TNeMVRT1AD5;9%$vXHeZjl!tO6{?5>lk;{zhw$;Bo|`zHilh9~Gm0%v=p1O^Q=lUoHUpAV!$yWEepDz>Zh@%^ zBBT~`2R**SC|n8Ic{rXHZ%0dv)A#4+hanLt2Yn7* z2Z-dV6PjXDU*S`+UE~n2tNtEgrhrOGN?m}>t%WH%oUzCFj+0#9Kjrvl(NiPyEuEar z{ftdbD`}0v;b@K-lhb9Z3OifKA*7DR~@%uI|C?5js%AJOs^DGbg-U?Sx2Dj8Fq!~2y0-XMMoF4YL zIi!UUf*E8aEhaG7JWb#LlgrkD&%32St4rO5%%2;4RV~tf5;Lu_Pe}19N1gs?>=W@= zt|$o!!xnuR;cnuFOp~{AUQ9DSzAC<3ZeTIS|2R3?!U zjI^SS=$`B0*>Y*O+86y!syiXnP(izlUCThS+pH*|?-Ev9;SPK;-GO)(Nf&xJ!&HI~ z6`FpiTUK+vnwI^r+E6kHpKe<19r&JWQhHB)dI-Y&X}AGaLPdc()C)px<|QRDM)Q2@ zA+e&=i%V1--cQJA6}Gj|eGn=09Ewc#063%wv5;b^3&fpR6~c3CrBKqP@pnk?d-R<@ z?)@+Q_0gK2P1Ocy*9j1&7)Us6#8*fwR5l~>uB8|nKhxP;F&~cu?=oRiStj*4!c#*O zqeBI^`w5HKP!{`yn>fNZhNp?S+q7Utx5gIw*>N2JN8?Fua9)Xq4Yxf%CUBCOqF53U z;MJ2h?MIB81Mm8LUOoh^QT#X@@}*<{W03T8?~{Lm06m~oO63=-fJx?p-E~3_j)XUh zTf!0+Nj^ydwA878UoxGA9kmS(@6|IteL~$SUIM0dHqw{ZoVs6_IyaHW!rr68?rIGaJboqKNuDJ{I&ppP~@Wwkf-`;Z%3 z=oO0;&sAwc6y%%dlp=%gJ2-=)zjGRLP`L-j77!+K<`T87iR(kDv?R8%Y14py=JH2Z zKxH$+-y}s370ozW7~7nBu7or`Tz&05C35oykgF&cqc3zLQ@*cnz-!J(OTtia;y+pi zV8ty$w>ZNB$;>ovFyL-6o+!qAM2FKjo1?_!qnh8lrZ2$Nk4}ClDz#>Kgv3}-CUzaX z{t#VpVS3bb>wAyI9}qACaAp5mw(h*N>P0#0K>C4Us9+t>5pR3b4aJs2tYP6Skk2HF z6rl#*e-T3fK3oMJ^Tl|r;f2O_MI}-Z1#({d*41cV#8!5W_DYo4UP2rRK;W%dt^H-G zpd8?oAVd=P01dg`{8B$@$YYFOGUPys;S3C7SceCfwh;Ej?0hj1C^Z#b^gK#W8Vq7B z=!eTWK~C{Lod2eA>I7Fi`mbo<63A|kl?S=p@h^vQ#cJ-#Mqv-A;P6XU_^9Q!Q1Y`W 
z!{rfBkQuOhbss8Gr5y5dG|9s}q%gTbbVqe3J04Z9XJ&l5(%2Y_NZnN6=xMaAaWlpO z1Yb+&8SI%{IO2uvazD~@dO1dL*?-1DTobOka>tn~avLmaHNjov(7>we92BJ81B6hu zXfkqUH5a|CljszM<6F1^Bu;W~dY$2XPe^dt=1imKn6)B-Mz2 zK3=+8pXLW{cueNoZhq%5F+KH6;8Wykjn^T&IVx%ks8E3GeGJ02QB9R^iuhuvPDJ3P z<6bzx9@kSdX(O&BZ5nDF+bJ)_E5?z#b#h*+ zbbxDX?6VQ2*JCcf&byBiDzoq%4)zX499CjJn&Ho+@)EM7&!UMXw3mt5Ddk{3f<%%H z;EpZC+^E5jB`g!f@7IR!l#k0!Dg2H%r3t_EvK%i!Z#N!5vg&#l z?^cJqmrski|C9r8dXO{U)nB`%mwt!ATZ18$D>(w}$PJWIG z>(+NGlqq#}|AdX#Bb&~rDQwD+qDW$Ywf7(!+Lf{<!w<8V_&Y<%{`ObzlvcQz;{WEEtZ=*1IJnYx zI>dBN7~@zCYlvh_7jdDn%*RtumI%Tu6k2vgi@Zme=GUAW-#wx`HF(?)&szjw7zkM* zcPDc)d`sctdGu*DnN<&NVhR^2qf{|1vD#~w>F8v>kNXr$behM^iMjj2I4Gd`&q27f%0d3Sku{f{Na=>y* zRbA>{FpxbFzF?Wg2K3b6pW>Vj#e4WY{_b_oRUEFs#D<_v0QNFEW!0@Qn8XrkpRCXi z%}CJt=oi)^tq-TA6niS13K9#;==mKudBQUz%&a*bH~B82J8s027b*7gpJ63(l5$p7 zbllJ+d;#?0Xr@i)+Lze}Z1m9$S-Ly9b1UP3q6N`<9V7^4S?06kh zxrG9oH2noPzCP2Qds8~OnM9-4GD3*K7A#T>cD_b z;4_evvzCPaH3tQ^P3peFZMA#1!jZnW$JN8;Z>D%61W;`9QZmUAZe9u08=E-9z{3bZ z4IP4Qo@{kEC(&z--x+GrEROZ*DGw${lMXEOnq#VhxG}QU*vrk$jqV8SwD~xxYW7*- zcD|i6>Z4b{Rzv#QLph+axjvR4?-Y& z`drgRT)ePY|6N6I=h3?mGyyZG66&$Z4f1t0nG?Wyri(Ts8Ip=9vGSPesYoa5cGRiS zuCr=&2fcV6GJhxnwsU3rk)uZ`T0>>yGI2}!&6At|9LRoZ0@#pp47IG~>U!4lShllI z)IDMXFKc;0OaD?*EfH+%ImSiIrlvLCcIz;QNm80JcOeT% zfOfS42H!bYTIK|p?isxZd87^sXc{iCJ|)FSZ;7Hi%e)o!Q73`MH9uU?W{f+=eK|5E zy;x`cN<%+>cogxsxitWvr2^1#GplrR0iZ==$L!xZC>!bL?%1L3+8DxvWi26t@ z!|ZJ-+XS4u!3E^Z3?jo!YS=O7JhE?(p8+%{1AGxMIN`sdSC*bcFmi%Fn1rw003cAJ z#7D``M9jhxjm!kp1`$s__xX_C!3^7yHHf|XVqf!LtF;#ZWl#jrTs}&>&2f%53OApY zi{Ttsfzu#VoJ*E3KaD=^q;`ma*B{lSFG|yXIemq0QscsbC?@h*=z+Aaz^E~+&PjNQ zAP-tUs4V#A%`>;I7IfonUV10Hf$Ycz8_m%L$ZQ1 zuQ6$Zohm217Z@$%R$B0&IdL4}Y&dtjGsCecVabV#aErlaDfK;V*EKG52d2L6!FWk2 z2bJUrT#a!E$Q1NJMtM{K#_$fJmIRNQt6Vr{$!XSRY%-QRF0JlYqhL4~WW^X>L5?EP zQIJI`-rpfsNvO_E{HDG56;XPWqF3`~49H6$l{jwoU5gLCg?CoB+uR(>`Xv}|pEH-V z_aouSw9RpJ&Mz}|RNgujgd6?}7Fj9_yL5@W-X7Xbof#)-s*GW*sJM3k&$m#x2IClN zva!O?%U@4H1^GxCC|fU z6-Jah(c_lH2CZHYglf@@JP+23#3cC~hxap7TRXr0FavPaeO)ovtHYv(LT>6|aDvST zSSG=~3pWdYqs`oQhAL2nPs#=U1*Hrmi5g?#F z8eeP$fff%IM$v&hDSdy1q0QUS|D8q+pw2lJm==d&Cdzd0*BFPliziDMOod z4q(Eht$#gYfOrfqG=C+s@0gAxGH7r8_uU*NP)1{JFb2Z}>=jk3sI%zy-8$p$v=U2X}k0-vB~d zC=2~y3eKhxs}9a`fz+9a>HV8XmQ;s3r1kV}?LY;0L9W}<<^*eD;t4G9$?^d#P~A%| zI}|B1Ofp9EoN%ia&-kgWPa;3ey{2ZmGmv%ZT@$pFme#o+CcR1qT~8(E2n#bV?GlJs zF;Zg!Xj`&29J}b5J>ccH*Tr#Jhe9&?X8W`n(VuIBL{|w+dkP6xSm|>kRjPHm`IY`1= zU|m?jo(@MDY4lDg!-h9^B3fN@=D=$m{vF*6=&X+a^U~L3g%41`g*7dJXO;Y{Y&&s1 z4$EdqIoe_R7$;<1!%;kr&7upkEcKL+?5`q)7T>IH4;X)N3yyIF3NL2?K%+;Q=wBa= zE2p-=CLjzqjME%P-#p?e<%lUY8Ys-IDyA82@1%k%bcOCmJ~PyRw`tW&+XCc=jW(r3 zxeR0+#p1lqfz)4QeMJgsdv$kzsNSwuB;O}v-<*z8hqgl8Ks^Z}QxNxFg0MF6FYLz1 z`MI0I5%6J?;S?0}KDLvpI(( z3M0+6SaK4n7G2{&Gwk%2q8LRg55^g}^t}u>kNrKDPfC++ zoC~$D;Npxkehabl6tM$Q1_2-sI{rYbQ9v$GL^jG8#4Atq7(Maev&C<5ZRNU_vV!7bBPJ|v{vfMC z?*fz4##YT&b<_VK&gO194(KQ6f9d82ON?MMP zIVReqkrTUcIK-oR%8$EHnIdZRWGRgS)=c!|aayEDrqk$g zQksdkLapcWYtVt~DR4E(c22Ddzp(6={>z!Fcn27S56C(p3wk#MYby+yQ&7i);VAUz%A!H zR8A9|gcGhSomg5Mj=MZqI*JqLxRG_*si}S3y*fEN9MClgGc`62Ln%o^YP$2laeD-} zJfevNTgYtwEAW73Wyie~+)>2k&@hd=`;3cIMa`o+O3iv)v2!q+dJKY9Nb*zDn-&RW z4f!}YDod2;=f=vD9e0g)m+w>dMz7w$9eqBOGEHW^n6&0PBt?Rc22uzb3XR%BlNvPv zm|BuQ3<-b(VPfWs`ymYF;L^lG+q_9PkTU3{s6+#_FUn`5HK^f*i0e}}UyWhm^S#sqd8t=UOA^1*9gA_gyHV*uOi!Hn51b&+>C zD#^`KEZr@ek5Yo%%LTB1tj(!p;es#(vqchmu%CdLBfWIV{?O&r6OJI%=5rxsnHIG= zET%zeUAnfh%X95^%GM!;?&Dx?ZV~;WYz*V9CWIR#jhPCt?0FvA0+7co-ZmTTraez_ zsrt+ylrym?S_saPu`ik)yN(K+q0i@^YTdNs6uf{G6*>2ZO?8Yb33Y@6gK17G6otu#_NJ~kEV-C;kB%E+b#kD zE?e>k5b#j(`E+BDh`WqUV(jR4*R1ZLN+%oaP9OVxIL;Fdwf}g z#)caVerS<&4jn0=jA#^%qY3^mH4b=>nfDxGe0sF<w 
zoh~{_V$Ncu)?Pau?^`|I5vCKp37&!-7KBt9F*mOHbaD4Pu*VjPeIgcjN{l z0+QQmSDg);(S)cZ%HzenuRr}9Jd}iv4whlsA%xxp)#wBxkqTBt$h@qI5;y&Y<8Txn z?G-Gg)t^Zf4z0Xm<713YILvr#fxiFi^ugSkoS3=(|Xb6{P?a8{m8IHV7`Sd2YifIXl4|kd_IDhqDmE( z6J4tKS#fY!+R)R7eW(Q3)|hEzgOwbG2X=Gt>kD_df`vBfls8c+0+P6bY7(vtBJD*~ ziNg03eQ|I`ib|}!6&JSSiMy&ZXVf6v9|aN>wh*Aqya&AHm4f|U#b~nJpfO|K4_`)y-icEDfGXp&XpQkHCQzc0BoN!$an_Gouc>!L`lWHWMMW8D%z~7azxZz+)PK zK-m@Sp)$w1xsIU*h;Ak$0e`GRa$oQ28z2eB%6aKbY&zdE7S|6IE|1$xd@l-L*|70f zf_=v=;JCEJ_sQG46=ma*8d8TkdBc;*Ong)(t*3P>`zoBQSr?ONdi3Rm6P*hucf|Eg zh{BZ=)!`%^rTZnpAon(s4PeuktlO+&uim%6pyK2b2SBmN*!R-S21xU=kD-!fFBW`W zI2>bWG=#Jpgrqft7kxQQ zw(f!hEgNBY6A=@AUQyXc?u}M6ABLF6wk3N(#lYu9q(ik1ktcf{sqJ_KRmEf^3Sg=3_?@3hbu4(BD%+vNfU_T;#HAoGYT_wmsh9(;S~__5E>&N~I(N%@t?eEBz& z`#+nu)1ioVILlocbnD@jAk&M-15f3JxV&UjnVz@>5ftfhrA|)fe%*Bxy2@8mYUdCX>&QY7fK|mcx*kbI>7U-$t}`maaOW4j6D4#hV>urwb6y z$QVF~g682+VNn=zI&>JTp(*2PZP#!lhG=#i1t#9k+(76uqi=uapVD^XS zau0Z!90HEw2JVHTl|d87Q=nB=&K~G4t$v`bzK9F8sL!gx_KgJSMeM*%5QH(ibDi+F z?4FLFW70h@W^eaGV^*(g?iQE({<#gFpIdkk*=XigH_SzzU}V8tONQqjRons)Cnd<) zNtE%?TZXP^vVE`S5QguejVF_b%&`T#f6D*pZ;($;kqs8!K-zZl;1`uZ-O=7`z7Rn! z9W>W!reLIf|1n1AsT-hOJ&XlU^AJK6R7ztq2td`r#V;?rI_FU5LQCOu*WCGn50M9` zT+0n3IKgz<0uc09q9HfOSC}U$za$Y)qb7>T+}v1iI1RNqKbKQ0L}O@>Ria2=gsux$ z^ipEZK@9ThI0IGhM`rZv==>N1IC3PFCTZN)i4MC9#ZY8BSrQ{`VLqc5!tgNEEUpq;%y@GGns6fc>tCzcC1D#@5HQgqr1a~1S2hM12q|(sIP?TxNtMR^q4%4m1AS38W6WFYVmr%Y}jBj2^Swa7e@(xnMu1nm~1= zd1#a>frb|0y0cHi6);qGaOVYX%Q^Vl030V&YQ)W&fM9VA0dE?=5S#g#R}i0g$9dl= z(BaXf-jOQ-A&C=yE<|Cp^i&dWgosG6`alSd$4NAK7$Yh*5?6$3{Juh?HvK|t4eEwU zsJnC7n#T2b1^Sfi6rI(~_jX?4cCKBTFolZqz$qIjIwbjEa)pf(*e7mc`2=lsbnfO| zO6|=}W`79&S?W{$G=e2Sw3+8_@*3xY z34)x^_-oC+_FbHo5P)&ezovhDKSQ}nv-M@;+r0dcS@U1?ULSKsUG>6cXL0n`tn=$) zO-C)zIukme-=2rPu8%msHgL>~4U?p#?`}y{QhR=G@}h@^hR41iEk9%3@wIceiFJHV zG=05m6OgkpAlL5nL8saYLcntRkey z!34Uw@`{RcK#J36ta1MM*HX6~J09MABxN~JS*B3aBmhBctUkui<(s?GeNw!?U`}6y3dz+CXM&#t>Er1&CA;K7V}oc1>uKLy=xu zS{l;965KC`l{ZBnQVdI_Hg(aq9C0H%dmbLIk)`EHWN(n!XHd50;vYi5X|#R&wIxPI zE-(J*zTr{a&wFel9Qs7t448a-r{*sWn+?|)TlYE>7iWm#Zzsabb*@)uO9CYWsMr#64c`S9u2-yat4+=`(!~9yFUX=a-*`562oEWe>R8O2R;p z%=+!O-_Y^%zkk3)6!9(se|R5gqG?;A@jJ2)Dq_7!WT#G@Cfnx^fSk-UGBQ$c;ld+I zr!<`ACMPF5Kv?s!x>U_KD>OG(Z{VJq=%f^p;f}apraPQMaEXeDn7?u3MgXn5w_C0n zd*!wwvEBjICnGPP?Qbcn>Wt4BjSpOCRQmUFRYA8bJmmNGDG5>b5WM-tXaE4&z^07fO3hubVUD~(?zknu1gyRo2aL)TD2;1 zQ`szQ>E2aMPG{rBJfDwB&G&bjpoxpgK@+R&ON z4S;{d0OOWlk3gI;$HZe1z{O0lVqhNf-W&QETYqZ1eAVVeE-014I zeY>Wqe5hNW!VUR4^A3EzH0m9At!OB8Sw8E{pKrKrTch186ScHss!Kx=nUpm&mZFtd z=rVAs$Z|~k6TcdW?CJnyjn`QSmN)|ALa$pflHd#taHOuU+&(|Q2k4hkf0eglQ1LP`;b8p{zJPO_JHD6CFTYH)8a`pd z;t3Nb=;`Y};D}OxYiW_^4{qHuOb)qxQ9@j(3k9 zJ&ptg?N}?0;0dcXF8gal7a?#1lmQB z&`o^?ayS*cc=_^JxH&Jzu`X&q*3YA`+#>|v5Ka0eSPFJ1DJcmb6{xWJ>(`@)4u#-a zKd2us^?cuAA(lO@m^O8)8SpF#X<0=@HCI>HE?v4rA_s5IhrS?k|NdTJ1rcE=isi_$>(e_tdh|&HzO3_78<>j`$2Tq)0 zKgoBbC}LU12do10IQRVdibM>vaBPbz*Hbe+EOvbS>u^xe;04<|MK~dh6}p;C${rDk zCpdHFObPeIMdGXG-n?mOZ-1Vg5Z)m8^P4wr+;}snaGTE8KYBSAes^PhVVuqEqnKy} zzr_D|)22Ag4#c3H6cHLa3*qD;INvbzIoH5UYhq9V>ck6g&PZp-4r>d$Ivx${UMXhtScoNA4a-ycp(m$FH;u$vOp05Dbs zG76F!Kg9MC;!y^t3!zmRS3LE%QRY>gw$hW_#l@7NQ@LCjETh1=Hv)Tegr1eD^fj2va4-Sh89eZlL(CB*koRU&RNY z;KGOxQK|c9t&Ds#O!{gc^Gt<@mJKgbkTuZH3n;Mn4-O7SvoH*>B?1IiZhpS8uC5PI zYEinE*C}nU<-jPfx{njNbnUn&YV}uJgW&NX&Lx;X^`tl{O zpkNW&K=;5TgDV>0o0WO+2XD>2*QK8;jMQRTlza3pCZ2zb1{fPP3d6vdEC=y$h45FSEm8WSl88b%5c>6 z=$0XiCK%|zg`63VPe_m*IyCl=Kjt9tIiSYb_uFra=g2y8 zz}HtRCr+Gr+4Jl6#I^V5BDVQ`lE;c+Fd)h^W>_IvT>ne!v}G5=hm3Gt@Cs!uuQ`j+ z9XE7$zYaJ#3}>fq)WMi%-pELIz-Y3U*QHxG>x*TbGOB0sV^zF*#UwzGfx*!pY@`dh5`o4AgXq+FJeWoq1;6-l(a!nHY~B9}r`HX;-C&x90B8 
zKp_su3H|!@o9x-VO+jm$A-X{qt~i#);xKFaP3SdyYs#tX(w4n&yiByOmw~42c3{AS zS!92pS319ISEqq{hTRx%d&#^oOx#~qUOsRAl!U_m-kLt`rz$oB*{I;%mt{(9{9qE$ z>kVWZ;Pnr$T(wG8K_LeHT796LTXwHZ9Zw&GSQ^w(0O$GNYI=&U_Cvh*z};m=(3BrN zbt(oQA75HfPGlPw?GRaZ!$(sua-61$K9{e&T7XjTA=3=`K7cdTZ8AH5MHlg~&46p0 z%69QuA_G4xk3$$g-0}V%z1uqP0aoEI2FGNA-y(~!6c;z1`~C1E?&H&=W%*pW4)v#4EIrY&Yai4Z&go|6VR%Qvy=`);hi2=&%AAb1Z$#u7cUAuQ93tmLN zc>N;F*KcrSPEHQ8+IM*aq$3iLmasQ`=%hO}LCEgFpanJ8kY$<2AfCli>OwjW?did}e zB*%i&GnRoLT#S$#pk^J55_&#B>K&Y0qzn^iJnql#=50O414g2oD%OO&oH_>N4%#X8 zut1ppY*J8xvYMLN%9Ww0PT>3bV=5--304`dQcNxYpe8ve4t7R>9Cun`LaMk6v2pRrXv zs6|fnA+H0gTO&eex1{^{_&7YhdTITXpyuXg5)uus@6HDLFaspOTSo^q76U+Uo15yM zql}C3eM9h23$(vHrSmYu$~DTgx)LsIPxyc<qqEm$1}d$n`n;fRV(H>Cca%8EolS$7aaWfT8^b}|b%b}SqVap;0_ZCgz!CT>EjyR z`%renF43as(yDn7?wNtEOOff{|14zMv!QNz2>4n4=FJ%7i=^T)X;S~X%d%5~3f{P& z5ShI4kMBy*<|iA88o_qJ9+awCD;I-FFt)T*O?bSpZivn)q*2^omkH7e+R;F#fS9Jv z&X)n_wr#trr19~;tj%<%BJp)3r;yJr>G;@^9NT+18fs#TQ z^bjbt9^K1DEiEk;*PPcb>Z2fv9nb)K?~Ec{{Qk(#z4t=7zb;2If@}gYGla2sH(UPv z^G*m&oXcnV1O#Wpx$5(Itt-kS-$eb2v4rLM1#vyd-G*!pq#I+c z7K=_y)Wu(elN*G6C3e{&9jl+)eq&rW&2e=%Rx9IdAH^|1|H2CO%NAExGeg5e_$Uh5 z96o0Sfv%(yE39wCK`COtX04tCG_VQ2&W8`15*(bQNxCR+mb9;S+(xq15m7Skq*7*F1O;^p6#$L6T|9d44Bf+?KZb-)iC3_e^;G)x7 zkJnZ ze`tFedq+I2ISc&w{*mHFpaTcU=JbPS#-OPPHfOZ5vK|p@YMJ@R?mAZOEQj~!KM4^F z+-y<-C&83NhJ@(g3*JN09wDsL`xh|?y|96tZ`P%~&M5vZV| zFJCqW-!N#B{r)FcHbfT|8jlz`QcrU0q{i8&Pc79qJTXSQ`4~bHPDrmCA)vPOL2Zdd zS3jZ@l!Wcu*8^Yi4Mqk`w{E=x+V$x1;}IDdIy?lZCYbM#ENkoPWWWF3@c!(s-le~E z)L(c1OFECW0&+zRniZpg5^x+r=j}Ui;2WKXy{uB8XN7C(0VeV!$fN(F&JY93_zmu( zmE0pF_NR^-9-l27`r#Z9KG>#FhznT{f#@0pH#Wg=qY0t^`fEK9j2BMgKUSZfFYSG8U)wbe-RYMH>4Py%ThAr%gI;x0yr{=iciwL(g9tLpdJFRazIi0bk}~~+kM*OL0@Of z^SB{ZB`Ha)^N?^rIf2J0cr#$1qo_fuz0j`G3@R82c!0Zw`@a;zI71M4Ld=4~S)auN zpa_t11Ku9779;}4P=o?Culq2V@m_eg@5UqSM}6)e1*C%J+u4O9`$ORs;o5<@6vNJ) zgDmU7b_}hPM2BSlH9AYRpgM-sq3s`ud+2cg`~f8FdAFn@lG+b3P?lI*^~&Q(-E}-V zZpO9By~*l!xiTA`U0(`3t-0glg`a-GAN0)9y{^nW#dNJ7E!2QyO)dGsN^qAb{ zQLYnXlsd*3%ODjM6~*)oDU7#UU}tMP8k`?sm;dAS4`5qT4j|{;dytqGZQd$bL)(l; zLmosroc`~}vr54^|GC~>oi=y=LMjE>8TD@Fhrn`CH8TE%S zsv3oMWA4gJh4qI7$3d4N3$6j( zO*fO&gX+n%ok&aI&k#=3qgzW3@H1E=Qq3rJ@7&coW`0H3)A8ZUaj&ND`4mLnZ_ffz zuuq0Y0SitO(vbRO#)vM;X;`cy|ff*&IkAAiV zaL<+@Mn!XkNBi7iMc=k1B!(*L>mDpbc3_YJ6GVvw4!nlK?7AWR7Ms+8jrbR$LW`o+ z$5-3gMFP4{Jh3oeOZZ8a{lA@)k2mZ!cAOR-e5~xC4(nO|(O+kD_}%mG4$W!*iToSb zlDGeWw!ixy_?OMUpahFa-Z*;GWHogTbj4*58@z)#8p~v9Nx; zl6a5J8a&+gKcX{Zb?loq_sZv0=n|d*ksAI9m4e~yvxmX%0ZJw%xgIzm{3qUl8NJ#b zrh_@M4T$Qu_2JMyKDo4N@6(bJGFjP|&~3ejF3uVJUkeF}k1#YE*TDj=_TxvAVlrsH zq?Ej$_6j90`v#C?D;XFVl)#OGKjtMiN6VC;9d42CnTlpoVaU(YXeC%(`DF*d62{`r zAR0#FcR~Shu3!KDCKeV^D7W*_QNvW$U69cy)2spJxsUcOHsTn<&V%d(+BD&B{f1~P z!8<*Q`WOu&Mv3fK=pRHYMM9Z3vDb~7L3}md(FfX>DJKF#9>VZn%$+-iU>Zy@^q-FA zQ8daSP*nFeKgFz~?UW!SuX8{l>!6DQB--K*3-V0_un@n6BZSKi8FGQlW?bBz%?m+} z11NKh7?f5tdMDbN=Yf_Y5sT6Ny9Zjc9x^;q+Tx(b!alZ|n`Bw-{&4$m8U z7i?-YpzT^*ug?(j7h$q3FZS)5H<38rRp{V7#M-}5Xuk|4M^2p@D`u|&eTg9#g|~S1 zNv|CZ^X>_Mo#BE$*LhhLQ_=$Z)drw+FkJ-%cFY-;Z+WGq)~uxg3Q*NNpEeHt5yZtM zo}Otq7fQ$}%i*2@F)(|xwsMxz0cOox`T66k=cbNM0 zOU>TBYZsD)!lV~5rqQUHpldTAWRy79mW$aLLeK@B^ni^l9IFkFf+Msoh^W-(+hrWzxu1%ufD@0EX6e&yZrj2H!~RAC7}8+ zQL>=~4W(<)bIT|ydOdE!uiHBL_J~L@RcMg<7daH+V4*UrsIIPdz6IpZ!Zdpwnil+X z>C+xmWglNSlIR9AMM*DHqfXS|;YgG`uWj-<+Kk_0H7M!vFsD+sb$Ttprf| zZ~s!<{*BxIkxswf9h&7VGNIiJi4#*Z%=l CWvKT6 diff --git a/examples/whisper/reports/cuda_128_100/inference_report.csv b/examples/whisper/reports/cuda_128_100/inference_report.csv deleted file mode 100644 index 098cc4cf..00000000 --- a/examples/whisper/reports/cuda_128_100/inference_report.csv +++ /dev/null @@ -1,7 
[full report CSV: ~120 configuration columns elided; every row uses model=openai/whisper-base, device=cuda:1, task=automatic-speech-recognition, batch_size=128, sequence_length=16, new_tokens=100, warmup_runs=10, benchmark_duration=10, onnxruntime 1.15.1 / pytorch 2.0.1+cu117]
-experiment_name,backend.name,backend.auto_optimization,forward.latency(s),forward.throughput(samples/s),forward.speedup(%),generate.latency(s),generate.throughput(tokens/s),generate.speedup(%)
-whisper_auto_opt(O4),onnxruntime,O4,0.133,962.0,209.32,1.04,12300.0,59.53
-whisper_auto_opt(O3),onnxruntime,O3,0.338,379.0,21.86,0.959,13300.0,72.50
-whisper_auto_opt(O2),onnxruntime,O2,0.344,372.0,19.61,0.961,13300.0,72.50
-whisper_auto_opt(None),onnxruntime,,0.655,195.0,-37.30,1.3,9850.0,27.76
-whisper_auto_opt(O1),onnxruntime,O1,0.657,195.0,-37.30,1.29,9920.0,28.66
-whisper_baseline,pytorch,,0.411,311.0,0.0,1.66,7710.0,0.0
diff --git a/examples/whisper/reports/cuda_128_100/rich_table.svg b/examples/whisper/reports/cuda_128_100/rich_table.svg
deleted file mode 100644
index 1340e809..00000000
--- a/examples/whisper/reports/cuda_128_100/rich_table.svg
+++ /dev/null
@@ -1,129 +0,0 @@
[SVG markup elided; the file renders the same cuda_128_100 results table as the CSV above]
diff --git a/examples/whisper/reports/cuda_64_10/forward_throughput.png b/examples/whisper/reports/cuda_64_10/forward_throughput.png
deleted file mode 100644
index a39976535b008cd9bd1b27b4aad8f1b8c9b818f5..0000000000000000000000000000000000000000
Binary files a/examples/whisper/reports/cuda_64_10/forward_throughput.png and /dev/null differ
diff --git a/examples/whisper/reports/cuda_64_10/generate_throughput.png b/examples/whisper/reports/cuda_64_10/generate_throughput.png
deleted file mode 100644
index 59c796adbe64eccc368903a77ffed7a056cf24fc..0000000000000000000000000000000000000000
Binary files a/examples/whisper/reports/cuda_64_10/generate_throughput.png and /dev/null differ
zwD{C4ah$~if1<{Kn8lollmhPs%d3p&r3$SDfP(^T&~+?fOVm5&^k+2HJ`gENbd z%mFry1rgy&(Wa(hOeJlq=~n?xx3FV^8U$<(HafXnuHgZyMFH`#HP}IukP==nyc0p- zKnRfAF2x+mM5Eg+!}ZP}n;FIZ(rI312RvS;;5O*F*?9+&%Iw~7e;8**>_&wX*j9Rt zq}?~e3oA28OZBV~Yji5Q+jmWgGDW)q3ZVwMA>bUJ0;^&wXQ@&W(y38}u+f`JLyC8h zkxw&_(L9i`7um1$y_<*U*>=!nsj0cSxoc`Q&YeFp@N~M8nJ?SqjSpKs-n~&z*eco~ z#{d#Q-`s)I9M__zGTwhjuEFJxFCOu0t9EL_G>-v|Kn5PTR|%GIZ{q30-7JTNsGgLA zDq-yeYylKcZ}Q)j*wtn4>zbOexGw$NR@1nfC2=iUy#8M)kK>s|rcGpApsgACHeBF| z)FMc4c75Hre`A2T5%84xz~ZFSzC@x2meiu~xD3Swfwz;4qWa~TzAPUEBNcpfsnA8D z!=pyu=fn?2g9m(&amgqQ@6n8myO73=8l0ignnv~dpM6+~Q&j`U_OU1{x05J}^7TP< zWsc=M@0QcDNskH*?Y+;Oq0t`iG~or(aK>J}VeGBk0G$Pfia9KM> z$At%s26SBTjQ@hvZVT?tW4!e07Iq?`O>mXl`aO?$)_T|iW z@HZSkhdLH=aP(%Cv{_I zXVW+np-+{5CVa}2GEf$GQcR%({o!;`v3YtOh)|!qTzRI~%2fcZ1k?mMkn>ei&)suq zLq|$B`!3AG1#T>4nDzm;M0NAqKdw9rPZYoX-4LO-k!3ae_Zd*9pkpe7G%OQmS7_N0 zP6aL;V$N_@D!V5djYXy5aGqa#QV zJ1x4l=*nm{(bqswd2nkg(x#!5CU?wVC&g@Y04<&rv)SioB4~;}a2-o0yv&$~-+I;P zbwI55aN_+*aHHUh*#hf6RIri8N=j}Xfd+*B6QEEiqw}TzF_p#WdFboTAw1$P*Tmf4 z-l^sZujjqOa2pt{gR9i!K*Lg?R;mbkS$GsqIZGDUkZn(YX`HkYT%%0lHAqLnH*Vw{ z8eG%bExS0W!SKl`G|W?o{t)2ekDv?njqy-FAI_P|=XJKv&T{eXms-5>+@gv4{#RQ6vbZb%?K}#87B9$H4|#ZtmTpnEeuh1K8cM zW(VFVcg-$VPS;t;FQ9?sz>PNog%CY98H|(&)&grPLvO(-Pf0;A9v;~<-SuY3N6S$tZnxC%d7Dmulzn zhbBXg&5Mrcw})*Dy)^W3u(`6V6n{7qA=J8bKzNUP$6PP`;FfY>)>rNp#swDk3=?@| zN6e~(S~|h6qwuzq;O&e@L`0fr9T>q5EBO$!j+hyQ*vIJ-69eEXv7{YH5Kby}J?!UL zlHERQIj&kO+5r)z%@zC&a~=$iy6tN7B17Fz>T<*tNhz+ox8u5~FbbpA)=bwTOvgxc7KIIeMQCa(J?8^HY)7>IBI0j>WDv*I*s07N0!%6S2o zWAs1DGmI~nK$lg`+)W{x+UA{F@3Bzitwq4#F_Jb-_}xJIy18mQ*}=%S^%vjvh~E2h zVOwmpIAw-%!FE;Tzm*Fk8K6&7ul}(JH5RHI3cBUv!$ozA+oQN%ncor96)UjFN^3L(C^jroDGc$n$v@dD|5O?8 z7lAuy6eA6zcE$y{kw6uicnP5^Uq&u2K%E(aA`0rjQh;QOYww560ouNSPCl49XyE&L zUjMnq-!jElYfII5#aD1#g*U@0O#5#&7u-Ogu0(Jt_ej z%4Hm-(B11pD<*WILN$q6hlM`Pz6Hh~=%Sv3ED`9R7kPR5iF~y-Wlk>!1hjZ4ia%<^ z3CJT+W#T6IA?B6PVui9hDjNab3O0}AFJW5o>c|f{gF1r75){dZD1vP#UL_^iG}UBu zV4ogzW1ed()(udBKQT@oe_9NP~OUJ6_Bobjwt-vG*2YE*b~YdF6{;rK(UhW>}p*V!O1UGqArePvRT{?PVGt29;4X)aoXZsRNI)*I($nI(zL6S3P z9))Ih94&8Czf*RJAZH&|AWAya)+AjU9gd*evtgIStV^=CyVj5s!2-|a^ zqCgfDOW38d*ynhUh7arvDA6U_d+1R2;%QCYPdfx>g3`AFF3^G`5db!YRNW0rr{Inw zT~OLwJmxB)2Ohj_7{OLAL>2^A4;I9#C7SideyDdIQ}o1B%Pn^>pmPMeEielKx|~HK zFQrzEP$!@QU1<1hP((k+JsNaZ-oq6~L-i0-6d*!vXtHSbz{v(L8DU&dNzd}`@^;`E zbI+3+S2dvQt%aU)9fxeZ(-<(HI&&u6rDm*U%Fx>T(FXW~n<)WcN+ge!9@T&&*od)q zE#5G$x|@vM^p5YMQLlc1Ee?%%Ft`~#7V3Dcvn`h+8H+@SZU zCFq%iZf82hlN^OfrF~Y0bUHj_4PM-5nwIgi5)$DPA%R&cMhfF<0h-gYM5aOz4VXoY zujjl!5K^~P$$Ulu%jHC+zzP=9b>*_2;z~8q;eH*D(FJ*yylOoZg1l)ePp-psTtEY$ z7mWeLX*Mm>*e^NgPmA6bIp|lGQawXtq#Sd)M|cP;|EkB}uMANvi&z5Q?cFZBx*b-M z(JE=}QlR@~QrLJ!k9kKrZWQ({9u|C+q4J;)&S~5RMZ1^$Pnj!SvWfh1MX6QPN;?0S&me4OURfHr1r?;l`=)6wLQk5x`00*Ax&FGHkv zp`oS?$Ce;p15jx?TrsL9>FnOj;h_lA6J{!bSj?M&o=5{qb_(KK-|jkA_NoQxk%L$) znd#D?*-;#>tvXlFP9-jGD#z(aa2MKObwo?5qR2;JEybw~yh}EMir}I#ey>$3L8KNF z{qJL#$_X$bozLKSfodc1r?Bomy|wqH4#)5+dcvbB$Y-cC`iL2>poc~T;n?|V?c9pO z)QrY?E~qeUFlSDqopR?_?j9>=choU0cgjes;mC*%>`<(*HxyQ$a3TUvkp;Ay8?j3Yn&ck1?Slr#?ZiyrROy_d2+AcwI{C>3_~ zs;Lt`bYb{<&L8O<#C%47nCJ}@^Yr3kW+1DU?OGWa z*(bo{Lr<)a$6Qt-{fxDOytUGZ>kkLfL0uhih)U&@cA7iu)YS#hkPBiYV26j5P4{}9 zYsWbAz+D*6(HU}zH-70T31?g44XEfaNhXnE_M^{4(pcN01KilSZB^hp;67%|CJop! z%<3Cot2rtrt$f9%w4|}`cit}|Y;AEA>o}3`&3cy-) zRFcqNg{u>9qeLUUBp5IyM=TVv+rSXcvrb5*;2~g+yVB%VryAXMJf-OketiZlRZ#er zqaoG@Em8x{7~GYjr+R%Z!F@y@VyU*H+RN9H;D;MXr$Hb(iQ;HFGY6pQPZ!_1Rhae1 ztm($fF6rwaMZUvKjmp>n#1@)GQhbF((9WYmNtl2peTD>X8q~XBjGKrrfPd?UkZ!f0 z?F7G(qZt=RJgkpM^dk)eNpyC``R`B6+CXplvdEBLmvMnJYdG`+L=_vK`?s7zU1r{R`7dp&#->n)#B+g4X@i`uBaue@Lt1zlM->b+wtt{g)~(o+{Zx{e;E@2V)J5TSG2F? 
zB}5Cj4FM3Q>9kZ<0Hhe;q4x8;-)Z>FU~?`6r6>9*$tZ&h0%=%yParZeH)dD8mdgcS z_f==P9IduGS)}Vz_uzwJhWgSvkC%@b-D#90Fx_y7xY3pfGz&L!L-Aa+IOyggQbLx5 zl2PfUrd%h|#94Av)%K+Dd(%DQ9}Jq3vj}xF?+uS&ImwKKw9wi2zzw62hVm7<#c_T> z+KIecT!4dh9|G(hO~^ik1^=RL2J2@i&oyj`Dzm&hAY>EUBrrtPk>mtN8_uuDa`8jW z)%4!b{V{0({H)wah0pE?Q<5^0=O}t99fBNpp=OA}1Zd)O)ld^y!=jLQJ0~cx3K3p4 zzZH`57y`>6E7Sr{xPly{S+v~4wWG8OD_e1H2ktZVWiKG^<@g-{Q1h{B4gX!=ktWU| z?b_`9IygX!tv1g2Jf<=hV>B?}S>dlH7)}}`+SC!84`D5w>C}heDgm~QQOB%SSB>vh zw&F@CQU{Af6g~ccRp{qP6h)ZTVCK!EgHAAO*{eEd$Z_R5UU83t3HXgFy z#`Szc^p`k2$xcp$&>2 zcG~j$D;XEi@@^*3N1&8u9@M5y&16YOCM_mE3N}u&Yj8HvN6;Hc*rqM)LVbpz7x84F z(7+ce!!H|`SiQZfrsr&^(U$)89vZQWdS-` z(sy~6hTMOZfB6duPHk^Is-zRYNT(3FxE44Sb1~{&6sU+sY0D|>6i&l- zfww?bAWWi#)n1Si3UhSW=pEt^wDFxR*oVI3voBnzS)0Bh^*=#|9pI|Q~-7s4LZGG zqlX_r3Iy{sm-n&dL2Y_1+l`BpNL0Yv4+hsyBIQ_{(^3R|wI?@a<#jmKI%^We{zDrc zhl~T;(3gn0$m}@+yg(y`Kk+jR_gQV29rdp}>b$DBc<4J-=Q5?W{un2)_aw431l?Rd zW+2_46bLj;w3W}$FsJm{0Y;<&xLV;CWLK@e(lLkwBUyqLn2G{hvhO)z0WmTd95;!S z;fw7UVEqdSXF9Z}qWqa6a=PDL;F?Ur$j<;hoQv=8;r~Fu%^@x%j~Q1elSc^%0sYeP zvf-C5DD%m0VlB~U?=;mCRFgL-Wak=omQ4J(tV~HDr-U!1Z#@<3gq3&|Sm@(x+)@4^ z#Jl5V{92ZNmL{(pt?<&~&n#-Uz(Rovdm=S5d5m!dxpWF~grgi7WGT<>WH*itX1&SV z8=M|87ZWJt54jrqLM0+(D_Hw1T}bn?3rS5LU;LAz!Xk3#;MW6S{(zv|2AS(L*dD3~ zuarEp#w34;V21a9L2+|38fVAiVc@aqfsjqFSY%b@4kJni4}^HWOs-(1NdO!ZA0bJ- z(easil>~Dl&M-67y@H`DNBu-YaP3MV++V{v3=LfAq6c{Q1az?&P8g4h~|KVwj zVSppYMwFW~o3d2J-OYy~ynwH9goLr4Kfc_x$6LGP&)jsWy@{_V+Z~9n7G=x-6yOAl zFw&;;F&NoTJ5XQ7>MS0DIsD1Yb$$ zW;Cw-NX%P~I!-VY1!szAJ~cIT&f=RR=?m-UO4z{B@d%|1w?K{_a;EDEkCz$3=N`KA zHTnSVAK|rR8P3m)LnS2_NsyP~X6}Y-2H6I@dBpSZOH-UeI%{X9&kk)D(2(fDuZ+N| z2hs<53A{_J__0|R`0o!P3LO&&=zB(kQ0>a~oJ`9QbV%suhkl1ETxd}n1F+{ZctIQk zar;KfR#B`kQxMI6mEi|x;jx|L`cVn(PpoMCH%>2CT#SY)OxxS;c+Z^n<2rNsClieca=;(i zj{t8asL#DOU7wx-r-2S2IQKiLLO=Hoq6@+yhul|=p+)jEM09~(gxJ&J%z{}!Q_|Ne zU6nAc*0&8G)HOnWG+wU~YhIwDV16$4(hIjW&o!rnpB(9%ZPY_@-+xTQ*ootwFaAk= zO5X`TUNzUNl?(B{QaY%6MW0TkLFZfQ#SU?U=D_`$D@ZvRX7cW!3KB{z61)`SusB?E$z^eBe|H08lLExyym#8 zDE&a8?5ZxYN2(hdcA##xXZwJdHg3~j*JeS;cE~)^ddz#Tv+oWbJVJB41}2ZKev7VO zzrGWtM%MIW`3ptb-C~tQ8ljPqFOXyU4IVu9{simB$zY>Nj1d-o{qwQ=(K*>mP-x>T4#wXksF#EIuL)*b!e1LB2U zaf;`mg9n!^T==`Aqhro(r*(MqmVJ6J>U5-MPv;!@;OFmEqLmH(FsoA?I%Bs5Y&^q( zOJ9Wd(;Os-pa}DDRQ#jwPw1zlRJHBMsZ;&XkdVp>{)@$&-91HdKd)61#r#q)^1*Yd z%(M@B>hy?@A2-_A+AjKLXxnMM^zofi(4hsug1R~*5KzIFy z4&@sit*W{h*JY6U+TSz%bjBWjFg&3s5-i=GkT;l*Jv>}gquu^E-^f9#VrF6C0|e|c z2LYTG;Duhn@a)NxCx$7fpg^ehw-^k6Fx;@ZMuB2?2JPcxCyL95;#19K*4t;@U+ifD(@M8b6rf=UiInaiVrZi}0*?dImW!blwG zNh`D!em|UosQ$Iv3AecXRksRkVy!e$!BoCZTFF%zn1YOyfc){aw)9C>!LWnMW->cE zb)YT)z_d1P{B^j18uHblzjMryKHJ=GN(+YW($a$J=jOOR#GSJO;quV5zDyp`#Ub>1WG}ba_$l4gH8i}TxXQ^f<;FFHOC}`T zbkMu5BDBm#((=AvYy{@c7c_~Ff4zP!VyNiYtoB%#pJU5?HKtZpVJN7=@Qig`4T>Um zqgjsKRlj+pSp4Yq>!eqy@JcC_L3U`8<+g1%fqq{^b|=F2rDb1dc@kG}P~T%1I<;Ecd8gO9XD zwsWfRG;sU~w=7z`I0rO}ix}vCc2`<@F{#V0yo^$Ll}vMlJaZ zXUtf^5(5Bw?!^VY34P#R8KVsoHwVAMeX3i%U#)2r9uN~7ixw0a65XbdsY7~U9$|{J zn2VDfjDMO}9@}tF=FNuo&$Ip`Vt;`>LJ@Xjnjy>N;r3bUZEvwCDr>}?2^|4NIZw6a z-Yg0EBJ#1sNASicJ=j;-4mlZ{&x3Ec*4ZKoXS)2cdVr&r?^>l*vnOSLI^5JyHXFPp zO~;ajz?@wWJP#s-BS7}Je>}tXIaJ?777!OsJ9m1swd3*h^}7Gd_^4r<57QLkTdHmg zW*@ymQ6U{Vc5La!jS*0EY`8Y#V3i*}WZ2e?Mt(zL^RwJ)!9cP4QEEkn$Zm5Z} z#Ms3p7FB~_$i-uazeao^7+H5z28BiY6SKqc4=3? 
z&yoAZ4!bIywQT-LQuF4{T@TMf{%P@ha5=5+?!MhiN&DS4C4I5T&%+9SZ*Oj`?zX)t z;`3)`->F+?12cYsZdP+g%_@8+TuHxYsKKKE5_|v_oXzx#N-1`8k(Knu`lrJgnLw(@ z2HH@N{JImKbVqaHi_cQoyzGA~HCZYgzPvX{M%1(ur$;188q(9b=~8X2DZ;U6Q=` zftN;_lo>p6~;&nd|@v;H~SQIWx^!0lJ z6c&mDkw{UlRsOdpC`oc)z9P|8wJ(4B_99q}#`o^s8#H}e1aQxb_#C)pd{Su4R7Q_B z0-Au?`^fOz#~GbPO8e7gS4}Qqsk^CV?Yeb*nMR0#_PTFCAN+v1Be%Te+}*QF2Q&P0 z+OE+Bjj_8f$$BUUAPkD)cKGb=bp)a0)2C0hb#>5{oK>HPwzT%srzOakvCi*}V>djX zkDGXI`u0dP$}BJ+Jz5B|>Z8=u1qW6O$#iB%;43XJAFHKhh3Nt71mFhZ;OArT|4@Ab z5$8)>WMFVwJZOPPyBD&lDEUG8mpZS2iKq*6P-W%DtTXi0*<}u)=|RTB&hiTDF=&r2 zsEh~S>@(`$cL3}eEdjTT%J7*u2d~>ZDk=);;`#eakGeSIcf6mCOj#dry6sU1#RH0G z{OYf$_+&zlfd_s;hYb$@A{_E)tE-c;WOfWei7WE>cCUx?$?$(~%fOTxDT!=8jxkKX zzI`u(v;^IO-rl`?A7^DPMN-EIbm7*X$rn4?*ikO^Ez7U1+N-K?*w3|WpyU})=Zp9n zA;^zxmT)ee%hCrnmyUN|Vq|n2U$XG{9667bp1iODZrL-ZT#uik$WnpbCV&L~M+m0n zT9I*a7U;NMCb=h}!}^@?x;gX9z$epXYWj5q3D4*6oh!GiHD9~%i48uCCdR6(8{@M| zFChkG`0K8OPirM*a*W&K=Q1&#-vUE-;sazRoE6g ze7Y_d->tdf?7@BeIw2~Bp$0+CbL!TuTj(>vb}H85h8A-?Xxy}s2Zs;#Nd393F-TSg z>Ad3cI7wJ&=#s5luVa9NU#?tPl(4kDvW3)GhEWWn)2CnwgyTzJU7553&)~TDtFGLe zi9*1x7^ab}f2FGL6;yNGyLLS;E&|kR2dY0}zxTE5f?i?&7v z7>Y^9;ZuN~+ZS@zdHCs@YwgD=dHb*nnxj9OE9`qGyl2#}?VzzO39fa@42_Y*$zUsg89y zS`brgLi!Eu@nE|KQO#i?NayIDc9;^1Z;91t4})a0hb}U7g4i95nIQ-#|ux zs}enuzuvXu*MCG=_}0!3%cX(n&j||&Aua)qbU41^X#8HZ3(`w)Y#{8$BZ3Ld+L(N_ zj3e$(018zzRmhNMdg;ZTpNrxNNWm9?;$z2;H^g)SXdxjM^s=nEYib;jB*D&Ih;-3Y zXRW9qx8qZ!gNYIaJPrR|BE?XbXRfiKAq2yJt9a*7a0EjB`pY82cRFuNbpA&6OuKg{ z{sZDIo6}9C&4x#meEC72s!^cnUc!%qefC1UbW@BSsC!~D3_%{mcD@FA8J*d~{PP+% zqCsIAXZrAW`xbh0%WEMAAbT0(+TcFNZm(W}1M;4S0I(Ow`B+sjmoOi+*Wza7@1o?B z!a1-WG$~sjJnzLA_veC%42KOp&Z{$61S?TOb-Vv@HRoD!m((6K$ghMZ{8gU*PPR zpxXmhTC%MNV4a8i+On<3LjVVlY-7eunq-PH4ilHFOb?)!L&lS-IV@}=gYE2 zys-X1{RHiFIq< z39#V72>uucF`PPgp;l3+IjyOUCIB_<1kh;^DjgJ_Yj}#UM0F>9$0;6slwrTC z9`0IK##!m4U;pe{nIDsyt9{1erm&RN;gf}XOWS5mk8dV`xE?=VME3>&XJon~SrB2x z2#D~ewzjpKn;T7A92^|x^d89Ho4xS*Z*6PY0~X?s()! 
(remaining base85 patch data for the preceding deleted binary file omitted)
diff --git a/examples/whisper/reports/cuda_64_10/inference_report.csv b/examples/whisper/reports/cuda_64_10/inference_report.csv
deleted file mode 100644
index ce7a584d..00000000
--- a/examples/whisper/reports/cuda_64_10/inference_report.csv
+++ /dev/null
@@ -1,7 +0,0 @@
-experiment_name,backend.name,backend.version,backend._target_,backend.inter_op_num_threads,backend.intra_op_num_threads,backend.initial_isolation_check,backend.continous_isolation_check,backend.delete_cache,backend.export,backend.no_weights,backend.use_merged,backend.use_cache,backend.torch_dtype,backend.provider,backend.device_id,backend.use_io_binding,backend.enable_profiling,backend.optimization,backend.optimization_config.optimization_level,backend.optimization_config.optimize_for_gpu,backend.optimization_config.fp16,backend.optimization_config.enable_transformers_specific_optimizations,backend.optimization_config.enable_gelu_approximation,backend.optimization_config.disable_gelu_fusion,backend.optimization_config.disable_layer_norm_fusion,backend.optimization_config.disable_attention_fusion,backend.optimization_config.disable_skip_layer_norm_fusion,backend.optimization_config.disable_bias_skip_layer_norm_fusion,backend.optimization_config.disable_bias_gelu_fusion,backend.optimization_config.use_mask_index,backend.optimization_config.no_attention_mask,backend.optimization_config.disable_embed_layer_norm_fusion,backend.optimization_config.disable_shape_inference,backend.optimization_config.use_multi_head_attention,backend.optimization_config.enable_gemm_fast_gelu_fusion,backend.optimization_config.use_raw_attention_mask,backend.optimization_config.disable_group_norm_fusion,backend.optimization_config.disable_packed_kv,backend.auto_optimization,backend.auto_optimization_config.for_gpu,backend.quantization,backend.quantization_config.is_static,backend.quantization_config.format,backend.quantization_config.mode,backend.quantization_config.activations_dtype,backend.quantization_config.activations_symmetric,backend.quantization_config.weights_dtype,backend.quantization_config.weights_symmetric,backend.quantization_config.per_channel,backend.quantization_config.reduce_range,backend.quantization_config.operators_to_quantize,backend.auto_quantization,backend.auto_quantization_config.is_static,backend.calibration,backend.calibration_config.dataset_name,backend.calibration_config.num_samples,backend.calibration_config.dataset_config_name,backend.calibration_config.dataset_split,backend.calibration_config.preprocess_batch,backend.calibration_config.preprocess_class,backend.use_ortmodel,benchmark.name,benchmark._target_,benchmark.seed,benchmark.memory,benchmark.warmup_runs,benchmark.benchmark_duration,benchmark.input_shapes.batch_size,benchmark.input_shapes.sequence_length,benchmark.input_shapes.num_choices,benchmark.input_shapes.width,benchmark.input_shapes.height,benchmark.input_shapes.num_channels,benchmark.input_shapes.point_batch_s
ize,benchmark.input_shapes.nb_points_per_image,benchmark.input_shapes.feature_size,benchmark.input_shapes.nb_max_frames,benchmark.input_shapes.audio_sequence_length,benchmark.new_tokens,model,device,task,hub_kwargs.revision,hub_kwargs.cache_dir,hub_kwargs.force_download,hub_kwargs.local_files_only,hub_kwargs.use_auth_token,environment.optimum_version,environment.transformers_version,environment.accelerate_version,environment.diffusers_version,environment.python_version,environment.system,environment.cpu,environment.cpu_count,environment.cpu_ram_mb,Unnamed: 0,forward.latency(s),forward.throughput(samples/s),generate.latency(s),generate.throughput(tokens/s),backend.load_in_8bit,backend.load_in_4bit,backend.bettertransformer,backend.torch_compile,backend.torch_compile_config.fullgraph,backend.torch_compile_config.dynamic,backend.torch_compile_config.backend,backend.torch_compile_config.mode,backend.torch_compile_config.options,backend.torch_compile_config.disable,backend.amp_autocast,backend.amp_dtype,backend.disable_grad,backend.eval_mode,forward.speedup(%),generate.speedup(%) -whisper_auto_opt(O4),onnxruntime,1.15.1,optimum_benchmark.backends.onnxruntime.ORTBackend,,,False,False,False,True,False,False,True,,CUDAExecutionProvider,1,True,False,False,1,True,False,True,False,False,False,False,True,False,False,False,False,True,False,False,False,False,True,True,O4,True,False,False,QOperator,IntegerOps,QUInt8,False,QInt8,True,False,False,"['MatMul', 'Add']",,False,False,glue,300,sst2,train,True,optimum_benchmark.preprocessors.glue.GluePreprocessor,True,inference,optimum_benchmark.benchmarks.inference.InferenceBenchmark,42,False,10,10,64,16,1,64,64,3,3,2,80,3000,16000,10,openai/whisper-base,cuda:1,automatic-speech-recognition,main,,False,False,False,1.11.1.dev0,4.32.0.dev0,0.22.0.dev0,0.20.0.dev0,3.9.17,Linux, AMD EPYC 7742 64-Core Processor,128,515637,0,0.0668,958.0,0.116,5520.0,,,,,,,,,,,,,,,52.79106858054226,371.7948717948718 -whisper_auto_opt(O3),onnxruntime,1.15.1,optimum_benchmark.backends.onnxruntime.ORTBackend,,,False,False,False,True,False,False,True,,CUDAExecutionProvider,1,True,False,False,1,True,False,True,False,False,False,False,True,False,False,False,False,True,False,False,False,False,True,True,O3,True,False,False,QOperator,IntegerOps,QUInt8,False,QInt8,True,False,False,"['MatMul', 'Add']",,False,False,glue,300,sst2,train,True,optimum_benchmark.preprocessors.glue.GluePreprocessor,True,inference,optimum_benchmark.benchmarks.inference.InferenceBenchmark,42,False,10,10,64,16,1,64,64,3,3,2,80,3000,16000,10,openai/whisper-base,cuda:1,automatic-speech-recognition,main,,False,False,False,1.11.1.dev0,4.32.0.dev0,0.22.0.dev0,0.20.0.dev0,3.9.17,Linux, AMD EPYC 7742 64-Core Processor,128,515637,0,0.169,379.0,0.2,3200.0,,,,,,,,,,,,,,,-39.55342902711324,173.5042735042735 -whisper_auto_opt(O2),onnxruntime,1.15.1,optimum_benchmark.backends.onnxruntime.ORTBackend,,,False,False,False,True,False,False,True,,CUDAExecutionProvider,1,True,False,False,1,True,False,True,False,False,False,False,True,False,False,False,False,True,False,False,False,False,True,True,O2,True,False,False,QOperator,IntegerOps,QUInt8,False,QInt8,True,False,False,"['MatMul', 'Add']",,False,False,glue,300,sst2,train,True,optimum_benchmark.preprocessors.glue.GluePreprocessor,True,inference,optimum_benchmark.benchmarks.inference.InferenceBenchmark,42,False,10,10,64,16,1,64,64,3,3,2,80,3000,16000,10,openai/whisper-base,cuda:1,automatic-speech-recognition,main,,False,False,False,1.11.1.dev0,4.32.0.dev0,0.22.0.dev0,0.20.0.dev0,3.9.17,Linux, 
AMD EPYC 7742 64-Core Processor,128,515637,0,0.171,374.0,0.203,3150.0,,,,,,,,,,,,,,,-40.35087719298246,169.23076923076925
-whisper_auto_opt(None),onnxruntime,1.15.1,optimum_benchmark.backends.onnxruntime.ORTBackend,,,False,False,False,True,False,False,True,,CUDAExecutionProvider,1,True,False,False,1,True,False,True,False,False,False,False,True,False,False,False,False,True,False,False,False,False,True,True,,True,False,False,QOperator,IntegerOps,QUInt8,False,QInt8,True,False,False,"['MatMul', 'Add']",,False,False,glue,300,sst2,train,True,optimum_benchmark.preprocessors.glue.GluePreprocessor,True,inference,optimum_benchmark.benchmarks.inference.InferenceBenchmark,42,False,10,10,64,16,1,64,64,3,3,2,80,3000,16000,10,openai/whisper-base,cuda:1,automatic-speech-recognition,main,,False,False,False,1.11.1.dev0,4.32.0.dev0,0.22.0.dev0,0.20.0.dev0,3.9.17,Linux, AMD EPYC 7742 64-Core Processor,128,515637,0,0.325,197.0,0.359,1780.0,,,,,,,,,,,,,,,-68.58054226475278,52.13675213675213
-whisper_auto_opt(O1),onnxruntime,1.15.1,optimum_benchmark.backends.onnxruntime.ORTBackend,,,False,False,False,True,False,False,True,,CUDAExecutionProvider,1,True,False,False,1,True,False,True,False,False,False,False,True,False,False,False,False,True,False,False,False,False,True,True,O1,True,False,False,QOperator,IntegerOps,QUInt8,False,QInt8,True,False,False,"['MatMul', 'Add']",,False,False,glue,300,sst2,train,True,optimum_benchmark.preprocessors.glue.GluePreprocessor,True,inference,optimum_benchmark.benchmarks.inference.InferenceBenchmark,42,False,10,10,64,16,1,64,64,3,3,2,80,3000,16000,10,openai/whisper-base,cuda:1,automatic-speech-recognition,main,,False,False,False,1.11.1.dev0,4.32.0.dev0,0.22.0.dev0,0.20.0.dev0,3.9.17,Linux, AMD EPYC 7742 64-Core Processor,128,515637,0,0.325,197.0,0.362,1770.0,,,,,,,,,,,,,,,-68.58054226475278,51.28205128205128
-whisper_baseline,pytorch,2.0.1+cu117,optimum_benchmark.backends.pytorch.PyTorchBackend,,,False,False,False,,False,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,inference,optimum_benchmark.benchmarks.inference.InferenceBenchmark,42,False,10,10,64,16,1,64,64,3,3,2,80,3000,16000,10,openai/whisper-base,cuda:1,automatic-speech-recognition,main,,False,False,False,1.11.1.dev0,4.32.0.dev0,0.22.0.dev0,0.20.0.dev0,3.9.17,Linux, AMD EPYC 7742 64-Core Processor,128,515637,0,0.102,627.0,0.549,1170.0,False,False,False,False,False,False,inductor,,,False,False,,True,True,0.0,0.0
diff --git a/examples/whisper/reports/cuda_64_10/rich_table.svg b/examples/whisper/reports/cuda_64_10/rich_table.svg
deleted file mode 100644
index cd8fdb1f..00000000
--- a/examples/whisper/reports/cuda_64_10/rich_table.svg
+++ /dev/null
@@ -1,129 +0,0 @@
(deleted SVG: a Rich-rendered table titled "cuda_64_10"; the SVG markup and blank lines are omitted, and the table text is reconstructed below)
-Experiment Name | backend.name | backend.auto_optimization | forward.latency(s) | forward.throughput(samples/s) | forward.speedup(%) | generate.latency(s) | generate.throughput(tokens/s) | generate.speedup(%)
-whisper_auto_opt(O4) | onnxruntime | O4 | 6.68e-02 | 958.00 | 52.79 | 1.16e-01 | 5520.00 | 371.79
-whisper_auto_opt(O3) | onnxruntime | O3 | 1.69e-01 | 379.00 | -39.55 | 2.00e-01 | 3200.00 | 173.50
-whisper_auto_opt(O2) | onnxruntime | O2 | 1.71e-01 | 374.00 | -40.35 | 2.03e-01 | 3150.00 | 169.23
-whisper_auto_opt(None) | onnxruntime |  | 3.25e-01 | 197.00 | -68.58 | 3.59e-01 | 1780.00 | 52.14
-whisper_auto_opt(O1) | onnxruntime | O1 | 3.25e-01 | 197.00 | -68.58 | 3.62e-01 | 1770.00 | 51.28
-whisper_baseline | pytorch |  | 1.02e-01 | 627.00 | 0.0 | 5.49e-01 | 1170.00 | 0.0
diff --git a/examples/whisper/reports/cuda_64_100/forward_throughput.png b/examples/whisper/reports/cuda_64_100/forward_throughput.png
deleted file mode 100644
index 59d6e2592b1faa054ad3f4e2488c77803bc53533..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 58139
(58139 bytes of base85 patch data for the deleted forward-throughput plot omitted)
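The speedup(%) values in the deleted report correspond to throughput relative to the whisper_baseline (PyTorch) row: for whisper_auto_opt(O4), 958.0 / 627.0 - 1 is about 0.5279, i.e. the reported 52.79 % forward speedup. The snippet below is a minimal sketch of that relationship only; it is not code from this repository, it assumes pandas is installed, and the file path is hypothetical.

import pandas as pd

# Recompute the speedup(%) columns of an inference_report.csv like the one deleted
# above, relative to the PyTorch baseline row. The path is a placeholder.
report = pd.read_csv("inference_report.csv")
baseline = report[report["experiment_name"] == "whisper_baseline"].iloc[0]

for section, unit in [("forward", "samples/s"), ("generate", "tokens/s")]:
    throughput = report[f"{section}.throughput({unit})"]
    report[f"{section}.speedup(%)"] = (throughput / baseline[f"{section}.throughput({unit})"] - 1) * 100

print(report[["experiment_name", "forward.speedup(%)", "generate.speedup(%)"]])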
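For reference, bar charts comparable to the deleted forward_throughput.png and generate_throughput.png can be rebuilt from the corresponding inference_report.csv. The sketch below is illustrative only; it is not the plotting code that produced the deleted artifacts, it assumes pandas and matplotlib are installed, and the CSV path is hypothetical.

import pandas as pd
import matplotlib.pyplot as plt

# One bar chart per throughput metric, mirroring the names of the deleted plot files.
report = pd.read_csv("inference_report.csv")

for metric, fname in [
    ("forward.throughput(samples/s)", "forward_throughput.png"),
    ("generate.throughput(tokens/s)", "generate_throughput.png"),
]:
    fig, ax = plt.subplots(figsize=(8, 4))
    ax.bar(report["experiment_name"], report[metric])
    ax.set_ylabel(metric)
    ax.tick_params(axis="x", rotation=30)
    fig.tight_layout()
    fig.savefig(fname)
    plt.close(fig)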
    ux8!-$?apFaL=PL zEB2qpa6@@IHTcOoD7S!0K9cAj!&VGVS32|p>qr;q;uCyAoKwnKreal_B^GVhu5Es0 z;wU0E5G#R^SOl7kJ5VI zpu+RWzjo=;9Q>oK+HlmQ8(6qSC_gjtK+a=m`J<Ur^M@)p?Mq}ctD7DU@mkZ5L2`?!1WUHpVDnZFK?ZDdULP)b`Y9(SbAk{3vz#*< z1KqhA^Enm&_697i{#xyUc7Cjjc6v0=>bEzVFR#2{h_!zM6rLpM9{V=2WpHnpd&>f#MfHrp#CVb*xuR-;pz&k z8%CJS!9A3w$_8h#!q8Lm(6$tskLu|zUM-o@-`t{i}F94=l z!Ytrz`Kaa#XG8p0+irgyL+Af>66&Z05OsZi`^!Ey8Plv;pO| z75h~szMbF94-mO%U0GR~2)85qu?LFU!otI?l9Yho5Y+s@FLk8`+97_phAxSNQ68P> z(KoDrmiB&t{5%klJMLH^KWhbmV~QEf7WY_kx1k|vsHoxntLFyxf9A5jlZe;?C_;2^K zz-7}IHghgJ#fK+DE$*ae4Nw}t($W#^X)9SZ$%6 z@@I{R7%5g~T3cHws!;J9BkuDaPlyU=>Lu1{Tj7>^^0dqJn;jB&Uo~E~D~fKIyFU1N zV9!o#l~J`xxOG%Qb3Ka4m7^}-@cJCwvHa|t@saz@fTBUlhT;^*(mn z5A!&HNlFHuwYqNBS0T_v&?Zb)z^>9UtGpf5+IJae?-n7w^W^+2t*{q3JC6%GV2oYK@!Tk9TB zwDf1ua7A}MGRgteM(3e3Tfpv`Nmr0SY|zINh2m3@Xcq(Kv*mzpBvEc~E0fgQx7D}? z*epipq$41a&;}}=jt?C@2TcC0d2WmGxFzh(YA&U`1X>gk(`_&Zm2@GO8=s?9IBIu% zL-^IJ4*)aheol+$%rU4LPV^Kbt}UT5hbmxvA4_gpr$kxi`l_fcGc;Z>IkpMT@ZJ_@*;$iV2%T2kCm%8&hO z113~Bmhkq~@EIeI=1l!Qg&j}`Iynehea;}Edmy#NAO>49G{74;g}__nv-s~myU4NuD058%d{&_eYHCY1@#fPP1}cG=0be`RS6fgc6x@r z8FBsAyDqe4Bb{Ipr~XTh`9I&}n*ML}$NXnKv0ne#OX8FGpKZfs`0qBu-Jy%Y!4<}C zpeziR@&h;?ATb9>dvBftK6z+uQ2=@o7z-6ZdKzu}SsE_=JG4_TK0%^`g&$2GY-a?i zMBq88GY^UYm`NdOOpaEgOL%c^6T~bCg&p^_HP#<^q#-eH?^{3Cpdq=DE0# z%g=#3*~!WnD0Cno$_RjolLBq?QfL%kmc7!lRD6^PodzMma2yK9 zRwL9}PnPaP+dpSAk)DJJUi>jL;9sp~wBbrapk#1`3Lxoc$ZUAY(b^ox~D( z0(99V{=V@rFN{N}IQ$ig+3Mfn6#yWDZ`M%|1~TQ*5?5{R`k4Z`l`NkCnM_KU~L z_y2=cI-7sURKs+#4#k}v`dh0wV~xbk(ngdC^os?dM4<>mZjEIP(L!v&3zq(X4YFW!QyU+873UOY} z2nfza6`+=MSFBjUW05w$O17&5reTecbKWh5-d6}~> zL3V?HkO!pg2PRyNHxP&sC;U89S;c`MV*^Y7{okA3iWHQZd_i`OsR`D?TD;9yT*N)A=7!g@gl-0@Q$-^a~Kbf-T>Op3)c zz)5Dx|6hCG9+q>yzWb&jYB1vvLm0fyDKtrmjPvVIDLN;IN`<7WJ2;5DUBQ|I;n}2Ly_9|{jJve?Y)23wg1`IzV;t$UBBzMX01iN@Av!pJkR~y z&;8ubv~BO<9wS2u;RaRKMCs>Z5TOld8`0B~Wh1#%>R8Ag2Z(_!y*UX)ews6L0bUwA z-s2GkgDq8qH1PH8A}{E)8qmKR$z24r1OVEdNN-qB#K#eHJdRu)Y~~f=Da!DPdMvk% z0v|-zLbH*T7hR#juE55k=Ij!_hOnucj`?gLH9y%wUYJd21Ok^MY3pzUq+;?0^q&C@ zr7q#Xvp90Hj)sJSJzO2XSQ@wR(sMkF(@>C2)YR07B{&b`CrlXNn6fk3KqfD0H4X{M z{Ki!Q)H9>sHXdw=*Xr$5`hWr6Ge1)89Q4met}Sl~ti*w(|E- z0^ri)=mR;@;+p<&n_jdb23DXt7M22oL?_S45_Jj$a+#Am87-ljufRNMZaU=S^hFmY z_7GlaL+bb16eR_lYr-5Tu1*yM=6P(>Ap@ut?BVyvrMC12H9tm}LxDJt1X2=W;A6-y z=QRzMQ(lKd)Gtm)tIJK)pCkxz=YqlE>u~V%;5T-~Q3q3O6%KAtBE@g7M3A{pjh$HV zLP6ERSwRL8X*s$s225t)y}fF`Vj1Pn5YjjG(4+j&&~!#Zod;^T(C51`W^?~(LPBQD zjf$Y@t*H-N{qG!-oeOAPmyy{rtLpwot$T58ZARNc^vM=ur}vu)ap+}Wb7)dyz~M%_ zdGjm^;zC0#uP-Xv7vR(~GtW6~dAHPW`W~>Tn{p|BN)CE)#-=tS52~k$^Q^oiWr;qr z6Gw(`Y8xr9^)lS0bxL7jv-`Yx*KsnVA3E$imG;EKzp%~p*|)l&V&^XNYKmX>e&W&B z+cod-Pe=AAH8y;-l%;=cd^z=1RBFqo0KcNf^)EMnUHNCBshM83mgc+Dt$F2-tJ0Es zwtc8QT(!^iovcVBebM7Pi|rcSy@NNJ{GER1WAEn|42Ev?Sm@DGmm8$H{>e61$ zKC@dt&b5o(@yh4WV7=zs-+Qfc(SPIWUe*#-)6FrZAo=NVe@mozOFCG+yO5joxWXJScaby| zc~K(8k)wBpP&8t))E>Fcgv4H_g`Jk&c1=CGS(>W zW39!rrD5H7YD13J{Lf+d z&tdp~b{IZTm1d502l;6G5~30wtxu8Q74TmLyUzA{NH1%_um`#jZY|^{+$CuKQaoDh zJgEIoiu4ljN8@1Gy9EocDSqKz!sn>nS-QFcv=@1F>Y!dhYkyDa_>oR?%r|ORla~}}HEP8S7 z`h(br2;mRh9A@A8E(Qd>LWs>(xi6pHI}o+G!&k)#(S}Mfa^45*eRQI(2aD<(lOoJF z-Vl|pEBvRQd+)I$WCglmi7jQx#m;gWd9^JmA3u8^fSt9D%zIX7l7svQmSYs$QxduM zD@uH}ZT$}EVLK%U5c{t*U9p1fImJ%UHF!6C%Jc%d$iXlE6V4seFxnJTS1D>KFPIT_ zBEoalm1UoO*PMGRmo-;*?)q|5(<(G4b3vti-JPpmEV>Cv4@DXD6%PkGl%(~@3Pm%B z!|6&xO3=Edr@)=-P`b2zKhbYf5q3A)Oq;cgKNO{4*8CUp`wu27^y`a9RX=C`={(b` z7{d_G0uTfZ!R+y3^C0ay20ok%P%hPV&H~qKOsEmY|V3?8BNY> zFIf5VU*7ifhCtqquK(8< zh&?g#(6kZIlIj*2SzE`v?y2g5!|->+_F0qZkIB)F3Sg@H;3$GLi>x%b<>JgIIaTLg zfMdd(8T@dDVT%Oh8*Fr;N_84`+03Y9ta9swy%yIYNTGu`d$aMV9IM#p0@F}7ZpW%C 
z;TPPSEdS0_do#>~RhcWHNYDu<)?%eUmrW|gS z$<{T6ZfY5?&}3VLPnOvdtkl1M|C*tf&Awi@8wXGyk0xFPWTQ(lenz=>6-riBmEML$ zvbgPtH@GYh(K0M&H9!q78Zq)aW~{n0Ilb})Car$LU_ZyCTfV*X$pa^Jj72Z>!f_SK znP*4M+H2tgF5o<_I*kRTo19f?0wT%B^gI~`e8-VMJ@s$2GCr>J<>hsNEGG``N6h6kGp@IsJ{}E4;F0DHXtMqha-JnU+rve=(&PuGMGu z+9R}bl3H3_E9YL)GDr)A2OH9Y<00H6$|1ZIwm48kK!*a-GT=choF}$yeS(+z5ba(X^K!fMRIFHW z9;Xgh<_#z?-U(f3z1fd%0GT?2rxAT@lda>g`Lwk*V~(s5Yb_E2Eqm2}0}rggwH#`g z6yQE%&>ssikQ>k0c~qSenDB8+5toMhxgnaWeZ_xr1O6!II2!uw!Q9$FfhLed9ITgP zU92RMeF!0DLgN^MD#}b*u2X+b8j8k&@u+H*=TEo>&qIm`&_n7lIJ0EN&W2DKP zK@>mh=ABQ-JP9u6R=2uxUFTvtV_LnCuSwOsCA3`+cG7`^R$5hvqZJm~^9p!0*Wtg{D!KF8fZy zW}uv=ONcR2^wFpIe0dy7>`6YH=Qsh=O!kz{F-X9mvmDH7%#{rz3+zC}i_A_O9>tyE6txESlfS0**AHWH0Ak8-xk)e_nllN|k9+Q}ZzKdCMr;E>ZU^=4gp9lZexmhd1<$ew+B1rh(8X z7CX$$_#D5=229O1Ix;Kvo~;InDs|iOxaW2_ZTIO1p!vjXq>Xp8o#01@_}sac9n#~X z%DbW_eMBn26Qk-{WR}J3I#8I5wo(3Ik(JS0vT5ZCiadbY2Uha%}{@| zOEV5B3Kx28!DTE<9syYyB`lEqF^yradosOt>2Z~*$(!ALJ^8p8F$A*$63CJ`aX_^V zg;D`xMmwIPmNy1Ii*Uh{6$*Y*>L!8lX0~*(nt7AfTMEMj=?*wth9Ge^s;cyRcB|cpMNMF9p$+$FwG`<5DrrKM#eir{1 zv_gJ?fa(*yC-`VN_6md2-_F!xR!e#4FT0X+Z%k_ux-gu4C$XQ9zi5IT+l`Q-1$Z?c zt-;SY%02_#VQ{0E>ONrOHgG6QFvX(HbNWXpwI);|et7eDoY}(=MLUBAS@#Eo*l2Ct zN2IfUtpkK=+HxcLxR=Ik$JU;Y^YimF3+j-#7te&_5yzAKwq1NH=YDI~>PM9BVTj%i zGqEzr^E6N|ZK-`Sk>4M~456G)r>PIGBxk$Gjv_EAt?wdV(6#MZ`sS*l+s)_H7C`4j z4}8H<5%yOAkp1n~$v2&^#Yh=7wc--pN!k!w)3}fd_eMCDD<864?bqKGEx>iayZ52D z4WJZB$$IhD+*eSb=EcP3$K*cC!(s;h=RMO^7dwrmIgHA^82J4!a zEooMW02~y+C(+=JWNtC$kP|NWCD~z7eTXbHS3|<~DEMJbYM>G@v2g(kdtrkla<>&k z#WV}1RL`CwX^s=%l2dh!;lcT9y13u@+^pr?p!Y3}{$!g{-_Jku*3+<0(|HKZc-Iuq zJ*dKL0mBno6FE4pf!wQxs%N~4p5ELNhjRxp4YIYj;uSLuzv-%Lkejw}L7)`^2Kn99 zUs2_qM!#H@O*{r%x>FK44AOEw_1D#`SuZ|VJH3QzjY?j?6*>q~{e`zmujug9)aM`c zXN5HEs97oHQ2{nLpf~R97YUIQ}9LBebl?%S+l?f1?Ue z7;t&Z2`>z+m<1llI(gS;^^e;DSA>x-be;%yb^WfP?^A`am-7yG2<-PUtq#{USW?oX zU}>AF>>1?!4cxheMOf+wGIZhk(f~X97?-uTdagi#ndz=BA$Fu?M%=Q`&keiQoTNUQ z3TB$Fti65}F&}rTO0@ZeO)MPGf3Gra631xDTDWT>JUsl)l6jXCe+;`lf}J(7 zfI|_*xe5|wzGm7ifXADZlj4)mbZ(jD(ymR9($>qKp~0mACq8;LN~k)d5jXuPqNBI0 zm5b2uc6ecl)|e}*D>R57-~r?oqYuu>ADZ?f1C>mH=U-+TeSe@{piLxcf`T;@^hVNN z_C)vG5#irwXf5BFyz17El}gDNU33LNqbm|y%+N3UXQoj~%Gw18xYbRw@2VqaM|aHl zwU41G3r9YEULbZ#=pq24?mv-G^<^o>e4;0CU5~Xj!&6#XTHVz{q%pwG(= zLNRVUnSvMK(bDamAK%`4Yb&h?nG%5APhb{ScTC6@BVJW8$JgR%f=1h2MoF3x+|hh6 z<-3CZ{P4}!&G&FHr3KTcv>_1cYCt(|A|>z=1{3H^>93YN7WJV+c-(?3llK1HGK)zN z@r~JS<|x|XRXP0@2($A$oQ1yj&Y*M# zPG{(KzQ<-RF65%M9Z#ZA;!*4YW;2JcgA^SGPxSGCoUVKn=^aP|&!ik}%9zLH!<=2_ zdP{1CDVZHNaiWZC&%_MJpGth%R1+Rd@pF7O;2-tblk0o-)|-W*s7z@scll8*iYFFx z*@xHR00Z5^ZX)}gnE{7L3tw0+N2xc&5a75ou<|xSK9T1aLOsvG<&h>bbeCQBZ zE#aBw&>#o-cZ`Hm6r|+yh$)js^axXXSR}t+wkzTLZ+sI{4pY-{1!pCQr36~%5)5bL z<4e<(klGoH%$YvEZhUo7)VB!=X~(;UK8XVQ1AQPYRD}wI(@eD39b{0?Nfa`TD3`4h z@3b_Eyx(+wV%AP>P>NIl#Hba-FtMRB`3-uF*bOL={npZVMA%=>-Z|#x&s=`deMy+w zBYfUMc%`{wQD3>bPxIob=r?nXG{-zC99O~KK8cj|Dxh~RNks(j4QGP(hn;8|(95eg z<=vb$6La28(fB0M1Emu5&as0iSTk)o^56O`@s}=La*yAs^D9UplAFT5Jz!m$iz62(b&$1z6fLF(_!)&=nV1daK(bKUAnpm9 zdr|z!c+0?diYITJo+U%E+zSdZ?w+BNg#scLW_(06$ziA$XXCn^B9VwUlgp6^YS8h^ zjSjAF%9N|=qS;P@M&su+Lh)P)ShN%fom?GfaKR?zaPEhOgjC!^j2*)T(A=kkN@G;n zkA@fd1uCD}cdsc)_u=dz<1`;KCqY!)9!zfEZOQGU9PCpbI%rDH4_W%Yy-IFl5e-M* z0JmNW^B}$d?Sd16t4;_xpJ z(ZTg85cf-uADhCF6(;7RP>?|Htt)M4aD6;L-d&2T_`UM?X~XbIqtRY4$2X;=3>^(= zTmapc#fj@XD4@e#q|6Gj($lPas~+|OE&$`#OwSh{NE}#wX=hZGiUAat;syl>`?D$P zB3H_ijH*0Ho>HC^_p09!*}<{900&iJ!16weOcEk&M9|$yOn#3P3!*+(p&|zdX}j5T zHvqUgsv9}|j7SEfACsKW?@GV&q~F?=V8`_a#M&fnxNZ}=NFh8hw;^pm00AOY!1yh> zP48cGjt0E&g9C%>+j=}K^Lbx#VI0{MMoi9_k!PWxGV?vYbh}?Pf()At2plbLQwNtS 
zEbGWnw$R^@tKody(ZE1^|B2V{5tfsKGl!)&eRKssGbRqsJR{XuI~+}l)ZLjp*QWPa z6V9{I9f}ni^_fcU6D6FMBx^<1kaM0$MU{ZaQ*(Z3Q2^I5Kxw9^w*B0U|k*vqUOzOaLuL0P{=p0bUM~v9ZFs2n@t<`bpgQCtf)3~7>y?ZijYO<-8AAm z^t<5IB|@nqL|)m7H~biDEs&N{FV}Ie6Vy#7Sf}%VAy5YMnWO9Qfb=5-e^XJe3gs#C z(7mzZuv{LYr-E>&6?je~GLcT}wb~34#=~k|T=ozft18Kk^k?C9n@pBCPVWQ! zl%Rx?9(w9Lon_t8v{7D1+V#g1wAE)VCx~uaFFxUX9bS?b$Hp=HR7J6vQ9>>PlJ)sG z1=U<+0Hv~KE>M7iO(>ypb6tTkI6aO!YbhJBoGPL-JL?`YkyKSqyfL%}E3(RigO%DM zQP^sO8BHiKT6`YER%A_;LaHH(4F~v#hV5HYuwwnW&L|jP>JX04!yw8%dUSgrCiZiD z8qn6`5qG6yv^RWeaL@YPylD2VyQN&9z+t)$OOIbsLxoZ{^#<98oUFLyfes4rbdpch zdE&<_?)bGd9hL9AO}B!TxC|J1o31-tzbcVZ{RDoWGlH&cX&`kcVYpiZ%meLfPcP|O z)2|~-E}8#ng3r-H!x&Y)DA^IK#^?|3rh!KYcH)~1X5A*7w?mngCX zBX5{3aLWP1-5bm-5>iyayJbCjR#u{vX2Kbc$#H;_E%Nb3tzq8pzpL(dt$BUss1n^E z9ZZl?Z;gI~uqp)AiN+kv!iW~}^z0k;sA%`Q+iOSTdxZQ$V-UtYQjvhF-1Px9em0R9+fyv2a3)Ei z+CnDBHq)QL*m%_1%tjM^6n)>#@4#IC7>`oD@>d_)7z>C3^j#kEhh_J?09!_KbPZ3k zGWBU|<&Jl{f78ux(ufx<pJ3LDPupaIu&D{RzXO_8j)b4h1Qt^VwQM8rU5rEaz6p{w8K;uAltA zes||BFoc>NfK3(O)QZ*#;*x@NVM=`pq^=~BB*H>_V2B({=X1~Wc?4BfH|t~SM0$I0 zv{Z+BHE`#*2v>cb1mLo)=72sj?fk`%M{yYEsCjhHthUQ>s97&qh7WYBW+K~{5 zMoH3eCN>Ch(RT({$m1VM<;Pok(krVn2s-4n^2( zvy&%#3VO{I ze>Kp3wr{e2`^8XoRz9_dgXy_&(@uc;ozacH-SlUw!a!{<%L4)sHij`6&r%Rf`2;_g z9;-HoAuSvtocey?q@Mw^Hc{UXd{>DNK^h;uD^l# zz1m7R9za4IC(OnK+#rS+Iba|}=A4|EzFkyJ+F?w_D~Au&kF;MO&=8_9^w_x!FCcuz zb9V`jav<`OJrD#A_JMYZgFjpt1wpK5sWmbb4~IE|7|8AcpYAb*V(i z2DL&$O9v)0nbxajN5AFY8)GLc^V>#@^8d{T-HYGAg+?&5Rq`nm#_R-jrkfJq4nAM$cc_!L7RO5g9(|MDNrgR|Da_^oF8u<7Y; zGtv`6!t2x1w)!T|>IkF(n=X06ML64$|O{GtKD(ShUq9 zFHmyERrt(qU2Ak*x2EBp<-hsm%9{v@BuG&6Fm{X<*mPR3(Ljy?m7p0H<^kkSw0R%w z*`q*PAdfa_);$weNWtJa7PdaLed8;T{$s!gUC!?c+fnk)w zUZx$H|HOkhYg3yY?%jH+oB%(d$$Lq;#B+dZLJ?^>tUeaODurt#*ZnVfcYc_>JM=V$ zODgpyfgOmW8xYvp;x^CRPQ4NW?AMj&igvzR;G4TtJ*<^$UO4M;?UT8vRlV~=-$Y2_r=!0EN+Vs;3&bh{`9^+1 zm>pw#j>b;1pVpl^LI8tqVMIlsyq6YFgSm_HK{g#B3tNn2MxfvJF|})vcBrmre7%bI zY&>BlC4F~@twI1t#B~2#@AXq1>L-$ij3305s4n~($y73!@GTI?g|^KRc`fMtwCrEt zW5i4z;fQccg2AoW@gje54$yj#m;jl*x>R&k*#%l!(yO2cKH`-in-sO=>a%R2FlQy; zI<>`BsV4rFg#;9e4wgLzfb={q&zSXi7}>}?0wW)|mB2rNpGFdoI;)bSd2}yRN$2Jk zj-nv|toZvADqSx1ybsE+yvK^t0ZTisd4w2HZGp~$4LA1ftDMyOg-C0<4tu>@ z#~#!U9|zr7I0C_$9FVz5(Kyr+>Bj3%Mi7Bc^t;4U0KyYuv;ChW8!i>kr+A>)V(wKs1g@#JV z$%213E#Kdt{{YNo%Tf;DnY;+;JyfoXk;o_d^zm&AZZF15eN+b?FaQ>(J_YRTF*Znn zFv1`%5~8a4W%b{9oN3aFkQm6{MpFxN4g{QtNP_AN6~ja#lY{G@-BX0ug!4dKqs+~1Pb0C(d5dA*M&k3<4H<7j;HS^=%MsdZ02(E0{-3wKwcK4sdw zZL%CoMosQjCUW1?LH~?K(c)L1iXp|ZtiE7|U1yYC=xqA(K8 zRhAiCu6wYK)_FAWVk|_X+3H|pG%DtKok5-Ng==M9asBPd0D@vY#c+Ut)ciQK^(b_5 zH(dVl*;9pHNH)8CkVq7oG|HuELFKvm#;UC|(OzYtjUYri<<6lY!L&ikQB7DvpDM7s ztmqYnY%wHB1jM!|X1C1`hc6pmgb+;)U2lDQS>jqM>pUa;XV4?lMuL>qTeRNwYxH_A zazE1kV9id+dfx)tV=EX{^-TU~l4xSC@Qg?8Qb|W4lAI{l;x4~@x2vp~MF_DKdn_?K zlzoTI2C{xWaaztpbedR?BBrbIx09kKL&*EUWmKpOE+A(F&D^n#B2Yw^Jo!2GN+V`f z4XAzg#A`5|P?1Zd5HRvapaiiwlW-HC;R39Xs2?I@R5(9C&U;cP*`lm+5Es+hEax}y ztC}@s)I72#?dVCYSX+}z?uNQ&__W)XY1jJ;alPw$N)^eheczs`25()MODO6`TCt-#B6u-gT22*)5h1mrP=$ViX2;?8H-HhQlf(XK+JR zT*pD72*=<73`&ZZp=sDb6K1jW1U7LOo^*k+0Re12RZXBnX$(th1=ljsrG-PZtUbu^ zA_GdrY-e*o`8lBDg^dc{?}72xXr!;40S_!T#L#~ZrK_DpibD7zZX?$?((+BPVn*?9 z^17vYeYkwO$`d^Qb+`0CQO`tn6_P=!7pV`<{NO>E5*-uaznVO$>q}5h>?*t-BucmK zJojY$NUPIIfI2NceW+NDGwe&^8N5)EvFf5coC_Y);b8R`r6k#Cmk>PKNpwI4?O87TR0hH2Sl!J&h?Rn)A%3wdvJ)hH4|Iml3h5NGXOVcIe<_q7Bz953p2CHrG#);E zlw-5rhaDd$3HTW}!;Fs6@IHp?fOY&+02W@L@TXdyw}2gB1Oxrm^AEDQM%W_XTi0;$ z#BOQ54=2|9{{mlqJmMKt2N1L}yO10N2;rjWix^j@1p?B!)ob054gZx})>@8+BI{e; zC@8zMt4_Ib^}>0)W_sX3ze$*ez$T{48)q93q0JQfMm!cOL0n>%V(_r2K-$v>4`K{~ z9IBhC2q{i<{G{#=M>`iC8~R=pxaRD9cR&5Frg=^XOXOQ92p&2Az(T-e?v~@uYGG9( 
zKDjUu0-^8n|NUdzAlve9GNW_SUYJlvTVx85bM)m*MZecTXjOw&C%I4BnY;vH#Y#48 z>CE)F&r}D%S$qMFHpzp5iQ*o+6X?>H!<#T0g2^B@s_7m8+vStE^=Iz5``V~A`o8zS z3f292^xoJc6YOQN3Y;gL%W3FYOtu5gyC;gBN26$MhhxgW;iPdZ9Fwmg{pI8gt|x0& z0XG6432vb{Ho{&{1WMIY$Gby2vXmcu;=pYs)a>hSbs_P_sdI*Op|!qu=r#u?a@uK= zJ|jyrdF>s60gH(E;q;s#rJL=&k5Ds$9y>=4*ip>K!KKeBmpnnbmJg44^gzyB5AJb! zVMntn%Nlwh%Blpq8(4rk8eCjE5}b6W0~V;Dn$F_-h9LGP&;XuzO2M#*2RN9a3-)?> zIOieaAdoavs!_Iwr@5NB2*F!WVX);%MGmZ#AD{zD{$42c$zA6t4-D+L!+(nWDg^8NU#iw{w?cklhg^?^862!HtQakC)ItmD*+s zZ@Uh#`jei2f6nP8k&>t*Xdy3Rhnbp~n8RJXdXFk>tWww=`slOy>(}&n$Mp&Tw-}Dq z^0YnST_KxkG4kIr(99b8GHUTj8zWU0{*B7|VikFH+^z(?k-}*Pe|%RkiZxs^#x6Lx z&2ov#C2l{ZGlMolH_e$@_XwH>^f&EM)^H3>N?zI%bOD{5)e3rl=N|NKl} zY8?9yJ~KSL_j;lF>*&A2AHR=5rTo7jls&n6wnf;Vgybpa$FnqKq*n^v6Dwg)(PUFQ zGDDqkDVqEX!Y@4%a@iHeA?klUFV2sWdk8w2r@`c>+_^m;mH1LTaHsQi8J{m+%*=E4 z9+QHqkW?;(>5wyf*IsE%XKFWk9l0ElEY`52Dg5f${8hb>fLV!vPhnXN4Q1R9XVuZe zrY9|70#X09N4CDhaIMBFw`m*S0mmGI^sG*JPm%&rnyzN%Xf2bX=IA;105yV+sla^3 z5^CfQ+e{oc?nm6LwWn3Qrz+`tOaj#6Zk0FBlNx>MY}L2EY*CMbthp_G<9BAuJGmRm z0Kc1quPDRySNckcFoz8Z87VJ8uV;)zP_S|zAUU@uiDwiI2N2cULLcB)tchRag`+^J zB)emrWTV?C9W8}{2Pr26$Xqjfg14UtY|gufG?3OP9gv~!Kkj++Q)>t+s8MC>8FBiP z&$lk-_4`_pb!W0$1vfuec1t_pGSS$eH=vPNo-ne}y_(5HfOgrKBsoEC7ILIMkS0tu z_-7!iGygguB7qs=DKQ?=J#vghS`Iog4>I7Bv{GU_m1<}Dv2ztSCwB-wQM~0*oQODcm z+95EZtL`S43!|o`u?8DY7vTFt;|v-Bk&hINy&+o>oN3Wqd<-k=Ow~D=ZV_J(%&Hpm zaic+@!GB8wbe;Z`8=3?}OeqJd{JeZ;AmWaa?-KA7?O<|<0MUdHOp(}L5mYCZ*MXdN z#M2sR%jri8XIY;m=YZ49T_k)eI0K3w>&UiAVb9ohk7g3&r;xlC2T$Edzn60K^7X;R zeSE)ava!jto~zzpTQ%k@SF6!4iRObW9;iqrN&^yFhzfyXi$PUte`n7mvcj9FwUtkI z=!A3E93PK{3HcU+5=tA8If+FD14;}o2?ncxc2#iikJ@~C@TFPSAbR@+AS6=TxTl_wuYYwr69U3%?Ui~UDd*1zFvjiQ-0Xk0!)~ONl zLZ?+=f}jHAP_Op8R0>)GVT=M5tN7o%0U0MkdH9V+PMz5;A z#kecC^imx|qav{9(?J24W1mliQagOE>-6BQ_PH`AlghQOakm3h8 z{<2HU4!0Z3RLMLrr7%p;`U9hT<0fc~>tgu%U$hIN1pfvdW?1KSW_fy2qtq{0ei|qn zA98o|$h!fy(6KR62Z+)0aU_y}if9`BonR?MaTYrm^kzD^zPk3-tDNpzy!)VTa)k62 zL5gj24G4E#xFVJkaV9ljDpznO6=2?%5bMI}pYu7?*VkQ;_uABcuvcyVY<+hSsVYi| zs6iq6<~QST3z0N37Fxaf`6xebwbZu&&@F*ru>pFwQ4%(cs3Q{8sK6B!(9J;D`k@ZT zAa--l9W%_lJXQakcNVwcvi`ybnQRXj420D4oP9yt!VUVO3eamyq}+^wNsXe}wIe-^ zFnrOTYUl6Kt*$)}vX0Rc-viSBs<>_b7{2<+* zS2^~?i9p8-e*~snSfyKC>Ew6e2aDtrvC=TL;Sb}h&2L-D?A{OW+TXtN^Mbq0larpE z^S%A~o6cT6`^wgrFWvO3PF{NV@n+oM`T3J7HJ=~+P%=%0Dy})jsh$ZB>=RON-n5$% zwA{wV;DLPU)O#OA1CPn<$z!^@xm{bAH+1P~UGuWCGR5A#i!e#C1zRzD4xBU#yC92j z1}6At~)RdG7%+O=zg zv|Yp1R;EUv;5nV&rPSmPPtk!ri3%AJupSntyod$tibTVyMCVg*))DskxyhY4@e}6a6Y0~R;zLO1S-R;?T*swTM6$|j9kHy8s zp=vyT@7_XGN=twI@khn)zsJ7&n1bsYEv-|xF5?d{Q*9{~xh(!mS2TalbNPq)nz{-J zXV1=t5B~{BaVN4zSMoI(xS;v#SFJA=a;5V}7Rj#?XJ==-PE&z`Ao(AU6x_Ix0omc8 z+KdNJ-S7}vj%yxu7Lt6N?I?u8OF^dJhebXzRnW7z9sC{Jq!`L`a*9GpGy%Q#apT7? 
diff --git a/examples/whisper/reports/cuda_64_100/generate_throughput.png b/examples/whisper/reports/cuda_64_100/generate_throughput.png
deleted file mode 100644
index 48213d3912e76053abd00c19a4039c070f0d030c..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001
zkDZ@u|2nI48tG&xfP+4Nd>zq4RxT8TB0j-uy46uscI`CE*8Ct`rCasExg9g`G>?_M zdv_%(OLy>~K@-4XsHmtE+;RwcI88ZIc=s>B=5;Ot$EH5@pm|<{6vj$UQy09Gm3t4X z5U$pL@mH7FOLIHEOUM1&R8Icm#{M18RQN)X_*vK7RQ_3WVBi0T|54*7ut02Va_BF( zUb+nj;uk(TeFFZa=i%a`NkBA6-9F)joH%1f>@AOQ8?lzwn%$3sw##g{PU>gNh=yMP)%;)0tlI0?~+ zVJX;5l$uN=g9Z+~jPjQa#BX(9#94Gq?>_a|=($TZ2*zJKrD&%pPoSUnvC35}MEiwh44ASYydK=lDYE?cVe zBKs9$u^UnI=!nh`Zr}j}20)>{E8!!m8p^-?)d%2*SMD`uPgH9?6iMciLBocDj#=+2 zjP*%wSR&oAcH|Vq;zg*jtzNX~2*kA^l=5Mvt^o~)9P<6+NaNTg!gqTenRkI5>C-eKznD@PkUKtG`-2j*c4Ne{L#Ia)+f;@>!5`0XHoM-&4JSkG`x{$mDEAO z(eTpf5hJ#4_tjnl#`@f)ONt1`P&K6jzWwx5a`C0+W@mtymln4Z6RS{vyjNv3MdL3U z<)<%SnxaT^zV)v5jCZhKnyl4~(kT*RCBi^MvuadlKkqouUikUTmzx%yo3txrq^G@p zZ4OeZ{eC7Y{(4$8UP2Or1f=as^nv0BJqXCu_v$XW@~k*E)w=+i4SSmh?cZbf)E2Ct zs(NQnW5^CsTDxuAwll6bp-xA` z;c}djIB2p^Pme$FbvJgIx!%GSkWDYJPZ1p^`hddky|fRn$=*7xBMkdr1R_Y^Fxx?^ zDnB*^J&Iwdm9CjRd$#1BY2AM4jlrtCzfz@fvh|)s0EFwweG?Fe7IT z1-tbL0Mz~a_njd=^lrJ0rN5XW3>2$XET4Rz`{G44)coB1BdwoV=p!y)nWcYB+}4Xw(K`@RyjF2Wus(-zl$8DozM}2g+Kn(`C^4X zh?Z^aO}?k=mv~x7{ObI(mU7n+SpLPSMgPD0M{%o$+5KlD6MV;A<<)B~Ub!f0!TLl0 E2a^{6%>V!Z diff --git a/examples/whisper/reports/cuda_64_100/inference_report.csv b/examples/whisper/reports/cuda_64_100/inference_report.csv deleted file mode 100644 index 235f0e4c..00000000 --- a/examples/whisper/reports/cuda_64_100/inference_report.csv +++ /dev/null @@ -1,7 +0,0 @@ -experiment_name,backend.name,backend.version,backend._target_,backend.inter_op_num_threads,backend.intra_op_num_threads,backend.initial_isolation_check,backend.continous_isolation_check,backend.delete_cache,backend.export,backend.no_weights,backend.use_merged,backend.use_cache,backend.torch_dtype,backend.provider,backend.device_id,backend.use_io_binding,backend.enable_profiling,backend.optimization,backend.optimization_config.optimization_level,backend.optimization_config.optimize_for_gpu,backend.optimization_config.fp16,backend.optimization_config.enable_transformers_specific_optimizations,backend.optimization_config.enable_gelu_approximation,backend.optimization_config.disable_gelu_fusion,backend.optimization_config.disable_layer_norm_fusion,backend.optimization_config.disable_attention_fusion,backend.optimization_config.disable_skip_layer_norm_fusion,backend.optimization_config.disable_bias_skip_layer_norm_fusion,backend.optimization_config.disable_bias_gelu_fusion,backend.optimization_config.use_mask_index,backend.optimization_config.no_attention_mask,backend.optimization_config.disable_embed_layer_norm_fusion,backend.optimization_config.disable_shape_inference,backend.optimization_config.use_multi_head_attention,backend.optimization_config.enable_gemm_fast_gelu_fusion,backend.optimization_config.use_raw_attention_mask,backend.optimization_config.disable_group_norm_fusion,backend.optimization_config.disable_packed_kv,backend.auto_optimization,backend.auto_optimization_config.for_gpu,backend.quantization,backend.quantization_config.is_static,backend.quantization_config.format,backend.quantization_config.mode,backend.quantization_config.activations_dtype,backend.quantization_config.activations_symmetric,backend.quantization_config.weights_dtype,backend.quantization_config.weights_symmetric,backend.quantization_config.per_channel,backend.quantization_config.reduce_range,backend.quantization_config.operators_to_quantize,backend.auto_quantization,backend.auto_quantization_config.is_static,backend.calibration,backend.calibration_config.dataset_name,backend.calibration_config.num_samples,backend.calibration_config.dataset_config_name,backend.calibration_config.d
ataset_split,backend.calibration_config.preprocess_batch,backend.calibration_config.preprocess_class,backend.use_ortmodel,benchmark.name,benchmark._target_,benchmark.seed,benchmark.memory,benchmark.warmup_runs,benchmark.benchmark_duration,benchmark.input_shapes.batch_size,benchmark.input_shapes.sequence_length,benchmark.input_shapes.num_choices,benchmark.input_shapes.width,benchmark.input_shapes.height,benchmark.input_shapes.num_channels,benchmark.input_shapes.point_batch_size,benchmark.input_shapes.nb_points_per_image,benchmark.input_shapes.feature_size,benchmark.input_shapes.nb_max_frames,benchmark.input_shapes.audio_sequence_length,benchmark.new_tokens,model,device,task,hub_kwargs.revision,hub_kwargs.cache_dir,hub_kwargs.force_download,hub_kwargs.local_files_only,hub_kwargs.use_auth_token,environment.optimum_version,environment.transformers_version,environment.accelerate_version,environment.diffusers_version,environment.python_version,environment.system,environment.cpu,environment.cpu_count,environment.cpu_ram_mb,Unnamed: 0,forward.latency(s),forward.throughput(samples/s),generate.latency(s),generate.throughput(tokens/s),backend.load_in_8bit,backend.load_in_4bit,backend.bettertransformer,backend.torch_compile,backend.torch_compile_config.fullgraph,backend.torch_compile_config.dynamic,backend.torch_compile_config.backend,backend.torch_compile_config.mode,backend.torch_compile_config.options,backend.torch_compile_config.disable,backend.amp_autocast,backend.amp_dtype,backend.disable_grad,backend.eval_mode,forward.speedup(%),generate.speedup(%) -whisper_auto_opt(O4),onnxruntime,1.15.1,optimum_benchmark.backends.onnxruntime.ORTBackend,,,False,False,False,True,False,False,True,,CUDAExecutionProvider,1,True,False,False,1,True,False,True,False,False,False,False,True,False,False,False,False,True,False,False,False,False,True,True,O4,True,False,False,QOperator,IntegerOps,QUInt8,False,QInt8,True,False,False,"['MatMul', 'Add']",,False,False,glue,300,sst2,train,True,optimum_benchmark.preprocessors.glue.GluePreprocessor,True,inference,optimum_benchmark.benchmarks.inference.InferenceBenchmark,42,False,10,10,64,16,1,64,64,3,3,2,80,3000,16000,100,openai/whisper-base,cuda:1,automatic-speech-recognition,main,,False,False,False,1.11.1.dev0,4.32.0.dev0,0.22.0.dev0,0.20.0.dev0,3.9.17,Linux, AMD EPYC 7742 64-Core Processor,128,515637,0,0.0666,961.0,0.65,9850.0,,,,,,,,,,,,,,,36.699857752489336,48.567119155354455 -whisper_auto_opt(O3),onnxruntime,1.15.1,optimum_benchmark.backends.onnxruntime.ORTBackend,,,False,False,False,True,False,False,True,,CUDAExecutionProvider,1,True,False,False,1,True,False,True,False,False,False,False,True,False,False,False,False,True,False,False,False,False,True,True,O3,True,False,False,QOperator,IntegerOps,QUInt8,False,QInt8,True,False,False,"['MatMul', 'Add']",,False,False,glue,300,sst2,train,True,optimum_benchmark.preprocessors.glue.GluePreprocessor,True,inference,optimum_benchmark.benchmarks.inference.InferenceBenchmark,42,False,10,10,64,16,1,64,64,3,3,2,80,3000,16000,100,openai/whisper-base,cuda:1,automatic-speech-recognition,main,,False,False,False,1.11.1.dev0,4.32.0.dev0,0.22.0.dev0,0.20.0.dev0,3.9.17,Linux, AMD EPYC 7742 64-Core Processor,128,515637,0,0.169,379.0,0.621,10300.0,,,,,,,,,,,,,,,-46.08819345661451,55.35444947209653 
-whisper_auto_opt(O2),onnxruntime,1.15.1,optimum_benchmark.backends.onnxruntime.ORTBackend,,,False,False,False,True,False,False,True,,CUDAExecutionProvider,1,True,False,False,1,True,False,True,False,False,False,False,True,False,False,False,False,True,False,False,False,False,True,True,O2,True,False,False,QOperator,IntegerOps,QUInt8,False,QInt8,True,False,False,"['MatMul', 'Add']",,False,False,glue,300,sst2,train,True,optimum_benchmark.preprocessors.glue.GluePreprocessor,True,inference,optimum_benchmark.benchmarks.inference.InferenceBenchmark,42,False,10,10,64,16,1,64,64,3,3,2,80,3000,16000,100,openai/whisper-base,cuda:1,automatic-speech-recognition,main,,False,False,False,1.11.1.dev0,4.32.0.dev0,0.22.0.dev0,0.20.0.dev0,3.9.17,Linux, AMD EPYC 7742 64-Core Processor,128,515637,0,0.172,372.0,0.622,10300.0,,,,,,,,,,,,,,,-47.08392603129445,55.35444947209653 -whisper_auto_opt(None),onnxruntime,1.15.1,optimum_benchmark.backends.onnxruntime.ORTBackend,,,False,False,False,True,False,False,True,,CUDAExecutionProvider,1,True,False,False,1,True,False,True,False,False,False,False,True,False,False,False,False,True,False,False,False,False,True,True,,True,False,False,QOperator,IntegerOps,QUInt8,False,QInt8,True,False,False,"['MatMul', 'Add']",,False,False,glue,300,sst2,train,True,optimum_benchmark.preprocessors.glue.GluePreprocessor,True,inference,optimum_benchmark.benchmarks.inference.InferenceBenchmark,42,False,10,10,64,16,1,64,64,3,3,2,80,3000,16000,100,openai/whisper-base,cuda:1,automatic-speech-recognition,main,,False,False,False,1.11.1.dev0,4.32.0.dev0,0.22.0.dev0,0.20.0.dev0,3.9.17,Linux, AMD EPYC 7742 64-Core Processor,128,515637,0,0.325,197.0,0.796,8040.0,,,,,,,,,,,,,,,-71.97724039829303,21.266968325791847 -whisper_auto_opt(O1),onnxruntime,1.15.1,optimum_benchmark.backends.onnxruntime.ORTBackend,,,False,False,False,True,False,False,True,,CUDAExecutionProvider,1,True,False,False,1,True,False,True,False,False,False,False,True,False,False,False,False,True,False,False,False,False,True,True,O1,True,False,False,QOperator,IntegerOps,QUInt8,False,QInt8,True,False,False,"['MatMul', 'Add']",,False,False,glue,300,sst2,train,True,optimum_benchmark.preprocessors.glue.GluePreprocessor,True,inference,optimum_benchmark.benchmarks.inference.InferenceBenchmark,42,False,10,10,64,16,1,64,64,3,3,2,80,3000,16000,100,openai/whisper-base,cuda:1,automatic-speech-recognition,main,,False,False,False,1.11.1.dev0,4.32.0.dev0,0.22.0.dev0,0.20.0.dev0,3.9.17,Linux, AMD EPYC 7742 64-Core Processor,128,515637,0,0.325,197.0,0.791,8090.0,,,,,,,,,,,,,,,-71.97724039829303,22.021116138763208 -whisper_baseline,pytorch,2.0.1+cu117,optimum_benchmark.backends.pytorch.PyTorchBackend,,,False,False,False,,False,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,inference,optimum_benchmark.benchmarks.inference.InferenceBenchmark,42,False,10,10,64,16,1,64,64,3,3,2,80,3000,16000,100,openai/whisper-base,cuda:1,automatic-speech-recognition,main,,False,False,False,1.11.1.dev0,4.32.0.dev0,0.22.0.dev0,0.20.0.dev0,3.9.17,Linux, AMD EPYC 7742 64-Core Processor,128,515637,0,0.0911,703.0,0.966,6630.0,False,False,False,False,False,False,inductor,,,False,False,,True,True,0.0,0.0 diff --git a/examples/whisper/reports/cuda_64_100/rich_table.svg b/examples/whisper/reports/cuda_64_100/rich_table.svg deleted file mode 100644 index 0b7687e8..00000000 --- a/examples/whisper/reports/cuda_64_100/rich_table.svg +++ /dev/null @@ -1,129 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
cuda_64_100
Experiment Name        | backend.name | backend.auto_optimization | forward.latency(s) | forward.throughput(samples/s) | forward.speedup(%) | generate.latency(s) | generate.throughput(tokens/s) | generate.speedup(%)
whisper_auto_opt(O4)   | onnxruntime  | O4                        | 6.66e-02           | 961.00                        | 36.70              | 6.50e-01            | 9850.00                       | 48.57
whisper_auto_opt(O3)   | onnxruntime  | O3                        | 1.69e-01           | 379.00                        | -46.09             | 6.21e-01            | 10300.00                      | 55.35
whisper_auto_opt(O2)   | onnxruntime  | O2                        | 1.72e-01           | 372.00                        | -47.08             | 6.22e-01            | 10300.00                      | 55.35
whisper_auto_opt(None) | onnxruntime  |                           | 3.25e-01           | 197.00                        | -71.98             | 7.96e-01            | 8040.00                       | 21.27
whisper_auto_opt(O1)   | onnxruntime  | O1                        | 3.25e-01           | 197.00                        | -71.98             | 7.91e-01            | 8090.00                       | 22.02
whisper_baseline       | pytorch      |                           | 9.11e-02           | 703.00                        | 0.0                | 9.66e-01            | 6630.00                       | 0.0
diff --git a/examples/whisper/scripts/benchmark.sh b/examples/whisper/scripts/benchmark.sh
deleted file mode 100644
index 46613713..00000000
--- a/examples/whisper/scripts/benchmark.sh
+++ /dev/null
@@ -1,10 +0,0 @@
-if [ $1 = "cpu" ]; then
-    optimum-benchmark --config-dir configs --config-name whisper_baseline -m device=cpu
-    optimum-benchmark --config-dir configs --config-name whisper_auto_qnt -m device=cpu
-    optimum-benchmark --config-dir configs --config-name whisper_auto_opt+qnt -m device=cpu
-elif [ $1 = "cuda" ]; then
-    optimum-benchmark --config-dir configs --config-name whisper_baseline -m device=cuda
-    optimum-benchmark --config-dir configs --config-name whisper_auto_opt -m device=cuda
-else
-    echo "Invalid argument"
-fi
diff --git a/examples/whisper/scripts/report.sh b/examples/whisper/scripts/report.sh
deleted file mode 100644
index ca96a87f..00000000
--- a/examples/whisper/scripts/report.sh
+++ /dev/null
@@ -1,11 +0,0 @@
-if [ $1 = "cpu" ]; then
-    optimum-report -e experiments/cpu_onnxruntime_1_10 -b experiments/cpu_pytorch_8_100 -n cpu_1_10
-    optimum-report -e experiments/cpu_onnxruntime_1_100 -b experiments/cpu_pytorch_8_100 -n cpu_1_100
-elif [ $1 = "cuda" ]; then
-    optimum-report -e experiments/cuda_onnxruntime_64_10 -b experiments/cuda_pytorch_64_10 -n cuda_64_10
-    optimum-report -e experiments/cuda_onnxruntime_64_100 -b experiments/cuda_pytorch_64_100 -n cuda_64_100
-    optimum-report -e experiments/cuda_onnxruntime_128_10 -b experiments/cuda_pytorch_128_10 -n cuda_128_10
-    optimum-report -e experiments/cuda_onnxruntime_128_100 -b experiments/cuda_pytorch_128_100 -n cuda_128_100
-else
-    echo "Invalid argument"
-fi
\ No newline at end of file
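Both removed helper scripts dispatch on a single positional argument selecting the device family (cpu or cuda). The sketch below shows how they would be invoked end to end; running from examples/whisper/ and having the optimum-benchmark and optimum-report CLIs installed are assumptions inferred from the relative configs/ and experiments/ paths in the script bodies, not something the scripts check themselves.

```bash
#!/bin/bash
# Minimal usage sketch for the removed wrapper scripts (assumes cwd is examples/whisper/
# and that the optimum-benchmark / optimum-report entry points are on PATH).
set -e

bash scripts/benchmark.sh cuda   # runs the whisper_baseline and whisper_auto_opt sweeps on GPU
bash scripts/report.sh cuda      # builds the cuda_64_* and cuda_128_* comparison reports
```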

zr_hn_uV!5`;x4>KG6jV($mi<|I7R5ClFIFL)tgM*bFI%R>UXBp;F1R zT^=R~`|A|7LQ0B!mBxZ* zG_(+yyBV-wJbTt7*IPTizk6f73-iF!R+&mf4^s~W1b1uRy2Z0qj^u1MVoi479F1VpBFYt zl6KV@aK`G3^Akif3yu~f)VU*Vg#11~!t1spia1`CStI@Nt6`K}A^c)ue}#nb7m@^- z(?eyTasdJXa=d>zl>Oh8i)c&^BB7E(jgOBrGBAA3&ApC^37|kH>x1qeKQe(6mS0IA zqR0!BX&_#OZ*{!_eftt-UY;F=E!FzW-9j5PUYyqA|5dkK`Mzx`I*7NP{hzu;w9i-b z_mz3zzHqb}`!ZzE3MIjt4li+JmVS(eNKar9H(Sj<&Gm4Xg|mn^`co&A#JPfTPZhH~ z;|804`~QZcxm20WGvD!Ft%+Jn}Q1a?do~3ed7&Dy*0fjmt+4HX95=U@%GEWHqxEm|w42|7lUwiII zIfCpa;AA}QVBrWksv2Q;d-1x`-=%Qb_O8r9+v~0D^Y#qQ%N}tj9DwywJQ4i4>;^j} zWqy2%Oe9p|iINh&kdRPj|MFGg4}{{6)0ASxX`0qpaGr35 z=b^#eHGa@yqE?3rXXIIF9;i$udwr^~Bpt4^^i?_=Fj7N1cMc(aH@b=WyFAXco9{mo zq)0mGM;Y^xIrFkjtzWrbl|%WyRARDF5l%zDsu_hS*m3HIlcFjTzs=jnYZF1o$MDE~ zh|L-i@`Z#oS2)?IsLdy?{ue2Z&WstQ3R#e{UNmm8=6VD5uVNRrREf1qh( z^tI5MX8DelHOq781J4h3GS;|17k+8%Cv$O*oT0T27JwB$3GVz3qRpeyUvi!I^XFf} zhScByehN9at!);Afckcd17-l9u*Ep*!!eaPH&iWL)poexkCEG@zup zYN4kGC-NDbYqN8=)x`Ct+YHrzuso>AJ2-k2FPT>SAFvJ^+?WO1fc53Fl2>MH`@wHH z3P|TyZ%`sB@^+~W&v&~*vmrbv>OcKQ)lH(&Rc`QC&#ll=~0BXA1efv&2|wrK-WmYkkmgJWLN{rJ27*jAD?Jak*3 z#jRo+9VI}u&t#bZy&Sfqi{uzDnH`jtgQOMkwApnwRuIgYY;V(`K*yBtZEM9;P)XCqAB4e z0FOqPwR#gBD2*xLFT=BTS>R>Q9i(e|o z%`kFJVvp|SUd&w0Nny3y-qs84sY$RTg%H-%gkiS2qP=ti*U3Q!16D!b;(oq>TOl|1{c(#l7Zn6N1uNn{7GjyJ3=PwxJF$IY6&T4K?Dcx)j} zfWP~e42R|Hc={%3kjH2#U1&jN%h&lLCccj)VC@liqV5(of6jt{rnaN5&W}c7*pM`R zdXjx3zbZvq8lN@1!kzbdj(TcmCx*z{^qZ3>G`1k-TJc7~67+zo^JwXiNR2{%b|~#Z zQ+Yrt&fC!|ND)emHeaC?1TS*OsSW#Ywabq4=~i1|aj_vVdCLS2mNJrSdq)Gw0Br@O z+6Sm8pipTn$*AI{rbf1)!{&=V6V0_e5Iet9NteqBI{J=xc40l#GLWoZe0D)`V%>eh~O*=dG{`7 zvo2>8nw}jAystY#xuByD`K-!u5-gZ4AX4H{Q7Hg&;=GXPM8ft$#34YFKEl}O!``@) zQLcs$H#4B&dCd85uS^M9KX42s2hrm4M|JI1*0yjBB9EXaTe1Fja1dT8%WH$zs|mB& zPA2@|>JO9?wK0ZL#IKcFkDEU#Ey_Oa94fP+f-4z-wS>TCJqFS0TJNx6NW~t?K zf~f&%@r7R>L9}aBFzKz7%W8?>S{^@Nj#0W|-N)S-4HC^zJ``kmtNJ^7$oXz;qmqY1 zrRL>vG8ay8ivZ}Tuy`QmkrHy+yOJ`afxUncphUE^w9kMYw_buPbXc!ZhPI2HoSZyq zm3RW3sopv-FJ0%U%P3v1GY62Oh^nX%0)sS5aVC3>F!6o$q$ka5ICc)!c%U+W>C;9t zW4APj8yR|}TCCf_@D~#KQ(M}PGNgX~TUT`7Uk}4F8q)DOtmq&tfHl;q53v)dRfwet zpjNi?KS|*+rQF>GK-&Xop5a(Z!vC_|fH8x0jjsn5dMkd+E453;%{P|t>IE&Xkr8$y{7W;JR*$ILrz%K zj(g)>>njjysjUOx2n2TEHsqwR1a8Cdh9h8tZEkJp!%4ooF*!+6#*{z%;l z(N>|E1ntWX(6!W34MB8b)iG~6vH|GB9-$fn>QfcdA+ zPZ-Lt7AU_Oi-0>W0MjcbJx}t^->9h(5))6)&Nf>V)U<+if2@E@5xX@;JxKXY4>v&S z{r|Sc`1E_7(`kJyK3HxU9eQ5^jv-KnK&SX}{#v5}g*H`~-N*79q;Fc=e>W?VsVg~B zTfEmsVu#(*k^dIpLjGmfrW-b0FIC2yu8<+eiaa!9?CtFz=AP^v9MFRm1TnKF>O)eJ zb%>TqM^{%U7+Mh!5GYTrfYl4wWk!(8NJ$;FaUr|x0gnS6r*#O+tcm#TrEKPEJrK<9!uC_M?pG#}Pd zhfJUxA&|=_@)bfo9>-Wzlsyk4>J2)w8Ph5Q1#+}3jOG(e35Lqm;W@CQ-tM8#`U z;8?YCb_*Dii~^eo%w1~d9IULY3Sq*4NoIcvf?cTQw4ctCcjVuzgg&136(Mi9F-N&L z=#0!L68Z__7eA-)2-<7Q|De1(NN5uPo+gr)I6g-T%a=M7mFvKbWwJ{geLemA7WYS0 zsp=4%G(S2!XMlN_s+cMvQSh%ZMEc0~PD4Y3cEu|{0P9nKza-%@Cq#i+2pEor!HiaP zHjiU_WJb@)MqM}X^Si;Ei`V^-4dxkem}_(8J>rgniaz{oCX7M+c)o>n_vbjfxxEIcC>koXJuKxU>pNXSH5}<@b~t1Co2rZ zq=99pPVa_ryw8(rj2>u7fZ340-Ka^|CJ9wgr#D(=*Vj-Pso3{xreIb7=;J^0l=zvF z5+%^wQ_(cOT^#En2$kIeqzdn1gu>?Xfs>=7V}g*!agHRg^eSaA70}5+n34i{~ zoTM(NmK?6-{5e#Z#c_Zx1e!*^Y=Yrda5ZMF(pl)sn7rE1r&DmLxjk?vKGh~+2`c?g9<^tKd=6qSm-fx2H3oFwM~W;D$; za@B=VjJil1YgMhlYSzwHkZV}hTp=?|R43Pd+Gz-91e{rPycs{(Ci=Q#Z{B=--J0Ax zT6xXp8?wk_ieZt5!o>MDS~eP*)a){LnHx8!G>(pzoHjFGGNw!WMHaBa|KXw) zm;yv{io~b8EwWAJ@o!5!gL#llh-JI;4RZf7D!gb3oTfLxaH%!0Wg0+OcfF*n0(7;k z4tk%#Pxn9$G|;i<1+91l(t&OcL?Ob+X^sr6zjDAF(5h@}f z7RUdtF)zA2^e9Hb8QZ1NTMN21Oyk(iecSIkhnzWLRRdVJ+{W~JkA69-UAH8t2lD60 zm5TNYTv#?@6#5ZNFBBb^La)Fpm8J0a`M-Td0Qoq@_h6Nbyt;m4#{gFJndYIssSf`q z6?he*W3^28y!jG;Y1jp7PD2aBB8A|&2kpmq>esH%P$%6$Qg)3h5p>{ZlYD^(rOxhP 
za6@}nnU|^3S>Q!i3<1y0uotI<;XOCmy3Ja5VBnp*Z^n&-kwcx0;1f!d0h$!xd7&CW z05)>>XNjXpbyxc8-`<>hJT};Y3;bC3??^!w+%akF6^NwP)mdnK&p%8{fY&n5d9UxX9v^?p{SMkB8~ zc2>Ko9OT>Ja%Bh!?ib+J|2t>!(&?~2lQQ%#4|&o>!a84l6fQkn!mCSup9{WM_PYWf z^7VCJ8vXkM@SdV;WF*RWAkhyhxp{NXApL5&4{&y2kPh*o)G_~an_@1&CBO$Ly`lU% zSZx7_llml6(obO(q1axu5g}D26qilqWyQ=*>)CB@r+gmnJLI?b{QE-M;3C)l58%Nu zR64Rbh6hLX@b3*)vQ(tMA5&=LaxuB`>=?r;041$qnqvI24D$416nC*d2U-1xTvl$U zJ_o$kByCS$p)QZ3871PZiWdJt%}eD-G0|YPGBx#5xfl&lcD$5d;g@=RfgR6FE+(EKQP7BO)cKh#x^qSKaK@D$wDI#0u9x#pph-G|0{tohyc&hiXfxe`e15vHFPapX|jp#oJzacl{ z>-X^z5_j+#8=s;>lNrtceOk1CPx_7R6AC$LMKxmhqEX;45Dm?|nbp;hurS;w{@15~ zfL!L@4f9)`fu8DoXt;gf9QnX+vJNb0W1wDAPM4jk_rb6nF7o^HN9lNfO~%vnoOyC# zdRkOT>C;ptCLmT^Urd%;|PbLba^iLq6I~aSdkhY-jzyhs{Du)*@3 z^*)66U2aF?w!ccv#qlMF2^-sR<4sfr?}gl}f# zvNrl7;|@Z#T#W{J!ocEUPMGR}0(V3M8yj2^BO_WEL1hT#9jbbN#KL~LF9-;!MMXu< zU~gIOymc2;GMVkQH;}u>QqFpf{TDJ2v~uVfcZRa+R>w$Ex?J*xesAA$ok`jMvC#|g ztOb+hzo*wsmA~cF_cfcP^xs-yBYww>j^I4Lcuo@dBL6ONjRqS*W&Xb{K@uqxa!gP^ z2%+`<_c6r(zx|kHOZ4&=4d|kxqA(sX6~r3g%C;G`PyoY}V2Gdr2x?$h5TZ~pXF#fL zWOvEac~5#AP=G7ai~ZGMWP%;&M0*PKgrVITD1j`> zX_JbI3fL&;D+m1*mwGxouK^e>EiGNK6%j!QTsbE9Fn}fM_^4h$%Mq&aCfnTJr5`_j z)c5uhL`tjr6?}fh%gYPLkloUsd(TZ@QqP)NyZp}|^J&w3hO2OaOP4N<{XSXIMt=#e zd60Gk{9p*9CB21s;U{wV%0MuPn0vzC1a^LMLKM;}?SaxudgH%4y@DOM>)HNZT5!(H zQej`C5q{nIKYx_Rg?IQ}ww-j~8PQCB#x%f)h@im0&%g+%b5bVUYU0Zf?;MO`v9lwTcg>O=i()WP~M;55=M8=)U=HsOXxMYRGS5qKTx zplh@o`QH9)@#BXN-kN9XKCh!$bTkSLE+Z~nF5ri2)|Yt`WJ{@_=T1MyN7fIuY&xt! z6!L)f9Zq2K$rBn1)VrHB5-P7%RN>h=A(q(0xf0LIc+N*j{`XG?%jwS*Bbt~0UZCB= zvi5&}ROI@fxb*)#6iVuW*3$~{1{(`YBuxn!N1_}+n>`FK8)iNW!n83kx@9x^fd{j_ zEtbED;UE-m;6Ki}hF{$QC5X#hq&gN34p`VH0c~mfc%@)+Yd|Muzap`F0~+p}0NiTp z=-AoYe}xgLfLisrf8+UZ=^HU9`7%Jh_dw%YeczWrvLa{zr_07R`e0xp+Mu+wwB5DQ zXwV)i2he2?Bhn%0(Y_q7EXjIuvwtuQp&(HN&IN zbwF7!t@(* zecpY2DuAar!^?C5F(?9)4t+Nx0R0#oj1;iC$G>iY>y&Sh9H{)jp%P(AC_OPFY@Vmi$^rD*vGN%hFMvpf2-8r= z;RXv~3qWUS`1pJAd=5--))63X(LO+xy&=mY{c7)0SKMG<1!U2%pHch{J_3E>nPS%pamk<5v zpEOnv4-NHz>3+PXJDf7_!CRC-eD9VN%EG6*TEM`-;CdU(x&;mo6ic|vg9SE?Kg1kj zIgGE~SIhkiFSHprCx9=aU>g*cfpW;caR?wv8_mh3#l^skjEvlqhlW6LorSma6%K90 zJu_7@Y#UY<;wla6KV&vps7?<6T}sbGeSpO%U3V87;C5lGHwM4c`a_+bLB_^eaCZHI z;yEJ~_N>l&LeT#;C7I{*1KsMOzAr&!!0mK0hEps3#{bpawFWhPXVD<80xF;o%R}B+ zLU;&qcoh*w1SD>?;gLW=u*0JYD+r3b7Ghf_xDnK?U@5v0g5kYnXGIW^M@vDLVyz*< z6e1SVND;B9KoCN*=Qle$v$Nm!+wzGIndJYtzkAO;_ug{@BWj?rMgC`kMW*`t`iiiG zXo#zRYb8R@qqJJrj+|WdXoTr**%FSfpfZF!8vN4UW4&Cvf`6ll#s9{-|LIfyH=rTa z1KPqKq0J#7Au^eaZqcLYfTbd8s;wARox3`N&^JBA@Uq&O=(43tUaioZBFzK*u;)u;UH1l*9 z7ZEyq$q=GP>gdnE{dN>oow-*Uq+z#Ea8kkKpVX!vkeP<32Rj$Jak2N6v&q0Rc6JVs z-Rsn?xW^Sj8Z$Y$mT^*I-~avhhcnKdW5Xt@aqG>prBQLb7{ZFQwzf7t%!>iv4HO~* zQ{#1NyoJX9J9-|my)=?#9ORR~+`NHScl4%f_CpjB0!VdlY6W#lCT^38_tF30L5L~7 z^~-D5*t26f1tTlN!^4?!^ruD;kK6YeEO9wnx9H$e_{GJ=nVNN_le@b+Z!Eo}2fxT0 z>sGxvEJE)gzqGV;dS<2$Q^O+J8rgJ}gb>c}>gw9Db7!q;MSi(_ARj2|cOOm5`14Fk)ev+*46pWhIp^3>LnE7B-)?rj${gS&G3AVPjD{yr~rQEjmG>I4jGznhF@TnoJC$(L!jV~ucg}(}1OQj{No^(%&)0Q=)A@g0veb(NM&PQnE~~L zQ4|0!)Jf&4QSV%QCpyxE?51)IKh#K~s6i}=L}DD^lQCT+qV(0VK%kCm!k*xd8$sc1<~Z z#YIIO0-&HEGRiUi)fCoj7jo&BcQZFaI^>>GWY} zkvMf>v2Q01DZ0ul(5t}6dJ%yiso0mxpb*J4s2{2jMX{sYT%g)*Zy)jTAO2ifi8294 zoni3_T<j5L`y z+8%!$zvDzf)*ArRSnLA6@1C7;EUGUgI54HCrbhl@_?H?sr~s&Xy_9vltsQG=&u|ZR zhYGC~lvoq7yqnw|)roDgqjfHW2y15pY}G1P*VdlB8LzIc=0(M{p7@4rr@fxuee$lF z;7EP_=hztR0B`!2BOzF`iWAYWSlPbj0gw5WFS{XvxA8UuxRQJw;NBa8uaHtrLX?7O z)Qd=ovf_8>_8GbrdUdNjH#Zlr4T%mcQ!yFNvUP!s0J(t|C3xb{c_@J@8yr1VFmf*| zm6U`qtwI0e(}98KZKJKFrh9pswH{4^w4Q|$^k?Wjy>?MvY*lD&Z@Z-1r#Q@KD~LUU>mL!JyU-}vm0D2OO%rqBno>B$_42hXKF4e zC}^wYz6t^!5U|Vg=UX~oVHgYTC@CxBXl1htvOaLZj0KVeIaY^1H1Rbr%rI$Dix9Qd 
diff --git a/examples/training-llamas/configs/_base_.yaml b/examples/training-llamas/configs/_base_.yaml
deleted file mode 100644
index 36efa0f3..00000000
--- a/examples/training-llamas/configs/_base_.yaml
+++ /dev/null
@@ -1,41 +0,0 @@
-defaults:
-  - backend: pytorch # default backend
-  - launcher: inline # default launcher
-  - benchmark: training # default benchmark
-  - experiment # inheriting from experiment config
-  - _self_ # for hydra 1.1 compatibility
-  - override hydra/job_logging: colorlog # colorful logging
-  - override hydra/hydra_logging: colorlog # colorful logging
-
-experiment_name: llama-experiment
-model: llama-2-model
-device: cuda
-
-backend:
-  no_weights: true
-  torch_dtype: float16
-
-benchmark:
-  warmup_steps: 40
-  dataset_shapes:
-    dataset_size: 160
-    sequence_length: 256
-  training_arguments:
-    max_steps: 140
-    per_device_train_batch_size: 1
-
-hydra:
-  run:
-    dir: experiments/${oc.env:HOSTNAME}/${model}/${experiment_name}
-  sweep:
-    dir: experiments/${oc.env:HOSTNAME}/${model}/${experiment_name}
-    subdir: ${benchmark.training_arguments.per_device_train_batch_size}
-  job:
-    chdir: true
-    env_set:
-      CUDA_VISIBLE_DEVICES: 0
-      CUDA_DEVICE_ORDER: PCI_BUS_ID
-  sweeper:
-    params:
-      benchmark.training_arguments.per_device_train_batch_size: 1,2,4,8,16,32,64,128
-      model: NousResearch/Llama-2-7b-hf,NousResearch/Llama-2-13b-hf
diff --git a/examples/training-llamas/configs/fp16+dp=2.yaml b/examples/training-llamas/configs/fp16+dp=2.yaml
deleted file mode 100644
index 60043450..00000000
--- a/examples/training-llamas/configs/fp16+dp=2.yaml
+++ /dev/null
@@ -1,15 +0,0 @@
-defaults:
-  - _base_
-  - _self_
-  - override launcher: torchrun
-
-experiment_name: fp16+dp=2
-
-launcher:
-  nproc_per_node: 2
-  rdzv_endpoint: localhost:29500
-
-hydra:
-  job:
-    env_set:
-      CUDA_VISIBLE_DEVICES: 0,1
diff --git a/examples/training-llamas/configs/fp16+fsdp=2.yaml b/examples/training-llamas/configs/fp16+fsdp=2.yaml
deleted file mode 100644
index bb57da37..00000000
--- a/examples/training-llamas/configs/fp16+fsdp=2.yaml
+++ /dev/null
@@ -1,19 +0,0 @@
-defaults:
-  - _base_
-  - _self_
-  - override launcher: torchrun
-
-experiment_name: fp16+fsdp=2
-
-launcher:
-  nproc_per_node: 2
-  rdzv_endpoint: localhost:29522
-
-benchmark:
-  training_arguments:
-    fsdp: full_shard
-
-hydra:
-  job:
-    env_set:
-      CUDA_VISIBLE_DEVICES: 0,1
diff --git a/examples/training-llamas/configs/fp16+peft+bnb-4bit+dp=2.yaml b/examples/training-llamas/configs/fp16+peft+bnb-4bit+dp=2.yaml
deleted file mode 100644
index 6e3ca8bd..00000000
--- a/examples/training-llamas/configs/fp16+peft+bnb-4bit+dp=2.yaml
+++ /dev/null
@@ -1,24 +0,0 @@
-defaults:
-  - _base_
-  - _self_
-  - override launcher: torchrun
-
-experiment_name: fp16+peft+bnb-4bit+dp=2
-
-launcher:
-  nproc_per_node: 2
-  rdzv_endpoint: localhost:29511
-
-backend:
-  quantization_scheme: bnb
-  quantization_config:
-    load_in_4bit: true
-    bnb_4bit_compute_dtype: float16
-  peft_strategy: lora
-  peft_config:
-    task_type: CAUSAL_LM
-
-hydra:
-  job:
-    env_set:
-      CUDA_VISIBLE_DEVICES: 0,1
diff --git a/examples/training-llamas/configs/fp16+peft+bnb-4bit.yaml b/examples/training-llamas/configs/fp16+peft+bnb-4bit.yaml
deleted file mode 100644
index 4440d565..00000000
--- a/examples/training-llamas/configs/fp16+peft+bnb-4bit.yaml
+++ /dev/null
@@ -1,15 +0,0 @@
-defaults:
-  - _base_ # we can extend fp16+peft as a base here
-  - _self_
-  - override launcher: process
-
-experiment_name: fp16+peft+bnb-4bit
-
-backend:
-  quantization_scheme: bnb
-  quantization_config:
-    load_in_4bit: true
-    bnb_4bit_compute_dtype: float16
-  peft_strategy: lora
-  peft_config:
-    task_type: CAUSAL_LM
diff --git a/examples/training-llamas/configs/fp16+peft+dp=2+zero3.yaml b/examples/training-llamas/configs/fp16+peft+dp=2+zero3.yaml
deleted file mode 100644
index 8182c413..00000000
--- a/examples/training-llamas/configs/fp16+peft+dp=2+zero3.yaml
+++ /dev/null
@@ -1,28 +0,0 @@
-defaults:
-  - _base_
-  - _self_
-  - override launcher: torchrun
-
-experiment_name: fp16+peft+dp=2
-
-launcher:
-  nproc_per_node: 2
-  rdzv_endpoint: localhost:29511
-
-backend:
-  peft_strategy: lora
-  peft_config:
-    task_type: CAUSAL_LM
-
-benchmark:
-  training_arguments:
-    deepspeed:
-      train_batch_size: auto
-      train_micro_batch_size_per_gpu: auto
-      zero_optimization:
-        stage: 3
-
-hydra:
-  job:
-    env_set:
-      CUDA_VISIBLE_DEVICES: 0,1
diff --git a/examples/training-llamas/configs/fp16+peft+dp=2.yaml b/examples/training-llamas/configs/fp16+peft+dp=2.yaml
deleted file mode 100644
index b6c19980..00000000
--- a/examples/training-llamas/configs/fp16+peft+dp=2.yaml
+++ /dev/null
@@ -1,20 +0,0 @@
-defaults:
-  - _base_
-  - _self_
-  - override launcher: torchrun
-
-experiment_name: fp16+peft+dp=2
-
-launcher:
-  nproc_per_node: 2
-  rdzv_endpoint: localhost:29511
-
-backend:
-  peft_strategy: lora
-  peft_config:
-    task_type: CAUSAL_LM
-
-hydra:
-  job:
-    env_set:
-      CUDA_VISIBLE_DEVICES: 0,1
diff --git a/examples/training-llamas/configs/fp16+peft+gptq-4bit+dp=2.yaml b/examples/training-llamas/configs/fp16+peft+gptq-4bit+dp=2.yaml
deleted file mode 100644
index fd7c3c7e..00000000
--- a/examples/training-llamas/configs/fp16+peft+gptq-4bit+dp=2.yaml
+++ /dev/null
@@ -1,16 +0,0 @@
-defaults:
-  - _base_ # we can extend fp16+peft as a base here
-  - _self_
-  - override launcher: process
-
-experiment_name: fp16+peft+gptq-4bit
-
-backend:
-  no_weights: true
-  quantization_scheme: gptq
-  quantization_config:
-    bits: 4
-    disable_exllama: true
-  peft_strategy: lora
-  peft_config:
-    task_type: CAUSAL_LM
diff --git a/examples/training-llamas/configs/fp16+peft+gptq-4bit.yaml b/examples/training-llamas/configs/fp16+peft+gptq-4bit.yaml
deleted file mode 100644
index fd7c3c7e..00000000
--- a/examples/training-llamas/configs/fp16+peft+gptq-4bit.yaml
+++ /dev/null
@@ -1,16 +0,0 @@
-defaults:
-  - _base_ # we can extend fp16+peft as a base here
-  - _self_
-  - override launcher: process
-
-experiment_name: fp16+peft+gptq-4bit
-
-backend:
-  no_weights: true
-  quantization_scheme: gptq
-  quantization_config:
-    bits: 4
-    disable_exllama: true
-  peft_strategy: lora
-  peft_config:
-    task_type: CAUSAL_LM
diff --git a/examples/training-llamas/configs/fp16+peft.yaml b/examples/training-llamas/configs/fp16+peft.yaml
deleted file mode 100644
index 1a52b6e2..00000000
--- a/examples/training-llamas/configs/fp16+peft.yaml
+++ /dev/null
@@ -1,11 +0,0 @@
-defaults:
-  - _base_ # we can also extend fp16 as a base here
-  - _self_
-  - override launcher: process
-
-experiment_name: fp16+peft
-
-backend:
-  peft_strategy: lora
-  peft_config:
-    task_type: CAUSAL_LM
diff --git a/examples/training-llamas/configs/fp16.yaml b/examples/training-llamas/configs/fp16.yaml
deleted file mode 100644
index 432d986f..00000000
--- a/examples/training-llamas/configs/fp16.yaml
+++ /dev/null
@@ -1,6 +0,0 @@
-defaults:
-  - _base_
-  - _self_
-  - override launcher: process
-
-experiment_name: fp16
diff --git a/examples/training-llamas/report.py b/examples/training-llamas/report.py
deleted file mode 100644
index 39220499..00000000
--- a/examples/training-llamas/report.py
+++ /dev/null
@@ -1,242 +0,0 @@
-from argparse import ArgumentParser
-from pathlib import Path
-from typing import List
-
-import matplotlib.pyplot as plt
-import numpy as np
-import pandas as pd
-from flatten_dict import flatten
-from omegaconf import OmegaConf
-from pandas import DataFrame
-
-
-def gather_full_report(root_folders: List[Path], report_folder: str = "artifacts") -> DataFrame:
-    # key is path to inference file as string, value is dataframe
-
-    hydra_dfs = {}
-    config_dfs = {}
-    inference_dfs = {}
-
-    for root_folder in root_folders:
-        inference_dfs.update(
-            {f.parent.absolute().as_posix(): pd.read_csv(f) for f in root_folder.glob("**/training_results.csv")}
-        )
-        config_dfs.update(
-            {
-                f.parent.absolute()
-                .as_posix(): pd.DataFrame.from_dict(flatten(OmegaConf.load(f), reducer="dot"), orient="index")
-                .T
-                for f in root_folder.glob("**/hydra_config.yaml")
-                if f.parent.absolute().as_posix() in inference_dfs.keys()
-            }
-        )
-        hydra_dfs.update(
-            {
-                f.parent.parent.absolute()
-                .as_posix(): pd.DataFrame.from_dict(
-                    flatten(
-                        OmegaConf.to_container(OmegaConf.load(f), resolve=False),
-                        reducer="dot",
-                    ),
-                    orient="index",
-                )
-                .T
-                for f in root_folder.glob("**/.hydra/hydra.yaml")
-                if f.parent.parent.absolute().as_posix() in inference_dfs.keys()
-            }
-        )
-
-    if len(inference_dfs) == 0 or len(config_dfs) == 0:
-        raise ValueError(f"No results found in {root_folder}")
-
-    # Merge inference and config dataframes
-    inference_reports = [
-        config_dfs[name]
-        .merge(inference_dfs[name], left_index=True, right_index=True)
-        .merge(hydra_dfs[name], left_index=True, right_index=True)
-        for name in inference_dfs.keys()
-    ]
-
-    # Concatenate all reports
-    inference_report = pd.concat(inference_reports, axis=0, ignore_index=True)
-    inference_report.to_csv(f"{report_folder}/full_report.csv")
-
-    return inference_report
-
-
-def get_short_report(full_report, report_folder: str = "artifacts"):
-    short_columns = {
-        "model": "Model",
-        "environment.gpus": "GPUs",
-        "experiment_name": "Experiment Name",
-        "launcher.name": "Launcher",
-        "launcher.nproc_per_node": "Processes per Node",
-        "benchmark.dataset_shapes.sequence_length": "Sequence Length",
-        "benchmark.training_arguments.per_device_train_batch_size": "Per Process Batch Size",
-        #
-        "training.throughput(samples/s)": "Training Throughput (samples/s)",
-    }
-    short_report = full_report[list(short_columns.keys())].rename(columns=short_columns)
-
-    short_report["GPU Name"] = short_report["GPUs"].str[0]
-    short_report["Num Processes"] = short_report[["Launcher", "Processes per Node"]].apply(
-        lambda x: x["Processes per Node"] if x["Launcher"] == "torchrun" else 1,
-        axis=1,
-    )
-
-    short_report["GPU Name"].replace("NVIDIA A100-SXM4-80GB", "1xA100", inplace=True)
-    short_report["GPU Name"].replace("AMD INSTINCT MI250 (MCM) OAM AC MBA", "1xMI250", inplace=True)
-    short_report["GPU Name"] = short_report[["GPU Name", "Num Processes"]].apply(
-        lambda x: "1xGCD-MI250" if x["GPU Name"] == "1xMI250" and x["Num Processes"] == 1 else x["GPU Name"],
-        axis=1,
-    )
short_report["Effective Batch Size"] = short_report["Per Process Batch Size"] * short_report["Num Processes"] - short_report["Group"] = short_report["GPU Name"] + "-" + short_report["Experiment Name"] - short_report.to_csv(f"{report_folder}/short_report.csv") - - return short_report - - -def get_batch_plots(short_report, report_folder, plot="bar"): - fig1, ax1 = plt.subplots() - - batch_column = "Effective Batch Size" - short_report = short_report.sort_values(by="Group", ascending=True) - groups = short_report["Group"].unique().tolist() - x = np.arange( - short_report[batch_column].min() - 1, - len(short_report[batch_column].unique()) + (short_report[batch_column].min() - 1), - ) - width = 0.8 / len(short_report["Group"].unique().tolist()) - offset = -(width * (len(groups) - 1) / 2) - - for group in groups: - mask = short_report["Group"] == group - group_report = short_report[mask].sort_values(by=batch_column) - x_ = np.arange( - group_report[batch_column].min() - 1, - len(group_report[batch_column].unique()) + (group_report[batch_column].min() - 1), - ) - if plot == "bar": - ax1.bar( - x_ + offset, - group_report["Training Throughput (samples/s)"], - label=group, - width=width, - ) - offset += width - elif plot == "line": - ax1.plot( - x_, - group_report["Training Throughput (samples/s)"], - label=group, - marker="o", - ) - - ax1.set_xticks(x) - ax1.set_ylim(bottom=0) - ax1.set_xticklabels(short_report[batch_column].sort_values().unique().tolist()) - ax1.set_xlabel(batch_column) - ax1.set_ylabel("Training Throughput (samples/s)") - ax1.set_title(f"Training Throughput per Batch Size ({short_report['Model'].unique()[0]})") - ax1.legend(fancybox=True, shadow=True) - - legend = plt.legend(loc="upper center") - legend.get_frame().set_facecolor((0, 0, 1, 0.1)) - legend.get_frame().set_alpha(None) - plt.tight_layout() - - fig1.savefig(f"{report_folder}/training_throughput_{plot}_plot.png") - - return fig1 - - -def get_peak_trainong_throughput_plot(short_report, report_folder): - # a bar plot with one bar per group, representing the max attainable throughput in tokens/s - fig, ax = plt.subplots() - - max_training_throughput = short_report.groupby("Group")["Training Throughput (samples/s)"].max().reset_index() - max_training_throughput = ( - short_report.merge(max_training_throughput, on=["Group", "Training Throughput (samples/s)"]) - .sort_values(by="Training Throughput (samples/s)", ascending=True) - .reset_index() - ) - - ax.bar( - max_training_throughput["Group"], - max_training_throughput["Training Throughput (samples/s)"], - color=plt.cm.Paired(np.arange(len(max_training_throughput))), - ) - - for i, v in enumerate(max_training_throughput["Effective Batch Size"]): - ax.text( - i, - max_training_throughput["Training Throughput (samples/s)"].iloc[i], - f"bs={v}", - ha="center", - va="bottom", - ) - - ax.set_xlabel("Group") - ax.set_ylabel("Peak Training Throughput (samples/s)") - ax.set_title(f"Peak Training Throughput ({short_report['Model'].unique()[0]})") - ax.set_ylim(top=max_training_throughput["Training Throughput (samples/s)"].max() * 1.1) - - plt.xticks(rotation=45, ha="right") - plt.tight_layout() - - fig.savefig(f"{report_folder}/peak_training_throughput.png") - - return fig - - -def generate_report(): - parser = ArgumentParser() - parser.add_argument( - "--experiments-folders", - "-e", - type=Path, - nargs="+", - required=True, - help="The folder containing the results of experiments.", - ) - parser.add_argument( - "--report-name", - "-r", - type=str, - required=False, - 
default="artifacts", - help="The name of the report.", - ) - - args = parser.parse_args() - report_folder = args.report_name - experiments_folders = args.experiments_folders - - Path(report_folder).mkdir(parents=True, exist_ok=True) - - # gather experiments results - full_report = gather_full_report( - root_folders=experiments_folders, - report_folder=report_folder, - ) - short_report = get_short_report( - full_report, - report_folder=report_folder, - ) - for plot in ["bar", "line"]: - _ = get_batch_plots( - short_report, - report_folder, - plot=plot, - ) - - _ = get_peak_trainong_throughput_plot( - short_report, - report_folder, - ) - print("Report generated successfully!") - - -if __name__ == "__main__": - generate_report() diff --git a/examples/whisper/README.md b/examples/whisper/README.md deleted file mode 100644 index 8adc3fd2..00000000 --- a/examples/whisper/README.md +++ /dev/null @@ -1,72 +0,0 @@ -# Optimum-Benchmark x Whisper - -A set of benchmarks on OpenAI's Whisper model, using Optimum-Benchmark. - -With Optimum-Benchmark, once you clone the repo and install it, run: - -```bash -sh benchmark.sh ${device} -sh report.sh ${device} -``` - -Where `${device}` is either `cpu` or `cuda`. - -## Metrics - -For this benchmark I tried to compare `whisper-base` model's throughputs (forward and generate). - -Forward throughput is measured in `samples/second` with the formula `number_processed_samples / total_time`. -Where `number_processed_samples = batch_size * number_forward_passes` is the number of samples processed by the model in `total_time`. - -Generate throughput is measured in `tokens/second` with the formula `number_generated_tokens / total_time`. -Where `number_generated_tokens = batch_size * num_tokens * number_generate_passes` is the number of tokens generated by the model in `total_time`. - -## Search Space - -To be exhaustive, I benchmarked different auto optimization configurations supported by Optimum on GPU & CPU and auto quantization configurations on CPU only. - -I also added `benchmark.batch_size=64,128 benchmark.new_tokens=10,100` to compare behavior across different batch sizes and number of generated tokens. - -## GPU Results (NVIDIA A100) - -### Batch Size = 64, New Tokens = 10 - -